path: root/pc/scenario_tests/goog_cc_test.cc
/*
 *  Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "api/stats/rtc_stats_collector_callback.h"
#include "api/stats/rtcstats_objects.h"
#include "pc/test/mock_peer_connection_observers.h"
#include "test/field_trial.h"
#include "test/gtest.h"
#include "test/peer_scenario/peer_scenario.h"
#include "test/peer_scenario/peer_scenario_client.h"

namespace webrtc {
namespace test {

// TODO(terelius): Use fake encoder and enable on Android once
// https://bugs.chromium.org/p/webrtc/issues/detail?id=11408 is fixed.
#if defined(WEBRTC_ANDROID)
#define MAYBE_NoBweChangeFromVideoUnmute DISABLED_NoBweChangeFromVideoUnmute
#else
#define MAYBE_NoBweChangeFromVideoUnmute NoBweChangeFromVideoUnmute
#endif
TEST(GoogCcPeerScenarioTest, MAYBE_NoBweChangeFromVideoUnmute) {
  // If transport-wide sequence numbers are used for audio, and the call
  // switches from audio only to video only, there will be a sharp change in
  // packet sizes. This will create a change in propagation time which might
  // be detected as an overuse. Using separate overuse detectors for audio and
  // video avoids the issue.
  std::string audio_twcc_trials("WebRTC-Audio-AlrProbing/Disabled/");
  std::string separate_audio_video(
      "WebRTC-Bwe-SeparateAudioPackets/"
      "enabled:true,packet_threshold:15,time_threshold:1000ms/");
  ScopedFieldTrials field_trial(audio_twcc_trials + separate_audio_video);
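  // The combined trial string stays in effect for the duration of the test:
  // ALR probing is disabled for audio and the separate audio overuse detector
  // is enabled with packet_threshold 15 and time_threshold 1000 ms.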
  PeerScenario s(*test_info_);
  auto* caller = s.CreateClient(PeerScenarioClient::Config());
  auto* callee = s.CreateClient(PeerScenarioClient::Config());

  BuiltInNetworkBehaviorConfig net_conf;
  net_conf.link_capacity_kbps = 350;
  net_conf.queue_delay_ms = 50;
  auto send_node = s.net()->CreateEmulatedNode(net_conf);
  auto ret_node = s.net()->CreateEmulatedNode(net_conf);
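  // Both directions use the same 350 kbps / 50 ms profile, so the path is
  // symmetric and the bandwidth estimate is expected to converge close to the
  // 350 kbps link capacity.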

  PeerScenarioClient::VideoSendTrackConfig video_conf;
  video_conf.generator.squares_video->framerate = 15;
  auto video = caller->CreateVideo("VIDEO", video_conf);
  auto audio = caller->CreateAudio("AUDIO", cricket::AudioOptions());
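  // The caller sends one synthetic video track (squares generator at 15 fps)
  // and one audio track; both are negotiated during the connection setup
  // below.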

  // Start ICE and exchange SDP.
  s.SimpleConnection(caller, callee, {send_node}, {ret_node});

  // Limit the video encoder bitrate to 220 kbps, well below the 350 kbps
  // link, to ensure that there are no actual BWE overuses.
  ASSERT_EQ(caller->pc()->GetSenders().size(), 2u);  // 2 senders.
  int num_video_streams = 0;
  for (auto& rtp_sender : caller->pc()->GetSenders()) {
    auto parameters = rtp_sender->GetParameters();
    ASSERT_EQ(parameters.encodings.size(), 1u);  // 1 stream per sender.
    for (auto& encoding_parameters : parameters.encodings) {
      if (encoding_parameters.ssrc == video.sender->ssrc()) {
        num_video_streams++;
        encoding_parameters.max_bitrate_bps = 220000;
        encoding_parameters.max_framerate = 15;
      }
    }
    rtp_sender->SetParameters(parameters);
  }
  ASSERT_EQ(num_video_streams, 1);  // Exactly 1 video stream.

  // Reads the current send-side bandwidth estimate by polling GetStats() and
  // extracting the available outgoing bitrate from the ICE candidate pair
  // stats.
  auto get_bwe = [&] {
    rtc::scoped_refptr<webrtc::MockRTCStatsCollectorCallback> callback(
        new rtc::RefCountedObject<webrtc::MockRTCStatsCollectorCallback>());
    caller->pc()->GetStats(callback);
    // GetStats() delivers the report asynchronously; wait until the callback
    // has been invoked before reading it.
    s.net()->time_controller()->Wait([&] { return callback->called(); });
    auto stats =
        callback->report()->GetStatsOfType<RTCIceCandidatePairStats>()[0];
    return DataRate::BitsPerSec(*stats->available_outgoing_bitrate);
  };

  // Run the call for 15 seconds so the estimate can ramp up towards the
  // 350 kbps link capacity before any track is muted.
  s.ProcessMessages(TimeDelta::Seconds(15));
  const DataRate initial_bwe = get_bwe();
  EXPECT_GE(initial_bwe, DataRate::KilobitsPerSec(300));
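  // initial_bwe serves as the baseline; the checks below require that the
  // estimate never drops below it.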

  // 10 seconds audio only. Bandwidth should not drop.
  video.capturer->Stop();
  s.ProcessMessages(TimeDelta::Seconds(10));
  EXPECT_GE(get_bwe(), initial_bwe);

  // Resume video but stop audio. Bandwidth should not drop.
  video.capturer->Start();
  RTCError status = caller->pc()->RemoveTrackNew(audio.sender);
  ASSERT_TRUE(status.ok());
  audio.track->set_enabled(false);
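  // Check once per second for ten seconds that switching from audio-only to
  // video-only traffic does not trigger an overuse-induced drop in the
  // estimate.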
  for (int i = 0; i < 10; i++) {
    s.ProcessMessages(TimeDelta::Seconds(1));
    EXPECT_GE(get_bwe(), initial_bwe);
  }
}

}  // namespace test
}  // namespace webrtc