// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "cast/standalone_sender/looping_file_sender.h"

#include <utility>

#if defined(CAST_STANDALONE_SENDER_HAVE_LIBAOM)
#include "cast/standalone_sender/streaming_av1_encoder.h"
#endif
#include "cast/standalone_sender/streaming_vpx_encoder.h"
#include "util/osp_logging.h"
#include "util/trace_logging.h"

namespace openscreen {
namespace cast {

LoopingFileSender::LoopingFileSender(Environment* environment,
                                     ConnectionSettings settings,
                                     const SenderSession* session,
                                     SenderSession::ConfiguredSenders senders,
                                     ShutdownCallback shutdown_callback)
    : env_(environment),
      settings_(std::move(settings)),
      session_(session),
      shutdown_callback_(std::move(shutdown_callback)),
      audio_encoder_(senders.audio_sender->config().channels,
                     StreamingOpusEncoder::kDefaultCastAudioFramesPerSecond,
                     senders.audio_sender),
      video_encoder_(CreateVideoEncoder(
          StreamingVideoEncoder::Parameters{.codec = settings.codec},
          env_->task_runner(),
          senders.video_sender)),
      next_task_(env_->now_function(), env_->task_runner()),
      console_update_task_(env_->now_function(), env_->task_runner()) {
  // Opus is the only audio codec we support, and VP8/VP9/AV1 are the only
  // video codecs. Any other value in the negotiated configs means we offered
  // a codec that we do not support, which is a developer error.
  OSP_CHECK(senders.audio_config.codec == AudioCodec::kOpus);
  OSP_CHECK(senders.video_config.codec == VideoCodec::kVp8 ||
            senders.video_config.codec == VideoCodec::kVp9 ||
            senders.video_config.codec == VideoCodec::kAv1);
  OSP_LOG_INFO << "Max allowed media bitrate (audio + video) will be "
               << settings_.max_bitrate;
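  // Start by using half of the user-specified maximum; the congestion-control
  // loop in ControlForNetworkCongestion() adjusts this up or down once real
  // bandwidth estimates become available.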
  bandwidth_being_utilized_ = settings_.max_bitrate / 2;
  UpdateEncoderBitrates();

  next_task_.Schedule([this] { SendFileAgain(); }, Alarm::kImmediately);
}

LoopingFileSender::~LoopingFileSender() = default;

void LoopingFileSender::SetPlaybackRate(double rate) {
  video_capturer_->SetPlaybackRate(rate);
  audio_capturer_->SetPlaybackRate(rate);
}

void LoopingFileSender::UpdateEncoderBitrates() {
  if (bandwidth_being_utilized_ >= kHighBandwidthThreshold) {
    audio_encoder_.UseHighQuality();
  } else {
    audio_encoder_.UseStandardQuality();
  }
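  // Whatever bandwidth remains after the audio encoder's share goes to video.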
  video_encoder_->SetTargetBitrate(bandwidth_being_utilized_ -
                                   audio_encoder_.GetBitrate());
}

void LoopingFileSender::ControlForNetworkCongestion() {
  bandwidth_estimate_ = session_->GetEstimatedNetworkBandwidth();
  if (bandwidth_estimate_ > 0) {
    // Don't ever try to use *all* of the network bandwidth! However, don't go
    // below the absolute minimum requirement either.
    constexpr double kGoodNetworkCitizenFactor = 0.8;
    const int usable_bandwidth = std::max<int>(
        kGoodNetworkCitizenFactor * bandwidth_estimate_, kMinRequiredBitrate);

    // See "congestion control" discussion in the class header comments for
    // BandwidthEstimator.
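    // Illustrative (hypothetical) numbers: with a 5 Mbps estimate, the usable
    // bandwidth is 0.8 * 5 = 4 Mbps. If 3 Mbps is currently being utilized,
    // the next value is min(3 * 1.1, 4) = 3.3 Mbps, ramping up gradually; a
    // decrease in usable bandwidth, by contrast, is applied immediately.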
    if (usable_bandwidth > bandwidth_being_utilized_) {
      constexpr double kConservativeIncrease = 1.1;
      bandwidth_being_utilized_ = std::min<int>(
          bandwidth_being_utilized_ * kConservativeIncrease, usable_bandwidth);
    } else {
      bandwidth_being_utilized_ = usable_bandwidth;
    }

    // Respect the user's maximum bitrate setting.
    bandwidth_being_utilized_ =
        std::min(bandwidth_being_utilized_, settings_.max_bitrate);

    UpdateEncoderBitrates();
  } else {
    // There is no current bandwidth estimate, so leave the bitrates unchanged.
  }

  next_task_.ScheduleFromNow([this] { ControlForNetworkCongestion(); },
                             kCongestionCheckInterval);
}

void LoopingFileSender::SendFileAgain() {
  OSP_LOG_INFO << "Sending " << settings_.path_to_file
               << " (starts in one second)...";
  TRACE_DEFAULT_SCOPED(TraceCategory::kStandaloneSender);

  OSP_DCHECK_EQ(num_capturers_running_, 0);
  num_capturers_running_ = 2;
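  // Both capturers are given the same start time, one second in the future,
  // so that the audio and video tracks begin in sync.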
  capture_start_time_ = latest_frame_time_ = env_->now() + seconds(1);
  audio_capturer_.emplace(
      env_, settings_.path_to_file.c_str(), audio_encoder_.num_channels(),
      audio_encoder_.sample_rate(), capture_start_time_, this);
  video_capturer_.emplace(env_, settings_.path_to_file.c_str(),
                          capture_start_time_, this);

  next_task_.ScheduleFromNow([this] { ControlForNetworkCongestion(); },
                             kCongestionCheckInterval);
  console_update_task_.Schedule([this] { UpdateStatusOnConsole(); },
                                capture_start_time_);
}

void LoopingFileSender::OnAudioData(const float* interleaved_samples,
                                    int num_samples,
                                    Clock::time_point capture_time) {
  TRACE_DEFAULT_SCOPED(TraceCategory::kStandaloneSender);
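  // Track the most recent capture time so UpdateStatusOnConsole() can report
  // the current playback position.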
  latest_frame_time_ = std::max(capture_time, latest_frame_time_);
  audio_encoder_.EncodeAndSend(interleaved_samples, num_samples, capture_time);
}

void LoopingFileSender::OnVideoFrame(const AVFrame& av_frame,
                                     Clock::time_point capture_time) {
  TRACE_DEFAULT_SCOPED(TraceCategory::kStandaloneSender);
  latest_frame_time_ = std::max(capture_time, latest_frame_time_);
  StreamingVideoEncoder::VideoFrame frame{};
  frame.width = av_frame.width - av_frame.crop_left - av_frame.crop_right;
  frame.height = av_frame.height - av_frame.crop_top - av_frame.crop_bottom;
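  // The plane pointers below skip the cropped top rows and left columns. The
  // chroma planes (indices 1 and 2) are assumed to be 4:2:0 subsampled, which
  // is why their crop offsets are halved.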
  frame.yuv_planes[0] = av_frame.data[0] + av_frame.crop_left +
                        av_frame.linesize[0] * av_frame.crop_top;
  frame.yuv_planes[1] = av_frame.data[1] + av_frame.crop_left / 2 +
                        av_frame.linesize[1] * av_frame.crop_top / 2;
  frame.yuv_planes[2] = av_frame.data[2] + av_frame.crop_left / 2 +
                        av_frame.linesize[2] * av_frame.crop_top / 2;
  for (int i = 0; i < 3; ++i) {
    frame.yuv_strides[i] = av_frame.linesize[i];
  }
  // TODO(jophba): Add performance metrics visual overlay (based on Stats
  // callback).
  video_encoder_->EncodeAndSend(frame, capture_time, {});
}

void LoopingFileSender::UpdateStatusOnConsole() {
  const Clock::duration elapsed = latest_frame_time_ - capture_start_time_;
  const auto seconds_part = to_seconds(elapsed);
  const auto millis_part = to_milliseconds(elapsed - seconds_part);
  // The control codes here attempt to erase the current line the cursor is
  // on, and then print the updated status text. If the terminal does not
  // support simple ANSI escape codes, this still works, but old status lines
  // may sometimes be left partially overwritten instead of erased.
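  // Specifically, "\r" returns the cursor to column 0, and "\x1b[2K" is the
  // ANSI "erase entire line" sequence.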
  fprintf(stdout,
          "\r\x1b[2K\rLoopingFileSender: At %01" PRId64
          ".%03ds in file (est. network bandwidth: %d kbps). \n",
          static_cast<int64_t>(seconds_part.count()),
          static_cast<int>(millis_part.count()), bandwidth_estimate_ / 1024);
  fflush(stdout);

  console_update_task_.ScheduleFromNow([this] { UpdateStatusOnConsole(); },
                                       kConsoleUpdateInterval);
}

void LoopingFileSender::OnEndOfFile(SimulatedCapturer* capturer) {
  OSP_LOG_INFO << "The " << ToTrackName(capturer)
               << " capturer has reached the end of the media stream.";
  --num_capturers_running_;
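  // Only loop (or shut down) once both the audio and video capturers have
  // finished.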
  if (num_capturers_running_ == 0) {
    console_update_task_.Cancel();

    if (settings_.should_loop_video) {
      OSP_DLOG_INFO << "Starting the media stream over again.";
      next_task_.Schedule([this] { SendFileAgain(); }, Alarm::kImmediately);
    } else {
      OSP_DLOG_INFO << "Video complete. Exiting...";
      shutdown_callback_();
    }
  }
}

void LoopingFileSender::OnError(SimulatedCapturer* capturer,
                                std::string message) {
  OSP_LOG_ERROR << "The " << ToTrackName(capturer)
                << " has failed: " << message;
  --num_capturers_running_;
  // If only one track fails, keep going with the other. If both fail (e.g.,
  // for a "file not found" error), no capturers remain and the application
  // simply pauses.
}

const char* LoopingFileSender::ToTrackName(SimulatedCapturer* capturer) const {
  const char* which;
  if (capturer == &*audio_capturer_) {
    which = "audio";
  } else if (capturer == &*video_capturer_) {
    which = "video";
  } else {
    OSP_NOTREACHED();
    which = "";
  }
  return which;
}

std::unique_ptr<StreamingVideoEncoder> LoopingFileSender::CreateVideoEncoder(
    const StreamingVideoEncoder::Parameters& params,
    TaskRunner* task_runner,
    Sender* sender) {
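  // VP8 and VP9 are both handled by the libvpx-based encoder; AV1 requires
  // libaom support to have been compiled in.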
  switch (params.codec) {
    case VideoCodec::kVp8:
    case VideoCodec::kVp9:
      return std::make_unique<StreamingVpxEncoder>(params, task_runner, sender);
    case VideoCodec::kAv1:
#if defined(CAST_STANDALONE_SENDER_HAVE_LIBAOM)
      return std::make_unique<StreamingAv1Encoder>(params, task_runner, sender);
#else
      OSP_LOG_FATAL << "AV1 codec selected, but could not be used because "
                       "LibAOM not installed.";
#endif
    default:
      // We only support VP8, VP9, and AV1, so any other codec value here can
      // only be the result of a developer error.
      OSP_LOG_ERROR << "Unsupported codec " << CodecToString(params.codec);
      OSP_NOTREACHED();
  }
}

}  // namespace cast
}  // namespace openscreen