aboutsummaryrefslogtreecommitdiff
path: root/modules/video_coding/codecs
diff options
context:
space:
mode:
Diffstat (limited to 'modules/video_coding/codecs')
-rw-r--r--modules/video_coding/codecs/av1/av1_svc_config.cc3
-rw-r--r--modules/video_coding/codecs/av1/av1_svc_config_unittest.cc8
-rw-r--r--modules/video_coding/codecs/av1/libaom_av1_encoder.cc25
-rw-r--r--modules/video_coding/codecs/av1/libaom_av1_encoder_unittest.cc32
-rw-r--r--modules/video_coding/codecs/h264/h264.cc2
-rw-r--r--modules/video_coding/codecs/h264/h264_decoder_impl.cc26
-rw-r--r--modules/video_coding/codecs/h264/h264_encoder_impl.cc2
-rw-r--r--modules/video_coding/codecs/multiplex/multiplex_decoder_adapter.cc5
-rw-r--r--modules/video_coding/codecs/multiplex/test/multiplex_adapter_unittest.cc6
-rw-r--r--modules/video_coding/codecs/test/videocodec_test_libvpx.cc2
-rw-r--r--modules/video_coding/codecs/test/videoprocessor.cc2
-rw-r--r--modules/video_coding/codecs/vp8/default_temporal_layers.cc3
-rw-r--r--modules/video_coding/codecs/vp8/default_temporal_layers_unittest.cc19
-rw-r--r--modules/video_coding/codecs/vp8/libvpx_vp8_decoder.cc10
-rw-r--r--modules/video_coding/codecs/vp8/libvpx_vp8_decoder.h9
-rw-r--r--modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc28
-rw-r--r--modules/video_coding/codecs/vp9/libvpx_vp9_encoder.cc17
-rw-r--r--modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.cc3
-rw-r--r--modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.h7
19 files changed, 168 insertions, 41 deletions
diff --git a/modules/video_coding/codecs/av1/av1_svc_config.cc b/modules/video_coding/codecs/av1/av1_svc_config.cc
index 1e61477b78..b15443c563 100644
--- a/modules/video_coding/codecs/av1/av1_svc_config.cc
+++ b/modules/video_coding/codecs/av1/av1_svc_config.cc
@@ -51,8 +51,9 @@ bool SetAv1SvcConfig(VideoCodec& video_codec) {
if (info.num_spatial_layers == 1) {
SpatialLayer& spatial_layer = video_codec.spatialLayers[0];
spatial_layer.minBitrate = video_codec.minBitrate;
- spatial_layer.targetBitrate = video_codec.startBitrate;
spatial_layer.maxBitrate = video_codec.maxBitrate;
+ spatial_layer.targetBitrate =
+ (video_codec.minBitrate + video_codec.maxBitrate) / 2;
return true;
}
diff --git a/modules/video_coding/codecs/av1/av1_svc_config_unittest.cc b/modules/video_coding/codecs/av1/av1_svc_config_unittest.cc
index 02ded1c70d..e6035328da 100644
--- a/modules/video_coding/codecs/av1/av1_svc_config_unittest.cc
+++ b/modules/video_coding/codecs/av1/av1_svc_config_unittest.cc
@@ -97,19 +97,21 @@ TEST(Av1SvcConfigTest, SetsNumberOfTemporalLayers) {
EXPECT_EQ(video_codec.spatialLayers[0].numberOfTemporalLayers, 3);
}
-TEST(Av1SvcConfigTest, CopiesBitrateForSingleSpatialLayer) {
+TEST(Av1SvcConfigTest, CopiesMinMaxBitrateForSingleSpatialLayer) {
VideoCodec video_codec;
video_codec.codecType = kVideoCodecAV1;
video_codec.SetScalabilityMode("L1T3");
video_codec.minBitrate = 100;
- video_codec.startBitrate = 200;
video_codec.maxBitrate = 500;
EXPECT_TRUE(SetAv1SvcConfig(video_codec));
EXPECT_EQ(video_codec.spatialLayers[0].minBitrate, 100u);
- EXPECT_EQ(video_codec.spatialLayers[0].targetBitrate, 200u);
EXPECT_EQ(video_codec.spatialLayers[0].maxBitrate, 500u);
+ EXPECT_LE(video_codec.spatialLayers[0].minBitrate,
+ video_codec.spatialLayers[0].targetBitrate);
+ EXPECT_LE(video_codec.spatialLayers[0].targetBitrate,
+ video_codec.spatialLayers[0].maxBitrate);
}
TEST(Av1SvcConfigTest, SetsBitratesForMultipleSpatialLayers) {
diff --git a/modules/video_coding/codecs/av1/libaom_av1_encoder.cc b/modules/video_coding/codecs/av1/libaom_av1_encoder.cc
index 8c82476b7a..3b5fdd78e2 100644
--- a/modules/video_coding/codecs/av1/libaom_av1_encoder.cc
+++ b/modules/video_coding/codecs/av1/libaom_av1_encoder.cc
@@ -588,12 +588,26 @@ int32_t LibaomAv1Encoder::Encode(
// kNative. As a workaround to this, we perform ToI420() a second time.
// TODO(https://crbug.com/webrtc/12602): When Android buffers have a correct
// ToI420() implementation, remove this workaround.
+ if (!converted_buffer) {
+ RTC_LOG(LS_ERROR) << "Failed to convert "
+ << VideoFrameBufferTypeToString(
+ converted_buffer->type())
+ << " image to I420. Can't encode frame.";
+ return WEBRTC_VIDEO_CODEC_ENCODER_FAILURE;
+ }
if (converted_buffer->type() != VideoFrameBuffer::Type::kI420 &&
converted_buffer->type() != VideoFrameBuffer::Type::kI420A) {
converted_buffer = converted_buffer->ToI420();
RTC_CHECK(converted_buffer->type() == VideoFrameBuffer::Type::kI420 ||
converted_buffer->type() == VideoFrameBuffer::Type::kI420A);
}
+ if (!converted_buffer) {
+ RTC_LOG(LS_ERROR) << "Failed to convert "
+ << VideoFrameBufferTypeToString(
+ converted_buffer->type())
+ << " image to I420. Can't encode frame.";
+ return WEBRTC_VIDEO_CODEC_ENCODER_FAILURE;
+ }
prepped_input_frame = VideoFrame(converted_buffer, frame.timestamp(),
frame.render_time_ms(), frame.rotation());
}
@@ -671,8 +685,15 @@ int32_t LibaomAv1Encoder::Encode(
encoded_image.content_type_ = VideoContentType::UNSPECIFIED;
// If encoded image width/height info are added to aom_codec_cx_pkt_t,
// use those values in lieu of the values in frame.
- encoded_image._encodedHeight = frame.height();
- encoded_image._encodedWidth = frame.width();
+ if (svc_params_) {
+ int n = svc_params_->scaling_factor_num[layer_frame.SpatialId()];
+ int d = svc_params_->scaling_factor_den[layer_frame.SpatialId()];
+ encoded_image._encodedWidth = cfg_.g_w * n / d;
+ encoded_image._encodedHeight = cfg_.g_h * n / d;
+ } else {
+ encoded_image._encodedWidth = cfg_.g_w;
+ encoded_image._encodedHeight = cfg_.g_h;
+ }
encoded_image.timing_.flags = VideoSendTiming::kInvalid;
int qp = -1;
ret = aom_codec_control(&ctx_, AOME_GET_LAST_QUANTIZER, &qp);
diff --git a/modules/video_coding/codecs/av1/libaom_av1_encoder_unittest.cc b/modules/video_coding/codecs/av1/libaom_av1_encoder_unittest.cc
index ea77e091af..96057a0ce2 100644
--- a/modules/video_coding/codecs/av1/libaom_av1_encoder_unittest.cc
+++ b/modules/video_coding/codecs/av1/libaom_av1_encoder_unittest.cc
@@ -25,6 +25,7 @@ namespace webrtc {
namespace {
using ::testing::ElementsAre;
+using ::testing::Field;
using ::testing::IsEmpty;
using ::testing::SizeIs;
@@ -135,5 +136,36 @@ TEST(LibaomAv1EncoderTest, EncoderInfoProvidesFpsAllocation) {
EXPECT_THAT(encoder_info.fps_allocation[3], IsEmpty());
}
+TEST(LibaomAv1EncoderTest, PopulatesEncodedFrameSize) {
+ std::unique_ptr<VideoEncoder> encoder = CreateLibaomAv1Encoder();
+ VideoCodec codec_settings = DefaultCodecSettings();
+ ASSERT_GT(codec_settings.width, 4);
+ // Configure encoder with 3 spatial layers.
+ codec_settings.SetScalabilityMode("L3T1");
+ ASSERT_EQ(encoder->InitEncode(&codec_settings, DefaultEncoderSettings()),
+ WEBRTC_VIDEO_CODEC_OK);
+
+ using Frame = EncodedVideoFrameProducer::EncodedFrame;
+ std::vector<Frame> encoded_frames =
+ EncodedVideoFrameProducer(*encoder).SetNumInputFrames(1).Encode();
+ EXPECT_THAT(
+ encoded_frames,
+ ElementsAre(
+ Field(&Frame::encoded_image,
+ AllOf(Field(&EncodedImage::_encodedWidth,
+ codec_settings.width / 4),
+ Field(&EncodedImage::_encodedHeight,
+ codec_settings.height / 4))),
+ Field(&Frame::encoded_image,
+ AllOf(Field(&EncodedImage::_encodedWidth,
+ codec_settings.width / 2),
+ Field(&EncodedImage::_encodedHeight,
+ codec_settings.height / 2))),
+ Field(&Frame::encoded_image,
+ AllOf(Field(&EncodedImage::_encodedWidth, codec_settings.width),
+ Field(&EncodedImage::_encodedHeight,
+ codec_settings.height)))));
+}
+
} // namespace
} // namespace webrtc
diff --git a/modules/video_coding/codecs/h264/h264.cc b/modules/video_coding/codecs/h264/h264.cc
index 016d0aa538..14e1691153 100644
--- a/modules/video_coding/codecs/h264/h264.cc
+++ b/modules/video_coding/codecs/h264/h264.cc
@@ -17,6 +17,7 @@
#include "absl/types/optional.h"
#include "api/video_codecs/sdp_video_format.h"
#include "media/base/media_constants.h"
+#include "rtc_base/trace_event.h"
#if defined(WEBRTC_USE_H264)
#include "modules/video_coding/codecs/h264/h264_decoder_impl.h"
@@ -65,6 +66,7 @@ void DisableRtcUseH264() {
}
std::vector<SdpVideoFormat> SupportedH264Codecs() {
+ TRACE_EVENT0("webrtc", __func__);
if (!IsH264CodecSupported())
return std::vector<SdpVideoFormat>();
// We only support encoding Constrained Baseline Profile (CBP), but the
diff --git a/modules/video_coding/codecs/h264/h264_decoder_impl.cc b/modules/video_coding/codecs/h264/h264_decoder_impl.cc
index 6f37b52fd8..83f9a77614 100644
--- a/modules/video_coding/codecs/h264/h264_decoder_impl.cc
+++ b/modules/video_coding/codecs/h264/h264_decoder_impl.cc
@@ -54,6 +54,16 @@ enum H264DecoderImplEvent {
kH264DecoderEventMax = 16,
};
+struct ScopedPtrAVFreePacket {
+ void operator()(AVPacket* packet) { av_packet_free(&packet); }
+};
+typedef std::unique_ptr<AVPacket, ScopedPtrAVFreePacket> ScopedAVPacket;
+
+ScopedAVPacket MakeScopedAVPacket() {
+ ScopedAVPacket packet(av_packet_alloc());
+ return packet;
+}
+
} // namespace
int H264DecoderImpl::AVGetBuffer2(AVCodecContext* context,
@@ -202,7 +212,7 @@ int32_t H264DecoderImpl::InitDecode(const VideoCodec* codec_settings,
// a pointer |this|.
av_context_->opaque = this;
- AVCodec* codec = avcodec_find_decoder(av_context_->codec_id);
+ const AVCodec* codec = avcodec_find_decoder(av_context_->codec_id);
if (!codec) {
// This is an indication that FFmpeg has not been initialized or it has not
// been compiled/initialized with the correct set of codecs.
@@ -261,21 +271,25 @@ int32_t H264DecoderImpl::Decode(const EncodedImage& input_image,
return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
}
- AVPacket packet;
- av_init_packet(&packet);
+ ScopedAVPacket packet = MakeScopedAVPacket();
+ if (!packet) {
+ ReportError();
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
// packet.data has a non-const type, but isn't modified by
// avcodec_send_packet.
- packet.data = const_cast<uint8_t*>(input_image.data());
+ packet->data = const_cast<uint8_t*>(input_image.data());
if (input_image.size() >
static_cast<size_t>(std::numeric_limits<int>::max())) {
ReportError();
return WEBRTC_VIDEO_CODEC_ERROR;
}
- packet.size = static_cast<int>(input_image.size());
+ packet->size = static_cast<int>(input_image.size());
int64_t frame_timestamp_us = input_image.ntp_time_ms_ * 1000; // ms -> μs
av_context_->reordered_opaque = frame_timestamp_us;
- int result = avcodec_send_packet(av_context_.get(), &packet);
+ int result = avcodec_send_packet(av_context_.get(), packet.get());
+
if (result < 0) {
RTC_LOG(LS_ERROR) << "avcodec_send_packet error: " << result;
ReportError();
diff --git a/modules/video_coding/codecs/h264/h264_encoder_impl.cc b/modules/video_coding/codecs/h264/h264_encoder_impl.cc
index 949c51bafa..733f00f5c0 100644
--- a/modules/video_coding/codecs/h264/h264_encoder_impl.cc
+++ b/modules/video_coding/codecs/h264/h264_encoder_impl.cc
@@ -445,7 +445,7 @@ int32_t H264EncoderImpl::Encode(
pictures_[i].iStride[0], pictures_[i].pData[1],
pictures_[i].iStride[1], pictures_[i].pData[2],
pictures_[i].iStride[2], configurations_[i].width,
- configurations_[i].height, libyuv::kFilterBilinear);
+ configurations_[i].height, libyuv::kFilterBox);
}
if (!configurations_[i].sending) {
diff --git a/modules/video_coding/codecs/multiplex/multiplex_decoder_adapter.cc b/modules/video_coding/codecs/multiplex/multiplex_decoder_adapter.cc
index 8d23c6d858..2332fcddfb 100644
--- a/modules/video_coding/codecs/multiplex/multiplex_decoder_adapter.cc
+++ b/modules/video_coding/codecs/multiplex/multiplex_decoder_adapter.cc
@@ -248,9 +248,8 @@ void MultiplexDecoderAdapter::MergeAlphaImages(
[yuv_buffer, alpha_buffer] {});
}
if (supports_augmenting_data_) {
- merged_buffer = rtc::scoped_refptr<webrtc::AugmentedVideoFrameBuffer>(
- new rtc::RefCountedObject<AugmentedVideoFrameBuffer>(
- merged_buffer, std::move(augmenting_data), augmenting_data_length));
+ merged_buffer = rtc::make_ref_counted<AugmentedVideoFrameBuffer>(
+ merged_buffer, std::move(augmenting_data), augmenting_data_length);
}
VideoFrame merged_image = VideoFrame::Builder()
diff --git a/modules/video_coding/codecs/multiplex/test/multiplex_adapter_unittest.cc b/modules/video_coding/codecs/multiplex/test/multiplex_adapter_unittest.cc
index 2e7b0cb3c0..7ecb24a87c 100644
--- a/modules/video_coding/codecs/multiplex/test/multiplex_adapter_unittest.cc
+++ b/modules/video_coding/codecs/multiplex/test/multiplex_adapter_unittest.cc
@@ -90,9 +90,9 @@ class TestMultiplexAdapter : public VideoCodecUnitTest,
for (int i = 0; i < 16; i++) {
data[i] = i;
}
- rtc::scoped_refptr<AugmentedVideoFrameBuffer> augmented_video_frame_buffer =
- new rtc::RefCountedObject<AugmentedVideoFrameBuffer>(
- video_buffer, std::move(data), 16);
+ auto augmented_video_frame_buffer =
+ rtc::make_ref_counted<AugmentedVideoFrameBuffer>(video_buffer,
+ std::move(data), 16);
return std::make_unique<VideoFrame>(
VideoFrame::Builder()
.set_video_frame_buffer(augmented_video_frame_buffer)
diff --git a/modules/video_coding/codecs/test/videocodec_test_libvpx.cc b/modules/video_coding/codecs/test/videocodec_test_libvpx.cc
index 8076e40fd4..fa768927b0 100644
--- a/modules/video_coding/codecs/test/videocodec_test_libvpx.cc
+++ b/modules/video_coding/codecs/test/videocodec_test_libvpx.cc
@@ -301,7 +301,7 @@ TEST(VideoCodecTestLibvpx, MAYBE_ChangeFramerateVP8) {
{31, 30, 0.85, 0.84}, {31.5, 30.5, 0.86, 0.84}, {30.5, 29, 0.83, 0.78}};
#else
std::vector<QualityThresholds> quality_thresholds = {
- {31, 30, 0.87, 0.86}, {32, 31, 0.89, 0.86}, {32, 30, 0.87, 0.82}};
+ {31, 30, 0.87, 0.85}, {32, 31, 0.88, 0.85}, {32, 30, 0.87, 0.82}};
#endif
fixture->RunTest(rate_profiles, &rc_thresholds, &quality_thresholds, nullptr);
}
diff --git a/modules/video_coding/codecs/test/videoprocessor.cc b/modules/video_coding/codecs/test/videoprocessor.cc
index a4918ae73d..23eadfc0db 100644
--- a/modules/video_coding/codecs/test/videoprocessor.cc
+++ b/modules/video_coding/codecs/test/videoprocessor.cc
@@ -650,6 +650,8 @@ const webrtc::EncodedImage* VideoProcessor::BuildAndStoreSuperframe(
EncodedImage copied_image = encoded_image;
copied_image.SetEncodedData(buffer);
+ if (base_image.size())
+ copied_image._frameType = base_image._frameType;
// Replace previous EncodedImage for this spatial layer.
merged_encoded_frames_.at(spatial_idx) = std::move(copied_image);
diff --git a/modules/video_coding/codecs/vp8/default_temporal_layers.cc b/modules/video_coding/codecs/vp8/default_temporal_layers.cc
index e2d9b1ebd2..c84d9acb1c 100644
--- a/modules/video_coding/codecs/vp8/default_temporal_layers.cc
+++ b/modules/video_coding/codecs/vp8/default_temporal_layers.cc
@@ -265,7 +265,8 @@ DefaultTemporalLayers::DefaultTemporalLayers(int number_of_temporal_layers)
temporal_ids_(GetTemporalIds(num_layers_)),
temporal_pattern_(GetDependencyInfo(num_layers_)),
is_static_buffer_(DetermineStaticBuffers(temporal_pattern_)),
- pattern_idx_(kUninitializedPatternIndex) {
+ pattern_idx_(kUninitializedPatternIndex),
+ new_bitrates_bps_(std::vector<uint32_t>(num_layers_, 0u)) {
RTC_CHECK_GE(kMaxTemporalStreams, number_of_temporal_layers);
RTC_CHECK_GE(number_of_temporal_layers, 0);
RTC_CHECK_LE(number_of_temporal_layers, 4);
diff --git a/modules/video_coding/codecs/vp8/default_temporal_layers_unittest.cc b/modules/video_coding/codecs/vp8/default_temporal_layers_unittest.cc
index 64ad40ab76..a18ac40e7d 100644
--- a/modules/video_coding/codecs/vp8/default_temporal_layers_unittest.cc
+++ b/modules/video_coding/codecs/vp8/default_temporal_layers_unittest.cc
@@ -687,6 +687,25 @@ TEST_F(TemporalLayersTest, KeyFrame) {
}
}
+TEST_F(TemporalLayersTest, SetsTlCountOnFirstConfigUpdate) {
+ // Create an instance and fetch config update without setting any rate.
+ constexpr int kNumLayers = 2;
+ DefaultTemporalLayers tl(kNumLayers);
+ Vp8EncoderConfig config = tl.UpdateConfiguration(0);
+
+ // Config should indicate correct number of temporal layers, but zero bitrate.
+ ASSERT_TRUE(config.temporal_layer_config.has_value());
+ EXPECT_EQ(config.temporal_layer_config->ts_number_layers,
+ uint32_t{kNumLayers});
+ std::array<uint32_t, Vp8EncoderConfig::TemporalLayerConfig::kMaxLayers>
+ kZeroRate = {};
+ EXPECT_EQ(config.temporal_layer_config->ts_target_bitrate, kZeroRate);
+
+ // On second call, no new update.
+ config = tl.UpdateConfiguration(0);
+ EXPECT_FALSE(config.temporal_layer_config.has_value());
+}
+
class TemporalLayersReferenceTest : public TemporalLayersTest,
public ::testing::WithParamInterface<int> {
public:
diff --git a/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.cc b/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.cc
index 979ded9a63..9d6ffdba90 100644
--- a/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.cc
+++ b/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.cc
@@ -54,13 +54,9 @@ constexpr bool kIsArm = false;
#endif
absl::optional<LibvpxVp8Decoder::DeblockParams> DefaultDeblockParams() {
- if (kIsArm) {
- // For ARM, this is only called when deblocking is explicitly enabled, and
- // the default strength is set by the ctor.
- return LibvpxVp8Decoder::DeblockParams();
- }
- // For non-arm, don't use the explicit deblocking settings by default.
- return absl::nullopt;
+ return LibvpxVp8Decoder::DeblockParams(/*max_level=*/8,
+ /*degrade_qp=*/60,
+ /*min_qp=*/30);
}
absl::optional<LibvpxVp8Decoder::DeblockParams>
diff --git a/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.h b/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.h
index 8d84b67ce3..60295e5d5d 100644
--- a/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.h
+++ b/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.h
@@ -42,9 +42,12 @@ class LibvpxVp8Decoder : public VideoDecoder {
const char* ImplementationName() const override;
struct DeblockParams {
- int max_level = 6; // Deblocking strength: [0, 16].
- int degrade_qp = 1; // If QP value is below, start lowering |max_level|.
- int min_qp = 0; // If QP value is below, turn off deblocking.
+ DeblockParams() : max_level(6), degrade_qp(1), min_qp(0) {}
+ DeblockParams(int max_level, int degrade_qp, int min_qp)
+ : max_level(max_level), degrade_qp(degrade_qp), min_qp(min_qp) {}
+ int max_level; // Deblocking strength: [0, 16].
+ int degrade_qp; // If QP value is below, start lowering |max_level|.
+ int min_qp; // If QP value is below, turn off deblocking.
};
private:
diff --git a/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc b/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc
index 2411c1622e..e2849dbe6f 100644
--- a/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc
+++ b/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc
@@ -161,6 +161,18 @@ void ApplyVp8EncoderConfigToVpxConfig(const Vp8EncoderConfig& encoder_config,
}
}
+bool IsCompatibleVideoFrameBufferType(VideoFrameBuffer::Type left,
+ VideoFrameBuffer::Type right) {
+ if (left == VideoFrameBuffer::Type::kI420 ||
+ left == VideoFrameBuffer::Type::kI420A) {
+ // LibvpxVp8Encoder does not care about the alpha channel, I420A and I420
+ // are considered compatible.
+ return right == VideoFrameBuffer::Type::kI420 ||
+ right == VideoFrameBuffer::Type::kI420A;
+ }
+ return left == right;
+}
+
void SetRawImagePlanes(vpx_image_t* raw_image, VideoFrameBuffer* buffer) {
switch (buffer->type()) {
case VideoFrameBuffer::Type::kI420:
@@ -1324,6 +1336,13 @@ LibvpxVp8Encoder::PrepareBuffers(rtc::scoped_refptr<VideoFrameBuffer> buffer) {
if (converted_buffer->type() != VideoFrameBuffer::Type::kI420 &&
converted_buffer->type() != VideoFrameBuffer::Type::kI420A) {
converted_buffer = converted_buffer->ToI420();
+ if (!converted_buffer) {
+ RTC_LOG(LS_ERROR) << "Failed to convert "
+ << VideoFrameBufferTypeToString(
+ converted_buffer->type())
+ << " image to I420. Can't encode frame.";
+ return {};
+ }
RTC_CHECK(converted_buffer->type() == VideoFrameBuffer::Type::kI420 ||
converted_buffer->type() == VideoFrameBuffer::Type::kI420A);
}
@@ -1376,9 +1395,8 @@ LibvpxVp8Encoder::PrepareBuffers(rtc::scoped_refptr<VideoFrameBuffer> buffer) {
}
scaled_buffer = mapped_scaled_buffer;
}
- RTC_DCHECK_EQ(scaled_buffer->type(), mapped_buffer->type())
- << "Scaled frames must have the same type as the mapped frame.";
- if (scaled_buffer->type() != mapped_buffer->type()) {
+ if (!IsCompatibleVideoFrameBufferType(scaled_buffer->type(),
+ mapped_buffer->type())) {
RTC_LOG(LS_ERROR) << "When scaling "
<< VideoFrameBufferTypeToString(buffer_to_scale->type())
<< ", the image was unexpectedly converted to "
@@ -1386,6 +1404,10 @@ LibvpxVp8Encoder::PrepareBuffers(rtc::scoped_refptr<VideoFrameBuffer> buffer) {
<< " instead of "
<< VideoFrameBufferTypeToString(mapped_buffer->type())
<< ". Can't encode frame.";
+ RTC_NOTREACHED() << "Scaled buffer type "
+ << VideoFrameBufferTypeToString(scaled_buffer->type())
+ << " is not compatible with mapped buffer type "
+ << VideoFrameBufferTypeToString(mapped_buffer->type());
return {};
}
SetRawImagePlanes(&raw_images_[i], scaled_buffer);
diff --git a/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.cc b/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.cc
index feb2339404..20ab027684 100644
--- a/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.cc
+++ b/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.cc
@@ -1074,8 +1074,15 @@ int LibvpxVp9Encoder::Encode(const VideoFrame& input_image,
break;
}
default: {
- i010_copy =
- I010Buffer::Copy(*input_image.video_frame_buffer()->ToI420());
+ auto i420_buffer = input_image.video_frame_buffer()->ToI420();
+ if (!i420_buffer) {
+ RTC_LOG(LS_ERROR) << "Failed to convert "
+ << VideoFrameBufferTypeToString(
+ input_image.video_frame_buffer()->type())
+ << " image to I420. Can't encode frame.";
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ i010_copy = I010Buffer::Copy(*i420_buffer);
i010_buffer = i010_copy.get();
}
}
@@ -1914,6 +1921,12 @@ rtc::scoped_refptr<VideoFrameBuffer> LibvpxVp9Encoder::PrepareBufferForProfile0(
if (converted_buffer->type() != VideoFrameBuffer::Type::kI420 &&
converted_buffer->type() != VideoFrameBuffer::Type::kI420A) {
converted_buffer = converted_buffer->ToI420();
+ if (!converted_buffer) {
+ RTC_LOG(LS_ERROR) << "Failed to convert "
+ << VideoFrameBufferTypeToString(buffer->type())
+ << " image to I420. Can't encode frame.";
+ return {};
+ }
RTC_CHECK(converted_buffer->type() == VideoFrameBuffer::Type::kI420 ||
converted_buffer->type() == VideoFrameBuffer::Type::kI420A);
}
diff --git a/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.cc b/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.cc
index 4d0a6983ac..d1f58b1bb8 100644
--- a/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.cc
+++ b/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.cc
@@ -15,7 +15,6 @@
#include "rtc_base/checks.h"
#include "rtc_base/logging.h"
-#include "rtc_base/ref_counted_object.h"
#include "vpx/vpx_codec.h"
#include "vpx/vpx_decoder.h"
#include "vpx/vpx_frame_buffer.h"
@@ -68,7 +67,7 @@ Vp9FrameBufferPool::GetFrameBuffer(size_t min_size) {
}
// Otherwise create one.
if (available_buffer == nullptr) {
- available_buffer = new rtc::RefCountedObject<Vp9FrameBuffer>();
+ available_buffer = new Vp9FrameBuffer();
allocated_buffers_.push_back(available_buffer);
if (allocated_buffers_.size() > max_num_buffers_) {
RTC_LOG(LS_WARNING)
diff --git a/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.h b/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.h
index d37a9fc0e2..bce10be4d9 100644
--- a/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.h
+++ b/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.h
@@ -16,9 +16,9 @@
#include <vector>
+#include "api/ref_counted_base.h"
#include "api/scoped_refptr.h"
#include "rtc_base/buffer.h"
-#include "rtc_base/ref_count.h"
#include "rtc_base/synchronization/mutex.h"
struct vpx_codec_ctx;
@@ -65,13 +65,14 @@ constexpr size_t kDefaultMaxNumBuffers = 68;
// vpx_codec_destroy(decoder_ctx);
class Vp9FrameBufferPool {
public:
- class Vp9FrameBuffer : public rtc::RefCountInterface {
+ class Vp9FrameBuffer final
+ : public rtc::RefCountedNonVirtual<Vp9FrameBuffer> {
public:
uint8_t* GetData();
size_t GetDataSize() const;
void SetSize(size_t size);
- virtual bool HasOneRef() const = 0;
+ using rtc::RefCountedNonVirtual<Vp9FrameBuffer>::HasOneRef;
private:
// Data as an easily resizable buffer.