aboutsummaryrefslogtreecommitdiff
path: root/common_video
diff options
context:
space:
mode:
authorJorge E. Moreira <jemoreira@google.com>2022-12-06 16:34:41 -0800
committerJorge E. Moreira <jemoreira@google.com>2022-12-27 23:04:04 -0800
commit16476ad1d28bf15b814cd4bd26a54f659de792a5 (patch)
treef82dfebdc1fbc199686a75e5ad546e8a4b3a4d84 /common_video
parent28cc9bf0d7bf661eda5f53f43b29e711530cf8a6 (diff)
parente093c481bf8fd8e141fee4c007d63da488ce0ef5 (diff)
downloadwebrtc-16476ad1d28bf15b814cd4bd26a54f659de792a5.tar.gz
Merge commit 'upstream-main' into master
Bug: 261600888 Test: none, build files to be updated in follow up cl Change-Id: Ib520938290c6bbdee4a9f73b6419b6c947a96ec4
Diffstat (limited to 'common_video')
-rw-r--r--common_video/BUILD.gn59
-rw-r--r--common_video/OWNERS.webrtc1
-rw-r--r--common_video/frame_counts.h34
-rw-r--r--common_video/frame_rate_estimator.h2
-rw-r--r--common_video/framerate_controller.cc88
-rw-r--r--common_video/framerate_controller.h46
-rw-r--r--common_video/framerate_controller_unittest.cc162
-rw-r--r--common_video/h264/OWNERS.webrtc1
-rw-r--r--common_video/h264/h264_bitstream_parser.cc183
-rw-r--r--common_video/h264/h264_bitstream_parser.h5
-rw-r--r--common_video/h264/h264_bitstream_parser_unittest.cc38
-rw-r--r--common_video/h264/h264_common.h1
-rw-r--r--common_video/h264/pps_parser.cc157
-rw-r--r--common_video/h264/pps_parser.h16
-rw-r--r--common_video/h264/pps_parser_unittest.cc4
-rw-r--r--common_video/h264/profile_level_id.h19
-rw-r--r--common_video/h264/profile_level_id_unittest.cc201
-rw-r--r--common_video/h264/sps_parser.cc148
-rw-r--r--common_video/h264/sps_parser.h9
-rw-r--r--common_video/h264/sps_parser_unittest.cc131
-rw-r--r--common_video/h264/sps_vui_rewriter.cc363
-rw-r--r--common_video/h264/sps_vui_rewriter.h17
-rw-r--r--common_video/h264/sps_vui_rewriter_unittest.cc90
-rw-r--r--common_video/i420_buffer_pool.cc106
-rw-r--r--common_video/i420_buffer_pool_unittest.cc117
-rw-r--r--common_video/include/i420_buffer_pool.h79
-rw-r--r--common_video/include/incoming_video_stream.h47
-rw-r--r--common_video/include/video_frame.h17
-rw-r--r--common_video/include/video_frame_buffer.h35
-rw-r--r--common_video/include/video_frame_buffer_pool.h82
-rw-r--r--common_video/incoming_video_stream.cc64
-rw-r--r--common_video/libyuv/include/webrtc_libyuv.h37
-rw-r--r--common_video/libyuv/libyuv_unittest.cc55
-rw-r--r--common_video/libyuv/webrtc_libyuv.cc107
-rw-r--r--common_video/video_frame_buffer.cc100
-rw-r--r--common_video/video_frame_buffer_pool.cc312
-rw-r--r--common_video/video_frame_buffer_pool_unittest.cc132
-rw-r--r--common_video/video_frame_unittest.cc508
-rw-r--r--common_video/video_render_frames.cc116
-rw-r--r--common_video/video_render_frames.h55
40 files changed, 1848 insertions, 1896 deletions
diff --git a/common_video/BUILD.gn b/common_video/BUILD.gn
index 8c25eb0953..2aa969f4fa 100644
--- a/common_video/BUILD.gn
+++ b/common_video/BUILD.gn
@@ -15,34 +15,33 @@ rtc_library("common_video") {
"bitrate_adjuster.cc",
"frame_rate_estimator.cc",
"frame_rate_estimator.h",
+ "framerate_controller.cc",
+ "framerate_controller.h",
"h264/h264_bitstream_parser.cc",
"h264/h264_bitstream_parser.h",
"h264/h264_common.cc",
"h264/h264_common.h",
"h264/pps_parser.cc",
"h264/pps_parser.h",
- "h264/profile_level_id.h",
"h264/sps_parser.cc",
"h264/sps_parser.h",
"h264/sps_vui_rewriter.cc",
"h264/sps_vui_rewriter.h",
- "i420_buffer_pool.cc",
"include/bitrate_adjuster.h",
- "include/i420_buffer_pool.h",
- "include/incoming_video_stream.h",
"include/quality_limitation_reason.h",
- "include/video_frame.h",
"include/video_frame_buffer.h",
- "incoming_video_stream.cc",
+ "include/video_frame_buffer_pool.h",
"libyuv/include/webrtc_libyuv.h",
"libyuv/webrtc_libyuv.cc",
"video_frame_buffer.cc",
- "video_render_frames.cc",
- "video_render_frames.h",
+ "video_frame_buffer_pool.cc",
]
deps = [
+ "../api:array_view",
+ "../api:make_ref_counted",
"../api:scoped_refptr",
+ "../api:sequence_checker",
"../api/task_queue",
"../api/units:time_delta",
"../api/units:timestamp",
@@ -50,23 +49,42 @@ rtc_library("common_video") {
"../api/video:video_bitrate_allocation",
"../api/video:video_bitrate_allocator",
"../api/video:video_frame",
- "../api/video:video_frame_i420",
+ "../api/video:video_frame_i010",
"../api/video:video_rtp_headers",
"../api/video_codecs:bitstream_parser_api",
- "../media:rtc_h264_profile_id",
+ "../api/video_codecs:video_codecs_api",
"../rtc_base",
+ "../rtc_base:bit_buffer",
+ "../rtc_base:bitstream_reader",
+ "../rtc_base:buffer",
"../rtc_base:checks",
+ "../rtc_base:event_tracer",
+ "../rtc_base:logging",
+ "../rtc_base:macromagic",
+ "../rtc_base:race_checker",
+ "../rtc_base:rate_statistics",
+ "../rtc_base:refcount",
"../rtc_base:rtc_task_queue",
"../rtc_base:safe_minmax",
+ "../rtc_base:timeutils",
"../rtc_base/synchronization:mutex",
"../rtc_base/system:rtc_export",
"../system_wrappers:metrics",
"//third_party/libyuv",
]
- absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/numeric:bits",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+}
+
+rtc_source_set("frame_counts") {
+ visibility = [ "*" ]
+
+ sources = [ "frame_counts.h" ]
}
-if (rtc_include_tests) {
+if (rtc_include_tests && !build_with_chromium) {
common_video_resources = [ "../resources/foreman_cif.yuv" ]
if (is_ios) {
@@ -83,30 +101,32 @@ if (rtc_include_tests) {
sources = [
"bitrate_adjuster_unittest.cc",
"frame_rate_estimator_unittest.cc",
+ "framerate_controller_unittest.cc",
"h264/h264_bitstream_parser_unittest.cc",
"h264/pps_parser_unittest.cc",
- "h264/profile_level_id_unittest.cc",
"h264/sps_parser_unittest.cc",
"h264/sps_vui_rewriter_unittest.cc",
- "i420_buffer_pool_unittest.cc",
"libyuv/libyuv_unittest.cc",
+ "video_frame_buffer_pool_unittest.cc",
"video_frame_unittest.cc",
]
deps = [
":common_video",
- "../:webrtc_common",
"../api:scoped_refptr",
"../api/units:time_delta",
"../api/video:video_frame",
"../api/video:video_frame_i010",
- "../api/video:video_frame_i420",
"../api/video:video_rtp_headers",
- "../media:rtc_h264_profile_id",
+ "../api/video_codecs:video_codecs_api",
"../rtc_base",
+ "../rtc_base:bit_buffer",
+ "../rtc_base:buffer",
"../rtc_base:checks",
- "../rtc_base:rtc_base_approved",
+ "../rtc_base:logging",
+ "../rtc_base:macromagic",
"../rtc_base:rtc_base_tests_utils",
+ "../rtc_base:timeutils",
"../system_wrappers:system_wrappers",
"../test:fileutils",
"../test:frame_utils",
@@ -114,10 +134,11 @@ if (rtc_include_tests) {
"../test:test_support",
"../test:video_test_common",
"//testing/gtest",
- "//third_party/abseil-cpp/absl/types:optional",
"//third_party/libyuv",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+
data = common_video_resources
if (is_android) {
deps += [ "//testing/android/native_test:native_test_support" ]
diff --git a/common_video/OWNERS.webrtc b/common_video/OWNERS.webrtc
index b0088e401f..455e247d90 100644
--- a/common_video/OWNERS.webrtc
+++ b/common_video/OWNERS.webrtc
@@ -1,3 +1,4 @@
magjed@webrtc.org
marpan@webrtc.org
+sprang@webrtc.org
stefan@webrtc.org
diff --git a/common_video/frame_counts.h b/common_video/frame_counts.h
new file mode 100644
index 0000000000..505d3129ef
--- /dev/null
+++ b/common_video/frame_counts.h
@@ -0,0 +1,34 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef COMMON_VIDEO_FRAME_COUNTS_H_
+#define COMMON_VIDEO_FRAME_COUNTS_H_
+
+#include <cstdint>
+
+namespace webrtc {
+
+struct FrameCounts {
+ FrameCounts() : key_frames(0), delta_frames(0) {}
+ int key_frames;
+ int delta_frames;
+};
+
+// Callback, used to notify an observer whenever frame counts have been updated.
+class FrameCountObserver {
+ public:
+ virtual ~FrameCountObserver() {}
+ virtual void FrameCountUpdated(const FrameCounts& frame_counts,
+ uint32_t ssrc) = 0;
+};
+
+} // namespace webrtc
+
+#endif // COMMON_VIDEO_FRAME_COUNTS_H_
diff --git a/common_video/frame_rate_estimator.h b/common_video/frame_rate_estimator.h
index 4cdd284c34..95219a534d 100644
--- a/common_video/frame_rate_estimator.h
+++ b/common_video/frame_rate_estimator.h
@@ -43,7 +43,7 @@ class FrameRateEstimator {
// Get the current average FPS, based on the frames currently in the window.
absl::optional<double> GetAverageFps() const;
- // Move the window so it ends at |now|, and return the new fps estimate.
+ // Move the window so it ends at `now`, and return the new fps estimate.
absl::optional<double> GetAverageFps(Timestamp now);
// Completely clear the averaging window.
diff --git a/common_video/framerate_controller.cc b/common_video/framerate_controller.cc
new file mode 100644
index 0000000000..23e9c70cbd
--- /dev/null
+++ b/common_video/framerate_controller.cc
@@ -0,0 +1,88 @@
+/*
+ * Copyright 2021 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "common_video/framerate_controller.h"
+
+#include <limits>
+
+#include "rtc_base/time_utils.h"
+
+namespace webrtc {
+namespace {
+constexpr double kMinFramerate = 0.5;
+} // namespace
+
+FramerateController::FramerateController()
+ : FramerateController(std::numeric_limits<double>::max()) {}
+
+FramerateController::FramerateController(double max_framerate)
+ : max_framerate_(max_framerate) {}
+
+FramerateController::~FramerateController() {}
+
+void FramerateController::SetMaxFramerate(double max_framerate) {
+ max_framerate_ = max_framerate;
+}
+
+double FramerateController::GetMaxFramerate() const {
+ return max_framerate_;
+}
+
+bool FramerateController::ShouldDropFrame(int64_t in_timestamp_ns) {
+ if (max_framerate_ < kMinFramerate)
+ return true;
+
+ // If `max_framerate_` is not set (i.e. maxdouble), `frame_interval_ns` is
+ // rounded to 0.
+ int64_t frame_interval_ns = rtc::kNumNanosecsPerSec / max_framerate_;
+ if (frame_interval_ns <= 0) {
+ // Frame rate throttling not enabled.
+ return false;
+ }
+
+ if (next_frame_timestamp_ns_) {
+ // Time until next frame should be outputted.
+ const int64_t time_until_next_frame_ns =
+ (*next_frame_timestamp_ns_ - in_timestamp_ns);
+ // Continue if timestamp is within expected range.
+ if (std::abs(time_until_next_frame_ns) < 2 * frame_interval_ns) {
+ // Drop if a frame shouldn't be outputted yet.
+ if (time_until_next_frame_ns > 0)
+ return true;
+ // Time to output new frame.
+ *next_frame_timestamp_ns_ += frame_interval_ns;
+ return false;
+ }
+ }
+
+ // First timestamp received or timestamp is way outside expected range, so
+ // reset. Set first timestamp target to just half the interval to prefer
+ // keeping frames in case of jitter.
+ next_frame_timestamp_ns_ = in_timestamp_ns + frame_interval_ns / 2;
+ return false;
+}
+
+void FramerateController::Reset() {
+ max_framerate_ = std::numeric_limits<double>::max();
+ next_frame_timestamp_ns_ = absl::nullopt;
+}
+
+void FramerateController::KeepFrame(int64_t in_timestamp_ns) {
+ if (ShouldDropFrame(in_timestamp_ns)) {
+ if (max_framerate_ < kMinFramerate)
+ return;
+
+ int64_t frame_interval_ns = rtc::kNumNanosecsPerSec / max_framerate_;
+ if (next_frame_timestamp_ns_)
+ *next_frame_timestamp_ns_ += frame_interval_ns;
+ }
+}
+
+} // namespace webrtc
diff --git a/common_video/framerate_controller.h b/common_video/framerate_controller.h
new file mode 100644
index 0000000000..371ffd419f
--- /dev/null
+++ b/common_video/framerate_controller.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2021 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef COMMON_VIDEO_FRAMERATE_CONTROLLER_H_
+#define COMMON_VIDEO_FRAMERATE_CONTROLLER_H_
+
+#include <stdint.h>
+
+#include "absl/types/optional.h"
+
+namespace webrtc {
+
+// Determines which frames that should be dropped based on input framerate and
+// requested framerate.
+class FramerateController {
+ public:
+ FramerateController();
+ explicit FramerateController(double max_framerate);
+ ~FramerateController();
+
+ // Sets max framerate (default is maxdouble).
+ void SetMaxFramerate(double max_framerate);
+ double GetMaxFramerate() const;
+
+ // Returns true if the frame should be dropped, false otherwise.
+ bool ShouldDropFrame(int64_t in_timestamp_ns);
+
+ void Reset();
+
+ void KeepFrame(int64_t in_timestamp_ns);
+
+ private:
+ double max_framerate_;
+ absl::optional<int64_t> next_frame_timestamp_ns_;
+};
+
+} // namespace webrtc
+
+#endif // COMMON_VIDEO_FRAMERATE_CONTROLLER_H_
diff --git a/common_video/framerate_controller_unittest.cc b/common_video/framerate_controller_unittest.cc
new file mode 100644
index 0000000000..690076ca61
--- /dev/null
+++ b/common_video/framerate_controller_unittest.cc
@@ -0,0 +1,162 @@
+/*
+ * Copyright 2021 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "common_video/framerate_controller.h"
+
+#include <limits>
+
+#include "rtc_base/time_utils.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+namespace {
+constexpr int kInputFps = 30;
+constexpr int kNumFrames = 60;
+} // namespace
+
+class FramerateControllerTest : public ::testing::Test {
+ protected:
+ int64_t GetNextTimestampNs() {
+ int64_t interval_us = rtc::kNumMicrosecsPerSec / kInputFps;
+ next_timestamp_us_ += interval_us;
+ return next_timestamp_us_ * rtc::kNumNanosecsPerMicrosec;
+ }
+
+ int64_t next_timestamp_us_ = rtc::TimeMicros();
+ FramerateController controller_;
+};
+
+TEST_F(FramerateControllerTest, NoFramesDroppedIfNothingRequested) {
+ // Default max framerate is maxdouble.
+ for (int i = 1; i < kNumFrames; ++i)
+ EXPECT_FALSE(controller_.ShouldDropFrame(GetNextTimestampNs()));
+}
+
+TEST_F(FramerateControllerTest, AllFramesDroppedIfZeroRequested) {
+ controller_.SetMaxFramerate(0);
+
+ for (int i = 1; i < kNumFrames; ++i)
+ EXPECT_TRUE(controller_.ShouldDropFrame(GetNextTimestampNs()));
+}
+
+TEST_F(FramerateControllerTest, AllFramesDroppedIfNegativeRequested) {
+ controller_.SetMaxFramerate(-1);
+
+ for (int i = 1; i < kNumFrames; ++i)
+ EXPECT_TRUE(controller_.ShouldDropFrame(GetNextTimestampNs()));
+}
+
+TEST_F(FramerateControllerTest, EverySecondFrameDroppedIfHalfRequested) {
+ controller_.SetMaxFramerate(kInputFps / 2);
+
+ // The first frame should not be dropped.
+ for (int i = 1; i < kNumFrames; ++i)
+ EXPECT_EQ(i % 2 == 0, controller_.ShouldDropFrame(GetNextTimestampNs()));
+}
+
+TEST_F(FramerateControllerTest, EveryThirdFrameDroppedIfTwoThirdsRequested) {
+ controller_.SetMaxFramerate(kInputFps * 2 / 3);
+
+ // The first frame should not be dropped.
+ for (int i = 1; i < kNumFrames; ++i)
+ EXPECT_EQ(i % 3 == 0, controller_.ShouldDropFrame(GetNextTimestampNs()));
+}
+
+TEST_F(FramerateControllerTest, NoFrameDroppedIfTwiceRequested) {
+ controller_.SetMaxFramerate(kInputFps * 2);
+
+ for (int i = 1; i < kNumFrames; ++i)
+ EXPECT_FALSE(controller_.ShouldDropFrame(GetNextTimestampNs()));
+}
+
+TEST_F(FramerateControllerTest, TestAverageFramerate) {
+ const double kMaxFps = 18.2;
+ controller_.SetMaxFramerate(kMaxFps);
+
+ const int kNumSec = 10;
+ int frames_kept = 0;
+ for (int i = 0; i < kInputFps * kNumSec; ++i) {
+ if (!controller_.ShouldDropFrame(GetNextTimestampNs()))
+ ++frames_kept;
+ }
+ double average_fps = static_cast<double>(frames_kept) / kNumSec;
+ EXPECT_NEAR(kMaxFps, average_fps, 0.01);
+}
+
+TEST_F(FramerateControllerTest, NoFrameDroppedForLargeTimestampOffset) {
+ controller_.SetMaxFramerate(kInputFps);
+ EXPECT_FALSE(controller_.ShouldDropFrame(0));
+
+ const int64_t kLargeOffsetNs = -987654321LL * 1000;
+ EXPECT_FALSE(controller_.ShouldDropFrame(kLargeOffsetNs));
+
+ int64_t input_interval_ns = rtc::kNumNanosecsPerSec / kInputFps;
+ EXPECT_FALSE(controller_.ShouldDropFrame(kLargeOffsetNs + input_interval_ns));
+}
+
+TEST_F(FramerateControllerTest, NoFrameDroppedIfInputWithJitterRequested) {
+ controller_.SetMaxFramerate(kInputFps);
+
+ // Input fps with jitter.
+ int64_t input_interval_ns = rtc::kNumNanosecsPerSec / kInputFps;
+ EXPECT_FALSE(controller_.ShouldDropFrame(input_interval_ns * 0 / 10));
+ EXPECT_FALSE(controller_.ShouldDropFrame(input_interval_ns * 10 / 10 - 1));
+ EXPECT_FALSE(controller_.ShouldDropFrame(input_interval_ns * 25 / 10));
+ EXPECT_FALSE(controller_.ShouldDropFrame(input_interval_ns * 30 / 10));
+ EXPECT_FALSE(controller_.ShouldDropFrame(input_interval_ns * 35 / 10));
+ EXPECT_FALSE(controller_.ShouldDropFrame(input_interval_ns * 50 / 10));
+}
+
+TEST_F(FramerateControllerTest, FrameDroppedWhenReductionRequested) {
+ controller_.SetMaxFramerate(kInputFps);
+
+ // Expect no frame drop.
+ for (int i = 1; i < kNumFrames; ++i)
+ EXPECT_FALSE(controller_.ShouldDropFrame(GetNextTimestampNs()));
+
+ // Reduce max frame rate.
+ controller_.SetMaxFramerate(kInputFps / 2);
+
+ // Verify that every other frame is dropped.
+ for (int i = 1; i < kNumFrames; ++i)
+ EXPECT_EQ(i % 2 == 0, controller_.ShouldDropFrame(GetNextTimestampNs()));
+}
+
+TEST_F(FramerateControllerTest, NoFramesDroppedAfterReset) {
+ controller_.SetMaxFramerate(0);
+
+ // All frames dropped.
+ for (int i = 1; i < kNumFrames; ++i)
+ EXPECT_TRUE(controller_.ShouldDropFrame(GetNextTimestampNs()));
+
+ controller_.Reset();
+
+ // Expect no frame drop after reset.
+ for (int i = 1; i < kNumFrames; ++i)
+ EXPECT_FALSE(controller_.ShouldDropFrame(GetNextTimestampNs()));
+}
+
+TEST_F(FramerateControllerTest, TestKeepFrame) {
+ FramerateController controller(kInputFps / 2);
+
+ EXPECT_FALSE(controller.ShouldDropFrame(GetNextTimestampNs()));
+ EXPECT_TRUE(controller.ShouldDropFrame(GetNextTimestampNs()));
+ EXPECT_FALSE(controller.ShouldDropFrame(GetNextTimestampNs()));
+ EXPECT_TRUE(controller.ShouldDropFrame(GetNextTimestampNs()));
+ EXPECT_FALSE(controller.ShouldDropFrame(GetNextTimestampNs()));
+
+ // Next frame should be dropped.
+ // Keep this frame (e.g. in case of a key frame).
+ controller.KeepFrame(GetNextTimestampNs());
+ // Expect next frame to be dropped instead.
+ EXPECT_TRUE(controller.ShouldDropFrame(GetNextTimestampNs()));
+}
+
+} // namespace webrtc
diff --git a/common_video/h264/OWNERS.webrtc b/common_video/h264/OWNERS.webrtc
new file mode 100644
index 0000000000..361ed7e84a
--- /dev/null
+++ b/common_video/h264/OWNERS.webrtc
@@ -0,0 +1 @@
+ssilkin@webrtc.org
diff --git a/common_video/h264/h264_bitstream_parser.cc b/common_video/h264/h264_bitstream_parser.cc
index 5a75f48f88..2311d0d2ee 100644
--- a/common_video/h264/h264_bitstream_parser.cc
+++ b/common_video/h264/h264_bitstream_parser.cc
@@ -15,29 +15,20 @@
#include <vector>
#include "common_video/h264/h264_common.h"
-#include "rtc_base/bit_buffer.h"
+#include "rtc_base/bitstream_reader.h"
#include "rtc_base/logging.h"
+namespace webrtc {
namespace {
-const int kMaxAbsQpDeltaValue = 51;
-const int kMinQpValue = 0;
-const int kMaxQpValue = 51;
+constexpr int kMaxAbsQpDeltaValue = 51;
+constexpr int kMinQpValue = 0;
+constexpr int kMaxQpValue = 51;
} // namespace
-namespace webrtc {
-
-#define RETURN_ON_FAIL(x, res) \
- if (!(x)) { \
- RTC_LOG_F(LS_ERROR) << "FAILED: " #x; \
- return res; \
- }
-
-#define RETURN_INV_ON_FAIL(x) RETURN_ON_FAIL(x, kInvalidStream)
-
-H264BitstreamParser::H264BitstreamParser() {}
-H264BitstreamParser::~H264BitstreamParser() {}
+H264BitstreamParser::H264BitstreamParser() = default;
+H264BitstreamParser::~H264BitstreamParser() = default;
H264BitstreamParser::Result H264BitstreamParser::ParseNonParameterSetNalu(
const uint8_t* source,
@@ -52,95 +43,90 @@ H264BitstreamParser::Result H264BitstreamParser::ParseNonParameterSetNalu(
if (slice_rbsp.size() < H264::kNaluTypeSize)
return kInvalidStream;
- rtc::BitBuffer slice_reader(slice_rbsp.data() + H264::kNaluTypeSize,
- slice_rbsp.size() - H264::kNaluTypeSize);
+ BitstreamReader slice_reader(slice_rbsp);
+ slice_reader.ConsumeBits(H264::kNaluTypeSize * 8);
+
// Check to see if this is an IDR slice, which has an extra field to parse
// out.
bool is_idr = (source[0] & 0x0F) == H264::NaluType::kIdr;
uint8_t nal_ref_idc = (source[0] & 0x60) >> 5;
- uint32_t golomb_tmp;
- uint32_t bits_tmp;
// first_mb_in_slice: ue(v)
- RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&golomb_tmp));
+ slice_reader.ReadExponentialGolomb();
// slice_type: ue(v)
- uint32_t slice_type;
- RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&slice_type));
+ uint32_t slice_type = slice_reader.ReadExponentialGolomb();
// slice_type's 5..9 range is used to indicate that all slices of a picture
// have the same value of slice_type % 5, we don't care about that, so we map
// to the corresponding 0..4 range.
slice_type %= 5;
// pic_parameter_set_id: ue(v)
- RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&golomb_tmp));
+ slice_reader.ReadExponentialGolomb();
if (sps_->separate_colour_plane_flag == 1) {
// colour_plane_id
- RETURN_INV_ON_FAIL(slice_reader.ReadBits(&bits_tmp, 2));
+ slice_reader.ConsumeBits(2);
}
// frame_num: u(v)
// Represented by log2_max_frame_num bits.
- RETURN_INV_ON_FAIL(
- slice_reader.ReadBits(&bits_tmp, sps_->log2_max_frame_num));
- uint32_t field_pic_flag = 0;
+ slice_reader.ConsumeBits(sps_->log2_max_frame_num);
+ bool field_pic_flag = false;
if (sps_->frame_mbs_only_flag == 0) {
// field_pic_flag: u(1)
- RETURN_INV_ON_FAIL(slice_reader.ReadBits(&field_pic_flag, 1));
- if (field_pic_flag != 0) {
+ field_pic_flag = slice_reader.Read<bool>();
+ if (field_pic_flag) {
// bottom_field_flag: u(1)
- RETURN_INV_ON_FAIL(slice_reader.ReadBits(&bits_tmp, 1));
+ slice_reader.ConsumeBits(1);
}
}
if (is_idr) {
// idr_pic_id: ue(v)
- RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&golomb_tmp));
+ slice_reader.ReadExponentialGolomb();
}
// pic_order_cnt_lsb: u(v)
// Represented by sps_.log2_max_pic_order_cnt_lsb bits.
if (sps_->pic_order_cnt_type == 0) {
- RETURN_INV_ON_FAIL(
- slice_reader.ReadBits(&bits_tmp, sps_->log2_max_pic_order_cnt_lsb));
- if (pps_->bottom_field_pic_order_in_frame_present_flag &&
- field_pic_flag == 0) {
+ slice_reader.ConsumeBits(sps_->log2_max_pic_order_cnt_lsb);
+ if (pps_->bottom_field_pic_order_in_frame_present_flag && !field_pic_flag) {
// delta_pic_order_cnt_bottom: se(v)
- RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&golomb_tmp));
+ slice_reader.ReadExponentialGolomb();
}
}
if (sps_->pic_order_cnt_type == 1 &&
!sps_->delta_pic_order_always_zero_flag) {
// delta_pic_order_cnt[0]: se(v)
- RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&golomb_tmp));
+ slice_reader.ReadExponentialGolomb();
if (pps_->bottom_field_pic_order_in_frame_present_flag && !field_pic_flag) {
// delta_pic_order_cnt[1]: se(v)
- RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&golomb_tmp));
+ slice_reader.ReadExponentialGolomb();
}
}
if (pps_->redundant_pic_cnt_present_flag) {
// redundant_pic_cnt: ue(v)
- RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&golomb_tmp));
+ slice_reader.ReadExponentialGolomb();
}
if (slice_type == H264::SliceType::kB) {
// direct_spatial_mv_pred_flag: u(1)
- RETURN_INV_ON_FAIL(slice_reader.ReadBits(&bits_tmp, 1));
+ slice_reader.ConsumeBits(1);
}
switch (slice_type) {
case H264::SliceType::kP:
case H264::SliceType::kB:
case H264::SliceType::kSp:
- uint32_t num_ref_idx_active_override_flag;
// num_ref_idx_active_override_flag: u(1)
- RETURN_INV_ON_FAIL(
- slice_reader.ReadBits(&num_ref_idx_active_override_flag, 1));
- if (num_ref_idx_active_override_flag != 0) {
+ if (slice_reader.Read<bool>()) {
// num_ref_idx_l0_active_minus1: ue(v)
- RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&golomb_tmp));
+ slice_reader.ReadExponentialGolomb();
if (slice_type == H264::SliceType::kB) {
// num_ref_idx_l1_active_minus1: ue(v)
- RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&golomb_tmp));
+ slice_reader.ReadExponentialGolomb();
}
}
break;
default:
break;
}
+ if (!slice_reader.Ok()) {
+ return kInvalidStream;
+ }
// assume nal_unit_type != 20 && nal_unit_type != 21:
if (nalu_type == 20 || nalu_type == 21) {
RTC_LOG(LS_ERROR) << "Unsupported nal unit type.";
@@ -151,54 +137,49 @@ H264BitstreamParser::Result H264BitstreamParser::ParseNonParameterSetNalu(
// else
{
// ref_pic_list_modification():
- // |slice_type| checks here don't use named constants as they aren't named
+ // `slice_type` checks here don't use named constants as they aren't named
// in the spec for this segment. Keeping them consistent makes it easier to
// verify that they are both the same.
if (slice_type % 5 != 2 && slice_type % 5 != 4) {
// ref_pic_list_modification_flag_l0: u(1)
- uint32_t ref_pic_list_modification_flag_l0;
- RETURN_INV_ON_FAIL(
- slice_reader.ReadBits(&ref_pic_list_modification_flag_l0, 1));
- if (ref_pic_list_modification_flag_l0) {
+ if (slice_reader.Read<bool>()) {
uint32_t modification_of_pic_nums_idc;
do {
// modification_of_pic_nums_idc: ue(v)
- RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(
- &modification_of_pic_nums_idc));
+ modification_of_pic_nums_idc = slice_reader.ReadExponentialGolomb();
if (modification_of_pic_nums_idc == 0 ||
modification_of_pic_nums_idc == 1) {
// abs_diff_pic_num_minus1: ue(v)
- RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&golomb_tmp));
+ slice_reader.ReadExponentialGolomb();
} else if (modification_of_pic_nums_idc == 2) {
// long_term_pic_num: ue(v)
- RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&golomb_tmp));
+ slice_reader.ReadExponentialGolomb();
}
- } while (modification_of_pic_nums_idc != 3);
+ } while (modification_of_pic_nums_idc != 3 && slice_reader.Ok());
}
}
if (slice_type % 5 == 1) {
// ref_pic_list_modification_flag_l1: u(1)
- uint32_t ref_pic_list_modification_flag_l1;
- RETURN_INV_ON_FAIL(
- slice_reader.ReadBits(&ref_pic_list_modification_flag_l1, 1));
- if (ref_pic_list_modification_flag_l1) {
+ if (slice_reader.Read<bool>()) {
uint32_t modification_of_pic_nums_idc;
do {
// modification_of_pic_nums_idc: ue(v)
- RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(
- &modification_of_pic_nums_idc));
+ modification_of_pic_nums_idc = slice_reader.ReadExponentialGolomb();
if (modification_of_pic_nums_idc == 0 ||
modification_of_pic_nums_idc == 1) {
// abs_diff_pic_num_minus1: ue(v)
- RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&golomb_tmp));
+ slice_reader.ReadExponentialGolomb();
} else if (modification_of_pic_nums_idc == 2) {
// long_term_pic_num: ue(v)
- RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&golomb_tmp));
+ slice_reader.ReadExponentialGolomb();
}
- } while (modification_of_pic_nums_idc != 3);
+ } while (modification_of_pic_nums_idc != 3 && slice_reader.Ok());
}
}
}
+ if (!slice_reader.Ok()) {
+ return kInvalidStream;
+ }
// TODO(pbos): Do we need support for pred_weight_table()?
if ((pps_->weighted_pred_flag && (slice_type == H264::SliceType::kP ||
slice_type == H264::SliceType::kSp)) ||
@@ -215,49 +196,47 @@ H264BitstreamParser::Result H264BitstreamParser::ParseNonParameterSetNalu(
if (is_idr) {
// no_output_of_prior_pics_flag: u(1)
// long_term_reference_flag: u(1)
- RETURN_INV_ON_FAIL(slice_reader.ReadBits(&bits_tmp, 2));
+ slice_reader.ConsumeBits(2);
} else {
// adaptive_ref_pic_marking_mode_flag: u(1)
- uint32_t adaptive_ref_pic_marking_mode_flag;
- RETURN_INV_ON_FAIL(
- slice_reader.ReadBits(&adaptive_ref_pic_marking_mode_flag, 1));
- if (adaptive_ref_pic_marking_mode_flag) {
+ if (slice_reader.Read<bool>()) {
uint32_t memory_management_control_operation;
do {
// memory_management_control_operation: ue(v)
- RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(
- &memory_management_control_operation));
+ memory_management_control_operation =
+ slice_reader.ReadExponentialGolomb();
if (memory_management_control_operation == 1 ||
memory_management_control_operation == 3) {
// difference_of_pic_nums_minus1: ue(v)
- RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&golomb_tmp));
+ slice_reader.ReadExponentialGolomb();
}
if (memory_management_control_operation == 2) {
// long_term_pic_num: ue(v)
- RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&golomb_tmp));
+ slice_reader.ReadExponentialGolomb();
}
if (memory_management_control_operation == 3 ||
memory_management_control_operation == 6) {
// long_term_frame_idx: ue(v)
- RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&golomb_tmp));
+ slice_reader.ReadExponentialGolomb();
}
if (memory_management_control_operation == 4) {
// max_long_term_frame_idx_plus1: ue(v)
- RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&golomb_tmp));
+ slice_reader.ReadExponentialGolomb();
}
- } while (memory_management_control_operation != 0);
+ } while (memory_management_control_operation != 0 && slice_reader.Ok());
}
}
}
if (pps_->entropy_coding_mode_flag && slice_type != H264::SliceType::kI &&
slice_type != H264::SliceType::kSi) {
// cabac_init_idc: ue(v)
- RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&golomb_tmp));
+ slice_reader.ReadExponentialGolomb();
}
- int32_t last_slice_qp_delta;
- RETURN_INV_ON_FAIL(
- slice_reader.ReadSignedExponentialGolomb(&last_slice_qp_delta));
+ int last_slice_qp_delta = slice_reader.ReadSignedExponentialGolomb();
+ if (!slice_reader.Ok()) {
+ return kInvalidStream;
+ }
if (abs(last_slice_qp_delta) > kMaxAbsQpDeltaValue) {
// Something has gone wrong, and the parsed value is invalid.
RTC_LOG(LS_WARNING) << "Parsed QP value out of range.";
@@ -275,56 +254,46 @@ void H264BitstreamParser::ParseSlice(const uint8_t* slice, size_t length) {
sps_ = SpsParser::ParseSps(slice + H264::kNaluTypeSize,
length - H264::kNaluTypeSize);
if (!sps_)
- RTC_LOG(LS_WARNING) << "Unable to parse SPS from H264 bitstream.";
+ RTC_DLOG(LS_WARNING) << "Unable to parse SPS from H264 bitstream.";
break;
}
case H264::NaluType::kPps: {
pps_ = PpsParser::ParsePps(slice + H264::kNaluTypeSize,
length - H264::kNaluTypeSize);
if (!pps_)
- RTC_LOG(LS_WARNING) << "Unable to parse PPS from H264 bitstream.";
+ RTC_DLOG(LS_WARNING) << "Unable to parse PPS from H264 bitstream.";
break;
}
case H264::NaluType::kAud:
case H264::NaluType::kSei:
+ case H264::NaluType::kPrefix:
break; // Ignore these nalus, as we don't care about their contents.
default:
Result res = ParseNonParameterSetNalu(slice, length, nalu_type);
if (res != kOk)
- RTC_LOG(LS_INFO) << "Failed to parse bitstream. Error: " << res;
+ RTC_DLOG(LS_INFO) << "Failed to parse bitstream. Error: " << res;
break;
}
}
-void H264BitstreamParser::ParseBitstream(const uint8_t* bitstream,
- size_t length) {
+void H264BitstreamParser::ParseBitstream(
+ rtc::ArrayView<const uint8_t> bitstream) {
std::vector<H264::NaluIndex> nalu_indices =
- H264::FindNaluIndices(bitstream, length);
+ H264::FindNaluIndices(bitstream.data(), bitstream.size());
for (const H264::NaluIndex& index : nalu_indices)
- ParseSlice(&bitstream[index.payload_start_offset], index.payload_size);
+ ParseSlice(bitstream.data() + index.payload_start_offset,
+ index.payload_size);
}
-bool H264BitstreamParser::GetLastSliceQp(int* qp) const {
+absl::optional<int> H264BitstreamParser::GetLastSliceQp() const {
if (!last_slice_qp_delta_ || !pps_)
- return false;
- const int parsed_qp = 26 + pps_->pic_init_qp_minus26 + *last_slice_qp_delta_;
- if (parsed_qp < kMinQpValue || parsed_qp > kMaxQpValue) {
+ return absl::nullopt;
+ const int qp = 26 + pps_->pic_init_qp_minus26 + *last_slice_qp_delta_;
+ if (qp < kMinQpValue || qp > kMaxQpValue) {
RTC_LOG(LS_ERROR) << "Parsed invalid QP from bitstream.";
- return false;
+ return absl::nullopt;
}
- *qp = parsed_qp;
- return true;
-}
-
-void H264BitstreamParser::ParseBitstream(
- rtc::ArrayView<const uint8_t> bitstream) {
- ParseBitstream(bitstream.data(), bitstream.size());
-}
-
-absl::optional<int> H264BitstreamParser::GetLastSliceQp() const {
- int qp;
- bool success = GetLastSliceQp(&qp);
- return success ? absl::optional<int>(qp) : absl::nullopt;
+ return qp;
}
} // namespace webrtc
diff --git a/common_video/h264/h264_bitstream_parser.h b/common_video/h264/h264_bitstream_parser.h
index 48190665f0..05427825ac 100644
--- a/common_video/h264/h264_bitstream_parser.h
+++ b/common_video/h264/h264_bitstream_parser.h
@@ -31,11 +31,6 @@ class H264BitstreamParser : public BitstreamParser {
H264BitstreamParser();
~H264BitstreamParser() override;
- // These are here for backwards-compatability for the time being.
- void ParseBitstream(const uint8_t* bitstream, size_t length);
- bool GetLastSliceQp(int* qp) const;
-
- // New interface.
void ParseBitstream(rtc::ArrayView<const uint8_t> bitstream) override;
absl::optional<int> GetLastSliceQp() const override;
diff --git a/common_video/h264/h264_bitstream_parser_unittest.cc b/common_video/h264/h264_bitstream_parser_unittest.cc
index 1509d67753..3f4f202af2 100644
--- a/common_video/h264/h264_bitstream_parser_unittest.cc
+++ b/common_video/h264/h264_bitstream_parser_unittest.cc
@@ -46,43 +46,39 @@ uint8_t kH264BitstreamNextImageSliceChunkCabac[] = {
TEST(H264BitstreamParserTest, ReportsNoQpWithoutParsedSlices) {
H264BitstreamParser h264_parser;
- int qp;
- EXPECT_FALSE(h264_parser.GetLastSliceQp(&qp));
+ EXPECT_FALSE(h264_parser.GetLastSliceQp().has_value());
}
TEST(H264BitstreamParserTest, ReportsNoQpWithOnlyParsedPpsAndSpsSlices) {
H264BitstreamParser h264_parser;
- h264_parser.ParseBitstream(kH264SpsPps, sizeof(kH264SpsPps));
- int qp;
- EXPECT_FALSE(h264_parser.GetLastSliceQp(&qp));
+ h264_parser.ParseBitstream(kH264SpsPps);
+ EXPECT_FALSE(h264_parser.GetLastSliceQp().has_value());
}
TEST(H264BitstreamParserTest, ReportsLastSliceQpForImageSlices) {
H264BitstreamParser h264_parser;
- h264_parser.ParseBitstream(kH264BitstreamChunk, sizeof(kH264BitstreamChunk));
- int qp;
- ASSERT_TRUE(h264_parser.GetLastSliceQp(&qp));
- EXPECT_EQ(35, qp);
+ h264_parser.ParseBitstream(kH264BitstreamChunk);
+ absl::optional<int> qp = h264_parser.GetLastSliceQp();
+ ASSERT_TRUE(qp.has_value());
+ EXPECT_EQ(35, *qp);
// Parse an additional image slice.
- h264_parser.ParseBitstream(kH264BitstreamNextImageSliceChunk,
- sizeof(kH264BitstreamNextImageSliceChunk));
- ASSERT_TRUE(h264_parser.GetLastSliceQp(&qp));
- EXPECT_EQ(37, qp);
+ h264_parser.ParseBitstream(kH264BitstreamNextImageSliceChunk);
+ qp = h264_parser.GetLastSliceQp();
+ ASSERT_TRUE(qp.has_value());
+ EXPECT_EQ(37, *qp);
}
TEST(H264BitstreamParserTest, ReportsLastSliceQpForCABACImageSlices) {
H264BitstreamParser h264_parser;
- h264_parser.ParseBitstream(kH264BitstreamChunkCabac,
- sizeof(kH264BitstreamChunkCabac));
- int qp;
- EXPECT_FALSE(h264_parser.GetLastSliceQp(&qp));
+ h264_parser.ParseBitstream(kH264BitstreamChunkCabac);
+ EXPECT_FALSE(h264_parser.GetLastSliceQp().has_value());
// Parse an additional image slice.
- h264_parser.ParseBitstream(kH264BitstreamNextImageSliceChunkCabac,
- sizeof(kH264BitstreamNextImageSliceChunkCabac));
- ASSERT_TRUE(h264_parser.GetLastSliceQp(&qp));
- EXPECT_EQ(24, qp);
+ h264_parser.ParseBitstream(kH264BitstreamNextImageSliceChunkCabac);
+ absl::optional<int> qp = h264_parser.GetLastSliceQp();
+ ASSERT_TRUE(qp.has_value());
+ EXPECT_EQ(24, *qp);
}
} // namespace webrtc
diff --git a/common_video/h264/h264_common.h b/common_video/h264/h264_common.h
index 2beef16ac5..0b1843ee38 100644
--- a/common_video/h264/h264_common.h
+++ b/common_video/h264/h264_common.h
@@ -42,6 +42,7 @@ enum NaluType : uint8_t {
kEndOfSequence = 10,
kEndOfStream = 11,
kFiller = 12,
+ kPrefix = 14,
kStapA = 24,
kFuA = 28
};
diff --git a/common_video/h264/pps_parser.cc b/common_video/h264/pps_parser.cc
index ae01652189..2fc9749e8c 100644
--- a/common_video/h264/pps_parser.cc
+++ b/common_video/h264/pps_parser.cc
@@ -11,24 +11,20 @@
#include "common_video/h264/pps_parser.h"
#include <cstdint>
+#include <limits>
#include <vector>
+#include "absl/numeric/bits.h"
#include "common_video/h264/h264_common.h"
-#include "rtc_base/bit_buffer.h"
+#include "rtc_base/bitstream_reader.h"
#include "rtc_base/checks.h"
-#define RETURN_EMPTY_ON_FAIL(x) \
- if (!(x)) { \
- return absl::nullopt; \
- }
-
+namespace webrtc {
namespace {
-const int kMaxPicInitQpDeltaValue = 25;
-const int kMinPicInitQpDeltaValue = -26;
+constexpr int kMaxPicInitQpDeltaValue = 25;
+constexpr int kMinPicInitQpDeltaValue = -26;
} // namespace
-namespace webrtc {
-
// General note: this is based off the 02/2014 version of the H.264 standard.
// You can find it on this page:
// http://www.itu.int/rec/T-REC-H.264
@@ -38,9 +34,7 @@ absl::optional<PpsParser::PpsState> PpsParser::ParsePps(const uint8_t* data,
// First, parse out rbsp, which is basically the source buffer minus emulation
// bytes (the last byte of a 0x00 0x00 0x03 sequence). RBSP is defined in
// section 7.3.1 of the H.264 standard.
- std::vector<uint8_t> unpacked_buffer = H264::ParseRbsp(data, length);
- rtc::BitBuffer bit_buffer(unpacked_buffer.data(), unpacked_buffer.size());
- return ParseInternal(&bit_buffer);
+ return ParseInternal(H264::ParseRbsp(data, length));
}
bool PpsParser::ParsePpsIds(const uint8_t* data,
@@ -53,149 +47,114 @@ bool PpsParser::ParsePpsIds(const uint8_t* data,
// bytes (the last byte of a 0x00 0x00 0x03 sequence). RBSP is defined in
// section 7.3.1 of the H.264 standard.
std::vector<uint8_t> unpacked_buffer = H264::ParseRbsp(data, length);
- rtc::BitBuffer bit_buffer(unpacked_buffer.data(), unpacked_buffer.size());
- return ParsePpsIdsInternal(&bit_buffer, pps_id, sps_id);
+ BitstreamReader reader(unpacked_buffer);
+ *pps_id = reader.ReadExponentialGolomb();
+ *sps_id = reader.ReadExponentialGolomb();
+ return reader.Ok();
}
absl::optional<uint32_t> PpsParser::ParsePpsIdFromSlice(const uint8_t* data,
size_t length) {
std::vector<uint8_t> unpacked_buffer = H264::ParseRbsp(data, length);
- rtc::BitBuffer slice_reader(unpacked_buffer.data(), unpacked_buffer.size());
+ BitstreamReader slice_reader(unpacked_buffer);
- uint32_t golomb_tmp;
// first_mb_in_slice: ue(v)
- if (!slice_reader.ReadExponentialGolomb(&golomb_tmp))
- return absl::nullopt;
+ slice_reader.ReadExponentialGolomb();
// slice_type: ue(v)
- if (!slice_reader.ReadExponentialGolomb(&golomb_tmp))
- return absl::nullopt;
+ slice_reader.ReadExponentialGolomb();
// pic_parameter_set_id: ue(v)
- uint32_t slice_pps_id;
- if (!slice_reader.ReadExponentialGolomb(&slice_pps_id))
+ uint32_t slice_pps_id = slice_reader.ReadExponentialGolomb();
+ if (!slice_reader.Ok()) {
return absl::nullopt;
+ }
return slice_pps_id;
}
absl::optional<PpsParser::PpsState> PpsParser::ParseInternal(
- rtc::BitBuffer* bit_buffer) {
+ rtc::ArrayView<const uint8_t> buffer) {
+ BitstreamReader reader(buffer);
PpsState pps;
+ pps.id = reader.ReadExponentialGolomb();
+ pps.sps_id = reader.ReadExponentialGolomb();
- RETURN_EMPTY_ON_FAIL(ParsePpsIdsInternal(bit_buffer, &pps.id, &pps.sps_id));
-
- uint32_t bits_tmp;
- uint32_t golomb_ignored;
// entropy_coding_mode_flag: u(1)
- uint32_t entropy_coding_mode_flag;
- RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(&entropy_coding_mode_flag, 1));
- pps.entropy_coding_mode_flag = entropy_coding_mode_flag != 0;
+ pps.entropy_coding_mode_flag = reader.Read<bool>();
// bottom_field_pic_order_in_frame_present_flag: u(1)
- uint32_t bottom_field_pic_order_in_frame_present_flag;
- RETURN_EMPTY_ON_FAIL(
- bit_buffer->ReadBits(&bottom_field_pic_order_in_frame_present_flag, 1));
- pps.bottom_field_pic_order_in_frame_present_flag =
- bottom_field_pic_order_in_frame_present_flag != 0;
+ pps.bottom_field_pic_order_in_frame_present_flag = reader.Read<bool>();
// num_slice_groups_minus1: ue(v)
- uint32_t num_slice_groups_minus1;
- RETURN_EMPTY_ON_FAIL(
- bit_buffer->ReadExponentialGolomb(&num_slice_groups_minus1));
+ uint32_t num_slice_groups_minus1 = reader.ReadExponentialGolomb();
if (num_slice_groups_minus1 > 0) {
- uint32_t slice_group_map_type;
// slice_group_map_type: ue(v)
- RETURN_EMPTY_ON_FAIL(
- bit_buffer->ReadExponentialGolomb(&slice_group_map_type));
+ uint32_t slice_group_map_type = reader.ReadExponentialGolomb();
if (slice_group_map_type == 0) {
- for (uint32_t i_group = 0; i_group <= num_slice_groups_minus1;
- ++i_group) {
+ for (uint32_t i_group = 0;
+ i_group <= num_slice_groups_minus1 && reader.Ok(); ++i_group) {
// run_length_minus1[iGroup]: ue(v)
- RETURN_EMPTY_ON_FAIL(
- bit_buffer->ReadExponentialGolomb(&golomb_ignored));
+ reader.ReadExponentialGolomb();
}
} else if (slice_group_map_type == 1) {
// TODO(sprang): Implement support for dispersed slice group map type.
// See 8.2.2.2 Specification for dispersed slice group map type.
} else if (slice_group_map_type == 2) {
- for (uint32_t i_group = 0; i_group <= num_slice_groups_minus1;
- ++i_group) {
+ for (uint32_t i_group = 0;
+ i_group <= num_slice_groups_minus1 && reader.Ok(); ++i_group) {
// top_left[iGroup]: ue(v)
- RETURN_EMPTY_ON_FAIL(
- bit_buffer->ReadExponentialGolomb(&golomb_ignored));
+ reader.ReadExponentialGolomb();
// bottom_right[iGroup]: ue(v)
- RETURN_EMPTY_ON_FAIL(
- bit_buffer->ReadExponentialGolomb(&golomb_ignored));
+ reader.ReadExponentialGolomb();
}
} else if (slice_group_map_type == 3 || slice_group_map_type == 4 ||
slice_group_map_type == 5) {
// slice_group_change_direction_flag: u(1)
- RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(&bits_tmp, 1));
+ reader.ConsumeBits(1);
// slice_group_change_rate_minus1: ue(v)
- RETURN_EMPTY_ON_FAIL(bit_buffer->ReadExponentialGolomb(&golomb_ignored));
+ reader.ReadExponentialGolomb();
} else if (slice_group_map_type == 6) {
// pic_size_in_map_units_minus1: ue(v)
- uint32_t pic_size_in_map_units_minus1;
- RETURN_EMPTY_ON_FAIL(
- bit_buffer->ReadExponentialGolomb(&pic_size_in_map_units_minus1));
- uint32_t slice_group_id_bits = 0;
- uint32_t num_slice_groups = num_slice_groups_minus1 + 1;
- // If num_slice_groups is not a power of two an additional bit is required
- // to account for the ceil() of log2() below.
- if ((num_slice_groups & (num_slice_groups - 1)) != 0)
- ++slice_group_id_bits;
- while (num_slice_groups > 0) {
- num_slice_groups >>= 1;
- ++slice_group_id_bits;
- }
- for (uint32_t i = 0; i <= pic_size_in_map_units_minus1; i++) {
- // slice_group_id[i]: u(v)
- // Represented by ceil(log2(num_slice_groups_minus1 + 1)) bits.
- RETURN_EMPTY_ON_FAIL(
- bit_buffer->ReadBits(&bits_tmp, slice_group_id_bits));
+ uint32_t pic_size_in_map_units = reader.ReadExponentialGolomb() + 1;
+ int slice_group_id_bits = 1 + absl::bit_width(num_slice_groups_minus1);
+
+ // slice_group_id: array of size pic_size_in_map_units, each element
+ // is represented by ceil(log2(num_slice_groups_minus1 + 1)) bits.
+ int64_t bits_to_consume =
+ int64_t{slice_group_id_bits} * pic_size_in_map_units;
+ if (!reader.Ok() || bits_to_consume > std::numeric_limits<int>::max()) {
+ return absl::nullopt;
}
+ reader.ConsumeBits(bits_to_consume);
}
}
// num_ref_idx_l0_default_active_minus1: ue(v)
- RETURN_EMPTY_ON_FAIL(bit_buffer->ReadExponentialGolomb(&golomb_ignored));
+ reader.ReadExponentialGolomb();
// num_ref_idx_l1_default_active_minus1: ue(v)
- RETURN_EMPTY_ON_FAIL(bit_buffer->ReadExponentialGolomb(&golomb_ignored));
+ reader.ReadExponentialGolomb();
// weighted_pred_flag: u(1)
- uint32_t weighted_pred_flag;
- RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(&weighted_pred_flag, 1));
- pps.weighted_pred_flag = weighted_pred_flag != 0;
+ pps.weighted_pred_flag = reader.Read<bool>();
// weighted_bipred_idc: u(2)
- RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(&pps.weighted_bipred_idc, 2));
+ pps.weighted_bipred_idc = reader.ReadBits(2);
// pic_init_qp_minus26: se(v)
- RETURN_EMPTY_ON_FAIL(
- bit_buffer->ReadSignedExponentialGolomb(&pps.pic_init_qp_minus26));
+ pps.pic_init_qp_minus26 = reader.ReadSignedExponentialGolomb();
// Sanity-check parsed value
- if (pps.pic_init_qp_minus26 > kMaxPicInitQpDeltaValue ||
+ if (!reader.Ok() || pps.pic_init_qp_minus26 > kMaxPicInitQpDeltaValue ||
pps.pic_init_qp_minus26 < kMinPicInitQpDeltaValue) {
- RETURN_EMPTY_ON_FAIL(false);
+ return absl::nullopt;
}
// pic_init_qs_minus26: se(v)
- RETURN_EMPTY_ON_FAIL(bit_buffer->ReadExponentialGolomb(&golomb_ignored));
+ reader.ReadExponentialGolomb();
// chroma_qp_index_offset: se(v)
- RETURN_EMPTY_ON_FAIL(bit_buffer->ReadExponentialGolomb(&golomb_ignored));
+ reader.ReadExponentialGolomb();
// deblocking_filter_control_present_flag: u(1)
// constrained_intra_pred_flag: u(1)
- RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(&bits_tmp, 2));
+ reader.ConsumeBits(2);
// redundant_pic_cnt_present_flag: u(1)
- RETURN_EMPTY_ON_FAIL(
- bit_buffer->ReadBits(&pps.redundant_pic_cnt_present_flag, 1));
+ pps.redundant_pic_cnt_present_flag = reader.ReadBit();
+ if (!reader.Ok()) {
+ return absl::nullopt;
+ }
return pps;
}
-bool PpsParser::ParsePpsIdsInternal(rtc::BitBuffer* bit_buffer,
- uint32_t* pps_id,
- uint32_t* sps_id) {
- // pic_parameter_set_id: ue(v)
- if (!bit_buffer->ReadExponentialGolomb(pps_id))
- return false;
- // seq_parameter_set_id: ue(v)
- if (!bit_buffer->ReadExponentialGolomb(sps_id))
- return false;
- return true;
-}
-
} // namespace webrtc
diff --git a/common_video/h264/pps_parser.h b/common_video/h264/pps_parser.h
index d6c31b0688..52717dcc26 100644
--- a/common_video/h264/pps_parser.h
+++ b/common_video/h264/pps_parser.h
@@ -11,11 +11,11 @@
#ifndef COMMON_VIDEO_H264_PPS_PARSER_H_
#define COMMON_VIDEO_H264_PPS_PARSER_H_
-#include "absl/types/optional.h"
+#include <stddef.h>
+#include <stdint.h>
-namespace rtc {
-class BitBuffer;
-}
+#include "absl/types/optional.h"
+#include "api/array_view.h"
namespace webrtc {
@@ -49,12 +49,10 @@ class PpsParser {
size_t length);
protected:
- // Parse the PPS state, for a bit buffer where RBSP decoding has already been
+ // Parse the PPS state, for a buffer where RBSP decoding has already been
// performed.
- static absl::optional<PpsState> ParseInternal(rtc::BitBuffer* bit_buffer);
- static bool ParsePpsIdsInternal(rtc::BitBuffer* bit_buffer,
- uint32_t* pps_id,
- uint32_t* sps_id);
+ static absl::optional<PpsState> ParseInternal(
+ rtc::ArrayView<const uint8_t> buffer);
};
} // namespace webrtc
diff --git a/common_video/h264/pps_parser_unittest.cc b/common_video/h264/pps_parser_unittest.cc
index a279563f85..652f4c7ce0 100644
--- a/common_video/h264/pps_parser_unittest.cc
+++ b/common_video/h264/pps_parser_unittest.cc
@@ -99,7 +99,7 @@ void WritePps(const PpsParser::PpsState& pps,
break;
}
default:
- RTC_NOTREACHED();
+ RTC_DCHECK_NOTREACHED();
}
}
@@ -174,7 +174,7 @@ class PpsParserTest : public ::testing::Test {
WritePps(pps, slice_group_map_type, num_slice_groups, pic_size_in_map_units,
&buffer_);
parsed_pps_ = PpsParser::ParsePps(buffer_.data(), buffer_.size());
- EXPECT_TRUE(static_cast<bool>(parsed_pps_));
+ ASSERT_TRUE(parsed_pps_);
EXPECT_EQ(pps.bottom_field_pic_order_in_frame_present_flag,
parsed_pps_->bottom_field_pic_order_in_frame_present_flag);
EXPECT_EQ(pps.weighted_pred_flag, parsed_pps_->weighted_pred_flag);
diff --git a/common_video/h264/profile_level_id.h b/common_video/h264/profile_level_id.h
deleted file mode 100644
index 07b49e57c7..0000000000
--- a/common_video/h264/profile_level_id.h
+++ /dev/null
@@ -1,19 +0,0 @@
-/*
- * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef COMMON_VIDEO_H264_PROFILE_LEVEL_ID_H_
-#define COMMON_VIDEO_H264_PROFILE_LEVEL_ID_H_
-
-#include "media/base/h264_profile_level_id.h"
-
-// TODO(zhihuang): Delete this file once dependent applications switch to
-// including "webrtc/media/base/h264_profile_level_id.h" directly.
-
-#endif // COMMON_VIDEO_H264_PROFILE_LEVEL_ID_H_
diff --git a/common_video/h264/profile_level_id_unittest.cc b/common_video/h264/profile_level_id_unittest.cc
deleted file mode 100644
index 957b434a3c..0000000000
--- a/common_video/h264/profile_level_id_unittest.cc
+++ /dev/null
@@ -1,201 +0,0 @@
-/*
- * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "common_video/h264/profile_level_id.h"
-
-#include <map>
-#include <string>
-
-#include "absl/types/optional.h"
-#include "media/base/h264_profile_level_id.h"
-#include "test/gtest.h"
-
-namespace webrtc {
-namespace H264 {
-
-TEST(H264ProfileLevelId, TestParsingInvalid) {
- // Malformed strings.
- EXPECT_FALSE(ParseProfileLevelId(""));
- EXPECT_FALSE(ParseProfileLevelId(" 42e01f"));
- EXPECT_FALSE(ParseProfileLevelId("4242e01f"));
- EXPECT_FALSE(ParseProfileLevelId("e01f"));
- EXPECT_FALSE(ParseProfileLevelId("gggggg"));
-
- // Invalid level.
- EXPECT_FALSE(ParseProfileLevelId("42e000"));
- EXPECT_FALSE(ParseProfileLevelId("42e00f"));
- EXPECT_FALSE(ParseProfileLevelId("42e0ff"));
-
- // Invalid profile.
- EXPECT_FALSE(ParseProfileLevelId("42e11f"));
- EXPECT_FALSE(ParseProfileLevelId("58601f"));
- EXPECT_FALSE(ParseProfileLevelId("64e01f"));
-}
-
-TEST(H264ProfileLevelId, TestParsingLevel) {
- EXPECT_EQ(kLevel3_1, ParseProfileLevelId("42e01f")->level);
- EXPECT_EQ(kLevel1_1, ParseProfileLevelId("42e00b")->level);
- EXPECT_EQ(kLevel1_b, ParseProfileLevelId("42f00b")->level);
- EXPECT_EQ(kLevel4_2, ParseProfileLevelId("42C02A")->level);
- EXPECT_EQ(kLevel5_2, ParseProfileLevelId("640c34")->level);
-}
-
-TEST(H264ProfileLevelId, TestParsingConstrainedBaseline) {
- EXPECT_EQ(kProfileConstrainedBaseline,
- ParseProfileLevelId("42e01f")->profile);
- EXPECT_EQ(kProfileConstrainedBaseline,
- ParseProfileLevelId("42C02A")->profile);
- EXPECT_EQ(kProfileConstrainedBaseline,
- ParseProfileLevelId("4de01f")->profile);
- EXPECT_EQ(kProfileConstrainedBaseline,
- ParseProfileLevelId("58f01f")->profile);
-}
-
-TEST(H264ProfileLevelId, TestParsingBaseline) {
- EXPECT_EQ(kProfileBaseline, ParseProfileLevelId("42a01f")->profile);
- EXPECT_EQ(kProfileBaseline, ParseProfileLevelId("58A01F")->profile);
-}
-
-TEST(H264ProfileLevelId, TestParsingMain) {
- EXPECT_EQ(kProfileMain, ParseProfileLevelId("4D401f")->profile);
-}
-
-TEST(H264ProfileLevelId, TestParsingHigh) {
- EXPECT_EQ(kProfileHigh, ParseProfileLevelId("64001f")->profile);
-}
-
-TEST(H264ProfileLevelId, TestParsingConstrainedHigh) {
- EXPECT_EQ(kProfileConstrainedHigh, ParseProfileLevelId("640c1f")->profile);
-}
-
-TEST(H264ProfileLevelId, TestSupportedLevel) {
- EXPECT_EQ(kLevel2_1, *SupportedLevel(640 * 480, 25));
- EXPECT_EQ(kLevel3_1, *SupportedLevel(1280 * 720, 30));
- EXPECT_EQ(kLevel4_2, *SupportedLevel(1920 * 1280, 60));
-}
-
-// Test supported level below level 1 requirements.
-TEST(H264ProfileLevelId, TestSupportedLevelInvalid) {
- EXPECT_FALSE(SupportedLevel(0, 0));
- // All levels support fps > 5.
- EXPECT_FALSE(SupportedLevel(1280 * 720, 5));
- // All levels support frame sizes > 183 * 137.
- EXPECT_FALSE(SupportedLevel(183 * 137, 30));
-}
-
-TEST(H264ProfileLevelId, TestToString) {
- EXPECT_EQ("42e01f", *ProfileLevelIdToString(ProfileLevelId(
- kProfileConstrainedBaseline, kLevel3_1)));
- EXPECT_EQ("42000a",
- *ProfileLevelIdToString(ProfileLevelId(kProfileBaseline, kLevel1)));
- EXPECT_EQ("4d001f",
- ProfileLevelIdToString(ProfileLevelId(kProfileMain, kLevel3_1)));
- EXPECT_EQ("640c2a", *ProfileLevelIdToString(
- ProfileLevelId(kProfileConstrainedHigh, kLevel4_2)));
- EXPECT_EQ("64002a",
- *ProfileLevelIdToString(ProfileLevelId(kProfileHigh, kLevel4_2)));
-}
-
-TEST(H264ProfileLevelId, TestToStringLevel1b) {
- EXPECT_EQ("42f00b", *ProfileLevelIdToString(ProfileLevelId(
- kProfileConstrainedBaseline, kLevel1_b)));
- EXPECT_EQ("42100b", *ProfileLevelIdToString(
- ProfileLevelId(kProfileBaseline, kLevel1_b)));
- EXPECT_EQ("4d100b",
- *ProfileLevelIdToString(ProfileLevelId(kProfileMain, kLevel1_b)));
-}
-
-TEST(H264ProfileLevelId, TestToStringRoundTrip) {
- EXPECT_EQ("42e01f", *ProfileLevelIdToString(*ParseProfileLevelId("42e01f")));
- EXPECT_EQ("42e01f", *ProfileLevelIdToString(*ParseProfileLevelId("42E01F")));
- EXPECT_EQ("4d100b", *ProfileLevelIdToString(*ParseProfileLevelId("4d100b")));
- EXPECT_EQ("4d100b", *ProfileLevelIdToString(*ParseProfileLevelId("4D100B")));
- EXPECT_EQ("640c2a", *ProfileLevelIdToString(*ParseProfileLevelId("640c2a")));
- EXPECT_EQ("640c2a", *ProfileLevelIdToString(*ParseProfileLevelId("640C2A")));
-}
-
-TEST(H264ProfileLevelId, TestToStringInvalid) {
- EXPECT_FALSE(ProfileLevelIdToString(ProfileLevelId(kProfileHigh, kLevel1_b)));
- EXPECT_FALSE(ProfileLevelIdToString(
- ProfileLevelId(kProfileConstrainedHigh, kLevel1_b)));
- EXPECT_FALSE(ProfileLevelIdToString(
- ProfileLevelId(static_cast<Profile>(255), kLevel3_1)));
-}
-
-TEST(H264ProfileLevelId, TestParseSdpProfileLevelIdEmpty) {
- const absl::optional<ProfileLevelId> profile_level_id =
- ParseSdpProfileLevelId(CodecParameterMap());
- EXPECT_TRUE(profile_level_id);
- EXPECT_EQ(kProfileConstrainedBaseline, profile_level_id->profile);
- EXPECT_EQ(kLevel3_1, profile_level_id->level);
-}
-
-TEST(H264ProfileLevelId, TestParseSdpProfileLevelIdConstrainedHigh) {
- CodecParameterMap params;
- params["profile-level-id"] = "640c2a";
- const absl::optional<ProfileLevelId> profile_level_id =
- ParseSdpProfileLevelId(params);
- EXPECT_TRUE(profile_level_id);
- EXPECT_EQ(kProfileConstrainedHigh, profile_level_id->profile);
- EXPECT_EQ(kLevel4_2, profile_level_id->level);
-}
-
-TEST(H264ProfileLevelId, TestParseSdpProfileLevelIdInvalid) {
- CodecParameterMap params;
- params["profile-level-id"] = "foobar";
- EXPECT_FALSE(ParseSdpProfileLevelId(params));
-}
-
-TEST(H264ProfileLevelId, TestGenerateProfileLevelIdForAnswerEmpty) {
- CodecParameterMap answer_params;
- GenerateProfileLevelIdForAnswer(CodecParameterMap(), CodecParameterMap(),
- &answer_params);
- EXPECT_TRUE(answer_params.empty());
-}
-
-TEST(H264ProfileLevelId,
- TestGenerateProfileLevelIdForAnswerLevelSymmetryCapped) {
- CodecParameterMap low_level;
- low_level["profile-level-id"] = "42e015";
- CodecParameterMap high_level;
- high_level["profile-level-id"] = "42e01f";
-
- // Level asymmetry is not allowed; test that answer level is the lower of the
- // local and remote levels.
- CodecParameterMap answer_params;
- GenerateProfileLevelIdForAnswer(low_level /* local_supported */,
- high_level /* remote_offered */,
- &answer_params);
- EXPECT_EQ("42e015", answer_params["profile-level-id"]);
-
- CodecParameterMap answer_params2;
- GenerateProfileLevelIdForAnswer(high_level /* local_supported */,
- low_level /* remote_offered */,
- &answer_params2);
- EXPECT_EQ("42e015", answer_params2["profile-level-id"]);
-}
-
-TEST(H264ProfileLevelId,
- TestGenerateProfileLevelIdForAnswerConstrainedBaselineLevelAsymmetry) {
- CodecParameterMap local_params;
- local_params["profile-level-id"] = "42e01f";
- local_params["level-asymmetry-allowed"] = "1";
- CodecParameterMap remote_params;
- remote_params["profile-level-id"] = "42e015";
- remote_params["level-asymmetry-allowed"] = "1";
- CodecParameterMap answer_params;
- GenerateProfileLevelIdForAnswer(local_params, remote_params, &answer_params);
- // When level asymmetry is allowed, we can answer a higher level than what was
- // offered.
- EXPECT_EQ("42e01f", answer_params["profile-level-id"]);
-}
-
-} // namespace H264
-} // namespace webrtc
diff --git a/common_video/h264/sps_parser.cc b/common_video/h264/sps_parser.cc
index 3d78184e7a..cfb0f24ff2 100644
--- a/common_video/h264/sps_parser.cc
+++ b/common_video/h264/sps_parser.cc
@@ -14,16 +14,9 @@
#include <vector>
#include "common_video/h264/h264_common.h"
-#include "rtc_base/bit_buffer.h"
+#include "rtc_base/bitstream_reader.h"
namespace {
-typedef absl::optional<webrtc::SpsParser::SpsState> OptionalSps;
-
-#define RETURN_EMPTY_ON_FAIL(x) \
- if (!(x)) { \
- return OptionalSps(); \
- }
-
constexpr int kScalingDeltaMin = -128;
constexpr int kScaldingDeltaMax = 127;
} // namespace
@@ -42,13 +35,13 @@ SpsParser::SpsState::~SpsState() = default;
absl::optional<SpsParser::SpsState> SpsParser::ParseSps(const uint8_t* data,
size_t length) {
std::vector<uint8_t> unpacked_buffer = H264::ParseRbsp(data, length);
- rtc::BitBuffer bit_buffer(unpacked_buffer.data(), unpacked_buffer.size());
- return ParseSpsUpToVui(&bit_buffer);
+ BitstreamReader reader(unpacked_buffer);
+ return ParseSpsUpToVui(reader);
}
absl::optional<SpsParser::SpsState> SpsParser::ParseSpsUpToVui(
- rtc::BitBuffer* buffer) {
- // Now, we need to use a bit buffer to parse through the actual AVC SPS
+ BitstreamReader& reader) {
+ // Now, we need to use a bitstream reader to parse through the actual AVC SPS
// format. See Section 7.3.2.1.1 ("Sequence parameter set data syntax") of the
// H.264 standard for a complete description.
// Since we only care about resolution, we ignore the majority of fields, but
@@ -61,24 +54,18 @@ absl::optional<SpsParser::SpsState> SpsParser::ParseSpsUpToVui(
SpsState sps;
- // The golomb values we have to read, not just consume.
- uint32_t golomb_ignored;
-
// chroma_format_idc will be ChromaArrayType if separate_colour_plane_flag is
// 0. It defaults to 1, when not specified.
uint32_t chroma_format_idc = 1;
// profile_idc: u(8). We need it to determine if we need to read/skip chroma
// formats.
- uint8_t profile_idc;
- RETURN_EMPTY_ON_FAIL(buffer->ReadUInt8(&profile_idc));
+ uint8_t profile_idc = reader.Read<uint8_t>();
// constraint_set0_flag through constraint_set5_flag + reserved_zero_2bits
- // 1 bit each for the flags + 2 bits = 8 bits = 1 byte.
- RETURN_EMPTY_ON_FAIL(buffer->ConsumeBytes(1));
- // level_idc: u(8)
- RETURN_EMPTY_ON_FAIL(buffer->ConsumeBytes(1));
+ // 1 bit each for the flags + 2 bits + 8 bits for level_idc = 16 bits.
+ reader.ConsumeBits(16);
// seq_parameter_set_id: ue(v)
- RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&sps.id));
+ sps.id = reader.ReadExponentialGolomb();
sps.separate_colour_plane_flag = 0;
// See if profile_idc has chroma format information.
if (profile_idc == 100 || profile_idc == 110 || profile_idc == 122 ||
@@ -86,43 +73,37 @@ absl::optional<SpsParser::SpsState> SpsParser::ParseSpsUpToVui(
profile_idc == 86 || profile_idc == 118 || profile_idc == 128 ||
profile_idc == 138 || profile_idc == 139 || profile_idc == 134) {
// chroma_format_idc: ue(v)
- RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&chroma_format_idc));
+ chroma_format_idc = reader.ReadExponentialGolomb();
if (chroma_format_idc == 3) {
// separate_colour_plane_flag: u(1)
- RETURN_EMPTY_ON_FAIL(
- buffer->ReadBits(&sps.separate_colour_plane_flag, 1));
+ sps.separate_colour_plane_flag = reader.ReadBit();
}
// bit_depth_luma_minus8: ue(v)
- RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&golomb_ignored));
+ reader.ReadExponentialGolomb();
// bit_depth_chroma_minus8: ue(v)
- RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&golomb_ignored));
+ reader.ReadExponentialGolomb();
// qpprime_y_zero_transform_bypass_flag: u(1)
- RETURN_EMPTY_ON_FAIL(buffer->ConsumeBits(1));
+ reader.ConsumeBits(1);
// seq_scaling_matrix_present_flag: u(1)
- uint32_t seq_scaling_matrix_present_flag;
- RETURN_EMPTY_ON_FAIL(buffer->ReadBits(&seq_scaling_matrix_present_flag, 1));
- if (seq_scaling_matrix_present_flag) {
+ if (reader.Read<bool>()) {
// Process the scaling lists just enough to be able to properly
// skip over them, so we can still read the resolution on streams
// where this is included.
int scaling_list_count = (chroma_format_idc == 3 ? 12 : 8);
for (int i = 0; i < scaling_list_count; ++i) {
// seq_scaling_list_present_flag[i] : u(1)
- uint32_t seq_scaling_list_present_flags;
- RETURN_EMPTY_ON_FAIL(
- buffer->ReadBits(&seq_scaling_list_present_flags, 1));
- if (seq_scaling_list_present_flags != 0) {
+ if (reader.Read<bool>()) {
int last_scale = 8;
int next_scale = 8;
int size_of_scaling_list = i < 6 ? 16 : 64;
for (int j = 0; j < size_of_scaling_list; j++) {
if (next_scale != 0) {
- int32_t delta_scale;
// delta_scale: se(v)
- RETURN_EMPTY_ON_FAIL(
- buffer->ReadSignedExponentialGolomb(&delta_scale));
- RETURN_EMPTY_ON_FAIL(delta_scale >= kScalingDeltaMin &&
- delta_scale <= kScaldingDeltaMax);
+ int delta_scale = reader.ReadSignedExponentialGolomb();
+ if (!reader.Ok() || delta_scale < kScalingDeltaMin ||
+ delta_scale > kScaldingDeltaMax) {
+ return absl::nullopt;
+ }
next_scale = (last_scale + delta_scale + 256) % 256;
}
if (next_scale != 0)
@@ -133,50 +114,49 @@ absl::optional<SpsParser::SpsState> SpsParser::ParseSpsUpToVui(
}
}
// log2_max_frame_num and log2_max_pic_order_cnt_lsb are used with
- // BitBuffer::ReadBits, which can read at most 32 bits at a time. We also have
- // to avoid overflow when adding 4 to the on-wire golomb value, e.g., for evil
- // input data, ReadExponentialGolomb might return 0xfffc.
+ // BitstreamReader::ReadBits, which can read at most 64 bits at a time. We
+ // also have to avoid overflow when adding 4 to the on-wire golomb value,
+ // e.g., for evil input data, ReadExponentialGolomb might return 0xfffc.
const uint32_t kMaxLog2Minus4 = 32 - 4;
// log2_max_frame_num_minus4: ue(v)
- uint32_t log2_max_frame_num_minus4;
- if (!buffer->ReadExponentialGolomb(&log2_max_frame_num_minus4) ||
- log2_max_frame_num_minus4 > kMaxLog2Minus4) {
- return OptionalSps();
+ uint32_t log2_max_frame_num_minus4 = reader.ReadExponentialGolomb();
+ if (!reader.Ok() || log2_max_frame_num_minus4 > kMaxLog2Minus4) {
+ return absl::nullopt;
}
sps.log2_max_frame_num = log2_max_frame_num_minus4 + 4;
// pic_order_cnt_type: ue(v)
- RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&sps.pic_order_cnt_type));
+ sps.pic_order_cnt_type = reader.ReadExponentialGolomb();
if (sps.pic_order_cnt_type == 0) {
// log2_max_pic_order_cnt_lsb_minus4: ue(v)
- uint32_t log2_max_pic_order_cnt_lsb_minus4;
- if (!buffer->ReadExponentialGolomb(&log2_max_pic_order_cnt_lsb_minus4) ||
- log2_max_pic_order_cnt_lsb_minus4 > kMaxLog2Minus4) {
- return OptionalSps();
+ uint32_t log2_max_pic_order_cnt_lsb_minus4 = reader.ReadExponentialGolomb();
+ if (!reader.Ok() || log2_max_pic_order_cnt_lsb_minus4 > kMaxLog2Minus4) {
+ return absl::nullopt;
}
sps.log2_max_pic_order_cnt_lsb = log2_max_pic_order_cnt_lsb_minus4 + 4;
} else if (sps.pic_order_cnt_type == 1) {
// delta_pic_order_always_zero_flag: u(1)
- RETURN_EMPTY_ON_FAIL(
- buffer->ReadBits(&sps.delta_pic_order_always_zero_flag, 1));
+ sps.delta_pic_order_always_zero_flag = reader.ReadBit();
// offset_for_non_ref_pic: se(v)
- RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&golomb_ignored));
+ reader.ReadExponentialGolomb();
// offset_for_top_to_bottom_field: se(v)
- RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&golomb_ignored));
+ reader.ReadExponentialGolomb();
// num_ref_frames_in_pic_order_cnt_cycle: ue(v)
- uint32_t num_ref_frames_in_pic_order_cnt_cycle;
- RETURN_EMPTY_ON_FAIL(
- buffer->ReadExponentialGolomb(&num_ref_frames_in_pic_order_cnt_cycle));
+ uint32_t num_ref_frames_in_pic_order_cnt_cycle =
+ reader.ReadExponentialGolomb();
for (size_t i = 0; i < num_ref_frames_in_pic_order_cnt_cycle; ++i) {
// offset_for_ref_frame[i]: se(v)
- RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&golomb_ignored));
+ reader.ReadExponentialGolomb();
+ if (!reader.Ok()) {
+ return absl::nullopt;
+ }
}
}
// max_num_ref_frames: ue(v)
- RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&sps.max_num_ref_frames));
+ sps.max_num_ref_frames = reader.ReadExponentialGolomb();
// gaps_in_frame_num_value_allowed_flag: u(1)
- RETURN_EMPTY_ON_FAIL(buffer->ConsumeBits(1));
+ reader.ConsumeBits(1);
//
// IMPORTANT ONES! Now we're getting to resolution. First we read the pic
// width/height in macroblocks (16x16), which gives us the base resolution,
@@ -184,49 +164,41 @@ absl::optional<SpsParser::SpsState> SpsParser::ParseSpsUpToVui(
// to signify resolutions that aren't multiples of 16.
//
// pic_width_in_mbs_minus1: ue(v)
- uint32_t pic_width_in_mbs_minus1;
- RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&pic_width_in_mbs_minus1));
+ sps.width = 16 * (reader.ReadExponentialGolomb() + 1);
// pic_height_in_map_units_minus1: ue(v)
- uint32_t pic_height_in_map_units_minus1;
- RETURN_EMPTY_ON_FAIL(
- buffer->ReadExponentialGolomb(&pic_height_in_map_units_minus1));
+ uint32_t pic_height_in_map_units_minus1 = reader.ReadExponentialGolomb();
// frame_mbs_only_flag: u(1)
- RETURN_EMPTY_ON_FAIL(buffer->ReadBits(&sps.frame_mbs_only_flag, 1));
+ sps.frame_mbs_only_flag = reader.ReadBit();
if (!sps.frame_mbs_only_flag) {
// mb_adaptive_frame_field_flag: u(1)
- RETURN_EMPTY_ON_FAIL(buffer->ConsumeBits(1));
+ reader.ConsumeBits(1);
}
+ sps.height =
+ 16 * (2 - sps.frame_mbs_only_flag) * (pic_height_in_map_units_minus1 + 1);
// direct_8x8_inference_flag: u(1)
- RETURN_EMPTY_ON_FAIL(buffer->ConsumeBits(1));
+ reader.ConsumeBits(1);
//
// MORE IMPORTANT ONES! Now we're at the frame crop information.
//
- // frame_cropping_flag: u(1)
- uint32_t frame_cropping_flag;
uint32_t frame_crop_left_offset = 0;
uint32_t frame_crop_right_offset = 0;
uint32_t frame_crop_top_offset = 0;
uint32_t frame_crop_bottom_offset = 0;
- RETURN_EMPTY_ON_FAIL(buffer->ReadBits(&frame_cropping_flag, 1));
- if (frame_cropping_flag) {
+ // frame_cropping_flag: u(1)
+ if (reader.Read<bool>()) {
// frame_crop_{left, right, top, bottom}_offset: ue(v)
- RETURN_EMPTY_ON_FAIL(
- buffer->ReadExponentialGolomb(&frame_crop_left_offset));
- RETURN_EMPTY_ON_FAIL(
- buffer->ReadExponentialGolomb(&frame_crop_right_offset));
- RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&frame_crop_top_offset));
- RETURN_EMPTY_ON_FAIL(
- buffer->ReadExponentialGolomb(&frame_crop_bottom_offset));
+ frame_crop_left_offset = reader.ReadExponentialGolomb();
+ frame_crop_right_offset = reader.ReadExponentialGolomb();
+ frame_crop_top_offset = reader.ReadExponentialGolomb();
+ frame_crop_bottom_offset = reader.ReadExponentialGolomb();
}
// vui_parameters_present_flag: u(1)
- RETURN_EMPTY_ON_FAIL(buffer->ReadBits(&sps.vui_params_present, 1));
+ sps.vui_params_present = reader.ReadBit();
// Far enough! We don't use the rest of the SPS.
-
- // Start with the resolution determined by the pic_width/pic_height fields.
- sps.width = 16 * (pic_width_in_mbs_minus1 + 1);
- sps.height =
- 16 * (2 - sps.frame_mbs_only_flag) * (pic_height_in_map_units_minus1 + 1);
+ if (!reader.Ok()) {
+ return absl::nullopt;
+ }
// Figure out the crop units in pixels. That's based on the chroma format's
// sampling, which is indicated by chroma_format_idc.
@@ -249,7 +221,7 @@ absl::optional<SpsParser::SpsState> SpsParser::ParseSpsUpToVui(
sps.width -= (frame_crop_left_offset + frame_crop_right_offset);
sps.height -= (frame_crop_top_offset + frame_crop_bottom_offset);
- return OptionalSps(sps);
+ return sps;
}
} // namespace webrtc
diff --git a/common_video/h264/sps_parser.h b/common_video/h264/sps_parser.h
index 76e627d27a..da328b48b0 100644
--- a/common_video/h264/sps_parser.h
+++ b/common_video/h264/sps_parser.h
@@ -12,10 +12,7 @@
#define COMMON_VIDEO_H264_SPS_PARSER_H_
#include "absl/types/optional.h"
-
-namespace rtc {
-class BitBuffer;
-}
+#include "rtc_base/bitstream_reader.h"
namespace webrtc {
@@ -46,9 +43,9 @@ class SpsParser {
static absl::optional<SpsState> ParseSps(const uint8_t* data, size_t length);
protected:
- // Parse the SPS state, up till the VUI part, for a bit buffer where RBSP
+ // Parse the SPS state, up till the VUI part, for a buffer where RBSP
// decoding has already been performed.
- static absl::optional<SpsState> ParseSpsUpToVui(rtc::BitBuffer* buffer);
+ static absl::optional<SpsState> ParseSpsUpToVui(BitstreamReader& reader);
};
} // namespace webrtc
diff --git a/common_video/h264/sps_parser_unittest.cc b/common_video/h264/sps_parser_unittest.cc
index 73157ffae9..9e210c65d8 100644
--- a/common_video/h264/sps_parser_unittest.cc
+++ b/common_video/h264/sps_parser_unittest.cc
@@ -110,123 +110,122 @@ void GenerateFakeSps(uint16_t width,
H264::WriteRbsp(rbsp, byte_count, out_buffer);
}
-// TODO(nisse): Delete test fixture.
-class H264SpsParserTest : public ::testing::Test {
- public:
- H264SpsParserTest() {}
- ~H264SpsParserTest() override {}
-
- absl::optional<SpsParser::SpsState> sps_;
-};
-
-TEST_F(H264SpsParserTest, TestSampleSPSHdLandscape) {
+TEST(H264SpsParserTest, TestSampleSPSHdLandscape) {
// SPS for a 1280x720 camera capture from ffmpeg on osx. Contains
// emulation bytes but no cropping.
const uint8_t buffer[] = {0x7A, 0x00, 0x1F, 0xBC, 0xD9, 0x40, 0x50, 0x05,
0xBA, 0x10, 0x00, 0x00, 0x03, 0x00, 0xC0, 0x00,
0x00, 0x2A, 0xE0, 0xF1, 0x83, 0x19, 0x60};
- EXPECT_TRUE(
- static_cast<bool>(sps_ = SpsParser::ParseSps(buffer, arraysize(buffer))));
- EXPECT_EQ(1280u, sps_->width);
- EXPECT_EQ(720u, sps_->height);
+ absl::optional<SpsParser::SpsState> sps =
+ SpsParser::ParseSps(buffer, arraysize(buffer));
+ ASSERT_TRUE(sps.has_value());
+ EXPECT_EQ(1280u, sps->width);
+ EXPECT_EQ(720u, sps->height);
}
-TEST_F(H264SpsParserTest, TestSampleSPSVgaLandscape) {
+TEST(H264SpsParserTest, TestSampleSPSVgaLandscape) {
// SPS for a 640x360 camera capture from ffmpeg on osx. Contains emulation
// bytes and cropping (360 isn't divisible by 16).
const uint8_t buffer[] = {0x7A, 0x00, 0x1E, 0xBC, 0xD9, 0x40, 0xA0, 0x2F,
0xF8, 0x98, 0x40, 0x00, 0x00, 0x03, 0x01, 0x80,
0x00, 0x00, 0x56, 0x83, 0xC5, 0x8B, 0x65, 0x80};
- EXPECT_TRUE(
- static_cast<bool>(sps_ = SpsParser::ParseSps(buffer, arraysize(buffer))));
- EXPECT_EQ(640u, sps_->width);
- EXPECT_EQ(360u, sps_->height);
+ absl::optional<SpsParser::SpsState> sps =
+ SpsParser::ParseSps(buffer, arraysize(buffer));
+ ASSERT_TRUE(sps.has_value());
+ EXPECT_EQ(640u, sps->width);
+ EXPECT_EQ(360u, sps->height);
}
-TEST_F(H264SpsParserTest, TestSampleSPSWeirdResolution) {
+TEST(H264SpsParserTest, TestSampleSPSWeirdResolution) {
// SPS for a 200x400 camera capture from ffmpeg on osx. Horizontal and
// veritcal crop (neither dimension is divisible by 16).
const uint8_t buffer[] = {0x7A, 0x00, 0x0D, 0xBC, 0xD9, 0x43, 0x43, 0x3E,
0x5E, 0x10, 0x00, 0x00, 0x03, 0x00, 0x60, 0x00,
0x00, 0x15, 0xA0, 0xF1, 0x42, 0x99, 0x60};
- EXPECT_TRUE(
- static_cast<bool>(sps_ = SpsParser::ParseSps(buffer, arraysize(buffer))));
- EXPECT_EQ(200u, sps_->width);
- EXPECT_EQ(400u, sps_->height);
+ absl::optional<SpsParser::SpsState> sps =
+ SpsParser::ParseSps(buffer, arraysize(buffer));
+ ASSERT_TRUE(sps.has_value());
+ EXPECT_EQ(200u, sps->width);
+ EXPECT_EQ(400u, sps->height);
}
-TEST_F(H264SpsParserTest, TestSyntheticSPSQvgaLandscape) {
+TEST(H264SpsParserTest, TestSyntheticSPSQvgaLandscape) {
rtc::Buffer buffer;
GenerateFakeSps(320u, 180u, 1, 0, 0, &buffer);
- EXPECT_TRUE(static_cast<bool>(
- sps_ = SpsParser::ParseSps(buffer.data(), buffer.size())));
- EXPECT_EQ(320u, sps_->width);
- EXPECT_EQ(180u, sps_->height);
- EXPECT_EQ(1u, sps_->id);
+ absl::optional<SpsParser::SpsState> sps =
+ SpsParser::ParseSps(buffer.data(), buffer.size());
+ ASSERT_TRUE(sps.has_value());
+ EXPECT_EQ(320u, sps->width);
+ EXPECT_EQ(180u, sps->height);
+ EXPECT_EQ(1u, sps->id);
}
-TEST_F(H264SpsParserTest, TestSyntheticSPSWeirdResolution) {
+TEST(H264SpsParserTest, TestSyntheticSPSWeirdResolution) {
rtc::Buffer buffer;
GenerateFakeSps(156u, 122u, 2, 0, 0, &buffer);
- EXPECT_TRUE(static_cast<bool>(
- sps_ = SpsParser::ParseSps(buffer.data(), buffer.size())));
- EXPECT_EQ(156u, sps_->width);
- EXPECT_EQ(122u, sps_->height);
- EXPECT_EQ(2u, sps_->id);
+ absl::optional<SpsParser::SpsState> sps =
+ SpsParser::ParseSps(buffer.data(), buffer.size());
+ ASSERT_TRUE(sps.has_value());
+ EXPECT_EQ(156u, sps->width);
+ EXPECT_EQ(122u, sps->height);
+ EXPECT_EQ(2u, sps->id);
}
-TEST_F(H264SpsParserTest, TestSampleSPSWithScalingLists) {
+TEST(H264SpsParserTest, TestSampleSPSWithScalingLists) {
// SPS from a 1920x1080 video. Contains scaling lists (and vertical cropping).
const uint8_t buffer[] = {0x64, 0x00, 0x2a, 0xad, 0x84, 0x01, 0x0c, 0x20,
0x08, 0x61, 0x00, 0x43, 0x08, 0x02, 0x18, 0x40,
0x10, 0xc2, 0x00, 0x84, 0x3b, 0x50, 0x3c, 0x01,
0x13, 0xf2, 0xcd, 0xc0, 0x40, 0x40, 0x50, 0x00,
0x00, 0x00, 0x10, 0x00, 0x00, 0x01, 0xe8, 0x40};
- EXPECT_TRUE(
- static_cast<bool>(sps_ = SpsParser::ParseSps(buffer, arraysize(buffer))));
- EXPECT_EQ(1920u, sps_->width);
- EXPECT_EQ(1080u, sps_->height);
+ absl::optional<SpsParser::SpsState> sps =
+ SpsParser::ParseSps(buffer, arraysize(buffer));
+ ASSERT_TRUE(sps.has_value());
+ EXPECT_EQ(1920u, sps->width);
+ EXPECT_EQ(1080u, sps->height);
}
-TEST_F(H264SpsParserTest, TestLog2MaxFrameNumMinus4) {
+TEST(H264SpsParserTest, TestLog2MaxFrameNumMinus4) {
rtc::Buffer buffer;
GenerateFakeSps(320u, 180u, 1, 0, 0, &buffer);
- EXPECT_TRUE(static_cast<bool>(
- sps_ = SpsParser::ParseSps(buffer.data(), buffer.size())));
- EXPECT_EQ(320u, sps_->width);
- EXPECT_EQ(180u, sps_->height);
- EXPECT_EQ(1u, sps_->id);
- EXPECT_EQ(4u, sps_->log2_max_frame_num);
+ absl::optional<SpsParser::SpsState> sps =
+ SpsParser::ParseSps(buffer.data(), buffer.size());
+ ASSERT_TRUE(sps.has_value());
+ EXPECT_EQ(320u, sps->width);
+ EXPECT_EQ(180u, sps->height);
+ EXPECT_EQ(1u, sps->id);
+ EXPECT_EQ(4u, sps->log2_max_frame_num);
GenerateFakeSps(320u, 180u, 1, 28, 0, &buffer);
- EXPECT_TRUE(static_cast<bool>(
- sps_ = SpsParser::ParseSps(buffer.data(), buffer.size())));
- EXPECT_EQ(320u, sps_->width);
- EXPECT_EQ(180u, sps_->height);
- EXPECT_EQ(1u, sps_->id);
- EXPECT_EQ(32u, sps_->log2_max_frame_num);
+ sps = SpsParser::ParseSps(buffer.data(), buffer.size());
+ ASSERT_TRUE(sps.has_value());
+ EXPECT_EQ(320u, sps->width);
+ EXPECT_EQ(180u, sps->height);
+ EXPECT_EQ(1u, sps->id);
+ EXPECT_EQ(32u, sps->log2_max_frame_num);
GenerateFakeSps(320u, 180u, 1, 29, 0, &buffer);
EXPECT_FALSE(SpsParser::ParseSps(buffer.data(), buffer.size()));
}
-TEST_F(H264SpsParserTest, TestLog2MaxPicOrderCntMinus4) {
+TEST(H264SpsParserTest, TestLog2MaxPicOrderCntMinus4) {
rtc::Buffer buffer;
GenerateFakeSps(320u, 180u, 1, 0, 0, &buffer);
- EXPECT_TRUE(static_cast<bool>(
- sps_ = SpsParser::ParseSps(buffer.data(), buffer.size())));
- EXPECT_EQ(320u, sps_->width);
- EXPECT_EQ(180u, sps_->height);
- EXPECT_EQ(1u, sps_->id);
- EXPECT_EQ(4u, sps_->log2_max_pic_order_cnt_lsb);
+ absl::optional<SpsParser::SpsState> sps =
+ SpsParser::ParseSps(buffer.data(), buffer.size());
+ ASSERT_TRUE(sps.has_value());
+ EXPECT_EQ(320u, sps->width);
+ EXPECT_EQ(180u, sps->height);
+ EXPECT_EQ(1u, sps->id);
+ EXPECT_EQ(4u, sps->log2_max_pic_order_cnt_lsb);
GenerateFakeSps(320u, 180u, 1, 0, 28, &buffer);
EXPECT_TRUE(static_cast<bool>(
- sps_ = SpsParser::ParseSps(buffer.data(), buffer.size())));
- EXPECT_EQ(320u, sps_->width);
- EXPECT_EQ(180u, sps_->height);
- EXPECT_EQ(1u, sps_->id);
- EXPECT_EQ(32u, sps_->log2_max_pic_order_cnt_lsb);
+ sps = SpsParser::ParseSps(buffer.data(), buffer.size())));
+ EXPECT_EQ(320u, sps->width);
+ EXPECT_EQ(180u, sps->height);
+ EXPECT_EQ(1u, sps->id);
+ EXPECT_EQ(32u, sps->log2_max_pic_order_cnt_lsb);
GenerateFakeSps(320u, 180u, 1, 0, 29, &buffer);
EXPECT_FALSE(SpsParser::ParseSps(buffer.data(), buffer.size()));
diff --git a/common_video/h264/sps_vui_rewriter.cc b/common_video/h264/sps_vui_rewriter.cc
index 1c420a9e4b..117e92a1e5 100644
--- a/common_video/h264/sps_vui_rewriter.cc
+++ b/common_video/h264/sps_vui_rewriter.cc
@@ -13,6 +13,7 @@
#include <string.h>
+#include <algorithm>
#include <cstdint>
#include <vector>
@@ -20,9 +21,9 @@
#include "common_video/h264/h264_common.h"
#include "common_video/h264/sps_parser.h"
#include "rtc_base/bit_buffer.h"
+#include "rtc_base/bitstream_reader.h"
#include "rtc_base/checks.h"
#include "rtc_base/logging.h"
-#include "rtc_base/numerics/safe_minmax.h"
#include "system_wrappers/include/metrics.h"
namespace webrtc {
@@ -45,52 +46,63 @@ enum SpsValidEvent {
kSpsRewrittenMax = 8
};
-#define RETURN_FALSE_ON_FAIL(x) \
- if (!(x)) { \
- RTC_LOG_F(LS_ERROR) << " (line:" << __LINE__ << ") FAILED: " #x; \
- return false; \
- }
-
-#define COPY_UINT8(src, dest, tmp) \
- do { \
- RETURN_FALSE_ON_FAIL((src)->ReadUInt8(&tmp)); \
- if (dest) \
- RETURN_FALSE_ON_FAIL((dest)->WriteUInt8(tmp)); \
+#define RETURN_FALSE_ON_FAIL(x) \
+ do { \
+ if (!(x)) { \
+ RTC_LOG_F(LS_ERROR) << " (line:" << __LINE__ << ") FAILED: " #x; \
+ return false; \
+ } \
} while (0)
-#define COPY_EXP_GOLOMB(src, dest, tmp) \
- do { \
- RETURN_FALSE_ON_FAIL((src)->ReadExponentialGolomb(&tmp)); \
- if (dest) \
- RETURN_FALSE_ON_FAIL((dest)->WriteExponentialGolomb(tmp)); \
- } while (0)
+uint8_t CopyUInt8(BitstreamReader& source, rtc::BitBufferWriter& destination) {
+ uint8_t tmp = source.Read<uint8_t>();
+ if (!destination.WriteUInt8(tmp)) {
+ source.Invalidate();
+ }
+ return tmp;
+}
-#define COPY_BITS(src, dest, tmp, bits) \
- do { \
- RETURN_FALSE_ON_FAIL((src)->ReadBits(&tmp, bits)); \
- if (dest) \
- RETURN_FALSE_ON_FAIL((dest)->WriteBits(tmp, bits)); \
- } while (0)
+uint32_t CopyExpGolomb(BitstreamReader& source,
+ rtc::BitBufferWriter& destination) {
+ uint32_t tmp = source.ReadExponentialGolomb();
+ if (!destination.WriteExponentialGolomb(tmp)) {
+ source.Invalidate();
+ }
+ return tmp;
+}
+
+uint32_t CopyBits(int bits,
+ BitstreamReader& source,
+ rtc::BitBufferWriter& destination) {
+ RTC_DCHECK_GT(bits, 0);
+ RTC_DCHECK_LE(bits, 32);
+ uint64_t tmp = source.ReadBits(bits);
+ if (!destination.WriteBits(tmp, bits)) {
+ source.Invalidate();
+ }
+ return tmp;
+}
bool CopyAndRewriteVui(const SpsParser::SpsState& sps,
- rtc::BitBuffer* source,
- rtc::BitBufferWriter* destination,
+ BitstreamReader& source,
+ rtc::BitBufferWriter& destination,
const webrtc::ColorSpace* color_space,
- SpsVuiRewriter::ParseResult* out_vui_rewritten);
-bool CopyHrdParameters(rtc::BitBuffer* source,
- rtc::BitBufferWriter* destination);
+ SpsVuiRewriter::ParseResult& out_vui_rewritten);
+
+void CopyHrdParameters(BitstreamReader& source,
+ rtc::BitBufferWriter& destination);
bool AddBitstreamRestriction(rtc::BitBufferWriter* destination,
uint32_t max_num_ref_frames);
bool IsDefaultColorSpace(const ColorSpace& color_space);
-bool AddVideoSignalTypeInfo(rtc::BitBufferWriter* destination,
+bool AddVideoSignalTypeInfo(rtc::BitBufferWriter& destination,
const ColorSpace& color_space);
bool CopyOrRewriteVideoSignalTypeInfo(
- rtc::BitBuffer* source,
- rtc::BitBufferWriter* destination,
+ BitstreamReader& source,
+ rtc::BitBufferWriter& destination,
const ColorSpace* color_space,
- SpsVuiRewriter::ParseResult* out_vui_rewritten);
-bool CopyRemainingBits(rtc::BitBuffer* source,
- rtc::BitBufferWriter* destination);
+ SpsVuiRewriter::ParseResult& out_vui_rewritten);
+bool CopyRemainingBits(BitstreamReader& source,
+ rtc::BitBufferWriter& destination);
} // namespace
void SpsVuiRewriter::UpdateStats(ParseResult result, Direction direction) {
@@ -131,23 +143,25 @@ SpsVuiRewriter::ParseResult SpsVuiRewriter::ParseAndRewriteSps(
// Create temporary RBSP decoded buffer of the payload (exlcuding the
// leading nalu type header byte (the SpsParser uses only the payload).
std::vector<uint8_t> rbsp_buffer = H264::ParseRbsp(buffer, length);
- rtc::BitBuffer source_buffer(rbsp_buffer.data(), rbsp_buffer.size());
+ BitstreamReader source_buffer(rbsp_buffer);
absl::optional<SpsParser::SpsState> sps_state =
- SpsParser::ParseSpsUpToVui(&source_buffer);
+ SpsParser::ParseSpsUpToVui(source_buffer);
if (!sps_state)
return ParseResult::kFailure;
*sps = sps_state;
- // We're going to completely muck up alignment, so we need a BitBuffer to
- // write with.
+ // We're going to completely muck up alignment, so we need a BitBufferWriter
+ // to write with.
rtc::Buffer out_buffer(length + kMaxVuiSpsIncrease);
rtc::BitBufferWriter sps_writer(out_buffer.data(), out_buffer.size());
// Check how far the SpsParser has read, and copy that data in bulk.
- size_t byte_offset;
- size_t bit_offset;
- source_buffer.GetCurrentOffset(&byte_offset, &bit_offset);
+ RTC_DCHECK(source_buffer.Ok());
+ size_t total_bit_offset =
+ rbsp_buffer.size() * 8 - source_buffer.RemainingBitCount();
+ size_t byte_offset = total_bit_offset / 8;
+ size_t bit_offset = total_bit_offset % 8;
memcpy(out_buffer.data(), rbsp_buffer.data(),
byte_offset + (bit_offset > 0 ? 1 : 0)); // OK to copy the last bits.
@@ -162,8 +176,8 @@ SpsVuiRewriter::ParseResult SpsVuiRewriter::ParseAndRewriteSps(
sps_writer.Seek(byte_offset, bit_offset);
ParseResult vui_updated;
- if (!CopyAndRewriteVui(*sps_state, &source_buffer, &sps_writer, color_space,
- &vui_updated)) {
+ if (!CopyAndRewriteVui(*sps_state, source_buffer, sps_writer, color_space,
+ vui_updated)) {
RTC_LOG(LS_ERROR) << "Failed to parse/copy SPS VUI.";
return ParseResult::kFailure;
}
@@ -173,7 +187,7 @@ SpsVuiRewriter::ParseResult SpsVuiRewriter::ParseAndRewriteSps(
return vui_updated;
}
- if (!CopyRemainingBits(&source_buffer, &sps_writer)) {
+ if (!CopyRemainingBits(source_buffer, sps_writer)) {
RTC_LOG(LS_ERROR) << "Failed to parse/copy SPS VUI.";
return ParseResult::kFailure;
}
@@ -210,32 +224,23 @@ SpsVuiRewriter::ParseResult SpsVuiRewriter::ParseAndRewriteSps(
return result;
}
-void SpsVuiRewriter::ParseOutgoingBitstreamAndRewriteSps(
+rtc::Buffer SpsVuiRewriter::ParseOutgoingBitstreamAndRewrite(
rtc::ArrayView<const uint8_t> buffer,
- size_t num_nalus,
- const size_t* nalu_offsets,
- const size_t* nalu_lengths,
- const webrtc::ColorSpace* color_space,
- rtc::Buffer* output_buffer,
- size_t* output_nalu_offsets,
- size_t* output_nalu_lengths) {
- // Allocate some extra space for potentially adding a missing VUI.
- output_buffer->EnsureCapacity(buffer.size() + num_nalus * kMaxVuiSpsIncrease);
+ const webrtc::ColorSpace* color_space) {
+ std::vector<H264::NaluIndex> nalus =
+ H264::FindNaluIndices(buffer.data(), buffer.size());
- const uint8_t* prev_nalu_ptr = buffer.data();
- size_t prev_nalu_length = 0;
-
- for (size_t i = 0; i < num_nalus; ++i) {
- const uint8_t* nalu_ptr = buffer.data() + nalu_offsets[i];
- const size_t nalu_length = nalu_lengths[i];
+ // Allocate some extra space for potentially adding a missing VUI.
+ rtc::Buffer output_buffer(/*size=*/0, /*capacity=*/buffer.size() +
+ nalus.size() * kMaxVuiSpsIncrease);
+ for (const H264::NaluIndex& nalu : nalus) {
// Copy NAL unit start code.
- const uint8_t* start_code_ptr = prev_nalu_ptr + prev_nalu_length;
+ const uint8_t* start_code_ptr = buffer.data() + nalu.start_offset;
const size_t start_code_length =
- (nalu_ptr - prev_nalu_ptr) - prev_nalu_length;
- output_buffer->AppendData(start_code_ptr, start_code_length);
-
- bool updated_sps = false;
+ nalu.payload_start_offset - nalu.start_offset;
+ const uint8_t* nalu_ptr = buffer.data() + nalu.payload_start_offset;
+ const size_t nalu_length = nalu.payload_size;
if (H264::ParseNaluType(nalu_ptr[0]) == H264::NaluType::kSps) {
// Check if stream uses picture order count type 0, and if so rewrite it
@@ -260,39 +265,34 @@ void SpsVuiRewriter::ParseOutgoingBitstreamAndRewriteSps(
nalu_ptr + H264::kNaluTypeSize, nalu_length - H264::kNaluTypeSize,
&sps, color_space, &output_nalu, Direction::kOutgoing);
if (result == ParseResult::kVuiRewritten) {
- updated_sps = true;
- output_nalu_offsets[i] = output_buffer->size();
- output_nalu_lengths[i] = output_nalu.size();
- output_buffer->AppendData(output_nalu.data(), output_nalu.size());
+ output_buffer.AppendData(start_code_ptr, start_code_length);
+ output_buffer.AppendData(output_nalu.data(), output_nalu.size());
+ continue;
}
+ } else if (H264::ParseNaluType(nalu_ptr[0]) == H264::NaluType::kAud) {
+ // Skip the access unit delimiter copy.
+ continue;
}
- if (!updated_sps) {
- output_nalu_offsets[i] = output_buffer->size();
- output_nalu_lengths[i] = nalu_length;
- output_buffer->AppendData(nalu_ptr, nalu_length);
- }
-
- prev_nalu_ptr = nalu_ptr;
- prev_nalu_length = nalu_length;
+ // vui wasn't rewritten and it is not aud, copy the nal unit as is.
+ output_buffer.AppendData(start_code_ptr, start_code_length);
+ output_buffer.AppendData(nalu_ptr, nalu_length);
}
+ return output_buffer;
}
namespace {
bool CopyAndRewriteVui(const SpsParser::SpsState& sps,
- rtc::BitBuffer* source,
- rtc::BitBufferWriter* destination,
+ BitstreamReader& source,
+ rtc::BitBufferWriter& destination,
const webrtc::ColorSpace* color_space,
- SpsVuiRewriter::ParseResult* out_vui_rewritten) {
- uint32_t golomb_tmp;
- uint32_t bits_tmp;
-
- *out_vui_rewritten = SpsVuiRewriter::ParseResult::kVuiOk;
+ SpsVuiRewriter::ParseResult& out_vui_rewritten) {
+ out_vui_rewritten = SpsVuiRewriter::ParseResult::kVuiOk;
//
// vui_parameters_present_flag: u(1)
//
- RETURN_FALSE_ON_FAIL(destination->WriteBits(1, 1));
+ RETURN_FALSE_ON_FAIL(destination.WriteBits(1, 1));
// ********* IMPORTANT! **********
// Now we're at the VUI, so we want to (1) add it if it isn't present, and
@@ -301,154 +301,140 @@ bool CopyAndRewriteVui(const SpsParser::SpsState& sps,
// Write a simple VUI with the parameters we want and 0 for all other flags.
// aspect_ratio_info_present_flag, overscan_info_present_flag. Both u(1).
- RETURN_FALSE_ON_FAIL(destination->WriteBits(0, 2));
+ RETURN_FALSE_ON_FAIL(destination.WriteBits(0, 2));
uint32_t video_signal_type_present_flag =
(color_space && !IsDefaultColorSpace(*color_space)) ? 1 : 0;
RETURN_FALSE_ON_FAIL(
- destination->WriteBits(video_signal_type_present_flag, 1));
+ destination.WriteBits(video_signal_type_present_flag, 1));
if (video_signal_type_present_flag) {
RETURN_FALSE_ON_FAIL(AddVideoSignalTypeInfo(destination, *color_space));
}
// chroma_loc_info_present_flag, timing_info_present_flag,
// nal_hrd_parameters_present_flag, vcl_hrd_parameters_present_flag,
// pic_struct_present_flag, All u(1)
- RETURN_FALSE_ON_FAIL(destination->WriteBits(0, 5));
+ RETURN_FALSE_ON_FAIL(destination.WriteBits(0, 5));
// bitstream_restriction_flag: u(1)
- RETURN_FALSE_ON_FAIL(destination->WriteBits(1, 1));
+ RETURN_FALSE_ON_FAIL(destination.WriteBits(1, 1));
RETURN_FALSE_ON_FAIL(
- AddBitstreamRestriction(destination, sps.max_num_ref_frames));
+ AddBitstreamRestriction(&destination, sps.max_num_ref_frames));
- *out_vui_rewritten = SpsVuiRewriter::ParseResult::kVuiRewritten;
+ out_vui_rewritten = SpsVuiRewriter::ParseResult::kVuiRewritten;
} else {
// Parse out the full VUI.
// aspect_ratio_info_present_flag: u(1)
- COPY_BITS(source, destination, bits_tmp, 1);
- if (bits_tmp == 1) {
+ uint32_t aspect_ratio_info_present_flag = CopyBits(1, source, destination);
+ if (aspect_ratio_info_present_flag) {
// aspect_ratio_idc: u(8)
- COPY_BITS(source, destination, bits_tmp, 8);
- if (bits_tmp == 255u) { // Extended_SAR
+ uint8_t aspect_ratio_idc = CopyUInt8(source, destination);
+ if (aspect_ratio_idc == 255u) { // Extended_SAR
// sar_width/sar_height: u(16) each.
- COPY_BITS(source, destination, bits_tmp, 32);
+ CopyBits(32, source, destination);
}
}
// overscan_info_present_flag: u(1)
- COPY_BITS(source, destination, bits_tmp, 1);
- if (bits_tmp == 1) {
+ uint32_t overscan_info_present_flag = CopyBits(1, source, destination);
+ if (overscan_info_present_flag) {
// overscan_appropriate_flag: u(1)
- COPY_BITS(source, destination, bits_tmp, 1);
+ CopyBits(1, source, destination);
}
CopyOrRewriteVideoSignalTypeInfo(source, destination, color_space,
out_vui_rewritten);
// chroma_loc_info_present_flag: u(1)
- COPY_BITS(source, destination, bits_tmp, 1);
- if (bits_tmp == 1) {
+ uint32_t chroma_loc_info_present_flag = CopyBits(1, source, destination);
+ if (chroma_loc_info_present_flag == 1) {
// chroma_sample_loc_type_(top|bottom)_field: ue(v) each.
- COPY_EXP_GOLOMB(source, destination, golomb_tmp);
- COPY_EXP_GOLOMB(source, destination, golomb_tmp);
+ CopyExpGolomb(source, destination);
+ CopyExpGolomb(source, destination);
}
// timing_info_present_flag: u(1)
- COPY_BITS(source, destination, bits_tmp, 1);
- if (bits_tmp == 1) {
+ uint32_t timing_info_present_flag = CopyBits(1, source, destination);
+ if (timing_info_present_flag == 1) {
// num_units_in_tick, time_scale: u(32) each
- COPY_BITS(source, destination, bits_tmp, 32);
- COPY_BITS(source, destination, bits_tmp, 32);
+ CopyBits(32, source, destination);
+ CopyBits(32, source, destination);
// fixed_frame_rate_flag: u(1)
- COPY_BITS(source, destination, bits_tmp, 1);
+ CopyBits(1, source, destination);
}
// nal_hrd_parameters_present_flag: u(1)
- uint32_t nal_hrd_parameters_present_flag;
- COPY_BITS(source, destination, nal_hrd_parameters_present_flag, 1);
+ uint32_t nal_hrd_parameters_present_flag = CopyBits(1, source, destination);
if (nal_hrd_parameters_present_flag == 1) {
- RETURN_FALSE_ON_FAIL(CopyHrdParameters(source, destination));
+ CopyHrdParameters(source, destination);
}
// vcl_hrd_parameters_present_flag: u(1)
- uint32_t vcl_hrd_parameters_present_flag;
- COPY_BITS(source, destination, vcl_hrd_parameters_present_flag, 1);
+ uint32_t vcl_hrd_parameters_present_flag = CopyBits(1, source, destination);
if (vcl_hrd_parameters_present_flag == 1) {
- RETURN_FALSE_ON_FAIL(CopyHrdParameters(source, destination));
+ CopyHrdParameters(source, destination);
}
if (nal_hrd_parameters_present_flag == 1 ||
vcl_hrd_parameters_present_flag == 1) {
// low_delay_hrd_flag: u(1)
- COPY_BITS(source, destination, bits_tmp, 1);
+ CopyBits(1, source, destination);
}
// pic_struct_present_flag: u(1)
- COPY_BITS(source, destination, bits_tmp, 1);
+ CopyBits(1, source, destination);
// bitstream_restriction_flag: u(1)
- uint32_t bitstream_restriction_flag;
- RETURN_FALSE_ON_FAIL(source->ReadBits(&bitstream_restriction_flag, 1));
- RETURN_FALSE_ON_FAIL(destination->WriteBits(1, 1));
+ uint32_t bitstream_restriction_flag = source.ReadBit();
+ RETURN_FALSE_ON_FAIL(destination.WriteBits(1, 1));
if (bitstream_restriction_flag == 0) {
// We're adding one from scratch.
RETURN_FALSE_ON_FAIL(
- AddBitstreamRestriction(destination, sps.max_num_ref_frames));
- *out_vui_rewritten = SpsVuiRewriter::ParseResult::kVuiRewritten;
+ AddBitstreamRestriction(&destination, sps.max_num_ref_frames));
+ out_vui_rewritten = SpsVuiRewriter::ParseResult::kVuiRewritten;
} else {
// We're replacing.
// motion_vectors_over_pic_boundaries_flag: u(1)
- COPY_BITS(source, destination, bits_tmp, 1);
+ CopyBits(1, source, destination);
// max_bytes_per_pic_denom: ue(v)
- COPY_EXP_GOLOMB(source, destination, golomb_tmp);
+ CopyExpGolomb(source, destination);
// max_bits_per_mb_denom: ue(v)
- COPY_EXP_GOLOMB(source, destination, golomb_tmp);
+ CopyExpGolomb(source, destination);
// log2_max_mv_length_horizontal: ue(v)
- COPY_EXP_GOLOMB(source, destination, golomb_tmp);
+ CopyExpGolomb(source, destination);
// log2_max_mv_length_vertical: ue(v)
- COPY_EXP_GOLOMB(source, destination, golomb_tmp);
+ CopyExpGolomb(source, destination);
// ********* IMPORTANT! **********
// The next two are the ones we need to set to low numbers:
// max_num_reorder_frames: ue(v)
// max_dec_frame_buffering: ue(v)
// However, if they are already set to no greater than the numbers we
// want, then we don't need to be rewriting.
- uint32_t max_num_reorder_frames, max_dec_frame_buffering;
+ uint32_t max_num_reorder_frames = source.ReadExponentialGolomb();
+ uint32_t max_dec_frame_buffering = source.ReadExponentialGolomb();
+ RETURN_FALSE_ON_FAIL(destination.WriteExponentialGolomb(0));
RETURN_FALSE_ON_FAIL(
- source->ReadExponentialGolomb(&max_num_reorder_frames));
- RETURN_FALSE_ON_FAIL(
- source->ReadExponentialGolomb(&max_dec_frame_buffering));
- RETURN_FALSE_ON_FAIL(destination->WriteExponentialGolomb(0));
- RETURN_FALSE_ON_FAIL(
- destination->WriteExponentialGolomb(sps.max_num_ref_frames));
+ destination.WriteExponentialGolomb(sps.max_num_ref_frames));
if (max_num_reorder_frames != 0 ||
max_dec_frame_buffering > sps.max_num_ref_frames) {
- *out_vui_rewritten = SpsVuiRewriter::ParseResult::kVuiRewritten;
+ out_vui_rewritten = SpsVuiRewriter::ParseResult::kVuiRewritten;
}
}
}
- return true;
+ return source.Ok();
}
// Copies a VUI HRD parameters segment.
-bool CopyHrdParameters(rtc::BitBuffer* source,
- rtc::BitBufferWriter* destination) {
- uint32_t golomb_tmp;
- uint32_t bits_tmp;
-
+void CopyHrdParameters(BitstreamReader& source,
+ rtc::BitBufferWriter& destination) {
// cbp_cnt_minus1: ue(v)
- uint32_t cbp_cnt_minus1;
- COPY_EXP_GOLOMB(source, destination, cbp_cnt_minus1);
+ uint32_t cbp_cnt_minus1 = CopyExpGolomb(source, destination);
// bit_rate_scale and cbp_size_scale: u(4) each
- COPY_BITS(source, destination, bits_tmp, 8);
- for (size_t i = 0; i <= cbp_cnt_minus1; ++i) {
+ CopyBits(8, source, destination);
+ for (size_t i = 0; source.Ok() && i <= cbp_cnt_minus1; ++i) {
// bit_rate_value_minus1 and cbp_size_value_minus1: ue(v) each
- COPY_EXP_GOLOMB(source, destination, golomb_tmp);
- COPY_EXP_GOLOMB(source, destination, golomb_tmp);
+ CopyExpGolomb(source, destination);
+ CopyExpGolomb(source, destination);
// cbr_flag: u(1)
- COPY_BITS(source, destination, bits_tmp, 1);
+ CopyBits(1, source, destination);
}
// initial_cbp_removal_delay_length_minus1: u(5)
- COPY_BITS(source, destination, bits_tmp, 5);
// cbp_removal_delay_length_minus1: u(5)
- COPY_BITS(source, destination, bits_tmp, 5);
// dbp_output_delay_length_minus1: u(5)
- COPY_BITS(source, destination, bits_tmp, 5);
// time_offset_length: u(5)
- COPY_BITS(source, destination, bits_tmp, 5);
- return true;
+ CopyBits(5 * 4, source, destination);
}
// These functions are similar to webrtc::H264SpsParser::Parse, and based on the
@@ -488,51 +474,51 @@ bool IsDefaultColorSpace(const ColorSpace& color_space) {
color_space.matrix() == ColorSpace::MatrixID::kUnspecified;
}
-bool AddVideoSignalTypeInfo(rtc::BitBufferWriter* destination,
+bool AddVideoSignalTypeInfo(rtc::BitBufferWriter& destination,
const ColorSpace& color_space) {
// video_format: u(3).
- RETURN_FALSE_ON_FAIL(destination->WriteBits(5, 3)); // 5 = Unspecified
+ RETURN_FALSE_ON_FAIL(destination.WriteBits(5, 3)); // 5 = Unspecified
// video_full_range_flag: u(1)
- RETURN_FALSE_ON_FAIL(destination->WriteBits(
+ RETURN_FALSE_ON_FAIL(destination.WriteBits(
color_space.range() == ColorSpace::RangeID::kFull ? 1 : 0, 1));
// colour_description_present_flag: u(1)
- RETURN_FALSE_ON_FAIL(destination->WriteBits(1, 1));
+ RETURN_FALSE_ON_FAIL(destination.WriteBits(1, 1));
// colour_primaries: u(8)
RETURN_FALSE_ON_FAIL(
- destination->WriteUInt8(static_cast<uint8_t>(color_space.primaries())));
+ destination.WriteUInt8(static_cast<uint8_t>(color_space.primaries())));
// transfer_characteristics: u(8)
RETURN_FALSE_ON_FAIL(
- destination->WriteUInt8(static_cast<uint8_t>(color_space.transfer())));
+ destination.WriteUInt8(static_cast<uint8_t>(color_space.transfer())));
// matrix_coefficients: u(8)
RETURN_FALSE_ON_FAIL(
- destination->WriteUInt8(static_cast<uint8_t>(color_space.matrix())));
+ destination.WriteUInt8(static_cast<uint8_t>(color_space.matrix())));
return true;
}
bool CopyOrRewriteVideoSignalTypeInfo(
- rtc::BitBuffer* source,
- rtc::BitBufferWriter* destination,
+ BitstreamReader& source,
+ rtc::BitBufferWriter& destination,
const ColorSpace* color_space,
- SpsVuiRewriter::ParseResult* out_vui_rewritten) {
+ SpsVuiRewriter::ParseResult& out_vui_rewritten) {
// Read.
- uint32_t video_signal_type_present_flag;
uint32_t video_format = 5; // H264 default: unspecified
uint32_t video_full_range_flag = 0; // H264 default: limited
uint32_t colour_description_present_flag = 0;
uint8_t colour_primaries = 3; // H264 default: unspecified
uint8_t transfer_characteristics = 3; // H264 default: unspecified
uint8_t matrix_coefficients = 3; // H264 default: unspecified
- RETURN_FALSE_ON_FAIL(source->ReadBits(&video_signal_type_present_flag, 1));
+ uint32_t video_signal_type_present_flag = source.ReadBit();
if (video_signal_type_present_flag) {
- RETURN_FALSE_ON_FAIL(source->ReadBits(&video_format, 3));
- RETURN_FALSE_ON_FAIL(source->ReadBits(&video_full_range_flag, 1));
- RETURN_FALSE_ON_FAIL(source->ReadBits(&colour_description_present_flag, 1));
+ video_format = source.ReadBits(3);
+ video_full_range_flag = source.ReadBit();
+ colour_description_present_flag = source.ReadBit();
if (colour_description_present_flag) {
- RETURN_FALSE_ON_FAIL(source->ReadUInt8(&colour_primaries));
- RETURN_FALSE_ON_FAIL(source->ReadUInt8(&transfer_characteristics));
- RETURN_FALSE_ON_FAIL(source->ReadUInt8(&matrix_coefficients));
+ colour_primaries = source.Read<uint8_t>();
+ transfer_characteristics = source.Read<uint8_t>();
+ matrix_coefficients = source.Read<uint8_t>();
}
}
+ RETURN_FALSE_ON_FAIL(source.Ok());
// Update.
uint32_t video_signal_type_present_flag_override =
@@ -573,19 +559,19 @@ bool CopyOrRewriteVideoSignalTypeInfo(
// Write.
RETURN_FALSE_ON_FAIL(
- destination->WriteBits(video_signal_type_present_flag_override, 1));
+ destination.WriteBits(video_signal_type_present_flag_override, 1));
if (video_signal_type_present_flag_override) {
- RETURN_FALSE_ON_FAIL(destination->WriteBits(video_format_override, 3));
+ RETURN_FALSE_ON_FAIL(destination.WriteBits(video_format_override, 3));
RETURN_FALSE_ON_FAIL(
- destination->WriteBits(video_full_range_flag_override, 1));
+ destination.WriteBits(video_full_range_flag_override, 1));
RETURN_FALSE_ON_FAIL(
- destination->WriteBits(colour_description_present_flag_override, 1));
+ destination.WriteBits(colour_description_present_flag_override, 1));
if (colour_description_present_flag_override) {
- RETURN_FALSE_ON_FAIL(destination->WriteUInt8(colour_primaries_override));
+ RETURN_FALSE_ON_FAIL(destination.WriteUInt8(colour_primaries_override));
RETURN_FALSE_ON_FAIL(
- destination->WriteUInt8(transfer_characteristics_override));
+ destination.WriteUInt8(transfer_characteristics_override));
RETURN_FALSE_ON_FAIL(
- destination->WriteUInt8(matrix_coefficients_override));
+ destination.WriteUInt8(matrix_coefficients_override));
}
}
@@ -598,27 +584,26 @@ bool CopyOrRewriteVideoSignalTypeInfo(
colour_primaries_override != colour_primaries ||
transfer_characteristics_override != transfer_characteristics ||
matrix_coefficients_override != matrix_coefficients) {
- *out_vui_rewritten = SpsVuiRewriter::ParseResult::kVuiRewritten;
+ out_vui_rewritten = SpsVuiRewriter::ParseResult::kVuiRewritten;
}
return true;
}
-bool CopyRemainingBits(rtc::BitBuffer* source,
- rtc::BitBufferWriter* destination) {
- uint32_t bits_tmp;
+bool CopyRemainingBits(BitstreamReader& source,
+ rtc::BitBufferWriter& destination) {
// Try to get at least the destination aligned.
- if (source->RemainingBitCount() > 0 && source->RemainingBitCount() % 8 != 0) {
- size_t misaligned_bits = source->RemainingBitCount() % 8;
- COPY_BITS(source, destination, bits_tmp, misaligned_bits);
+ if (source.RemainingBitCount() > 0 && source.RemainingBitCount() % 8 != 0) {
+ size_t misaligned_bits = source.RemainingBitCount() % 8;
+ CopyBits(misaligned_bits, source, destination);
}
- while (source->RemainingBitCount() > 0) {
- auto count = rtc::SafeMin<size_t>(32u, source->RemainingBitCount());
- COPY_BITS(source, destination, bits_tmp, count);
+ while (source.RemainingBitCount() > 0) {
+ int count = std::min(32, source.RemainingBitCount());
+ CopyBits(count, source, destination);
}
// TODO(noahric): The last byte could be all zeroes now, which we should just
// strip.
- return true;
+ return source.Ok();
}
} // namespace
diff --git a/common_video/h264/sps_vui_rewriter.h b/common_video/h264/sps_vui_rewriter.h
index 4cd4cb976d..ef80d5b60e 100644
--- a/common_video/h264/sps_vui_rewriter.h
+++ b/common_video/h264/sps_vui_rewriter.h
@@ -50,20 +50,11 @@ class SpsVuiRewriter : private SpsParser {
rtc::Buffer* destination,
Direction Direction);
- // Parses NAL units from |buffer| based on |nalu_offsets| and |nalu_lengths|
- // and rewrites VUI in SPS blocks if necessary.
- // The result is written to |output_buffer| and modified NAL unit offsets
- // and lenghts are written to |output_nalu_offsets| and |output_nalu_lenghts|
- // to account for any added data.
- static void ParseOutgoingBitstreamAndRewriteSps(
+ // Parses NAL units from `buffer`, strips AUD blocks and rewrites VUI in SPS
+ // blocks if necessary.
+ static rtc::Buffer ParseOutgoingBitstreamAndRewrite(
rtc::ArrayView<const uint8_t> buffer,
- size_t num_nalus,
- const size_t* nalu_offsets,
- const size_t* nalu_lengths,
- const ColorSpace* color_space,
- rtc::Buffer* output_buffer,
- size_t* output_nalu_offsets,
- size_t* output_nalu_lengths);
+ const ColorSpace* color_space);
private:
static ParseResult ParseAndRewriteSps(
diff --git a/common_video/h264/sps_vui_rewriter_unittest.cc b/common_video/h264/sps_vui_rewriter_unittest.cc
index e008948ce1..2907949e6c 100644
--- a/common_video/h264/sps_vui_rewriter_unittest.cc
+++ b/common_video/h264/sps_vui_rewriter_unittest.cc
@@ -36,6 +36,7 @@ static const size_t kWidth = 640;
static const size_t kHeight = 480;
static const uint8_t kStartSequence[] = {0x00, 0x00, 0x00, 0x01};
+static const uint8_t kAud[] = {H264::NaluType::kAud, 0x09, 0x10};
static const uint8_t kSpsNaluType[] = {H264::NaluType::kSps};
static const uint8_t kIdr1[] = {H264::NaluType::kIdr, 0xFF, 0x00, 0x00, 0x04};
static const uint8_t kIdr2[] = {H264::NaluType::kIdr, 0xFF, 0x00, 0x11};
@@ -396,36 +397,13 @@ TEST(SpsVuiRewriterOutgoingVuiTest, ParseOutgoingBitstreamOptimalVui) {
GenerateFakeSps(kVuiNoFrameBuffering, &optimal_sps);
rtc::Buffer buffer;
- const size_t kNumNalus = 2;
- size_t nalu_offsets[kNumNalus];
- size_t nalu_lengths[kNumNalus];
buffer.AppendData(kStartSequence);
- nalu_offsets[0] = buffer.size();
- nalu_lengths[0] = optimal_sps.size();
buffer.AppendData(optimal_sps);
buffer.AppendData(kStartSequence);
- nalu_offsets[1] = buffer.size();
- nalu_lengths[1] = sizeof(kIdr1);
buffer.AppendData(kIdr1);
- rtc::Buffer modified_buffer;
- size_t modified_nalu_offsets[kNumNalus];
- size_t modified_nalu_lengths[kNumNalus];
-
- SpsVuiRewriter::ParseOutgoingBitstreamAndRewriteSps(
- buffer, kNumNalus, nalu_offsets, nalu_lengths, nullptr, &modified_buffer,
- modified_nalu_offsets, modified_nalu_lengths);
-
- EXPECT_THAT(
- std::vector<uint8_t>(modified_buffer.data(),
- modified_buffer.data() + modified_buffer.size()),
- ::testing::ElementsAreArray(buffer.data(), buffer.size()));
- EXPECT_THAT(std::vector<size_t>(modified_nalu_offsets,
- modified_nalu_offsets + kNumNalus),
- ::testing::ElementsAreArray(nalu_offsets, kNumNalus));
- EXPECT_THAT(std::vector<size_t>(modified_nalu_lengths,
- modified_nalu_lengths + kNumNalus),
- ::testing::ElementsAreArray(nalu_lengths, kNumNalus));
+ EXPECT_THAT(SpsVuiRewriter::ParseOutgoingBitstreamAndRewrite(buffer, nullptr),
+ ::testing::ElementsAreArray(buffer));
}
TEST(SpsVuiRewriterOutgoingVuiTest, ParseOutgoingBitstreamNoVui) {
@@ -435,61 +413,51 @@ TEST(SpsVuiRewriterOutgoingVuiTest, ParseOutgoingBitstreamNoVui) {
GenerateFakeSps(kVuiNotPresent, &sps);
rtc::Buffer buffer;
- const size_t kNumNalus = 3;
- size_t nalu_offsets[kNumNalus];
- size_t nalu_lengths[kNumNalus];
buffer.AppendData(kStartSequence);
- nalu_offsets[0] = buffer.size();
- nalu_lengths[0] = sizeof(kIdr1);
buffer.AppendData(kIdr1);
buffer.AppendData(kStartSequence);
- nalu_offsets[1] = buffer.size();
- nalu_lengths[1] = sizeof(kSpsNaluType) + sps.size();
buffer.AppendData(kSpsNaluType);
buffer.AppendData(sps);
buffer.AppendData(kStartSequence);
- nalu_offsets[2] = buffer.size();
- nalu_lengths[2] = sizeof(kIdr2);
buffer.AppendData(kIdr2);
rtc::Buffer optimal_sps;
GenerateFakeSps(kVuiNoFrameBuffering, &optimal_sps);
rtc::Buffer expected_buffer;
- size_t expected_nalu_offsets[kNumNalus];
- size_t expected_nalu_lengths[kNumNalus];
expected_buffer.AppendData(kStartSequence);
- expected_nalu_offsets[0] = expected_buffer.size();
- expected_nalu_lengths[0] = sizeof(kIdr1);
expected_buffer.AppendData(kIdr1);
expected_buffer.AppendData(kStartSequence);
- expected_nalu_offsets[1] = expected_buffer.size();
- expected_nalu_lengths[1] = sizeof(kSpsNaluType) + optimal_sps.size();
expected_buffer.AppendData(kSpsNaluType);
expected_buffer.AppendData(optimal_sps);
expected_buffer.AppendData(kStartSequence);
- expected_nalu_offsets[2] = expected_buffer.size();
- expected_nalu_lengths[2] = sizeof(kIdr2);
expected_buffer.AppendData(kIdr2);
- rtc::Buffer modified_buffer;
- size_t modified_nalu_offsets[kNumNalus];
- size_t modified_nalu_lengths[kNumNalus];
-
- SpsVuiRewriter::ParseOutgoingBitstreamAndRewriteSps(
- buffer, kNumNalus, nalu_offsets, nalu_lengths, nullptr, &modified_buffer,
- modified_nalu_offsets, modified_nalu_lengths);
-
- EXPECT_THAT(
- std::vector<uint8_t>(modified_buffer.data(),
- modified_buffer.data() + modified_buffer.size()),
- ::testing::ElementsAreArray(expected_buffer.data(),
- expected_buffer.size()));
- EXPECT_THAT(std::vector<size_t>(modified_nalu_offsets,
- modified_nalu_offsets + kNumNalus),
- ::testing::ElementsAreArray(expected_nalu_offsets, kNumNalus));
- EXPECT_THAT(std::vector<size_t>(modified_nalu_lengths,
- modified_nalu_lengths + kNumNalus),
- ::testing::ElementsAreArray(expected_nalu_lengths, kNumNalus));
+ EXPECT_THAT(SpsVuiRewriter::ParseOutgoingBitstreamAndRewrite(buffer, nullptr),
+ ::testing::ElementsAreArray(expected_buffer));
+}
+
+TEST(SpsVuiRewriterOutgoingAudTest, ParseOutgoingBitstreamWithAud) {
+ rtc::LogMessage::LogToDebug(rtc::LS_VERBOSE);
+
+ rtc::Buffer optimal_sps;
+ GenerateFakeSps(kVuiNoFrameBuffering, &optimal_sps);
+
+ rtc::Buffer buffer;
+ buffer.AppendData(kStartSequence);
+ buffer.AppendData(kAud);
+ buffer.AppendData(kStartSequence);
+ buffer.AppendData(optimal_sps);
+ buffer.AppendData(kStartSequence);
+ buffer.AppendData(kIdr1);
+
+ rtc::Buffer expected_buffer;
+ expected_buffer.AppendData(kStartSequence);
+ expected_buffer.AppendData(optimal_sps);
+ expected_buffer.AppendData(kStartSequence);
+ expected_buffer.AppendData(kIdr1);
+
+ EXPECT_THAT(SpsVuiRewriter::ParseOutgoingBitstreamAndRewrite(buffer, nullptr),
+ ::testing::ElementsAreArray(expected_buffer));
}
} // namespace webrtc
diff --git a/common_video/i420_buffer_pool.cc b/common_video/i420_buffer_pool.cc
deleted file mode 100644
index d13da6a172..0000000000
--- a/common_video/i420_buffer_pool.cc
+++ /dev/null
@@ -1,106 +0,0 @@
-/*
- * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "common_video/include/i420_buffer_pool.h"
-
-#include <limits>
-
-#include "rtc_base/checks.h"
-
-namespace webrtc {
-
-I420BufferPool::I420BufferPool() : I420BufferPool(false) {}
-I420BufferPool::I420BufferPool(bool zero_initialize)
- : I420BufferPool(zero_initialize, std::numeric_limits<size_t>::max()) {}
-I420BufferPool::I420BufferPool(bool zero_initialize,
- size_t max_number_of_buffers)
- : zero_initialize_(zero_initialize),
- max_number_of_buffers_(max_number_of_buffers) {}
-I420BufferPool::~I420BufferPool() = default;
-
-void I420BufferPool::Release() {
- buffers_.clear();
-}
-
-bool I420BufferPool::Resize(size_t max_number_of_buffers) {
- RTC_DCHECK_RUNS_SERIALIZED(&race_checker_);
- size_t used_buffers_count = 0;
- for (const rtc::scoped_refptr<PooledI420Buffer>& buffer : buffers_) {
- // If the buffer is in use, the ref count will be >= 2, one from the list we
- // are looping over and one from the application. If the ref count is 1,
- // then the list we are looping over holds the only reference and it's safe
- // to reuse.
- if (!buffer->HasOneRef()) {
- used_buffers_count++;
- }
- }
- if (used_buffers_count > max_number_of_buffers) {
- return false;
- }
- max_number_of_buffers_ = max_number_of_buffers;
-
- size_t buffers_to_purge = buffers_.size() - max_number_of_buffers_;
- auto iter = buffers_.begin();
- while (iter != buffers_.end() && buffers_to_purge > 0) {
- if ((*iter)->HasOneRef()) {
- iter = buffers_.erase(iter);
- buffers_to_purge--;
- } else {
- ++iter;
- }
- }
- return true;
-}
-
-rtc::scoped_refptr<I420Buffer> I420BufferPool::CreateBuffer(int width,
- int height) {
- // Default stride_y is width, default uv stride is width / 2 (rounding up).
- return CreateBuffer(width, height, width, (width + 1) / 2, (width + 1) / 2);
-}
-
-rtc::scoped_refptr<I420Buffer> I420BufferPool::CreateBuffer(int width,
- int height,
- int stride_y,
- int stride_u,
- int stride_v) {
- RTC_DCHECK_RUNS_SERIALIZED(&race_checker_);
- // Release buffers with wrong resolution.
- for (auto it = buffers_.begin(); it != buffers_.end();) {
- const auto& buffer = *it;
- if (buffer->width() != width || buffer->height() != height ||
- buffer->StrideY() != stride_y || buffer->StrideU() != stride_u ||
- buffer->StrideV() != stride_v) {
- it = buffers_.erase(it);
- } else {
- ++it;
- }
- }
- // Look for a free buffer.
- for (const rtc::scoped_refptr<PooledI420Buffer>& buffer : buffers_) {
- // If the buffer is in use, the ref count will be >= 2, one from the list we
- // are looping over and one from the application. If the ref count is 1,
- // then the list we are looping over holds the only reference and it's safe
- // to reuse.
- if (buffer->HasOneRef())
- return buffer;
- }
-
- if (buffers_.size() >= max_number_of_buffers_)
- return nullptr;
- // Allocate new buffer.
- rtc::scoped_refptr<PooledI420Buffer> buffer =
- new PooledI420Buffer(width, height, stride_y, stride_u, stride_v);
- if (zero_initialize_)
- buffer->InitializeData();
- buffers_.push_back(buffer);
- return buffer;
-}
-
-} // namespace webrtc
diff --git a/common_video/i420_buffer_pool_unittest.cc b/common_video/i420_buffer_pool_unittest.cc
deleted file mode 100644
index 27503e5b8a..0000000000
--- a/common_video/i420_buffer_pool_unittest.cc
+++ /dev/null
@@ -1,117 +0,0 @@
-/*
- * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "common_video/include/i420_buffer_pool.h"
-
-#include <stdint.h>
-#include <string.h>
-
-#include "api/scoped_refptr.h"
-#include "api/video/i420_buffer.h"
-#include "api/video/video_frame_buffer.h"
-#include "test/gtest.h"
-
-namespace webrtc {
-
-TEST(TestI420BufferPool, SimpleFrameReuse) {
- I420BufferPool pool;
- auto buffer = pool.CreateBuffer(16, 16);
- EXPECT_EQ(16, buffer->width());
- EXPECT_EQ(16, buffer->height());
- // Extract non-refcounted pointers for testing.
- const uint8_t* y_ptr = buffer->DataY();
- const uint8_t* u_ptr = buffer->DataU();
- const uint8_t* v_ptr = buffer->DataV();
- // Release buffer so that it is returned to the pool.
- buffer = nullptr;
- // Check that the memory is resued.
- buffer = pool.CreateBuffer(16, 16);
- EXPECT_EQ(y_ptr, buffer->DataY());
- EXPECT_EQ(u_ptr, buffer->DataU());
- EXPECT_EQ(v_ptr, buffer->DataV());
-}
-
-TEST(TestI420BufferPool, FrameReuseWithDefaultThenExplicitStride) {
- I420BufferPool pool;
- auto buffer = pool.CreateBuffer(15, 16);
- EXPECT_EQ(15, buffer->width());
- EXPECT_EQ(16, buffer->height());
- // The default Y stride is width and UV stride is halfwidth (rounded up).
- ASSERT_EQ(15, buffer->StrideY());
- ASSERT_EQ(8, buffer->StrideU());
- ASSERT_EQ(8, buffer->StrideV());
- // Extract non-refcounted pointers for testing.
- const uint8_t* y_ptr = buffer->DataY();
- const uint8_t* u_ptr = buffer->DataU();
- const uint8_t* v_ptr = buffer->DataV();
- // Release buffer so that it is returned to the pool.
- buffer = nullptr;
- // Check that the memory is resued with explicit strides if they match the
- // assumed default above.
- buffer = pool.CreateBuffer(15, 16, 15, 8, 8);
- EXPECT_EQ(y_ptr, buffer->DataY());
- EXPECT_EQ(u_ptr, buffer->DataU());
- EXPECT_EQ(v_ptr, buffer->DataV());
- EXPECT_EQ(15, buffer->width());
- EXPECT_EQ(16, buffer->height());
- EXPECT_EQ(15, buffer->StrideY());
- EXPECT_EQ(8, buffer->StrideU());
- EXPECT_EQ(8, buffer->StrideV());
-}
-
-TEST(TestI420BufferPool, FailToReuseWrongSize) {
- // Set max frames to 1, just to make sure the first buffer is being released.
- I420BufferPool pool(/*zero_initialize=*/false, 1);
- auto buffer = pool.CreateBuffer(16, 16);
- EXPECT_EQ(16, buffer->width());
- EXPECT_EQ(16, buffer->height());
- // Release buffer so that it is returned to the pool.
- buffer = nullptr;
- // Check that the pool doesn't try to reuse buffers of incorrect size.
- buffer = pool.CreateBuffer(32, 16);
- ASSERT_TRUE(buffer);
- EXPECT_EQ(32, buffer->width());
- EXPECT_EQ(16, buffer->height());
-}
-
-TEST(TestI420BufferPool, FailToReuseWrongStride) {
- // Set max frames to 1, just to make sure the first buffer is being released.
- I420BufferPool pool(/*zero_initialize=*/false, 1);
- auto buffer = pool.CreateBuffer(32, 32, 32, 16, 16);
- // Make sure the stride was read correctly, for the rest of the test.
- ASSERT_EQ(16, buffer->StrideU());
- ASSERT_EQ(16, buffer->StrideV());
- buffer = pool.CreateBuffer(32, 32, 32, 20, 20);
- ASSERT_TRUE(buffer);
- EXPECT_EQ(32, buffer->StrideY());
- EXPECT_EQ(20, buffer->StrideU());
- EXPECT_EQ(20, buffer->StrideV());
-}
-
-TEST(TestI420BufferPool, FrameValidAfterPoolDestruction) {
- rtc::scoped_refptr<I420Buffer> buffer;
- {
- I420BufferPool pool;
- buffer = pool.CreateBuffer(16, 16);
- }
- EXPECT_EQ(16, buffer->width());
- EXPECT_EQ(16, buffer->height());
- // Try to trigger use-after-free errors by writing to y-plane.
- memset(buffer->MutableDataY(), 0xA5, 16 * buffer->StrideY());
-}
-
-TEST(TestI420BufferPool, MaxNumberOfBuffers) {
- I420BufferPool pool(false, 1);
- auto buffer1 = pool.CreateBuffer(16, 16);
- EXPECT_NE(nullptr, buffer1.get());
- EXPECT_EQ(nullptr, pool.CreateBuffer(16, 16).get());
-}
-
-} // namespace webrtc
diff --git a/common_video/include/i420_buffer_pool.h b/common_video/include/i420_buffer_pool.h
deleted file mode 100644
index 44f4821798..0000000000
--- a/common_video/include/i420_buffer_pool.h
+++ /dev/null
@@ -1,79 +0,0 @@
-/*
- * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef COMMON_VIDEO_INCLUDE_I420_BUFFER_POOL_H_
-#define COMMON_VIDEO_INCLUDE_I420_BUFFER_POOL_H_
-
-#include <stddef.h>
-
-#include <list>
-
-#include "api/scoped_refptr.h"
-#include "api/video/i420_buffer.h"
-#include "rtc_base/race_checker.h"
-#include "rtc_base/ref_counted_object.h"
-
-namespace webrtc {
-
-// Simple buffer pool to avoid unnecessary allocations of I420Buffer objects.
-// The pool manages the memory of the I420Buffer returned from CreateBuffer.
-// When the I420Buffer is destructed, the memory is returned to the pool for use
-// by subsequent calls to CreateBuffer. If the resolution passed to CreateBuffer
-// changes, old buffers will be purged from the pool.
-// Note that CreateBuffer will crash if more than kMaxNumberOfFramesBeforeCrash
-// are created. This is to prevent memory leaks where frames are not returned.
-class I420BufferPool {
- public:
- I420BufferPool();
- explicit I420BufferPool(bool zero_initialize);
- I420BufferPool(bool zero_initialze, size_t max_number_of_buffers);
- ~I420BufferPool();
-
- // Returns a buffer from the pool. If no suitable buffer exist in the pool
- // and there are less than |max_number_of_buffers| pending, a buffer is
- // created. Returns null otherwise.
- rtc::scoped_refptr<I420Buffer> CreateBuffer(int width, int height);
-
- // Returns a buffer from the pool with the explicitly specified stride.
- rtc::scoped_refptr<I420Buffer> CreateBuffer(int width,
- int height,
- int stride_y,
- int stride_u,
- int stride_v);
-
- // Changes the max amount of buffers in the pool to the new value.
- // Returns true if change was successful and false if the amount of already
- // allocated buffers is bigger than new value.
- bool Resize(size_t max_number_of_buffers);
-
- // Clears buffers_ and detaches the thread checker so that it can be reused
- // later from another thread.
- void Release();
-
- private:
- // Explicitly use a RefCountedObject to get access to HasOneRef,
- // needed by the pool to check exclusive access.
- using PooledI420Buffer = rtc::RefCountedObject<I420Buffer>;
-
- rtc::RaceChecker race_checker_;
- std::list<rtc::scoped_refptr<PooledI420Buffer>> buffers_;
- // If true, newly allocated buffers are zero-initialized. Note that recycled
- // buffers are not zero'd before reuse. This is required of buffers used by
- // FFmpeg according to http://crbug.com/390941, which only requires it for the
- // initial allocation (as shown by FFmpeg's own buffer allocation code). It
- // has to do with "Use-of-uninitialized-value" on "Linux_msan_chrome".
- const bool zero_initialize_;
- // Max number of buffers this pool can have pending.
- size_t max_number_of_buffers_;
-};
-
-} // namespace webrtc
-
-#endif // COMMON_VIDEO_INCLUDE_I420_BUFFER_POOL_H_
diff --git a/common_video/include/incoming_video_stream.h b/common_video/include/incoming_video_stream.h
deleted file mode 100644
index 0dcd4efcbf..0000000000
--- a/common_video/include/incoming_video_stream.h
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef COMMON_VIDEO_INCLUDE_INCOMING_VIDEO_STREAM_H_
-#define COMMON_VIDEO_INCLUDE_INCOMING_VIDEO_STREAM_H_
-
-#include <stdint.h>
-
-#include "api/task_queue/task_queue_factory.h"
-#include "api/video/video_frame.h"
-#include "api/video/video_sink_interface.h"
-#include "common_video/video_render_frames.h"
-#include "rtc_base/race_checker.h"
-#include "rtc_base/task_queue.h"
-#include "rtc_base/thread_checker.h"
-
-namespace webrtc {
-
-class IncomingVideoStream : public rtc::VideoSinkInterface<VideoFrame> {
- public:
- IncomingVideoStream(TaskQueueFactory* task_queue_factory,
- int32_t delay_ms,
- rtc::VideoSinkInterface<VideoFrame>* callback);
- ~IncomingVideoStream() override;
-
- private:
- void OnFrame(const VideoFrame& video_frame) override;
- void Dequeue();
-
- rtc::ThreadChecker main_thread_checker_;
- rtc::RaceChecker decoder_race_checker_;
-
- VideoRenderFrames render_buffers_; // Only touched on the TaskQueue.
- rtc::VideoSinkInterface<VideoFrame>* const callback_;
- rtc::TaskQueue incoming_render_queue_;
-};
-
-} // namespace webrtc
-
-#endif // COMMON_VIDEO_INCLUDE_INCOMING_VIDEO_STREAM_H_
diff --git a/common_video/include/video_frame.h b/common_video/include/video_frame.h
deleted file mode 100644
index ba280f2a8c..0000000000
--- a/common_video/include/video_frame.h
+++ /dev/null
@@ -1,17 +0,0 @@
-/*
- * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef COMMON_VIDEO_INCLUDE_VIDEO_FRAME_H_
-#define COMMON_VIDEO_INCLUDE_VIDEO_FRAME_H_
-
-// TODO(nisse): Delete this file, after downstream code is updated.
-#include "api/video/encoded_image.h"
-
-#endif // COMMON_VIDEO_INCLUDE_VIDEO_FRAME_H_
diff --git a/common_video/include/video_frame_buffer.h b/common_video/include/video_frame_buffer.h
index bc70f34ec8..34a9bb5a37 100644
--- a/common_video/include/video_frame_buffer.h
+++ b/common_video/include/video_frame_buffer.h
@@ -13,10 +13,10 @@
#include <stdint.h>
+#include <functional>
+
#include "api/scoped_refptr.h"
#include "api/video/video_frame_buffer.h"
-#include "rtc_base/callback.h"
-#include "rtc_base/ref_counted_object.h"
namespace webrtc {
@@ -29,7 +29,18 @@ rtc::scoped_refptr<I420BufferInterface> WrapI420Buffer(
int u_stride,
const uint8_t* v_plane,
int v_stride,
- const rtc::Callback0<void>& no_longer_used);
+ std::function<void()> no_longer_used);
+
+rtc::scoped_refptr<I422BufferInterface> WrapI422Buffer(
+ int width,
+ int height,
+ const uint8_t* y_plane,
+ int y_stride,
+ const uint8_t* u_plane,
+ int u_stride,
+ const uint8_t* v_plane,
+ int v_stride,
+ std::function<void()> no_longer_used);
rtc::scoped_refptr<I444BufferInterface> WrapI444Buffer(
int width,
@@ -40,7 +51,7 @@ rtc::scoped_refptr<I444BufferInterface> WrapI444Buffer(
int u_stride,
const uint8_t* v_plane,
int v_stride,
- const rtc::Callback0<void>& no_longer_used);
+ std::function<void()> no_longer_used);
rtc::scoped_refptr<I420ABufferInterface> WrapI420ABuffer(
int width,
@@ -53,7 +64,7 @@ rtc::scoped_refptr<I420ABufferInterface> WrapI420ABuffer(
int v_stride,
const uint8_t* a_plane,
int a_stride,
- const rtc::Callback0<void>& no_longer_used);
+ std::function<void()> no_longer_used);
rtc::scoped_refptr<PlanarYuvBuffer> WrapYuvBuffer(
VideoFrameBuffer::Type type,
@@ -65,7 +76,7 @@ rtc::scoped_refptr<PlanarYuvBuffer> WrapYuvBuffer(
int u_stride,
const uint8_t* v_plane,
int v_stride,
- const rtc::Callback0<void>& no_longer_used);
+ std::function<void()> no_longer_used);
rtc::scoped_refptr<I010BufferInterface> WrapI010Buffer(
int width,
@@ -76,8 +87,18 @@ rtc::scoped_refptr<I010BufferInterface> WrapI010Buffer(
int u_stride,
const uint16_t* v_plane,
int v_stride,
- const rtc::Callback0<void>& no_longer_used);
+ std::function<void()> no_longer_used);
+rtc::scoped_refptr<I210BufferInterface> WrapI210Buffer(
+ int width,
+ int height,
+ const uint16_t* y_plane,
+ int y_stride,
+ const uint16_t* u_plane,
+ int u_stride,
+ const uint16_t* v_plane,
+ int v_stride,
+ std::function<void()> no_longer_used);
} // namespace webrtc
#endif // COMMON_VIDEO_INCLUDE_VIDEO_FRAME_BUFFER_H_
diff --git a/common_video/include/video_frame_buffer_pool.h b/common_video/include/video_frame_buffer_pool.h
new file mode 100644
index 0000000000..fd1bd164ec
--- /dev/null
+++ b/common_video/include/video_frame_buffer_pool.h
@@ -0,0 +1,82 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef COMMON_VIDEO_INCLUDE_VIDEO_FRAME_BUFFER_POOL_H_
+#define COMMON_VIDEO_INCLUDE_VIDEO_FRAME_BUFFER_POOL_H_
+
+#include <stddef.h>
+
+#include <list>
+
+#include "api/scoped_refptr.h"
+#include "api/video/i010_buffer.h"
+#include "api/video/i210_buffer.h"
+#include "api/video/i420_buffer.h"
+#include "api/video/i422_buffer.h"
+#include "api/video/i444_buffer.h"
+#include "api/video/nv12_buffer.h"
+#include "rtc_base/race_checker.h"
+
+namespace webrtc {
+
+// Simple buffer pool to avoid unnecessary allocations of video frame buffers.
+// The pool manages the memory of the I420Buffer/NV12Buffer returned from
+// Create(I420|NV12)Buffer. When the buffer is destructed, the memory is
+// returned to the pool for use by subsequent calls to Create(I420|NV12)Buffer.
+// If the resolution passed to Create(I420|NV12)Buffer changes or requested
+// pixel format changes, old buffers will be purged from the pool.
+// Note that Create(I420|NV12)Buffer will crash if more than
+// kMaxNumberOfFramesBeforeCrash are created. This is to prevent memory leaks
+// where frames are not returned.
+class VideoFrameBufferPool {
+ public:
+ VideoFrameBufferPool();
+ explicit VideoFrameBufferPool(bool zero_initialize);
+ VideoFrameBufferPool(bool zero_initialize, size_t max_number_of_buffers);
+ ~VideoFrameBufferPool();
+
+ // Returns a buffer from the pool. If no suitable buffer exist in the pool
+ // and there are less than `max_number_of_buffers` pending, a buffer is
+ // created. Returns null otherwise.
+ rtc::scoped_refptr<I420Buffer> CreateI420Buffer(int width, int height);
+ rtc::scoped_refptr<I422Buffer> CreateI422Buffer(int width, int height);
+ rtc::scoped_refptr<I444Buffer> CreateI444Buffer(int width, int height);
+ rtc::scoped_refptr<I010Buffer> CreateI010Buffer(int width, int height);
+ rtc::scoped_refptr<I210Buffer> CreateI210Buffer(int width, int height);
+ rtc::scoped_refptr<NV12Buffer> CreateNV12Buffer(int width, int height);
+
+ // Changes the max amount of buffers in the pool to the new value.
+ // Returns true if change was successful and false if the amount of already
+ // allocated buffers is bigger than new value.
+ bool Resize(size_t max_number_of_buffers);
+
+ // Clears buffers_ and detaches the thread checker so that it can be reused
+ // later from another thread.
+ void Release();
+
+ private:
+ rtc::scoped_refptr<VideoFrameBuffer>
+ GetExistingBuffer(int width, int height, VideoFrameBuffer::Type type);
+
+ rtc::RaceChecker race_checker_;
+ std::list<rtc::scoped_refptr<VideoFrameBuffer>> buffers_;
+ // If true, newly allocated buffers are zero-initialized. Note that recycled
+ // buffers are not zero'd before reuse. This is required of buffers used by
+ // FFmpeg according to http://crbug.com/390941, which only requires it for the
+ // initial allocation (as shown by FFmpeg's own buffer allocation code). It
+ // has to do with "Use-of-uninitialized-value" on "Linux_msan_chrome".
+ const bool zero_initialize_;
+ // Max number of buffers this pool can have pending.
+ size_t max_number_of_buffers_;
+};
+
+} // namespace webrtc
+
+#endif // COMMON_VIDEO_INCLUDE_VIDEO_FRAME_BUFFER_POOL_H_
diff --git a/common_video/incoming_video_stream.cc b/common_video/incoming_video_stream.cc
deleted file mode 100644
index d1f8beac5b..0000000000
--- a/common_video/incoming_video_stream.cc
+++ /dev/null
@@ -1,64 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "common_video/include/incoming_video_stream.h"
-
-#include <memory>
-#include <utility>
-
-#include "absl/types/optional.h"
-#include "common_video/video_render_frames.h"
-#include "rtc_base/checks.h"
-#include "rtc_base/trace_event.h"
-
-namespace webrtc {
-
-IncomingVideoStream::IncomingVideoStream(
- TaskQueueFactory* task_queue_factory,
- int32_t delay_ms,
- rtc::VideoSinkInterface<VideoFrame>* callback)
- : render_buffers_(delay_ms),
- callback_(callback),
- incoming_render_queue_(task_queue_factory->CreateTaskQueue(
- "IncomingVideoStream",
- TaskQueueFactory::Priority::HIGH)) {}
-
-IncomingVideoStream::~IncomingVideoStream() {
- RTC_DCHECK(main_thread_checker_.IsCurrent());
-}
-
-void IncomingVideoStream::OnFrame(const VideoFrame& video_frame) {
- TRACE_EVENT0("webrtc", "IncomingVideoStream::OnFrame");
- RTC_CHECK_RUNS_SERIALIZED(&decoder_race_checker_);
- RTC_DCHECK(!incoming_render_queue_.IsCurrent());
- // TODO(srte): Using video_frame = std::move(video_frame) would move the frame
- // into the lambda instead of copying it, but it doesn't work unless we change
- // OnFrame to take its frame argument by value instead of const reference.
- incoming_render_queue_.PostTask([this, video_frame = video_frame]() mutable {
- RTC_DCHECK(incoming_render_queue_.IsCurrent());
- if (render_buffers_.AddFrame(std::move(video_frame)) == 1)
- Dequeue();
- });
-}
-
-void IncomingVideoStream::Dequeue() {
- TRACE_EVENT0("webrtc", "IncomingVideoStream::Dequeue");
- RTC_DCHECK(incoming_render_queue_.IsCurrent());
- absl::optional<VideoFrame> frame_to_render = render_buffers_.FrameToRender();
- if (frame_to_render)
- callback_->OnFrame(*frame_to_render);
-
- if (render_buffers_.HasPendingFrames()) {
- uint32_t wait_time = render_buffers_.TimeToNextFrameRelease();
- incoming_render_queue_.PostDelayedTask([this]() { Dequeue(); }, wait_time);
- }
-}
-
-} // namespace webrtc
diff --git a/common_video/libyuv/include/webrtc_libyuv.h b/common_video/libyuv/include/webrtc_libyuv.h
index ba17577216..08a035a8d7 100644
--- a/common_video/libyuv/include/webrtc_libyuv.h
+++ b/common_video/libyuv/include/webrtc_libyuv.h
@@ -32,18 +32,14 @@ enum class VideoType {
kI420,
kIYUV,
kRGB24,
- kABGR,
kARGB,
- kARGB4444,
kRGB565,
- kARGB1555,
kYUY2,
kYV12,
kUYVY,
kMJPEG,
- kNV21,
- kNV12,
kBGRA,
+ kNV12,
};
// This is the max PSNR value our algorithms can return.
@@ -58,16 +54,6 @@ const double kPerfectPSNR = 48.0f;
// video frame.
size_t CalcBufferSize(VideoType type, int width, int height);
-// TODO(mikhal): Add unit test for these two functions and determine location.
-// Print VideoFrame to file
-// Input:
-// - frame : Reference to video frame.
-// - file : pointer to file object. It is assumed that the file is
-// already open for writing.
-// Return value: 0 if OK, < 0 otherwise.
-int PrintVideoFrame(const VideoFrame& frame, FILE* file);
-int PrintVideoFrame(const I420BufferInterface& frame, FILE* file);
-
// Extract buffer from VideoFrame or I420BufferInterface (consecutive
// planes, no stride)
// Input:
@@ -102,20 +88,33 @@ double I420SSE(const I420BufferInterface& ref_buffer,
const I420BufferInterface& test_buffer);
// Compute PSNR for an I420 frame (all planes).
-// Returns the PSNR in decibel, to a maximum of kInfinitePSNR.
+// Returns the PSNR in decibel, to a maximum of kPerfectPSNR.
double I420PSNR(const VideoFrame* ref_frame, const VideoFrame* test_frame);
double I420PSNR(const I420BufferInterface& ref_buffer,
const I420BufferInterface& test_buffer);
+// Computes the weighted PSNR-YUV for an I420 buffer.
+//
+// For the definition and motivation, see
+// J. Ohm, G. J. Sullivan, H. Schwarz, T. K. Tan and T. Wiegand,
+// "Comparison of the Coding Efficiency of Video Coding Standards—Including
+// High Efficiency Video Coding (HEVC)," in IEEE Transactions on Circuits and
+// Systems for Video Technology, vol. 22, no. 12, pp. 1669-1684, Dec. 2012
+// doi: 10.1109/TCSVT.2012.2221192.
+//
+// Returns the PSNR-YUV in decibel, to a maximum of kPerfectPSNR.
+double I420WeightedPSNR(const I420BufferInterface& ref_buffer,
+ const I420BufferInterface& test_buffer);
+
// Compute SSIM for an I420 frame (all planes).
double I420SSIM(const VideoFrame* ref_frame, const VideoFrame* test_frame);
double I420SSIM(const I420BufferInterface& ref_buffer,
const I420BufferInterface& test_buffer);
// Helper function for scaling NV12 to NV12.
-// If the |src_width| and |src_height| matches the |dst_width| and |dst_height|,
-// then |tmp_buffer| is not used. In other cases, the minimum size of
-// |tmp_buffer| should be:
+// If the `src_width` and `src_height` matches the `dst_width` and `dst_height`,
+// then `tmp_buffer` is not used. In other cases, the minimum size of
+// `tmp_buffer` should be:
// (src_width/2) * (src_height/2) * 2 + (dst_width/2) * (dst_height/2) * 2
void NV12Scale(uint8_t* tmp_buffer,
const uint8_t* src_y,
diff --git a/common_video/libyuv/libyuv_unittest.cc b/common_video/libyuv/libyuv_unittest.cc
index 2a7992865a..f9c82f6284 100644
--- a/common_video/libyuv/libyuv_unittest.cc
+++ b/common_video/libyuv/libyuv_unittest.cc
@@ -31,6 +31,38 @@ void Calc16ByteAlignedStride(int width, int* stride_y, int* stride_uv) {
*stride_uv = 16 * ((width + 31) / 32);
}
+int PrintPlane(const uint8_t* buf,
+ int width,
+ int height,
+ int stride,
+ FILE* file) {
+ for (int i = 0; i < height; i++, buf += stride) {
+ if (fwrite(buf, 1, width, file) != static_cast<unsigned int>(width))
+ return -1;
+ }
+ return 0;
+}
+
+int PrintVideoFrame(const I420BufferInterface& frame, FILE* file) {
+ int width = frame.width();
+ int height = frame.height();
+ int chroma_width = frame.ChromaWidth();
+ int chroma_height = frame.ChromaHeight();
+
+ if (PrintPlane(frame.DataY(), width, height, frame.StrideY(), file) < 0) {
+ return -1;
+ }
+ if (PrintPlane(frame.DataU(), chroma_width, chroma_height, frame.StrideU(),
+ file) < 0) {
+ return -1;
+ }
+ if (PrintPlane(frame.DataV(), chroma_width, chroma_height, frame.StrideV(),
+ file) < 0) {
+ return -1;
+ }
+ return 0;
+}
+
} // Anonymous namespace
class TestLibYuv : public ::testing::Test {
@@ -82,10 +114,6 @@ void TestLibYuv::TearDown() {
source_file_ = NULL;
}
-TEST_F(TestLibYuv, ConvertSanityTest) {
- // TODO(mikhal)
-}
-
TEST_F(TestLibYuv, ConvertTest) {
// Reading YUV frame - testing on the first frame of the foreman sequence
int j = 0;
@@ -336,4 +364,23 @@ TEST_F(TestLibYuv, NV12Scale4x4to2x2) {
::testing::ElementsAre(Average(0, 2, 4, 6), Average(1, 3, 5, 7)));
}
+TEST(I420WeightedPSNRTest, SmokeTest) {
+ uint8_t ref_y[] = {0, 0, 0, 0};
+ uint8_t ref_uv[] = {0};
+ rtc::scoped_refptr<I420Buffer> ref_buffer =
+ I420Buffer::Copy(/*width=*/2, /*height=*/2, ref_y, /*stride_y=*/2, ref_uv,
+ /*stride_u=*/1, ref_uv, /*stride_v=*/1);
+
+ uint8_t test_y[] = {1, 1, 1, 1};
+ uint8_t test_uv[] = {2};
+ rtc::scoped_refptr<I420Buffer> test_buffer = I420Buffer::Copy(
+ /*width=*/2, /*height=*/2, test_y, /*stride_y=*/2, test_uv,
+ /*stride_u=*/1, test_uv, /*stride_v=*/1);
+
+ auto psnr = [](double mse) { return 10.0 * log10(255.0 * 255.0 / mse); };
+ EXPECT_NEAR(I420WeightedPSNR(*ref_buffer, *test_buffer),
+ (6.0 * psnr(1.0) + psnr(4.0) + psnr(4.0)) / 8.0,
+ /*abs_error=*/0.001);
+}
+
} // namespace webrtc
diff --git a/common_video/libyuv/webrtc_libyuv.cc b/common_video/libyuv/webrtc_libyuv.cc
index 833001cf1c..14e2d22612 100644
--- a/common_video/libyuv/webrtc_libyuv.cc
+++ b/common_video/libyuv/webrtc_libyuv.cc
@@ -14,7 +14,6 @@
#include "api/video/i420_buffer.h"
#include "common_video/include/video_frame_buffer.h"
-#include "rtc_base/bind.h"
#include "rtc_base/checks.h"
#include "third_party/libyuv/include/libyuv.h"
@@ -26,18 +25,15 @@ size_t CalcBufferSize(VideoType type, int width, int height) {
size_t buffer_size = 0;
switch (type) {
case VideoType::kI420:
- case VideoType::kNV12:
- case VideoType::kNV21:
case VideoType::kIYUV:
- case VideoType::kYV12: {
+ case VideoType::kYV12:
+ case VideoType::kNV12: {
int half_width = (width + 1) >> 1;
int half_height = (height + 1) >> 1;
buffer_size = width * height + half_width * half_height * 2;
break;
}
- case VideoType::kARGB4444:
case VideoType::kRGB565:
- case VideoType::kARGB1555:
case VideoType::kYUY2:
case VideoType::kUYVY:
buffer_size = width * height * 2;
@@ -50,49 +46,12 @@ size_t CalcBufferSize(VideoType type, int width, int height) {
buffer_size = width * height * 4;
break;
default:
- RTC_NOTREACHED();
+ RTC_DCHECK_NOTREACHED();
break;
}
return buffer_size;
}
-static int PrintPlane(const uint8_t* buf,
- int width,
- int height,
- int stride,
- FILE* file) {
- for (int i = 0; i < height; i++, buf += stride) {
- if (fwrite(buf, 1, width, file) != static_cast<unsigned int>(width))
- return -1;
- }
- return 0;
-}
-
-// TODO(nisse): Belongs with the test code?
-int PrintVideoFrame(const I420BufferInterface& frame, FILE* file) {
- int width = frame.width();
- int height = frame.height();
- int chroma_width = frame.ChromaWidth();
- int chroma_height = frame.ChromaHeight();
-
- if (PrintPlane(frame.DataY(), width, height, frame.StrideY(), file) < 0) {
- return -1;
- }
- if (PrintPlane(frame.DataU(), chroma_width, chroma_height, frame.StrideU(),
- file) < 0) {
- return -1;
- }
- if (PrintPlane(frame.DataV(), chroma_width, chroma_height, frame.StrideV(),
- file) < 0) {
- return -1;
- }
- return 0;
-}
-
-int PrintVideoFrame(const VideoFrame& frame, FILE* file) {
- return PrintVideoFrame(*frame.video_frame_buffer()->ToI420(), file);
-}
-
int ExtractBuffer(const rtc::scoped_refptr<I420BufferInterface>& input_frame,
size_t size,
uint8_t* buffer) {
@@ -135,8 +94,6 @@ int ConvertVideoType(VideoType video_type) {
return libyuv::FOURCC_YV12;
case VideoType::kRGB24:
return libyuv::FOURCC_24BG;
- case VideoType::kABGR:
- return libyuv::FOURCC_ABGR;
case VideoType::kRGB565:
return libyuv::FOURCC_RGBP;
case VideoType::kYUY2:
@@ -145,20 +102,14 @@ int ConvertVideoType(VideoType video_type) {
return libyuv::FOURCC_UYVY;
case VideoType::kMJPEG:
return libyuv::FOURCC_MJPG;
- case VideoType::kNV21:
- return libyuv::FOURCC_NV21;
- case VideoType::kNV12:
- return libyuv::FOURCC_NV12;
case VideoType::kARGB:
return libyuv::FOURCC_ARGB;
case VideoType::kBGRA:
return libyuv::FOURCC_BGRA;
- case VideoType::kARGB4444:
- return libyuv::FOURCC_R444;
- case VideoType::kARGB1555:
- return libyuv::FOURCC_RGBO;
+ case VideoType::kNV12:
+ return libyuv::FOURCC_NV12;
}
- RTC_NOTREACHED();
+ RTC_DCHECK_NOTREACHED();
return libyuv::FOURCC_ANY;
}
@@ -175,10 +126,6 @@ int ConvertFromI420(const VideoFrame& src_frame,
ConvertVideoType(dst_video_type));
}
-// Helper functions for keeping references alive.
-void KeepBufferRefs(rtc::scoped_refptr<webrtc::VideoFrameBuffer>,
- rtc::scoped_refptr<webrtc::VideoFrameBuffer>) {}
-
rtc::scoped_refptr<I420ABufferInterface> ScaleI420ABuffer(
const I420ABufferInterface& buffer,
int target_width,
@@ -197,7 +144,8 @@ rtc::scoped_refptr<I420ABufferInterface> ScaleI420ABuffer(
yuv_buffer->StrideY(), yuv_buffer->DataU(), yuv_buffer->StrideU(),
yuv_buffer->DataV(), yuv_buffer->StrideV(), axx_buffer->DataY(),
axx_buffer->StrideY(),
- rtc::Bind(&KeepBufferRefs, yuv_buffer, axx_buffer));
+ // To keep references alive.
+ [yuv_buffer, axx_buffer] {});
return merged_buffer;
}
@@ -310,6 +258,45 @@ double I420PSNR(const VideoFrame* ref_frame, const VideoFrame* test_frame) {
*test_frame->video_frame_buffer()->ToI420());
}
+double I420WeightedPSNR(const I420BufferInterface& ref_buffer,
+ const I420BufferInterface& test_buffer) {
+ RTC_DCHECK_GE(ref_buffer.width(), test_buffer.width());
+ RTC_DCHECK_GE(ref_buffer.height(), test_buffer.height());
+ if ((ref_buffer.width() != test_buffer.width()) ||
+ (ref_buffer.height() != test_buffer.height())) {
+ rtc::scoped_refptr<I420Buffer> scaled_ref_buffer =
+ I420Buffer::Create(test_buffer.width(), test_buffer.height());
+ scaled_ref_buffer->ScaleFrom(ref_buffer);
+ return I420WeightedPSNR(*scaled_ref_buffer, test_buffer);
+ }
+
+ // Luma.
+ int width_y = test_buffer.width();
+ int height_y = test_buffer.height();
+ uint64_t sse_y = libyuv::ComputeSumSquareErrorPlane(
+ ref_buffer.DataY(), ref_buffer.StrideY(), test_buffer.DataY(),
+ test_buffer.StrideY(), width_y, height_y);
+ uint64_t num_samples_y = (uint64_t)width_y * (uint64_t)height_y;
+ double psnr_y = libyuv::SumSquareErrorToPsnr(sse_y, num_samples_y);
+
+ // Chroma.
+ int width_uv = (width_y + 1) >> 1;
+ int height_uv = (height_y + 1) >> 1;
+ uint64_t sse_u = libyuv::ComputeSumSquareErrorPlane(
+ ref_buffer.DataU(), ref_buffer.StrideU(), test_buffer.DataU(),
+ test_buffer.StrideU(), width_uv, height_uv);
+ uint64_t num_samples_uv = (uint64_t)width_uv * (uint64_t)height_uv;
+ double psnr_u = libyuv::SumSquareErrorToPsnr(sse_u, num_samples_uv);
+ uint64_t sse_v = libyuv::ComputeSumSquareErrorPlane(
+ ref_buffer.DataV(), ref_buffer.StrideV(), test_buffer.DataV(),
+ test_buffer.StrideV(), width_uv, height_uv);
+ double psnr_v = libyuv::SumSquareErrorToPsnr(sse_v, num_samples_uv);
+
+  // Weights from Ohm et al. 2012.
+ double psnr_yuv = (6.0 * psnr_y + psnr_u + psnr_v) / 8.0;
+ return (psnr_yuv > kPerfectPSNR) ? kPerfectPSNR : psnr_yuv;
+}
+
// Compute SSIM for an I420A frame (all planes). Can upscale test frame.
double I420ASSIM(const I420ABufferInterface& ref_buffer,
const I420ABufferInterface& test_buffer) {
diff --git a/common_video/video_frame_buffer.cc b/common_video/video_frame_buffer.cc
index a13548f95e..6e93835c25 100644
--- a/common_video/video_frame_buffer.cc
+++ b/common_video/video_frame_buffer.cc
@@ -9,9 +9,9 @@
*/
#include "common_video/include/video_frame_buffer.h"
+#include "api/make_ref_counted.h"
#include "api/video/i420_buffer.h"
#include "rtc_base/checks.h"
-#include "rtc_base/ref_counted_object.h"
#include "third_party/libyuv/include/libyuv/convert.h"
namespace webrtc {
@@ -30,7 +30,7 @@ class WrappedYuvBuffer : public Base {
int u_stride,
const uint8_t* v_plane,
int v_stride,
- const rtc::Callback0<void>& no_longer_used)
+ std::function<void()> no_longer_used)
: width_(width),
height_(height),
y_plane_(y_plane),
@@ -70,7 +70,7 @@ class WrappedYuvBuffer : public Base {
const int y_stride_;
const int u_stride_;
const int v_stride_;
- rtc::Callback0<void> no_longer_used_cb_;
+ std::function<void()> no_longer_used_cb_;
};
// Template to implement a wrapped buffer for a I4??BufferInterface.
@@ -87,7 +87,7 @@ class WrappedYuvaBuffer : public WrappedYuvBuffer<BaseWithA> {
int v_stride,
const uint8_t* a_plane,
int a_stride,
- const rtc::Callback0<void>& no_longer_used)
+ std::function<void()> no_longer_used)
: WrappedYuvBuffer<BaseWithA>(width,
height,
y_plane,
@@ -124,6 +124,22 @@ rtc::scoped_refptr<I420BufferInterface> I444BufferBase::ToI420() {
return i420_buffer;
}
+class I422BufferBase : public I422BufferInterface {
+ public:
+ rtc::scoped_refptr<I420BufferInterface> ToI420() final;
+};
+
+rtc::scoped_refptr<I420BufferInterface> I422BufferBase::ToI420() {
+ rtc::scoped_refptr<I420Buffer> i420_buffer =
+ I420Buffer::Create(width(), height());
+ libyuv::I422ToI420(DataY(), StrideY(), DataU(), StrideU(), DataV(), StrideV(),
+ i420_buffer->MutableDataY(), i420_buffer->StrideY(),
+ i420_buffer->MutableDataU(), i420_buffer->StrideU(),
+ i420_buffer->MutableDataV(), i420_buffer->StrideV(),
+ width(), height());
+ return i420_buffer;
+}
+
// Template to implement a wrapped buffer for a PlanarYuv16BBuffer.
template <typename Base>
class WrappedYuv16BBuffer : public Base {
@@ -136,7 +152,7 @@ class WrappedYuv16BBuffer : public Base {
int u_stride,
const uint16_t* v_plane,
int v_stride,
- const rtc::Callback0<void>& no_longer_used)
+ std::function<void()> no_longer_used)
: width_(width),
height_(height),
y_plane_(y_plane),
@@ -176,7 +192,7 @@ class WrappedYuv16BBuffer : public Base {
const int y_stride_;
const int u_stride_;
const int v_stride_;
- rtc::Callback0<void> no_longer_used_cb_;
+ std::function<void()> no_longer_used_cb_;
};
class I010BufferBase : public I010BufferInterface {
@@ -199,6 +215,22 @@ rtc::scoped_refptr<I420BufferInterface> I010BufferBase::ToI420() {
return i420_buffer;
}
+class I210BufferBase : public I210BufferInterface {
+ public:
+ rtc::scoped_refptr<I420BufferInterface> ToI420() final;
+};
+
+rtc::scoped_refptr<I420BufferInterface> I210BufferBase::ToI420() {
+ rtc::scoped_refptr<I420Buffer> i420_buffer =
+ I420Buffer::Create(width(), height());
+ libyuv::I210ToI420(DataY(), StrideY(), DataU(), StrideU(), DataV(), StrideV(),
+ i420_buffer->MutableDataY(), i420_buffer->StrideY(),
+ i420_buffer->MutableDataU(), i420_buffer->StrideU(),
+ i420_buffer->MutableDataV(), i420_buffer->StrideV(),
+ width(), height());
+ return i420_buffer;
+}
+
} // namespace
rtc::scoped_refptr<I420BufferInterface> WrapI420Buffer(
@@ -210,9 +242,9 @@ rtc::scoped_refptr<I420BufferInterface> WrapI420Buffer(
int u_stride,
const uint8_t* v_plane,
int v_stride,
- const rtc::Callback0<void>& no_longer_used) {
+ std::function<void()> no_longer_used) {
return rtc::scoped_refptr<I420BufferInterface>(
- new rtc::RefCountedObject<WrappedYuvBuffer<I420BufferInterface>>(
+ rtc::make_ref_counted<WrappedYuvBuffer<I420BufferInterface>>(
width, height, y_plane, y_stride, u_plane, u_stride, v_plane,
v_stride, no_longer_used));
}
@@ -228,13 +260,29 @@ rtc::scoped_refptr<I420ABufferInterface> WrapI420ABuffer(
int v_stride,
const uint8_t* a_plane,
int a_stride,
- const rtc::Callback0<void>& no_longer_used) {
+ std::function<void()> no_longer_used) {
return rtc::scoped_refptr<I420ABufferInterface>(
- new rtc::RefCountedObject<WrappedYuvaBuffer<I420ABufferInterface>>(
+ rtc::make_ref_counted<WrappedYuvaBuffer<I420ABufferInterface>>(
width, height, y_plane, y_stride, u_plane, u_stride, v_plane,
v_stride, a_plane, a_stride, no_longer_used));
}
+rtc::scoped_refptr<I422BufferInterface> WrapI422Buffer(
+ int width,
+ int height,
+ const uint8_t* y_plane,
+ int y_stride,
+ const uint8_t* u_plane,
+ int u_stride,
+ const uint8_t* v_plane,
+ int v_stride,
+ std::function<void()> no_longer_used) {
+ return rtc::scoped_refptr<I422BufferBase>(
+ rtc::make_ref_counted<WrappedYuvBuffer<I422BufferBase>>(
+ width, height, y_plane, y_stride, u_plane, u_stride, v_plane,
+ v_stride, no_longer_used));
+}
+
rtc::scoped_refptr<I444BufferInterface> WrapI444Buffer(
int width,
int height,
@@ -244,9 +292,9 @@ rtc::scoped_refptr<I444BufferInterface> WrapI444Buffer(
int u_stride,
const uint8_t* v_plane,
int v_stride,
- const rtc::Callback0<void>& no_longer_used) {
+ std::function<void()> no_longer_used) {
return rtc::scoped_refptr<I444BufferInterface>(
- new rtc::RefCountedObject<WrappedYuvBuffer<I444BufferBase>>(
+ rtc::make_ref_counted<WrappedYuvBuffer<I444BufferBase>>(
width, height, y_plane, y_stride, u_plane, u_stride, v_plane,
v_stride, no_longer_used));
}
@@ -261,17 +309,19 @@ rtc::scoped_refptr<PlanarYuvBuffer> WrapYuvBuffer(
int u_stride,
const uint8_t* v_plane,
int v_stride,
- const rtc::Callback0<void>& no_longer_used) {
+ std::function<void()> no_longer_used) {
switch (type) {
case VideoFrameBuffer::Type::kI420:
return WrapI420Buffer(width, height, y_plane, y_stride, u_plane, u_stride,
v_plane, v_stride, no_longer_used);
+ case VideoFrameBuffer::Type::kI422:
+ return WrapI422Buffer(width, height, y_plane, y_stride, u_plane, u_stride,
+ v_plane, v_stride, no_longer_used);
case VideoFrameBuffer::Type::kI444:
return WrapI444Buffer(width, height, y_plane, y_stride, u_plane, u_stride,
v_plane, v_stride, no_longer_used);
default:
- FATAL() << "Unexpected frame buffer type.";
- return nullptr;
+ RTC_CHECK_NOTREACHED();
}
}
@@ -284,9 +334,25 @@ rtc::scoped_refptr<I010BufferInterface> WrapI010Buffer(
int u_stride,
const uint16_t* v_plane,
int v_stride,
- const rtc::Callback0<void>& no_longer_used) {
+ std::function<void()> no_longer_used) {
return rtc::scoped_refptr<I010BufferInterface>(
- new rtc::RefCountedObject<WrappedYuv16BBuffer<I010BufferBase>>(
+ rtc::make_ref_counted<WrappedYuv16BBuffer<I010BufferBase>>(
+ width, height, y_plane, y_stride, u_plane, u_stride, v_plane,
+ v_stride, no_longer_used));
+}
+
+rtc::scoped_refptr<I210BufferInterface> WrapI210Buffer(
+ int width,
+ int height,
+ const uint16_t* y_plane,
+ int y_stride,
+ const uint16_t* u_plane,
+ int u_stride,
+ const uint16_t* v_plane,
+ int v_stride,
+ std::function<void()> no_longer_used) {
+ return rtc::scoped_refptr<I210BufferInterface>(
+ rtc::make_ref_counted<WrappedYuv16BBuffer<I210BufferBase>>(
width, height, y_plane, y_stride, u_plane, u_stride, v_plane,
v_stride, no_longer_used));
}
diff --git a/common_video/video_frame_buffer_pool.cc b/common_video/video_frame_buffer_pool.cc
new file mode 100644
index 0000000000..7f695814f9
--- /dev/null
+++ b/common_video/video_frame_buffer_pool.cc
@@ -0,0 +1,312 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "common_video/include/video_frame_buffer_pool.h"
+
+#include <limits>
+
+#include "api/make_ref_counted.h"
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+
+namespace {
+bool HasOneRef(const rtc::scoped_refptr<VideoFrameBuffer>& buffer) {
+ // Cast to rtc::RefCountedObject is safe because this function is only called
+  // on locally created VideoFrameBuffers, which are all
+  // `rtc::RefCountedObject<T>` for T in {I420Buffer, I422Buffer, I444Buffer,
+  // I010Buffer, I210Buffer, NV12Buffer}.
+ switch (buffer->type()) {
+ case VideoFrameBuffer::Type::kI420: {
+ return static_cast<rtc::RefCountedObject<I420Buffer>*>(buffer.get())
+ ->HasOneRef();
+ }
+ case VideoFrameBuffer::Type::kI444: {
+ return static_cast<rtc::RefCountedObject<I444Buffer>*>(buffer.get())
+ ->HasOneRef();
+ }
+ case VideoFrameBuffer::Type::kI422: {
+ return static_cast<rtc::RefCountedObject<I422Buffer>*>(buffer.get())
+ ->HasOneRef();
+ }
+ case VideoFrameBuffer::Type::kI010: {
+ return static_cast<rtc::RefCountedObject<I010Buffer>*>(buffer.get())
+ ->HasOneRef();
+ }
+ case VideoFrameBuffer::Type::kI210: {
+ return static_cast<rtc::RefCountedObject<I210Buffer>*>(buffer.get())
+ ->HasOneRef();
+ }
+ case VideoFrameBuffer::Type::kNV12: {
+ return static_cast<rtc::RefCountedObject<NV12Buffer>*>(buffer.get())
+ ->HasOneRef();
+ }
+ default:
+ RTC_DCHECK_NOTREACHED();
+ }
+ return false;
+}
+
+} // namespace
+
+VideoFrameBufferPool::VideoFrameBufferPool() : VideoFrameBufferPool(false) {}
+
+VideoFrameBufferPool::VideoFrameBufferPool(bool zero_initialize)
+ : VideoFrameBufferPool(zero_initialize,
+ std::numeric_limits<size_t>::max()) {}
+
+VideoFrameBufferPool::VideoFrameBufferPool(bool zero_initialize,
+ size_t max_number_of_buffers)
+ : zero_initialize_(zero_initialize),
+ max_number_of_buffers_(max_number_of_buffers) {}
+
+VideoFrameBufferPool::~VideoFrameBufferPool() = default;
+
+void VideoFrameBufferPool::Release() {
+ buffers_.clear();
+}
+
+bool VideoFrameBufferPool::Resize(size_t max_number_of_buffers) {
+ RTC_DCHECK_RUNS_SERIALIZED(&race_checker_);
+ size_t used_buffers_count = 0;
+ for (const rtc::scoped_refptr<VideoFrameBuffer>& buffer : buffers_) {
+ // If the buffer is in use, the ref count will be >= 2, one from the list we
+ // are looping over and one from the application. If the ref count is 1,
+ // then the list we are looping over holds the only reference and it's safe
+ // to reuse.
+ if (!HasOneRef(buffer)) {
+ used_buffers_count++;
+ }
+ }
+ if (used_buffers_count > max_number_of_buffers) {
+ return false;
+ }
+ max_number_of_buffers_ = max_number_of_buffers;
+
+ size_t buffers_to_purge = buffers_.size() - max_number_of_buffers_;
+ auto iter = buffers_.begin();
+ while (iter != buffers_.end() && buffers_to_purge > 0) {
+ if (HasOneRef(*iter)) {
+ iter = buffers_.erase(iter);
+ buffers_to_purge--;
+ } else {
+ ++iter;
+ }
+ }
+ return true;
+}
+
+rtc::scoped_refptr<I420Buffer> VideoFrameBufferPool::CreateI420Buffer(
+ int width,
+ int height) {
+ RTC_DCHECK_RUNS_SERIALIZED(&race_checker_);
+
+ rtc::scoped_refptr<VideoFrameBuffer> existing_buffer =
+ GetExistingBuffer(width, height, VideoFrameBuffer::Type::kI420);
+ if (existing_buffer) {
+ // Cast is safe because the only way kI420 buffer is created is
+ // in the same function below, where `RefCountedObject<I420Buffer>` is
+ // created.
+ rtc::RefCountedObject<I420Buffer>* raw_buffer =
+ static_cast<rtc::RefCountedObject<I420Buffer>*>(existing_buffer.get());
+ // Creates a new scoped_refptr, which is also pointing to the same
+ // RefCountedObject as buffer, increasing ref count.
+ return rtc::scoped_refptr<I420Buffer>(raw_buffer);
+ }
+
+ if (buffers_.size() >= max_number_of_buffers_)
+ return nullptr;
+ // Allocate new buffer.
+ rtc::scoped_refptr<I420Buffer> buffer =
+ rtc::make_ref_counted<I420Buffer>(width, height);
+
+ if (zero_initialize_)
+ buffer->InitializeData();
+
+ buffers_.push_back(buffer);
+ return buffer;
+}
+
+rtc::scoped_refptr<I444Buffer> VideoFrameBufferPool::CreateI444Buffer(
+ int width,
+ int height) {
+ RTC_DCHECK_RUNS_SERIALIZED(&race_checker_);
+
+ rtc::scoped_refptr<VideoFrameBuffer> existing_buffer =
+ GetExistingBuffer(width, height, VideoFrameBuffer::Type::kI444);
+ if (existing_buffer) {
+ // Cast is safe because the only way kI444 buffer is created is
+ // in the same function below, where |RefCountedObject<I444Buffer>|
+ // is created.
+ rtc::RefCountedObject<I444Buffer>* raw_buffer =
+ static_cast<rtc::RefCountedObject<I444Buffer>*>(existing_buffer.get());
+ // Creates a new scoped_refptr, which is also pointing to the same
+ // RefCountedObject as buffer, increasing ref count.
+ return rtc::scoped_refptr<I444Buffer>(raw_buffer);
+ }
+
+ if (buffers_.size() >= max_number_of_buffers_)
+ return nullptr;
+ // Allocate new buffer.
+ rtc::scoped_refptr<I444Buffer> buffer =
+ rtc::make_ref_counted<I444Buffer>(width, height);
+
+ if (zero_initialize_)
+ buffer->InitializeData();
+
+ buffers_.push_back(buffer);
+ return buffer;
+}
+
+rtc::scoped_refptr<I422Buffer> VideoFrameBufferPool::CreateI422Buffer(
+ int width,
+ int height) {
+ RTC_DCHECK_RUNS_SERIALIZED(&race_checker_);
+
+ rtc::scoped_refptr<VideoFrameBuffer> existing_buffer =
+ GetExistingBuffer(width, height, VideoFrameBuffer::Type::kI422);
+ if (existing_buffer) {
+ // Cast is safe because the only way kI422 buffer is created is
+ // in the same function below, where |RefCountedObject<I422Buffer>|
+ // is created.
+ rtc::RefCountedObject<I422Buffer>* raw_buffer =
+ static_cast<rtc::RefCountedObject<I422Buffer>*>(existing_buffer.get());
+ // Creates a new scoped_refptr, which is also pointing to the same
+ // RefCountedObject as buffer, increasing ref count.
+ return rtc::scoped_refptr<I422Buffer>(raw_buffer);
+ }
+
+ if (buffers_.size() >= max_number_of_buffers_)
+ return nullptr;
+ // Allocate new buffer.
+ rtc::scoped_refptr<I422Buffer> buffer =
+ rtc::make_ref_counted<I422Buffer>(width, height);
+
+ if (zero_initialize_)
+ buffer->InitializeData();
+
+ buffers_.push_back(buffer);
+ return buffer;
+}
+
+rtc::scoped_refptr<NV12Buffer> VideoFrameBufferPool::CreateNV12Buffer(
+ int width,
+ int height) {
+ RTC_DCHECK_RUNS_SERIALIZED(&race_checker_);
+
+ rtc::scoped_refptr<VideoFrameBuffer> existing_buffer =
+ GetExistingBuffer(width, height, VideoFrameBuffer::Type::kNV12);
+ if (existing_buffer) {
+    // Cast is safe because the only way kNV12 buffer is created is
+    // in the same function below, where `RefCountedObject<NV12Buffer>` is
+    // created.
+ rtc::RefCountedObject<NV12Buffer>* raw_buffer =
+ static_cast<rtc::RefCountedObject<NV12Buffer>*>(existing_buffer.get());
+ // Creates a new scoped_refptr, which is also pointing to the same
+ // RefCountedObject as buffer, increasing ref count.
+ return rtc::scoped_refptr<NV12Buffer>(raw_buffer);
+ }
+
+ if (buffers_.size() >= max_number_of_buffers_)
+ return nullptr;
+ // Allocate new buffer.
+ rtc::scoped_refptr<NV12Buffer> buffer =
+ rtc::make_ref_counted<NV12Buffer>(width, height);
+
+ if (zero_initialize_)
+ buffer->InitializeData();
+
+ buffers_.push_back(buffer);
+ return buffer;
+}
+
+rtc::scoped_refptr<I010Buffer> VideoFrameBufferPool::CreateI010Buffer(
+ int width,
+ int height) {
+ RTC_DCHECK_RUNS_SERIALIZED(&race_checker_);
+
+ rtc::scoped_refptr<VideoFrameBuffer> existing_buffer =
+ GetExistingBuffer(width, height, VideoFrameBuffer::Type::kI010);
+ if (existing_buffer) {
+ // Cast is safe because the only way kI010 buffer is created is
+ // in the same function below, where |RefCountedObject<I010Buffer>|
+ // is created.
+ rtc::RefCountedObject<I010Buffer>* raw_buffer =
+ static_cast<rtc::RefCountedObject<I010Buffer>*>(existing_buffer.get());
+ // Creates a new scoped_refptr, which is also pointing to the same
+ // RefCountedObject as buffer, increasing ref count.
+ return rtc::scoped_refptr<I010Buffer>(raw_buffer);
+ }
+
+ if (buffers_.size() >= max_number_of_buffers_)
+ return nullptr;
+ // Allocate new buffer.
+ rtc::scoped_refptr<I010Buffer> buffer = I010Buffer::Create(width, height);
+
+ buffers_.push_back(buffer);
+ return buffer;
+}
+
+rtc::scoped_refptr<I210Buffer> VideoFrameBufferPool::CreateI210Buffer(
+ int width,
+ int height) {
+ RTC_DCHECK_RUNS_SERIALIZED(&race_checker_);
+
+ rtc::scoped_refptr<VideoFrameBuffer> existing_buffer =
+ GetExistingBuffer(width, height, VideoFrameBuffer::Type::kI210);
+ if (existing_buffer) {
+ // Cast is safe because the only way kI210 buffer is created is
+ // in the same function below, where |RefCountedObject<I210Buffer>|
+ // is created.
+ rtc::RefCountedObject<I210Buffer>* raw_buffer =
+ static_cast<rtc::RefCountedObject<I210Buffer>*>(existing_buffer.get());
+ // Creates a new scoped_refptr, which is also pointing to the same
+ // RefCountedObject as buffer, increasing ref count.
+ return rtc::scoped_refptr<I210Buffer>(raw_buffer);
+ }
+
+ if (buffers_.size() >= max_number_of_buffers_)
+ return nullptr;
+ // Allocate new buffer.
+ rtc::scoped_refptr<I210Buffer> buffer = I210Buffer::Create(width, height);
+
+ buffers_.push_back(buffer);
+ return buffer;
+}
+
+rtc::scoped_refptr<VideoFrameBuffer> VideoFrameBufferPool::GetExistingBuffer(
+ int width,
+ int height,
+ VideoFrameBuffer::Type type) {
+ // Release buffers with wrong resolution or different type.
+ for (auto it = buffers_.begin(); it != buffers_.end();) {
+ const auto& buffer = *it;
+ if (buffer->width() != width || buffer->height() != height ||
+ buffer->type() != type) {
+ it = buffers_.erase(it);
+ } else {
+ ++it;
+ }
+ }
+ // Look for a free buffer.
+ for (const rtc::scoped_refptr<VideoFrameBuffer>& buffer : buffers_) {
+ // If the buffer is in use, the ref count will be >= 2, one from the list we
+ // are looping over and one from the application. If the ref count is 1,
+ // then the list we are looping over holds the only reference and it's safe
+ // to reuse.
+ if (HasOneRef(buffer)) {
+ RTC_CHECK(buffer->type() == type);
+ return buffer;
+ }
+ }
+ return nullptr;
+}
+
+} // namespace webrtc
diff --git a/common_video/video_frame_buffer_pool_unittest.cc b/common_video/video_frame_buffer_pool_unittest.cc
new file mode 100644
index 0000000000..f177468617
--- /dev/null
+++ b/common_video/video_frame_buffer_pool_unittest.cc
@@ -0,0 +1,132 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "common_video/include/video_frame_buffer_pool.h"
+
+#include <stdint.h>
+#include <string.h>
+
+#include "api/scoped_refptr.h"
+#include "api/video/i420_buffer.h"
+#include "api/video/video_frame_buffer.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+
+TEST(TestVideoFrameBufferPool, SimpleFrameReuse) {
+ VideoFrameBufferPool pool;
+ auto buffer = pool.CreateI420Buffer(16, 16);
+ EXPECT_EQ(16, buffer->width());
+ EXPECT_EQ(16, buffer->height());
+ // Extract non-refcounted pointers for testing.
+ const uint8_t* y_ptr = buffer->DataY();
+ const uint8_t* u_ptr = buffer->DataU();
+ const uint8_t* v_ptr = buffer->DataV();
+ // Release buffer so that it is returned to the pool.
+ buffer = nullptr;
+  // Check that the memory is reused.
+ buffer = pool.CreateI420Buffer(16, 16);
+ EXPECT_EQ(y_ptr, buffer->DataY());
+ EXPECT_EQ(u_ptr, buffer->DataU());
+ EXPECT_EQ(v_ptr, buffer->DataV());
+}
+
+TEST(TestVideoFrameBufferPool, FailToReuseWrongSize) {
+ // Set max frames to 1, just to make sure the first buffer is being released.
+ VideoFrameBufferPool pool(/*zero_initialize=*/false, 1);
+ auto buffer = pool.CreateI420Buffer(16, 16);
+ EXPECT_EQ(16, buffer->width());
+ EXPECT_EQ(16, buffer->height());
+ // Release buffer so that it is returned to the pool.
+ buffer = nullptr;
+ // Check that the pool doesn't try to reuse buffers of incorrect size.
+ buffer = pool.CreateI420Buffer(32, 16);
+ ASSERT_TRUE(buffer);
+ EXPECT_EQ(32, buffer->width());
+ EXPECT_EQ(16, buffer->height());
+}
+
+TEST(TestVideoFrameBufferPool, FrameValidAfterPoolDestruction) {
+ rtc::scoped_refptr<I420Buffer> buffer;
+ {
+ VideoFrameBufferPool pool;
+ buffer = pool.CreateI420Buffer(16, 16);
+ }
+ EXPECT_EQ(16, buffer->width());
+ EXPECT_EQ(16, buffer->height());
+ // Access buffer, so that ASAN could find any issues if buffer
+ // doesn't outlive the buffer pool.
+ memset(buffer->MutableDataY(), 0xA5, 16 * buffer->StrideY());
+}
+
+TEST(TestVideoFrameBufferPool, MaxNumberOfBuffers) {
+ VideoFrameBufferPool pool(false, 1);
+ auto buffer = pool.CreateI420Buffer(16, 16);
+ EXPECT_NE(nullptr, buffer.get());
+ EXPECT_EQ(nullptr, pool.CreateI420Buffer(16, 16).get());
+}
+
+TEST(TestVideoFrameBufferPool, ProducesNv12) {
+ VideoFrameBufferPool pool(false, 1);
+ auto buffer = pool.CreateNV12Buffer(16, 16);
+ EXPECT_NE(nullptr, buffer.get());
+}
+
+TEST(TestVideoFrameBufferPool, ProducesI422) {
+ VideoFrameBufferPool pool(false, 1);
+ auto buffer = pool.CreateI422Buffer(16, 16);
+ EXPECT_NE(nullptr, buffer.get());
+}
+
+TEST(TestVideoFrameBufferPool, ProducesI444) {
+ VideoFrameBufferPool pool(false, 1);
+ auto buffer = pool.CreateI444Buffer(16, 16);
+ EXPECT_NE(nullptr, buffer.get());
+}
+
+TEST(TestVideoFrameBufferPool, ProducesI010) {
+ VideoFrameBufferPool pool(false, 1);
+ auto buffer = pool.CreateI010Buffer(16, 16);
+ EXPECT_NE(nullptr, buffer.get());
+}
+
+TEST(TestVideoFrameBufferPool, ProducesI210) {
+ VideoFrameBufferPool pool(false, 1);
+ auto buffer = pool.CreateI210Buffer(16, 16);
+ EXPECT_NE(nullptr, buffer.get());
+}
+
+TEST(TestVideoFrameBufferPool, SwitchingPixelFormat) {
+ VideoFrameBufferPool pool(false, 1);
+ auto buffeNV12 = pool.CreateNV12Buffer(16, 16);
+ EXPECT_EQ(nullptr, pool.CreateNV12Buffer(16, 16).get());
+
+ auto bufferI420 = pool.CreateI420Buffer(16, 16);
+ EXPECT_NE(nullptr, bufferI420.get());
+ EXPECT_EQ(nullptr, pool.CreateI420Buffer(16, 16).get());
+
+ auto bufferI444 = pool.CreateI444Buffer(16, 16);
+ EXPECT_NE(nullptr, bufferI444.get());
+ EXPECT_EQ(nullptr, pool.CreateI444Buffer(16, 16).get());
+
+ auto bufferI422 = pool.CreateI422Buffer(16, 16);
+ EXPECT_NE(nullptr, bufferI422.get());
+ EXPECT_EQ(nullptr, pool.CreateI422Buffer(16, 16).get());
+
+ auto bufferI010 = pool.CreateI010Buffer(16, 16);
+ EXPECT_NE(nullptr, bufferI010.get());
+ EXPECT_EQ(nullptr, pool.CreateI010Buffer(16, 16).get());
+
+ auto bufferI210 = pool.CreateI210Buffer(16, 16);
+ EXPECT_NE(nullptr, bufferI210.get());
+ EXPECT_EQ(nullptr, pool.CreateI210Buffer(16, 16).get());
+}
+
+} // namespace webrtc
diff --git a/common_video/video_frame_unittest.cc b/common_video/video_frame_unittest.cc
index 225a7d3089..15f07a9401 100644
--- a/common_video/video_frame_unittest.cc
+++ b/common_video/video_frame_unittest.cc
@@ -14,8 +14,11 @@
#include <string.h>
#include "api/video/i010_buffer.h"
+#include "api/video/i210_buffer.h"
#include "api/video/i420_buffer.h"
-#include "rtc_base/bind.h"
+#include "api/video/i422_buffer.h"
+#include "api/video/i444_buffer.h"
+#include "api/video/nv12_buffer.h"
#include "rtc_base/time_utils.h"
#include "test/fake_texture_frame.h"
#include "test/frame_utils.h"
@@ -25,114 +28,59 @@ namespace webrtc {
namespace {
-// Helper class to delegate calls to appropriate container.
-class PlanarYuvBufferFactory {
- public:
- static rtc::scoped_refptr<PlanarYuvBuffer> Create(VideoFrameBuffer::Type type,
- int width,
- int height) {
- switch (type) {
- case VideoFrameBuffer::Type::kI420:
- return I420Buffer::Create(width, height);
- case VideoFrameBuffer::Type::kI010:
- return I010Buffer::Create(width, height);
- default:
- RTC_NOTREACHED();
- }
- return nullptr;
- }
-
- static rtc::scoped_refptr<PlanarYuvBuffer> Copy(const VideoFrameBuffer& src) {
- switch (src.type()) {
- case VideoFrameBuffer::Type::kI420:
- return I420Buffer::Copy(src);
- case VideoFrameBuffer::Type::kI010:
- return I010Buffer::Copy(*src.GetI010());
- default:
- RTC_NOTREACHED();
- }
- return nullptr;
- }
+struct SubSampling {
+ int x;
+ int y;
+};
- static rtc::scoped_refptr<PlanarYuvBuffer> Rotate(const VideoFrameBuffer& src,
- VideoRotation rotation) {
- switch (src.type()) {
- case VideoFrameBuffer::Type::kI420:
- return I420Buffer::Rotate(src, rotation);
- case VideoFrameBuffer::Type::kI010:
- return I010Buffer::Rotate(*src.GetI010(), rotation);
- default:
- RTC_NOTREACHED();
- }
- return nullptr;
+SubSampling SubSamplingForType(VideoFrameBuffer::Type type) {
+ switch (type) {
+ case VideoFrameBuffer::Type::kI420:
+ return {.x = 2, .y = 2};
+ case VideoFrameBuffer::Type::kI420A:
+ return {.x = 2, .y = 2};
+ case VideoFrameBuffer::Type::kI422:
+ return {.x = 2, .y = 1};
+ case VideoFrameBuffer::Type::kI444:
+ return {.x = 1, .y = 1};
+ case VideoFrameBuffer::Type::kI010:
+ return {.x = 2, .y = 2};
+ case VideoFrameBuffer::Type::kI210:
+ return {.x = 2, .y = 1};
+ default:
+ return {};
}
+}
- static rtc::scoped_refptr<PlanarYuvBuffer> CropAndScaleFrom(
- const VideoFrameBuffer& src,
- int offset_x,
- int offset_y,
- int crop_width,
- int crop_height) {
- switch (src.type()) {
- case VideoFrameBuffer::Type::kI420: {
- rtc::scoped_refptr<I420Buffer> buffer =
- I420Buffer::Create(crop_width, crop_height);
- buffer->CropAndScaleFrom(*src.GetI420(), offset_x, offset_y, crop_width,
- crop_height);
- return buffer;
- }
- case VideoFrameBuffer::Type::kI010: {
- rtc::scoped_refptr<I010Buffer> buffer =
- I010Buffer::Create(crop_width, crop_height);
- buffer->CropAndScaleFrom(*src.GetI010(), offset_x, offset_y, crop_width,
- crop_height);
- return buffer;
- }
- default:
- RTC_NOTREACHED();
+// Helper function to create a buffer and fill it with a gradient for
+// PlanarYuvBuffer based buffers.
+template <class T>
+rtc::scoped_refptr<T> CreateGradient(int width, int height) {
+ rtc::scoped_refptr<T> buffer(T::Create(width, height));
+ // Initialize with gradient, Y = 128(x/w + y/h), U = 256 x/w, V = 256 y/h
+ for (int x = 0; x < width; x++) {
+ for (int y = 0; y < height; y++) {
+ buffer->MutableDataY()[x + y * width] =
+ 128 * (x * height + y * width) / (width * height);
}
- return nullptr;
- }
-
- static rtc::scoped_refptr<PlanarYuvBuffer> CropAndScaleFrom(
- const VideoFrameBuffer& src,
- int crop_width,
- int crop_height) {
- const int out_width =
- std::min(src.width(), crop_width * src.height() / crop_height);
- const int out_height =
- std::min(src.height(), crop_height * src.width() / crop_width);
- return CropAndScaleFrom(src, (src.width() - out_width) / 2,
- (src.height() - out_height) / 2, out_width,
- out_height);
}
-
- static rtc::scoped_refptr<PlanarYuvBuffer>
- ScaleFrom(const VideoFrameBuffer& src, int crop_width, int crop_height) {
- switch (src.type()) {
- case VideoFrameBuffer::Type::kI420: {
- rtc::scoped_refptr<I420Buffer> buffer =
- I420Buffer::Create(crop_width, crop_height);
- buffer->ScaleFrom(*src.GetI420());
- return buffer;
- }
- case VideoFrameBuffer::Type::kI010: {
- rtc::scoped_refptr<I010Buffer> buffer =
- I010Buffer::Create(crop_width, crop_height);
- buffer->ScaleFrom(*src.GetI010());
- return buffer;
- }
- default:
- RTC_NOTREACHED();
+ int chroma_width = buffer->ChromaWidth();
+ int chroma_height = buffer->ChromaHeight();
+ for (int x = 0; x < chroma_width; x++) {
+ for (int y = 0; y < chroma_height; y++) {
+ buffer->MutableDataU()[x + y * chroma_width] =
+ 255 * x / (chroma_width - 1);
+ buffer->MutableDataV()[x + y * chroma_width] =
+ 255 * y / (chroma_height - 1);
}
- return nullptr;
}
-};
+ return buffer;
+}
-rtc::scoped_refptr<PlanarYuvBuffer> CreateGradient(VideoFrameBuffer::Type type,
- int width,
- int height) {
- rtc::scoped_refptr<I420Buffer> buffer(I420Buffer::Create(width, height));
+// Helper function to create a buffer and fill it with a gradient.
+rtc::scoped_refptr<NV12BufferInterface> CreateNV12Gradient(int width,
+ int height) {
+ rtc::scoped_refptr<NV12Buffer> buffer(NV12Buffer::Create(width, height));
// Initialize with gradient, Y = 128(x/w + y/h), U = 256 x/w, V = 256 y/h
for (int x = 0; x < width; x++) {
for (int y = 0; y < height; y++) {
@@ -144,29 +92,29 @@ rtc::scoped_refptr<PlanarYuvBuffer> CreateGradient(VideoFrameBuffer::Type type,
int chroma_height = buffer->ChromaHeight();
for (int x = 0; x < chroma_width; x++) {
for (int y = 0; y < chroma_height; y++) {
- buffer->MutableDataU()[x + y * chroma_width] =
+ buffer->MutableDataUV()[x * 2 + y * buffer->StrideUV()] =
255 * x / (chroma_width - 1);
- buffer->MutableDataV()[x + y * chroma_width] =
+ buffer->MutableDataUV()[x * 2 + 1 + y * buffer->StrideUV()] =
255 * y / (chroma_height - 1);
}
}
- if (type == VideoFrameBuffer::Type::kI420)
- return buffer;
-
- RTC_DCHECK(type == VideoFrameBuffer::Type::kI010);
- return I010Buffer::Copy(*buffer);
+ return buffer;
}
// The offsets and sizes describe the rectangle extracted from the
// original (gradient) frame, in relative coordinates where the
// original frame correspond to the unit square, 0.0 <= x, y < 1.0.
-void CheckCrop(const webrtc::I420BufferInterface& frame,
+template <class T>
+void CheckCrop(const T& frame,
double offset_x,
double offset_y,
double rel_width,
double rel_height) {
int width = frame.width();
int height = frame.height();
+
+ SubSampling plane_divider = SubSamplingForType(frame.type());
+
// Check that pixel values in the corners match the gradient used
// for initialization.
for (int i = 0; i < 2; i++) {
@@ -181,18 +129,23 @@ void CheckCrop(const webrtc::I420BufferInterface& frame,
EXPECT_NEAR(frame.DataY()[x + y * frame.StrideY()] / 256.0,
(orig_x + orig_y) / 2, 0.02);
- EXPECT_NEAR(frame.DataU()[x / 2 + (y / 2) * frame.StrideU()] / 256.0,
+ EXPECT_NEAR(frame.DataU()[x / plane_divider.x +
+ (y / plane_divider.y) * frame.StrideU()] /
+ 256.0,
orig_x, 0.02);
- EXPECT_NEAR(frame.DataV()[x / 2 + (y / 2) * frame.StrideV()] / 256.0,
+ EXPECT_NEAR(frame.DataV()[x / plane_divider.x +
+ (y / plane_divider.y) * frame.StrideV()] /
+ 256.0,
orig_y, 0.02);
}
}
}
+template <class T>
void CheckRotate(int width,
int height,
webrtc::VideoRotation rotation,
- const webrtc::I420BufferInterface& rotated) {
+ const T& rotated) {
int rotated_width = width;
int rotated_height = height;
@@ -215,56 +168,31 @@ void CheckRotate(int width,
} colors[] = {{0, 0, 0}, {127, 255, 0}, {255, 255, 255}, {127, 0, 255}};
int corner_offset = static_cast<int>(rotation) / 90;
+ SubSampling plane_divider = SubSamplingForType(rotated.type());
+
for (int i = 0; i < 4; i++) {
int j = (i + corner_offset) % 4;
int x = corners[j].x * (rotated_width - 1);
int y = corners[j].y * (rotated_height - 1);
EXPECT_EQ(colors[i].y, rotated.DataY()[x + y * rotated.StrideY()]);
- EXPECT_EQ(colors[i].u,
- rotated.DataU()[(x / 2) + (y / 2) * rotated.StrideU()]);
- EXPECT_EQ(colors[i].v,
- rotated.DataV()[(x / 2) + (y / 2) * rotated.StrideV()]);
- }
-}
-
-int GetU(rtc::scoped_refptr<PlanarYuvBuffer> buf, int col, int row) {
- if (buf->type() == VideoFrameBuffer::Type::kI420) {
- return buf->GetI420()
- ->DataU()[row / 2 * buf->GetI420()->StrideU() + col / 2];
- } else {
- return buf->GetI010()
- ->DataU()[row / 2 * buf->GetI010()->StrideU() + col / 2];
- }
-}
-
-int GetV(rtc::scoped_refptr<PlanarYuvBuffer> buf, int col, int row) {
- if (buf->type() == VideoFrameBuffer::Type::kI420) {
- return buf->GetI420()
- ->DataV()[row / 2 * buf->GetI420()->StrideV() + col / 2];
- } else {
- return buf->GetI010()
- ->DataV()[row / 2 * buf->GetI010()->StrideV() + col / 2];
- }
-}
-
-int GetY(rtc::scoped_refptr<PlanarYuvBuffer> buf, int col, int row) {
- if (buf->type() == VideoFrameBuffer::Type::kI420) {
- return buf->GetI420()->DataY()[row * buf->GetI420()->StrideY() + col];
- } else {
- return buf->GetI010()->DataY()[row * buf->GetI010()->StrideY() + col];
- }
-}
-
-void PasteFromBuffer(PlanarYuvBuffer* canvas,
- const PlanarYuvBuffer& picture,
- int offset_col,
- int offset_row) {
- if (canvas->type() == VideoFrameBuffer::Type::kI420) {
- I420Buffer* buf = static_cast<I420Buffer*>(canvas);
- buf->PasteFrom(*picture.GetI420(), offset_col, offset_row);
- } else {
- I010Buffer* buf = static_cast<I010Buffer*>(canvas);
- buf->PasteFrom(*picture.GetI010(), offset_col, offset_row);
+ if (rotated.type() == VideoFrameBuffer::Type::kI422 ||
+ rotated.type() == VideoFrameBuffer::Type::kI210) {
+ EXPECT_NEAR(colors[i].u,
+ rotated.DataU()[(x / plane_divider.x) +
+ (y / plane_divider.y) * rotated.StrideU()],
+ 1);
+ EXPECT_NEAR(colors[i].v,
+ rotated.DataV()[(x / plane_divider.x) +
+ (y / plane_divider.y) * rotated.StrideV()],
+ 1);
+ } else {
+ EXPECT_EQ(colors[i].u,
+ rotated.DataU()[(x / plane_divider.x) +
+ (y / plane_divider.y) * rotated.StrideU()]);
+ EXPECT_EQ(colors[i].v,
+ rotated.DataV()[(x / plane_divider.x) +
+ (y / plane_divider.y) * rotated.StrideV()]);
+ }
}
}
@@ -362,161 +290,177 @@ TEST(TestVideoFrame, TextureInitialValues) {
EXPECT_EQ(20, frame.timestamp_us());
}
-class TestPlanarYuvBuffer
- : public ::testing::TestWithParam<VideoFrameBuffer::Type> {};
+template <typename T>
+class TestPlanarYuvBuffer : public ::testing::Test {};
+TYPED_TEST_SUITE_P(TestPlanarYuvBuffer);
-rtc::scoped_refptr<I420Buffer> CreateAndFillBuffer() {
- auto buf = I420Buffer::Create(20, 10);
+template <class T>
+rtc::scoped_refptr<T> CreateAndFillBuffer() {
+ auto buf = T::Create(20, 10);
memset(buf->MutableDataY(), 1, 200);
- memset(buf->MutableDataU(), 2, 50);
- memset(buf->MutableDataV(), 3, 50);
- return buf;
-}
-TEST_P(TestPlanarYuvBuffer, Copy) {
- rtc::scoped_refptr<PlanarYuvBuffer> buf1;
- switch (GetParam()) {
- case VideoFrameBuffer::Type::kI420: {
- buf1 = CreateAndFillBuffer();
- break;
- }
- case VideoFrameBuffer::Type::kI010: {
- buf1 = I010Buffer::Copy(*CreateAndFillBuffer());
- break;
- }
- default:
- RTC_NOTREACHED();
+ if (buf->type() == VideoFrameBuffer::Type::kI444) {
+ memset(buf->MutableDataU(), 2, 200);
+ memset(buf->MutableDataV(), 3, 200);
+ } else if (buf->type() == VideoFrameBuffer::Type::kI422 ||
+ buf->type() == VideoFrameBuffer::Type::kI210) {
+ memset(buf->MutableDataU(), 2, 100);
+ memset(buf->MutableDataV(), 3, 100);
+ } else {
+ memset(buf->MutableDataU(), 2, 50);
+ memset(buf->MutableDataV(), 3, 50);
}
- rtc::scoped_refptr<PlanarYuvBuffer> buf2 =
- PlanarYuvBufferFactory::Copy(*buf1);
- EXPECT_TRUE(test::FrameBufsEqual(buf1->ToI420(), buf2->ToI420()));
+ return buf;
}
-TEST_P(TestPlanarYuvBuffer, Scale) {
- rtc::scoped_refptr<PlanarYuvBuffer> buf =
- CreateGradient(GetParam(), 200, 100);
-
- // Pure scaling, no cropping.
- rtc::scoped_refptr<PlanarYuvBuffer> scaled_buffer =
- PlanarYuvBufferFactory::ScaleFrom(*buf, 150, 75);
- CheckCrop(*scaled_buffer->ToI420(), 0.0, 0.0, 1.0, 1.0);
+TYPED_TEST_P(TestPlanarYuvBuffer, Copy) {
+ rtc::scoped_refptr<TypeParam> buf1 = CreateAndFillBuffer<TypeParam>();
+ rtc::scoped_refptr<TypeParam> buf2 = TypeParam::Copy(*buf1);
+ EXPECT_TRUE(test::FrameBufsEqual(buf1, buf2));
}
-TEST_P(TestPlanarYuvBuffer, CropXCenter) {
- rtc::scoped_refptr<PlanarYuvBuffer> buf =
- CreateGradient(GetParam(), 200, 100);
+TYPED_TEST_P(TestPlanarYuvBuffer, CropXCenter) {
+ rtc::scoped_refptr<TypeParam> buf = CreateGradient<TypeParam>(200, 100);
// Pure center cropping, no scaling.
- rtc::scoped_refptr<PlanarYuvBuffer> scaled_buffer =
- PlanarYuvBufferFactory::CropAndScaleFrom(*buf, 50, 0, 100, 100);
- CheckCrop(*scaled_buffer->ToI420(), 0.25, 0.0, 0.5, 1.0);
+ rtc::scoped_refptr<TypeParam> scaled_buffer = TypeParam::Create(100, 100);
+ scaled_buffer->CropAndScaleFrom(*buf, 50, 0, 100, 100);
+ CheckCrop<TypeParam>(*scaled_buffer, 0.25, 0.0, 0.5, 1.0);
}
-TEST_P(TestPlanarYuvBuffer, CropXNotCenter) {
- rtc::scoped_refptr<PlanarYuvBuffer> buf =
- CreateGradient(GetParam(), 200, 100);
+TYPED_TEST_P(TestPlanarYuvBuffer, CropXNotCenter) {
+ rtc::scoped_refptr<TypeParam> buf = CreateGradient<TypeParam>(200, 100);
// Non-center cropping, no scaling.
- rtc::scoped_refptr<PlanarYuvBuffer> scaled_buffer =
- PlanarYuvBufferFactory::CropAndScaleFrom(*buf, 25, 0, 100, 100);
- CheckCrop(*scaled_buffer->ToI420(), 0.125, 0.0, 0.5, 1.0);
+ rtc::scoped_refptr<TypeParam> scaled_buffer = TypeParam::Create(100, 100);
+ scaled_buffer->CropAndScaleFrom(*buf, 25, 0, 100, 100);
+ CheckCrop<TypeParam>(*scaled_buffer, 0.125, 0.0, 0.5, 1.0);
}
-TEST_P(TestPlanarYuvBuffer, CropYCenter) {
- rtc::scoped_refptr<PlanarYuvBuffer> buf =
- CreateGradient(GetParam(), 100, 200);
+TYPED_TEST_P(TestPlanarYuvBuffer, CropYCenter) {
+ rtc::scoped_refptr<TypeParam> buf = CreateGradient<TypeParam>(100, 200);
// Pure center cropping, no scaling.
- rtc::scoped_refptr<PlanarYuvBuffer> scaled_buffer =
- PlanarYuvBufferFactory::CropAndScaleFrom(*buf, 0, 50, 100, 100);
- CheckCrop(*scaled_buffer->ToI420(), 0.0, 0.25, 1.0, 0.5);
+ rtc::scoped_refptr<TypeParam> scaled_buffer = TypeParam::Create(100, 100);
+ scaled_buffer->CropAndScaleFrom(*buf, 0, 50, 100, 100);
+ CheckCrop<TypeParam>(*scaled_buffer, 0.0, 0.25, 1.0, 0.5);
}
-TEST_P(TestPlanarYuvBuffer, CropYNotCenter) {
- rtc::scoped_refptr<PlanarYuvBuffer> buf =
- CreateGradient(GetParam(), 100, 200);
+TYPED_TEST_P(TestPlanarYuvBuffer, CropYNotCenter) {
+ rtc::scoped_refptr<TypeParam> buf = CreateGradient<TypeParam>(100, 200);
// Pure center cropping, no scaling.
- rtc::scoped_refptr<PlanarYuvBuffer> scaled_buffer =
- PlanarYuvBufferFactory::CropAndScaleFrom(*buf, 0, 25, 100, 100);
- CheckCrop(*scaled_buffer->ToI420(), 0.0, 0.125, 1.0, 0.5);
+ rtc::scoped_refptr<TypeParam> scaled_buffer = TypeParam::Create(100, 100);
+ scaled_buffer->CropAndScaleFrom(*buf, 0, 25, 100, 100);
+ CheckCrop<TypeParam>(*scaled_buffer, 0.0, 0.125, 1.0, 0.5);
}
-TEST_P(TestPlanarYuvBuffer, CropAndScale16x9) {
- rtc::scoped_refptr<PlanarYuvBuffer> buf =
- CreateGradient(GetParam(), 640, 480);
+TYPED_TEST_P(TestPlanarYuvBuffer, CropAndScale16x9) {
+ const int buffer_width = 640;
+ const int buffer_height = 480;
+ const int crop_width = 320;
+ const int crop_height = 180;
+ rtc::scoped_refptr<TypeParam> buf = CreateGradient<TypeParam>(640, 480);
// Pure center cropping, no scaling.
- rtc::scoped_refptr<PlanarYuvBuffer> scaled_buffer =
- PlanarYuvBufferFactory::CropAndScaleFrom(*buf, 320, 180);
- CheckCrop(*scaled_buffer->ToI420(), 0.0, 0.125, 1.0, 0.75);
-}
-
-TEST_P(TestPlanarYuvBuffer, PastesIntoBuffer) {
- const int kOffsetx = 20;
- const int kOffsety = 30;
- const int kPicSize = 20;
- const int kWidth = 160;
- const int kHeight = 80;
- rtc::scoped_refptr<PlanarYuvBuffer> buf =
- CreateGradient(GetParam(), kWidth, kHeight);
-
- rtc::scoped_refptr<PlanarYuvBuffer> original =
- CreateGradient(GetParam(), kWidth, kHeight);
-
- rtc::scoped_refptr<PlanarYuvBuffer> picture =
- CreateGradient(GetParam(), kPicSize, kPicSize);
-
- rtc::scoped_refptr<PlanarYuvBuffer> odd_picture =
- CreateGradient(GetParam(), kPicSize + 1, kPicSize - 1);
-
- PasteFromBuffer(buf.get(), *picture, kOffsetx, kOffsety);
-
- for (int i = 0; i < kWidth; ++i) {
- for (int j = 0; j < kHeight; ++j) {
- bool is_inside = i >= kOffsetx && i < kOffsetx + kPicSize &&
- j >= kOffsety && j < kOffsety + kPicSize;
- if (!is_inside) {
- EXPECT_EQ(GetU(original, i, j), GetU(buf, i, j));
- EXPECT_EQ(GetV(original, i, j), GetV(buf, i, j));
- EXPECT_EQ(GetY(original, i, j), GetY(buf, i, j));
- } else {
- EXPECT_EQ(GetU(picture, i - kOffsetx, j - kOffsety), GetU(buf, i, j));
- EXPECT_EQ(GetV(picture, i - kOffsetx, j - kOffsety), GetV(buf, i, j));
- EXPECT_EQ(GetY(picture, i - kOffsetx, j - kOffsety), GetY(buf, i, j));
- }
- }
+ const int out_width =
+ std::min(buffer_width, crop_width * buffer_height / crop_height);
+ const int out_height =
+ std::min(buffer_height, crop_height * buffer_width / crop_width);
+ rtc::scoped_refptr<TypeParam> scaled_buffer =
+ TypeParam::Create(out_width, out_height);
+ scaled_buffer->CropAndScaleFrom(*buf, (buffer_width - out_width) / 2,
+ (buffer_height - out_height) / 2, out_width,
+ out_height);
+ CheckCrop<TypeParam>(*scaled_buffer, 0.0, 0.125, 1.0, 0.75);
+}
+
+REGISTER_TYPED_TEST_SUITE_P(TestPlanarYuvBuffer,
+ Copy,
+ CropXCenter,
+ CropXNotCenter,
+ CropYCenter,
+ CropYNotCenter,
+ CropAndScale16x9);
+
+using TestTypesAll = ::testing::
+ Types<I420Buffer, I010Buffer, I444Buffer, I422Buffer, I210Buffer>;
+INSTANTIATE_TYPED_TEST_SUITE_P(All, TestPlanarYuvBuffer, TestTypesAll);
+
+template <class T>
+class TestPlanarYuvBufferScale : public ::testing::Test {};
+TYPED_TEST_SUITE_P(TestPlanarYuvBufferScale);
+
+TYPED_TEST_P(TestPlanarYuvBufferScale, Scale) {
+ rtc::scoped_refptr<TypeParam> buf = CreateGradient<TypeParam>(200, 100);
+
+ // Pure scaling, no cropping.
+ rtc::scoped_refptr<TypeParam> scaled_buffer = TypeParam::Create(150, 75);
+ scaled_buffer->ScaleFrom(*buf);
+ CheckCrop<TypeParam>(*scaled_buffer, 0.0, 0.0, 1.0, 1.0);
+}
+
+REGISTER_TYPED_TEST_SUITE_P(TestPlanarYuvBufferScale, Scale);
+
+using TestTypesScale = ::testing::Types<I420Buffer, I010Buffer, I210Buffer>;
+INSTANTIATE_TYPED_TEST_SUITE_P(All, TestPlanarYuvBufferScale, TestTypesScale);
+
+template <class T>
+class TestPlanarYuvBufferRotate : public ::testing::Test {
+ public:
+ std::vector<webrtc::VideoRotation> RotationParams = {
+ kVideoRotation_0, kVideoRotation_90, kVideoRotation_180,
+ kVideoRotation_270};
+};
+
+TYPED_TEST_SUITE_P(TestPlanarYuvBufferRotate);
+
+TYPED_TEST_P(TestPlanarYuvBufferRotate, Rotates) {
+ for (const webrtc::VideoRotation& rotation : this->RotationParams) {
+ rtc::scoped_refptr<TypeParam> buffer = CreateGradient<TypeParam>(640, 480);
+ rtc::scoped_refptr<TypeParam> rotated_buffer =
+ TypeParam::Rotate(*buffer, rotation);
+ CheckRotate(640, 480, rotation, *rotated_buffer);
}
}
-INSTANTIATE_TEST_SUITE_P(All,
- TestPlanarYuvBuffer,
- ::testing::Values(VideoFrameBuffer::Type::kI420,
- VideoFrameBuffer::Type::kI010));
-
-class TestPlanarYuvBufferRotate
- : public ::testing::TestWithParam<
- std::tuple<webrtc::VideoRotation, VideoFrameBuffer::Type>> {};
-
-TEST_P(TestPlanarYuvBufferRotate, Rotates) {
- const webrtc::VideoRotation rotation = std::get<0>(GetParam());
- const VideoFrameBuffer::Type type = std::get<1>(GetParam());
- rtc::scoped_refptr<PlanarYuvBuffer> buffer = CreateGradient(type, 640, 480);
- rtc::scoped_refptr<PlanarYuvBuffer> rotated_buffer =
- PlanarYuvBufferFactory::Rotate(*buffer, rotation);
- CheckRotate(640, 480, rotation, *rotated_buffer->ToI420());
-}
-
-INSTANTIATE_TEST_SUITE_P(
- Rotate,
- TestPlanarYuvBufferRotate,
- ::testing::Combine(::testing::Values(kVideoRotation_0,
- kVideoRotation_90,
- kVideoRotation_180,
- kVideoRotation_270),
- ::testing::Values(VideoFrameBuffer::Type::kI420,
- VideoFrameBuffer::Type::kI010)));
+REGISTER_TYPED_TEST_SUITE_P(TestPlanarYuvBufferRotate, Rotates);
+
+using TestTypesRotate = ::testing::
+ Types<I420Buffer, I010Buffer, I444Buffer, I422Buffer, I210Buffer>;
+INSTANTIATE_TYPED_TEST_SUITE_P(Rotate,
+ TestPlanarYuvBufferRotate,
+ TestTypesRotate);
+
+TEST(TestNV12Buffer, CropAndScale) {
+ const int kSourceWidth = 640;
+ const int kSourceHeight = 480;
+ const int kScaledWidth = 320;
+ const int kScaledHeight = 240;
+ const int kCropLeft = 40;
+ const int kCropTop = 30;
+ const int kCropRight = 0;
+ const int kCropBottom = 30;
+
+ rtc::scoped_refptr<VideoFrameBuffer> buf =
+ CreateNV12Gradient(kSourceWidth, kSourceHeight);
+
+ rtc::scoped_refptr<VideoFrameBuffer> scaled_buffer = buf->CropAndScale(
+ kCropLeft, kCropTop, kSourceWidth - kCropLeft - kCropRight,
+ kSourceHeight - kCropTop - kCropBottom, kScaledWidth, kScaledHeight);
+
+ // Parameters to CheckCrop indicate what part of the source frame is in the
+ // scaled frame.
+ const float kOffsetX = (kCropLeft + 0.0) / kSourceWidth;
+ const float kOffsetY = (kCropTop + 0.0) / kSourceHeight;
+ const float kRelativeWidth =
+ (kSourceWidth - kCropLeft - kCropRight + 0.0) / kSourceWidth;
+ const float kRelativeHeight =
+ (kSourceHeight - kCropTop - kCropBottom + 0.0) / kSourceHeight;
+ CheckCrop(*scaled_buffer->ToI420(), kOffsetX, kOffsetY, kRelativeWidth,
+ kRelativeHeight);
+}
TEST(TestUpdateRect, CanCompare) {
VideoFrame::UpdateRect a = {0, 0, 100, 200};
diff --git a/common_video/video_render_frames.cc b/common_video/video_render_frames.cc
deleted file mode 100644
index 5ef51f2805..0000000000
--- a/common_video/video_render_frames.cc
+++ /dev/null
@@ -1,116 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "common_video/video_render_frames.h"
-
-#include <type_traits>
-#include <utility>
-
-#include "rtc_base/checks.h"
-#include "rtc_base/logging.h"
-#include "rtc_base/time_utils.h"
-#include "system_wrappers/include/metrics.h"
-
-namespace webrtc {
-namespace {
-// Don't render frames with timestamp older than 500ms from now.
-const int kOldRenderTimestampMS = 500;
-// Don't render frames with timestamp more than 10s into the future.
-const int kFutureRenderTimestampMS = 10000;
-
-const uint32_t kEventMaxWaitTimeMs = 200;
-const uint32_t kMinRenderDelayMs = 10;
-const uint32_t kMaxRenderDelayMs = 500;
-const size_t kMaxIncomingFramesBeforeLogged = 100;
-
-uint32_t EnsureValidRenderDelay(uint32_t render_delay) {
- return (render_delay < kMinRenderDelayMs || render_delay > kMaxRenderDelayMs)
- ? kMinRenderDelayMs
- : render_delay;
-}
-} // namespace
-
-VideoRenderFrames::VideoRenderFrames(uint32_t render_delay_ms)
- : render_delay_ms_(EnsureValidRenderDelay(render_delay_ms)) {}
-
-VideoRenderFrames::~VideoRenderFrames() {
- frames_dropped_ += incoming_frames_.size();
- RTC_HISTOGRAM_COUNTS_1000("WebRTC.Video.DroppedFrames.RenderQueue",
- frames_dropped_);
- RTC_LOG(LS_INFO) << "WebRTC.Video.DroppedFrames.RenderQueue "
- << frames_dropped_;
-}
-
-int32_t VideoRenderFrames::AddFrame(VideoFrame&& new_frame) {
- const int64_t time_now = rtc::TimeMillis();
-
- // Drop old frames only when there are other frames in the queue, otherwise, a
- // really slow system never renders any frames.
- if (!incoming_frames_.empty() &&
- new_frame.render_time_ms() + kOldRenderTimestampMS < time_now) {
- RTC_LOG(LS_WARNING) << "Too old frame, timestamp=" << new_frame.timestamp();
- ++frames_dropped_;
- return -1;
- }
-
- if (new_frame.render_time_ms() > time_now + kFutureRenderTimestampMS) {
- RTC_LOG(LS_WARNING) << "Frame too long into the future, timestamp="
- << new_frame.timestamp();
- ++frames_dropped_;
- return -1;
- }
-
- if (new_frame.render_time_ms() < last_render_time_ms_) {
- RTC_LOG(LS_WARNING) << "Frame scheduled out of order, render_time="
- << new_frame.render_time_ms()
- << ", latest=" << last_render_time_ms_;
- // For more details, see bug:
- // https://bugs.chromium.org/p/webrtc/issues/detail?id=7253
- ++frames_dropped_;
- return -1;
- }
-
- last_render_time_ms_ = new_frame.render_time_ms();
- incoming_frames_.emplace_back(std::move(new_frame));
-
- if (incoming_frames_.size() > kMaxIncomingFramesBeforeLogged) {
- RTC_LOG(LS_WARNING) << "Stored incoming frames: "
- << incoming_frames_.size();
- }
- return static_cast<int32_t>(incoming_frames_.size());
-}
-
-absl::optional<VideoFrame> VideoRenderFrames::FrameToRender() {
- absl::optional<VideoFrame> render_frame;
- // Get the newest frame that can be released for rendering.
- while (!incoming_frames_.empty() && TimeToNextFrameRelease() <= 0) {
- if (render_frame) {
- ++frames_dropped_;
- }
- render_frame = std::move(incoming_frames_.front());
- incoming_frames_.pop_front();
- }
- return render_frame;
-}
-
-uint32_t VideoRenderFrames::TimeToNextFrameRelease() {
- if (incoming_frames_.empty()) {
- return kEventMaxWaitTimeMs;
- }
- const int64_t time_to_release = incoming_frames_.front().render_time_ms() -
- render_delay_ms_ - rtc::TimeMillis();
- return time_to_release < 0 ? 0u : static_cast<uint32_t>(time_to_release);
-}
-
-bool VideoRenderFrames::HasPendingFrames() const {
- return !incoming_frames_.empty();
-}
-
-} // namespace webrtc
diff --git a/common_video/video_render_frames.h b/common_video/video_render_frames.h
deleted file mode 100644
index 9973c1ff08..0000000000
--- a/common_video/video_render_frames.h
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef COMMON_VIDEO_VIDEO_RENDER_FRAMES_H_
-#define COMMON_VIDEO_VIDEO_RENDER_FRAMES_H_
-
-#include <stddef.h>
-#include <stdint.h>
-
-#include <list>
-
-#include "absl/types/optional.h"
-#include "api/video/video_frame.h"
-
-namespace webrtc {
-
-// Class definitions
-class VideoRenderFrames {
- public:
- explicit VideoRenderFrames(uint32_t render_delay_ms);
- VideoRenderFrames(const VideoRenderFrames&) = delete;
- ~VideoRenderFrames();
-
- // Add a frame to the render queue
- int32_t AddFrame(VideoFrame&& new_frame);
-
- // Get a frame for rendering, or false if it's not time to render.
- absl::optional<VideoFrame> FrameToRender();
-
- // Returns the number of ms to next frame to render
- uint32_t TimeToNextFrameRelease();
-
- bool HasPendingFrames() const;
-
- private:
- // Sorted list with framed to be rendered, oldest first.
- std::list<VideoFrame> incoming_frames_;
-
- // Estimated delay from a frame is released until it's rendered.
- const uint32_t render_delay_ms_;
-
- int64_t last_render_time_ms_ = 0;
- size_t frames_dropped_ = 0;
-};
-
-} // namespace webrtc
-
-#endif // COMMON_VIDEO_VIDEO_RENDER_FRAMES_H_