diff options
author | jackychen <jackychen@webrtc.org> | 2016-01-11 21:34:07 -0800 |
---|---|---|
committer | Commit bot <commit-bot@chromium.org> | 2016-01-12 05:34:14 +0000 |
commit | 67e94fb6f2a84cad97ba96abce3506c29f8ebb24 (patch) | |
tree | 3d0c8ef27928e2037561b49009e25bc2b9829213 | |
parent | b2328d11dcc86fba1661ee3fa0d51fc126939764 (diff) | |
download | webrtc-67e94fb6f2a84cad97ba96abce3506c29f8ebb24.tar.gz |
Add unit test for stand-alone denoiser and fix some bugs.
The unit test will run the pure C denoiser and the SSE2/NEON denoiser
(based on CPU detection) and compare the denoised frames to ensure
they are bit-exact.
TBR=tommi@webrtc.org
BUG=webrtc:5255
Review URL: https://codereview.webrtc.org/1492053003
Cr-Commit-Position: refs/heads/master@{#11216}
-rw-r--r-- | webrtc/common_video/i420_video_frame_unittest.cc | 49 | ||||
-rw-r--r-- | webrtc/common_video/video_frame.cc | 43 | ||||
-rw-r--r-- | webrtc/modules/modules.gyp | 2 | ||||
-rw-r--r-- | webrtc/modules/video_processing/frame_preprocessor.cc | 2 | ||||
-rw-r--r-- | webrtc/modules/video_processing/test/denoiser_test.cc | 156 | ||||
-rw-r--r-- | webrtc/modules/video_processing/util/denoiser_filter.cc | 28 | ||||
-rw-r--r-- | webrtc/modules/video_processing/util/denoiser_filter.h | 2 | ||||
-rw-r--r-- | webrtc/modules/video_processing/util/denoiser_filter_c.cc | 4 | ||||
-rw-r--r-- | webrtc/modules/video_processing/util/denoiser_filter_sse2.cc | 10 | ||||
-rw-r--r-- | webrtc/modules/video_processing/video_denoiser.cc | 8 | ||||
-rw-r--r-- | webrtc/modules/video_processing/video_denoiser.h | 2 | ||||
-rw-r--r-- | webrtc/video_frame.h | 2 |
12 files changed, 236 insertions, 72 deletions
diff --git a/webrtc/common_video/i420_video_frame_unittest.cc b/webrtc/common_video/i420_video_frame_unittest.cc index 926a5ee548..1ec451cb79 100644 --- a/webrtc/common_video/i420_video_frame_unittest.cc +++ b/webrtc/common_video/i420_video_frame_unittest.cc @@ -24,7 +24,6 @@ bool EqualPlane(const uint8_t* data1, int stride, int width, int height); -bool EqualFrames(const VideoFrame& frame1, const VideoFrame& frame2); int ExpectedSize(int plane_stride, int image_height, PlaneType type); TEST(TestVideoFrame, InitialValues) { @@ -102,7 +101,7 @@ TEST(TestVideoFrame, CopyFrame) { stride_u, stride_v, kRotation)); // Frame of smaller dimensions. EXPECT_EQ(0, small_frame.CopyFrame(big_frame)); - EXPECT_TRUE(EqualFrames(small_frame, big_frame)); + EXPECT_TRUE(small_frame.EqualsFrame(big_frame)); EXPECT_EQ(kRotation, small_frame.rotation()); // Frame of larger dimensions. @@ -112,7 +111,7 @@ TEST(TestVideoFrame, CopyFrame) { memset(small_frame.buffer(kUPlane), 2, small_frame.allocated_size(kUPlane)); memset(small_frame.buffer(kVPlane), 3, small_frame.allocated_size(kVPlane)); EXPECT_EQ(0, big_frame.CopyFrame(small_frame)); - EXPECT_TRUE(EqualFrames(small_frame, big_frame)); + EXPECT_TRUE(small_frame.EqualsFrame(big_frame)); } TEST(TestVideoFrame, ShallowCopy) { @@ -257,48 +256,4 @@ TEST(TestVideoFrame, TextureInitialValues) { EXPECT_EQ(20, frame.render_time_ms()); } -bool EqualPlane(const uint8_t* data1, - const uint8_t* data2, - int stride, - int width, - int height) { - for (int y = 0; y < height; ++y) { - if (memcmp(data1, data2, width) != 0) - return false; - data1 += stride; - data2 += stride; - } - return true; -} - -bool EqualFrames(const VideoFrame& frame1, const VideoFrame& frame2) { - if ((frame1.width() != frame2.width()) || - (frame1.height() != frame2.height()) || - (frame1.stride(kYPlane) != frame2.stride(kYPlane)) || - (frame1.stride(kUPlane) != frame2.stride(kUPlane)) || - (frame1.stride(kVPlane) != frame2.stride(kVPlane)) || - (frame1.timestamp() != 
frame2.timestamp()) || - (frame1.ntp_time_ms() != frame2.ntp_time_ms()) || - (frame1.render_time_ms() != frame2.render_time_ms())) { - return false; - } - const int half_width = (frame1.width() + 1) / 2; - const int half_height = (frame1.height() + 1) / 2; - return EqualPlane(frame1.buffer(kYPlane), frame2.buffer(kYPlane), - frame1.stride(kYPlane), frame1.width(), frame1.height()) && - EqualPlane(frame1.buffer(kUPlane), frame2.buffer(kUPlane), - frame1.stride(kUPlane), half_width, half_height) && - EqualPlane(frame1.buffer(kVPlane), frame2.buffer(kVPlane), - frame1.stride(kVPlane), half_width, half_height); -} - -int ExpectedSize(int plane_stride, int image_height, PlaneType type) { - if (type == kYPlane) { - return (plane_stride * image_height); - } else { - int half_height = (image_height + 1) / 2; - return (plane_stride * half_height); - } -} - } // namespace webrtc diff --git a/webrtc/common_video/video_frame.cc b/webrtc/common_video/video_frame.cc index 7cdbd53f9d..710a06d3dc 100644 --- a/webrtc/common_video/video_frame.cc +++ b/webrtc/common_video/video_frame.cc @@ -19,6 +19,29 @@ namespace webrtc { +bool EqualPlane(const uint8_t* data1, + const uint8_t* data2, + int stride, + int width, + int height) { + for (int y = 0; y < height; ++y) { + if (memcmp(data1, data2, width) != 0) + return false; + data1 += stride; + data2 += stride; + } + return true; +} + +int ExpectedSize(int plane_stride, int image_height, PlaneType type) { + if (type == kYPlane) { + return (plane_stride * image_height); + } else { + int half_height = (image_height + 1) / 2; + return (plane_stride * half_height); + } +} + VideoFrame::VideoFrame() { // Intentionally using Reset instead of initializer list so that any missed // fields in Reset will be caught by memory checkers. 
@@ -202,4 +225,24 @@ VideoFrame VideoFrame::ConvertNativeToI420Frame() const { return frame; } +bool VideoFrame::EqualsFrame(const VideoFrame& frame) const { + if ((this->width() != frame.width()) || (this->height() != frame.height()) || + (this->stride(kYPlane) != frame.stride(kYPlane)) || + (this->stride(kUPlane) != frame.stride(kUPlane)) || + (this->stride(kVPlane) != frame.stride(kVPlane)) || + (this->timestamp() != frame.timestamp()) || + (this->ntp_time_ms() != frame.ntp_time_ms()) || + (this->render_time_ms() != frame.render_time_ms())) { + return false; + } + const int half_width = (this->width() + 1) / 2; + const int half_height = (this->height() + 1) / 2; + return EqualPlane(this->buffer(kYPlane), frame.buffer(kYPlane), + this->stride(kYPlane), this->width(), this->height()) && + EqualPlane(this->buffer(kUPlane), frame.buffer(kUPlane), + this->stride(kUPlane), half_width, half_height) && + EqualPlane(this->buffer(kVPlane), frame.buffer(kVPlane), + this->stride(kVPlane), half_width, half_height); +} + } // namespace webrtc diff --git a/webrtc/modules/modules.gyp b/webrtc/modules/modules.gyp index 39bc69c3ca..f8febd8d50 100644 --- a/webrtc/modules/modules.gyp +++ b/webrtc/modules/modules.gyp @@ -144,6 +144,7 @@ '<(DEPTH)/third_party/gflags/gflags.gyp:gflags', '<(webrtc_root)/common.gyp:webrtc_common', '<(webrtc_root)/common_audio/common_audio.gyp:common_audio', + '<(webrtc_root)/common_video/common_video.gyp:common_video', '<(webrtc_root)/modules/modules.gyp:video_capture', '<(webrtc_root)/modules/video_coding/codecs/vp8/vp8.gyp:webrtc_vp8', '<(webrtc_root)/modules/video_coding/codecs/vp9/vp9.gyp:webrtc_vp9', @@ -369,6 +370,7 @@ 'video_processing/test/brightness_detection_test.cc', 'video_processing/test/content_metrics_test.cc', 'video_processing/test/deflickering_test.cc', + 'video_processing/test/denoiser_test.cc', 'video_processing/test/video_processing_unittest.cc', 'video_processing/test/video_processing_unittest.h', ], diff --git 
a/webrtc/modules/video_processing/frame_preprocessor.cc b/webrtc/modules/video_processing/frame_preprocessor.cc index db2b84b174..6778a597be 100644 --- a/webrtc/modules/video_processing/frame_preprocessor.cc +++ b/webrtc/modules/video_processing/frame_preprocessor.cc @@ -95,7 +95,7 @@ uint32_t VPMFramePreprocessor::GetDecimatedHeight() const { } void VPMFramePreprocessor::EnableDenosing(bool enable) { - denoiser_.reset(new VideoDenoiser()); + denoiser_.reset(new VideoDenoiser(true)); } const VideoFrame* VPMFramePreprocessor::PreprocessFrame( diff --git a/webrtc/modules/video_processing/test/denoiser_test.cc b/webrtc/modules/video_processing/test/denoiser_test.cc new file mode 100644 index 0000000000..551a77617d --- /dev/null +++ b/webrtc/modules/video_processing/test/denoiser_test.cc @@ -0,0 +1,156 @@ +/* + * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include <string.h> + +#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h" +#include "webrtc/modules/video_processing/include/video_processing.h" +#include "webrtc/modules/video_processing/test/video_processing_unittest.h" +#include "webrtc/modules/video_processing/video_denoiser.h" + +namespace webrtc { + +TEST_F(VideoProcessingTest, CopyMem) { + rtc::scoped_ptr<DenoiserFilter> df_c(DenoiserFilter::Create(false)); + rtc::scoped_ptr<DenoiserFilter> df_sse_neon(DenoiserFilter::Create(true)); + uint8_t src[16 * 16], dst[16 * 16]; + for (int i = 0; i < 16; ++i) { + for (int j = 0; j < 16; ++j) { + src[i * 16 + j] = i * 16 + j; + } + } + + memset(dst, 0, 8 * 8); + df_c->CopyMem8x8(src, 8, dst, 8); + EXPECT_EQ(0, memcmp(src, dst, 8 * 8)); + + memset(dst, 0, 16 * 16); + df_c->CopyMem16x16(src, 16, dst, 16); + EXPECT_EQ(0, memcmp(src, dst, 16 * 16)); + + memset(dst, 0, 8 * 8); + df_sse_neon->CopyMem16x16(src, 8, dst, 8); + EXPECT_EQ(0, memcmp(src, dst, 8 * 8)); + + memset(dst, 0, 16 * 16); + df_sse_neon->CopyMem16x16(src, 16, dst, 16); + EXPECT_EQ(0, memcmp(src, dst, 16 * 16)); +} + +TEST_F(VideoProcessingTest, Variance) { + rtc::scoped_ptr<DenoiserFilter> df_c(DenoiserFilter::Create(false)); + rtc::scoped_ptr<DenoiserFilter> df_sse_neon(DenoiserFilter::Create(true)); + uint8_t src[16 * 16], dst[16 * 16]; + uint32_t sum = 0, sse = 0, var; + for (int i = 0; i < 16; ++i) { + for (int j = 0; j < 16; ++j) { + src[i * 16 + j] = i * 16 + j; + } + } + // Compute the 16x8 variance of the 16x16 block. 
+ for (int i = 0; i < 8; ++i) { + for (int j = 0; j < 16; ++j) { + sum += (i * 32 + j); + sse += (i * 32 + j) * (i * 32 + j); + } + } + var = sse - ((sum * sum) >> 7); + memset(dst, 0, 16 * 16); + EXPECT_EQ(var, df_c->Variance16x8(src, 16, dst, 16, &sse)); + EXPECT_EQ(var, df_sse_neon->Variance16x8(src, 16, dst, 16, &sse)); +} + +TEST_F(VideoProcessingTest, MbDenoise) { + rtc::scoped_ptr<DenoiserFilter> df_c(DenoiserFilter::Create(false)); + rtc::scoped_ptr<DenoiserFilter> df_sse_neon(DenoiserFilter::Create(true)); + uint8_t running_src[16 * 16], src[16 * 16], dst[16 * 16], dst_ref[16 * 16]; + + // Test case: |diff| <= |3 + shift_inc1| + for (int i = 0; i < 16; ++i) { + for (int j = 0; j < 16; ++j) { + running_src[i * 16 + j] = i * 11 + j; + src[i * 16 + j] = i * 11 + j + 2; + dst_ref[i * 16 + j] = running_src[i * 16 + j]; + } + } + memset(dst, 0, 16 * 16); + df_c->MbDenoise(running_src, 16, dst, 16, src, 16, 0, 1); + EXPECT_EQ(0, memcmp(dst, dst_ref, 16 * 16)); + + // Test case: |diff| >= |4 + shift_inc1| + for (int i = 0; i < 16; ++i) { + for (int j = 0; j < 16; ++j) { + running_src[i * 16 + j] = i * 11 + j; + src[i * 16 + j] = i * 11 + j + 5; + dst_ref[i * 16 + j] = src[i * 16 + j] - 2; + } + } + memset(dst, 0, 16 * 16); + df_c->MbDenoise(running_src, 16, dst, 16, src, 16, 0, 1); + EXPECT_EQ(0, memcmp(dst, dst_ref, 16 * 16)); + memset(dst, 0, 16 * 16); + df_sse_neon->MbDenoise(running_src, 16, dst, 16, src, 16, 0, 1); + EXPECT_EQ(0, memcmp(dst, dst_ref, 16 * 16)); + + // Test case: |diff| >= 8 + for (int i = 0; i < 16; ++i) { + for (int j = 0; j < 16; ++j) { + running_src[i * 16 + j] = i * 11 + j; + src[i * 16 + j] = i * 11 + j + 8; + dst_ref[i * 16 + j] = src[i * 16 + j] - 6; + } + } + memset(dst, 0, 16 * 16); + df_c->MbDenoise(running_src, 16, dst, 16, src, 16, 0, 1); + EXPECT_EQ(0, memcmp(dst, dst_ref, 16 * 16)); + memset(dst, 0, 16 * 16); + df_sse_neon->MbDenoise(running_src, 16, dst, 16, src, 16, 0, 1); + EXPECT_EQ(0, memcmp(dst, dst_ref, 16 * 16)); + + // 
Test case: |diff| > 15 + for (int i = 0; i < 16; ++i) { + for (int j = 0; j < 16; ++j) { + running_src[i * 16 + j] = i * 11 + j; + src[i * 16 + j] = i * 11 + j + 16; + } + } + memset(dst, 0, 16 * 16); + DenoiserDecision decision = + df_c->MbDenoise(running_src, 16, dst, 16, src, 16, 0, 1); + EXPECT_EQ(COPY_BLOCK, decision); + decision = df_sse_neon->MbDenoise(running_src, 16, dst, 16, src, 16, 0, 1); + EXPECT_EQ(COPY_BLOCK, decision); +} + +TEST_F(VideoProcessingTest, Denoiser) { + // Create pure C denoiser. + VideoDenoiser denoiser_c(false); + // Create SSE or NEON denoiser. + VideoDenoiser denoiser_sse_neon(true); + VideoFrame denoised_frame_c; + VideoFrame denoised_frame_sse_neon; + + rtc::scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]); + while (fread(video_buffer.get(), 1, frame_length_, source_file_) == + frame_length_) { + // Using ConvertToI420 to add stride to the image. + EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_, + 0, kVideoRotation_0, &video_frame_)); + + denoiser_c.DenoiseFrame(video_frame_, &denoised_frame_c); + denoiser_sse_neon.DenoiseFrame(video_frame_, &denoised_frame_sse_neon); + + // Denoising results should be the same for C and SSE/NEON denoiser. 
+ ASSERT_EQ(true, denoised_frame_c.EqualsFrame(denoised_frame_sse_neon)); + } + ASSERT_NE(0, feof(source_file_)) << "Error reading source file"; +} + +} // namespace webrtc diff --git a/webrtc/modules/video_processing/util/denoiser_filter.cc b/webrtc/modules/video_processing/util/denoiser_filter.cc index 3e36077ea0..8fead6dd31 100644 --- a/webrtc/modules/video_processing/util/denoiser_filter.cc +++ b/webrtc/modules/video_processing/util/denoiser_filter.cc @@ -20,26 +20,30 @@ const int kMotionMagnitudeThreshold = 8 * 3; const int kSumDiffThreshold = 16 * 16 * 2; const int kSumDiffThresholdHigh = 600; -DenoiserFilter* DenoiserFilter::Create() { +DenoiserFilter* DenoiserFilter::Create(bool runtime_cpu_detection) { DenoiserFilter* filter = NULL; + if (runtime_cpu_detection) { // If we know the minimum architecture at compile time, avoid CPU detection. #if defined(WEBRTC_ARCH_X86_FAMILY) - // x86 CPU detection required. - if (WebRtc_GetCPUInfo(kSSE2)) { - filter = new DenoiserFilterSSE2(); - } else { - filter = new DenoiserFilterC(); - } + // x86 CPU detection required. 
+ if (WebRtc_GetCPUInfo(kSSE2)) { + filter = new DenoiserFilterSSE2(); + } else { + filter = new DenoiserFilterC(); + } #elif defined(WEBRTC_DETECT_NEON) - if (WebRtc_GetCPUFeaturesARM() & kCPUFeatureNEON) { - filter = new DenoiserFilterNEON(); + if (WebRtc_GetCPUFeaturesARM() & kCPUFeatureNEON) { + filter = new DenoiserFilterNEON(); + } else { + filter = new DenoiserFilterC(); + } +#else + filter = new DenoiserFilterC(); +#endif } else { filter = new DenoiserFilterC(); } -#else - filter = new DenoiserFilterC(); -#endif return filter; } diff --git a/webrtc/modules/video_processing/util/denoiser_filter.h b/webrtc/modules/video_processing/util/denoiser_filter.h index 19135b3b9e..e9bd375a22 100644 --- a/webrtc/modules/video_processing/util/denoiser_filter.h +++ b/webrtc/modules/video_processing/util/denoiser_filter.h @@ -30,7 +30,7 @@ struct DenoiseMetrics { class DenoiserFilter { public: - static DenoiserFilter* Create(); + static DenoiserFilter* Create(bool runtime_cpu_detection); virtual ~DenoiserFilter() {} diff --git a/webrtc/modules/video_processing/util/denoiser_filter_c.cc b/webrtc/modules/video_processing/util/denoiser_filter_c.cc index e32bf83889..6323980e18 100644 --- a/webrtc/modules/video_processing/util/denoiser_filter_c.cc +++ b/webrtc/modules/video_processing/util/denoiser_filter_c.cc @@ -56,7 +56,7 @@ uint32_t DenoiserFilterC::Variance16x8(const uint8_t* a, a += a_stride; b += b_stride; } - return *sse - ((static_cast<int64_t>(sum) * sum) >> 8); + return *sse - ((static_cast<int64_t>(sum) * sum) >> 7); } DenoiserDecision DenoiserFilterC::MbDenoise(uint8_t* mc_running_avg_y, @@ -72,7 +72,7 @@ DenoiserDecision DenoiserFilterC::MbDenoise(uint8_t* mc_running_avg_y, int adj_val[3] = {3, 4, 6}; int shift_inc1 = 0; int shift_inc2 = 1; - int col_sum[16] = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}; + int col_sum[16] = {0}; if (motion_magnitude <= kMotionMagnitudeThreshold) { if (increase_denoising) { shift_inc1 = 1; diff --git 
a/webrtc/modules/video_processing/util/denoiser_filter_sse2.cc b/webrtc/modules/video_processing/util/denoiser_filter_sse2.cc index 5465440373..903d7b1ec6 100644 --- a/webrtc/modules/video_processing/util/denoiser_filter_sse2.cc +++ b/webrtc/modules/video_processing/util/denoiser_filter_sse2.cc @@ -47,7 +47,7 @@ static void Get8x8varSse2(const uint8_t* src, vsum = _mm_add_epi16(vsum, _mm_srli_si128(vsum, 8)); vsum = _mm_add_epi16(vsum, _mm_srli_si128(vsum, 4)); vsum = _mm_add_epi16(vsum, _mm_srli_si128(vsum, 2)); - *sum = static_cast<int>(_mm_extract_epi16(vsum, 0)); + *sum = static_cast<int16_t>(_mm_extract_epi16(vsum, 0)); // sse vsse = _mm_add_epi32(vsse, _mm_srli_si128(vsse, 8)); @@ -62,7 +62,7 @@ static void VarianceSSE2(const unsigned char* src, int w, int h, uint32_t* sse, - uint32_t* sum, + int64_t* sum, int block_size) { *sse = 0; *sum = 0; @@ -126,9 +126,9 @@ uint32_t DenoiserFilterSSE2::Variance16x8(const uint8_t* src, int src_stride, const uint8_t* ref, int ref_stride, - unsigned int* sse) { - uint32_t sum = 0; - VarianceSSE2(src, src_stride, ref, ref_stride, 16, 8, sse, &sum, 8); + uint32_t* sse) { + int64_t sum = 0; + VarianceSSE2(src, src_stride << 1, ref, ref_stride << 1, 16, 8, sse, &sum, 8); return *sse - ((sum * sum) >> 7); } diff --git a/webrtc/modules/video_processing/video_denoiser.cc b/webrtc/modules/video_processing/video_denoiser.cc index 0ebbf7bc44..4902a89491 100644 --- a/webrtc/modules/video_processing/video_denoiser.cc +++ b/webrtc/modules/video_processing/video_denoiser.cc @@ -13,8 +13,10 @@ namespace webrtc { -VideoDenoiser::VideoDenoiser() - : width_(0), height_(0), filter_(DenoiserFilter::Create()) {} +VideoDenoiser::VideoDenoiser(bool runtime_cpu_detection) + : width_(0), + height_(0), + filter_(DenoiserFilter::Create(runtime_cpu_detection)) {} void VideoDenoiser::TrailingReduction(int mb_rows, int mb_cols, @@ -78,7 +80,7 @@ void VideoDenoiser::DenoiseFrame(const VideoFrame& frame, int mb_cols = width_ >> 4; int mb_rows = height_ 
>> 4; if (metrics_.get() == nullptr) - metrics_.reset(new DenoiseMetrics[mb_cols * mb_rows]); + metrics_.reset(new DenoiseMetrics[mb_cols * mb_rows]()); // Denoise on Y plane. uint8_t* y_dst = denoised_frame->buffer(kYPlane); uint8_t* u_dst = denoised_frame->buffer(kUPlane); diff --git a/webrtc/modules/video_processing/video_denoiser.h b/webrtc/modules/video_processing/video_denoiser.h index 2a90724ad0..107a15ca07 100644 --- a/webrtc/modules/video_processing/video_denoiser.h +++ b/webrtc/modules/video_processing/video_denoiser.h @@ -18,7 +18,7 @@ namespace webrtc { class VideoDenoiser { public: - VideoDenoiser(); + explicit VideoDenoiser(bool runtime_cpu_detection); void DenoiseFrame(const VideoFrame& frame, VideoFrame* denoised_frame); private: diff --git a/webrtc/video_frame.h b/webrtc/video_frame.h index 7ad0c328db..9d2ed9fd4d 100644 --- a/webrtc/video_frame.h +++ b/webrtc/video_frame.h @@ -158,6 +158,8 @@ class VideoFrame { // called on a non-native-handle frame. VideoFrame ConvertNativeToI420Frame() const; + bool EqualsFrame(const VideoFrame& frame) const; + private: // An opaque reference counted handle that stores the pixel data. rtc::scoped_refptr<webrtc::VideoFrameBuffer> video_frame_buffer_; |