path: root/webrtc/modules/video_processing/main/test
author     Chih-hung Hsieh <chh@google.com>  2015-12-01 17:07:48 +0000
committer  android-build-merger <android-build-merger@google.com>  2015-12-01 17:07:48 +0000
commit     a4acd9d6bc9b3b033d7d274316e75ee067df8d20 (patch)
tree       672a185b294789cf991f385c3e395dd63bea9063 /webrtc/modules/video_processing/main/test
parent     3681b90ba4fe7a27232dd3e27897d5d7ed9d651c (diff)
parent     fe8b4a657979b49e1701bd92f6d5814a99e0b2be (diff)
download   webrtc-a4acd9d6bc9b3b033d7d274316e75ee067df8d20.tar.gz
Merge changes I7bbf776e,I1b827825
am: fe8b4a6579

* commit 'fe8b4a657979b49e1701bd92f6d5814a99e0b2be': (7237 commits)
  WIP: Changes after merge commit 'cb3f9bd'
  Make the nonlinear beamformer steerable
  Utilize bitrate above codec max to protect video.
  Enable VP9 internal resize by default.
  Filter overlapping RTP header extensions.
  Make VCMEncodedFrameCallback const.
  MediaCodecVideoEncoder: Add number of quality resolution downscales to Encoded callback.
  Remove redudant encoder rate calls.
  Create isolate files for nonparallel tests.
  Register header extensions in RtpRtcpObserver to avoid log spam.
  Make an enum class out of NetEqDecoder, and hide the neteq_decoders_ table
  ACM: Move NACK functionality inside NetEq
  Fix chromium-style warnings in webrtc/sound/.
  Create a 'webrtc_nonparallel_tests' target.
  Update scalability structure data according to updates in the RTP payload profile.
  audio_coding: rename interface -> include
  Rewrote perform_action_on_all_files to be parallell.
  Update reference indices according to updates in the RTP payload profile.
  Disable P2PTransport...TestFailoverControlledSide on Memcheck
  pass clangcl compile options to ignore warnings in gflags.cc
  ...
Diffstat (limited to 'webrtc/modules/video_processing/main/test')
-rw-r--r--  webrtc/modules/video_processing/main/test/unit_test/brightness_detection_test.cc  | 121
-rw-r--r--  webrtc/modules/video_processing/main/test/unit_test/content_metrics_test.cc       |  44
-rw-r--r--  webrtc/modules/video_processing/main/test/unit_test/createTable.m                  | 179
-rw-r--r--  webrtc/modules/video_processing/main/test/unit_test/deflickering_test.cc           | 100
-rw-r--r--  webrtc/modules/video_processing/main/test/unit_test/readYUV420file.m               |  45
-rw-r--r--  webrtc/modules/video_processing/main/test/unit_test/video_processing_unittest.cc   | 390
-rw-r--r--  webrtc/modules/video_processing/main/test/unit_test/video_processing_unittest.h    |  47
-rw-r--r--  webrtc/modules/video_processing/main/test/unit_test/writeYUV420file.m              |  22
8 files changed, 948 insertions(+), 0 deletions(-)
diff --git a/webrtc/modules/video_processing/main/test/unit_test/brightness_detection_test.cc b/webrtc/modules/video_processing/main/test/unit_test/brightness_detection_test.cc
new file mode 100644
index 0000000000..4d0de3ac98
--- /dev/null
+++ b/webrtc/modules/video_processing/main/test/unit_test/brightness_detection_test.cc
@@ -0,0 +1,121 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
+#include "webrtc/modules/video_processing/main/interface/video_processing.h"
+#include "webrtc/modules/video_processing/main/test/unit_test/video_processing_unittest.h"
+#include "webrtc/test/testsupport/gtest_disable.h"
+
+using namespace webrtc;
+
+TEST_F(VideoProcessingModuleTest, DISABLED_ON_IOS(BrightnessDetection))
+{
+ uint32_t frameNum = 0;
+ int32_t brightnessWarning = 0;
+ uint32_t warningCount = 0;
+ rtc::scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
+ while (fread(video_buffer.get(), 1, frame_length_, source_file_) ==
+ frame_length_)
+ {
+ EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_,
+ height_, 0, kVideoRotation_0, &video_frame_));
+ frameNum++;
+ VideoProcessingModule::FrameStats stats;
+ ASSERT_EQ(0, vpm_->GetFrameStats(&stats, video_frame_));
+ ASSERT_GE(brightnessWarning = vpm_->BrightnessDetection(video_frame_,
+ stats), 0);
+ if (brightnessWarning != VideoProcessingModule::kNoWarning)
+ {
+ warningCount++;
+ }
+ }
+ ASSERT_NE(0, feof(source_file_)) << "Error reading source file";
+
+ // Expect few warnings
+ float warningProportion = static_cast<float>(warningCount) / frameNum * 100;
+ printf("\nWarning proportions:\n");
+ printf("Stock foreman: %.1f %%\n", warningProportion);
+ EXPECT_LT(warningProportion, 10);
+
+ rewind(source_file_);
+ frameNum = 0;
+ warningCount = 0;
+ while (fread(video_buffer.get(), 1, frame_length_, source_file_) ==
+ frame_length_ &&
+ frameNum < 300)
+ {
+ EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_,
+ height_, 0, kVideoRotation_0, &video_frame_));
+ frameNum++;
+
+ uint8_t* frame = video_frame_.buffer(kYPlane);
+ uint32_t yTmp = 0;
+ for (int yIdx = 0; yIdx < width_ * height_; yIdx++)
+ {
+ yTmp = frame[yIdx] << 1;
+ if (yTmp > 255)
+ {
+ yTmp = 255;
+ }
+ frame[yIdx] = static_cast<uint8_t>(yTmp);
+ }
+
+ VideoProcessingModule::FrameStats stats;
+ ASSERT_EQ(0, vpm_->GetFrameStats(&stats, video_frame_));
+ ASSERT_GE(brightnessWarning = vpm_->BrightnessDetection(video_frame_,
+ stats), 0);
+ EXPECT_NE(VideoProcessingModule::kDarkWarning, brightnessWarning);
+ if (brightnessWarning == VideoProcessingModule::kBrightWarning)
+ {
+ warningCount++;
+ }
+ }
+ ASSERT_NE(0, feof(source_file_)) << "Error reading source file";
+
+ // Expect many brightness warnings
+ warningProportion = static_cast<float>(warningCount) / frameNum * 100;
+ printf("Bright foreman: %.1f %%\n", warningProportion);
+ EXPECT_GT(warningProportion, 95);
+
+ rewind(source_file_);
+ frameNum = 0;
+ warningCount = 0;
+ while (fread(video_buffer.get(), 1, frame_length_, source_file_) ==
+ frame_length_ && frameNum < 300)
+ {
+ EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_,
+ height_, 0, kVideoRotation_0, &video_frame_));
+ frameNum++;
+
+ uint8_t* y_plane = video_frame_.buffer(kYPlane);
+ int32_t yTmp = 0;
+ for (int yIdx = 0; yIdx < width_ * height_; yIdx++)
+ {
+ yTmp = y_plane[yIdx] >> 1;
+ y_plane[yIdx] = static_cast<uint8_t>(yTmp);
+ }
+
+ VideoProcessingModule::FrameStats stats;
+ ASSERT_EQ(0, vpm_->GetFrameStats(&stats, video_frame_));
+ ASSERT_GE(brightnessWarning = vpm_->BrightnessDetection(video_frame_,
+ stats), 0);
+ EXPECT_NE(VideoProcessingModule::kBrightWarning, brightnessWarning);
+ if (brightnessWarning == VideoProcessingModule::kDarkWarning)
+ {
+ warningCount++;
+ }
+ }
+ ASSERT_NE(0, feof(source_file_)) << "Error reading source file";
+
+ // Expect many darkness warnings
+ warningProportion = static_cast<float>(warningCount) / frameNum * 100;
+ printf("Dark foreman: %.1f %%\n\n", warningProportion);
+ EXPECT_GT(warningProportion, 90);
+}
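
Both modified passes above derive their input by scaling every luma sample in
place: a doubling saturated at 255 for the bright pass, a halving for the dark
pass. A minimal standalone sketch of that transform (the helper name is ours
for illustration, not part of the WebRTC API):

    #include <algorithm>
    #include <cstdint>

    // shift > 0 brightens, saturating at 255; shift < 0 darkens by halving.
    void ScaleLumaPlane(uint8_t* y_plane, int num_pixels, int shift) {
      for (int i = 0; i < num_pixels; ++i) {
        if (shift >= 0) {
          const uint32_t v = static_cast<uint32_t>(y_plane[i]) << shift;
          y_plane[i] = static_cast<uint8_t>(std::min<uint32_t>(v, 255));
        } else {
          y_plane[i] = static_cast<uint8_t>(y_plane[i] >> -shift);
        }
      }
    }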
diff --git a/webrtc/modules/video_processing/main/test/unit_test/content_metrics_test.cc b/webrtc/modules/video_processing/main/test/unit_test/content_metrics_test.cc
new file mode 100644
index 0000000000..d9c1309d9b
--- /dev/null
+++ b/webrtc/modules/video_processing/main/test/unit_test/content_metrics_test.cc
@@ -0,0 +1,44 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
+#include "webrtc/modules/video_processing/main/interface/video_processing.h"
+#include "webrtc/modules/video_processing/main/source/content_analysis.h"
+#include "webrtc/modules/video_processing/main/test/unit_test/video_processing_unittest.h"
+#include "webrtc/test/testsupport/gtest_disable.h"
+
+namespace webrtc {
+
+TEST_F(VideoProcessingModuleTest, DISABLED_ON_IOS(ContentAnalysis)) {
+ VPMContentAnalysis ca_c(false);
+ VPMContentAnalysis ca_sse(true);
+ VideoContentMetrics *cm_c, *cm_sse;
+
+ ca_c.Initialize(width_, height_);
+ ca_sse.Initialize(width_, height_);
+
+ rtc::scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
+ while (fread(video_buffer.get(), 1, frame_length_, source_file_)
+ == frame_length_) {
+ // Using ConvertToI420 to add stride to the image.
+ EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
+ 0, kVideoRotation_0, &video_frame_));
+ cm_c = ca_c.ComputeContentMetrics(video_frame_);
+ cm_sse = ca_sse.ComputeContentMetrics(video_frame_);
+
+ ASSERT_EQ(cm_c->spatial_pred_err, cm_sse->spatial_pred_err);
+ ASSERT_EQ(cm_c->spatial_pred_err_v, cm_sse->spatial_pred_err_v);
+ ASSERT_EQ(cm_c->spatial_pred_err_h, cm_sse->spatial_pred_err_h);
+ ASSERT_EQ(cm_c->motion_magnitude, cm_sse->motion_magnitude);
+ }
+ ASSERT_NE(0, feof(source_file_)) << "Error reading source file";
+}
+
+} // namespace webrtc
diff --git a/webrtc/modules/video_processing/main/test/unit_test/createTable.m b/webrtc/modules/video_processing/main/test/unit_test/createTable.m
new file mode 100644
index 0000000000..2c7fb522f6
--- /dev/null
+++ b/webrtc/modules/video_processing/main/test/unit_test/createTable.m
@@ -0,0 +1,179 @@
+% Create the color enhancement look-up table and write it to
+% file colorEnhancementTable.h. Copy the contents of that file into
+% the source file for the color enhancement function.
+
+clear
+close all
+
+
+% First, define the color enhancement in a normalized domain
+
+% Compander function is defined in three radial zones.
+% 1. From 0 to radius r0, the compander function
+% is a second-order polynomial intersecting the points (0,0)
+% and (r0, r0), and with a slope B in (0,0).
+% 2. From r0 to r1, the compander is a third-order polynomial
+% intersecting the points (r0, r0) and (r1, r1), and with the
+% same slope as the first part in the point (r0, r0) and slope
+% equal to 1 in (r1, r1).
+% 3. For radii larger than r1, the compander function is the
+% unity scale function (no scaling at all).
+
+r0=0.07; % Dead zone radius (must be > 0)
+r1=0.6; % Enhancement zone radius (must be > r0 and < 1)
+B=0.2; % initial slope of compander function (between 0 and 1)
+
+x0=linspace(0,r0).'; % zone 1
+x1=linspace(r0,r1).'; % zone 2
+x2=linspace(r1,1).'; % zone 3
+
+A=(1-B)/r0;
+f0=A*x0.^2+B*x0; % compander function in zone 1
+
+% equation system for finding second zone parameters
+M=[r0^3 r0^2 r0 1;
+ 3*r0^2 2*r0 1 0;
+ 3*r1^2 2*r1 1 0;
+ r1^3 r1^2 r1 1];
+m=[A*r0^2+B*r0; 2*A*r0+B; 1; r1];
+% solve equations
+theta=M\m;
+
+% compander function in zone 2
+f1=[x1.^3 x1.^2 x1 ones(size(x1))]*theta;
+
+x=[x0; x1; x2];
+f=[f0; f1; x2];
+
+% plot it
+figure(1)
+plot(x,f,x,x,':')
+xlabel('Normalized radius')
+ylabel('Modified radius')
+
+
+% Now, create the look-up table in the integer color space
+[U,V]=meshgrid(0:255, 0:255); % U-V space
+U0=U;
+V0=V;
+
+% Conversion matrix from normalized YUV to RGB
+T=[1 0 1.13983; 1 -0.39465 -0.58060; 1 2.03211 0];
+Ylum=0.5;
+
+figure(2)
+Z(:,:,1)=Ylum + (U-127)/256*T(1,2) + (V-127)/256*T(1,3);
+Z(:,:,2)=Ylum + (U-127)/256*T(2,2) + (V-127)/256*T(2,3);
+Z(:,:,3)=Ylum + (U-127)/256*T(3,2) + (V-127)/256*T(3,3);
+Z=max(Z,0);
+Z=min(Z,1);
+subplot(121)
+image(Z);
+axis square
+axis off
+set(gcf,'color','k')
+
+R = sqrt((U-127).^2 + (V-127).^2);
+Rnorm = R/127;
+RnormMod = Rnorm;
+RnormMod(RnormMod==0)=1; % avoid division by zero
+
+% find indices to pixels in dead-zone (zone 1)
+ix=find(Rnorm<=r0);
+scaleMatrix = (A*Rnorm(ix).^2 + B*Rnorm(ix))./RnormMod(ix);
+U(ix)=(U(ix)-127).*scaleMatrix+127;
+V(ix)=(V(ix)-127).*scaleMatrix+127;
+
+% find indices to pixels in zone 2
+ix=find(Rnorm>r0 & Rnorm<=r1);
+scaleMatrix = (theta(1)*Rnorm(ix).^3 + theta(2)*Rnorm(ix).^2 + ...
+ theta(3)*Rnorm(ix) + theta(4)) ./ RnormMod(ix);
+U(ix)=(U(ix)-127).*scaleMatrix + 127;
+V(ix)=(V(ix)-127).*scaleMatrix + 127;
+
+% round to integer values and saturate
+U=round(U);
+V=round(V);
+U=max(min(U,255),0);
+V=max(min(V,255),0);
+
+Z(:,:,1)=Ylum + (U-127)/256*T(1,2) + (V-127)/256*T(1,3);
+Z(:,:,2)=Ylum + (U-127)/256*T(2,2) + (V-127)/256*T(2,3);
+Z(:,:,3)=Ylum + (U-127)/256*T(3,2) + (V-127)/256*T(3,3);
+Z=max(Z,0);
+Z=min(Z,1);
+subplot(122)
+image(Z);
+axis square
+axis off
+
+figure(3)
+subplot(121)
+mesh(U-U0)
+subplot(122)
+mesh(V-V0)
+
+
+
+% Last, write to file
+% Write only one matrix, since U=V'
+
+fid = fopen('../out/Debug/colorEnhancementTable.h','wt');
+if fid==-1
+ error('Cannot open file colorEnhancementTable.h');
+end
+
+fprintf(fid,'//colorEnhancementTable.h\n\n');
+fprintf(fid,'//Copy the constant table to the appropriate header file.\n\n');
+
+fprintf(fid,'//Table created with Matlab script createTable.m\n\n');
+fprintf(fid,'//Usage:\n');
+fprintf(fid,'// Umod=colorTable[U][V]\n');
+fprintf(fid,'// Vmod=colorTable[V][U]\n');
+
+fprintf(fid,'static unsigned char colorTable[%i][%i] = {\n', size(U,1), size(U,2));
+
+for u=1:size(U,2)
+ fprintf(fid,' {%i', U(1,u));
+ for v=2:size(U,1)
+ fprintf(fid,', %i', U(v,u));
+ end
+ fprintf(fid,'}');
+ if u<size(U,2)
+ fprintf(fid,',');
+ end
+ fprintf(fid,'\n');
+end
+fprintf(fid,'};\n\n');
+fclose(fid);
+fprintf('done\n');
+
+
+answ=input('Create test vector (takes some time...)? y/n : ','s');
+if ~strcmp(answ, 'y')
+ return
+end
+
+% Also, create test vectors
+
+% Read test file foreman.yuv
+fprintf('Reading test file...')
+[y,u,v]=readYUV420file('../out/Debug/testFiles/foreman_cif.yuv',352,288);
+fprintf(' done\n');
+unew=uint8(zeros(size(u)));
+vnew=uint8(zeros(size(v)));
+
+% traverse all frames
+for k=1:size(y,3)
+ fprintf('Frame %i\n', k);
+ for r=1:size(u,1)
+ for c=1:size(u,2)
+ unew(r,c,k) = uint8(U(double(v(r,c,k))+1, double(u(r,c,k))+1));
+ vnew(r,c,k) = uint8(V(double(v(r,c,k))+1, double(u(r,c,k))+1));
+ end
+ end
+end
+
+fprintf('\nWriting modified test file...')
+writeYUV420file('../out/Debug/foremanColorEnhanced.yuv',y,unew,vnew);
+fprintf(' done\n');
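
Restated in LaTeX, the compander the script constructs (with the same r_0,
r_1, and B defined at the top) is

    f(r) = \begin{cases}
      A r^2 + B r, & 0 \le r \le r_0, \quad A = \tfrac{1-B}{r_0} \\
      \theta_1 r^3 + \theta_2 r^2 + \theta_3 r + \theta_4, & r_0 < r \le r_1 \\
      r, & r_1 < r \le 1
    \end{cases}

where \theta = M^{-1} m is the solution of the 4x4 system above. The system
pins f(r_0) = r_0 and f(r_1) = r_1 and matches the first derivative at both
boundaries (slope 2 A r_0 + B at r_0, slope 1 at r_1), so f is continuously
differentiable across all three zones.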
diff --git a/webrtc/modules/video_processing/main/test/unit_test/deflickering_test.cc b/webrtc/modules/video_processing/main/test/unit_test/deflickering_test.cc
new file mode 100644
index 0000000000..83d09ef486
--- /dev/null
+++ b/webrtc/modules/video_processing/main/test/unit_test/deflickering_test.cc
@@ -0,0 +1,100 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+#include <stdlib.h>
+
+#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
+#include "webrtc/modules/video_processing/main/interface/video_processing.h"
+#include "webrtc/modules/video_processing/main/test/unit_test/video_processing_unittest.h"
+#include "webrtc/system_wrappers/include/tick_util.h"
+#include "webrtc/test/testsupport/fileutils.h"
+#include "webrtc/test/testsupport/gtest_disable.h"
+
+namespace webrtc {
+
+TEST_F(VideoProcessingModuleTest, DISABLED_ON_IOS(Deflickering))
+{
+ enum { NumRuns = 30 };
+ uint32_t frameNum = 0;
+ const uint32_t frame_rate = 15;
+
+ int64_t min_runtime = 0;
+ int64_t avg_runtime = 0;
+
+ // Close automatically opened Foreman.
+ fclose(source_file_);
+ const std::string input_file =
+ webrtc::test::ResourcePath("deflicker_before_cif_short", "yuv");
+ source_file_ = fopen(input_file.c_str(), "rb");
+ ASSERT_TRUE(source_file_ != NULL) <<
+ "Cannot read input file: " << input_file << "\n";
+
+ const std::string output_file =
+ webrtc::test::OutputPath() + "deflicker_output_cif_short.yuv";
+ FILE* deflickerFile = fopen(output_file.c_str(), "wb");
+ ASSERT_TRUE(deflickerFile != NULL) <<
+ "Could not open output file: " << output_file << "\n";
+
+ printf("\nRun time [us / frame]:\n");
+ rtc::scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
+ for (uint32_t run_idx = 0; run_idx < NumRuns; run_idx++)
+ {
+ TickTime t0;
+ TickTime t1;
+ TickInterval acc_ticks;
+ uint32_t timeStamp = 1;
+
+ frameNum = 0;
+ while (fread(video_buffer.get(), 1, frame_length_, source_file_) ==
+ frame_length_)
+ {
+ frameNum++;
+ EXPECT_EQ(
+ 0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_,
+ height_, 0, kVideoRotation_0, &video_frame_));
+ video_frame_.set_timestamp(timeStamp);
+
+ t0 = TickTime::Now();
+ VideoProcessingModule::FrameStats stats;
+ ASSERT_EQ(0, vpm_->GetFrameStats(&stats, video_frame_));
+ ASSERT_EQ(0, vpm_->Deflickering(&video_frame_, &stats));
+ t1 = TickTime::Now();
+ acc_ticks += (t1 - t0);
+
+ if (run_idx == 0)
+ {
+ if (PrintVideoFrame(video_frame_, deflickerFile) < 0) {
+ return;
+ }
+ }
+ timeStamp += (90000 / frame_rate);
+ }
+ ASSERT_NE(0, feof(source_file_)) << "Error reading source file";
+
+ printf("%u\n", static_cast<int>(acc_ticks.Microseconds() / frameNum));
+ if (acc_ticks.Microseconds() < min_runtime || run_idx == 0)
+ {
+ min_runtime = acc_ticks.Microseconds();
+ }
+ avg_runtime += acc_ticks.Microseconds();
+
+ rewind(source_file_);
+ }
+ ASSERT_EQ(0, fclose(deflickerFile));
+ // TODO(kjellander): Add verification of deflicker output file.
+
+ printf("\nAverage run time = %d us / frame\n",
+ static_cast<int>(avg_runtime / frameNum / NumRuns));
+ printf("Min run time = %d us / frame\n\n",
+ static_cast<int>(min_runtime / frameNum));
+}
+
+} // namespace webrtc
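
The timestamp increment of 90000 / frame_rate in the loop above reflects the
90 kHz RTP clock used for video: at 15 fps, each frame spans 6000 ticks
regardless of wall-clock time. A one-function sketch (the helper name is ours,
not a WebRTC API):

    #include <cstdint>

    // RTP video timestamps advance on a 90 kHz clock, so one frame at
    // |frames_per_second| covers 90000 / frames_per_second ticks.
    uint32_t NextRtpVideoTimestamp(uint32_t current, int frames_per_second) {
      return current + static_cast<uint32_t>(90000 / frames_per_second);
    }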
diff --git a/webrtc/modules/video_processing/main/test/unit_test/readYUV420file.m b/webrtc/modules/video_processing/main/test/unit_test/readYUV420file.m
new file mode 100644
index 0000000000..03013efd3a
--- /dev/null
+++ b/webrtc/modules/video_processing/main/test/unit_test/readYUV420file.m
@@ -0,0 +1,45 @@
+function [Y,U,V] = readYUV420file(filename, width, height)
+% [Y,U,V] = readYUV420file(filename, width, height)
+
+fid = fopen(filename,'rb');
+if fid==-1
+ error(['Cannot open file ' filename]);
+end
+
+% Number of pixels per image
+nPx=width*height;
+
+% nPx bytes luminance, nPx/4 bytes U, nPx/4 bytes V
+frameSizeBytes = nPx*1.5;
+
+% determine file length in bytes
+fseek(fid,0,'eof'); % move to end of file
+fileLen=ftell(fid); % number of bytes
+fseek(fid,0,'bof'); % rewind to start
+
+% calculate number of frames
+numFrames = floor(fileLen/frameSizeBytes);
+
+Y=uint8(zeros(height,width,numFrames));
+U=uint8(zeros(height/2,width/2,numFrames));
+V=uint8(zeros(height/2,width/2,numFrames));
+
+[X,nBytes]=fread(fid, frameSizeBytes, 'uchar');
+
+for k=1:numFrames
+
+ % Store luminance
+ Y(:,:,k)=uint8(reshape(X(1:nPx), width, height).');
+
+ % Store U channel
+ U(:,:,k)=uint8(reshape(X(nPx + (1:nPx/4)), width/2, height/2).');
+
+ % Store V channel
+ V(:,:,k)=uint8(reshape(X(nPx + nPx/4 + (1:nPx/4)), width/2, height/2).');
+
+ % Read next frame
+ [X,nBytes]=fread(fid, frameSizeBytes, 'uchar');
+end
+
+
+fclose(fid);
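
The reader relies only on the I420 layout: a full-resolution Y plane followed
by U and V planes at half resolution in each dimension, so one frame occupies
width * height * 3/2 bytes. The same arithmetic as a C++ sketch (the struct
and function names are illustrative; the C++ tests use WebRTC's own
CalcBufferSize for this):

    #include <cstddef>

    // Plane sizes for one I420 frame; rounding up keeps odd dimensions valid.
    struct I420Layout {
      size_t y_size;      // width * height
      size_t uv_size;     // per chroma plane: ceil(w/2) * ceil(h/2)
      size_t frame_size;  // y_size + 2 * uv_size
    };

    I420Layout ComputeI420Layout(int width, int height) {
      const size_t y = static_cast<size_t>(width) * height;
      const size_t uv =
          static_cast<size_t>((width + 1) / 2) * ((height + 1) / 2);
      return {y, uv, y + 2 * uv};
    }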
diff --git a/webrtc/modules/video_processing/main/test/unit_test/video_processing_unittest.cc b/webrtc/modules/video_processing/main/test/unit_test/video_processing_unittest.cc
new file mode 100644
index 0000000000..11ccc4891b
--- /dev/null
+++ b/webrtc/modules/video_processing/main/test/unit_test/video_processing_unittest.cc
@@ -0,0 +1,390 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_processing/main/test/unit_test/video_processing_unittest.h"
+
+#include <string>
+
+#include <gflags/gflags.h>
+#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
+#include "webrtc/system_wrappers/include/tick_util.h"
+#include "webrtc/test/testsupport/fileutils.h"
+#include "webrtc/test/testsupport/gtest_disable.h"
+
+namespace webrtc {
+
+namespace {
+
+// Define command line flag 'gen_files' (default value: false).
+DEFINE_bool(gen_files, false, "Output files for visual inspection.");
+
+} // namespace
+
+static void PreprocessFrameAndVerify(const VideoFrame& source,
+ int target_width,
+ int target_height,
+ VideoProcessingModule* vpm,
+ VideoFrame** out_frame);
+static void CropFrame(const uint8_t* source_data,
+ int source_width,
+ int source_height,
+ int offset_x,
+ int offset_y,
+ int cropped_width,
+ int cropped_height,
+ VideoFrame* cropped_frame);
+// |source_data| is cropped and scaled to |target_width| x |target_height|,
+// then scaled back to the expected cropped size. |expected_psnr| is used to
+// verify basic quality and is set ~0.05-0.1 dB below the PSNR measured under
+// the same conditions.
+static void TestSize(const VideoFrame& source_frame,
+ const VideoFrame& cropped_source_frame,
+ int target_width,
+ int target_height,
+ double expected_psnr,
+ VideoProcessingModule* vpm);
+static bool CompareFrames(const webrtc::VideoFrame& frame1,
+ const webrtc::VideoFrame& frame2);
+static void WriteProcessedFrameForVisualInspection(const VideoFrame& source,
+ const VideoFrame& processed);
+
+VideoProcessingModuleTest::VideoProcessingModuleTest()
+ : vpm_(NULL),
+ source_file_(NULL),
+ width_(352),
+ half_width_((width_ + 1) / 2),
+ height_(288),
+ size_y_(width_ * height_),
+ size_uv_(half_width_ * ((height_ + 1) / 2)),
+ frame_length_(CalcBufferSize(kI420, width_, height_)) {}
+
+void VideoProcessingModuleTest::SetUp() {
+ vpm_ = VideoProcessingModule::Create();
+ ASSERT_TRUE(vpm_ != NULL);
+
+ ASSERT_EQ(0, video_frame_.CreateEmptyFrame(width_, height_, width_,
+ half_width_, half_width_));
+ // Clear video frame so DrMemory/Valgrind will allow reads of the buffer.
+ memset(video_frame_.buffer(kYPlane), 0, video_frame_.allocated_size(kYPlane));
+ memset(video_frame_.buffer(kUPlane), 0, video_frame_.allocated_size(kUPlane));
+ memset(video_frame_.buffer(kVPlane), 0, video_frame_.allocated_size(kVPlane));
+ const std::string video_file =
+ webrtc::test::ResourcePath("foreman_cif", "yuv");
+ source_file_ = fopen(video_file.c_str(),"rb");
+ ASSERT_TRUE(source_file_ != NULL) <<
+ "Cannot read source file: " + video_file + "\n";
+}
+
+void VideoProcessingModuleTest::TearDown() {
+ if (source_file_ != NULL) {
+ ASSERT_EQ(0, fclose(source_file_));
+ }
+ source_file_ = NULL;
+
+ if (vpm_ != NULL) {
+ VideoProcessingModule::Destroy(vpm_);
+ }
+ vpm_ = NULL;
+}
+
+TEST_F(VideoProcessingModuleTest, DISABLED_ON_IOS(HandleNullBuffer)) {
+ // TODO(mikhal/stefan): Do we need this one?
+ VideoProcessingModule::FrameStats stats;
+ // Video frame with unallocated buffer.
+ VideoFrame videoFrame;
+
+ EXPECT_EQ(-3, vpm_->GetFrameStats(&stats, videoFrame));
+
+ EXPECT_EQ(-1, vpm_->Deflickering(&videoFrame, &stats));
+
+ EXPECT_EQ(-3, vpm_->BrightnessDetection(videoFrame, stats));
+}
+
+TEST_F(VideoProcessingModuleTest, DISABLED_ON_IOS(HandleBadStats)) {
+ VideoProcessingModule::FrameStats stats;
+ rtc::scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
+ ASSERT_EQ(frame_length_, fread(video_buffer.get(), 1, frame_length_,
+ source_file_));
+ EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
+ 0, kVideoRotation_0, &video_frame_));
+
+ EXPECT_EQ(-1, vpm_->Deflickering(&video_frame_, &stats));
+
+ EXPECT_EQ(-3, vpm_->BrightnessDetection(video_frame_, stats));
+}
+
+TEST_F(VideoProcessingModuleTest, DISABLED_ON_IOS(IdenticalResultsAfterReset)) {
+ VideoFrame video_frame2;
+ VideoProcessingModule::FrameStats stats;
+ // Only testing non-static functions here.
+ rtc::scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
+ ASSERT_EQ(frame_length_, fread(video_buffer.get(), 1, frame_length_,
+ source_file_));
+ EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
+ 0, kVideoRotation_0, &video_frame_));
+ ASSERT_EQ(0, vpm_->GetFrameStats(&stats, video_frame_));
+ ASSERT_EQ(0, video_frame2.CopyFrame(video_frame_));
+ ASSERT_EQ(0, vpm_->Deflickering(&video_frame_, &stats));
+ vpm_->Reset();
+ // Retrieve frame stats again in case Deflickering() has zeroed them.
+ ASSERT_EQ(0, vpm_->GetFrameStats(&stats, video_frame2));
+ ASSERT_EQ(0, vpm_->Deflickering(&video_frame2, &stats));
+ EXPECT_TRUE(CompareFrames(video_frame_, video_frame2));
+
+ ASSERT_EQ(frame_length_, fread(video_buffer.get(), 1, frame_length_,
+ source_file_));
+ EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
+ 0, kVideoRotation_0, &video_frame_));
+ ASSERT_EQ(0, vpm_->GetFrameStats(&stats, video_frame_));
+ video_frame2.CopyFrame(video_frame_);
+ ASSERT_EQ(0, vpm_->BrightnessDetection(video_frame_, stats));
+ vpm_->Reset();
+ ASSERT_EQ(0, vpm_->BrightnessDetection(video_frame2, stats));
+ EXPECT_TRUE(CompareFrames(video_frame_, video_frame2));
+}
+
+TEST_F(VideoProcessingModuleTest, DISABLED_ON_IOS(FrameStats)) {
+ VideoProcessingModule::FrameStats stats;
+ rtc::scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
+ ASSERT_EQ(frame_length_, fread(video_buffer.get(), 1, frame_length_,
+ source_file_));
+ EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
+ 0, kVideoRotation_0, &video_frame_));
+
+ EXPECT_FALSE(vpm_->ValidFrameStats(stats));
+ EXPECT_EQ(0, vpm_->GetFrameStats(&stats, video_frame_));
+ EXPECT_TRUE(vpm_->ValidFrameStats(stats));
+
+ printf("\nFrameStats\n");
+ printf("mean: %u\nnum_pixels: %u\nsubSamplWidth: "
+ "%u\nsumSamplHeight: %u\nsum: %u\n\n",
+ static_cast<unsigned int>(stats.mean),
+ static_cast<unsigned int>(stats.num_pixels),
+ static_cast<unsigned int>(stats.subSamplHeight),
+ static_cast<unsigned int>(stats.subSamplWidth),
+ static_cast<unsigned int>(stats.sum));
+
+ vpm_->ClearFrameStats(&stats);
+ EXPECT_FALSE(vpm_->ValidFrameStats(stats));
+}
+
+TEST_F(VideoProcessingModuleTest, DISABLED_ON_IOS(PreprocessorLogic)) {
+ // Disable temporal sampling (frame dropping).
+ vpm_->EnableTemporalDecimation(false);
+ int resolution = 100;
+ EXPECT_EQ(VPM_OK, vpm_->SetTargetResolution(resolution, resolution, 15));
+ EXPECT_EQ(VPM_OK, vpm_->SetTargetResolution(resolution, resolution, 30));
+ // Disable spatial sampling.
+ vpm_->SetInputFrameResampleMode(kNoRescaling);
+ EXPECT_EQ(VPM_OK, vpm_->SetTargetResolution(resolution, resolution, 30));
+ VideoFrame* out_frame = NULL;
+ // Set rescaling => output frame != NULL.
+ vpm_->SetInputFrameResampleMode(kFastRescaling);
+ PreprocessFrameAndVerify(video_frame_, resolution, resolution, vpm_,
+ &out_frame);
+ // No rescaling => output frame == NULL.
+ vpm_->SetInputFrameResampleMode(kNoRescaling);
+ EXPECT_EQ(VPM_OK, vpm_->PreprocessFrame(video_frame_, &out_frame));
+ EXPECT_TRUE(out_frame == NULL);
+}
+
+TEST_F(VideoProcessingModuleTest, DISABLED_ON_IOS(Resampler)) {
+ enum { NumRuns = 1 };
+
+ int64_t min_runtime = 0;
+ int64_t total_runtime = 0;
+
+ rewind(source_file_);
+ ASSERT_TRUE(source_file_ != NULL) <<
+ "Cannot read input file \n";
+
+ // Content analysis is not needed here.
+ vpm_->EnableContentAnalysis(false);
+ // No temporal decimation.
+ vpm_->EnableTemporalDecimation(false);
+
+ // Read one test frame.
+ rtc::scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
+ ASSERT_EQ(frame_length_, fread(video_buffer.get(), 1, frame_length_,
+ source_file_));
+ // Using ConvertToI420 to add stride to the image.
+ EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
+ 0, kVideoRotation_0, &video_frame_));
+ // Cropped source frame that will contain the expected visible region.
+ VideoFrame cropped_source_frame;
+ cropped_source_frame.CopyFrame(video_frame_);
+
+ for (uint32_t run_idx = 0; run_idx < NumRuns; run_idx++) {
+ // Initiate test timer.
+ const TickTime time_start = TickTime::Now();
+
+ // Init the sourceFrame with a timestamp.
+ video_frame_.set_render_time_ms(time_start.MillisecondTimestamp());
+ video_frame_.set_timestamp(time_start.MillisecondTimestamp() * 90);
+
+ // Test scaling to different sizes: source is of |width|/|height| = 352/288.
+ // Pure scaling:
+ TestSize(video_frame_, video_frame_, width_ / 4, height_ / 4, 25.2, vpm_);
+ TestSize(video_frame_, video_frame_, width_ / 2, height_ / 2, 28.1, vpm_);
+ // No resampling:
+ TestSize(video_frame_, video_frame_, width_, height_, -1, vpm_);
+ TestSize(video_frame_, video_frame_, 2 * width_, 2 * height_, 32.2, vpm_);
+
+ // Scaling and cropping. The cropped source frame is the largest center
+ // aligned region that can be used from the source while preserving aspect
+ // ratio.
+ CropFrame(video_buffer.get(), width_, height_, 0, 56, 352, 176,
+ &cropped_source_frame);
+ TestSize(video_frame_, cropped_source_frame, 100, 50, 24.0, vpm_);
+
+ CropFrame(video_buffer.get(), width_, height_, 0, 30, 352, 225,
+ &cropped_source_frame);
+ TestSize(video_frame_, cropped_source_frame, 400, 256, 31.3, vpm_);
+
+ CropFrame(video_buffer.get(), width_, height_, 68, 0, 216, 288,
+ &cropped_source_frame);
+ TestSize(video_frame_, cropped_source_frame, 480, 640, 32.15, vpm_);
+
+ CropFrame(video_buffer.get(), width_, height_, 0, 12, 352, 264,
+ &cropped_source_frame);
+ TestSize(video_frame_, cropped_source_frame, 960, 720, 32.2, vpm_);
+
+ CropFrame(video_buffer.get(), width_, height_, 0, 44, 352, 198,
+ &cropped_source_frame);
+ TestSize(video_frame_, cropped_source_frame, 1280, 720, 32.15, vpm_);
+
+ // Upsampling to odd size.
+ CropFrame(video_buffer.get(), width_, height_, 0, 26, 352, 233,
+ &cropped_source_frame);
+ TestSize(video_frame_, cropped_source_frame, 501, 333, 32.05, vpm_);
+ // Downsample to odd size.
+ CropFrame(video_buffer.get(), width_, height_, 0, 34, 352, 219,
+ &cropped_source_frame);
+ TestSize(video_frame_, cropped_source_frame, 281, 175, 29.3, vpm_);
+
+ // Stop timer.
+ const int64_t runtime = (TickTime::Now() - time_start).Microseconds();
+ if (runtime < min_runtime || run_idx == 0) {
+ min_runtime = runtime;
+ }
+ total_runtime += runtime;
+ }
+
+ printf("\nAverage run time = %d us / frame\n",
+ static_cast<int>(total_runtime));
+ printf("Min run time = %d us / frame\n\n",
+ static_cast<int>(min_runtime));
+}
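+
+// Note on the cropped sizes above: each crop is the largest center-aligned
+// region of the 352x288 source matching the target aspect ratio, i.e.
+// cropped_height = round(source_width * target_height / target_width) when
+// cropping vertically (352 * 50 / 100 = 176 for the 100x50 case), and
+// analogously for the horizontal 216x288 crop.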
+
+void PreprocessFrameAndVerify(const VideoFrame& source,
+ int target_width,
+ int target_height,
+ VideoProcessingModule* vpm,
+ VideoFrame** out_frame) {
+ ASSERT_EQ(VPM_OK, vpm->SetTargetResolution(target_width, target_height, 30));
+ ASSERT_EQ(VPM_OK, vpm->PreprocessFrame(source, out_frame));
+
+ // If no resizing is needed, expect NULL.
+ if (target_width == source.width() && target_height == source.height()) {
+ EXPECT_EQ(NULL, *out_frame);
+ return;
+ }
+
+ // Verify the resampled frame.
+ EXPECT_TRUE(*out_frame != NULL);
+ EXPECT_EQ(source.render_time_ms(), (*out_frame)->render_time_ms());
+ EXPECT_EQ(source.timestamp(), (*out_frame)->timestamp());
+ EXPECT_EQ(target_width, (*out_frame)->width());
+ EXPECT_EQ(target_height, (*out_frame)->height());
+}
+
+void CropFrame(const uint8_t* source_data,
+ int source_width,
+ int source_height,
+ int offset_x,
+ int offset_y,
+ int cropped_width,
+ int cropped_height,
+ VideoFrame* cropped_frame) {
+ cropped_frame->CreateEmptyFrame(cropped_width, cropped_height, cropped_width,
+ (cropped_width + 1) / 2,
+ (cropped_width + 1) / 2);
+ EXPECT_EQ(0,
+ ConvertToI420(kI420, source_data, offset_x, offset_y, source_width,
+ source_height, 0, kVideoRotation_0, cropped_frame));
+}
+
+void TestSize(const VideoFrame& source_frame,
+ const VideoFrame& cropped_source_frame,
+ int target_width,
+ int target_height,
+ double expected_psnr,
+ VideoProcessingModule* vpm) {
+ // Resample source_frame to out_frame.
+ VideoFrame* out_frame = NULL;
+ vpm->SetInputFrameResampleMode(kBox);
+ PreprocessFrameAndVerify(source_frame, target_width, target_height, vpm,
+ &out_frame);
+ if (out_frame == NULL)
+ return;
+ WriteProcessedFrameForVisualInspection(source_frame, *out_frame);
+
+ // Scale |resampled_source_frame| back to the source scale.
+ VideoFrame resampled_source_frame;
+ resampled_source_frame.CopyFrame(*out_frame);
+ PreprocessFrameAndVerify(resampled_source_frame, cropped_source_frame.width(),
+ cropped_source_frame.height(), vpm, &out_frame);
+ WriteProcessedFrameForVisualInspection(resampled_source_frame, *out_frame);
+
+ // Compute PSNR against the cropped source frame and check expectation.
+ double psnr = I420PSNR(&cropped_source_frame, out_frame);
+ EXPECT_GT(psnr, expected_psnr);
+ printf("PSNR: %f. PSNR is between source of size %d %d, and a modified "
+ "source which is scaled down/up to: %d %d, and back to source size \n",
+ psnr, source_frame.width(), source_frame.height(),
+ target_width, target_height);
+}
+
+bool CompareFrames(const webrtc::VideoFrame& frame1,
+ const webrtc::VideoFrame& frame2) {
+ for (int plane = 0; plane < webrtc::kNumOfPlanes; plane++) {
+ webrtc::PlaneType plane_type = static_cast<webrtc::PlaneType>(plane);
+ int allocated_size1 = frame1.allocated_size(plane_type);
+ int allocated_size2 = frame2.allocated_size(plane_type);
+ if (allocated_size1 != allocated_size2)
+ return false;
+ const uint8_t* plane_buffer1 = frame1.buffer(plane_type);
+ const uint8_t* plane_buffer2 = frame2.buffer(plane_type);
+ if (memcmp(plane_buffer1, plane_buffer2, allocated_size1))
+ return false;
+ }
+ return true;
+}
+
+void WriteProcessedFrameForVisualInspection(const VideoFrame& source,
+ const VideoFrame& processed) {
+ // Skip if writing to files is not enabled.
+ if (!FLAGS_gen_files)
+ return;
+ // Write the processed frame to file for visual inspection.
+ std::ostringstream filename;
+ filename << webrtc::test::OutputPath() << "Resampler_from_" << source.width()
+ << "x" << source.height() << "_to_" << processed.width() << "x"
+ << processed.height() << "_30Hz_P420.yuv";
+ std::cout << "Watch " << filename.str() << " and verify that it is okay."
+ << std::endl;
+ FILE* stand_alone_file = fopen(filename.str().c_str(), "wb");
+ if (PrintVideoFrame(processed, stand_alone_file) < 0)
+ std::cerr << "Failed to write: " << filename.str() << std::endl;
+ if (stand_alone_file)
+ fclose(stand_alone_file);
+}
+
+} // namespace webrtc
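
TestSize above judges each crop/scale round trip with I420PSNR, the standard
peak signal-to-noise ratio for 8-bit video:

    \mathrm{PSNR} = 10 \log_{10} \frac{255^2}{\mathrm{MSE}}, \qquad
    \mathrm{MSE} = \frac{1}{N} \sum_{i=1}^{N} (x_i - y_i)^2

Higher values mean the resampled frame is closer to the cropped source; per
the comment on TestSize, each expected_psnr threshold sits slightly below the
PSNR measured when the test was written.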
diff --git a/webrtc/modules/video_processing/main/test/unit_test/video_processing_unittest.h b/webrtc/modules/video_processing/main/test/unit_test/video_processing_unittest.h
new file mode 100644
index 0000000000..4a4fda41e6
--- /dev/null
+++ b/webrtc/modules/video_processing/main/test/unit_test/video_processing_unittest.h
@@ -0,0 +1,47 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_PROCESSING_MAIN_TEST_UNIT_TEST_VIDEO_PROCESSING_UNITTEST_H
+#define WEBRTC_MODULES_VIDEO_PROCESSING_MAIN_TEST_UNIT_TEST_VIDEO_PROCESSING_UNITTEST_H
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/modules/video_processing/main/interface/video_processing.h"
+#include "webrtc/system_wrappers/include/trace.h"
+#include "webrtc/test/testsupport/fileutils.h"
+
+namespace webrtc {
+
+class VideoProcessingModuleTest : public ::testing::Test {
+ protected:
+ VideoProcessingModuleTest();
+ virtual void SetUp();
+ virtual void TearDown();
+ static void SetUpTestCase() {
+ Trace::CreateTrace();
+ std::string trace_file = webrtc::test::OutputPath() + "VPMTrace.txt";
+ ASSERT_EQ(0, Trace::SetTraceFile(trace_file.c_str()));
+ }
+ static void TearDownTestCase() {
+ Trace::ReturnTrace();
+ }
+ VideoProcessingModule* vpm_;
+ FILE* source_file_;
+ VideoFrame video_frame_;
+ const int width_;
+ const int half_width_;
+ const int height_;
+ const int size_y_;
+ const int size_uv_;
+ const size_t frame_length_;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_PROCESSING_MAIN_TEST_UNIT_TEST_VIDEO_PROCESSING_UNITTEST_H
diff --git a/webrtc/modules/video_processing/main/test/unit_test/writeYUV420file.m b/webrtc/modules/video_processing/main/test/unit_test/writeYUV420file.m
new file mode 100644
index 0000000000..69a8808338
--- /dev/null
+++ b/webrtc/modules/video_processing/main/test/unit_test/writeYUV420file.m
@@ -0,0 +1,22 @@
+function writeYUV420file(filename, Y, U, V)
+% writeYUV420file(filename, Y, U, V)
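+%
+% Y, U, and V are height-by-width-by-numFrames arrays. The transposes below
+% convert MATLAB's column-major storage to the row-major order of raw YUV
+% files (readYUV420file.m applies the inverse transpose when reading).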
+
+fid = fopen(filename,'wb');
+if fid==-1
+ error(['Cannot open file ' filename]);
+end
+
+numFrames=size(Y,3);
+
+for k=1:numFrames
+ % Write luminance
+ fwrite(fid,uint8(Y(:,:,k).'), 'uchar');
+
+ % Write U channel
+ fwrite(fid,uint8(U(:,:,k).'), 'uchar');
+
+ % Write V channel
+ fwrite(fid,uint8(V(:,:,k).'), 'uchar');
+end
+
+fclose(fid);