summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--base/nat_unittest.cc2
-rw-r--r--base/network.cc67
-rw-r--r--base/network.h31
-rw-r--r--base/network_unittest.cc43
-rw-r--r--base/thread.cc63
-rw-r--r--base/thread.h26
-rw-r--r--base/thread_unittest.cc10
-rw-r--r--build/common.gypi7
-rw-r--r--build/merge_libs.gyp6
-rw-r--r--build/merge_libs_voice.gyp48
-rw-r--r--build/webrtc.gni7
-rw-r--r--common_audio/BUILD.gn2
-rw-r--r--common_video/interface/i420_video_frame.h146
-rw-r--r--common_video/interface/video_image.h59
-rw-r--r--examples/android/media_demo/jni/on_load.cc15
-rw-r--r--modules/audio_coding/neteq/neteq_external_decoder_unittest.cc99
-rw-r--r--modules/audio_processing/audio_processing_impl.cc1
-rw-r--r--modules/audio_processing/noise_suppression_impl.cc27
-rw-r--r--modules/audio_processing/noise_suppression_impl.h1
-rw-r--r--modules/audio_processing/ns/include/noise_suppression.h15
-rw-r--r--modules/audio_processing/ns/noise_suppression.c3
-rw-r--r--modules/audio_processing/ns/ns_core.c4
-rw-r--r--modules/audio_processing/ns/ns_core.h19
-rw-r--r--modules/desktop_capture/window_capturer_win.cc6
-rw-r--r--modules/modules.gyp1
-rw-r--r--modules/rtp_rtcp/source/rtp_format_vp8.cc258
-rw-r--r--modules/rtp_rtcp/source/rtp_format_vp8_unittest.cc336
-rw-r--r--modules/rtp_rtcp/source/rtp_receiver_impl.cc1
-rw-r--r--modules/rtp_rtcp/source/rtp_utility.cc229
-rw-r--r--modules/rtp_rtcp/source/rtp_utility.h92
-rw-r--r--modules/rtp_rtcp/source/rtp_utility_unittest.cc288
-rw-r--r--modules/rtp_rtcp/test/BWEStandAlone/BWEStandAlone.cc12
-rw-r--r--modules/rtp_rtcp/test/BWEStandAlone/TestSenderReceiver.h45
-rw-r--r--modules/video_capture/BUILD.gn232
-rw-r--r--modules/video_capture/ensure_initialized.cc30
-rw-r--r--modules/video_capture/include/video_capture.h8
-rw-r--r--modules/video_capture/include/video_capture_factory.h4
-rw-r--r--modules/video_capture/ios/rtc_video_capture_ios_objc.mm27
-rw-r--r--modules/video_capture/test/video_capture_unittest.cc25
-rw-r--r--modules/video_capture/video_capture.gypi240
-rw-r--r--modules/video_capture/video_capture_internal.h27
-rw-r--r--modules/video_coding/codecs/interface/video_codec_interface.h103
-rw-r--r--modules/video_coding/codecs/test/mock/mock_packet_manipulator.h2
-rw-r--r--modules/video_coding/main/source/codec_database.cc46
-rw-r--r--modules/video_render/BUILD.gn230
-rw-r--r--modules/video_render/android/video_render_android_impl.cc7
-rw-r--r--modules/video_render/include/video_render.h4
-rw-r--r--modules/video_render/test/testAPI/testAPI.cc9
-rw-r--r--modules/video_render/video_render.gypi95
-rw-r--r--modules/video_render/video_render_impl.cc247
-rw-r--r--modules/video_render/video_render_internal.h27
-rw-r--r--modules/video_render/video_render_internal_impl.cc917
-rw-r--r--overrides/OWNERS13
-rw-r--r--overrides/webrtc/base/constructormagic.h2
-rw-r--r--test/channel_transport/udp_socket2_win.h31
-rw-r--r--test/channel_transport/udp_socket_posix.cc11
-rw-r--r--test/channel_transport/udp_socket_posix.h4
-rw-r--r--test/channel_transport/udp_transport_impl.h4
-rw-r--r--test/configurable_frame_size_encoder.cc4
-rw-r--r--test/configurable_frame_size_encoder.h2
-rw-r--r--test/encoder_settings.cc17
-rw-r--r--test/fake_encoder.cc2
-rw-r--r--test/fake_encoder.h3
-rw-r--r--test/webrtc_test_common.gyp56
-rw-r--r--test/win/d3d_renderer.h1
-rw-r--r--video/bitrate_estimator_tests.cc5
-rw-r--r--video/call.cc10
-rw-r--r--video/call_perf_tests.cc5
-rw-r--r--video/end_to_end_tests.cc22
-rw-r--r--video/full_stack.cc9
-rw-r--r--video/loopback.cc2
-rw-r--r--video/video_send_stream.cc12
-rw-r--r--video_encoder.h74
-rw-r--r--video_engine/include/vie_base.h9
-rw-r--r--video_engine/test/auto_test/source/vie_autotest_android.cc7
-rw-r--r--video_engine/test/auto_test/vie_auto_test.gypi4
-rw-r--r--video_engine/video_engine_core.gypi2
-rw-r--r--video_engine/vie_impl.cc21
-rw-r--r--video_frame.h219
-rw-r--r--webrtc.gyp10
-rw-r--r--webrtc_examples.gyp2
-rw-r--r--webrtc_tests.gypi12
82 files changed, 2821 insertions, 2003 deletions
diff --git a/base/nat_unittest.cc b/base/nat_unittest.cc
index 8b9d8a15..0e16259a 100644
--- a/base/nat_unittest.cc
+++ b/base/nat_unittest.cc
@@ -209,7 +209,7 @@ void TestPhysicalInternal(const SocketAddress& int_addr) {
// can't talk to ip, so check for connectivity as well.
for (std::vector<Network*>::iterator it = networks.begin();
it != networks.end(); ++it) {
- const IPAddress& ip = (*it)->ip();
+ const IPAddress& ip = (*it)->GetBestIP();
if (ip.family() == int_addr.family() && TestConnectivity(int_addr, ip)) {
ext_addr2.SetIP(ip);
break;
diff --git a/base/network.cc b/base/network.cc
index 8c84c2e3..c1f978b9 100644
--- a/base/network.cc
+++ b/base/network.cc
@@ -88,10 +88,13 @@ bool SortNetworks(const Network* a, const Network* b) {
return a->type() < b->type();
}
+ IPAddress ip_a = a->GetBestIP();
+ IPAddress ip_b = b->GetBestIP();
+
// After type, networks are sorted by IP address precedence values
// from RFC 3484-bis
- if (IPAddressPrecedence(a->ip()) != IPAddressPrecedence(b->ip())) {
- return IPAddressPrecedence(a->ip()) > IPAddressPrecedence(b->ip());
+ if (IPAddressPrecedence(ip_a) != IPAddressPrecedence(ip_b)) {
+ return IPAddressPrecedence(ip_a) > IPAddressPrecedence(ip_b);
}
// TODO(mallinath) - Add VPN and Link speed conditions while sorting.
@@ -474,7 +477,7 @@ bool BasicNetworkManager::CreateNetworks(bool include_ignored,
}
return true;
}
-#endif // WEBRTC_WIN
+#endif // WEBRTC_WIN
#if defined(WEBRTC_LINUX)
bool IsDefaultRoute(const std::string& network_name) {
@@ -636,16 +639,6 @@ Network::Network(const std::string& name, const std::string& desc,
ignored_(false), type_(type), preference_(0) {
}
-std::string Network::ToString() const {
- std::stringstream ss;
- // Print out the first space-terminated token of the network desc, plus
- // the IP address.
- ss << "Net[" << description_.substr(0, description_.find(' '))
- << ":" << prefix_.ToSensitiveString() << "/" << prefix_length_
- << ":" << AdapterTypeToString(type_) << "]";
- return ss.str();
-}
-
// Sets the addresses of this network. Returns true if the address set changed.
// Change detection is short circuited if the changed argument is true.
bool Network::SetIPs(const std::vector<InterfaceAddress>& ips, bool changed) {
@@ -669,14 +662,52 @@ bool Network::SetIPs(const std::vector<InterfaceAddress>& ips, bool changed) {
return changed;
}
-// TODO(guoweis): will change the name to a more meaningful name as
-// this is not simply return the first address once the logic of ipv6
-// address selection is complete.
-IPAddress Network::ip() const {
+// Select the best IP address to use from this Network.
+IPAddress Network::GetBestIP() const {
if (ips_.size() == 0) {
return IPAddress();
}
- return static_cast<IPAddress>(ips_.at(0));
+
+ if (prefix_.family() == AF_INET) {
+ return static_cast<IPAddress>(ips_.at(0));
+ }
+
+ InterfaceAddress selected_ip, ula_ip;
+
+ for (size_t i = 0; i < ips_.size(); i++) {
+ // Ignore any address which has been deprecated already.
+ if (ips_[i].ipv6_flags() & IPV6_ADDRESS_FLAG_DEPRECATED)
+ continue;
+
+ // ULA address should only be returned when we have no other
+ // global IP.
+ if (IPIsULA(static_cast<const IPAddress&>(ips_[i]))) {
+ ula_ip = ips_[i];
+ continue;
+ }
+ selected_ip = ips_[i];
+
+ // Search could stop once a temporary non-deprecated one is found.
+ if (ips_[i].ipv6_flags() & IPV6_ADDRESS_FLAG_TEMPORARY)
+ break;
+ }
+
+ // No proper global IPv6 address found, use ULA instead.
+ if (IPIsUnspec(selected_ip) && !IPIsUnspec(ula_ip)) {
+ selected_ip = ula_ip;
+ }
+
+ return static_cast<IPAddress>(selected_ip);
+}
+
+std::string Network::ToString() const {
+ std::stringstream ss;
+ // Print out the first space-terminated token of the network desc, plus
+ // the IP address.
+ ss << "Net[" << description_.substr(0, description_.find(' '))
+ << ":" << prefix_.ToSensitiveString() << "/" << prefix_length_
+ << ":" << AdapterTypeToString(type_) << "]";
+ return ss.str();
}
} // namespace rtc
diff --git a/base/network.h b/base/network.h
index 4cdd4d8e..6f9d08e7 100644
--- a/base/network.h
+++ b/base/network.h
@@ -188,19 +188,28 @@ class Network {
std::string key() const { return key_; }
// Returns the Network's current idea of the 'best' IP it has.
- // 'Best' currently means the first one added.
- // Returns an unset IP if this network has no active addresses.
- // Here is the rule on how we mark the IPv6 address as ignorable for webrtc.
+ // Or return an unset IP if this network has no active addresses.
+ // Here is the rule on how we mark the IPv6 address as ignorable for WebRTC.
// 1) return all global temporary dynamic and non-deprecrated ones.
- // 2) if #1 not available, return global dynamic ones.
- // 3) if #2 not available, return global ones.
- // 4) if #3 not available, use ULA ipv6 as last resort.
+ // 2) if #1 not available, return global ones.
+ // 3) if #2 not available, use ULA ipv6 as last resort. (ULA stands
+ // for unique local address, which is not route-able in open
+ // internet but might be useful for a closed WebRTC deployment.)
+
+ // TODO(guoweis): rule #3 actually won't happen at current
+ // implementation. The reason being that ULA address starting with
+ // 0xfc or 0xfd will be grouped into its own Network. The result of
+ // that is WebRTC will have one extra Network to generate candidates
+ // but the lack of rule #3 shouldn't prevent turning on IPv6 since
+ // ULA should only be tried in a closed deployment anyway.
+
// Note that when not specifying any flag, it's treated as case global
- // dynamic IPv6 address
- // TODO(guoweis): will change the name to a more meaningful name as
- // this is not simply return the first address once the logic of ipv6
- // address selection is complete.
- IPAddress ip() const;
+ // IPv6 address
+ IPAddress GetBestIP() const;
+
+ // Keep the original function here for now.
+ // TODO(guoweis): Remove this when all callers are migrated to GetBestIP().
+ IPAddress ip() const { return GetBestIP(); }
// Adds an active IP address to this network. Does not check for duplicates.
void AddIP(const InterfaceAddress& ip) { ips_.push_back(ip); }
diff --git a/base/network_unittest.cc b/base/network_unittest.cc
index 8123f8bb..acb118d4 100644
--- a/base/network_unittest.cc
+++ b/base/network_unittest.cc
@@ -114,7 +114,7 @@ TEST_F(NetworkTest, DISABLED_TestCreateNetworks) {
++it) {
sockaddr_storage storage;
memset(&storage, 0, sizeof(storage));
- IPAddress ip = (*it)->ip();
+ IPAddress ip = (*it)->GetBestIP();
SocketAddress bindaddress(ip, 0);
bindaddress.SetScopeID((*it)->scope_id());
// TODO(thaloun): Use rtc::AsyncSocket once it supports IPv6.
@@ -650,4 +650,45 @@ TEST_F(NetworkTest, TestMergeNetworkList) {
EXPECT_EQ(list2[0]->GetIPs()[1], ip2);
}
+// Test that the filtering logic follows the defined ruleset in network.h.
+TEST_F(NetworkTest, TestIPv6Selection) {
+ InterfaceAddress ip;
+ std::string ipstr;
+
+ ipstr = "2401:fa00:4:1000:be30:5bff:fee5:c3";
+ ASSERT_TRUE(IPFromString(ipstr, IPV6_ADDRESS_FLAG_DEPRECATED, &ip));
+
+ // Create a network with this prefix.
+ Network ipv6_network(
+ "test_eth0", "Test NetworkAdapter", TruncateIP(ip, 64), 64);
+
+ // When there is no address added, it should return an unspecified
+ // address.
+ EXPECT_EQ(ipv6_network.GetBestIP(), IPAddress());
+ EXPECT_TRUE(IPIsUnspec(ipv6_network.GetBestIP()));
+
+ // Deprecated one should not be returned.
+ ipv6_network.AddIP(ip);
+ EXPECT_EQ(ipv6_network.GetBestIP(), IPAddress());
+
+ // Add ULA one. ULA is unique local address which is starting either
+ // with 0xfc or 0xfd.
+ ipstr = "fd00:fa00:4:1000:be30:5bff:fee5:c4";
+ ASSERT_TRUE(IPFromString(ipstr, IPV6_ADDRESS_FLAG_NONE, &ip));
+ ipv6_network.AddIP(ip);
+ EXPECT_EQ(ipv6_network.GetBestIP(), static_cast<IPAddress>(ip));
+
+ // Add global one.
+ ipstr = "2401:fa00:4:1000:be30:5bff:fee5:c5";
+ ASSERT_TRUE(IPFromString(ipstr, IPV6_ADDRESS_FLAG_NONE, &ip));
+ ipv6_network.AddIP(ip);
+ EXPECT_EQ(ipv6_network.GetBestIP(), static_cast<IPAddress>(ip));
+
+ // Add global dynamic temporary one.
+ ipstr = "2401:fa00:4:1000:be30:5bff:fee5:c6";
+ ASSERT_TRUE(IPFromString(ipstr, IPV6_ADDRESS_FLAG_TEMPORARY, &ip));
+ ipv6_network.AddIP(ip);
+ EXPECT_EQ(ipv6_network.GetBestIP(), static_cast<IPAddress>(ip));
+}
+
} // namespace rtc
diff --git a/base/thread.cc b/base/thread.cc
index 6da9a7fb..9d2917d9 100644
--- a/base/thread.cc
+++ b/base/thread.cc
@@ -107,7 +107,7 @@ Thread *ThreadManager::WrapCurrentThread() {
Thread* result = CurrentThread();
if (NULL == result) {
result = new Thread();
- result->WrapCurrentWithThreadManager(this);
+ result->WrapCurrentWithThreadManager(this, true);
}
return result;
}
@@ -188,6 +188,7 @@ bool Thread::SetName(const std::string& name, const void* obj) {
bool Thread::SetPriority(ThreadPriority priority) {
#if defined(WEBRTC_WIN)
if (running()) {
+ ASSERT(thread_ != NULL);
BOOL ret = FALSE;
if (priority == PRIORITY_NORMAL) {
ret = ::SetThreadPriority(thread_, THREAD_PRIORITY_NORMAL);
@@ -288,12 +289,35 @@ bool Thread::Start(Runnable* runnable) {
return true;
}
+bool Thread::WrapCurrent() {
+ return WrapCurrentWithThreadManager(ThreadManager::Instance(), true);
+}
+
+void Thread::UnwrapCurrent() {
+ // Clears the platform-specific thread-specific storage.
+ ThreadManager::Instance()->SetCurrentThread(NULL);
+#if defined(WEBRTC_WIN)
+ if (thread_ != NULL) {
+ if (!CloseHandle(thread_)) {
+ LOG_GLE(LS_ERROR) << "When unwrapping thread, failed to close handle.";
+ }
+ thread_ = NULL;
+ }
+#endif
+ running_.Reset();
+}
+
+void Thread::SafeWrapCurrent() {
+ WrapCurrentWithThreadManager(ThreadManager::Instance(), false);
+}
+
void Thread::Join() {
AssertBlockingIsAllowedOnCurrentThread();
if (running()) {
ASSERT(!IsCurrent());
#if defined(WEBRTC_WIN)
+ ASSERT(thread_ != NULL);
WaitForSingleObject(thread_, INFINITE);
CloseHandle(thread_);
thread_ = NULL;
@@ -526,43 +550,32 @@ bool Thread::ProcessMessages(int cmsLoop) {
}
}
-bool Thread::WrapCurrent() {
- return WrapCurrentWithThreadManager(ThreadManager::Instance());
-}
-
-bool Thread::WrapCurrentWithThreadManager(ThreadManager* thread_manager) {
+bool Thread::WrapCurrentWithThreadManager(ThreadManager* thread_manager,
+ bool need_synchronize_access) {
if (running())
return false;
+
#if defined(WEBRTC_WIN)
- // We explicitly ask for no rights other than synchronization.
- // This gives us the best chance of succeeding.
- thread_ = OpenThread(SYNCHRONIZE, FALSE, GetCurrentThreadId());
- if (!thread_) {
- LOG_GLE(LS_ERROR) << "Unable to get handle to thread.";
- return false;
+ if (need_synchronize_access) {
+ // We explicitly ask for no rights other than synchronization.
+ // This gives us the best chance of succeeding.
+ thread_ = OpenThread(SYNCHRONIZE, FALSE, GetCurrentThreadId());
+ if (!thread_) {
+ LOG_GLE(LS_ERROR) << "Unable to get handle to thread.";
+ return false;
+ }
+ thread_id_ = GetCurrentThreadId();
}
- thread_id_ = GetCurrentThreadId();
#elif defined(WEBRTC_POSIX)
thread_ = pthread_self();
#endif
+
owned_ = false;
running_.Set();
thread_manager->SetCurrentThread(this);
return true;
}
-void Thread::UnwrapCurrent() {
- // Clears the platform-specific thread-specific storage.
- ThreadManager::Instance()->SetCurrentThread(NULL);
-#if defined(WEBRTC_WIN)
- if (!CloseHandle(thread_)) {
- LOG_GLE(LS_ERROR) << "When unwrapping thread, failed to close handle.";
- }
-#endif
- running_.Reset();
-}
-
-
AutoThread::AutoThread(SocketServer* ss) : Thread(ss) {
if (!ThreadManager::Instance()->CurrentThread()) {
ThreadManager::Instance()->SetCurrentThread(this);
diff --git a/base/thread.h b/base/thread.h
index 742ba6dc..25b0f569 100644
--- a/base/thread.h
+++ b/base/thread.h
@@ -202,15 +202,6 @@ class Thread : public MessageQueue {
}
#endif
- // This method should be called when thread is created using non standard
- // method, like derived implementation of rtc::Thread and it can not be
- // started by calling Start(). This will set started flag to true and
- // owned to false. This must be called from the current thread.
- // NOTE: These methods should be used by the derived classes only, added here
- // only for testing.
- bool WrapCurrent();
- void UnwrapCurrent();
-
// Expose private method running() for tests.
//
// DANGER: this is a terrible public API. Most callers that might want to
@@ -220,6 +211,18 @@ class Thread : public MessageQueue {
bool RunningForTest() { return running(); }
protected:
+ // This method should be called when thread is created using non standard
+ // method, like derived implementation of rtc::Thread and it can not be
+ // started by calling Start(). This will set started flag to true and
+ // owned to false. This must be called from the current thread.
+ bool WrapCurrent();
+ void UnwrapCurrent();
+
+ // Same as WrapCurrent except that it never fails as it does not try to
+ // acquire the synchronization access of the thread. The caller should never
+ // call Stop() or Join() on this thread.
+ void SafeWrapCurrent();
+
// Blocks the calling thread until this thread has terminated.
void Join();
@@ -237,7 +240,10 @@ class Thread : public MessageQueue {
// ThreadManager calls this instead WrapCurrent() because
// ThreadManager::Instance() cannot be used while ThreadManager is
// being created.
- bool WrapCurrentWithThreadManager(ThreadManager* thread_manager);
+ // The method tries to get synchronization rights of the thread on Windows if
+ // |need_synchronize_access| is true.
+ bool WrapCurrentWithThreadManager(ThreadManager* thread_manager,
+ bool need_synchronize_access);
// Return true if the thread was started and hasn't yet stopped.
bool running() { return running_.Wait(0); }
diff --git a/base/thread_unittest.cc b/base/thread_unittest.cc
index 6a54ac7b..6a687574 100644
--- a/base/thread_unittest.cc
+++ b/base/thread_unittest.cc
@@ -105,6 +105,13 @@ class CustomThread : public rtc::Thread {
CustomThread() {}
virtual ~CustomThread() { Stop(); }
bool Start() { return false; }
+
+ bool WrapCurrent() {
+ return Thread::WrapCurrent();
+ }
+ void UnwrapCurrent() {
+ Thread::UnwrapCurrent();
+ }
};
@@ -240,8 +247,6 @@ TEST(ThreadTest, Priorities) {
}
TEST(ThreadTest, Wrap) {
- Thread* current_thread = Thread::Current();
- current_thread->UnwrapCurrent();
CustomThread* cthread = new CustomThread();
EXPECT_TRUE(cthread->WrapCurrent());
EXPECT_TRUE(cthread->RunningForTest());
@@ -249,7 +254,6 @@ TEST(ThreadTest, Wrap) {
cthread->UnwrapCurrent();
EXPECT_FALSE(cthread->RunningForTest());
delete cthread;
- current_thread->WrapCurrent();
}
TEST(ThreadTest, Invoke) {
diff --git a/build/common.gypi b/build/common.gypi
index 4b17eb4d..92404fde 100644
--- a/build/common.gypi
+++ b/build/common.gypi
@@ -132,11 +132,6 @@
# Exclude internal ADM since Chromium uses its own IO handling.
'include_internal_audio_device%': 0,
- # Exclude internal VCM in Chromium build.
- 'include_internal_video_capture%': 0,
-
- # Exclude internal video render module in Chromium build.
- 'include_internal_video_render%': 0,
}, { # Settings for the standalone (not-in-Chromium) build.
# TODO(andrew): For now, disable the Chrome plugins, which causes a
# flood of chromium-style warnings. Investigate enabling them:
@@ -145,8 +140,6 @@
'include_pulse_audio%': 1,
'include_internal_audio_device%': 1,
- 'include_internal_video_capture%': 1,
- 'include_internal_video_render%': 1,
}],
['build_with_libjingle==1', {
'include_tests%': 0,
diff --git a/build/merge_libs.gyp b/build/merge_libs.gyp
index d257c991..d6610029 100644
--- a/build/merge_libs.gyp
+++ b/build/merge_libs.gyp
@@ -10,9 +10,6 @@
'includes': ['common.gypi',],
'variables': {
'merge_libs_dependencies': [
- '../webrtc.gyp:webrtc',
- '../sound/sound.gyp:rtc_sound',
- '../libjingle/xmllite/xmllite.gyp:rtc_xmllite',
],
},
'targets': [
@@ -21,6 +18,9 @@
'type': 'executable',
'dependencies': [
'<@(merge_libs_dependencies)',
+ '../webrtc.gyp:webrtc',
+ '../sound/sound.gyp:rtc_sound',
+ '../libjingle/xmllite/xmllite.gyp:rtc_xmllite',
],
'sources': ['no_op.cc',],
},
diff --git a/build/merge_libs_voice.gyp b/build/merge_libs_voice.gyp
new file mode 100644
index 00000000..10e30590
--- /dev/null
+++ b/build/merge_libs_voice.gyp
@@ -0,0 +1,48 @@
+# Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+ 'includes': ['common.gypi',],
+ 'variables': {
+ 'merge_libs_dependencies': [
+ ],
+ },
+ 'targets': [
+ {
+ 'target_name': 'no_op_voice',
+ 'type': 'executable',
+ 'dependencies': [
+ '<@(merge_libs_dependencies)',
+ '../voice_engine/voice_engine.gyp:voice_engine'
+ ],
+ 'sources': ['no_op.cc',],
+ },
+ {
+ 'target_name': 'merged_lib_voice',
+ 'type': 'none',
+ 'dependencies': [
+ 'no_op_voice',
+ ],
+ 'actions': [
+ {
+ 'variables': {
+ 'output_lib_name': 'rtc_voice_merged',
+ 'output_lib': '<(PRODUCT_DIR)/<(STATIC_LIB_PREFIX)<(output_lib_name)<(STATIC_LIB_SUFFIX)',
+ },
+ 'action_name': 'merge_libs_voice',
+ 'inputs': ['<(PRODUCT_DIR)/<(EXECUTABLE_PREFIX)no_op_voice<(EXECUTABLE_SUFFIX)'],
+ 'outputs': ['<(output_lib)'],
+ 'action': ['python',
+ 'merge_libs.py',
+ '<(PRODUCT_DIR)',
+ '<(output_lib)',],
+ },
+ ],
+ },
+ ],
+}
diff --git a/build/webrtc.gni b/build/webrtc.gni
index d46b8c64..878b6ff1 100644
--- a/build/webrtc.gni
+++ b/build/webrtc.gni
@@ -74,11 +74,6 @@ declare_args() {
# Exclude internal ADM since Chromium uses its own IO handling.
rtc_include_internal_audio_device = false
- # Exclude internal VCM in Chromium build.
- rtc_include_internal_video_capture = false
-
- # Exclude internal video render module in Chromium build.
- rtc_include_internal_video_render = false
} else {
# Settings for the standalone (not-in-Chromium) build.
@@ -89,8 +84,6 @@ declare_args() {
rtc_include_pulse_audio = true
rtc_include_internal_audio_device = true
- rtc_include_internal_video_capture = true
- rtc_include_internal_video_render = true
}
if (build_with_libjingle) {
diff --git a/common_audio/BUILD.gn b/common_audio/BUILD.gn
index f9bbd6a1..036daa1f 100644
--- a/common_audio/BUILD.gn
+++ b/common_audio/BUILD.gn
@@ -130,7 +130,7 @@ source_set("common_audio") {
}
if (is_win) {
- cflags += [
+ cflags = [
"/wd4334", # Ignore warning on shift operator promotion.
]
}
diff --git a/common_video/interface/i420_video_frame.h b/common_video/interface/i420_video_frame.h
index 5f7a572b..ba23c87a 100644
--- a/common_video/interface/i420_video_frame.h
+++ b/common_video/interface/i420_video_frame.h
@@ -11,149 +11,7 @@
#ifndef COMMON_VIDEO_INTERFACE_I420_VIDEO_FRAME_H
#define COMMON_VIDEO_INTERFACE_I420_VIDEO_FRAME_H
-// I420VideoFrame class
-//
-// Storing and handling of YUV (I420) video frames.
-
-#include <assert.h>
-
-#include "webrtc/common_video/plane.h"
-#include "webrtc/system_wrappers/interface/scoped_refptr.h"
-#include "webrtc/typedefs.h"
-
-/*
- * I420VideoFrame includes support for a reference counted impl.
- */
-
-namespace webrtc {
-
-enum PlaneType {
- kYPlane = 0,
- kUPlane = 1,
- kVPlane = 2,
- kNumOfPlanes = 3
-};
-
-class I420VideoFrame {
- public:
- I420VideoFrame();
- virtual ~I420VideoFrame();
- // Infrastructure for refCount implementation.
- // Implements dummy functions for reference counting so that non reference
- // counted instantiation can be done. These functions should not be called
- // when creating the frame with new I420VideoFrame().
- // Note: do not pass a I420VideoFrame created with new I420VideoFrame() or
- // equivalent to a scoped_refptr or memory leak will occur.
- virtual int32_t AddRef() {assert(false); return -1;}
- virtual int32_t Release() {assert(false); return -1;}
-
- // CreateEmptyFrame: Sets frame dimensions and allocates buffers based
- // on set dimensions - height and plane stride.
- // If required size is bigger than the allocated one, new buffers of adequate
- // size will be allocated.
- // Return value: 0 on success, -1 on error.
- virtual int CreateEmptyFrame(int width, int height,
- int stride_y, int stride_u, int stride_v);
-
- // CreateFrame: Sets the frame's members and buffers. If required size is
- // bigger than allocated one, new buffers of adequate size will be allocated.
- // Return value: 0 on success, -1 on error.
- virtual int CreateFrame(int size_y, const uint8_t* buffer_y,
- int size_u, const uint8_t* buffer_u,
- int size_v, const uint8_t* buffer_v,
- int width, int height,
- int stride_y, int stride_u, int stride_v);
-
- // Copy frame: If required size is bigger than allocated one, new buffers of
- // adequate size will be allocated.
- // Return value: 0 on success, -1 on error.
- virtual int CopyFrame(const I420VideoFrame& videoFrame);
-
- // Make a copy of |this|. The caller owns the returned frame.
- // Return value: a new frame on success, NULL on error.
- virtual I420VideoFrame* CloneFrame() const;
-
- // Swap Frame.
- virtual void SwapFrame(I420VideoFrame* videoFrame);
-
- // Get pointer to buffer per plane.
- virtual uint8_t* buffer(PlaneType type);
- // Overloading with const.
- virtual const uint8_t* buffer(PlaneType type) const;
-
- // Get allocated size per plane.
- virtual int allocated_size(PlaneType type) const;
-
- // Get allocated stride per plane.
- virtual int stride(PlaneType type) const;
-
- // Set frame width.
- virtual int set_width(int width);
-
- // Set frame height.
- virtual int set_height(int height);
-
- // Get frame width.
- virtual int width() const {return width_;}
-
- // Get frame height.
- virtual int height() const {return height_;}
-
- // Set frame timestamp (90kHz).
- virtual void set_timestamp(uint32_t timestamp) {timestamp_ = timestamp;}
-
- // Get frame timestamp (90kHz).
- virtual uint32_t timestamp() const {return timestamp_;}
-
- // Set capture ntp time in miliseconds.
- virtual void set_ntp_time_ms(int64_t ntp_time_ms) {
- ntp_time_ms_ = ntp_time_ms;
- }
-
- // Get capture ntp time in miliseconds.
- virtual int64_t ntp_time_ms() const {return ntp_time_ms_;}
-
- // Set render time in miliseconds.
- virtual void set_render_time_ms(int64_t render_time_ms) {render_time_ms_ =
- render_time_ms;}
-
- // Get render time in miliseconds.
- virtual int64_t render_time_ms() const {return render_time_ms_;}
-
- // Return true if underlying plane buffers are of zero size, false if not.
- virtual bool IsZeroSize() const;
-
- // Reset underlying plane buffers sizes to 0. This function doesn't
- // clear memory.
- virtual void ResetSize();
-
- // Return the handle of the underlying video frame. This is used when the
- // frame is backed by a texture. The object should be destroyed when it is no
- // longer in use, so the underlying resource can be freed.
- virtual void* native_handle() const;
-
- protected:
- // Verifies legality of parameters.
- // Return value: 0 on success, -1 on error.
- virtual int CheckDimensions(int width, int height,
- int stride_y, int stride_u, int stride_v);
-
- private:
- // Get the pointer to a specific plane.
- const Plane* GetPlane(PlaneType type) const;
- // Overloading with non-const.
- Plane* GetPlane(PlaneType type);
-
- Plane y_plane_;
- Plane u_plane_;
- Plane v_plane_;
- int width_;
- int height_;
- uint32_t timestamp_;
- int64_t ntp_time_ms_;
- int64_t render_time_ms_;
-}; // I420VideoFrame
-
-} // namespace webrtc
+// TODO(pbos): Remove this file and include webrtc/video_frame.h instead.
+#include "webrtc/video_frame.h"
#endif // COMMON_VIDEO_INTERFACE_I420_VIDEO_FRAME_H
diff --git a/common_video/interface/video_image.h b/common_video/interface/video_image.h
index c8df436b..4cbf23f1 100644
--- a/common_video/interface/video_image.h
+++ b/common_video/interface/video_image.h
@@ -11,62 +11,7 @@
#ifndef COMMON_VIDEO_INTERFACE_VIDEO_IMAGE_H
#define COMMON_VIDEO_INTERFACE_VIDEO_IMAGE_H
-#include <stdlib.h>
-#include "webrtc/typedefs.h"
-
-namespace webrtc
-{
-
-enum VideoFrameType
-{
- kKeyFrame = 0,
- kDeltaFrame = 1,
- kGoldenFrame = 2,
- kAltRefFrame = 3,
- kSkipFrame = 4
-};
-
-class EncodedImage
-{
-public:
- EncodedImage()
- : _encodedWidth(0),
- _encodedHeight(0),
- _timeStamp(0),
- capture_time_ms_(0),
- _frameType(kDeltaFrame),
- _buffer(NULL),
- _length(0),
- _size(0),
- _completeFrame(false) {}
-
- EncodedImage(uint8_t* buffer,
- uint32_t length,
- uint32_t size)
- : _encodedWidth(0),
- _encodedHeight(0),
- _timeStamp(0),
- ntp_time_ms_(0),
- capture_time_ms_(0),
- _frameType(kDeltaFrame),
- _buffer(buffer),
- _length(length),
- _size(size),
- _completeFrame(false) {}
-
- uint32_t _encodedWidth;
- uint32_t _encodedHeight;
- uint32_t _timeStamp;
- // NTP time of the capture time in local timebase in milliseconds.
- int64_t ntp_time_ms_;
- int64_t capture_time_ms_;
- VideoFrameType _frameType;
- uint8_t* _buffer;
- uint32_t _length;
- uint32_t _size;
- bool _completeFrame;
-};
-
-} // namespace webrtc
+// TODO(pbos): Remove this file and include webrtc/video_frame.h instead.
+#include "webrtc/video_frame.h"
#endif // COMMON_VIDEO_INTERFACE_VIDEO_IMAGE_H
diff --git a/examples/android/media_demo/jni/on_load.cc b/examples/android/media_demo/jni/on_load.cc
index 9fc4ca92..fd771717 100644
--- a/examples/android/media_demo/jni/on_load.cc
+++ b/examples/android/media_demo/jni/on_load.cc
@@ -15,7 +15,8 @@
#include "webrtc/examples/android/media_demo/jni/jni_helpers.h"
#include "webrtc/examples/android/media_demo/jni/video_engine_jni.h"
#include "webrtc/examples/android/media_demo/jni/voice_engine_jni.h"
-#include "webrtc/video_engine/include/vie_base.h"
+#include "webrtc/modules/video_capture/video_capture_internal.h"
+#include "webrtc/modules/video_render/video_render_internal.h"
#include "webrtc/voice_engine/include/voe_base.h"
// Macro for native functions that can be found by way of jni-auto discovery.
@@ -38,8 +39,10 @@ JOWW(void, NativeWebRtcContextRegistry_register)(
jobject context) {
webrtc_examples::SetVoeDeviceObjects(g_vm);
webrtc_examples::SetVieDeviceObjects(g_vm);
- CHECK(webrtc::VideoEngine::SetAndroidObjects(g_vm, context) == 0,
- "Failed to register android objects to video engine");
+ CHECK(webrtc::SetCaptureAndroidVM(g_vm, context) == 0,
+ "Failed to register android objects to video capture");
+ CHECK(webrtc::SetRenderAndroidVM(g_vm) == 0,
+ "Failed to register android objects to video render");
CHECK(webrtc::VoiceEngine::SetAndroidObjects(g_vm, jni, context) == 0,
"Failed to register android objects to voice engine");
}
@@ -47,8 +50,10 @@ JOWW(void, NativeWebRtcContextRegistry_register)(
JOWW(void, NativeWebRtcContextRegistry_unRegister)(
JNIEnv* jni,
jclass) {
- CHECK(webrtc::VideoEngine::SetAndroidObjects(NULL, NULL) == 0,
- "Failed to unregister android objects from video engine");
+ CHECK(webrtc::SetCaptureAndroidVM(NULL, NULL) == 0,
+ "Failed to unregister android objects from video capture");
+ CHECK(webrtc::SetRenderAndroidVM(NULL) == 0,
+ "Failed to unregister android objects from video render");
CHECK(webrtc::VoiceEngine::SetAndroidObjects(NULL, NULL, NULL) == 0,
"Failed to unregister android objects from voice engine");
webrtc_examples::ClearVieDeviceObjects();
diff --git a/modules/audio_coding/neteq/neteq_external_decoder_unittest.cc b/modules/audio_coding/neteq/neteq_external_decoder_unittest.cc
index 9497a85e..77615257 100644
--- a/modules/audio_coding/neteq/neteq_external_decoder_unittest.cc
+++ b/modules/audio_coding/neteq/neteq_external_decoder_unittest.cc
@@ -46,7 +46,7 @@ class NetEqExternalDecoderTest : public ::testing::Test {
frame_size_samples_(frame_size_ms_ * samples_per_ms_),
output_size_samples_(frame_size_ms_ * samples_per_ms_),
external_decoder_(new MockExternalPcm16B(kDecoderPCM16Bswb32kHz)),
- rtp_generator_(samples_per_ms_),
+ rtp_generator_(new test::RtpGenerator(samples_per_ms_)),
payload_size_bytes_(0),
last_send_time_(0),
last_arrival_time_(0) {
@@ -63,7 +63,6 @@ class NetEqExternalDecoderTest : public ::testing::Test {
delete neteq_;
// We will now delete the decoder ourselves, so expecting Die to be called.
EXPECT_CALL(*external_decoder_, Die()).Times(1);
- delete external_decoder_;
delete [] input_;
delete [] encoded_;
}
@@ -78,9 +77,8 @@ class NetEqExternalDecoderTest : public ::testing::Test {
// NetEq is not allowed to delete the external decoder (hence Times(0)).
EXPECT_CALL(*external_decoder_, Die()).Times(0);
ASSERT_EQ(NetEq::kOK,
- neteq_external_->RegisterExternalDecoder(external_decoder_,
- decoder,
- kPayloadType));
+ neteq_external_->RegisterExternalDecoder(
+ external_decoder_.get(), decoder, kPayloadType));
ASSERT_EQ(NetEq::kOK,
neteq_->RegisterPayloadType(decoder, kPayloadType));
}
@@ -96,13 +94,12 @@ class NetEqExternalDecoderTest : public ::testing::Test {
if (frame_size_samples_ * 2 != payload_size_bytes_) {
return -1;
}
- int next_send_time = rtp_generator_.GetRtpHeader(kPayloadType,
- frame_size_samples_,
- &rtp_header_);
+ int next_send_time = rtp_generator_->GetRtpHeader(
+ kPayloadType, frame_size_samples_, &rtp_header_);
return next_send_time;
}
- void VerifyOutput(size_t num_samples) {
+ virtual void VerifyOutput(size_t num_samples) const {
for (size_t i = 0; i < num_samples; ++i) {
ASSERT_EQ(output_[i], output_external_[i]) <<
"Diff in sample " << i << ".";
@@ -118,6 +115,49 @@ class NetEqExternalDecoderTest : public ::testing::Test {
virtual bool Lost() { return false; }
+ virtual void InsertPackets(int next_arrival_time) {
+ // Insert packet in regular instance.
+ ASSERT_EQ(
+ NetEq::kOK,
+ neteq_->InsertPacket(
+ rtp_header_, encoded_, payload_size_bytes_, next_arrival_time));
+ // Insert packet in external decoder instance.
+ EXPECT_CALL(*external_decoder_,
+ IncomingPacket(_,
+ payload_size_bytes_,
+ rtp_header_.header.sequenceNumber,
+ rtp_header_.header.timestamp,
+ next_arrival_time));
+ ASSERT_EQ(
+ NetEq::kOK,
+ neteq_external_->InsertPacket(
+ rtp_header_, encoded_, payload_size_bytes_, next_arrival_time));
+ }
+
+ virtual void GetOutputAudio() {
+ NetEqOutputType output_type;
+ // Get audio from regular instance.
+ int samples_per_channel;
+ int num_channels;
+ EXPECT_EQ(NetEq::kOK,
+ neteq_->GetAudio(kMaxBlockSize,
+ output_,
+ &samples_per_channel,
+ &num_channels,
+ &output_type));
+ EXPECT_EQ(1, num_channels);
+ EXPECT_EQ(output_size_samples_, samples_per_channel);
+ // Get audio from external decoder instance.
+ ASSERT_EQ(NetEq::kOK,
+ neteq_external_->GetAudio(kMaxBlockSize,
+ output_external_,
+ &samples_per_channel,
+ &num_channels,
+ &output_type));
+ EXPECT_EQ(1, num_channels);
+ EXPECT_EQ(output_size_samples_, samples_per_channel);
+ }
+
void RunTest(int num_loops) {
// Get next input packets (mono and multi-channel).
int next_send_time;
@@ -134,21 +174,8 @@ class NetEqExternalDecoderTest : public ::testing::Test {
int time_now = 0;
for (int k = 0; k < num_loops; ++k) {
while (time_now >= next_arrival_time) {
- // Insert packet in regular instance.
- ASSERT_EQ(NetEq::kOK,
- neteq_->InsertPacket(rtp_header_, encoded_,
- payload_size_bytes_,
- next_arrival_time));
- // Insert packet in external decoder instance.
- EXPECT_CALL(*external_decoder_,
- IncomingPacket(_, payload_size_bytes_,
- rtp_header_.header.sequenceNumber,
- rtp_header_.header.timestamp,
- next_arrival_time));
- ASSERT_EQ(NetEq::kOK,
- neteq_external_->InsertPacket(rtp_header_, encoded_,
- payload_size_bytes_,
- next_arrival_time));
+ InsertPackets(next_arrival_time);
+
// Get next input packet.
do {
next_send_time = GetNewPackets();
@@ -156,23 +183,9 @@ class NetEqExternalDecoderTest : public ::testing::Test {
next_arrival_time = GetArrivalTime(next_send_time);
} while (Lost()); // If lost, immediately read the next packet.
}
- NetEqOutputType output_type;
- // Get audio from regular instance.
- int samples_per_channel;
- int num_channels;
- EXPECT_EQ(NetEq::kOK,
- neteq_->GetAudio(kMaxBlockSize, output_,
- &samples_per_channel, &num_channels,
- &output_type));
- EXPECT_EQ(1, num_channels);
- EXPECT_EQ(output_size_samples_, samples_per_channel);
- // Get audio from external decoder instance.
- ASSERT_EQ(NetEq::kOK,
- neteq_external_->GetAudio(kMaxBlockSize, output_external_,
- &samples_per_channel, &num_channels,
- &output_type));
- EXPECT_EQ(1, num_channels);
- EXPECT_EQ(output_size_samples_, samples_per_channel);
+
+ GetOutputAudio();
+
std::ostringstream ss;
ss << "Lap number " << k << ".";
SCOPED_TRACE(ss.str()); // Print out the parameter values on failure.
@@ -190,8 +203,8 @@ class NetEqExternalDecoderTest : public ::testing::Test {
const int output_size_samples_;
NetEq* neteq_external_;
NetEq* neteq_;
- MockExternalPcm16B* external_decoder_;
- test::RtpGenerator rtp_generator_;
+ scoped_ptr<MockExternalPcm16B> external_decoder_;
+ scoped_ptr<test::RtpGenerator> rtp_generator_;
int16_t* input_;
uint8_t* encoded_;
int16_t output_[kMaxBlockSize];
diff --git a/modules/audio_processing/audio_processing_impl.cc b/modules/audio_processing/audio_processing_impl.cc
index 9d7f1ec3..659b794a 100644
--- a/modules/audio_processing/audio_processing_impl.cc
+++ b/modules/audio_processing/audio_processing_impl.cc
@@ -487,6 +487,7 @@ int AudioProcessingImpl::ProcessStreamLocked() {
if (echo_control_mobile_->is_enabled() && noise_suppression_->is_enabled()) {
ca->CopyLowPassToReference();
}
+ RETURN_ON_ERR(noise_suppression_->AnalyzeCaptureAudio(ca));
RETURN_ON_ERR(noise_suppression_->ProcessCaptureAudio(ca));
RETURN_ON_ERR(echo_control_mobile_->ProcessCaptureAudio(ca));
RETURN_ON_ERR(voice_detection_->ProcessCaptureAudio(ca));
diff --git a/modules/audio_processing/noise_suppression_impl.cc b/modules/audio_processing/noise_suppression_impl.cc
index eea0a04a..ab8dadae 100644
--- a/modules/audio_processing/noise_suppression_impl.cc
+++ b/modules/audio_processing/noise_suppression_impl.cc
@@ -55,6 +55,27 @@ NoiseSuppressionImpl::NoiseSuppressionImpl(const AudioProcessing* apm,
NoiseSuppressionImpl::~NoiseSuppressionImpl() {}
+int NoiseSuppressionImpl::AnalyzeCaptureAudio(AudioBuffer* audio) {
+#if defined(WEBRTC_NS_FLOAT)
+ if (!is_component_enabled()) {
+ return apm_->kNoError;
+ }
+ assert(audio->samples_per_split_channel() <= 160);
+ assert(audio->num_channels() == num_handles());
+
+ for (int i = 0; i < num_handles(); ++i) {
+ Handle* my_handle = static_cast<Handle*>(handle(i));
+
+ int err = WebRtcNs_Analyze(my_handle,
+ audio->low_pass_split_data_f(i));
+ if (err != apm_->kNoError) {
+ return GetHandleError(my_handle);
+ }
+ }
+#endif
+ return apm_->kNoError;
+}
+
int NoiseSuppressionImpl::ProcessCaptureAudio(AudioBuffer* audio) {
int err = apm_->kNoError;
@@ -64,16 +85,16 @@ int NoiseSuppressionImpl::ProcessCaptureAudio(AudioBuffer* audio) {
assert(audio->samples_per_split_channel() <= 160);
assert(audio->num_channels() == num_handles());
- for (int i = 0; i < num_handles(); i++) {
+ for (int i = 0; i < num_handles(); ++i) {
Handle* my_handle = static_cast<Handle*>(handle(i));
#if defined(WEBRTC_NS_FLOAT)
- err = WebRtcNs_Process(static_cast<Handle*>(handle(i)),
+ err = WebRtcNs_Process(my_handle,
audio->low_pass_split_data_f(i),
audio->high_pass_split_data_f(i),
audio->low_pass_split_data_f(i),
audio->high_pass_split_data_f(i));
#elif defined(WEBRTC_NS_FIXED)
- err = WebRtcNsx_Process(static_cast<Handle*>(handle(i)),
+ err = WebRtcNsx_Process(my_handle,
audio->low_pass_split_data(i),
audio->high_pass_split_data(i),
audio->low_pass_split_data(i),
diff --git a/modules/audio_processing/noise_suppression_impl.h b/modules/audio_processing/noise_suppression_impl.h
index cadbbd9c..14abe66b 100644
--- a/modules/audio_processing/noise_suppression_impl.h
+++ b/modules/audio_processing/noise_suppression_impl.h
@@ -26,6 +26,7 @@ class NoiseSuppressionImpl : public NoiseSuppression,
CriticalSectionWrapper* crit);
virtual ~NoiseSuppressionImpl();
+ int AnalyzeCaptureAudio(AudioBuffer* audio);
int ProcessCaptureAudio(AudioBuffer* audio);
// NoiseSuppression implementation.
diff --git a/modules/audio_processing/ns/include/noise_suppression.h b/modules/audio_processing/ns/include/noise_suppression.h
index 3cf889e2..093f1185 100644
--- a/modules/audio_processing/ns/include/noise_suppression.h
+++ b/modules/audio_processing/ns/include/noise_suppression.h
@@ -79,6 +79,21 @@ int WebRtcNs_Init(NsHandle* NS_inst, uint32_t fs);
*/
int WebRtcNs_set_policy(NsHandle* NS_inst, int mode);
+/*
+ * This function estimates the background noise for the inserted speech frame.
+ * The input and output signals should always be 10ms (80 or 160 samples).
+ *
+ * Input
+ * - NS_inst : Noise suppression instance.
+ * - spframe : Pointer to speech frame buffer for L band
+ *
+ * Output:
+ * - NS_inst : Updated NS instance
+ *
+ * Return value : 0 - OK
+ * -1 - Error
+ */
+int WebRtcNs_Analyze(NsHandle* NS_inst, float* spframe);
/*
* This functions does Noise Suppression for the inserted speech frame. The
diff --git a/modules/audio_processing/ns/noise_suppression.c b/modules/audio_processing/ns/noise_suppression.c
index 075ab88c..0015e385 100644
--- a/modules/audio_processing/ns/noise_suppression.c
+++ b/modules/audio_processing/ns/noise_suppression.c
@@ -42,6 +42,9 @@ int WebRtcNs_set_policy(NsHandle* NS_inst, int mode) {
return WebRtcNs_set_policy_core((NSinst_t*) NS_inst, mode);
}
+int WebRtcNs_Analyze(NsHandle* NS_inst, float* spframe) {
+ return WebRtcNs_AnalyzeCore((NSinst_t*) NS_inst, spframe);
+}
int WebRtcNs_Process(NsHandle* NS_inst, float* spframe, float* spframe_H,
float* outframe, float* outframe_H) {
diff --git a/modules/audio_processing/ns/ns_core.c b/modules/audio_processing/ns/ns_core.c
index a6a2deab..0d60aafc 100644
--- a/modules/audio_processing/ns/ns_core.c
+++ b/modules/audio_processing/ns/ns_core.c
@@ -714,6 +714,10 @@ void WebRtcNs_SpeechNoiseProb(NSinst_t* inst, float* probSpeechFinal, float* snr
}
}
+int WebRtcNs_AnalyzeCore(NSinst_t* inst, float* inFrame) {
+ return 0;
+}
+
int WebRtcNs_ProcessCore(NSinst_t* inst,
float* speechFrame,
float* speechFrameHB,
diff --git a/modules/audio_processing/ns/ns_core.h b/modules/audio_processing/ns/ns_core.h
index 785239eb..2d4b23cf 100644
--- a/modules/audio_processing/ns/ns_core.h
+++ b/modules/audio_processing/ns/ns_core.h
@@ -147,6 +147,23 @@ int WebRtcNs_InitCore(NSinst_t* inst, uint32_t fs);
int WebRtcNs_set_policy_core(NSinst_t* inst, int mode);
/****************************************************************************
+ * WebRtcNs_AnalyzeCore
+ *
+ * Estimate the background noise.
+ *
+ * Input:
+ * - inst : Noise suppression instance
+ * - inFrame : Input speech frame for lower band
+ *
+ * Output:
+ * - inst : Updated instance
+ *
+ * Return value : 0 - OK
+ * -1 - Error
+ */
+int WebRtcNs_AnalyzeCore(NSinst_t* inst, float* inFrame);
+
+/****************************************************************************
* WebRtcNs_ProcessCore
*
* Do noise suppression.
@@ -164,8 +181,6 @@ int WebRtcNs_set_policy_core(NSinst_t* inst, int mode);
* Return value : 0 - OK
* -1 - Error
*/
-
-
int WebRtcNs_ProcessCore(NSinst_t* inst,
float* inFrameLow,
float* inFrameHigh,
diff --git a/modules/desktop_capture/window_capturer_win.cc b/modules/desktop_capture/window_capturer_win.cc
index ac0ee7a7..3cb10488 100644
--- a/modules/desktop_capture/window_capturer_win.cc
+++ b/modules/desktop_capture/window_capturer_win.cc
@@ -173,10 +173,10 @@ void WindowCapturerWin::Capture(const DesktopRegion& region) {
return;
}
- // Return a 2x2 black frame if the window is minimized. The size is 2x2 so it
- // can be subsampled to I420 downstream.
+ // Return a 1x1 black frame if the window is minimized, to match the behavior
+ // on Mac.
if (IsIconic(window_)) {
- BasicDesktopFrame* frame = new BasicDesktopFrame(DesktopSize(2, 2));
+ BasicDesktopFrame* frame = new BasicDesktopFrame(DesktopSize(1, 1));
memset(frame->data(), 0, frame->stride() * frame->size().height());
previous_size_ = frame->size();
diff --git a/modules/modules.gyp b/modules/modules.gyp
index f5175687..4086299a 100644
--- a/modules/modules.gyp
+++ b/modules/modules.gyp
@@ -217,7 +217,6 @@
'rtp_rtcp/source/rtp_packet_history_unittest.cc',
'rtp_rtcp/source/rtp_payload_registry_unittest.cc',
'rtp_rtcp/source/rtp_rtcp_impl_unittest.cc',
- 'rtp_rtcp/source/rtp_utility_unittest.cc',
'rtp_rtcp/source/rtp_header_extension_unittest.cc',
'rtp_rtcp/source/rtp_sender_unittest.cc',
'rtp_rtcp/source/vp8_partition_aggregator_unittest.cc',
diff --git a/modules/rtp_rtcp/source/rtp_format_vp8.cc b/modules/rtp_rtcp/source/rtp_format_vp8.cc
index e638c361..a3d673b3 100644
--- a/modules/rtp_rtcp/source/rtp_format_vp8.cc
+++ b/modules/rtp_rtcp/source/rtp_format_vp8.cc
@@ -15,10 +15,223 @@
#include <vector>
-#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
#include "webrtc/modules/rtp_rtcp/source/vp8_partition_aggregator.h"
+#include "webrtc/system_wrappers/interface/logging.h"
namespace webrtc {
+namespace {
+struct ParsedPayload {
+ ParsedPayload() : data(NULL), data_length(0) {}
+
+ const uint8_t* data; // Start address of parsed payload data.
+ int data_length; // Length of parsed payload data.
+};
+
+int ParseVP8PictureID(RTPVideoHeaderVP8* vp8,
+ const uint8_t** data,
+ int* data_length,
+ int* parsed_bytes) {
+ assert(vp8 != NULL);
+ if (*data_length <= 0)
+ return -1;
+
+ vp8->pictureId = (**data & 0x7F);
+ if (**data & 0x80) {
+ (*data)++;
+ (*parsed_bytes)++;
+ if (--(*data_length) <= 0)
+ return -1;
+ // PictureId is 15 bits
+ vp8->pictureId = (vp8->pictureId << 8) + **data;
+ }
+ (*data)++;
+ (*parsed_bytes)++;
+ (*data_length)--;
+ return 0;
+}
+
+int ParseVP8Tl0PicIdx(RTPVideoHeaderVP8* vp8,
+ const uint8_t** data,
+ int* data_length,
+ int* parsed_bytes) {
+ assert(vp8 != NULL);
+ if (*data_length <= 0)
+ return -1;
+
+ vp8->tl0PicIdx = **data;
+ (*data)++;
+ (*parsed_bytes)++;
+ (*data_length)--;
+ return 0;
+}
+
+int ParseVP8TIDAndKeyIdx(RTPVideoHeaderVP8* vp8,
+ const uint8_t** data,
+ int* data_length,
+ int* parsed_bytes,
+ bool has_tid,
+ bool has_key_idx) {
+ assert(vp8 != NULL);
+ if (*data_length <= 0)
+ return -1;
+
+ if (has_tid) {
+ vp8->temporalIdx = ((**data >> 6) & 0x03);
+ vp8->layerSync = (**data & 0x20) ? true : false; // Y bit
+ }
+ if (has_key_idx) {
+ vp8->keyIdx = (**data & 0x1F);
+ }
+ (*data)++;
+ (*parsed_bytes)++;
+ (*data_length)--;
+ return 0;
+}
+
+int ParseVP8Extension(RTPVideoHeaderVP8* vp8,
+ const uint8_t* data,
+ int data_length) {
+ assert(vp8 != NULL);
+ int parsed_bytes = 0;
+ if (data_length <= 0)
+ return -1;
+ // Optional X field is present.
+ bool has_picture_id = (*data & 0x80) ? true : false; // I bit
+ bool has_tl0_pic_idx = (*data & 0x40) ? true : false; // L bit
+ bool has_tid = (*data & 0x20) ? true : false; // T bit
+ bool has_key_idx = (*data & 0x10) ? true : false; // K bit
+
+ // Advance data and decrease remaining payload size.
+ data++;
+ parsed_bytes++;
+ data_length--;
+
+ if (has_picture_id) {
+ if (ParseVP8PictureID(vp8, &data, &data_length, &parsed_bytes) != 0) {
+ return -1;
+ }
+ }
+
+ if (has_tl0_pic_idx) {
+ if (ParseVP8Tl0PicIdx(vp8, &data, &data_length, &parsed_bytes) != 0) {
+ return -1;
+ }
+ }
+
+ if (has_tid || has_key_idx) {
+ if (ParseVP8TIDAndKeyIdx(
+ vp8, &data, &data_length, &parsed_bytes, has_tid, has_key_idx) !=
+ 0) {
+ return -1;
+ }
+ }
+ return parsed_bytes;
+}
+
+int ParseVP8FrameSize(WebRtcRTPHeader* rtp_header,
+ const uint8_t* data,
+ int data_length) {
+ assert(rtp_header != NULL);
+ if (rtp_header->frameType != kVideoFrameKey) {
+ // Included in payload header for I-frames.
+ return 0;
+ }
+ if (data_length < 10) {
+ // For an I-frame we should always have the uncompressed VP8 header
+ // in the beginning of the partition.
+ return -1;
+ }
+ rtp_header->type.Video.width = ((data[7] << 8) + data[6]) & 0x3FFF;
+ rtp_header->type.Video.height = ((data[9] << 8) + data[8]) & 0x3FFF;
+ return 0;
+}
+
+//
+// VP8 format:
+//
+// Payload descriptor
+// 0 1 2 3 4 5 6 7
+// +-+-+-+-+-+-+-+-+
+// |X|R|N|S|PartID | (REQUIRED)
+// +-+-+-+-+-+-+-+-+
+// X: |I|L|T|K| RSV | (OPTIONAL)
+// +-+-+-+-+-+-+-+-+
+// I: | PictureID | (OPTIONAL)
+// +-+-+-+-+-+-+-+-+
+// L: | TL0PICIDX | (OPTIONAL)
+// +-+-+-+-+-+-+-+-+
+// T/K: |TID:Y| KEYIDX | (OPTIONAL)
+// +-+-+-+-+-+-+-+-+
+//
+// Payload header (considered part of the actual payload, sent to decoder)
+// 0 1 2 3 4 5 6 7
+// +-+-+-+-+-+-+-+-+
+// |Size0|H| VER |P|
+// +-+-+-+-+-+-+-+-+
+// | ... |
+// + +
+bool ParseVP8(WebRtcRTPHeader* rtp_header,
+ const uint8_t* data,
+ int data_length,
+ ParsedPayload* payload) {
+ assert(rtp_header != NULL);
+ // Parse mandatory first byte of payload descriptor.
+ bool extension = (*data & 0x80) ? true : false; // X bit
+ bool beginning_of_partition = (*data & 0x10) ? true : false; // S bit
+ int partition_id = (*data & 0x0F); // PartID field
+
+ rtp_header->type.Video.isFirstPacket =
+ beginning_of_partition && (partition_id == 0);
+
+ rtp_header->type.Video.codecHeader.VP8.nonReference =
+ (*data & 0x20) ? true : false; // N bit
+ rtp_header->type.Video.codecHeader.VP8.partitionId = partition_id;
+ rtp_header->type.Video.codecHeader.VP8.beginningOfPartition =
+ beginning_of_partition;
+ rtp_header->type.Video.codecHeader.VP8.pictureId = kNoPictureId;
+ rtp_header->type.Video.codecHeader.VP8.tl0PicIdx = kNoTl0PicIdx;
+ rtp_header->type.Video.codecHeader.VP8.temporalIdx = kNoTemporalIdx;
+ rtp_header->type.Video.codecHeader.VP8.layerSync = false;
+ rtp_header->type.Video.codecHeader.VP8.keyIdx = kNoKeyIdx;
+
+ if (partition_id > 8) {
+ // Weak check for corrupt data: PartID MUST NOT be larger than 8.
+ return false;
+ }
+
+ // Advance data and decrease remaining payload size.
+ data++;
+ data_length--;
+
+ if (extension) {
+ const int parsed_bytes = ParseVP8Extension(
+ &rtp_header->type.Video.codecHeader.VP8, data, data_length);
+ if (parsed_bytes < 0)
+ return false;
+ data += parsed_bytes;
+ data_length -= parsed_bytes;
+ }
+
+ if (data_length <= 0) {
+ LOG(LS_ERROR) << "Error parsing VP8 payload descriptor!";
+ return false;
+ }
+
+ // Read P bit from payload header (only at beginning of first partition).
+ if (data_length > 0 && beginning_of_partition && partition_id == 0) {
+ rtp_header->frameType = (*data & 0x01) ? kVideoFrameDelta : kVideoFrameKey;
+ } else {
+ rtp_header->frameType = kVideoFrameDelta;
+ }
+
+ if (0 != ParseVP8FrameSize(rtp_header, data, data_length)) {
+ return false;
+ }
+ payload->data = data;
+ payload->data_length = data_length;
+ return true;
+}
+} // namespace
// Define how the VP8PacketizerModes are implemented.
// Modes are: kStrict, kAggregate, kEqualSize.
@@ -523,48 +736,15 @@ RtpDepacketizerVp8::RtpDepacketizerVp8(RtpData* const callback)
bool RtpDepacketizerVp8::Parse(WebRtcRTPHeader* rtp_header,
const uint8_t* payload_data,
size_t payload_data_length) {
- RtpUtility::RTPPayload parsed_packet;
- RtpUtility::RTPPayloadParser rtp_payload_parser(
- kRtpVideoVp8, payload_data, payload_data_length);
-
- if (!rtp_payload_parser.Parse(parsed_packet))
+ ParsedPayload payload;
+ if (!ParseVP8(rtp_header, payload_data, payload_data_length, &payload))
return false;
- if (parsed_packet.info.VP8.dataLength == 0)
+ if (payload.data_length == 0)
return true;
- rtp_header->frameType = (parsed_packet.frameType == RtpUtility::kIFrame)
- ? kVideoFrameKey
- : kVideoFrameDelta;
-
- RTPVideoHeaderVP8* to_header = &rtp_header->type.Video.codecHeader.VP8;
- RtpUtility::RTPPayloadVP8* from_header = &parsed_packet.info.VP8;
-
- rtp_header->type.Video.isFirstPacket =
- from_header->beginningOfPartition && (from_header->partitionID == 0);
- to_header->nonReference = from_header->nonReferenceFrame;
- to_header->pictureId =
- from_header->hasPictureID ? from_header->pictureID : kNoPictureId;
- to_header->tl0PicIdx =
- from_header->hasTl0PicIdx ? from_header->tl0PicIdx : kNoTl0PicIdx;
- if (from_header->hasTID) {
- to_header->temporalIdx = from_header->tID;
- to_header->layerSync = from_header->layerSync;
- } else {
- to_header->temporalIdx = kNoTemporalIdx;
- to_header->layerSync = false;
- }
- to_header->keyIdx = from_header->hasKeyIdx ? from_header->keyIdx : kNoKeyIdx;
-
- rtp_header->type.Video.width = from_header->frameWidth;
- rtp_header->type.Video.height = from_header->frameHeight;
-
- to_header->partitionId = from_header->partitionID;
- to_header->beginningOfPartition = from_header->beginningOfPartition;
-
- if (callback_->OnReceivedPayloadData(parsed_packet.info.VP8.data,
- parsed_packet.info.VP8.dataLength,
- rtp_header) != 0) {
+ if (callback_->OnReceivedPayloadData(
+ payload.data, payload.data_length, rtp_header) != 0) {
return false;
}
return true;
diff --git a/modules/rtp_rtcp/source/rtp_format_vp8_unittest.cc b/modules/rtp_rtcp/source/rtp_format_vp8_unittest.cc
index c6574e6e..7a4dbcf6 100644
--- a/modules/rtp_rtcp/source/rtp_format_vp8_unittest.cc
+++ b/modules/rtp_rtcp/source/rtp_format_vp8_unittest.cc
@@ -12,17 +12,76 @@
* This file includes unit tests for the VP8 packetizer.
*/
+#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_format_vp8.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_format_vp8_test_helper.h"
#include "webrtc/system_wrappers/interface/compile_assert.h"
#include "webrtc/typedefs.h"
-#define CHECK_ARRAY_SIZE(expected_size, array) \
- COMPILE_ASSERT(expected_size == sizeof(array) / sizeof(array[0]), \
+#define CHECK_ARRAY_SIZE(expected_size, array) \
+ COMPILE_ASSERT(expected_size == sizeof(array) / sizeof(array[0]), \
check_array_size);
+using ::testing::_;
+using ::testing::Args;
+using ::testing::ElementsAreArray;
+using ::testing::Return;
+
namespace webrtc {
+namespace {
+// Payload descriptor
+// 0 1 2 3 4 5 6 7
+// +-+-+-+-+-+-+-+-+
+// |X|R|N|S|PartID | (REQUIRED)
+// +-+-+-+-+-+-+-+-+
+// X: |I|L|T|K| RSV | (OPTIONAL)
+// +-+-+-+-+-+-+-+-+
+// I: | PictureID | (OPTIONAL)
+// +-+-+-+-+-+-+-+-+
+// L: | TL0PICIDX | (OPTIONAL)
+// +-+-+-+-+-+-+-+-+
+// T/K: |TID:Y| KEYIDX | (OPTIONAL)
+// +-+-+-+-+-+-+-+-+
+//
+// Payload header
+// 0 1 2 3 4 5 6 7
+// +-+-+-+-+-+-+-+-+
+// |Size0|H| VER |P|
+// +-+-+-+-+-+-+-+-+
+// | Size1 |
+// +-+-+-+-+-+-+-+-+
+// | Size2 |
+// +-+-+-+-+-+-+-+-+
+// | Bytes 4..N of |
+// | VP8 payload |
+// : :
+// +-+-+-+-+-+-+-+-+
+// | OPTIONAL RTP |
+// | padding |
+// : :
+// +-+-+-+-+-+-+-+-+
+
+void VerifyBasicHeader(WebRtcRTPHeader* header, bool N, bool S, int part_id) {
+ ASSERT_TRUE(header != NULL);
+ EXPECT_EQ(N, header->type.Video.codecHeader.VP8.nonReference);
+ EXPECT_EQ(S, header->type.Video.codecHeader.VP8.beginningOfPartition);
+ EXPECT_EQ(part_id, header->type.Video.codecHeader.VP8.partitionId);
+}
+
+void VerifyExtensions(WebRtcRTPHeader* header,
+ int16_t picture_id, /* I */
+ int16_t tl0_pic_idx, /* L */
+ uint8_t temporal_idx, /* T */
+ int key_idx /* K */) {
+ ASSERT_TRUE(header != NULL);
+ EXPECT_EQ(picture_id, header->type.Video.codecHeader.VP8.pictureId);
+ EXPECT_EQ(tl0_pic_idx, header->type.Video.codecHeader.VP8.tl0PicIdx);
+ EXPECT_EQ(temporal_idx, header->type.Video.codecHeader.VP8.temporalIdx);
+ EXPECT_EQ(key_idx, header->type.Video.codecHeader.VP8.keyIdx);
+}
+} // namespace
class RtpPacketizerVp8Test : public ::testing::Test {
protected:
@@ -35,7 +94,8 @@ class RtpPacketizerVp8Test : public ::testing::Test {
hdr_info_.layerSync = false;
hdr_info_.tl0PicIdx = kNoTl0PicIdx;
hdr_info_.keyIdx = kNoKeyIdx;
- if (helper_ != NULL) return false;
+ if (helper_ != NULL)
+ return false;
helper_ = new test::RtpFormatVp8TestHelper(&hdr_info_);
return helper_->Init(partition_sizes, num_partitions);
}
@@ -59,14 +119,17 @@ TEST_F(RtpPacketizerVp8Test, TestStrictMode) {
// The expected sizes are obtained by running a verified good implementation.
const int kExpectedSizes[] = {9, 9, 12, 11, 11, 11, 10};
const int kExpectedPart[] = {0, 0, 1, 2, 2, 2, 2};
- const bool kExpectedFragStart[] =
- {true, false, true, true, false, false, false};
+ const bool kExpectedFragStart[] = {true, false, true, true,
+ false, false, false};
const int kExpectedNum = sizeof(kExpectedSizes) / sizeof(kExpectedSizes[0]);
CHECK_ARRAY_SIZE(kExpectedNum, kExpectedPart);
CHECK_ARRAY_SIZE(kExpectedNum, kExpectedFragStart);
- helper_->GetAllPacketsAndCheck(&packetizer, kExpectedSizes, kExpectedPart,
- kExpectedFragStart, kExpectedNum);
+ helper_->GetAllPacketsAndCheck(&packetizer,
+ kExpectedSizes,
+ kExpectedPart,
+ kExpectedFragStart,
+ kExpectedNum);
}
TEST_F(RtpPacketizerVp8Test, TestAggregateMode) {
@@ -89,8 +152,11 @@ TEST_F(RtpPacketizerVp8Test, TestAggregateMode) {
CHECK_ARRAY_SIZE(kExpectedNum, kExpectedPart);
CHECK_ARRAY_SIZE(kExpectedNum, kExpectedFragStart);
- helper_->GetAllPacketsAndCheck(&packetizer, kExpectedSizes, kExpectedPart,
- kExpectedFragStart, kExpectedNum);
+ helper_->GetAllPacketsAndCheck(&packetizer,
+ kExpectedSizes,
+ kExpectedPart,
+ kExpectedFragStart,
+ kExpectedNum);
}
TEST_F(RtpPacketizerVp8Test, TestAggregateModeManyPartitions1) {
@@ -113,8 +179,11 @@ TEST_F(RtpPacketizerVp8Test, TestAggregateModeManyPartitions1) {
CHECK_ARRAY_SIZE(kExpectedNum, kExpectedPart);
CHECK_ARRAY_SIZE(kExpectedNum, kExpectedFragStart);
- helper_->GetAllPacketsAndCheck(&packetizer, kExpectedSizes, kExpectedPart,
- kExpectedFragStart, kExpectedNum);
+ helper_->GetAllPacketsAndCheck(&packetizer,
+ kExpectedSizes,
+ kExpectedPart,
+ kExpectedFragStart,
+ kExpectedNum);
}
TEST_F(RtpPacketizerVp8Test, TestAggregateModeManyPartitions2) {
@@ -137,8 +206,11 @@ TEST_F(RtpPacketizerVp8Test, TestAggregateModeManyPartitions2) {
CHECK_ARRAY_SIZE(kExpectedNum, kExpectedPart);
CHECK_ARRAY_SIZE(kExpectedNum, kExpectedFragStart);
- helper_->GetAllPacketsAndCheck(&packetizer, kExpectedSizes, kExpectedPart,
- kExpectedFragStart, kExpectedNum);
+ helper_->GetAllPacketsAndCheck(&packetizer,
+ kExpectedSizes,
+ kExpectedPart,
+ kExpectedFragStart,
+ kExpectedNum);
}
TEST_F(RtpPacketizerVp8Test, TestAggregateModeTwoLargePartitions) {
@@ -161,8 +233,11 @@ TEST_F(RtpPacketizerVp8Test, TestAggregateModeTwoLargePartitions) {
CHECK_ARRAY_SIZE(kExpectedNum, kExpectedPart);
CHECK_ARRAY_SIZE(kExpectedNum, kExpectedFragStart);
- helper_->GetAllPacketsAndCheck(&packetizer, kExpectedSizes, kExpectedPart,
- kExpectedFragStart, kExpectedNum);
+ helper_->GetAllPacketsAndCheck(&packetizer,
+ kExpectedSizes,
+ kExpectedPart,
+ kExpectedFragStart,
+ kExpectedNum);
}
// Verify that EqualSize mode is forced if fragmentation info is missing.
@@ -172,7 +247,7 @@ TEST_F(RtpPacketizerVp8Test, TestEqualSizeModeFallback) {
ASSERT_TRUE(Init(kSizeVector, kNumPartitions));
hdr_info_.pictureId = 200; // > 0x7F should produce 2-byte PictureID
- const int kMaxSize = 12; // Small enough to produce 4 packets.
+ const int kMaxSize = 12; // Small enough to produce 4 packets.
RtpPacketizerVp8 packetizer(hdr_info_, kMaxSize);
packetizer.SetPayloadData(
helper_->payload_data(), helper_->payload_size(), NULL);
@@ -187,8 +262,11 @@ TEST_F(RtpPacketizerVp8Test, TestEqualSizeModeFallback) {
CHECK_ARRAY_SIZE(kExpectedNum, kExpectedFragStart);
helper_->set_sloppy_partitioning(true);
- helper_->GetAllPacketsAndCheck(&packetizer, kExpectedSizes, kExpectedPart,
- kExpectedFragStart, kExpectedNum);
+ helper_->GetAllPacketsAndCheck(&packetizer,
+ kExpectedSizes,
+ kExpectedPart,
+ kExpectedFragStart,
+ kExpectedNum);
}
// Verify that non-reference bit is set. EqualSize mode fallback is expected.
@@ -213,8 +291,11 @@ TEST_F(RtpPacketizerVp8Test, TestNonReferenceBit) {
CHECK_ARRAY_SIZE(kExpectedNum, kExpectedFragStart);
helper_->set_sloppy_partitioning(true);
- helper_->GetAllPacketsAndCheck(&packetizer, kExpectedSizes, kExpectedPart,
- kExpectedFragStart, kExpectedNum);
+ helper_->GetAllPacketsAndCheck(&packetizer,
+ kExpectedSizes,
+ kExpectedPart,
+ kExpectedFragStart,
+ kExpectedNum);
}
// Verify Tl0PicIdx and TID fields, and layerSync bit.
@@ -241,8 +322,11 @@ TEST_F(RtpPacketizerVp8Test, TestTl0PicIdxAndTID) {
CHECK_ARRAY_SIZE(kExpectedNum, kExpectedPart);
CHECK_ARRAY_SIZE(kExpectedNum, kExpectedFragStart);
- helper_->GetAllPacketsAndCheck(&packetizer, kExpectedSizes, kExpectedPart,
- kExpectedFragStart, kExpectedNum);
+ helper_->GetAllPacketsAndCheck(&packetizer,
+ kExpectedSizes,
+ kExpectedPart,
+ kExpectedFragStart,
+ kExpectedNum);
}
// Verify KeyIdx field.
@@ -267,8 +351,11 @@ TEST_F(RtpPacketizerVp8Test, TestKeyIdx) {
CHECK_ARRAY_SIZE(kExpectedNum, kExpectedPart);
CHECK_ARRAY_SIZE(kExpectedNum, kExpectedFragStart);
- helper_->GetAllPacketsAndCheck(&packetizer, kExpectedSizes, kExpectedPart,
- kExpectedFragStart, kExpectedNum);
+ helper_->GetAllPacketsAndCheck(&packetizer,
+ kExpectedSizes,
+ kExpectedPart,
+ kExpectedFragStart,
+ kExpectedNum);
}
// Verify TID field and KeyIdx field in combination.
@@ -294,7 +381,202 @@ TEST_F(RtpPacketizerVp8Test, TestTIDAndKeyIdx) {
CHECK_ARRAY_SIZE(kExpectedNum, kExpectedPart);
CHECK_ARRAY_SIZE(kExpectedNum, kExpectedFragStart);
- helper_->GetAllPacketsAndCheck(&packetizer, kExpectedSizes, kExpectedPart,
- kExpectedFragStart, kExpectedNum);
+ helper_->GetAllPacketsAndCheck(&packetizer,
+ kExpectedSizes,
+ kExpectedPart,
+ kExpectedFragStart,
+ kExpectedNum);
}
-} // namespace
+
+class RtpDepacketizerVp8Test : public ::testing::Test {
+ protected:
+ RtpDepacketizerVp8Test()
+ : callback_(),
+ depacketizer_(RtpDepacketizer::Create(kRtpVideoVp8, &callback_)) {}
+
+ void ExpectPacket(const uint8_t* data, size_t length) {
+ EXPECT_CALL(callback_, OnReceivedPayloadData(_, length, _))
+ .With(Args<0, 1>(ElementsAreArray(data, length)))
+ .Times(1)
+ .WillOnce(Return(0));
+ }
+
+ MockRtpData callback_;
+ scoped_ptr<RtpDepacketizer> depacketizer_;
+};
+
+TEST_F(RtpDepacketizerVp8Test, BasicHeader) {
+ const uint8_t kHeaderLength = 1;
+ uint8_t packet[4] = {0};
+ packet[0] = 0x14; // Binary 0001 0100; S = 1, PartID = 4.
+ packet[1] = 0x01; // P frame.
+
+ WebRtcRTPHeader rtp_header;
+ memset(&rtp_header, 0, sizeof(rtp_header));
+
+ ExpectPacket(packet + kHeaderLength, sizeof(packet) - kHeaderLength);
+ EXPECT_TRUE(depacketizer_->Parse(&rtp_header, packet, sizeof(packet)));
+
+ EXPECT_EQ(kVideoFrameDelta, rtp_header.frameType);
+ VerifyBasicHeader(&rtp_header, 0, 1, 4);
+ VerifyExtensions(
+ &rtp_header, kNoPictureId, kNoTl0PicIdx, kNoTemporalIdx, kNoKeyIdx);
+}
+
+TEST_F(RtpDepacketizerVp8Test, PictureID) {
+ const uint8_t kHeaderLength1 = 3;
+ const uint8_t kHeaderLength2 = 4;
+ const uint8_t kPictureId = 17;
+ uint8_t packet[10] = {0};
+ packet[0] = 0xA0;
+ packet[1] = 0x80;
+ packet[2] = kPictureId;
+
+ WebRtcRTPHeader rtp_header;
+ memset(&rtp_header, 0, sizeof(rtp_header));
+
+ ExpectPacket(packet + kHeaderLength1, sizeof(packet) - kHeaderLength1);
+ EXPECT_TRUE(depacketizer_->Parse(&rtp_header, packet, sizeof(packet)));
+ EXPECT_EQ(kVideoFrameDelta, rtp_header.frameType);
+ VerifyBasicHeader(&rtp_header, 1, 0, 0);
+ VerifyExtensions(
+ &rtp_header, kPictureId, kNoTl0PicIdx, kNoTemporalIdx, kNoKeyIdx);
+
+ // Re-use packet, but change to long PictureID.
+ packet[2] = 0x80 | kPictureId;
+ packet[3] = kPictureId;
+ memset(&rtp_header, 0, sizeof(rtp_header));
+
+ ExpectPacket(packet + kHeaderLength2, sizeof(packet) - kHeaderLength2);
+ EXPECT_TRUE(depacketizer_->Parse(&rtp_header, packet, sizeof(packet)));
+ VerifyBasicHeader(&rtp_header, 1, 0, 0);
+ VerifyExtensions(&rtp_header,
+ (kPictureId << 8) + kPictureId,
+ kNoTl0PicIdx,
+ kNoTemporalIdx,
+ kNoKeyIdx);
+}
+
+TEST_F(RtpDepacketizerVp8Test, Tl0PicIdx) {
+ const uint8_t kHeaderLength = 3;
+ const uint8_t kTl0PicIdx = 17;
+ uint8_t packet[13] = {0};
+ packet[0] = 0x90;
+ packet[1] = 0x40;
+ packet[2] = kTl0PicIdx;
+
+ WebRtcRTPHeader rtp_header;
+ memset(&rtp_header, 0, sizeof(rtp_header));
+
+ ExpectPacket(packet + kHeaderLength, sizeof(packet) - kHeaderLength);
+ EXPECT_TRUE(depacketizer_->Parse(&rtp_header, packet, sizeof(packet)));
+ EXPECT_EQ(kVideoFrameKey, rtp_header.frameType);
+ VerifyBasicHeader(&rtp_header, 0, 1, 0);
+ VerifyExtensions(
+ &rtp_header, kNoPictureId, kTl0PicIdx, kNoTemporalIdx, kNoKeyIdx);
+}
+
+TEST_F(RtpDepacketizerVp8Test, TIDAndLayerSync) {
+ const uint8_t kHeaderLength = 3;
+ uint8_t packet[10] = {0};
+ packet[0] = 0x88;
+ packet[1] = 0x20;
+ packet[2] = 0x80; // TID(2) + LayerSync(false)
+
+ WebRtcRTPHeader rtp_header;
+ memset(&rtp_header, 0, sizeof(rtp_header));
+
+ ExpectPacket(packet + kHeaderLength, sizeof(packet) - kHeaderLength);
+ EXPECT_TRUE(depacketizer_->Parse(&rtp_header, packet, sizeof(packet)));
+ EXPECT_EQ(kVideoFrameDelta, rtp_header.frameType);
+ VerifyBasicHeader(&rtp_header, 0, 0, 8);
+ VerifyExtensions(&rtp_header, kNoPictureId, kNoTl0PicIdx, 2, kNoKeyIdx);
+ EXPECT_FALSE(rtp_header.type.Video.codecHeader.VP8.layerSync);
+}
+
+TEST_F(RtpDepacketizerVp8Test, KeyIdx) {
+ const uint8_t kHeaderLength = 3;
+ const uint8_t kKeyIdx = 17;
+ uint8_t packet[10] = {0};
+ packet[0] = 0x88;
+ packet[1] = 0x10; // K = 1.
+ packet[2] = kKeyIdx;
+
+ WebRtcRTPHeader rtp_header;
+ memset(&rtp_header, 0, sizeof(rtp_header));
+
+ ExpectPacket(packet + kHeaderLength, sizeof(packet) - kHeaderLength);
+ EXPECT_TRUE(depacketizer_->Parse(&rtp_header, packet, sizeof(packet)));
+ EXPECT_EQ(kVideoFrameDelta, rtp_header.frameType);
+ VerifyBasicHeader(&rtp_header, 0, 0, 8);
+ VerifyExtensions(
+ &rtp_header, kNoPictureId, kNoTl0PicIdx, kNoTemporalIdx, kKeyIdx);
+}
+
+TEST_F(RtpDepacketizerVp8Test, MultipleExtensions) {
+ const uint8_t kHeaderLength = 6;
+ uint8_t packet[10] = {0};
+ packet[0] = 0x88;
+ packet[1] = 0x80 | 0x40 | 0x20 | 0x10;
+ packet[2] = 0x80 | 17; // PictureID, high 7 bits.
+ packet[3] = 17; // PictureID, low 8 bits.
+ packet[4] = 42; // Tl0PicIdx.
+ packet[5] = 0x40 | 0x20 | 0x11; // TID(1) + LayerSync(true) + KEYIDX(17).
+
+ WebRtcRTPHeader rtp_header;
+ memset(&rtp_header, 0, sizeof(rtp_header));
+
+ ExpectPacket(packet + kHeaderLength, sizeof(packet) - kHeaderLength);
+ EXPECT_TRUE(depacketizer_->Parse(&rtp_header, packet, sizeof(packet)));
+ EXPECT_EQ(kVideoFrameDelta, rtp_header.frameType);
+ VerifyBasicHeader(&rtp_header, 0, 0, 8);
+ VerifyExtensions(&rtp_header, (17 << 8) + 17, 42, 1, 17);
+}
+
+TEST_F(RtpDepacketizerVp8Test, TooShortHeader) {
+ uint8_t packet[4] = {0};
+ packet[0] = 0x88;
+ packet[1] = 0x80 | 0x40 | 0x20 | 0x10; // All extensions are enabled...
+ packet[2] = 0x80 | 17; // ... but only 2 bytes PictureID is provided.
+ packet[3] = 17; // PictureID, low 8 bits.
+
+ WebRtcRTPHeader rtp_header;
+ memset(&rtp_header, 0, sizeof(rtp_header));
+
+ EXPECT_FALSE(depacketizer_->Parse(&rtp_header, packet, sizeof(packet)));
+}
+
+TEST_F(RtpDepacketizerVp8Test, TestWithPacketizer) {
+ const uint8_t kHeaderLength = 5;
+ uint8_t payload[10] = {0};
+ uint8_t packet[20] = {0};
+ RTPVideoHeaderVP8 input_header;
+ input_header.nonReference = true;
+ input_header.pictureId = 300;
+ input_header.temporalIdx = 1;
+ input_header.layerSync = false;
+ input_header.tl0PicIdx = kNoTl0PicIdx; // Disable.
+ input_header.keyIdx = 31;
+ RtpPacketizerVp8 packetizer(input_header, 20);
+ packetizer.SetPayloadData(payload, 10, NULL);
+ bool last;
+ size_t send_bytes;
+ ASSERT_TRUE(packetizer.NextPacket(packet, &send_bytes, &last));
+ ASSERT_TRUE(last);
+
+ WebRtcRTPHeader rtp_header;
+ memset(&rtp_header, 0, sizeof(rtp_header));
+
+ ExpectPacket(packet + kHeaderLength, sizeof(packet) - kHeaderLength);
+ EXPECT_TRUE(depacketizer_->Parse(&rtp_header, packet, sizeof(packet)));
+ EXPECT_EQ(kVideoFrameKey, rtp_header.frameType);
+ VerifyBasicHeader(&rtp_header, 1, 1, 0);
+ VerifyExtensions(&rtp_header,
+ input_header.pictureId,
+ input_header.tl0PicIdx,
+ input_header.temporalIdx,
+ input_header.keyIdx);
+ EXPECT_EQ(rtp_header.type.Video.codecHeader.VP8.layerSync,
+ input_header.layerSync);
+}
+} // namespace webrtc
diff --git a/modules/rtp_rtcp/source/rtp_receiver_impl.cc b/modules/rtp_rtcp/source/rtp_receiver_impl.cc
index 764e9acf..22fae100 100644
--- a/modules/rtp_rtcp/source/rtp_receiver_impl.cc
+++ b/modules/rtp_rtcp/source/rtp_receiver_impl.cc
@@ -24,7 +24,6 @@ namespace webrtc {
using RtpUtility::GetCurrentRTP;
using RtpUtility::Payload;
-using RtpUtility::RTPPayloadParser;
using RtpUtility::StringCompare;
RtpReceiver* RtpReceiver::CreateVideoReceiver(
diff --git a/modules/rtp_rtcp/source/rtp_utility.cc b/modules/rtp_rtcp/source/rtp_utility.cc
index 95389b46..441a9c50 100644
--- a/modules/rtp_rtcp/source/rtp_utility.cc
+++ b/modules/rtp_rtcp/source/rtp_utility.cc
@@ -188,33 +188,6 @@ uint32_t pow2(uint8_t exp) {
return 1 << exp;
}
-void RTPPayload::SetType(RtpVideoCodecTypes videoType) {
- type = videoType;
-
- switch (type) {
- case kRtpVideoGeneric:
- break;
- case kRtpVideoVp8: {
- info.VP8.nonReferenceFrame = false;
- info.VP8.beginningOfPartition = false;
- info.VP8.partitionID = 0;
- info.VP8.hasPictureID = false;
- info.VP8.hasTl0PicIdx = false;
- info.VP8.hasTID = false;
- info.VP8.hasKeyIdx = false;
- info.VP8.pictureID = -1;
- info.VP8.tl0PicIdx = -1;
- info.VP8.tID = -1;
- info.VP8.layerSync = false;
- info.VP8.frameWidth = 0;
- info.VP8.frameHeight = 0;
- break;
- }
- default:
- break;
- }
-}
-
RtpHeaderParser::RtpHeaderParser(const uint8_t* rtpData,
const size_t rtpDataLength)
: _ptrRTPDataBegin(rtpData),
@@ -565,208 +538,6 @@ uint8_t RtpHeaderParser::ParsePaddingBytes(
}
return num_zero_bytes;
}
-
-RTPPayloadParser::RTPPayloadParser(const RtpVideoCodecTypes videoType,
- const uint8_t* payloadData,
- uint16_t payloadDataLength)
- : _dataPtr(payloadData),
- _dataLength(payloadDataLength),
- _videoType(videoType) {}
-
-RTPPayloadParser::~RTPPayloadParser() {
-}
-
-bool RTPPayloadParser::Parse(RTPPayload& parsedPacket) const {
- parsedPacket.SetType(_videoType);
-
- switch (_videoType) {
- case kRtpVideoGeneric:
- return ParseGeneric(parsedPacket);
- case kRtpVideoVp8:
- return ParseVP8(parsedPacket);
- default:
- return false;
- }
-}
-
-bool RTPPayloadParser::ParseGeneric(RTPPayload& /*parsedPacket*/) const {
- return false;
-}
-
-//
-// VP8 format:
-//
-// Payload descriptor
-// 0 1 2 3 4 5 6 7
-// +-+-+-+-+-+-+-+-+
-// |X|R|N|S|PartID | (REQUIRED)
-// +-+-+-+-+-+-+-+-+
-// X: |I|L|T|K| RSV | (OPTIONAL)
-// +-+-+-+-+-+-+-+-+
-// I: | PictureID | (OPTIONAL)
-// +-+-+-+-+-+-+-+-+
-// L: | TL0PICIDX | (OPTIONAL)
-// +-+-+-+-+-+-+-+-+
-// T/K: |TID:Y| KEYIDX | (OPTIONAL)
-// +-+-+-+-+-+-+-+-+
-//
-// Payload header (considered part of the actual payload, sent to decoder)
-// 0 1 2 3 4 5 6 7
-// +-+-+-+-+-+-+-+-+
-// |Size0|H| VER |P|
-// +-+-+-+-+-+-+-+-+
-// | ... |
-// + +
-
-bool RTPPayloadParser::ParseVP8(RTPPayload& parsedPacket) const {
- RTPPayloadVP8* vp8 = &parsedPacket.info.VP8;
- const uint8_t* dataPtr = _dataPtr;
- int dataLength = _dataLength;
-
- // Parse mandatory first byte of payload descriptor
- bool extension = (*dataPtr & 0x80) ? true : false; // X bit
- vp8->nonReferenceFrame = (*dataPtr & 0x20) ? true : false; // N bit
- vp8->beginningOfPartition = (*dataPtr & 0x10) ? true : false; // S bit
- vp8->partitionID = (*dataPtr & 0x0F); // PartID field
-
- if (vp8->partitionID > 8) {
- // Weak check for corrupt data: PartID MUST NOT be larger than 8.
- return false;
- }
-
- // Advance dataPtr and decrease remaining payload size
- dataPtr++;
- dataLength--;
-
- if (extension) {
- const int parsedBytes = ParseVP8Extension(vp8, dataPtr, dataLength);
- if (parsedBytes < 0) return false;
- dataPtr += parsedBytes;
- dataLength -= parsedBytes;
- }
-
- if (dataLength <= 0) {
- LOG(LS_ERROR) << "Error parsing VP8 payload descriptor!";
- return false;
- }
-
- // Read P bit from payload header (only at beginning of first partition)
- if (dataLength > 0 && vp8->beginningOfPartition && vp8->partitionID == 0) {
- parsedPacket.frameType = (*dataPtr & 0x01) ? kPFrame : kIFrame;
- } else {
- parsedPacket.frameType = kPFrame;
- }
- if (0 != ParseVP8FrameSize(parsedPacket, dataPtr, dataLength)) {
- return false;
- }
- parsedPacket.info.VP8.data = dataPtr;
- parsedPacket.info.VP8.dataLength = dataLength;
- return true;
-}
-
-int RTPPayloadParser::ParseVP8FrameSize(RTPPayload& parsedPacket,
- const uint8_t* dataPtr,
- int dataLength) const {
- if (parsedPacket.frameType != kIFrame) {
- // Included in payload header for I-frames.
- return 0;
- }
- if (dataLength < 10) {
- // For an I-frame we should always have the uncompressed VP8 header
- // in the beginning of the partition.
- return -1;
- }
- RTPPayloadVP8* vp8 = &parsedPacket.info.VP8;
- vp8->frameWidth = ((dataPtr[7] << 8) + dataPtr[6]) & 0x3FFF;
- vp8->frameHeight = ((dataPtr[9] << 8) + dataPtr[8]) & 0x3FFF;
- return 0;
-}
-
-int RTPPayloadParser::ParseVP8Extension(RTPPayloadVP8* vp8,
- const uint8_t* dataPtr,
- int dataLength) const {
- int parsedBytes = 0;
- if (dataLength <= 0) return -1;
- // Optional X field is present
- vp8->hasPictureID = (*dataPtr & 0x80) ? true : false; // I bit
- vp8->hasTl0PicIdx = (*dataPtr & 0x40) ? true : false; // L bit
- vp8->hasTID = (*dataPtr & 0x20) ? true : false; // T bit
- vp8->hasKeyIdx = (*dataPtr & 0x10) ? true : false; // K bit
-
- // Advance dataPtr and decrease remaining payload size
- dataPtr++;
- parsedBytes++;
- dataLength--;
-
- if (vp8->hasPictureID) {
- if (ParseVP8PictureID(vp8, &dataPtr, &dataLength, &parsedBytes) != 0) {
- return -1;
- }
- }
-
- if (vp8->hasTl0PicIdx) {
- if (ParseVP8Tl0PicIdx(vp8, &dataPtr, &dataLength, &parsedBytes) != 0) {
- return -1;
- }
- }
-
- if (vp8->hasTID || vp8->hasKeyIdx) {
- if (ParseVP8TIDAndKeyIdx(vp8, &dataPtr, &dataLength, &parsedBytes) != 0) {
- return -1;
- }
- }
- return parsedBytes;
-}
-
-int RTPPayloadParser::ParseVP8PictureID(RTPPayloadVP8* vp8,
- const uint8_t** dataPtr,
- int* dataLength,
- int* parsedBytes) const {
- if (*dataLength <= 0) return -1;
- vp8->pictureID = (**dataPtr & 0x7F);
- if (**dataPtr & 0x80) {
- (*dataPtr)++;
- (*parsedBytes)++;
- if (--(*dataLength) <= 0) return -1;
- // PictureID is 15 bits
- vp8->pictureID = (vp8->pictureID << 8) +** dataPtr;
- }
- (*dataPtr)++;
- (*parsedBytes)++;
- (*dataLength)--;
- return 0;
-}
-
-int RTPPayloadParser::ParseVP8Tl0PicIdx(RTPPayloadVP8* vp8,
- const uint8_t** dataPtr,
- int* dataLength,
- int* parsedBytes) const {
- if (*dataLength <= 0) return -1;
- vp8->tl0PicIdx = **dataPtr;
- (*dataPtr)++;
- (*parsedBytes)++;
- (*dataLength)--;
- return 0;
-}
-
-int RTPPayloadParser::ParseVP8TIDAndKeyIdx(RTPPayloadVP8* vp8,
- const uint8_t** dataPtr,
- int* dataLength,
- int* parsedBytes) const {
- if (*dataLength <= 0) return -1;
- if (vp8->hasTID) {
- vp8->tID = ((**dataPtr >> 6) & 0x03);
- vp8->layerSync = (**dataPtr & 0x20) ? true : false; // Y bit
- }
- if (vp8->hasKeyIdx) {
- vp8->keyIdx = (**dataPtr & 0x1F);
- }
- (*dataPtr)++;
- (*parsedBytes)++;
- (*dataLength)--;
- return 0;
-}
-
} // namespace RtpUtility
} // namespace webrtc
diff --git a/modules/rtp_rtcp/source/rtp_utility.h b/modules/rtp_rtcp/source/rtp_utility.h
index ef50570d..3a3ad5ee 100644
--- a/modules/rtp_rtcp/source/rtp_utility.h
+++ b/modules/rtp_rtcp/source/rtp_utility.h
@@ -115,97 +115,7 @@ namespace RtpUtility {
const uint8_t* const _ptrRTPDataBegin;
const uint8_t* const _ptrRTPDataEnd;
};
-
- enum FrameTypes
- {
- kIFrame, // key frame
- kPFrame // Delta frame
- };
-
- struct RTPPayloadVP8
- {
- bool nonReferenceFrame;
- bool beginningOfPartition;
- int partitionID;
- bool hasPictureID;
- bool hasTl0PicIdx;
- bool hasTID;
- bool hasKeyIdx;
- int pictureID;
- int tl0PicIdx;
- int tID;
- bool layerSync;
- int keyIdx;
- int frameWidth;
- int frameHeight;
-
- const uint8_t* data;
- uint16_t dataLength;
- };
-
- union RTPPayloadUnion
- {
- RTPPayloadVP8 VP8;
- };
-
- struct RTPPayload
- {
- void SetType(RtpVideoCodecTypes videoType);
-
- RtpVideoCodecTypes type;
- FrameTypes frameType;
- RTPPayloadUnion info;
- };
-
- // RTP payload parser
- class RTPPayloadParser
- {
- public:
- RTPPayloadParser(const RtpVideoCodecTypes payloadType,
- const uint8_t* payloadData,
- // Length w/o padding.
- const uint16_t payloadDataLength);
-
- ~RTPPayloadParser();
-
- bool Parse(RTPPayload& parsedPacket) const;
-
- private:
- bool ParseGeneric(RTPPayload& parsedPacket) const;
-
- bool ParseVP8(RTPPayload& parsedPacket) const;
-
- int ParseVP8Extension(RTPPayloadVP8 *vp8,
- const uint8_t *dataPtr,
- int dataLength) const;
-
- int ParseVP8PictureID(RTPPayloadVP8 *vp8,
- const uint8_t **dataPtr,
- int *dataLength,
- int *parsedBytes) const;
-
- int ParseVP8Tl0PicIdx(RTPPayloadVP8 *vp8,
- const uint8_t **dataPtr,
- int *dataLength,
- int *parsedBytes) const;
-
- int ParseVP8TIDAndKeyIdx(RTPPayloadVP8 *vp8,
- const uint8_t **dataPtr,
- int *dataLength,
- int *parsedBytes) const;
-
- int ParseVP8FrameSize(RTPPayload& parsedPacket,
- const uint8_t *dataPtr,
- int dataLength) const;
-
- private:
- const uint8_t* _dataPtr;
- const uint16_t _dataLength;
- const RtpVideoCodecTypes _videoType;
- };
-
- } // namespace RtpUtility
-
+} // namespace RtpUtility
} // namespace webrtc
#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_UTILITY_H_
diff --git a/modules/rtp_rtcp/source/rtp_utility_unittest.cc b/modules/rtp_rtcp/source/rtp_utility_unittest.cc
deleted file mode 100644
index 82ec953c..00000000
--- a/modules/rtp_rtcp/source/rtp_utility_unittest.cc
+++ /dev/null
@@ -1,288 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * This file conatins unit tests for the RtpUtility.
- */
-
-#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/modules/rtp_rtcp/source/rtp_format_vp8.h"
-#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-using RtpUtility::RTPPayloadParser;
-using RtpUtility::RTPPayload;
-using RtpUtility::RTPPayloadVP8;
-
-// Payload descriptor
-// 0 1 2 3 4 5 6 7
-// +-+-+-+-+-+-+-+-+
-// |X|R|N|S|PartID | (REQUIRED)
-// +-+-+-+-+-+-+-+-+
-// X: |I|L|T|K| RSV | (OPTIONAL)
-// +-+-+-+-+-+-+-+-+
-// I: | PictureID | (OPTIONAL)
-// +-+-+-+-+-+-+-+-+
-// L: | TL0PICIDX | (OPTIONAL)
-// +-+-+-+-+-+-+-+-+
-// T/K: |TID:Y| KEYIDX | (OPTIONAL)
-// +-+-+-+-+-+-+-+-+
-//
-// Payload header
-// 0 1 2 3 4 5 6 7
-// +-+-+-+-+-+-+-+-+
-// |Size0|H| VER |P|
-// +-+-+-+-+-+-+-+-+
-// | Size1 |
-// +-+-+-+-+-+-+-+-+
-// | Size2 |
-// +-+-+-+-+-+-+-+-+
-// | Bytes 4..N of |
-// | VP8 payload |
-// : :
-// +-+-+-+-+-+-+-+-+
-// | OPTIONAL RTP |
-// | padding |
-// : :
-// +-+-+-+-+-+-+-+-+
-
-void VerifyBasicHeader(const RTPPayloadVP8 &header,
- bool N, bool S, int PartID) {
- EXPECT_EQ(N, header.nonReferenceFrame);
- EXPECT_EQ(S, header.beginningOfPartition);
- EXPECT_EQ(PartID, header.partitionID);
-}
-
-void VerifyExtensions(const RTPPayloadVP8 &header,
- bool I, bool L, bool T, bool K) {
- EXPECT_EQ(I, header.hasPictureID);
- EXPECT_EQ(L, header.hasTl0PicIdx);
- EXPECT_EQ(T, header.hasTID);
- EXPECT_EQ(K, header.hasKeyIdx);
-}
-
-TEST(ParseVP8Test, BasicHeader) {
- uint8_t payload[4] = {0};
- payload[0] = 0x14; // Binary 0001 0100; S = 1, PartID = 4.
- payload[1] = 0x01; // P frame.
-
- RTPPayloadParser rtpPayloadParser(kRtpVideoVp8, payload, 4);
-
- RTPPayload parsedPacket;
- ASSERT_TRUE(rtpPayloadParser.Parse(parsedPacket));
-
- EXPECT_EQ(RtpUtility::kPFrame, parsedPacket.frameType);
- EXPECT_EQ(kRtpVideoVp8, parsedPacket.type);
-
- VerifyBasicHeader(parsedPacket.info.VP8, 0 /*N*/, 1 /*S*/, 4 /*PartID*/);
- VerifyExtensions(parsedPacket.info.VP8, 0 /*I*/, 0 /*L*/, 0 /*T*/, 0 /*K*/);
-
- EXPECT_EQ(payload + 1, parsedPacket.info.VP8.data);
- EXPECT_EQ(4 - 1, parsedPacket.info.VP8.dataLength);
-}
-
-TEST(ParseVP8Test, PictureID) {
- uint8_t payload[10] = {0};
- payload[0] = 0xA0;
- payload[1] = 0x80;
- payload[2] = 17;
-
- RTPPayloadParser rtpPayloadParser(kRtpVideoVp8, payload, 10);
-
- RTPPayload parsedPacket;
- ASSERT_TRUE(rtpPayloadParser.Parse(parsedPacket));
-
- EXPECT_EQ(RtpUtility::kPFrame, parsedPacket.frameType);
- EXPECT_EQ(kRtpVideoVp8, parsedPacket.type);
-
- VerifyBasicHeader(parsedPacket.info.VP8, 1 /*N*/, 0 /*S*/, 0 /*PartID*/);
- VerifyExtensions(parsedPacket.info.VP8, 1 /*I*/, 0 /*L*/, 0 /*T*/, 0 /*K*/);
-
- EXPECT_EQ(17, parsedPacket.info.VP8.pictureID);
-
- EXPECT_EQ(payload + 3, parsedPacket.info.VP8.data);
- EXPECT_EQ(10 - 3, parsedPacket.info.VP8.dataLength);
-
-
- // Re-use payload, but change to long PictureID.
- payload[2] = 0x80 | 17;
- payload[3] = 17;
- RTPPayloadParser rtpPayloadParser2(kRtpVideoVp8, payload, 10);
-
- ASSERT_TRUE(rtpPayloadParser2.Parse(parsedPacket));
-
- VerifyBasicHeader(parsedPacket.info.VP8, 1 /*N*/, 0 /*S*/, 0 /*PartID*/);
- VerifyExtensions(parsedPacket.info.VP8, 1 /*I*/, 0 /*L*/, 0 /*T*/, 0 /*K*/);
-
- EXPECT_EQ((17<<8) + 17, parsedPacket.info.VP8.pictureID);
-
- EXPECT_EQ(payload + 4, parsedPacket.info.VP8.data);
- EXPECT_EQ(10 - 4, parsedPacket.info.VP8.dataLength);
-}
-
-TEST(ParseVP8Test, Tl0PicIdx) {
- uint8_t payload[13] = {0};
- payload[0] = 0x90;
- payload[1] = 0x40;
- payload[2] = 17;
-
- RTPPayloadParser rtpPayloadParser(kRtpVideoVp8, payload, 13);
-
- RTPPayload parsedPacket;
- ASSERT_TRUE(rtpPayloadParser.Parse(parsedPacket));
-
- EXPECT_EQ(RtpUtility::kIFrame, parsedPacket.frameType);
- EXPECT_EQ(kRtpVideoVp8, parsedPacket.type);
-
- VerifyBasicHeader(parsedPacket.info.VP8, 0 /*N*/, 1 /*S*/, 0 /*PartID*/);
- VerifyExtensions(parsedPacket.info.VP8, 0 /*I*/, 1 /*L*/, 0 /*T*/, 0 /*K*/);
-
- EXPECT_EQ(17, parsedPacket.info.VP8.tl0PicIdx);
-
- EXPECT_EQ(payload + 3, parsedPacket.info.VP8.data);
- EXPECT_EQ(13 - 3, parsedPacket.info.VP8.dataLength);
-}
-
-TEST(ParseVP8Test, TIDAndLayerSync) {
- uint8_t payload[10] = {0};
- payload[0] = 0x88;
- payload[1] = 0x20;
- payload[2] = 0x80; // TID(2) + LayerSync(false)
-
- RTPPayloadParser rtpPayloadParser(kRtpVideoVp8, payload, 10);
-
- RTPPayload parsedPacket;
- ASSERT_TRUE(rtpPayloadParser.Parse(parsedPacket));
-
- EXPECT_EQ(RtpUtility::kPFrame, parsedPacket.frameType);
- EXPECT_EQ(kRtpVideoVp8, parsedPacket.type);
-
- VerifyBasicHeader(parsedPacket.info.VP8, 0 /*N*/, 0 /*S*/, 8 /*PartID*/);
- VerifyExtensions(parsedPacket.info.VP8, 0 /*I*/, 0 /*L*/, 1 /*T*/, 0 /*K*/);
-
- EXPECT_EQ(2, parsedPacket.info.VP8.tID);
- EXPECT_FALSE(parsedPacket.info.VP8.layerSync);
-
- EXPECT_EQ(payload + 3, parsedPacket.info.VP8.data);
- EXPECT_EQ(10 - 3, parsedPacket.info.VP8.dataLength);
-}
-
-TEST(ParseVP8Test, KeyIdx) {
- uint8_t payload[10] = {0};
- payload[0] = 0x88;
- payload[1] = 0x10; // K = 1.
- payload[2] = 0x11; // KEYIDX = 17 decimal.
-
- RTPPayloadParser rtpPayloadParser(kRtpVideoVp8, payload, 10);
-
- RTPPayload parsedPacket;
- ASSERT_TRUE(rtpPayloadParser.Parse(parsedPacket));
-
- EXPECT_EQ(RtpUtility::kPFrame, parsedPacket.frameType);
- EXPECT_EQ(kRtpVideoVp8, parsedPacket.type);
-
- VerifyBasicHeader(parsedPacket.info.VP8, 0 /*N*/, 0 /*S*/, 8 /*PartID*/);
- VerifyExtensions(parsedPacket.info.VP8, 0 /*I*/, 0 /*L*/, 0 /*T*/, 1 /*K*/);
-
- EXPECT_EQ(17, parsedPacket.info.VP8.keyIdx);
-
- EXPECT_EQ(payload + 3, parsedPacket.info.VP8.data);
- EXPECT_EQ(10 - 3, parsedPacket.info.VP8.dataLength);
-}
-
-TEST(ParseVP8Test, MultipleExtensions) {
- uint8_t payload[10] = {0};
- payload[0] = 0x88;
- payload[1] = 0x80 | 0x40 | 0x20 | 0x10;
- payload[2] = 0x80 | 17; // PictureID, high 7 bits.
- payload[3] = 17; // PictureID, low 8 bits.
- payload[4] = 42; // Tl0PicIdx.
- payload[5] = 0x40 | 0x20 | 0x11; // TID(1) + LayerSync(true) + KEYIDX(17).
-
- RTPPayloadParser rtpPayloadParser(kRtpVideoVp8, payload, 10);
-
- RTPPayload parsedPacket;
- ASSERT_TRUE(rtpPayloadParser.Parse(parsedPacket));
-
- EXPECT_EQ(RtpUtility::kPFrame, parsedPacket.frameType);
- EXPECT_EQ(kRtpVideoVp8, parsedPacket.type);
-
- VerifyBasicHeader(parsedPacket.info.VP8, 0 /*N*/, 0 /*S*/, 8 /*PartID*/);
- VerifyExtensions(parsedPacket.info.VP8, 1 /*I*/, 1 /*L*/, 1 /*T*/, 1 /*K*/);
-
- EXPECT_EQ((17<<8) + 17, parsedPacket.info.VP8.pictureID);
- EXPECT_EQ(42, parsedPacket.info.VP8.tl0PicIdx);
- EXPECT_EQ(1, parsedPacket.info.VP8.tID);
- EXPECT_EQ(17, parsedPacket.info.VP8.keyIdx);
-
- EXPECT_EQ(payload + 6, parsedPacket.info.VP8.data);
- EXPECT_EQ(10 - 6, parsedPacket.info.VP8.dataLength);
-}
-
-TEST(ParseVP8Test, TooShortHeader) {
- uint8_t payload[4] = {0};
- payload[0] = 0x88;
- payload[1] = 0x80 | 0x40 | 0x20 | 0x10; // All extensions are enabled...
- payload[2] = 0x80 | 17; // ... but only 2 bytes PictureID is provided.
- payload[3] = 17; // PictureID, low 8 bits.
-
- RTPPayloadParser rtpPayloadParser(kRtpVideoVp8, payload, 4);
-
- RTPPayload parsedPacket;
- EXPECT_FALSE(rtpPayloadParser.Parse(parsedPacket));
-}
-
-TEST(ParseVP8Test, TestWithPacketizer) {
- uint8_t payload[10] = {0};
- uint8_t packet[20] = {0};
- RTPVideoHeaderVP8 inputHeader;
- inputHeader.nonReference = true;
- inputHeader.pictureId = 300;
- inputHeader.temporalIdx = 1;
- inputHeader.layerSync = false;
- inputHeader.tl0PicIdx = kNoTl0PicIdx; // Disable.
- inputHeader.keyIdx = 31;
- RtpPacketizerVp8 packetizer(inputHeader, 20);
- packetizer.SetPayloadData(payload, 10, NULL);
- bool last;
- size_t send_bytes;
- ASSERT_TRUE(packetizer.NextPacket(packet, &send_bytes, &last));
- ASSERT_TRUE(last);
-
- RTPPayloadParser rtpPayloadParser(kRtpVideoVp8, packet, send_bytes);
-
- RTPPayload parsedPacket;
- ASSERT_TRUE(rtpPayloadParser.Parse(parsedPacket));
-
- EXPECT_EQ(RtpUtility::kIFrame, parsedPacket.frameType);
- EXPECT_EQ(kRtpVideoVp8, parsedPacket.type);
-
- VerifyBasicHeader(parsedPacket.info.VP8,
- inputHeader.nonReference /*N*/,
- 1 /*S*/,
- 0 /*PartID*/);
- VerifyExtensions(parsedPacket.info.VP8,
- 1 /*I*/,
- 0 /*L*/,
- 1 /*T*/,
- 1 /*K*/);
-
- EXPECT_EQ(inputHeader.pictureId, parsedPacket.info.VP8.pictureID);
- EXPECT_EQ(inputHeader.temporalIdx, parsedPacket.info.VP8.tID);
- EXPECT_EQ(inputHeader.layerSync, parsedPacket.info.VP8.layerSync);
- EXPECT_EQ(inputHeader.keyIdx, parsedPacket.info.VP8.keyIdx);
-
- EXPECT_EQ(packet + 5, parsedPacket.info.VP8.data);
- EXPECT_EQ(send_bytes - 5, parsedPacket.info.VP8.dataLength);
-}
-
-} // namespace
diff --git a/modules/rtp_rtcp/test/BWEStandAlone/BWEStandAlone.cc b/modules/rtp_rtcp/test/BWEStandAlone/BWEStandAlone.cc
index 71809755..9e706e7d 100644
--- a/modules/rtp_rtcp/test/BWEStandAlone/BWEStandAlone.cc
+++ b/modules/rtp_rtcp/test/BWEStandAlone/BWEStandAlone.cc
@@ -33,14 +33,14 @@ public:
protected:
// Inherited from UdpTransportData
virtual void IncomingRTPPacket(const int8_t* incomingRtpPacket,
- const int32_t rtpPacketLength,
- const int8_t* fromIP,
- const uint16_t fromPort);
+ const int32_t rtpPacketLength,
+ const int8_t* fromIP,
+ const uint16_t fromPort) OVERRIDE;
virtual void IncomingRTCPPacket(const int8_t* incomingRtcpPacket,
- const int32_t rtcpPacketLength,
- const int8_t* fromIP,
- const uint16_t fromPort);
+ const int32_t rtcpPacketLength,
+ const int8_t* fromIP,
+ const uint16_t fromPort) OVERRIDE;
private:
RtpRtcp *_rtpMod;
diff --git a/modules/rtp_rtcp/test/BWEStandAlone/TestSenderReceiver.h b/modules/rtp_rtcp/test/BWEStandAlone/TestSenderReceiver.h
index 6edb380e..ade827e2 100644
--- a/modules/rtp_rtcp/test/BWEStandAlone/TestSenderReceiver.h
+++ b/modules/rtp_rtcp/test/BWEStandAlone/TestSenderReceiver.h
@@ -71,12 +71,15 @@ public:
bool timeOutTriggered () { return (_timeOut); };
// Inherited from RtpFeedback
- virtual int32_t OnInitializeDecoder(const int32_t id,
- const int8_t payloadType,
- const int8_t payloadName[RTP_PAYLOAD_NAME_SIZE],
- const uint32_t frequency,
- const uint8_t channels,
- const uint32_t rate) { return(0);};
+ virtual int32_t OnInitializeDecoder(
+ const int32_t id,
+ const int8_t payloadType,
+ const int8_t payloadName[RTP_PAYLOAD_NAME_SIZE],
+ const uint32_t frequency,
+ const uint8_t channels,
+ const uint32_t rate) OVERRIDE {
+ return 0;
+ }
virtual void OnPacketTimeout(const int32_t id);
@@ -86,31 +89,31 @@ public:
virtual void OnPeriodicDeadOrAlive(const int32_t id,
const RTPAliveType alive) {};
- virtual void OnIncomingSSRCChanged( const int32_t id,
- const uint32_t SSRC) {};
+ virtual void OnIncomingSSRCChanged(const int32_t id,
+ const uint32_t SSRC) OVERRIDE {}
- virtual void OnIncomingCSRCChanged( const int32_t id,
- const uint32_t CSRC,
- const bool added) {};
+ virtual void OnIncomingCSRCChanged(const int32_t id,
+ const uint32_t CSRC,
+ const bool added) OVERRIDE {}
// Inherited from RtpData
-
- virtual int32_t OnReceivedPayloadData(const uint8_t* payloadData,
- const uint16_t payloadSize,
- const webrtc::WebRtcRTPHeader* rtpHeader);
+ virtual int32_t OnReceivedPayloadData(
+ const uint8_t* payloadData,
+ const uint16_t payloadSize,
+ const webrtc::WebRtcRTPHeader* rtpHeader) OVERRIDE;
// Inherited from UdpTransportData
virtual void IncomingRTPPacket(const int8_t* incomingRtpPacket,
- const int32_t rtpPacketLength,
- const int8_t* fromIP,
- const uint16_t fromPort);
+ const int32_t rtpPacketLength,
+ const int8_t* fromIP,
+ const uint16_t fromPort) OVERRIDE;
virtual void IncomingRTCPPacket(const int8_t* incomingRtcpPacket,
- const int32_t rtcpPacketLength,
- const int8_t* fromIP,
- const uint16_t fromPort);
+ const int32_t rtcpPacketLength,
+ const int8_t* fromIP,
+ const uint16_t fromPort) OVERRIDE;
diff --git a/modules/video_capture/BUILD.gn b/modules/video_capture/BUILD.gn
index 7d6ddc0e..773708bc 100644
--- a/modules/video_capture/BUILD.gn
+++ b/modules/video_capture/BUILD.gn
@@ -8,16 +8,10 @@
import("../../build/webrtc.gni")
-config("video_capture_config") {
- if (is_ios) {
- libs = [
- "AVFoundation.framework",
- "CoreMedia.framework",
- "CoreVideo.framework",
- ]
- }
-}
-
+# Note this target is missing an implementation for the video capture.
+# Targets must link with either 'video_capture_impl' or
+# 'video_capture_internal_impl' depending on whether they want to
+# use the internal capturer.
source_set("video_capture") {
sources = [
"device_info_impl.cc",
@@ -32,108 +26,140 @@ source_set("video_capture") {
"video_capture_impl.h",
]
- libs = []
- deps = []
-
- if (rtc_include_internal_video_capture) {
- if (is_linux) {
- sources += [
- "linux/device_info_linux.cc",
- "linux/device_info_linux.h",
- "linux/video_capture_linux.cc",
- "linux/video_capture_linux.h",
- ]
- }
- if (is_mac) {
- sources += [
- "mac/qtkit/video_capture_qtkit.h",
- "mac/qtkit/video_capture_qtkit.mm",
- "mac/qtkit/video_capture_qtkit_info.h",
- "mac/qtkit/video_capture_qtkit_info.mm",
- "mac/qtkit/video_capture_qtkit_info_objc.h",
- "mac/qtkit/video_capture_qtkit_info_objc.mm",
- "mac/qtkit/video_capture_qtkit_objc.h",
- "mac/qtkit/video_capture_qtkit_objc.mm",
- "mac/qtkit/video_capture_qtkit_utility.h",
- "mac/video_capture_mac.mm",
- ]
-
- libs += [
- "CoreVideo.framework",
- "QTKit.framework",
- ]
- }
- if (is_win) {
- sources += [
- "windows/device_info_ds.cc",
- "windows/device_info_ds.h",
- "windows/device_info_mf.cc",
- "windows/device_info_mf.h",
- "windows/help_functions_ds.cc",
- "windows/help_functions_ds.h",
- "windows/sink_filter_ds.cc",
- "windows/sink_filter_ds.h",
- "windows/video_capture_ds.cc",
- "windows/video_capture_ds.h",
- "windows/video_capture_factory_windows.cc",
- "windows/video_capture_mf.cc",
- "windows/video_capture_mf.h",
- ]
-
- libs += [ "Strmiids.lib" ]
- deps += [ "//third_party/winsdk_samples"]
- }
- if (is_android) {
- sources += [
- "android/device_info_android.cc",
- "android/device_info_android.h",
- "android/video_capture_android.cc",
- "android/video_capture_android.h",
- ]
-
- deps += [
- "//third_party/icu:icuuc",
- "//third_party/jsoncpp",
- ]
- }
- if (is_ios) {
- sources += [
- "ios/device_info_ios.h",
- "ios/device_info_ios.mm",
- "ios/device_info_ios_objc.h",
- "ios/device_info_ios_objc.mm",
- "ios/rtc_video_capture_ios_objc.h",
- "ios/rtc_video_capture_ios_objc.mm",
- "ios/video_capture_ios.h",
- "ios/video_capture_ios.mm",
- ]
-
- cflags += [
- "-fobjc-arc", # CLANG_ENABLE_OBJC_ARC = YES.
- # To avoid warnings for deprecated videoMinFrameDuration and
- # videoMaxFrameDuration properties in iOS 7.0.
- # See webrtc:3705 for more details.
- "-Wno-deprecated-declarations",
- ]
- }
- } else {
- sources += [
- "external/device_info_external.cc",
- "external/video_capture_external.cc",
- ]
+ deps = [
+ "../../common_video",
+ "../../system_wrappers",
+ "../utility",
+ ]
+
+ if (is_clang) {
+ # Suppress warnings from Chrome's Clang plugins.
+ # See http://code.google.com/p/webrtc/issues/detail?id=163 for details.
+ configs -= [ "//build/config/clang:find_bad_constructs" ]
}
+}
+
+source_set("video_capture_impl") {
+ sources = [
+ "external/device_info_external.cc",
+ "external/video_capture_external.cc",
+ ]
- all_dependent_configs = [ ":video_capture_config"]
+ deps = [
+ ":video_capture",
+ "../../system_wrappers",
+ ]
if (is_clang) {
# Suppress warnings from Chrome's Clang plugins.
# See http://code.google.com/p/webrtc/issues/detail?id=163 for details.
configs -= [ "//build/config/clang:find_bad_constructs" ]
}
+}
- deps += [
- "../../common_video",
+config("video_capture_internal_impl_config") {
+ if (is_ios) {
+ libs = [
+ "AVFoundation.framework",
+ "CoreMedia.framework",
+ "CoreVideo.framework",
+ ]
+ }
+}
+
+source_set("video_capture_internal_impl") {
+ deps = [
+ ":video_capture",
"../../system_wrappers",
- "../utility",
]
+
+ if (is_linux) {
+ sources = [
+ "linux/device_info_linux.cc",
+ "linux/device_info_linux.h",
+ "linux/video_capture_linux.cc",
+ "linux/video_capture_linux.h",
+ ]
+ }
+ if (is_mac) {
+ sources = [
+ "mac/qtkit/video_capture_qtkit.h",
+ "mac/qtkit/video_capture_qtkit.mm",
+ "mac/qtkit/video_capture_qtkit_info.h",
+ "mac/qtkit/video_capture_qtkit_info.mm",
+ "mac/qtkit/video_capture_qtkit_info_objc.h",
+ "mac/qtkit/video_capture_qtkit_info_objc.mm",
+ "mac/qtkit/video_capture_qtkit_objc.h",
+ "mac/qtkit/video_capture_qtkit_objc.mm",
+ "mac/qtkit/video_capture_qtkit_utility.h",
+ "mac/video_capture_mac.mm",
+ ]
+
+ libs = [
+ "CoreVideo.framework",
+ "QTKit.framework",
+ ]
+ }
+ if (is_win) {
+ sources = [
+ "windows/device_info_ds.cc",
+ "windows/device_info_ds.h",
+ "windows/device_info_mf.cc",
+ "windows/device_info_mf.h",
+ "windows/help_functions_ds.cc",
+ "windows/help_functions_ds.h",
+ "windows/sink_filter_ds.cc",
+ "windows/sink_filter_ds.h",
+ "windows/video_capture_ds.cc",
+ "windows/video_capture_ds.h",
+ "windows/video_capture_factory_windows.cc",
+ "windows/video_capture_mf.cc",
+ "windows/video_capture_mf.h",
+ ]
+
+ libs = [ "Strmiids.lib" ]
+
+ deps += [ "//third_party/winsdk_samples" ]
+ }
+ if (is_android) {
+ sources = [
+ "android/device_info_android.cc",
+ "android/device_info_android.h",
+ "android/video_capture_android.cc",
+ "android/video_capture_android.h",
+ ]
+
+ deps += [
+ "//third_party/icu:icuuc",
+ "//third_party/jsoncpp",
+ ]
+ }
+ if (is_ios) {
+ sources = [
+ "ios/device_info_ios.h",
+ "ios/device_info_ios.mm",
+ "ios/device_info_ios_objc.h",
+ "ios/device_info_ios_objc.mm",
+ "ios/rtc_video_capture_ios_objc.h",
+ "ios/rtc_video_capture_ios_objc.mm",
+ "ios/video_capture_ios.h",
+ "ios/video_capture_ios.mm",
+ ]
+
+ cflags = [
+ "-fobjc-arc", # CLANG_ENABLE_OBJC_ARC = YES.
+ # To avoid warnings for deprecated videoMinFrameDuration and
+ # videoMaxFrameDuration properties in iOS 7.0.
+ # See webrtc:3705 for more details.
+ "-Wno-deprecated-declarations",
+ ]
+ }
+
+ all_dependent_configs = [ ":video_capture_internal_impl_config" ]
+
+ if (is_clang) {
+ # Suppress warnings from Chrome's Clang plugins.
+ # See http://code.google.com/p/webrtc/issues/detail?id=163 for details.
+ configs -= [ "//build/config/clang:find_bad_constructs" ]
+ }
}
diff --git a/modules/video_capture/ensure_initialized.cc b/modules/video_capture/ensure_initialized.cc
index 65c9a8db..9d43d9f1 100644
--- a/modules/video_capture/ensure_initialized.cc
+++ b/modules/video_capture/ensure_initialized.cc
@@ -10,7 +10,7 @@
// Platform-specific initialization bits, if any, go here.
-#if !defined(ANDROID) || !defined(WEBRTC_CHROMIUM_BUILD)
+#ifndef ANDROID
namespace webrtc {
namespace videocapturemodule {
@@ -18,26 +18,15 @@ void EnsureInitialized() {}
} // namespace videocapturemodule
} // namespace webrtc
-#else // !defined(ANDROID) || !defined(WEBRTC_CHROMIUM_BUILD)
+#else
-#include <assert.h>
#include <pthread.h>
#include "base/android/jni_android.h"
-
-// Handy alternative to assert() which suppresses unused-variable warnings when
-// assert() is a no-op (i.e. in Release builds).
-#ifdef NDEBUG
-#define ASSERT(x) if (false && (x)); else
-#else
-#define ASSERT(x) assert(x)
-#endif
+#include "webrtc/base/checks.h"
+#include "webrtc/modules/video_capture/video_capture_internal.h"
namespace webrtc {
-
-// Declared in webrtc/modules/video_capture/include/video_capture.h.
-int32_t SetCaptureAndroidVM(JavaVM* javaVM, jobject g_context);
-
namespace videocapturemodule {
static pthread_once_t g_initialize_once = PTHREAD_ONCE_INIT;
@@ -46,18 +35,15 @@ void EnsureInitializedOnce() {
JNIEnv* jni = ::base::android::AttachCurrentThread();
jobject context = ::base::android::GetApplicationContext();
JavaVM* jvm = NULL;
- int status = jni->GetJavaVM(&jvm);
- ASSERT(status == 0);
- status = webrtc::SetCaptureAndroidVM(jvm, context) == 0;
- ASSERT(status);
+ CHECK_EQ(0, jni->GetJavaVM(&jvm));
+ CHECK_EQ(0, webrtc::SetCaptureAndroidVM(jvm, context));
}
void EnsureInitialized() {
- int ret = pthread_once(&g_initialize_once, &EnsureInitializedOnce);
- ASSERT(ret == 0);
+ CHECK_EQ(0, pthread_once(&g_initialize_once, &EnsureInitializedOnce));
}
} // namespace videocapturemodule
} // namespace webrtc
-#endif // ANDROID & WEBRTC_CHROMIUM_BUILD
+#endif // !ANDROID
diff --git a/modules/video_capture/include/video_capture.h b/modules/video_capture/include/video_capture.h
index 7398af60..6e728d15 100644
--- a/modules/video_capture/include/video_capture.h
+++ b/modules/video_capture/include/video_capture.h
@@ -14,16 +14,8 @@
#include "webrtc/modules/interface/module.h"
#include "webrtc/modules/video_capture/include/video_capture_defines.h"
-#ifdef ANDROID
-#include <jni.h>
-#endif
-
namespace webrtc {
-#if defined(ANDROID)
-int32_t SetCaptureAndroidVM(JavaVM* javaVM, jobject context);
-#endif
-
class VideoCaptureModule: public RefCountedModule {
public:
// Interface for receiving information about available camera devices.
diff --git a/modules/video_capture/include/video_capture_factory.h b/modules/video_capture/include/video_capture_factory.h
index ec92d31e..f78437d1 100644
--- a/modules/video_capture/include/video_capture_factory.h
+++ b/modules/video_capture/include/video_capture_factory.h
@@ -36,10 +36,6 @@ class VideoCaptureFactory {
static VideoCaptureModule::DeviceInfo* CreateDeviceInfo(
const int32_t id);
-#ifdef WEBRTC_ANDROID
- static int32_t SetAndroidObjects(void* javaVM, void* javaContext);
-#endif
-
private:
~VideoCaptureFactory();
};
diff --git a/modules/video_capture/ios/rtc_video_capture_ios_objc.mm b/modules/video_capture/ios/rtc_video_capture_ios_objc.mm
index ac90b72a..b5c53b19 100644
--- a/modules/video_capture/ios/rtc_video_capture_ios_objc.mm
+++ b/modules/video_capture/ios/rtc_video_capture_ios_objc.mm
@@ -204,12 +204,31 @@ using namespace webrtc::videocapturemodule;
[_captureSession setSessionPreset:captureQuality];
// take care of capture framerate now
+ NSArray* sessionInputs = _captureSession.inputs;
+ AVCaptureDeviceInput* deviceInput = [sessionInputs count] > 0 ?
+ sessionInputs[0] : nil;
+ AVCaptureDevice* inputDevice = deviceInput.device;
+ if (inputDevice) {
+ AVCaptureDeviceFormat* activeFormat = inputDevice.activeFormat;
+ NSArray* supportedRanges = activeFormat.videoSupportedFrameRateRanges;
+ AVFrameRateRange* targetRange = [supportedRanges count] > 0 ?
+ supportedRanges[0] : nil;
+ // Find the largest supported framerate less than capability maxFPS.
+ for (AVFrameRateRange* range in supportedRanges) {
+ if (range.maxFrameRate <= _capability.maxFPS &&
+ targetRange.maxFrameRate <= range.maxFrameRate) {
+ targetRange = range;
+ }
+ }
+ if (targetRange && [inputDevice lockForConfiguration:NULL]) {
+ inputDevice.activeVideoMinFrameDuration = targetRange.minFrameDuration;
+ inputDevice.activeVideoMaxFrameDuration = targetRange.minFrameDuration;
+ [inputDevice unlockForConfiguration];
+ }
+ }
+
_connection = [currentOutput connectionWithMediaType:AVMediaTypeVideo];
[self setRelativeVideoOrientation];
- CMTime cm_time = {1, _capability.maxFPS, kCMTimeFlags_Valid, 0};
-
- [_connection setVideoMinFrameDuration:cm_time];
- [_connection setVideoMaxFrameDuration:cm_time];
// finished configuring, commit settings to AVCaptureSession.
[_captureSession commitConfiguration];
diff --git a/modules/video_capture/test/video_capture_unittest.cc b/modules/video_capture/test/video_capture_unittest.cc
index ac271345..4c2263d0 100644
--- a/modules/video_capture/test/video_capture_unittest.cc
+++ b/modules/video_capture/test/video_capture_unittest.cc
@@ -278,8 +278,7 @@ class VideoCaptureTest : public testing::Test {
unsigned int number_of_devices_;
};
-// Video capture tests broken on Android, see webrtc:3768.
-TEST_F(VideoCaptureTest, DISABLED_ON_ANDROID(CreateDelete)) {
+TEST_F(VideoCaptureTest, CreateDelete) {
for (int i = 0; i < 5; ++i) {
int64_t start_time = TickTime::MillisecondTimestamp();
TestVideoCaptureCallback capture_observer;
@@ -316,8 +315,7 @@ TEST_F(VideoCaptureTest, DISABLED_ON_ANDROID(CreateDelete)) {
}
}
-// Video capture tests broken on Android, see webrtc:3768.
-TEST_F(VideoCaptureTest, DISABLED_ON_ANDROID(Capabilities)) {
+TEST_F(VideoCaptureTest, Capabilities) {
#ifdef WEBRTC_MAC
printf("Video capture capabilities are not supported on Mac.\n");
return;
@@ -472,8 +470,7 @@ class VideoCaptureExternalTest : public testing::Test {
};
// Test input of external video frames.
-// Video capture tests broken on Android, see webrtc:3768.
-TEST_F(VideoCaptureExternalTest, DISABLED_ON_ANDROID(TestExternalCapture)) {
+TEST_F(VideoCaptureExternalTest, TestExternalCapture) {
unsigned int length = webrtc::CalcBufferSize(webrtc::kI420,
test_frame_.width(),
test_frame_.height());
@@ -555,13 +552,7 @@ TEST_F(VideoCaptureExternalTest, DISABLED_TestExternalCaptureI420) {
// Test frame rate and no picture alarm.
// Flaky on Win32, see webrtc:3270.
-// Video capture tests broken on Android, see webrtc:3768.
-#if defined(_WIN32) || defined(WEBRTC_ANDROID)
-#define MAYBE_FrameRate DISABLED_FrameRate
-#else
-#define MAYBE_FrameRate FrameRate
-#endif
-TEST_F(VideoCaptureExternalTest, MAYBE_FrameRate) {
+TEST_F(VideoCaptureExternalTest, DISABLED_ON_WIN(FrameRate)) {
int64_t testTime = 3;
TickTime startTime = TickTime::Now();
@@ -598,8 +589,7 @@ TEST_F(VideoCaptureExternalTest, MAYBE_FrameRate) {
capture_feedback_.frame_rate() <= 33);
}
-// Video capture tests broken on Android, see webrtc:3768.
-TEST_F(VideoCaptureExternalTest, DISABLED_ON_ANDROID(Rotation)) {
+TEST_F(VideoCaptureExternalTest, Rotation) {
EXPECT_EQ(0, capture_module_->SetCaptureRotation(webrtc::kCameraRotate0));
unsigned int length = webrtc::CalcBufferSize(webrtc::kI420,
test_frame_.width(),
@@ -621,8 +611,3 @@ TEST_F(VideoCaptureExternalTest, DISABLED_ON_ANDROID(Rotation)) {
EXPECT_EQ(0, capture_input_interface_->IncomingFrame(test_buffer.get(),
length, capture_callback_.capability(), 0));
}
-
-// Video capture tests broken on Android, see webrtc:3768.
-// Add dummy test that always passes to avoid the entire test returning exit
-// code 1 when zero tests have executed.
-TEST_F(VideoCaptureExternalTest, Dummy) {}
diff --git a/modules/video_capture/video_capture.gypi b/modules/video_capture/video_capture.gypi
index 3e83ab3f..888d7727 100644
--- a/modules/video_capture/video_capture.gypi
+++ b/modules/video_capture/video_capture.gypi
@@ -9,6 +9,10 @@
{
'targets': [
{
+ # Note this library is missing an implementation for the video capture.
+ # Targets must link with either 'video_capture_module_impl' or
+ # 'video_capture_module_internal_impl' depending on whether they want to
+ # use the internal capturer.
'target_name': 'video_capture_module',
'type': 'static_library',
'dependencies': [
@@ -28,128 +32,128 @@
'video_capture_impl.cc',
'video_capture_impl.h',
],
+ },
+ {
+ # Default video capture module implementation that only supports external
+ # capture.
+ 'target_name': 'video_capture_module_impl',
+ 'type': 'static_library',
+ 'dependencies': [
+ 'video_capture_module',
+ ],
+ 'sources': [
+ 'external/device_info_external.cc',
+ 'external/video_capture_external.cc',
+ ],
+ },
+ {
+ 'target_name': 'video_capture_module_internal_impl',
+ 'type': 'static_library',
+ 'dependencies': [
+ 'video_capture_module',
+ ],
'conditions': [
- ['include_internal_video_capture==0', {
+ ['OS=="linux"', {
'sources': [
- 'external/device_info_external.cc',
- 'external/video_capture_external.cc',
+ 'linux/device_info_linux.cc',
+ 'linux/device_info_linux.h',
+ 'linux/video_capture_linux.cc',
+ 'linux/video_capture_linux.h',
],
- }, { # include_internal_video_capture == 1
- 'conditions': [
- ['OS=="linux"', {
- 'sources': [
- 'linux/device_info_linux.cc',
- 'linux/device_info_linux.h',
- 'linux/video_capture_linux.cc',
- 'linux/video_capture_linux.h',
- ],
- }], # linux
- ['OS=="mac"', {
- 'sources': [
- 'mac/qtkit/video_capture_qtkit.h',
- 'mac/qtkit/video_capture_qtkit.mm',
- 'mac/qtkit/video_capture_qtkit_info.h',
- 'mac/qtkit/video_capture_qtkit_info.mm',
- 'mac/qtkit/video_capture_qtkit_info_objc.h',
- 'mac/qtkit/video_capture_qtkit_info_objc.mm',
- 'mac/qtkit/video_capture_qtkit_objc.h',
- 'mac/qtkit/video_capture_qtkit_objc.mm',
- 'mac/qtkit/video_capture_qtkit_utility.h',
- 'mac/video_capture_mac.mm',
- ],
- 'link_settings': {
- 'xcode_settings': {
- 'OTHER_LDFLAGS': [
- '-framework Cocoa',
- '-framework CoreVideo',
- '-framework QTKit',
- ],
- },
- },
- }], # mac
- ['OS=="win"', {
- 'dependencies': [
- '<(DEPTH)/third_party/winsdk_samples/winsdk_samples.gyp:directshow_baseclasses',
- ],
- 'sources': [
- 'windows/device_info_ds.cc',
- 'windows/device_info_ds.h',
- 'windows/device_info_mf.cc',
- 'windows/device_info_mf.h',
- 'windows/help_functions_ds.cc',
- 'windows/help_functions_ds.h',
- 'windows/sink_filter_ds.cc',
- 'windows/sink_filter_ds.h',
- 'windows/video_capture_ds.cc',
- 'windows/video_capture_ds.h',
- 'windows/video_capture_factory_windows.cc',
- 'windows/video_capture_mf.cc',
- 'windows/video_capture_mf.h',
- ],
- 'link_settings': {
- 'libraries': [
- '-lStrmiids.lib',
- ],
- },
- }], # win
- ['OS=="android"', {
- 'dependencies': [
- '<(DEPTH)/third_party/icu/icu.gyp:icuuc',
- '<(DEPTH)/third_party/jsoncpp/jsoncpp.gyp:jsoncpp',
- ],
- 'sources': [
- 'android/device_info_android.cc',
- 'android/device_info_android.h',
- 'android/video_capture_android.cc',
- 'android/video_capture_android.h',
+ }], # linux
+ ['OS=="mac"', {
+ 'sources': [
+ 'mac/qtkit/video_capture_qtkit.h',
+ 'mac/qtkit/video_capture_qtkit.mm',
+ 'mac/qtkit/video_capture_qtkit_info.h',
+ 'mac/qtkit/video_capture_qtkit_info.mm',
+ 'mac/qtkit/video_capture_qtkit_info_objc.h',
+ 'mac/qtkit/video_capture_qtkit_info_objc.mm',
+ 'mac/qtkit/video_capture_qtkit_objc.h',
+ 'mac/qtkit/video_capture_qtkit_objc.mm',
+ 'mac/qtkit/video_capture_qtkit_utility.h',
+ 'mac/video_capture_mac.mm',
+ ],
+ 'link_settings': {
+ 'xcode_settings': {
+ 'OTHER_LDFLAGS': [
+ '-framework Cocoa',
+ '-framework CoreVideo',
+ '-framework QTKit',
],
- }], # android
- ['OS=="ios"', {
- 'sources': [
- 'ios/device_info_ios.h',
- 'ios/device_info_ios.mm',
- 'ios/device_info_ios_objc.h',
- 'ios/device_info_ios_objc.mm',
- 'ios/rtc_video_capture_ios_objc.h',
- 'ios/rtc_video_capture_ios_objc.mm',
- 'ios/video_capture_ios.h',
- 'ios/video_capture_ios.mm',
+ },
+ },
+ }], # mac
+      # Note that since winsdk_samples isn't pulled into Chromium, gyp will
+      # fail to parse this rule and try to resolve the dependencies. This
+      # is not a problem since the internal video capture implementation
+      # should not be used in Chrome - see issue 3831.
+ ['OS=="win" and build_with_chromium==0', {
+ 'dependencies': [
+ '<(DEPTH)/third_party/winsdk_samples/winsdk_samples.gyp:directshow_baseclasses',
+ ],
+ 'sources': [
+ 'windows/device_info_ds.cc',
+ 'windows/device_info_ds.h',
+ 'windows/device_info_mf.cc',
+ 'windows/device_info_mf.h',
+ 'windows/help_functions_ds.cc',
+ 'windows/help_functions_ds.h',
+ 'windows/sink_filter_ds.cc',
+ 'windows/sink_filter_ds.h',
+ 'windows/video_capture_ds.cc',
+ 'windows/video_capture_ds.h',
+ 'windows/video_capture_factory_windows.cc',
+ 'windows/video_capture_mf.cc',
+ 'windows/video_capture_mf.h',
+ ],
+ 'link_settings': {
+ 'libraries': [
+ '-lStrmiids.lib',
+ ],
+ },
+ }], # win
+ ['OS=="android"', {
+ 'dependencies': [
+ '<(DEPTH)/third_party/icu/icu.gyp:icuuc',
+ '<(DEPTH)/third_party/jsoncpp/jsoncpp.gyp:jsoncpp',
+ ],
+ 'sources': [
+ 'android/device_info_android.cc',
+ 'android/device_info_android.h',
+ 'android/video_capture_android.cc',
+ 'android/video_capture_android.h',
+ ],
+ }], # android
+ ['OS=="ios"', {
+ 'sources': [
+ 'ios/device_info_ios.h',
+ 'ios/device_info_ios.mm',
+ 'ios/device_info_ios_objc.h',
+ 'ios/device_info_ios_objc.mm',
+ 'ios/rtc_video_capture_ios_objc.h',
+ 'ios/rtc_video_capture_ios_objc.mm',
+ 'ios/video_capture_ios.h',
+ 'ios/video_capture_ios.mm',
+ ],
+ 'xcode_settings': {
+ 'CLANG_ENABLE_OBJC_ARC': 'YES',
+ },
+ 'all_dependent_settings': {
+ 'xcode_settings': {
+ 'OTHER_LDFLAGS': [
+ '-framework AVFoundation',
+ '-framework CoreMedia',
+ '-framework CoreVideo',
+ '-framework UIKit',
],
- 'xcode_settings': {
- 'CLANG_ENABLE_OBJC_ARC': 'YES',
- 'WARNING_CFLAGS': [
- # To avoid warnings for deprecated videoMinFrameDuration and
- # videoMaxFrameDuration properties in iOS 7.0.
- # See webrtc:3705 for more details.
- '-Wno-deprecated-declarations',
- ],
- },
- 'all_dependent_settings': {
- 'xcode_settings': {
- 'OTHER_LDFLAGS': [
- '-framework AVFoundation',
- '-framework CoreMedia',
- '-framework CoreVideo',
- '-framework UIKit',
- ],
- },
- },
- }], # ios
- ], # conditions
- }], # include_internal_video_capture
+ },
+ },
+ }], # ios
], # conditions
- },
+ }
],
'conditions': [
- ['include_tests==1 and OS=="android"', {
- # Use WebRTC capture code for Android APK tests that are built from a
- # Chromium checkout. Normally when built as a part of Chromium the
- # Chromium video capture code is used. This overrides the default in
- # webrtc/build/common.gypi.
- 'variables': {
- 'include_internal_video_capture': 1,
- },
- }],
['include_tests==1', {
'targets': [
{
@@ -157,6 +161,7 @@
'type': '<(gtest_target_type)',
'dependencies': [
'video_capture_module',
+ 'video_capture_module_internal_impl',
'webrtc_utility',
'<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
'<(DEPTH)/testing/gtest.gyp:gtest',
@@ -187,6 +192,13 @@
'dependencies': [
'<(DEPTH)/testing/android/native_test.gyp:native_test_native_code',
],
+ # Need to disable error due to the line in
+ # base/android/jni_android.h triggering it:
+ # const BASE_EXPORT jobject GetApplicationContext()
+ # error: type qualifiers ignored on function return type
+ 'cflags': [
+ '-Wno-ignored-qualifiers',
+ ],
}],
['OS=="mac"', {
'dependencies': [
diff --git a/modules/video_capture/video_capture_internal.h b/modules/video_capture/video_capture_internal.h
new file mode 100644
index 00000000..1a90af13
--- /dev/null
+++ b/modules/video_capture/video_capture_internal.h
@@ -0,0 +1,27 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_VIDEO_CAPTURE_INTERNAL_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_VIDEO_CAPTURE_INTERNAL_H_
+
+#ifdef ANDROID
+#include <jni.h>
+
+namespace webrtc {
+
+// In order to be able to use the internal WebRTC video capture
+// on Android, the JVM objects must be set via this method.
+int32_t SetCaptureAndroidVM(JavaVM* javaVM, jobject context);
+
+} // namespace webrtc
+
+#endif // ANDROID
+
+#endif // WEBRTC_MODULES_VIDEO_CAPTURE_VIDEO_CAPTURE_INTERNAL_H_
diff --git a/modules/video_coding/codecs/interface/video_codec_interface.h b/modules/video_coding/codecs/interface/video_codec_interface.h
index e6b1d097..82bcd26d 100644
--- a/modules/video_coding/codecs/interface/video_codec_interface.h
+++ b/modules/video_coding/codecs/interface/video_codec_interface.h
@@ -15,11 +15,10 @@
#include "webrtc/common_types.h"
#include "webrtc/common_video/interface/i420_video_frame.h"
-#include "webrtc/common_video/interface/video_image.h"
#include "webrtc/modules/interface/module_common_types.h"
#include "webrtc/modules/video_coding/codecs/interface/video_error_codes.h"
-
#include "webrtc/typedefs.h"
+#include "webrtc/video_encoder.h"
namespace webrtc
{
@@ -63,106 +62,6 @@ struct CodecSpecificInfo
CodecSpecificInfoUnion codecSpecific;
};
-class EncodedImageCallback
-{
-public:
- virtual ~EncodedImageCallback() {};
-
- // Callback function which is called when an image has been encoded.
- //
- // Input:
- // - encodedImage : The encoded image
- //
- // Return value : > 0, signals to the caller that one or more future frames
- // should be dropped to keep bit rate or frame rate.
- // = 0, if OK.
- // < 0, on error.
- virtual int32_t
- Encoded(EncodedImage& encodedImage,
- const CodecSpecificInfo* codecSpecificInfo = NULL,
- const RTPFragmentationHeader* fragmentation = NULL) = 0;
-};
-
-class VideoEncoder
-{
-public:
- virtual ~VideoEncoder() {};
-
- // Initialize the encoder with the information from the VideoCodec.
- //
- // Input:
- // - codecSettings : Codec settings
- // - numberOfCores : Number of cores available for the encoder
- // - maxPayloadSize : The maximum size each payload is allowed
- // to have. Usually MTU - overhead.
- //
- // Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
- virtual int32_t InitEncode(const VideoCodec* codecSettings, int32_t numberOfCores, uint32_t maxPayloadSize) = 0;
-
- // Encode an I420 image (as a part of a video stream). The encoded image
- // will be returned to the user through the encode complete callback.
- //
- // Input:
- // - inputImage : Image to be encoded
- // - codecSpecificInfo : Pointer to codec specific data
- // - frame_types : The frame type to encode
- //
- // Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0
- // otherwise.
- virtual int32_t Encode(
- const I420VideoFrame& inputImage,
- const CodecSpecificInfo* codecSpecificInfo,
- const std::vector<VideoFrameType>* frame_types) = 0;
-
- // Register an encode complete callback object.
- //
- // Input:
- // - callback : Callback object which handles encoded images.
- //
- // Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
- virtual int32_t RegisterEncodeCompleteCallback(EncodedImageCallback* callback) = 0;
-
- // Free encoder memory.
- //
- // Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
- virtual int32_t Release() = 0;
-
- // Inform the encoder about the packet loss and round trip time on the
- // network used to decide the best pattern and signaling.
- //
- // - packetLoss : Fraction lost (loss rate in percent =
- // 100 * packetLoss / 255)
- // - rtt : Round-trip time in milliseconds
- //
- // Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
- virtual int32_t SetChannelParameters(uint32_t packetLoss, int rtt) = 0;
-
- // Inform the encoder about the new target bit rate.
- //
- // - newBitRate : New target bit rate
- // - frameRate : The target frame rate
- //
- // Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
- virtual int32_t SetRates(uint32_t newBitRate, uint32_t frameRate) = 0;
-
- // Use this function to enable or disable periodic key frames. Can be useful for codecs
- // which have other ways of stopping error propagation.
- //
- // - enable : Enable or disable periodic key frames
- //
- // Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
- virtual int32_t SetPeriodicKeyFrames(bool enable) { return WEBRTC_VIDEO_CODEC_ERROR; }
-
- // Codec configuration data to send out-of-band, i.e. in SIP call setup
- //
- // - buffer : Buffer pointer to where the configuration data
- // should be stored
- // - size : The size of the buffer in bytes
- //
- // Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
- virtual int32_t CodecConfigParameters(uint8_t* /*buffer*/, int32_t /*size*/) { return WEBRTC_VIDEO_CODEC_ERROR; }
-};
-
class DecodedImageCallback
{
public:
diff --git a/modules/video_coding/codecs/test/mock/mock_packet_manipulator.h b/modules/video_coding/codecs/test/mock/mock_packet_manipulator.h
index 75fe63fe..1e2c9b8f 100644
--- a/modules/video_coding/codecs/test/mock/mock_packet_manipulator.h
+++ b/modules/video_coding/codecs/test/mock/mock_packet_manipulator.h
@@ -16,8 +16,8 @@
#include <string>
#include "testing/gmock/include/gmock/gmock.h"
-#include "webrtc/common_video/interface/video_image.h"
#include "webrtc/typedefs.h"
+#include "webrtc/video_frame.h"
namespace webrtc {
namespace test {
diff --git a/modules/video_coding/main/source/codec_database.cc b/modules/video_coding/main/source/codec_database.cc
index e5013689..cd8f9d35 100644
--- a/modules/video_coding/main/source/codec_database.cc
+++ b/modules/video_coding/main/source/codec_database.cc
@@ -24,6 +24,36 @@
namespace webrtc {
+VideoCodecVP8 VideoEncoder::GetDefaultVp8Settings() {
+ VideoCodecVP8 vp8_settings;
+ memset(&vp8_settings, 0, sizeof(vp8_settings));
+
+ vp8_settings.resilience = kResilientStream;
+ vp8_settings.numberOfTemporalLayers = 1;
+ vp8_settings.denoisingOn = true;
+ vp8_settings.errorConcealmentOn = false;
+ vp8_settings.automaticResizeOn = false;
+ vp8_settings.frameDroppingOn = true;
+ vp8_settings.keyFrameInterval = 3000;
+
+ return vp8_settings;
+}
+
+VideoCodecH264 VideoEncoder::GetDefaultH264Settings() {
+ VideoCodecH264 h264_settings;
+ memset(&h264_settings, 0, sizeof(h264_settings));
+
+ h264_settings.profile = kProfileBase;
+ h264_settings.frameDroppingOn = true;
+ h264_settings.keyFrameInterval = 3000;
+ h264_settings.spsData = NULL;
+ h264_settings.spsLen = 0;
+ h264_settings.ppsData = NULL;
+ h264_settings.ppsLen = 0;
+
+ return h264_settings;
+}
+
VCMDecoderMapItem::VCMDecoderMapItem(VideoCodec* settings,
int number_of_cores,
bool require_key_frame)
@@ -92,13 +122,7 @@ bool VCMCodecDataBase::Codec(int list_id,
settings->height = VCM_DEFAULT_CODEC_HEIGHT;
settings->numberOfSimulcastStreams = 0;
settings->qpMax = 56;
- settings->codecSpecific.VP8.resilience = kResilientStream;
- settings->codecSpecific.VP8.numberOfTemporalLayers = 1;
- settings->codecSpecific.VP8.denoisingOn = true;
- settings->codecSpecific.VP8.errorConcealmentOn = false;
- settings->codecSpecific.VP8.automaticResizeOn = false;
- settings->codecSpecific.VP8.frameDroppingOn = true;
- settings->codecSpecific.VP8.keyFrameInterval = 3000;
+ settings->codecSpecific.VP8 = VideoEncoder::GetDefaultVp8Settings();
return true;
}
#endif
@@ -116,13 +140,7 @@ bool VCMCodecDataBase::Codec(int list_id,
settings->height = VCM_DEFAULT_CODEC_HEIGHT;
settings->numberOfSimulcastStreams = 0;
settings->qpMax = 56;
- settings->codecSpecific.H264.profile = kProfileBase;
- settings->codecSpecific.H264.frameDroppingOn = true;
- settings->codecSpecific.H264.keyFrameInterval = 3000;
- settings->codecSpecific.H264.spsData = NULL;
- settings->codecSpecific.H264.spsLen = 0;
- settings->codecSpecific.H264.ppsData = NULL;
- settings->codecSpecific.H264.ppsLen = 0;
+ settings->codecSpecific.H264 = VideoEncoder::GetDefaultH264Settings();
return true;
}
#endif
diff --git a/modules/video_render/BUILD.gn b/modules/video_render/BUILD.gn
index 2c14b546..84b73c58 100644
--- a/modules/video_render/BUILD.gn
+++ b/modules/video_render/BUILD.gn
@@ -8,15 +8,6 @@
import("../../build/webrtc.gni")
-config("video_render_config") {
- if (is_ios) {
- libs = [
- "OpenGLES.framework",
- "QuartzCore.framework",
- ]
- }
-}
-
source_set("video_render") {
sources = [
"external/video_render_external_impl.cc",
@@ -28,116 +19,147 @@ source_set("video_render") {
"incoming_video_stream.h",
"video_render_frames.cc",
"video_render_frames.h",
- "video_render_impl.cc",
"video_render_impl.h",
]
+ deps = [
+ "../../common_video",
+ "../../system_wrappers",
+ "../utility",
+ ]
+
+ if (is_clang) {
+ # Suppress warnings from Chrome's Clang plugins.
+ # See http://code.google.com/p/webrtc/issues/detail?id=163 for details.
+ configs -= [ "//build/config/clang:find_bad_constructs" ]
+ }
+}
+
+source_set("video_render_impl") {
+ sources = [
+ "video_render_impl.cc",
+ ]
+ deps = [
+ ":video_render",
+ "../../system_wrappers",
+ ]
+
+ if (is_clang) {
+ # Suppress warnings from Chrome's Clang plugins.
+ # See http://code.google.com/p/webrtc/issues/detail?id=163 for details.
+ configs -= [ "//build/config/clang:find_bad_constructs" ]
+ }
+}
+
+config("video_render_internal_impl_config") {
+ if (is_ios) {
+ libs = [
+ "OpenGLES.framework",
+ "QuartzCore.framework",
+ ]
+ }
+}
+
+source_set("video_render_internal_impl") {
libs = []
- deps = []
-
- if (rtc_include_internal_video_render) {
- defines += [ "WEBRTC_INCLUDE_INTERNAL_VIDEO_RENDER" ]
-
- if (is_linux) {
- sources += [
- "linux/video_render_linux_impl.cc",
- "linux/video_render_linux_impl.h",
- "linux/video_x11_channel.cc",
- "linux/video_x11_channel.h",
- "linux/video_x11_render.cc",
- "linux/video_x11_render.h",
- ]
-
- libs += [ "Xext" ]
- }
- if (is_mac) {
- sources += [
- "mac/cocoa_full_screen_window.h",
- "mac/cocoa_full_screen_window.mm",
- "mac/cocoa_render_view.h",
- "mac/cocoa_render_view.mm",
- "mac/video_render_agl.cc",
- "mac/video_render_agl.h",
- "mac/video_render_mac_carbon_impl.cc",
- "mac/video_render_mac_carbon_impl.h",
- "mac/video_render_mac_cocoa_impl.h",
- "mac/video_render_mac_cocoa_impl.mm",
- "mac/video_render_nsopengl.h",
- "mac/video_render_nsopengl.mm",
- ]
-
- libs += [
- "CoreVideo.framework",
- "QTKit.framework",
- ]
- }
- if (is_win) {
- sources += [
- "windows/i_video_render_win.h",
- "windows/video_render_direct3d9.cc",
- "windows/video_render_direct3d9.h",
- "windows/video_render_windows_impl.cc",
- "windows/video_render_windows_impl.h",
- ]
-
- directxsdk_exists =
- (exec_script("//build/dir_exists.py",
- [ rebase_path("//third_party/directxsdk/files",
- root_build_dir) ],
- "trim string") == "True")
- if (directxsdk_exists) {
- directxsdk_path = "//third_party/directxsdk/files"
- } else {
- directxsdk_path = getenv("DXSDK_DIR")
- }
- include_dirs = [ directxsdk_path + "/Include" ]
+ sources = [
+ "video_render_internal_impl.cc",
+ ]
+ deps = [
+ ":video_render",
+ "../../system_wrappers",
+ ]
- }
- if (is_android) {
- sources += [
- "android/video_render_android_impl.cc",
- "android/video_render_android_impl.h",
- "android/video_render_android_native_opengl2.cc",
- "android/video_render_android_native_opengl2.h",
- "android/video_render_android_surface_view.cc",
- "android/video_render_android_surface_view.h",
- "android/video_render_opengles20.cc",
- "android/video_render_opengles20.h",
- ]
-
- libs += [ "GLESv2" ]
- }
- if (is_ios) {
- sources += [
- "ios/open_gles20.h",
- "ios/open_gles20.mm",
- "ios/video_render_ios_channel.h",
- "ios/video_render_ios_channel.mm",
- "ios/video_render_ios_gles20.h",
- "ios/video_render_ios_gles20.mm",
- "ios/video_render_ios_impl.h",
- "ios/video_render_ios_impl.mm",
- "ios/video_render_ios_view.h",
- "ios/video_render_ios_view.mm",
- ]
- }
+ if (is_linux) {
+ sources += [
+ "linux/video_render_linux_impl.cc",
+ "linux/video_render_linux_impl.h",
+ "linux/video_x11_channel.cc",
+ "linux/video_x11_channel.h",
+ "linux/video_x11_render.cc",
+ "linux/video_x11_render.h",
+ ]
+
+ libs += [ "Xext" ]
}
+ if (is_mac) {
+ sources += [
+ "mac/cocoa_full_screen_window.h",
+ "mac/cocoa_full_screen_window.mm",
+ "mac/cocoa_render_view.h",
+ "mac/cocoa_render_view.mm",
+ "mac/video_render_agl.cc",
+ "mac/video_render_agl.h",
+ "mac/video_render_mac_carbon_impl.cc",
+ "mac/video_render_mac_carbon_impl.h",
+ "mac/video_render_mac_cocoa_impl.h",
+ "mac/video_render_mac_cocoa_impl.mm",
+ "mac/video_render_nsopengl.h",
+ "mac/video_render_nsopengl.mm",
+ ]
- all_dependent_configs = [ ":video_render_config"]
+ libs += [
+ "CoreVideo.framework",
+ "QTKit.framework",
+ ]
+ }
+ if (is_win) {
+ sources += [
+ "windows/i_video_render_win.h",
+ "windows/video_render_direct3d9.cc",
+ "windows/video_render_direct3d9.h",
+ "windows/video_render_windows_impl.cc",
+ "windows/video_render_windows_impl.h",
+ ]
+ directxsdk_exists =
+ (exec_script("//build/dir_exists.py",
+ [ rebase_path("//third_party/directxsdk/files",
+ root_build_dir) ],
+ "trim string") == "True")
+ if (directxsdk_exists) {
+ directxsdk_path = "//third_party/directxsdk/files"
+ } else {
+ directxsdk_path = getenv("DXSDK_DIR")
+ }
+ include_dirs = [ directxsdk_path + "/Include" ]
+ }
+ if (is_android) {
+ sources += [
+ "android/video_render_android_impl.cc",
+ "android/video_render_android_impl.h",
+ "android/video_render_android_native_opengl2.cc",
+ "android/video_render_android_native_opengl2.h",
+ "android/video_render_android_surface_view.cc",
+ "android/video_render_android_surface_view.h",
+ "android/video_render_opengles20.cc",
+ "android/video_render_opengles20.h",
+ ]
+
+ libs += [ "GLESv2" ]
+ }
if (is_ios) {
+ sources += [
+ "ios/open_gles20.h",
+ "ios/open_gles20.mm",
+ "ios/video_render_ios_channel.h",
+ "ios/video_render_ios_channel.mm",
+ "ios/video_render_ios_gles20.h",
+ "ios/video_render_ios_gles20.mm",
+ "ios/video_render_ios_impl.h",
+ "ios/video_render_ios_impl.mm",
+ "ios/video_render_ios_view.h",
+ "ios/video_render_ios_view.mm",
+ ]
+
cflags += [ "-fobjc-arc" ] # CLANG_ENABLE_OBJC_ARC = YES.
}
+ all_dependent_configs = [ ":video_render_internal_impl_config"]
+
if (is_clang) {
# Suppress warnings from Chrome's Clang plugins.
# See http://code.google.com/p/webrtc/issues/detail?id=163 for details.
configs -= [ "//build/config/clang:find_bad_constructs" ]
}
-
- deps += [
- "../../common_video",
- "../../system_wrappers",
- "../utility",
- ]
}
diff --git a/modules/video_render/android/video_render_android_impl.cc b/modules/video_render/android/video_render_android_impl.cc
index 27a264ac..20694b65 100644
--- a/modules/video_render/android/video_render_android_impl.cc
+++ b/modules/video_render/android/video_render_android_impl.cc
@@ -10,6 +10,7 @@
#include "webrtc/modules/video_render/android/video_render_android_impl.h"
+#include "webrtc/modules/video_render/video_render_internal.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/event_wrapper.h"
#include "webrtc/system_wrappers/interface/thread_wrapper.h"
@@ -29,13 +30,11 @@ namespace webrtc {
JavaVM* VideoRenderAndroid::g_jvm = NULL;
-#if defined(WEBRTC_ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
-int32_t SetRenderAndroidVM(void* javaVM) {
+int32_t SetRenderAndroidVM(JavaVM* javaVM) {
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, -1, "%s", __FUNCTION__);
- VideoRenderAndroid::g_jvm = (JavaVM*)javaVM;
+ VideoRenderAndroid::g_jvm = javaVM;
return 0;
}
-#endif
VideoRenderAndroid::VideoRenderAndroid(
const int32_t id,
diff --git a/modules/video_render/include/video_render.h b/modules/video_render/include/video_render.h
index 56e0cdb4..53a40414 100644
--- a/modules/video_render/include/video_render.h
+++ b/modules/video_render/include/video_render.h
@@ -25,10 +25,6 @@
namespace webrtc {
-#if defined(WEBRTC_ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
-int32_t SetRenderAndroidVM(void* javaVM);
-#endif
-
// Class definitions
class VideoRender: public Module
{
diff --git a/modules/video_render/test/testAPI/testAPI.cc b/modules/video_render/test/testAPI/testAPI.cc
index 41817186..3ec68ddb 100644
--- a/modules/video_render/test/testAPI/testAPI.cc
+++ b/modules/video_render/test/testAPI/testAPI.cc
@@ -294,11 +294,6 @@ int TestSingleStream(VideoRender* renderModule) {
VideoRenderCallback* renderCallback0 = renderModule->AddIncomingRenderStream(streamId0, 0, 0.0f, 0.0f, 1.0f, 1.0f);
assert(renderCallback0 != NULL);
-#ifndef WEBRTC_INCLUDE_INTERNAL_VIDEO_RENDER
- MyRenderCallback externalRender;
- renderModule->AddExternalRenderCallback(streamId0, &externalRender);
-#endif
-
printf("Start render\n");
error = renderModule->StartRender(streamId0);
if (error != 0) {
@@ -577,10 +572,6 @@ int TestExternalRender(VideoRender* renderModule) {
}
void RunVideoRenderTests(void* window, VideoRenderType windowType) {
-#ifndef WEBRTC_INCLUDE_INTERNAL_VIDEO_RENDER
- windowType = kRenderExternal;
-#endif
-
int myId = 12345;
// Create the render module
diff --git a/modules/video_render/video_render.gypi b/modules/video_render/video_render.gypi
index 0828e551..da51588a 100644
--- a/modules/video_render/video_render.gypi
+++ b/modules/video_render/video_render.gypi
@@ -9,6 +9,10 @@
{
'targets': [
{
+      # Note this library is missing an implementation for the video render.
+      # Targets must link with either 'video_render_module_impl' or
+      # 'video_render_module_internal_impl' depending on whether they want
+      # to compile and use the internal render as the default renderer.
'target_name': 'video_render_module',
'type': 'static_library',
'dependencies': [
@@ -26,16 +30,36 @@
'incoming_video_stream.h',
'video_render_frames.cc',
'video_render_frames.h',
- 'video_render_impl.cc',
'video_render_impl.h',
],
- # TODO(andrew): with the proper suffix, these files will be excluded
- # automatically.
+ },
+ {
+ # Default video_render_module implementation that only supports external
+ # renders.
+ 'target_name': 'video_render_module_impl',
+ 'type': 'static_library',
+ 'dependencies': [
+ 'video_render_module',
+ ],
+ 'sources': [
+ 'video_render_impl.cc',
+ ],
+ },
+ {
+ # video_render_module implementation that supports the internal
+ # video_render implementation.
+ 'target_name': 'video_render_module_internal_impl',
+ 'type': 'static_library',
+ 'dependencies': [
+ 'video_render_module',
+ ],
+ 'sources': [
+ 'video_render_internal_impl.cc',
+ ],
+ # TODO(andrew): with the proper suffix, these files will be excluded
+ # automatically.
'conditions': [
- ['include_internal_video_render==1', {
- 'defines': ['WEBRTC_INCLUDE_INTERNAL_VIDEO_RENDER',],
- }],
- ['OS=="android" and include_internal_video_render==1', {
+ ['OS=="android"', {
'sources': [
'android/video_render_android_impl.h',
'android/video_render_android_native_opengl2.h',
@@ -52,7 +76,7 @@
],
},
}],
- ['OS=="ios" and include_internal_video_render==1', {
+ ['OS=="ios"', {
'sources': [
# iOS
'ios/open_gles20.h',
@@ -66,8 +90,20 @@
'ios/video_render_ios_view.h',
'ios/video_render_ios_view.mm',
],
+ 'xcode_settings': {
+ 'CLANG_ENABLE_OBJC_ARC': 'YES',
+ },
+ 'all_dependent_settings': {
+ 'xcode_settings': {
+ 'OTHER_LDFLAGS': [
+ '-framework OpenGLES',
+ '-framework QuartzCore',
+ '-framework UIKit',
+ ],
+ },
+ },
}],
- ['OS=="linux" and include_internal_video_render==1', {
+ ['OS=="linux"', {
'sources': [
'linux/video_render_linux_impl.h',
'linux/video_x11_channel.h',
@@ -82,7 +118,7 @@
],
},
}],
- ['OS=="mac" and include_internal_video_render==1', {
+ ['OS=="mac"', {
'sources': [
'mac/cocoa_full_screen_window.h',
'mac/cocoa_render_view.h',
@@ -98,21 +134,14 @@
'mac/cocoa_full_screen_window.mm',
],
}],
- ['OS=="ios"', {
- 'xcode_settings': {
- 'CLANG_ENABLE_OBJC_ARC': 'YES',
- },
- 'all_dependent_settings': {
- 'xcode_settings': {
- 'OTHER_LDFLAGS': [
- '-framework OpenGLES',
- '-framework QuartzCore',
- '-framework UIKit',
- ],
- },
- },
- }],
- ['OS=="win" and include_internal_video_render==1', {
+ ['OS=="win"', {
+ 'sources': [
+ 'windows/i_video_render_win.h',
+ 'windows/video_render_direct3d9.h',
+ 'windows/video_render_windows_impl.h',
+ 'windows/video_render_direct3d9.cc',
+ 'windows/video_render_windows_impl.cc',
+ ],
'variables': {
# 'directx_sdk_path' will be overridden in the condition block
# below, but it must not be declared as empty here since gyp
@@ -133,30 +162,18 @@
'<(directx_sdk_path)/Include',
],
}],
- ['OS=="win" and include_internal_video_render==1', {
- 'sources': [
- 'windows/i_video_render_win.h',
- 'windows/video_render_direct3d9.h',
- 'windows/video_render_windows_impl.h',
- 'windows/video_render_direct3d9.cc',
- 'windows/video_render_windows_impl.cc',
- ],
- }],
] # conditions
- }, # video_render_module
+ },
], # targets
'conditions': [
- ['include_internal_video_render==1', {
- 'defines': ['WEBRTC_INCLUDE_INTERNAL_VIDEO_RENDER',],
- }],
['include_tests==1', {
'targets': [
{
'target_name': 'video_render_tests',
'type': 'executable',
'dependencies': [
- 'video_render_module',
+ 'video_render_module_internal_impl',
'webrtc_utility',
'<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
'<(webrtc_root)/common_video/common_video.gyp:common_video',
diff --git a/modules/video_render/video_render_impl.cc b/modules/video_render/video_render_impl.cc
index 763590f2..db0dd5ae 100644
--- a/modules/video_render/video_render_impl.cc
+++ b/modules/video_render/video_render_impl.cc
@@ -11,55 +11,14 @@
#include <assert.h>
#include "webrtc/engine_configurations.h"
-#include "webrtc/modules/video_render/i_video_render.h"
+#include "webrtc/modules/video_render/external/video_render_external_impl.h"
#include "webrtc/modules/video_render/include/video_render_defines.h"
#include "webrtc/modules/video_render/incoming_video_stream.h"
+#include "webrtc/modules/video_render/i_video_render.h"
#include "webrtc/modules/video_render/video_render_impl.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/trace.h"
-#ifdef WEBRTC_INCLUDE_INTERNAL_VIDEO_RENDER
-
-#if defined (_WIN32)
-#include "webrtc/modules/video_render/windows/video_render_windows_impl.h"
-#define STANDARD_RENDERING kRenderWindows
-
-// WEBRTC_IOS should go before WEBRTC_MAC because WEBRTC_MAC
-// gets defined if WEBRTC_IOS is defined
-#elif defined(WEBRTC_IOS)
-#define STANDARD_RENDERING kRenderiOS
-#include "ios/video_render_ios_impl.h"
-#elif defined(WEBRTC_MAC)
-#if defined(COCOA_RENDERING)
-#define STANDARD_RENDERING kRenderCocoa
-#include "webrtc/modules/video_render/mac/video_render_mac_cocoa_impl.h"
-#elif defined(CARBON_RENDERING)
-#define STANDARD_RENDERING kRenderCarbon
-#include "webrtc/modules/video_render/mac/video_render_mac_carbon_impl.h"
-#endif
-
-#elif defined(WEBRTC_ANDROID)
-#include "webrtc/modules/video_render/android/video_render_android_impl.h"
-#include "webrtc/modules/video_render/android/video_render_android_native_opengl2.h"
-#include "webrtc/modules/video_render/android/video_render_android_surface_view.h"
-#define STANDARD_RENDERING kRenderAndroid
-
-#elif defined(WEBRTC_LINUX)
-#include "webrtc/modules/video_render/linux/video_render_linux_impl.h"
-#define STANDARD_RENDERING kRenderX11
-
-#else
-//Other platforms
-#endif
-
-#endif // WEBRTC_INCLUDE_INTERNAL_VIDEO_RENDER
-
-// For external rendering
-#include "webrtc/modules/video_render/external/video_render_external_impl.h"
-#ifndef STANDARD_RENDERING
-#define STANDARD_RENDERING kRenderExternal
-#endif // STANDARD_RENDERING
-
namespace webrtc {
VideoRender*
@@ -71,7 +30,7 @@ VideoRender::CreateVideoRender(const int32_t id,
VideoRenderType resultVideoRenderType = videoRenderType;
if (videoRenderType == kRenderDefault)
{
- resultVideoRenderType = STANDARD_RENDERING;
+ resultVideoRenderType = kRenderExternal;
}
return new ModuleVideoRenderImpl(id, resultVideoRenderType, window,
fullscreen);
@@ -98,97 +57,6 @@ ModuleVideoRenderImpl::ModuleVideoRenderImpl(
// Create platform specific renderer
switch (videoRenderType)
{
-#ifdef WEBRTC_INCLUDE_INTERNAL_VIDEO_RENDER
-
-#if defined(_WIN32)
- case kRenderWindows:
- {
- VideoRenderWindowsImpl* ptrRenderer;
- ptrRenderer = new VideoRenderWindowsImpl(_id, videoRenderType, window, _fullScreen);
- if (ptrRenderer)
- {
- _ptrRenderer = reinterpret_cast<IVideoRender*>(ptrRenderer);
- }
- }
- break;
-
-#elif defined(WEBRTC_IOS)
- case kRenderiOS:
- {
- VideoRenderIosImpl* ptrRenderer = new VideoRenderIosImpl(_id, window, _fullScreen);
- if(ptrRenderer)
- {
- _ptrRenderer = reinterpret_cast<IVideoRender*>(ptrRenderer);
- }
- }
- break;
-
-#elif defined(WEBRTC_MAC)
-
-#if defined(COCOA_RENDERING)
- case kRenderCocoa:
- {
- VideoRenderMacCocoaImpl* ptrRenderer = new VideoRenderMacCocoaImpl(_id, videoRenderType, window, _fullScreen);
- if(ptrRenderer)
- {
- _ptrRenderer = reinterpret_cast<IVideoRender*>(ptrRenderer);
- }
- }
-
- break;
-#elif defined(CARBON_RENDERING)
- case kRenderCarbon:
- {
- VideoRenderMacCarbonImpl* ptrRenderer = new VideoRenderMacCarbonImpl(_id, videoRenderType, window, _fullScreen);
- if(ptrRenderer)
- {
- _ptrRenderer = reinterpret_cast<IVideoRender*>(ptrRenderer);
- }
- }
- break;
-#endif
-
-#elif defined(WEBRTC_ANDROID)
- case kRenderAndroid:
- {
- if(AndroidNativeOpenGl2Renderer::UseOpenGL2(window))
- {
- AndroidNativeOpenGl2Renderer* ptrRenderer = NULL;
- ptrRenderer = new AndroidNativeOpenGl2Renderer(_id, videoRenderType, window, _fullScreen);
- if (ptrRenderer)
- {
- _ptrRenderer = reinterpret_cast<IVideoRender*> (ptrRenderer);
- }
- }
- else
- {
- AndroidSurfaceViewRenderer* ptrRenderer = NULL;
- ptrRenderer = new AndroidSurfaceViewRenderer(_id, videoRenderType, window, _fullScreen);
- if (ptrRenderer)
- {
- _ptrRenderer = reinterpret_cast<IVideoRender*> (ptrRenderer);
- }
- }
-
- }
- break;
-#elif defined(WEBRTC_LINUX)
- case kRenderX11:
- {
- VideoRenderLinuxImpl* ptrRenderer = NULL;
- ptrRenderer = new VideoRenderLinuxImpl(_id, videoRenderType, window, _fullScreen);
- if ( ptrRenderer )
- {
- _ptrRenderer = reinterpret_cast<IVideoRender*> (ptrRenderer);
- }
- }
- break;
-
-#else
- // Other platforms
-#endif
-
-#endif // WEBRTC_INCLUDE_INTERNAL_VIDEO_RENDER
case kRenderExternal:
{
VideoRenderExternalImpl* ptrRenderer(NULL);
@@ -238,66 +106,6 @@ ModuleVideoRenderImpl::~ModuleVideoRenderImpl()
delete ptrRenderer;
}
break;
-#ifdef WEBRTC_INCLUDE_INTERNAL_VIDEO_RENDER
-
-#if defined(_WIN32)
- case kRenderWindows:
- {
- VideoRenderWindowsImpl* ptrRenderer = reinterpret_cast<VideoRenderWindowsImpl*>(_ptrRenderer);
- _ptrRenderer = NULL;
- delete ptrRenderer;
- }
- break;
-#elif defined(WEBRTC_IOS)
- case kRenderiOS:
- {
- VideoRenderIosImpl* ptrRenderer = reinterpret_cast<VideoRenderIosImpl*> (_ptrRenderer);
- _ptrRenderer = NULL;
- delete ptrRenderer;
- }
- break;
-#elif defined(WEBRTC_MAC)
-
-#if defined(COCOA_RENDERING)
- case kRenderCocoa:
- {
- VideoRenderMacCocoaImpl* ptrRenderer = reinterpret_cast<VideoRenderMacCocoaImpl*> (_ptrRenderer);
- _ptrRenderer = NULL;
- delete ptrRenderer;
- }
- break;
-#elif defined(CARBON_RENDERING)
- case kRenderCarbon:
- {
- VideoRenderMacCarbonImpl* ptrRenderer = reinterpret_cast<VideoRenderMacCarbonImpl*> (_ptrRenderer);
- _ptrRenderer = NULL;
- delete ptrRenderer;
- }
- break;
-#endif
-
-#elif defined(WEBRTC_ANDROID)
- case kRenderAndroid:
- {
- VideoRenderAndroid* ptrRenderer = reinterpret_cast<VideoRenderAndroid*> (_ptrRenderer);
- _ptrRenderer = NULL;
- delete ptrRenderer;
- }
- break;
-
-#elif defined(WEBRTC_LINUX)
- case kRenderX11:
- {
- VideoRenderLinuxImpl* ptrRenderer = reinterpret_cast<VideoRenderLinuxImpl*> (_ptrRenderer);
- _ptrRenderer = NULL;
- delete ptrRenderer;
- }
- break;
-#else
- //other platforms
-#endif
-
-#endif // WEBRTC_INCLUDE_INTERNAL_VIDEO_RENDER
default:
// Error...
@@ -341,56 +149,7 @@ ModuleVideoRenderImpl::Window()
int32_t ModuleVideoRenderImpl::ChangeWindow(void* window)
{
-
- CriticalSectionScoped cs(&_moduleCrit);
-
-#ifdef WEBRTC_INCLUDE_INTERNAL_VIDEO_RENDER
-
-#if defined(WEBRTC_IOS) // WEBRTC_IOS must go before WEBRTC_MAC
- _ptrRenderer = NULL;
- delete _ptrRenderer;
-
- VideoRenderIosImpl* ptrRenderer;
- ptrRenderer = new VideoRenderIosImpl(_id, window, _fullScreen);
- if (!ptrRenderer)
- {
- return -1;
- }
- _ptrRenderer = reinterpret_cast<IVideoRender*>(ptrRenderer);
- return _ptrRenderer->ChangeWindow(window);
-#elif defined(WEBRTC_MAC)
-
- _ptrRenderer = NULL;
- delete _ptrRenderer;
-
-#if defined(COCOA_RENDERING)
- VideoRenderMacCocoaImpl* ptrRenderer;
- ptrRenderer = new VideoRenderMacCocoaImpl(_id, kRenderCocoa, window, _fullScreen);
-#elif defined(CARBON_RENDERING)
- VideoRenderMacCarbonImpl* ptrRenderer;
- ptrRenderer = new VideoRenderMacCarbonImpl(_id, kRenderCarbon, window, _fullScreen);
-#endif
- if (!ptrRenderer)
- {
- return -1;
- }
- _ptrRenderer = reinterpret_cast<IVideoRender*>(ptrRenderer);
- return _ptrRenderer->ChangeWindow(window);
-
-#else
- if (!_ptrRenderer)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: No renderer", __FUNCTION__);
- return -1;
- }
- return _ptrRenderer->ChangeWindow(window);
-
-#endif
-
-#else // WEBRTC_INCLUDE_INTERNAL_VIDEO_RENDER
return -1;
-#endif
}
int32_t ModuleVideoRenderImpl::Id()
diff --git a/modules/video_render/video_render_internal.h b/modules/video_render/video_render_internal.h
new file mode 100644
index 00000000..0508c1a7
--- /dev/null
+++ b/modules/video_render/video_render_internal.h
@@ -0,0 +1,27 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_VIDEO_RENDER_INTERNAL_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_VIDEO_RENDER_INTERNAL_H_
+
+#ifdef ANDROID
+#include <jni.h>
+
+namespace webrtc {
+
+// In order to be able to use the internal webrtc video render
+// for android, the jvm objects must be set via this method.
+int32_t SetRenderAndroidVM(JavaVM* javaVM);
+
+} // namespace webrtc
+
+#endif // ANDROID
+
+#endif // WEBRTC_MODULES_VIDEO_RENDER_VIDEO_RENDER_INTERNAL_H_
diff --git a/modules/video_render/video_render_internal_impl.cc b/modules/video_render/video_render_internal_impl.cc
new file mode 100644
index 00000000..106a3754
--- /dev/null
+++ b/modules/video_render/video_render_internal_impl.cc
@@ -0,0 +1,917 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <assert.h>
+
+#include "webrtc/engine_configurations.h"
+#include "webrtc/modules/video_render/i_video_render.h"
+#include "webrtc/modules/video_render/include/video_render_defines.h"
+#include "webrtc/modules/video_render/incoming_video_stream.h"
+#include "webrtc/modules/video_render/video_render_impl.h"
+#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/interface/trace.h"
+
+#if defined (_WIN32)
+#include "webrtc/modules/video_render/windows/video_render_windows_impl.h"
+#define STANDARD_RENDERING kRenderWindows
+
+// WEBRTC_IOS should go before WEBRTC_MAC because WEBRTC_MAC
+// gets defined if WEBRTC_IOS is defined
+#elif defined(WEBRTC_IOS)
+#define STANDARD_RENDERING kRenderiOS
+#include "webrtc/modules/video_render/ios/video_render_ios_impl.h"
+#elif defined(WEBRTC_MAC)
+#if defined(COCOA_RENDERING)
+#define STANDARD_RENDERING kRenderCocoa
+#include "webrtc/modules/video_render/mac/video_render_mac_cocoa_impl.h"
+#elif defined(CARBON_RENDERING)
+#define STANDARD_RENDERING kRenderCarbon
+#include "webrtc/modules/video_render/mac/video_render_mac_carbon_impl.h"
+#endif
+
+#elif defined(WEBRTC_ANDROID)
+#include "webrtc/modules/video_render/android/video_render_android_impl.h"
+#include "webrtc/modules/video_render/android/video_render_android_native_opengl2.h"
+#include "webrtc/modules/video_render/android/video_render_android_surface_view.h"
+#define STANDARD_RENDERING kRenderAndroid
+
+#elif defined(WEBRTC_LINUX)
+#include "webrtc/modules/video_render/linux/video_render_linux_impl.h"
+#define STANDARD_RENDERING kRenderX11
+
+#else
+//Other platforms
+#endif
+
+// For external rendering
+#include "webrtc/modules/video_render/external/video_render_external_impl.h"
+#ifndef STANDARD_RENDERING
+#define STANDARD_RENDERING kRenderExternal
+#endif // STANDARD_RENDERING
+
+namespace webrtc {
+
+VideoRender*
+VideoRender::CreateVideoRender(const int32_t id,
+ void* window,
+ const bool fullscreen,
+ const VideoRenderType videoRenderType/*=kRenderDefault*/)
+{
+ VideoRenderType resultVideoRenderType = videoRenderType;
+ if (videoRenderType == kRenderDefault)
+ {
+ resultVideoRenderType = STANDARD_RENDERING;
+ }
+ return new ModuleVideoRenderImpl(id, resultVideoRenderType, window,
+ fullscreen);
+}
+
+void VideoRender::DestroyVideoRender(
+ VideoRender* module)
+{
+ if (module)
+ {
+ delete module;
+ }
+}
+
+ModuleVideoRenderImpl::ModuleVideoRenderImpl(
+ const int32_t id,
+ const VideoRenderType videoRenderType,
+ void* window,
+ const bool fullscreen) :
+ _id(id), _moduleCrit(*CriticalSectionWrapper::CreateCriticalSection()),
+ _ptrWindow(window), _fullScreen(fullscreen), _ptrRenderer(NULL)
+{
+
+ // Create platform specific renderer
+ switch (videoRenderType)
+ {
+#if defined(_WIN32)
+ case kRenderWindows:
+ {
+ VideoRenderWindowsImpl* ptrRenderer;
+ ptrRenderer = new VideoRenderWindowsImpl(_id, videoRenderType, window, _fullScreen);
+ if (ptrRenderer)
+ {
+ _ptrRenderer = reinterpret_cast<IVideoRender*>(ptrRenderer);
+ }
+ }
+ break;
+
+#elif defined(WEBRTC_IOS)
+ case kRenderiOS:
+ {
+ VideoRenderIosImpl* ptrRenderer = new VideoRenderIosImpl(_id, window, _fullScreen);
+ if(ptrRenderer)
+ {
+ _ptrRenderer = reinterpret_cast<IVideoRender*>(ptrRenderer);
+ }
+ }
+ break;
+
+#elif defined(WEBRTC_MAC)
+
+#if defined(COCOA_RENDERING)
+ case kRenderCocoa:
+ {
+ VideoRenderMacCocoaImpl* ptrRenderer = new VideoRenderMacCocoaImpl(_id, videoRenderType, window, _fullScreen);
+ if(ptrRenderer)
+ {
+ _ptrRenderer = reinterpret_cast<IVideoRender*>(ptrRenderer);
+ }
+ }
+
+ break;
+#elif defined(CARBON_RENDERING)
+ case kRenderCarbon:
+ {
+ VideoRenderMacCarbonImpl* ptrRenderer = new VideoRenderMacCarbonImpl(_id, videoRenderType, window, _fullScreen);
+ if(ptrRenderer)
+ {
+ _ptrRenderer = reinterpret_cast<IVideoRender*>(ptrRenderer);
+ }
+ }
+ break;
+#endif
+
+#elif defined(WEBRTC_ANDROID)
+ case kRenderAndroid:
+ {
+ if(AndroidNativeOpenGl2Renderer::UseOpenGL2(window))
+ {
+ AndroidNativeOpenGl2Renderer* ptrRenderer = NULL;
+ ptrRenderer = new AndroidNativeOpenGl2Renderer(_id, videoRenderType, window, _fullScreen);
+ if (ptrRenderer)
+ {
+ _ptrRenderer = reinterpret_cast<IVideoRender*> (ptrRenderer);
+ }
+ }
+ else
+ {
+ AndroidSurfaceViewRenderer* ptrRenderer = NULL;
+ ptrRenderer = new AndroidSurfaceViewRenderer(_id, videoRenderType, window, _fullScreen);
+ if (ptrRenderer)
+ {
+ _ptrRenderer = reinterpret_cast<IVideoRender*> (ptrRenderer);
+ }
+ }
+
+ }
+ break;
+#elif defined(WEBRTC_LINUX)
+ case kRenderX11:
+ {
+ VideoRenderLinuxImpl* ptrRenderer = NULL;
+ ptrRenderer = new VideoRenderLinuxImpl(_id, videoRenderType, window, _fullScreen);
+ if ( ptrRenderer )
+ {
+ _ptrRenderer = reinterpret_cast<IVideoRender*> (ptrRenderer);
+ }
+ }
+ break;
+
+#else
+ // Other platforms
+#endif
+ case kRenderExternal:
+ {
+ VideoRenderExternalImpl* ptrRenderer(NULL);
+ ptrRenderer = new VideoRenderExternalImpl(_id, videoRenderType,
+ window, _fullScreen);
+ if (ptrRenderer)
+ {
+ _ptrRenderer = reinterpret_cast<IVideoRender*> (ptrRenderer);
+ }
+ }
+ break;
+ default:
+ // Error...
+ break;
+ }
+ if (_ptrRenderer)
+ {
+ if (_ptrRenderer->Init() == -1)
+ {
+ }
+ }
+}
+
+ModuleVideoRenderImpl::~ModuleVideoRenderImpl()
+{
+ delete &_moduleCrit;
+
+ for (IncomingVideoStreamMap::iterator it = _streamRenderMap.begin();
+ it != _streamRenderMap.end();
+ ++it) {
+ delete it->second;
+ }
+
+ // Delete platform specific renderer
+ if (_ptrRenderer)
+ {
+ VideoRenderType videoRenderType = _ptrRenderer->RenderType();
+
+ switch (videoRenderType)
+ {
+ case kRenderExternal:
+ {
+ VideoRenderExternalImpl
+ * ptrRenderer =
+ reinterpret_cast<VideoRenderExternalImpl*> (_ptrRenderer);
+ _ptrRenderer = NULL;
+ delete ptrRenderer;
+ }
+ break;
+#if defined(_WIN32)
+ case kRenderWindows:
+ {
+ VideoRenderWindowsImpl* ptrRenderer = reinterpret_cast<VideoRenderWindowsImpl*>(_ptrRenderer);
+ _ptrRenderer = NULL;
+ delete ptrRenderer;
+ }
+ break;
+#elif defined(WEBRTC_IOS)
+ case kRenderiOS:
+ {
+ VideoRenderIosImpl* ptrRenderer = reinterpret_cast<VideoRenderIosImpl*> (_ptrRenderer);
+ _ptrRenderer = NULL;
+ delete ptrRenderer;
+ }
+ break;
+#elif defined(WEBRTC_MAC)
+
+#if defined(COCOA_RENDERING)
+ case kRenderCocoa:
+ {
+ VideoRenderMacCocoaImpl* ptrRenderer = reinterpret_cast<VideoRenderMacCocoaImpl*> (_ptrRenderer);
+ _ptrRenderer = NULL;
+ delete ptrRenderer;
+ }
+ break;
+#elif defined(CARBON_RENDERING)
+ case kRenderCarbon:
+ {
+ VideoRenderMacCarbonImpl* ptrRenderer = reinterpret_cast<VideoRenderMacCarbonImpl*> (_ptrRenderer);
+ _ptrRenderer = NULL;
+ delete ptrRenderer;
+ }
+ break;
+#endif
+
+#elif defined(WEBRTC_ANDROID)
+ case kRenderAndroid:
+ {
+ VideoRenderAndroid* ptrRenderer = reinterpret_cast<VideoRenderAndroid*> (_ptrRenderer);
+ _ptrRenderer = NULL;
+ delete ptrRenderer;
+ }
+ break;
+
+#elif defined(WEBRTC_LINUX)
+ case kRenderX11:
+ {
+ VideoRenderLinuxImpl* ptrRenderer = reinterpret_cast<VideoRenderLinuxImpl*> (_ptrRenderer);
+ _ptrRenderer = NULL;
+ delete ptrRenderer;
+ }
+ break;
+#else
+ //other platforms
+#endif
+
+ default:
+ // Error...
+ break;
+ }
+ }
+}
+
+int32_t ModuleVideoRenderImpl::ChangeUniqueId(const int32_t id)
+{
+
+ CriticalSectionScoped cs(&_moduleCrit);
+
+ _id = id;
+
+ if (_ptrRenderer)
+ {
+ _ptrRenderer->ChangeUniqueId(_id);
+ }
+
+ return 0;
+}
+
+int32_t ModuleVideoRenderImpl::TimeUntilNextProcess()
+{
+ // Not used
+ return 50;
+}
+int32_t ModuleVideoRenderImpl::Process()
+{
+ // Not used
+ return 0;
+}
+
+void*
+ModuleVideoRenderImpl::Window()
+{
+ CriticalSectionScoped cs(&_moduleCrit);
+ return _ptrWindow;
+}
+
+int32_t ModuleVideoRenderImpl::ChangeWindow(void* window)
+{
+
+ CriticalSectionScoped cs(&_moduleCrit);
+
+#if defined(WEBRTC_IOS) // WEBRTC_IOS must go before WEBRTC_MAC
+ _ptrRenderer = NULL;
+ delete _ptrRenderer;
+
+ VideoRenderIosImpl* ptrRenderer;
+ ptrRenderer = new VideoRenderIosImpl(_id, window, _fullScreen);
+ if (!ptrRenderer)
+ {
+ return -1;
+ }
+ _ptrRenderer = reinterpret_cast<IVideoRender*>(ptrRenderer);
+ return _ptrRenderer->ChangeWindow(window);
+#elif defined(WEBRTC_MAC)
+
+ _ptrRenderer = NULL;
+ delete _ptrRenderer;
+
+#if defined(COCOA_RENDERING)
+ VideoRenderMacCocoaImpl* ptrRenderer;
+ ptrRenderer = new VideoRenderMacCocoaImpl(_id, kRenderCocoa, window, _fullScreen);
+#elif defined(CARBON_RENDERING)
+ VideoRenderMacCarbonImpl* ptrRenderer;
+ ptrRenderer = new VideoRenderMacCarbonImpl(_id, kRenderCarbon, window, _fullScreen);
+#endif
+ if (!ptrRenderer)
+ {
+ return -1;
+ }
+ _ptrRenderer = reinterpret_cast<IVideoRender*>(ptrRenderer);
+ return _ptrRenderer->ChangeWindow(window);
+
+#else
+ if (!_ptrRenderer)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+ "%s: No renderer", __FUNCTION__);
+ return -1;
+ }
+ return _ptrRenderer->ChangeWindow(window);
+
+#endif
+}
+
+int32_t ModuleVideoRenderImpl::Id()
+{
+ CriticalSectionScoped cs(&_moduleCrit);
+ return _id;
+}
+
+uint32_t ModuleVideoRenderImpl::GetIncomingFrameRate(const uint32_t streamId) {
+ CriticalSectionScoped cs(&_moduleCrit);
+
+ IncomingVideoStreamMap::iterator it = _streamRenderMap.find(streamId);
+
+ if (it == _streamRenderMap.end()) {
+ // This stream doesn't exist
+ WEBRTC_TRACE(kTraceError,
+ kTraceVideoRenderer,
+ _id,
+ "%s: stream doesn't exist",
+ __FUNCTION__);
+ return 0;
+ }
+ assert(it->second != NULL);
+ return it->second->IncomingRate();
+}
+
+VideoRenderCallback*
+ModuleVideoRenderImpl::AddIncomingRenderStream(const uint32_t streamId,
+ const uint32_t zOrder,
+ const float left,
+ const float top,
+ const float right,
+ const float bottom)
+{
+ CriticalSectionScoped cs(&_moduleCrit);
+
+ if (!_ptrRenderer)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+ "%s: No renderer", __FUNCTION__);
+ return NULL;
+ }
+
+ if (_streamRenderMap.find(streamId) != _streamRenderMap.end()) {
+ // The stream already exists...
+ WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+ "%s: stream already exists", __FUNCTION__);
+ return NULL;
+ }
+
+ VideoRenderCallback* ptrRenderCallback =
+ _ptrRenderer->AddIncomingRenderStream(streamId, zOrder, left, top,
+ right, bottom);
+ if (ptrRenderCallback == NULL)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+ "%s: Can't create incoming stream in renderer",
+ __FUNCTION__);
+ return NULL;
+ }
+
+    // Create platform independent code
+ IncomingVideoStream* ptrIncomingStream = new IncomingVideoStream(_id,
+ streamId);
+ if (ptrIncomingStream == NULL)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+ "%s: Can't create incoming stream", __FUNCTION__);
+ return NULL;
+ }
+
+
+ if (ptrIncomingStream->SetRenderCallback(ptrRenderCallback) == -1)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+ "%s: Can't set render callback", __FUNCTION__);
+ delete ptrIncomingStream;
+ _ptrRenderer->DeleteIncomingRenderStream(streamId);
+ return NULL;
+ }
+
+ VideoRenderCallback* moduleCallback =
+ ptrIncomingStream->ModuleCallback();
+
+ // Store the stream
+ _streamRenderMap[streamId] = ptrIncomingStream;
+
+ return moduleCallback;
+}
+
+int32_t ModuleVideoRenderImpl::DeleteIncomingRenderStream(
+ const uint32_t streamId)
+{
+ CriticalSectionScoped cs(&_moduleCrit);
+
+ if (!_ptrRenderer)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+ "%s: No renderer", __FUNCTION__);
+ return -1;
+ }
+
+ IncomingVideoStreamMap::iterator item = _streamRenderMap.find(streamId);
+ if (item == _streamRenderMap.end())
+ {
+ WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+ "%s: stream doesn't exist", __FUNCTION__);
+ return -1;
+ }
+
+ delete item->second;
+
+ _ptrRenderer->DeleteIncomingRenderStream(streamId);
+
+ _streamRenderMap.erase(item);
+
+ return 0;
+}
+
+int32_t ModuleVideoRenderImpl::AddExternalRenderCallback(
+ const uint32_t streamId,
+ VideoRenderCallback* renderObject) {
+ CriticalSectionScoped cs(&_moduleCrit);
+
+ IncomingVideoStreamMap::iterator item = _streamRenderMap.find(streamId);
+
+ if (item == _streamRenderMap.end())
+ {
+ WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+ "%s: stream doesn't exist", __FUNCTION__);
+ return -1;
+ }
+
+ if (item->second == NULL) {
+ WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+ "%s: could not get stream", __FUNCTION__);
+ return -1;
+ }
+ return item->second->SetExternalCallback(renderObject);
+}
+
+int32_t ModuleVideoRenderImpl::GetIncomingRenderStreamProperties(
+ const uint32_t streamId,
+ uint32_t& zOrder,
+ float& left,
+ float& top,
+ float& right,
+ float& bottom) const {
+ CriticalSectionScoped cs(&_moduleCrit);
+
+ if (!_ptrRenderer)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+ "%s: No renderer", __FUNCTION__);
+ return -1;
+ }
+
+ return _ptrRenderer->GetIncomingRenderStreamProperties(streamId, zOrder,
+ left, top, right,
+ bottom);
+}
+
+uint32_t ModuleVideoRenderImpl::GetNumIncomingRenderStreams() const
+{
+ CriticalSectionScoped cs(&_moduleCrit);
+
+ return static_cast<uint32_t>(_streamRenderMap.size());
+}
+
+bool ModuleVideoRenderImpl::HasIncomingRenderStream(
+ const uint32_t streamId) const {
+ CriticalSectionScoped cs(&_moduleCrit);
+
+ return _streamRenderMap.find(streamId) != _streamRenderMap.end();
+}
+
+int32_t ModuleVideoRenderImpl::RegisterRawFrameCallback(
+ const uint32_t streamId,
+ VideoRenderCallback* callbackObj) {
+ return -1;
+}
+
+int32_t ModuleVideoRenderImpl::StartRender(const uint32_t streamId)
+{
+ CriticalSectionScoped cs(&_moduleCrit);
+
+ if (!_ptrRenderer)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+ "%s: No renderer", __FUNCTION__);
+ return -1;
+ }
+
+ // Start the stream
+ IncomingVideoStreamMap::iterator item = _streamRenderMap.find(streamId);
+
+ if (item == _streamRenderMap.end())
+ {
+ return -1;
+ }
+
+ if (item->second->Start() == -1)
+ {
+ return -1;
+ }
+
+ // Start the HW renderer
+ if (_ptrRenderer->StartRender() == -1)
+ {
+ return -1;
+ }
+ return 0;
+}
+
+int32_t ModuleVideoRenderImpl::StopRender(const uint32_t streamId)
+{
+ CriticalSectionScoped cs(&_moduleCrit);
+
+ if (!_ptrRenderer)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+ "%s(%d): No renderer", __FUNCTION__, streamId);
+ return -1;
+ }
+
+ // Stop the incoming stream
+ IncomingVideoStreamMap::iterator item = _streamRenderMap.find(streamId);
+
+ if (item == _streamRenderMap.end())
+ {
+ return -1;
+ }
+
+ if (item->second->Stop() == -1)
+ {
+ return -1;
+ }
+
+ return 0;
+}
+
+int32_t ModuleVideoRenderImpl::ResetRender()
+{
+ CriticalSectionScoped cs(&_moduleCrit);
+
+ int32_t ret = 0;
+ // Loop through all incoming streams and reset them
+ for (IncomingVideoStreamMap::iterator it = _streamRenderMap.begin();
+ it != _streamRenderMap.end();
+ ++it) {
+ if (it->second->Reset() == -1)
+ ret = -1;
+ }
+ return ret;
+}
+
+RawVideoType ModuleVideoRenderImpl::PreferredVideoType() const
+{
+ CriticalSectionScoped cs(&_moduleCrit);
+
+ if (_ptrRenderer == NULL)
+ {
+ return kVideoI420;
+ }
+
+ return _ptrRenderer->PerferedVideoType();
+}
+
+bool ModuleVideoRenderImpl::IsFullScreen()
+{
+ CriticalSectionScoped cs(&_moduleCrit);
+
+ if (!_ptrRenderer)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+ "%s: No renderer", __FUNCTION__);
+ return false;
+ }
+ return _ptrRenderer->FullScreen();
+}
+
+int32_t ModuleVideoRenderImpl::GetScreenResolution(
+ uint32_t& screenWidth,
+ uint32_t& screenHeight) const
+{
+ CriticalSectionScoped cs(&_moduleCrit);
+
+ if (!_ptrRenderer)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+ "%s: No renderer", __FUNCTION__);
+ return false;
+ }
+ return _ptrRenderer->GetScreenResolution(screenWidth, screenHeight);
+}
+
+uint32_t ModuleVideoRenderImpl::RenderFrameRate(
+ const uint32_t streamId)
+{
+ CriticalSectionScoped cs(&_moduleCrit);
+
+ if (!_ptrRenderer)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+ "%s: No renderer", __FUNCTION__);
+ return false;
+ }
+ return _ptrRenderer->RenderFrameRate(streamId);
+}
+
+int32_t ModuleVideoRenderImpl::SetStreamCropping(
+ const uint32_t streamId,
+ const float left,
+ const float top,
+ const float right,
+ const float bottom)
+{
+ CriticalSectionScoped cs(&_moduleCrit);
+
+ if (!_ptrRenderer)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+ "%s: No renderer", __FUNCTION__);
+ return false;
+ }
+ return _ptrRenderer->SetStreamCropping(streamId, left, top, right, bottom);
+}
+
+int32_t ModuleVideoRenderImpl::SetTransparentBackground(const bool enable)
+{
+ CriticalSectionScoped cs(&_moduleCrit);
+
+ if (!_ptrRenderer)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+ "%s: No renderer", __FUNCTION__);
+ return false;
+ }
+ return _ptrRenderer->SetTransparentBackground(enable);
+}
+
+int32_t ModuleVideoRenderImpl::FullScreenRender(void* window, const bool enable)
+{
+ return -1;
+}
+
+int32_t ModuleVideoRenderImpl::SetText(
+ const uint8_t textId,
+ const uint8_t* text,
+ const int32_t textLength,
+ const uint32_t textColorRef,
+ const uint32_t backgroundColorRef,
+ const float left, const float top,
+ const float right,
+ const float bottom)
+{
+ CriticalSectionScoped cs(&_moduleCrit);
+
+ if (!_ptrRenderer)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+ "%s: No renderer", __FUNCTION__);
+ return -1;
+ }
+ return _ptrRenderer->SetText(textId, text, textLength, textColorRef,
+ backgroundColorRef, left, top, right, bottom);
+}
+
+int32_t ModuleVideoRenderImpl::SetBitmap(const void* bitMap,
+ const uint8_t pictureId,
+ const void* colorKey,
+ const float left,
+ const float top,
+ const float right,
+ const float bottom)
+{
+ CriticalSectionScoped cs(&_moduleCrit);
+
+ if (!_ptrRenderer)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+ "%s: No renderer", __FUNCTION__);
+ return -1;
+ }
+ return _ptrRenderer->SetBitmap(bitMap, pictureId, colorKey, left, top,
+ right, bottom);
+}
+
+int32_t ModuleVideoRenderImpl::GetLastRenderedFrame(
+ const uint32_t streamId,
+ I420VideoFrame &frame) const
+{
+ CriticalSectionScoped cs(&_moduleCrit);
+
+ if (!_ptrRenderer)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+ "%s: No renderer", __FUNCTION__);
+ return -1;
+ }
+
+ IncomingVideoStreamMap::const_iterator item =
+ _streamRenderMap.find(streamId);
+ if (item == _streamRenderMap.end())
+ {
+ // This stream doesn't exist
+ WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+ "%s: stream doesn't exist", __FUNCTION__);
+ return 0;
+ }
+
+ assert(item->second != NULL);
+ return item->second->GetLastRenderedFrame(frame);
+}
+
+int32_t ModuleVideoRenderImpl::SetExpectedRenderDelay(
+ uint32_t stream_id, int32_t delay_ms) {
+ CriticalSectionScoped cs(&_moduleCrit);
+
+ if (!_ptrRenderer) {
+ WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+ "%s: No renderer", __FUNCTION__);
+ return false;
+ }
+
+ IncomingVideoStreamMap::const_iterator item =
+ _streamRenderMap.find(stream_id);
+ if (item == _streamRenderMap.end()) {
+ // This stream doesn't exist
+ WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+ "%s(%u, %d): stream doesn't exist", __FUNCTION__, stream_id,
+ delay_ms);
+ return -1;
+ }
+
+ assert(item->second != NULL);
+ return item->second->SetExpectedRenderDelay(delay_ms);
+}
+
+int32_t ModuleVideoRenderImpl::ConfigureRenderer(
+ const uint32_t streamId,
+ const unsigned int zOrder,
+ const float left,
+ const float top,
+ const float right,
+ const float bottom)
+{
+ CriticalSectionScoped cs(&_moduleCrit);
+
+ if (!_ptrRenderer)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+ "%s: No renderer", __FUNCTION__);
+ return false;
+ }
+ return _ptrRenderer->ConfigureRenderer(streamId, zOrder, left, top, right,
+ bottom);
+}
+
+int32_t ModuleVideoRenderImpl::SetStartImage(
+ const uint32_t streamId,
+ const I420VideoFrame& videoFrame)
+{
+ CriticalSectionScoped cs(&_moduleCrit);
+
+ if (!_ptrRenderer)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+ "%s: No renderer", __FUNCTION__);
+ return -1;
+ }
+
+ IncomingVideoStreamMap::const_iterator item =
+ _streamRenderMap.find(streamId);
+ if (item == _streamRenderMap.end())
+ {
+ // This stream doesn't exist
+ WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+ "%s: stream doesn't exist", __FUNCTION__);
+ return -1;
+ }
+ assert (item->second != NULL);
+ return item->second->SetStartImage(videoFrame);
+
+}
+
+int32_t ModuleVideoRenderImpl::SetTimeoutImage(
+ const uint32_t streamId,
+ const I420VideoFrame& videoFrame,
+ const uint32_t timeout)
+{
+ CriticalSectionScoped cs(&_moduleCrit);
+
+ if (!_ptrRenderer)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+ "%s: No renderer", __FUNCTION__);
+ return -1;
+ }
+
+ IncomingVideoStreamMap::const_iterator item =
+ _streamRenderMap.find(streamId);
+ if (item == _streamRenderMap.end())
+ {
+ // This stream doesn't exist
+ WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+ "%s: stream doesn't exist", __FUNCTION__);
+ return -1;
+ }
+ assert(item->second != NULL);
+ return item->second->SetTimeoutImage(videoFrame, timeout);
+}
+
+int32_t ModuleVideoRenderImpl::MirrorRenderStream(const int renderId,
+ const bool enable,
+ const bool mirrorXAxis,
+ const bool mirrorYAxis)
+{
+ CriticalSectionScoped cs(&_moduleCrit);
+
+ if (!_ptrRenderer)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+ "%s: No renderer", __FUNCTION__);
+ return -1;
+ }
+
+ IncomingVideoStreamMap::const_iterator item =
+ _streamRenderMap.find(renderId);
+ if (item == _streamRenderMap.end())
+ {
+ // This stream doesn't exist
+ WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+ "%s: stream doesn't exist", __FUNCTION__);
+ return 0;
+ }
+ assert(item->second != NULL);
+
+ return item->second->EnableMirroring(enable, mirrorXAxis, mirrorYAxis);
+}
+
+} // namespace webrtc
diff --git a/overrides/OWNERS b/overrides/OWNERS
new file mode 100644
index 00000000..1a24a6a8
--- /dev/null
+++ b/overrides/OWNERS
@@ -0,0 +1,13 @@
+henrika@webrtc.org
+henrike@webrtc.org
+henrikg@webrtc.org
+hta@webrtc.org
+jiayl@webrtc.org
+juberti@webrtc.org
+mflodman@webrtc.org
+perkj@webrtc.org
+pthatcher@webrtc.org
+sergeyu@chromium.org
+tommi@webrtc.org
+
+per-file BUILD.gn=kjellander@webrtc.org
diff --git a/overrides/webrtc/base/constructormagic.h b/overrides/webrtc/base/constructormagic.h
index bb89f91f..72b334c1 100644
--- a/overrides/webrtc/base/constructormagic.h
+++ b/overrides/webrtc/base/constructormagic.h
@@ -15,6 +15,6 @@
#ifndef OVERRIDES_WEBRTC_BASE_CONSTRUCTORMAGIC_H__
#define OVERRIDES_WEBRTC_BASE_CONSTRUCTORMAGIC_H__
-#include "base/basictypes.h"
+#include "base/macros.h"
#endif // OVERRIDES_WEBRTC_BASE_CONSTRUCTORMAGIC_H__
diff --git a/test/channel_transport/udp_socket2_win.h b/test/channel_transport/udp_socket2_win.h
index e4d6b8c2..629c9c36 100644
--- a/test/channel_transport/udp_socket2_win.h
+++ b/test/channel_transport/udp_socket2_win.h
@@ -42,36 +42,37 @@ public:
bool ipV6Enable = false, bool disableGQOS = false);
virtual ~UdpSocket2Windows();
- virtual int32_t ChangeUniqueId(const int32_t id);
+ virtual int32_t ChangeUniqueId(const int32_t id) OVERRIDE;
- virtual bool ValidHandle();
+ virtual bool ValidHandle() OVERRIDE;
- virtual bool SetCallback(CallbackObj, IncomingSocketCallback);
+ virtual bool SetCallback(CallbackObj, IncomingSocketCallback) OVERRIDE;
- virtual bool Bind(const SocketAddress& name);
+ virtual bool Bind(const SocketAddress& name) OVERRIDE;
virtual bool SetSockopt(int32_t level, int32_t optname,
- const int8_t* optval, int32_t optlen);
+ const int8_t* optval, int32_t optlen) OVERRIDE;
- virtual bool StartReceiving(const uint32_t receiveBuffers);
- virtual inline bool StartReceiving() {return StartReceiving(8);}
- virtual bool StopReceiving();
+ virtual bool StartReceiving(const uint32_t receiveBuffers) OVERRIDE;
+ virtual inline bool StartReceiving() OVERRIDE {return StartReceiving(8);}
+ virtual bool StopReceiving() OVERRIDE;
virtual int32_t SendTo(const int8_t* buf, int32_t len,
- const SocketAddress& to);
+ const SocketAddress& to) OVERRIDE;
- virtual void CloseBlocking();
+ virtual void CloseBlocking() OVERRIDE;
+
+ SOCKET GetFd() { return _socket;}
- virtual SOCKET GetFd() { return _socket;}
virtual bool SetQos(int32_t serviceType, int32_t tokenRate,
int32_t bucketSize, int32_t peekBandwith,
int32_t minPolicedSize, int32_t maxSduSize,
const SocketAddress &stRemName,
- int32_t overrideDSCP = 0);
+ int32_t overrideDSCP = 0) OVERRIDE;
- virtual int32_t SetTOS(const int32_t serviceType);
- virtual int32_t SetPCP(const int32_t pcp);
+ virtual int32_t SetTOS(const int32_t serviceType) OVERRIDE;
+ virtual int32_t SetPCP(const int32_t pcp) OVERRIDE;
- virtual uint32_t ReceiveBuffers(){return _receiveBuffers.Value();}
+ virtual uint32_t ReceiveBuffers() OVERRIDE {return _receiveBuffers.Value();}
protected:
void IOCompleted(PerIoContext* pIOContext, uint32_t ioSize, uint32_t error);
diff --git a/test/channel_transport/udp_socket_posix.cc b/test/channel_transport/udp_socket_posix.cc
index 6fc9441d..355da536 100644
--- a/test/channel_transport/udp_socket_posix.cc
+++ b/test/channel_transport/udp_socket_posix.cc
@@ -33,7 +33,6 @@ UdpSocketPosix::UdpSocketPosix(const int32_t id, UdpSocketManager* mgr,
"UdpSocketPosix::UdpSocketPosix()");
_wantsIncoming = false;
- _error = 0;
_mgr = mgr;
_id = id;
@@ -129,9 +128,8 @@ bool UdpSocketPosix::SetSockopt(int32_t level, int32_t optname,
return true;
}
- _error = errno;
WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
- "UdpSocketPosix::SetSockopt(), error:%d", _error);
+ "UdpSocketPosix::SetSockopt(), error:%d", errno);
return false;
}
@@ -151,9 +149,8 @@ bool UdpSocketPosix::Bind(const SocketAddress& name)
{
return true;
}
- _error = errno;
WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
- "UdpSocketPosix::Bind() error: %d",_error);
+ "UdpSocketPosix::Bind() error: %d", errno);
return false;
}
@@ -165,16 +162,14 @@ int32_t UdpSocketPosix::SendTo(const int8_t* buf, int32_t len,
reinterpret_cast<const sockaddr*>(&to), size);
if(retVal == SOCKET_ERROR)
{
- _error = errno;
WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
- "UdpSocketPosix::SendTo() error: %d", _error);
+ "UdpSocketPosix::SendTo() error: %d", errno);
}
return retVal;
}
SOCKET UdpSocketPosix::GetFd() { return _socket; }
-int32_t UdpSocketPosix::GetError() { return _error; }
bool UdpSocketPosix::ValidHandle()
{
diff --git a/test/channel_transport/udp_socket_posix.h b/test/channel_transport/udp_socket_posix.h
index 9b2c037a..8458ca0d 100644
--- a/test/channel_transport/udp_socket_posix.h
+++ b/test/channel_transport/udp_socket_posix.h
@@ -52,8 +52,7 @@ public:
// TODO (hellner): make destructor protected.
virtual void CloseBlocking() OVERRIDE;
- virtual SOCKET GetFd();
- virtual int32_t GetError();
+ SOCKET GetFd();
virtual bool ValidHandle() OVERRIDE;
@@ -76,7 +75,6 @@ private:
int32_t _id;
IncomingSocketCallback _incomingCb;
CallbackObj _obj;
- int32_t _error;
SOCKET _socket;
UdpSocketManager* _mgr;
diff --git a/test/channel_transport/udp_transport_impl.h b/test/channel_transport/udp_transport_impl.h
index 8c74daf9..2b804c35 100644
--- a/test/channel_transport/udp_transport_impl.h
+++ b/test/channel_transport/udp_transport_impl.h
@@ -60,8 +60,8 @@ public:
virtual int32_t InitializeSourcePorts(
const uint16_t rtpPort,
const uint16_t rtcpPort = 0) OVERRIDE;
- virtual int32_t SourcePorts(uint16_t& rtpPort, uint16_t& rtcpPort) const
- OVERRIDE;
+ virtual int32_t SourcePorts(uint16_t& rtpPort,
+ uint16_t& rtcpPort) const OVERRIDE;
virtual int32_t ReceiveSocketInformation(
char ipAddr[kIpAddressVersion6Length],
uint16_t& rtpPort,
diff --git a/test/configurable_frame_size_encoder.cc b/test/configurable_frame_size_encoder.cc
index b246da35..d3ed784c 100644
--- a/test/configurable_frame_size_encoder.cc
+++ b/test/configurable_frame_size_encoder.cc
@@ -12,9 +12,11 @@
#include <string.h>
-#include "webrtc/common_video/interface/video_image.h"
#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/common_video/interface/video_image.h"
+#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
+
namespace webrtc {
namespace test {
diff --git a/test/configurable_frame_size_encoder.h b/test/configurable_frame_size_encoder.h
index f29038fa..4120bc6c 100644
--- a/test/configurable_frame_size_encoder.h
+++ b/test/configurable_frame_size_encoder.h
@@ -14,7 +14,7 @@
#include <vector>
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
-#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
+#include "webrtc/video_encoder.h"
namespace webrtc {
namespace test {
diff --git a/test/encoder_settings.cc b/test/encoder_settings.cc
index 9842d1ed..0eeb0b9f 100644
--- a/test/encoder_settings.cc
+++ b/test/encoder_settings.cc
@@ -12,6 +12,7 @@
#include <assert.h>
#include <string.h>
+#include "webrtc/video_encoder.h"
#include "webrtc/video_engine/vie_defines.h"
namespace webrtc {
@@ -68,19 +69,9 @@ VideoCodec CreateDecoderVideoCodec(
}
if (codec.codecType == kVideoCodecVP8) {
- codec.codecSpecific.VP8.resilience = kResilientStream;
- codec.codecSpecific.VP8.numberOfTemporalLayers = 1;
- codec.codecSpecific.VP8.denoisingOn = true;
- codec.codecSpecific.VP8.errorConcealmentOn = false;
- codec.codecSpecific.VP8.automaticResizeOn = false;
- codec.codecSpecific.VP8.frameDroppingOn = true;
- codec.codecSpecific.VP8.keyFrameInterval = 3000;
- }
-
- if (codec.codecType == kVideoCodecH264) {
- codec.codecSpecific.H264.profile = kProfileBase;
- codec.codecSpecific.H264.frameDroppingOn = true;
- codec.codecSpecific.H264.keyFrameInterval = 3000;
+ codec.codecSpecific.VP8 = VideoEncoder::GetDefaultVp8Settings();
+ } else if (codec.codecType == kVideoCodecH264) {
+ codec.codecSpecific.H264 = VideoEncoder::GetDefaultH264Settings();
}
codec.width = 320;
diff --git a/test/fake_encoder.cc b/test/fake_encoder.cc
index ecd3dd87..9551c820 100644
--- a/test/fake_encoder.cc
+++ b/test/fake_encoder.cc
@@ -12,6 +12,8 @@
#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
+
namespace webrtc {
namespace test {
diff --git a/test/fake_encoder.h b/test/fake_encoder.h
index 3c6f7350..50b86520 100644
--- a/test/fake_encoder.h
+++ b/test/fake_encoder.h
@@ -13,8 +13,9 @@
#include <vector>
-#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
+#include "webrtc/common_types.h"
#include "webrtc/system_wrappers/interface/clock.h"
+#include "webrtc/video_encoder.h"
namespace webrtc {
namespace test {
diff --git a/test/webrtc_test_common.gyp b/test/webrtc_test_common.gyp
index be6b303f..71b875d1 100644
--- a/test/webrtc_test_common.gyp
+++ b/test/webrtc_test_common.gyp
@@ -59,7 +59,8 @@
'<(DEPTH)/testing/gtest.gyp:gtest',
'<(DEPTH)/third_party/gflags/gflags.gyp:gflags',
'<(webrtc_root)/modules/modules.gyp:media_file',
- '<(webrtc_root)/modules/modules.gyp:video_capture_module',
+ '<(webrtc_root)/modules/modules.gyp:video_capture_module_impl',
+ '<(webrtc_root)/modules/modules.gyp:video_render_module_impl',
'<(webrtc_root)/test/test.gyp:frame_generator',
'<(webrtc_root)/test/test.gyp:test_support',
'<(webrtc_root)/webrtc.gyp:webrtc',
@@ -103,11 +104,29 @@
'sources!': [
'null_platform_renderer.cc',
],
+ 'variables': {
+ # 'directx_sdk_path' will be overridden in the condition block
+ # below, but it must not be declared as empty here since gyp
+ # will check if the first character is '/' for some reason.
+ # If it's empty, we'll get an out-of-bounds error.
+ 'directx_sdk_path': 'will_be_overridden',
+ 'directx_sdk_default_path': '<(DEPTH)/third_party/directxsdk/files',
+ 'conditions': [
+ ['"<!(python <(DEPTH)/build/dir_exists.py <(directx_sdk_default_path))"=="True"', {
+ 'directx_sdk_path': '<(DEPTH)/third_party/directxsdk/files',
+ }, {
+ 'directx_sdk_path': '$(DXSDK_DIR)',
+ }],
+ ],
+ },
+
+ 'include_dirs': [
+ '<(directx_sdk_path)/Include',
+ ],
}],
],
'dependencies': [
'<(DEPTH)/testing/gtest.gyp:gtest',
- '<(webrtc_root)/modules/modules.gyp:video_capture_module',
'<(webrtc_root)/modules/modules.gyp:media_file',
'<(webrtc_root)/test/test.gyp:frame_generator',
'<(webrtc_root)/test/test.gyp:test_support',
@@ -138,39 +157,6 @@
],
},
},
- {
- # This target is only needed since the video render module builds platform
- # specific code and depends on these libraries. This target should be
- # removed as soon as the new video API doesn't depend on the module.
- # TODO(mflodman) Remove this target as described above.
- 'target_name': 'webrtc_test_video_render_dependencies',
- 'type': 'static_library',
- 'direct_dependent_settings': {
- 'conditions': [
- ['OS=="linux"', {
- 'libraries': [
- '-lXext',
- '-lX11',
- '-lGL',
- ],
- }],
- ['OS=="android"', {
- 'libraries' : [
- '-lGLESv2', '-llog',
- ],
- }],
- ['OS=="mac"', {
- 'xcode_settings' : {
- 'OTHER_LDFLAGS' : [
- '-framework Cocoa',
- '-framework OpenGL',
- '-framework CoreVideo',
- ],
- },
- }],
- ],
- },
- },
],
'conditions': [
['include_tests==1', {
diff --git a/test/win/d3d_renderer.h b/test/win/d3d_renderer.h
index e8b06926..7f375351 100644
--- a/test/win/d3d_renderer.h
+++ b/test/win/d3d_renderer.h
@@ -12,6 +12,7 @@
#include <Windows.h>
#include <d3d9.h>
+#pragma comment(lib, "d3d9.lib") // located in DirectX SDK
#include "webrtc/system_wrappers/interface/scoped_refptr.h"
#include "webrtc/test/video_renderer.h"
diff --git a/video/bitrate_estimator_tests.cc b/video/bitrate_estimator_tests.cc
index 9b55cd10..40c1ed68 100644
--- a/video/bitrate_estimator_tests.cc
+++ b/video/bitrate_estimator_tests.cc
@@ -26,9 +26,6 @@
#include "webrtc/test/fake_encoder.h"
#include "webrtc/test/frame_generator_capturer.h"
-// Disabled on Android since all tests currently fail (webrtc:3770).
-#ifndef WEBRTC_ANDROID
-
namespace webrtc {
namespace {
// Note: consider to write tests that don't depend on the trace system instead
@@ -332,5 +329,3 @@ TEST_F(BitrateEstimatorTest, SwitchesToASTThenBackToTOF) {
EXPECT_EQ(kEventSignaled, receiver_trace_.Wait());
}
} // namespace webrtc
-
-#endif // !WEBRTC_ANDROID
diff --git a/video/call.cc b/video/call.cc
index 8b71acfd..b4adafd7 100644
--- a/video/call.cc
+++ b/video/call.cc
@@ -18,6 +18,7 @@
#include "webrtc/common.h"
#include "webrtc/config.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
+#include "webrtc/modules/video_coding/codecs/vp8/include/vp8.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/rw_lock_wrapper.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
@@ -41,6 +42,15 @@ bool RtpExtension::IsSupported(const std::string& name) {
name == webrtc::RtpExtension::kAbsSendTime;
}
+VideoEncoder* VideoEncoder::Create(VideoEncoder::EncoderType codec_type) {
+ switch (codec_type) {
+ case kVp8:
+ return VP8Encoder::Create();
+ }
+ assert(false);
+ return NULL;
+}
+
namespace internal {
class CpuOveruseObserverProxy : public webrtc::CpuOveruseObserver {
diff --git a/video/call_perf_tests.cc b/video/call_perf_tests.cc
index 3d1bf7f5..557c5149 100644
--- a/video/call_perf_tests.cc
+++ b/video/call_perf_tests.cc
@@ -41,9 +41,6 @@
#include "webrtc/voice_engine/include/voe_rtp_rtcp.h"
#include "webrtc/voice_engine/include/voe_video_sync.h"
-// Disabled on Android since all these tests currently fail (webrtc:3770).
-#ifndef WEBRTC_ANDROID
-
namespace webrtc {
class CallPerfTest : public test::CallTest {
@@ -579,5 +576,3 @@ TEST_F(CallPerfTest, NoPadWithoutMinTransmitBitrate) {
}
} // namespace webrtc
-
-#endif // !WEBRTC_ANDROID
diff --git a/video/end_to_end_tests.cc b/video/end_to_end_tests.cc
index 114ab65b..46420ecc 100644
--- a/video/end_to_end_tests.cc
+++ b/video/end_to_end_tests.cc
@@ -19,7 +19,6 @@
#include "webrtc/call.h"
#include "webrtc/frame_callback.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
-#include "webrtc/modules/video_coding/codecs/vp8/include/vp8.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/event_wrapper.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
@@ -35,11 +34,10 @@
#include "webrtc/test/null_transport.h"
#include "webrtc/test/rtp_rtcp_observer.h"
#include "webrtc/test/testsupport/fileutils.h"
+#include "webrtc/test/testsupport/gtest_disable.h"
#include "webrtc/test/testsupport/perf_test.h"
#include "webrtc/video/transport_adapter.h"
-
-// Disabled on Android since all tests currently fail (webrtc:3770).
-#ifndef WEBRTC_ANDROID
+#include "webrtc/video_encoder.h"
namespace webrtc {
@@ -580,7 +578,8 @@ TEST_F(EndToEndTest, DecodesRetransmittedFrameOverRtx) {
DecodesRetransmittedFrame(true);
}
-TEST_F(EndToEndTest, UsesFrameCallbacks) {
+// Disabled due to: https://code.google.com/p/webrtc/issues/detail?id=3770
+TEST_F(EndToEndTest, DISABLED_ON_ANDROID(UsesFrameCallbacks)) {
static const int kWidth = 320;
static const int kHeight = 240;
@@ -645,7 +644,8 @@ TEST_F(EndToEndTest, UsesFrameCallbacks) {
receiver_transport.SetReceiver(sender_call_->Receiver());
CreateSendConfig(1);
- scoped_ptr<VP8Encoder> encoder(VP8Encoder::Create());
+ scoped_ptr<VideoEncoder> encoder(
+ VideoEncoder::Create(VideoEncoder::kVp8));
send_config_.encoder_settings.encoder = encoder.get();
send_config_.encoder_settings.payload_name = "VP8";
ASSERT_EQ(1u, video_streams_.size()) << "Test setup error.";
@@ -925,7 +925,9 @@ TEST_F(EndToEndTest, UsesRtcpReducedSizeMode) {
// Another is set up to receive all three of these with different renderers.
// Each renderer verifies that it receives the expected resolution, and as soon
// as every renderer has received a frame, the test finishes.
-TEST_F(EndToEndTest, SendsAndReceivesMultipleStreams) {
+//
+// Disabled due to: https://code.google.com/p/webrtc/issues/detail?id=3770
+TEST_F(EndToEndTest, DISABLED_ON_ANDROID(SendsAndReceivesMultipleStreams)) {
static const size_t kNumStreams = 3;
class VideoOutputObserver : public VideoRenderer {
@@ -974,9 +976,9 @@ TEST_F(EndToEndTest, SendsAndReceivesMultipleStreams) {
VideoOutputObserver* observers[kNumStreams];
test::FrameGeneratorCapturer* frame_generators[kNumStreams];
- scoped_ptr<VP8Encoder> encoders[kNumStreams];
+ scoped_ptr<VideoEncoder> encoders[kNumStreams];
for (size_t i = 0; i < kNumStreams; ++i)
- encoders[i].reset(VP8Encoder::Create());
+ encoders[i].reset(VideoEncoder::Create(VideoEncoder::kVp8));
for (size_t i = 0; i < kNumStreams; ++i) {
uint32_t ssrc = codec_settings[i].ssrc;
@@ -2075,5 +2077,3 @@ TEST_F(EndToEndTest, NewReceiveStreamsRespectNetworkDown) {
DestroyStreams();
}
} // namespace webrtc
-
-#endif // !WEBRTC_ANDROID
diff --git a/video/full_stack.cc b/video/full_stack.cc
index b00eb0ed..cd1190cc 100644
--- a/video/full_stack.cc
+++ b/video/full_stack.cc
@@ -17,7 +17,6 @@
#include "webrtc/call.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
-#include "webrtc/modules/video_coding/codecs/vp8/include/vp8.h"
#include "webrtc/system_wrappers/interface/clock.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/event_wrapper.h"
@@ -33,9 +32,6 @@
#include "webrtc/test/testsupport/fileutils.h"
#include "webrtc/typedefs.h"
-// Disabled on Android since all tests currently fail (webrtc:3770).
-#ifndef WEBRTC_ANDROID
-
namespace webrtc {
static const int kFullStackTestDurationSecs = 60;
@@ -391,7 +387,8 @@ void FullStackTest::RunTest(const FullStackTestParams& params) {
CreateSendConfig(1);
- scoped_ptr<VP8Encoder> encoder(VP8Encoder::Create());
+ scoped_ptr<VideoEncoder> encoder(
+ VideoEncoder::Create(VideoEncoder::kVp8));
send_config_.encoder_settings.encoder = encoder.get();
send_config_.encoder_settings.payload_name = "VP8";
send_config_.encoder_settings.payload_type = 124;
@@ -535,5 +532,3 @@ TEST_F(FullStackTest, ForemanCif1000kbps100msLimitedQueue) {
RunTest(foreman_cif);
}
} // namespace webrtc
-
-#endif // !WEBRTC_ANDROID
diff --git a/video/loopback.cc b/video/loopback.cc
index 4ec73882..29a3c782 100644
--- a/video/loopback.cc
+++ b/video/loopback.cc
@@ -120,7 +120,7 @@ void Loopback() {
send_config.local_renderer = local_preview.get();
scoped_ptr<VideoEncoder> encoder;
if (flags::Codec() == "VP8") {
- encoder.reset(VP8Encoder::Create());
+ encoder.reset(VideoEncoder::Create(VideoEncoder::kVp8));
} else {
// Codec not supported.
assert(false && "Codec not supported!");
diff --git a/video/video_send_stream.cc b/video/video_send_stream.cc
index 624aca82..7d64755d 100644
--- a/video/video_send_stream.cc
+++ b/video/video_send_stream.cc
@@ -313,17 +313,9 @@ bool VideoSendStream::ReconfigureVideoEncoder(
}
if (video_codec.codecType == kVideoCodecVP8) {
- video_codec.codecSpecific.VP8.resilience = kResilientStream;
- video_codec.codecSpecific.VP8.numberOfTemporalLayers = 1;
- video_codec.codecSpecific.VP8.denoisingOn = true;
- video_codec.codecSpecific.VP8.errorConcealmentOn = false;
- video_codec.codecSpecific.VP8.automaticResizeOn = false;
- video_codec.codecSpecific.VP8.frameDroppingOn = true;
- video_codec.codecSpecific.VP8.keyFrameInterval = 3000;
+ video_codec.codecSpecific.VP8 = VideoEncoder::GetDefaultVp8Settings();
} else if (video_codec.codecType == kVideoCodecH264) {
- video_codec.codecSpecific.H264.profile = kProfileBase;
- video_codec.codecSpecific.H264.frameDroppingOn = true;
- video_codec.codecSpecific.H264.keyFrameInterval = 3000;
+ video_codec.codecSpecific.H264 = VideoEncoder::GetDefaultH264Settings();
}
if (video_codec.codecType == kVideoCodecVP8) {
diff --git a/video_encoder.h b/video_encoder.h
new file mode 100644
index 00000000..cbdf1ef0
--- /dev/null
+++ b/video_encoder.h
@@ -0,0 +1,74 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENCODER_H_
+#define WEBRTC_VIDEO_ENCODER_H_
+
+#include <vector>
+
+#include "webrtc/common_types.h"
+#include "webrtc/typedefs.h"
+#include "webrtc/video_frame.h"
+
+namespace webrtc {
+
+class RTPFragmentationHeader;
+// TODO(pbos): Expose these through a public (root) header or change these APIs.
+struct CodecSpecificInfo;
+struct VideoCodec;
+
+class EncodedImageCallback {
+ public:
+ virtual ~EncodedImageCallback() {}
+
+ // Callback function which is called when an image has been encoded.
+ // TODO(pbos): Make encoded_image const or pointer. Remove default arguments.
+ virtual int32_t Encoded(
+ EncodedImage& encoded_image,
+ const CodecSpecificInfo* codec_specific_info = NULL,
+ const RTPFragmentationHeader* fragmentation = NULL) = 0;
+};
+
+class VideoEncoder {
+ public:
+ enum EncoderType {
+ kVp8,
+ };
+
+ static VideoEncoder* Create(EncoderType codec_type);
+
+ static VideoCodecVP8 GetDefaultVp8Settings();
+ static VideoCodecH264 GetDefaultH264Settings();
+
+ virtual ~VideoEncoder() {}
+
+ virtual int32_t InitEncode(const VideoCodec* codec_settings,
+ int32_t number_of_cores,
+ uint32_t max_payload_size) = 0;
+ virtual int32_t RegisterEncodeCompleteCallback(
+ EncodedImageCallback* callback) = 0;
+ virtual int32_t Release() = 0;
+
+
+ virtual int32_t Encode(const I420VideoFrame& frame,
+ const CodecSpecificInfo* codec_specific_info,
+ const std::vector<VideoFrameType>* frame_types) = 0;
+
+ virtual int32_t SetChannelParameters(uint32_t packet_loss, int rtt) = 0;
+ virtual int32_t SetRates(uint32_t bitrate, uint32_t framerate) = 0;
+
+ virtual int32_t SetPeriodicKeyFrames(bool enable) { return -1; }
+ virtual int32_t CodecConfigParameters(uint8_t* /*buffer*/, int32_t /*size*/) {
+ return -1;
+ }
+};
+
+} // namespace webrtc
+#endif // WEBRTC_VIDEO_ENCODER_H_
diff --git a/video_engine/include/vie_base.h b/video_engine/include/vie_base.h
index 23625706..4e619e58 100644
--- a/video_engine/include/vie_base.h
+++ b/video_engine/include/vie_base.h
@@ -21,10 +21,6 @@
#include "webrtc/common_types.h"
-#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
-#include <jni.h>
-#endif
-
namespace webrtc {
class Config;
@@ -142,11 +138,6 @@ class WEBRTC_DLLEXPORT VideoEngine {
// user receives callbacks for generated trace messages.
static int SetTraceCallback(TraceCallback* callback);
-#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
- // Android specific.
- static int SetAndroidObjects(JavaVM* java_vm, jobject context);
-#endif
-
protected:
VideoEngine() {}
virtual ~VideoEngine() {}
diff --git a/video_engine/test/auto_test/source/vie_autotest_android.cc b/video_engine/test/auto_test/source/vie_autotest_android.cc
index ac0dd17e..ced235f6 100644
--- a/video_engine/test/auto_test/source/vie_autotest_android.cc
+++ b/video_engine/test/auto_test/source/vie_autotest_android.cc
@@ -13,15 +13,18 @@
#include <android/log.h>
#include <stdio.h>
-#include "webrtc/video_engine/test/auto_test/interface/vie_autotest.h"
+#include "webrtc/modules/video_capture/video_capture_internal.h"
+#include "webrtc/modules/video_render/video_render_internal.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest_defines.h"
+#include "webrtc/video_engine/test/auto_test/interface/vie_autotest.h"
int ViEAutoTestAndroid::RunAutotest(int testSelection, int subTestSelection,
void* window1, void* window2,
JavaVM* javaVM, void* env, void* context) {
ViEAutoTest vieAutoTest(window1, window2);
ViETest::Log("RunAutoTest(%d, %d)", testSelection, subTestSelection);
- webrtc::VideoEngine::SetAndroidObjects(javaVM, static_cast<jobject>(context));
+ webrtc::SetCaptureAndroidVM(javaVM, static_cast<jobject>(context));
+ webrtc::SetRenderAndroidVM(javaVM);
#ifndef WEBRTC_ANDROID_OPENSLES
// voice engine calls into ADM directly
webrtc::VoiceEngine::SetAndroidObjects(javaVM, env, context);
diff --git a/video_engine/test/auto_test/vie_auto_test.gypi b/video_engine/test/auto_test/vie_auto_test.gypi
index 96291660..a415b3b3 100644
--- a/video_engine/test/auto_test/vie_auto_test.gypi
+++ b/video_engine/test/auto_test/vie_auto_test.gypi
@@ -14,8 +14,8 @@
'dependencies': [
'<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
'<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:field_trial_default',
- '<(webrtc_root)/modules/modules.gyp:video_render_module',
- '<(webrtc_root)/modules/modules.gyp:video_capture_module',
+ '<(webrtc_root)/modules/modules.gyp:video_capture_module_internal_impl',
+ '<(webrtc_root)/modules/modules.gyp:video_render_module_internal_impl',
'<(webrtc_root)/voice_engine/voice_engine.gyp:voice_engine',
'<(DEPTH)/testing/gtest.gyp:gtest',
'<(DEPTH)/third_party/gflags/gflags.gyp:gflags',
diff --git a/video_engine/video_engine_core.gypi b/video_engine/video_engine_core.gypi
index 8ce25234..fd61043e 100644
--- a/video_engine/video_engine_core.gypi
+++ b/video_engine/video_engine_core.gypi
@@ -122,6 +122,8 @@
'type': '<(gtest_target_type)',
'dependencies': [
'video_engine_core',
+ '<(webrtc_root)/modules/modules.gyp:video_capture_module_internal_impl',
+ '<(webrtc_root)/modules/modules.gyp:video_render_module_internal_impl',
'<(DEPTH)/testing/gtest.gyp:gtest',
'<(DEPTH)/testing/gmock.gyp:gmock',
'<(webrtc_root)/test/test.gyp:test_support_main',
diff --git a/video_engine/vie_impl.cc b/video_engine/vie_impl.cc
index 3cdf5da2..79f033b4 100644
--- a/video_engine/vie_impl.cc
+++ b/video_engine/vie_impl.cc
@@ -14,11 +14,6 @@
#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/trace.h"
-#ifdef WEBRTC_ANDROID
-#include "webrtc/modules/video_capture/include/video_capture_factory.h"
-#include "webrtc/modules/video_render/include/video_render.h"
-#endif
-
namespace webrtc {
enum { kModuleId = 0 };
@@ -139,20 +134,4 @@ int VideoEngine::SetTraceCallback(TraceCallback* callback) {
return Trace::SetTraceCallback(callback);
}
-#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
-int VideoEngine::SetAndroidObjects(JavaVM* javaVM, jobject context) {
- LOG_F(LS_INFO);
-
- if (SetCaptureAndroidVM(javaVM, context) != 0) {
- LOG(LS_ERROR) << "Could not set capture Android VM";
- return -1;
- }
- if (SetRenderAndroidVM(javaVM) != 0) {
- LOG(LS_ERROR) << "Could not set render Android VM";
- return -1;
- }
- return 0;
-}
-#endif
-
} // namespace webrtc
diff --git a/video_frame.h b/video_frame.h
new file mode 100644
index 00000000..f76b9af3
--- /dev/null
+++ b/video_frame.h
@@ -0,0 +1,219 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_FRAME_H_
+#define WEBRTC_VIDEO_FRAME_H_
+
+#include <assert.h>
+
+#include "webrtc/common_video/plane.h"
+// TODO(pbos): Remove scoped_refptr include (and AddRef/Release if they're not
+// used).
+#include "webrtc/system_wrappers/interface/scoped_refptr.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+enum PlaneType {
+ kYPlane = 0,
+ kUPlane = 1,
+ kVPlane = 2,
+ kNumOfPlanes = 3
+};
+
+class I420VideoFrame {
+ public:
+ I420VideoFrame();
+ virtual ~I420VideoFrame();
+ // Infrastructure for refCount implementation.
+ // Implements dummy functions for reference counting so that non reference
+ // counted instantiation can be done. These functions should not be called
+ // when creating the frame with new I420VideoFrame().
+ // Note: do not pass a I420VideoFrame created with new I420VideoFrame() or
+ // equivalent to a scoped_refptr or memory leak will occur.
+ virtual int32_t AddRef() {
+ assert(false);
+ return -1;
+ }
+ virtual int32_t Release() {
+ assert(false);
+ return -1;
+ }
+
+ // CreateEmptyFrame: Sets frame dimensions and allocates buffers based
+ // on set dimensions - height and plane stride.
+ // If required size is bigger than the allocated one, new buffers of adequate
+ // size will be allocated.
+ // Return value: 0 on success, -1 on error.
+ virtual int CreateEmptyFrame(int width,
+ int height,
+ int stride_y,
+ int stride_u,
+ int stride_v);
+
+ // CreateFrame: Sets the frame's members and buffers. If required size is
+ // bigger than allocated one, new buffers of adequate size will be allocated.
+ // Return value: 0 on success, -1 on error.
+ virtual int CreateFrame(int size_y,
+ const uint8_t* buffer_y,
+ int size_u,
+ const uint8_t* buffer_u,
+ int size_v,
+ const uint8_t* buffer_v,
+ int width,
+ int height,
+ int stride_y,
+ int stride_u,
+ int stride_v);
+
+ // Copy frame: If required size is bigger than allocated one, new buffers of
+ // adequate size will be allocated.
+ // Return value: 0 on success, -1 on error.
+ virtual int CopyFrame(const I420VideoFrame& videoFrame);
+
+ // Make a copy of |this|. The caller owns the returned frame.
+ // Return value: a new frame on success, NULL on error.
+ virtual I420VideoFrame* CloneFrame() const;
+
+ // Swap Frame.
+ virtual void SwapFrame(I420VideoFrame* videoFrame);
+
+ // Get pointer to buffer per plane.
+ virtual uint8_t* buffer(PlaneType type);
+ // Overloading with const.
+ virtual const uint8_t* buffer(PlaneType type) const;
+
+ // Get allocated size per plane.
+ virtual int allocated_size(PlaneType type) const;
+
+ // Get allocated stride per plane.
+ virtual int stride(PlaneType type) const;
+
+ // Set frame width.
+ virtual int set_width(int width);
+
+ // Set frame height.
+ virtual int set_height(int height);
+
+ // Get frame width.
+ virtual int width() const { return width_; }
+
+ // Get frame height.
+ virtual int height() const { return height_; }
+
+ // Set frame timestamp (90kHz).
+ virtual void set_timestamp(uint32_t timestamp) { timestamp_ = timestamp; }
+
+ // Get frame timestamp (90kHz).
+ virtual uint32_t timestamp() const { return timestamp_; }
+
+ // Set capture ntp time in miliseconds.
+ virtual void set_ntp_time_ms(int64_t ntp_time_ms) {
+ ntp_time_ms_ = ntp_time_ms;
+ }
+
+ // Get capture ntp time in miliseconds.
+ virtual int64_t ntp_time_ms() const { return ntp_time_ms_; }
+
+ // Set render time in miliseconds.
+ virtual void set_render_time_ms(int64_t render_time_ms) {
+ render_time_ms_ = render_time_ms;
+ }
+
+ // Get render time in miliseconds.
+ virtual int64_t render_time_ms() const { return render_time_ms_; }
+
+ // Return true if underlying plane buffers are of zero size, false if not.
+ virtual bool IsZeroSize() const;
+
+ // Reset underlying plane buffers sizes to 0. This function doesn't
+ // clear memory.
+ virtual void ResetSize();
+
+ // Return the handle of the underlying video frame. This is used when the
+ // frame is backed by a texture. The object should be destroyed when it is no
+ // longer in use, so the underlying resource can be freed.
+ virtual void* native_handle() const;
+
+ protected:
+ // Verifies legality of parameters.
+ // Return value: 0 on success, -1 on error.
+ virtual int CheckDimensions(int width,
+ int height,
+ int stride_y,
+ int stride_u,
+ int stride_v);
+
+ private:
+ // Get the pointer to a specific plane.
+ const Plane* GetPlane(PlaneType type) const;
+ // Overloading with non-const.
+ Plane* GetPlane(PlaneType type);
+
+ Plane y_plane_;
+ Plane u_plane_;
+ Plane v_plane_;
+ int width_;
+ int height_;
+ uint32_t timestamp_;
+ int64_t ntp_time_ms_;
+ int64_t render_time_ms_;
+};
+
+enum VideoFrameType {
+ kKeyFrame = 0,
+ kDeltaFrame = 1,
+ kGoldenFrame = 2,
+ kAltRefFrame = 3,
+ kSkipFrame = 4
+};
+
+// TODO(pbos): Rename EncodedFrame and reformat this class' members.
+class EncodedImage {
+ public:
+ EncodedImage()
+ : _encodedWidth(0),
+ _encodedHeight(0),
+ _timeStamp(0),
+ capture_time_ms_(0),
+ _frameType(kDeltaFrame),
+ _buffer(NULL),
+ _length(0),
+ _size(0),
+ _completeFrame(false) {}
+
+ EncodedImage(uint8_t* buffer, uint32_t length, uint32_t size)
+ : _encodedWidth(0),
+ _encodedHeight(0),
+ _timeStamp(0),
+ ntp_time_ms_(0),
+ capture_time_ms_(0),
+ _frameType(kDeltaFrame),
+ _buffer(buffer),
+ _length(length),
+ _size(size),
+ _completeFrame(false) {}
+
+ uint32_t _encodedWidth;
+ uint32_t _encodedHeight;
+ uint32_t _timeStamp;
+ // NTP time of the capture time in local timebase in milliseconds.
+ int64_t ntp_time_ms_;
+ int64_t capture_time_ms_;
+ VideoFrameType _frameType;
+ uint8_t* _buffer;
+ uint32_t _length;
+ uint32_t _size;
+ bool _completeFrame;
+};
+
+} // namespace webrtc
+#endif // WEBRTC_VIDEO_FRAME_H_
+
diff --git a/webrtc.gyp b/webrtc.gyp
index b17077bd..dee366ce 100644
--- a/webrtc.gyp
+++ b/webrtc.gyp
@@ -78,6 +78,16 @@
'common.gyp:*',
'<@(webrtc_video_dependencies)',
],
+ 'conditions': [
+ # TODO(andresp): Chromium libpeerconnection should link directly with
+ # this and no if conditions should be needed on webrtc build files.
+ ['build_with_chromium==1', {
+ 'dependencies': [
+ '<(webrtc_root)/modules/modules.gyp:video_capture_module_impl',
+ '<(webrtc_root)/modules/modules.gyp:video_render_module_impl',
+ ],
+ }],
+ ],
},
],
}
diff --git a/webrtc_examples.gyp b/webrtc_examples.gyp
index 8d8e2277..1af259c1 100644
--- a/webrtc_examples.gyp
+++ b/webrtc_examples.gyp
@@ -15,6 +15,8 @@
'type': 'loadable_module',
'dependencies': [
'<(DEPTH)/third_party/icu/icu.gyp:icuuc',
+ '<(webrtc_root)/modules/modules.gyp:video_capture_module_internal_impl',
+ '<(webrtc_root)/modules/modules.gyp:video_render_module_internal_impl',
'<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:field_trial_default',
'<(webrtc_root)/test/test.gyp:channel_transport',
'<(webrtc_root)/video_engine/video_engine.gyp:video_engine_core',
diff --git a/webrtc_tests.gypi b/webrtc_tests.gypi
index 75849543..1d862c65 100644
--- a/webrtc_tests.gypi
+++ b/webrtc_tests.gypi
@@ -48,6 +48,7 @@
'<(DEPTH)/third_party/gflags/gflags.gyp:gflags',
'test/webrtc_test_common.gyp:webrtc_test_common',
'test/webrtc_test_common.gyp:webrtc_test_renderer',
+ '<(webrtc_root)/modules/modules.gyp:video_render_module_impl',
'<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:field_trial_default',
'webrtc',
],
@@ -71,9 +72,10 @@
'dependencies': [
'<(DEPTH)/testing/gtest.gyp:gtest',
'<(DEPTH)/third_party/gflags/gflags.gyp:gflags',
- 'system_wrappers/source/system_wrappers.gyp:field_trial_default',
'test/webrtc_test_common.gyp:webrtc_test_common',
'test/webrtc_test_common.gyp:webrtc_test_renderer',
+ '<(webrtc_root)/modules/modules.gyp:video_render_module_impl',
+ '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:field_trial_default',
'webrtc',
],
},
@@ -90,11 +92,11 @@
],
'dependencies': [
'<(DEPTH)/testing/gtest.gyp:gtest',
- 'modules/modules.gyp:rtp_rtcp',
+ '<(webrtc_root)/modules/modules.gyp:rtp_rtcp',
+ '<(webrtc_root)/modules/modules.gyp:video_render_module_impl',
'test/metrics.gyp:metrics',
'test/webrtc_test_common.gyp:webrtc_test_common',
'test/test.gyp:test_main',
- 'test/webrtc_test_common.gyp:webrtc_test_video_render_dependencies',
'webrtc',
],
'conditions': [
@@ -114,9 +116,6 @@
'video/full_stack.cc',
'video/rampup_tests.cc',
'video/rampup_tests.h',
- # Needed to make the test binary pass since all tests are disabled on
- # Android (webrtc:3770).
- 'test/testsupport/always_passing_unittest.cc',
],
'dependencies': [
'<(DEPTH)/testing/gtest.gyp:gtest',
@@ -124,7 +123,6 @@
'modules/modules.gyp:rtp_rtcp',
'test/webrtc_test_common.gyp:webrtc_test_common',
'test/test.gyp:test_main',
- 'test/webrtc_test_common.gyp:webrtc_test_video_render_dependencies',
'webrtc',
],
'conditions': [