Diffstat (limited to 'webrtc/modules/video_capture')
-rw-r--r-- webrtc/modules/video_capture/BUILD.gn | 166
-rw-r--r-- webrtc/modules/video_capture/OWNERS | 13
-rw-r--r-- webrtc/modules/video_capture/device_info_impl.cc | 359
-rw-r--r-- webrtc/modules/video_capture/device_info_impl.h | 68
-rw-r--r-- webrtc/modules/video_capture/external/device_info_external.cc | 53
-rw-r--r-- webrtc/modules/video_capture/external/video_capture_external.cc | 28
-rw-r--r-- webrtc/modules/video_capture/include/video_capture.h | 160
-rw-r--r-- webrtc/modules/video_capture/include/video_capture_defines.h | 118
-rw-r--r-- webrtc/modules/video_capture/include/video_capture_factory.h | 45
-rw-r--r-- webrtc/modules/video_capture/ios/device_info_ios.h | 60
-rw-r--r-- webrtc/modules/video_capture/ios/device_info_ios.mm | 178
-rw-r--r-- webrtc/modules/video_capture/ios/device_info_ios_objc.h | 29
-rw-r--r-- webrtc/modules/video_capture/ios/device_info_ios_objc.mm | 100
-rw-r--r-- webrtc/modules/video_capture/ios/rtc_video_capture_ios_objc.h | 39
-rw-r--r-- webrtc/modules/video_capture/ios/rtc_video_capture_ios_objc.mm | 423
-rw-r--r-- webrtc/modules/video_capture/ios/video_capture_ios.h | 44
-rw-r--r-- webrtc/modules/video_capture/ios/video_capture_ios.mm | 103
-rw-r--r-- webrtc/modules/video_capture/linux/device_info_linux.cc | 339
-rw-r--r-- webrtc/modules/video_capture/linux/device_info_linux.h | 53
-rw-r--r-- webrtc/modules/video_capture/linux/video_capture_linux.cc | 492
-rw-r--r-- webrtc/modules/video_capture/linux/video_capture_linux.h | 64
-rw-r--r-- webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit.h | 79
-rw-r--r-- webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit.mm | 211
-rw-r--r-- webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_info.h | 93
-rw-r--r-- webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_info.mm | 125
-rw-r--r-- webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_info_objc.h | 67
-rw-r--r-- webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_info_objc.mm | 191
-rw-r--r-- webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_objc.h | 61
-rw-r--r-- webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_objc.mm | 260
-rw-r--r-- webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_utility.h | 36
-rw-r--r-- webrtc/modules/video_capture/mac/video_capture_mac.mm | 271
-rw-r--r-- webrtc/modules/video_capture/test/video_capture_main_mac.mm | 17
-rw-r--r-- webrtc/modules/video_capture/test/video_capture_unittest.cc | 540
-rw-r--r-- webrtc/modules/video_capture/video_capture.gypi | 204
-rw-r--r-- webrtc/modules/video_capture/video_capture_config.h | 33
-rw-r--r-- webrtc/modules/video_capture/video_capture_delay.h | 36
-rw-r--r-- webrtc/modules/video_capture/video_capture_factory.cc | 41
-rw-r--r-- webrtc/modules/video_capture/video_capture_impl.cc | 399
-rw-r--r-- webrtc/modules/video_capture/video_capture_impl.h | 142
-rw-r--r-- webrtc/modules/video_capture/video_capture_internal.h | 27
-rw-r--r-- webrtc/modules/video_capture/video_capture_tests.isolate | 23
-rw-r--r-- webrtc/modules/video_capture/windows/device_info_ds.cc | 798
-rw-r--r-- webrtc/modules/video_capture/windows/device_info_ds.h | 106
-rw-r--r-- webrtc/modules/video_capture/windows/device_info_mf.cc | 51
-rw-r--r-- webrtc/modules/video_capture/windows/device_info_mf.h | 43
-rw-r--r-- webrtc/modules/video_capture/windows/help_functions_ds.cc | 119
-rw-r--r-- webrtc/modules/video_capture/windows/help_functions_ds.h | 35
-rw-r--r-- webrtc/modules/video_capture/windows/sink_filter_ds.cc | 519
-rw-r--r-- webrtc/modules/video_capture/windows/sink_filter_ds.h | 100
-rw-r--r-- webrtc/modules/video_capture/windows/video_capture_ds.cc | 414
-rw-r--r-- webrtc/modules/video_capture/windows/video_capture_ds.h | 79
-rw-r--r-- webrtc/modules/video_capture/windows/video_capture_factory_windows.cc | 41
-rw-r--r-- webrtc/modules/video_capture/windows/video_capture_mf.cc | 42
-rw-r--r-- webrtc/modules/video_capture/windows/video_capture_mf.h | 43
54 files changed, 8180 insertions, 0 deletions
diff --git a/webrtc/modules/video_capture/BUILD.gn b/webrtc/modules/video_capture/BUILD.gn
new file mode 100644
index 0000000000..b0ed6f4e6c
--- /dev/null
+++ b/webrtc/modules/video_capture/BUILD.gn
@@ -0,0 +1,166 @@
+# Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+import("../../build/webrtc.gni")
+
+# Note this target is missing an implementation for the video capture.
+# Targets must link with either 'video_capture' or
+# 'video_capture_internal_impl' depending on whether they want to
+# use the internal capturer.
+source_set("video_capture_module") {
+ sources = [
+ "device_info_impl.cc",
+ "device_info_impl.h",
+ "include/video_capture.h",
+ "include/video_capture_defines.h",
+ "include/video_capture_factory.h",
+ "video_capture_config.h",
+ "video_capture_delay.h",
+ "video_capture_factory.cc",
+ "video_capture_impl.cc",
+ "video_capture_impl.h",
+ ]
+
+ deps = [
+ "../..:webrtc_common",
+ "../../common_video",
+ "../../system_wrappers",
+ "../utility",
+ ]
+
+ configs += [ "../..:common_config" ]
+ public_configs = [ "../..:common_inherited_config" ]
+
+ if (is_clang) {
+ # Suppress warnings from Chrome's Clang plugins.
+ # See http://code.google.com/p/webrtc/issues/detail?id=163 for details.
+ configs -= [ "//build/config/clang:find_bad_constructs" ]
+ }
+}
+
+source_set("video_capture") {
+ sources = [
+ "external/device_info_external.cc",
+ "external/video_capture_external.cc",
+ ]
+
+ deps = [
+ ":video_capture_module",
+ "../../system_wrappers",
+ ]
+
+ configs += [ "../..:common_config" ]
+ public_configs = [ "../..:common_inherited_config" ]
+
+ if (is_clang) {
+ # Suppress warnings from Chrome's Clang plugins.
+ # See http://code.google.com/p/webrtc/issues/detail?id=163 for details.
+ configs -= [ "//build/config/clang:find_bad_constructs" ]
+ }
+}
+
+if (!build_with_chromium) {
+ config("video_capture_internal_impl_config") {
+ if (is_ios) {
+ libs = [
+ "AVFoundation.framework",
+ "CoreMedia.framework",
+ "CoreVideo.framework",
+ ]
+ }
+ }
+
+ source_set("video_capture_internal_impl") {
+ deps = [
+ ":video_capture_module",
+ "../../system_wrappers",
+ ]
+
+ if (is_linux) {
+ sources = [
+ "linux/device_info_linux.cc",
+ "linux/device_info_linux.h",
+ "linux/video_capture_linux.cc",
+ "linux/video_capture_linux.h",
+ ]
+ deps += [ "../..:webrtc_common" ]
+ }
+ if (is_mac) {
+ sources = [
+ "mac/qtkit/video_capture_qtkit.h",
+ "mac/qtkit/video_capture_qtkit.mm",
+ "mac/qtkit/video_capture_qtkit_info.h",
+ "mac/qtkit/video_capture_qtkit_info.mm",
+ "mac/qtkit/video_capture_qtkit_info_objc.h",
+ "mac/qtkit/video_capture_qtkit_info_objc.mm",
+ "mac/qtkit/video_capture_qtkit_objc.h",
+ "mac/qtkit/video_capture_qtkit_objc.mm",
+ "mac/qtkit/video_capture_qtkit_utility.h",
+ "mac/video_capture_mac.mm",
+ ]
+
+ libs = [
+ "CoreVideo.framework",
+ "QTKit.framework",
+ ]
+ }
+ if (is_win) {
+ sources = [
+ "windows/device_info_ds.cc",
+ "windows/device_info_ds.h",
+ "windows/device_info_mf.cc",
+ "windows/device_info_mf.h",
+ "windows/help_functions_ds.cc",
+ "windows/help_functions_ds.h",
+ "windows/sink_filter_ds.cc",
+ "windows/sink_filter_ds.h",
+ "windows/video_capture_ds.cc",
+ "windows/video_capture_ds.h",
+ "windows/video_capture_factory_windows.cc",
+ "windows/video_capture_mf.cc",
+ "windows/video_capture_mf.h",
+ ]
+
+ libs = [ "Strmiids.lib" ]
+
+ deps += [ "//third_party/winsdk_samples" ]
+ }
+ if (is_ios) {
+ sources = [
+ "ios/device_info_ios.h",
+ "ios/device_info_ios.mm",
+ "ios/device_info_ios_objc.h",
+ "ios/device_info_ios_objc.mm",
+ "ios/rtc_video_capture_ios_objc.h",
+ "ios/rtc_video_capture_ios_objc.mm",
+ "ios/video_capture_ios.h",
+ "ios/video_capture_ios.mm",
+ ]
+
+ cflags = [
+ "-fobjc-arc", # CLANG_ENABLE_OBJC_ARC = YES.
+
+ # To avoid warnings for deprecated videoMinFrameDuration and
+ # videoMaxFrameDuration properties in iOS 7.0.
+ # See webrtc:3705 for more details.
+ "-Wno-deprecated-declarations",
+ ]
+ }
+
+ all_dependent_configs = [ ":video_capture_internal_impl_config" ]
+
+ configs += [ "../..:common_config" ]
+ public_configs = [ "../..:common_inherited_config" ]
+
+ if (is_clang) {
+ # Suppress warnings from Chrome's Clang plugins.
+ # See http://code.google.com/p/webrtc/issues/detail?id=163 for details.
+ configs -= [ "//build/config/clang:find_bad_constructs" ]
+ }
+ }
+}
diff --git a/webrtc/modules/video_capture/OWNERS b/webrtc/modules/video_capture/OWNERS
new file mode 100644
index 0000000000..f8a55684dc
--- /dev/null
+++ b/webrtc/modules/video_capture/OWNERS
@@ -0,0 +1,13 @@
+glaznev@webrtc.org
+mflodman@webrtc.org
+perkj@webrtc.org
+tkchin@webrtc.org
+
+per-file *.isolate=kjellander@webrtc.org
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
+
+per-file BUILD.gn=kjellander@webrtc.org
diff --git a/webrtc/modules/video_capture/device_info_impl.cc b/webrtc/modules/video_capture/device_info_impl.cc
new file mode 100644
index 0000000000..e1695561c1
--- /dev/null
+++ b/webrtc/modules/video_capture/device_info_impl.cc
@@ -0,0 +1,359 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <assert.h>
+#include <stdlib.h>
+
+#include "webrtc/modules/video_capture/device_info_impl.h"
+#include "webrtc/modules/video_capture/video_capture_config.h"
+#include "webrtc/system_wrappers/include/logging.h"
+
+#ifndef abs
+#define abs(a) ((a) >= 0 ? (a) : -(a))
+#endif
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+DeviceInfoImpl::DeviceInfoImpl(const int32_t id)
+ : _id(id), _apiLock(*RWLockWrapper::CreateRWLock()), _lastUsedDeviceName(NULL),
+ _lastUsedDeviceNameLength(0)
+{
+}
+
+DeviceInfoImpl::~DeviceInfoImpl(void)
+{
+ _apiLock.AcquireLockExclusive();
+ free(_lastUsedDeviceName);
+ _apiLock.ReleaseLockExclusive();
+
+ delete &_apiLock;
+}
+int32_t DeviceInfoImpl::NumberOfCapabilities(
+ const char* deviceUniqueIdUTF8)
+{
+
+ if (!deviceUniqueIdUTF8)
+ return -1;
+
+ _apiLock.AcquireLockShared();
+
+ if (_lastUsedDeviceNameLength == strlen((char*) deviceUniqueIdUTF8))
+ {
+ // Is this the same device that was asked for previously?
+#if defined(WEBRTC_MAC) || defined(WEBRTC_LINUX)
+ if(strncasecmp((char*)_lastUsedDeviceName,
+ (char*) deviceUniqueIdUTF8,
+ _lastUsedDeviceNameLength)==0)
+#else
+ if (_strnicmp((char*) _lastUsedDeviceName,
+ (char*) deviceUniqueIdUTF8,
+ _lastUsedDeviceNameLength) == 0)
+#endif
+ {
+ //yes
+ _apiLock.ReleaseLockShared();
+ return static_cast<int32_t>(_captureCapabilities.size());
+ }
+ }
+ // Need to get exclusive rights to create the new capability map.
+ _apiLock.ReleaseLockShared();
+ WriteLockScoped cs2(_apiLock);
+
+ int32_t ret = CreateCapabilityMap(deviceUniqueIdUTF8);
+ return ret;
+}
+
+int32_t DeviceInfoImpl::GetCapability(const char* deviceUniqueIdUTF8,
+ const uint32_t deviceCapabilityNumber,
+ VideoCaptureCapability& capability)
+{
+ assert(deviceUniqueIdUTF8 != NULL);
+
+ ReadLockScoped cs(_apiLock);
+
+ if ((_lastUsedDeviceNameLength != strlen((char*) deviceUniqueIdUTF8))
+#if defined(WEBRTC_MAC) || defined(WEBRTC_LINUX)
+ || (strncasecmp((char*)_lastUsedDeviceName,
+ (char*) deviceUniqueIdUTF8,
+ _lastUsedDeviceNameLength)!=0))
+#else
+ || (_strnicmp((char*) _lastUsedDeviceName,
+ (char*) deviceUniqueIdUTF8,
+ _lastUsedDeviceNameLength) != 0))
+#endif
+
+ {
+ _apiLock.ReleaseLockShared();
+ _apiLock.AcquireLockExclusive();
+ if (-1 == CreateCapabilityMap(deviceUniqueIdUTF8))
+ {
+ _apiLock.ReleaseLockExclusive();
+ _apiLock.AcquireLockShared();
+ return -1;
+ }
+ _apiLock.ReleaseLockExclusive();
+ _apiLock.AcquireLockShared();
+ }
+
+ // Make sure the number is valid
+ if (deviceCapabilityNumber >= (unsigned int) _captureCapabilities.size())
+ {
+ LOG(LS_ERROR) << "Invalid deviceCapabilityNumber "
+ << deviceCapabilityNumber << ">= number of capabilities ("
+ << _captureCapabilities.size() << ").";
+ return -1;
+ }
+
+ capability = _captureCapabilities[deviceCapabilityNumber];
+ return 0;
+}
+
+int32_t DeviceInfoImpl::GetBestMatchedCapability(
+ const char*deviceUniqueIdUTF8,
+ const VideoCaptureCapability& requested,
+ VideoCaptureCapability& resulting)
+{
+
+
+ if (!deviceUniqueIdUTF8)
+ return -1;
+
+ ReadLockScoped cs(_apiLock);
+ if ((_lastUsedDeviceNameLength != strlen((char*) deviceUniqueIdUTF8))
+#if defined(WEBRTC_MAC) || defined(WEBRTC_LINUX)
+ || (strncasecmp((char*)_lastUsedDeviceName,
+ (char*) deviceUniqueIdUTF8,
+ _lastUsedDeviceNameLength)!=0))
+#else
+ || (_strnicmp((char*) _lastUsedDeviceName,
+ (char*) deviceUniqueIdUTF8,
+ _lastUsedDeviceNameLength) != 0))
+#endif
+ {
+ _apiLock.ReleaseLockShared();
+ _apiLock.AcquireLockExclusive();
+ if (-1 == CreateCapabilityMap(deviceUniqueIdUTF8))
+ {
+ return -1;
+ }
+ _apiLock.ReleaseLockExclusive();
+ _apiLock.AcquireLockShared();
+ }
+
+ int32_t bestformatIndex = -1;
+ int32_t bestWidth = 0;
+ int32_t bestHeight = 0;
+ int32_t bestFrameRate = 0;
+ RawVideoType bestRawType = kVideoUnknown;
+ webrtc::VideoCodecType bestCodecType = webrtc::kVideoCodecUnknown;
+
+ const int32_t numberOfCapabilities =
+ static_cast<int32_t>(_captureCapabilities.size());
+
+ for (int32_t tmp = 0; tmp < numberOfCapabilities; ++tmp) // Loop through all capabilities
+ {
+ VideoCaptureCapability& capability = _captureCapabilities[tmp];
+
+ const int32_t diffWidth = capability.width - requested.width;
+ const int32_t diffHeight = capability.height - requested.height;
+ const int32_t diffFrameRate = capability.maxFPS - requested.maxFPS;
+
+ const int32_t currentbestDiffWith = bestWidth - requested.width;
+ const int32_t currentbestDiffHeight = bestHeight - requested.height;
+ const int32_t currentbestDiffFrameRate = bestFrameRate - requested.maxFPS;
+
+ if ((diffHeight >= 0 && diffHeight <= abs(currentbestDiffHeight)) // Height better than or equal to previous.
+ || (currentbestDiffHeight < 0 && diffHeight >= currentbestDiffHeight))
+ {
+
+ if (diffHeight == currentbestDiffHeight) // Found best height. Now check the width.
+ {
+ if ((diffWidth >= 0 && diffWidth <= abs(currentbestDiffWith)) // Width better or equal
+ || (currentbestDiffWith < 0 && diffWidth >= currentbestDiffWith))
+ {
+ if (diffWidth == currentbestDiffWith && diffHeight
+ == currentbestDiffHeight) // Same size as previously
+ {
+ // Also check the best frame rate if the diff is the same as previous.
+ if (((diffFrameRate >= 0 &&
+ diffFrameRate <= currentbestDiffFrameRate) // Frame rate too high but a better match than previous, and we have not selected IUV
+ ||
+ (currentbestDiffFrameRate < 0 &&
+ diffFrameRate >= currentbestDiffFrameRate)) // Current frame rate is lower than requested. This is better.
+ )
+ {
+ if ((currentbestDiffFrameRate == diffFrameRate) // Same frame rate as previous or frame rate already good enough
+ || (currentbestDiffFrameRate >= 0))
+ {
+ if (bestRawType != requested.rawType
+ && requested.rawType != kVideoUnknown
+ && (capability.rawType == requested.rawType
+ || capability.rawType == kVideoI420
+ || capability.rawType == kVideoYUY2
+ || capability.rawType == kVideoYV12))
+ {
+ bestCodecType = capability.codecType;
+ bestRawType = capability.rawType;
+ bestformatIndex = tmp;
+ }
+ // If width, height and frame rate are fulfilled, we can use the camera for encoding if it is supported.
+ if (capability.height == requested.height
+ && capability.width == requested.width
+ && capability.maxFPS >= requested.maxFPS)
+ {
+ if (capability.codecType == requested.codecType
+ && bestCodecType != requested.codecType)
+ {
+ bestCodecType = capability.codecType;
+ bestformatIndex = tmp;
+ }
+ }
+ }
+ else // Better frame rate
+ {
+ if (requested.codecType == capability.codecType)
+ {
+
+ bestWidth = capability.width;
+ bestHeight = capability.height;
+ bestFrameRate = capability.maxFPS;
+ bestCodecType = capability.codecType;
+ bestRawType = capability.rawType;
+ bestformatIndex = tmp;
+ }
+ }
+ }
+ }
+ else // Better width than previously
+ {
+ if (requested.codecType == capability.codecType)
+ {
+ bestWidth = capability.width;
+ bestHeight = capability.height;
+ bestFrameRate = capability.maxFPS;
+ bestCodecType = capability.codecType;
+ bestRawType = capability.rawType;
+ bestformatIndex = tmp;
+ }
+ }
+ }// else width no good
+ }
+ else // Better height
+ {
+ if (requested.codecType == capability.codecType)
+ {
+ bestWidth = capability.width;
+ bestHeight = capability.height;
+ bestFrameRate = capability.maxFPS;
+ bestCodecType = capability.codecType;
+ bestRawType = capability.rawType;
+ bestformatIndex = tmp;
+ }
+ }
+ }// else height not good
+ }//end for
+
+ LOG(LS_VERBOSE) << "Best camera format: " << bestWidth << "x" << bestHeight
+ << "@" << bestFrameRate
+ << "fps, color format: " << bestRawType;
+
+ // Copy the capability
+ if (bestformatIndex < 0)
+ return -1;
+ resulting = _captureCapabilities[bestformatIndex];
+ return bestformatIndex;
+}
+
+/* Returns the expected Capture delay*/
+int32_t DeviceInfoImpl::GetExpectedCaptureDelay(
+ const DelayValues delayValues[],
+ const uint32_t sizeOfDelayValues,
+ const char* productId,
+ const uint32_t width,
+ const uint32_t height)
+{
+ int32_t bestDelay = kDefaultCaptureDelay;
+
+ for (uint32_t device = 0; device < sizeOfDelayValues; ++device)
+ {
+ if (delayValues[device].productId && strncmp((char*) productId,
+ (char*) delayValues[device].productId,
+ kVideoCaptureProductIdLength) == 0)
+ {
+ // We have found the camera
+
+ int32_t bestWidth = 0;
+ int32_t bestHeight = 0;
+
+ //Loop through all tested sizes and find one that seems fitting
+ for (uint32_t delayIndex = 0; delayIndex < NoOfDelayValues; ++delayIndex)
+ {
+ const DelayValue& currentValue = delayValues[device].delayValues[delayIndex];
+
+ const int32_t diffWidth = currentValue.width - width;
+ const int32_t diffHeight = currentValue.height - height;
+
+ const int32_t currentbestDiffWith = bestWidth - width;
+ const int32_t currentbestDiffHeight = bestHeight - height;
+
+ if ((diffHeight >= 0 && diffHeight <= abs(currentbestDiffHeight)) // Height better than or equal to previous.
+ || (currentbestDiffHeight < 0 && diffHeight >= currentbestDiffHeight))
+ {
+
+ if (diffHeight == currentbestDiffHeight) // Found best height. Now check the width.
+ {
+ if ((diffWidth >= 0 && diffWidth <= abs(currentbestDiffWith)) // Width better or equal
+ || (currentbestDiffWith < 0 && diffWidth >= currentbestDiffWith))
+ {
+ if (diffWidth == currentbestDiffWith && diffHeight
+ == currentbestDiffHeight) // Same size as previous
+ {
+ }
+ else // Better width than previously
+ {
+ bestWidth = currentValue.width;
+ bestHeight = currentValue.height;
+ bestDelay = currentValue.delay;
+ }
+ }// else width no good
+ }
+ else // Better height
+ {
+ bestWidth = currentValue.width;
+ bestHeight = currentValue.height;
+ bestDelay = currentValue.delay;
+ }
+ }// else height not good
+ }//end for
+ break;
+ }
+ }
+ if (bestDelay > kMaxCaptureDelay)
+ {
+ LOG(LS_WARNING) << "Expected capture delay (" << bestDelay
+ << " ms) too high, using " << kMaxCaptureDelay
+ << " ms.";
+ bestDelay = kMaxCaptureDelay;
+ }
+
+ return bestDelay;
+
+}
+
+// Default implementation. This should be overridden by mobile implementations.
+int32_t DeviceInfoImpl::GetOrientation(const char* deviceUniqueIdUTF8,
+ VideoRotation& orientation) {
+ orientation = kVideoRotation_0;
+ return -1;
+}
+} // namespace videocapturemodule
+} // namespace webrtc
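
For reference, a minimal caller-side sketch of the capability matching implemented above (illustrative only, not part of this commit; it assumes the unique id string comes from DeviceInfo::GetDeviceName() and that the caller owns the DeviceInfo object returned by the factory):

    #include "webrtc/modules/video_capture/include/video_capture_factory.h"

    // Ask DeviceInfo for the capability closest to 640x480 @ 30 fps, I420.
    void PickBestCapability(const char* device_unique_id) {
      webrtc::VideoCaptureModule::DeviceInfo* info =
          webrtc::VideoCaptureFactory::CreateDeviceInfo(0);
      if (!info)
        return;

      webrtc::VideoCaptureCapability requested;
      requested.width = 640;
      requested.height = 480;
      requested.maxFPS = 30;
      requested.rawType = webrtc::kVideoI420;

      webrtc::VideoCaptureCapability best;
      // GetBestMatchedCapability() returns the capability index on success
      // and -1 if no capability map could be created for the device.
      if (info->GetBestMatchedCapability(device_unique_id, requested, best) >= 0) {
        // best.width / best.height / best.maxFPS describe the closest mode.
      }
      delete info;  // Assumed: the caller owns the DeviceInfo object.
    }
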
diff --git a/webrtc/modules/video_capture/device_info_impl.h b/webrtc/modules/video_capture/device_info_impl.h
new file mode 100644
index 0000000000..420808bcee
--- /dev/null
+++ b/webrtc/modules/video_capture/device_info_impl.h
@@ -0,0 +1,68 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_DEVICE_INFO_IMPL_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_DEVICE_INFO_IMPL_H_
+
+#include <vector>
+
+#include "webrtc/modules/video_capture/include/video_capture.h"
+#include "webrtc/modules/video_capture/video_capture_delay.h"
+#include "webrtc/system_wrappers/include/rw_lock_wrapper.h"
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+class DeviceInfoImpl: public VideoCaptureModule::DeviceInfo
+{
+public:
+ DeviceInfoImpl(const int32_t id);
+ virtual ~DeviceInfoImpl(void);
+ virtual int32_t NumberOfCapabilities(const char* deviceUniqueIdUTF8);
+ virtual int32_t GetCapability(
+ const char* deviceUniqueIdUTF8,
+ const uint32_t deviceCapabilityNumber,
+ VideoCaptureCapability& capability);
+
+ virtual int32_t GetBestMatchedCapability(
+ const char* deviceUniqueIdUTF8,
+ const VideoCaptureCapability& requested,
+ VideoCaptureCapability& resulting);
+ virtual int32_t GetOrientation(const char* deviceUniqueIdUTF8,
+ VideoRotation& orientation);
+
+protected:
+ /* Initialize this object*/
+
+ virtual int32_t Init()=0;
+ /*
+ * Fills the member variable _captureCapabilities with capabilities for the given device name.
+ */
+ virtual int32_t CreateCapabilityMap(const char* deviceUniqueIdUTF8)=0;
+
+ /* Returns the expected Capture delay*/
+ int32_t GetExpectedCaptureDelay(const DelayValues delayValues[],
+ const uint32_t sizeOfDelayValues,
+ const char* productId,
+ const uint32_t width,
+ const uint32_t height);
+protected:
+ // Data members
+ int32_t _id;
+ typedef std::vector<VideoCaptureCapability> VideoCaptureCapabilities;
+ VideoCaptureCapabilities _captureCapabilities;
+ RWLockWrapper& _apiLock;
+ char* _lastUsedDeviceName;
+ uint32_t _lastUsedDeviceNameLength;
+};
+} // namespace videocapturemodule
+} // namespace webrtc
+#endif // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_DEVICE_INFO_IMPL_H_
diff --git a/webrtc/modules/video_capture/external/device_info_external.cc b/webrtc/modules/video_capture/external/device_info_external.cc
new file mode 100644
index 0000000000..d89ae16185
--- /dev/null
+++ b/webrtc/modules/video_capture/external/device_info_external.cc
@@ -0,0 +1,53 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_capture/device_info_impl.h"
+#include "webrtc/modules/video_capture/video_capture_impl.h"
+
+namespace webrtc {
+
+namespace videocapturemodule {
+
+class ExternalDeviceInfo : public DeviceInfoImpl {
+ public:
+ ExternalDeviceInfo(const int32_t id)
+ : DeviceInfoImpl(id) {
+ }
+ virtual ~ExternalDeviceInfo() {}
+ virtual uint32_t NumberOfDevices() { return 0; }
+ virtual int32_t DisplayCaptureSettingsDialogBox(
+ const char* /*deviceUniqueIdUTF8*/,
+ const char* /*dialogTitleUTF8*/,
+ void* /*parentWindow*/,
+ uint32_t /*positionX*/,
+ uint32_t /*positionY*/) { return -1; }
+ virtual int32_t GetDeviceName(
+ uint32_t deviceNumber,
+ char* deviceNameUTF8,
+ uint32_t deviceNameLength,
+ char* deviceUniqueIdUTF8,
+ uint32_t deviceUniqueIdUTF8Length,
+ char* productUniqueIdUTF8=0,
+ uint32_t productUniqueIdUTF8Length=0) {
+ return -1;
+ }
+ virtual int32_t CreateCapabilityMap(
+ const char* deviceUniqueIdUTF8) { return 0; }
+ virtual int32_t Init() { return 0; }
+};
+
+VideoCaptureModule::DeviceInfo* VideoCaptureImpl::CreateDeviceInfo(
+ const int32_t id) {
+ return new ExternalDeviceInfo(id);
+}
+
+} // namespace videocapturemodule
+
+} // namespace webrtc
diff --git a/webrtc/modules/video_capture/external/video_capture_external.cc b/webrtc/modules/video_capture/external/video_capture_external.cc
new file mode 100644
index 0000000000..29b161263c
--- /dev/null
+++ b/webrtc/modules/video_capture/external/video_capture_external.cc
@@ -0,0 +1,28 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_capture/video_capture_impl.h"
+#include "webrtc/system_wrappers/include/ref_count.h"
+
+namespace webrtc {
+
+namespace videocapturemodule {
+
+VideoCaptureModule* VideoCaptureImpl::Create(
+ const int32_t id,
+ const char* deviceUniqueIdUTF8) {
+ RefCountImpl<VideoCaptureImpl>* implementation =
+ new RefCountImpl<VideoCaptureImpl>(id);
+ return implementation;
+}
+
+} // namespace videocapturemodule
+
+} // namespace webrtc
diff --git a/webrtc/modules/video_capture/include/video_capture.h b/webrtc/modules/video_capture/include/video_capture.h
new file mode 100644
index 0000000000..09b4502115
--- /dev/null
+++ b/webrtc/modules/video_capture/include/video_capture.h
@@ -0,0 +1,160 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_INCLUDE_VIDEO_CAPTURE_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_INCLUDE_VIDEO_CAPTURE_H_
+
+#include "webrtc/common_video/rotation.h"
+#include "webrtc/modules/interface/module.h"
+#include "webrtc/modules/video_capture/include/video_capture_defines.h"
+
+namespace webrtc {
+
+class VideoCaptureModule: public RefCountedModule {
+ public:
+ // Interface for receiving information about available camera devices.
+ class DeviceInfo {
+ public:
+ virtual uint32_t NumberOfDevices() = 0;
+
+ // Returns the available capture devices.
+ // deviceNumber - Index of capture device.
+ // deviceNameUTF8 - Friendly name of the capture device.
+ // deviceUniqueIdUTF8 - Unique name of the capture device if it exists.
+ // Otherwise same as deviceNameUTF8.
+ // productUniqueIdUTF8 - Unique product id if it exists.
+ // Null terminated otherwise.
+ virtual int32_t GetDeviceName(
+ uint32_t deviceNumber,
+ char* deviceNameUTF8,
+ uint32_t deviceNameLength,
+ char* deviceUniqueIdUTF8,
+ uint32_t deviceUniqueIdUTF8Length,
+ char* productUniqueIdUTF8 = 0,
+ uint32_t productUniqueIdUTF8Length = 0) = 0;
+
+
+ // Returns the number of capabilities of this device.
+ virtual int32_t NumberOfCapabilities(
+ const char* deviceUniqueIdUTF8) = 0;
+
+ // Gets the capabilities of the named device.
+ virtual int32_t GetCapability(
+ const char* deviceUniqueIdUTF8,
+ const uint32_t deviceCapabilityNumber,
+ VideoCaptureCapability& capability) = 0;
+
+ // Gets clockwise angle the captured frames should be rotated in order
+ // to be displayed correctly on a normally rotated display.
+ virtual int32_t GetOrientation(const char* deviceUniqueIdUTF8,
+ VideoRotation& orientation) = 0;
+
+ // Gets the capability that best matches the requested width, height and
+ // frame rate.
+ // Returns the deviceCapabilityNumber on success.
+ virtual int32_t GetBestMatchedCapability(
+ const char* deviceUniqueIdUTF8,
+ const VideoCaptureCapability& requested,
+ VideoCaptureCapability& resulting) = 0;
+
+ // Display OS / capture device specific settings dialog.
+ virtual int32_t DisplayCaptureSettingsDialogBox(
+ const char* deviceUniqueIdUTF8,
+ const char* dialogTitleUTF8,
+ void* parentWindow,
+ uint32_t positionX,
+ uint32_t positionY) = 0;
+
+ virtual ~DeviceInfo() {}
+ };
+
+ class VideoCaptureEncodeInterface {
+ public:
+ virtual int32_t ConfigureEncoder(const VideoCodec& codec,
+ uint32_t maxPayloadSize) = 0;
+ // Inform the encoder about the new target bit rate.
+ // - newBitRate : New target bit rate in Kbit/s.
+ // - frameRate : The target frame rate.
+ virtual int32_t SetRates(int32_t newBitRate, int32_t frameRate) = 0;
+ // Inform the encoder about the packet loss and the round-trip time.
+ // - packetLoss : Fraction lost
+ // (loss rate in percent = 100 * packetLoss / 255).
+ // - rtt : Round-trip time in milliseconds.
+ virtual int32_t SetChannelParameters(uint32_t packetLoss, int64_t rtt) = 0;
+
+ // Encode the next frame as key frame.
+ virtual int32_t EncodeFrameType(const FrameType type) = 0;
+ protected:
+ virtual ~VideoCaptureEncodeInterface() {
+ }
+ };
+
+ // Register capture data callback
+ virtual void RegisterCaptureDataCallback(
+ VideoCaptureDataCallback& dataCallback) = 0;
+
+ // Remove capture data callback
+ virtual void DeRegisterCaptureDataCallback() = 0;
+
+ // Register capture callback.
+ virtual void RegisterCaptureCallback(VideoCaptureFeedBack& callBack) = 0;
+
+ // Remove capture callback.
+ virtual void DeRegisterCaptureCallback() = 0;
+
+ // Start capture device
+ virtual int32_t StartCapture(
+ const VideoCaptureCapability& capability) = 0;
+
+ virtual int32_t StopCapture() = 0;
+
+ // Returns the name of the device used by this module.
+ virtual const char* CurrentDeviceName() const = 0;
+
+ // Returns true if the capture device is running
+ virtual bool CaptureStarted() = 0;
+
+ // Gets the current configuration.
+ virtual int32_t CaptureSettings(VideoCaptureCapability& settings) = 0;
+
+ virtual void SetCaptureDelay(int32_t delayMS) = 0;
+
+ // Returns the current CaptureDelay. Only valid when the camera is running.
+ virtual int32_t CaptureDelay() = 0;
+
+ // Set the rotation of the captured frames.
+ // If the rotation is set to the same as returned by
+ // DeviceInfo::GetOrientation the captured frames are
+ // displayed correctly if rendered.
+ virtual int32_t SetCaptureRotation(VideoRotation rotation) = 0;
+
+ // Tells the capture module whether to apply the pending rotation. By default,
+ // the rotation is applied and the generated frame is upright. When set to
+ // false, generated frames will carry the rotation information from
+ // SetCaptureRotation. Return value indicates whether this operation succeeds.
+ virtual bool SetApplyRotation(bool enable) = 0;
+
+ // Return whether the rotation is applied or left pending.
+ virtual bool GetApplyRotation() = 0;
+
+ // Gets a pointer to an encode interface if the capture device supports the
+ // requested type and size. NULL otherwise.
+ virtual VideoCaptureEncodeInterface* GetEncodeInterface(
+ const VideoCodec& codec) = 0;
+
+ virtual void EnableFrameRateCallback(const bool enable) = 0;
+ virtual void EnableNoPictureAlarm(const bool enable) = 0;
+
+protected:
+ virtual ~VideoCaptureModule() {};
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_VIDEO_CAPTURE_INCLUDE_VIDEO_CAPTURE_H_
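
For reference, a minimal sketch of driving the VideoCaptureModule interface above from an application (illustrative only, not part of this commit; the frame handling, the chosen capability values and the final Release() call are assumptions about the embedding code):

    #include "webrtc/modules/video_capture/include/video_capture_factory.h"

    // Receives frames and capture-delay updates from the module.
    class FrameSink : public webrtc::VideoCaptureDataCallback {
     public:
      void OnIncomingCapturedFrame(const int32_t id,
                                   const webrtc::VideoFrame& videoFrame) override {
        // Hand the captured frame to a renderer or encoder here.
      }
      void OnCaptureDelayChanged(const int32_t id, const int32_t delay) override {
        // The reported capture delay (in ms) changed.
      }
    };

    void RunCapture(const char* device_unique_id) {
      webrtc::VideoCaptureModule* module =
          webrtc::VideoCaptureFactory::Create(0, device_unique_id);
      if (!module)
        return;

      FrameSink sink;
      module->RegisterCaptureDataCallback(sink);

      webrtc::VideoCaptureCapability capability;
      capability.width = 640;
      capability.height = 480;
      capability.maxFPS = 30;
      capability.rawType = webrtc::kVideoI420;

      if (module->StartCapture(capability) == 0) {
        // Frames now arrive on OnIncomingCapturedFrame() until StopCapture().
        module->StopCapture();
      }
      module->DeRegisterCaptureDataCallback();
      module->Release();  // Assumed: module is ref-counted via RefCountedModule.
    }
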
diff --git a/webrtc/modules/video_capture/include/video_capture_defines.h b/webrtc/modules/video_capture/include/video_capture_defines.h
new file mode 100644
index 0000000000..1dee4fa814
--- /dev/null
+++ b/webrtc/modules/video_capture/include/video_capture_defines.h
@@ -0,0 +1,118 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_INCLUDE_VIDEO_CAPTURE_DEFINES_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_INCLUDE_VIDEO_CAPTURE_DEFINES_H_
+
+#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/typedefs.h"
+#include "webrtc/video_frame.h"
+
+namespace webrtc
+{
+// Defines
+#ifndef NULL
+ #define NULL 0
+#endif
+
+enum {kVideoCaptureUniqueNameLength =1024}; // Max unique capture device name length
+enum {kVideoCaptureDeviceNameLength =256}; // Max capture device name length
+enum {kVideoCaptureProductIdLength =128}; // Max product id length
+
+struct VideoCaptureCapability
+{
+ int32_t width;
+ int32_t height;
+ int32_t maxFPS;
+ int32_t expectedCaptureDelay;
+ RawVideoType rawType;
+ VideoCodecType codecType;
+ bool interlaced;
+
+ VideoCaptureCapability()
+ {
+ width = 0;
+ height = 0;
+ maxFPS = 0;
+ expectedCaptureDelay = 0;
+ rawType = kVideoUnknown;
+ codecType = kVideoCodecUnknown;
+ interlaced = false;
+ }
+ bool operator!=(const VideoCaptureCapability &other) const
+ {
+ if (width != other.width)
+ return true;
+ if (height != other.height)
+ return true;
+ if (maxFPS != other.maxFPS)
+ return true;
+ if (rawType != other.rawType)
+ return true;
+ if (codecType != other.codecType)
+ return true;
+ if (interlaced != other.interlaced)
+ return true;
+ return false;
+ }
+ bool operator==(const VideoCaptureCapability &other) const
+ {
+ return !operator!=(other);
+ }
+};
+
+enum VideoCaptureAlarm
+{
+ Raised = 0,
+ Cleared = 1
+};
+
+/* External Capture interface. Returned by Create
+ and implemented by the capture module.
+ */
+class VideoCaptureExternal
+{
+public:
+ // |capture_time| must be specified in the NTP time format in milliseconds.
+ virtual int32_t IncomingFrame(uint8_t* videoFrame,
+ size_t videoFrameLength,
+ const VideoCaptureCapability& frameInfo,
+ int64_t captureTime = 0) = 0;
+protected:
+ ~VideoCaptureExternal() {}
+};
+
+// Callback class to be implemented by module user
+class VideoCaptureDataCallback
+{
+public:
+ virtual void OnIncomingCapturedFrame(const int32_t id,
+ const VideoFrame& videoFrame) = 0;
+ virtual void OnCaptureDelayChanged(const int32_t id,
+ const int32_t delay) = 0;
+protected:
+ virtual ~VideoCaptureDataCallback(){}
+};
+
+class VideoCaptureFeedBack
+{
+public:
+ virtual void OnCaptureFrameRate(const int32_t id,
+ const uint32_t frameRate) = 0;
+ virtual void OnNoPictureAlarm(const int32_t id,
+ const VideoCaptureAlarm alarm) = 0;
+protected:
+ virtual ~VideoCaptureFeedBack(){}
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CAPTURE_INCLUDE_VIDEO_CAPTURE_DEFINES_H_
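
For reference, a minimal sketch of the push-style VideoCaptureExternal path defined above (illustrative only, not part of this commit; the factory overload taking a VideoCaptureExternal*& is the one declared in video_capture_factory.h, and the tightly packed I420 layout plus the NTP timestamp are caller-side assumptions):

    #include "webrtc/modules/video_capture/include/video_capture_factory.h"

    // Feed one externally produced I420 frame into the capture module.
    void PushExternalFrame(uint8_t* i420_data, int width, int height,
                           int64_t ntp_time_ms) {
      webrtc::VideoCaptureExternal* external_capture = NULL;
      webrtc::VideoCaptureModule* module =
          webrtc::VideoCaptureFactory::Create(0, external_capture);
      if (!module || !external_capture)
        return;

      webrtc::VideoCaptureCapability frame_info;
      frame_info.width = width;
      frame_info.height = height;
      frame_info.maxFPS = 30;
      frame_info.rawType = webrtc::kVideoI420;

      // I420: one full-resolution Y plane plus two quarter-resolution chroma planes.
      const size_t frame_length =
          static_cast<size_t>(width * height + 2 * ((width / 2) * (height / 2)));

      // |captureTime| must be given in the NTP time format, in milliseconds.
      external_capture->IncomingFrame(i420_data, frame_length, frame_info,
                                      ntp_time_ms);
      module->Release();  // Assumed: caller releases the ref-counted module.
    }
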
diff --git a/webrtc/modules/video_capture/include/video_capture_factory.h b/webrtc/modules/video_capture/include/video_capture_factory.h
new file mode 100644
index 0000000000..f78437d1a0
--- /dev/null
+++ b/webrtc/modules/video_capture/include/video_capture_factory.h
@@ -0,0 +1,45 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file contains interfaces used for creating the VideoCaptureModule
+// and DeviceInfo.
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_INCLUDE_VIDEO_CAPTURE_FACTORY_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_INCLUDE_VIDEO_CAPTURE_FACTORY_H_
+
+#include "webrtc/modules/video_capture/include/video_capture.h"
+
+namespace webrtc {
+
+class VideoCaptureFactory {
+ public:
+ // Create a video capture module object
+ // id - unique identifier of this video capture module object.
+ // deviceUniqueIdUTF8 - name of the device.
+ // Available names can be found by using GetDeviceName
+ static VideoCaptureModule* Create(const int32_t id,
+ const char* deviceUniqueIdUTF8);
+
+ // Create a video capture module object used for external capture.
+ // id - unique identifier of this video capture module object
+ // externalCapture - [out] interface to call when a new frame is captured.
+ static VideoCaptureModule* Create(const int32_t id,
+ VideoCaptureExternal*& externalCapture);
+
+ static VideoCaptureModule::DeviceInfo* CreateDeviceInfo(
+ const int32_t id);
+
+ private:
+ ~VideoCaptureFactory();
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CAPTURE_INCLUDE_VIDEO_CAPTURE_FACTORY_H_
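
For reference, a minimal enumeration sketch built on the factory interface above (illustrative only, not part of this commit; the module id 0 and the error handling are arbitrary choices):

    #include "webrtc/modules/video_capture/include/video_capture_factory.h"

    // Enumerate capture devices and open the first one found.
    webrtc::VideoCaptureModule* OpenFirstDevice() {
      webrtc::VideoCaptureModule::DeviceInfo* info =
          webrtc::VideoCaptureFactory::CreateDeviceInfo(0);
      if (!info || info->NumberOfDevices() == 0) {
        delete info;
        return NULL;
      }

      char device_name[webrtc::kVideoCaptureDeviceNameLength];
      char unique_id[webrtc::kVideoCaptureUniqueNameLength];
      info->GetDeviceName(0, device_name, sizeof(device_name),
                          unique_id, sizeof(unique_id));
      delete info;

      // The returned module follows the RefCountedModule ownership contract.
      return webrtc::VideoCaptureFactory::Create(0, unique_id);
    }
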
diff --git a/webrtc/modules/video_capture/ios/device_info_ios.h b/webrtc/modules/video_capture/ios/device_info_ios.h
new file mode 100644
index 0000000000..6af7c33899
--- /dev/null
+++ b/webrtc/modules/video_capture/ios/device_info_ios.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_IOS_DEVICE_INFO_IOS_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_IOS_DEVICE_INFO_IOS_H_
+
+#include "webrtc/modules/video_capture/device_info_impl.h"
+
+#include <map>
+
+namespace webrtc {
+namespace videocapturemodule {
+class DeviceInfoIos : public DeviceInfoImpl {
+ public:
+ explicit DeviceInfoIos(const int32_t device_id);
+ virtual ~DeviceInfoIos();
+
+ // Implementation of DeviceInfoImpl.
+ int32_t Init() override;
+ uint32_t NumberOfDevices() override;
+ int32_t GetDeviceName(uint32_t deviceNumber,
+ char* deviceNameUTF8,
+ uint32_t deviceNameLength,
+ char* deviceUniqueIdUTF8,
+ uint32_t deviceUniqueIdUTF8Length,
+ char* productUniqueIdUTF8 = 0,
+ uint32_t productUniqueIdUTF8Length = 0) override;
+
+ int32_t NumberOfCapabilities(const char* deviceUniqueIdUTF8) override;
+
+ int32_t GetCapability(const char* deviceUniqueIdUTF8,
+ const uint32_t deviceCapabilityNumber,
+ VideoCaptureCapability& capability) override;
+
+ int32_t DisplayCaptureSettingsDialogBox(const char* deviceUniqueIdUTF8,
+ const char* dialogTitleUTF8,
+ void* parentWindow,
+ uint32_t positionX,
+ uint32_t positionY) override;
+
+ int32_t GetOrientation(const char* deviceUniqueIdUTF8,
+ VideoRotation& orientation) override;
+
+ int32_t CreateCapabilityMap(const char* device_unique_id_utf8) override;
+
+ private:
+ std::map<std::string, VideoCaptureCapabilities> _capabilitiesMap;
+};
+
+} // namespace videocapturemodule
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CAPTURE_IOS_DEVICE_INFO_IOS_H_
diff --git a/webrtc/modules/video_capture/ios/device_info_ios.mm b/webrtc/modules/video_capture/ios/device_info_ios.mm
new file mode 100644
index 0000000000..307e5d3605
--- /dev/null
+++ b/webrtc/modules/video_capture/ios/device_info_ios.mm
@@ -0,0 +1,178 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#if !defined(__has_feature) || !__has_feature(objc_arc)
+#error "This file requires ARC support."
+#endif
+
+#include <AVFoundation/AVFoundation.h>
+
+#include <string>
+
+#include "webrtc/modules/video_capture/ios/device_info_ios.h"
+#include "webrtc/modules/video_capture/ios/device_info_ios_objc.h"
+#include "webrtc/modules/video_capture/video_capture_impl.h"
+#include "webrtc/system_wrappers/include/trace.h"
+
+using namespace webrtc;
+using namespace videocapturemodule;
+
+static NSArray *camera_presets = @[AVCaptureSessionPreset352x288,
+ AVCaptureSessionPreset640x480,
+ AVCaptureSessionPreset1280x720,
+ AVCaptureSessionPreset1920x1080];
+
+
+#define IOS_UNSUPPORTED() \
+ WEBRTC_TRACE(kTraceError, \
+ kTraceVideoCapture, \
+ _id, \
+ "%s is not supported on the iOS platform.", \
+ __FUNCTION__); \
+ return -1;
+
+VideoCaptureModule::DeviceInfo* VideoCaptureImpl::CreateDeviceInfo(
+ const int32_t device_id) {
+ return new DeviceInfoIos(device_id);
+}
+
+DeviceInfoIos::DeviceInfoIos(const int32_t device_id)
+ : DeviceInfoImpl(device_id) {
+ this->Init();
+}
+
+DeviceInfoIos::~DeviceInfoIos() {}
+
+int32_t DeviceInfoIos::Init() {
+ // Fill in all device capabilities.
+
+ int deviceCount = [DeviceInfoIosObjC captureDeviceCount];
+
+ for (int i = 0; i < deviceCount; i++) {
+ AVCaptureDevice *avDevice = [DeviceInfoIosObjC captureDeviceForIndex:i];
+ VideoCaptureCapabilities capabilityVector;
+
+ for (NSString *preset in camera_presets) {
+ BOOL support = [avDevice supportsAVCaptureSessionPreset:preset];
+ if (support) {
+ VideoCaptureCapability capability =
+ [DeviceInfoIosObjC capabilityForPreset:preset];
+ capabilityVector.push_back(capability);
+ }
+ }
+
+ char deviceNameUTF8[256];
+ char deviceId[256];
+ this->GetDeviceName(i, deviceNameUTF8, 256, deviceId, 256);
+ std::string deviceIdCopy(deviceId);
+ std::pair<std::string, VideoCaptureCapabilities> mapPair =
+ std::pair<std::string, VideoCaptureCapabilities>
+ (deviceIdCopy, capabilityVector);
+ _capabilitiesMap.insert(mapPair);
+ }
+
+ return 0;
+}
+
+uint32_t DeviceInfoIos::NumberOfDevices() {
+ return [DeviceInfoIosObjC captureDeviceCount];
+}
+
+int32_t DeviceInfoIos::GetDeviceName(uint32_t deviceNumber,
+ char* deviceNameUTF8,
+ uint32_t deviceNameUTF8Length,
+ char* deviceUniqueIdUTF8,
+ uint32_t deviceUniqueIdUTF8Length,
+ char* productUniqueIdUTF8,
+ uint32_t productUniqueIdUTF8Length) {
+ NSString* deviceName = [DeviceInfoIosObjC deviceNameForIndex:deviceNumber];
+
+ NSString* deviceUniqueId =
+ [DeviceInfoIosObjC deviceUniqueIdForIndex:deviceNumber];
+
+ strncpy(deviceNameUTF8, [deviceName UTF8String], deviceNameUTF8Length);
+ deviceNameUTF8[deviceNameUTF8Length - 1] = '\0';
+
+ strncpy(deviceUniqueIdUTF8,
+ [deviceUniqueId UTF8String],
+ deviceUniqueIdUTF8Length);
+ deviceUniqueIdUTF8[deviceUniqueIdUTF8Length - 1] = '\0';
+
+ if (productUniqueIdUTF8) {
+ productUniqueIdUTF8[0] = '\0';
+ }
+
+ return 0;
+}
+
+int32_t DeviceInfoIos::NumberOfCapabilities(const char* deviceUniqueIdUTF8) {
+ int32_t numberOfCapabilities = 0;
+ std::string deviceUniqueId(deviceUniqueIdUTF8);
+ std::map<std::string, VideoCaptureCapabilities>::iterator it =
+ _capabilitiesMap.find(deviceUniqueId);
+
+ if (it != _capabilitiesMap.end()) {
+ numberOfCapabilities = it->second.size();
+ }
+ return numberOfCapabilities;
+}
+
+int32_t DeviceInfoIos::GetCapability(const char* deviceUniqueIdUTF8,
+ const uint32_t deviceCapabilityNumber,
+ VideoCaptureCapability& capability) {
+ std::string deviceUniqueId(deviceUniqueIdUTF8);
+ std::map<std::string, VideoCaptureCapabilities>::iterator it =
+ _capabilitiesMap.find(deviceUniqueId);
+
+ if (it != _capabilitiesMap.end()) {
+ VideoCaptureCapabilities deviceCapabilities = it->second;
+
+ if (deviceCapabilityNumber < deviceCapabilities.size()) {
+ VideoCaptureCapability cap;
+ cap = deviceCapabilities[deviceCapabilityNumber];
+ capability = cap;
+ return 0;
+ }
+ }
+
+ return -1;
+}
+
+int32_t DeviceInfoIos::DisplayCaptureSettingsDialogBox(
+ const char* deviceUniqueIdUTF8,
+ const char* dialogTitleUTF8,
+ void* parentWindow,
+ uint32_t positionX,
+ uint32_t positionY) {
+ IOS_UNSUPPORTED();
+}
+
+int32_t DeviceInfoIos::GetOrientation(const char* deviceUniqueIdUTF8,
+ VideoRotation& orientation) {
+ if (strcmp(deviceUniqueIdUTF8, "Front Camera") == 0) {
+ orientation = kVideoRotation_0;
+ } else {
+ orientation = kVideoRotation_90;
+ }
+ return orientation;
+}
+
+int32_t DeviceInfoIos::CreateCapabilityMap(const char* deviceUniqueIdUTF8) {
+ std::string deviceName(deviceUniqueIdUTF8);
+ std::map<std::string, std::vector<VideoCaptureCapability>>::iterator it =
+ _capabilitiesMap.find(deviceName);
+ VideoCaptureCapabilities deviceCapabilities;
+ if (it != _capabilitiesMap.end()) {
+ _captureCapabilities = it->second;
+ return 0;
+ }
+
+ return -1;
+}
diff --git a/webrtc/modules/video_capture/ios/device_info_ios_objc.h b/webrtc/modules/video_capture/ios/device_info_ios_objc.h
new file mode 100644
index 0000000000..65444bedcb
--- /dev/null
+++ b/webrtc/modules/video_capture/ios/device_info_ios_objc.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_IOS_DEVICE_INFO_IOS_OBJC_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_IOS_DEVICE_INFO_IOS_OBJC_H_
+
+#import <AVFoundation/AVFoundation.h>
+
+#include "webrtc/modules/video_capture/include/video_capture_defines.h"
+
+@interface DeviceInfoIosObjC : NSObject
++ (int)captureDeviceCount;
++ (AVCaptureDevice*)captureDeviceForIndex:(int)index;
++ (AVCaptureDevice*)captureDeviceForUniqueId:(NSString*)uniqueId;
++ (NSString*)deviceNameForIndex:(int)index;
++ (NSString*)deviceUniqueIdForIndex:(int)index;
++ (NSString*)deviceNameForUniqueId:(NSString*)uniqueId;
++ (webrtc::VideoCaptureCapability)capabilityForPreset:(NSString*)preset;
+
+@end
+
+#endif // WEBRTC_MODULES_VIDEO_CAPTURE_IOS_DEVICE_INFO_IOS_OBJC_H_
diff --git a/webrtc/modules/video_capture/ios/device_info_ios_objc.mm b/webrtc/modules/video_capture/ios/device_info_ios_objc.mm
new file mode 100644
index 0000000000..818f8624c7
--- /dev/null
+++ b/webrtc/modules/video_capture/ios/device_info_ios_objc.mm
@@ -0,0 +1,100 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#if !defined(__has_feature) || !__has_feature(objc_arc)
+#error "This file requires ARC support."
+#endif
+
+#import <AVFoundation/AVFoundation.h>
+
+#import "webrtc/modules/video_capture/ios/device_info_ios_objc.h"
+#include "webrtc/modules/video_capture/video_capture_config.h"
+
+@implementation DeviceInfoIosObjC
+
++ (int)captureDeviceCount {
+ return [[AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo] count];
+}
+
++ (AVCaptureDevice*)captureDeviceForIndex:(int)index {
+ return [[AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]
+ objectAtIndex:index];
+}
+
++ (AVCaptureDevice*)captureDeviceForUniqueId:(NSString*)uniqueId {
+ for (AVCaptureDevice* device in
+ [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
+ if ([uniqueId isEqual:device.uniqueID]) {
+ return device;
+ }
+ }
+
+ return nil;
+}
+
++ (NSString*)deviceNameForIndex:(int)index {
+ return [DeviceInfoIosObjC captureDeviceForIndex:index].localizedName;
+}
+
++ (NSString*)deviceUniqueIdForIndex:(int)index {
+ return [DeviceInfoIosObjC captureDeviceForIndex:index].uniqueID;
+}
+
++ (NSString*)deviceNameForUniqueId:(NSString*)uniqueId {
+ return [[AVCaptureDevice deviceWithUniqueID:uniqueId] localizedName];
+}
+
++ (webrtc::VideoCaptureCapability)capabilityForPreset:(NSString*)preset {
+ webrtc::VideoCaptureCapability capability;
+
+ // TODO(tkchin): Maybe query AVCaptureDevice for supported formats, and
+ // then get the dimensions / frame rate from each supported format
+ if ([preset isEqualToString:AVCaptureSessionPreset352x288]) {
+ capability.width = 352;
+ capability.height = 288;
+ capability.maxFPS = 30;
+ capability.expectedCaptureDelay =
+ webrtc::videocapturemodule::kDefaultCaptureDelay;
+ capability.rawType = webrtc::kVideoNV12;
+ capability.codecType = webrtc::kVideoCodecUnknown;
+ capability.interlaced = false;
+ } else if ([preset isEqualToString:AVCaptureSessionPreset640x480]) {
+ capability.width = 640;
+ capability.height = 480;
+ capability.maxFPS = 30;
+ capability.expectedCaptureDelay =
+ webrtc::videocapturemodule::kDefaultCaptureDelay;
+ capability.rawType = webrtc::kVideoNV12;
+ capability.codecType = webrtc::kVideoCodecUnknown;
+ capability.interlaced = false;
+ } else if ([preset isEqualToString:AVCaptureSessionPreset1280x720]) {
+ capability.width = 1280;
+ capability.height = 720;
+ capability.maxFPS = 30;
+ capability.expectedCaptureDelay =
+ webrtc::videocapturemodule::kDefaultCaptureDelay;
+ capability.rawType = webrtc::kVideoNV12;
+ capability.codecType = webrtc::kVideoCodecUnknown;
+ capability.interlaced = false;
+ } else if ([preset isEqualToString:AVCaptureSessionPreset1920x1080]) {
+ capability.width = 1920;
+ capability.height = 1080;
+ capability.maxFPS = 30;
+ capability.expectedCaptureDelay =
+ webrtc::videocapturemodule::kDefaultCaptureDelay;
+ capability.rawType = webrtc::kVideoNV12;
+ capability.codecType = webrtc::kVideoCodecUnknown;
+ capability.interlaced = false;
+ }
+
+ return capability;
+}
+
+@end
diff --git a/webrtc/modules/video_capture/ios/rtc_video_capture_ios_objc.h b/webrtc/modules/video_capture/ios/rtc_video_capture_ios_objc.h
new file mode 100644
index 0000000000..2653958130
--- /dev/null
+++ b/webrtc/modules/video_capture/ios/rtc_video_capture_ios_objc.h
@@ -0,0 +1,39 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_IOS_VIDEO_CAPTURE_IOS_OBJC_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_IOS_VIDEO_CAPTURE_IOS_OBJC_H_
+
+#import <Foundation/Foundation.h>
+#import <UIKit/UIKit.h>
+
+#include "webrtc/modules/video_capture/ios/video_capture_ios.h"
+
+// The following class listens to a notification with name:
+// 'StatusBarOrientationDidChange'.
+// This notification must be posted in order for the capturer to reflect the
+// orientation change in video w.r.t. the application orientation.
+@interface RTCVideoCaptureIosObjC
+ : NSObject<AVCaptureVideoDataOutputSampleBufferDelegate>
+
+@property webrtc::VideoRotation frameRotation;
+
+// Custom initializer. An instance of VideoCaptureIos is needed
+// for callback purposes.
+// The default init methods have been overridden to return nil.
+- (id)initWithOwner:(webrtc::videocapturemodule::VideoCaptureIos*)owner
+ captureId:(int)captureId;
+- (BOOL)setCaptureDeviceByUniqueId:(NSString*)uniqueId;
+- (BOOL)startCaptureWithCapability:
+ (const webrtc::VideoCaptureCapability&)capability;
+- (BOOL)stopCapture;
+
+@end
+#endif // WEBRTC_MODULES_VIDEO_CAPTURE_IOS_VIDEO_CAPTURE_IOS_OBJC_H_
diff --git a/webrtc/modules/video_capture/ios/rtc_video_capture_ios_objc.mm b/webrtc/modules/video_capture/ios/rtc_video_capture_ios_objc.mm
new file mode 100644
index 0000000000..e36c83bad9
--- /dev/null
+++ b/webrtc/modules/video_capture/ios/rtc_video_capture_ios_objc.mm
@@ -0,0 +1,423 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#if !defined(__has_feature) || !__has_feature(objc_arc)
+#error "This file requires ARC support."
+#endif
+
+#import <UIKit/UIKit.h>
+
+#import "webrtc/modules/video_capture/ios/device_info_ios_objc.h"
+#import "webrtc/modules/video_capture/ios/rtc_video_capture_ios_objc.h"
+
+#include "webrtc/system_wrappers/include/trace.h"
+
+using namespace webrtc;
+using namespace webrtc::videocapturemodule;
+
+@interface RTCVideoCaptureIosObjC (hidden)
+- (int)changeCaptureInputWithName:(NSString*)captureDeviceName;
+@end
+
+@implementation RTCVideoCaptureIosObjC {
+ webrtc::videocapturemodule::VideoCaptureIos* _owner;
+ webrtc::VideoCaptureCapability _capability;
+ AVCaptureSession* _captureSession;
+ int _captureId;
+ BOOL _orientationHasChanged;
+ AVCaptureConnection* _connection;
+ BOOL _captureChanging; // Guarded by _captureChangingCondition.
+ NSCondition* _captureChangingCondition;
+}
+
+@synthesize frameRotation = _framRotation;
+
+- (id)initWithOwner:(VideoCaptureIos*)owner captureId:(int)captureId {
+ if ((self = [super init])) {
+ _owner = owner;
+ _captureId = captureId;
+ _captureSession = [[AVCaptureSession alloc] init];
+#if defined(__IPHONE_7_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_7_0
+ NSString* version = [[UIDevice currentDevice] systemVersion];
+ if ([version integerValue] >= 7) {
+ _captureSession.usesApplicationAudioSession = NO;
+ }
+#endif
+ _captureChanging = NO;
+ _captureChangingCondition = [[NSCondition alloc] init];
+
+ if (!_captureSession || !_captureChangingCondition) {
+ return nil;
+ }
+
+ // create and configure a new output (using callbacks)
+ AVCaptureVideoDataOutput* captureOutput =
+ [[AVCaptureVideoDataOutput alloc] init];
+ NSString* key = (NSString*)kCVPixelBufferPixelFormatTypeKey;
+
+ NSNumber* val = [NSNumber
+ numberWithUnsignedInt:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange];
+ NSDictionary* videoSettings =
+ [NSDictionary dictionaryWithObject:val forKey:key];
+ captureOutput.videoSettings = videoSettings;
+
+ // add new output
+ if ([_captureSession canAddOutput:captureOutput]) {
+ [_captureSession addOutput:captureOutput];
+ } else {
+ WEBRTC_TRACE(kTraceError,
+ kTraceVideoCapture,
+ _captureId,
+ "%s:%s:%d Could not add output to AVCaptureSession ",
+ __FILE__,
+ __FUNCTION__,
+ __LINE__);
+ }
+
+ [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
+
+ NSNotificationCenter* notify = [NSNotificationCenter defaultCenter];
+ [notify addObserver:self
+ selector:@selector(onVideoError:)
+ name:AVCaptureSessionRuntimeErrorNotification
+ object:_captureSession];
+ [notify addObserver:self
+ selector:@selector(deviceOrientationDidChange:)
+ name:UIDeviceOrientationDidChangeNotification
+ object:nil];
+ }
+
+ return self;
+}
+
+- (void)directOutputToSelf {
+ [[self currentOutput]
+ setSampleBufferDelegate:self
+ queue:dispatch_get_global_queue(
+ DISPATCH_QUEUE_PRIORITY_DEFAULT, 0)];
+}
+
+- (void)directOutputToNil {
+ [[self currentOutput] setSampleBufferDelegate:nil queue:NULL];
+}
+
+- (void)deviceOrientationDidChange:(NSNotification*)notification {
+ _orientationHasChanged = YES;
+ [self setRelativeVideoOrientation];
+}
+
+- (void)dealloc {
+ [[NSNotificationCenter defaultCenter] removeObserver:self];
+}
+
+- (BOOL)setCaptureDeviceByUniqueId:(NSString*)uniqueId {
+ [self waitForCaptureChangeToFinish];
+ // check to see if the camera is already set
+ if (_captureSession) {
+ NSArray* currentInputs = [NSArray arrayWithArray:[_captureSession inputs]];
+ if ([currentInputs count] > 0) {
+ AVCaptureDeviceInput* currentInput = [currentInputs objectAtIndex:0];
+ if ([uniqueId isEqualToString:[currentInput.device localizedName]]) {
+ return YES;
+ }
+ }
+ }
+
+ return [self changeCaptureInputByUniqueId:uniqueId];
+}
+
+- (BOOL)startCaptureWithCapability:(const VideoCaptureCapability&)capability {
+ [self waitForCaptureChangeToFinish];
+ if (!_captureSession) {
+ return NO;
+ }
+
+ // check limits of the resolution
+ if (capability.maxFPS < 0 || capability.maxFPS > 60) {
+ return NO;
+ }
+
+ if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset1920x1080]) {
+ if (capability.width > 1920 || capability.height > 1080) {
+ return NO;
+ }
+ } else if ([_captureSession
+ canSetSessionPreset:AVCaptureSessionPreset1280x720]) {
+ if (capability.width > 1280 || capability.height > 720) {
+ return NO;
+ }
+ } else if ([_captureSession
+ canSetSessionPreset:AVCaptureSessionPreset640x480]) {
+ if (capability.width > 640 || capability.height > 480) {
+ return NO;
+ }
+ } else if ([_captureSession
+ canSetSessionPreset:AVCaptureSessionPreset352x288]) {
+ if (capability.width > 352 || capability.height > 288) {
+ return NO;
+ }
+ } else if (capability.width < 0 || capability.height < 0) {
+ return NO;
+ }
+
+ _capability = capability;
+
+ AVCaptureVideoDataOutput* currentOutput = [self currentOutput];
+ if (!currentOutput)
+ return NO;
+
+ [self directOutputToSelf];
+
+ _orientationHasChanged = NO;
+ _captureChanging = YES;
+ dispatch_async(
+ dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0),
+ ^(void) { [self startCaptureInBackgroundWithOutput:currentOutput]; });
+ return YES;
+}
+
+- (AVCaptureVideoDataOutput*)currentOutput {
+ return [[_captureSession outputs] firstObject];
+}
+
+- (void)startCaptureInBackgroundWithOutput:
+ (AVCaptureVideoDataOutput*)currentOutput {
+ NSString* captureQuality =
+ [NSString stringWithString:AVCaptureSessionPresetLow];
+ if (_capability.width >= 1920 || _capability.height >= 1080) {
+ captureQuality =
+ [NSString stringWithString:AVCaptureSessionPreset1920x1080];
+ } else if (_capability.width >= 1280 || _capability.height >= 720) {
+ captureQuality = [NSString stringWithString:AVCaptureSessionPreset1280x720];
+ } else if (_capability.width >= 640 || _capability.height >= 480) {
+ captureQuality = [NSString stringWithString:AVCaptureSessionPreset640x480];
+ } else if (_capability.width >= 352 || _capability.height >= 288) {
+ captureQuality = [NSString stringWithString:AVCaptureSessionPreset352x288];
+ }
+
+ // begin configuration for the AVCaptureSession
+ [_captureSession beginConfiguration];
+
+ // picture resolution
+ [_captureSession setSessionPreset:captureQuality];
+
+ // take care of capture framerate now
+ NSArray* sessionInputs = _captureSession.inputs;
+ AVCaptureDeviceInput* deviceInput = [sessionInputs count] > 0 ?
+ sessionInputs[0] : nil;
+ AVCaptureDevice* inputDevice = deviceInput.device;
+ if (inputDevice) {
+ AVCaptureDeviceFormat* activeFormat = inputDevice.activeFormat;
+ NSArray* supportedRanges = activeFormat.videoSupportedFrameRateRanges;
+ AVFrameRateRange* targetRange = [supportedRanges count] > 0 ?
+ supportedRanges[0] : nil;
+ // Find the largest supported frame rate that does not exceed the
+ // requested maxFPS.
+ for (AVFrameRateRange* range in supportedRanges) {
+ if (range.maxFrameRate <= _capability.maxFPS &&
+ targetRange.maxFrameRate <= range.maxFrameRate) {
+ targetRange = range;
+ }
+ }
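+ // Both the min and max frame durations are locked to the selected range's
+ // minFrameDuration below, which pins the camera to a single rate (for
+ // example, a 30 fps range corresponds to a 1/30 s frame duration).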
+ if (targetRange && [inputDevice lockForConfiguration:NULL]) {
+ inputDevice.activeVideoMinFrameDuration = targetRange.minFrameDuration;
+ inputDevice.activeVideoMaxFrameDuration = targetRange.minFrameDuration;
+ [inputDevice unlockForConfiguration];
+ }
+ }
+
+ _connection = [currentOutput connectionWithMediaType:AVMediaTypeVideo];
+ [self setRelativeVideoOrientation];
+
+ // finished configuring, commit settings to AVCaptureSession.
+ [_captureSession commitConfiguration];
+
+ [_captureSession startRunning];
+ [self signalCaptureChangeEnd];
+}
+
+- (void)setRelativeVideoOrientation {
+ if (!_connection.supportsVideoOrientation) {
+ return;
+ }
+
+ switch ([UIDevice currentDevice].orientation) {
+ case UIDeviceOrientationPortrait:
+ _connection.videoOrientation =
+ AVCaptureVideoOrientationPortrait;
+ break;
+ case UIDeviceOrientationPortraitUpsideDown:
+ _connection.videoOrientation =
+ AVCaptureVideoOrientationPortraitUpsideDown;
+ break;
+ case UIDeviceOrientationLandscapeLeft:
+ _connection.videoOrientation =
+ AVCaptureVideoOrientationLandscapeRight;
+ break;
+ case UIDeviceOrientationLandscapeRight:
+ _connection.videoOrientation =
+ AVCaptureVideoOrientationLandscapeLeft;
+ break;
+ case UIDeviceOrientationFaceUp:
+ case UIDeviceOrientationFaceDown:
+ case UIDeviceOrientationUnknown:
+ if (!_orientationHasChanged) {
+ _connection.videoOrientation =
+ AVCaptureVideoOrientationPortrait;
+ }
+ break;
+ }
+}
+
+- (void)onVideoError:(NSNotification*)notification {
+ NSLog(@"onVideoError: %@", notification);
+ // TODO(sjlee): add specific error handling for this notification.
+ WEBRTC_TRACE(kTraceError,
+ kTraceVideoCapture,
+ _captureId,
+ "%s:%s:%d [AVCaptureSession startRunning] error.",
+ __FILE__,
+ __FUNCTION__,
+ __LINE__);
+}
+
+- (BOOL)stopCapture {
+ [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
+ _orientationHasChanged = NO;
+ [self waitForCaptureChangeToFinish];
+ [self directOutputToNil];
+
+ if (!_captureSession) {
+ return NO;
+ }
+
+ _captureChanging = YES;
+ dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0),
+ ^(void) { [self stopCaptureInBackground]; });
+ return YES;
+}
+
+- (void)stopCaptureInBackground {
+ [_captureSession stopRunning];
+ [self signalCaptureChangeEnd];
+}
+
+- (BOOL)changeCaptureInputByUniqueId:(NSString*)uniqueId {
+ [self waitForCaptureChangeToFinish];
+ NSArray* currentInputs = [_captureSession inputs];
+ // remove current input
+ if ([currentInputs count] > 0) {
+ AVCaptureInput* currentInput =
+ (AVCaptureInput*)[currentInputs objectAtIndex:0];
+
+ [_captureSession removeInput:currentInput];
+ }
+
+ // Look for input device with the name requested (as our input param)
+ // get list of available capture devices
+ int captureDeviceCount = [DeviceInfoIosObjC captureDeviceCount];
+ if (captureDeviceCount <= 0) {
+ return NO;
+ }
+
+ AVCaptureDevice* captureDevice =
+ [DeviceInfoIosObjC captureDeviceForUniqueId:uniqueId];
+
+ if (!captureDevice) {
+ return NO;
+ }
+
+ // now create capture session input out of AVCaptureDevice
+ NSError* deviceError = nil;
+ AVCaptureDeviceInput* newCaptureInput =
+ [AVCaptureDeviceInput deviceInputWithDevice:captureDevice
+ error:&deviceError];
+
+ if (!newCaptureInput) {
+ const char* errorMessage = [[deviceError localizedDescription] UTF8String];
+
+ WEBRTC_TRACE(kTraceError,
+ kTraceVideoCapture,
+ _captureId,
+ "%s:%s:%d deviceInputWithDevice error:%s",
+ __FILE__,
+ __FUNCTION__,
+ __LINE__,
+ errorMessage);
+
+ return NO;
+ }
+
+ // try to add our new capture device to the capture session
+ [_captureSession beginConfiguration];
+
+ BOOL addedCaptureInput = NO;
+ if ([_captureSession canAddInput:newCaptureInput]) {
+ [_captureSession addInput:newCaptureInput];
+ addedCaptureInput = YES;
+ } else {
+ addedCaptureInput = NO;
+ }
+
+ [_captureSession commitConfiguration];
+
+ return addedCaptureInput;
+}
+
+- (void)captureOutput:(AVCaptureOutput*)captureOutput
+ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
+ fromConnection:(AVCaptureConnection*)connection {
+ const int kFlags = 0;
+ CVImageBufferRef videoFrame = CMSampleBufferGetImageBuffer(sampleBuffer);
+
+ if (CVPixelBufferLockBaseAddress(videoFrame, kFlags) != kCVReturnSuccess) {
+ return;
+ }
+
+ const int kYPlaneIndex = 0;
+ const int kUVPlaneIndex = 1;
+
+ uint8_t* baseAddress =
+ (uint8_t*)CVPixelBufferGetBaseAddressOfPlane(videoFrame, kYPlaneIndex);
+ size_t yPlaneBytesPerRow =
+ CVPixelBufferGetBytesPerRowOfPlane(videoFrame, kYPlaneIndex);
+ size_t yPlaneHeight = CVPixelBufferGetHeightOfPlane(videoFrame, kYPlaneIndex);
+ size_t uvPlaneBytesPerRow =
+ CVPixelBufferGetBytesPerRowOfPlane(videoFrame, kUVPlaneIndex);
+ size_t uvPlaneHeight =
+ CVPixelBufferGetHeightOfPlane(videoFrame, kUVPlaneIndex);
+ size_t frameSize =
+ yPlaneBytesPerRow * yPlaneHeight + uvPlaneBytesPerRow * uvPlaneHeight;
+
+ VideoCaptureCapability tempCaptureCapability;
+ tempCaptureCapability.width = CVPixelBufferGetWidth(videoFrame);
+ tempCaptureCapability.height = CVPixelBufferGetHeight(videoFrame);
+ tempCaptureCapability.maxFPS = _capability.maxFPS;
+ tempCaptureCapability.rawType = kVideoNV12;
+
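+ // Note: passing the Y-plane base address together with the summed plane
+ // sizes assumes the UV plane is stored contiguously after the Y plane in
+ // this CVPixelBuffer.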
+ _owner->IncomingFrame(baseAddress, frameSize, tempCaptureCapability, 0);
+
+ CVPixelBufferUnlockBaseAddress(videoFrame, kFlags);
+}
+
+- (void)signalCaptureChangeEnd {
+ [_captureChangingCondition lock];
+ _captureChanging = NO;
+ [_captureChangingCondition signal];
+ [_captureChangingCondition unlock];
+}
+
+- (void)waitForCaptureChangeToFinish {
+ [_captureChangingCondition lock];
+ while (_captureChanging) {
+ [_captureChangingCondition wait];
+ }
+ [_captureChangingCondition unlock];
+}
+@end
diff --git a/webrtc/modules/video_capture/ios/video_capture_ios.h b/webrtc/modules/video_capture/ios/video_capture_ios.h
new file mode 100644
index 0000000000..1afcbaddf5
--- /dev/null
+++ b/webrtc/modules/video_capture/ios/video_capture_ios.h
@@ -0,0 +1,44 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_IOS_VIDEO_CAPTURE_IOS_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_IOS_VIDEO_CAPTURE_IOS_H_
+
+#include "webrtc/modules/video_capture/video_capture_impl.h"
+
+@class RTCVideoCaptureIosObjC;
+
+namespace webrtc {
+namespace videocapturemodule {
+class VideoCaptureIos : public VideoCaptureImpl {
+ public:
+ explicit VideoCaptureIos(const int32_t capture_id);
+ virtual ~VideoCaptureIos();
+
+ static VideoCaptureModule* Create(const int32_t capture_id,
+ const char* device_unique_id_utf8);
+
+ // Implementation of VideoCaptureImpl.
+ int32_t StartCapture(const VideoCaptureCapability& capability) override;
+ int32_t StopCapture() override;
+ bool CaptureStarted() override;
+ int32_t CaptureSettings(VideoCaptureCapability& settings) override;
+
+ private:
+ RTCVideoCaptureIosObjC* capture_device_;
+ bool is_capturing_;
+ int32_t id_;
+ VideoCaptureCapability capability_;
+};
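+
+ // Rough usage sketch (illustrative values only; the unique id string comes
+ // from the matching DeviceInfo implementation):
+ //
+ //   VideoCaptureModule* module = VideoCaptureIos::Create(0, unique_id);
+ //   VideoCaptureCapability cap;
+ //   cap.width = 640;
+ //   cap.height = 480;
+ //   cap.maxFPS = 30;
+ //   module->StartCapture(cap);
+ //   ...
+ //   module->StopCapture();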
+
+} // namespace videocapturemodule
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CAPTURE_IOS_VIDEO_CAPTURE_IOS_H_
diff --git a/webrtc/modules/video_capture/ios/video_capture_ios.mm b/webrtc/modules/video_capture/ios/video_capture_ios.mm
new file mode 100644
index 0000000000..ae9b7e0805
--- /dev/null
+++ b/webrtc/modules/video_capture/ios/video_capture_ios.mm
@@ -0,0 +1,103 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#if !defined(__has_feature) || !__has_feature(objc_arc)
+#error "This file requires ARC support."
+#endif
+
+#include "webrtc/modules/video_capture/ios/device_info_ios_objc.h"
+#include "webrtc/modules/video_capture/ios/rtc_video_capture_ios_objc.h"
+#include "webrtc/system_wrappers/include/ref_count.h"
+#include "webrtc/system_wrappers/include/trace.h"
+
+using namespace webrtc;
+using namespace videocapturemodule;
+
+VideoCaptureModule* VideoCaptureImpl::Create(const int32_t capture_id,
+ const char* deviceUniqueIdUTF8) {
+ return VideoCaptureIos::Create(capture_id, deviceUniqueIdUTF8);
+}
+
+VideoCaptureIos::VideoCaptureIos(const int32_t capture_id)
+ : VideoCaptureImpl(capture_id), is_capturing_(false), id_(capture_id) {
+ capability_.width = kDefaultWidth;
+ capability_.height = kDefaultHeight;
+ capability_.maxFPS = kDefaultFrameRate;
+ capture_device_ = nil;
+}
+
+VideoCaptureIos::~VideoCaptureIos() {
+ if (is_capturing_) {
+ [capture_device_ stopCapture];
+ capture_device_ = nil;
+ }
+}
+
+VideoCaptureModule* VideoCaptureIos::Create(const int32_t capture_id,
+ const char* deviceUniqueIdUTF8) {
+ if (!deviceUniqueIdUTF8[0]) {
+ return NULL;
+ }
+
+ RefCountImpl<VideoCaptureIos>* capture_module =
+ new RefCountImpl<VideoCaptureIos>(capture_id);
+
+ const int32_t name_length = strlen(deviceUniqueIdUTF8);
+ if (name_length > kVideoCaptureUniqueNameLength)
+ return NULL;
+
+ capture_module->_deviceUniqueId = new char[name_length + 1];
+ strncpy(capture_module->_deviceUniqueId, deviceUniqueIdUTF8, name_length + 1);
+ capture_module->_deviceUniqueId[name_length] = '\0';
+
+ capture_module->capture_device_ =
+ [[RTCVideoCaptureIosObjC alloc] initWithOwner:capture_module
+ captureId:capture_module->id_];
+ if (!capture_module->capture_device_) {
+ return NULL;
+ }
+
+ if (![capture_module->capture_device_ setCaptureDeviceByUniqueId:[
+ [NSString alloc] initWithCString:deviceUniqueIdUTF8
+ encoding:NSUTF8StringEncoding]]) {
+ return NULL;
+ }
+ return capture_module;
+}
+
+int32_t VideoCaptureIos::StartCapture(
+ const VideoCaptureCapability& capability) {
+ capability_ = capability;
+
+ if (![capture_device_ startCaptureWithCapability:capability]) {
+ return -1;
+ }
+
+ is_capturing_ = true;
+
+ return 0;
+}
+
+int32_t VideoCaptureIos::StopCapture() {
+ if (![capture_device_ stopCapture]) {
+ return -1;
+ }
+
+ is_capturing_ = false;
+ return 0;
+}
+
+bool VideoCaptureIos::CaptureStarted() { return is_capturing_; }
+
+int32_t VideoCaptureIos::CaptureSettings(VideoCaptureCapability& settings) {
+ settings = capability_;
+ settings.rawType = kVideoNV12;
+ return 0;
+}
diff --git a/webrtc/modules/video_capture/linux/device_info_linux.cc b/webrtc/modules/video_capture/linux/device_info_linux.cc
new file mode 100644
index 0000000000..d3a10abb56
--- /dev/null
+++ b/webrtc/modules/video_capture/linux/device_info_linux.cc
@@ -0,0 +1,339 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_capture/linux/device_info_linux.h"
+
+#include <errno.h>
+#include <fcntl.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <sys/ioctl.h>
+#include <sys/stat.h>
+#include <unistd.h>
+//v4l includes
+#include <linux/videodev2.h>
+
+#include "webrtc/system_wrappers/include/ref_count.h"
+#include "webrtc/system_wrappers/include/trace.h"
+
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+VideoCaptureModule::DeviceInfo*
+VideoCaptureImpl::CreateDeviceInfo(const int32_t id)
+{
+ videocapturemodule::DeviceInfoLinux *deviceInfo =
+ new videocapturemodule::DeviceInfoLinux(id);
+ if (!deviceInfo)
+ {
+ deviceInfo = NULL;
+ }
+
+ return deviceInfo;
+}
+
+DeviceInfoLinux::DeviceInfoLinux(const int32_t id)
+ : DeviceInfoImpl(id)
+{
+}
+
+int32_t DeviceInfoLinux::Init()
+{
+ return 0;
+}
+
+DeviceInfoLinux::~DeviceInfoLinux()
+{
+}
+
+uint32_t DeviceInfoLinux::NumberOfDevices()
+{
+ WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideoCapture, _id, "%s", __FUNCTION__);
+
+ uint32_t count = 0;
+ char device[20];
+ int fd = -1;
+
+ /* detect /dev/video [0-63] entries */
+ for (int n = 0; n < 64; n++)
+ {
+ sprintf(device, "/dev/video%d", n);
+ if ((fd = open(device, O_RDONLY)) != -1)
+ {
+ close(fd);
+ count++;
+ }
+ }
+
+ return count;
+}
+
+int32_t DeviceInfoLinux::GetDeviceName(
+ uint32_t deviceNumber,
+ char* deviceNameUTF8,
+ uint32_t deviceNameLength,
+ char* deviceUniqueIdUTF8,
+ uint32_t deviceUniqueIdUTF8Length,
+ char* /*productUniqueIdUTF8*/,
+ uint32_t /*productUniqueIdUTF8Length*/)
+{
+ WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideoCapture, _id, "%s", __FUNCTION__);
+
+ // Traverse /dev/video [0-63]
+ uint32_t count = 0;
+ char device[20];
+ int fd = -1;
+ bool found = false;
+ for (int n = 0; n < 64; n++)
+ {
+ sprintf(device, "/dev/video%d", n);
+ if ((fd = open(device, O_RDONLY)) != -1)
+ {
+ if (count == deviceNumber) {
+ // Found the device
+ found = true;
+ break;
+ } else {
+ close(fd);
+ count++;
+ }
+ }
+ }
+
+ if (!found)
+ return -1;
+
+ // query device capabilities
+ struct v4l2_capability cap;
+ if (ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0)
+ {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+ "error in querying the device capability for device %s. errno = %d",
+ device, errno);
+ close(fd);
+ return -1;
+ }
+
+ close(fd);
+
+ char cameraName[64];
+ memset(deviceNameUTF8, 0, deviceNameLength);
+ memcpy(cameraName, cap.card, sizeof(cap.card));
+
+ if (deviceNameLength >= strlen(cameraName))
+ {
+ memcpy(deviceNameUTF8, cameraName, strlen(cameraName));
+ }
+ else
+ {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, "buffer passed is too small");
+ return -1;
+ }
+
+ if (cap.bus_info[0] != 0) // may not be available in all drivers
+ {
+ // copy device id
+ if (deviceUniqueIdUTF8Length >= strlen((const char*) cap.bus_info))
+ {
+ memset(deviceUniqueIdUTF8, 0, deviceUniqueIdUTF8Length);
+ memcpy(deviceUniqueIdUTF8, cap.bus_info,
+ strlen((const char*) cap.bus_info));
+ }
+ else
+ {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+ "buffer passed is too small");
+ return -1;
+ }
+ }
+
+ return 0;
+}
+
+int32_t DeviceInfoLinux::CreateCapabilityMap(
+ const char* deviceUniqueIdUTF8)
+{
+ int fd;
+ char device[32];
+ bool found = false;
+
+ const int32_t deviceUniqueIdUTF8Length =
+ (int32_t) strlen((char*) deviceUniqueIdUTF8);
+ if (deviceUniqueIdUTF8Length > kVideoCaptureUniqueNameLength)
+ {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, "Device name too long");
+ return -1;
+ }
+ WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+ "CreateCapabilityMap called for device %s", deviceUniqueIdUTF8);
+
+ /* detect /dev/video [0-63] entries */
+ for (int n = 0; n < 64; ++n)
+ {
+ sprintf(device, "/dev/video%d", n);
+ fd = open(device, O_RDONLY);
+ if (fd == -1)
+ continue;
+
+ // query device capabilities
+ struct v4l2_capability cap;
+ if (ioctl(fd, VIDIOC_QUERYCAP, &cap) == 0)
+ {
+ if (cap.bus_info[0] != 0)
+ {
+ if (strncmp((const char*) cap.bus_info,
+ (const char*) deviceUniqueIdUTF8,
+ strlen((const char*) deviceUniqueIdUTF8)) == 0) //match with device id
+ {
+ found = true;
+ break; // fd matches with device unique id supplied
+ }
+ }
+ else //match for device name
+ {
+ if (IsDeviceNameMatches((const char*) cap.card,
+ (const char*) deviceUniqueIdUTF8))
+ {
+ found = true;
+ break;
+ }
+ }
+ }
+ close(fd); // close since this is not the matching device
+ }
+
+ if (!found)
+ {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, "no matching device found");
+ return -1;
+ }
+
+ // now fd will point to the matching device
+ // reset old capability list.
+ _captureCapabilities.clear();
+
+ int size = FillCapabilities(fd);
+ close(fd);
+
+ // Store the new used device name
+ _lastUsedDeviceNameLength = deviceUniqueIdUTF8Length;
+ _lastUsedDeviceName = (char*) realloc(_lastUsedDeviceName,
+ _lastUsedDeviceNameLength + 1);
+ memcpy(_lastUsedDeviceName, deviceUniqueIdUTF8, _lastUsedDeviceNameLength + 1);
+
+ WEBRTC_TRACE(webrtc::kTraceInfo,
+ webrtc::kTraceVideoCapture,
+ _id,
+ "CreateCapabilityMap %u",
+ static_cast<unsigned int>(_captureCapabilities.size()));
+
+ return size;
+}
+
+bool DeviceInfoLinux::IsDeviceNameMatches(const char* name,
+ const char* deviceUniqueIdUTF8)
+{
+ if (strncmp(deviceUniqueIdUTF8, name, strlen(name)) == 0)
+ return true;
+ return false;
+}
+
+int32_t DeviceInfoLinux::FillCapabilities(int fd)
+{
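+ // Probe a fixed grid of pixel formats and frame sizes with VIDIOC_TRY_FMT
+ // and record every combination the driver accepts unchanged. The maxFPS
+ // values are estimates; they are not queried from the driver.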
+
+ // set image format
+ struct v4l2_format video_fmt;
+ memset(&video_fmt, 0, sizeof(struct v4l2_format));
+
+ video_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ video_fmt.fmt.pix.sizeimage = 0;
+
+ int totalFmts = 4;
+ unsigned int videoFormats[] = {
+ V4L2_PIX_FMT_MJPEG,
+ V4L2_PIX_FMT_YUV420,
+ V4L2_PIX_FMT_YUYV,
+ V4L2_PIX_FMT_UYVY };
+
+ int sizes = 13;
+ unsigned int size[][2] = { { 128, 96 }, { 160, 120 }, { 176, 144 },
+ { 320, 240 }, { 352, 288 }, { 640, 480 },
+ { 704, 576 }, { 800, 600 }, { 960, 720 },
+ { 1280, 720 }, { 1024, 768 }, { 1440, 1080 },
+ { 1920, 1080 } };
+
+ int index = 0;
+ for (int fmts = 0; fmts < totalFmts; fmts++)
+ {
+ for (int i = 0; i < sizes; i++)
+ {
+ video_fmt.fmt.pix.pixelformat = videoFormats[fmts];
+ video_fmt.fmt.pix.width = size[i][0];
+ video_fmt.fmt.pix.height = size[i][1];
+
+ if (ioctl(fd, VIDIOC_TRY_FMT, &video_fmt) >= 0)
+ {
+ if ((video_fmt.fmt.pix.width == size[i][0])
+ && (video_fmt.fmt.pix.height == size[i][1]))
+ {
+ VideoCaptureCapability cap;
+ cap.width = video_fmt.fmt.pix.width;
+ cap.height = video_fmt.fmt.pix.height;
+ cap.expectedCaptureDelay = 120;
+ if (videoFormats[fmts] == V4L2_PIX_FMT_YUYV)
+ {
+ cap.rawType = kVideoYUY2;
+ }
+ else if (videoFormats[fmts] == V4L2_PIX_FMT_YUV420)
+ {
+ cap.rawType = kVideoI420;
+ }
+ else if (videoFormats[fmts] == V4L2_PIX_FMT_MJPEG)
+ {
+ cap.rawType = kVideoMJPEG;
+ }
+ else if (videoFormats[fmts] == V4L2_PIX_FMT_UYVY)
+ {
+ cap.rawType = kVideoUYVY;
+ }
+
+ // Get the fps of the current camera mode.
+ // V4L2 does not offer a reliable way to query this, so we guess.
+ if(cap.width >= 800 && cap.rawType != kVideoMJPEG)
+ {
+ cap.maxFPS = 15;
+ }
+ else
+ {
+ cap.maxFPS = 30;
+ }
+
+ _captureCapabilities.push_back(cap);
+ index++;
+ WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+ "Camera capability, width:%d height:%d type:%d fps:%d",
+ cap.width, cap.height, cap.rawType, cap.maxFPS);
+ }
+ }
+ }
+ }
+
+ WEBRTC_TRACE(webrtc::kTraceInfo,
+ webrtc::kTraceVideoCapture,
+ _id,
+ "CreateCapabilityMap %u",
+ static_cast<unsigned int>(_captureCapabilities.size()));
+ return _captureCapabilities.size();
+}
+
+} // namespace videocapturemodule
+} // namespace webrtc
diff --git a/webrtc/modules/video_capture/linux/device_info_linux.h b/webrtc/modules/video_capture/linux/device_info_linux.h
new file mode 100644
index 0000000000..cffb22256c
--- /dev/null
+++ b/webrtc/modules/video_capture/linux/device_info_linux.h
@@ -0,0 +1,53 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_LINUX_DEVICE_INFO_LINUX_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_LINUX_DEVICE_INFO_LINUX_H_
+
+#include "webrtc/modules/video_capture/device_info_impl.h"
+#include "webrtc/modules/video_capture/video_capture_impl.h"
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+class DeviceInfoLinux: public DeviceInfoImpl
+{
+public:
+ DeviceInfoLinux(const int32_t id);
+ virtual ~DeviceInfoLinux();
+ virtual uint32_t NumberOfDevices();
+ virtual int32_t GetDeviceName(
+ uint32_t deviceNumber,
+ char* deviceNameUTF8,
+ uint32_t deviceNameLength,
+ char* deviceUniqueIdUTF8,
+ uint32_t deviceUniqueIdUTF8Length,
+ char* productUniqueIdUTF8=0,
+ uint32_t productUniqueIdUTF8Length=0);
+ /*
+ * Fills the member variable _captureCapabilities with capabilities for the given device name.
+ */
+ virtual int32_t CreateCapabilityMap (const char* deviceUniqueIdUTF8);
+ virtual int32_t DisplayCaptureSettingsDialogBox(
+ const char* /*deviceUniqueIdUTF8*/,
+ const char* /*dialogTitleUTF8*/,
+ void* /*parentWindow*/,
+ uint32_t /*positionX*/,
+ uint32_t /*positionY*/) { return -1;}
+ int32_t FillCapabilities(int fd);
+ int32_t Init();
+private:
+
+ bool IsDeviceNameMatches(const char* name, const char* deviceUniqueIdUTF8);
+};
+} // namespace videocapturemodule
+} // namespace webrtc
+#endif // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_LINUX_DEVICE_INFO_LINUX_H_
diff --git a/webrtc/modules/video_capture/linux/video_capture_linux.cc b/webrtc/modules/video_capture/linux/video_capture_linux.cc
new file mode 100644
index 0000000000..fe99c7136f
--- /dev/null
+++ b/webrtc/modules/video_capture/linux/video_capture_linux.cc
@@ -0,0 +1,492 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <errno.h>
+#include <fcntl.h>
+#include <linux/videodev2.h>
+#include <stdio.h>
+#include <string.h>
+#include <sys/ioctl.h>
+#include <sys/mman.h>
+#include <sys/stat.h>
+#include <unistd.h>
+
+#include <iostream>
+#include <new>
+
+#include "webrtc/modules/video_capture/linux/video_capture_linux.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/include/ref_count.h"
+#include "webrtc/system_wrappers/include/trace.h"
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+VideoCaptureModule* VideoCaptureImpl::Create(const int32_t id,
+ const char* deviceUniqueId)
+{
+ RefCountImpl<videocapturemodule::VideoCaptureModuleV4L2>* implementation =
+ new RefCountImpl<videocapturemodule::VideoCaptureModuleV4L2>(id);
+
+ if (!implementation || implementation->Init(deviceUniqueId) != 0)
+ {
+ delete implementation;
+ implementation = NULL;
+ }
+
+ return implementation;
+}
+
+VideoCaptureModuleV4L2::VideoCaptureModuleV4L2(const int32_t id)
+ : VideoCaptureImpl(id),
+ _captureCritSect(CriticalSectionWrapper::CreateCriticalSection()),
+ _deviceId(-1),
+ _deviceFd(-1),
+ _buffersAllocatedByDevice(-1),
+ _currentWidth(-1),
+ _currentHeight(-1),
+ _currentFrameRate(-1),
+ _captureStarted(false),
+ _captureVideoType(kVideoI420),
+ _pool(NULL)
+{
+}
+
+int32_t VideoCaptureModuleV4L2::Init(const char* deviceUniqueIdUTF8)
+{
+ int len = strlen((const char*) deviceUniqueIdUTF8);
+ _deviceUniqueId = new (std::nothrow) char[len + 1];
+ if (_deviceUniqueId)
+ {
+ memcpy(_deviceUniqueId, deviceUniqueIdUTF8, len + 1);
+ }
+
+ int fd;
+ char device[32];
+ bool found = false;
+
+ /* detect /dev/video [0-63] entries */
+ int n;
+ for (n = 0; n < 64; n++)
+ {
+ sprintf(device, "/dev/video%d", n);
+ if ((fd = open(device, O_RDONLY)) != -1)
+ {
+ // query device capabilities
+ struct v4l2_capability cap;
+ if (ioctl(fd, VIDIOC_QUERYCAP, &cap) == 0)
+ {
+ if (cap.bus_info[0] != 0)
+ {
+ if (strncmp((const char*) cap.bus_info,
+ (const char*) deviceUniqueIdUTF8,
+ strlen((const char*) deviceUniqueIdUTF8)) == 0) //match with device id
+ {
+ close(fd);
+ found = true;
+ break; // fd matches with device unique id supplied
+ }
+ }
+ }
+ close(fd); // close since this is not the matching device
+ }
+ }
+ if (!found)
+ {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, "no matching device found");
+ return -1;
+ }
+ _deviceId = n; //store the device id
+ return 0;
+}
+
+VideoCaptureModuleV4L2::~VideoCaptureModuleV4L2()
+{
+ StopCapture();
+ if (_captureCritSect)
+ {
+ delete _captureCritSect;
+ }
+ if (_deviceFd != -1)
+ close(_deviceFd);
+}
+
+int32_t VideoCaptureModuleV4L2::StartCapture(
+ const VideoCaptureCapability& capability)
+{
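+ // Streaming setup, in order: open the device node, pick a pixel format
+ // from those enumerated with VIDIOC_ENUM_FMT and apply it with
+ // VIDIOC_S_FMT, try to set the frame rate with VIDIOC_S_PARM, map the
+ // driver buffers (AllocateVideoBuffers), start the capture thread, and
+ // finally turn the stream on with VIDIOC_STREAMON.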
+ if (_captureStarted)
+ {
+ if (capability.width == _currentWidth &&
+ capability.height == _currentHeight &&
+ _captureVideoType == capability.rawType)
+ {
+ return 0;
+ }
+ else
+ {
+ StopCapture();
+ }
+ }
+
+ CriticalSectionScoped cs(_captureCritSect);
+ //first open /dev/video device
+ char device[20];
+ sprintf(device, "/dev/video%d", (int) _deviceId);
+
+ if ((_deviceFd = open(device, O_RDWR | O_NONBLOCK, 0)) < 0)
+ {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+ "error in opening %s errono = %d", device, errno);
+ return -1;
+ }
+
+ // Supported video formats in preferred order.
+ // If the requested resolution is larger than VGA, we prefer MJPEG. Go for
+ // I420 otherwise.
+ const int nFormats = 5;
+ unsigned int fmts[nFormats];
+ if (capability.width > 640 || capability.height > 480) {
+ fmts[0] = V4L2_PIX_FMT_MJPEG;
+ fmts[1] = V4L2_PIX_FMT_YUV420;
+ fmts[2] = V4L2_PIX_FMT_YUYV;
+ fmts[3] = V4L2_PIX_FMT_UYVY;
+ fmts[4] = V4L2_PIX_FMT_JPEG;
+ } else {
+ fmts[0] = V4L2_PIX_FMT_YUV420;
+ fmts[1] = V4L2_PIX_FMT_YUYV;
+ fmts[2] = V4L2_PIX_FMT_UYVY;
+ fmts[3] = V4L2_PIX_FMT_MJPEG;
+ fmts[4] = V4L2_PIX_FMT_JPEG;
+ }
+
+ // Enumerate image formats.
+ struct v4l2_fmtdesc fmt;
+ int fmtsIdx = nFormats;
+ memset(&fmt, 0, sizeof(fmt));
+ fmt.index = 0;
+ fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+ "Video Capture enumerats supported image formats:");
+ while (ioctl(_deviceFd, VIDIOC_ENUM_FMT, &fmt) == 0) {
+ WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+ " { pixelformat = %c%c%c%c, description = '%s' }",
+ fmt.pixelformat & 0xFF, (fmt.pixelformat>>8) & 0xFF,
+ (fmt.pixelformat>>16) & 0xFF, (fmt.pixelformat>>24) & 0xFF,
+ fmt.description);
+ // Match the preferred order.
+ for (int i = 0; i < nFormats; i++) {
+ if (fmt.pixelformat == fmts[i] && i < fmtsIdx)
+ fmtsIdx = i;
+ }
+ // Keep enumerating.
+ fmt.index++;
+ }
+
+ if (fmtsIdx == nFormats)
+ {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+ "no supporting video formats found");
+ return -1;
+ } else {
+ WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+ "We prefer format %c%c%c%c",
+ fmts[fmtsIdx] & 0xFF, (fmts[fmtsIdx]>>8) & 0xFF,
+ (fmts[fmtsIdx]>>16) & 0xFF, (fmts[fmtsIdx]>>24) & 0xFF);
+ }
+
+ struct v4l2_format video_fmt;
+ memset(&video_fmt, 0, sizeof(struct v4l2_format));
+ video_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ video_fmt.fmt.pix.sizeimage = 0;
+ video_fmt.fmt.pix.width = capability.width;
+ video_fmt.fmt.pix.height = capability.height;
+ video_fmt.fmt.pix.pixelformat = fmts[fmtsIdx];
+
+ if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV)
+ _captureVideoType = kVideoYUY2;
+ else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_YUV420)
+ _captureVideoType = kVideoI420;
+ else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_UYVY)
+ _captureVideoType = kVideoUYVY;
+ else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG ||
+ video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_JPEG)
+ _captureVideoType = kVideoMJPEG;
+
+ //set format and frame size now
+ if (ioctl(_deviceFd, VIDIOC_S_FMT, &video_fmt) < 0)
+ {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+ "error in VIDIOC_S_FMT, errno = %d", errno);
+ return -1;
+ }
+
+ // initialize current width and height
+ _currentWidth = video_fmt.fmt.pix.width;
+ _currentHeight = video_fmt.fmt.pix.height;
+ _captureDelay = 120;
+
+ // Try to set the frame rate; first check whether the driver supports it.
+ bool driver_framerate_support = true;
+ struct v4l2_streamparm streamparms;
+ memset(&streamparms, 0, sizeof(streamparms));
+ streamparms.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ if (ioctl(_deviceFd, VIDIOC_G_PARM, &streamparms) < 0) {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+ "error in VIDIOC_G_PARM errno = %d", errno);
+ driver_framerate_support = false;
+ // continue
+ } else {
+ // Check whether the V4L2_CAP_TIMEPERFRAME capability flag is set.
+ if (streamparms.parm.capture.capability == V4L2_CAP_TIMEPERFRAME) {
+ // driver supports the feature. Set required framerate.
+ memset(&streamparms, 0, sizeof(streamparms));
+ streamparms.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ streamparms.parm.capture.timeperframe.numerator = 1;
+ streamparms.parm.capture.timeperframe.denominator = capability.maxFPS;
+ if (ioctl(_deviceFd, VIDIOC_S_PARM, &streamparms) < 0) {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+ "Failed to set the framerate. errno=%d", errno);
+ driver_framerate_support = false;
+ } else {
+ _currentFrameRate = capability.maxFPS;
+ }
+ }
+ }
+ // If the driver doesn't support frame rate control, hardcode a value
+ // based on the frame size.
+ if (!driver_framerate_support) {
+ if(_currentWidth >= 800 && _captureVideoType != kVideoMJPEG) {
+ _currentFrameRate = 15;
+ } else {
+ _currentFrameRate = 30;
+ }
+ }
+
+ if (!AllocateVideoBuffers())
+ {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+ "failed to allocate video capture buffers");
+ return -1;
+ }
+
+ // start capture thread
+ if (!_captureThread)
+ {
+ _captureThread = ThreadWrapper::CreateThread(
+ VideoCaptureModuleV4L2::CaptureThread, this, "CaptureThread");
+ _captureThread->Start();
+ _captureThread->SetPriority(kHighPriority);
+ }
+
+ // Needed to start UVC camera - from the uvcview application
+ enum v4l2_buf_type type;
+ type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ if (ioctl(_deviceFd, VIDIOC_STREAMON, &type) == -1)
+ {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+ "Failed to turn on stream");
+ return -1;
+ }
+
+ _captureStarted = true;
+ return 0;
+}
+
+int32_t VideoCaptureModuleV4L2::StopCapture()
+{
+ if (_captureThread) {
+ // Make sure the capture thread stops using the critsect.
+ _captureThread->Stop();
+ _captureThread.reset();
+ }
+
+ CriticalSectionScoped cs(_captureCritSect);
+ if (_captureStarted)
+ {
+ _captureStarted = false;
+
+ DeAllocateVideoBuffers();
+ close(_deviceFd);
+ _deviceFd = -1;
+ }
+
+ return 0;
+}
+
+// critical section protected by the caller
+
+bool VideoCaptureModuleV4L2::AllocateVideoBuffers()
+{
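+ // Memory-mapped I/O: request kNoOfV4L2Bufffers buffers from the driver
+ // (VIDIOC_REQBUFS), mmap each one into the process (VIDIOC_QUERYBUF +
+ // mmap) and queue it for capture (VIDIOC_QBUF).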
+ struct v4l2_requestbuffers rbuffer;
+ memset(&rbuffer, 0, sizeof(v4l2_requestbuffers));
+
+ rbuffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ rbuffer.memory = V4L2_MEMORY_MMAP;
+ rbuffer.count = kNoOfV4L2Bufffers;
+
+ if (ioctl(_deviceFd, VIDIOC_REQBUFS, &rbuffer) < 0)
+ {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+ "Could not get buffers from device. errno = %d", errno);
+ return false;
+ }
+
+ if (rbuffer.count > kNoOfV4L2Bufffers)
+ rbuffer.count = kNoOfV4L2Bufffers;
+
+ _buffersAllocatedByDevice = rbuffer.count;
+
+ //Map the buffers
+ _pool = new Buffer[rbuffer.count];
+
+ for (unsigned int i = 0; i < rbuffer.count; i++)
+ {
+ struct v4l2_buffer buffer;
+ memset(&buffer, 0, sizeof(v4l2_buffer));
+ buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ buffer.memory = V4L2_MEMORY_MMAP;
+ buffer.index = i;
+
+ if (ioctl(_deviceFd, VIDIOC_QUERYBUF, &buffer) < 0)
+ {
+ return false;
+ }
+
+ _pool[i].start = mmap(NULL, buffer.length, PROT_READ | PROT_WRITE, MAP_SHARED,
+ _deviceFd, buffer.m.offset);
+
+ if (MAP_FAILED == _pool[i].start)
+ {
+ for (unsigned int j = 0; j < i; j++)
+ munmap(_pool[j].start, _pool[j].length);
+ return false;
+ }
+
+ _pool[i].length = buffer.length;
+
+ if (ioctl(_deviceFd, VIDIOC_QBUF, &buffer) < 0)
+ {
+ return false;
+ }
+ }
+ return true;
+}
+
+bool VideoCaptureModuleV4L2::DeAllocateVideoBuffers()
+{
+ // unmap buffers
+ for (int i = 0; i < _buffersAllocatedByDevice; i++)
+ munmap(_pool[i].start, _pool[i].length);
+
+ delete[] _pool;
+
+ // turn off stream
+ enum v4l2_buf_type type;
+ type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ if (ioctl(_deviceFd, VIDIOC_STREAMOFF, &type) < 0)
+ {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+ "VIDIOC_STREAMOFF error. errno: %d", errno);
+ }
+
+ return true;
+}
+
+bool VideoCaptureModuleV4L2::CaptureStarted()
+{
+ return _captureStarted;
+}
+
+bool VideoCaptureModuleV4L2::CaptureThread(void* obj)
+{
+ return static_cast<VideoCaptureModuleV4L2*> (obj)->CaptureProcess();
+}
+bool VideoCaptureModuleV4L2::CaptureProcess()
+{
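+ // One pass of the capture thread: wait up to one second for the device fd
+ // to become readable, dequeue a filled buffer (VIDIOC_DQBUF), deliver it
+ // through IncomingFrame() and queue it back to the driver (VIDIOC_QBUF).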
+ int retVal = 0;
+ fd_set rSet;
+ struct timeval timeout;
+
+ _captureCritSect->Enter();
+
+ FD_ZERO(&rSet);
+ FD_SET(_deviceFd, &rSet);
+ timeout.tv_sec = 1;
+ timeout.tv_usec = 0;
+
+ retVal = select(_deviceFd + 1, &rSet, NULL, NULL, &timeout);
+ if (retVal < 0 && errno != EINTR) // continue if interrupted
+ {
+ // select failed
+ _captureCritSect->Leave();
+ return false;
+ }
+ else if (retVal == 0)
+ {
+ // select timed out
+ _captureCritSect->Leave();
+ return true;
+ }
+ else if (!FD_ISSET(_deviceFd, &rSet))
+ {
+ // no event on the camera handle
+ _captureCritSect->Leave();
+ return true;
+ }
+
+ if (_captureStarted)
+ {
+ struct v4l2_buffer buf;
+ memset(&buf, 0, sizeof(struct v4l2_buffer));
+ buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ buf.memory = V4L2_MEMORY_MMAP;
+ // dequeue a buffer - repeat until dequeued properly!
+ while (ioctl(_deviceFd, VIDIOC_DQBUF, &buf) < 0)
+ {
+ if (errno != EINTR)
+ {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+ "could not sync on a buffer on device %s", strerror(errno));
+ _captureCritSect->Leave();
+ return true;
+ }
+ }
+ VideoCaptureCapability frameInfo;
+ frameInfo.width = _currentWidth;
+ frameInfo.height = _currentHeight;
+ frameInfo.rawType = _captureVideoType;
+
+ // convert to I420 if needed
+ IncomingFrame((unsigned char*) _pool[buf.index].start,
+ buf.bytesused, frameInfo);
+ // enqueue the buffer again
+ if (ioctl(_deviceFd, VIDIOC_QBUF, &buf) == -1)
+ {
+ WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, _id,
+ "Failed to enqueue capture buffer");
+ }
+ }
+ _captureCritSect->Leave();
+ usleep(0);
+ return true;
+}
+
+int32_t VideoCaptureModuleV4L2::CaptureSettings(VideoCaptureCapability& settings)
+{
+ settings.width = _currentWidth;
+ settings.height = _currentHeight;
+ settings.maxFPS = _currentFrameRate;
+ settings.rawType=_captureVideoType;
+
+ return 0;
+}
+} // namespace videocapturemodule
+} // namespace webrtc
diff --git a/webrtc/modules/video_capture/linux/video_capture_linux.h b/webrtc/modules/video_capture/linux/video_capture_linux.h
new file mode 100644
index 0000000000..996f8e10ca
--- /dev/null
+++ b/webrtc/modules/video_capture/linux/video_capture_linux.h
@@ -0,0 +1,64 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_LINUX_VIDEO_CAPTURE_LINUX_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_LINUX_VIDEO_CAPTURE_LINUX_H_
+
+#include "webrtc/common_types.h"
+#include "webrtc/modules/video_capture/video_capture_impl.h"
+#include "webrtc/system_wrappers/include/thread_wrapper.h"
+
+namespace webrtc
+{
+class CriticalSectionWrapper;
+namespace videocapturemodule
+{
+class VideoCaptureModuleV4L2: public VideoCaptureImpl
+{
+public:
+ VideoCaptureModuleV4L2(int32_t id);
+ virtual ~VideoCaptureModuleV4L2();
+ virtual int32_t Init(const char* deviceUniqueId);
+ virtual int32_t StartCapture(const VideoCaptureCapability& capability);
+ virtual int32_t StopCapture();
+ virtual bool CaptureStarted();
+ virtual int32_t CaptureSettings(VideoCaptureCapability& settings);
+
+private:
+ enum {kNoOfV4L2Bufffers=4};
+
+ static bool CaptureThread(void*);
+ bool CaptureProcess();
+ bool AllocateVideoBuffers();
+ bool DeAllocateVideoBuffers();
+
+ rtc::scoped_ptr<ThreadWrapper> _captureThread;
+ CriticalSectionWrapper* _captureCritSect;
+
+ int32_t _deviceId;
+ int32_t _deviceFd;
+
+ int32_t _buffersAllocatedByDevice;
+ int32_t _currentWidth;
+ int32_t _currentHeight;
+ int32_t _currentFrameRate;
+ bool _captureStarted;
+ RawVideoType _captureVideoType;
+ struct Buffer
+ {
+ void *start;
+ size_t length;
+ };
+ Buffer *_pool;
+};
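+
+ // Instances are created through VideoCaptureImpl::Create(id, deviceUniqueId),
+ // where deviceUniqueId is the unique name reported by
+ // DeviceInfoLinux::GetDeviceName; StartCapture()/StopCapture() then drive
+ // the mmap-based capture loop.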
+} // namespace videocapturemodule
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_LINUX_VIDEO_CAPTURE_LINUX_H_
diff --git a/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit.h b/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit.h
new file mode 100644
index 0000000000..e3ddd9fe10
--- /dev/null
+++ b/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit.h
@@ -0,0 +1,79 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_MAC_QTKIT_VIDEO_CAPTURE_QTKIT_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_MAC_QTKIT_VIDEO_CAPTURE_QTKIT_H_
+
+#import <QTKit/QTKit.h>
+#include <stdio.h>
+
+#include "webrtc/modules/video_capture/device_info_impl.h"
+#include "webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_utility.h"
+#include "webrtc/modules/video_capture/video_capture_impl.h"
+
+@class VideoCaptureMacQTKitObjC;
+@class VideoCaptureMacQTKitInfoObjC;
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+
+class VideoCaptureMacQTKit : public VideoCaptureImpl
+{
+public:
+ VideoCaptureMacQTKit(const int32_t id);
+ virtual ~VideoCaptureMacQTKit();
+
+ /*
+ * Create a video capture module object
+ *
+ * id - unique identifier of this video capture module object
+ * deviceUniqueIdUTF8 - name of the device. Available names can be found
+ * by using GetDeviceName
+ * deviceUniqueIdUTF8Length - length of deviceUniqueIdUTF8
+ */
+ static void Destroy(VideoCaptureModule* module);
+
+ int32_t Init(const int32_t id, const char* deviceUniqueIdUTF8);
+
+
+ // Start/Stop
+ virtual int32_t StartCapture(
+ const VideoCaptureCapability& capability);
+ virtual int32_t StopCapture();
+
+ // Properties of the set device
+
+ virtual bool CaptureStarted();
+
+ int32_t CaptureSettings(VideoCaptureCapability& settings);
+
+protected:
+ // Help functions
+ int32_t SetCameraOutput();
+
+private:
+ VideoCaptureMacQTKitObjC* _captureDevice;
+ VideoCaptureMacQTKitInfoObjC* _captureInfo;
+ bool _isCapturing;
+ int32_t _id;
+ int32_t _captureWidth;
+ int32_t _captureHeight;
+ int32_t _captureFrameRate;
+ char _currentDeviceNameUTF8[MAX_NAME_LENGTH];
+ char _currentDeviceUniqueIdUTF8[MAX_NAME_LENGTH];
+ char _currentDeviceProductUniqueIDUTF8[MAX_NAME_LENGTH];
+ int32_t _frameCount;
+};
+} // namespace videocapturemodule
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_MAC_QTKIT_VIDEO_CAPTURE_QTKIT_H_
diff --git a/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit.mm b/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit.mm
new file mode 100644
index 0000000000..ad057b49db
--- /dev/null
+++ b/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit.mm
@@ -0,0 +1,211 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit.h"
+#import "webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_info_objc.h"
+#import "webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_objc.h"
+#include "webrtc/modules/video_capture/video_capture_config.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/include/trace.h"
+
+namespace webrtc
+{
+
+namespace videocapturemodule
+{
+
+VideoCaptureMacQTKit::VideoCaptureMacQTKit(const int32_t id) :
+ VideoCaptureImpl(id),
+ _captureDevice(NULL),
+ _captureInfo(NULL),
+ _isCapturing(false),
+ _id(id),
+ _captureWidth(QTKIT_DEFAULT_WIDTH),
+ _captureHeight(QTKIT_DEFAULT_HEIGHT),
+ _captureFrameRate(QTKIT_DEFAULT_FRAME_RATE),
+ _frameCount(0)
+{
+
+ memset(_currentDeviceNameUTF8, 0, MAX_NAME_LENGTH);
+ memset(_currentDeviceUniqueIdUTF8, 0, MAX_NAME_LENGTH);
+ memset(_currentDeviceProductUniqueIDUTF8, 0, MAX_NAME_LENGTH);
+}
+
+VideoCaptureMacQTKit::~VideoCaptureMacQTKit()
+{
+
+ WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
+ "~VideoCaptureMacQTKit() called");
+ if(_captureDevice)
+ {
+ [_captureDevice registerOwner:nil];
+ [_captureDevice stopCapture];
+ [_captureDevice release];
+ }
+
+ if(_captureInfo)
+ {
+ [_captureInfo release];
+ }
+}
+
+int32_t VideoCaptureMacQTKit::Init(
+ const int32_t id, const char* iDeviceUniqueIdUTF8)
+{
+ CriticalSectionScoped cs(&_apiCs);
+
+
+ const int32_t nameLength =
+ (int32_t) strlen((char*)iDeviceUniqueIdUTF8);
+ if(nameLength>kVideoCaptureUniqueNameLength)
+ return -1;
+
+ // Store the device name
+ _deviceUniqueId = new char[nameLength+1];
+ memcpy(_deviceUniqueId, iDeviceUniqueIdUTF8,nameLength+1);
+
+ _captureDevice = [[VideoCaptureMacQTKitObjC alloc] init];
+ if(NULL == _captureDevice)
+ {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, id,
+ "Failed to create an instance of "
+ "VideoCaptureMacQTKitObjC");
+ return -1;
+ }
+
+ [_captureDevice registerOwner:this];
+
+ if(0 == strcmp((char*)iDeviceUniqueIdUTF8, ""))
+ {
+ // the user doesn't want to set a capture device at this time
+ return 0;
+ }
+
+ _captureInfo = [[VideoCaptureMacQTKitInfoObjC alloc]init];
+ if(nil == _captureInfo)
+ {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, id,
+ "Failed to create an instance of VideoCaptureMacQTKitInfoObjC");
+ return -1;
+ }
+
+ int captureDeviceCount = [[_captureInfo getCaptureDeviceCount]intValue];
+ if(captureDeviceCount < 0)
+ {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, id,
+ "No Capture Devices Present");
+ return -1;
+ }
+
+ const int NAME_LENGTH = 1024;
+ char deviceNameUTF8[1024] = "";
+ char deviceUniqueIdUTF8[1024] = "";
+ char deviceProductUniqueIDUTF8[1024] = "";
+
+ bool captureDeviceFound = false;
+ for(int index = 0; index < captureDeviceCount; index++){
+
+ memset(deviceNameUTF8, 0, NAME_LENGTH);
+ memset(deviceUniqueIdUTF8, 0, NAME_LENGTH);
+ memset(deviceProductUniqueIDUTF8, 0, NAME_LENGTH);
+ if(-1 == [[_captureInfo getDeviceNamesFromIndex:index
+ DefaultName:deviceNameUTF8 WithLength:NAME_LENGTH
+ AndUniqueID:deviceUniqueIdUTF8 WithLength:NAME_LENGTH
+ AndProductID:deviceProductUniqueIDUTF8
+ WithLength:NAME_LENGTH]intValue])
+ {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+ "GetDeviceName returned -1 for index %d", index);
+ return -1;
+ }
+ if(0 == strcmp((const char*)iDeviceUniqueIdUTF8,
+ (char*)deviceUniqueIdUTF8))
+ {
+ // we have a match
+ captureDeviceFound = true;
+ break;
+ }
+ }
+
+ if(false == captureDeviceFound)
+ {
+ WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+ "Failed to find capture device unique ID %s",
+ iDeviceUniqueIdUTF8);
+ return -1;
+ }
+
+ // at this point we know that the user has passed in a valid camera. Let's
+ // set it as the current.
+ if(![_captureDevice setCaptureDeviceById:(char*)deviceUniqueIdUTF8])
+ {
+ strcpy((char*)_deviceUniqueId, (char*)deviceUniqueIdUTF8);
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+ "Failed to set capture device %s (unique ID %s) even "
+ "though it was a valid return from "
+ "VideoCaptureMacQTKitInfo", deviceNameUTF8,
+ iDeviceUniqueIdUTF8);
+ return -1;
+ }
+
+ WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+ "successfully Init VideoCaptureMacQTKit" );
+ return 0;
+}
+
+int32_t VideoCaptureMacQTKit::StartCapture(
+ const VideoCaptureCapability& capability)
+{
+
+ _captureWidth = capability.width;
+ _captureHeight = capability.height;
+ _captureFrameRate = capability.maxFPS;
+ _captureDelay = 120;
+
+ [_captureDevice setCaptureHeight:_captureHeight
+ width:_captureWidth
+ frameRate:_captureFrameRate];
+
+ [_captureDevice startCapture];
+ _isCapturing = true;
+ return 0;
+}
+
+int32_t VideoCaptureMacQTKit::StopCapture()
+{
+ [_captureDevice stopCapture];
+ _isCapturing = false;
+ return 0;
+}
+
+bool VideoCaptureMacQTKit::CaptureStarted()
+{
+ return _isCapturing;
+}
+
+int32_t VideoCaptureMacQTKit::CaptureSettings(VideoCaptureCapability& settings)
+{
+ settings.width = _captureWidth;
+ settings.height = _captureHeight;
+ settings.maxFPS = _captureFrameRate;
+ return 0;
+}
+
+
+// ********** begin functions inherited from DeviceInfoImpl **********
+
+struct VideoCaptureCapabilityMacQTKit:public VideoCaptureCapability
+{
+ VideoCaptureCapabilityMacQTKit()
+ {
+ }
+};
+} // namespace videocapturemodule
+} // namespace webrtc
diff --git a/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_info.h b/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_info.h
new file mode 100644
index 0000000000..fd994ad676
--- /dev/null
+++ b/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_info.h
@@ -0,0 +1,93 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_MAC_QTKIT_VIDEO_CAPTURE_QTKIT_INFO_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_MAC_QTKIT_VIDEO_CAPTURE_QTKIT_INFO_H_
+
+#include "webrtc/modules/video_capture/device_info_impl.h"
+#include "webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_utility.h"
+#include "webrtc/modules/video_capture/video_capture_impl.h"
+
+
+@class VideoCaptureMacQTKitInfoObjC;
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+
+class VideoCaptureMacQTKitInfo: public DeviceInfoImpl
+{
+public:
+
+ VideoCaptureMacQTKitInfo(const int32_t id);
+ virtual ~VideoCaptureMacQTKitInfo();
+
+ int32_t Init();
+
+ virtual uint32_t NumberOfDevices();
+
+ /*
+ * Returns the available capture devices.
+ * deviceNumber -[in] index of capture device
+ * deviceNameUTF8 - friendly name of the capture device
+ * deviceUniqueIdUTF8 - unique name of the capture device if it exists.
+ * Otherwise same as deviceNameUTF8.
+ * productUniqueIdUTF8 - unique product id if it exists. Null terminated
+ * otherwise.
+ */
+ virtual int32_t GetDeviceName(
+ uint32_t deviceNumber, char* deviceNameUTF8,
+ uint32_t deviceNameLength, char* deviceUniqueIdUTF8,
+ uint32_t deviceUniqueIdUTF8Length,
+ char* productUniqueIdUTF8 = 0,
+ uint32_t productUniqueIdUTF8Length = 0);
+
+ /*
+ * Returns the number of capabilities for this device
+ */
+ virtual int32_t NumberOfCapabilities(
+ const char* deviceUniqueIdUTF8);
+
+ /*
+ * Gets the capabilities of the named device
+ */
+ virtual int32_t GetCapability(
+ const char* deviceUniqueIdUTF8,
+ const uint32_t deviceCapabilityNumber,
+ VideoCaptureCapability& capability);
+
+ /*
+ * Gets the capability that best matches the requested width, height and frame rate.
+ * Returns the deviceCapabilityNumber on success.
+ */
+ virtual int32_t GetBestMatchedCapability(
+ const char* deviceUniqueIdUTF8,
+ const VideoCaptureCapability& requested,
+ VideoCaptureCapability& resulting);
+
+ /*
+ * Display OS /capture device specific settings dialog
+ */
+ virtual int32_t DisplayCaptureSettingsDialogBox(
+ const char* deviceUniqueIdUTF8,
+ const char* dialogTitleUTF8, void* parentWindow,
+ uint32_t positionX, uint32_t positionY);
+
+protected:
+ virtual int32_t CreateCapabilityMap(
+ const char* deviceUniqueIdUTF8);
+
+ VideoCaptureMacQTKitInfoObjC* _captureInfo;
+};
+} // namespace videocapturemodule
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_MAC_QTKIT_VIDEO_CAPTURE_QTKIT_INFO_H_
diff --git a/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_info.mm b/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_info.mm
new file mode 100644
index 0000000000..1251ecd830
--- /dev/null
+++ b/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_info.mm
@@ -0,0 +1,125 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_info_objc.h"
+#include "webrtc/modules/video_capture/include/video_capture.h"
+#include "webrtc/modules/video_capture/video_capture_config.h"
+#include "webrtc/system_wrappers/include/trace.h"
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+
+VideoCaptureMacQTKitInfo::VideoCaptureMacQTKitInfo(const int32_t id) :
+ DeviceInfoImpl(id)
+{
+ _captureInfo = [[VideoCaptureMacQTKitInfoObjC alloc] init];
+}
+
+VideoCaptureMacQTKitInfo::~VideoCaptureMacQTKitInfo()
+{
+ [_captureInfo release];
+
+}
+
+int32_t VideoCaptureMacQTKitInfo::Init()
+{
+
+ return 0;
+}
+
+uint32_t VideoCaptureMacQTKitInfo::NumberOfDevices()
+{
+
+ uint32_t captureDeviceCount =
+ [[_captureInfo getCaptureDeviceCount]intValue];
+ return captureDeviceCount;
+
+}
+
+int32_t VideoCaptureMacQTKitInfo::GetDeviceName(
+ uint32_t deviceNumber, char* deviceNameUTF8,
+ uint32_t deviceNameLength, char* deviceUniqueIdUTF8,
+ uint32_t deviceUniqueIdUTF8Length, char* productUniqueIdUTF8,
+ uint32_t productUniqueIdUTF8Length)
+{
+ int errNum = [[_captureInfo getDeviceNamesFromIndex:deviceNumber
+ DefaultName:deviceNameUTF8 WithLength:deviceNameLength
+ AndUniqueID:deviceUniqueIdUTF8
+ WithLength:deviceUniqueIdUTF8Length
+ AndProductID:productUniqueIdUTF8
+ WithLength:productUniqueIdUTF8Length]intValue];
+ return errNum;
+}
+
+int32_t VideoCaptureMacQTKitInfo::NumberOfCapabilities(
+ const char* deviceUniqueIdUTF8)
+{
+ // Not implemented. Mac doesn't use discrete steps in capabilities, rather
+ // "analog". QTKit will do it's best to convert frames to what ever format
+ // you ask for.
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+ "NumberOfCapabilities is not supported on the Mac platform.");
+ return -1;
+}
+
+
+int32_t VideoCaptureMacQTKitInfo::GetCapability(
+ const char* deviceUniqueIdUTF8,
+ const uint32_t deviceCapabilityNumber,
+ VideoCaptureCapability& capability)
+{
+ // Not implemented. Mac doesn't use discrete steps in capabilities, rather
+ // "analog". QTKit will do it's best to convert frames to what ever format
+ // you ask for.
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+ "NumberOfCapabilities is not supported on the Mac platform.");
+ return -1;
+}
+
+
+int32_t VideoCaptureMacQTKitInfo::GetBestMatchedCapability(
+ const char*deviceUniqueIdUTF8,
+ const VideoCaptureCapability& requested, VideoCaptureCapability& resulting)
+{
+ // Not implemented. Mac doesn't use discrete steps in capabilities, rather
+ // "analog". QTKit will do it's best to convert frames to what ever format
+ // you ask for.
+ WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+ "NumberOfCapabilities is not supported on the Mac platform.");
+ return -1;
+}
+
+int32_t VideoCaptureMacQTKitInfo::DisplayCaptureSettingsDialogBox(
+ const char* deviceUniqueIdUTF8,
+ const char* dialogTitleUTF8, void* parentWindow,
+ uint32_t positionX, uint32_t positionY)
+{
+
+ return [[_captureInfo
+ displayCaptureSettingsDialogBoxWithDevice:deviceUniqueIdUTF8
+ AndTitle:dialogTitleUTF8
+ AndParentWindow:parentWindow AtX:positionX AndY:positionY]
+ intValue];
+}
+
+int32_t VideoCaptureMacQTKitInfo::CreateCapabilityMap(
+ const char* deviceUniqueIdUTF8)
+{
+ // Not implemented. Mac doesn't use discrete steps in capabilities, rather
+ // "analog". QTKit will do it's best to convert frames to what ever format
+ // you ask for.
+ WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+ "NumberOfCapabilities is not supported on the Mac platform.");
+ return -1;
+}
+} // namespace videocapturemodule
+} // namespace webrtc
diff --git a/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_info_objc.h b/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_info_objc.h
new file mode 100644
index 0000000000..c03aa935f5
--- /dev/null
+++ b/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_info_objc.h
@@ -0,0 +1,67 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+//
+// video_capture_qtkit_info_objc.h
+//
+//
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_MAC_QTKIT_VIDEO_CAPTURE_QTKIT_INFO_OBJC_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_MAC_QTKIT_VIDEO_CAPTURE_QTKIT_INFO_OBJC_H_
+
+#import <Foundation/Foundation.h>
+#import <QTKit/QTKit.h>
+
+#include "webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_info.h"
+#include "webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_utility.h"
+
+@interface VideoCaptureMacQTKitInfoObjC : NSObject{
+ bool _OSSupportedInfo;
+ NSArray* _captureDevicesInfo;
+ NSAutoreleasePool* _poolInfo;
+ int _captureDeviceCountInfo;
+
+}
+
+/**************************************************************************
+ *
+ * The following functions are considered to be private
+ *
+ ***************************************************************************/
+
+- (NSNumber*)getCaptureDevices;
+- (NSNumber*)initializeVariables;
+- (void)checkOSSupported;
+
+
+/**************************************************************************
+ *
+ * The following functions are considered public and are called by the
+ * VideoCaptureMacQTKitInfo class
+ *
+ ***************************************************************************/
+
+- (NSNumber*)getCaptureDeviceCount;
+
+- (NSNumber*)getDeviceNamesFromIndex:(uint32_t)index
+ DefaultName:(char*)deviceName
+ WithLength:(uint32_t)deviceNameLength
+ AndUniqueID:(char*)deviceUniqueID
+ WithLength:(uint32_t)deviceUniqueIDLength
+ AndProductID:(char*)deviceProductID
+ WithLength:(uint32_t)deviceProductIDLength;
+
+- (NSNumber*)displayCaptureSettingsDialogBoxWithDevice:
+ (const char*)deviceUniqueIdUTF8
+ AndTitle:(const char*)dialogTitleUTF8
+ AndParentWindow:(void*) parentWindow AtX:(uint32_t)positionX
+ AndY:(uint32_t) positionY;
+@end
+
+#endif // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_MAC_QTKIT_VIDEO_CAPTURE_QTKIT_INFO_OBJC_H_
diff --git a/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_info_objc.mm b/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_info_objc.mm
new file mode 100644
index 0000000000..d3f2188bae
--- /dev/null
+++ b/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_info_objc.mm
@@ -0,0 +1,191 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#pragma mark **** imports/includes
+
+#import "webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_info_objc.h"
+
+#include "webrtc/system_wrappers/include/trace.h"
+
+using namespace webrtc;
+
+#pragma mark **** hidden class interface
+
+@implementation VideoCaptureMacQTKitInfoObjC
+
+// ****************** over-written OS methods ***********************
+#pragma mark **** over-written OS methods
+
+/// ***** Objective-C. Similar to C++ constructor, although invoked manually
+/// ***** Potentially returns an instance of self
+-(id)init{
+ self = [super init];
+ if(nil != self){
+ [self checkOSSupported];
+ [self initializeVariables];
+ }
+ else
+ {
+ return nil;
+ }
+ return self;
+}
+
+/// ***** Objective-C. Similar to C++ destructor
+/// ***** Returns nothing
+- (void)dealloc {
+
+ [_captureDevicesInfo release];
+
+ [super dealloc];
+}
+
+// ****************** public methods ******************
+#pragma mark **** public method implementations
+
+/// ***** Creates a message box with Cocoa framework
+/// ***** Returns 0 on success, -1 otherwise.
+- (NSNumber*)displayCaptureSettingsDialogBoxWithDevice:(const char*)deviceUniqueIdUTF8
+ AndTitle:(const char*)dialogTitleUTF8
+ AndParentWindow:(void*) parentWindow
+ AtX:(uint32_t)positionX
+ AndY:(uint32_t) positionY
+{
+ NSString* strTitle = [NSString stringWithFormat:@"%s", dialogTitleUTF8];
+ NSString* strButton = @"Alright";
+ NSAlert* alert = [NSAlert alertWithMessageText:strTitle
+ defaultButton:strButton
+ alternateButton:nil otherButton:nil
+ informativeTextWithFormat:@"Device %s is capturing", deviceUniqueIdUTF8];
+ [alert setAlertStyle:NSInformationalAlertStyle];
+ [alert runModal];
+ return [NSNumber numberWithInt:0];
+}
+
+- (NSNumber*)getCaptureDeviceCount{
+ [self getCaptureDevices];
+ return [NSNumber numberWithInt:_captureDeviceCountInfo];
+}
+
+
+- (NSNumber*)getDeviceNamesFromIndex:(uint32_t)index
+ DefaultName:(char*)deviceName
+ WithLength:(uint32_t)deviceNameLength
+ AndUniqueID:(char*)deviceUniqueID
+ WithLength:(uint32_t)deviceUniqueIDLength
+ AndProductID:(char*)deviceProductID
+ WithLength:(uint32_t)deviceProductIDLength
+{
+ if(NO == _OSSupportedInfo)
+ {
+ return [NSNumber numberWithInt:0];
+ }
+
+ if(index >= (uint32_t)_captureDeviceCountInfo)
+ {
+ return [NSNumber numberWithInt:-1];
+ }
+
+ QTCaptureDevice* tempCaptureDevice =
+ (QTCaptureDevice*)[_captureDevicesInfo objectAtIndex:index];
+ if(!tempCaptureDevice)
+ {
+ return [NSNumber numberWithInt:-1];
+ }
+
+ memset(deviceName, 0, deviceNameLength);
+ memset(deviceUniqueID, 0, deviceUniqueIDLength);
+
+ bool successful = NO;
+
+ NSString* tempString = [tempCaptureDevice localizedDisplayName];
+ successful = [tempString getCString:(char*)deviceName
+ maxLength:deviceNameLength encoding:NSUTF8StringEncoding];
+ if(NO == successful)
+ {
+ memset(deviceName, 0, deviceNameLength);
+ return [NSNumber numberWithInt:-1];
+ }
+
+ tempString = [tempCaptureDevice uniqueID];
+ successful = [tempString getCString:(char*)deviceUniqueID
+ maxLength:deviceUniqueIDLength encoding:NSUTF8StringEncoding];
+ if(NO == successful)
+ {
+ memset(deviceUniqueID, 0, deviceUniqueIDLength);
+ return [NSNumber numberWithInt:-1];
+ }
+
+ return [NSNumber numberWithInt:0];
+
+}
+
+// ****************** "private" category functions below here ******************
+#pragma mark **** "private" method implementations
+
+- (NSNumber*)initializeVariables
+{
+ if(NO == _OSSupportedInfo)
+ {
+ return [NSNumber numberWithInt:0];
+ }
+
+ _poolInfo = [[NSAutoreleasePool alloc]init];
+ _captureDeviceCountInfo = 0;
+ [self getCaptureDevices];
+
+ return [NSNumber numberWithInt:0];
+}
+
+// ***** Checks whether the QTCaptureSession class is available in the OS.
+// ***** If it is not, _OSSupportedInfo is set to NO.
+// ***** Throughout the rest of the class _OSSupportedInfo is checked and
+// ***** functions are/aren't called accordingly.
+// ***** The user can weak-link against the QTKit framework and run on older
+// ***** versions of the OS, i.e. backwards compatibility.
+// ***** Returns nothing. Sets a member variable.
+- (void)checkOSSupported
+{
+ Class osSupportedTest = NSClassFromString(@"QTCaptureSession");
+ if(nil == osSupportedTest)
+ {
+ _OSSupportedInfo = NO;
+ }
+ else
+ {
+ _OSSupportedInfo = YES;
+ }
+}
+
+/// ***** Retrieves the number of capture devices currently available
+/// ***** Stores them in an NSArray instance
+/// ***** Returns 0 on success, -1 otherwise.
+- (NSNumber*)getCaptureDevices
+{
+ if(NO == _OSSupportedInfo)
+ {
+ return [NSNumber numberWithInt:0];
+ }
+
+ if(_captureDevicesInfo)
+ {
+ [_captureDevicesInfo release];
+ }
+ _captureDevicesInfo = [[NSArray alloc]
+ initWithArray:[QTCaptureDevice
+ inputDevicesWithMediaType:QTMediaTypeVideo]];
+
+ _captureDeviceCountInfo = _captureDevicesInfo.count;
+
+ return [NSNumber numberWithInt:0];
+}
+
+@end
diff --git a/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_objc.h b/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_objc.h
new file mode 100644
index 0000000000..50076003cb
--- /dev/null
+++ b/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_objc.h
@@ -0,0 +1,61 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+//
+// video_capture_qtkit_objc.h
+//
+//
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_MAC_QTKIT_VIDEO_CAPTURE_QTKIT_OBJC_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_MAC_QTKIT_VIDEO_CAPTURE_QTKIT_OBJC_H_
+
+#import <AppKit/AppKit.h>
+#import <CoreData/CoreData.h>
+#import <CoreFoundation/CoreFoundation.h>
+#import <CoreVideo/CoreVideo.h>
+#import <Foundation/Foundation.h>
+#import <QTKit/QTKit.h>
+
+#include "webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit.h"
+
+@interface VideoCaptureMacQTKitObjC : NSObject {
+ bool _capturing;
+ int _frameRate;
+ int _frameWidth;
+ int _frameHeight;
+ int _framesDelivered;
+ int _framesRendered;
+ bool _captureInitialized;
+
+ webrtc::videocapturemodule::VideoCaptureMacQTKit* _owner;
+ NSLock* _lock;
+
+ QTCaptureSession* _captureSession;
+ QTCaptureDeviceInput* _captureVideoDeviceInput;
+ QTCaptureDecompressedVideoOutput* _captureDecompressedVideoOutput;
+ NSArray* _captureDevices;
+ int _captureDeviceCount;
+ char _captureDeviceNameUTF8[1024];
+ char _captureDeviceNameUniqueID[1024];
+}
+
+- (void)getCaptureDevices;
+- (BOOL)initializeVideoCapture;
+- (BOOL)initializeVariables;
+
+- (void)registerOwner:(webrtc::videocapturemodule::VideoCaptureMacQTKit*)owner;
+- (BOOL)setCaptureDeviceById:(char*)uniqueId;
+- (void)setCaptureHeight:(int)height width:(int)width frameRate:(int)frameRate;
+- (void)startCapture;
+- (void)stopCapture;
+
+@end
+
+#endif // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_MAC_QTKIT_VIDEO_CAPTURE_QTKIT_OBJC_H_
diff --git a/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_objc.mm b/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_objc.mm
new file mode 100644
index 0000000000..8a6d448a93
--- /dev/null
+++ b/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_objc.mm
@@ -0,0 +1,260 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#define DEFAULT_CAPTURE_DEVICE_INDEX 1
+#define DEFAULT_FRAME_RATE 30
+#define DEFAULT_FRAME_WIDTH 352
+#define DEFAULT_FRAME_HEIGHT 288
+#define ROTATE_CAPTURED_FRAME 1
+#define LOW_QUALITY 1
+
+#import "webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_objc.h"
+
+#include "webrtc/system_wrappers/include/trace.h"
+
+using namespace webrtc;
+using namespace videocapturemodule;
+
+@implementation VideoCaptureMacQTKitObjC
+
+-(id)init {
+ self = [super init];
+ if (self) {
+ [self initializeVariables];
+ }
+ return self;
+}
+
+- (void)dealloc {
+ if (_captureSession)
+ [_captureSession stopRunning];
+
+ if (_captureVideoDeviceInput) {
+ if ([[_captureVideoDeviceInput device] isOpen])
+ [[_captureVideoDeviceInput device] close];
+
+ [_captureVideoDeviceInput release];
+ }
+
+ [_captureDecompressedVideoOutput release];
+ [_captureSession release];
+ [_captureDevices release];
+ [_lock release];
+
+ [super dealloc];
+}
+
+#pragma mark Public methods
+
+- (void)registerOwner:(VideoCaptureMacQTKit*)owner {
+ [_lock lock];
+ _owner = owner;
+ [_lock unlock];
+}
+
+- (BOOL)setCaptureDeviceById:(char*)uniqueId {
+ if (uniqueId == nil || !strcmp("", uniqueId)) {
+ WEBRTC_TRACE(kTraceInfo, kTraceVideoCapture, 0,
+ "Incorrect capture id argument");
+ return NO;
+ }
+
+ if (!strcmp(uniqueId, _captureDeviceNameUniqueID))
+ return YES;
+
+ QTCaptureDevice* captureDevice;
+ for(int index = 0; index < _captureDeviceCount; index++) {
+ captureDevice = (QTCaptureDevice*)[_captureDevices objectAtIndex:index];
+ char captureDeviceId[1024] = "";
+ [[captureDevice uniqueID] getCString:captureDeviceId
+ maxLength:1024
+ encoding:NSUTF8StringEncoding];
+ if (strcmp(uniqueId, captureDeviceId) == 0) {
+ WEBRTC_TRACE(kTraceInfo, kTraceVideoCapture, 0,
+ "%s:%d Found capture device id %s as index %d",
+ __FUNCTION__, __LINE__, captureDeviceId, index);
+ [[captureDevice localizedDisplayName] getCString:_captureDeviceNameUTF8
+ maxLength:1024
+ encoding:NSUTF8StringEncoding];
+ [[captureDevice uniqueID] getCString:_captureDeviceNameUniqueID
+ maxLength:1024
+ encoding:NSUTF8StringEncoding];
+ break;
+ }
+ captureDevice = nil;
+ }
+
+ if (!captureDevice)
+ return NO;
+
+ NSError* error;
+ if (![captureDevice open:&error]) {
+ WEBRTC_TRACE(kTraceError, kTraceVideoCapture, 0,
+ "Failed to open capture device: %s", _captureDeviceNameUTF8);
+ return NO;
+ }
+
+ if (_captureVideoDeviceInput) {
+ [_captureVideoDeviceInput release];
+ }
+ _captureVideoDeviceInput =
+ [[QTCaptureDeviceInput alloc] initWithDevice:captureDevice];
+
+ if (![_captureSession addInput:_captureVideoDeviceInput error:&error]) {
+ WEBRTC_TRACE(kTraceError, kTraceVideoCapture, 0,
+ "Failed to add input from %s to the capture session",
+ _captureDeviceNameUTF8);
+ return NO;
+ }
+
+ WEBRTC_TRACE(kTraceInfo, kTraceVideoCapture, 0,
+ "%s:%d successfully added capture device: %s", __FUNCTION__,
+ __LINE__, _captureDeviceNameUTF8);
+ return YES;
+}
+
+- (void)setCaptureHeight:(int)height width:(int)width frameRate:(int)frameRate {
+ _frameWidth = width;
+ _frameHeight = height;
+ _frameRate = frameRate;
+
+ NSDictionary* captureDictionary =
+ [NSDictionary dictionaryWithObjectsAndKeys:
+ [NSNumber numberWithDouble:_frameWidth],
+ (id)kCVPixelBufferWidthKey,
+ [NSNumber numberWithDouble:_frameHeight],
+ (id)kCVPixelBufferHeightKey,
+ [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32ARGB],
+ (id)kCVPixelBufferPixelFormatTypeKey,
+ nil];
+ [_captureDecompressedVideoOutput
+ performSelectorOnMainThread:@selector(setPixelBufferAttributes:)
+ withObject:captureDictionary
+ waitUntilDone:YES];
+}
+
+- (void)startCapture {
+ if (_capturing)
+ return;
+
+ [_captureSession startRunning];
+ _capturing = YES;
+}
+
+- (void)stopCapture {
+ if (!_capturing)
+ return;
+
+ [_captureSession stopRunning];
+ _capturing = NO;
+}
+
+#pragma mark Private methods
+
+- (BOOL)initializeVariables {
+ if (NSClassFromString(@"QTCaptureSession") == nil)
+ return NO;
+
+ memset(_captureDeviceNameUTF8, 0, 1024);
+ _framesDelivered = 0;
+ _framesRendered = 0;
+ _captureDeviceCount = 0;
+ _capturing = NO;
+ _captureInitialized = NO;
+ _frameRate = DEFAULT_FRAME_RATE;
+ _frameWidth = DEFAULT_FRAME_WIDTH;
+ _frameHeight = DEFAULT_FRAME_HEIGHT;
+ _lock = [[NSLock alloc] init];
+ _captureSession = [[QTCaptureSession alloc] init];
+ _captureDecompressedVideoOutput =
+ [[QTCaptureDecompressedVideoOutput alloc] init];
+ [_captureDecompressedVideoOutput setDelegate:self];
+
+ [self getCaptureDevices];
+ if (![self initializeVideoCapture])
+ return NO;
+
+ return YES;
+}
+
+- (void)getCaptureDevices {
+ if (_captureDevices)
+ [_captureDevices release];
+
+ _captureDevices = [[NSArray alloc] initWithArray:
+ [QTCaptureDevice inputDevicesWithMediaType:QTMediaTypeVideo]];
+
+ _captureDeviceCount = _captureDevices.count;
+}
+
+- (BOOL)initializeVideoCapture {
+ NSDictionary *captureDictionary =
+ [NSDictionary dictionaryWithObjectsAndKeys:
+ [NSNumber numberWithDouble:_frameWidth],
+ (id)kCVPixelBufferWidthKey,
+ [NSNumber numberWithDouble:_frameHeight],
+ (id)kCVPixelBufferHeightKey,
+ [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32ARGB],
+ (id)kCVPixelBufferPixelFormatTypeKey,
+ nil];
+
+ [_captureDecompressedVideoOutput setPixelBufferAttributes:captureDictionary];
+ [_captureDecompressedVideoOutput setAutomaticallyDropsLateVideoFrames:YES];
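+ // Cap the delivery rate: QTKit will not deliver decompressed frames more
+ // often than once every 1/_frameRate seconds.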
+ [_captureDecompressedVideoOutput
+ setMinimumVideoFrameInterval:(NSTimeInterval)1/(float)_frameRate];
+
+ NSError *error;
+ if (![_captureSession addOutput:_captureDecompressedVideoOutput error:&error])
+ return NO;
+
+ return YES;
+}
+
+- (void)captureOutput:(QTCaptureOutput *)captureOutput
+ didDropVideoFrameWithSampleBuffer:(QTSampleBuffer *)sampleBuffer
+ fromConnection:(QTCaptureConnection *)connection {
+ // TODO(mflodman) Experiment more when this happens.
+}
+
+- (void)captureOutput:(QTCaptureOutput *)captureOutput
+ didOutputVideoFrame:(CVImageBufferRef)videoFrame
+ withSampleBuffer:(QTSampleBuffer *)sampleBuffer
+ fromConnection:(QTCaptureConnection *)connection {
+
+ [_lock lock];
+ if (!_owner) {
+ [_lock unlock];
+ return;
+ }
+
+ const int kFlags = 0;
+ if (CVPixelBufferLockBaseAddress(videoFrame, kFlags) == kCVReturnSuccess) {
+ void *baseAddress = CVPixelBufferGetBaseAddress(videoFrame);
+ size_t bytesPerRow = CVPixelBufferGetBytesPerRow(videoFrame);
+ size_t frameHeight = CVPixelBufferGetHeight(videoFrame);
+ size_t frameSize = bytesPerRow * frameHeight;
+
+ VideoCaptureCapability tempCaptureCapability;
+ tempCaptureCapability.width = _frameWidth;
+ tempCaptureCapability.height = _frameHeight;
+ tempCaptureCapability.maxFPS = _frameRate;
+ // TODO(wu) : Update actual type and not hard-coded value.
+ tempCaptureCapability.rawType = kVideoBGRA;
+
+ _owner->IncomingFrame((unsigned char*)baseAddress, frameSize,
+ tempCaptureCapability, 0);
+ CVPixelBufferUnlockBaseAddress(videoFrame, kFlags);
+ }
+ [_lock unlock];
+ _framesDelivered++;
+ _framesRendered++;
+}
+
+@end
diff --git a/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_utility.h b/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_utility.h
new file mode 100644
index 0000000000..5ef0b96b6b
--- /dev/null
+++ b/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_utility.h
@@ -0,0 +1,36 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * video_capture_qtkit_utility.h
+ *
+ */
+
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_MAC_QTKIT_VIDEO_CAPTURE_QTKIT_UTILITY_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_MAC_QTKIT_VIDEO_CAPTURE_QTKIT_UTILITY_H_
+
+#define MAX_NAME_LENGTH 1024
+
+#define QTKIT_MIN_WIDTH 0
+#define QTKIT_MAX_WIDTH 2560
+#define QTKIT_DEFAULT_WIDTH 352
+
+#define QTKIT_MIN_HEIGHT 0
+#define QTKIT_MAX_HEIGHT 1440
+#define QTKIT_DEFAULT_HEIGHT 288
+
+#define QTKIT_MIN_FRAME_RATE 1
+#define QTKIT_MAX_FRAME_RATE 60
+#define QTKIT_DEFAULT_FRAME_RATE 30
+
+#define RELEASE_AND_CLEAR(p) if (p) { (p)->Release(); (p) = NULL; }
+
+#endif // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_MAC_QTKIT_VIDEO_CAPTURE_QTKIT_UTILITY_H_
diff --git a/webrtc/modules/video_capture/mac/video_capture_mac.mm b/webrtc/modules/video_capture/mac/video_capture_mac.mm
new file mode 100644
index 0000000000..a9dab96e93
--- /dev/null
+++ b/webrtc/modules/video_capture/mac/video_capture_mac.mm
@@ -0,0 +1,271 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * video_capture_mac.cc
+ *
+ */
+
+#include <QuickTime/QuickTime.h>
+
+#include "webrtc/modules/video_capture/device_info_impl.h"
+#include "webrtc/modules/video_capture/video_capture_config.h"
+#include "webrtc/modules/video_capture/video_capture_impl.h"
+#include "webrtc/system_wrappers/include/ref_count.h"
+#include "webrtc/system_wrappers/include/trace.h"
+
+// 10.4 support must be decided at runtime. For now we simply pick which set of
+// "work" classes to use at compile time: one for QTKit, one for QuickTime.
+#if __MAC_OS_X_VERSION_MIN_REQUIRED == __MAC_10_4 // QuickTime version
+#include <QuickTime/video_capture_quick_time.h>
+#include <QuickTime/video_capture_quick_time_info.h>
+#else
+#include "webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit.h"
+#include "webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_info.h"
+#endif
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+
+// static
+bool CheckOSVersion()
+{
+ // Check OSX version
+ OSErr err = noErr;
+
+ SInt32 version;
+
+ err = Gestalt(gestaltSystemVersion, &version);
+ if (err != noErr)
+ {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, 0,
+ "Could not get OS version");
+ return false;
+ }
+
+ if (version < 0x00001040) // Older version than Mac OSX 10.4
+ {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, 0,
+ "OS version too old: 0x%x", version);
+ return false;
+ }
+
+ WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, 0,
+ "OS version compatible: 0x%x", version);
+
+ return true;
+}
+
+// static
+bool CheckQTVersion()
+{
+ // Check OSX version
+ OSErr err = noErr;
+
+ SInt32 version;
+
+ err = Gestalt(gestaltQuickTime, &version);
+ if (err != noErr)
+ {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, 0,
+ "Could not get QuickTime version");
+ return false;
+ }
+
+ if (version < 0x07000000) // QT v. 7.x or newer (QT 5.0.2 0x05020000)
+ {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, 0,
+ "QuickTime version too old: 0x%x", version);
+ return false;
+ }
+
+ WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, 0,
+ "QuickTime version compatible: 0x%x", version);
+ return true;
+}
+
+/**************************************************************************
+ *
+ * Create/Destroy a VideoCaptureModule
+ *
+ ***************************************************************************/
+
+/*
+ * Creates the platform capture module (QTKit or QuickTime, selected at
+ * compile time above) for the device identified by deviceUniqueIdUTF8.
+ * Returns NULL on failure.
+ */
+
+VideoCaptureModule* VideoCaptureImpl::Create(
+ const int32_t id, const char* deviceUniqueIdUTF8)
+{
+
+ if (webrtc::videocapturemodule::CheckOSVersion() == false)
+ {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, id,
+ "OS version is too old. Could not create video capture "
+ "module. Returning NULL");
+ return NULL;
+ }
+
+#if __MAC_OS_X_VERSION_MIN_REQUIRED == __MAC_10_4 // QuickTime version
+ if (webrtc::videocapturemodule::CheckQTVersion() == false)
+ {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, id,
+ "QuickTime version is too old. Could not create video "
+ "capture module. Returning NULL");
+ return NULL;
+ }
+
+ WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, id,
+ "%s line %d. QTKit is not supported on this machine. Using "
+ "QuickTime framework to capture video",
+ __FILE__, __LINE__);
+
+ RefCountImpl<videocapturemodule::VideoCaptureMacQuickTime>*
+ newCaptureModule =
+ new RefCountImpl<videocapturemodule::VideoCaptureMacQuickTime>(id);
+
+ if (!newCaptureModule)
+ {
+ WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, id,
+ "could not Create for unique device %s, !newCaptureModule",
+ deviceUniqueIdUTF8);
+ return NULL;
+ }
+
+ if (newCaptureModule->Init(id, deviceUniqueIdUTF8) != 0)
+ {
+ WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, id,
+ "could not Create for unique device %s, "
+ "newCaptureModule->Init()!=0",
+ deviceUniqueIdUTF8);
+ delete newCaptureModule;
+ return NULL;
+ }
+
+ // Successfully created VideoCaptureMacQuicktime. Return it
+ WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, id,
+ "Module created for unique device %s. Will use QuickTime "
+ "framework to capture",
+ deviceUniqueIdUTF8);
+ return newCaptureModule;
+
+#else // QTKit version
+
+ WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, id,
+ "Using QTKit framework to capture video");
+
+ RefCountImpl<videocapturemodule::VideoCaptureMacQTKit>* newCaptureModule =
+ new RefCountImpl<videocapturemodule::VideoCaptureMacQTKit>(id);
+
+ if(!newCaptureModule)
+ {
+ WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, id,
+ "could not Create for unique device %s, !newCaptureModule",
+ deviceUniqueIdUTF8);
+ return NULL;
+ }
+ if(newCaptureModule->Init(id, deviceUniqueIdUTF8) != 0)
+ {
+ WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, id,
+ "could not Create for unique device %s, "
+ "newCaptureModule->Init()!=0", deviceUniqueIdUTF8);
+ delete newCaptureModule;
+ return NULL;
+ }
+
+ // Successfully created VideoCaptureMacQTKit. Return it
+ WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, id,
+ "Module created for unique device %s, will use QTKit "
+ "framework",deviceUniqueIdUTF8);
+ return newCaptureModule;
+#endif
+}
+
+/**************************************************************************
+ *
+ * Create/Destroy a DeviceInfo
+ *
+ ***************************************************************************/
+
+VideoCaptureModule::DeviceInfo*
+VideoCaptureImpl::CreateDeviceInfo(const int32_t id)
+{
+
+
+ if (webrtc::videocapturemodule::CheckOSVersion() == false)
+ {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, id,
+ "OS version is too old. Could not create video capture "
+ "module. Returning NULL");
+ return NULL;
+ }
+
+#if __MAC_OS_X_VERSION_MIN_REQUIRED == __MAC_10_4 // QuickTime version
+ if (webrtc::videocapturemodule::CheckQTVersion() == false)
+ {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, id,
+ "QuickTime version is too old. Could not create video "
+ "capture module. Returning NULL");
+ return NULL;
+ }
+
+ webrtc::videocapturemodule::VideoCaptureMacQuickTimeInfo* newCaptureInfoModule =
+ new webrtc::videocapturemodule::VideoCaptureMacQuickTimeInfo(id);
+
+ if (!newCaptureInfoModule || newCaptureInfoModule->Init() != 0)
+ {
+ Destroy(newCaptureInfoModule);
+ newCaptureInfoModule = NULL;
+ WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, id,
+ "Failed to Init newCaptureInfoModule created with id %d "
+ "and device \"\" ", id);
+ return NULL;
+ }
+ WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, id,
+ "VideoCaptureModule created for id %d", id);
+ return newCaptureInfoModule;
+
+#else // QTKit version
+ webrtc::videocapturemodule::VideoCaptureMacQTKitInfo* newCaptureInfoModule =
+ new webrtc::videocapturemodule::VideoCaptureMacQTKitInfo(id);
+
+ if(!newCaptureInfoModule || newCaptureInfoModule->Init() != 0)
+ {
+ //Destroy(newCaptureInfoModule);
+ delete newCaptureInfoModule;
+ newCaptureInfoModule = NULL;
+ WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, id,
+ "Failed to Init newCaptureInfoModule created with id %d "
+ "and device \"\" ", id);
+ return NULL;
+ }
+ WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, id,
+ "VideoCaptureModule created for id %d", id);
+ return newCaptureInfoModule;
+
+#endif
+
+}
+
+/**************************************************************************
+ *
+ * End Create/Destroy VideoCaptureModule
+ *
+ ***************************************************************************/
+} // namespace videocapturemodule
+} // namespace webrtc
diff --git a/webrtc/modules/video_capture/test/video_capture_main_mac.mm b/webrtc/modules/video_capture/test/video_capture_main_mac.mm
new file mode 100644
index 0000000000..15cd1d555f
--- /dev/null
+++ b/webrtc/modules/video_capture/test/video_capture_main_mac.mm
@@ -0,0 +1,17 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/test/testsupport/mac/run_threaded_main_mac.h"
+
+int ImplementThisToRunYourTest(int argc, char** argv) {
+ testing::InitGoogleTest(&argc, argv);
+ return RUN_ALL_TESTS();
+}
diff --git a/webrtc/modules/video_capture/test/video_capture_unittest.cc b/webrtc/modules/video_capture/test/video_capture_unittest.cc
new file mode 100644
index 0000000000..2b8786b0fe
--- /dev/null
+++ b/webrtc/modules/video_capture/test/video_capture_unittest.cc
@@ -0,0 +1,540 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+
+#include <map>
+#include <sstream>
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/scoped_ref_ptr.h"
+#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
+#include "webrtc/modules/utility/interface/process_thread.h"
+#include "webrtc/modules/video_capture/include/video_capture.h"
+#include "webrtc/modules/video_capture/include/video_capture_factory.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/include/sleep.h"
+#include "webrtc/system_wrappers/include/tick_util.h"
+#include "webrtc/test/testsupport/gtest_disable.h"
+#include "webrtc/video_frame.h"
+
+using rtc::scoped_ptr;
+using webrtc::CriticalSectionWrapper;
+using webrtc::CriticalSectionScoped;
+using webrtc::SleepMs;
+using webrtc::TickTime;
+using webrtc::VideoCaptureAlarm;
+using webrtc::VideoCaptureCapability;
+using webrtc::VideoCaptureDataCallback;
+using webrtc::VideoCaptureFactory;
+using webrtc::VideoCaptureFeedBack;
+using webrtc::VideoCaptureModule;
+
+
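+// Polls |ex| every 5 ms until it evaluates to true or |timeout| ms have
+// passed; the result is stored in |res|.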
+#define WAIT_(ex, timeout, res) \
+ do { \
+ res = (ex); \
+ int64_t start = TickTime::MillisecondTimestamp(); \
+ while (!res && TickTime::MillisecondTimestamp() < start + timeout) { \
+ SleepMs(5); \
+ res = (ex); \
+ } \
+ } while (0);\
+
+#define EXPECT_TRUE_WAIT(ex, timeout) \
+ do { \
+ bool res; \
+ WAIT_(ex, timeout, res); \
+ if (!res) EXPECT_TRUE(ex); \
+ } while (0);
+
+
+static const int kTimeOut = 5000;
+static const int kTestHeight = 288;
+static const int kTestWidth = 352;
+static const int kTestFramerate = 30;
+
+// Compares the content of two video frames.
+static bool CompareFrames(const webrtc::VideoFrame& frame1,
+ const webrtc::VideoFrame& frame2) {
+ bool result =
+ (frame1.stride(webrtc::kYPlane) == frame2.stride(webrtc::kYPlane)) &&
+ (frame1.stride(webrtc::kUPlane) == frame2.stride(webrtc::kUPlane)) &&
+ (frame1.stride(webrtc::kVPlane) == frame2.stride(webrtc::kVPlane)) &&
+ (frame1.width() == frame2.width()) &&
+ (frame1.height() == frame2.height());
+
+ if (!result)
+ return false;
+ for (int plane = 0; plane < webrtc::kNumOfPlanes; plane ++) {
+ webrtc::PlaneType plane_type = static_cast<webrtc::PlaneType>(plane);
+ int allocated_size1 = frame1.allocated_size(plane_type);
+ int allocated_size2 = frame2.allocated_size(plane_type);
+ if (allocated_size1 != allocated_size2)
+ return false;
+ const uint8_t* plane_buffer1 = frame1.buffer(plane_type);
+ const uint8_t* plane_buffer2 = frame2.buffer(plane_type);
+ if (memcmp(plane_buffer1, plane_buffer2, allocated_size1))
+ return false;
+ }
+ return true;
+}
+
+class TestVideoCaptureCallback : public VideoCaptureDataCallback {
+ public:
+ TestVideoCaptureCallback()
+ : capture_cs_(CriticalSectionWrapper::CreateCriticalSection()),
+ capture_delay_(-1),
+ last_render_time_ms_(0),
+ incoming_frames_(0),
+ timing_warnings_(0),
+ rotate_frame_(webrtc::kVideoRotation_0) {}
+
+ ~TestVideoCaptureCallback() {
+ if (timing_warnings_ > 0)
+ printf("No of timing warnings %d\n", timing_warnings_);
+ }
+
+ virtual void OnIncomingCapturedFrame(const int32_t id,
+ const webrtc::VideoFrame& videoFrame) {
+ CriticalSectionScoped cs(capture_cs_.get());
+ int height = videoFrame.height();
+ int width = videoFrame.width();
+#if ANDROID
+ // Android camera frames may be rotated depending on test device
+ // orientation.
+ EXPECT_TRUE(height == capability_.height || height == capability_.width);
+ EXPECT_TRUE(width == capability_.width || width == capability_.height);
+#else
+ if (rotate_frame_ == webrtc::kVideoRotation_90 ||
+ rotate_frame_ == webrtc::kVideoRotation_270) {
+ EXPECT_EQ(width, capability_.height);
+ EXPECT_EQ(height, capability_.width);
+ } else {
+ EXPECT_EQ(height, capability_.height);
+ EXPECT_EQ(width, capability_.width);
+ }
+#endif
+ // RenderTimestamp should be the time now.
+ EXPECT_TRUE(
+ videoFrame.render_time_ms() >= TickTime::MillisecondTimestamp()-30 &&
+ videoFrame.render_time_ms() <= TickTime::MillisecondTimestamp());
+
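+ // Count a timing warning if the interval since the previous frame deviates
+ // more than +/-10% from the expected 1000 / maxFPS ms.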
+ if ((videoFrame.render_time_ms() >
+ last_render_time_ms_ + (1000 * 1.1) / capability_.maxFPS &&
+ last_render_time_ms_ > 0) ||
+ (videoFrame.render_time_ms() <
+ last_render_time_ms_ + (1000 * 0.9) / capability_.maxFPS &&
+ last_render_time_ms_ > 0)) {
+ timing_warnings_++;
+ }
+
+ incoming_frames_++;
+ last_render_time_ms_ = videoFrame.render_time_ms();
+ last_frame_.CopyFrame(videoFrame);
+ }
+
+ virtual void OnCaptureDelayChanged(const int32_t id,
+ const int32_t delay) {
+ CriticalSectionScoped cs(capture_cs_.get());
+ capture_delay_ = delay;
+ }
+
+ void SetExpectedCapability(VideoCaptureCapability capability) {
+ CriticalSectionScoped cs(capture_cs_.get());
+ capability_= capability;
+ incoming_frames_ = 0;
+ last_render_time_ms_ = 0;
+ capture_delay_ = -1;
+ }
+ int incoming_frames() {
+ CriticalSectionScoped cs(capture_cs_.get());
+ return incoming_frames_;
+ }
+
+ int capture_delay() {
+ CriticalSectionScoped cs(capture_cs_.get());
+ return capture_delay_;
+ }
+ int timing_warnings() {
+ CriticalSectionScoped cs(capture_cs_.get());
+ return timing_warnings_;
+ }
+ VideoCaptureCapability capability() {
+ CriticalSectionScoped cs(capture_cs_.get());
+ return capability_;
+ }
+
+ bool CompareLastFrame(const webrtc::VideoFrame& frame) {
+ CriticalSectionScoped cs(capture_cs_.get());
+ return CompareFrames(last_frame_, frame);
+ }
+
+ void SetExpectedCaptureRotation(webrtc::VideoRotation rotation) {
+ CriticalSectionScoped cs(capture_cs_.get());
+ rotate_frame_ = rotation;
+ }
+
+ private:
+ scoped_ptr<CriticalSectionWrapper> capture_cs_;
+ VideoCaptureCapability capability_;
+ int capture_delay_;
+ int64_t last_render_time_ms_;
+ int incoming_frames_;
+ int timing_warnings_;
+ webrtc::VideoFrame last_frame_;
+ webrtc::VideoRotation rotate_frame_;
+};
+
+class TestVideoCaptureFeedBack : public VideoCaptureFeedBack {
+ public:
+ TestVideoCaptureFeedBack() :
+ capture_cs_(CriticalSectionWrapper::CreateCriticalSection()),
+ frame_rate_(0),
+ alarm_(webrtc::Cleared) {
+ }
+
+ virtual void OnCaptureFrameRate(const int32_t id,
+ const uint32_t frameRate) {
+ CriticalSectionScoped cs(capture_cs_.get());
+ frame_rate_ = frameRate;
+ }
+
+ virtual void OnNoPictureAlarm(const int32_t id,
+ const VideoCaptureAlarm reported_alarm) {
+ CriticalSectionScoped cs(capture_cs_.get());
+ alarm_ = reported_alarm;
+ }
+ int frame_rate() {
+ CriticalSectionScoped cs(capture_cs_.get());
+ return frame_rate_;
+
+ }
+ VideoCaptureAlarm alarm() {
+ CriticalSectionScoped cs(capture_cs_.get());
+ return alarm_;
+ }
+
+ private:
+ scoped_ptr<CriticalSectionWrapper> capture_cs_;
+ unsigned int frame_rate_;
+ VideoCaptureAlarm alarm_;
+};
+
+class VideoCaptureTest : public testing::Test {
+ public:
+ VideoCaptureTest() : number_of_devices_(0) {}
+
+ void SetUp() {
+ device_info_.reset(VideoCaptureFactory::CreateDeviceInfo(0));
+ assert(device_info_.get());
+ number_of_devices_ = device_info_->NumberOfDevices();
+ ASSERT_GT(number_of_devices_, 0u);
+ }
+
+ rtc::scoped_refptr<VideoCaptureModule> OpenVideoCaptureDevice(
+ unsigned int device,
+ VideoCaptureDataCallback* callback) {
+ char device_name[256];
+ char unique_name[256];
+
+ EXPECT_EQ(0, device_info_->GetDeviceName(
+ device, device_name, 256, unique_name, 256));
+
+ rtc::scoped_refptr<VideoCaptureModule> module(
+ VideoCaptureFactory::Create(device, unique_name));
+ if (module.get() == NULL)
+ return NULL;
+
+ EXPECT_FALSE(module->CaptureStarted());
+
+ module->RegisterCaptureDataCallback(*callback);
+ return module;
+ }
+
+ void StartCapture(VideoCaptureModule* capture_module,
+ VideoCaptureCapability capability) {
+ ASSERT_EQ(0, capture_module->StartCapture(capability));
+ EXPECT_TRUE(capture_module->CaptureStarted());
+
+ VideoCaptureCapability resulting_capability;
+ EXPECT_EQ(0, capture_module->CaptureSettings(resulting_capability));
+ EXPECT_EQ(capability.width, resulting_capability.width);
+ EXPECT_EQ(capability.height, resulting_capability.height);
+ }
+
+ scoped_ptr<VideoCaptureModule::DeviceInfo> device_info_;
+ unsigned int number_of_devices_;
+};
+
+TEST_F(VideoCaptureTest, CreateDelete) {
+ for (int i = 0; i < 5; ++i) {
+ int64_t start_time = TickTime::MillisecondTimestamp();
+ TestVideoCaptureCallback capture_observer;
+ rtc::scoped_refptr<VideoCaptureModule> module(
+ OpenVideoCaptureDevice(0, &capture_observer));
+ ASSERT_TRUE(module.get() != NULL);
+
+ VideoCaptureCapability capability;
+#ifndef WEBRTC_MAC
+ device_info_->GetCapability(module->CurrentDeviceName(), 0, capability);
+#else
+ capability.width = kTestWidth;
+ capability.height = kTestHeight;
+ capability.maxFPS = kTestFramerate;
+ capability.rawType = webrtc::kVideoUnknown;
+#endif
+ capture_observer.SetExpectedCapability(capability);
+ ASSERT_NO_FATAL_FAILURE(StartCapture(module.get(), capability));
+
+ // Less than 4s to start the camera.
+ EXPECT_LE(TickTime::MillisecondTimestamp() - start_time, 4000);
+
+ // Make sure 5 frames are captured.
+ EXPECT_TRUE_WAIT(capture_observer.incoming_frames() >= 5, kTimeOut);
+
+ EXPECT_GE(capture_observer.capture_delay(), 0);
+
+ int64_t stop_time = TickTime::MillisecondTimestamp();
+ EXPECT_EQ(0, module->StopCapture());
+ EXPECT_FALSE(module->CaptureStarted());
+
+ // Less than 3s to stop the camera.
+ EXPECT_LE(TickTime::MillisecondTimestamp() - stop_time, 3000);
+ }
+}
+
+TEST_F(VideoCaptureTest, Capabilities) {
+#ifdef WEBRTC_MAC
+ printf("Video capture capabilities are not supported on Mac.\n");
+ return;
+#endif
+
+ TestVideoCaptureCallback capture_observer;
+
+ rtc::scoped_refptr<VideoCaptureModule> module(
+ OpenVideoCaptureDevice(0, &capture_observer));
+ ASSERT_TRUE(module.get() != NULL);
+
+ int number_of_capabilities = device_info_->NumberOfCapabilities(
+ module->CurrentDeviceName());
+ EXPECT_GT(number_of_capabilities, 0);
+ // Key is <width>x<height>, value is vector of maxFPS values at that
+ // resolution.
+ typedef std::map<std::string, std::vector<int> > FrameRatesByResolution;
+ FrameRatesByResolution frame_rates_by_resolution;
+ for (int i = 0; i < number_of_capabilities; ++i) {
+ VideoCaptureCapability capability;
+ EXPECT_EQ(0, device_info_->GetCapability(module->CurrentDeviceName(), i,
+ capability));
+ std::ostringstream resolutionStream;
+ resolutionStream << capability.width << "x" << capability.height;
+ resolutionStream.flush();
+ std::string resolution = resolutionStream.str();
+ frame_rates_by_resolution[resolution].push_back(capability.maxFPS);
+
+ // Since Android presents so many resolution/FPS combinations and the test
+ // runner imposes a timeout, we only actually start the capture and test
+ // that a frame was captured for 2 frame-rates at each resolution.
+ if (frame_rates_by_resolution[resolution].size() > 2)
+ continue;
+
+ capture_observer.SetExpectedCapability(capability);
+ ASSERT_NO_FATAL_FAILURE(StartCapture(module.get(), capability));
+ // Make sure at least one frame is captured.
+ EXPECT_TRUE_WAIT(capture_observer.incoming_frames() >= 1, kTimeOut);
+
+ EXPECT_EQ(0, module->StopCapture());
+ }
+
+#if ANDROID
+ // There's no reason for this to _necessarily_ be true, but in practice all
+ // Android devices this test runs on do support multiple capture resolutions
+ // and multiple frame-rates per captured resolution, so we assert that fact
+ // here as a regression test against the time when only a single frame-rate
+ // per resolution was reported (bug 2974). If this test starts being run on
+ // devices for which this is untrue (e.g. Nexus4), the following should
+ // probably be wrapped in a base::android::BuildInfo::model()/device() check.
+ EXPECT_GT(frame_rates_by_resolution.size(), 1U);
+ for (FrameRatesByResolution::const_iterator it =
+ frame_rates_by_resolution.begin();
+ it != frame_rates_by_resolution.end();
+ ++it) {
+ EXPECT_GT(it->second.size(), 1U) << it->first;
+ }
+#endif // ANDROID
+}
+
+// NOTE: flaky, crashes sometimes.
+// http://code.google.com/p/webrtc/issues/detail?id=777
+TEST_F(VideoCaptureTest, DISABLED_TestTwoCameras) {
+ if (number_of_devices_ < 2) {
+ printf("There are not two cameras available. Aborting test. \n");
+ return;
+ }
+
+ TestVideoCaptureCallback capture_observer1;
+ rtc::scoped_refptr<VideoCaptureModule> module1(
+ OpenVideoCaptureDevice(0, &capture_observer1));
+ ASSERT_TRUE(module1.get() != NULL);
+ VideoCaptureCapability capability1;
+#ifndef WEBRTC_MAC
+ device_info_->GetCapability(module1->CurrentDeviceName(), 0, capability1);
+#else
+ capability1.width = kTestWidth;
+ capability1.height = kTestHeight;
+ capability1.maxFPS = kTestFramerate;
+ capability1.rawType = webrtc::kVideoUnknown;
+#endif
+ capture_observer1.SetExpectedCapability(capability1);
+
+ TestVideoCaptureCallback capture_observer2;
+ rtc::scoped_refptr<VideoCaptureModule> module2(
+ OpenVideoCaptureDevice(1, &capture_observer2));
+ ASSERT_TRUE(module2.get() != NULL);
+
+
+ VideoCaptureCapability capability2;
+#ifndef WEBRTC_MAC
+ device_info_->GetCapability(module2->CurrentDeviceName(), 0, capability2);
+#else
+ capability2.width = kTestWidth;
+ capability2.height = kTestHeight;
+ capability2.maxFPS = kTestFramerate;
+ capability2.rawType = webrtc::kVideoUnknown;
+#endif
+ capture_observer2.SetExpectedCapability(capability2);
+
+ ASSERT_NO_FATAL_FAILURE(StartCapture(module1.get(), capability1));
+ ASSERT_NO_FATAL_FAILURE(StartCapture(module2.get(), capability2));
+ EXPECT_TRUE_WAIT(capture_observer1.incoming_frames() >= 5, kTimeOut);
+ EXPECT_TRUE_WAIT(capture_observer2.incoming_frames() >= 5, kTimeOut);
+ EXPECT_EQ(0, module2->StopCapture());
+ EXPECT_EQ(0, module1->StopCapture());
+}
+
+// Test class for testing external capture and capture feedback information
+// such as frame rate and picture alarm.
+class VideoCaptureExternalTest : public testing::Test {
+ public:
+ void SetUp() {
+ capture_module_ = VideoCaptureFactory::Create(0, capture_input_interface_);
+ process_module_ = webrtc::ProcessThread::Create("ProcessThread");
+ process_module_->Start();
+ process_module_->RegisterModule(capture_module_);
+
+ VideoCaptureCapability capability;
+ capability.width = kTestWidth;
+ capability.height = kTestHeight;
+ capability.rawType = webrtc::kVideoYV12;
+ capability.maxFPS = kTestFramerate;
+ capture_callback_.SetExpectedCapability(capability);
+
+ test_frame_.CreateEmptyFrame(kTestWidth, kTestHeight, kTestWidth,
+ ((kTestWidth + 1) / 2), (kTestWidth + 1) / 2);
+ SleepMs(1); // Wait 1ms so that two tests can't have the same timestamp.
+ memset(test_frame_.buffer(webrtc::kYPlane), 127, kTestWidth * kTestHeight);
+ memset(test_frame_.buffer(webrtc::kUPlane), 127,
+ ((kTestWidth + 1) / 2) * ((kTestHeight + 1) / 2));
+ memset(test_frame_.buffer(webrtc::kVPlane), 127,
+ ((kTestWidth + 1) / 2) * ((kTestHeight + 1) / 2));
+
+ capture_module_->RegisterCaptureDataCallback(capture_callback_);
+ capture_module_->RegisterCaptureCallback(capture_feedback_);
+ capture_module_->EnableFrameRateCallback(true);
+ capture_module_->EnableNoPictureAlarm(true);
+ }
+
+ void TearDown() {
+ process_module_->Stop();
+ }
+
+ webrtc::VideoCaptureExternal* capture_input_interface_;
+ rtc::scoped_refptr<VideoCaptureModule> capture_module_;
+ rtc::scoped_ptr<webrtc::ProcessThread> process_module_;
+ webrtc::VideoFrame test_frame_;
+ TestVideoCaptureCallback capture_callback_;
+ TestVideoCaptureFeedBack capture_feedback_;
+};
+
+// Test input of external video frames.
+TEST_F(VideoCaptureExternalTest, TestExternalCapture) {
+ size_t length = webrtc::CalcBufferSize(webrtc::kI420,
+ test_frame_.width(),
+ test_frame_.height());
+ scoped_ptr<uint8_t[]> test_buffer(new uint8_t[length]);
+ webrtc::ExtractBuffer(test_frame_, length, test_buffer.get());
+ EXPECT_EQ(0, capture_input_interface_->IncomingFrame(test_buffer.get(),
+ length, capture_callback_.capability(), 0));
+ EXPECT_TRUE(capture_callback_.CompareLastFrame(test_frame_));
+}
+
+// Test frame rate and no picture alarm.
+// Flaky on Win32, see webrtc:3270.
+TEST_F(VideoCaptureExternalTest, DISABLED_ON_WIN(FrameRate)) {
+ int64_t testTime = 3;
+ TickTime startTime = TickTime::Now();
+
+ while ((TickTime::Now() - startTime).Milliseconds() < testTime * 1000) {
+ size_t length = webrtc::CalcBufferSize(webrtc::kI420,
+ test_frame_.width(),
+ test_frame_.height());
+ scoped_ptr<uint8_t[]> test_buffer(new uint8_t[length]);
+ webrtc::ExtractBuffer(test_frame_, length, test_buffer.get());
+ EXPECT_EQ(0, capture_input_interface_->IncomingFrame(test_buffer.get(),
+ length, capture_callback_.capability(), 0));
+ SleepMs(100);
+ }
+ EXPECT_TRUE(capture_feedback_.frame_rate() >= 8 &&
+ capture_feedback_.frame_rate() <= 10);
+ SleepMs(500);
+ EXPECT_EQ(webrtc::Raised, capture_feedback_.alarm());
+
+ startTime = TickTime::Now();
+ while ((TickTime::Now() - startTime).Milliseconds() < testTime * 1000) {
+ size_t length = webrtc::CalcBufferSize(webrtc::kI420,
+ test_frame_.width(),
+ test_frame_.height());
+ scoped_ptr<uint8_t[]> test_buffer(new uint8_t[length]);
+ webrtc::ExtractBuffer(test_frame_, length, test_buffer.get());
+ EXPECT_EQ(0, capture_input_interface_->IncomingFrame(test_buffer.get(),
+ length, capture_callback_.capability(), 0));
+ SleepMs(1000 / 30);
+ }
+ EXPECT_EQ(webrtc::Cleared, capture_feedback_.alarm());
+ // Frame rate might be less than 33 since we have paused providing
+ // frames for a while.
+ EXPECT_TRUE(capture_feedback_.frame_rate() >= 25 &&
+ capture_feedback_.frame_rate() <= 33);
+}
+
+TEST_F(VideoCaptureExternalTest, Rotation) {
+ EXPECT_EQ(0, capture_module_->SetCaptureRotation(webrtc::kVideoRotation_0));
+ size_t length = webrtc::CalcBufferSize(webrtc::kI420,
+ test_frame_.width(),
+ test_frame_.height());
+ scoped_ptr<uint8_t[]> test_buffer(new uint8_t[length]);
+ webrtc::ExtractBuffer(test_frame_, length, test_buffer.get());
+ EXPECT_EQ(0, capture_input_interface_->IncomingFrame(test_buffer.get(),
+ length, capture_callback_.capability(), 0));
+ EXPECT_EQ(0, capture_module_->SetCaptureRotation(webrtc::kVideoRotation_90));
+ capture_callback_.SetExpectedCaptureRotation(webrtc::kVideoRotation_90);
+ EXPECT_EQ(0, capture_input_interface_->IncomingFrame(test_buffer.get(),
+ length, capture_callback_.capability(), 0));
+ EXPECT_EQ(0, capture_module_->SetCaptureRotation(webrtc::kVideoRotation_180));
+ capture_callback_.SetExpectedCaptureRotation(webrtc::kVideoRotation_180);
+ EXPECT_EQ(0, capture_input_interface_->IncomingFrame(test_buffer.get(),
+ length, capture_callback_.capability(), 0));
+ EXPECT_EQ(0, capture_module_->SetCaptureRotation(webrtc::kVideoRotation_270));
+ capture_callback_.SetExpectedCaptureRotation(webrtc::kVideoRotation_270);
+ EXPECT_EQ(0, capture_input_interface_->IncomingFrame(test_buffer.get(),
+ length, capture_callback_.capability(), 0));
+}
diff --git a/webrtc/modules/video_capture/video_capture.gypi b/webrtc/modules/video_capture/video_capture.gypi
new file mode 100644
index 0000000000..f552df7758
--- /dev/null
+++ b/webrtc/modules/video_capture/video_capture.gypi
@@ -0,0 +1,204 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+ 'targets': [
+ {
+ # Note: this library does not include a video capture implementation.
+ # Targets must link with either 'video_capture' or
+ # 'video_capture_module_internal_impl' depending on whether they want to
+ # use the internal capturer.
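+ # For example, a target that wants the built-in platform capturer would add
+ # 'video_capture_module_internal_impl' to its 'dependencies', while a target
+ # that only injects frames externally depends on 'video_capture' (the
+ # 'video_capture_tests' target below uses the internal implementation).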
+ 'target_name': 'video_capture_module',
+ 'type': 'static_library',
+ 'dependencies': [
+ 'webrtc_utility',
+ '<(webrtc_root)/common_video/common_video.gyp:common_video',
+ '<(webrtc_root)/system_wrappers/system_wrappers.gyp:system_wrappers',
+ ],
+ 'sources': [
+ 'device_info_impl.cc',
+ 'device_info_impl.h',
+ 'include/video_capture.h',
+ 'include/video_capture_defines.h',
+ 'include/video_capture_factory.h',
+ 'video_capture_config.h',
+ 'video_capture_delay.h',
+ 'video_capture_factory.cc',
+ 'video_capture_impl.cc',
+ 'video_capture_impl.h',
+ ],
+ },
+ {
+ # Default video capture module implementation that only supports external
+ # capture.
+ 'target_name': 'video_capture',
+ 'type': 'static_library',
+ 'dependencies': [
+ 'video_capture_module',
+ ],
+ 'sources': [
+ 'external/device_info_external.cc',
+ 'external/video_capture_external.cc',
+ ],
+ },
+ ], # targets
+ 'conditions': [
+ ['build_with_chromium==0', {
+ 'targets': [
+ {
+ 'target_name': 'video_capture_module_internal_impl',
+ 'type': 'static_library',
+ 'conditions': [
+ ['OS!="android"', {
+ 'dependencies': [
+ 'video_capture_module',
+ '<(webrtc_root)/common.gyp:webrtc_common',
+ ],
+ }],
+ ['OS=="linux"', {
+ 'sources': [
+ 'linux/device_info_linux.cc',
+ 'linux/device_info_linux.h',
+ 'linux/video_capture_linux.cc',
+ 'linux/video_capture_linux.h',
+ ],
+ }], # linux
+ ['OS=="mac"', {
+ 'sources': [
+ 'mac/qtkit/video_capture_qtkit.h',
+ 'mac/qtkit/video_capture_qtkit.mm',
+ 'mac/qtkit/video_capture_qtkit_info.h',
+ 'mac/qtkit/video_capture_qtkit_info.mm',
+ 'mac/qtkit/video_capture_qtkit_info_objc.h',
+ 'mac/qtkit/video_capture_qtkit_info_objc.mm',
+ 'mac/qtkit/video_capture_qtkit_objc.h',
+ 'mac/qtkit/video_capture_qtkit_objc.mm',
+ 'mac/qtkit/video_capture_qtkit_utility.h',
+ 'mac/video_capture_mac.mm',
+ ],
+ 'link_settings': {
+ 'xcode_settings': {
+ 'OTHER_LDFLAGS': [
+ '-framework Cocoa',
+ '-framework CoreVideo',
+ '-framework QTKit',
+ ],
+ },
+ },
+ }], # mac
+ ['OS=="win"', {
+ 'dependencies': [
+ '<(DEPTH)/third_party/winsdk_samples/winsdk_samples.gyp:directshow_baseclasses',
+ ],
+ 'sources': [
+ 'windows/device_info_ds.cc',
+ 'windows/device_info_ds.h',
+ 'windows/device_info_mf.cc',
+ 'windows/device_info_mf.h',
+ 'windows/help_functions_ds.cc',
+ 'windows/help_functions_ds.h',
+ 'windows/sink_filter_ds.cc',
+ 'windows/sink_filter_ds.h',
+ 'windows/video_capture_ds.cc',
+ 'windows/video_capture_ds.h',
+ 'windows/video_capture_factory_windows.cc',
+ 'windows/video_capture_mf.cc',
+ 'windows/video_capture_mf.h',
+ ],
+ 'link_settings': {
+ 'libraries': [
+ '-lStrmiids.lib',
+ ],
+ },
+ }], # win
+ ['OS=="ios"', {
+ 'sources': [
+ 'ios/device_info_ios.h',
+ 'ios/device_info_ios.mm',
+ 'ios/device_info_ios_objc.h',
+ 'ios/device_info_ios_objc.mm',
+ 'ios/rtc_video_capture_ios_objc.h',
+ 'ios/rtc_video_capture_ios_objc.mm',
+ 'ios/video_capture_ios.h',
+ 'ios/video_capture_ios.mm',
+ ],
+ 'xcode_settings': {
+ 'CLANG_ENABLE_OBJC_ARC': 'YES',
+ },
+ 'all_dependent_settings': {
+ 'xcode_settings': {
+ 'OTHER_LDFLAGS': [
+ '-framework AVFoundation',
+ '-framework CoreMedia',
+ '-framework CoreVideo',
+ '-framework UIKit',
+ ],
+ },
+ },
+ }], # ios
+ ], # conditions
+ },
+ ],
+ }], # build_with_chromium==0
+ ['include_tests==1 and OS!="android"', {
+ 'targets': [
+ {
+ 'target_name': 'video_capture_tests',
+ 'type': '<(gtest_target_type)',
+ 'dependencies': [
+ 'video_capture_module',
+ 'video_capture_module_internal_impl',
+ 'webrtc_utility',
+ '<(webrtc_root)/system_wrappers/system_wrappers.gyp:system_wrappers',
+ '<(DEPTH)/testing/gtest.gyp:gtest',
+ ],
+ 'sources': [
+ 'test/video_capture_unittest.cc',
+ 'test/video_capture_main_mac.mm',
+ ],
+ 'conditions': [
+ ['OS=="mac" or OS=="linux"', {
+ 'cflags': [
+ '-Wno-write-strings',
+ ],
+ 'ldflags': [
+ '-lpthread -lm',
+ ],
+ }],
+ ['OS=="linux"', {
+ 'libraries': [
+ '-lrt',
+ '-lXext',
+ '-lX11',
+ ],
+ }],
+ ['OS=="mac"', {
+ 'dependencies': [
+ # Link with a special main for mac so we can use the webcam.
+ '<(webrtc_root)/test/test.gyp:test_support_main_threaded_mac',
+ ],
+ 'xcode_settings': {
+ # TODO(andrew): CoreAudio and AudioToolbox shouldn't be needed.
+ 'OTHER_LDFLAGS': [
+ '-framework Foundation -framework AppKit -framework Cocoa -framework OpenGL -framework CoreVideo -framework CoreAudio -framework AudioToolbox',
+ ],
+ },
+ }], # OS=="mac"
+ ['OS!="mac"', {
+ 'dependencies': [
+ # Otherwise, use the regular main.
+ '<(webrtc_root)/test/test.gyp:test_support_main',
+ ],
+ }], # OS!="mac"
+ ] # conditions
+ },
+ ], # targets
+ }],
+ ],
+}
+
diff --git a/webrtc/modules/video_capture/video_capture_config.h b/webrtc/modules/video_capture/video_capture_config.h
new file mode 100644
index 0000000000..829a6bea2d
--- /dev/null
+++ b/webrtc/modules/video_capture/video_capture_config.h
@@ -0,0 +1,33 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_VIDEO_CAPTURE_CONFIG_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_VIDEO_CAPTURE_CONFIG_H_
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+enum {kDefaultWidth = 640}; // Start width
+enum {kDefaultHeight = 480}; // Start height
+enum {kDefaultFrameRate = 30}; // Start frame rate
+
+enum {kMaxFrameRate = 60}; // Max allowed frame rate of the start image
+
+enum {kDefaultCaptureDelay = 120};
+enum {kMaxCaptureDelay = 270}; // Max capture delay allowed in the precompiled capture delay values.
+
+enum {kFrameRateCallbackInterval = 1000};
+enum {kFrameRateCountHistorySize = 90};
+enum {kFrameRateHistoryWindowMs = 2000};
+} // namespace videocapturemodule
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_VIDEO_CAPTURE_CONFIG_H_
diff --git a/webrtc/modules/video_capture/video_capture_delay.h b/webrtc/modules/video_capture/video_capture_delay.h
new file mode 100644
index 0000000000..f8924e25ad
--- /dev/null
+++ b/webrtc/modules/video_capture/video_capture_delay.h
@@ -0,0 +1,36 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_VIDEO_CAPTURE_DELAY_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_VIDEO_CAPTURE_DELAY_H_
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+
+struct DelayValue
+{
+ int32_t width;
+ int32_t height;
+ int32_t delay;
+};
+
+enum { NoOfDelayValues = 40 };
+struct DelayValues
+{
+ char * deviceName;
+ char* productId;
+ DelayValue delayValues[NoOfDelayValues];
+};
+
+} // namespace videocapturemodule
+} // namespace webrtc
+#endif // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_VIDEO_CAPTURE_DELAY_H_
diff --git a/webrtc/modules/video_capture/video_capture_factory.cc b/webrtc/modules/video_capture/video_capture_factory.cc
new file mode 100644
index 0000000000..f88f916ba4
--- /dev/null
+++ b/webrtc/modules/video_capture/video_capture_factory.cc
@@ -0,0 +1,41 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_capture/include/video_capture_factory.h"
+
+#include "webrtc/modules/video_capture/video_capture_impl.h"
+
+namespace webrtc
+{
+
+VideoCaptureModule* VideoCaptureFactory::Create(const int32_t id,
+ const char* deviceUniqueIdUTF8) {
+#if defined(ANDROID)
+ return nullptr;
+#else
+ return videocapturemodule::VideoCaptureImpl::Create(id, deviceUniqueIdUTF8);
+#endif
+}
+
+VideoCaptureModule* VideoCaptureFactory::Create(const int32_t id,
+ VideoCaptureExternal*& externalCapture) {
+ return videocapturemodule::VideoCaptureImpl::Create(id, externalCapture);
+}
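+
+// A minimal usage sketch for the external-capture path, following the same
+// pattern as VideoCaptureExternalTest in test/video_capture_unittest.cc (the
+// variable names are illustrative):
+//
+//   webrtc::VideoCaptureExternal* externalCapture = NULL;
+//   rtc::scoped_refptr<webrtc::VideoCaptureModule> module(
+//       webrtc::VideoCaptureFactory::Create(0, externalCapture));
+//   // Frames are then injected through externalCapture->IncomingFrame(...).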
+
+VideoCaptureModule::DeviceInfo* VideoCaptureFactory::CreateDeviceInfo(
+ const int32_t id) {
+#if defined(ANDROID)
+ return nullptr;
+#else
+ return videocapturemodule::VideoCaptureImpl::CreateDeviceInfo(id);
+#endif
+}
+
+} // namespace webrtc
diff --git a/webrtc/modules/video_capture/video_capture_impl.cc b/webrtc/modules/video_capture/video_capture_impl.cc
new file mode 100644
index 0000000000..4046181505
--- /dev/null
+++ b/webrtc/modules/video_capture/video_capture_impl.cc
@@ -0,0 +1,399 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_capture/video_capture_impl.h"
+
+#include <stdlib.h>
+
+#include "webrtc/base/trace_event.h"
+#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
+#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/modules/video_capture/video_capture_config.h"
+#include "webrtc/system_wrappers/include/clock.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/include/logging.h"
+#include "webrtc/system_wrappers/include/ref_count.h"
+#include "webrtc/system_wrappers/include/tick_util.h"
+
+namespace webrtc
+{
+
+namespace videocapturemodule
+{
+VideoCaptureModule* VideoCaptureImpl::Create(
+ const int32_t id,
+ VideoCaptureExternal*& externalCapture)
+{
+ RefCountImpl<VideoCaptureImpl>* implementation =
+ new RefCountImpl<VideoCaptureImpl>(id);
+ externalCapture = implementation;
+ return implementation;
+}
+
+const char* VideoCaptureImpl::CurrentDeviceName() const
+{
+ return _deviceUniqueId;
+}
+
+// static
+int32_t VideoCaptureImpl::RotationFromDegrees(int degrees,
+ VideoRotation* rotation) {
+ switch (degrees) {
+ case 0:
+ *rotation = kVideoRotation_0;
+ return 0;
+ case 90:
+ *rotation = kVideoRotation_90;
+ return 0;
+ case 180:
+ *rotation = kVideoRotation_180;
+ return 0;
+ case 270:
+ *rotation = kVideoRotation_270;
+ return 0;
+ default:
+      return -1;
+ }
+}
+
+// static
+int32_t VideoCaptureImpl::RotationInDegrees(VideoRotation rotation,
+ int* degrees) {
+ switch (rotation) {
+ case kVideoRotation_0:
+ *degrees = 0;
+ return 0;
+ case kVideoRotation_90:
+ *degrees = 90;
+ return 0;
+ case kVideoRotation_180:
+ *degrees = 180;
+ return 0;
+ case kVideoRotation_270:
+ *degrees = 270;
+ return 0;
+ }
+ return -1;
+}
+
+// Returns the number of milliseconds until the module wants a worker thread to call Process.
+int64_t VideoCaptureImpl::TimeUntilNextProcess()
+{
+ CriticalSectionScoped cs(&_callBackCs);
+ const int64_t kProcessIntervalMs = 300;
+ return kProcessIntervalMs -
+ (TickTime::Now() - _lastProcessTime).Milliseconds();
+}
+
+// Process any pending tasks such as timeouts
+int32_t VideoCaptureImpl::Process()
+{
+ CriticalSectionScoped cs(&_callBackCs);
+
+ const TickTime now = TickTime::Now();
+ _lastProcessTime = TickTime::Now();
+
+    // Handle the no-picture alarm.
+
+ if (_lastProcessFrameCount.Ticks() == _incomingFrameTimes[0].Ticks() &&
+ _captureAlarm != Raised)
+ {
+ if (_noPictureAlarmCallBack && _captureCallBack)
+ {
+ _captureAlarm = Raised;
+ _captureCallBack->OnNoPictureAlarm(_id, _captureAlarm);
+ }
+ }
+ else if (_lastProcessFrameCount.Ticks() != _incomingFrameTimes[0].Ticks() &&
+ _captureAlarm != Cleared)
+ {
+ if (_noPictureAlarmCallBack && _captureCallBack)
+ {
+ _captureAlarm = Cleared;
+ _captureCallBack->OnNoPictureAlarm(_id, _captureAlarm);
+
+ }
+ }
+
+ // Handle frame rate callback
+ if ((now - _lastFrameRateCallbackTime).Milliseconds()
+ > kFrameRateCallbackInterval)
+ {
+ if (_frameRateCallBack && _captureCallBack)
+ {
+ const uint32_t frameRate = CalculateFrameRate(now);
+ _captureCallBack->OnCaptureFrameRate(_id, frameRate);
+ }
+ _lastFrameRateCallbackTime = now; // Can be set by EnableFrameRateCallback
+
+ }
+
+ _lastProcessFrameCount = _incomingFrameTimes[0];
+
+ return 0;
+}
+
+VideoCaptureImpl::VideoCaptureImpl(const int32_t id)
+ : _id(id),
+ _deviceUniqueId(NULL),
+ _apiCs(*CriticalSectionWrapper::CreateCriticalSection()),
+ _captureDelay(0),
+ _requestedCapability(),
+ _callBackCs(*CriticalSectionWrapper::CreateCriticalSection()),
+ _lastProcessTime(TickTime::Now()),
+ _lastFrameRateCallbackTime(TickTime::Now()),
+ _frameRateCallBack(false),
+ _noPictureAlarmCallBack(false),
+ _captureAlarm(Cleared),
+ _setCaptureDelay(0),
+ _dataCallBack(NULL),
+ _captureCallBack(NULL),
+ _lastProcessFrameCount(TickTime::Now()),
+ _rotateFrame(kVideoRotation_0),
+ apply_rotation_(true) {
+ _requestedCapability.width = kDefaultWidth;
+ _requestedCapability.height = kDefaultHeight;
+ _requestedCapability.maxFPS = 30;
+ _requestedCapability.rawType = kVideoI420;
+ _requestedCapability.codecType = kVideoCodecUnknown;
+ memset(_incomingFrameTimes, 0, sizeof(_incomingFrameTimes));
+}
+
+VideoCaptureImpl::~VideoCaptureImpl()
+{
+ DeRegisterCaptureDataCallback();
+ DeRegisterCaptureCallback();
+ delete &_callBackCs;
+ delete &_apiCs;
+
+ if (_deviceUniqueId)
+ delete[] _deviceUniqueId;
+}
+
+void VideoCaptureImpl::RegisterCaptureDataCallback(
+ VideoCaptureDataCallback& dataCallBack) {
+ CriticalSectionScoped cs(&_apiCs);
+ CriticalSectionScoped cs2(&_callBackCs);
+ _dataCallBack = &dataCallBack;
+}
+
+void VideoCaptureImpl::DeRegisterCaptureDataCallback() {
+ CriticalSectionScoped cs(&_apiCs);
+ CriticalSectionScoped cs2(&_callBackCs);
+ _dataCallBack = NULL;
+}
+void VideoCaptureImpl::RegisterCaptureCallback(VideoCaptureFeedBack& callBack) {
+
+ CriticalSectionScoped cs(&_apiCs);
+ CriticalSectionScoped cs2(&_callBackCs);
+ _captureCallBack = &callBack;
+}
+void VideoCaptureImpl::DeRegisterCaptureCallback() {
+
+ CriticalSectionScoped cs(&_apiCs);
+ CriticalSectionScoped cs2(&_callBackCs);
+ _captureCallBack = NULL;
+}
+void VideoCaptureImpl::SetCaptureDelay(int32_t delayMS) {
+ CriticalSectionScoped cs(&_apiCs);
+ _captureDelay = delayMS;
+}
+int32_t VideoCaptureImpl::CaptureDelay()
+{
+ CriticalSectionScoped cs(&_apiCs);
+ return _setCaptureDelay;
+}
+
+int32_t VideoCaptureImpl::DeliverCapturedFrame(VideoFrame& captureFrame) {
+ UpdateFrameCount(); // frame count used for local frame rate callback.
+
+ const bool callOnCaptureDelayChanged = _setCaptureDelay != _captureDelay;
+ // Capture delay changed
+ if (_setCaptureDelay != _captureDelay) {
+ _setCaptureDelay = _captureDelay;
+ }
+
+ if (_dataCallBack) {
+ if (callOnCaptureDelayChanged) {
+ _dataCallBack->OnCaptureDelayChanged(_id, _captureDelay);
+ }
+ _dataCallBack->OnIncomingCapturedFrame(_id, captureFrame);
+ }
+
+ return 0;
+}
+
+int32_t VideoCaptureImpl::IncomingFrame(
+ uint8_t* videoFrame,
+ size_t videoFrameLength,
+ const VideoCaptureCapability& frameInfo,
+ int64_t captureTime/*=0*/)
+{
+ CriticalSectionScoped cs(&_apiCs);
+ CriticalSectionScoped cs2(&_callBackCs);
+
+ const int32_t width = frameInfo.width;
+ const int32_t height = frameInfo.height;
+
+ TRACE_EVENT1("webrtc", "VC::IncomingFrame", "capture_time", captureTime);
+
+ if (frameInfo.codecType == kVideoCodecUnknown)
+ {
+ // Not encoded, convert to I420.
+ const VideoType commonVideoType =
+ RawVideoTypeToCommonVideoVideoType(frameInfo.rawType);
+
+ if (frameInfo.rawType != kVideoMJPEG &&
+ CalcBufferSize(commonVideoType, width,
+ abs(height)) != videoFrameLength)
+ {
+ LOG(LS_ERROR) << "Wrong incoming frame length.";
+ return -1;
+ }
+
+ int stride_y = width;
+ int stride_uv = (width + 1) / 2;
+ int target_width = width;
+ int target_height = height;
+
+ // SetApplyRotation doesn't take any lock. Make a local copy here.
+ bool apply_rotation = apply_rotation_;
+
+ if (apply_rotation) {
+      // Swap width and height for 90/270 degree rotations.
+ if (_rotateFrame == kVideoRotation_90 ||
+ _rotateFrame == kVideoRotation_270) {
+ target_width = abs(height);
+ target_height = width;
+ }
+ }
+
+ // TODO(mikhal): Update correct aligned stride values.
+ //Calc16ByteAlignedStride(target_width, &stride_y, &stride_uv);
+ // Setting absolute height (in case it was negative).
+ // In Windows, the image starts bottom left, instead of top left.
+ // Setting a negative source height, inverts the image (within LibYuv).
+ int ret = _captureFrame.CreateEmptyFrame(target_width,
+ abs(target_height),
+ stride_y,
+ stride_uv, stride_uv);
+ if (ret < 0)
+ {
+ LOG(LS_ERROR) << "Failed to create empty frame, this should only "
+ "happen due to bad parameters.";
+ return -1;
+ }
+ const int conversionResult = ConvertToI420(
+ commonVideoType, videoFrame, 0, 0, // No cropping
+ width, height, videoFrameLength,
+ apply_rotation ? _rotateFrame : kVideoRotation_0, &_captureFrame);
+ if (conversionResult < 0)
+ {
+            LOG(LS_ERROR) << "Failed to convert capture frame from type "
+                          << frameInfo.rawType << " to I420.";
+ return -1;
+ }
+
+ if (!apply_rotation) {
+ _captureFrame.set_rotation(_rotateFrame);
+ } else {
+ _captureFrame.set_rotation(kVideoRotation_0);
+ }
+ _captureFrame.set_ntp_time_ms(captureTime);
+ _captureFrame.set_render_time_ms(TickTime::MillisecondTimestamp());
+
+ DeliverCapturedFrame(_captureFrame);
+ }
+ else // Encoded format
+ {
+ assert(false);
+ return -1;
+ }
+
+ return 0;
+}
+
+int32_t VideoCaptureImpl::SetCaptureRotation(VideoRotation rotation) {
+ CriticalSectionScoped cs(&_apiCs);
+ CriticalSectionScoped cs2(&_callBackCs);
+ _rotateFrame = rotation;
+ return 0;
+}
+
+void VideoCaptureImpl::EnableFrameRateCallback(const bool enable) {
+ CriticalSectionScoped cs(&_apiCs);
+ CriticalSectionScoped cs2(&_callBackCs);
+ _frameRateCallBack = enable;
+ if (enable)
+ {
+ _lastFrameRateCallbackTime = TickTime::Now();
+ }
+}
+
+bool VideoCaptureImpl::SetApplyRotation(bool enable) {
+ // We can't take any lock here as it'll cause deadlock with IncomingFrame.
+
+ // The effect of this is the last caller wins.
+ apply_rotation_ = enable;
+ return true;
+}
+
+void VideoCaptureImpl::EnableNoPictureAlarm(const bool enable) {
+ CriticalSectionScoped cs(&_apiCs);
+ CriticalSectionScoped cs2(&_callBackCs);
+ _noPictureAlarmCallBack = enable;
+}
+
+void VideoCaptureImpl::UpdateFrameCount()
+{
+ if (_incomingFrameTimes[0].MicrosecondTimestamp() == 0)
+ {
+        // First frame; there is nothing to shift yet.
+    }
+    else
+    {
+        // Shift the timestamp history one step to make room for the new frame.
+ for (int i = (kFrameRateCountHistorySize - 2); i >= 0; i--)
+ {
+ _incomingFrameTimes[i + 1] = _incomingFrameTimes[i];
+ }
+ }
+ _incomingFrameTimes[0] = TickTime::Now();
+}
+
+uint32_t VideoCaptureImpl::CalculateFrameRate(const TickTime& now)
+{
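+    // Count the frames whose timestamps fall within the
+    // kFrameRateHistoryWindowMs window and divide by the time elapsed since
+    // the oldest of them, rounded to the nearest whole frame per second.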
+ int32_t num = 0;
+ int32_t nrOfFrames = 0;
+ for (num = 1; num < (kFrameRateCountHistorySize - 1); num++)
+ {
+ if (_incomingFrameTimes[num].Ticks() <= 0
+ || (now - _incomingFrameTimes[num]).Milliseconds() > kFrameRateHistoryWindowMs) // don't use data older than 2sec
+ {
+ break;
+ }
+ else
+ {
+ nrOfFrames++;
+ }
+ }
+ if (num > 1)
+ {
+ int64_t diff = (now - _incomingFrameTimes[num - 1]).Milliseconds();
+ if (diff > 0)
+ {
+ return uint32_t((nrOfFrames * 1000.0f / diff) + 0.5f);
+ }
+ }
+
+ return nrOfFrames;
+}
+} // namespace videocapturemodule
+} // namespace webrtc
diff --git a/webrtc/modules/video_capture/video_capture_impl.h b/webrtc/modules/video_capture/video_capture_impl.h
new file mode 100644
index 0000000000..deb989c251
--- /dev/null
+++ b/webrtc/modules/video_capture/video_capture_impl.h
@@ -0,0 +1,142 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_VIDEO_CAPTURE_IMPL_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_VIDEO_CAPTURE_IMPL_H_
+
+/*
+ * video_capture_impl.h
+ */
+
+#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
+#include "webrtc/common_video/rotation.h"
+#include "webrtc/modules/video_capture/include/video_capture.h"
+#include "webrtc/modules/video_capture/video_capture_config.h"
+#include "webrtc/system_wrappers/include/tick_util.h"
+#include "webrtc/video_frame.h"
+
+namespace webrtc
+{
+class CriticalSectionWrapper;
+
+namespace videocapturemodule {
+// Class definitions
+class VideoCaptureImpl: public VideoCaptureModule, public VideoCaptureExternal
+{
+public:
+
+ /*
+ * Create a video capture module object
+ *
+ * id - unique identifier of this video capture module object
+ * deviceUniqueIdUTF8 - name of the device. Available names can be found by using GetDeviceName
+ */
+ static VideoCaptureModule* Create(const int32_t id,
+ const char* deviceUniqueIdUTF8);
+
+ /*
+ * Create a video capture module object used for external capture.
+ *
+ * id - unique identifier of this video capture module object
+ * externalCapture - [out] interface to call when a new frame is captured.
+ */
+ static VideoCaptureModule* Create(const int32_t id,
+ VideoCaptureExternal*& externalCapture);
+
+ static DeviceInfo* CreateDeviceInfo(const int32_t id);
+
+ // Helpers for converting between (integral) degrees and
+ // VideoRotation values. Return 0 on success.
+ static int32_t RotationFromDegrees(int degrees, VideoRotation* rotation);
+ static int32_t RotationInDegrees(VideoRotation rotation, int* degrees);
+
+    // Callbacks
+ virtual void RegisterCaptureDataCallback(
+ VideoCaptureDataCallback& dataCallback);
+ virtual void DeRegisterCaptureDataCallback();
+ virtual void RegisterCaptureCallback(VideoCaptureFeedBack& callBack);
+ virtual void DeRegisterCaptureCallback();
+
+ virtual void SetCaptureDelay(int32_t delayMS);
+ virtual int32_t CaptureDelay();
+ virtual int32_t SetCaptureRotation(VideoRotation rotation);
+ virtual bool SetApplyRotation(bool enable);
+ virtual bool GetApplyRotation() {
+ return apply_rotation_;
+ }
+
+ virtual void EnableFrameRateCallback(const bool enable);
+ virtual void EnableNoPictureAlarm(const bool enable);
+
+ virtual const char* CurrentDeviceName() const;
+
+ // Module handling
+ virtual int64_t TimeUntilNextProcess();
+ virtual int32_t Process();
+
+ // Implement VideoCaptureExternal
+ // |capture_time| must be specified in NTP time format in milliseconds.
+ virtual int32_t IncomingFrame(uint8_t* videoFrame,
+ size_t videoFrameLength,
+ const VideoCaptureCapability& frameInfo,
+ int64_t captureTime = 0);
+
+ // Platform dependent
+ virtual int32_t StartCapture(const VideoCaptureCapability& capability)
+ {
+ _requestedCapability = capability;
+ return -1;
+ }
+ virtual int32_t StopCapture() { return -1; }
+ virtual bool CaptureStarted() {return false; }
+ virtual int32_t CaptureSettings(VideoCaptureCapability& /*settings*/)
+ { return -1; }
+ VideoCaptureEncodeInterface* GetEncodeInterface(const VideoCodec& /*codec*/)
+ { return NULL; }
+
+protected:
+ VideoCaptureImpl(const int32_t id);
+ virtual ~VideoCaptureImpl();
+ int32_t DeliverCapturedFrame(VideoFrame& captureFrame);
+
+ int32_t _id; // Module ID
+    char* _deviceUniqueId; // Current device unique name.
+    CriticalSectionWrapper& _apiCs;
+    int32_t _captureDelay; // Current capture delay. May be changed by platform dependent parts.
+ VideoCaptureCapability _requestedCapability; // Should be set by platform dependent code in StartCapture.
+private:
+ void UpdateFrameCount();
+ uint32_t CalculateFrameRate(const TickTime& now);
+
+ CriticalSectionWrapper& _callBackCs;
+
+    TickTime _lastProcessTime; // Last time the module's Process function was called.
+    TickTime _lastFrameRateCallbackTime; // Last time the frame rate callback was invoked.
+    bool _frameRateCallBack; // True if frame rate callbacks are enabled.
+    bool _noPictureAlarmCallBack; // True if the no-picture alarm is enabled.
+    VideoCaptureAlarm _captureAlarm; // Current state of the no-picture alarm.
+
+ int32_t _setCaptureDelay; // The currently used capture delay
+ VideoCaptureDataCallback* _dataCallBack;
+ VideoCaptureFeedBack* _captureCallBack;
+
+ TickTime _lastProcessFrameCount;
+    TickTime _incomingFrameTimes[kFrameRateCountHistorySize]; // Timestamps of the most recent locally captured frames.
+ VideoRotation _rotateFrame; // Set if the frame should be rotated by the
+ // capture module.
+
+ VideoFrame _captureFrame;
+
+ // Indicate whether rotation should be applied before delivered externally.
+    // Indicates whether rotation should be applied before the frame is delivered externally.
+};
+} // namespace videocapturemodule
+} // namespace webrtc
+#endif // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_VIDEO_CAPTURE_IMPL_H_
diff --git a/webrtc/modules/video_capture/video_capture_internal.h b/webrtc/modules/video_capture/video_capture_internal.h
new file mode 100644
index 0000000000..1a90af130a
--- /dev/null
+++ b/webrtc/modules/video_capture/video_capture_internal.h
@@ -0,0 +1,27 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_VIDEO_CAPTURE_INTERNAL_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_VIDEO_CAPTURE_INTERNAL_H_
+
+#ifdef ANDROID
+#include <jni.h>
+
+namespace webrtc {
+
+// In order to be able to use the internal webrtc video capture
+// for android, the jvm objects must be set via this method.
+int32_t SetCaptureAndroidVM(JavaVM* javaVM, jobject context);
+
+} // namespace webrtc
+
+#endif // ANDROID
+
+#endif // WEBRTC_MODULES_VIDEO_CAPTURE_VIDEO_CAPTURE_INTERNAL_H_
diff --git a/webrtc/modules/video_capture/video_capture_tests.isolate b/webrtc/modules/video_capture/video_capture_tests.isolate
new file mode 100644
index 0000000000..a0668e7263
--- /dev/null
+++ b/webrtc/modules/video_capture/video_capture_tests.isolate
@@ -0,0 +1,23 @@
+# Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+{
+ 'conditions': [
+ ['OS=="linux" or OS=="mac" or OS=="win"', {
+ 'variables': {
+ 'command': [
+ '<(DEPTH)/testing/test_env.py',
+ '<(PRODUCT_DIR)/video_capture_tests<(EXECUTABLE_SUFFIX)',
+ ],
+ 'files': [
+ '<(DEPTH)/testing/test_env.py',
+ '<(PRODUCT_DIR)/video_capture_tests<(EXECUTABLE_SUFFIX)',
+ ],
+ },
+ }],
+ ],
+}
diff --git a/webrtc/modules/video_capture/windows/device_info_ds.cc b/webrtc/modules/video_capture/windows/device_info_ds.cc
new file mode 100644
index 0000000000..066a741839
--- /dev/null
+++ b/webrtc/modules/video_capture/windows/device_info_ds.cc
@@ -0,0 +1,798 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_capture/windows/device_info_ds.h"
+
+#include "webrtc/modules/video_capture/video_capture_config.h"
+#include "webrtc/modules/video_capture/video_capture_delay.h"
+#include "webrtc/modules/video_capture/windows/help_functions_ds.h"
+#include "webrtc/system_wrappers/include/ref_count.h"
+#include "webrtc/system_wrappers/include/trace.h"
+
+#include <Dvdmedia.h>
+#include <Streams.h>
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+const int32_t NoWindowsCaptureDelays = 1;
+const DelayValues WindowsCaptureDelays[NoWindowsCaptureDelays] = {
+ "Microsoft LifeCam Cinema",
+ "usb#vid_045e&pid_075d",
+ {
+ {640,480,125},
+ {640,360,117},
+ {424,240,111},
+ {352,288,111},
+ {320,240,116},
+ {176,144,101},
+ {160,120,109},
+ {1280,720,166},
+ {960,544,126},
+ {800,448,120},
+ {800,600,127}
+ },
+};
+
+// static
+DeviceInfoDS* DeviceInfoDS::Create(const int32_t id)
+{
+ DeviceInfoDS* dsInfo = new DeviceInfoDS(id);
+ if (!dsInfo || dsInfo->Init() != 0)
+ {
+ delete dsInfo;
+ dsInfo = NULL;
+ }
+ return dsInfo;
+}
+
+DeviceInfoDS::DeviceInfoDS(const int32_t id)
+ : DeviceInfoImpl(id), _dsDevEnum(NULL), _dsMonikerDevEnum(NULL),
+ _CoUninitializeIsRequired(true)
+{
+ // 1) Initialize the COM library (make Windows load the DLLs).
+ //
+ // CoInitializeEx must be called at least once, and is usually called only once,
+ // for each thread that uses the COM library. Multiple calls to CoInitializeEx
+ // by the same thread are allowed as long as they pass the same concurrency flag,
+ // but subsequent valid calls return S_FALSE.
+ // To close the COM library gracefully on a thread, each successful call to
+ // CoInitializeEx, including any call that returns S_FALSE, must be balanced
+ // by a corresponding call to CoUninitialize.
+ //
+
+    /* Apartment-threading, while allowing for multiple threads of execution,
+     serializes all incoming calls by requiring that calls to methods of
+     objects created by this thread always run on the same thread, i.e. the
+     apartment/thread that created them. In addition, calls can arrive only at
+     message-queue boundaries (i.e., only during a PeekMessage, SendMessage,
+     DispatchMessage, etc.). Because of this serialization, it is not typically
+     necessary to write concurrency control into the code for the object, other
+     than to avoid calls to PeekMessage and SendMessage during processing that
+     must not be interrupted by other method invocations or calls to other
+     objects in the same apartment/thread. */
+
+ ///CoInitializeEx(NULL, COINIT_APARTMENTTHREADED ); //| COINIT_SPEED_OVER_MEMORY
+ HRESULT hr = CoInitializeEx(NULL, COINIT_MULTITHREADED); // Use COINIT_MULTITHREADED since Voice Engine uses COINIT_MULTITHREADED
+ if (FAILED(hr))
+ {
+ // Avoid calling CoUninitialize() since CoInitializeEx() failed.
+ _CoUninitializeIsRequired = FALSE;
+
+ if (hr == RPC_E_CHANGED_MODE)
+ {
+ // Calling thread has already initialized COM to be used in a single-threaded
+ // apartment (STA). We are then prevented from using STA.
+ // Details: hr = 0x80010106 <=> "Cannot change thread mode after it is set".
+ //
+ WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, _id,
+ "VideoCaptureWindowsDSInfo::VideoCaptureWindowsDSInfo "
+                         "CoInitializeEx(NULL, COINIT_MULTITHREADED) => "
+ "RPC_E_CHANGED_MODE, error 0x%x",
+ hr);
+ }
+ }
+}
+
+DeviceInfoDS::~DeviceInfoDS()
+{
+ RELEASE_AND_CLEAR(_dsMonikerDevEnum);
+ RELEASE_AND_CLEAR(_dsDevEnum);
+ if (_CoUninitializeIsRequired)
+ {
+ CoUninitialize();
+ }
+}
+
+int32_t DeviceInfoDS::Init()
+{
+ HRESULT hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC,
+ IID_ICreateDevEnum, (void **) &_dsDevEnum);
+ if (hr != NOERROR)
+ {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+ "Failed to create CLSID_SystemDeviceEnum, error 0x%x", hr);
+ return -1;
+ }
+ return 0;
+}
+uint32_t DeviceInfoDS::NumberOfDevices()
+{
+ ReadLockScoped cs(_apiLock);
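+    // Passing null buffers makes GetDeviceInfo enumerate and count the
+    // available capture devices without copying any names.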
+ return GetDeviceInfo(0, 0, 0, 0, 0, 0, 0);
+}
+
+int32_t DeviceInfoDS::GetDeviceName(
+ uint32_t deviceNumber,
+ char* deviceNameUTF8,
+ uint32_t deviceNameLength,
+ char* deviceUniqueIdUTF8,
+ uint32_t deviceUniqueIdUTF8Length,
+ char* productUniqueIdUTF8,
+ uint32_t productUniqueIdUTF8Length)
+{
+ ReadLockScoped cs(_apiLock);
+ const int32_t result = GetDeviceInfo(deviceNumber, deviceNameUTF8,
+ deviceNameLength,
+ deviceUniqueIdUTF8,
+ deviceUniqueIdUTF8Length,
+ productUniqueIdUTF8,
+ productUniqueIdUTF8Length);
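+    // GetDeviceInfo returns the total number of valid devices; succeed only
+    // if the requested index is within that range.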
+ return result > (int32_t) deviceNumber ? 0 : -1;
+}
+
+int32_t DeviceInfoDS::GetDeviceInfo(
+ uint32_t deviceNumber,
+ char* deviceNameUTF8,
+ uint32_t deviceNameLength,
+ char* deviceUniqueIdUTF8,
+ uint32_t deviceUniqueIdUTF8Length,
+ char* productUniqueIdUTF8,
+ uint32_t productUniqueIdUTF8Length)
+
+{
+
+ // enumerate all video capture devices
+ RELEASE_AND_CLEAR(_dsMonikerDevEnum);
+ HRESULT hr =
+ _dsDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
+ &_dsMonikerDevEnum, 0);
+ if (hr != NOERROR)
+ {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+ "Failed to enumerate CLSID_SystemDeviceEnum, error 0x%x."
+                     " No webcam exists?", hr);
+ return 0;
+ }
+
+ _dsMonikerDevEnum->Reset();
+ ULONG cFetched;
+ IMoniker *pM;
+ int index = 0;
+ while (S_OK == _dsMonikerDevEnum->Next(1, &pM, &cFetched))
+ {
+ IPropertyBag *pBag;
+ hr = pM->BindToStorage(0, 0, IID_IPropertyBag, (void **) &pBag);
+ if (S_OK == hr)
+ {
+ // Find the description or friendly name.
+ VARIANT varName;
+ VariantInit(&varName);
+ hr = pBag->Read(L"Description", &varName, 0);
+ if (FAILED(hr))
+ {
+ hr = pBag->Read(L"FriendlyName", &varName, 0);
+ }
+ if (SUCCEEDED(hr))
+ {
+ // ignore all VFW drivers
+ if ((wcsstr(varName.bstrVal, (L"(VFW)")) == NULL) &&
+ (_wcsnicmp(varName.bstrVal, (L"Google Camera Adapter"),21)
+ != 0))
+ {
+ // Found a valid device.
+ if (index == static_cast<int>(deviceNumber))
+ {
+ int convResult = 0;
+ if (deviceNameLength > 0)
+ {
+ convResult = WideCharToMultiByte(CP_UTF8, 0,
+ varName.bstrVal, -1,
+ (char*) deviceNameUTF8,
+ deviceNameLength, NULL,
+ NULL);
+ if (convResult == 0)
+ {
+ WEBRTC_TRACE(webrtc::kTraceError,
+ webrtc::kTraceVideoCapture, _id,
+ "Failed to convert device name to UTF8. %d",
+ GetLastError());
+ return -1;
+ }
+ }
+ if (deviceUniqueIdUTF8Length > 0)
+ {
+ hr = pBag->Read(L"DevicePath", &varName, 0);
+ if (FAILED(hr))
+ {
+ strncpy_s((char *) deviceUniqueIdUTF8,
+ deviceUniqueIdUTF8Length,
+ (char *) deviceNameUTF8, convResult);
+ WEBRTC_TRACE(webrtc::kTraceError,
+ webrtc::kTraceVideoCapture, _id,
+ "Failed to get deviceUniqueIdUTF8 using deviceNameUTF8");
+ }
+ else
+ {
+ convResult = WideCharToMultiByte(
+ CP_UTF8,
+ 0,
+ varName.bstrVal,
+ -1,
+ (char*) deviceUniqueIdUTF8,
+ deviceUniqueIdUTF8Length,
+ NULL, NULL);
+ if (convResult == 0)
+ {
+ WEBRTC_TRACE(webrtc::kTraceError,
+ webrtc::kTraceVideoCapture, _id,
+ "Failed to convert device name to UTF8. %d",
+ GetLastError());
+ return -1;
+ }
+ if (productUniqueIdUTF8
+ && productUniqueIdUTF8Length > 0)
+ {
+ GetProductId(deviceUniqueIdUTF8,
+ productUniqueIdUTF8,
+ productUniqueIdUTF8Length);
+ }
+ }
+ }
+
+ }
+ ++index; // increase the number of valid devices
+ }
+ }
+ VariantClear(&varName);
+ pBag->Release();
+ pM->Release();
+ }
+
+ }
+ if (deviceNameLength)
+ {
+ WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id, "%s %s",
+ __FUNCTION__, deviceNameUTF8);
+ }
+ return index;
+}
+
+IBaseFilter * DeviceInfoDS::GetDeviceFilter(
+ const char* deviceUniqueIdUTF8,
+ char* productUniqueIdUTF8,
+ uint32_t productUniqueIdUTF8Length)
+{
+
+ const int32_t deviceUniqueIdUTF8Length =
+ (int32_t) strlen((char*) deviceUniqueIdUTF8); // UTF8 is also NULL terminated
+ if (deviceUniqueIdUTF8Length > kVideoCaptureUniqueNameLength)
+ {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+ "Device name too long");
+ return NULL;
+ }
+
+ // enumerate all video capture devices
+ RELEASE_AND_CLEAR(_dsMonikerDevEnum);
+ HRESULT hr = _dsDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
+ &_dsMonikerDevEnum, 0);
+ if (hr != NOERROR)
+ {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+ "Failed to enumerate CLSID_SystemDeviceEnum, error 0x%x."
+                     " No webcam exists?", hr);
+ return 0;
+ }
+ _dsMonikerDevEnum->Reset();
+ ULONG cFetched;
+ IMoniker *pM;
+
+ IBaseFilter *captureFilter = NULL;
+ bool deviceFound = false;
+ while (S_OK == _dsMonikerDevEnum->Next(1, &pM, &cFetched) && !deviceFound)
+ {
+ IPropertyBag *pBag;
+ hr = pM->BindToStorage(0, 0, IID_IPropertyBag, (void **) &pBag);
+ if (S_OK == hr)
+ {
+ // Find the description or friendly name.
+ VARIANT varName;
+ VariantInit(&varName);
+ if (deviceUniqueIdUTF8Length > 0)
+ {
+ hr = pBag->Read(L"DevicePath", &varName, 0);
+ if (FAILED(hr))
+ {
+ hr = pBag->Read(L"Description", &varName, 0);
+ if (FAILED(hr))
+ {
+ hr = pBag->Read(L"FriendlyName", &varName, 0);
+ }
+ }
+ if (SUCCEEDED(hr))
+ {
+ char tempDevicePathUTF8[256];
+ tempDevicePathUTF8[0] = 0;
+ WideCharToMultiByte(CP_UTF8, 0, varName.bstrVal, -1,
+ tempDevicePathUTF8,
+ sizeof(tempDevicePathUTF8), NULL,
+ NULL);
+ if (strncmp(tempDevicePathUTF8,
+ (const char*) deviceUniqueIdUTF8,
+ deviceUniqueIdUTF8Length) == 0)
+ {
+ // We have found the requested device
+ deviceFound = true;
+ hr = pM->BindToObject(0, 0, IID_IBaseFilter,
+ (void**) &captureFilter);
+                        if (FAILED(hr))
+ {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture,
+ _id, "Failed to bind to the selected capture device %d",hr);
+ }
+
+ if (productUniqueIdUTF8
+ && productUniqueIdUTF8Length > 0) // Get the device name
+ {
+
+ GetProductId(deviceUniqueIdUTF8,
+ productUniqueIdUTF8,
+ productUniqueIdUTF8Length);
+ }
+
+ }
+ }
+ }
+ VariantClear(&varName);
+ pBag->Release();
+ pM->Release();
+ }
+ }
+ return captureFilter;
+}
+
+int32_t DeviceInfoDS::GetWindowsCapability(
+ const int32_t capabilityIndex,
+ VideoCaptureCapabilityWindows& windowsCapability) {
+ ReadLockScoped cs(_apiLock);
+
+ if (capabilityIndex < 0 || static_cast<size_t>(capabilityIndex) >=
+ _captureCapabilitiesWindows.size()) {
+ return -1;
+ }
+
+ windowsCapability = _captureCapabilitiesWindows[capabilityIndex];
+ return 0;
+}
+
+int32_t DeviceInfoDS::CreateCapabilityMap(
+ const char* deviceUniqueIdUTF8)
+
+{
+ // Reset old capability list
+ _captureCapabilities.clear();
+
+ const int32_t deviceUniqueIdUTF8Length =
+ (int32_t) strlen((char*) deviceUniqueIdUTF8);
+ if (deviceUniqueIdUTF8Length > kVideoCaptureUniqueNameLength)
+ {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+ "Device name too long");
+ return -1;
+ }
+ WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+ "CreateCapabilityMap called for device %s", deviceUniqueIdUTF8);
+
+
+ char productId[kVideoCaptureProductIdLength];
+ IBaseFilter* captureDevice = DeviceInfoDS::GetDeviceFilter(
+ deviceUniqueIdUTF8,
+ productId,
+ kVideoCaptureProductIdLength);
+ if (!captureDevice)
+ return -1;
+ IPin* outputCapturePin = GetOutputPin(captureDevice, GUID_NULL);
+ if (!outputCapturePin)
+ {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+ "Failed to get capture device output pin");
+ RELEASE_AND_CLEAR(captureDevice);
+ return -1;
+ }
+ IAMExtDevice* extDevice = NULL;
+ HRESULT hr = captureDevice->QueryInterface(IID_IAMExtDevice,
+ (void **) &extDevice);
+ if (SUCCEEDED(hr) && extDevice)
+ {
+ WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+ "This is an external device");
+ extDevice->Release();
+ }
+
+ IAMStreamConfig* streamConfig = NULL;
+ hr = outputCapturePin->QueryInterface(IID_IAMStreamConfig,
+ (void**) &streamConfig);
+ if (FAILED(hr))
+ {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+ "Failed to get IID_IAMStreamConfig interface from capture device");
+ return -1;
+ }
+
+ // this gets the FPS
+ IAMVideoControl* videoControlConfig = NULL;
+ HRESULT hrVC = captureDevice->QueryInterface(IID_IAMVideoControl,
+ (void**) &videoControlConfig);
+ if (FAILED(hrVC))
+ {
+ WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, _id,
+ "IID_IAMVideoControl Interface NOT SUPPORTED");
+ }
+
+ AM_MEDIA_TYPE *pmt = NULL;
+ VIDEO_STREAM_CONFIG_CAPS caps;
+ int count, size;
+
+ hr = streamConfig->GetNumberOfCapabilities(&count, &size);
+ if (FAILED(hr))
+ {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+ "Failed to GetNumberOfCapabilities");
+ RELEASE_AND_CLEAR(videoControlConfig);
+ RELEASE_AND_CLEAR(streamConfig);
+ RELEASE_AND_CLEAR(outputCapturePin);
+ RELEASE_AND_CLEAR(captureDevice);
+ return -1;
+ }
+
+    // Check if the device supports formattype == FORMAT_VideoInfo2 and
+    // FORMAT_VideoInfo. Prefer FORMAT_VideoInfo since some cameras (ZureCam)
+    // have been seen having problems with MJPEG and FORMAT_VideoInfo2.
+    // The interlace flag is only supported in FORMAT_VideoInfo2.
+ bool supportFORMAT_VideoInfo2 = false;
+ bool supportFORMAT_VideoInfo = false;
+ bool foundInterlacedFormat = false;
+ GUID preferedVideoFormat = FORMAT_VideoInfo;
+ for (int32_t tmp = 0; tmp < count; ++tmp)
+ {
+ hr = streamConfig->GetStreamCaps(tmp, &pmt,
+ reinterpret_cast<BYTE*> (&caps));
+ if (!FAILED(hr))
+ {
+ if (pmt->majortype == MEDIATYPE_Video
+ && pmt->formattype == FORMAT_VideoInfo2)
+ {
+ WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
+                             " Device supports FORMAT_VideoInfo2");
+ supportFORMAT_VideoInfo2 = true;
+ VIDEOINFOHEADER2* h =
+ reinterpret_cast<VIDEOINFOHEADER2*> (pmt->pbFormat);
+ assert(h);
+ foundInterlacedFormat |= h->dwInterlaceFlags
+ & (AMINTERLACE_IsInterlaced
+ | AMINTERLACE_DisplayModeBobOnly);
+ }
+ if (pmt->majortype == MEDIATYPE_Video
+ && pmt->formattype == FORMAT_VideoInfo)
+ {
+ WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
+                             " Device supports FORMAT_VideoInfo");
+ supportFORMAT_VideoInfo = true;
+ }
+ }
+ }
+ if (supportFORMAT_VideoInfo2)
+ {
+ if (supportFORMAT_VideoInfo && !foundInterlacedFormat)
+ {
+ preferedVideoFormat = FORMAT_VideoInfo;
+ }
+ else
+ {
+ preferedVideoFormat = FORMAT_VideoInfo2;
+ }
+ }
+
+ for (int32_t tmp = 0; tmp < count; ++tmp)
+ {
+ hr = streamConfig->GetStreamCaps(tmp, &pmt,
+ reinterpret_cast<BYTE*> (&caps));
+ if (FAILED(hr))
+ {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+ "Failed to GetStreamCaps");
+ RELEASE_AND_CLEAR(videoControlConfig);
+ RELEASE_AND_CLEAR(streamConfig);
+ RELEASE_AND_CLEAR(outputCapturePin);
+ RELEASE_AND_CLEAR(captureDevice);
+ return -1;
+ }
+
+ if (pmt->majortype == MEDIATYPE_Video
+ && pmt->formattype == preferedVideoFormat)
+ {
+
+ VideoCaptureCapabilityWindows capability;
+ int64_t avgTimePerFrame = 0;
+
+ if (pmt->formattype == FORMAT_VideoInfo)
+ {
+ VIDEOINFOHEADER* h =
+ reinterpret_cast<VIDEOINFOHEADER*> (pmt->pbFormat);
+ assert(h);
+ capability.directShowCapabilityIndex = tmp;
+ capability.width = h->bmiHeader.biWidth;
+ capability.height = h->bmiHeader.biHeight;
+ avgTimePerFrame = h->AvgTimePerFrame;
+ }
+ if (pmt->formattype == FORMAT_VideoInfo2)
+ {
+ VIDEOINFOHEADER2* h =
+ reinterpret_cast<VIDEOINFOHEADER2*> (pmt->pbFormat);
+ assert(h);
+ capability.directShowCapabilityIndex = tmp;
+ capability.width = h->bmiHeader.biWidth;
+ capability.height = h->bmiHeader.biHeight;
+ capability.interlaced = h->dwInterlaceFlags
+ & (AMINTERLACE_IsInterlaced
+ | AMINTERLACE_DisplayModeBobOnly);
+ avgTimePerFrame = h->AvgTimePerFrame;
+ }
+
+ if (hrVC == S_OK)
+ {
+ LONGLONG *frameDurationList;
+ LONGLONG maxFPS;
+ long listSize;
+ SIZE size;
+ size.cx = capability.width;
+ size.cy = capability.height;
+
+                // GetMaxAvailableFrameRate doesn't always return the max frame
+                // rate, e.g. on the Logitech Notebook camera. This may be due
+                // to a bug in that API, since the GetFrameRateList array is
+                // reversed on that camera, so a utility method is used instead.
+                // We can't assume the first value is the max fps.
+ hrVC = videoControlConfig->GetFrameRateList(outputCapturePin,
+ tmp, size,
+ &listSize,
+ &frameDurationList);
+
+ // On some odd cameras, you may get a 0 for duration.
+ // GetMaxOfFrameArray returns the lowest duration (highest FPS)
+ if (hrVC == S_OK && listSize > 0 &&
+ 0 != (maxFPS = GetMaxOfFrameArray(frameDurationList,
+ listSize)))
+ {
+ capability.maxFPS = static_cast<int> (10000000
+ / maxFPS);
+ capability.supportFrameRateControl = true;
+ }
+ else // use existing method
+ {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture,
+ _id,
+ "GetMaxAvailableFrameRate NOT SUPPORTED");
+ if (avgTimePerFrame > 0)
+ capability.maxFPS = static_cast<int> (10000000
+ / avgTimePerFrame);
+ else
+ capability.maxFPS = 0;
+ }
+ }
+ else // use existing method in case IAMVideoControl is not supported
+ {
+ if (avgTimePerFrame > 0)
+ capability.maxFPS = static_cast<int> (10000000
+ / avgTimePerFrame);
+ else
+ capability.maxFPS = 0;
+ }
+
+ // can't switch MEDIATYPE :~(
+ if (pmt->subtype == MEDIASUBTYPE_I420)
+ {
+ capability.rawType = kVideoI420;
+ }
+ else if (pmt->subtype == MEDIASUBTYPE_IYUV)
+ {
+ capability.rawType = kVideoIYUV;
+ }
+ else if (pmt->subtype == MEDIASUBTYPE_RGB24)
+ {
+ capability.rawType = kVideoRGB24;
+ }
+ else if (pmt->subtype == MEDIASUBTYPE_YUY2)
+ {
+ capability.rawType = kVideoYUY2;
+ }
+ else if (pmt->subtype == MEDIASUBTYPE_RGB565)
+ {
+ capability.rawType = kVideoRGB565;
+ }
+ else if (pmt->subtype == MEDIASUBTYPE_MJPG)
+ {
+ capability.rawType = kVideoMJPEG;
+ }
+ else if (pmt->subtype == MEDIASUBTYPE_dvsl
+ || pmt->subtype == MEDIASUBTYPE_dvsd
+ || pmt->subtype == MEDIASUBTYPE_dvhd) // If this is an external DV camera
+ {
+                capability.rawType = kVideoYUY2; // MS DV filter seems to create this type.
+            }
+            else if (pmt->subtype == MEDIASUBTYPE_UYVY) // Seen used by DeckLink capture cards.
+            {
+                capability.rawType = kVideoUYVY;
+            }
+            else if (pmt->subtype == MEDIASUBTYPE_HDYC) // Seen used by DeckLink capture cards. Uses BT.709 color. Not entirely correct to use UYVY. http://en.wikipedia.org/wiki/YCbCr
+ {
+ WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, _id,
+ "Device support HDYC.");
+ capability.rawType = kVideoUYVY;
+ }
+ else
+ {
+ WCHAR strGuid[39];
+ StringFromGUID2(pmt->subtype, strGuid, 39);
+                WEBRTC_TRACE(webrtc::kTraceWarning,
+                             webrtc::kTraceVideoCapture, _id,
+                             "Device supports unknown media type %ls, width %d, height %d",
+                             strGuid, capability.width, capability.height);
+ continue;
+ }
+
+ // Get the expected capture delay from the static list
+ capability.expectedCaptureDelay
+ = GetExpectedCaptureDelay(WindowsCaptureDelays,
+ NoWindowsCaptureDelays,
+ productId,
+ capability.width,
+ capability.height);
+ _captureCapabilities.push_back(capability);
+ _captureCapabilitiesWindows.push_back(capability);
+ WEBRTC_TRACE( webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+ "Camera capability, width:%d height:%d type:%d fps:%d",
+ capability.width, capability.height,
+ capability.rawType, capability.maxFPS);
+ }
+ DeleteMediaType(pmt);
+ pmt = NULL;
+ }
+ RELEASE_AND_CLEAR(streamConfig);
+ RELEASE_AND_CLEAR(videoControlConfig);
+ RELEASE_AND_CLEAR(outputCapturePin);
+ RELEASE_AND_CLEAR(captureDevice); // Release the capture device
+
+    // Store the last used device name.
+ _lastUsedDeviceNameLength = deviceUniqueIdUTF8Length;
+ _lastUsedDeviceName = (char*) realloc(_lastUsedDeviceName,
+ _lastUsedDeviceNameLength
+ + 1);
+ memcpy(_lastUsedDeviceName, deviceUniqueIdUTF8, _lastUsedDeviceNameLength+ 1);
+ WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+ "CreateCapabilityMap %d", _captureCapabilities.size());
+
+ return static_cast<int32_t>(_captureCapabilities.size());
+}
+
+/* Constructs a product ID from the Windows DevicePath. On a USB device the
+   DevicePath contains the product id and vendor id. This seems to work for
+   FireWire as well.
+   Example device paths:
+   "\\?\usb#vid_0408&pid_2010&mi_00#7&258e7aaf&0&0000#{65e8773d-8f56-11d0-a3b9-00a0c9223196}\global"
+   "\\?\avc#sony&dv-vcr&camcorder&dv#65b2d50301460008#{65e8773d-8f56-11d0-a3b9-00a0c9223196}\global"
+ */
+void DeviceInfoDS::GetProductId(const char* devicePath,
+ char* productUniqueIdUTF8,
+ uint32_t productUniqueIdUTF8Length)
+{
+ *productUniqueIdUTF8 = '\0';
+ char* startPos = strstr((char*) devicePath, "\\\\?\\");
+ if (!startPos)
+ {
+ strncpy_s((char*) productUniqueIdUTF8, productUniqueIdUTF8Length, "", 1);
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
+ "Failed to get the product Id");
+ return;
+ }
+ startPos += 4;
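+    // The product id is everything between the "\\?\" prefix and the second
+    // '&' in the DevicePath, e.g. "usb#vid_0408&pid_2010" in the example
+    // above.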
+
+ char* pos = strchr(startPos, '&');
+ if (!pos || pos >= (char*) devicePath + strlen((char*) devicePath))
+ {
+ strncpy_s((char*) productUniqueIdUTF8, productUniqueIdUTF8Length, "", 1);
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
+ "Failed to get the product Id");
+ return;
+ }
+ // Find the second occurrence.
+ pos = strchr(pos + 1, '&');
+    uint32_t bytesToCopy = pos ? (uint32_t)(pos - startPos) : 0;
+    if (pos && (bytesToCopy <= productUniqueIdUTF8Length) && bytesToCopy
+        <= kVideoCaptureProductIdLength)
+ {
+ strncpy_s((char*) productUniqueIdUTF8, productUniqueIdUTF8Length,
+ (char*) startPos, bytesToCopy);
+ }
+ else
+ {
+ strncpy_s((char*) productUniqueIdUTF8, productUniqueIdUTF8Length, "", 1);
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
+ "Failed to get the product Id");
+ }
+}
+
+int32_t DeviceInfoDS::DisplayCaptureSettingsDialogBox(
+ const char* deviceUniqueIdUTF8,
+ const char* dialogTitleUTF8,
+ void* parentWindow,
+ uint32_t positionX,
+ uint32_t positionY)
+{
+ ReadLockScoped cs(_apiLock);
+ HWND window = (HWND) parentWindow;
+
+ IBaseFilter* filter = GetDeviceFilter(deviceUniqueIdUTF8, NULL, 0);
+ if (!filter)
+ return -1;
+
+ ISpecifyPropertyPages* pPages = NULL;
+ CAUUID uuid;
+ HRESULT hr = S_OK;
+
+ hr = filter->QueryInterface(IID_ISpecifyPropertyPages, (LPVOID*) &pPages);
+ if (!SUCCEEDED(hr))
+ {
+ filter->Release();
+ return -1;
+ }
+ hr = pPages->GetPages(&uuid);
+ if (!SUCCEEDED(hr))
+ {
+ filter->Release();
+ return -1;
+ }
+
+ WCHAR tempDialogTitleWide[256];
+ tempDialogTitleWide[0] = 0;
+ int size = 255;
+
+ // UTF-8 to wide char
+ MultiByteToWideChar(CP_UTF8, 0, (char*) dialogTitleUTF8, -1,
+ tempDialogTitleWide, size);
+
+ // Invoke a dialog box to display.
+
+ hr = OleCreatePropertyFrame(window, // You must create the parent window.
+ positionX, // Horizontal position for the dialog box.
+ positionY, // Vertical position for the dialog box.
+ tempDialogTitleWide,// String used for the dialog box caption.
+ 1, // Number of pointers passed in pPlugin.
+ (LPUNKNOWN*) &filter, // Pointer to the filter.
+ uuid.cElems, // Number of property pages.
+ uuid.pElems, // Array of property page CLSIDs.
+ LOCALE_USER_DEFAULT, // Locale ID for the dialog box.
+ 0, NULL); // Reserved
+ // Release memory.
+ if (uuid.pElems)
+ {
+ CoTaskMemFree(uuid.pElems);
+ }
+ filter->Release();
+ return 0;
+}
+} // namespace videocapturemodule
+} // namespace webrtc
diff --git a/webrtc/modules/video_capture/windows/device_info_ds.h b/webrtc/modules/video_capture/windows/device_info_ds.h
new file mode 100644
index 0000000000..7acbfa622d
--- /dev/null
+++ b/webrtc/modules/video_capture/windows/device_info_ds.h
@@ -0,0 +1,106 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_DEVICE_INFO_DS_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_DEVICE_INFO_DS_H_
+
+#include "webrtc/modules/video_capture/device_info_impl.h"
+#include "webrtc/modules/video_capture/video_capture_impl.h"
+
+#include <Dshow.h>
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+struct VideoCaptureCapabilityWindows: public VideoCaptureCapability
+{
+ uint32_t directShowCapabilityIndex;
+ bool supportFrameRateControl;
+ VideoCaptureCapabilityWindows()
+ {
+ directShowCapabilityIndex = 0;
+ supportFrameRateControl = false;
+ }
+};
+
+class DeviceInfoDS: public DeviceInfoImpl
+{
+public:
+ // Factory function.
+ static DeviceInfoDS* Create(const int32_t id);
+
+ DeviceInfoDS(const int32_t id);
+ virtual ~DeviceInfoDS();
+
+ int32_t Init();
+ virtual uint32_t NumberOfDevices();
+
+ /*
+     * Gets the name and unique id of the capture device at the given index.
+ */
+ virtual int32_t
+ GetDeviceName(uint32_t deviceNumber,
+ char* deviceNameUTF8,
+ uint32_t deviceNameLength,
+ char* deviceUniqueIdUTF8,
+ uint32_t deviceUniqueIdUTF8Length,
+ char* productUniqueIdUTF8,
+ uint32_t productUniqueIdUTF8Length);
+
+ /*
+     * Display the OS/capture device specific settings dialog.
+ */
+ virtual int32_t
+ DisplayCaptureSettingsDialogBox(
+ const char* deviceUniqueIdUTF8,
+ const char* dialogTitleUTF8,
+ void* parentWindow,
+ uint32_t positionX,
+ uint32_t positionY);
+
+ // Windows specific
+
+    /* Gets a capture device filter.
+     The user of this API is responsible for releasing the filter when it is
+     no longer needed.
+     */
+ IBaseFilter * GetDeviceFilter(const char* deviceUniqueIdUTF8,
+ char* productUniqueIdUTF8 = NULL,
+ uint32_t productUniqueIdUTF8Length = 0);
+
+ int32_t
+ GetWindowsCapability(const int32_t capabilityIndex,
+ VideoCaptureCapabilityWindows& windowsCapability);
+
+ static void GetProductId(const char* devicePath,
+ char* productUniqueIdUTF8,
+ uint32_t productUniqueIdUTF8Length);
+
+protected:
+ int32_t GetDeviceInfo(uint32_t deviceNumber,
+ char* deviceNameUTF8,
+ uint32_t deviceNameLength,
+ char* deviceUniqueIdUTF8,
+ uint32_t deviceUniqueIdUTF8Length,
+ char* productUniqueIdUTF8,
+ uint32_t productUniqueIdUTF8Length);
+
+ virtual int32_t
+ CreateCapabilityMap(const char* deviceUniqueIdUTF8);
+
+private:
+ ICreateDevEnum* _dsDevEnum;
+ IEnumMoniker* _dsMonikerDevEnum;
+ bool _CoUninitializeIsRequired;
+ std::vector<VideoCaptureCapabilityWindows> _captureCapabilitiesWindows;
+};
+} // namespace videocapturemodule
+} // namespace webrtc
+#endif // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_DEVICE_INFO_DS_H_
diff --git a/webrtc/modules/video_capture/windows/device_info_mf.cc b/webrtc/modules/video_capture/windows/device_info_mf.cc
new file mode 100644
index 0000000000..87c4026384
--- /dev/null
+++ b/webrtc/modules/video_capture/windows/device_info_mf.cc
@@ -0,0 +1,51 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_capture/windows/device_info_mf.h"
+
+namespace webrtc {
+namespace videocapturemodule {
+
+DeviceInfoMF::DeviceInfoMF(const int32_t id) : DeviceInfoImpl(id) {
+}
+
+DeviceInfoMF::~DeviceInfoMF() {
+}
+
+int32_t DeviceInfoMF::Init() {
+ return -1;
+}
+
+uint32_t DeviceInfoMF::NumberOfDevices() {
+ return 0;
+}
+
+int32_t DeviceInfoMF::GetDeviceName(
+ uint32_t deviceNumber,
+ char* deviceNameUTF8,
+ uint32_t deviceNameLength,
+ char* deviceUniqueIdUTF8,
+ uint32_t deviceUniqueIdUTF8Length,
+ char* productUniqueIdUTF8,
+ uint32_t productUniqueIdUTF8Length) {
+ return -1;
+}
+
+int32_t DeviceInfoMF::DisplayCaptureSettingsDialogBox(
+ const char* deviceUniqueIdUTF8,
+ const char* dialogTitleUTF8,
+ void* parentWindow,
+ uint32_t positionX,
+ uint32_t positionY) {
+ return -1;
+}
+
+} // namespace videocapturemodule
+} // namespace webrtc
diff --git a/webrtc/modules/video_capture/windows/device_info_mf.h b/webrtc/modules/video_capture/windows/device_info_mf.h
new file mode 100644
index 0000000000..b787e002c9
--- /dev/null
+++ b/webrtc/modules/video_capture/windows/device_info_mf.h
@@ -0,0 +1,43 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_WINDOWS_DEVICE_INFO_MF_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_WINDOWS_DEVICE_INFO_MF_H_
+
+#include "webrtc/modules/video_capture/device_info_impl.h"
+
+namespace webrtc {
+namespace videocapturemodule {
+
+// Provides video capture device information using the Media Foundation API.
+class DeviceInfoMF : public DeviceInfoImpl {
+ public:
+ explicit DeviceInfoMF(const int32_t id);
+ virtual ~DeviceInfoMF();
+
+ int32_t Init();
+ virtual uint32_t NumberOfDevices();
+
+ virtual int32_t GetDeviceName(uint32_t deviceNumber, char* deviceNameUTF8,
+ uint32_t deviceNameLength,
+ char* deviceUniqueIdUTF8,
+ uint32_t deviceUniqueIdUTF8Length,
+ char* productUniqueIdUTF8,
+ uint32_t productUniqueIdUTF8Length);
+
+ virtual int32_t DisplayCaptureSettingsDialogBox(
+ const char* deviceUniqueIdUTF8, const char* dialogTitleUTF8,
+ void* parentWindow, uint32_t positionX, uint32_t positionY);
+};
+
+} // namespace videocapturemodule
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CAPTURE_WINDOWS_DEVICE_INFO_MF_H_
diff --git a/webrtc/modules/video_capture/windows/help_functions_ds.cc b/webrtc/modules/video_capture/windows/help_functions_ds.cc
new file mode 100644
index 0000000000..057bffb0af
--- /dev/null
+++ b/webrtc/modules/video_capture/windows/help_functions_ds.cc
@@ -0,0 +1,119 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <initguid.h> // Must come before the help_functions_ds.h include so
+ // that DEFINE_GUID() entries will be defined in this
+ // object file.
+
+#include "webrtc/modules/video_capture/windows/help_functions_ds.h"
+
+#include <cguid.h>
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+// Returns the minimum value in the array, i.e. the shortest frame duration,
+// which corresponds to the maximum frame rate.
+LONGLONG GetMaxOfFrameArray(LONGLONG *maxFps, long size)
+{
+ LONGLONG maxFPS = maxFps[0];
+ for (int i = 0; i < size; i++)
+ {
+ if (maxFPS > maxFps[i])
+ maxFPS = maxFps[i];
+ }
+ return maxFPS;
+}
+
+IPin* GetInputPin(IBaseFilter* filter)
+{
+ HRESULT hr;
+ IPin* pin = NULL;
+ IEnumPins* pPinEnum = NULL;
+ filter->EnumPins(&pPinEnum);
+ if (pPinEnum == NULL)
+ {
+ return NULL;
+ }
+
+ // get first unconnected pin
+ hr = pPinEnum->Reset(); // set to first pin
+
+ while (S_OK == pPinEnum->Next(1, &pin, NULL))
+ {
+ PIN_DIRECTION pPinDir;
+ pin->QueryDirection(&pPinDir);
+ if (PINDIR_INPUT == pPinDir) // This is an input pin
+ {
+ IPin* tempPin = NULL;
+            if (S_OK != pin->ConnectedTo(&tempPin)) // The pin is not connected
+ {
+ pPinEnum->Release();
+ return pin;
+ }
+ }
+ pin->Release();
+ }
+ pPinEnum->Release();
+ return NULL;
+}
+
+IPin* GetOutputPin(IBaseFilter* filter, REFGUID Category)
+{
+ HRESULT hr;
+ IPin* pin = NULL;
+ IEnumPins* pPinEnum = NULL;
+ filter->EnumPins(&pPinEnum);
+ if (pPinEnum == NULL)
+ {
+ return NULL;
+ }
+    // Find the first output pin, optionally matching the requested category.
+ hr = pPinEnum->Reset(); // set to first pin
+ while (S_OK == pPinEnum->Next(1, &pin, NULL))
+ {
+ PIN_DIRECTION pPinDir;
+ pin->QueryDirection(&pPinDir);
+ if (PINDIR_OUTPUT == pPinDir) // This is an output pin
+ {
+ if (Category == GUID_NULL || PinMatchesCategory(pin, Category))
+ {
+ pPinEnum->Release();
+ return pin;
+ }
+ }
+ pin->Release();
+ pin = NULL;
+ }
+ pPinEnum->Release();
+ return NULL;
+}
+
+BOOL PinMatchesCategory(IPin *pPin, REFGUID Category)
+{
+ BOOL bFound = FALSE;
+ IKsPropertySet *pKs = NULL;
+ HRESULT hr = pPin->QueryInterface(IID_PPV_ARGS(&pKs));
+ if (SUCCEEDED(hr))
+ {
+ GUID PinCategory;
+ DWORD cbReturned;
+ hr = pKs->Get(AMPROPSETID_Pin, AMPROPERTY_PIN_CATEGORY, NULL, 0, &PinCategory,
+ sizeof(GUID), &cbReturned);
+ if (SUCCEEDED(hr) && (cbReturned == sizeof(GUID)))
+ {
+ bFound = (PinCategory == Category);
+ }
+ pKs->Release();
+ }
+ return bFound;
+}
+} // namespace videocapturemodule
+} // namespace webrtc
diff --git a/webrtc/modules/video_capture/windows/help_functions_ds.h b/webrtc/modules/video_capture/windows/help_functions_ds.h
new file mode 100644
index 0000000000..d675f9299a
--- /dev/null
+++ b/webrtc/modules/video_capture/windows/help_functions_ds.h
@@ -0,0 +1,35 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_HELP_FUNCTIONS_DS_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_HELP_FUNCTIONS_DS_H_
+
+#include <dshow.h>
+
+DEFINE_GUID(MEDIASUBTYPE_I420, 0x30323449, 0x0000, 0x0010, 0x80, 0x00, 0x00,
+ 0xAA, 0x00, 0x38, 0x9B, 0x71);
+DEFINE_GUID(MEDIASUBTYPE_HDYC, 0x43594448, 0x0000, 0x0010, 0x80, 0x00, 0x00,
+ 0xAA, 0x00, 0x38, 0x9B, 0x71);
+
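+// Releases a COM interface pointer if it is non-null and resets it to NULL.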
+#define RELEASE_AND_CLEAR(p) if (p) { (p) -> Release () ; (p) = NULL ; }
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+LONGLONG GetMaxOfFrameArray(LONGLONG *maxFps, long size);
+
+IPin* GetInputPin(IBaseFilter* filter);
+IPin* GetOutputPin(IBaseFilter* filter, REFGUID Category);
+BOOL PinMatchesCategory(IPin *pPin, REFGUID Category);
+
+} // namespace videocapturemodule
+} // namespace webrtc
+#endif // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_HELP_FUNCTIONS_DS_H_
diff --git a/webrtc/modules/video_capture/windows/sink_filter_ds.cc b/webrtc/modules/video_capture/windows/sink_filter_ds.cc
new file mode 100644
index 0000000000..ba0f605e69
--- /dev/null
+++ b/webrtc/modules/video_capture/windows/sink_filter_ds.cc
@@ -0,0 +1,519 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_capture/windows/sink_filter_ds.h"
+
+#include "webrtc/base/platform_thread.h"
+#include "webrtc/modules/video_capture/windows/help_functions_ds.h"
+#include "webrtc/system_wrappers/include/trace.h"
+
+#include <Dvdmedia.h> // VIDEOINFOHEADER2
+#include <initguid.h>
+
+#define DELETE_RESET(p) { delete (p) ; (p) = NULL ;}
+
+DEFINE_GUID(CLSID_SINKFILTER, 0x88cdbbdc, 0xa73b, 0x4afa, 0xac, 0xbf, 0x15, 0xd5,
+ 0xe2, 0xce, 0x12, 0xc3);
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+
+typedef struct tagTHREADNAME_INFO
+{
+ DWORD dwType; // must be 0x1000
+ LPCSTR szName; // pointer to name (in user addr space)
+ DWORD dwThreadID; // thread ID (-1=caller thread)
+ DWORD dwFlags; // reserved for future use, must be zero
+} THREADNAME_INFO;
+
+CaptureInputPin::CaptureInputPin (int32_t moduleId,
+ IN TCHAR * szName,
+ IN CaptureSinkFilter* pFilter,
+ IN CCritSec * pLock,
+ OUT HRESULT * pHr,
+ IN LPCWSTR pszName)
+ : CBaseInputPin (szName, pFilter, pLock, pHr, pszName),
+ _requestedCapability(),
+ _resultingCapability()
+{
+ _moduleId=moduleId;
+ _threadHandle = NULL;
+}
+
+CaptureInputPin::~CaptureInputPin()
+{
+}
+
+HRESULT
+CaptureInputPin::GetMediaType (IN int iPosition, OUT CMediaType * pmt)
+{
+ // reset the thread handle
+ _threadHandle = NULL;
+
+ if(iPosition < 0)
+ return E_INVALIDARG;
+
+ VIDEOINFOHEADER* pvi = (VIDEOINFOHEADER*) pmt->AllocFormatBuffer(
+ sizeof(VIDEOINFOHEADER));
+ if(NULL == pvi)
+ {
+ WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _moduleId,
+                     "GetMediaType VIDEOINFOHEADER is NULL. Returning... Line:%d\n", __LINE__);
+ return(E_OUTOFMEMORY);
+ }
+
+ ZeroMemory(pvi, sizeof(VIDEOINFOHEADER));
+ pvi->bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
+ pvi->bmiHeader.biPlanes = 1;
+ pvi->bmiHeader.biClrImportant = 0;
+ pvi->bmiHeader.biClrUsed = 0;
+ if (_requestedCapability.maxFPS != 0) {
+ pvi->AvgTimePerFrame = 10000000/_requestedCapability.maxFPS;
+ }
+
+ SetRectEmpty(&(pvi->rcSource)); // we want the whole image area rendered.
+ SetRectEmpty(&(pvi->rcTarget)); // no particular destination rectangle
+
+ pmt->SetType(&MEDIATYPE_Video);
+ pmt->SetFormatType(&FORMAT_VideoInfo);
+ pmt->SetTemporalCompression(FALSE);
+
+ int32_t positionOffset=1;
+ if(_requestedCapability.codecType!=kVideoCodecUnknown)
+ {
+ positionOffset=0;
+ }
+
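+    // The switch below maps the offset position onto the supported output
+    // subtypes, in order: I420, YUY2, RGB24, UYVY and MJPG. With the default
+    // offset of 1 the I420 entry (case 0) is only reachable when a specific
+    // codec type has been requested (offset 0).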
+ switch (iPosition+positionOffset)
+ {
+ case 0:
+ {
+ pvi->bmiHeader.biCompression = MAKEFOURCC('I','4','2','0');
+        pvi->bmiHeader.biBitCount = 12; // bits per pixel
+ pvi->bmiHeader.biWidth = _requestedCapability.width;
+ pvi->bmiHeader.biHeight = _requestedCapability.height;
+ pvi->bmiHeader.biSizeImage = 3*_requestedCapability.height
+ *_requestedCapability.width/2;
+ pmt->SetSubtype(&MEDIASUBTYPE_I420);
+ }
+ break;
+ case 1:
+ {
+            pvi->bmiHeader.biCompression = MAKEFOURCC('Y','U','Y','2');
+ pvi->bmiHeader.biBitCount = 16; //bit per pixel
+ pvi->bmiHeader.biWidth = _requestedCapability.width;
+ pvi->bmiHeader.biHeight = _requestedCapability.height;
+ pvi->bmiHeader.biSizeImage = 2*_requestedCapability.width
+ *_requestedCapability.height;
+ pmt->SetSubtype(&MEDIASUBTYPE_YUY2);
+ }
+ break;
+ case 2:
+ {
+ pvi->bmiHeader.biCompression = BI_RGB;
+ pvi->bmiHeader.biBitCount = 24; //bit per pixel
+ pvi->bmiHeader.biWidth = _requestedCapability.width;
+ pvi->bmiHeader.biHeight = _requestedCapability.height;
+ pvi->bmiHeader.biSizeImage = 3*_requestedCapability.height
+ *_requestedCapability.width;
+ pmt->SetSubtype(&MEDIASUBTYPE_RGB24);
+ }
+ break;
+ case 3:
+ {
+ pvi->bmiHeader.biCompression = MAKEFOURCC('U','Y','V','Y');
+ pvi->bmiHeader.biBitCount = 16; //bit per pixel
+ pvi->bmiHeader.biWidth = _requestedCapability.width;
+ pvi->bmiHeader.biHeight = _requestedCapability.height;
+ pvi->bmiHeader.biSizeImage = 2*_requestedCapability.height
+ *_requestedCapability.width;
+ pmt->SetSubtype(&MEDIASUBTYPE_UYVY);
+ }
+ break;
+ case 4:
+ {
+ pvi->bmiHeader.biCompression = MAKEFOURCC('M','J','P','G');
+ pvi->bmiHeader.biBitCount = 12; //bit per pixel
+ pvi->bmiHeader.biWidth = _requestedCapability.width;
+ pvi->bmiHeader.biHeight = _requestedCapability.height;
+ pvi->bmiHeader.biSizeImage = 3*_requestedCapability.height
+ *_requestedCapability.width/2;
+ pmt->SetSubtype(&MEDIASUBTYPE_MJPG);
+ }
+ break;
+ default :
+ return VFW_S_NO_MORE_ITEMS;
+ }
+ pmt->SetSampleSize(pvi->bmiHeader.biSizeImage);
+ WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _moduleId,
+ "GetMediaType position %d, width %d, height %d, biCompression 0x%x",
+ iPosition, _requestedCapability.width,
+ _requestedCapability.height,pvi->bmiHeader.biCompression);
+ return NOERROR;
+}
+
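+// Validates a media type proposed by the connected capture filter and records
+// the negotiated width, height and raw video format in _resultingCapability.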
+HRESULT
+CaptureInputPin::CheckMediaType ( IN const CMediaType * pMediaType)
+{
+ // reset the thread handle
+ _threadHandle = NULL;
+
+ const GUID *type = pMediaType->Type();
+ if (*type != MEDIATYPE_Video)
+ return E_INVALIDARG;
+
+ const GUID *formatType = pMediaType->FormatType();
+
+ // Check for the subtypes we support
+ const GUID *SubType = pMediaType->Subtype();
+ if (SubType == NULL)
+ {
+ return E_INVALIDARG;
+ }
+
+ if(*formatType == FORMAT_VideoInfo)
+ {
+ VIDEOINFOHEADER *pvi = (VIDEOINFOHEADER *) pMediaType->Format();
+ if(pvi == NULL)
+ {
+ return E_INVALIDARG;
+ }
+
+ // Store the incoming width and height
+ _resultingCapability.width = pvi->bmiHeader.biWidth;
+
+ // Store the incoming height,
+ // for RGB24 we assume the frame to be upside down
+ if(*SubType == MEDIASUBTYPE_RGB24
+ && pvi->bmiHeader.biHeight > 0)
+ {
+ _resultingCapability.height = -(pvi->bmiHeader.biHeight);
+ }
+ else
+ {
+ _resultingCapability.height = abs(pvi->bmiHeader.biHeight);
+ }
+
+ WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _moduleId,
+ "CheckMediaType width:%d height:%d Compression:0x%x\n",
+ pvi->bmiHeader.biWidth,pvi->bmiHeader.biHeight,
+ pvi->bmiHeader.biCompression);
+
+ if(*SubType == MEDIASUBTYPE_MJPG
+ && pvi->bmiHeader.biCompression == MAKEFOURCC('M','J','P','G'))
+ {
+ _resultingCapability.rawType = kVideoMJPEG;
+ return S_OK; // This format is acceptable.
+ }
+ if(*SubType == MEDIASUBTYPE_I420
+ && pvi->bmiHeader.biCompression == MAKEFOURCC('I','4','2','0'))
+ {
+ _resultingCapability.rawType = kVideoI420;
+ return S_OK; // This format is acceptable.
+ }
+ if(*SubType == MEDIASUBTYPE_YUY2
+ && pvi->bmiHeader.biCompression == MAKEFOURCC('Y','U','Y','2'))
+ {
+ _resultingCapability.rawType = kVideoYUY2;
+ ::Sleep(60); // workaround for bad driver
+ return S_OK; // This format is acceptable.
+ }
+ if(*SubType == MEDIASUBTYPE_UYVY
+ && pvi->bmiHeader.biCompression == MAKEFOURCC('U','Y','V','Y'))
+ {
+ _resultingCapability.rawType = kVideoUYVY;
+ return S_OK; // This format is acceptable.
+ }
+
+ if(*SubType == MEDIASUBTYPE_HDYC)
+ {
+ _resultingCapability.rawType = kVideoUYVY;
+ return S_OK; // This format is acceptable.
+ }
+ if(*SubType == MEDIASUBTYPE_RGB24
+ && pvi->bmiHeader.biCompression == BI_RGB)
+ {
+ _resultingCapability.rawType = kVideoRGB24;
+ return S_OK; // This format is acceptable.
+ }
+ }
+ if(*formatType == FORMAT_VideoInfo2)
+ {
+ // VIDEOINFOHEADER2 that has dwInterlaceFlags
+ VIDEOINFOHEADER2 *pvi = (VIDEOINFOHEADER2 *) pMediaType->Format();
+
+ if(pvi == NULL)
+ {
+ return E_INVALIDARG;
+ }
+
+ WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _moduleId,
+ "CheckMediaType width:%d height:%d Compression:0x%x\n",
+ pvi->bmiHeader.biWidth,pvi->bmiHeader.biHeight,
+ pvi->bmiHeader.biCompression);
+
+ _resultingCapability.width = pvi->bmiHeader.biWidth;
+
+ // Store the incoming height,
+ // for RGB24 we assume the frame to be upside down
+ if(*SubType == MEDIASUBTYPE_RGB24
+ && pvi->bmiHeader.biHeight > 0)
+ {
+ _resultingCapability.height = -(pvi->bmiHeader.biHeight);
+ }
+ else
+ {
+ _resultingCapability.height = abs(pvi->bmiHeader.biHeight);
+ }
+
+ if(*SubType == MEDIASUBTYPE_MJPG
+ && pvi->bmiHeader.biCompression == MAKEFOURCC('M','J','P','G'))
+ {
+ _resultingCapability.rawType = kVideoMJPEG;
+ return S_OK; // This format is acceptable.
+ }
+ if(*SubType == MEDIASUBTYPE_I420
+ && pvi->bmiHeader.biCompression == MAKEFOURCC('I','4','2','0'))
+ {
+ _resultingCapability.rawType = kVideoI420;
+ return S_OK; // This format is acceptable.
+ }
+ if(*SubType == MEDIASUBTYPE_YUY2
+ && pvi->bmiHeader.biCompression == MAKEFOURCC('Y','U','Y','2'))
+ {
+ _resultingCapability.rawType = kVideoYUY2;
+ return S_OK; // This format is acceptable.
+ }
+ if(*SubType == MEDIASUBTYPE_UYVY
+ && pvi->bmiHeader.biCompression == MAKEFOURCC('U','Y','V','Y'))
+ {
+ _resultingCapability.rawType = kVideoUYVY;
+ return S_OK; // This format is acceptable.
+ }
+
+ if(*SubType == MEDIASUBTYPE_HDYC)
+ {
+ _resultingCapability.rawType = kVideoUYVY;
+ return S_OK; // This format is acceptable.
+ }
+ if(*SubType == MEDIASUBTYPE_RGB24
+ && pvi->bmiHeader.biCompression == BI_RGB)
+ {
+ _resultingCapability.rawType = kVideoRGB24;
+ return S_OK; // This format is acceptable.
+ }
+ }
+ return E_INVALIDARG;
+}
+
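+// Called on the graph's streaming thread for each delivered sample. The first
+// call raises the thread priority and names the thread; every frame is then
+// passed to the owning CaptureSinkFilter under the receiver lock.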
+HRESULT
+CaptureInputPin::Receive ( IN IMediaSample * pIMediaSample )
+{
+ HRESULT hr = S_OK;
+
+ ASSERT (m_pFilter);
+ ASSERT (pIMediaSample);
+
+    // Get the thread handle of the delivering thread and raise its priority.
+ if( _threadHandle == NULL)
+ {
+ HANDLE handle= GetCurrentThread();
+ SetThreadPriority(handle, THREAD_PRIORITY_HIGHEST);
+ _threadHandle = handle;
+
+ rtc::SetCurrentThreadName("webrtc_video_capture");
+ }
+
+ reinterpret_cast <CaptureSinkFilter *>(m_pFilter)->LockReceive();
+ hr = CBaseInputPin::Receive (pIMediaSample);
+
+ if (SUCCEEDED (hr))
+ {
+ const LONG length = pIMediaSample->GetActualDataLength();
+ ASSERT(length >= 0);
+
+ unsigned char* pBuffer = NULL;
+ if(S_OK != pIMediaSample->GetPointer(&pBuffer))
+ {
+ reinterpret_cast <CaptureSinkFilter *>(m_pFilter)->UnlockReceive();
+ return S_FALSE;
+ }
+
+ // NOTE: filter unlocked within Send call
+ reinterpret_cast <CaptureSinkFilter *> (m_pFilter)->ProcessCapturedFrame(
+ pBuffer, static_cast<size_t>(length), _resultingCapability);
+ }
+ else
+ {
+ reinterpret_cast <CaptureSinkFilter *>(m_pFilter)->UnlockReceive();
+ }
+
+ return hr;
+}
+
+// called under LockReceive
+HRESULT CaptureInputPin::SetMatchingMediaType(
+ const VideoCaptureCapability& capability)
+{
+
+ _requestedCapability = capability;
+ _resultingCapability = VideoCaptureCapability();
+ return S_OK;
+}
+// ----------------------------------------------------------------------------
+CaptureSinkFilter::CaptureSinkFilter (IN TCHAR * tszName,
+ IN LPUNKNOWN punk,
+ OUT HRESULT * phr,
+ VideoCaptureExternal& captureObserver,
+ int32_t moduleId)
+ : CBaseFilter(tszName,punk,& m_crtFilter,CLSID_SINKFILTER),
+ m_pInput(NULL),
+ _captureObserver(captureObserver),
+ _moduleId(moduleId)
+{
+ (* phr) = S_OK;
+ m_pInput = new CaptureInputPin(moduleId,NAME ("VideoCaptureInputPin"),
+ this,
+ & m_crtFilter,
+ phr, L"VideoCapture");
+ if (m_pInput == NULL || FAILED (* phr))
+ {
+ (* phr) = FAILED (* phr) ? (* phr) : E_OUTOFMEMORY;
+    }
+}
+
+CaptureSinkFilter::~CaptureSinkFilter()
+{
+ delete m_pInput;
+}
+
+int CaptureSinkFilter::GetPinCount()
+{
+ return 1;
+}
+
+CBasePin *
+CaptureSinkFilter::GetPin(IN int Index)
+{
+ CBasePin * pPin;
+ LockFilter ();
+ if (Index == 0)
+ {
+ pPin = m_pInput;
+ }
+ else
+ {
+ pPin = NULL;
+ }
+ UnlockFilter ();
+ return pPin;
+}
+
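+// When leaving State_Stopped the state is changed before the input pin is
+// activated; an unconnected pin moves the filter straight to State_Running.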
+STDMETHODIMP CaptureSinkFilter::Pause()
+{
+ LockReceive();
+ LockFilter();
+ if (m_State == State_Stopped)
+ {
+ // change the state, THEN activate the input pin
+ m_State = State_Paused;
+ if (m_pInput && m_pInput->IsConnected())
+ {
+ m_pInput->Active();
+ }
+ if (m_pInput && !m_pInput->IsConnected())
+ {
+ m_State = State_Running;
+ }
+ }
+ else if (m_State == State_Running)
+ {
+ m_State = State_Paused;
+ }
+ UnlockFilter();
+ UnlockReceive();
+ return S_OK;
+}
+
+STDMETHODIMP CaptureSinkFilter::Stop()
+{
+ LockReceive();
+ LockFilter();
+
+ // set the state
+ m_State = State_Stopped;
+
+ // inactivate the pins
+ if (m_pInput)
+ m_pInput->Inactive();
+
+ UnlockFilter();
+ UnlockReceive();
+ return S_OK;
+}
+
+void CaptureSinkFilter::SetFilterGraph(IGraphBuilder* graph)
+{
+ LockFilter();
+ m_pGraph = graph;
+ UnlockFilter();
+}
+
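+// Called from CaptureInputPin::Receive() with the receiver lock held; forwards
+// the frame to the capture observer and releases the lock before returning.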
+void CaptureSinkFilter::ProcessCapturedFrame(
+ unsigned char* pBuffer,
+ size_t length,
+ const VideoCaptureCapability& frameInfo)
+{
+ // we have the receiver lock
+ if (m_State == State_Running)
+ {
+ _captureObserver.IncomingFrame(pBuffer, length, frameInfo);
+
+        // Keep holding the receiver lock since the callback only performs a
+        // memcpy.
+        // IMPROVEMENT: if this works, move the critical section.
+ UnlockReceive();
+ return;
+ }
+ UnlockReceive();
+ return;
+}
+
+STDMETHODIMP CaptureSinkFilter::SetMatchingMediaType(
+ const VideoCaptureCapability& capability)
+{
+ LockReceive();
+ LockFilter();
+ HRESULT hr;
+ if (m_pInput)
+ {
+ hr = m_pInput->SetMatchingMediaType(capability);
+ }
+ else
+ {
+ hr = E_UNEXPECTED;
+ }
+ UnlockFilter();
+ UnlockReceive();
+ return hr;
+}
+
+STDMETHODIMP CaptureSinkFilter::GetClassID( OUT CLSID * pCLSID )
+{
+ (* pCLSID) = CLSID_SINKFILTER;
+ return S_OK;
+}
+
+} // namespace videocapturemodule
+} // namespace webrtc
diff --git a/webrtc/modules/video_capture/windows/sink_filter_ds.h b/webrtc/modules/video_capture/windows/sink_filter_ds.h
new file mode 100644
index 0000000000..064cd9d7d3
--- /dev/null
+++ b/webrtc/modules/video_capture/windows/sink_filter_ds.h
@@ -0,0 +1,100 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_SINK_FILTER_DS_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_SINK_FILTER_DS_H_
+
+#include <Streams.h> // Include base DS filter header files
+
+#include "webrtc/modules/video_capture/include/video_capture_defines.h"
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+// Forward declaration.
+class CaptureSinkFilter;
+/**
+ * Input pin that receives captured video samples from the camera's
+ * capture output pin.
+ */
+class CaptureInputPin: public CBaseInputPin
+{
+public:
+ int32_t _moduleId;
+
+ VideoCaptureCapability _requestedCapability;
+ VideoCaptureCapability _resultingCapability;
+ HANDLE _threadHandle;
+
+ CaptureInputPin(int32_t moduleId,
+ IN TCHAR* szName,
+ IN CaptureSinkFilter* pFilter,
+ IN CCritSec * pLock,
+ OUT HRESULT * pHr,
+ IN LPCWSTR pszName);
+ virtual ~CaptureInputPin();
+
+ HRESULT GetMediaType (IN int iPos, OUT CMediaType * pmt);
+ HRESULT CheckMediaType (IN const CMediaType * pmt);
+ STDMETHODIMP Receive (IN IMediaSample *);
+ HRESULT SetMatchingMediaType(const VideoCaptureCapability& capability);
+};
+
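+// Renderer-style DirectShow filter with a single input pin. Frames received on
+// the pin are forwarded to the VideoCaptureExternal observer passed to the
+// constructor.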
+class CaptureSinkFilter: public CBaseFilter
+{
+
+public:
+ CaptureSinkFilter(IN TCHAR * tszName,
+ IN LPUNKNOWN punk,
+ OUT HRESULT * phr,
+ VideoCaptureExternal& captureObserver,
+ int32_t moduleId);
+ virtual ~CaptureSinkFilter();
+
+ // --------------------------------------------------------------------
+ // class methods
+
+ void ProcessCapturedFrame(unsigned char* pBuffer, size_t length,
+ const VideoCaptureCapability& frameInfo);
+    // Explicit receiver lock acquisition and release.
+    void LockReceive() { m_crtRecv.Lock(); }
+    void UnlockReceive() { m_crtRecv.Unlock(); }
+    // Explicit filter lock acquisition and release.
+    void LockFilter() { m_crtFilter.Lock(); }
+    void UnlockFilter() { m_crtFilter.Unlock(); }
+ void SetFilterGraph(IGraphBuilder* graph); // Used if EVR
+
+ // --------------------------------------------------------------------
+ // COM interfaces
+    DECLARE_IUNKNOWN;
+ STDMETHODIMP SetMatchingMediaType(const VideoCaptureCapability& capability);
+
+ // --------------------------------------------------------------------
+ // CBaseFilter methods
+ int GetPinCount ();
+ CBasePin * GetPin ( IN int Index);
+ STDMETHODIMP Pause ();
+ STDMETHODIMP Stop ();
+ STDMETHODIMP GetClassID ( OUT CLSID * pCLSID);
+ // --------------------------------------------------------------------
+ // class factory calls this
+ static CUnknown * CreateInstance (IN LPUNKNOWN punk, OUT HRESULT * phr);
+private:
+ CCritSec m_crtFilter; // filter lock
+ CCritSec m_crtRecv; // receiver lock; always acquire before filter lock
+ CaptureInputPin * m_pInput;
+ VideoCaptureExternal& _captureObserver;
+ int32_t _moduleId;
+};
+} // namespace videocapturemodule
+} // namespace webrtc
+#endif // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_SINK_FILTER_DS_H_
diff --git a/webrtc/modules/video_capture/windows/video_capture_ds.cc b/webrtc/modules/video_capture/windows/video_capture_ds.cc
new file mode 100644
index 0000000000..b69e50121d
--- /dev/null
+++ b/webrtc/modules/video_capture/windows/video_capture_ds.cc
@@ -0,0 +1,414 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_capture/windows/video_capture_ds.h"
+
+#include "webrtc/modules/video_capture/video_capture_config.h"
+#include "webrtc/modules/video_capture/windows/help_functions_ds.h"
+#include "webrtc/modules/video_capture/windows/sink_filter_ds.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/include/trace.h"
+
+#include <Dvdmedia.h> // VIDEOINFOHEADER2
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+VideoCaptureDS::VideoCaptureDS(const int32_t id)
+ : VideoCaptureImpl(id), _dsInfo(id), _captureFilter(NULL),
+ _graphBuilder(NULL), _mediaControl(NULL), _sinkFilter(NULL),
+ _inputSendPin(NULL), _outputCapturePin(NULL), _dvFilter(NULL),
+ _inputDvPin(NULL), _outputDvPin(NULL)
+{
+}
+
+VideoCaptureDS::~VideoCaptureDS()
+{
+ if (_mediaControl)
+ {
+ _mediaControl->Stop();
+ }
+ if (_graphBuilder)
+ {
+ if (_sinkFilter)
+ _graphBuilder->RemoveFilter(_sinkFilter);
+ if (_captureFilter)
+ _graphBuilder->RemoveFilter(_captureFilter);
+ if (_dvFilter)
+ _graphBuilder->RemoveFilter(_dvFilter);
+ }
+ RELEASE_AND_CLEAR(_inputSendPin);
+ RELEASE_AND_CLEAR(_outputCapturePin);
+
+ RELEASE_AND_CLEAR(_captureFilter); // release the capture device
+ RELEASE_AND_CLEAR(_sinkFilter);
+ RELEASE_AND_CLEAR(_dvFilter);
+
+ RELEASE_AND_CLEAR(_mediaControl);
+
+ RELEASE_AND_CLEAR(_inputDvPin);
+ RELEASE_AND_CLEAR(_outputDvPin);
+
+ RELEASE_AND_CLEAR(_graphBuilder);
+}
+
+int32_t VideoCaptureDS::Init(const int32_t id, const char* deviceUniqueIdUTF8)
+{
+ const int32_t nameLength =
+ (int32_t) strlen((char*) deviceUniqueIdUTF8);
+ if (nameLength > kVideoCaptureUniqueNameLength)
+ return -1;
+
+ // Store the device name
+ _deviceUniqueId = new (std::nothrow) char[nameLength + 1];
+ memcpy(_deviceUniqueId, deviceUniqueIdUTF8, nameLength + 1);
+
+ if (_dsInfo.Init() != 0)
+ return -1;
+
+ _captureFilter = _dsInfo.GetDeviceFilter(deviceUniqueIdUTF8);
+ if (!_captureFilter)
+ {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+ "Failed to create capture filter.");
+ return -1;
+ }
+
+ // Get the interface for DirectShow's GraphBuilder
+ HRESULT hr = CoCreateInstance(CLSID_FilterGraph, NULL,
+ CLSCTX_INPROC_SERVER, IID_IGraphBuilder,
+ (void **) &_graphBuilder);
+ if (FAILED(hr))
+ {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+ "Failed to create graph builder.");
+ return -1;
+ }
+
+ hr = _graphBuilder->QueryInterface(IID_IMediaControl,
+ (void **) &_mediaControl);
+ if (FAILED(hr))
+ {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+ "Failed to create media control builder.");
+ return -1;
+ }
+ hr = _graphBuilder->AddFilter(_captureFilter, CAPTURE_FILTER_NAME);
+ if (FAILED(hr))
+ {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+ "Failed to add the capture device to the graph.");
+ return -1;
+ }
+
+ _outputCapturePin = GetOutputPin(_captureFilter, PIN_CATEGORY_CAPTURE);
+
+    // Create the sink filter used for receiving captured frames.
+ _sinkFilter = new CaptureSinkFilter(SINK_FILTER_NAME, NULL, &hr,
+ *this, _id);
+ if (hr != S_OK)
+ {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+ "Failed to create send filter");
+ return -1;
+ }
+ _sinkFilter->AddRef();
+
+ hr = _graphBuilder->AddFilter(_sinkFilter, SINK_FILTER_NAME);
+ if (FAILED(hr))
+ {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+ "Failed to add the send filter to the graph.");
+ return -1;
+ }
+ _inputSendPin = GetInputPin(_sinkFilter);
+
+ // Temporary connect here.
+ // This is done so that no one else can use the capture device.
+ if (SetCameraOutput(_requestedCapability) != 0)
+ {
+ return -1;
+ }
+ hr = _mediaControl->Pause();
+ if (FAILED(hr))
+ {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+ "Failed to Pause the Capture device. Is it already occupied? %d.",
+ hr);
+ return -1;
+ }
+ WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, _id,
+ "Capture device '%s' initialized.", deviceUniqueIdUTF8);
+ return 0;
+}
+
+int32_t VideoCaptureDS::StartCapture(
+ const VideoCaptureCapability& capability)
+{
+ CriticalSectionScoped cs(&_apiCs);
+
+ if (capability != _requestedCapability)
+ {
+ DisconnectGraph();
+
+ if (SetCameraOutput(capability) != 0)
+ {
+ return -1;
+ }
+ }
+ HRESULT hr = _mediaControl->Run();
+ if (FAILED(hr))
+ {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+ "Failed to start the Capture device.");
+ return -1;
+ }
+ return 0;
+}
+
+int32_t VideoCaptureDS::StopCapture()
+{
+ CriticalSectionScoped cs(&_apiCs);
+
+ HRESULT hr = _mediaControl->Pause();
+ if (FAILED(hr))
+ {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+ "Failed to stop the capture graph. %d", hr);
+ return -1;
+ }
+ return 0;
+}
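+
+// Queries the graph's run state via IMediaControl; capture is considered
+// started only when the graph is in State_Running.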
+bool VideoCaptureDS::CaptureStarted()
+{
+ OAFilterState state = 0;
+ HRESULT hr = _mediaControl->GetState(1000, &state);
+ if (hr != S_OK && hr != VFW_S_CANT_CUE)
+ {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+ "Failed to get the CaptureStarted status");
+ }
+ WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+ "CaptureStarted %d", state);
+    return state == State_Running;
+}
+
+int32_t VideoCaptureDS::CaptureSettings(
+ VideoCaptureCapability& settings)
+{
+ settings = _requestedCapability;
+ return 0;
+}
+
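+// Matches the requested capability against the device's capabilities, programs
+// the capture pin's stream format accordingly and connects the capture output
+// to the sink filter (through the MS DV decoder for DV cameras).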
+int32_t VideoCaptureDS::SetCameraOutput(
+ const VideoCaptureCapability& requestedCapability)
+{
+
+ // Get the best matching capability
+ VideoCaptureCapability capability;
+ int32_t capabilityIndex;
+
+ // Store the new requested size
+ _requestedCapability = requestedCapability;
+ // Match the requested capability with the supported.
+ if ((capabilityIndex = _dsInfo.GetBestMatchedCapability(_deviceUniqueId,
+ _requestedCapability,
+ capability)) < 0)
+ {
+ return -1;
+ }
+    // Reduce the frame rate if possible.
+    if (capability.maxFPS > requestedCapability.maxFPS)
+    {
+        capability.maxFPS = requestedCapability.maxFPS;
+    }
+    else if (capability.maxFPS <= 0)
+    {
+        capability.maxFPS = 30;
+    }
+ // Store the new expected capture delay
+ _captureDelay = capability.expectedCaptureDelay;
+
+    // Convert it to the Windows capability index since they are not
+    // necessarily the same.
+ VideoCaptureCapabilityWindows windowsCapability;
+ if (_dsInfo.GetWindowsCapability(capabilityIndex, windowsCapability) != 0)
+ {
+ return -1;
+ }
+
+ IAMStreamConfig* streamConfig = NULL;
+ AM_MEDIA_TYPE *pmt = NULL;
+ VIDEO_STREAM_CONFIG_CAPS caps;
+
+ HRESULT hr = _outputCapturePin->QueryInterface(IID_IAMStreamConfig,
+ (void**) &streamConfig);
+ if (hr)
+ {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+ "Can't get the Capture format settings.");
+ return -1;
+ }
+
+    // Get the Windows capability from the capture device.
+ bool isDVCamera = false;
+ hr = streamConfig->GetStreamCaps(
+ windowsCapability.directShowCapabilityIndex,
+ &pmt, reinterpret_cast<BYTE*> (&caps));
+ if (!FAILED(hr))
+ {
+ if (pmt->formattype == FORMAT_VideoInfo2)
+ {
+ VIDEOINFOHEADER2* h =
+ reinterpret_cast<VIDEOINFOHEADER2*> (pmt->pbFormat);
+ if (capability.maxFPS > 0
+ && windowsCapability.supportFrameRateControl)
+ {
+ h->AvgTimePerFrame = REFERENCE_TIME(10000000.0
+ / capability.maxFPS);
+ }
+ }
+ else
+ {
+ VIDEOINFOHEADER* h = reinterpret_cast<VIDEOINFOHEADER*>
+ (pmt->pbFormat);
+ if (capability.maxFPS > 0
+ && windowsCapability.supportFrameRateControl)
+ {
+ h->AvgTimePerFrame = REFERENCE_TIME(10000000.0
+ / capability.maxFPS);
+ }
+
+ }
+
+ // Set the sink filter to request this capability
+ _sinkFilter->SetMatchingMediaType(capability);
+        // Order the capture device to use this capability.
+        hr += streamConfig->SetFormat(pmt);
+
+        // Check whether this is a DV camera that needs the MS DV filter.
+ if (pmt->subtype == MEDIASUBTYPE_dvsl
+ || pmt->subtype == MEDIASUBTYPE_dvsd
+ || pmt->subtype == MEDIASUBTYPE_dvhd)
+ isDVCamera = true; // This is a DV camera. Use MS DV filter
+ }
+ RELEASE_AND_CLEAR(streamConfig);
+
+ if (FAILED(hr))
+ {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+ "Failed to set capture device output format");
+ return -1;
+ }
+
+ if (isDVCamera)
+ {
+ hr = ConnectDVCamera();
+ }
+ else
+ {
+ hr = _graphBuilder->ConnectDirect(_outputCapturePin, _inputSendPin,
+ NULL);
+ }
+ if (hr != S_OK)
+ {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+ "Failed to connect the Capture graph %d", hr);
+ return -1;
+ }
+ return 0;
+}
+
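+// Stops the graph and disconnects the capture, sink and (if present) DV pins
+// so that the stream format can be renegotiated.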
+int32_t VideoCaptureDS::DisconnectGraph()
+{
+ HRESULT hr = _mediaControl->Stop();
+ hr += _graphBuilder->Disconnect(_outputCapturePin);
+ hr += _graphBuilder->Disconnect(_inputSendPin);
+
+    // If the DV camera filter exists, disconnect its pins as well.
+ if (_dvFilter)
+ {
+ _graphBuilder->Disconnect(_inputDvPin);
+ _graphBuilder->Disconnect(_outputDvPin);
+ }
+ if (hr != S_OK)
+ {
+ WEBRTC_TRACE( webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+ "Failed to Stop the Capture device for reconfiguration %d",
+ hr);
+ return -1;
+ }
+ return 0;
+}
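+
+// Inserts the Microsoft DV video decoder between the camera's output pin and
+// the sink filter's input pin, creating and adding the decoder to the graph on
+// first use.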
+HRESULT VideoCaptureDS::ConnectDVCamera()
+{
+ HRESULT hr = S_OK;
+
+ if (!_dvFilter)
+ {
+ hr = CoCreateInstance(CLSID_DVVideoCodec, NULL, CLSCTX_INPROC,
+ IID_IBaseFilter, (void **) &_dvFilter);
+ if (hr != S_OK)
+ {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+ "Failed to create the dv decoder: %x", hr);
+ return hr;
+ }
+ hr = _graphBuilder->AddFilter(_dvFilter, L"VideoDecoderDV");
+ if (hr != S_OK)
+ {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+ "Failed to add the dv decoder to the graph: %x", hr);
+ return hr;
+ }
+ _inputDvPin = GetInputPin(_dvFilter);
+ if (_inputDvPin == NULL)
+ {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+ "Failed to get input pin from DV decoder");
+ return -1;
+ }
+ _outputDvPin = GetOutputPin(_dvFilter, GUID_NULL);
+ if (_outputDvPin == NULL)
+ {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+ "Failed to get output pin from DV decoder");
+ return -1;
+ }
+ }
+ hr = _graphBuilder->ConnectDirect(_outputCapturePin, _inputDvPin, NULL);
+ if (hr != S_OK)
+ {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "Failed to connect capture device to the dv decoder: %x",
+                     hr);
+ return hr;
+ }
+
+ hr = _graphBuilder->ConnectDirect(_outputDvPin, _inputSendPin, NULL);
+ if (hr != S_OK)
+ {
+ if (hr == 0x80070004)
+ {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+ "Failed to connect the capture device, busy");
+ }
+ else
+ {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+ "Failed to connect capture device to the send graph: 0x%x",
+ hr);
+ }
+ return hr;
+ }
+ return hr;
+}
+} // namespace videocapturemodule
+} // namespace webrtc
diff --git a/webrtc/modules/video_capture/windows/video_capture_ds.h b/webrtc/modules/video_capture/windows/video_capture_ds.h
new file mode 100644
index 0000000000..1f6193dc32
--- /dev/null
+++ b/webrtc/modules/video_capture/windows/video_capture_ds.h
@@ -0,0 +1,79 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_VIDEO_CAPTURE_DS_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_VIDEO_CAPTURE_DS_H_
+
+#include "webrtc/modules/video_capture/video_capture_impl.h"
+#include "webrtc/modules/video_capture/windows/device_info_ds.h"
+
+#define CAPTURE_FILTER_NAME L"VideoCaptureFilter"
+#define SINK_FILTER_NAME L"SinkFilter"
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+// Forward declaration
+class CaptureSinkFilter;
+
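+// DirectShow-based VideoCaptureImpl. Builds a filter graph that connects the
+// selected capture device to a CaptureSinkFilter, which hands captured frames
+// back to this object through VideoCaptureExternal::IncomingFrame().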
+class VideoCaptureDS: public VideoCaptureImpl
+{
+public:
+ VideoCaptureDS(const int32_t id);
+
+ virtual int32_t Init(const int32_t id, const char* deviceUniqueIdUTF8);
+
+ /*************************************************************************
+ *
+ * Start/Stop
+ *
+ *************************************************************************/
+ virtual int32_t
+ StartCapture(const VideoCaptureCapability& capability);
+ virtual int32_t StopCapture();
+
+ /**************************************************************************
+ *
+ * Properties of the set device
+ *
+ **************************************************************************/
+
+ virtual bool CaptureStarted();
+ virtual int32_t CaptureSettings(VideoCaptureCapability& settings);
+
+protected:
+ virtual ~VideoCaptureDS();
+
+ // Help functions
+
+ int32_t
+ SetCameraOutput(const VideoCaptureCapability& requestedCapability);
+ int32_t DisconnectGraph();
+    HRESULT ConnectDVCamera();
+
+ DeviceInfoDS _dsInfo;
+
+ IBaseFilter* _captureFilter;
+ IGraphBuilder* _graphBuilder;
+ IMediaControl* _mediaControl;
+ CaptureSinkFilter* _sinkFilter;
+ IPin* _inputSendPin;
+ IPin* _outputCapturePin;
+
+ // Microsoft DV interface (external DV cameras)
+ IBaseFilter* _dvFilter;
+ IPin* _inputDvPin;
+ IPin* _outputDvPin;
+
+};
+} // namespace videocapturemodule
+} // namespace webrtc
+#endif // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_VIDEO_CAPTURE_DS_H_
diff --git a/webrtc/modules/video_capture/windows/video_capture_factory_windows.cc b/webrtc/modules/video_capture/windows/video_capture_factory_windows.cc
new file mode 100644
index 0000000000..747d3d60cf
--- /dev/null
+++ b/webrtc/modules/video_capture/windows/video_capture_factory_windows.cc
@@ -0,0 +1,41 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_capture/windows/video_capture_ds.h"
+#include "webrtc/modules/video_capture/windows/video_capture_mf.h"
+#include "webrtc/system_wrappers/include/ref_count.h"
+
+namespace webrtc {
+namespace videocapturemodule {
+
+// static
+VideoCaptureModule::DeviceInfo* VideoCaptureImpl::CreateDeviceInfo(
+ const int32_t id) {
+ // TODO(tommi): Use the Media Foundation version on Vista and up.
+ return DeviceInfoDS::Create(id);
+}
+
+VideoCaptureModule* VideoCaptureImpl::Create(const int32_t id,
+ const char* device_id) {
+ if (device_id == NULL)
+ return NULL;
+
+ // TODO(tommi): Use Media Foundation implementation for Vista and up.
+ RefCountImpl<VideoCaptureDS>* capture = new RefCountImpl<VideoCaptureDS>(id);
+ if (capture->Init(id, device_id) != 0) {
+ delete capture;
+ capture = NULL;
+ }
+
+ return capture;
+}
+
+} // namespace videocapturemodule
+} // namespace webrtc
diff --git a/webrtc/modules/video_capture/windows/video_capture_mf.cc b/webrtc/modules/video_capture/windows/video_capture_mf.cc
new file mode 100644
index 0000000000..c79ed222c0
--- /dev/null
+++ b/webrtc/modules/video_capture/windows/video_capture_mf.cc
@@ -0,0 +1,42 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_capture/windows/video_capture_mf.h"
+
+namespace webrtc {
+namespace videocapturemodule {
+
+VideoCaptureMF::VideoCaptureMF(const int32_t id) : VideoCaptureImpl(id) {}
+VideoCaptureMF::~VideoCaptureMF() {}
+
+int32_t VideoCaptureMF::Init(const int32_t id, const char* device_id) {
+ return 0;
+}
+
+int32_t VideoCaptureMF::StartCapture(
+ const VideoCaptureCapability& capability) {
+ return -1;
+}
+
+int32_t VideoCaptureMF::StopCapture() {
+ return -1;
+}
+
+bool VideoCaptureMF::CaptureStarted() {
+ return false;
+}
+
+int32_t VideoCaptureMF::CaptureSettings(
+ VideoCaptureCapability& settings) {
+ return -1;
+}
+
+} // namespace videocapturemodule
+} // namespace webrtc
diff --git a/webrtc/modules/video_capture/windows/video_capture_mf.h b/webrtc/modules/video_capture/windows/video_capture_mf.h
new file mode 100644
index 0000000000..076ef55ecf
--- /dev/null
+++ b/webrtc/modules/video_capture/windows/video_capture_mf.h
@@ -0,0 +1,43 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_WINDOWS_VIDEO_CAPTURE_MF_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_WINDOWS_VIDEO_CAPTURE_MF_H_
+
+#include "webrtc/modules/video_capture/video_capture_impl.h"
+
+namespace webrtc {
+namespace videocapturemodule {
+
+// VideoCapture implementation that uses the Media Foundation API on Windows.
+// This will replace the DirectShow based implementation on Vista and higher.
+// TODO(tommi): Finish implementing and switch out the DS in the factory method
+// for supported platforms.
+class VideoCaptureMF : public VideoCaptureImpl {
+ public:
+ explicit VideoCaptureMF(const int32_t id);
+
+ int32_t Init(const int32_t id, const char* device_id);
+
+ // Overrides from VideoCaptureImpl.
+ virtual int32_t StartCapture(const VideoCaptureCapability& capability);
+ virtual int32_t StopCapture();
+ virtual bool CaptureStarted();
+ virtual int32_t CaptureSettings(
+ VideoCaptureCapability& settings); // NOLINT
+
+ protected:
+ virtual ~VideoCaptureMF();
+};
+
+} // namespace videocapturemodule
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CAPTURE_WINDOWS_VIDEO_CAPTURE_MF_H_