about summary refs log tree commit diff
path: root/talk/app/webrtc/objc/avfoundationvideocapturer.mm
diff options
context:
space:
mode:
Diffstat (limited to 'talk/app/webrtc/objc/avfoundationvideocapturer.mm')
-rw-r--r--talk/app/webrtc/objc/avfoundationvideocapturer.mm26
1 file changed, 8 insertions, 18 deletions
diff --git a/talk/app/webrtc/objc/avfoundationvideocapturer.mm b/talk/app/webrtc/objc/avfoundationvideocapturer.mm
index e1b0f88fb6..0f9dc6825e 100644
--- a/talk/app/webrtc/objc/avfoundationvideocapturer.mm
+++ b/talk/app/webrtc/objc/avfoundationvideocapturer.mm
@@ -33,6 +33,8 @@
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>
+#import "webrtc/base/objc/RTCDispatcher.h"
+
// TODO(tkchin): support other formats.
static NSString* const kDefaultPreset = AVCaptureSessionPreset640x480;
static cricket::VideoFormat const kDefaultFormat =
@@ -41,11 +43,6 @@ static cricket::VideoFormat const kDefaultFormat =
cricket::VideoFormat::FpsToInterval(30),
cricket::FOURCC_NV12);
-// This queue is used to start and stop the capturer without blocking the
-// calling thread. -[AVCaptureSession startRunning] blocks until the camera is
-// running.
-static dispatch_queue_t kBackgroundQueue = nil;
-
// This class used to capture frames using AVFoundation APIs on iOS. It is meant
// to be owned by an instance of AVFoundationVideoCapturer. The reason for this
// because other webrtc objects own cricket::VideoCapturer, which is not
@@ -80,15 +77,6 @@ static dispatch_queue_t kBackgroundQueue = nil;
@synthesize useBackCamera = _useBackCamera;
@synthesize isRunning = _isRunning;
-+ (void)initialize {
- static dispatch_once_t onceToken;
- dispatch_once(&onceToken, ^{
- kBackgroundQueue = dispatch_queue_create(
- "com.google.webrtc.RTCAVFoundationCapturerBackground",
- DISPATCH_QUEUE_SERIAL);
- });
-}
-
- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer*)capturer {
NSParameterAssert(capturer);
if (self = [super init]) {
@@ -132,9 +120,10 @@ static dispatch_queue_t kBackgroundQueue = nil;
_orientationHasChanged = NO;
[[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
AVCaptureSession* session = _captureSession;
- dispatch_async(kBackgroundQueue, ^{
+ [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
+ block:^{
[session startRunning];
- });
+ }];
_isRunning = YES;
}
@@ -144,9 +133,10 @@ static dispatch_queue_t kBackgroundQueue = nil;
}
[_videoOutput setSampleBufferDelegate:nil queue:nullptr];
AVCaptureSession* session = _captureSession;
- dispatch_async(kBackgroundQueue, ^{
+ [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
+ block:^{
[session stopRunning];
- });
+ }];
[[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
_isRunning = NO;
}