/*
 *  Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#if !defined(__has_feature) || !__has_feature(objc_arc)
#  error "This file requires ARC support."
#endif

#import <AVFoundation/AVFoundation.h>
#ifdef WEBRTC_IOS
#  import <UIKit/UIKit.h>
#endif

#import "device_info_objc.h"
#import "rtc_video_capture_objc.h"

#include "rtc_base/logging.h"

using namespace webrtc;
using namespace webrtc::videocapturemodule;

@interface RTCVideoCaptureIosObjC (hidden)
- (BOOL)changeCaptureInputByUniqueId:(NSString*)uniqueId;
@end
31
32@implementation RTCVideoCaptureIosObjC {
33  webrtc::videocapturemodule::VideoCaptureIos* _owner;
34  webrtc::VideoCaptureCapability _capability;
35  AVCaptureSession* _captureSession;
36  BOOL _orientationHasChanged;
37  AVCaptureConnection* _connection;
38  BOOL _captureChanging;  // Guarded by _captureChangingCondition.
39  NSCondition* _captureChangingCondition;
40  dispatch_queue_t _frameQueue;
41}
42
@synthesize frameRotation = _frameRotation;

- (id)initWithOwner:(VideoCaptureIos*)owner {
  if ((self = [super init])) {
    _owner = owner;
    _captureSession = [[AVCaptureSession alloc] init];
#if defined(WEBRTC_IOS)
    _captureSession.usesApplicationAudioSession = NO;
#endif
    _captureChanging = NO;
    _captureChangingCondition = [[NSCondition alloc] init];

    if (!_captureSession || !_captureChangingCondition) {
      return nil;
    }

    // Create and configure a new output (using callbacks).
    AVCaptureVideoDataOutput* captureOutput = [[AVCaptureVideoDataOutput alloc] init];
    NSString* key = (NSString*)kCVPixelBufferPixelFormatTypeKey;

    NSNumber* val = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_422YpCbCr8];
    NSDictionary* videoSettings = [NSDictionary dictionaryWithObject:val forKey:key];
    captureOutput.videoSettings = videoSettings;

    // Add the new output.
    if ([_captureSession canAddOutput:captureOutput]) {
      [_captureSession addOutput:captureOutput];
    } else {
      RTC_LOG(LS_ERROR) << __FUNCTION__ << ": Could not add output to AVCaptureSession";
    }

#ifdef WEBRTC_IOS
    [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];

    NSNotificationCenter* notify = [NSNotificationCenter defaultCenter];
    [notify addObserver:self
               selector:@selector(onVideoError:)
                   name:AVCaptureSessionRuntimeErrorNotification
                 object:_captureSession];
    [notify addObserver:self
               selector:@selector(deviceOrientationDidChange:)
                   name:UIDeviceOrientationDidChangeNotification
                 object:nil];
#endif

    // Create a serial queue on which video capture will run. By setting the target,
    // blocks should still run on DISPATCH_QUEUE_PRIORITY_DEFAULT rather than creating
    // a new thread.
    _frameQueue = dispatch_queue_create("org.webrtc.videocapture", DISPATCH_QUEUE_SERIAL);
    dispatch_set_target_queue(
        _frameQueue, dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0));
  }

  return self;
}

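// Registers this object as the sample-buffer delegate of the current video
// output so that frames are delivered on the serial capture queue.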
- (void)directOutputToSelf {
  [[self currentOutput] setSampleBufferDelegate:self queue:_frameQueue];
}

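// Detaches the sample-buffer delegate so no further frames are delivered.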
- (void)directOutputToNil {
  [[self currentOutput] setSampleBufferDelegate:nil queue:NULL];
}

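// Called on UIDeviceOrientationDidChangeNotification; re-derives the video
// orientation of the active capture connection from the device orientation.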
- (void)deviceOrientationDidChange:(NSNotification*)notification {
  _orientationHasChanged = YES;
  [self setRelativeVideoOrientation];
}

- (void)dealloc {
  [[NSNotificationCenter defaultCenter] removeObserver:self];
}

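// Selects the capture device identified by |uniqueId|. Returns YES immediately
// if that device is already the session's input; otherwise swaps the input.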
- (BOOL)setCaptureDeviceByUniqueId:(NSString*)uniqueId {
  [self waitForCaptureChangeToFinish];
  // Check whether the requested camera is already the current input.
  if (_captureSession) {
    NSArray* currentInputs = [NSArray arrayWithArray:[_captureSession inputs]];
    if ([currentInputs count] > 0) {
      AVCaptureDeviceInput* currentInput = [currentInputs objectAtIndex:0];
      if ([uniqueId isEqualToString:currentInput.device.uniqueID]) {
        return YES;
      }
    }
  }

  return [self changeCaptureInputByUniqueId:uniqueId];
}

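// Validates |capability| against the presets the session supports and, if it is
// acceptable, starts capturing asynchronously on the capture queue.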
- (BOOL)startCaptureWithCapability:(const VideoCaptureCapability&)capability {
  [self waitForCaptureChangeToFinish];
  if (!_captureSession) {
    return NO;
  }

  // Check the requested frame rate and resolution against supported limits.
  if (capability.maxFPS < 0 || capability.maxFPS > 60) {
    return NO;
  }

  if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset1280x720]) {
    if (capability.width > 1280 || capability.height > 720) {
      return NO;
    }
  } else if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset640x480]) {
    if (capability.width > 640 || capability.height > 480) {
      return NO;
    }
  } else if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset352x288]) {
    if (capability.width > 352 || capability.height > 288) {
      return NO;
    }
  } else if (capability.width < 0 || capability.height < 0) {
    return NO;
  }

  _capability = capability;

  AVCaptureVideoDataOutput* currentOutput = [self currentOutput];
  if (!currentOutput) {
    return NO;
  }

  [self directOutputToSelf];

  _orientationHasChanged = NO;
  _captureChanging = YES;
  dispatch_async(_frameQueue, ^{
    [self startCaptureInBackgroundWithOutput:currentOutput];
  });
  return YES;
}

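// Returns the video data output that was attached to the session in the
// initializer.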
- (AVCaptureVideoDataOutput*)currentOutput {
  return [[_captureSession outputs] firstObject];
}

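// Runs on the capture queue: maps the requested capability to an
// AVCaptureSession preset, configures the session, and starts it running.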
- (void)startCaptureInBackgroundWithOutput:(AVCaptureVideoDataOutput*)currentOutput {
  NSString* captureQuality = AVCaptureSessionPresetLow;
  if (_capability.width >= 1280 || _capability.height >= 720) {
    captureQuality = AVCaptureSessionPreset1280x720;
  } else if (_capability.width >= 640 || _capability.height >= 480) {
    captureQuality = AVCaptureSessionPreset640x480;
  } else if (_capability.width >= 352 || _capability.height >= 288) {
    captureQuality = AVCaptureSessionPreset352x288;
  }

  // Begin configuration for the AVCaptureSession.
  [_captureSession beginConfiguration];

  // Picture resolution.
  [_captureSession setSessionPreset:captureQuality];

  _connection = [currentOutput connectionWithMediaType:AVMediaTypeVideo];
  [self setRelativeVideoOrientation];

  // Finished configuring, commit settings to AVCaptureSession.
  [_captureSession commitConfiguration];

  [_captureSession startRunning];
  [self signalCaptureChangeEnd];
}

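// Updates the capture connection's video orientation. On iOS this follows the
// current UIDevice orientation; elsewhere a fixed landscape orientation is used.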
- (void)setRelativeVideoOrientation {
  if (!_connection.supportsVideoOrientation) {
    return;
  }
#ifndef WEBRTC_IOS
  _connection.videoOrientation = AVCaptureVideoOrientationLandscapeRight;
  return;
#else
  switch ([UIDevice currentDevice].orientation) {
    case UIDeviceOrientationPortrait:
      _connection.videoOrientation = AVCaptureVideoOrientationPortrait;
      break;
    case UIDeviceOrientationPortraitUpsideDown:
      _connection.videoOrientation = AVCaptureVideoOrientationPortraitUpsideDown;
      break;
    case UIDeviceOrientationLandscapeLeft:
      _connection.videoOrientation = AVCaptureVideoOrientationLandscapeRight;
      break;
    case UIDeviceOrientationLandscapeRight:
      _connection.videoOrientation = AVCaptureVideoOrientationLandscapeLeft;
      break;
    case UIDeviceOrientationFaceUp:
    case UIDeviceOrientationFaceDown:
    case UIDeviceOrientationUnknown:
      if (!_orientationHasChanged) {
        _connection.videoOrientation = AVCaptureVideoOrientationPortrait;
      }
      break;
  }
#endif
}

- (void)onVideoError:(NSNotification*)notification {
  NSLog(@"onVideoError: %@", notification);
  // TODO(sjlee): Add specific error handling for this notification.
  RTC_LOG(LS_ERROR) << __FUNCTION__ << ": [AVCaptureSession startRunning] error.";
}

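// Stops the capture session and detaches the frame delegate. Waits for any
// in-flight capture change to finish before tearing down.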
- (BOOL)stopCapture {
#ifdef WEBRTC_IOS
  [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
#endif
  _orientationHasChanged = NO;
  [self waitForCaptureChangeToFinish];
  [self directOutputToNil];

  if (!_captureSession) {
    return NO;
  }

  _captureChanging = YES;
  [_captureSession stopRunning];

  dispatch_sync(_frameQueue, ^{
    [self signalCaptureChangeEnd];
  });
  return YES;
}

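// Replaces the session's current input with a device input created for the
// capture device matching |uniqueId|. Returns NO if the device cannot be found
// or the new input cannot be added.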
- (BOOL)changeCaptureInputByUniqueId:(NSString*)uniqueId {
  [self waitForCaptureChangeToFinish];
  NSArray* currentInputs = [_captureSession inputs];
  // Remove the current input.
  if ([currentInputs count] > 0) {
    AVCaptureInput* currentInput = (AVCaptureInput*)[currentInputs objectAtIndex:0];

    [_captureSession removeInput:currentInput];
  }

  // Look for the capture device with the requested unique id among the
  // available capture devices.
  int captureDeviceCount = [DeviceInfoIosObjC captureDeviceCount];
  if (captureDeviceCount <= 0) {
    return NO;
  }

  AVCaptureDevice* captureDevice = [DeviceInfoIosObjC captureDeviceForUniqueId:uniqueId];

  if (!captureDevice) {
    return NO;
  }

  // Now create a capture session input out of the AVCaptureDevice.
  NSError* deviceError = nil;
  AVCaptureDeviceInput* newCaptureInput = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice
                                                                                error:&deviceError];

  if (!newCaptureInput) {
    const char* errorMessage = [[deviceError localizedDescription] UTF8String];

    RTC_LOG(LS_ERROR) << __FUNCTION__ << ": deviceInputWithDevice error:" << errorMessage;

    return NO;
  }

  // Try to add the new capture device input to the capture session.
  [_captureSession beginConfiguration];

  BOOL addedCaptureInput = NO;
  if ([_captureSession canAddInput:newCaptureInput]) {
    [_captureSession addInput:newCaptureInput];
    addedCaptureInput = YES;
  }

  [_captureSession commitConfiguration];

  return addedCaptureInput;
}

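// AVCaptureVideoDataOutputSampleBufferDelegate callback, invoked on _frameQueue
// for every captured frame; forwards the raw UYVY pixel data to the owning
// VideoCaptureIos instance.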
- (void)captureOutput:(AVCaptureOutput*)captureOutput
    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
           fromConnection:(AVCaptureConnection*)connection {
  const int kFlags = 0;
  CVImageBufferRef videoFrame = CMSampleBufferGetImageBuffer(sampleBuffer);
  if (!videoFrame) {
    return;
  }

  if (CVPixelBufferLockBaseAddress(videoFrame, kFlags) != kCVReturnSuccess) {
    return;
  }

  uint8_t* baseAddress = (uint8_t*)CVPixelBufferGetBaseAddress(videoFrame);
  const size_t width = CVPixelBufferGetWidth(videoFrame);
  const size_t height = CVPixelBufferGetHeight(videoFrame);
  // UYVY (kCVPixelFormatType_422YpCbCr8) packs two bytes per pixel.
  const size_t frameSize = width * height * 2;

  VideoCaptureCapability tempCaptureCapability;
  tempCaptureCapability.width = width;
  tempCaptureCapability.height = height;
  tempCaptureCapability.maxFPS = _capability.maxFPS;
  tempCaptureCapability.videoType = VideoType::kUYVY;

  _owner->IncomingFrame(baseAddress, frameSize, tempCaptureCapability, 0);

  CVPixelBufferUnlockBaseAddress(videoFrame, kFlags);
}

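// Marks the pending capture change as finished and wakes any waiting thread.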
- (void)signalCaptureChangeEnd {
  [_captureChangingCondition lock];
  _captureChanging = NO;
  [_captureChangingCondition signal];
  [_captureChangingCondition unlock];
}

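// Blocks the calling thread until _captureChanging is cleared by
// signalCaptureChangeEnd.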
- (void)waitForCaptureChangeToFinish {
  [_captureChangingCondition lock];
  while (_captureChanging) {
    [_captureChangingCondition wait];
  }
  [_captureChangingCondition unlock];
}
@end