1#include "VideoCameraCapturer.h"
2
3#import <AVFoundation/AVFoundation.h>
4
5#include "rtc_base/logging.h"
6#import "base/RTCLogging.h"
7#import "base/RTCVideoFrameBuffer.h"
8#import "TGRTCCVPixelBuffer.h"
9#import "sdk/objc/native/src/objc_video_track_source.h"
10#import "sdk/objc/native/src/objc_frame_buffer.h"
11#import "api/video_track_source_proxy.h"
12
13#import "helpers/UIDevice+RTCDevice.h"
14
15#import "helpers/AVCaptureSession+DevicePosition.h"
16#import "helpers/RTCDispatcher+Private.h"
17#import "base/RTCVideoFrame.h"
18#include "DarwinVideoSource.h"
19
20#include "common_video/libyuv/include/webrtc_libyuv.h"
21#include "rtc_base/checks.h"
22#include "rtc_base/logging.h"
23#include "third_party/libyuv/include/libyuv.h"
24#include "api/video/i420_buffer.h"
25#include "api/video/nv12_buffer.h"
26
27#include "VideoCaptureView.h"
28
29namespace {
30
31static const int64_t kNanosecondsPerSecond = 1000000000;
32
33static tgcalls::DarwinVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> nativeSource) {
34    webrtc::VideoTrackSourceProxy *proxy_source =
35    static_cast<webrtc::VideoTrackSourceProxy *>(nativeSource.get());
36    return static_cast<tgcalls::DarwinVideoTrackSource *>(proxy_source->internal());
37}
38
39static UIDeviceOrientation deviceOrientation(UIInterfaceOrientation orientation) {
40    switch (orientation) {
41        case UIInterfaceOrientationPortrait:
42            return UIDeviceOrientationPortrait;
43        case UIInterfaceOrientationPortraitUpsideDown:
44            return UIDeviceOrientationPortraitUpsideDown;
45        case UIInterfaceOrientationLandscapeLeft:
46            return UIDeviceOrientationLandscapeRight;
47        case UIInterfaceOrientationLandscapeRight:
48            return UIDeviceOrientationLandscapeLeft;
49        default:
50            return UIDeviceOrientationPortrait;
51    }
52}
53
54}
55
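// Wraps a weak reference to a VideoCaptureView so the capturer can notify
// attached previews about frame size and rotation without retaining them.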
@interface VideoCameraCapturerPreviewRecord : NSObject

@property (nonatomic, weak) VideoCaptureView *view;

@end

@implementation VideoCameraCapturerPreviewRecord

- (instancetype)initWithCaptureView:(VideoCaptureView *)view {
    self = [super init];
    if (self != nil) {
        self.view = view;
    }
    return self;
}

@end

@interface VideoCameraCapturer () <AVCaptureVideoDataOutputSampleBufferDelegate> {
    rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> _source;

    // Live on main thread.
    bool _isFrontCamera;
    bool _keepLandscape;

    dispatch_queue_t _frameQueue;

    // Live on RTCDispatcherTypeCaptureSession.
    AVCaptureDevice *_currentDevice;
    BOOL _hasRetriedOnFatalError;
    BOOL _isRunning;

    // Live on RTCDispatcherTypeCaptureSession and main thread.
    std::atomic<bool> _willBeRunning;

    AVCaptureVideoDataOutput *_videoDataOutput;
    AVCaptureSession *_captureSession;
    FourCharCode _preferredOutputPixelFormat;
    FourCharCode _outputPixelFormat;
    RTCVideoRotation _rotation;
    UIDeviceOrientation _orientation;
    bool _didReceiveOrientationUpdate;
    bool _rotationLock;

    // Live on main thread.
    void (^_isActiveUpdated)(bool);
    bool _isActiveValue;
    bool _inForegroundValue;

    void (^_rotationUpdated)(int);

    // Live on frameQueue and main thread.
    std::atomic<bool> _isPaused;

    // Live on frameQueue.
    float _aspectRatio;
    std::vector<uint8_t> _croppingBuffer;
    std::weak_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> _uncroppedSink;

    // Live on frameQueue and RTCDispatcherTypeCaptureSession.
    std::atomic<int> _warmupFrameCount;

    webrtc::NV12ToI420Scaler _nv12ToI420Scaler;

    NSMutableArray<VideoCameraCapturerPreviewRecord *> *_previews;
    std::vector<std::weak_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>>> _directSinks;
}

@end

@implementation VideoCameraCapturer

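// Creates the capture session, derives the initial rotation from the current
// interface orientation, and subscribes to device orientation, capture session
// and application lifecycle notifications. Returns nil if the session cannot
// be configured.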
- (instancetype)initWithSource:(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface>)source useFrontCamera:(bool)useFrontCamera keepLandscape:(bool)keepLandscape isActiveUpdated:(void (^)(bool))isActiveUpdated rotationUpdated:(void (^)(int))rotationUpdated {
    self = [super init];
    if (self != nil) {
        _source = source;
        _isFrontCamera = useFrontCamera;
        _keepLandscape = keepLandscape;
        _isActiveValue = true;
        _inForegroundValue = true;
        _isPaused = false;
        _isActiveUpdated = [isActiveUpdated copy];
        _rotationUpdated = [rotationUpdated copy];

        _warmupFrameCount = 100;

        _previews = [[NSMutableArray alloc] init];

        if (![self setupCaptureSession:[[AVCaptureSession alloc] init]]) {
            return nil;
        }

        NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
        _orientation = deviceOrientation([[UIApplication sharedApplication] statusBarOrientation]);
        _rotation = RTCVideoRotation_90;

        switch (_orientation) {
            case UIDeviceOrientationPortrait:
                _rotation = RTCVideoRotation_90;
                break;
            case UIDeviceOrientationPortraitUpsideDown:
                _rotation = RTCVideoRotation_270;
                break;
            case UIDeviceOrientationLandscapeLeft:
                _rotation = useFrontCamera ? RTCVideoRotation_180 : RTCVideoRotation_0;
                break;
            case UIDeviceOrientationLandscapeRight:
                _rotation = useFrontCamera ? RTCVideoRotation_0 : RTCVideoRotation_180;
                break;
            case UIDeviceOrientationFaceUp:
            case UIDeviceOrientationFaceDown:
            case UIDeviceOrientationUnknown:
                // Ignore.
                break;
        }

        if (_rotationUpdated) {
            int angle = 0;
            switch (_rotation) {
                case RTCVideoRotation_0: {
                    angle = 0;
                    break;
                }
                case RTCVideoRotation_90: {
                    angle = 90;
                    break;
                }
                case RTCVideoRotation_180: {
                    angle = 180;
                    break;
                }
                case RTCVideoRotation_270: {
                    angle = 270;
                    break;
                }
                default: {
                    break;
                }
            }
            _rotationUpdated(angle);
        }
        [center addObserver:self
                   selector:@selector(deviceOrientationDidChange:)
                       name:UIDeviceOrientationDidChangeNotification
                     object:nil];
        [center addObserver:self
                   selector:@selector(handleCaptureSessionInterruption:)
                       name:AVCaptureSessionWasInterruptedNotification
                     object:_captureSession];
        [center addObserver:self
                   selector:@selector(handleCaptureSessionInterruptionEnded:)
                       name:AVCaptureSessionInterruptionEndedNotification
                     object:_captureSession];
        [center addObserver:self
                   selector:@selector(handleApplicationDidBecomeActive:)
                       name:UIApplicationDidBecomeActiveNotification
                     object:[UIApplication sharedApplication]];
        [center addObserver:self
                   selector:@selector(handleApplicationWillEnterForeground:)
                       name:UIApplicationWillEnterForegroundNotification
                     object:[UIApplication sharedApplication]];
        [center addObserver:self
                   selector:@selector(handleCaptureSessionRuntimeError:)
                       name:AVCaptureSessionRuntimeErrorNotification
                     object:_captureSession];
        [center addObserver:self
                   selector:@selector(handleCaptureSessionDidStartRunning:)
                       name:AVCaptureSessionDidStartRunningNotification
                     object:_captureSession];
        [center addObserver:self
                   selector:@selector(handleCaptureSessionDidStopRunning:)
                       name:AVCaptureSessionDidStopRunningNotification
                     object:_captureSession];
    }
    return self;
}

- (void)dealloc {
    NSAssert(!_willBeRunning, @"Session was still running in VideoCameraCapturer dealloc. Forgot to call stopCapture?");
    [[NSNotificationCenter defaultCenter] removeObserver:self];
}

+ (NSArray<AVCaptureDevice *> *)captureDevices {
    if (@available(iOS 10.0, *)) {
        AVCaptureDeviceDiscoverySession *session = [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[AVCaptureDeviceTypeBuiltInWideAngleCamera] mediaType:AVMediaTypeVideo position:AVCaptureDevicePositionUnspecified];
        return session.devices;
    } else {
        NSMutableArray<AVCaptureDevice *> *result = [[NSMutableArray alloc] init];
        for (AVCaptureDevice *device in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
            if (device.position == AVCaptureDevicePositionFront || device.position == AVCaptureDevicePositionBack) {
                [result addObject:device];
            }
        }
        return result;
    }
}

+ (NSArray<AVCaptureDeviceFormat *> *)supportedFormatsForDevice:(AVCaptureDevice *)device {
  // Support opening the device in any format. We make sure it's converted to a format we
  // can handle, if needed, in the method `-setupVideoDataOutput`.
  return device.formats;
}

- (FourCharCode)preferredOutputPixelFormat {
  return _preferredOutputPixelFormat;
}

- (void)startCaptureWithDevice:(AVCaptureDevice *)device
                        format:(AVCaptureDeviceFormat *)format
                           fps:(NSInteger)fps {
  [self startCaptureWithDevice:device format:format fps:fps completionHandler:nil];
}

- (void)stopCapture {
  _isActiveUpdated = nil;
  [self stopCaptureWithCompletionHandler:nil];
}

- (void)setIsEnabled:(bool)isEnabled {
    _isPaused = !isEnabled;
    [self updateIsActiveValue];
}

- (void)setUncroppedSink:(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>>)sink {
    dispatch_async(self.frameQueue, ^{
        _uncroppedSink = sink;
    });
}

- (int)getRotation {
    switch (_rotation) {
        case RTCVideoRotation_0:
            return 0;
        case RTCVideoRotation_90:
            return 90;
        case RTCVideoRotation_180:
            return 180;
        case RTCVideoRotation_270:
            return 270;
        default:
            return 0;
    }
}

- (void)addPreviewView:(VideoCaptureView *)previewView {
    [_previews addObject:[[VideoCameraCapturerPreviewRecord alloc] initWithCaptureView:previewView]];
    [previewView previewLayer].session = _captureSession;
}

- (void)addDirectSink:(std::weak_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>>)directSink {
    _directSinks.push_back(directSink);
}

- (void)setPreferredCaptureAspectRatio:(float)aspectRatio {
    dispatch_async(self.frameQueue, ^{
        _aspectRatio = aspectRatio;
    });
}

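// Starts capture on the RTCDispatcherTypeCaptureSession queue: locks the device
// for configuration, reconfigures the session input, applies the requested
// format and fps, and starts the session. The completion handler receives a
// non-nil error only if the device could not be locked.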
- (void)startCaptureWithDevice:(AVCaptureDevice *)device
                        format:(AVCaptureDeviceFormat *)format
                           fps:(NSInteger)fps
             completionHandler:(nullable void (^)(NSError *))completionHandler {
  _willBeRunning = true;
  [RTCDispatcher
      dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
   block:^{
      RTCLogInfo("startCaptureWithDevice %@ @ %ld fps", format, (long)fps);

      dispatch_async(dispatch_get_main_queue(), ^{
          [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
      });

      _currentDevice = device;

      NSError *error = nil;
      if (![_currentDevice lockForConfiguration:&error]) {
          RTCLogError(@"Failed to lock device %@. Error: %@",
                      _currentDevice,
                      error.userInfo);
          if (completionHandler) {
              completionHandler(error);
          }
          _willBeRunning = false;
          return;
      }
      [self reconfigureCaptureSessionInput];
      [self updateDeviceCaptureFormat:format fps:fps];
      [self updateVideoDataOutputPixelFormat:format];
      [_captureSession startRunning];
      [_currentDevice unlockForConfiguration];
      _isRunning = YES;
      if (completionHandler) {
          completionHandler(nil);
      }
  }];
}

- (void)stopCaptureWithCompletionHandler:(nullable void (^)(void))completionHandler {
  _willBeRunning = false;
  [RTCDispatcher
   dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
   block:^{
      RTCLogInfo("Stop");
      _currentDevice = nil;
      for (AVCaptureDeviceInput *oldInput in [_captureSession.inputs copy]) {
          [_captureSession removeInput:oldInput];
      }
      [_captureSession stopRunning];

      dispatch_async(dispatch_get_main_queue(), ^{
          [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
      });
      _isRunning = NO;
      if (completionHandler) {
          completionHandler();
      }
  }];
}

#pragma mark iOS notifications

#if TARGET_OS_IPHONE
- (void)deviceOrientationDidChange:(NSNotification *)notification {
    [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession block:^{
        _didReceiveOrientationUpdate = true;
        [self updateOrientation];
    }];
}
#endif

#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate

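// Converts an NV12 (bi-planar 4:2:0) CVPixelBuffer into an I420 buffer of the
// same dimensions. BGRA/ARGB input is not converted and yields null.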
- (rtc::scoped_refptr<webrtc::VideoFrameBuffer>)prepareI420Buffer:(CVPixelBufferRef)pixelBuffer {
    if (!pixelBuffer) {
        return nullptr;
    }

    const OSType pixelFormat = CVPixelBufferGetPixelFormatType(pixelBuffer);

    CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);

    rtc::scoped_refptr<webrtc::I420Buffer> resultBuffer(new rtc::RefCountedObject<webrtc::I420Buffer>((int)CVPixelBufferGetWidth(pixelBuffer), (int)CVPixelBufferGetHeight(pixelBuffer)));

    switch (pixelFormat) {
        case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange:
        case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange: {
            const uint8_t* srcY =
            static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0));
            const int srcYStride = (int)CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0);
            const uint8_t* srcUV =
            static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1));
            const int srcUVStride = (int)CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1);

            // TODO(magjed): Use a frame buffer pool.
            _nv12ToI420Scaler.NV12ToI420Scale(srcY,
                                             srcYStride,
                                             srcUV,
                                             srcUVStride,
                                             resultBuffer->width(),
                                             resultBuffer->height(),
                                             resultBuffer->MutableDataY(),
                                             resultBuffer->StrideY(),
                                             resultBuffer->MutableDataU(),
                                             resultBuffer->StrideU(),
                                             resultBuffer->MutableDataV(),
                                             resultBuffer->StrideV(),
                                             resultBuffer->width(),
                                             resultBuffer->height());
            break;
        }
        case kCVPixelFormatType_32BGRA:
        case kCVPixelFormatType_32ARGB: {
            // RGB input is not handled here; unlock before bailing out.
            CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
            return nullptr;
        }
        default: { RTC_NOTREACHED() << "Unsupported pixel format."; }
    }

    CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);

    return resultBuffer;
}

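// Downscales an NV12 CVPixelBuffer to 80% of its original dimensions into an
// NV12 buffer. Returns null for pixel formats other than NV12.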
- (rtc::scoped_refptr<webrtc::VideoFrameBuffer>)prepareNV12Buffer:(CVPixelBufferRef)pixelBuffer {
    if (!pixelBuffer) {
        return nullptr;
    }

    const OSType pixelFormat = CVPixelBufferGetPixelFormatType(pixelBuffer);

    CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);

    switch (pixelFormat) {
        case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange:
        case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange: {
            const uint8_t* srcY =
            static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0));
            const int srcYStride = (int)CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0);
            const uint8_t* srcUV =
            static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1));
            const int srcUVStride = (int)CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1);

            const int srcWidth = (int)CVPixelBufferGetWidth(pixelBuffer);
            const int srcHeight = (int)CVPixelBufferGetHeight(pixelBuffer);

            int resultWidth = (int)(srcWidth * 0.8f);
            resultWidth &= ~1;
            int resultHeight = (int)(srcHeight * 0.8f);
            resultHeight &= ~1;

            rtc::scoped_refptr<webrtc::NV12Buffer> resultBuffer = new rtc::RefCountedObject<webrtc::NV12Buffer>(resultWidth, resultHeight, srcYStride, srcUVStride);

            // Scale the full source frame down to the result dimensions.
            libyuv::NV12Scale(srcY, srcYStride, srcUV, srcUVStride,
                              srcWidth, srcHeight,
                              resultBuffer->MutableDataY(), resultBuffer->StrideY(),
                              resultBuffer->MutableDataUV(), resultBuffer->StrideUV(),
                              resultBuffer->width(), resultBuffer->height(),
                              libyuv::kFilterBilinear);

            CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);

            return resultBuffer;
        }
        case kCVPixelFormatType_32BGRA:
        case kCVPixelFormatType_32ARGB: {
            CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
            return nullptr;
        }
        default: { RTC_NOTREACHED() << "Unsupported pixel format."; }
    }

    CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);

    return nullptr;
}

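// Main capture callback: skips warm-up frames, determines the camera position
// and the rotation to apply, optionally crops to the preferred aspect ratio,
// notifies preview views on the main queue, and forwards the converted frame
// to the track source, the direct sinks and the uncropped sink.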
- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
           fromConnection:(AVCaptureConnection *)connection {
    NSParameterAssert(captureOutput == _videoDataOutput);

    int minWarmupFrameCount = 12;
    _warmupFrameCount++;
    if (_warmupFrameCount < minWarmupFrameCount) {
        return;
    }

    if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || !CMSampleBufferIsValid(sampleBuffer) ||
        !CMSampleBufferDataIsReady(sampleBuffer)) {
        return;
    }

    CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (pixelBuffer == nil) {
        return;
    }

    // Default to portrait orientation on iPhone.
    BOOL usingFrontCamera = NO;
    // Check the image's EXIF for the camera the image came from as the image could have been
    // delayed as we set alwaysDiscardsLateVideoFrames to NO.
    AVCaptureDevicePosition cameraPosition =
    [AVCaptureSession devicePositionForSampleBuffer:sampleBuffer];
    if (cameraPosition != AVCaptureDevicePositionUnspecified) {
        usingFrontCamera = AVCaptureDevicePositionFront == cameraPosition;
    } else {
        AVCaptureDeviceInput *deviceInput =
        (AVCaptureDeviceInput *)((AVCaptureInputPort *)connection.inputPorts.firstObject).input;
        usingFrontCamera = AVCaptureDevicePositionFront == deviceInput.device.position;
    }
    if (!_rotationLock) {
        RTCVideoRotation updatedRotation = _rotation;
        switch (_orientation) {
            case UIDeviceOrientationPortrait:
                updatedRotation = RTCVideoRotation_90;
                break;
            case UIDeviceOrientationPortraitUpsideDown:
                updatedRotation = RTCVideoRotation_270;
                break;
            case UIDeviceOrientationLandscapeLeft:
                updatedRotation = usingFrontCamera ? RTCVideoRotation_180 : RTCVideoRotation_0;
                break;
            case UIDeviceOrientationLandscapeRight:
                updatedRotation = usingFrontCamera ? RTCVideoRotation_0 : RTCVideoRotation_180;
                break;
            case UIDeviceOrientationFaceUp:
            case UIDeviceOrientationFaceDown:
            case UIDeviceOrientationUnknown:
                // Ignore.
                break;
        }
        if (_rotation != updatedRotation) {
            _rotation = updatedRotation;
            if (_rotationUpdated) {
                int angle = 0;
                switch (_rotation) {
                    case RTCVideoRotation_0: {
                        angle = 0;
                        break;
                    }
                    case RTCVideoRotation_90: {
                        angle = 90;
                        break;
                    }
                    case RTCVideoRotation_180: {
                        angle = 180;
                        break;
                    }
                    case RTCVideoRotation_270: {
                        angle = 270;
                        break;
                    }
                    default: {
                        break;
                    }
                }
                _rotationUpdated(angle);
            }
        }
    }

    TGRTCCVPixelBuffer *rtcPixelBuffer = [[TGRTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer];
    rtcPixelBuffer.shouldBeMirrored = usingFrontCamera;

    TGRTCCVPixelBuffer *uncroppedRtcPixelBuffer = rtcPixelBuffer;

    CGSize initialSize = CGSizeMake(uncroppedRtcPixelBuffer.width, uncroppedRtcPixelBuffer.height);

    if (_aspectRatio > FLT_EPSILON) {
        float aspect = 1.0f / _aspectRatio;

        int width = rtcPixelBuffer.width;
        int height = rtcPixelBuffer.height;

        int cropX = 0;
        int cropY = 0;

        if (_keepLandscape && width > height) {
            float aspectWidth = 404.0f;
            float aspectHeight = 720.0f;
            cropX = (int)((width - aspectWidth) / 2.0f);
            cropY = (int)((height - aspectHeight) / 2.0f);
            width = aspectWidth;
            height = aspectHeight;
        } else {
            float aspectWidth = width;
            float aspectHeight = ((float)(width)) / aspect;
            cropX = (int)((width - aspectWidth) / 2.0f);
            cropY = (int)((height - aspectHeight) / 2.0f);
            width = (int)aspectWidth;
            width &= ~1;
            height = (int)aspectHeight;
            height &= ~1;
        }

        height = MIN(rtcPixelBuffer.height, height + 16);

        if (width < rtcPixelBuffer.width || height < rtcPixelBuffer.height) {
            rtcPixelBuffer = [[TGRTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer adaptedWidth:width adaptedHeight:height cropWidth:width cropHeight:height cropX:cropX cropY:cropY];
            rtcPixelBuffer.shouldBeMirrored = usingFrontCamera;

            CVPixelBufferRef outputPixelBufferRef = NULL;
            OSType pixelFormat = CVPixelBufferGetPixelFormatType(rtcPixelBuffer.pixelBuffer);
            CVPixelBufferCreate(NULL, width, height, pixelFormat, NULL, &outputPixelBufferRef);
            if (outputPixelBufferRef) {
                int bufferSize = [rtcPixelBuffer bufferSizeForCroppingAndScalingToWidth:width height:height];
                if (_croppingBuffer.size() < bufferSize) {
                    _croppingBuffer.resize(bufferSize);
                }
                if ([rtcPixelBuffer cropAndScaleTo:outputPixelBufferRef withTempBuffer:_croppingBuffer.data()]) {
                    rtcPixelBuffer = [[TGRTCCVPixelBuffer alloc] initWithPixelBuffer:outputPixelBufferRef];
                    rtcPixelBuffer.shouldBeMirrored = usingFrontCamera;
                }
                CVPixelBufferRelease(outputPixelBufferRef);
            }
        }
    }

    int64_t timeStampNs = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) * kNanosecondsPerSecond;

    webrtc::VideoRotation rotation = static_cast<webrtc::VideoRotation>(_rotation);

    int previewRotation = 0;
    CGSize previewSize = initialSize;
    if (rotation == webrtc::kVideoRotation_90 || rotation == webrtc::kVideoRotation_270) {
        previewSize = CGSizeMake(previewSize.height, previewSize.width);
    }

    for (VideoCameraCapturerPreviewRecord *record in _previews) {
        dispatch_async(dispatch_get_main_queue(), ^{
            VideoCaptureView *captureView = record.view;
            [captureView onFrameGenerated:previewSize isMirrored:true rotation:previewRotation];
        });
    }

    auto i420Buffer = [self prepareI420Buffer:[rtcPixelBuffer pixelBuffer]];

    if (!_isPaused && i420Buffer) {
        auto videoFrame = webrtc::VideoFrame::Builder()
            .set_video_frame_buffer(i420Buffer)
            .set_rotation(rotation)
            .set_timestamp_us(timeStampNs / rtc::kNumNanosecsPerMicrosec)
            .build();

        if (getObjCVideoSource(_source)->OnCapturedFrame(videoFrame)) {
            if (!_directSinks.empty()) {
                for (const auto &it : _directSinks) {
                    if (const auto value = it.lock()) {
                        value->OnFrame(videoFrame);
                    }
                }
            }
        }

        if (uncroppedRtcPixelBuffer) {
            const auto uncroppedSink = _uncroppedSink.lock();
            if (uncroppedSink) {
                RTCVideoFrame *frame = [[RTCVideoFrame alloc] initWithBuffer:uncroppedRtcPixelBuffer rotation:_rotation timeStampNs:timeStampNs];

                const int64_t timestamp_us = frame.timeStampNs / rtc::kNumNanosecsPerMicrosec;

                rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer;
                buffer = new rtc::RefCountedObject<webrtc::ObjCFrameBuffer>(frame.buffer);

                webrtc::VideoRotation uncroppedRotation = static_cast<webrtc::VideoRotation>(frame.rotation);

                uncroppedSink->OnFrame(webrtc::VideoFrame::Builder()
                        .set_video_frame_buffer(buffer)
                        .set_rotation(uncroppedRotation)
                        .set_timestamp_us(timestamp_us)
                        .build());
            }
        }
    }
}

- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
         fromConnection:(AVCaptureConnection *)connection {
  NSString *droppedReason =
      (__bridge NSString *)CMGetAttachment(sampleBuffer, kCMSampleBufferAttachmentKey_DroppedFrameReason, nil);
  RTCLogError(@"Dropped sample buffer. Reason: %@", droppedReason);
}

#pragma mark - AVCaptureSession notifications

- (void)handleCaptureSessionInterruption:(NSNotification *)notification {
    NSString *reasonString = nil;
    NSNumber *reason = notification.userInfo[AVCaptureSessionInterruptionReasonKey];
    if (reason) {
        switch (reason.intValue) {
            case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground:
                reasonString = @"VideoDeviceNotAvailableInBackground";
                break;
            case AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient:
                reasonString = @"AudioDeviceInUseByAnotherClient";
                break;
            case AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient:
                reasonString = @"VideoDeviceInUseByAnotherClient";
                break;
            case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps:
                reasonString = @"VideoDeviceNotAvailableWithMultipleForegroundApps";
                break;
        }
    }
    RTCLog(@"Capture session interrupted: %@", reasonString);
}

- (void)handleCaptureSessionInterruptionEnded:(NSNotification *)notification {
    RTCLog(@"Capture session interruption ended.");
}

- (void)handleCaptureSessionRuntimeError:(NSNotification *)notification {
    NSError *error = [notification.userInfo objectForKey:AVCaptureSessionErrorKey];
    RTCLogError(@"Capture session runtime error: %@", error);

    [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                                 block:^{
        if (error.code == AVErrorMediaServicesWereReset) {
            [self handleNonFatalError];
        } else {
            [self handleFatalError];
        }
    }];
}

- (void)handleCaptureSessionDidStartRunning:(NSNotification *)notification {
    RTCLog(@"Capture session started.");

    [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                                 block:^{
        // If we successfully restarted after an unknown error,
        // allow future retries on fatal errors.
        _hasRetriedOnFatalError = NO;
    }];

    _inForegroundValue = true;
    [self updateIsActiveValue];
}

- (void)handleCaptureSessionDidStopRunning:(NSNotification *)notification {
    RTCLog(@"Capture session stopped.");
    _inForegroundValue = false;
    [self updateIsActiveValue];
}

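// Recomputes the "is active" state (in foreground and not paused) and invokes
// the callback only when the value actually changes.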
- (void)updateIsActiveValue {
    bool isActive = _inForegroundValue && !_isPaused;
    if (isActive != _isActiveValue) {
        _isActiveValue = isActive;
        if (_isActiveUpdated) {
            _isActiveUpdated(_isActiveValue);
        }
    }
}

- (void)handleFatalError {
    [RTCDispatcher
     dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
     block:^{
        if (!_hasRetriedOnFatalError) {
            RTCLogWarning(@"Attempting to recover from fatal capture error.");
            [self handleNonFatalError];
            _hasRetriedOnFatalError = YES;
        } else {
            RTCLogError(@"Previous fatal error recovery failed.");
        }
    }];
}

- (void)handleNonFatalError {
    [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                                 block:^{
        RTCLog(@"Restarting capture session after error.");
        if (_isRunning) {
            [_captureSession startRunning];
        }
    }];
}

#pragma mark - UIApplication notifications

- (void)handleApplicationDidBecomeActive:(NSNotification *)notification {
    [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                                 block:^{
        if (_isRunning && !_captureSession.isRunning) {
            RTCLog(@"Restarting capture session on active.");
            _warmupFrameCount = 0;
            [_captureSession startRunning];
        }
    }];
}

- (void)handleApplicationWillEnterForeground:(NSNotification *)notification {
    [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                                 block:^{
        RTCLog(@"Resetting warmup due to backgrounding.");
        _warmupFrameCount = 0;
    }];
}

#pragma mark - Private

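// Lazily creates the serial queue on which sample buffers are delivered and
// processed, targeted at the high-priority global queue.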
- (dispatch_queue_t)frameQueue {
    if (!_frameQueue) {
        _frameQueue =
        dispatch_queue_create("org.webrtc.cameravideocapturer.video", DISPATCH_QUEUE_SERIAL);
        dispatch_set_target_queue(_frameQueue,
                                  dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0));
    }
    return _frameQueue;
}

- (BOOL)setupCaptureSession:(AVCaptureSession *)captureSession {
    NSAssert(_captureSession == nil, @"Setup capture session called twice.");
    _captureSession = captureSession;
    _captureSession.sessionPreset = AVCaptureSessionPresetInputPriority;
    _captureSession.usesApplicationAudioSession = true;
    [self setupVideoDataOutput];
    // Add the output.
    if (![_captureSession canAddOutput:_videoDataOutput]) {
        RTCLogError(@"Video data output unsupported.");
        return NO;
    }
    [_captureSession addOutput:_videoDataOutput];

    return YES;
}

- (void)setupVideoDataOutput {
    NSAssert(_videoDataOutput == nil, @"Setup video data output called twice.");
    AVCaptureVideoDataOutput *videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];

    // `videoDataOutput.availableVideoCVPixelFormatTypes` returns the pixel formats supported by the
    // device with the most efficient output format first. Find the first format that we support.
    NSSet<NSNumber *> *supportedPixelFormats = [TGRTCCVPixelBuffer supportedPixelFormats];
    NSMutableOrderedSet *availablePixelFormats =
    [NSMutableOrderedSet orderedSetWithArray:videoDataOutput.availableVideoCVPixelFormatTypes];
    [availablePixelFormats intersectSet:supportedPixelFormats];
    NSNumber *pixelFormat = availablePixelFormats.firstObject;
    NSAssert(pixelFormat, @"Output device has no supported formats.");

    _preferredOutputPixelFormat = [pixelFormat unsignedIntValue];
    _outputPixelFormat = _preferredOutputPixelFormat;
    videoDataOutput.videoSettings = @{(NSString *)kCVPixelBufferPixelFormatTypeKey : pixelFormat};
    videoDataOutput.alwaysDiscardsLateVideoFrames = NO;
    [videoDataOutput setSampleBufferDelegate:self queue:self.frameQueue];
    _videoDataOutput = videoDataOutput;
}

- (void)updateVideoDataOutputPixelFormat:(AVCaptureDeviceFormat *)format {
    FourCharCode mediaSubType = CMFormatDescriptionGetMediaSubType(format.formatDescription);
    if (![[TGRTCCVPixelBuffer supportedPixelFormats] containsObject:@(mediaSubType)]) {
        mediaSubType = _preferredOutputPixelFormat;
    }

    if (mediaSubType != _outputPixelFormat) {
        _outputPixelFormat = mediaSubType;
        _videoDataOutput.videoSettings =
        @{ (NSString *)kCVPixelBufferPixelFormatTypeKey : @(mediaSubType) };
    }
}

#pragma mark - Private, called inside capture queue

- (void)updateDeviceCaptureFormat:(AVCaptureDeviceFormat *)format fps:(NSInteger)fps {
    NSAssert([RTCDispatcher isOnQueueForType:RTCDispatcherTypeCaptureSession],
             @"updateDeviceCaptureFormat must be called on the capture queue.");
    @try {
        _currentDevice.activeFormat = format;
        _currentDevice.activeVideoMinFrameDuration = CMTimeMake(1, (int32_t)fps);
    } @catch (NSException *exception) {
        RTCLogError(@"Failed to set active format!\n User info:%@", exception.userInfo);
        return;
    }
}

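// Replaces the session's current inputs with a new input for _currentDevice.
// Must be called on the capture session queue.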
- (void)reconfigureCaptureSessionInput {
    NSAssert([RTCDispatcher isOnQueueForType:RTCDispatcherTypeCaptureSession],
             @"reconfigureCaptureSessionInput must be called on the capture queue.");
    NSError *error = nil;
    AVCaptureDeviceInput *input =
    [AVCaptureDeviceInput deviceInputWithDevice:_currentDevice error:&error];
    if (!input) {
        RTCLogError(@"Failed to create camera input: %@", error.localizedDescription);
        return;
    }
    [_captureSession beginConfiguration];
    for (AVCaptureDeviceInput *oldInput in [_captureSession.inputs copy]) {
        [_captureSession removeInput:oldInput];
    }
    if ([_captureSession canAddInput:input]) {
        [_captureSession addInput:input];
    } else {
        RTCLogError(@"Cannot add camera as an input to the session.");
    }
    [_captureSession commitConfiguration];
}

- (void)updateOrientation {
    NSAssert([RTCDispatcher isOnQueueForType:RTCDispatcherTypeCaptureSession],
             @"updateOrientation must be called on the capture queue.");
    if (_didReceiveOrientationUpdate) {
        _orientation = [UIDevice currentDevice].orientation;
    }
}

@end