/*
 *  cap_avfoundation.mm
 *  For iOS video I/O
 *  by Xiaochao Yang on 06/15/11, modified from
 *  cap_qtkit.mm by Nicholas Butko for the Mac OS version.
 *  Copyright 2011. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice,
 *    this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright notice,
 *    this list of conditions and the following disclaimer in the documentation
 *    and/or other materials provided with the distribution.
 * 3. The name of the author may not be used to endorse or promote products
 *    derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
 * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 */

#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"

#include "precomp.hpp"
#include "opencv2/imgproc.hpp"
#include "cap_interface.hpp"
#include <iostream>
#include <Availability.h>
#import <AVFoundation/AVFoundation.h>
#import <Foundation/NSException.h>

#define CV_CAP_MODE_BGR CV_FOURCC_MACRO('B','G','R','3')
#define CV_CAP_MODE_RGB CV_FOURCC_MACRO('R','G','B','3')
#define CV_CAP_MODE_GRAY CV_FOURCC_MACRO('G','R','E','Y')
#define CV_CAP_MODE_YUYV CV_FOURCC_MACRO('Y','U','Y','V')
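
// Hedged usage sketch: for file capture these modes are selected through
// CAP_PROP_FOURCC (handled in CvCaptureFile::setProperty below). The file
// name is hypothetical.
//
//     cv::VideoCapture cap("movie.mov", cv::CAP_AVFOUNDATION);
//     cap.set(cv::CAP_PROP_FOURCC, cv::VideoWriter::fourcc('G','R','E','Y'));
//     cv::Mat frame;
//     cap.read(frame);   // frames should now arrive as single-channel CV_8UC1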

/********************** Declaration of class headers ************************/

/*****************************************************************************
 *
 * CaptureDelegate Declaration.
 *
 * CaptureDelegate is notified on a separate thread by the OS whenever there
 *   is a new frame. When "updateImage" is called from the main thread, it
 *   copies this new frame into an IplImage, but only if this frame has not
 *   been copied before. When "getOutput" is called from the main thread,
 *   it gives the last copied IplImage.
 *
 *****************************************************************************/
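
// Hedged sketch of the intended call order around this delegate (the names
// are the ones declared below; the run-loop pumping happens in
// CvCaptureCAM::grabFrame):
//
//     camera thread (AVFoundation): captureOutput:didOutputSampleBuffer:fromConnection:
//                                   -> retains the newest frame, sets newFrame = 1
//     main thread:                  [capture updateImage] -> converts it to BGR IplImages
//                                   [capture getOutput]   -> returns the rotated BGR frame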

#define DISABLE_AUTO_RESTART 999

@interface CaptureDelegate : NSObject <AVCaptureVideoDataOutputSampleBufferDelegate>
{
    int newFrame;
    CVImageBufferRef  mCurrentImageBuffer;
    char* imagedata;
    IplImage* image;
    char* bgr_imagedata;
    IplImage* bgr_image;
    IplImage* bgr_image_r90;
    size_t currSize;
}

- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection;


- (int)updateImage;
- (IplImage*)getOutput;

@end

/*****************************************************************************
 *
 * CvCaptureCAM Declaration.
 *
 * CvCaptureCAM is the instantiation of a capture source for cameras.
 *
 *****************************************************************************/

class CvCaptureCAM : public CvCapture {
    public:
        CvCaptureCAM(int cameraNum = -1);
        ~CvCaptureCAM();
        bool grabFrame() CV_OVERRIDE;
        IplImage* retrieveFrame(int) CV_OVERRIDE;
        double getProperty(int property_id) const CV_OVERRIDE;
        bool setProperty(int property_id, double value) CV_OVERRIDE;
        int getCaptureDomain() /*const*/ CV_OVERRIDE { return cv::CAP_AVFOUNDATION; }

        virtual IplImage* queryFrame();
        virtual int didStart();
    private:
        AVCaptureSession            *mCaptureSession;
        AVCaptureDeviceInput        *mCaptureDeviceInput;
        AVCaptureVideoDataOutput    *mCaptureDecompressedVideoOutput;
        AVCaptureDevice             *mCaptureDevice;
        CaptureDelegate             *capture;

        int startCaptureDevice(int cameraNum);
        void stopCaptureDevice();

        void setWidthHeight();
        bool grabFrame(double timeOut);

        int camNum;
        int width;
        int height;
        int settingWidth;
        int settingHeight;
        int started;
        int disableAutoRestart;
};


/*****************************************************************************
 *
 * CvCaptureFile Declaration.
 *
 * CvCaptureFile is the instantiation of a capture source for video files.
 *
 *****************************************************************************/

class CvCaptureFile : public CvCapture {
public:
    CvCaptureFile(const char* filename);
    ~CvCaptureFile();
    bool grabFrame() CV_OVERRIDE;
    IplImage* retrieveFrame(int) CV_OVERRIDE;
    double getProperty(int property_id) const CV_OVERRIDE;
    bool setProperty(int property_id, double value) CV_OVERRIDE;
    int getCaptureDomain() /*const*/ CV_OVERRIDE { return cv::CAP_AVFOUNDATION; }

    virtual int didStart();
private:
    AVAsset                  *mAsset;
    AVAssetTrack             *mAssetTrack;
    AVAssetReader            *mAssetReader;
    AVAssetReaderTrackOutput *mTrackOutput;

    CMSampleBufferRef mCurrentSampleBuffer;
    CVImageBufferRef  mGrabbedPixels;
    IplImage *mDeviceImage;
    uint8_t  *mOutImagedata;
    IplImage *mOutImage;
    size_t    currSize;
    uint32_t  mMode;
    int       mFormat;

    bool setupReadingAt(CMTime position);
    IplImage* retrieveFramePixelBuffer();

    CMTime mFrameTimestamp;
    size_t mFrameNum;

    int started;
};


/*****************************************************************************
 *
 * CvVideoWriter_AVFoundation Declaration.
 *
 * CvVideoWriter_AVFoundation is the instantiation of a video output class
 *   for video files.
 *
 *****************************************************************************/

class CvVideoWriter_AVFoundation : public CvVideoWriter {
    public:
        CvVideoWriter_AVFoundation(const char* filename, int fourcc,
                double fps, CvSize frame_size,
                int is_color=1);
        ~CvVideoWriter_AVFoundation();
        bool writeFrame(const IplImage* image) CV_OVERRIDE;
        int getCaptureDomain() const CV_OVERRIDE { return cv::CAP_AVFOUNDATION; }
    private:
        IplImage* argbimage;

        AVAssetWriter *mMovieWriter;
        AVAssetWriterInput* mMovieWriterInput;
        AVAssetWriterInputPixelBufferAdaptor* mMovieWriterAdaptor;

        NSString* path;
        NSString* codec;
        NSString* fileType;
        double movieFPS;
        CvSize movieSize;
        int movieColor;
        unsigned long frameCount;
};


/****************** Implementation of interface functions ********************/


cv::Ptr<cv::IVideoCapture> cv::create_AVFoundation_capture_file(const std::string &filename)
{
    CvCaptureFile *retval = new CvCaptureFile(filename.c_str());
    if (retval->didStart())
        return cv::makePtr<cv::LegacyCapture>(retval);
    delete retval;
    return NULL;
}

cv::Ptr<cv::IVideoCapture> cv::create_AVFoundation_capture_cam(int index)
{
    CvCaptureCAM* retval = new CvCaptureCAM(index);
    if (retval->didStart())
        return cv::makePtr<cv::LegacyCapture>(retval);
    delete retval;
    return NULL;
}

cv::Ptr<cv::IVideoWriter> cv::create_AVFoundation_writer(const std::string& filename, int fourcc,
                                                         double fps, const cv::Size &frameSize,
                                                         const cv::VideoWriterParameters& params)
{
    CvSize sz = { frameSize.width, frameSize.height };
    const bool isColor = params.get(VIDEOWRITER_PROP_IS_COLOR, true);
    CvVideoWriter_AVFoundation* wrt = new CvVideoWriter_AVFoundation(filename.c_str(), fourcc, fps, sz, isColor);
    return cv::makePtr<cv::LegacyWriter>(wrt);
}
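
// Hedged usage sketch: these factories are not called directly by user code;
// they are reached through the public videoio API when the AVFoundation
// backend is selected. The file names below are hypothetical.
//
//     cv::VideoCapture cam(0, cv::CAP_AVFOUNDATION);          // -> create_AVFoundation_capture_cam
//     cv::VideoCapture file("in.mov", cv::CAP_AVFOUNDATION);  // -> create_AVFoundation_capture_file
//     cv::VideoWriter out("out.mp4", cv::CAP_AVFOUNDATION,
//                         cv::VideoWriter::fourcc('a','v','c','1'),
//                         30.0, cv::Size(640, 480));          // -> create_AVFoundation_writer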

/********************** Implementation of Classes ****************************/
/*****************************************************************************
 *
 * CvCaptureCAM Implementation.
 *
 * CvCaptureCAM is the instantiation of a capture source for cameras.
 *
 *****************************************************************************/

CvCaptureCAM::CvCaptureCAM(int cameraNum) {
    mCaptureSession = nil;
    mCaptureDeviceInput = nil;
    mCaptureDecompressedVideoOutput = nil;
    capture = nil;

    width = 0;
    height = 0;
    settingWidth = 0;
    settingHeight = 0;
    disableAutoRestart = 0;

    camNum = cameraNum;

    if (!startCaptureDevice(camNum)) {
        std::cout << "Warning, camera failed to properly initialize!" << std::endl;
        started = 0;
    } else {
        started = 1;
    }
}

CvCaptureCAM::~CvCaptureCAM() {
    stopCaptureDevice();
    //cout << "Cleaned up camera." << endl;
}

int CvCaptureCAM::didStart() {
    return started;
}


bool CvCaptureCAM::grabFrame() {
    return grabFrame(5);
}

bool CvCaptureCAM::grabFrame(double timeOut) {

    NSAutoreleasePool* localpool = [[NSAutoreleasePool alloc] init];
    double sleepTime = 0.005;
    double total = 0;

    NSDate *loopUntil = [NSDate dateWithTimeIntervalSinceNow:sleepTime];
    while (![capture updateImage] && (total += sleepTime) <= timeOut &&
            [[NSRunLoop currentRunLoop] runMode: NSDefaultRunLoopMode
            beforeDate:loopUntil])
        loopUntil = [NSDate dateWithTimeIntervalSinceNow:sleepTime];

    [localpool drain];

    return total <= timeOut;
}
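
// Note on grabFrame(timeOut): updateImage is polled while the current run
// loop is pumped so the delegate callback can fire; `total` accumulates
// sleepTime (seconds) until a frame arrives or timeOut is exceeded.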

IplImage* CvCaptureCAM::retrieveFrame(int) {
    return [capture getOutput];
}

IplImage* CvCaptureCAM::queryFrame() {
    while (!grabFrame()) {
        std::cout << "WARNING: Couldn't grab new frame from camera!!!" << std::endl;
        /*
             cout << "Attempting to restart camera; set capture property DISABLE_AUTO_RESTART to disable." << endl;
             stopCaptureDevice();
             startCaptureDevice(camNum);
         */
    }
    return retrieveFrame(0);
}

void CvCaptureCAM::stopCaptureDevice() {
    NSAutoreleasePool* localpool = [[NSAutoreleasePool alloc] init];

    [mCaptureSession stopRunning];

    [mCaptureSession release];
    [mCaptureDeviceInput release];

    [mCaptureDecompressedVideoOutput release];
    [capture release];
    [localpool drain];
}

int CvCaptureCAM::startCaptureDevice(int cameraNum) {
    NSAutoreleasePool* localpool = [[NSAutoreleasePool alloc] init];

    capture = [[CaptureDelegate alloc] init];

    AVCaptureDevice *device;
    NSArray* devices = [[AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]
            arrayByAddingObjectsFromArray:[AVCaptureDevice devicesWithMediaType:AVMediaTypeMuxed]];
    if ([devices count] == 0) {
        std::cout << "AV Foundation didn't find any attached Video Input Devices!" << std::endl;
        [localpool drain];
        return 0;
    }

    if (cameraNum >= 0) {
        camNum = cameraNum % [devices count];
        if (camNum != cameraNum) {
            std::cout << "Warning: Max Camera Num is " << [devices count]-1 << "; Using camera " << camNum << std::endl;
        }
        device = [devices objectAtIndex:camNum];
    } else {
        device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    }
    mCaptureDevice = device;
    //int success;
    NSError* error;

    if (device) {

        mCaptureDeviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:device error:&error];
        mCaptureSession = [[AVCaptureSession alloc] init];

        /*
             success = [mCaptureSession addInput:mCaptureDeviceInput];

             if (!success) {
             cout << "AV Foundation failed to start capture session with opened Capture Device" << endl;
             [localpool drain];
             return 0;
             }
         */

        mCaptureDecompressedVideoOutput = [[AVCaptureVideoDataOutput alloc] init];

        dispatch_queue_t queue = dispatch_queue_create("cameraQueue", NULL);
        [mCaptureDecompressedVideoOutput setSampleBufferDelegate:capture queue:queue];
        dispatch_release(queue);


        NSDictionary *pixelBufferOptions;
        if (width > 0 && height > 0) {
            pixelBufferOptions = [NSDictionary dictionaryWithObjectsAndKeys:
                [NSNumber numberWithDouble:1.0*width], (id)kCVPixelBufferWidthKey,
                [NSNumber numberWithDouble:1.0*height], (id)kCVPixelBufferHeightKey,
                [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA],
                (id)kCVPixelBufferPixelFormatTypeKey,
                nil];
        } else {
            pixelBufferOptions = [NSDictionary dictionaryWithObjectsAndKeys:
                [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA],
                (id)kCVPixelBufferPixelFormatTypeKey,
                nil];
        }

        //TODO: add new interface for setting fps and capturing resolution.
        [mCaptureDecompressedVideoOutput setVideoSettings:pixelBufferOptions];
        mCaptureDecompressedVideoOutput.alwaysDiscardsLateVideoFrames = YES;

#if (TARGET_OS_IPHONE || TARGET_IPHONE_SIMULATOR) && (!defined(TARGET_OS_MACCATALYST) || !TARGET_OS_MACCATALYST)
        mCaptureDecompressedVideoOutput.minFrameDuration = CMTimeMake(1, 30);
#endif

        //Slow. 1280*720 for iPhone4, iPod back camera. 640*480 for front camera
        //mCaptureSession.sessionPreset = AVCaptureSessionPresetHigh; // fps ~= 5 slow for OpenCV

        mCaptureSession.sessionPreset = AVCaptureSessionPresetMedium; //480*360
        if (width == 0) width = 480;
        if (height == 0) height = 360;

        [mCaptureSession addInput:mCaptureDeviceInput];
        [mCaptureSession addOutput:mCaptureDecompressedVideoOutput];

        /*
        // Does not work! This is the preferred way (hardware acceleration) to change pixel buffer orientation.
        // I'm now using cvtranspose and cvflip instead, which takes cpu cycles.
        AVCaptureConnection *connection = [[mCaptureDecompressedVideoOutput connections] objectAtIndex:0];
        if([connection isVideoOrientationSupported]) {
            //NSLog(@"Setting pixel buffer orientation");
            connection.videoOrientation = AVCaptureVideoOrientationPortrait;
        }
        */

        [mCaptureSession startRunning];

        grabFrame(60);
        [localpool drain];
        return 1;
    }

    [localpool drain];
    return 0;
}

void CvCaptureCAM::setWidthHeight() {
    NSAutoreleasePool* localpool = [[NSAutoreleasePool alloc] init];
    NSDictionary* pixelBufferOptions = [NSDictionary dictionaryWithObjectsAndKeys:
        [NSNumber numberWithDouble:1.0*width], (id)kCVPixelBufferWidthKey,
        [NSNumber numberWithDouble:1.0*height], (id)kCVPixelBufferHeightKey,
        [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA],
        (id)kCVPixelBufferPixelFormatTypeKey,
        nil];

    [mCaptureDecompressedVideoOutput setVideoSettings:pixelBufferOptions];
    grabFrame(60);
    [localpool drain];
}

// These macros were added to the headers in videoio_c.h:
/*
#define CV_CAP_PROP_IOS_DEVICE_FOCUS 9001
#define CV_CAP_PROP_IOS_DEVICE_EXPOSURE 9002
#define CV_CAP_PROP_IOS_DEVICE_FLASH 9003
#define CV_CAP_PROP_IOS_DEVICE_WHITEBALANCE 9004
#define CV_CAP_PROP_IOS_DEVICE_TORCH 9005
*/
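
// Hedged usage sketch of these properties (assuming the macros above are
// visible to user code, per that comment); the values map onto the
// AVCapture*Mode enums quoted below:
//
//     cv::VideoCapture cam(0, cv::CAP_AVFOUNDATION);
//     cam.set(CV_CAP_PROP_IOS_DEVICE_FOCUS, AVCaptureFocusModeContinuousAutoFocus);
//     cam.set(CV_CAP_PROP_IOS_DEVICE_TORCH, AVCaptureTorchModeOn);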


/*
// All available settings are taken from iOS API

enum {
   AVCaptureFlashModeOff    = 0,
   AVCaptureFlashModeOn     = 1,
   AVCaptureFlashModeAuto   = 2
};
typedef NSInteger AVCaptureFlashMode;

enum {
   AVCaptureTorchModeOff    = 0,
   AVCaptureTorchModeOn     = 1,
   AVCaptureTorchModeAuto   = 2
};
typedef NSInteger AVCaptureTorchMode;

enum {
   AVCaptureFocusModeLocked                = 0,
   AVCaptureFocusModeAutoFocus             = 1,
   AVCaptureFocusModeContinuousAutoFocus   = 2,
};
typedef NSInteger AVCaptureFocusMode;

enum {
   AVCaptureExposureModeLocked                    = 0,
   AVCaptureExposureModeAutoExpose                = 1,
   AVCaptureExposureModeContinuousAutoExposure    = 2,
};
typedef NSInteger AVCaptureExposureMode;

enum {
   AVCaptureWhiteBalanceModeLocked             = 0,
   AVCaptureWhiteBalanceModeAutoWhiteBalance   = 1,
   AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance = 2,
};
typedef NSInteger AVCaptureWhiteBalanceMode;
*/

double CvCaptureCAM::getProperty(int property_id) const {
    NSAutoreleasePool* localpool = [[NSAutoreleasePool alloc] init];

    /*
         NSArray* connections = [mCaptureDeviceInput connections];
         QTFormatDescription* format = [[connections objectAtIndex:0] formatDescription];
         NSSize s1 = [[format attributeForKey:QTFormatDescriptionVideoCleanApertureDisplaySizeAttribute] sizeValue];
     */

    NSArray* ports = mCaptureDeviceInput.ports;
    CMFormatDescriptionRef format = [[ports objectAtIndex:0] formatDescription];
    CGSize s1 = CMVideoFormatDescriptionGetPresentationDimensions(format, YES, YES);

    int w = (int)s1.width, h = (int)s1.height;

    [localpool drain];

    switch (property_id) {
        case CV_CAP_PROP_FRAME_WIDTH:
            return w;
        case CV_CAP_PROP_FRAME_HEIGHT:
            return h;

        case CV_CAP_PROP_IOS_DEVICE_FOCUS:
            return mCaptureDevice.focusMode;
        case CV_CAP_PROP_IOS_DEVICE_EXPOSURE:
            return mCaptureDevice.exposureMode;
        case CV_CAP_PROP_IOS_DEVICE_FLASH:
            return mCaptureDevice.flashMode;
        case CV_CAP_PROP_IOS_DEVICE_WHITEBALANCE:
            return mCaptureDevice.whiteBalanceMode;
        case CV_CAP_PROP_IOS_DEVICE_TORCH:
            return mCaptureDevice.torchMode;

        default:
            return 0;
    }
}

bool CvCaptureCAM::setProperty(int property_id, double value) {
    switch (property_id) {
        case CV_CAP_PROP_FRAME_WIDTH:
            width = value;
            settingWidth = 1;
            if (settingWidth && settingHeight) {
                setWidthHeight();
                settingWidth = 0;
                settingHeight = 0;
            }
            return true;

        case CV_CAP_PROP_FRAME_HEIGHT:
            height = value;
            settingHeight = 1;
            if (settingWidth && settingHeight) {
                setWidthHeight();
                settingWidth = 0;
                settingHeight = 0;
            }
            return true;

        case CV_CAP_PROP_IOS_DEVICE_FOCUS:
            if ([mCaptureDevice isFocusModeSupported:(AVCaptureFocusMode)value]) {
                NSError* error = nil;
                [mCaptureDevice lockForConfiguration:&error];
                if (error) return false;
                [mCaptureDevice setFocusMode:(AVCaptureFocusMode)value];
                [mCaptureDevice unlockForConfiguration];
                //NSLog(@"Focus set");
                return true;
            } else {
                return false;
            }

        case CV_CAP_PROP_IOS_DEVICE_EXPOSURE:
            if ([mCaptureDevice isExposureModeSupported:(AVCaptureExposureMode)value]) {
                NSError* error = nil;
                [mCaptureDevice lockForConfiguration:&error];
                if (error) return false;
                [mCaptureDevice setExposureMode:(AVCaptureExposureMode)value];
                [mCaptureDevice unlockForConfiguration];
                //NSLog(@"Exposure set");
                return true;
            } else {
                return false;
            }

        case CV_CAP_PROP_IOS_DEVICE_FLASH:
            if ([mCaptureDevice hasFlash] && [mCaptureDevice isFlashModeSupported:(AVCaptureFlashMode)value]) {
                NSError* error = nil;
                [mCaptureDevice lockForConfiguration:&error];
                if (error) return false;
                [mCaptureDevice setFlashMode:(AVCaptureFlashMode)value];
                [mCaptureDevice unlockForConfiguration];
                //NSLog(@"Flash mode set");
                return true;
            } else {
                return false;
            }

        case CV_CAP_PROP_IOS_DEVICE_WHITEBALANCE:
            if ([mCaptureDevice isWhiteBalanceModeSupported:(AVCaptureWhiteBalanceMode)value]) {
                NSError* error = nil;
                [mCaptureDevice lockForConfiguration:&error];
                if (error) return false;
                [mCaptureDevice setWhiteBalanceMode:(AVCaptureWhiteBalanceMode)value];
                [mCaptureDevice unlockForConfiguration];
                //NSLog(@"White balance set");
                return true;
            } else {
                return false;
            }

        case CV_CAP_PROP_IOS_DEVICE_TORCH:
            // Torch availability is reported by hasTorch (hasFlash covers the still-image flash).
            if ([mCaptureDevice hasTorch] && [mCaptureDevice isTorchModeSupported:(AVCaptureTorchMode)value]) {
                NSError* error = nil;
                [mCaptureDevice lockForConfiguration:&error];
                if (error) return false;
                [mCaptureDevice setTorchMode:(AVCaptureTorchMode)value];
                [mCaptureDevice unlockForConfiguration];
                //NSLog(@"Torch mode set");
                return true;
            } else {
                return false;
            }

        case DISABLE_AUTO_RESTART:
            disableAutoRestart = value;
            return true;
        default:
            return false;
    }
}


/*****************************************************************************
 *
 * CaptureDelegate Implementation.
 *
 * CaptureDelegate is notified on a separate thread by the OS whenever there
 *   is a new frame. When "updateImage" is called from the main thread, it
 *   copies this new frame into an IplImage, but only if this frame has not
 *   been copied before. When "getOutput" is called from the main thread,
 *   it gives the last copied IplImage.
 *
 *****************************************************************************/


@implementation CaptureDelegate

- (id)init {
    self = [super init];
    if (self) {
        newFrame = 0;
        imagedata = NULL;
        bgr_imagedata = NULL;
        currSize = 0;
        image = NULL;
        bgr_image = NULL;
        bgr_image_r90 = NULL;
    }
    return self;
}


- (void)dealloc {
    if (imagedata != NULL) free(imagedata);
    if (bgr_imagedata != NULL) free(bgr_imagedata);
    cvReleaseImage(&image);
    cvReleaseImage(&bgr_image);
    cvReleaseImage(&bgr_image_r90);
    [super dealloc];
}



- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection {

    // Failed
    // connection.videoOrientation = AVCaptureVideoOrientationPortrait;
    (void)captureOutput;
    (void)connection;

    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);

    CVBufferRetain(imageBuffer);
    CVImageBufferRef imageBufferToRelease = mCurrentImageBuffer;

    @synchronized (self) {
        mCurrentImageBuffer = imageBuffer;
        newFrame = 1;
    }

    CVBufferRelease(imageBufferToRelease);
}


- (IplImage*)getOutput {
    //return bgr_image;
    return bgr_image_r90;
}

- (int)updateImage {
    if (newFrame == 0) return 0;
    CVPixelBufferRef pixels;

    @synchronized (self) {
        pixels = CVBufferRetain(mCurrentImageBuffer);
        newFrame = 0;
    }

    CVPixelBufferLockBaseAddress(pixels, 0);
    uint32_t* baseaddress = (uint32_t*)CVPixelBufferGetBaseAddress(pixels);

    size_t width = CVPixelBufferGetWidth(pixels);
    size_t height = CVPixelBufferGetHeight(pixels);
    size_t rowBytes = CVPixelBufferGetBytesPerRow(pixels);

    if (rowBytes != 0) {

        if (currSize != rowBytes*height*sizeof(char)) {
            currSize = rowBytes*height*sizeof(char);
            if (imagedata != NULL) free(imagedata);
            if (bgr_imagedata != NULL) free(bgr_imagedata);
            imagedata = (char*)malloc(currSize);
            bgr_imagedata = (char*)malloc(currSize);
        }

        memcpy(imagedata, baseaddress, currSize);

        if (image == NULL) {
            image = cvCreateImageHeader(cvSize((int)width,(int)height), IPL_DEPTH_8U, 4);
        }
        image->width = (int)width;
        image->height = (int)height;
        image->nChannels = 4;
        image->depth = IPL_DEPTH_8U;
        image->widthStep = (int)rowBytes;
        image->imageData = imagedata;
        image->imageSize = (int)currSize;

        if (bgr_image == NULL) {
            bgr_image = cvCreateImageHeader(cvSize((int)width,(int)height), IPL_DEPTH_8U, 3);
        }
        bgr_image->width = (int)width;
        bgr_image->height = (int)height;
        bgr_image->nChannels = 3;
        bgr_image->depth = IPL_DEPTH_8U;
        bgr_image->widthStep = (int)rowBytes;
        bgr_image->imageData = bgr_imagedata;
        bgr_image->imageSize = (int)currSize;

        cvCvtColor(image, bgr_image, CV_BGRA2BGR);

        // The image taken from the buffer is incorrectly rotated. I'm using cvTranspose + cvFlip.
        // There should be an option in the iOS API to rotate the buffer output orientation.
        // iOS provides hardware-accelerated rotation through the AVCaptureConnection class,
        // but I can't get it to work.
        if (bgr_image_r90 == NULL) {
            bgr_image_r90 = cvCreateImage(cvSize((int)height, (int)width), IPL_DEPTH_8U, 3);
        }
        cvTranspose(bgr_image, bgr_image_r90);
        cvFlip(bgr_image_r90, NULL, 1);
    }

    CVPixelBufferUnlockBaseAddress(pixels, 0);
    CVBufferRelease(pixels);

    return 1;
}

@end


/*****************************************************************************
 *
 * CvCaptureFile Implementation.
 *
 * CvCaptureFile is the instantiation of a capture source for video files.
 *
 *****************************************************************************/

CvCaptureFile::CvCaptureFile(const char* filename) {
    NSAutoreleasePool *localpool = [[NSAutoreleasePool alloc] init];

    mAsset = nil;
    mAssetTrack = nil;
    mAssetReader = nil;
    mTrackOutput = nil;
    mDeviceImage = NULL;
    mOutImage = NULL;
    mOutImagedata = NULL;
    currSize = 0;
    mMode = CV_CAP_MODE_BGR;
    mFormat = CV_8UC3;
    mCurrentSampleBuffer = NULL;
    mGrabbedPixels = NULL;
    mFrameTimestamp = kCMTimeZero;
    mFrameNum = 0;

    started = 0;

    mAsset = [[AVAsset assetWithURL:[NSURL fileURLWithPath: @(filename)]] retain];

    if ( mAsset == nil ) {
        fprintf(stderr, "OpenCV: Couldn't read movie file \"%s\"\n", filename);
        [localpool drain];
        started = 0;
        return;
    }

    NSArray *tracks = [mAsset tracksWithMediaType:AVMediaTypeVideo];
    if ([tracks count] == 0) {
        fprintf(stderr, "OpenCV: Couldn't read video stream from file \"%s\"\n", filename);
        [localpool drain];
        started = 0;
        return;
    }

    mAssetTrack = [tracks[0] retain];

    if ( ! setupReadingAt(kCMTimeZero) ) {
        fprintf(stderr, "OpenCV: Couldn't read movie file \"%s\"\n", filename);
        [localpool drain];
        started = 0;
        return;
    }

    started = 1;
    [localpool drain];
}

CvCaptureFile::~CvCaptureFile() {
    NSAutoreleasePool *localpool = [[NSAutoreleasePool alloc] init];

    free(mOutImagedata);
    cvReleaseImage(&mOutImage);
    cvReleaseImage(&mDeviceImage);
    [mAssetReader release];
    [mTrackOutput release];
    [mAssetTrack release];
    [mAsset release];
    CVBufferRelease(mGrabbedPixels);
    if ( mCurrentSampleBuffer ) {
        CFRelease(mCurrentSampleBuffer);
    }

    [localpool drain];
}

bool CvCaptureFile::setupReadingAt(CMTime position) {
    if (mAssetReader) {
        if (mAssetReader.status == AVAssetReaderStatusReading) {
            [mAssetReader cancelReading];
        }
        [mAssetReader release];
        mAssetReader = nil;
    }
    if (mTrackOutput) {
        [mTrackOutput release];
        mTrackOutput = nil;
    }

    // Capture in a pixel format that can be converted efficiently to the output mode.
    OSType pixelFormat;
    if (mMode == CV_CAP_MODE_BGR || mMode == CV_CAP_MODE_RGB) {
        pixelFormat = kCVPixelFormatType_32BGRA;
        mFormat = CV_8UC3;
    } else if (mMode == CV_CAP_MODE_GRAY) {
        pixelFormat = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange;
        mFormat = CV_8UC1;
    } else if (mMode == CV_CAP_MODE_YUYV) {
        pixelFormat = kCVPixelFormatType_422YpCbCr8;
        mFormat = CV_8UC2;
    } else {
        fprintf(stderr, "VIDEOIO ERROR: AVF iOS: Unsupported mode: %u\n", mMode);
        return false;
    }

    NSDictionary *settings =
    @{
      (id)kCVPixelBufferPixelFormatTypeKey: @(pixelFormat)
      };
    mTrackOutput = [[AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack: mAssetTrack
                                                               outputSettings: settings] retain];

    if ( !mTrackOutput ) {
        fprintf(stderr, "OpenCV: error in [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:outputSettings:]\n");
        return false;
    }

    NSError *error = nil;
    mAssetReader = [[AVAssetReader assetReaderWithAsset: mAsset
                                                  error: &error] retain];
    if ( error ) {
        fprintf(stderr, "OpenCV: error in [AVAssetReader assetReaderWithAsset:error:]\n");
        NSLog(@"OpenCV: %@", error.localizedDescription);
        return false;
    }

    mAssetReader.timeRange = CMTimeRangeMake(position, kCMTimePositiveInfinity);
    mFrameTimestamp = position;
    mFrameNum = round((mFrameTimestamp.value * mAssetTrack.nominalFrameRate) / double(mFrameTimestamp.timescale));
    [mAssetReader addOutput: mTrackOutput];
    return [mAssetReader startReading];
}
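
// Example of the seek arithmetic above (values hypothetical): for a track
// with nominalFrameRate = 30 fps, seeking to position = {value: 1200,
// timescale: 600}, i.e. 2.0 s, gives mFrameNum = round(1200 * 30 / 600.0) = 60.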

int CvCaptureFile::didStart() {
    return started;
}

bool CvCaptureFile::grabFrame() {
    NSAutoreleasePool *localpool = [[NSAutoreleasePool alloc] init];

    CVBufferRelease(mGrabbedPixels);
    if ( mCurrentSampleBuffer ) {
        CFRelease(mCurrentSampleBuffer);
    }
    mCurrentSampleBuffer = [mTrackOutput copyNextSampleBuffer];
    mGrabbedPixels = CMSampleBufferGetImageBuffer(mCurrentSampleBuffer);
    CVBufferRetain(mGrabbedPixels);
    mFrameTimestamp = CMSampleBufferGetOutputPresentationTimeStamp(mCurrentSampleBuffer);
    mFrameNum++;

    bool isReading = (mAssetReader.status == AVAssetReaderStatusReading);
    [localpool drain];
    return isReading;
}

IplImage* CvCaptureFile::retrieveFramePixelBuffer() {
    if ( ! mGrabbedPixels ) {
        return 0;
    }

    NSAutoreleasePool *localpool = [[NSAutoreleasePool alloc] init];

    CVPixelBufferLockBaseAddress(mGrabbedPixels, 0);
    void *baseaddress;
    size_t width, height, rowBytes;

    OSType pixelFormat = CVPixelBufferGetPixelFormatType(mGrabbedPixels);

    if (CVPixelBufferIsPlanar(mGrabbedPixels)) {
        baseaddress = CVPixelBufferGetBaseAddressOfPlane(mGrabbedPixels, 0);
        width = CVPixelBufferGetWidthOfPlane(mGrabbedPixels, 0);
        height = CVPixelBufferGetHeightOfPlane(mGrabbedPixels, 0);
        rowBytes = CVPixelBufferGetBytesPerRowOfPlane(mGrabbedPixels, 0);
    } else {
        baseaddress = CVPixelBufferGetBaseAddress(mGrabbedPixels);
        width = CVPixelBufferGetWidth(mGrabbedPixels);
        height = CVPixelBufferGetHeight(mGrabbedPixels);
        rowBytes = CVPixelBufferGetBytesPerRow(mGrabbedPixels);
    }

    if ( rowBytes == 0 ) {
        fprintf(stderr, "OpenCV: error: rowBytes == 0\n");
        CVPixelBufferUnlockBaseAddress(mGrabbedPixels, 0);
        CVBufferRelease(mGrabbedPixels);
        mGrabbedPixels = NULL;
        return 0;
    }

    int outChannels;
    if (mMode == CV_CAP_MODE_BGR || mMode == CV_CAP_MODE_RGB) {
        outChannels = 3;
    } else if (mMode == CV_CAP_MODE_GRAY) {
        outChannels = 1;
    } else if (mMode == CV_CAP_MODE_YUYV) {
        outChannels = 2;
    } else {
        fprintf(stderr, "VIDEOIO ERROR: AVF iOS: Unsupported mode: %u\n", mMode);
        CVPixelBufferUnlockBaseAddress(mGrabbedPixels, 0);
        CVBufferRelease(mGrabbedPixels);
        mGrabbedPixels = NULL;
        return 0;
    }

    if ( currSize != width*outChannels*height ) {
        currSize = width*outChannels*height;
        free(mOutImagedata);
        mOutImagedata = reinterpret_cast<uint8_t*>(malloc(currSize));
    }

    if (mOutImage == NULL) {
        mOutImage = cvCreateImageHeader(cvSize((int)width,(int)height), IPL_DEPTH_8U, outChannels);
    }
    mOutImage->width = int(width);
    mOutImage->height = int(height);
    mOutImage->nChannels = outChannels;
    mOutImage->depth = IPL_DEPTH_8U;
    mOutImage->widthStep = int(width*outChannels);
    mOutImage->imageData = reinterpret_cast<char *>(mOutImagedata);
    mOutImage->imageSize = int(currSize);

    int deviceChannels;
    int cvtCode;

    if ( pixelFormat == kCVPixelFormatType_32BGRA ) {
        deviceChannels = 4;

        if (mMode == CV_CAP_MODE_BGR) {
            cvtCode = CV_BGRA2BGR;
        } else if (mMode == CV_CAP_MODE_RGB) {
            cvtCode = CV_BGRA2RGB;
        } else if (mMode == CV_CAP_MODE_GRAY) {
            cvtCode = CV_BGRA2GRAY;
        } else {
            CVPixelBufferUnlockBaseAddress(mGrabbedPixels, 0);
            CVBufferRelease(mGrabbedPixels);
            mGrabbedPixels = NULL;
            fprintf(stderr, "OpenCV: unsupported pixel conversion mode\n");
            return 0;
        }
    } else if ( pixelFormat == kCVPixelFormatType_24RGB ) {
        deviceChannels = 3;

        if (mMode == CV_CAP_MODE_BGR) {
            cvtCode = CV_RGB2BGR;
        } else if (mMode == CV_CAP_MODE_RGB) {
            cvtCode = 0;
        } else if (mMode == CV_CAP_MODE_GRAY) {
            cvtCode = CV_RGB2GRAY;
        } else {
            CVPixelBufferUnlockBaseAddress(mGrabbedPixels, 0);
            CVBufferRelease(mGrabbedPixels);
            mGrabbedPixels = NULL;
            fprintf(stderr, "OpenCV: unsupported pixel conversion mode\n");
            return 0;
        }
    } else if ( pixelFormat == kCVPixelFormatType_422YpCbCr8 ) {    // 422 (2vuy, UYVY)
        deviceChannels = 2;

        if (mMode == CV_CAP_MODE_BGR) {
            cvtCode = CV_YUV2BGR_UYVY;
        } else if (mMode == CV_CAP_MODE_RGB) {
            cvtCode = CV_YUV2RGB_UYVY;
        } else if (mMode == CV_CAP_MODE_GRAY) {
            cvtCode = CV_YUV2GRAY_UYVY;
        } else if (mMode == CV_CAP_MODE_YUYV) {
            cvtCode = -1;    // Copy
        } else {
            CVPixelBufferUnlockBaseAddress(mGrabbedPixels, 0);
            CVBufferRelease(mGrabbedPixels);
            mGrabbedPixels = NULL;
            fprintf(stderr, "OpenCV: unsupported pixel conversion mode\n");
            return 0;
        }
    } else if ( pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange ||   // 420v
               pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange ) {   // 420f
        height = height * 3 / 2;
        deviceChannels = 1;

        if (mMode == CV_CAP_MODE_BGR) {
            cvtCode = CV_YUV2BGR_YV12;
        } else if (mMode == CV_CAP_MODE_RGB) {
            cvtCode = CV_YUV2RGB_YV12;
        } else if (mMode == CV_CAP_MODE_GRAY) {
            cvtCode = CV_YUV2GRAY_420;
        } else {
            CVPixelBufferUnlockBaseAddress(mGrabbedPixels, 0);
            CVBufferRelease(mGrabbedPixels);
            mGrabbedPixels = NULL;
            fprintf(stderr, "OpenCV: unsupported pixel conversion mode\n");
            return 0;
        }
    } else {
        char pfBuf[] = { (char)pixelFormat, (char)(pixelFormat >> 8),
                         (char)(pixelFormat >> 16), (char)(pixelFormat >> 24), '\0' };
        fprintf(stderr, "OpenCV: unsupported pixel format '%s'\n", pfBuf);
        CVPixelBufferUnlockBaseAddress(mGrabbedPixels, 0);
        CVBufferRelease(mGrabbedPixels);
        mGrabbedPixels = NULL;
        return 0;
    }

    if (mDeviceImage == NULL) {
        mDeviceImage = cvCreateImageHeader(cvSize(int(width),int(height)), IPL_DEPTH_8U, deviceChannels);
    }
    mDeviceImage->width = int(width);
    mDeviceImage->height = int(height);
    mDeviceImage->nChannels = deviceChannels;
    mDeviceImage->depth = IPL_DEPTH_8U;
    mDeviceImage->widthStep = int(rowBytes);
    mDeviceImage->imageData = reinterpret_cast<char *>(baseaddress);
    mDeviceImage->imageSize = int(rowBytes*height);

    if (cvtCode == -1) {
        cv::cvarrToMat(mDeviceImage).copyTo(cv::cvarrToMat(mOutImage));
    } else {
        cvCvtColor(mDeviceImage, mOutImage, cvtCode);
    }

    CVPixelBufferUnlockBaseAddress(mGrabbedPixels, 0);

    [localpool drain];

    return mOutImage;
}


IplImage* CvCaptureFile::retrieveFrame(int) {
    return retrieveFramePixelBuffer();
}

double CvCaptureFile::getProperty(int property_id) const {
    if (mAsset == nil) return 0;

    CMTime t;

    switch (property_id) {
        case CV_CAP_PROP_POS_MSEC:
            return mFrameTimestamp.value * 1000.0 / mFrameTimestamp.timescale;
        case CV_CAP_PROP_POS_FRAMES:
            return mAssetTrack.nominalFrameRate > 0 ? mFrameNum : 0;
        case CV_CAP_PROP_POS_AVI_RATIO:
            t = [mAsset duration];
            return (mFrameTimestamp.value * t.timescale) / double(mFrameTimestamp.timescale * t.value);
        case CV_CAP_PROP_FRAME_WIDTH:
            return mAssetTrack.naturalSize.width;
        case CV_CAP_PROP_FRAME_HEIGHT:
            return mAssetTrack.naturalSize.height;
        case CV_CAP_PROP_FPS:
            return mAssetTrack.nominalFrameRate;
        case CV_CAP_PROP_FRAME_COUNT:
            t = [mAsset duration];
            return round((t.value * mAssetTrack.nominalFrameRate) / double(t.timescale));
        case CV_CAP_PROP_FORMAT:
            return mFormat;
        case CV_CAP_PROP_FOURCC:
            return mMode;
        default:
            break;
    }

    return 0;
}
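
// Worked example of the position math above (values hypothetical): with
// mFrameTimestamp = {value: 900, timescale: 600}, CAP_PROP_POS_MSEC returns
// 900 * 1000.0 / 600 = 1500 ms; with an asset duration of {value: 6000,
// timescale: 600}, CAP_PROP_POS_AVI_RATIO returns (900*600)/(600.0*6000) = 0.15.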

bool CvCaptureFile::setProperty(int property_id, double value) {
    if (mAsset == nil) return false;

    NSAutoreleasePool* localpool = [[NSAutoreleasePool alloc] init];

    bool retval = false;
    CMTime t;

    switch (property_id) {
        case CV_CAP_PROP_POS_MSEC:
            t = mAsset.duration;
            t.value = value * t.timescale / 1000;
            retval = setupReadingAt(t);
            break;
        case CV_CAP_PROP_POS_FRAMES:
            retval = mAssetTrack.nominalFrameRate > 0 ? setupReadingAt(CMTimeMake(value, mAssetTrack.nominalFrameRate)) : false;
            break;
        case CV_CAP_PROP_POS_AVI_RATIO:
            t = mAsset.duration;
            t.value = round(t.value * value);
            retval = setupReadingAt(t);
            break;
        case CV_CAP_PROP_FOURCC:
            uint32_t mode;
            mode = cvRound(value);
            if (mMode == mode) {
                retval = true;
            } else {
                switch (mode) {
                    case CV_CAP_MODE_BGR:
                    case CV_CAP_MODE_RGB:
                    case CV_CAP_MODE_GRAY:
                    case CV_CAP_MODE_YUYV:
                        mMode = mode;
                        retval = setupReadingAt(mFrameTimestamp);
                        break;
                    default:
                        fprintf(stderr, "VIDEOIO ERROR: AVF iOS: Unsupported mode: %u\n", mode);
                        retval = false;
                        break;
                }
            }
            break;
        default:
            break;
    }

    [localpool drain];
    return retval;
}


/*****************************************************************************
 *
 * CvVideoWriter Implementation.
 *
 * CvVideoWriter is the instantiation of a video output class.
 *
 *****************************************************************************/


CvVideoWriter_AVFoundation::CvVideoWriter_AVFoundation(const char* filename, int fourcc,
        double fps, CvSize frame_size,
        int is_color) {

    NSAutoreleasePool* localpool = [[NSAutoreleasePool alloc] init];


    frameCount = 0;
    movieFPS = fps;
    movieSize = frame_size;
    movieColor = is_color;
    argbimage = cvCreateImage(movieSize, IPL_DEPTH_8U, 4);
    path = [[[NSString stringWithCString:filename encoding:NSASCIIStringEncoding] stringByExpandingTildeInPath] retain];


    /*
         AVFileTypeQuickTimeMovie
         UTI for the QuickTime movie file format.
         The value of this UTI is com.apple.quicktime-movie. Files are identified with the .mov and .qt extensions.

         AVFileTypeMPEG4
         UTI for the MPEG-4 file format.
         The value of this UTI is public.mpeg-4. Files are identified with the .mp4 extension.

         AVFileTypeAppleM4V
         UTI for the iTunes video file format.
         The value of this UTI is com.apple.mpeg-4-video. Files are identified with the .m4v extension.

         AVFileType3GPP
         UTI for the 3GPP file format.
         The value of this UTI is public.3gpp. Files are identified with the .3gp, .3gpp, and .sdv extensions.
     */

    NSString *fileExt = [[[path pathExtension] lowercaseString] copy];
    if ([fileExt isEqualToString:@"mov"] || [fileExt isEqualToString:@"qt"]) {
        fileType = [AVFileTypeQuickTimeMovie copy];
    } else if ([fileExt isEqualToString:@"mp4"]) {
        fileType = [AVFileTypeMPEG4 copy];
    } else if ([fileExt isEqualToString:@"m4v"]) {
        fileType = [AVFileTypeAppleM4V copy];
#if TARGET_OS_IPHONE || TARGET_IPHONE_SIMULATOR
    } else if ([fileExt isEqualToString:@"3gp"] || [fileExt isEqualToString:@"3gpp"] || [fileExt isEqualToString:@"sdv"]) {
        fileType = [AVFileType3GPP copy];
#endif
    } else {
        fileType = [AVFileTypeMPEG4 copy];  // default: mp4
    }
    [fileExt release];

    char cc[5];
    cc[0] = fourcc & 255;
    cc[1] = (fourcc >> 8) & 255;
    cc[2] = (fourcc >> 16) & 255;
    cc[3] = (fourcc >> 24) & 255;
    cc[4] = 0;
    int cc2 = CV_FOURCC(cc[0], cc[1], cc[2], cc[3]);
    if (cc2 != fourcc) {
        std::cout << "WARNING: Didn't properly encode FourCC. Expected " << fourcc
            << " but got " << cc2 << "." << std::endl;
        //exception;
    }
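
    // FourCC packing, for reference: CV_FOURCC('a','v','c','1')
    //   = 'a' | ('v' << 8) | ('c' << 16) | ('1' << 24)
    //   = 0x61 | 0x7600 | 0x630000 | 0x31000000 = 0x31637661.
    // The bytes are unpacked above in the same little-endian order, so the
    // round trip only fails (and triggers the warning) for values that are
    // not plain four-character codes.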

    // Three codecs are supported: AVVideoCodecH264, AVVideoCodecJPEG, AVVideoCodecTypeHEVC.
    // On the iPhone 3G, H264 is not supported.
    if (fourcc == CV_FOURCC('J','P','E','G') || fourcc == CV_FOURCC('j','p','e','g') ||
            fourcc == CV_FOURCC('M','J','P','G') || fourcc == CV_FOURCC('m','j','p','g')) {
        codec = [AVVideoCodecJPEG copy]; // Use JPEG codec if specified, otherwise H264
    } else if (fourcc == CV_FOURCC('H','2','6','4') || fourcc == CV_FOURCC('a','v','c','1')) {
        codec = [AVVideoCodecH264 copy];
// Available since iOS 11
#if defined(__IPHONE_OS_VERSION_MIN_REQUIRED) && __IPHONE_OS_VERSION_MIN_REQUIRED >= 110000
    } else if (fourcc == CV_FOURCC('H','2','6','5') || fourcc == CV_FOURCC('h','v','c','1') ||
            fourcc == CV_FOURCC('H','E','V','C') || fourcc == CV_FOURCC('h','e','v','c')) {
        if (@available(iOS 11, *)) {
            codec = [AVVideoCodecTypeHEVC copy];
        } else {
            codec = [AVVideoCodecH264 copy];
        }
#endif
    } else {
        codec = [AVVideoCodecH264 copy]; // default: canonical H264.
    }

    //NSLog(@"Path: %@", path);

    NSError *error = nil;


    // Make sure the file does not already exist. Necessary to overwrite??
    /*
    NSFileManager *fileManager = [NSFileManager defaultManager];
    if ([fileManager fileExistsAtPath:path]){
        [fileManager removeItemAtPath:path error:&error];
    }
    */

    // Wire the writer:
    // Supported file types:
    //      AVFileTypeQuickTimeMovie AVFileTypeMPEG4 AVFileTypeAppleM4V AVFileType3GPP

    mMovieWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:path]
        fileType:fileType
        error:&error];
    //NSParameterAssert(mMovieWriter);

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
        codec, AVVideoCodecKey,
        [NSNumber numberWithInt:movieSize.width], AVVideoWidthKey,
        [NSNumber numberWithInt:movieSize.height], AVVideoHeightKey,
        nil];

    mMovieWriterInput = [[AVAssetWriterInput
        assetWriterInputWithMediaType:AVMediaTypeVideo
        outputSettings:videoSettings] retain];

    //NSParameterAssert(mMovieWriterInput);
    //NSParameterAssert([mMovieWriter canAddInput:mMovieWriterInput]);

    [mMovieWriter addInput:mMovieWriterInput];

    mMovieWriterAdaptor = [[AVAssetWriterInputPixelBufferAdaptor alloc] initWithAssetWriterInput:mMovieWriterInput sourcePixelBufferAttributes:nil];


    // Start a session:
    [mMovieWriter startWriting];
    [mMovieWriter startSessionAtSourceTime:kCMTimeZero];


    if (mMovieWriter.status == AVAssetWriterStatusFailed) {
        NSLog(@"%@", [mMovieWriter.error localizedDescription]);
        // TODO: error handling, cleanup. Throw exception?
        // return;
    }

    [localpool drain];
}


CvVideoWriter_AVFoundation::~CvVideoWriter_AVFoundation() {
    NSAutoreleasePool* localpool = [[NSAutoreleasePool alloc] init];

    [mMovieWriterInput markAsFinished];
    [mMovieWriter finishWriting];
    [mMovieWriter release];
    [mMovieWriterInput release];
    [mMovieWriterAdaptor release];
    [path release];
    [codec release];
    [fileType release];
    cvReleaseImage(&argbimage);

    [localpool drain];
}

bool CvVideoWriter_AVFoundation::writeFrame(const IplImage* iplimage) {
    NSAutoreleasePool* localpool = [[NSAutoreleasePool alloc] init];

    // writer status check
    if (![mMovieWriterInput isReadyForMoreMediaData] || mMovieWriter.status != AVAssetWriterStatusWriting) {
        NSLog(@"[mMovieWriterInput isReadyForMoreMediaData] Not ready for media data or ...");
        NSLog(@"mMovieWriter.status: %d. Error: %@", (int)mMovieWriter.status, [mMovieWriter.error localizedDescription]);
        [localpool drain];
        return false;
    }

    BOOL success = FALSE;

    if (iplimage->height != movieSize.height || iplimage->width != movieSize.width) {
        std::cout << "Frame size does not match video size." << std::endl;
        [localpool drain];
        return false;
    }

    if (movieColor) {
        //assert(iplimage->nChannels == 3);
        cvCvtColor(iplimage, argbimage, CV_BGR2BGRA);
    } else {
        //assert(iplimage->nChannels == 1);
        cvCvtColor(iplimage, argbimage, CV_GRAY2BGRA);
    }
    // IplImage -> CGImage conversion
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    NSData *nsData = [NSData dataWithBytes:argbimage->imageData length:argbimage->imageSize];
    CGDataProviderRef provider = CGDataProviderCreateWithCFData((CFDataRef)nsData);
    CGImageRef cgImage = CGImageCreate(argbimage->width, argbimage->height,
            argbimage->depth, argbimage->depth * argbimage->nChannels, argbimage->widthStep,
            colorSpace, kCGImageAlphaLast|kCGBitmapByteOrderDefault,
            provider, NULL, false, kCGRenderingIntentDefault);

    // CGImage -> CVPixelBufferRef conversion
    CVPixelBufferRef pixelBuffer = NULL;
    CFDataRef cfData = CGDataProviderCopyData(CGImageGetDataProvider(cgImage));
    int status = CVPixelBufferCreateWithBytes(NULL,
            movieSize.width,
            movieSize.height,
            kCVPixelFormatType_32BGRA,
            (void*)CFDataGetBytePtr(cfData),
            CGImageGetBytesPerRow(cgImage),
            NULL,
            0,
            NULL,
            &pixelBuffer);
    if (status == kCVReturnSuccess) {
        success = [mMovieWriterAdaptor appendPixelBuffer:pixelBuffer
            withPresentationTime:CMTimeMake(frameCount, movieFPS)];
    }

    // cleanup
    CFRelease(cfData);
    CVPixelBufferRelease(pixelBuffer);
    CGImageRelease(cgImage);
    CGDataProviderRelease(provider);
    CGColorSpaceRelease(colorSpace);

    [localpool drain];

    if (success) {
        frameCount++;
        //NSLog(@"Frame #%d", frameCount);
        return true;
    } else {
        NSLog(@"Frame appendPixelBuffer failed.");
        return false;
    }
}
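
// Hedged end-to-end usage sketch of this writer (path and size are
// hypothetical):
//
//     cv::VideoWriter out("out.mp4", cv::CAP_AVFOUNDATION,
//                         cv::VideoWriter::fourcc('a','v','c','1'),
//                         30.0, cv::Size(640, 480), /*isColor=*/true);
//     cv::Mat frame(480, 640, CV_8UC3, cv::Scalar(0, 255, 0));
//     out.write(frame);   // lands in writeFrame() above
//     out.release();      // finalizes the file via the destructor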

#pragma clang diagnostic pop