1/*
2 * Copyright (C) 2010 Ole André Vadla Ravnås <oleavr@soundrop.com>
3 * Copyright (C) 2016 Alessandro Decina <twi@centricular.com>
4 *
5 * This library is free software; you can redistribute it and/or
6 * modify it under the terms of the GNU Library General Public
7 * License as published by the Free Software Foundation; either
8 * version 2 of the License, or (at your option) any later version.
9 *
10 * This library is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
13 * Library General Public License for more details.
14 *
15 * You should have received a copy of the GNU Library General Public
16 * License along with this library; if not, write to the
17 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
18 * Boston, MA 02110-1301, USA.
19 */
20
21#ifdef HAVE_CONFIG_H
22#  include "config.h"
23#endif
24
25#include "avfvideosrc.h"
26#include "glcontexthelper.h"
27
28#import <AVFoundation/AVFoundation.h>
29#if !HAVE_IOS
30#import <AppKit/AppKit.h>
31#endif
32#include <gst/video/video.h>
33#include <gst/gl/gstglcontext.h>
34#include "coremediabuffer.h"
35#include "videotexturecache.h"
36
37#define DEFAULT_DEVICE_INDEX  -1
38#define DEFAULT_POSITION      GST_AVF_VIDEO_SOURCE_POSITION_DEFAULT
39#define DEFAULT_ORIENTATION   GST_AVF_VIDEO_SOURCE_ORIENTATION_DEFAULT
40#define DEFAULT_DEVICE_TYPE   GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_DEFAULT
41#define DEFAULT_DO_STATS      FALSE
42
43#define DEVICE_FPS_N          25
44#define DEVICE_FPS_D          1
45
46#define BUFFER_QUEUE_SIZE     2
47
48GST_DEBUG_CATEGORY (gst_avf_video_src_debug);
49#define GST_CAT_DEFAULT gst_avf_video_src_debug
50
51static GstVideoFormat get_gst_video_format(NSNumber *pixel_format);
52static CMVideoDimensions
53get_oriented_dimensions(GstAVFVideoSourceOrientation orientation, CMVideoDimensions dimensions);
54
/* Source pad template: GL memory caps differ per platform (UYVY rectangle
 * textures on macOS, NV12 2D textures on iOS), followed by plain
 * system-memory raw video offered on both platforms. */
static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (
#if !HAVE_IOS
        GST_VIDEO_CAPS_MAKE_WITH_FEATURES
        (GST_CAPS_FEATURE_MEMORY_GL_MEMORY,
            "UYVY") ", "
        "texture-target = " GST_GL_TEXTURE_TARGET_RECTANGLE_STR ";"
#else
        GST_VIDEO_CAPS_MAKE_WITH_FEATURES
        (GST_CAPS_FEATURE_MEMORY_GL_MEMORY,
            "NV12") ", "
        "texture-target = " GST_GL_TEXTURE_TARGET_2D_STR "; "
#endif
        "video/x-raw, "
        "format = (string) { NV12, UYVY, YUY2 }, "
        "framerate = " GST_VIDEO_FPS_RANGE ", "
        "width = " GST_VIDEO_SIZE_RANGE ", "
        "height = " GST_VIDEO_SIZE_RANGE "; "

        "video/x-raw, "
        "format = (string) BGRA, "
        "framerate = " GST_VIDEO_FPS_RANGE ", "
        "width = " GST_VIDEO_SIZE_RANGE ", "
        "height = " GST_VIDEO_SIZE_RANGE "; "
));
82
/* Condition values for bufQueueLock. -unlock signals
 * HAS_BUFFER_OR_STOP_REQUEST together with stopRequest so a waiting
 * consumer wakes up; presumably the capture callback signals the same
 * condition when a frame is queued (callback not visible in this chunk). */
typedef enum _QueueState {
  NO_BUFFERS = 1,
  HAS_BUFFER_OR_STOP_REQUEST,
} QueueState;
87
88#define gst_avf_video_src_parent_class parent_class
89G_DEFINE_TYPE (GstAVFVideoSrc, gst_avf_video_src, GST_TYPE_PUSH_SRC);
90
91#define GST_TYPE_AVF_VIDEO_SOURCE_POSITION (gst_avf_video_source_position_get_type ())
static GType
gst_avf_video_source_position_get_type (void)
{
  /* Register the GEnum backing the "position" property.
   *
   * Use g_once_init_enter/leave so registration is thread-safe: the
   * original unguarded "if (!type)" check could race when two threads
   * call this simultaneously and register the enum twice. */
  static gsize avf_video_source_position_type = 0;

  if (g_once_init_enter (&avf_video_source_position_type)) {
    static GEnumValue position_types[] = {
      { GST_AVF_VIDEO_SOURCE_POSITION_FRONT, "Front-facing camera", "front" },
      { GST_AVF_VIDEO_SOURCE_POSITION_BACK,  "Back-facing camera", "back"  },
      { GST_AVF_VIDEO_SOURCE_POSITION_DEFAULT,  "Default", "default"  },
      { 0, NULL, NULL },
    };
    GType type = g_enum_register_static ("GstAVFVideoSourcePosition",
        position_types);

    g_once_init_leave (&avf_video_source_position_type, type);
  }

  return (GType) avf_video_source_position_type;
}
112
113#define GST_TYPE_AVF_VIDEO_SOURCE_ORIENTATION (gst_avf_video_source_orientation_get_type ())
static GType
gst_avf_video_source_orientation_get_type (void)
{
  /* Register the GEnum backing the "orientation" property.
   *
   * Thread-safe one-time registration via g_once_init_enter/leave
   * (the original unguarded check could race). */
  static gsize avf_video_source_orientation_type = 0;

  if (g_once_init_enter (&avf_video_source_orientation_type)) {
    static GEnumValue orientation_types[] = {
      { GST_AVF_VIDEO_SOURCE_ORIENTATION_PORTRAIT, "Indicates that video should be oriented vertically, top at the top.", "portrait" },
      /* NOTE(review): the nick "portrat-upside-down" contains a typo
       * ("portrait" misspelled) but is kept byte-for-byte: it is a
       * public, serialized enum nick and changing it would break
       * existing applications. */
      { GST_AVF_VIDEO_SOURCE_ORIENTATION_PORTRAIT_UPSIDE_DOWN, "Indicates that video should be oriented vertically, top at the bottom.", "portrat-upside-down" },
      { GST_AVF_VIDEO_SOURCE_ORIENTATION_LANDSCAPE_RIGHT, "Indicates that video should be oriented horizontally, top on the left.", "landscape-right" },
      { GST_AVF_VIDEO_SOURCE_ORIENTATION_LANDSCAPE_LEFT, "Indicates that video should be oriented horizontally, top on the right.", "landscape-left" },
      { GST_AVF_VIDEO_SOURCE_ORIENTATION_DEFAULT, "Default", "default" },
      { 0, NULL, NULL },
    };
    GType type = g_enum_register_static ("GstAVFVideoSourceOrientation",
        orientation_types);

    g_once_init_leave (&avf_video_source_orientation_type, type);
  }

  return (GType) avf_video_source_orientation_type;
}
136
137#define GST_TYPE_AVF_VIDEO_SOURCE_DEVICE_TYPE (gst_avf_video_source_device_type_get_type ())
static GType
gst_avf_video_source_device_type_get_type (void)
{
  /* Register the GEnum backing the "device-type" property.
   *
   * Thread-safe one-time registration via g_once_init_enter/leave
   * (the original unguarded check could race). */
  static gsize avf_video_source_device_type_type = 0;

  if (g_once_init_enter (&avf_video_source_device_type_type)) {
    static GEnumValue device_type_types[] = {
      { GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_BUILT_IN_WIDE_ANGLE_CAMERA, "A built-in wide angle camera. These devices are suitable for general purpose use.", "wide-angle" },
      { GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_BUILT_IN_TELEPHOTO_CAMERA, "A built-in camera device with a longer focal length than a wide-angle camera.", "telephoto" },
      { GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_BUILT_IN_DUAL_CAMERA, "A dual camera device, combining built-in wide-angle and telephoto cameras that work together as a single capture device.", "dual" },
      { GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_DEFAULT, "Default", "default" },
      { 0, NULL, NULL },
    };
    GType type = g_enum_register_static ("GstAVFVideoSourceDeviceType",
        device_type_types);

    g_once_init_leave (&avf_video_source_device_type_type, type);
  }

  return (GType) avf_video_source_device_type_type;
}
159
/* Internal implementation object. Owns the AVCaptureSession pipeline and
 * bridges AVFoundation's sample-buffer delegate callbacks to the wrapping
 * GStreamer push source element. */
@interface GstAVFVideoSrcImpl : NSObject <AVCaptureVideoDataOutputSampleBufferDelegate> {
  /* Back-pointers to the GStreamer wrapper element (same object, three views) */
  GstElement *element;
  GstBaseSrc *baseSrc;
  GstPushSrc *pushSrc;

  /* Values mirrored from the wrapper element's GObject properties */
  gint deviceIndex;
  const gchar *deviceName;
  GstAVFVideoSourcePosition position;
  GstAVFVideoSourceOrientation orientation;
  GstAVFVideoSourceDeviceType deviceType;
  BOOL doStats;

  /* AVFoundation capture pipeline state */
  AVCaptureSession *session;
  AVCaptureInput *input;
  AVCaptureVideoDataOutput *output;
  AVCaptureDevice *device;
  AVCaptureConnection *connection;
  CMClockRef inputClock;

  /* mainQueue serializes session control; workerQueue receives sample
   * buffers from the capture output */
  dispatch_queue_t mainQueue;
  dispatch_queue_t workerQueue;
  /* bufQueueLock guards bufQueue using the QueueState conditions */
  NSConditionLock *bufQueueLock;
  NSMutableArray *bufQueue;
  BOOL stopRequest;

  /* Negotiated output format */
  GstCaps *caps;
  GstVideoFormat format;
  gint width, height;
  GstClockTime latency;
  guint64 offset;

  /* Frame-rate statistics (see -updateStatistics) */
  GstClockTime lastSampling;
  guint count;
  gint fps;
  BOOL captureScreen;
  BOOL captureScreenCursor;
  BOOL captureScreenMouseClicks;

  /* GL texture upload support */
  BOOL useVideoMeta;
  GstGLContextHelper *ctxh;
  GstVideoTextureCache *textureCache;
}

- (id)init;
- (id)initWithSrc:(GstPushSrc *)src;
- (void)finalize;

@property int deviceIndex;
@property const gchar *deviceName;
@property GstAVFVideoSourcePosition position;
@property GstAVFVideoSourceOrientation orientation;
@property GstAVFVideoSourceDeviceType deviceType;
@property BOOL doStats;
@property int fps;
@property BOOL captureScreen;
@property BOOL captureScreenCursor;
@property BOOL captureScreenMouseClicks;

- (BOOL)openScreenInput;
- (BOOL)openDeviceInput;
- (BOOL)openDevice;
- (void)closeDevice;
- (GstVideoFormat)getGstVideoFormat:(NSNumber *)pixel_format;
#if !HAVE_IOS
- (CGDirectDisplayID)getDisplayIdFromDeviceIndex;
- (float)getScaleFactorFromDeviceIndex;
#endif
- (GstCaps *)getDeviceCaps;
- (BOOL)setDeviceCaps:(GstVideoInfo *)info;
- (BOOL)getSessionPresetCaps:(GstCaps *)result;
- (BOOL)setSessionPresetCaps:(GstVideoInfo *)info;
- (GstCaps *)getCaps;
- (BOOL)setCaps:(GstCaps *)new_caps;
- (BOOL)start;
- (BOOL)stop;
- (BOOL)unlock;
- (BOOL)unlockStop;
- (BOOL)query:(GstQuery *)query;
- (void)setContext:(GstContext *)context;
- (GstStateChangeReturn)changeState:(GstStateChange)transition;
- (GstFlowReturn)create:(GstBuffer **)buf;
- (GstCaps *)fixate:(GstCaps *)caps;
- (BOOL)decideAllocation:(GstQuery *)query;
- (void)updateStatistics;
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection;

@end
249
250#if HAVE_IOS
251
/* Map the GStreamer device-type enum onto the AVFoundation device type.
 * The DEFAULT value must be filtered out by the caller (asserted here). */
static AVCaptureDeviceType GstAVFVideoSourceDeviceType2AVCaptureDeviceType(GstAVFVideoSourceDeviceType deviceType) {
  switch (deviceType) {
    case GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_BUILT_IN_WIDE_ANGLE_CAMERA:
      return AVCaptureDeviceTypeBuiltInWideAngleCamera;
    case GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_BUILT_IN_TELEPHOTO_CAMERA:
      return AVCaptureDeviceTypeBuiltInTelephotoCamera;
    case GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_BUILT_IN_DUAL_CAMERA:
      /* NOTE(review): newer SDKs deprecate ...BuiltInDuoCamera in favour of
       * ...BuiltInDualCamera — confirm against the minimum supported SDK. */
      return AVCaptureDeviceTypeBuiltInDuoCamera;
    case GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_DEFAULT:
      g_assert_not_reached();
  }
  /* Fall back to a defined value: without this, builds with G_DISABLE_ASSERT
   * could run off the end of a non-void function (undefined behavior). */
  return AVCaptureDeviceTypeBuiltInWideAngleCamera;
}
264
/* Map the GStreamer camera-position enum onto the AVFoundation position.
 * The DEFAULT value must be filtered out by the caller (asserted here). */
static AVCaptureDevicePosition GstAVFVideoSourcePosition2AVCaptureDevicePosition(GstAVFVideoSourcePosition position) {
  switch (position) {
    case GST_AVF_VIDEO_SOURCE_POSITION_FRONT:
      return AVCaptureDevicePositionFront;
    case GST_AVF_VIDEO_SOURCE_POSITION_BACK:
      return AVCaptureDevicePositionBack;
    case GST_AVF_VIDEO_SOURCE_POSITION_DEFAULT:
      g_assert_not_reached();
  }
  /* Fall back to a defined value: without this, builds with G_DISABLE_ASSERT
   * could run off the end of a non-void function (undefined behavior). */
  return AVCaptureDevicePositionUnspecified;
}
276
/* Map the GStreamer orientation enum onto the AVFoundation orientation.
 * The DEFAULT value must be filtered out by the caller (asserted here). */
static AVCaptureVideoOrientation GstAVFVideoSourceOrientation2AVCaptureVideoOrientation(GstAVFVideoSourceOrientation orientation) {
  switch (orientation) {
    case GST_AVF_VIDEO_SOURCE_ORIENTATION_PORTRAIT:
      return AVCaptureVideoOrientationPortrait;
    case GST_AVF_VIDEO_SOURCE_ORIENTATION_PORTRAIT_UPSIDE_DOWN:
      return AVCaptureVideoOrientationPortraitUpsideDown;
    case GST_AVF_VIDEO_SOURCE_ORIENTATION_LANDSCAPE_LEFT:
      return AVCaptureVideoOrientationLandscapeLeft;
    case GST_AVF_VIDEO_SOURCE_ORIENTATION_LANDSCAPE_RIGHT:
      return AVCaptureVideoOrientationLandscapeRight;
    case GST_AVF_VIDEO_SOURCE_ORIENTATION_DEFAULT:
      g_assert_not_reached();
  }
  /* Fall back to a defined value: without this, builds with G_DISABLE_ASSERT
   * could run off the end of a non-void function (undefined behavior). */
  return AVCaptureVideoOrientationPortrait;
}
291
292#endif
293
294@implementation GstAVFVideoSrcImpl
295
296@synthesize deviceIndex, deviceName, position, orientation, deviceType, doStats,
297    fps, captureScreen, captureScreenCursor, captureScreenMouseClicks;
298
- (id)init
{
  /* Plain -init is just the designated initializer with no wrapping
   * GStreamer source element attached yet. */
  self = [self initWithSrc:NULL];
  return self;
}
303
- (id)initWithSrc:(GstPushSrc *)src
{
  /* Designated initializer. Stores the wrapping GStreamer element (may be
   * NULL when coming from plain -init), applies property defaults and
   * creates the two serial dispatch queues: mainQueue for session control,
   * workerQueue for sample-buffer delivery. Also marks the base source as
   * live with TIME format, as appropriate for a capture source. */
  if ((self = [super init])) {
    element = GST_ELEMENT_CAST (src);
    baseSrc = GST_BASE_SRC_CAST (src);
    pushSrc = src;

    deviceIndex = DEFAULT_DEVICE_INDEX;
    deviceName = NULL;
    position = DEFAULT_POSITION;
    orientation = DEFAULT_ORIENTATION;
    deviceType = DEFAULT_DEVICE_TYPE;
    captureScreen = NO;
    captureScreenCursor = NO;
    captureScreenMouseClicks = NO;
    useVideoMeta = NO;
    textureCache = NULL;
    ctxh = gst_gl_context_helper_new (element);
    mainQueue =
        dispatch_queue_create ("org.freedesktop.gstreamer.avfvideosrc.main", NULL);
    workerQueue =
        dispatch_queue_create ("org.freedesktop.gstreamer.avfvideosrc.output", NULL);

    gst_base_src_set_live (baseSrc, TRUE);
    gst_base_src_set_format (baseSrc, GST_FORMAT_TIME);
  }

  return self;
}
333
- (void)finalize
{
  /* Drop the dispatch queue references; they are released automatically
   * when dispatch objects are managed by ARC (NOTE(review): assumes an ARC
   * build — confirm against the project's compiler flags). */
  mainQueue = NULL;
  workerQueue = NULL;
}
339
/* Select a capture device (by index, or by type/position, or the system
 * default) and create an AVCaptureDeviceInput for it. Returns NO and posts
 * an element error on failure. */
- (BOOL)openDeviceInput
{
  NSString *mediaType = AVMediaTypeVideo;
  NSError *err = nil; /* initialized: the API only guarantees to set it on failure */

  if (deviceIndex == DEFAULT_DEVICE_INDEX) {
    /* Use #if, not #ifdef: the rest of this file tests HAVE_IOS with #if,
     * so HAVE_IOS may be defined to 0 on macOS builds — #ifdef would then
     * wrongly select the iOS-only API. */
#if HAVE_IOS
    if (deviceType != DEFAULT_DEVICE_TYPE && position != DEFAULT_POSITION) {
      device = [AVCaptureDevice
                defaultDeviceWithDeviceType:GstAVFVideoSourceDeviceType2AVCaptureDeviceType(deviceType)
                mediaType:mediaType
                position:GstAVFVideoSourcePosition2AVCaptureDevicePosition(position)];
    } else {
      device = [AVCaptureDevice defaultDeviceWithMediaType:mediaType];
    }
#else
    device = [AVCaptureDevice defaultDeviceWithMediaType:mediaType];
#endif
    if (device == nil) {
      GST_ELEMENT_ERROR (element, RESOURCE, NOT_FOUND,
                          ("No video capture devices found"), (NULL));
      return NO;
    }
  } else { // deviceIndex takes priority over position and deviceType
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:mediaType];
    if (deviceIndex >= [devices count]) {
      GST_ELEMENT_ERROR (element, RESOURCE, NOT_FOUND,
                          ("Invalid video capture device index"), (NULL));
      return NO;
    }
    device = [devices objectAtIndex:deviceIndex];
  }
  g_assert (device != nil);

  deviceName = [[device localizedName] UTF8String];
  GST_INFO ("Opening '%s'", deviceName);

  input = [AVCaptureDeviceInput deviceInputWithDevice:device
                                                error:&err];
  if (input == nil) {
    GST_ELEMENT_ERROR (element, RESOURCE, BUSY,
        ("Failed to open device: %s",
        [[err localizedDescription] UTF8String]),
        (NULL));
    device = nil;
    return NO;
  }
  return YES;
}
389
- (BOOL)openScreenInput
{
/* Create an AVCaptureScreenInput for the display selected by deviceIndex.
 * Screen capture is macOS-only; the iOS branch always fails. */
#if HAVE_IOS
  return NO;
#else
  CGDirectDisplayID displayId;

  GST_DEBUG_OBJECT (element, "Opening screen input");

  displayId = [self getDisplayIdFromDeviceIndex];
  if (displayId == 0)
    return NO;

  AVCaptureScreenInput *screenInput =
      [[AVCaptureScreenInput alloc] initWithDisplayID:displayId];


  /* capturesCursor is set through KVC so the code still builds and runs on
   * OS X < 10.8, where the property does not exist and KVC raises
   * NSUndefinedKeyException. */
  @try {
    [screenInput setValue:[NSNumber numberWithBool:captureScreenCursor]
                 forKey:@"capturesCursor"];

  } @catch (NSException *exception) {
    if (![[exception name] isEqualToString:NSUndefinedKeyException]) {
      GST_WARNING ("An unexpected error occured: %s",
                   [[exception reason] UTF8String]);
    }
    GST_WARNING ("Capturing cursor is only supported in OS X >= 10.8");
  }
  screenInput.capturesMouseClicks = captureScreenMouseClicks;
  input = screenInput;
  return YES;
#endif
}
423
/* Open the capture input (camera or screen), build the AVCaptureSession
 * with a video data output, and remember the connection's input clock.
 * On macOS >= 10.14 camera permission is checked/requested first.
 * Returns NO and posts an element error on failure. */
- (BOOL)openDevice
{
  BOOL success = NO, *successPtr = &success;

  GST_DEBUG_OBJECT (element, "Opening device");

  // Since Mojave, permissions are now supposed to be explicitly granted
  // before performing anything on a device
  if (@available(macOS 10.14, *)) {
    // Check if permission has already been granted (or denied)
    AVAuthorizationStatus authStatus = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
    switch (authStatus) {
      case AVAuthorizationStatusDenied:
        // The user has explicitly denied permission for media capture.
        GST_ELEMENT_ERROR (element, RESOURCE, NOT_AUTHORIZED,
          ("Device video access permission has been explicitly denied before"), ("Authorization status: %d", (int)authStatus));
        return success;
      case AVAuthorizationStatusRestricted:
        // The user is not allowed to access media capture devices.
        GST_ELEMENT_ERROR (element, RESOURCE, NOT_AUTHORIZED,
          ("Device video access permission cannot be granted by the user"), ("Authorization status: %d", (int)authStatus));
        return success;
      case AVAuthorizationStatusAuthorized:
        // The user has explicitly granted permission for media capture,
        // or explicit user permission is not necessary for the media type in question.
        GST_DEBUG_OBJECT (element, "Device video access permission has already been granted");
        break;
      case AVAuthorizationStatusNotDetermined:
        // Explicit user permission is required for media capture,
        // but the user has not yet granted or denied such permission.
        // NOTE(review): the completion handler runs asynchronously, so the
        // re-check below may still observe a non-authorized status right
        // after the request is issued — confirm this is the intended
        // behaviour (first run may fail until permission is granted).
        dispatch_sync (mainQueue, ^{
          [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
            GST_DEBUG_OBJECT (element, "Device video access permission %s", granted ? "granted" : "not granted");
          }];
        });
        // Check if permission has been granted
        AVAuthorizationStatus videoAuthorizationStatus = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
        if (videoAuthorizationStatus != AVAuthorizationStatusAuthorized) {
          GST_ELEMENT_ERROR (element, RESOURCE, NOT_AUTHORIZED,
            ("Device video access permission has just been denied"), ("Authorization status: %d", (int)videoAuthorizationStatus));
          return success;
        }
    }
  }

  dispatch_sync (mainQueue, ^{
    BOOL ret;

    if (captureScreen)
      ret = [self openScreenInput];
    else
      ret = [self openDeviceInput];

    if (!ret)
      return;

    output = [[AVCaptureVideoDataOutput alloc] init];
    [output setSampleBufferDelegate:self
                              queue:workerQueue];
    output.alwaysDiscardsLateVideoFrames = YES;
    output.videoSettings = nil; /* device native format */

    session = [[AVCaptureSession alloc] init];
    [session addInput:input];
    [session addOutput:output];

    /* retained by session */
    connection = [[output connections] firstObject];
    /* Use #if, not #ifdef: HAVE_IOS may be defined to 0 on macOS builds,
     * which would wrongly enable this branch with #ifdef. */
#if HAVE_IOS
    if (orientation != DEFAULT_ORIENTATION)
      connection.videoOrientation = GstAVFVideoSourceOrientation2AVCaptureVideoOrientation(orientation);
#endif
    inputClock = ((AVCaptureInputPort *)connection.inputPorts[0]).clock;

    *successPtr = YES;
  });

  GST_DEBUG_OBJECT (element, "Opening device %s", success ? "succeed" : "failed");

  return success;
}
505
- (void)closeDevice
{
  /* Tear down the capture session on the main queue. The session must have
   * been stopped beforehand (asserted). Under ARC, nil-ing the references
   * releases the AVFoundation objects; the negotiated caps are unreffed
   * explicitly. */
  GST_DEBUG_OBJECT (element, "Closing device");

  dispatch_sync (mainQueue, ^{
    g_assert (![session isRunning]);

    connection = nil;
    inputClock = nil;

    [session removeInput:input];
    [session removeOutput:output];

    session = nil;

    input = nil;

    output = nil;

    /* In screen-capture mode no AVCaptureDevice was ever acquired */
    if (!captureScreen) {
      device = nil;
    }

    if (caps)
      gst_caps_unref (caps);
    caps = NULL;
  });
}
534
/* Build a simple raw-video caps structure with a fixed framerate. */
#define GST_AVF_CAPS_NEW(format, w, h, fps_n, fps_d)                  \
    (gst_caps_new_simple ("video/x-raw",                              \
        "width", G_TYPE_INT, w,                                       \
        "height", G_TYPE_INT, h,                                      \
        "format", G_TYPE_STRING, gst_video_format_to_string (format), \
        "framerate", GST_TYPE_FRACTION, (fps_n), (fps_d),             \
        NULL))

/* Same as GST_AVF_CAPS_NEW but with a [min, max] framerate range. */
#define GST_AVF_FPS_RANGE_CAPS_NEW(format, w, h, min_fps_n, min_fps_d, max_fps_n, max_fps_d) \
    (gst_caps_new_simple ("video/x-raw",                              \
        "width", G_TYPE_INT, w,                                       \
        "height", G_TYPE_INT, h,                                      \
        "format", G_TYPE_STRING, gst_video_format_to_string (format), \
        "framerate", GST_TYPE_FRACTION_RANGE, (min_fps_n), (min_fps_d), (max_fps_n), (max_fps_d), \
        NULL))
550
- (GstVideoFormat)getGstVideoFormat:(NSNumber *)pixel_format
{
  /* Map a CoreVideo pixel format code onto the corresponding GStreamer
   * video format, logging unsupported codes at LOG level. */
  GstVideoFormat fmt = get_gst_video_format (pixel_format);

  if (fmt != GST_VIDEO_FORMAT_UNKNOWN)
    return fmt;

  GST_LOG_OBJECT (element, "Pixel format %s is not handled by avfvideosrc",
      [[pixel_format stringValue] UTF8String]);
  return GST_VIDEO_FORMAT_UNKNOWN;
}
560
561#if !HAVE_IOS
- (CGDirectDisplayID)getDisplayIdFromDeviceIndex
{
  /* Resolve deviceIndex to a CoreGraphics display ID for screen capture:
   * the default index maps to the main display, otherwise deviceIndex
   * indexes [NSScreen screens]. Returns 0 (and posts an element error) on
   * an out-of-range index. */
  NSDictionary *description;
  NSNumber *displayId;
  NSArray *screens = [NSScreen screens];

  if (deviceIndex == DEFAULT_DEVICE_INDEX)
    return kCGDirectMainDisplay;
  if (deviceIndex >= [screens count]) {
    GST_ELEMENT_ERROR (element, RESOURCE, NOT_FOUND,
                        ("Invalid screen capture device index"), (NULL));
    return 0;
  }
  /* NSScreenNumber in the device description holds the CGDirectDisplayID */
  description = [[screens objectAtIndex:deviceIndex] deviceDescription];
  displayId = [description objectForKey:@"NSScreenNumber"];
  return [displayId unsignedIntegerValue];
}
579
- (float)getScaleFactorFromDeviceIndex
{
  /* Backing scale factor (Retina multiplier) of the screen selected by
   * deviceIndex; the default index maps to the main screen. Returns 1.0
   * (and posts an element error) on an out-of-range index. */
  NSArray *allScreens = [NSScreen screens];

  if (deviceIndex == DEFAULT_DEVICE_INDEX)
    return [[NSScreen mainScreen] backingScaleFactor];

  if (deviceIndex < [allScreens count])
    return [[allScreens objectAtIndex:deviceIndex] backingScaleFactor];

  GST_ELEMENT_ERROR (element, RESOURCE, NOT_FOUND,
                      ("Invalid screen capture device index"), (NULL));
  return 1.0;
}
593#endif
594
595
/* Adjust raw device dimensions for the configured orientation (delegates to
 * the file-level get_oriented_dimensions helper, defined elsewhere). */
- (CMVideoDimensions)orientedDimensions:(CMVideoDimensions)dimensions
{
  return get_oriented_dimensions(orientation, dimensions);
}
600
- (GstCaps *)getDeviceCaps
{
  GstCaps *result;

  /* Probe the opened capture device for the caps it can produce. */
  GST_DEBUG_OBJECT (element, "Getting device caps");
  result = gst_av_capture_device_get_caps (device, output, orientation);
  GST_DEBUG_OBJECT (element, "Device returned the following caps %" GST_PTR_FORMAT, result);

  return result;
}
609
/* Configure the device's active format and frame duration to match |info|.
 * Access goes through KVC because the formats/activeFormat keys only exist
 * on OSX >= 10.7 / iOS >= 7.0; the caller catches NSUndefinedKeyException
 * and falls back to session presets.
 *
 * On success the device is deliberately left locked for configuration —
 * -setCaps: unlocks it after the session has started so the session cannot
 * reset the chosen format. On failure the lock is released here (the
 * original code leaked the configuration lock on the failure paths). */
- (BOOL)setDeviceCaps:(GstVideoInfo *)info
{
  double framerate;
  gboolean found_format = FALSE, found_framerate = FALSE;
  NSArray *formats = [device valueForKey:@"formats"];
  gst_util_fraction_to_double (info->fps_n, info->fps_d, &framerate);

  GST_DEBUG_OBJECT (element, "Setting device caps");

  if ([device lockForConfiguration:NULL] == YES) {
    for (NSObject *f in formats) {
      CMFormatDescriptionRef formatDescription = (__bridge CMFormatDescriptionRef) [f performSelector:@selector(formatDescription)];
      CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);
      dimensions = [self orientedDimensions:dimensions];
      if (dimensions.width == info->width && dimensions.height == info->height) {
        found_format = TRUE;
        [device setValue:f forKey:@"activeFormat"];
        for (NSObject *rate in [f valueForKey:@"videoSupportedFrameRateRanges"]) {
          gdouble min_frame_rate, max_frame_rate;

          [[rate valueForKey:@"minFrameRate"] getValue:&min_frame_rate];
          [[rate valueForKey:@"maxFrameRate"] getValue:&max_frame_rate];
          /* small epsilon to tolerate fraction -> double rounding */
          if ((framerate >= min_frame_rate - 0.00001) &&
              (framerate <= max_frame_rate + 0.00001)) {
            NSValue *frame_duration_value;
            found_framerate = TRUE;
            if (min_frame_rate == max_frame_rate) {
              /* on mac we get tight ranges and an exception is raised if the
               * frame duration doesn't match the one reported in the range to
               * the last decimal point
               */
              frame_duration_value = [rate valueForKey:@"minFrameDuration"];
            } else {
              // Invert fps_n and fps_d to get frame duration value and timescale (or numerator and denominator)
              frame_duration_value = [NSValue valueWithCMTime:CMTimeMake (info->fps_d, info->fps_n)];
            }
            [device setValue:frame_duration_value forKey:@"activeVideoMinFrameDuration"];
            @try {
              /* Only available on OSX >= 10.8 and iOS >= 7.0 */
              [device setValue:frame_duration_value forKey:@"activeVideoMaxFrameDuration"];
            } @catch (NSException *exception) {
              if (![[exception name] isEqualToString:NSUndefinedKeyException]) {
                GST_WARNING ("An unexcepted error occured: %s",
                              [exception.reason UTF8String]);
              }
            }
            break;
          }
        }
      }
    }
    if (!found_format) {
      GST_WARNING ("Unsupported capture dimensions %dx%d", info->width, info->height);
      /* don't leave the device locked for configuration on failure */
      [device unlockForConfiguration];
      return NO;
    }
    if (!found_framerate) {
      GST_WARNING ("Unsupported capture framerate %d/%d", info->fps_n, info->fps_d);
      /* don't leave the device locked for configuration on failure */
      [device unlockForConfiguration];
      return NO;
    }
  } else {
    GST_WARNING ("Couldn't lock device for configuration");
    return NO;
  }
  return YES;
}
675
- (BOOL)getSessionPresetCaps:(GstCaps *)result
{
  /* Append caps for every fixed AVCaptureSession preset the session
   * accepts, crossed with every pixel format the output can deliver.
   * Fallback path for platforms without the device formats API. */
  for (NSNumber *pf in output.availableVideoCVPixelFormatTypes) {
    GstVideoFormat fmt = [self getGstVideoFormat:pf];

    if (fmt == GST_VIDEO_FORMAT_UNKNOWN)
      continue;

#if HAVE_IOS
    if ([session canSetSessionPreset:AVCaptureSessionPreset1920x1080])
      gst_caps_append (result, GST_AVF_CAPS_NEW (fmt, 1920, 1080, DEVICE_FPS_N, DEVICE_FPS_D));
#endif
    if ([session canSetSessionPreset:AVCaptureSessionPreset1280x720])
      gst_caps_append (result, GST_AVF_CAPS_NEW (fmt, 1280, 720, DEVICE_FPS_N, DEVICE_FPS_D));
    if ([session canSetSessionPreset:AVCaptureSessionPreset640x480])
      gst_caps_append (result, GST_AVF_CAPS_NEW (fmt, 640, 480, DEVICE_FPS_N, DEVICE_FPS_D));
    if ([session canSetSessionPreset:AVCaptureSessionPresetMedium])
      gst_caps_append (result, GST_AVF_CAPS_NEW (fmt, 480, 360, DEVICE_FPS_N, DEVICE_FPS_D));
    if ([session canSetSessionPreset:AVCaptureSessionPreset352x288])
      gst_caps_append (result, GST_AVF_CAPS_NEW (fmt, 352, 288, DEVICE_FPS_N, DEVICE_FPS_D));
    if ([session canSetSessionPreset:AVCaptureSessionPresetLow])
      gst_caps_append (result, GST_AVF_CAPS_NEW (fmt, 192, 144, DEVICE_FPS_N, DEVICE_FPS_D));
  }

  GST_LOG_OBJECT (element, "Session presets returned the following caps %" GST_PTR_FORMAT, result);

  return YES;
}
704
/* Select the fixed AVCaptureSession preset that matches the requested
 * width (fallback path where the device formats API is unavailable).
 *
 * On success the device is deliberately left locked for configuration —
 * -setCaps: unlocks it after the session has started. On failure the lock
 * is released here (the original code leaked it). Also fixes the stray
 * semicolon after the method signature and a "presset" typo in the debug
 * message. */
- (BOOL)setSessionPresetCaps:(GstVideoInfo *)info
{
  GST_DEBUG_OBJECT (element, "Setting session preset caps");

  if ([device lockForConfiguration:NULL] != YES) {
    GST_WARNING ("Couldn't lock device for configuration");
    return NO;
  }

  switch (info->width) {
  case 192:
    session.sessionPreset = AVCaptureSessionPresetLow;
    break;
  case 352:
    session.sessionPreset = AVCaptureSessionPreset352x288;
    break;
  case 480:
    session.sessionPreset = AVCaptureSessionPresetMedium;
    break;
  case 640:
    session.sessionPreset = AVCaptureSessionPreset640x480;
    break;
  case 1280:
    session.sessionPreset = AVCaptureSessionPreset1280x720;
    break;
#if HAVE_IOS
  case 1920:
    session.sessionPreset = AVCaptureSessionPreset1920x1080;
    break;
#endif
  default:
    GST_WARNING ("Unsupported capture dimensions %dx%d", info->width, info->height);
    /* don't leave the device locked for configuration on failure */
    [device unlockForConfiguration];
    return NO;
  }
  return YES;
}
741
- (GstCaps *)getCaps
{
  /* Report the caps this source can currently produce: screen dimensions
   * (scaled for Retina) when screen-capturing, otherwise the device's
   * formats — falling back to session presets where the formats KVC key
   * does not exist (raises NSUndefinedKeyException). Returns NULL before
   * the session exists so GstBaseSrc uses the template caps. */
  GstCaps *result;
  NSArray *pixel_formats;

  if (session == nil)
    return NULL; /* BaseSrc will return template caps */

  result = gst_caps_new_empty ();
  pixel_formats = output.availableVideoCVPixelFormatTypes;

  if (captureScreen) {
#if !HAVE_IOS
    CGRect rect = CGDisplayBounds ([self getDisplayIdFromDeviceIndex]);
    float scale = [self getScaleFactorFromDeviceIndex];
    for (NSNumber *pixel_format in pixel_formats) {
      GstVideoFormat gst_format = [self getGstVideoFormat:pixel_format];
      if (gst_format != GST_VIDEO_FORMAT_UNKNOWN)
        gst_caps_append (result, gst_caps_new_simple ("video/x-raw",
            "width", G_TYPE_INT, (int)(rect.size.width * scale),
            "height", G_TYPE_INT, (int)(rect.size.height * scale),
            "format", G_TYPE_STRING, gst_video_format_to_string (gst_format),
            NULL));
    }
#else
    GST_WARNING ("Screen capture is not supported by iOS");
#endif
    return result;
  }

  @try {
    result = gst_caps_merge (result, [self getDeviceCaps]);
  } @catch (NSException *exception) {
    if (![[exception name] isEqualToString:NSUndefinedKeyException]) {
      GST_WARNING ("An unexcepted error occured: %s", [exception.reason UTF8String]);
      return result;
    }

    /* Fallback on session presets API for iOS < 7.0 */
    [self getSessionPresetCaps:result];
  }

  return result;
}
786
- (BOOL)setCaps:(GstCaps *)new_caps
{
  /* Apply negotiated caps: record the output geometry/format, configure the
   * device (or screen input / session preset) on the main queue, set the
   * output's pixel format and start the session. The device configuration
   * lock taken in setDeviceCaps/setSessionPresetCaps is released here only
   * after the session is running. */
  GstVideoInfo info;
  BOOL success = YES, *successPtr = &success;

  gst_video_info_init (&info);
  /* NOTE(review): the return value of gst_video_info_from_caps is ignored;
   * presumably new_caps is always fixed/valid at this point — confirm. */
  gst_video_info_from_caps (&info, new_caps);

  width = info.width;
  height = info.height;
  format = info.finfo->format;
  /* one frame duration is reported as latency */
  latency = gst_util_uint64_scale (GST_SECOND, info.fps_d, info.fps_n);

  dispatch_sync (mainQueue, ^{
    int newformat;

    if (captureScreen) {
#if !HAVE_IOS
      AVCaptureScreenInput *screenInput = (AVCaptureScreenInput *)input;
      screenInput.minFrameDuration = CMTimeMake(info.fps_d, info.fps_n);
#else
      GST_WARNING ("Screen capture is not supported by iOS");
      *successPtr = NO;
      return;
#endif
    } else {
      @try {

        /* formats and activeFormat keys are only available on OSX >= 10.7 and iOS >= 7.0 */
        *successPtr = [self setDeviceCaps:(GstVideoInfo *)&info];
        if (*successPtr != YES)
          return;

      } @catch (NSException *exception) {

        if (![[exception name] isEqualToString:NSUndefinedKeyException]) {
          GST_WARNING ("An unexcepted error occured: %s", [exception.reason UTF8String]);
          *successPtr = NO;
          return;
        }

        /* Fallback on session presets API for iOS < 7.0 */
        *successPtr = [self setSessionPresetCaps:(GstVideoInfo *)&info];
        if (*successPtr != YES)
          return;
      }
    }

    /* map the negotiated GStreamer format to a CoreVideo pixel format */
    switch (format) {
      case GST_VIDEO_FORMAT_NV12:
        newformat = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange;
        break;
      case GST_VIDEO_FORMAT_UYVY:
        newformat = kCVPixelFormatType_422YpCbCr8;
        break;
      case GST_VIDEO_FORMAT_YUY2:
        newformat = kCVPixelFormatType_422YpCbCr8_yuvs;
        break;
      case GST_VIDEO_FORMAT_BGRA:
        newformat = kCVPixelFormatType_32BGRA;
        break;
      default:
        *successPtr = NO;
        GST_WARNING ("Unsupported output format %s",
            gst_video_format_to_string (format));
        return;
    }

    GST_INFO_OBJECT (element,
        "width: %d height: %d format: %s", width, height,
        gst_video_format_to_string (format));

    output.videoSettings = [NSDictionary
        dictionaryWithObject:[NSNumber numberWithInt:newformat]
        forKey:(NSString*)kCVPixelBufferPixelFormatTypeKey];

    gst_caps_replace (&caps, new_caps);
    GST_INFO_OBJECT (element, "configured caps %"GST_PTR_FORMAT, caps);

    if (![session isRunning])
      [session startRunning];

    /* Unlock device configuration only after session is started so the session
     * won't reset the capture formats */
    [device unlockForConfiguration];
  });

  return success;
}
876
- (BOOL)start
{
  /* Fresh buffer queue guarded by a condition lock; the lock's condition
   * tracks whether buffers (or a stop request) are pending for -create. */
  bufQueue = [[NSMutableArray alloc] initWithCapacity:BUFFER_QUEUE_SIZE];
  bufQueueLock = [[NSConditionLock alloc] initWithCondition:NO_BUFFERS];
  stopRequest = NO;

  /* Per-run bookkeeping. */
  offset = 0;
  latency = GST_CLOCK_TIME_NONE;

  /* Statistics state for -updateStatistics. */
  count = 0;
  fps = -1;
  lastSampling = GST_CLOCK_TIME_NONE;

  return YES;
}
892
- (BOOL)stop
{
  /* Stop the session on the main queue, then drain the worker queue so no
   * capture callback is still in flight while we tear down state. */
  dispatch_sync (mainQueue, ^{ [session stopRunning]; });
  dispatch_sync (workerQueue, ^{});

  bufQueue = nil;
  bufQueueLock = nil;

  if (textureCache != NULL) {
    gst_video_texture_cache_free (textureCache);
    textureCache = NULL;
  }

  if (ctxh != NULL) {
    gst_gl_context_helper_free (ctxh);
    ctxh = NULL;
  }

  return YES;
}
911
- (BOOL)query:(GstQuery *)query
{
  /* Only latency queries are answered here; everything else is delegated
   * to the base source implementation. */
  if (GST_QUERY_TYPE (query) != GST_QUERY_LATENCY)
    return GST_BASE_SRC_CLASS (parent_class)->query (baseSrc, query);

  /* We can only report latency once a device is open and caps are set. */
  if (device == nil || caps == NULL)
    return NO;

  GstClockTime min_latency = latency;
  GstClockTime max_latency = latency;

  GST_DEBUG_OBJECT (element, "reporting latency of min %" GST_TIME_FORMAT
      " max %" GST_TIME_FORMAT,
      GST_TIME_ARGS (min_latency), GST_TIME_ARGS (max_latency));
  gst_query_set_latency (query, TRUE, min_latency, max_latency);

  return YES;
}
934
- (BOOL)unlock
{
  /* Wake up a thread blocked in -create by raising the stop flag and
   * signalling the HAS_BUFFER_OR_STOP_REQUEST condition; -create then
   * returns GST_FLOW_FLUSHING. */
  [bufQueueLock lock];
  stopRequest = YES;
  [bufQueueLock unlockWithCondition:HAS_BUFFER_OR_STOP_REQUEST];

  return YES;
}
943
- (BOOL)unlockStop
{
  /* Clear the stop flag and restore the lock's condition to match the
   * actual contents of the buffer queue. */
  [bufQueueLock lock];
  stopRequest = NO;
  if ([bufQueue count] == 0)
    [bufQueueLock unlockWithCondition:NO_BUFFERS];
  else
    [bufQueueLock unlockWithCondition:HAS_BUFFER_OR_STOP_REQUEST];

  return YES;
}
952
- (GstStateChangeReturn)changeState:(GstStateChange)transition
{
  /* Open the capture device before going READY; fail the transition early
   * if that does not work. */
  if (transition == GST_STATE_CHANGE_NULL_TO_READY && ![self openDevice])
    return GST_STATE_CHANGE_FAILURE;

  GstStateChangeReturn ret =
      GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);

  /* Release the device once we are back in NULL. */
  if (transition == GST_STATE_CHANGE_READY_TO_NULL)
    [self closeDevice];

  return ret;
}
969
/* AVCaptureVideoDataOutput delegate callback: timestamps the incoming
 * frame and enqueues it for the streaming thread (-create). */
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)aConnection
{
  GstClockTime timestamp, duration;

  [bufQueueLock lock];

  /* An unlock/flush was requested; drop the frame. */
  if (stopRequest) {
    [bufQueueLock unlock];
    return;
  }

  [self getSampleBuffer:sampleBuffer timestamp:&timestamp duration:&duration];

  /* No usable timestamp (e.g. no pipeline clock yet): drop the frame, but
   * keep the lock condition consistent with the queue contents. */
  if (timestamp == GST_CLOCK_TIME_NONE) {
    [bufQueueLock unlockWithCondition:([bufQueue count] == 0) ? NO_BUFFERS : HAS_BUFFER_OR_STOP_REQUEST];
    return;
  }

  /* Queue full: drop the oldest frame (kept at the tail) to make room. */
  if ([bufQueue count] == BUFFER_QUEUE_SIZE)
    [bufQueue removeLastObject];

  /* Newest frames go to the head; -create consumes from the tail, so the
   * queue behaves as a bounded FIFO. The dictionary retains sampleBuffer
   * via the __bridge cast to id. */
  [bufQueue insertObject:@{@"sbuf": (__bridge id)sampleBuffer,
                           @"timestamp": @(timestamp),
                           @"duration": @(duration)}
                 atIndex:0];

  [bufQueueLock unlockWithCondition:HAS_BUFFER_OR_STOP_REQUEST];
}
1000
/* GstPushSrc create vmethod: block for the next captured frame, wrap it
 * in a GstBuffer and hand it to the pipeline.
 * Returns GST_FLOW_FLUSHING on unlock, GST_FLOW_ERROR if the sample
 * buffer cannot be wrapped, GST_FLOW_OK otherwise. */
- (GstFlowReturn)create:(GstBuffer **)buf
{
  CMSampleBufferRef sbuf;
  CVImageBufferRef image_buf;
  CVPixelBufferRef pixel_buf;
  size_t cur_width, cur_height;
  GstClockTime timestamp, duration;

  /* Wait until the capture callback queued a frame or -unlock requested
   * a flush. */
  [bufQueueLock lockWhenCondition:HAS_BUFFER_OR_STOP_REQUEST];
  if (stopRequest) {
    [bufQueueLock unlock];
    return GST_FLOW_FLUSHING;
  }

  /* Consume the oldest frame (tail of the queue; the callback inserts at
   * the head) and retain the sample buffer before releasing the lock. */
  NSDictionary *dic = (NSDictionary *) [bufQueue lastObject];
  sbuf = (__bridge CMSampleBufferRef) dic[@"sbuf"];
  timestamp = (GstClockTime) [dic[@"timestamp"] longLongValue];
  duration = (GstClockTime) [dic[@"duration"] longLongValue];
  CFRetain (sbuf);
  [bufQueue removeLastObject];
  [bufQueueLock unlockWithCondition:
      ([bufQueue count] == 0) ? NO_BUFFERS : HAS_BUFFER_OR_STOP_REQUEST];

  /* The device may renegotiate its output size behind our back; if the
   * frame dimensions changed, update our caps and announce them downstream. */
  image_buf = CMSampleBufferGetImageBuffer (sbuf);
  if (image_buf) {
    pixel_buf = (CVPixelBufferRef) image_buf;
    cur_width = CVPixelBufferGetWidth (pixel_buf);
    cur_height = CVPixelBufferGetHeight (pixel_buf);

    if (width != cur_width || height != cur_height) {
      /* Set new caps according to current frame dimensions */
      GST_WARNING ("Output frame size has changed %dx%d -> %dx%d, updating caps",
          width, height, (int)cur_width, (int)cur_height);
      width = cur_width;
      height = cur_height;
      gst_caps_set_simple (caps,
        "width", G_TYPE_INT, width,
        "height", G_TYPE_INT, height,
        NULL);
      /* This element is a source: the caps event belongs on the src pad.
       * The previous code used GST_BASE_SINK_PAD here, which only worked
       * by accident of GstBaseSink/GstBaseSrc sharing the pad's struct
       * offset. */
      gst_pad_push_event (GST_BASE_SRC_PAD (baseSrc), gst_event_new_caps (caps));
    }
  }

  *buf = gst_core_media_buffer_new (sbuf, useVideoMeta, textureCache);
  /* The GstBuffer (if any) holds its own reference; drop ours either way. */
  CFRelease (sbuf);
  if (*buf == NULL)
    return GST_FLOW_ERROR;

  GST_BUFFER_OFFSET (*buf) = offset++;
  GST_BUFFER_OFFSET_END (*buf) = GST_BUFFER_OFFSET (*buf) + 1;
  GST_BUFFER_TIMESTAMP (*buf) = timestamp;
  GST_BUFFER_DURATION (*buf) = duration;

  if (doStats)
    [self updateStatistics];

  return GST_FLOW_OK;
}
1062
- (GstCaps *)fixate:(GstCaps *)new_caps
{
  /* Fixate towards the largest height and highest framerate. This is what
   * the session presets do, but we don't use the presets in ios >= 7.0. */
  GstStructure *structure;

  new_caps = gst_caps_truncate (gst_caps_make_writable (new_caps));
  structure = gst_caps_get_structure (new_caps, 0);
  gst_structure_fixate_field_nearest_int (structure, "height", G_MAXINT);
  gst_structure_fixate_field_nearest_fraction (structure, "framerate",
      G_MAXINT, 1);

  return gst_caps_fixate (new_caps);
}
1077
/* GstBaseSrc decide_allocation vmethod: when downstream negotiated
 * GLMemory caps, make sure we have a texture cache bound to the current
 * GL context so -create can upload frames as textures. */
- (BOOL)decideAllocation:(GstQuery *)query
{
  GstCaps *alloc_caps;
  GstCapsFeatures *features;
  gboolean ret;

  ret = GST_BASE_SRC_CLASS (parent_class)->decide_allocation (baseSrc, query);
  if (!ret)
    return ret;

  gst_query_parse_allocation (query, &alloc_caps, NULL);
  features = gst_caps_get_features (alloc_caps, 0);
  if (gst_caps_features_contains (features, GST_CAPS_FEATURE_MEMORY_GL_MEMORY)) {
    gst_gl_context_helper_ensure_context (ctxh);
    GST_INFO_OBJECT (element, "pushing textures, context %p old context %p",
        ctxh->context, textureCache ? textureCache->ctx : NULL);
    /* Drop a cache bound to a stale GL context. */
    if (textureCache && textureCache->ctx != ctxh->context) {
      gst_video_texture_cache_free (textureCache);
      textureCache = NULL;
    }
    /* Only allocate a cache when we don't already have one for the
     * current context; the previous code created a new cache
     * unconditionally and leaked the old one when renegotiating with an
     * unchanged context. */
    if (!textureCache)
      textureCache = gst_video_texture_cache_new (ctxh->context);
    gst_video_texture_cache_set_format (textureCache, format, alloc_caps);
  }

  return TRUE;
}
1104
/* GstElement set_context vmethod: pick up an externally supplied GL
 * display/context, then chain up so the base class sees it too. */
- (void)setContext:(GstContext *)context
{
  GST_INFO_OBJECT (element, "setting context %s",
          gst_context_get_context_type (context));
  gst_gl_handle_set_context (element, context,
          &ctxh->display, &ctxh->other_context);
  GST_ELEMENT_CLASS (parent_class)->set_context (element, context);
}
1113
/* Derive a pipeline timestamp and duration for a captured sample buffer.
 *
 * The AVF presentation timestamp and the current AVF input clock time are
 * both converted to nanoseconds; their difference is how long the frame
 * spent in AVF queues, which is subtracted from the element's running
 * time. Outputs GST_CLOCK_TIME_NONE when timing info is unavailable or
 * there is no pipeline clock. */
- (void)getSampleBuffer:(CMSampleBufferRef)sbuf
              timestamp:(GstClockTime *)outTimestamp
               duration:(GstClockTime *)outDuration
{
  CMSampleTimingInfo time_info;
  GstClockTime timestamp, avf_timestamp, duration, input_clock_now, input_clock_diff, running_time;
  CMItemCount num_timings;
  GstClock *clock;
  CMTime now;

  timestamp = GST_CLOCK_TIME_NONE;
  duration = GST_CLOCK_TIME_NONE;
  if (CMSampleBufferGetOutputSampleTimingInfoArray(sbuf, 1, &time_info, &num_timings) == noErr) {
    /* Convert CMTime (value/timescale) to GStreamer nanoseconds. */
    avf_timestamp = gst_util_uint64_scale (GST_SECOND,
            time_info.presentationTimeStamp.value, time_info.presentationTimeStamp.timescale);

    if (CMTIME_IS_VALID (time_info.duration) && time_info.duration.timescale != 0)
      duration = gst_util_uint64_scale (GST_SECOND,
          time_info.duration.value, time_info.duration.timescale);

    now = CMClockGetTime(inputClock);
    input_clock_now = gst_util_uint64_scale (GST_SECOND,
        now.value, now.timescale);
    /* Time the frame spent inside AVF between capture and delivery.
     * NOTE(review): assumes input_clock_now >= avf_timestamp (same clock);
     * an unsigned underflow here is masked by the running_time guard
     * below — confirm both stamps come from inputClock. */
    input_clock_diff = input_clock_now - avf_timestamp;

    GST_OBJECT_LOCK (element);
    clock = GST_ELEMENT_CLOCK (element);
    if (clock) {
      running_time = gst_clock_get_time (clock) - element->base_time;
      /* We use presentationTimeStamp to determine how much time it took
       * between capturing and receiving the frame in our delegate
       * (e.g. how long it spent in AVF queues), then we subtract that time
       * from our running time to get the actual timestamp.
       */
      if (running_time >= input_clock_diff)
        timestamp = running_time - input_clock_diff;
      else
        timestamp = running_time;

      GST_DEBUG_OBJECT (element, "AVF clock: %"GST_TIME_FORMAT ", AVF PTS: %"GST_TIME_FORMAT
          ", AVF clock diff: %"GST_TIME_FORMAT
          ", running time: %"GST_TIME_FORMAT ", out PTS: %"GST_TIME_FORMAT,
          GST_TIME_ARGS (input_clock_now), GST_TIME_ARGS (avf_timestamp),
          GST_TIME_ARGS (input_clock_diff),
          GST_TIME_ARGS (running_time), GST_TIME_ARGS (timestamp));
    } else {
      /* no clock, can't set timestamps */
      timestamp = GST_CLOCK_TIME_NONE;
    }
    GST_OBJECT_UNLOCK (element);
  }

  *outTimestamp = timestamp;
  *outDuration = duration;
}
1169
- (void)updateStatistics
{
  /* Count frames and, once per second of clock time, publish the count as
   * the "fps" property. */
  GstClock *clock;

  GST_OBJECT_LOCK (element);
  clock = GST_ELEMENT_CLOCK (element);
  if (clock != NULL)
    gst_object_ref (clock);
  GST_OBJECT_UNLOCK (element);

  if (clock == NULL)
    return;

  GstClockTime now = gst_clock_get_time (clock);
  gst_object_unref (clock);

  count++;

  if (!GST_CLOCK_TIME_IS_VALID (lastSampling)) {
    /* First frame: start the measurement window. */
    lastSampling = now;
    return;
  }

  if (now - lastSampling < GST_SECOND)
    return;

  /* A full window elapsed: publish and reset. */
  GST_OBJECT_LOCK (element);
  fps = count;
  GST_OBJECT_UNLOCK (element);

  g_object_notify (G_OBJECT (element), "fps");

  lastSampling = now;
  count = 0;
}
1202
1203@end
1204
1205/*
1206 * Glue code
1207 */
1208
/* GObject property IDs; the screen-capture properties only exist on
 * macOS builds. */
enum
{
  PROP_0,
  PROP_DEVICE_INDEX,
  PROP_DEVICE_NAME,
  PROP_POSITION,
  PROP_ORIENTATION,
  PROP_DEVICE_TYPE,
  PROP_DO_STATS,
  PROP_FPS,
#if !HAVE_IOS
  PROP_CAPTURE_SCREEN,
  PROP_CAPTURE_SCREEN_CURSOR,
  PROP_CAPTURE_SCREEN_MOUSE_CLICKS,
#endif
};
1225
1226
1227static void gst_avf_video_src_finalize (GObject * obj);
1228static void gst_avf_video_src_get_property (GObject * object, guint prop_id,
1229    GValue * value, GParamSpec * pspec);
1230static void gst_avf_video_src_set_property (GObject * object, guint prop_id,
1231    const GValue * value, GParamSpec * pspec);
1232static GstStateChangeReturn gst_avf_video_src_change_state (
1233    GstElement * element, GstStateChange transition);
1234static GstCaps * gst_avf_video_src_get_caps (GstBaseSrc * basesrc,
1235    GstCaps * filter);
1236static gboolean gst_avf_video_src_set_caps (GstBaseSrc * basesrc,
1237    GstCaps * caps);
1238static gboolean gst_avf_video_src_start (GstBaseSrc * basesrc);
1239static gboolean gst_avf_video_src_stop (GstBaseSrc * basesrc);
1240static gboolean gst_avf_video_src_query (GstBaseSrc * basesrc,
1241    GstQuery * query);
1242static gboolean gst_avf_video_src_unlock (GstBaseSrc * basesrc);
1243static gboolean gst_avf_video_src_unlock_stop (GstBaseSrc * basesrc);
1244static GstFlowReturn gst_avf_video_src_create (GstPushSrc * pushsrc,
1245    GstBuffer ** buf);
1246static GstCaps * gst_avf_video_src_fixate (GstBaseSrc * bsrc,
1247    GstCaps * caps);
1248static gboolean gst_avf_video_src_decide_allocation (GstBaseSrc * bsrc,
1249    GstQuery * query);
1250static void gst_avf_video_src_set_context (GstElement * element,
1251        GstContext * context);
1252
/* Class initializer: wires every GObject/GstElement/GstBaseSrc/GstPushSrc
 * vmethod to the thin C wrappers below (which forward to the ObjC impl)
 * and installs the element's properties and pad template. */
static void
gst_avf_video_src_class_init (GstAVFVideoSrcClass * klass)
{
  GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
  GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
  GstBaseSrcClass *gstbasesrc_class = GST_BASE_SRC_CLASS (klass);
  GstPushSrcClass *gstpushsrc_class = GST_PUSH_SRC_CLASS (klass);

  gobject_class->finalize = gst_avf_video_src_finalize;
  gobject_class->get_property = gst_avf_video_src_get_property;
  gobject_class->set_property = gst_avf_video_src_set_property;

  gstelement_class->change_state = gst_avf_video_src_change_state;
  gstelement_class->set_context = gst_avf_video_src_set_context;

  gstbasesrc_class->get_caps = gst_avf_video_src_get_caps;
  gstbasesrc_class->set_caps = gst_avf_video_src_set_caps;
  gstbasesrc_class->start = gst_avf_video_src_start;
  gstbasesrc_class->stop = gst_avf_video_src_stop;
  gstbasesrc_class->query = gst_avf_video_src_query;
  gstbasesrc_class->unlock = gst_avf_video_src_unlock;
  gstbasesrc_class->unlock_stop = gst_avf_video_src_unlock_stop;
  gstbasesrc_class->fixate = gst_avf_video_src_fixate;
  gstbasesrc_class->decide_allocation = gst_avf_video_src_decide_allocation;

  gstpushsrc_class->create = gst_avf_video_src_create;

  gst_element_class_set_metadata (gstelement_class,
      "Video Source (AVFoundation)", "Source/Video/Hardware",
      "Reads frames from an iOS AVFoundation device",
      "Ole André Vadla Ravnås <oleavr@soundrop.com>");

  gst_element_class_add_static_pad_template (gstelement_class, &src_template);

  g_object_class_install_property (gobject_class, PROP_DEVICE_INDEX,
      g_param_spec_int ("device-index", "Device Index",
          "The zero-based device index",
          -1, G_MAXINT, DEFAULT_DEVICE_INDEX,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_DEVICE_NAME,
      g_param_spec_string ("device-name", "Device Name",
          "The name of the currently opened capture device",
          NULL, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_POSITION,
                                   g_param_spec_enum ("position", "Position",
                                                      "The position of the capture device (front or back-facing)",
                                                      GST_TYPE_AVF_VIDEO_SOURCE_POSITION, DEFAULT_POSITION,
                                                      G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_ORIENTATION,
                                   g_param_spec_enum ("orientation", "Orientation",
                                                      "The orientation of the video",
                                                      GST_TYPE_AVF_VIDEO_SOURCE_ORIENTATION, DEFAULT_ORIENTATION,
                                                      G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_DEVICE_TYPE,
                                   g_param_spec_enum ("device-type", "Device Type",
                                                      "The general type of a video capture device",
                                                      GST_TYPE_AVF_VIDEO_SOURCE_DEVICE_TYPE, DEFAULT_DEVICE_TYPE,
                                                      G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_DO_STATS,
      g_param_spec_boolean ("do-stats", "Enable statistics",
          "Enable logging of statistics", DEFAULT_DO_STATS,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  /* "fps" is read-only; it is computed by updateStatistics when do-stats
   * is enabled, -1 otherwise. */
  g_object_class_install_property (gobject_class, PROP_FPS,
      g_param_spec_int ("fps", "Frames per second",
          "Last measured framerate, if statistics are enabled",
          -1, G_MAXINT, -1, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
#if !HAVE_IOS
  g_object_class_install_property (gobject_class, PROP_CAPTURE_SCREEN,
      g_param_spec_boolean ("capture-screen", "Enable screen capture",
          "Enable screen capture functionality", FALSE,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_CAPTURE_SCREEN_CURSOR,
      g_param_spec_boolean ("capture-screen-cursor", "Capture screen cursor",
          "Enable cursor capture while capturing screen", FALSE,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_CAPTURE_SCREEN_MOUSE_CLICKS,
      g_param_spec_boolean ("capture-screen-mouse-clicks", "Enable mouse clicks capture",
          "Enable mouse clicks capture while capturing screen", FALSE,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
#endif

  GST_DEBUG_CATEGORY_INIT (gst_avf_video_src_debug, "avfvideosrc",
      0, "iOS AVFoundation video source");
}
1337
/* Instance initializer: creates the Objective-C implementation object and
 * stores a retained reference in the C struct; the matching release is
 * CFBridgingRelease() in finalize. */
static void
gst_avf_video_src_init (GstAVFVideoSrc * src)
{
  src->impl = (__bridge_retained gpointer)[[GstAVFVideoSrcImpl alloc] initWithSrc:GST_PUSH_SRC (src)];
}
1343
/* Releases the retained ObjC implementation created in _init, then chains
 * up to the parent finalize. */
static void
gst_avf_video_src_finalize (GObject * obj)
{
  CFBridgingRelease(GST_AVF_VIDEO_SRC_CAST(obj)->impl);

  G_OBJECT_CLASS (parent_class)->finalize (obj);
}
1351
/* GObject get_property: reads each property straight from the ObjC
 * implementation's declared properties. */
static void
gst_avf_video_src_get_property (GObject * object, guint prop_id, GValue * value,
    GParamSpec * pspec)
{
  GstAVFVideoSrcImpl *impl = GST_AVF_VIDEO_SRC_IMPL (object);

  switch (prop_id) {
#if !HAVE_IOS
    case PROP_CAPTURE_SCREEN:
      g_value_set_boolean (value, impl.captureScreen);
      break;
    case PROP_CAPTURE_SCREEN_CURSOR:
      g_value_set_boolean (value, impl.captureScreenCursor);
      break;
    case PROP_CAPTURE_SCREEN_MOUSE_CLICKS:
      g_value_set_boolean (value, impl.captureScreenMouseClicks);
      break;
#endif
    case PROP_DEVICE_INDEX:
      g_value_set_int (value, impl.deviceIndex);
      break;
    case PROP_DEVICE_NAME:
      g_value_set_string (value, impl.deviceName);
      break;
    case PROP_POSITION:
      g_value_set_enum(value, impl.position);
      break;
    case PROP_ORIENTATION:
      g_value_set_enum(value, impl.orientation);
      break;
    case PROP_DEVICE_TYPE:
      g_value_set_enum(value, impl.deviceType);
      break;
    case PROP_DO_STATS:
      g_value_set_boolean (value, impl.doStats);
      break;
    case PROP_FPS:
      /* fps is written under the object lock in updateStatistics; take
       * the same lock for a consistent read. */
      GST_OBJECT_LOCK (object);
      g_value_set_int (value, impl.fps);
      GST_OBJECT_UNLOCK (object);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
1398
/* GObject set_property: forwards each writable property to the ObjC
 * implementation's declared properties. */
static void
gst_avf_video_src_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstAVFVideoSrcImpl *impl = GST_AVF_VIDEO_SRC_IMPL (object);

  switch (prop_id) {
#if !HAVE_IOS
    case PROP_CAPTURE_SCREEN:
      impl.captureScreen = g_value_get_boolean (value);
      break;
    case PROP_CAPTURE_SCREEN_CURSOR:
      impl.captureScreenCursor = g_value_get_boolean (value);
      break;
    case PROP_CAPTURE_SCREEN_MOUSE_CLICKS:
      impl.captureScreenMouseClicks = g_value_get_boolean (value);
      break;
#endif
    case PROP_DEVICE_INDEX:
      impl.deviceIndex = g_value_get_int (value);
      break;
    case PROP_POSITION:
      impl.position = g_value_get_enum(value);
      break;
    case PROP_ORIENTATION:
      impl.orientation = g_value_get_enum(value);
      break;
    case PROP_DEVICE_TYPE:
      impl.deviceType = g_value_get_enum(value);
      break;
    case PROP_DO_STATS:
      impl.doStats = g_value_get_boolean (value);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
1437
/* GstElement change_state vmethod: forward to the ObjC implementation. */
static GstStateChangeReturn
gst_avf_video_src_change_state (GstElement * element, GstStateChange transition)
{
  return [GST_AVF_VIDEO_SRC_IMPL (element) changeState:transition];
}
1447
/* GstBaseSrc get_caps vmethod: forward to the ObjC implementation. */
static GstCaps *
gst_avf_video_src_get_caps (GstBaseSrc * basesrc, GstCaps * filter)
{
  return [GST_AVF_VIDEO_SRC_IMPL (basesrc) getCaps];
}
1457
/* GstBaseSrc set_caps vmethod: forward to the ObjC implementation. */
static gboolean
gst_avf_video_src_set_caps (GstBaseSrc * basesrc, GstCaps * caps)
{
  return [GST_AVF_VIDEO_SRC_IMPL (basesrc) setCaps:caps];
}
1467
/* GstBaseSrc start vmethod: forward to the ObjC implementation. */
static gboolean
gst_avf_video_src_start (GstBaseSrc * basesrc)
{
  return [GST_AVF_VIDEO_SRC_IMPL (basesrc) start];
}
1477
/* GstBaseSrc stop vmethod: forward to the ObjC implementation. */
static gboolean
gst_avf_video_src_stop (GstBaseSrc * basesrc)
{
  return [GST_AVF_VIDEO_SRC_IMPL (basesrc) stop];
}
1487
/* GstBaseSrc query vmethod: forward to the ObjC implementation. */
static gboolean
gst_avf_video_src_query (GstBaseSrc * basesrc, GstQuery * query)
{
  return [GST_AVF_VIDEO_SRC_IMPL (basesrc) query:query];
}
1497
/* GstBaseSrc unlock vmethod: forward to the ObjC implementation. */
static gboolean
gst_avf_video_src_unlock (GstBaseSrc * basesrc)
{
  return [GST_AVF_VIDEO_SRC_IMPL (basesrc) unlock];
}
1507
/* GstBaseSrc unlock_stop vmethod: forward to the ObjC implementation. */
static gboolean
gst_avf_video_src_unlock_stop (GstBaseSrc * basesrc)
{
  return [GST_AVF_VIDEO_SRC_IMPL (basesrc) unlockStop];
}
1517
/* GstPushSrc create vmethod: forward to the ObjC implementation. */
static GstFlowReturn
gst_avf_video_src_create (GstPushSrc * pushsrc, GstBuffer ** buf)
{
  return [GST_AVF_VIDEO_SRC_IMPL (pushsrc) create:buf];
}
1527
1528
/* GstBaseSrc fixate vmethod: forward to the ObjC implementation. */
static GstCaps *
gst_avf_video_src_fixate (GstBaseSrc * bsrc, GstCaps * caps)
{
  return [GST_AVF_VIDEO_SRC_IMPL (bsrc) fixate:caps];
}
1538
/* GstBaseSrc decide_allocation vmethod: forward to the ObjC implementation. */
static gboolean
gst_avf_video_src_decide_allocation (GstBaseSrc * bsrc,
    GstQuery * query)
{
  return [GST_AVF_VIDEO_SRC_IMPL (bsrc) decideAllocation:query];
}
1549
/* GstElement set_context vmethod: forward to the ObjC implementation. */
static void
gst_avf_video_src_set_context (GstElement * element, GstContext * context)
{
  [GST_AVF_VIDEO_SRC_IMPL (element) setContext:context];
}
1555
/* Build the caps supported by a capture device/output pair: one entry per
 * (format, dimensions, framerate range) combination, plus GLMemory-
 * flavoured caps for the platform's preferred GL upload format.
 * Dimensions are swapped for portrait orientations. Returns a newly
 * allocated, simplified GstCaps; caller owns the reference. */
GstCaps*
gst_av_capture_device_get_caps (AVCaptureDevice *device, AVCaptureVideoDataOutput *output, GstAVFVideoSourceOrientation orientation)
{
  NSArray *formats = [device valueForKey:@"formats"];
  NSArray *pixel_formats = output.availableVideoCVPixelFormatTypes;
  GstCaps *result_caps, *result_gl_caps;
#if !HAVE_IOS
  GstVideoFormat gl_format = GST_VIDEO_FORMAT_UYVY;
#else
  GstVideoFormat gl_format = GST_VIDEO_FORMAT_NV12;
#endif

  result_caps = gst_caps_new_empty ();
  result_gl_caps = gst_caps_new_empty ();

  /* Do not use AVCaptureDeviceFormat or AVFrameRateRange only
   * available in iOS >= 7.0. We use a dynamic approach with key-value
   * coding or performSelector */
  for (NSObject *f in [formats reverseObjectEnumerator]) {
    /* formatDescription can't be retrieved with valueForKey so use a selector here */
    CMFormatDescriptionRef formatDescription = (__bridge CMFormatDescriptionRef) [f performSelector:@selector(formatDescription)];
    CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions (formatDescription);
    dimensions = get_oriented_dimensions (orientation, dimensions);

    for (NSObject *rate in [f valueForKey:@"videoSupportedFrameRateRanges"]) {
      int min_fps_n, min_fps_d, max_fps_n, max_fps_d;
      gdouble min_fps, max_fps;

      /* NOTE(review): -getValue: copies the NSNumber's raw bytes; this
       * assumes min/maxFrameRate are stored as doubles — confirm against
       * the AVFrameRateRange property types. */
      [[rate valueForKey:@"minFrameRate"] getValue:&min_fps];
      gst_util_double_to_fraction (min_fps, &min_fps_n, &min_fps_d);

      [[rate valueForKey:@"maxFrameRate"] getValue:&max_fps];
      gst_util_double_to_fraction (max_fps, &max_fps_n, &max_fps_d);

      for (NSNumber *pixel_format in pixel_formats) {
        GstVideoFormat gst_format = get_gst_video_format (pixel_format);

        /* System-memory caps: a range when min and max fps differ,
         * otherwise a fixed framerate. */
        if (gst_format != GST_VIDEO_FORMAT_UNKNOWN) {
          if (min_fps != max_fps)
            gst_caps_append (result_caps, GST_AVF_FPS_RANGE_CAPS_NEW (gst_format, dimensions.width, dimensions.height, min_fps_n, min_fps_d, max_fps_n, max_fps_d));
          else
            gst_caps_append (result_caps, GST_AVF_CAPS_NEW (gst_format, dimensions.width, dimensions.height, max_fps_n, max_fps_d));
        }

        /* Additionally advertise GLMemory caps for the GL upload format. */
        if (gst_format == gl_format) {
          GstCaps *gl_caps;
          if (min_fps != max_fps) {
            gl_caps = GST_AVF_FPS_RANGE_CAPS_NEW (gl_format,
                                                  dimensions.width, dimensions.height,
                                                  min_fps_n, min_fps_d,
                                                  max_fps_n, max_fps_d);
          } else {
            gl_caps = GST_AVF_CAPS_NEW (gl_format,
                                        dimensions.width, dimensions.height,
                                        max_fps_n, max_fps_d);
          }
          gst_caps_set_features (gl_caps, 0,
                                 gst_caps_features_new (GST_CAPS_FEATURE_MEMORY_GL_MEMORY,
                                                        NULL));
          /* macOS texture caches produce rectangle textures; iOS 2D. */
          gst_caps_set_simple (gl_caps,
                               "texture-target", G_TYPE_STRING,
#if !HAVE_IOS
                               GST_GL_TEXTURE_TARGET_RECTANGLE_STR,
#else
                               GST_GL_TEXTURE_TARGET_2D_STR,
#endif
                               NULL);
          gst_caps_append (result_gl_caps, gl_caps);
        }
      }
    }
  }

  /* GL caps first so downstream prefers zero-copy texture upload. */
  result_gl_caps = gst_caps_simplify (gst_caps_merge (result_gl_caps, result_caps));

  return result_gl_caps;
}
1633
/* Map a CoreVideo pixel format code (boxed in an NSNumber) to the
 * matching GstVideoFormat; GST_VIDEO_FORMAT_UNKNOWN when unsupported. */
static GstVideoFormat
get_gst_video_format (NSNumber *pixel_format)
{
  switch ([pixel_format integerValue]) {
    case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange: /* 420v */
      return GST_VIDEO_FORMAT_NV12;
    case kCVPixelFormatType_422YpCbCr8: /* 2vuy */
      return GST_VIDEO_FORMAT_UYVY;
    case kCVPixelFormatType_32BGRA: /* BGRA */
      return GST_VIDEO_FORMAT_BGRA;
    case kCVPixelFormatType_422YpCbCr8_yuvs: /* yuvs */
      return GST_VIDEO_FORMAT_YUY2;
    default:
      return GST_VIDEO_FORMAT_UNKNOWN;
  }
}
1658
/* Swap width and height for portrait orientations so advertised caps
 * reflect the rotated frame; landscape dimensions pass through untouched. */
static CMVideoDimensions
get_oriented_dimensions (GstAVFVideoSourceOrientation orientation, CMVideoDimensions dimensions)
{
  gboolean is_portrait =
      orientation == GST_AVF_VIDEO_SOURCE_ORIENTATION_PORTRAIT ||
      orientation == GST_AVF_VIDEO_SOURCE_ORIENTATION_PORTRAIT_UPSIDE_DOWN;
  CMVideoDimensions out = dimensions;

  if (is_portrait) {
    out.width = dimensions.height;
    out.height = dimensions.width;
  }
  return out;
}
1672