/*
 * GStreamer OpenNI2 device source element
 * Copyright (C) 2013 Miguel Casas-Sanchez <miguelecasassanchez@gmail.com>
 *
 * This library is free software; you can
 * redistribute it and/or modify it under the terms of the GNU Library
 * General Public License as published by the Free Software Foundation;
 * either version 2 of the License, or (at your option) any later version.
 * This library is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU Library
 * General Public License for more details. You should have received a copy
 * of the GNU Library General Public License along with this library; if
 * not, write to the Free Software Foundation, Inc., 51 Franklin St,
 * Fifth Floor, Boston, MA 02110-1301, USA.
 */

/**
 * SECTION:element-openni2src
 *
 * <refsect2>
 * <title>Examples</title>
 * <para>
 * Some recorded .oni files are available at:
 * <programlisting>
 *  http://people.cs.pitt.edu/~chang/1635/proj11/kinectRecord
 * </programlisting>
 *
 * <programlisting>
  LD_LIBRARY_PATH=/usr/lib/OpenNI2/Drivers/ gst-launch-1.0 --gst-debug=openni2src:5   openni2src location='Downloads/mr.oni' sourcetype=depth ! videoconvert ! ximagesink
 * </programlisting>
 * <programlisting>
  LD_LIBRARY_PATH=/usr/lib/OpenNI2/Drivers/ gst-launch-1.0 --gst-debug=openni2src:5   openni2src location='Downloads/mr.oni' sourcetype=color ! videoconvert ! ximagesink
 * </programlisting>
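 *
 * The experimental "both" source type packs the depth reading into the alpha
 * channel of an RGBA image; an untested sketch of such a pipeline, assuming
 * the same recording as above:
 * <programlisting>
  LD_LIBRARY_PATH=/usr/lib/OpenNI2/Drivers/ gst-launch-1.0 --gst-debug=openni2src:5   openni2src location='Downloads/mr.oni' sourcetype=both ! videoconvert ! ximagesink
 * </programlisting>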
 * </para>
 * </refsect2>
 */

#ifdef HAVE_CONFIG_H
#include "config.h"
#endif

#include "gstopenni2src.h"

GST_DEBUG_CATEGORY_STATIC (openni2src_debug);
#define GST_CAT_DEFAULT openni2src_debug
static GstStaticPadTemplate srctemplate = GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{RGBA, RGB, GRAY16_LE}"))
    );

enum
{
  PROP_0,
  PROP_LOCATION,
  PROP_SOURCETYPE
};
typedef enum
{
  SOURCETYPE_DEPTH,
  SOURCETYPE_COLOR,
  SOURCETYPE_BOTH
} GstOpenni2SourceType;
#define DEFAULT_SOURCETYPE  SOURCETYPE_DEPTH

#define SAMPLE_READ_WAIT_TIMEOUT 2000   /* 2000ms */

#define GST_TYPE_OPENNI2_SRC_SOURCETYPE (gst_openni2_src_sourcetype_get_type ())
static GType
gst_openni2_src_sourcetype_get_type (void)
{
  static GType etype = 0;
  if (etype == 0) {
    static const GEnumValue values[] = {
      {SOURCETYPE_DEPTH, "Get depth readings", "depth"},
      {SOURCETYPE_COLOR, "Get color readings", "color"},
      {SOURCETYPE_BOTH,
            "Get color and depth (as alpha) readings - EXPERIMENTAL",
          "both"},
      {0, NULL, NULL},
    };
    etype = g_enum_register_static ("GstOpenni2SrcSourcetype", values);
  }
  return etype;
}

/* GObject methods */
static void gst_openni2_src_dispose (GObject * object);
static void gst_openni2_src_finalize (GObject * gobject);
static void gst_openni2_src_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec);
static void gst_openni2_src_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec);

/* basesrc methods */
static gboolean gst_openni2_src_start (GstBaseSrc * bsrc);
static gboolean gst_openni2_src_stop (GstBaseSrc * bsrc);
static gboolean gst_openni2_src_set_caps (GstBaseSrc * src, GstCaps * caps);
static GstCaps *gst_openni2_src_get_caps (GstBaseSrc * src, GstCaps * filter);
static gboolean gst_openni2src_decide_allocation (GstBaseSrc * bsrc,
    GstQuery * query);

/* element methods */
static GstStateChangeReturn gst_openni2_src_change_state (GstElement * element,
    GstStateChange transition);

/* pushsrc method */
static GstFlowReturn gst_openni2src_fill (GstPushSrc * src, GstBuffer * buf);

/* OpenNI2 interaction methods */
static gboolean openni2_initialise_library ();
static gboolean openni2_initialise_devices (GstOpenni2Src * src);
static GstFlowReturn openni2_read_gstbuffer (GstOpenni2Src * src,
    GstBuffer * buf);

#define parent_class gst_openni2_src_parent_class
G_DEFINE_TYPE (GstOpenni2Src, gst_openni2_src, GST_TYPE_PUSH_SRC);

static void
gst_openni2_src_class_init (GstOpenni2SrcClass * klass)
{
  GObjectClass *gobject_class;
  GstPushSrcClass *pushsrc_class;
  GstBaseSrcClass *basesrc_class;
  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);

  gobject_class = (GObjectClass *) klass;
  basesrc_class = (GstBaseSrcClass *) klass;
  pushsrc_class = (GstPushSrcClass *) klass;

  gobject_class->dispose = gst_openni2_src_dispose;
  gobject_class->finalize = gst_openni2_src_finalize;
  gobject_class->set_property = gst_openni2_src_set_property;
  gobject_class->get_property = gst_openni2_src_get_property;
  g_object_class_install_property
      (gobject_class, PROP_LOCATION,
      g_param_spec_string ("location", "Location",
          "Source URI, can be a file or a device.", "", (GParamFlags)
          (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
  g_object_class_install_property (gobject_class, PROP_SOURCETYPE,
      g_param_spec_enum ("sourcetype",
          "Device source type",
          "Type of readings to get from the source",
          GST_TYPE_OPENNI2_SRC_SOURCETYPE, DEFAULT_SOURCETYPE,
          (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));

  basesrc_class->start = GST_DEBUG_FUNCPTR (gst_openni2_src_start);
  basesrc_class->stop = GST_DEBUG_FUNCPTR (gst_openni2_src_stop);
  basesrc_class->get_caps = GST_DEBUG_FUNCPTR (gst_openni2_src_get_caps);
  basesrc_class->set_caps = GST_DEBUG_FUNCPTR (gst_openni2_src_set_caps);
  basesrc_class->decide_allocation =
      GST_DEBUG_FUNCPTR (gst_openni2src_decide_allocation);

  gst_element_class_add_static_pad_template (element_class, &srctemplate);

  gst_element_class_set_static_metadata (element_class,
      "OpenNI2 client source",
      "Source/Video",
      "Extract readings from an OpenNI-supported device (Kinect etc.)",
      "Miguel Casas-Sanchez <miguelecasassanchez@gmail.com>");

  element_class->change_state = gst_openni2_src_change_state;

  pushsrc_class->fill = GST_DEBUG_FUNCPTR (gst_openni2src_fill);

  GST_DEBUG_CATEGORY_INIT (openni2src_debug, "openni2src", 0,
      "OpenNI2 Device Source");

  /* OpenNI2 initialisation inside this function */
  openni2_initialise_library ();
}

static void
gst_openni2_src_init (GstOpenni2Src * ni2src)
{
  gst_base_src_set_live (GST_BASE_SRC (ni2src), TRUE);
  gst_base_src_set_format (GST_BASE_SRC (ni2src), GST_FORMAT_TIME);

  ni2src->device = new openni::Device ();
  ni2src->depth = new openni::VideoStream ();
  ni2src->color = new openni::VideoStream ();
  ni2src->depthFrame = new openni::VideoFrameRef ();
  ni2src->colorFrame = new openni::VideoFrameRef ();

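  /* Timestamp of the first OpenNI2 frame; set lazily in the fill path and
   * used to normalise buffer PTS so the stream starts at zero. */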
  ni2src->oni_start_ts = GST_CLOCK_TIME_NONE;
}

static void
gst_openni2_src_dispose (GObject * object)
{
  GstOpenni2Src *ni2src = GST_OPENNI2_SRC (object);

  if (ni2src->gst_caps)
    gst_caps_unref (ni2src->gst_caps);

  G_OBJECT_CLASS (parent_class)->dispose (object);
}

static void
gst_openni2_src_finalize (GObject * gobject)
{
  GstOpenni2Src *ni2src = GST_OPENNI2_SRC (gobject);

  if (ni2src->uri_name) {
    g_free (ni2src->uri_name);
    ni2src->uri_name = NULL;
  }

  if (ni2src->gst_caps) {
    gst_caps_unref (ni2src->gst_caps);
    ni2src->gst_caps = NULL;
  }

  if (ni2src->device) {
    delete ni2src->device;
    ni2src->device = NULL;
  }

  if (ni2src->depth) {
    delete ni2src->depth;
    ni2src->depth = NULL;
  }

  if (ni2src->color) {
    delete ni2src->color;
    ni2src->color = NULL;
  }

  if (ni2src->depthFrame) {
    delete ni2src->depthFrame;
    ni2src->depthFrame = NULL;
  }

  if (ni2src->colorFrame) {
    delete ni2src->colorFrame;
    ni2src->colorFrame = NULL;
  }

  G_OBJECT_CLASS (parent_class)->finalize (gobject);
}

static void
gst_openni2_src_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstOpenni2Src *openni2src = GST_OPENNI2_SRC (object);

  GST_OBJECT_LOCK (openni2src);
  switch (prop_id) {
    case PROP_LOCATION:
      if (!g_value_get_string (value)) {
        GST_WARNING ("location property cannot be NULL");
        break;
      }

      if (openni2src->uri_name != NULL) {
        g_free (openni2src->uri_name);
        openni2src->uri_name = NULL;
      }

      openni2src->uri_name = g_value_dup_string (value);
      break;
    case PROP_SOURCETYPE:
      openni2src->sourcetype = g_value_get_enum (value);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }

  GST_OBJECT_UNLOCK (openni2src);
}

static void
gst_openni2_src_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec)
{
  GstOpenni2Src *openni2src = GST_OPENNI2_SRC (object);

  GST_OBJECT_LOCK (openni2src);
  switch (prop_id) {
    case PROP_LOCATION:
      g_value_set_string (value, openni2src->uri_name);
      break;
    case PROP_SOURCETYPE:
      g_value_set_enum (value, openni2src->sourcetype);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
  GST_OBJECT_UNLOCK (openni2src);
}

/* Interesting info from gstv4l2src.c:
 * "start and stop are not symmetric -- start will open the device, but not
 * start capture. it's setcaps that will start capture, which is called via
 * basesrc's negotiate method. stop will both stop capture and close the
 * device."
 */
static gboolean
gst_openni2_src_start (GstBaseSrc * bsrc)
{
  GstOpenni2Src *src = GST_OPENNI2_SRC (bsrc);
  openni::Status rc = openni::STATUS_OK;

  if (src->depth->isValid ()) {
    rc = src->depth->start ();
    if (rc != openni::STATUS_OK) {
      GST_ERROR_OBJECT (src, "Couldn't start the depth stream\n%s\n",
          openni::OpenNI::getExtendedError ());
      return FALSE;
    }
  }

  if (src->color->isValid ()) {
    rc = src->color->start ();
    if (rc != openni::STATUS_OK) {
      GST_ERROR_OBJECT (src, "Couldn't start the color stream\n%s\n",
          openni::OpenNI::getExtendedError ());
      return FALSE;
    }
  }

  return TRUE;
}

static gboolean
gst_openni2_src_stop (GstBaseSrc * bsrc)
{
  GstOpenni2Src *src = GST_OPENNI2_SRC (bsrc);

  if (src->depthFrame)
    src->depthFrame->release ();

  if (src->colorFrame)
    src->colorFrame->release ();

  if (src->depth->isValid ()) {
    src->depth->stop ();
    src->depth->destroy ();
  }

  if (src->color->isValid ()) {
    src->color->stop ();
    src->color->destroy ();
  }

  src->device->close ();

  return TRUE;
}

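/* Compose the source caps from whichever streams were opened: GRAY16_LE for
 * depth only, RGB for colour only, or RGBA when both streams are requested
 * (depth goes into the alpha channel). Until the device has been initialised,
 * fall back to the pad template caps. */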
static GstCaps *
gst_openni2_src_get_caps (GstBaseSrc * src, GstCaps * filter)
{
  GstOpenni2Src *ni2src;
  GstCaps *caps;
  GstVideoInfo info;
  GstVideoFormat format;

  ni2src = GST_OPENNI2_SRC (src);

  GST_OBJECT_LOCK (ni2src);
  if (ni2src->gst_caps)
    goto out;

  /* If we are here, we need to compose the caps and return them. */

  if (ni2src->depth->isValid () && ni2src->color->isValid () &&
      ni2src->sourcetype == SOURCETYPE_BOTH
      && ni2src->colorpixfmt == openni::PIXEL_FORMAT_RGB888) {
    format = GST_VIDEO_FORMAT_RGBA;
  } else if (ni2src->depth->isValid () &&
             ni2src->sourcetype == SOURCETYPE_DEPTH) {
    format = GST_VIDEO_FORMAT_GRAY16_LE;
  } else if (ni2src->color->isValid () && ni2src->sourcetype == SOURCETYPE_COLOR
      && ni2src->colorpixfmt == openni::PIXEL_FORMAT_RGB888) {
    format = GST_VIDEO_FORMAT_RGB;
  } else {
    goto out;
  }

  gst_video_info_init (&info);
  gst_video_info_set_format (&info, format, ni2src->width, ni2src->height);
  info.fps_n = ni2src->fps;
  info.fps_d = 1;
  caps = gst_video_info_to_caps (&info);

  GST_INFO_OBJECT (ni2src, "probed caps: %" GST_PTR_FORMAT, caps);
  ni2src->gst_caps = caps;

out:
  GST_OBJECT_UNLOCK (ni2src);

  if (!ni2src->gst_caps)
    return gst_pad_get_pad_template_caps (GST_BASE_SRC_PAD (ni2src));

  return (filter)
      ? gst_caps_intersect_full (filter, ni2src->gst_caps,
      GST_CAPS_INTERSECT_FIRST)
      : gst_caps_ref (ni2src->gst_caps);
}

static gboolean
gst_openni2_src_set_caps (GstBaseSrc * src, GstCaps * caps)
{
  GstOpenni2Src *ni2src;

  ni2src = GST_OPENNI2_SRC (src);

  return gst_video_info_from_caps (&ni2src->info, caps);
}

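/* Device and stream initialisation happens on NULL->READY; the streams are
 * torn down and the cached caps dropped again on READY->NULL. */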
static GstStateChangeReturn
gst_openni2_src_change_state (GstElement * element, GstStateChange transition)
{
  GstStateChangeReturn ret = GST_STATE_CHANGE_FAILURE;
  GstOpenni2Src *src = GST_OPENNI2_SRC (element);

  switch (transition) {
    case GST_STATE_CHANGE_NULL_TO_READY:
      /* Action! */
      if (!openni2_initialise_devices (src))
        return GST_STATE_CHANGE_FAILURE;
      break;
    case GST_STATE_CHANGE_READY_TO_PAUSED:
      break;
    case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
      break;
    default:
      break;
  }

  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    return ret;
  }

  switch (transition) {
    case GST_STATE_CHANGE_READY_TO_NULL:
      gst_openni2_src_stop (GST_BASE_SRC (src));
      if (src->gst_caps) {
        gst_caps_unref (src->gst_caps);
        src->gst_caps = NULL;
      }
      break;
    case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
      break;
    case GST_STATE_CHANGE_PAUSED_TO_READY:
      src->oni_start_ts = GST_CLOCK_TIME_NONE;
      break;
    default:
      break;
  }

  return ret;
}


static GstFlowReturn
gst_openni2src_fill (GstPushSrc * src, GstBuffer * buf)
{
  GstOpenni2Src *ni2src = GST_OPENNI2_SRC (src);
  return openni2_read_gstbuffer (ni2src, buf);
}

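/* Propose (or update) a GstVideoBufferPool for downstream allocation queries,
 * enabling GstVideoMeta when downstream supports it. */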
static gboolean
gst_openni2src_decide_allocation (GstBaseSrc * bsrc, GstQuery * query)
{
  GstBufferPool *pool;
  guint size, min, max;
  gboolean update;
  GstStructure *config;
  GstCaps *caps;
  GstVideoInfo info;

  gst_query_parse_allocation (query, &caps, NULL);
  gst_video_info_from_caps (&info, caps);

  if (gst_query_get_n_allocation_pools (query) > 0) {
    gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);
    update = TRUE;
  } else {
    pool = NULL;
    min = max = 0;
    size = info.size;
    update = FALSE;
  }

  GST_DEBUG_OBJECT (bsrc, "allocation: size:%u min:%u max:%u pool:%"
      GST_PTR_FORMAT " caps:%" GST_PTR_FORMAT, size, min, max, pool, caps);

  if (!pool)
    pool = gst_video_buffer_pool_new ();

  config = gst_buffer_pool_get_config (pool);
  gst_buffer_pool_config_set_params (config, caps, size, min, max);

  if (gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL)) {
    GST_DEBUG_OBJECT (pool, "activate Video Meta");
    gst_buffer_pool_config_add_option (config,
        GST_BUFFER_POOL_OPTION_VIDEO_META);
  }

  gst_buffer_pool_set_config (pool, config);

  if (update)
    gst_query_set_nth_allocation_pool (query, 0, pool, size, min, max);
  else
    gst_query_add_allocation_pool (query, pool, size, min, max);

  gst_object_unref (pool);

  return GST_BASE_SRC_CLASS (parent_class)->decide_allocation (bsrc, query);
}

gboolean
gst_openni2src_plugin_init (GstPlugin * plugin)
{
  return gst_element_register (plugin, "openni2src", GST_RANK_NONE,
      GST_TYPE_OPENNI2_SRC);
}

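/* One-off OpenNI2 runtime initialisation; called from class_init. */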
static gboolean
openni2_initialise_library (void)
{
  openni::Status rc = openni::STATUS_OK;
  rc = openni::OpenNI::initialize ();
  if (rc != openni::STATUS_OK) {
    GST_ERROR ("Initialization failed: %s",
        openni::OpenNI::getExtendedError ());
    openni::OpenNI::shutdown ();
    return FALSE;
  }
  return (rc == openni::STATUS_OK);
}

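/* Open the device (or .oni recording) given by the location property, create
 * and start the depth and colour streams where available, and record the
 * negotiated resolution, frame rate and pixel formats. */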
static gboolean
openni2_initialise_devices (GstOpenni2Src * src)
{
  openni::Status rc = openni::STATUS_OK;
  const char *deviceURI = openni::ANY_DEVICE;

  if (src->uri_name)
    deviceURI = src->uri_name;

  rc = src->device->open (deviceURI);
  if (rc != openni::STATUS_OK) {
    GST_ERROR_OBJECT (src, "Device (%s) open failed: %s", deviceURI,
        openni::OpenNI::getExtendedError ());
    openni::OpenNI::shutdown ();
    return FALSE;
  }

  /** depth sensor **/
  rc = src->depth->create (*src->device, openni::SENSOR_DEPTH);
  if (rc == openni::STATUS_OK) {
    rc = src->depth->start ();
    if (rc != openni::STATUS_OK) {
      GST_ERROR_OBJECT (src, "%s", openni::OpenNI::getExtendedError ());
      src->depth->destroy ();
    }
  } else {
    GST_WARNING_OBJECT (src, "Couldn't find depth stream: %s",
        openni::OpenNI::getExtendedError ());
  }

  /** color sensor **/
  rc = src->color->create (*src->device, openni::SENSOR_COLOR);
  if (rc == openni::STATUS_OK) {
    rc = src->color->start ();
    if (rc != openni::STATUS_OK) {
      GST_ERROR_OBJECT (src, "Couldn't start color stream: %s",
          openni::OpenNI::getExtendedError ());
      src->color->destroy ();
    }
  } else {
    GST_WARNING_OBJECT (src, "Couldn't find color stream: %s",
        openni::OpenNI::getExtendedError ());
  }

  if (!src->depth->isValid () && !src->color->isValid ()) {
    GST_ERROR_OBJECT (src, "No valid streams. Exiting\n");
    openni::OpenNI::shutdown ();
    return FALSE;
  }

  /** Get the resolution and make sure it is valid **/
  if (src->depth->isValid () && src->color->isValid ()) {
    src->depthVideoMode = src->depth->getVideoMode ();
    src->colorVideoMode = src->color->getVideoMode ();

    int depthWidth = src->depthVideoMode.getResolutionX ();
    int depthHeight = src->depthVideoMode.getResolutionY ();
    int colorWidth = src->colorVideoMode.getResolutionX ();
    int colorHeight = src->colorVideoMode.getResolutionY ();

    if (depthWidth == colorWidth && depthHeight == colorHeight) {
      src->width = depthWidth;
      src->height = depthHeight;
      src->fps = src->depthVideoMode.getFps ();
      src->colorpixfmt = src->colorVideoMode.getPixelFormat ();
      src->depthpixfmt = src->depthVideoMode.getPixelFormat ();
    } else {
      GST_ERROR_OBJECT (src, "Error - expected color and depth to have the"
          " same resolution: D: %dx%d vs C: %dx%d",
          depthWidth, depthHeight, colorWidth, colorHeight);
      return FALSE;
    }
    GST_INFO_OBJECT (src, "DEPTH&COLOR resolution: %dx%d",
        src->width, src->height);
  } else if (src->depth->isValid ()) {
    src->depthVideoMode = src->depth->getVideoMode ();
    src->width = src->depthVideoMode.getResolutionX ();
    src->height = src->depthVideoMode.getResolutionY ();
    src->fps = src->depthVideoMode.getFps ();
    src->depthpixfmt = src->depthVideoMode.getPixelFormat ();
    GST_INFO_OBJECT (src, "DEPTH resolution: %dx%d", src->width, src->height);
  } else if (src->color->isValid ()) {
    src->colorVideoMode = src->color->getVideoMode ();
    src->width = src->colorVideoMode.getResolutionX ();
    src->height = src->colorVideoMode.getResolutionY ();
    src->fps = src->colorVideoMode.getFps ();
    src->colorpixfmt = src->colorVideoMode.getPixelFormat ();
    GST_INFO_OBJECT (src, "COLOR resolution: %dx%d", src->width, src->height);
  } else {
    GST_ERROR_OBJECT (src, "Expected at least one of the streams to be valid.");
    return FALSE;
  }

  return TRUE;
}

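/* Block until a frame is available, then copy depth and/or colour data into
 * the GstBuffer according to the configured sourcetype, and derive the buffer
 * PTS from the OpenNI2 frame timestamp. */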
static GstFlowReturn
openni2_read_gstbuffer (GstOpenni2Src * src, GstBuffer * buf)
{
  openni::Status rc = openni::STATUS_OK;
  openni::VideoStream * pStream = src->depth;
  int changedStreamDummy;
  GstVideoFrame vframe;
  uint64_t oni_ts;

  /* Block until we get some data */
  rc = openni::OpenNI::waitForAnyStream (&pStream, 1, &changedStreamDummy,
      SAMPLE_READ_WAIT_TIMEOUT);
  if (rc != openni::STATUS_OK) {
    GST_ERROR_OBJECT (src, "Frame read timeout: %s",
        openni::OpenNI::getExtendedError ());
    return GST_FLOW_ERROR;
  }

  if (src->depth->isValid () && src->color->isValid () &&
      src->sourcetype == SOURCETYPE_BOTH) {
    rc = src->depth->readFrame (src->depthFrame);
    if (rc != openni::STATUS_OK) {
      GST_ERROR_OBJECT (src, "Frame read error: %s",
          openni::OpenNI::getExtendedError ());
      return GST_FLOW_ERROR;
    }
    rc = src->color->readFrame (src->colorFrame);
    if (rc != openni::STATUS_OK) {
      GST_ERROR_OBJECT (src, "Frame read error: %s",
          openni::OpenNI::getExtendedError ());
      return GST_FLOW_ERROR;
    }

    /* Copy colour information */
    gst_video_frame_map (&vframe, &src->info, buf, GST_MAP_WRITE);

    guint8 *pData = (guint8 *) GST_VIDEO_FRAME_PLANE_DATA (&vframe, 0);
    guint8 *pColor = (guint8 *) src->colorFrame->getData ();
    /* Add depth as 8bit alpha channel, depth is 16bit samples. */
    guint16 *pDepth = (guint16 *) src->depthFrame->getData ();

    for (int i = 0; i < src->colorFrame->getHeight (); ++i) {
      for (int j = 0; j < src->colorFrame->getWidth (); ++j) {
        pData[4 * j + 0] = pColor[3 * j + 0];
        pData[4 * j + 1] = pColor[3 * j + 1];
        pData[4 * j + 2] = pColor[3 * j + 2];
        pData[4 * j + 3] = pDepth[j] >> 8;
      }
      pData += GST_VIDEO_FRAME_PLANE_STRIDE (&vframe, 0);
      pColor += src->colorFrame->getStrideInBytes ();
      pDepth += src->depthFrame->getStrideInBytes () / 2;
    }
    gst_video_frame_unmap (&vframe);

    oni_ts = src->colorFrame->getTimestamp () * 1000;

    GST_LOG_OBJECT (src, "sending buffer (%d+%d)B",
        src->colorFrame->getDataSize (),
        src->depthFrame->getDataSize ());
  } else if (src->depth->isValid () && src->sourcetype == SOURCETYPE_DEPTH) {
    rc = src->depth->readFrame (src->depthFrame);
    if (rc != openni::STATUS_OK) {
      GST_ERROR_OBJECT (src, "Frame read error: %s",
          openni::OpenNI::getExtendedError ());
      return GST_FLOW_ERROR;
    }

    /* Copy depth information */
    gst_video_frame_map (&vframe, &src->info, buf, GST_MAP_WRITE);

    guint16 *pData = (guint16 *) GST_VIDEO_FRAME_PLANE_DATA (&vframe, 0);
    guint16 *pDepth = (guint16 *) src->depthFrame->getData ();

    for (int i = 0; i < src->depthFrame->getHeight (); ++i) {
      memcpy (pData, pDepth, 2 * src->depthFrame->getWidth ());
      pDepth += src->depthFrame->getStrideInBytes () / 2;
      pData += GST_VIDEO_FRAME_PLANE_STRIDE (&vframe, 0) / 2;
    }
    gst_video_frame_unmap (&vframe);

    oni_ts = src->depthFrame->getTimestamp () * 1000;

    GST_LOG_OBJECT (src, "sending buffer (%dx%d)=%dB",
        src->depthFrame->getWidth (),
        src->depthFrame->getHeight (),
        src->depthFrame->getDataSize ());
  } else if (src->color->isValid () && src->sourcetype == SOURCETYPE_COLOR) {
    rc = src->color->readFrame (src->colorFrame);
    if (rc != openni::STATUS_OK) {
      GST_ERROR_OBJECT (src, "Frame read error: %s",
          openni::OpenNI::getExtendedError ());
      return GST_FLOW_ERROR;
    }

    gst_video_frame_map (&vframe, &src->info, buf, GST_MAP_WRITE);

    guint8 *pData = (guint8 *) GST_VIDEO_FRAME_PLANE_DATA (&vframe, 0);
    guint8 *pColor = (guint8 *) src->colorFrame->getData ();

    for (int i = 0; i < src->colorFrame->getHeight (); ++i) {
      memcpy (pData, pColor, 3 * src->colorFrame->getWidth ());
      pColor += src->colorFrame->getStrideInBytes ();
      pData += GST_VIDEO_FRAME_PLANE_STRIDE (&vframe, 0);
    }
    gst_video_frame_unmap (&vframe);

    oni_ts = src->colorFrame->getTimestamp () * 1000;

    GST_LOG_OBJECT (src, "sending buffer (%dx%d)=%dB",
        src->colorFrame->getWidth (),
        src->colorFrame->getHeight (),
        src->colorFrame->getDataSize ());
  } else {
    g_return_val_if_reached (GST_FLOW_ERROR);
    return GST_FLOW_ERROR;
  }

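  /* OpenNI2 frame timestamps are in microseconds (scaled to nanoseconds
   * above); normalise against the first frame so PTS starts at zero. */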
  if (G_UNLIKELY (src->oni_start_ts == GST_CLOCK_TIME_NONE))
    src->oni_start_ts = oni_ts;

  GST_BUFFER_PTS (buf) = oni_ts - src->oni_start_ts;

  GST_LOG_OBJECT (src, "Calculated PTS as %" GST_TIME_FORMAT,
      GST_TIME_ARGS (GST_BUFFER_PTS (buf)));

  return GST_FLOW_OK;
}