1 #ifdef HAVE_CONFIG_H
2 # include "config.h"
3 #endif
4 #include "emotion_gstreamer.h"
5 
/* Eina log domain id for this module; -1 until it is registered. */
int _emotion_gstreamer_log_domain = -1;
/* When EINA_TRUE, frame-rate debugging output is enabled. */
Eina_Bool debug_fps = EINA_FALSE;

/* How many times the module has been initialized (init/shutdown pairing). */
static int _emotion_init_count = 0;
10 
/* Callbacks to get the eos */
/* Tag-list iterator used while collecting stream metadata. */
static void _for_each_tag    (GstTagList const* list, gchar const* tag, void *data);
/* Releases an Emotion_Gstreamer_Metadata and all its fields. */
static void _free_metadata   (Emotion_Gstreamer_Metadata *m);

/* Builds the playbin pipeline for a media URI (and optional subtitle URI). */
static GstElement * _create_pipeline (Emotion_Gstreamer *ev, Evas_Object *o, const char *uri, const char *suburi);

/* Synchronous bus handler attached to the pipeline's GstBus. */
static GstBusSyncReply _bus_sync_handler(GstBus *bus,
                                         GstMessage *message,
                                         gpointer data);

/* Forward declarations: needed by em_file_open() before their definitions. */
static void em_audio_channel_volume_set(void *video, double vol);
static void em_audio_channel_mute_set(void *video, int mute);
23 
24 /* Module interface */
25 
26 static const char *
emotion_visualization_element_name_get(Emotion_Vis visualisation)27 emotion_visualization_element_name_get(Emotion_Vis visualisation)
28 {
29    switch (visualisation)
30      {
31       case EMOTION_VIS_NONE:
32          return NULL;
33       case EMOTION_VIS_GOOM:
34          return "goom";
35       case EMOTION_VIS_LIBVISUAL_BUMPSCOPE:
36          return "libvisual_bumpscope";
37       case EMOTION_VIS_LIBVISUAL_CORONA:
38          return "libvisual_corona";
39       case EMOTION_VIS_LIBVISUAL_DANCING_PARTICLES:
40          return "libvisual_dancingparticles";
41       case EMOTION_VIS_LIBVISUAL_GDKPIXBUF:
42          return "libvisual_gdkpixbuf";
43       case EMOTION_VIS_LIBVISUAL_G_FORCE:
44          return "libvisual_G-Force";
45       case EMOTION_VIS_LIBVISUAL_GOOM:
46          return "libvisual_goom";
47       case EMOTION_VIS_LIBVISUAL_INFINITE:
48          return "libvisual_infinite";
49       case EMOTION_VIS_LIBVISUAL_JAKDAW:
50          return "libvisual_jakdaw";
51       case EMOTION_VIS_LIBVISUAL_JESS:
52          return "libvisual_jess";
53       case EMOTION_VIS_LIBVISUAL_LV_ANALYSER:
54          return "libvisual_lv_analyzer";
55       case EMOTION_VIS_LIBVISUAL_LV_FLOWER:
56          return "libvisual_lv_flower";
57       case EMOTION_VIS_LIBVISUAL_LV_GLTEST:
58          return "libvisual_lv_gltest";
59       case EMOTION_VIS_LIBVISUAL_LV_SCOPE:
60          return "libvisual_lv_scope";
61       case EMOTION_VIS_LIBVISUAL_MADSPIN:
62          return "libvisual_madspin";
63       case EMOTION_VIS_LIBVISUAL_NEBULUS:
64          return "libvisual_nebulus";
65       case EMOTION_VIS_LIBVISUAL_OINKSIE:
66          return "libvisual_oinksie";
67       case EMOTION_VIS_LIBVISUAL_PLASMA:
68          return "libvisual_plazma";
69       default:
70          return "goom";
71      }
72 }
73 
74 Emotion_Gstreamer *
emotion_gstreamer_ref(Emotion_Gstreamer * ev)75 emotion_gstreamer_ref(Emotion_Gstreamer *ev)
76 {
77   g_atomic_int_inc (&ev->ref_count);
78   return ev;
79 }
80 
81 void
emotion_gstreamer_unref(Emotion_Gstreamer * ev)82 emotion_gstreamer_unref(Emotion_Gstreamer *ev)
83 {
84   if (g_atomic_int_dec_and_test(&ev->ref_count))
85     {
86        if (ev->subtitle)
87          {
88             eina_stringshare_del(ev->subtitle);
89             ev->subtitle = NULL;
90          }
91        free(ev);
92     }
93 }
94 
95 static Eina_Bool
em_file_open(void * video,const char * file)96 em_file_open(void *video,
97              const char *file)
98 {
99    Emotion_Gstreamer *ev = video;
100    char *uri;
101    char *suburi = NULL;
102    gboolean mute = 0;
103    gdouble vol = 0.0;
104 
105    if (!file) return EINA_FALSE;
106 
107    if (gst_uri_is_valid(file)) uri = strdup(file);
108    else uri = gst_filename_to_uri(file, NULL);
109    if (!uri) return EINA_FALSE;
110 
111    ev->shutdown = EINA_FALSE;
112    ev->ready = EINA_FALSE;
113    ev->live = EINA_FALSE;
114    ev->buffering = EINA_FALSE;
115 
116    DBG("setting file to '%s'", uri);
117 
118    if (ev->subtitle)
119      {
120         if (gst_uri_is_valid(ev->subtitle)) suburi = strdup(ev->subtitle);
121         else suburi = gst_filename_to_uri(ev->subtitle, NULL);
122      }
123    ev->pipeline = _create_pipeline(ev, ev->obj, uri, suburi);
124    g_free(uri);
125 
126    if (!ev->pipeline)
127      return EINA_FALSE;
128 
129    g_object_get(ev->pipeline, "volume", &vol, NULL);
130    g_object_get(ev->pipeline, "mute", &mute, NULL);
131    ev->volume = vol;
132    ev->audio_mute = mute;
133 
134    ev->position = 0.0;
135 
136    return EINA_TRUE;
137 }
138 
static void
em_file_close(void *video)
{
   /* Tear down the currently open media and all per-file state.
    * Safe to call when nothing is open. */
   Emotion_Gstreamer *ev = video;
   Eina_List *l;

   /* Worker threads poll this flag to abort early. */
   ev->shutdown = EINA_TRUE;

   if (ev->threads)
     {
        Ecore_Thread *t;

        EINA_LIST_FOREACH(ev->threads, l, t)
          {
             ecore_thread_cancel(t);
          }
     }

   if (ev->pipeline)
     {
       /* Remove the audio probe before its pad is destroyed with the
        * pipeline. */
       if (ev->audio_buffer_probe)
         {
            gst_pad_remove_probe(ev->audio_buffer_probe_pad, ev->audio_buffer_probe);
            gst_object_unref(ev->audio_buffer_probe_pad);
            ev->audio_buffer_probe_pad = NULL;
            ev->audio_buffer_probe = 0;
         }

       /* Order matters: stop the pipeline, detach the sink from the
        * Evas object, then drop our pipeline reference. */
       gst_element_set_state(ev->pipeline, GST_STATE_NULL);
       g_object_set(G_OBJECT(ev->vsink), "emotion-object", NULL, NULL);
       gst_object_unref(ev->pipeline);

       ev->pipeline = NULL;
       ev->vsink = NULL;
     }

   if (ev->metadata)
     {
        _free_metadata(ev->metadata);
        ev->metadata = NULL;
     }

   ev->ready = EINA_FALSE;
}
183 
184 static void
em_del(void * video)185 em_del(void *video)
186 {
187    Emotion_Gstreamer *ev = video;
188 
189    em_file_close(ev);
190 
191    emotion_gstreamer_unref(ev);
192 }
193 
static void
em_play(void   *video,
        double  pos EINA_UNUSED)
{
   /* Request playback.  The PLAYING state change is only issued once
    * the pipeline has prerolled and is not buffering; otherwise only
    * the intent is recorded in ev->play for later. */
   Emotion_Gstreamer *ev = video;

   if (!ev->pipeline) return;

   if (ev->ready && !ev->buffering)
     gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
   ev->play = EINA_TRUE;
}
206 
static void
em_stop(void *video)
{
   /* Pause playback (Emotion "stop" maps to GStreamer PAUSED, not
    * NULL, so the position is kept).  Records the intent when the
    * pipeline is not yet ready. */
   Emotion_Gstreamer *ev = video;

   if (!ev->pipeline) return;

   if (ev->ready)
     gst_element_set_state(ev->pipeline, GST_STATE_PAUSED);
   ev->play = EINA_FALSE;
}
218 
219 static void
em_size_get(void * video,int * width,int * height)220 em_size_get(void  *video,
221             int   *width,
222             int   *height)
223 {
224    Emotion_Gstreamer *ev = video;
225    gint cur;
226    GstPad *pad;
227    GstCaps *caps;
228    GstVideoInfo info;
229 
230    if (width) *width = 0;
231    if (height) *height = 0;
232 
233    if (!ev->ready)
234      return;
235 
236    g_object_get(ev->pipeline, "current-video", &cur, NULL);
237    g_signal_emit_by_name (ev->pipeline, "get-video-pad", cur, &pad);
238    if (!pad)
239      return;
240 
241    caps = gst_pad_get_current_caps(pad);
242    gst_object_unref(pad);
243    if (!caps)
244      return;
245 
246    gst_video_info_from_caps (&info, caps);
247    if (width) *width = info.width;
248    if (height) *height = info.height;
249    gst_caps_unref(caps);
250 }
251 
252 static void
em_pos_set(void * video,double pos)253 em_pos_set(void   *video,
254            double  pos)
255 {
256    Emotion_Gstreamer *ev = video;
257 
258    if (!ev->ready) return;
259 
260    gst_element_seek(ev->pipeline, 1.0,
261                           GST_FORMAT_TIME,
262                           GST_SEEK_FLAG_ACCURATE | GST_SEEK_FLAG_FLUSH,
263                           GST_SEEK_TYPE_SET,
264                           (gint64)(pos * (double)GST_SECOND),
265                           GST_SEEK_TYPE_NONE, -1);
266 }
267 
268 static double
em_len_get(void * video)269 em_len_get(void *video)
270 {
271    Emotion_Gstreamer *ev = video;
272    gint64 val;
273    gboolean ret;
274 
275    if (!ev->ready)
276      return 0.0;
277 
278    ret = gst_element_query_duration(ev->pipeline, GST_FORMAT_TIME, &val);
279    if (!ret || val == -1)
280      return 0.0;
281 
282    return val / 1000000000.0;
283 }
284 
285 static double
em_buffer_size_get(void * video)286 em_buffer_size_get(void *video)
287 {
288    Emotion_Gstreamer *ev = video;
289    GstQuery *query;
290    gboolean busy;
291    gint percent;
292 
293    if (!ev->ready) return 0.0;
294 
295    query = gst_query_new_buffering(GST_FORMAT_DEFAULT);
296    if (gst_element_query(ev->pipeline, query))
297      gst_query_parse_buffering_percent(query, &busy, &percent);
298    else
299      percent = 100;
300 
301    gst_query_unref(query);
302    return ((float)(percent)) / 100.0;
303 }
304 
305 static Eina_Bool
_em_fps_get(Emotion_Gstreamer * ev,int * n,int * d)306 _em_fps_get(Emotion_Gstreamer *ev, int *n, int *d)
307 {
308    gint cur;
309    GstPad *pad;
310    GstCaps *caps;
311    GstVideoInfo info;
312    Eina_Bool ret = EINA_FALSE;
313 
314    if (n) *n = 0;
315    if (d) *d = 1;
316 
317    if (!ev->ready)
318      goto on_error;
319 
320    g_object_get(ev->pipeline, "current-video", &cur, NULL);
321    g_signal_emit_by_name (ev->pipeline, "get-video-pad", cur, &pad);
322    if (!pad)
323      goto on_error;
324 
325    caps = gst_pad_get_current_caps(pad);
326    gst_object_unref(pad);
327    if (!caps)
328      goto on_error;
329 
330    gst_video_info_from_caps (&info, caps);
331    if (n) *n = info.fps_n;
332    if (d) *d = info.fps_d;
333    gst_caps_unref(caps);
334    ret = EINA_TRUE;
335 
336  on_error:
337 
338    return ret;
339 }
340 
341 static int
em_fps_num_get(void * video)342 em_fps_num_get(void *video)
343 {
344    Emotion_Gstreamer *ev = video;
345    int num;
346 
347    _em_fps_get(ev, &num, NULL);
348 
349    return num;
350 }
351 
352 static int
em_fps_den_get(void * video)353 em_fps_den_get(void *video)
354 {
355    Emotion_Gstreamer *ev = video;
356    int den;
357 
358    _em_fps_get(ev, NULL, &den);
359 
360    return den;
361 }
362 
363 static double
em_fps_get(void * video)364 em_fps_get(void *video)
365 {
366    Emotion_Gstreamer *ev = video;
367    int num, den;
368 
369    if (!ev->ready)
370      return 0.0;
371 
372    _em_fps_get(ev, &num, &den);
373 
374    return (double)num / (double)den;
375 }
376 
377 static double
em_pos_get(void * video)378 em_pos_get(void *video)
379 {
380    Emotion_Gstreamer *ev = video;
381    gint64 val;
382    gboolean ret;
383 
384    if (!ev->ready) return 0.0;
385 
386    ret = gst_element_query_position(ev->pipeline, GST_FORMAT_TIME, &val);
387    if (!ret || val == -1)
388      return ev->position;
389 
390    ev->position = val / 1000000000.0;
391    return ev->position;
392 }
393 
394 static void
em_vis_set(void * video,Emotion_Vis vis)395 em_vis_set(void *video,
396            Emotion_Vis vis)
397 {
398    Emotion_Gstreamer *ev = video;
399 
400    ev->vis = vis;
401 }
402 
403 static Emotion_Vis
em_vis_get(void * video)404 em_vis_get(void *video)
405 {
406    Emotion_Gstreamer *ev = video;
407 
408    return ev->vis;
409 }
410 
411 static Eina_Bool
em_vis_supported(void * ef EINA_UNUSED,Emotion_Vis vis)412 em_vis_supported(void *ef EINA_UNUSED, Emotion_Vis vis)
413 {
414    const char *name;
415    GstElementFactory *factory;
416 
417    if (vis == EMOTION_VIS_NONE)
418      return EINA_TRUE;
419 
420    name = emotion_visualization_element_name_get(vis);
421    if (!name)
422      return EINA_FALSE;
423 
424    factory = gst_element_factory_find(name);
425    if (!factory)
426      return EINA_FALSE;
427 
428    gst_object_unref(factory);
429    return EINA_TRUE;
430 }
431 
432 static double
em_ratio_get(void * video)433 em_ratio_get(void *video)
434 {
435    Emotion_Gstreamer *ev = video;
436    gint cur;
437    GstPad *pad;
438    GstCaps *caps;
439    GstVideoInfo info;
440 
441    info.par_n = info.par_d = 1;
442 
443    if (!ev->ready)
444      goto on_error;
445 
446    g_object_get(ev->pipeline, "current-video", &cur, NULL);
447    g_signal_emit_by_name (ev->pipeline, "get-video-pad", cur, &pad);
448    if (!pad)
449      goto on_error;
450 
451    caps = gst_pad_get_current_caps(pad);
452    gst_object_unref(pad);
453    if (!caps)
454      goto on_error;
455 
456    gst_video_info_from_caps (&info, caps);
457    gst_caps_unref(caps);
458 
459  on_error:
460 
461    return (double)info.par_n / (double)info.par_d;
462 }
463 
/* Forward declarations: the handled() helpers below use the channel
 * counters that are defined further down in this file. */
static int em_audio_channel_count(void *video);
static int em_video_channel_count(void *video);
466 
static int
em_video_handled(void *video)
{
   /* Non-zero when the current media exposes at least one video stream. */
   return (em_video_channel_count(video) > 0) ? 1 : 0;
}
474 
static int
em_audio_handled(void *video)
{
   /* Non-zero when the current media exposes at least one audio stream. */
   return (em_audio_channel_count(video) > 0) ? 1 : 0;
}
482 
483 static int
em_seekable(void * video)484 em_seekable(void *video)
485 {
486    Emotion_Gstreamer *ev = video;
487    GstQuery *query;
488    int ret = 0;
489    gboolean seekable;
490 
491    if (!ev->ready) return ret;
492 
493    query = gst_query_new_seeking(GST_FORMAT_TIME);
494    if (!gst_element_query(ev->pipeline, query))
495      goto on_error;
496 
497    gst_query_parse_seeking(query, NULL, &seekable, NULL, NULL);
498    if (!seekable)
499      goto on_error;
500 
501    ret = 1;
502 
503 on_error:
504    gst_query_unref(query);
505 
506    return ret;
507 }
508 
static void
em_frame_done(void *video EINA_UNUSED)
{
   /* Intentionally empty: this engine needs no per-frame cleanup. */
}
513 
514 static Emotion_Format
em_format_get(void * video)515 em_format_get(void *video)
516 {
517    Emotion_Gstreamer *ev = video;
518    gint cur;
519    GstPad *pad;
520    GstCaps *caps;
521    GstVideoInfo info;
522    Emotion_Format format = EMOTION_FORMAT_NONE;
523 
524    if (!ev->ready)
525      goto on_error;
526 
527    g_object_get(ev->pipeline, "current-video", &cur, NULL);
528    g_signal_emit_by_name (ev->pipeline, "get-video-pad", cur, &pad);
529    if (!pad)
530      goto on_error;
531 
532    caps = gst_pad_get_current_caps(pad);
533    gst_object_unref(pad);
534    if (!caps)
535      goto on_error;
536 
537    gst_video_info_from_caps (&info, caps);
538    gst_caps_unref(caps);
539 
540    switch (info.finfo->format)
541      {
542       case GST_VIDEO_FORMAT_I420:
543          return EMOTION_FORMAT_I420;
544       case GST_VIDEO_FORMAT_YV12:
545          return EMOTION_FORMAT_YV12;
546       case GST_VIDEO_FORMAT_YUY2:
547          return EMOTION_FORMAT_YUY2;
548       case GST_VIDEO_FORMAT_ARGB:
549          /* FIXME: This will be wrong for big endian archs */
550          return EMOTION_FORMAT_BGRA;
551       default:
552          return EMOTION_FORMAT_NONE;
553      }
554 
555  on_error:
556 
557    return format;
558 }
559 
static void
em_video_data_size_get(void *video, int *w, int *h)
{
   /* Frame data size equals the native stream size for this engine. */
   em_size_get(video, w, h);
}
565 
static int
em_yuv_rows_get(void           *video EINA_UNUSED,
                int             w EINA_UNUSED,
                int             h EINA_UNUSED,
                unsigned char **yrows EINA_UNUSED,
                unsigned char **urows EINA_UNUSED,
                unsigned char **vrows EINA_UNUSED)
{
   /* Not implemented by this engine; always reports failure (0). */
   return 0;
}
576 
static int
em_bgra_data_get(void *video EINA_UNUSED, unsigned char **bgra_data EINA_UNUSED)
{
   /* Not implemented by this engine; always reports failure (0). */
   return 0;
}
582 
static void
em_event_feed(void *video, int event)
{
   /* Translate an Emotion UI event into a GstNavigation command and
    * send it to the pipeline (used e.g. for DVD menu navigation).
    * Unmapped events (digits, unknown ids) are silently dropped. */
   Emotion_Gstreamer *ev = video;
   GstNavigationCommand command;

   if (!ev->ready) return;

   switch (event)
     {
      case EMOTION_EVENT_MENU1:
        command = GST_NAVIGATION_COMMAND_MENU1;
        break;
      case EMOTION_EVENT_MENU2:
        command = GST_NAVIGATION_COMMAND_MENU2;
        break;
      case EMOTION_EVENT_MENU3:
        command = GST_NAVIGATION_COMMAND_MENU3;
        break;
      case EMOTION_EVENT_MENU4:
        command = GST_NAVIGATION_COMMAND_MENU4;
        break;
      case EMOTION_EVENT_MENU5:
        command = GST_NAVIGATION_COMMAND_MENU5;
        break;
      case EMOTION_EVENT_MENU6:
        command = GST_NAVIGATION_COMMAND_MENU6;
        break;
      case EMOTION_EVENT_MENU7:
        command = GST_NAVIGATION_COMMAND_MENU7;
        break;
      case EMOTION_EVENT_UP:
        command = GST_NAVIGATION_COMMAND_UP;
        break;
      case EMOTION_EVENT_DOWN:
        command = GST_NAVIGATION_COMMAND_DOWN;
        break;
      case EMOTION_EVENT_LEFT:
        command = GST_NAVIGATION_COMMAND_LEFT;
        break;
      case EMOTION_EVENT_RIGHT:
        command = GST_NAVIGATION_COMMAND_RIGHT;
        break;
      case EMOTION_EVENT_SELECT:
        command = GST_NAVIGATION_COMMAND_ACTIVATE;
        break;
      case EMOTION_EVENT_NEXT:
        /* FIXME */
        command = GST_NAVIGATION_COMMAND_RIGHT;
        break;
      case EMOTION_EVENT_PREV:
        /* FIXME */
        command = GST_NAVIGATION_COMMAND_LEFT;
        break;
      case EMOTION_EVENT_ANGLE_NEXT:
        command = GST_NAVIGATION_COMMAND_NEXT_ANGLE;
        break;
      case EMOTION_EVENT_ANGLE_PREV:
        command = GST_NAVIGATION_COMMAND_PREV_ANGLE;
        break;
      case EMOTION_EVENT_FORCE:
        /* FIXME */
        command = GST_NAVIGATION_COMMAND_ACTIVATE;
        break;
      /* Digit events have no GstNavigation counterpart; drop them. */
      case EMOTION_EVENT_0:
      case EMOTION_EVENT_1:
      case EMOTION_EVENT_2:
      case EMOTION_EVENT_3:
      case EMOTION_EVENT_4:
      case EMOTION_EVENT_5:
      case EMOTION_EVENT_6:
      case EMOTION_EVENT_7:
      case EMOTION_EVENT_8:
      case EMOTION_EVENT_9:
      case EMOTION_EVENT_10:
      default:
        return;
        break;
     }

  gst_navigation_send_command (GST_NAVIGATION (ev->pipeline), command);
}
665 
666 static void
em_event_mouse_button_feed(void * video,int button,int x,int y)667 em_event_mouse_button_feed(void *video, int button, int x, int y)
668 {
669    Emotion_Gstreamer *ev = video;
670 
671    if (!ev->ready) return;
672 
673    /* FIXME */
674    gst_navigation_send_mouse_event (GST_NAVIGATION (ev->pipeline), "mouse-button-press", button, x, y);
675    gst_navigation_send_mouse_event (GST_NAVIGATION (ev->pipeline), "mouse-button-release", button, x, y);
676 }
677 
678 static void
em_event_mouse_move_feed(void * video,int x,int y)679 em_event_mouse_move_feed(void *video, int x, int y)
680 {
681    Emotion_Gstreamer *ev = video;
682 
683    if (!ev->ready) return;
684 
685    gst_navigation_send_mouse_event (GST_NAVIGATION (ev->pipeline), "mouse-move", 0, x, y);
686 }
687 
688 /* Video channels */
689 static int
em_video_channel_count(void * video)690 em_video_channel_count(void *video)
691 {
692    Emotion_Gstreamer *ev = video;
693    gint n;
694 
695    if (!ev->ready) return 0;
696 
697    g_object_get(ev->pipeline, "n-video", &n, NULL);
698 
699    return n;
700 }
701 
702 static void
em_video_channel_set(void * video,int channel)703 em_video_channel_set(void *video,
704                      int   channel)
705 {
706    Emotion_Gstreamer *ev = video;
707 
708    if (!ev->ready) return;
709 
710    if (channel < 0) channel = -1;
711 
712    g_object_set (ev->pipeline, "current-video", channel, NULL);
713 }
714 
715 static int
em_video_channel_get(void * video)716 em_video_channel_get(void *video)
717 {
718    Emotion_Gstreamer *ev = video;
719    gint cur;
720 
721    if (!ev->ready) return -1;
722 
723    g_object_get(ev->pipeline, "current-video", &cur, NULL);
724 
725    return cur;
726 }
727 
static void
em_video_subtitle_file_set(void *video,
                           const char *filepath)
{
   /* Remember the subtitle file path (stringshared); it is consumed by
    * em_file_open() on the next open. */
   Emotion_Gstreamer *ev = video;

   eina_stringshare_replace(&(ev->subtitle), filepath);
}
736 
static const char *
em_video_subtitle_file_get(void *video)
{
   /* Return the stored subtitle path (stringshare owned by ev). */
   Emotion_Gstreamer *ev = video;

   return ev->subtitle;
}
744 
static const char *
em_video_channel_name_get(void *video EINA_UNUSED,
                          int   channel EINA_UNUSED)
{
   /* Stream names are not implemented by this engine. */
   return NULL;
}
751 
752 static void
em_video_channel_mute_set(void * video,int mute)753 em_video_channel_mute_set(void *video,
754                           int   mute)
755 {
756    Emotion_Gstreamer *ev = video;
757 
758    ev->video_mute = mute;
759 }
760 
static int
em_video_channel_mute_get(void *video)
{
   /* Return the flag last stored by em_video_channel_mute_set(). */
   Emotion_Gstreamer *ev = video;

   return ev->video_mute;
}
768 
769 /* Audio channels */
770 
771 static int
em_audio_channel_count(void * video)772 em_audio_channel_count(void *video)
773 {
774    Emotion_Gstreamer *ev = video;
775    gint n;
776 
777    if (!ev->ready) return 0;
778 
779    g_object_get(ev->pipeline, "n-audio", &n, NULL);
780 
781    return n;
782 }
783 
784 static void
em_audio_channel_set(void * video,int channel)785 em_audio_channel_set(void *video,
786                      int   channel)
787 {
788    Emotion_Gstreamer *ev = video;
789 
790    if (!ev->ready) return;
791 
792    if (channel < 0) channel = -1;
793 
794    g_object_set (ev->pipeline, "current-audio", channel, NULL);
795 }
796 
797 static int
em_audio_channel_get(void * video)798 em_audio_channel_get(void *video)
799 {
800    Emotion_Gstreamer *ev = video;
801    gint cur;
802 
803    if (!ev->ready) return -1;
804 
805    g_object_get(ev->pipeline, "current-audio", &cur, NULL);
806 
807    return cur;
808 }
809 
static const char *
em_audio_channel_name_get(void *video EINA_UNUSED,
                          int   channel EINA_UNUSED)
{
   /* Stream names are not implemented by this engine. */
   return NULL;
}
816 
817 static void
em_audio_channel_mute_set(void * video,int mute)818 em_audio_channel_mute_set(void *video,
819                           int   mute)
820 {
821    Emotion_Gstreamer *ev = video;
822 
823    ev->audio_mute = !!mute;
824 
825    if (!ev->pipeline) return;
826 
827    g_object_set(G_OBJECT(ev->pipeline), "mute", !!mute, NULL);
828 }
829 
830 static int
em_audio_channel_mute_get(void * video)831 em_audio_channel_mute_get(void *video)
832 {
833    Emotion_Gstreamer *ev = video;
834    gboolean mute;
835 
836    if (!ev->pipeline)
837      return ev->audio_mute;
838 
839    g_object_get(ev->pipeline, "mute", &mute, NULL);
840 
841    return !!mute;
842 }
843 
844 static void
em_audio_channel_volume_set(void * video,double vol)845 em_audio_channel_volume_set(void  *video,
846                             double vol)
847 {
848    Emotion_Gstreamer *ev = video;
849 
850    if (vol < 0.0)
851      vol = 0.0;
852    ev->volume = vol;
853 
854    if (!ev->pipeline) return;
855 
856    g_object_set(G_OBJECT(ev->pipeline), "volume", vol, NULL);
857 }
858 
859 static double
em_audio_channel_volume_get(void * video)860 em_audio_channel_volume_get(void *video)
861 {
862    Emotion_Gstreamer *ev = video;
863    gdouble vol;
864 
865    if (!ev->pipeline)
866      return ev->volume;
867 
868    g_object_get(ev->pipeline, "volume", &vol, NULL);
869 
870    return vol;
871 }
872 
873 /* spu stuff */
874 
875 static int
em_spu_channel_count(void * video)876 em_spu_channel_count(void *video)
877 {
878    Emotion_Gstreamer *ev = video;
879    gint n;
880 
881    if (!ev->ready) return 0;
882 
883    g_object_get(ev->pipeline, "n-text", &n, NULL);
884 
885    return n;
886 }
887 
888 static void
em_spu_channel_set(void * video,int channel)889 em_spu_channel_set(void *video, int channel)
890 {
891    Emotion_Gstreamer *ev = video;
892 
893    if (!ev->ready) return;
894 
895    if (channel < 0) channel = -1;
896 
897    g_object_set(ev->pipeline, "current-text", channel, NULL);
898 }
899 
900 static int
em_spu_channel_get(void * video)901 em_spu_channel_get(void *video)
902 {
903    Emotion_Gstreamer *ev = video;
904    gint cur;
905 
906    if (!ev->ready) return -1;
907 
908    g_object_get(ev->pipeline, "current-text", &cur, NULL);
909 
910    return cur;
911 }
912 
static const char *
em_spu_channel_name_get(void *video EINA_UNUSED, int channel EINA_UNUSED)
{
   /* Stream names are not implemented by this engine. */
   return NULL;
}
918 
919 static void
em_spu_channel_mute_set(void * video,int mute)920 em_spu_channel_mute_set(void *video, int mute)
921 {
922    Emotion_Gstreamer *ev = video;
923    gint flags;
924 
925    if (!ev->pipeline) return;
926 
927    g_object_get(ev->pipeline, "flags", &flags, NULL);
928 
929    if (mute)
930      flags &= ~GST_PLAY_FLAG_TEXT;
931    else
932      flags |= GST_PLAY_FLAG_TEXT;
933 
934    g_object_set(ev->pipeline, "flags", flags, NULL);
935 }
936 
937 static int
em_spu_channel_mute_get(void * video)938 em_spu_channel_mute_get(void *video)
939 {
940    Emotion_Gstreamer *ev = video;
941    gint flags;
942 
943    if (!ev->pipeline) return 0;
944 
945    g_object_get(ev->pipeline, "flags", &flags, NULL);
946 
947    return (flags & GST_PLAY_FLAG_TEXT) ? 0 : 1;
948 }
949 
static int
em_chapter_count(void *video EINA_UNUSED)
{
   /* Chapters are not implemented by this engine. */
   return 0;
}
955 
static void
em_chapter_set(void *video EINA_UNUSED, int chapter EINA_UNUSED)
{
   /* Chapters are not implemented by this engine; no-op. */
}
960 
static int
em_chapter_get(void *video EINA_UNUSED)
{
   /* Chapters are not implemented by this engine. */
   return 0;
}
966 
static const char *
em_chapter_name_get(void *video EINA_UNUSED, int chapter EINA_UNUSED)
{
   /* Chapters are not implemented by this engine. */
   return NULL;
}
972 
static void
em_speed_set(void *video EINA_UNUSED, double speed EINA_UNUSED)
{
   /* Playback-rate changes are not implemented by this engine; no-op. */
}
977 
static double
em_speed_get(void *video EINA_UNUSED)
{
   /* Playback rate is always reported as normal speed. */
   return 1.0;
}
983 
static int
em_eject(void *video EINA_UNUSED)
{
   /* Ejecting media is not applicable here; always report success. */
   return 1;
}
989 
990 static void
_img_del_cb(void * data,Evas * e EINA_UNUSED,Evas_Object * obj EINA_UNUSED,void * event_info EINA_UNUSED)991 _img_del_cb(void *data, Evas *e EINA_UNUSED, Evas_Object *obj EINA_UNUSED, void *event_info EINA_UNUSED)
992 {
993    GstBuffer *buffer = data;
994 
995    gst_buffer_unref(buffer);
996 }
997 
998 void *
em_meta_artwork_get(void * video,Evas_Object * img,const char * path,Emotion_Artwork_Info type)999 em_meta_artwork_get(void *video, Evas_Object *img, const char *path, Emotion_Artwork_Info type)
1000 {
1001    Emotion_Gstreamer *ev = video;
1002    GError *err = NULL;
1003 
1004    if (!ev) return NULL;
1005 
1006    gst_init(NULL,NULL);
1007 
1008    gchar *uri = gst_filename_to_uri(path, NULL);
1009 
1010    GstDiscoverer *discoverer = gst_discoverer_new(10 * GST_SECOND, &err);
1011    if (!discoverer) return NULL;
1012    GstDiscovererInfo* info = gst_discoverer_discover_uri(discoverer,
1013                                  uri, &err);
1014    if (!info) return NULL;
1015 
1016    int ret = gst_discoverer_info_get_result(info);
1017    if (ret != GST_DISCOVERER_OK) goto done;
1018 
1019    const GstTagList *tags = gst_discoverer_info_get_tags(info);
1020 
1021    GstSample *sample;
1022    GstBuffer *buffer;
1023    GstMapInfo map;
1024 
1025    const gchar *tag = GST_TAG_PREVIEW_IMAGE;
1026    if (type == EMOTION_ARTWORK_IMAGE) tag = GST_TAG_IMAGE;
1027 
1028    if (gst_tag_list_get_sample(tags, tag, &sample))
1029      {
1030         buffer = gst_sample_get_buffer(sample);
1031         if (!buffer)
1032           {
1033              evas_object_del(img);
1034              img = NULL;
1035              goto done;
1036           }
1037 
1038         if (gst_buffer_map(gst_buffer_ref(buffer), &map, GST_MAP_READ))
1039           {
1040              evas_object_image_memfile_set(img, map.data, map.size, NULL, NULL);
1041              evas_object_event_callback_add(img, EVAS_CALLBACK_DEL, _img_del_cb, buffer);
1042           }
1043         gst_sample_unref(sample);
1044      }
1045    else
1046      {
1047         evas_object_del(img);
1048         img = NULL;
1049      }
1050 
1051 done:
1052    if (err) g_error_free(err);
1053 
1054    gst_discoverer_info_unref(info);
1055    g_free(uri);
1056    g_object_unref(discoverer);
1057 
1058    return img;
1059 }
1060 
1061 static const char *
em_meta_get(void * video,int meta)1062 em_meta_get(void *video, int meta)
1063 {
1064    Emotion_Gstreamer *ev = video;
1065    const char *str = NULL;
1066 
1067    if (!ev->metadata) return NULL;
1068 
1069    switch (meta)
1070      {
1071       case META_TRACK_TITLE:
1072          str = ev->metadata->title;
1073          break;
1074       case META_TRACK_ARTIST:
1075          str = ev->metadata->artist;
1076          break;
1077       case  META_TRACK_ALBUM:
1078          str = ev->metadata->album;
1079          break;
1080       case META_TRACK_YEAR:
1081          str = ev->metadata->year;
1082          break;
1083       case META_TRACK_GENRE:
1084          str = ev->metadata->genre;
1085          break;
1086       case META_TRACK_COMMENT:
1087          str = ev->metadata->comment;
1088          break;
1089       case META_TRACK_DISCID:
1090          str = ev->metadata->disc_id;
1091          break;
1092       default:
1093          break;
1094      }
1095 
1096    return str;
1097 }
1098 
1099 static void *
em_add(const Emotion_Engine * api,Evas_Object * obj,const Emotion_Module_Options * opt EINA_UNUSED)1100 em_add(const Emotion_Engine *api,
1101        Evas_Object *obj,
1102        const Emotion_Module_Options *opt EINA_UNUSED)
1103 {
1104    Emotion_Gstreamer *ev;
1105 
1106    ev = calloc(1, sizeof(Emotion_Gstreamer));
1107    EINA_SAFETY_ON_NULL_RETURN_VAL(ev, NULL);
1108 
1109    ev->api = api;
1110    ev->obj = obj;
1111 
1112    ev->ref_count = 1;
1113 
1114    /* Default values */
1115    ev->vis = EMOTION_VIS_NONE;
1116    ev->volume = 0.8;
1117    ev->ready = EINA_FALSE;
1118    ev->shutdown = EINA_FALSE;
1119    ev->threads = NULL;
1120 
1121    return ev;
1122 }
1123 
/* Emotion engine vtable: maps the generic Emotion module API onto the
 * GStreamer 1.x backend implemented in this file. Entries set to NULL
 * are optional features this backend does not provide. Registered with
 * _emotion_module_register() in gstreamer_module_init(). */
static const Emotion_Engine em_engine =
{
   EMOTION_ENGINE_API_VERSION,
   EMOTION_ENGINE_PRIORITY_DEFAULT,
   "gstreamer1",
   em_add, /* add */
   em_del, /* del */
   em_file_open, /* file_open */
   em_file_close, /* file_close */
   em_play, /* play */
   em_stop, /* stop */
   em_size_get, /* size_get */
   em_pos_set, /* pos_set */
   em_len_get, /* len_get */
   em_buffer_size_get, /* buffer_size_get */
   em_fps_num_get, /* fps_num_get */
   em_fps_den_get, /* fps_den_get */
   em_fps_get, /* fps_get */
   em_pos_get, /* pos_get */
   em_vis_set, /* vis_set */
   em_vis_get, /* vis_get */
   em_vis_supported, /* vis_supported */
   em_ratio_get, /* ratio_get */
   em_video_handled, /* video_handled */
   em_audio_handled, /* audio_handled */
   em_seekable, /* seekable */
   em_frame_done, /* frame_done */
   em_format_get, /* format_get */
   em_video_data_size_get, /* video_data_size_get */
   em_yuv_rows_get, /* yuv_rows_get */
   em_bgra_data_get, /* bgra_data_get */
   em_event_feed, /* event_feed */
   em_event_mouse_button_feed, /* event_mouse_button_feed */
   em_event_mouse_move_feed, /* event_mouse_move_feed */
   em_video_channel_count, /* video_channel_count */
   em_video_channel_set, /* video_channel_set */
   em_video_channel_get, /* video_channel_get */
   em_video_subtitle_file_set, /* video_subtitle_file_set */
   em_video_subtitle_file_get, /* video_subtitle_file_get */
   em_video_channel_name_get, /* video_channel_name_get */
   em_video_channel_mute_set, /* video_channel_mute_set */
   em_video_channel_mute_get, /* video_channel_mute_get */
   em_audio_channel_count, /* audio_channel_count */
   em_audio_channel_set, /* audio_channel_set */
   em_audio_channel_get, /* audio_channel_get */
   em_audio_channel_name_get, /* audio_channel_name_get */
   em_audio_channel_mute_set, /* audio_channel_mute_set */
   em_audio_channel_mute_get, /* audio_channel_mute_get */
   em_audio_channel_volume_set, /* audio_channel_volume_set */
   em_audio_channel_volume_get, /* audio_channel_volume_get */
   em_spu_channel_count, /* spu_channel_count */
   em_spu_channel_set, /* spu_channel_set */
   em_spu_channel_get, /* spu_channel_get */
   em_spu_channel_name_get, /* spu_channel_name_get */
   em_spu_channel_mute_set, /* spu_channel_mute_set */
   em_spu_channel_mute_get, /* spu_channel_mute_get */
   em_chapter_count, /* chapter_count */
   em_chapter_set, /* chapter_set */
   em_chapter_get, /* chapter_get */
   em_chapter_name_get, /* chapter_name_get */
   em_speed_set, /* speed_set */
   em_speed_get, /* speed_get */
   em_eject, /* eject */
   em_meta_get, /* meta_get */
   NULL, /* priority_set */
   NULL, /* priority_get */
   em_meta_artwork_get,
};
1192 
1193 Eina_Bool
gstreamer_module_init(void)1194 gstreamer_module_init(void)
1195 {
1196    GError *error;
1197 
1198    if (_emotion_init_count > 0)
1199      {
1200         _emotion_pending_ecore_begin();
1201         return EINA_TRUE;
1202      }
1203 
1204    if (getenv("EMOTION_FPS_DEBUG")) debug_fps = EINA_TRUE;
1205 
1206    eina_threads_init();
1207    eina_log_threads_enable();
1208    _emotion_gstreamer_log_domain = eina_log_domain_register
1209      ("emotion-gstreamer", EINA_COLOR_LIGHTCYAN);
1210    if (_emotion_gstreamer_log_domain < 0)
1211      {
1212         EINA_LOG_CRIT("Could not register log domain 'emotion-gstreamer'");
1213         return EINA_FALSE;
1214      }
1215 
1216    if (!gst_init_check(0, NULL, &error))
1217      {
1218         EINA_LOG_CRIT("Could not init GStreamer");
1219         goto error_gst_init;
1220      }
1221 
1222    if (gst_plugin_register_static(GST_VERSION_MAJOR, GST_VERSION_MINOR,
1223                                   "emotion-sink",
1224                                   "video sink plugin for Emotion",
1225                                   gstreamer_plugin_init,
1226                                   VERSION,
1227                                   "LGPL",
1228                                   "Enlightenment",
1229                                   PACKAGE,
1230                                   "http://www.enlightenment.org/") == FALSE)
1231      {
1232         EINA_LOG_CRIT("Could not load static gstreamer video sink for Emotion.");
1233         goto error_gst_plugin;
1234      }
1235 
1236    if (!_emotion_module_register(&em_engine))
1237      {
1238         ERR("Could not register module %p", &em_engine);
1239         goto error_register;
1240      }
1241 
1242    _emotion_init_count = 1;
1243    return EINA_TRUE;
1244 
1245  error_register:
1246  error_gst_plugin:
1247 
1248    gst_deinit();
1249 
1250  error_gst_init:
1251    eina_log_domain_unregister(_emotion_gstreamer_log_domain);
1252    _emotion_gstreamer_log_domain = -1;
1253 
1254    return EINA_FALSE;
1255 }
1256 
1257 void
gstreamer_module_shutdown(void)1258 gstreamer_module_shutdown(void)
1259 {
1260    if (_emotion_init_count > 1)
1261      {
1262         _emotion_init_count--;
1263         return;
1264      }
1265    else if (_emotion_init_count == 0)
1266      {
1267         EINA_LOG_ERR("too many gstreamer_module_shutdown()");
1268         return;
1269      }
1270    _emotion_init_count = 0;
1271 
1272    _emotion_module_unregister(&em_engine);
1273 
1274    eina_log_domain_unregister(_emotion_gstreamer_log_domain);
1275    _emotion_gstreamer_log_domain = -1;
1276 
1277    gst_deinit();
1278 }
1279 
#ifndef EMOTION_STATIC_BUILD_GSTREAMER

/* When built as a loadable module, hook init/shutdown into Eina's
 * module loader; in static builds the caller invokes them directly. */
EINA_MODULE_INIT(gstreamer_module_init);
EINA_MODULE_SHUTDOWN(gstreamer_module_shutdown);

#endif
1286 
1287 static void
_for_each_tag(GstTagList const * list,gchar const * tag,void * data)1288 _for_each_tag(GstTagList const* list,
1289                     gchar const* tag,
1290                     void *data)
1291 {
1292    Emotion_Gstreamer *ev;
1293    int i;
1294    int count;
1295 
1296 
1297    ev = (Emotion_Gstreamer*)data;
1298 
1299    if (!ev || !ev->metadata) return;
1300 
1301    count = gst_tag_list_get_tag_size(list, tag);
1302 
1303    for (i = 0; i < count; i++)
1304      {
1305         if (!strcmp(tag, GST_TAG_TITLE))
1306           {
1307              char *str;
1308              g_free(ev->metadata->title);
1309              if (gst_tag_list_get_string(list, GST_TAG_TITLE, &str))
1310                ev->metadata->title = str;
1311              else
1312                ev->metadata->title = NULL;
1313              break;
1314           }
1315         if (!strcmp(tag, GST_TAG_ALBUM))
1316           {
1317              gchar *str;
1318              g_free(ev->metadata->album);
1319              if (gst_tag_list_get_string(list, GST_TAG_ALBUM, &str))
1320                ev->metadata->album = str;
1321              else
1322                ev->metadata->album = NULL;
1323              break;
1324           }
1325         if (!strcmp(tag, GST_TAG_ARTIST))
1326           {
1327              gchar *str;
1328              g_free(ev->metadata->artist);
1329              if (gst_tag_list_get_string(list, GST_TAG_ARTIST, &str))
1330                ev->metadata->artist = str;
1331              else
1332                ev->metadata->artist = NULL;
1333              break;
1334           }
1335         if (!strcmp(tag, GST_TAG_GENRE))
1336           {
1337              gchar *str;
1338              g_free(ev->metadata->genre);
1339              if (gst_tag_list_get_string(list, GST_TAG_GENRE, &str))
1340                ev->metadata->genre = str;
1341              else
1342                ev->metadata->genre = NULL;
1343              break;
1344           }
1345         if (!strcmp(tag, GST_TAG_COMMENT))
1346           {
1347              gchar *str;
1348              g_free(ev->metadata->comment);
1349              if (gst_tag_list_get_string(list, GST_TAG_COMMENT, &str))
1350                ev->metadata->comment = str;
1351              else
1352                ev->metadata->comment = NULL;
1353              break;
1354           }
1355         if (!strcmp(tag, GST_TAG_DATE))
1356           {
1357              gchar *str;
1358              const GValue *date;
1359              g_free(ev->metadata->year);
1360              date = gst_tag_list_get_value_index(list, GST_TAG_DATE, 0);
1361              if (date)
1362                str = g_strdup_value_contents(date);
1363              else
1364                str = NULL;
1365              ev->metadata->year = str;
1366              break;
1367           }
1368 
1369         if (!strcmp(tag, GST_TAG_DATE_TIME))
1370           {
1371              gchar *str;
1372              const GValue *date;
1373              g_free(ev->metadata->year);
1374              date = gst_tag_list_get_value_index(list, GST_TAG_DATE_TIME, 0);
1375              if (date)
1376                str = g_strdup_value_contents(date);
1377              else
1378                str = NULL;
1379              ev->metadata->year = str;
1380              break;
1381           }
1382 
1383         if (!strcmp(tag, GST_TAG_TRACK_NUMBER))
1384           {
1385              gchar *str;
1386              const GValue *track;
1387              g_free(ev->metadata->count);
1388              track = gst_tag_list_get_value_index(list, GST_TAG_TRACK_NUMBER, 0);
1389              if (track)
1390                str = g_strdup_value_contents(track);
1391              else
1392                str = NULL;
1393              ev->metadata->count = str;
1394              break;
1395           }
1396 
1397         if (!strcmp(tag, GST_TAG_CDDA_CDDB_DISCID))
1398           {
1399              gchar *str;
1400              const GValue *discid;
1401              g_free(ev->metadata->disc_id);
1402              discid = gst_tag_list_get_value_index(list, GST_TAG_CDDA_CDDB_DISCID, 0);
1403              if (discid)
1404                str = g_strdup_value_contents(discid);
1405              else
1406                str = NULL;
1407              ev->metadata->disc_id = str;
1408              break;
1409           }
1410      }
1411 
1412 }
1413 
1414 static void
_free_metadata(Emotion_Gstreamer_Metadata * m)1415 _free_metadata(Emotion_Gstreamer_Metadata *m)
1416 {
1417   if (!m) return;
1418 
1419   g_free(m->title);
1420   g_free(m->album);
1421   g_free(m->artist);
1422   g_free(m->genre);
1423   g_free(m->comment);
1424   g_free(m->year);
1425   g_free(m->count);
1426   g_free(m->disc_id);
1427 
1428   free(m);
1429 }
1430 
1431 static void
audio_buffer_probe_main(void * data)1432 audio_buffer_probe_main(void *data)
1433 {
1434    Emotion_Gstreamer *ev = data;
1435 
1436    if (!ev->shutdown)
1437      _emotion_frame_new(ev->obj);
1438 
1439    g_atomic_int_set(&ev->audio_buffer_probe_pending, 0);
1440 
1441    emotion_gstreamer_unref(ev);
1442    _emotion_pending_ecore_end();
1443 }
1444 
1445 static GstPadProbeReturn
audio_buffer_probe(GstPad * pad EINA_UNUSED,GstPadProbeInfo * info EINA_UNUSED,gpointer user_data)1446 audio_buffer_probe(GstPad *pad EINA_UNUSED, GstPadProbeInfo *info EINA_UNUSED, gpointer user_data)
1447 {
1448    Emotion_Gstreamer *ev = user_data;
1449 
1450    /* Don't call too many of these */
1451    if (!g_atomic_int_compare_and_exchange(&ev->audio_buffer_probe_pending, 0, 1))
1452      return GST_PAD_PROBE_OK;
1453 
1454    _emotion_pending_ecore_begin();
1455    ecore_main_loop_thread_safe_call_async(audio_buffer_probe_main, emotion_gstreamer_ref(ev));
1456 
1457    return GST_PAD_PROBE_OK;
1458 }
1459 
/* Main-loop side of the GStreamer bus: every message forwarded by
 * _bus_sync_handler() arrives here on the main thread and is translated
 * into Emotion callbacks on ev->obj. Always frees the wrapped message
 * and balances the pending-ecore count before returning. */
static void
_bus_main_handler(void *data)
{
   Emotion_Gstreamer_Message *send;
   Emotion_Gstreamer *ev;
   GstMessage              *msg;

   send = data;
   ev = send->ev;
   msg = send->msg;

   /* Just exit immediately if we're shutting down */
   if (ev->shutdown)
     {
        emotion_gstreamer_message_free(send);
        _emotion_pending_ecore_end();
        return;
     }

   switch (GST_MESSAGE_TYPE(msg))
     {
      case GST_MESSAGE_EOS:
         /* Stream finished: stop decoding and report completion. */
         ev->play = EINA_FALSE;
         _emotion_decode_stop(ev->obj);
         _emotion_playback_finished(ev->obj);
         break;
      case GST_MESSAGE_TAG:
        {
           GstTagList *new_tags;
           gst_message_parse_tag(msg, &new_tags);
           if (new_tags)
             {
                /* Copy every tag into ev->metadata (see _for_each_tag()). */
                gst_tag_list_foreach(new_tags,
                                     (GstTagForeachFunc)_for_each_tag,
                                     ev);
                gst_tag_list_free(new_tags);
             }
           /* NOTE(review): dereferences ev->metadata unconditionally;
            * it is calloc()ed in _create_pipeline() without a NULL
            * check -- confirm allocation failure cannot reach here. */
           _emotion_title_set(ev->obj, ev->metadata->title);
           break;
        }
      case GST_MESSAGE_ASYNC_DONE:
         /* A previously requested seek/state change has completed. */
         _emotion_seek_done(ev->obj);
         break;
      case GST_MESSAGE_STATE_CHANGED:
        {
           GstState old_state, new_state;

           gst_message_parse_state_changed(msg, &old_state, &new_state, NULL);
           INF("Element %s changed state from %s to %s.",
               GST_OBJECT_NAME(msg->src),
               gst_element_state_get_name(old_state),
               gst_element_state_get_name(new_state));

           /* First time the whole pipeline reaches PAUSED (or higher):
            * the streams are known, so finish opening the media. */
           if (GST_MESSAGE_SRC(msg) == GST_OBJECT(ev->pipeline) && new_state >= GST_STATE_PAUSED && !ev->ready)
             {
                gint n_audio, n_video;

                ev->ready = EINA_TRUE;

                g_object_get(G_OBJECT(ev->pipeline),
                  "n-audio", &n_audio,
                  "n-video", &n_video,
                  NULL);

                if (n_audio == 0 && n_video == 0)
                  ERR("No audio nor video stream found");

                /* Audio-only media: either attach the configured
                 * visualization element, or install a buffer probe on
                 * the audio sink so frame events keep flowing even
                 * without video (see audio_buffer_probe()). */
                if (n_audio > 0 && n_video == 0)
                  {
                     GstElement *vis = NULL;
                     gint flags;
                     const char *vis_name;

                     if ((vis_name = emotion_visualization_element_name_get(ev->vis)))
                       {
                          vis = gst_element_factory_make(vis_name, "vis");
                          g_object_set(G_OBJECT(ev->pipeline), "vis-plugin", vis, NULL);
                          g_object_get(G_OBJECT(ev->pipeline), "flags", &flags, NULL);
                          flags |= GST_PLAY_FLAG_VIS;
                          g_object_set(G_OBJECT(ev->pipeline), "flags", flags, NULL);
                       }
                     else
                       {
                          GstElement *audio_sink;

                          g_object_get(ev->pipeline, "audio-sink", &audio_sink, NULL);
                          ev->audio_buffer_probe_pad = gst_element_get_static_pad(audio_sink, "sink");
                          ev->audio_buffer_probe = gst_pad_add_probe(ev->audio_buffer_probe_pad,
                                                                     GST_PAD_PROBE_TYPE_BUFFER | GST_PAD_PROBE_TYPE_BUFFER_LIST,
                                                                     audio_buffer_probe,
                                                                     ev,
                                                                     NULL);
                          gst_object_unref(audio_sink);
                       }
                  }

                if (n_audio > 0 || n_video > 0)
                  {
                     /** NOTE: you need to set: GST_DEBUG_DUMP_DOT_DIR=/tmp EMOTION_ENGINE=gstreamer to save the $EMOTION_GSTREAMER_DOT file in '/tmp' */
                     /** then call dot -Tpng -oemotion_pipeline.png /tmp/$TIMESTAMP-$EMOTION_GSTREAMER_DOT.dot */

#if defined(HAVE_GETUID) && defined(HAVE_GETEUID)
                     if (getuid() == geteuid())
#endif
                       {
                          if (getenv("EMOTION_GSTREAMER_DOT"))
                            {
                               GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(ev->pipeline),
                                                                 GST_DEBUG_GRAPH_SHOW_ALL,
                                                                 getenv("EMOTION_GSTREAMER_DOT"));
                            }
                       }

                     _emotion_open_done(ev->obj);
                     _emotion_playback_started(ev->obj);
                  }
             }
           break;
        }
      case GST_MESSAGE_ERROR:
        {
           GError *err = NULL;
           gchar *name, *debug = NULL;

           name = gst_object_get_path_string (msg->src);
           gst_message_parse_error (msg, &err, &debug);

           ERR("ERROR: from element %s: %s\nAdditional debug info:\n%s", name, err->message, debug);

           g_error_free (err);
           g_free (debug);
           g_free (name);

           /* Fatal for this media: tear the pipeline down and end
            * playback like EOS would. */
           gst_element_set_state(ev->pipeline, GST_STATE_NULL);

           ev->play = EINA_FALSE;
           _emotion_decode_stop(ev->obj);
           _emotion_playback_finished(ev->obj);

           break;
        }
      case GST_MESSAGE_WARNING:
        {
           /* Non-fatal: log and carry on. */
           GError *err = NULL;
           gchar *name, *debug = NULL;

           name = gst_object_get_path_string (msg->src);
           gst_message_parse_warning (msg, &err, &debug);

           WRN("WARNING: from element %s: %s\nAdditional debug info:\n%s", name, err->message, debug);

           g_error_free (err);
           g_free (debug);
           g_free (name);

           break;
        }
      case GST_MESSAGE_BUFFERING:
        {
           gint percent = 0;

           /* If the stream is live, we do not care about buffering. */
           if (ev->live)
             {
                ev->buffering = FALSE;
                break;
             }

           gst_message_parse_buffering (msg, &percent);

           /* Wait until buffering is complete before start/resume playing */
           if (percent < 100)
             gst_element_set_state (ev->pipeline, GST_STATE_PAUSED);
           else if (ev->play)
             gst_element_set_state (ev->pipeline, GST_STATE_PLAYING);

           ev->buffering = (percent < 100);

           break;
        }
      case GST_MESSAGE_CLOCK_LOST:
        {
           /* Clock provider vanished: cycle PAUSED->PLAYING so the
            * pipeline can select a new clock. */
           gst_element_set_state (ev->pipeline, GST_STATE_PAUSED);
           gst_element_set_state (ev->pipeline, GST_STATE_PLAYING);
           break;
        }
      default:
         break;
     }

   emotion_gstreamer_message_free(send);
   _emotion_pending_ecore_end();
}
1653 
1654 static GstBusSyncReply
_bus_sync_handler(GstBus * bus EINA_UNUSED,GstMessage * msg,gpointer data)1655 _bus_sync_handler(GstBus *bus EINA_UNUSED, GstMessage *msg, gpointer data)
1656 {
1657    Emotion_Gstreamer *ev = data;
1658    Emotion_Gstreamer_Message *send;
1659 
1660    INF("Message %s from %s",
1661        GST_MESSAGE_TYPE_NAME(msg),
1662        GST_MESSAGE_SRC_NAME(msg));
1663 
1664    send = emotion_gstreamer_message_alloc(ev, msg);
1665 
1666    if (send)
1667      {
1668         _emotion_pending_ecore_begin();
1669         ecore_main_loop_thread_safe_call_async(_bus_main_handler, send);
1670      }
1671 
1672    gst_message_unref(msg);
1673 
1674    return GST_BUS_DROP;
1675 }
1676 
1677 static void
_emotion_gstreamer_pause(void * data,Ecore_Thread * thread)1678 _emotion_gstreamer_pause(void *data, Ecore_Thread *thread)
1679 {
1680    Emotion_Gstreamer *ev = data;
1681    gboolean res;
1682 
1683    if (ecore_thread_check(thread) || !ev->pipeline) return;
1684 
1685    gst_element_set_state(ev->pipeline, GST_STATE_PAUSED);
1686    res = gst_element_get_state(ev->pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
1687    if (res == GST_STATE_CHANGE_NO_PREROLL)
1688      {
1689         ev->live = EINA_TRUE;
1690         gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
1691         gst_element_get_state(ev->pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
1692      }
1693 }
1694 
1695 static void
_emotion_gstreamer_cancel(void * data,Ecore_Thread * thread)1696 _emotion_gstreamer_cancel(void *data, Ecore_Thread *thread)
1697 {
1698    Emotion_Gstreamer *ev = data;
1699 
1700    ev->threads = eina_list_remove(ev->threads, thread);
1701 
1702 #if defined(HAVE_GETUID) && defined(HAVE_GETEUID)
1703    if (getuid() == geteuid())
1704 #endif
1705      {
1706         if (getenv("EMOTION_GSTREAMER_DOT"))
1707           {
1708              GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(ev->pipeline),
1709                                                GST_DEBUG_GRAPH_SHOW_ALL,
1710                                                getenv("EMOTION_GSTREAMER_DOT"));
1711           }
1712      }
1713 
1714    emotion_gstreamer_unref(ev);
1715 }
1716 
1717 static void
_emotion_gstreamer_end(void * data,Ecore_Thread * thread)1718 _emotion_gstreamer_end(void *data, Ecore_Thread *thread)
1719 {
1720    Emotion_Gstreamer *ev = data;
1721 
1722    ev->threads = eina_list_remove(ev->threads, thread);
1723 
1724    if (ev->play && !ev->buffering)
1725      {
1726         gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
1727      }
1728 
1729 #if defined(HAVE_GETUID) && defined(HAVE_GETEUID)
1730    if (getuid() == geteuid())
1731 #endif
1732      {
1733         if (getenv("EMOTION_GSTREAMER_DOT"))
1734           {
1735              GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(ev->pipeline),
1736                                                GST_DEBUG_GRAPH_SHOW_ALL,
1737                                                getenv("EMOTION_GSTREAMER_DOT"));
1738           }
1739      }
1740 
1741    emotion_gstreamer_unref(ev);
1742 }
1743 
1744 static GstElement *
_create_pipeline(Emotion_Gstreamer * ev,Evas_Object * o,const char * uri,const char * suburi)1745 _create_pipeline(Emotion_Gstreamer *ev,
1746                  Evas_Object *o,
1747                  const char *uri,
1748                  const char *suburi)
1749 {
1750    GstElement *playbin;
1751    GstElement *vsink;
1752    GstBus *bus;
1753    int flags;
1754 
1755    if (!uri)
1756      return NULL;
1757 
1758    playbin = gst_element_factory_make("playbin", "playbin");
1759    if (!playbin)
1760      {
1761         ERR("Unable to create 'playbin' GstElement.");
1762         return NULL;
1763      }
1764 
1765    vsink = gst_element_factory_make("emotion-sink", "sink");
1766    if (!vsink)
1767      {
1768         ERR("Unable to create 'emotion-sink' GstElement.");
1769         goto unref_pipeline;
1770      }
1771 
1772    g_object_set(G_OBJECT(vsink), "emotion-object", o, NULL);
1773 
1774    g_object_get(G_OBJECT(playbin), "flags", &flags, NULL);
1775    g_object_set(G_OBJECT(playbin), "flags",
1776                 (flags | GST_PLAY_FLAG_DOWNLOAD) & ~GST_PLAY_FLAG_TEXT, NULL);
1777    g_object_set(G_OBJECT(playbin), "video-sink", vsink, NULL);
1778    g_object_set(G_OBJECT(playbin), "uri", uri, NULL);
1779    if (suburi)
1780      {
1781         g_object_set(G_OBJECT(playbin), "suburi", suburi, NULL);
1782         g_object_set(G_OBJECT(playbin), "subtitle-font-desc", "Sans, 10", NULL);
1783      }
1784 
1785    bus = gst_element_get_bus(playbin);
1786    gst_bus_set_sync_handler(bus, _bus_sync_handler, ev, NULL);
1787    gst_object_unref(bus);
1788 
1789    ev->pipeline = playbin;
1790    ev->vsink = vsink;
1791 
1792    ev->metadata = calloc(1, sizeof(Emotion_Gstreamer_Metadata));
1793 
1794    ev->threads = eina_list_append(ev->threads,
1795                                   ecore_thread_run(_emotion_gstreamer_pause,
1796                                                    _emotion_gstreamer_end,
1797                                                    _emotion_gstreamer_cancel,
1798                                                    emotion_gstreamer_ref(ev)));
1799 
1800    /** NOTE: you need to set: GST_DEBUG_DUMP_DOT_DIR=/tmp EMOTION_ENGINE=gstreamer to save the $EMOTION_GSTREAMER_DOT file in '/tmp' */
1801    /** then call dot -Tpng -oemotion_pipeline.png /tmp/$TIMESTAMP-$EMOTION_GSTREAMER_DOT.dot */
1802 #if defined(HAVE_GETUID) && defined(HAVE_GETEUID)
1803    if (getuid() == geteuid())
1804 #endif
1805      {
1806         if (getenv("EMOTION_GSTREAMER_DOT"))
1807           {
1808              GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(playbin),
1809                                                GST_DEBUG_GRAPH_SHOW_ALL,
1810                                                getenv("EMOTION_GSTREAMER_DOT"));
1811           }
1812      }
1813 
1814    return playbin;
1815 
1816  unref_pipeline:
1817    gst_object_unref(vsink);
1818    gst_object_unref(playbin);
1819    return NULL;
1820 }
1821