1 #ifndef EMOTION_H
2 #define EMOTION_H
3 
4 /**
5  * @file
6  * @brief Emotion Media Library
7  *
8  * These routines are used for Emotion.
9  */
10 
11 /**
12  *
13  * @page emotion_main Emotion
14  *
15  * @date 2003 (created)
16  *
17  * @section emotion_toc Table of Contents
18  *
19  * @li @ref emotion_main_intro
20  * @li @ref emotion_main_work
21  * @li @ref emotion_main_compiling
22  * @li @ref emotion_main_next_steps
23  * @li @ref emotion_main_intro_example
24  *
25  * @section emotion_main_intro Introduction
26  *
27  * A media object library for Evas and Ecore.
28  *
29  * Emotion is a library that allows playing audio and video files.
30  *
 * It is integrated into Ecore through its main loop, and the way the decoding
 * of audio and video is done is transparent to the user of the library. Once
 * the objects are created, the user can register callbacks for specific events
 * and set options on the object, all from the main loop (no threads are needed).
35  *
36  * Emotion is also integrated with Evas. The emotion object returned by
37  * emotion_object_add() is an Evas smart object, so it can be manipulated with
38  * default Evas object functions. Callbacks can be added to the signals emitted
39  * by this object with evas_object_smart_callback_add().
40  *
41  * @section emotion_main_work How does Emotion work?
42  *
43  * The Emotion library uses Evas smart objects to allow you to manipulate the
44  * created object as any other Evas object, and to connect to its signals,
 * handling them when needed. It's also possible to swallow Emotion objects
 * inside Edje themes and expect them to behave like a normal image or
 * rectangle as far as their dimensions are concerned.
48  *
49  * @section emotion_main_compiling How to compile
50  *
 * Emotion is a library your application links to. The procedure for this is
 * straightforward: compile your application with the compiler flags that the
 * @c pkg-config script outputs. For
54  * example:
55  *
56  * Compiling C or C++ files into object files:
57  *
58  * @verbatim
59    gcc -c -o main.o main.c `pkg-config --cflags emotion`
60    @endverbatim
61  *
62  * Linking object files into a binary executable:
63  *
64  * @verbatim
65    gcc -o my_application main.o `pkg-config --libs emotion`
66    @endverbatim
67  *
68  * See @ref pkgconfig
69  *
70  * @section emotion_main_next_steps Next Steps
71  *
 * After you understand what Emotion is and have installed it on your
 * system, you should get familiar with the programming
 * interface. We recommend taking a while to learn @ref Ecore and
 * @ref Evas to get started.
76  *
77  * Recommended reading:
78  *
79  * @li @ref Emotion_Init to initialize the library.
80  * @li @ref Emotion_Video to control video parameters.
81  * @li @ref Emotion_Audio to control audio parameters.
82  * @li @ref Emotion_Play to control playback.
83  * @li @ref Emotion_Webcam to show cameras.
84  * @li @ref Emotion_API for general programming interface.
85  *
86  * @section emotion_main_intro_example Introductory Example
87  *
88  * @include emotion_basic_example.c
89  *
90  * More examples can be found at @ref emotion_examples.
91  */
92 
93 #include <Evas.h>
94 #include <Efl_Config.h>
95 
96 #ifdef EAPI
97 # undef EAPI
98 #endif
99 
100 #ifdef _WIN32
101 # ifdef EFL_BUILD
102 #  ifdef DLL_EXPORT
103 #   define EAPI __declspec(dllexport)
104 #  else
105 #   define EAPI
106 #  endif
107 # else
108 #  define EAPI __declspec(dllimport)
109 # endif
110 #else
111 # ifdef __GNUC__
112 #  if __GNUC__ >= 4
113 #   define EAPI __attribute__ ((visibility("default")))
114 #  else
115 #   define EAPI
116 #  endif
117 # else
118 #  define EAPI
119 # endif
120 #endif
121 
122 #ifdef __cplusplus
123 extern "C" {
124 #endif
125 
126 #ifndef EFL_NOLEGACY_API_SUPPORT
127 #include "Emotion_Legacy.h"
128 #endif
129 #include "Emotion_Eo.h"
130 
131 /**
132  * @file Emotion.h
133  * @brief The file that provides Emotion the API, with functions available for
134  *        play, seek, change volume, etc.
135  */
136 
137 enum _Emotion_Event
138 {
139    EMOTION_EVENT_MENU1, // Escape Menu
140    EMOTION_EVENT_MENU2, // Title Menu
141    EMOTION_EVENT_MENU3, // Root Menu
142    EMOTION_EVENT_MENU4, // Subpicture Menu
143    EMOTION_EVENT_MENU5, // Audio Menu
144    EMOTION_EVENT_MENU6, // Angle Menu
145    EMOTION_EVENT_MENU7, // Part Menu
146    EMOTION_EVENT_UP,
147    EMOTION_EVENT_DOWN,
148    EMOTION_EVENT_LEFT,
149    EMOTION_EVENT_RIGHT,
150    EMOTION_EVENT_SELECT,
151    EMOTION_EVENT_NEXT,
152    EMOTION_EVENT_PREV,
153    EMOTION_EVENT_ANGLE_NEXT,
154    EMOTION_EVENT_ANGLE_PREV,
155    EMOTION_EVENT_FORCE,
156    EMOTION_EVENT_0,
157    EMOTION_EVENT_1,
158    EMOTION_EVENT_2,
159    EMOTION_EVENT_3,
160    EMOTION_EVENT_4,
161    EMOTION_EVENT_5,
162    EMOTION_EVENT_6,
163    EMOTION_EVENT_7,
164    EMOTION_EVENT_8,
165    EMOTION_EVENT_9,
166    EMOTION_EVENT_10
167 };
168 
169 /**
170  * @enum _Emotion_Meta_Info
171  *
172  * Used for retrieving information about the media file being played.
173  *
174  * @see emotion_object_meta_info_get()
175  *
176  * @ingroup Emotion_Info
177  */
178 enum _Emotion_Meta_Info
179 {
180    EMOTION_META_INFO_TRACK_TITLE, /**< track title */
181    EMOTION_META_INFO_TRACK_ARTIST, /**< artist name */
182    EMOTION_META_INFO_TRACK_ALBUM, /**< album name */
183    EMOTION_META_INFO_TRACK_YEAR, /**< track year */
184    EMOTION_META_INFO_TRACK_GENRE, /**< track genre */
185    EMOTION_META_INFO_TRACK_COMMENT, /**< track comments */
186    EMOTION_META_INFO_TRACK_DISC_ID, /**< track disc ID */
187    EMOTION_META_INFO_TRACK_COUNT /**< track count - number of the track in the album */
188 };
189 
190 enum _Emotion_Artwork_Info
191 {
192    EMOTION_ARTWORK_IMAGE,
193    EMOTION_ARTWORK_PREVIEW_IMAGE
194 };
195 
196 /**
197  * @enum _Emotion_Vis
198  *
199  * Used for displaying a visualization on the emotion object.
200  *
201  * @see emotion_object_vis_set()
202  *
203  * @ingroup Emotion_Visualization
204  */
205 enum _Emotion_Vis
206 {
207   EMOTION_VIS_NONE, /**< no visualization set */
208   EMOTION_VIS_GOOM, /**< goom */
209   EMOTION_VIS_LIBVISUAL_BUMPSCOPE, /**< bumpscope */
210   EMOTION_VIS_LIBVISUAL_CORONA, /**< corona */
211   EMOTION_VIS_LIBVISUAL_DANCING_PARTICLES, /**< dancing particles */
212   EMOTION_VIS_LIBVISUAL_GDKPIXBUF, /**< gdkpixbuf */
213   EMOTION_VIS_LIBVISUAL_G_FORCE, /**< G force */
214   EMOTION_VIS_LIBVISUAL_GOOM, /**< goom */
215   EMOTION_VIS_LIBVISUAL_INFINITE, /**< infinite */
216   EMOTION_VIS_LIBVISUAL_JAKDAW, /**< jakdaw */
217   EMOTION_VIS_LIBVISUAL_JESS, /**< jess */
218   EMOTION_VIS_LIBVISUAL_LV_ANALYSER, /**< lv analyser */
219   EMOTION_VIS_LIBVISUAL_LV_FLOWER, /**< lv flower */
220   EMOTION_VIS_LIBVISUAL_LV_GLTEST, /**< lv gltest */
221   EMOTION_VIS_LIBVISUAL_LV_SCOPE, /**< lv scope */
222   EMOTION_VIS_LIBVISUAL_MADSPIN, /**< madspin */
223   EMOTION_VIS_LIBVISUAL_NEBULUS, /**< nebulus */
224   EMOTION_VIS_LIBVISUAL_OINKSIE, /**< oinksie */
225   EMOTION_VIS_LIBVISUAL_PLASMA, /**< plasma */
226   EMOTION_VIS_LAST /* sentinel */
227 };
228 
229 /**
230  * @enum Emotion_Suspend
231  *
232  * Used for emotion pipeline resource management.
233  *
234  * @see emotion_object_suspend_set()
235  * @see emotion_object_suspend_get()
236  *
237  * @ingroup Emotion_Ressource
238  */
239 typedef enum
240 {
241   EMOTION_WAKEUP, /**< pipeline is up and running */
242   EMOTION_SLEEP, /**< turn off hardware resource usage like overlay */
243   EMOTION_DEEP_SLEEP, /**< destroy the pipeline, but keep full resolution pixels output around */
244   EMOTION_HIBERNATE /**< destroy the pipeline, and keep half resolution or object resolution if lower */
245 } Emotion_Suspend;
246 
247 /**
248  * @enum _Emotion_Aspect
249  * Defines the aspect ratio option.
250  */
251 enum _Emotion_Aspect
252 {
253   EMOTION_ASPECT_KEEP_NONE, /**< ignore video aspect ratio */
254   EMOTION_ASPECT_KEEP_WIDTH, /**< respect video aspect, fitting its width inside the object width */
255   EMOTION_ASPECT_KEEP_HEIGHT, /**< respect video aspect, fitting its height inside the object height */
256   EMOTION_ASPECT_KEEP_BOTH, /**< respect video aspect, fitting it inside the object area */
  EMOTION_ASPECT_CROP, /**< respect video aspect, cropping exceeding area */
258   EMOTION_ASPECT_CUSTOM, /**< use custom borders/crop for the video */
259 };
260 
261 typedef enum _Emotion_Event     Emotion_Event;
262 typedef enum _Emotion_Meta_Info Emotion_Meta_Info; /**< Meta info type to be retrieved. */
263 typedef enum _Emotion_Vis       Emotion_Vis; /**< Type of visualization. */
264 typedef enum _Emotion_Aspect    Emotion_Aspect; /**< Aspect ratio option. */
265 typedef enum _Emotion_Artwork_Info Emotion_Artwork_Info;
266 
267 #define EMOTION_CHANNEL_AUTO -1
268 #define EMOTION_CHANNEL_DEFAULT 0
269 
270 #define EMOTION_VERSION_MAJOR EFL_VERSION_MAJOR
271 #define EMOTION_VERSION_MINOR EFL_VERSION_MINOR
272 
273 /**
274  * @typedef Emotion_Version
275  * Represents the current version of Emotion
276  */
277 typedef struct _Emotion_Version
278   {
     int major; /**< major (binary or source incompatible changes) */
     int minor; /**< minor (new features, bugfixes, major improvements version) */
     int micro; /**< micro (bugfix, internal improvements, no new features version) */
     int revision; /**< git revision (0 if a proper release or the git revision number Emotion is built from) */
283   } Emotion_Version;
284 
285 EAPI extern Emotion_Version *emotion_version;
286 
287 /* api calls available */
288 
289 /**
290  * @brief How to create, initialize, manipulate and connect to signals of an
291  * Emotion object.
292  * @defgroup Emotion_API API available for manipulating Emotion object.
293  * @ingroup Emotion
294  *
295  * @{
296  *
 * Emotion provides an Evas smart object that allows one to play, control and
 * display a video or audio file. The API is synchronous, but not everything
 * happens immediately. There are also signals to report state changes.
300  *
301  * Basically, once the object is created and initialized, a file will be set to
302  * it, and then it can be resized, moved, and controlled by other Evas object
303  * functions.
304  *
 * However, the decoding of the music and video does not occur in the Ecore
 * main loop, but usually in another thread (this depends on the module being
 * used). The synchronization between this other thread and the main loop is
 * not visible to the end user of the library. The user can just register
 * callbacks for the available signals to receive information about the changed
 * states, and can call other functions from the API to request more changes on
 * the currently loaded file.
312  *
 * There will be a delay between an API function being called and it actually
 * being executed, since the request is made in the main thread and needs to
 * be sent to the decoding thread. For this reason, always call functions like
 * emotion_object_size_get() or emotion_object_length_get() after some signal
 * has been emitted, like "playback_started" or "open_done". @ref
 * emotion_signals_example.c "This example demonstrates this behavior".
319  *
320  * @section signals Available signals
 * The Evas_Object returned by emotion_object_add() has a number of signals that
 * can be listened to using Evas' smart callback mechanism. All signals have
 * @c NULL as event info. The following is a list of interesting signals (a short
 * example follows the list):
324  * @li "playback_started" - Emitted when the playback starts
325  * @li "playback_finished" - Emitted when the playback finishes
326  * @li "frame_decode" - Emitted every time a frame is decoded
327  * @li "open_done" - Emitted when the media file is opened
328  * @li "position_update" - Emitted when emotion_object_position_set is called
329  * @li "decode_stop" - Emitted after the last frame is decoded
330  *
331  * @section Emotion_Examples
332  *
 * The following examples illustrate Emotion usage. There's also the
 * emotion_test binary, distributed with this library, which covers the
 * entire API; since it is too long and repetitive to be explained in detail,
 * its code is just displayed as another example.
337  *
338  * @li @ref emotion_basic_example_c
339  * @li @ref emotion_signals_example.c "Emotion signals"
340  * @li @ref emotion_test_main.c "emotion_test - full API usage"
341  *
342  */
343 
344 /**
345  * @defgroup Emotion_Init Creation and initialization functions
346  */
347 
348 /**
349  * @defgroup Emotion_Audio Audio control functions
350  */
351 
352 /**
353  * @defgroup Emotion_Video Video control functions
354  */
355 
356 /**
357  * @defgroup Emotion_Visualization Visualization control functions
358  */
359 
360 /**
361  * @defgroup Emotion_Info Miscellaneous information retrieval functions
362  */
363 
364 /**
365  * @defgroup Emotion_Ressource Video resource management
366  */
367 
368 /**
 * @brief Initialise the Emotion library.
 *
 * Initialises the needed libraries (such as Eina, Ecore and Eet) and the
 * needed modules (such as webcam support).
373  */
374 EAPI Eina_Bool emotion_init(void);
375 
376 /**
 * @brief Shut down the Emotion library.
 *
 * Properly shuts down all loaded modules and initialised libraries.
380  */
381 EAPI Eina_Bool emotion_shutdown(void);
382 
383 /**
384  * @brief Add an emotion object to the canvas.
385  *
386  * @param evas The canvas where the object will be added to.
387  * @return The emotion object just created.
388  *
389  * This function creates an emotion object and adds it to the specified @p evas.
390  * The returned object can be manipulated as any other Evas object, using the
391  * default object manipulation functions - evas_object_*.
392  *
393  * After creating the object with this function, it's still necessary to
394  * initialize it with emotion_object_init(), and if an audio file is going to be
395  * played with this object instead of a video, use
396  * emotion_object_video_mute_set().
397  *
398  * The next step is to open the desired file with emotion_object_file_set(), and
399  * start playing it with emotion_object_play_set().
400  *
401  * @see emotion_object_init()
402  * @see emotion_object_video_mute_set()
403  * @see emotion_object_file_set()
404  * @see emotion_object_play_set()
405  *
406  * @ingroup Emotion_Init
407  */
408 EAPI Evas_Object *emotion_object_add                   (Evas *evas);
409 
410 /**
411  * @brief Set borders for the emotion object.
412  *
413  * @param obj The emotion object where borders are being set.
414  * @param l The left border.
415  * @param r The right border.
416  * @param t The top border.
417  * @param b The bottom border.
418  *
 * This function sets borders for the emotion video object (only when a video is
 * present). When a positive value is given to one of the parameters, a border
 * of that size (relative to the original video size) will be added to the
 * respective side of the object. If the video is scaled up or down
 * (i.e. the emotion object size is different from the video size), the borders
 * will be scaled accordingly as well.
425  *
426  * If a negative value is given to one of the parameters, instead of a border,
427  * that respective side of the video will be cropped.
428  *
429  * It's possible to set a color for the added borders (default is transparent)
430  * with emotion_object_bg_color_set(). By default, an Emotion object doesn't
431  * have any border.
432  *
433  * @see emotion_object_border_get()
434  * @see emotion_object_bg_color_set()
435  *
436  * @ingroup Emotion_Video
437  */
438 EAPI void emotion_object_border_set(Evas_Object *obj, int l, int r, int t, int b);
439 
440 /**
441  * @brief Get the borders set for the emotion object.
442  *
443  * @param obj The emotion object from which the borders are being retrieved.
444  * @param l The left border.
445  * @param r The right border.
446  * @param t The top border.
447  * @param b The bottom border.
448  *
449  * @see emotion_object_border_set()
450  *
451  * @ingroup Emotion_Video
452  */
453 EAPI void emotion_object_border_get(const Evas_Object *obj, int *l, int *r, int *t, int *b);
454 
455 /**
456  * @brief Set a color for the background rectangle of this emotion object.
457  *
458  * @param obj The emotion object where the background color is being set.
459  * @param r Red component of the color.
460  * @param g Green component of the color.
461  * @param b Blue component of the color.
462  * @param a Alpha channel of the color.
463  *
464  * This is useful when a border is added to any side of the Emotion object. The
465  * area between the edge of the video and the edge of the object will be filled
466  * with the specified color.
467  *
468  * The default color is 0, 0, 0, 0 (transparent).
469  *
470  * @see emotion_object_bg_color_get()
471  *
472  * @ingroup Emotion_Video
473  */
474 EAPI void emotion_object_bg_color_set(Evas_Object *obj, int r, int g, int b, int a);
475 
476 /**
477  * @brief Get the background color set for the emotion object.
478  *
479  * @param obj The emotion object from which the background color is being retrieved.
480  * @param r Red component of the color.
481  * @param g Green component of the color.
482  * @param b Blue component of the color.
 * @param a Alpha channel of the color.
484  *
485  * @see emotion_object_bg_color_set()
486  *
487  * @ingroup Emotion_Video
488  */
489 EAPI void emotion_object_bg_color_get(const Evas_Object *obj, int *r, int *g, int *b, int *a);
490 
491 /**
492  * @brief Set whether emotion should keep the aspect ratio of the video.
493  *
494  * @param obj The emotion object where to set the aspect.
495  * @param a The aspect ratio policy.
496  *
497  * Instead of manually calculating the required border to set with
498  * emotion_object_border_set(), and using this to fix the aspect ratio of the
499  * video when the emotion object has a different aspect, it's possible to just
500  * set the policy to be used.
501  *
502  * The options are:
503  *
504  * - @b #EMOTION_ASPECT_KEEP_NONE - ignore the video aspect ratio, and reset any
505  *   border set to 0, stretching the video inside the emotion object area. This
506  *   option is similar to EVAS_ASPECT_CONTROL_NONE size hint.
507  * - @b #EMOTION_ASPECT_KEEP_WIDTH - respect the video aspect ratio, fitting the
508  *   video width inside the object width. This option is similar to
509  *   EVAS_ASPECT_CONTROL_HORIZONTAL size hint.
510  * - @b #EMOTION_ASPECT_KEEP_HEIGHT - respect the video aspect ratio, fitting
511  *   the video height inside the object height. This option is similar to
 *   EVAS_ASPECT_CONTROL_VERTICAL size hint.
513  * - @b #EMOTION_ASPECT_KEEP_BOTH - respect the video aspect ratio, fitting both
514  *   its width and height inside the object area. This option is similar to
515  *   EVAS_ASPECT_CONTROL_BOTH size hint. It's the effect called letterboxing.
 * - @b #EMOTION_ASPECT_CROP - respect the video aspect ratio, fitting the width
 *   or height inside the object area, and cropping the exceeding areas of the
 *   video in height or width. It's the effect called pan-and-scan.
 * - @b #EMOTION_ASPECT_CUSTOM - ignore the video aspect ratio, and use the
 *   borders currently set with emotion_object_border_set().
521  *
522  * @note Calling this function with any value except #EMOTION_ASPECT_CUSTOM will
523  * invalidate borders set with emotion_object_border_set().
524  *
525  * @note Calling emotion_object_border_set() will automatically set the aspect
526  * policy to #EMOTION_ASPECT_CUSTOM.
527  *
528  * @see emotion_object_border_set()
529  * @see emotion_object_keep_aspect_get()
530  *
531  * @ingroup Emotion_Video
532  */
533 EAPI void emotion_object_keep_aspect_set(Evas_Object *obj, Emotion_Aspect a);
534 
535 /**
536  * @brief Get the current emotion aspect ratio policy.
537  *
538  * @param obj The emotion object from which we are fetching the aspect ratio
539  * policy.
540  * @return The current aspect ratio policy.
541  *
542  * @see emotion_object_keep_aspect_set()
543  *
544  * @ingroup Emotion_Video
545  */
546 EAPI Emotion_Aspect emotion_object_keep_aspect_get(const Evas_Object *obj);
547 
548 /**
549  * @brief Set the file to be played in the Emotion object.
550  *
551  * @param obj The emotion object where the file is being loaded.
552  * @param filename Path to the file to be loaded. It can be absolute or relative
553  * path.
 * @return EINA_TRUE if the new file could be loaded successfully, and
 * EINA_FALSE if the file could not be loaded. This happens when the file
 * could not be found, when the module couldn't open the file, when no module is
 * initialized in this object, or when the @p filename is the same as the
 * one previously set.
559  *
560  * This function sets the file to be used with this emotion object. If the
561  * object already has another file set, this file will be unset and unloaded,
562  * and the new file will be loaded to this emotion object. The seek position
563  * will be set to 0, and the emotion object will be paused, instead of playing.
564  *
565  * If there was already a filename set, and it's the same as the one being set
566  * now, this function does nothing and returns EINA_FALSE.
567  *
568  * Use @c NULL as argument to @p filename if you want to unload the current file
569  * but don't want to load anything else.
570  *
571  * @see emotion_object_init()
572  * @see emotion_object_play_set()
573  * @see emotion_object_file_get()
574  *
575  * @ingroup Emotion_Init
576  */
577 EAPI Eina_Bool    emotion_object_file_set              (Evas_Object *obj, const char *filename);
578 
579 /**
580  * @brief Get the filename of the file associated with the emotion object.
581  *
582  * @param obj The emotion object from which the filename will be retrieved.
583  * @return The path to the file loaded into this emotion object.
584  *
 * This function returns the path of the file loaded in this emotion object. If
 * no file is loaded, it will return @c NULL.
587  *
588  * @note Don't free or change the string returned by this function in any way.
589  * If you want to unset it, use @c emotion_object_file_set(obj, NULL).
590  *
591  * @see emotion_object_file_set()
592  *
593  * @ingroup Emotion_Init
594  */
595 EAPI const char  *emotion_object_file_get              (const Evas_Object *obj);
596 /**
597  * @defgroup Emotion_Play Play control functions
598  * @ingroup Emotion
599  *
600  * @{
601  */
602 /**
603  *
604  * @brief Set play/pause state of the media file.
605  *
606  * @param obj The emotion object whose state will be changed.
607  * @param play EINA_TRUE to play, EINA_FALSE to pause.
608  *
 * This function sets the playing status of the video. Using this
 * function to play or pause the video doesn't alter its current position.
611  */
612 EAPI void         emotion_object_play_set              (Evas_Object *obj, Eina_Bool play);
613 /**
614  * @brief Get play/pause state of the media file.
615  *
616  * @param obj The emotion object from which the state will be retrieved.
617  * @return EINA_TRUE if playing. EINA_FALSE if not playing.
618  */
619 EAPI Eina_Bool    emotion_object_play_get              (const Evas_Object *obj);
620 /**
621  * @brief Set the position in the media file.
622  *
623  * @param obj The emotion object whose position will be changed.
624  * @param sec The position(in seconds) to which the media file will be set.
625  *
 * This function sets the current position of the media file to @p sec; this
 * only works on seekable streams. Setting the position doesn't change the
 * playing state of the media file.
629  *
630  * @see emotion_object_seekable_get
631  */
632 EAPI void         emotion_object_position_set          (Evas_Object *obj, double sec);
633 /**
634  * @brief Get the position in the media file.
635  *
636  * @param obj The emotion object from which the position will be retrieved.
637  * @return The position of the media file.
638  *
639  * The position is returned as the number of seconds since the beginning of the
640  * media file.
641  */
642 EAPI double       emotion_object_position_get          (const Evas_Object *obj);
643 
644 /**
 * @brief Get the percentage of the buffering cache that is filled.
646  *
647  * @param obj The emotion object from which the buffer size will be retrieved.
648  * @return The buffer percent size, ranging from 0.0 to 1.0
649  *
 * The buffer size is returned as a number between 0.0 and 1.0: 0.0 means the
 * buffer is empty, 1.0 means it is full.
 * If no buffering is in progress, 1.0 is returned. The same happens when the
 * backend doesn't support buffering, so you can always check for
 * buffer_size < 1.0 to know whether buffering is in progress.
655  *
 * @warning The generic backend doesn't implement this (it will return 1.0).
657  */
658 EAPI double       emotion_object_buffer_size_get       (const Evas_Object *obj);
659 
660 /**
661  * @brief Get whether the media file is seekable.
662  *
663  * @param obj The emotion object from which the seekable status will be
664  * retrieved.
665  * @return EINA_TRUE if the media file is seekable, EINA_FALSE otherwise.
666  */
667 EAPI Eina_Bool    emotion_object_seekable_get          (const Evas_Object *obj);
668 /**
669  * @brief Get the length of play for the media file.
670  *
671  * @param obj The emotion object from which the length will be retrieved.
672  * @return The length of the media file in seconds.
673  *
674  * This function returns the length of the media file in seconds.
675  *
 * @warning This will return 0 if called before the "length_change" signal has
 * been emitted.
678  */
679 EAPI double       emotion_object_play_length_get       (const Evas_Object *obj);
680 
681 /**
682  * @brief Set the play speed of the media file.
683  *
684  * @param obj The emotion object whose speed will be set.
685  * @param speed The speed to be set in the range [0,infinity)
686  *
 * This function sets the speed at which the media file will be played. 1.0
 * represents normal speed, 2.0 double speed, 0.5 half speed, and so on.
689  *
690  * @warning The only backend that implements this is the experimental VLC
691  * backend.
692  */
693 EAPI void         emotion_object_play_speed_set        (Evas_Object *obj, double speed);
694 /**
 * @brief Get the play speed of the media file.
 *
 * @param obj The emotion object from which the play speed will be retrieved.
698  * @return The current speed of the media file.
699  *
700  * @see emotion_object_play_speed_set
701  */
702 EAPI double       emotion_object_play_speed_get        (const Evas_Object *obj);
703 /**
704  * @brief Get how much of the file has been played.
705  *
 * @param obj The emotion object from which the progress information will be retrieved.
 * @return The progress of the media file.
 *
 * @warning Don't change or free the returned string.
 * @warning The gstreamer and xine backends don't implement this (it will return NULL).
711  */
712 EAPI const char  *emotion_object_progress_info_get     (const Evas_Object *obj);
713 /**
714  * @brief Get how much of the file has been played.
715  *
 * @param obj The emotion object from which the progress will be retrieved.
717  * @return The progress of the media file.
718  *
719  * This function gets the progress in playing the file, the return value is in
720  * the [0, 1] range.
721  *
 * @warning The gstreamer and xine backends don't implement this (it will return 0).
723  */
724 EAPI double       emotion_object_progress_status_get   (const Evas_Object *obj);
725 /**
726  * @}
727  */
728 EAPI Eina_Bool    emotion_object_video_handled_get     (const Evas_Object *obj);
729 EAPI Eina_Bool    emotion_object_audio_handled_get     (const Evas_Object *obj);
730 
731 /**
732  * @brief Retrieve the video aspect ratio of the media file loaded.
733  *
734  * @param obj The emotion object which the video aspect ratio will be retrieved
735  * from.
736  * @return The video aspect ratio of the file loaded.
737  *
738  * This function returns the video aspect ratio (width / height) of the file
739  * loaded. It can be used to adapt the size of the emotion object in the canvas,
740  * so the aspect won't be changed (by wrongly resizing the object). Or to crop
741  * the video correctly, if necessary.
742  *
 * The described behavior can be applied as follows. Consider a given
 * emotion object that we want to position inside an area, which we will
 * represent by @c w and @c h. Since we may want to stretch the video, fill the
 * entire area (overflowing the video), or fit the video inside the area while
 * keeping its aspect ratio, we must compare the video aspect ratio with the
 * area aspect ratio:
749  * @code
750  * int w = 200, h = 300; // an arbitrary value which represents the area where
751  *                       // the video would be placed
752  * int vw, vh;
753  * double r, vr = emotion_object_ratio_get(obj);
754  * r = (double)w / h;
755  * @endcode
756  *
757  * Now, if we want to make the video fit inside the area, the following code
758  * would do it:
759  * @code
760  * if (vr > r) // the video is wider than the area
761  *   {
762  *      vw = w;
763  *      vh = w / vr;
764  *   }
765  * else // the video is taller than the area
766  *   {
767  *      vh = h;
768  *      vw = h * vr;
769  *   }
770  * evas_object_resize(obj, vw, vh);
771  * @endcode
772  *
773  * And for keeping the aspect ratio but making the video fill the entire area,
774  * overflowing the content which can't fit inside it, we would do:
775  * @code
776  * if (vr > r) // the video is wider than the area
777  *   {
778  *      vh = h;
779  *      vw = h * vr;
780  *   }
781  * else // the video is taller than the area
782  *   {
783  *      vw = w;
784  *      vh = w / vr;
785  *   }
786  * evas_object_resize(obj, vw, vh);
787  * @endcode
788  *
789  * Finally, by just resizing the video to the video area, we would have the
790  * video stretched:
791  * @code
792  * vw = w;
793  * vh = h;
794  * evas_object_resize(obj, vw, vh);
795  * @endcode
796  *
797  * The following diagram exemplifies what would happen to the video,
798  * respectively, in each case:
799  *
800  * @image html emotion_ratio.png
801  * @image latex emotion_ratio.eps width=\textwidth
802  *
803  * @note This function returns the aspect ratio that the video @b should be, but
804  * sometimes the reported size from emotion_object_size_get() represents a
805  * different aspect ratio. You can safely resize the video to respect the aspect
806  * ratio returned by @b this function.
807  *
808  * @see emotion_object_size_get()
809  *
810  * @ingroup Emotion_Video
811  */
812 EAPI double       emotion_object_ratio_get             (const Evas_Object *obj);
813 
814 /**
815  * @brief Retrieve the video size of the loaded file.
816  *
817  * @param obj The object from which we are retrieving the video size.
818  * @param iw A pointer to a variable where the width will be stored.
819  * @param ih A pointer to a variable where the height will be stored.
820  *
821  * This function returns the reported size of the loaded video file. If a file
822  * that doesn't contain a video channel is loaded, then this size can be
823  * ignored.
824  *
825  * The value reported by this function should be consistent with the aspect
826  * ratio returned by emotion_object_ratio_get(), but sometimes the information
 * stored in the file is wrong. So use the ratio reported by
 * emotion_object_ratio_get(), since it is more likely to be accurate.
829  *
830  * @note Use @c NULL for @p iw or @p ih if you don't need one of these values.
831  *
832  * @see emotion_object_ratio_get()
833  *
834  * @ingroup Emotion_Video
835  */
836 EAPI void         emotion_object_size_get              (const Evas_Object *obj, int *iw, int *ih);
837 
838 /**
 * @brief Sets whether to use a high-quality image scaling algorithm
 * for the given video object.
841  *
842  * When enabled, a higher quality video scaling algorithm is used when
843  * scaling videos to sizes other than the source video. This gives
844  * better results but is more computationally expensive.
845  *
846  * @param obj The given video object.
847  * @param smooth Whether to use smooth scale or not.
848  *
849  * @see emotion_object_smooth_scale_get()
850  *
851  * @ingroup Emotion_Video
852  */
853 EAPI void         emotion_object_smooth_scale_set      (Evas_Object *obj, Eina_Bool smooth);
854 
855 /**
856  * @brief Gets whether the high-quality image scaling algorithm
857  * of the given video object is used.
858  *
859  * @param obj The given video object.
860  * @return Whether the smooth scale is used or not.
861  *
862  * @see emotion_object_smooth_scale_set()
863  *
864  * @ingroup Emotion_Video
865  */
866 EAPI Eina_Bool    emotion_object_smooth_scale_get      (const Evas_Object *obj);
867 
868 /**
869  * @brief Send an Emotion Event to an Evas object
870  *
871  * @param obj The object target of the event.
872  * @param ev The emotion event.
873  *
874  * @see Emotion_Event
875  */
876 EAPI void         emotion_object_event_simple_send     (Evas_Object *obj, Emotion_Event ev);
877 
878 /**
879  * @brief Set the audio volume.
880  *
881  * @param obj The object where the volume is being set.
882  * @param vol The new volume parameter. Range is from 0.0 to 1.0.
883  *
884  * Sets the audio volume of the stream being played. This has nothing to do with
885  * the system volume. This volume will be multiplied by the system volume. e.g.:
886  * if the current volume level is 0.5, and the system volume is 50%, it will be
887  * 0.5 * 0.5 = 0.25.
888  *
889  * The default value depends on the module used. This value doesn't get changed
890  * when another file is loaded.
891  *
892  * @see emotion_object_audio_volume_get()
893  *
894  * @ingroup Emotion_Audio
895  */
896 EAPI void         emotion_object_audio_volume_set      (Evas_Object *obj, double vol);
897 
898 /**
899  * @brief Get the audio volume.
900  *
901  * @param obj The object from which we are retrieving the volume.
902  * @return The current audio volume level for this object.
903  *
904  * Get the current value for the audio volume level. Range is from 0.0 to 1.0.
905  * This volume is set with emotion_object_audio_volume_set().
906  *
907  * @see emotion_object_audio_volume_set()
908  *
909  * @ingroup Emotion_Audio
910  */
911 EAPI double       emotion_object_audio_volume_get      (const Evas_Object *obj);
912 
913 /**
914  * @brief Set the mute audio option for this object.
915  *
916  * @param obj The object which we are setting the mute audio option.
917  * @param mute Whether the audio should be muted (@c EINA_TRUE) or not (@c
918  * EINA_FALSE).
919  *
920  * This function sets the mute audio option for this emotion object. The current
921  * module used for this object can use this to avoid decoding the audio portion
922  * of the loaded media file.
923  *
924  * @see emotion_object_audio_mute_get()
925  * @see emotion_object_video_mute_set()
926  *
927  * @ingroup Emotion_Audio
928  */
929 EAPI void         emotion_object_audio_mute_set        (Evas_Object *obj, Eina_Bool mute);
930 
931 /**
932  * @brief Get the mute audio option of this object.
933  *
934  * @param obj The object which we are retrieving the mute audio option from.
935  * @return Whether the audio is muted (@c EINA_TRUE) or not (@c EINA_FALSE).
936  *
 * This function returns the mute audio option from this emotion object. It can
938  * be set with emotion_object_audio_mute_set().
939  *
940  * @see emotion_object_audio_mute_set()
941  *
942  * @ingroup Emotion_Audio
943  */
944 EAPI Eina_Bool    emotion_object_audio_mute_get        (const Evas_Object *obj);
945 EAPI int          emotion_object_audio_channel_count   (const Evas_Object *obj);
946 EAPI const char  *emotion_object_audio_channel_name_get(const Evas_Object *obj, int channel);
947 EAPI void         emotion_object_audio_channel_set     (Evas_Object *obj, int channel);
948 EAPI int          emotion_object_audio_channel_get     (const Evas_Object *obj);
949 
950 /**
951  * @brief Set the mute video option for this object.
952  *
953  * @param obj The object which we are setting the mute video option.
954  * @param mute Whether the video should be muted (@c EINA_TRUE) or not (@c
955  * EINA_FALSE).
956  *
957  * This function sets the mute video option for this emotion object. The
958  * current module used for this object can use this information to avoid
959  * decoding the video portion of the loaded media file.
960  *
961  * @see emotion_object_video_mute_get()
962  * @see emotion_object_audio_mute_set()
963  *
964  * @ingroup Emotion_Video
965  */
966 EAPI void         emotion_object_video_mute_set        (Evas_Object *obj, Eina_Bool mute);
967 
968 /**
969  * @brief Get the mute video option of this object.
970  *
971  * @param obj The object which we are retrieving the mute video option from.
972  * @return Whether the video is muted (@c EINA_TRUE) or not (@c EINA_FALSE).
973  *
974  * This function returns the mute video option from this emotion object. It can
975  * be set with emotion_object_video_mute_set().
976  *
977  * @see emotion_object_video_mute_set()
978  *
979  * @ingroup Emotion_Video
980  */
981 EAPI Eina_Bool    emotion_object_video_mute_get        (const Evas_Object *obj);
982 
983 /**
984  * @brief Set the video's subtitle file path.
985  *
986  * @param obj  The object which we are setting a subtitle file path.
987  * @param filepath The subtitle file path.
988  *
 * This function sets a video's subtitle file path (e.g. an .srt file). For
 * supported subtitle formats, consult the backend's documentation.
991  *
992  * @see emotion_object_video_subtitle_file_get().
993  *
994  * @ingroup Emotion_Video
995  * @since 1.8
996  */
997 EAPI void         emotion_object_video_subtitle_file_set (Evas_Object *obj, const char *filepath);
998 
999 /**
1000  * @brief Get the video's subtitle file path.
1001  *
1002  * @param obj The object which we are retrieving the subtitle file path from.
 * @return The video's subtitle file path previously set, or NULL otherwise.
 *
 * This function returns the video's subtitle file path; if it was not
 * previously set, or on error, NULL is returned.
1007  *
1008  * @see emotion_object_video_subtitle_file_set().
1009  *
1010  * @ingroup Emotion_Video
1011  * @since 1.8
1012  */
1013 EAPI const char   *emotion_object_video_subtitle_file_get (const Evas_Object *obj);
1014 
1015 /**
 * @brief Get the number of available video channels
 *
 * @param obj The object which we are retrieving the channel count from
 * @return The number of available channels.
1020  *
1021  * @see emotion_object_video_channel_name_get()
1022  *
1023  * @ingroup Emotion_Video
1024  */
1025 EAPI int          emotion_object_video_channel_count   (const Evas_Object *obj);
1026 
1027 /**
1028  * @brief Get the name of a given video channel
1029  *
1030  * @param obj The object which we are retrieving the channel name from
1031  * @param channel the channel number
1032  * @return the channel name.
1033  *
1034  * @see emotion_object_video_channel_count()
1035  *
1036  * @ingroup Emotion_Video
1037  */
1038 EAPI const char  *emotion_object_video_channel_name_get(const Evas_Object *obj, int channel);
1039 
1040 /**
1041  * @brief Set the channel for a given video object
1042  *
 * @param obj The target object for which we are setting the channel
 * @param channel The channel number to be set.
1045  *
1046  * @ingroup Emotion_Video
1047  */
1048 EAPI void         emotion_object_video_channel_set     (Evas_Object *obj, int channel);
1049 
1050 /**
1051  * @brief Get the channel for a given video object
1052  *
 * @param obj The target object from which we are getting the channel
1054  * @return The current channel number.
1055  *
1056  * @ingroup Emotion_Video
1057  */
1058 EAPI int          emotion_object_video_channel_get     (const Evas_Object *obj);
1059 EAPI void         emotion_object_spu_mute_set          (Evas_Object *obj, Eina_Bool mute);
1060 EAPI Eina_Bool    emotion_object_spu_mute_get          (const Evas_Object *obj);
1061 EAPI int          emotion_object_spu_channel_count     (const Evas_Object *obj);
1062 EAPI const char  *emotion_object_spu_channel_name_get  (const Evas_Object *obj, int channel);
1063 EAPI void         emotion_object_spu_channel_set       (Evas_Object *obj, int channel);
1064 EAPI int          emotion_object_spu_channel_get       (const Evas_Object *obj);
1065 EAPI int          emotion_object_chapter_count         (const Evas_Object *obj);
1066 EAPI void         emotion_object_chapter_set           (Evas_Object *obj, int chapter);
1067 EAPI int          emotion_object_chapter_get           (const Evas_Object *obj);
1068 EAPI const char  *emotion_object_chapter_name_get      (const Evas_Object *obj, int chapter);
1069 EAPI void         emotion_object_eject                 (Evas_Object *obj);
1070 
1071 /**
1072  * @brief Get the dvd title from this emotion object.
1073  *
1074  * @param obj The object which the title will be retrieved from.
1075  * @return A string containing the title.
1076  *
1077  * This function is only useful when playing a DVD.
1078  *
1079  * @note Don't change or free the string returned by this function.
1080  *
1081  * @ingroup Emotion_Info
1082  */
1083 EAPI const char  *emotion_object_title_get             (const Evas_Object *obj);
1084 EAPI const char  *emotion_object_ref_file_get          (const Evas_Object *obj);
1085 EAPI int          emotion_object_ref_num_get           (const Evas_Object *obj);
1086 EAPI int          emotion_object_spu_button_count_get  (const Evas_Object *obj);
1087 EAPI int          emotion_object_spu_button_get        (const Evas_Object *obj);
1088 
1089 /**
1090  * @brief Retrieve meta information from this file being played.
1091  *
1092  * @param obj The object which the meta info will be extracted from.
1093  * @param meta The type of meta information that will be extracted.
1094  *
1095  * This function retrieves information about the file loaded. It can retrieve
1096  * the track title, artist name, album name, etc. See @ref Emotion_Meta_Info
1097  * for all the possibilities.
1098  *
 * The meta info may not be available for all types of files. This function will
 * return @c NULL if the file doesn't have meta info, or if this specific field
 * is empty.
1102  *
1103  * @note Don't change or free the string returned by this function.
1104  *
1105  * @see Emotion_Meta_Info
1106  *
1107  * @ingroup Emotion_Info
1108  */
1109 EAPI const char  *emotion_object_meta_info_get         (const Evas_Object *obj, Emotion_Meta_Info meta);
1110 
1111 /**
1112  * @brief Set the visualization to be used with this object.
1113  *
1114  * @param obj The object where the visualization will be set on.
1115  * @param visualization The type of visualization to be used.
1116  *
1117  * The @p visualization specified will be played instead of a video. This is
 * commonly used to display a visualization for audio-only files (music).
1119  *
1120  * The available visualizations are @ref Emotion_Vis.
1121  *
1122  * @see Emotion_Vis
1123  * @see emotion_object_vis_get()
1124  * @see emotion_object_vis_supported()
1125  *
1126  * @ingroup Emotion_Visualization
1127  */
1128 EAPI void         emotion_object_vis_set               (Evas_Object *obj, Emotion_Vis visualization);
1129 
1130 /**
1131  * @brief Get the type of visualization in use by this emotion object.
1132  *
1133  * @param obj The emotion object which the visualization is being retrieved
1134  * from.
1135  * @return The type of visualization in use by this object.
1136  *
1137  * The type of visualization can be set by emotion_object_vis_set().
1138  *
1139  * @see Emotion_Vis
1140  * @see emotion_object_vis_set()
1141  * @see emotion_object_vis_supported()
1142  *
1143  * @ingroup Emotion_Visualization
1144  */
1145 EAPI Emotion_Vis  emotion_object_vis_get               (const Evas_Object *obj);
1146 
1147 /**
1148  * @brief Query whether a type of visualization is supported by this object.
1149  *
 * @param obj The object which the query is being run on.
1151  * @param visualization The type of visualization that is being queried.
1152  * @return EINA_TRUE if the visualization is supported, EINA_FALSE otherwise.
1153  *
1154  * This can be used to check if a visualization is supported. e.g.: one wants to
1155  * display a list of available visualizations for a specific object.
1156  *
1157  * @see Emotion_Vis
1158  * @see emotion_object_vis_set()
1159  * @see emotion_object_vis_get()
1160  *
1161  * @ingroup Emotion_Visualization
1162  */
1163 EAPI Eina_Bool    emotion_object_vis_supported         (const Evas_Object *obj, Emotion_Vis visualization);
1164 
1165 /**
 * @brief Raise the priority of an object so it will have privileged access to hardware resources.
 *
 * @param obj The object which the query is being run on.
 * @param priority EINA_TRUE means requesting priority access to the hardware resources.
 *
 * Hardware often has a few dedicated pipelines that process video at no cost for the CPU.
 * On SoCs in particular, you mostly have one (on mobile phone SoCs) or two (on Set Top Box
 * SoCs, where Picture in Picture is needed), and most applications only have a few video
 * streams that really deserve high frame rate, high quality output. That's what this call
 * is for.
 *
 * Please note that if Emotion can't acquire a privileged hardware resource, it will fall
 * back to the non-priority path. This works on a first-come, first-served basis.
1178  *
1179  * @see emotion_object_priority_get()
1180  *
1181  * @ingroup Emotion_Ressource
1182  */
1183 EAPI void         emotion_object_priority_set(Evas_Object *obj, Eina_Bool priority);
1184 
1185 /**
1186  * @brief Get the actual priority of an object.
1187  *
 * @param obj The object which the query is being run on.
1189  * @return EINA_TRUE if the object has a priority access to the hardware.
1190  *
 * This actually returns the priority status of an object. If it failed to get
 * privileged access to the hardware, it will return EINA_FALSE.
1193  *
 * @see emotion_object_priority_set()
1195  *
1196  * @ingroup Emotion_Ressource
1197  */
1198 EAPI Eina_Bool    emotion_object_priority_get(const Evas_Object *obj);
1199 
1200 /**
1201  * @brief Change the state of an object pipeline.
1202  *
 * @param obj The object which the query is being run on.
 * @param state The new state for the object.
 *
 * Changing the state of a pipeline should help preserve the battery of an embedded device.
 * But it will only work sanely if the pipeline is not playing at the time you change its
 * state. Depending on the engine, not all states may be implemented.
1209  *
1210  * @see Emotion_Suspend
1211  * @see emotion_object_suspend_get()
1212  *
1213  * @ingroup Emotion_Ressource
1214  */
1215 EAPI void         emotion_object_suspend_set(Evas_Object *obj, Emotion_Suspend state);
1216 
1217 /**
1218  * @brief Get the current state of the pipeline
1219  *
 * @param obj The object which the query is being run on.
 * @return The current state of the pipeline.
1222  *
1223  * @see Emotion_Suspend
1224  * @see emotion_object_suspend_set()
1225  *
1226  * @ingroup Emotion_Ressource
1227  */
1228 EAPI Emotion_Suspend emotion_object_suspend_get(Evas_Object *obj);
1229 
1230 /**
1231  * @brief Load the last known position if available
1232  *
 * @param obj The object which the query is being run on.
 *
 * By using Xattr, Emotion is able, if the system permits it, to store and retrieve
 * the latest position. It should trigger some smart callback to let the application
 * know when it succeeds or fails. Every operation is fully asynchronous and not
 * linked to the actual engine used to play the video.
1239  *
1240  * @see emotion_object_last_position_save()
1241  *
1242  * @ingroup Emotion_Info
1243  */
1244 EAPI void         emotion_object_last_position_load(Evas_Object *obj);
1245 
1246 /**
 * @brief Save the latest position if possible
 *
 * @param obj The object which the query is being run on.
 *
 * By using Xattr, Emotion is able, if the system permits it, to store and retrieve
 * the latest position. It should trigger some smart callback to let the application
 * know when it succeeds or fails. Every operation is fully asynchronous and not
 * linked to the actual engine used to play the video.
1255  *
1256  * @see emotion_object_last_position_load()
1257  *
1258  * @ingroup Emotion_Info
1259  */
1260 EAPI void         emotion_object_last_position_save(Evas_Object *obj);
1261 
1262 /**
1263  * @brief Do we have a chance to play that file
1264  *
1265  * @param file A stringshared filename that we want to know if Emotion can play.
1266  *
 * This only looks at the extension of the file; it doesn't check the MIME type
 * nor whether the file is actually sane. So this is just a hint for your application.
1269  *
1270  * @see emotion_object_extension_may_play_get()
1271  */
1272 EAPI Eina_Bool    emotion_object_extension_may_play_fast_get(const char *file);
1273 
1274 /**
1275  * @brief Do we have a chance to play that file
1276  *
1277  * @param file A filename that we want to know if Emotion can play.
1278  *
 * This only looks at the extension of the file; it doesn't check the MIME type
 * nor whether the file is actually sane. So this is just a hint for your application.
1281  *
1282  * @see emotion_object_extension_may_play_fast_get()
1283  */
1284 EAPI Eina_Bool    emotion_object_extension_may_play_get(const char *file);
1285 
1286 /**
1287  * @brief Get the actual image object that contains the pixels of the video stream
1288  *
 * @param obj The object which the query is being run on.
1290  *
1291  * This function is useful when you want to get a direct access to the pixels.
1292  *
1293  * @see emotion_object_image_get()
1294  */
1295 EAPI Evas_Object *emotion_object_image_get(const Evas_Object *obj);
1296 
1297 /**
1298  * @defgroup Emotion_Webcam API available for accessing webcam
1299  * @ingroup Emotion
1300  */
1301 
1302 typedef struct _Emotion_Webcam Emotion_Webcam; /**< Webcam description */
1303 
1304 EAPI extern int EMOTION_WEBCAM_UPDATE; /**< Ecore_Event triggered when a new webcam is plugged or unplugged */
1305 EAPI extern int EMOTION_WEBCAM_ADD; /**< Ecore_Event triggered when a new webcam is plugged in @since 1.8*/
1306 EAPI extern int EMOTION_WEBCAM_DEL; /**< Ecore_Event triggered when a webcam is unplugged @since 1.8 */
1307 
1308 /**
 * @brief Get a list of active and available webcams
 *
 * @return The list of available webcams at the time of the call.
 *
 * It will return the current live list of webcams. It is updated before
 * triggering EMOTION_WEBCAM_UPDATE and should never be modified.
1315  *
1316  * @ingroup Emotion_Webcam
1317  */
1318 EAPI const Eina_List *emotion_webcams_get(void);
1319 
1320 /**
 * @brief Get the human-readable name of a webcam
 *
 * @param ew The webcam to get the name from.
 * @return The actual human-readable name.
1325  *
1326  * @ingroup Emotion_Webcam
1327  */
1328 EAPI const char      *emotion_webcam_name_get(const Emotion_Webcam *ew);
1329 
1330 /**
 * @brief Get the URI of a webcam that will be understood by emotion
 *
 * @param ew The webcam to get the URI from.
 * @return The actual URI that emotion will later understand.
1335  *
1336  * @ingroup Emotion_Webcam
1337  */
1338 EAPI const char      *emotion_webcam_device_get(const Emotion_Webcam *ew);
1339 
1340 /**
1341  * @brief Get the album artwork from file meta data tags.
1342  *
1343  * @param obj The evas object we are working with.
1344  * @param path The local path for the file.
 * @param type The type of artwork to fetch (#EMOTION_ARTWORK_IMAGE or #EMOTION_ARTWORK_PREVIEW_IMAGE).
1346  *
1347  * @ingroup Emotion_Artwork
1348  *
1349  * @since 1.19
1350  */
1351 
1352 EAPI Evas_Object     *emotion_file_meta_artwork_get(const Evas_Object *obj, const char *path, Emotion_Artwork_Info type);
1353 /**
1354  * @}
1355  */
1356 
1357 #ifdef __cplusplus
1358 }
1359 #endif
1360 
1361 #undef EAPI
1362 #define EAPI
1363 
1364 #endif
1365