1 /*
2  *			GPAC - Multimedia Framework C SDK
3  *
4  *			Authors: Jean Le Feuvre
5  *			Copyright (c) Telecom ParisTech 2000-2018
6  *					All rights reserved
7  *
8  *  This file is part of GPAC / Scene Compositor sub-project
9  *
10  *  GPAC is free software; you can redistribute it and/or modify
11  *  it under the terms of the GNU Lesser General Public License as published by
12  *  the Free Software Foundation; either version 2, or (at your option)
13  *  any later version.
14  *
15  *  GPAC is distributed in the hope that it will be useful,
16  *  but WITHOUT ANY WARRANTY; without even the implied warranty of
17  *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
18  *  GNU Lesser General Public License for more details.
19  *
20  *  You should have received a copy of the GNU Lesser General Public
21  *  License along with this library; see the file COPYING.  If not, write to
22  *  the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.
23  *
24  */
25 
26 #include <gpac/internal/compositor_dev.h>
27 #include <gpac/internal/scenegraph_dev.h>
28 #include <gpac/nodes_x3d.h>
29 #include <gpac/nodes_svg.h>
30 #include <gpac/network.h>
31 
32 
33 #ifndef GPAC_DISABLE_SVG
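/*resolves a LASeR/SVG syncReference IRI to a media object: the IRI may carry a LASeR stream ID,
point to another audio/video node (in which case its xlink:href is followed), or be a plain URL.
*post_pone is set when the referenced object is not available yet and registration should be retried later*/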
34 static GF_MediaObject *get_sync_reference(GF_Scene *scene, XMLRI *iri, u32 o_type, GF_Node *orig_ref, Bool *post_pone)
35 {
36 	MFURL mfurl;
37 	SFURL sfurl;
38 	GF_MediaObject *res;
39 	GF_Node *ref = NULL;
40 
41 	u32 stream_id = 0;
42 	if (post_pone) *post_pone = GF_FALSE;
43 
44 	if (iri->type==XMLRI_STREAMID) {
45 		stream_id = iri->lsr_stream_id;
46 	} else if (!iri->string) {
47 		return NULL;
48 	} else {
49 		if (iri->target) ref = (GF_Node *)iri->target;
50 		else if (iri->string[0]=='#') ref = gf_sg_find_node_by_name(scene->graph, iri->string+1);
51 		else ref = gf_sg_find_node_by_name(scene->graph, iri->string);
52 
53 		if (ref) {
54 			GF_FieldInfo info;
55 			/*safety check, break cyclic references*/
56 			if (ref==orig_ref) return NULL;
57 
58 			switch (ref->sgprivate->tag) {
59 			case TAG_SVG_audio:
60 				o_type = GF_MEDIA_OBJECT_AUDIO;
61 				if (gf_node_get_attribute_by_tag(ref, TAG_XLINK_ATT_href, GF_FALSE, GF_FALSE, &info)==GF_OK) {
62 					return get_sync_reference(scene, (XMLRI *)info.far_ptr, o_type, orig_ref ? orig_ref : ref, post_pone);
63 				}
64 				return NULL;
65 			case TAG_SVG_video:
66 				o_type = GF_MEDIA_OBJECT_VIDEO;
67 				if (gf_node_get_attribute_by_tag(ref, TAG_XLINK_ATT_href, GF_FALSE, GF_FALSE, &info)==GF_OK) {
68 					return get_sync_reference(scene, (XMLRI *)info.far_ptr, o_type, orig_ref ? orig_ref : ref, post_pone);
69 				}
70 				return NULL;
71 			default:
72 				return NULL;
73 			}
74 		}
75 	}
76 	*post_pone = GF_FALSE;
77 	mfurl.count = 1;
78 	mfurl.vals = &sfurl;
79 	mfurl.vals[0].OD_ID = stream_id;
80 	mfurl.vals[0].url = iri->string;
81 
82 	res = gf_scene_get_media_object(scene, &mfurl, o_type, GF_FALSE);
83 	if (!res) *post_pone = GF_TRUE;
84 	return res;
85 }
86 #endif
87 
88 
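/*registers a scene node with a media object: the expected object type is derived from the node tag
(audio, video, scene, updates, interaction...), and SVG audio/video nodes may resolve a syncReference
before the object is fetched from the parent scene*/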
89 GF_EXPORT
90 GF_MediaObject *gf_mo_register(GF_Node *node, MFURL *url, Bool lock_timelines, Bool force_new_res)
91 {
92 	u32 obj_type;
93 #ifndef GPAC_DISABLE_SVG
94 	Bool post_pone;
95 	GF_FieldInfo info;
96 #endif
97 	GF_Scene *scene;
98 	GF_MediaObject *res, *syncRef;
99 	GF_SceneGraph *sg = gf_node_get_graph(node);
100 	if (!sg) return NULL;
101 	scene = (GF_Scene*)gf_sg_get_private(sg);
102 	if (!scene) return NULL;
103 
104 	syncRef = NULL;
105 
106 	/*keep track of the kind of object expected if URL is not using OD scheme*/
107 	switch (gf_node_get_tag(node)) {
108 #ifndef GPAC_DISABLE_VRML
109 	/*MPEG-4 / VRML / X3D only*/
110 	case TAG_MPEG4_AudioClip:
111 	case TAG_MPEG4_AudioSource:
112 #ifndef GPAC_DISABLE_X3D
113 	case TAG_X3D_AudioClip:
114 #endif
115 		obj_type = GF_MEDIA_OBJECT_AUDIO;
116 		break;
117 	case TAG_MPEG4_SBVCAnimation:
118 	case TAG_MPEG4_AnimationStream:
119 		obj_type = GF_MEDIA_OBJECT_UPDATES;
120 		break;
121 	case TAG_MPEG4_BitWrapper:
122 		obj_type = GF_MEDIA_OBJECT_SCENE;
123 		break;
124 	case TAG_MPEG4_InputSensor:
125 		obj_type = GF_MEDIA_OBJECT_INTERACT;
126 		break;
127 	case TAG_MPEG4_Background2D:
128 	case TAG_MPEG4_Background:
129 	case TAG_MPEG4_ImageTexture:
130 	case TAG_MPEG4_CacheTexture:
131 	case TAG_MPEG4_MovieTexture:
132 #ifndef GPAC_DISABLE_X3D
133 	case TAG_X3D_Background:
134 	case TAG_X3D_ImageTexture:
135 	case TAG_X3D_MovieTexture:
136 #endif
137 		obj_type = GF_MEDIA_OBJECT_VIDEO;
138 		break;
139 	case TAG_MPEG4_Inline:
140 #ifndef GPAC_DISABLE_X3D
141 	case TAG_X3D_Inline:
142 #endif
143 		obj_type = GF_MEDIA_OBJECT_SCENE;
144 		break;
145 #endif /*GPAC_DISABLE_VRML*/
146 
147 		/*SVG*/
148 #ifndef GPAC_DISABLE_SVG
149 	case TAG_SVG_audio:
150 		obj_type = GF_MEDIA_OBJECT_AUDIO;
151 		if (gf_node_get_attribute_by_tag(node, TAG_SVG_ATT_syncReference, GF_FALSE, GF_FALSE, &info)==GF_OK) {
152 			syncRef = get_sync_reference(scene, (XMLRI *)info.far_ptr, GF_MEDIA_OBJECT_UNDEF, node, &post_pone);
153 			/*syncRef is specified but doesn't exist yet, post-pone*/
154 			if (post_pone) return NULL;
155 		}
156 		break;
157 	case TAG_SVG_video:
158 		obj_type = GF_MEDIA_OBJECT_VIDEO;
159 		if (gf_node_get_attribute_by_tag(node, TAG_SVG_ATT_syncReference, GF_FALSE, GF_FALSE, &info)==GF_OK) {
160 			syncRef = get_sync_reference(scene, (XMLRI *)info.far_ptr, GF_MEDIA_OBJECT_UNDEF, node, &post_pone);
161 			/*syncRef is specified but doesn't exist yet, post-pone*/
162 			if (post_pone) return NULL;
163 		}
164 		break;
165 	case TAG_SVG_image:
166 		obj_type = GF_MEDIA_OBJECT_VIDEO;
167 		break;
168 	case TAG_SVG_foreignObject:
169 	case TAG_SVG_animation:
170 		obj_type = GF_MEDIA_OBJECT_SCENE;
171 		break;
172 	case TAG_LSR_updates:
173 		obj_type = GF_MEDIA_OBJECT_UPDATES;
174 		break;
175 #endif
176 
177 	default:
178 		obj_type = GF_MEDIA_OBJECT_UNDEF;
179 		break;
180 	}
181 
182 	/*move to primary resource handler*/
183 	while (scene->secondary_resource && scene->root_od->parentscene)
184 		scene = scene->root_od->parentscene;
185 
186 	res = gf_scene_get_media_object_ex(scene, url, obj_type, lock_timelines, syncRef, force_new_res, node);
187 	return res;
188 }
189 
190 GF_EXPORT
191 void gf_mo_unregister(GF_Node *node, GF_MediaObject *mo)
192 {
193 	if (mo && node) {
194 		gf_mo_event_target_remove_by_node(mo, node);
195 	}
196 }
197 
198 GF_MediaObject *gf_mo_new()
199 {
200 	GF_MediaObject *mo;
201 	mo = (GF_MediaObject *) gf_malloc(sizeof(GF_MediaObject));
202 	memset(mo, 0, sizeof(GF_MediaObject));
203 	mo->speed = FIX_ONE;
204 	mo->URLs.count = 0;
205 	mo->URLs.vals = NULL;
206 	mo->evt_targets = gf_list_new();
207 	return mo;
208 }
209 
210 GF_EXPORT
211 Bool gf_mo_get_visual_info(GF_MediaObject *mo, u32 *width, u32 *height, u32 *stride, u32 *pixel_ar, u32 *pixelFormat, Bool *is_flipped)
212 {
213 	if ((mo->type != GF_MEDIA_OBJECT_VIDEO) && (mo->type!=GF_MEDIA_OBJECT_TEXT)) return GF_FALSE;
214 
215 	if (mo->config_changed) {
216 		gf_mo_update_caps(mo);
217 	}
218 	if (width) *width = mo->width;
219 	if (height) *height = mo->height;
220 	if (stride) *stride = mo->stride;
221 	if (pixel_ar) *pixel_ar = mo->pixel_ar;
222 	if (pixelFormat) *pixelFormat = mo->pixelformat;
223 	if (is_flipped) *is_flipped = mo->is_flipped;
224 	return GF_TRUE;
225 }
226 
227 GF_EXPORT
228 void gf_mo_get_nb_views(GF_MediaObject *mo, u32 *nb_views)
229 {
230 	if (mo) *nb_views = mo->nb_views;
231 }
232 
233 GF_EXPORT
234 
235 void gf_mo_get_nb_layers(GF_MediaObject *mo, u32 *nb_layers)
236 {
237 	if (mo) *nb_layers = mo->nb_layers;
238 }
239 
240 GF_EXPORT
241 Bool gf_mo_get_audio_info(GF_MediaObject *mo, u32 *sample_rate, u32 *bits_per_sample, u32 *num_channels, u64 *channel_config, Bool *forced_layout)
242 {
243 	if (!mo->odm || (mo->type != GF_MEDIA_OBJECT_AUDIO)) return GF_FALSE;
244 
245 	if (mo->odm->pid && (!mo->sample_rate || !mo->num_channels))
246 		gf_filter_pid_get_packet(mo->odm->pid);
247 
248 	if (mo->config_changed) {
249 		gf_mo_update_caps(mo);
250 	}
251 
252 	if (sample_rate) *sample_rate = mo->sample_rate;
253 	if (bits_per_sample) *bits_per_sample = mo->afmt;
254 	if (num_channels) *num_channels = mo->num_channels;
255 	if (channel_config) *channel_config = mo->channel_config;
256 	if (forced_layout) *forced_layout = GF_FALSE;
257 
258 	if (mo->odm->ambi_ch_id) {
259 		if (mo->num_channels>1) {
260 			GF_LOG(GF_LOG_WARNING, GF_LOG_MEDIA, ("[ODM%d]: tagged as ambisonic channel %d but has %d channels, ignoring ambisonic tag\n",  mo->odm->ID, mo->odm->ambi_ch_id, mo->num_channels ));
261 		} else {
262 			if (num_channels) *num_channels = 1;
263 			if (channel_config) *channel_config = (u64) ( 1 << (mo->odm->ambi_ch_id - 1) );
264 			if (forced_layout) *forced_layout = GF_TRUE;
265 
266 		}
267 	}
268 
269 	return GF_TRUE;
270 }
271 
272 
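/*refreshes the cached media object capabilities from the PID properties: size, stride, pixel format
and SAR for visual streams, sample rate, channel count/layout and sample format for audio streams.
Also tracks SRD (Spatial Relationship Description) info and regenerates dynamic scenes when the SRD type changes*/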
273 void gf_mo_update_caps(GF_MediaObject *mo)
274 {
275 	const GF_PropertyValue *v, *v2;
276 	if (!mo->odm || !mo->odm->pid) return;
277 
278 	mo->planar_audio = GF_FALSE;
279 
280 	if (mo->odm->type==GF_STREAM_VISUAL) {
281 		v = gf_filter_pid_get_property(mo->odm->pid, GF_PROP_PID_WIDTH);
282 		if (v) mo->width = v->value.uint;
283 		v = gf_filter_pid_get_property(mo->odm->pid, GF_PROP_PID_HEIGHT);
284 		if (v) mo->height = v->value.uint;
285 		v = gf_filter_pid_get_property(mo->odm->pid, GF_PROP_PID_STRIDE);
286 		if (v) mo->stride = v->value.uint;
287 		v = gf_filter_pid_get_property(mo->odm->pid, GF_PROP_PID_PIXFMT);
288 		if (v) mo->pixelformat = v->value.uint;
289 		v = gf_filter_pid_get_property(mo->odm->pid, GF_PROP_PID_SAR);
290 		if (v) mo->pixel_ar = (v->value.frac.num) << 16 | (v->value.frac.den);
291 
292 		v = gf_filter_pid_get_property(mo->odm->pid, GF_PROP_PID_SRD);
293 		v2 = gf_filter_pid_get_property(mo->odm->pid, GF_PROP_PID_SRD_REF);
294 		if (v && v->value.vec4i.w && v->value.vec4i.z) {
295 			mo->srd_x = v->value.vec4i.x;
296 			mo->srd_y = v->value.vec4i.y;
297 			mo->srd_w = v->value.vec4i.z;
298 			mo->srd_h = v->value.vec4i.w;
299 			if (v2) {
300 				mo->srd_full_w = v2->value.vec2i.x;
301 				mo->srd_full_h = v2->value.vec2i.y;
302 			}
303 
304 			if (mo->odm->parentscene->is_dynamic_scene) {
305 				u32 old_type = mo->odm->parentscene->srd_type;
306 				if ((mo->srd_w == mo->srd_full_w) && (mo->srd_h == mo->srd_full_h)) {
307 					mo->odm->parentscene->srd_type = 2;
308 				} else if (!mo->odm->parentscene->srd_type) {
309 					mo->odm->parentscene->srd_type = 1;
310 				}
311 				if (old_type != mo->odm->parentscene->srd_type) {
312 					//reset scene graph but prevent object stop/start
313 					u32 i, count = gf_list_count(mo->odm->parentscene->scene_objects);
314 					for (i=0; i<count; i++) {
315 						GF_MediaObject *an_mo = gf_list_get(mo->odm->parentscene->scene_objects, i);
316 						an_mo->num_open++;
317 					}
318 					gf_sg_reset(mo->odm->parentscene->graph);
319 					for (i=0; i<count; i++) {
320 						GF_MediaObject *an_mo = gf_list_get(mo->odm->parentscene->scene_objects, i);
321 						an_mo->num_open--;
322 					}
323 					gf_scene_regenerate(mo->odm->parentscene);
324 				}
325 			}
326 		}
327 		// SRD object with no size but global scene size: HEVC tile-based object
328 		else if (v2 && v2->value.vec2i.x && v2->value.vec2i.y) {
329 			if (mo->odm->parentscene->is_dynamic_scene && !mo->odm->parentscene->srd_type) {
330 				mo->odm->parentscene->is_tiled_srd = GF_TRUE;
331 				mo->srd_full_w = v2->value.vec2i.x;
332 				mo->srd_full_h = v2->value.vec2i.y;
333 			}
334 		}
335 	} else if (mo->odm->type==GF_STREAM_AUDIO) {
336 		v = gf_filter_pid_get_property(mo->odm->pid, GF_PROP_PID_SAMPLE_RATE);
337 		if (v) mo->sample_rate = v->value.uint;
338 		v = gf_filter_pid_get_property(mo->odm->pid, GF_PROP_PID_NUM_CHANNELS);
339 		if (v) mo->num_channels = v->value.uint;
340 		v = gf_filter_pid_get_property(mo->odm->pid, GF_PROP_PID_CHANNEL_LAYOUT);
341 		if (v) mo->channel_config = v->value.longuint;
342 		v = gf_filter_pid_get_property(mo->odm->pid, GF_PROP_PID_AUDIO_FORMAT);
343 		if (v) mo->afmt = v->value.uint;
344 		else mo->afmt = GF_AUDIO_FMT_S16;
345 
346 		mo->bytes_per_sec = gf_audio_fmt_bit_depth(mo->afmt) * mo->num_channels * mo->sample_rate / 8;
347 		mo->planar_audio = gf_audio_fmt_is_planar(mo->afmt);
348 	} else if (mo->odm->type==GF_STREAM_OD) {
349 		//nothing to do
350 	} else if (mo->odm->type==GF_STREAM_OCR) {
351 		//nothing to do
352 	} else if (mo->odm->type==GF_STREAM_SCENE) {
353 		//nothing to do
354 	} else if (mo->odm->type==GF_STREAM_TEXT) {
355 		//nothing to do
356 	} else {
357 		GF_LOG(GF_LOG_WARNING, GF_LOG_MEDIA, ("Unknown scene object type %d\n", mo->odm->type));
358 	}
359 }
360 
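/*applies the object's configured delay (in the same timescale as the timestamp) then rescales the
timestamp to milliseconds; *discard is set when a negative delay would move the timestamp below 0,
in which case the packet should be dropped*/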
361 static u64 convert_ts_to_ms(GF_MediaObject *mo, u64 ts, u32 timescale, Bool *discard)
362 {
363 	if (mo->odm->delay) {
364 		if (mo->odm->delay >= 0) {
365 			ts += mo->odm->delay;
366 		} else if (ts < (u64) -mo->odm->delay) {
367 			*discard = GF_TRUE;
368 			return 0;
369 		} else {
370 			ts -= -mo->odm->delay;
371 		}
372 	}
373 	ts *= 1000;
374 	ts /= timescale;
375 	return ts;
376 }
377 
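/*fetches the current frame (or audio chunk) for the object, resynchronizing against the object clock:
late packets may be dropped depending on the resync mode, EOS is signalled once the last frame has been
presented, and timing info (ms until presentation / until next frame) is updated for the compositor*/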
378 GF_EXPORT
379 u8 *gf_mo_fetch_data(GF_MediaObject *mo, GF_MOFetchMode resync, u32 upload_time_ms, Bool *eos, u32 *timestamp, u32 *size, s32 *ms_until_pres, s32 *ms_until_next, GF_FilterFrameInterface **outFrame, u32 *planar_size)
380 {
381 	Bool discard=GF_FALSE;
382 	u32 force_decode_mode = 0;
383 	u32 obj_time, obj_time_orig;
384 	s64 diff;
385 	Bool skip_resync;
386 	u32 timescale=0;
387 	u64 pck_ts=0, next_ts=0;
388 	u32 retry_pull;
389 	Bool is_first = GF_FALSE;
390 	Bool move_to_next_only = GF_FALSE;
391 
392 	*eos = GF_FALSE;
393 	*timestamp = mo->timestamp;
394 	*size = mo->framesize;
395 	if (ms_until_pres) *ms_until_pres = mo->ms_until_pres;
396 	if (ms_until_next) *ms_until_next = mo->ms_until_next;
397 	if (outFrame) *outFrame = NULL;
398 
399 	if (!mo->odm || !mo->odm->pid)
400 		return NULL;
401 
402 	/*if frame locked return it*/
403 	if (mo->nb_fetch) {
404 		GF_LOG(GF_LOG_DEBUG, GF_LOG_MEDIA, ("[ODM%d] CU already fetched, returning\n", mo->odm->ID));
405 		mo->nb_fetch ++;
406 		if (planar_size) *planar_size = mo->framesize / mo->num_channels;
407 		return mo->frame;
408 	}
409 
410 	if (mo->pck && mo->frame_ifce && (mo->frame_ifce->flags & GF_FRAME_IFCE_BLOCKING) ) {
411 		gf_filter_pck_unref(mo->pck);
412 		mo->pck = NULL;
413 	}
414 
415 	if ( gf_odm_check_buffering(mo->odm, NULL) ) {
416 		//if buffering: when the first frame was already fetched and we are still buffering, do not fetch a new one
417 		if (mo->first_frame_fetched && mo->odm->nb_buffering)
418 			return NULL;
419 	}
420 
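	/*fetch the next packet from the PID: on EOS propagate end-of-stream to the ODM, otherwise keep a
	reference to the packet and remove it from the PID queue*/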
421 retry:
422 	discard = GF_FALSE;
423 	if (!mo->pck) {
424 		mo->pck = gf_filter_pid_get_packet(mo->odm->pid);
425 		if (!mo->pck) {
426 			if (gf_filter_pid_is_eos(mo->odm->pid)) {
427 				if (!mo->is_eos) {
428 					mo->is_eos = GF_TRUE;
429 					mediasensor_update_timing(mo->odm, GF_TRUE);
430 					gf_odm_on_eos(mo->odm, mo->odm->pid);
431 					gf_odm_signal_eos_reached(mo->odm);
432 				}
433 			} else {
434 				mo->odm->ck->has_seen_eos = GF_FALSE;
435 			}
436 			*eos = mo->is_eos;
437 			return NULL;
438 		} else {
439 			gf_filter_pck_ref(&mo->pck);
440 			gf_filter_pid_drop_packet(mo->odm->pid);
441 		}
442 		is_first = GF_TRUE;
443 	}
444 	assert(mo->pck);
445 	mo->first_frame_fetched = GF_TRUE;
446 	mo->is_eos = GF_FALSE;
447 
448 
449 	/*not running and no resync (ie audio)*/
450 	if (!gf_clock_is_started(mo->odm->ck)) {
451 		if (!resync) {
452 			GF_LOG(GF_LOG_DEBUG, GF_LOG_MEDIA, ("[ODM%d] CB not running, returning\n", mo->odm->ID));
453 			return NULL;
454 		} else if (mo->odm->ck->nb_buffering && mo->odm->type==GF_STREAM_AUDIO) {
455 			return NULL;
456 		}
457 	}
458 
459 	/*data = */gf_filter_pck_get_data(mo->pck, size);
460 	timescale = gf_filter_pck_get_timescale(mo->pck);
461 
462 	pck_ts = convert_ts_to_ms(mo, gf_filter_pck_get_cts(mo->pck), timescale, &discard);
463 	if (discard) {
464 		gf_filter_pck_unref(mo->pck);
465 		mo->pck = NULL;
466 		goto retry;
467 	}
468 
469 	if (resync==GF_MO_FETCH_PAUSED)
470 		resync=GF_MO_FETCH;
471 
472 	retry_pull = 1;
473 	/*fast forward, bench mode with composition memory: force one decode if no data is available*/
474 	if (! *eos && ((mo->odm->ck->speed > FIX_ONE) || mo->odm->parentscene->compositor->bench_mode || (mo->odm->type==GF_STREAM_AUDIO) ) ) {
475 		retry_pull = 10;
476 		force_decode_mode=1;
477 	}
478 
479 	while (retry_pull) {
480 		retry_pull--;
481 		next_ts = 0;
482 		if (gf_filter_pid_get_first_packet_cts(mo->odm->pid, &next_ts) ) {
483 			next_ts = 1 + convert_ts_to_ms(mo, next_ts, timescale, &discard);
484 			break;
485 		} else {
486 			if (gf_filter_pid_is_eos(mo->odm->pid)) {
487 				if (!mo->is_eos) {
488 					mo->is_eos = GF_TRUE;
489 					*eos = mo->is_eos;
490 					mediasensor_update_timing(mo->odm, GF_TRUE);
491 					gf_odm_on_eos(mo->odm, mo->odm->pid);
492 					force_decode_mode=0;
493 				}
494 				break;
495 			}
496 		}
497 		*eos = mo->is_eos;
498 		if (!retry_pull) break;
499 
500 		gf_filter_pid_try_pull(mo->odm->pid);
501 	}
502 	if (!retry_pull && (force_decode_mode==1)) {
503 		GF_LOG(GF_LOG_INFO, GF_LOG_MEDIA, ("[ODM%d] At %d could not force a pull from pid - POTENTIAL blank frame after TS %u\n", mo->odm->ID, gf_clock_time(mo->odm->ck), mo->timestamp));
504 	}
505 
506 	/*resync*/
507 	obj_time = obj_time_orig = gf_clock_time(mo->odm->ck);
508 
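	/*pending clock discontinuity: compare the packet timestamp against both the old and the new timebase,
	keep using the old timebase while the packet appears to predate the discontinuity, and clear the flag
	(except for audio) once packets clearly belong to the new timebase*/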
509 	if (mo->odm->prev_clock_at_discontinuity_plus_one) {
510 		s32 diff_new, diff_old, diff_pck_old, diff_pck_new;
511 		s32 old_timebase_time = (s32) obj_time;
512 		old_timebase_time -= (s32) mo->odm->ck->init_timestamp;
513 		old_timebase_time += (s32) mo->odm->prev_clock_at_discontinuity_plus_one;
514 		diff_new = (s32) obj_time;
515 		diff_new -= mo->last_fetch_time;
516 		if (diff_new < 0) diff_new = -diff_new;
517 		diff_old = (s32) old_timebase_time;
518 		diff_old -= mo->last_fetch_time;
519 		if (diff_old < 0) diff_old = -diff_old;
520 
521 		diff_pck_old = (s32) pck_ts - (s32) old_timebase_time;
522 		diff_pck_new = (s32) pck_ts - (s32) obj_time;
523 		if (ABS(diff_pck_old) > ABS(diff_pck_new)) {
524 			//don't reset discontinuity flag for audio
525 			if (resync>GF_MO_FETCH) {
526 				GF_LOG(GF_LOG_INFO, GF_LOG_SYNC, ("[ODM%d] end of clock discontinuity: diff pck TS to old clock %d to new clock %d\n", mo->odm->ID, diff_pck_old, diff_pck_new));
527 				mo->odm->prev_clock_at_discontinuity_plus_one = 0;
528 			}
529 		} else if (diff_old < diff_new) {
530 			GF_LOG(GF_LOG_INFO, GF_LOG_SYNC, ("[ODM%d] in clock discontinuity: time since fetch old clock %d new clock %d\n", mo->odm->ID, diff_old, diff_new));
531 
532 			obj_time = old_timebase_time;
533 		}
534 	}
535 
536 	skip_resync = mo->odm->parentscene->compositor->bench_mode ? GF_TRUE : GF_FALSE;
537 	//no drop mode, only for speed = 1: all frames are presented, we discard the current output only if already presented and next frame time is mature
538 	if ((mo->odm->ck->speed == FIX_ONE)
539 		&& (mo->type==GF_MEDIA_OBJECT_VIDEO)
540 		//if no buffer playout we are in low-latency configuration, don't skip resync
541 		&& mo->odm->buffer_playout_us
542 	) {
543 		assert(mo->odm->parentscene);
544 		if (! mo->odm->parentscene->compositor->drop) {
545 			if (mo->odm->parentscene->compositor->force_late_frame_draw) {
546 				mo->flags |= GF_MO_IN_RESYNC;
547 			}
548 			else if (mo->flags & GF_MO_IN_RESYNC) {
549 				if (next_ts >= 1 + obj_time) {
550 					skip_resync = GF_TRUE;
551 					mo->flags &= ~GF_MO_IN_RESYNC;
552 				}
553 			}
554 			else if (next_ts && (next_ts < pck_ts) ) {
555 				skip_resync = GF_TRUE;
556 			}
557 			//if the next AU is at most 300 ms from the current clock use no drop mode
558 			else if (next_ts + 300 >= obj_time) {
559 				skip_resync = GF_TRUE;
560 			} else if (next_ts) {
561 				GF_LOG(GF_LOG_DEBUG, GF_LOG_SYNC, ("[ODM%d] At %u frame TS %u next frame TS %d too late in no-drop mode, enabling drop - resync mode %d\n", mo->odm->ID, obj_time, pck_ts, next_ts, resync));
562 				mo->flags |= GF_MO_IN_RESYNC;
563 			}
564 		}
565 	}
566 
567 	if (skip_resync) {
568 		resync=GF_MO_FETCH; //prevent resync code below
569 		if (mo->odm->parentscene->compositor->use_step_mode) upload_time_ms=0;
570 
571 		//we are in no resync mode, drop current frame once played and object time just matured
572 		//do it only if clock is started or if compositor step mode is set
573 		//the time threshold for fetching is given by the caller
574 		if ( (gf_clock_is_started(mo->odm->ck) || mo->odm->parentscene->compositor->use_step_mode)
575 			&& (mo->timestamp==pck_ts) && next_ts && ( (next_ts <= 1 + obj_time + upload_time_ms) || (next_ts <= 1 + obj_time_orig + upload_time_ms) ) )
576 		{
577 			//drop current and go to next - we use the same loop as regular resync below
578 			resync = GF_MO_FETCH_RESYNC;
579 			move_to_next_only = GF_TRUE;
580 			GF_LOG(GF_LOG_DEBUG, GF_LOG_MEDIA, ("[ODM%d] Switching to CU CTS %u (next %d) now %u\n", mo->odm->ID, pck_ts, next_ts, obj_time));
581 		}
582 	}
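	/*resync/drop loop: while the current packet is behind the object clock and a newer frame is already
	decoded, release it and move to the next one, re-evaluating any pending clock discontinuity on the way*/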
583 	if (resync!=GF_MO_FETCH) {
584 		u32 nb_dropped = 0;
585 		while (next_ts) {
586 			if (!move_to_next_only) {
587 				if (mo->odm->ck->speed > 0 ? pck_ts >= obj_time : pck_ts <= obj_time )
588 					break;
589 
590 				GF_LOG(GF_LOG_DEBUG, GF_LOG_MEDIA, ("[ODM%d] Try to drop frame TS %u next frame TS %u obj time %u\n", mo->odm->ID, pck_ts, next_ts, obj_time));
591 
592 				//nothing ready yet
593 				if ( gf_filter_pid_first_packet_is_empty(mo->odm->pid) ) {
594 					break;
595 				}
596 
597 				/*figure out closest time*/
598 				if (mo->odm->ck->speed > 0 ? next_ts > obj_time : next_ts < obj_time) {
599 					*eos = GF_FALSE;
600 					break;
601 				}
602 
603 				nb_dropped ++;
604 				if (nb_dropped>=1) {
605 					GF_LOG(GF_LOG_DEBUG, GF_LOG_MEDIA, ("[ODM%d] At OTB %u dropped frame TS %u\n", mo->odm->ID, obj_time, pck_ts));
606 
607 					mo->odm->nb_dropped++;
608 				}
609 			}
610 
611 			//delete our packet
612 			gf_filter_pck_unref(mo->pck);
613 			mo->pck = gf_filter_pid_get_packet(mo->odm->pid);
614 			gf_filter_pck_ref( &mo->pck);
615 
616 			pck_ts = convert_ts_to_ms(mo, gf_filter_pck_get_cts(mo->pck), timescale, &discard);
617 			//drop next packet from pid
618 			gf_filter_pid_drop_packet(mo->odm->pid);
619 
620 			if (obj_time != obj_time_orig) {
621 				s32 diff_pck_old = (s32) pck_ts - (s32) obj_time;
622 				s32 diff_pck_new = (s32) pck_ts - (s32) obj_time_orig;
623 
624 				if (ABS(diff_pck_old) > ABS(diff_pck_new)) {
625 					GF_LOG(GF_LOG_INFO, GF_LOG_SYNC, ("[ODM%d] end of clock discontinuity, moving from old time base %d to new %d\n", mo->odm->ID, obj_time, obj_time_orig));
626 					obj_time = obj_time_orig;
627 					mo->odm->prev_clock_at_discontinuity_plus_one = 0;
628 				}
629 			}
630 
631 			next_ts = 0;
632 			if (gf_filter_pid_get_first_packet_cts(mo->odm->pid, &next_ts)) {
633 				next_ts = convert_ts_to_ms(mo, next_ts, timescale, &discard);
634 			}
635 			if (move_to_next_only)
636 				break;
637 		}
638 	}
639 
640 
641 	mo->frame = (char *) gf_filter_pck_get_data(mo->pck, &mo->size);
642 	mo->framesize = mo->size - mo->RenderedLength;
643 
644 	//planar mode: RenderedLength covers all channels, so move the frame pointer
645 	//to the first non-consumed sample = RenderedLength/nb_channels
646 	if (mo->planar_audio) {
647 		mo->frame += mo->RenderedLength / mo->num_channels;
648 	} else {
649 		mo->frame += mo->RenderedLength;
650 	}
651 	mo->frame_ifce = gf_filter_pck_get_frame_interface(mo->pck);
652 //	mo->media_frame = CU->frame;
653 
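	/*compute, scaled by playback speed, the delay until this frame is due and until the next frame;
	the latter is clamped to [0, 500] ms*/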
654 	diff = (s32) ( (mo->speed >= 0) ? ( (s64) pck_ts - (s64) obj_time) : ( (s64) obj_time - (s64) pck_ts) );
655 	mo->ms_until_pres = FIX2INT(diff * mo->speed);
656 
657 	if (mo->is_eos) {
658 		diff = 1000*gf_filter_pck_get_duration(mo->pck) / timescale;
659 		if (!diff) diff = 100;
660 	} else {
661 		diff = next_ts ? next_ts : (pck_ts + 1000*gf_filter_pck_get_duration(mo->pck) / timescale);
662 		diff = (s32) ( (mo->speed >= 0) ? ( (s64) diff - (s64) obj_time) : ( (s64) obj_time - (s64) diff) );
663 
664 		mo->odm->ck->has_seen_eos = GF_FALSE;
665 	}
666 	mo->ms_until_next = FIX2INT(diff * mo->speed);
667 	if (mo->ms_until_next < 0)
668 		mo->ms_until_next = 0;
669 
670 	//safeguard: never report more than 500 ms until the next frame
671 	if (mo->ms_until_next>500)
672 		mo->ms_until_next=500;
673 
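	/*first time this frame is returned: update frame duration, cached timestamp and media time, log
	timing, track sender/receiver NTP to estimate network drift, and resync the clock when ntpsync is set*/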
674 	if ((mo->timestamp != pck_ts) || is_first) {
675 		const GF_PropertyValue *v;
676 		u32 media_time;
677 		u64 dur = gf_filter_pck_get_duration(mo->pck);
678 		dur *= 1000;
679 		dur /= timescale;
680 		mo->frame_dur = (u32) dur;
681 		mo->last_fetch_time = obj_time;
682 
683 		mo->timestamp = (u32) pck_ts;
684 		media_time = gf_clock_to_media_time(mo->odm->ck, mo->timestamp);
685 
686 		if (mo->odm->media_current_time <= media_time)
687 			mo->odm->media_current_time = media_time;
688 
689 		if (mo->odm->parentscene->is_dynamic_scene) {
690 			GF_Scene *s = mo->odm->parentscene;
691 			while (s && s->root_od->addon) {
692 				s = s->root_od->parentscene;
693 			}
694 			if (s && (s->root_od->media_current_time < mo->odm->media_current_time) )
695 				s->root_od->media_current_time = mo->odm->media_current_time;
696 		}
697 
698 #ifndef GPAC_DISABLE_VRML
699 		if (! *eos )
700 			mediasensor_update_timing(mo->odm, GF_FALSE);
701 #endif
702 
703 		GF_LOG(GF_LOG_DEBUG, GF_LOG_MEDIA, ("[ODM%d (%s)] At OTB %u fetch frame TS %u size %d (previous TS %u) - %d unit in CB - UTC "LLU" ms - %d ms until CTS is due - %d ms until next frame\n", mo->odm->ID, mo->odm->scene_ns->url, gf_clock_time(mo->odm->ck), pck_ts, mo->framesize, mo->timestamp, gf_filter_pid_get_packet_count(mo->odm->pid), gf_net_get_utc(), mo->ms_until_pres, mo->ms_until_next ));
704 
705 		v = gf_filter_pck_get_property(mo->pck, GF_PROP_PCK_SENDER_NTP);
706 		if (v) {
707 			GF_PropertyEntry *pe = NULL;
708 
709 			mo->odm->last_drawn_frame_ntp_sender = v->value.longuint;
710 
711 			v = gf_filter_pck_get_property(mo->pck, GF_PROP_PCK_RECEIVER_NTP);
712 			if (v) {
713 				mo->odm->last_drawn_frame_ntp_receive = v->value.longuint;
714 			}
715 
716 			mo->odm->last_drawn_frame_ntp_diff = gf_net_get_ntp_diff_ms(mo->odm->last_drawn_frame_ntp_sender);
717 			v = gf_filter_pid_get_info_str(mo->odm->pid, "ntpdiff", &pe);
718 			if (v) {
719 				mo->odm->last_drawn_frame_ntp_diff -= v->value.sint;
720 			}
721 			gf_filter_release_property(pe);
722 			GF_LOG(GF_LOG_INFO, GF_LOG_MEDIA, ("[ODM%d (%s)] Frame TS %u NTP diff with sender %d ms\n", mo->odm->ID, mo->odm->scene_ns->url, pck_ts, mo->odm->last_drawn_frame_ntp_diff));
723 
724 			if (mo->odm->parentscene->compositor->ntpsync
725 				&& (mo->odm->last_drawn_frame_ntp_diff > (s32) mo->odm->parentscene->compositor->ntpsync)
726 //				&& first_ntp
727 			) {
728 //					first_ntp = GF_FALSE;
729 					u32 ntp_diff = mo->odm->last_drawn_frame_ntp_diff - mo->odm->parentscene->compositor->ntpsync;
730 					mo->odm->ck->init_timestamp += ntp_diff;
731 					mo->flags |= GF_MO_IN_RESYNC;
732 			}
733 		}
734 
735 		/*signal EOS after rendering last frame, not while rendering it*/
736 		*eos = GF_FALSE;
737 
738 	} else if (*eos) {
739 		//already rendered the last frame, consider we no longer have pending late frame on this stream
740 		mo->ms_until_pres = 0;
741 	} else {
742 //		GF_LOG(GF_LOG_DEBUG, GF_LOG_MEDIA, ("[ODM%d (%s)] At OTB %u same frame fetch TS %u\n", mo->odm->ID, mo->odm->net_service->url, obj_time, CU->TS ));
743 
744 		//if paused force a high value for next frame
745 		if (!gf_clock_is_started(mo->odm->ck)) {
746 			mo->ms_until_next = 100;
747 		}
748 	}
749 
750 	/*also adjust CU time based on consumed bytes in input, since some codecs output very large audio chunks*/
751 	if (mo->bytes_per_sec) mo->timestamp += mo->RenderedLength * 1000 / mo->bytes_per_sec;
752 
753 	if (mo->odm->parentscene->compositor->bench_mode) {
754 		mo->ms_until_pres = -1;
755 		mo->ms_until_next = 1;
756 	}
757 
758 	//TODO fixme, hack for clock signaling
759 	if (!mo->frame && !mo->frame_ifce)
760 		return NULL;
761 
762 	mo->nb_fetch ++;
763 	*timestamp = mo->timestamp;
764 	*size = mo->framesize;
765 	if (ms_until_pres) *ms_until_pres = mo->ms_until_pres;
766 	if (ms_until_next) *ms_until_next = mo->ms_until_next;
767 	if (outFrame) *outFrame = mo->frame_ifce;
768 	if (planar_size) *planar_size = mo->framesize / mo->num_channels;
769 
770 //	gf_odm_service_media_event(mo->odm, GF_EVENT_MEDIA_TIME_UPDATE);
771 
772 	if (mo->frame_ifce)
773 		return (char *) mo->frame_ifce;
774 
775 	return mo->frame;
776 }
777 
778 
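/*releases a frame previously fetched. As interpreted below, drop_mode < 0 means keep the last frame
(only honored when a single node uses the object), 0 means keep the current packet and > 0 means discard it;
mode 3 is remapped to keep, otherwise packets holding blocking references are always discarded*/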
779 GF_EXPORT
780 void gf_mo_release_data(GF_MediaObject *mo, u32 nb_bytes, s32 drop_mode)
781 {
782 	if (!mo || !mo->odm || !mo->odm->pid || !mo->nb_fetch) return;
783 
784 	mo->nb_fetch--;
785 	if (mo->nb_fetch) {
786 		return;
787 	}
788 
789 	if (nb_bytes==0xFFFFFFFF) {
790 		mo->RenderedLength = mo->size;
791 	} else {
792 		assert(mo->RenderedLength + nb_bytes <= mo->size);
793 		mo->RenderedLength += nb_bytes;
794 	}
795 
796 	if (drop_mode<0) {
797 		/*only allow for explicit last frame keeping if only one node is using the resource
798 			otherwise this would block the composition memory*/
799 		if (mo->num_open>1) {
800 			drop_mode=0;
801 		} else {
802 			return;
803 		}
804 	}
805 
806 	/*discard frame*/
807 	if (mo->RenderedLength >= mo->size) {
808 		mo->RenderedLength = 0;
809 
810 		if (!mo->pck) return;
811 
812 		if (drop_mode==3)
813 			drop_mode=0;
814 		else if (gf_filter_pck_is_blocking_ref(mo->pck) )
815 			drop_mode = 1;
816 
817 		if (drop_mode) {
818 			gf_filter_pck_unref(mo->pck);
819 			mo->pck = NULL;
820 			GF_LOG(GF_LOG_DEBUG, GF_LOG_MEDIA, ("[ODM%d] At OTB %u released frame TS %u\n", mo->odm->ID,gf_clock_time(mo->odm->ck), mo->timestamp));
821 		} else {
822 			/*we cannot drop since we don't know the speed of the playback (which can even be frame by frame)*/
823 		}
824 	}
825 }
826 
827 GF_EXPORT
828 void gf_mo_get_object_time(GF_MediaObject *mo, u32 *obj_time)
829 {
830 	/*get absolute clock (without drift) for audio*/
831 	if (mo && mo->odm && mo->odm->ck) {
832 		if (mo->odm->type==GF_STREAM_AUDIO)
833 			*obj_time = gf_clock_real_time(mo->odm->ck);
834 		else
835 			*obj_time = gf_clock_time(mo->odm->ck);
836 	}
837 	/*unknown / unsupported object*/
838 	else {
839 		*obj_time = 0;
840 	}
841 }
842 
843 GF_EXPORT
844 void gf_mo_play(GF_MediaObject *mo, Double clipBegin, Double clipEnd, Bool can_loop)
845 {
846 	if (!mo) return;
847 
848 	if (!mo->num_open && mo->odm) {
849 		mo->is_eos = GF_FALSE;
850 		if (mo->odm->state == GF_ODM_STATE_PLAY) {
851 			if (mo->odm->flags & GF_ODM_PREFETCH) {
852 				mo->odm->flags &= ~GF_ODM_PREFETCH;
853 				mo->num_open++;
854 				return;
855 			}
856 		}
857 		if (mo->odm->flags & GF_ODM_NO_TIME_CTRL) {
858 			mo->odm->media_start_time = 0;
859 		} else {
860 			mo->odm->media_start_time = (u64) (clipBegin*1000);
861 			if (mo->odm->duration && (mo->odm->media_start_time > mo->odm->duration)) {
862 				if (can_loop) {
863 					mo->odm->media_start_time %= mo->odm->duration;
864 				} else {
865 					mo->odm->media_start_time = mo->odm->duration;
866 				}
867 			}
868 			if (clipEnd>=clipBegin) {
869 				mo->odm->media_stop_time = (u64) (clipEnd*1000);
870 				if (mo->odm->duration && (mo->odm->media_stop_time >=0) && ((u64) mo->odm->media_stop_time > mo->odm->duration)) {
871 					mo->odm->media_stop_time = 0;
872 				}
873 			} else {
874 				mo->odm->media_stop_time = 0;
875 			}
876 		}
877 		/*done prefetching*/
878 		assert(! (mo->odm->flags & GF_ODM_PREFETCH) );
879 
880 		gf_odm_start(mo->odm);
881 	} else if (mo->odm) {
882 		if (mo->num_to_restart) mo->num_restart--;
883 		if (!mo->num_restart && (mo->num_to_restart==mo->num_open+1) ) {
884 			mediacontrol_restart(mo->odm);
885 			mo->num_to_restart = mo->num_restart = 0;
886 		}
887 	}
888 	mo->num_open++;
889 }
890 
891 GF_EXPORT
892 void gf_mo_stop(GF_MediaObject **_mo)
893 {
894 	GF_MediaObject *mo = _mo ? *_mo : NULL;
895 	if (!mo || !mo->num_open) return;
896 
897 	mo->num_open--;
898 	if (!mo->num_open && mo->odm) {
899 		mo->first_frame_fetched = GF_FALSE;
900 		if (mo->odm->flags & GF_ODM_DESTROYED) {
901 			*_mo = NULL;
902 			return;
903 		}
904 
905 		/*signal STOP request*/
906 		if ((mo->OD_ID==GF_MEDIA_EXTERNAL_ID) || (mo->odm && mo->odm->ID && (mo->odm->ID==GF_MEDIA_EXTERNAL_ID))) {
907 			gf_odm_disconnect(mo->odm, 2);
908 			*_mo = NULL;
909 		} else {
910 			if ( gf_odm_stop_or_destroy(mo->odm) ) {
911 				*_mo = NULL;
912 			}
913 		}
914 	} else {
915 		if (!mo->num_to_restart) {
916 			mo->num_restart = mo->num_to_restart = mo->num_open + 1;
917 		}
918 	}
919 }
920 
921 GF_EXPORT
922 void gf_mo_restart(GF_MediaObject *mo)
923 {
924 	/*if no control and not root of a scene, check timelines are unlocked*/
925 	if (!mo->odm->subscene
926 #ifndef GPAC_DISABLE_VRML
927 		&& !gf_odm_get_mediacontrol(mo->odm)
928 #endif
929 	) {
930 		/*don't restart if sharing parent scene clock*/
931 		if (gf_odm_shares_clock(mo->odm, gf_odm_get_media_clock(mo->odm->parentscene->root_od))) {
932 			return;
933 		}
934 	}
935 	/*all other cases, call restart to take into account clock references*/
936 	mediacontrol_restart(mo->odm);
937 }
938 
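/*extracts the object descriptor ID from an MFURL: accepts "od:ID", "od:ID#segment" or plain numeric IDs;
returns 0 when URLs reference conflicting IDs, and GF_MEDIA_EXTERNAL_ID when the first URL is not an OD ID*/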
939 u32 gf_mo_get_od_id(MFURL *url)
940 {
941 	u32 i, j, tmpid;
942 	char *str, *s_url;
943 	u32 id = 0;
944 
945 	if (!url) return 0;
946 
947 	for (i=0; i<url->count; i++) {
948 		if (url->vals[i].OD_ID) {
949 			/*works because OD ID 0 is forbidden in MPEG4*/
950 			if (!id) {
951 				id = url->vals[i].OD_ID;
952 			}
953 			/*bad url, only one object can be described in MPEG4 urls*/
954 			else if (id != url->vals[i].OD_ID) return 0;
955 		} else if (url->vals[i].url && strlen(url->vals[i].url)) {
956 			/*format: od:ID or od:ID#segment - also check for "ID" in case...*/
957 			str = url->vals[i].url;
958 			if (!strnicmp(str, "od:", 3)) str += 3;
959 			/*remove segment info*/
960 			s_url = gf_strdup(str);
961 			j = 0;
962 			while (j<strlen(s_url)) {
963 				if (s_url[j]=='#') {
964 					s_url[j] = 0;
965 					break;
966 				}
967 				j++;
968 			}
969 			j = sscanf(s_url, "%u", &tmpid);
970 			/*be careful, a URL like "11-regression-test.mp4" will return 1 on sscanf :)*/
971 			if (j==1) {
972 				char szURL[20];
973 				sprintf(szURL, "%u", tmpid);
974 				if (stricmp(szURL, s_url)) j = 0;
975 			}
976 			gf_free(s_url);
977 
978 			if (j!= 1) {
979 				/*dynamic OD if only one URL specified*/
980 				if (!i) return GF_MEDIA_EXTERNAL_ID;
981 				/*otherwise ignore*/
982 				continue;
983 			}
984 			if (!id) {
985 				id = tmpid;
986 				continue;
987 			}
988 			/*bad url, only one object can be described in MPEG4 urls*/
989 			else if (id != tmpid) return 0;
990 		}
991 	}
992 	return id;
993 }
994 
995 
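/*checks whether an MFURL designates the same resource as the media object: audio/video URLs are compared
including fragments, scene URLs may match through a fragment naming a node or media segment of the
existing resource, otherwise URLs are compared with fragments stripped*/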
996 Bool gf_mo_is_same_url(GF_MediaObject *obj, MFURL *an_url, Bool *keep_fragment, u32 obj_hint_type)
997 {
998 	Bool include_sub_url = GF_FALSE;
999 	u32 i;
1000 	char szURL1[GF_MAX_PATH], szURL2[GF_MAX_PATH], *ext;
1001 
1002 	if (!obj->URLs.count) {
1003 		if (!obj->odm) return GF_FALSE;
1004 		strcpy(szURL1, obj->odm->scene_ns->url);
1005 	} else {
1006 		strcpy(szURL1, obj->URLs.vals[0].url);
1007 	}
1008 
1009 	/*don't analyse audio/video to locate segments or viewports*/
1010 	if ((obj->type==GF_MEDIA_OBJECT_AUDIO) || (obj->type==GF_MEDIA_OBJECT_VIDEO)) {
1011 		if (keep_fragment) *keep_fragment = GF_FALSE;
1012 		include_sub_url = GF_TRUE;
1013 	} else if ((obj->type==GF_MEDIA_OBJECT_SCENE) && keep_fragment && obj->odm) {
1014 		u32 j;
1015 		/*for remote ODs / dynamic ODs, check whether one of the running services can be reused*/
1016 		for (i=0; i<an_url->count; i++) {
1017 			GF_Scene *scene;
1018 			GF_SceneNamespace *sns;
1019 			char *frag = strrchr(an_url->vals[i].url, '#');
1020 			j=0;
1021 			/*this is the same object (may need some refinement)*/
1022 			if (!stricmp(szURL1, an_url->vals[i].url)) return GF_TRUE;
1023 
1024 			/*fragment is a media segment, same URL*/
1025 			if (frag ) {
1026 				Bool same_res;
1027 				frag[0] = 0;
1028 				same_res = !strncmp(an_url->vals[i].url, szURL1, strlen(an_url->vals[i].url)) ? GF_TRUE : GF_FALSE;
1029 				frag[0] = '#';
1030 
1031 				/*if we're talking about the same resource, check if the fragment can be matched*/
1032 				if (same_res) {
1033 					/*if the fragment is a node which can be found, this is the same resource*/
1034 					if (obj->odm->subscene && (gf_sg_find_node_by_name(obj->odm->subscene->graph, frag+1)!=NULL) )
1035 						return GF_TRUE;
1036 
1037 					/*if the expected type is an existing segment (undefined media type), this is the same resource*/
1038 					if (!obj_hint_type && gf_odm_find_segment(obj->odm, frag+1))
1039 						return GF_TRUE;
1040 				}
1041 			}
1042 
1043 			scene = gf_scene_get_root_scene(obj->odm->parentscene ? obj->odm->parentscene : obj->odm->subscene);
1044 			while ( (sns = (GF_SceneNamespace*) gf_list_enum(scene->namespaces, &j) ) ) {
1045 				/*sub-service of an existing service - don't touch any fragment*/
1046 #ifdef FILTER_FIXME
1047 				if (gf_term_service_can_handle_url(sns, an_url->vals[i].url)) {
1048 					*keep_fragment = GF_TRUE;
1049 					return GF_FALSE;
1050 				}
1051 #endif
1052 			}
1053 		}
1054 	}
1055 
1056 	/*check on full URL without removing fragment IDs*/
1057 	if (include_sub_url) {
1058 		for (i=0; i<an_url->count; i++) {
1059 			if (an_url->vals[i].url && !stricmp(szURL1, an_url->vals[i].url)) return GF_TRUE;
1060 		}
1061 		if (obj->odm && (obj->odm->flags & GF_ODM_PASSTHROUGH) && an_url->count && an_url->vals[0].url && !strncmp(an_url->vals[0].url, "gpid://", 7))
1062 			return GF_TRUE;
1063 		/*not same resource, we will have to check fragment as URL might point to a sub-service or single stream of a mux*/
1064 		if (keep_fragment) *keep_fragment = GF_TRUE;
1065 
1066 		return GF_FALSE;
1067 	}
1068 	ext = strrchr(szURL1, '#');
1069 	if (ext) ext[0] = 0;
1070 	for (i=0; i<an_url->count; i++) {
1071 		if (!an_url->vals[i].url) return GF_FALSE;
1072 		strcpy(szURL2, an_url->vals[i].url);
1073 		ext = strrchr(szURL2, '#');
1074 		if (ext) ext[0] = 0;
1075 		if (!stricmp(szURL1, szURL2)) return GF_TRUE;
1076 	}
1077 	return GF_FALSE;
1078 }
1079 
1080 GF_EXPORT
1081 Bool gf_mo_url_changed(GF_MediaObject *mo, MFURL *url)
1082 {
1083 	u32 od_id;
1084 	Bool ret = GF_FALSE;
1085 	if (!mo) return (url ? GF_TRUE : GF_FALSE);
1086 	od_id = gf_mo_get_od_id(url);
1087 	if ( (mo->OD_ID == GF_MEDIA_EXTERNAL_ID) && (od_id == GF_MEDIA_EXTERNAL_ID)) {
1088 		ret = !gf_mo_is_same_url(mo, url, NULL, 0);
1089 	} else {
1090 		ret = (mo->OD_ID == od_id) ? GF_FALSE : GF_TRUE;
1091 	}
1092 	/*special case for 3GPP text: if not playing and user node changed, force removing it*/
1093 	if (ret && mo->odm && !mo->num_open && (mo->type == GF_MEDIA_OBJECT_TEXT)) {
1094 		mo->flags |= GF_MO_DISPLAY_REMOVE;
1095 	}
1096 	return ret;
1097 }
1098 
1099 GF_EXPORT
1100 void gf_mo_pause(GF_MediaObject *mo)
1101 {
1102 #ifndef GPAC_DISABLE_VRML
1103 	if (!mo || !mo->num_open || !mo->odm) return;
1104 	mediacontrol_pause(mo->odm);
1105 #endif
1106 }
1107 
1108 GF_EXPORT
1109 void gf_mo_resume(GF_MediaObject *mo)
1110 {
1111 #ifndef GPAC_DISABLE_VRML
1112 	if (!mo || !mo->num_open || !mo->odm) return;
1113 	mediacontrol_resume(mo->odm, 0);
1114 #endif
1115 }
1116 
1117 GF_EXPORT
1118 void gf_mo_set_speed(GF_MediaObject *mo, Fixed speed)
1119 {
1120 #ifndef GPAC_DISABLE_VRML
1121 	MediaControlStack *ctrl;
1122 #endif
1123 
1124 	if (!mo) return;
1125 	if (!mo->odm) {
1126 		mo->speed = speed;
1127 		return;
1128 	}
1129 	//override startup speed if asked to
1130 	if (mo->odm->set_speed) {
1131 		speed = mo->odm->set_speed;
1132 		mo->odm->set_speed = 0;
1133 	}
1134 #ifndef GPAC_DISABLE_VRML
1135 	/*if a media control is attached, it drives the speed - do not override it*/
1136 	ctrl = gf_odm_get_mediacontrol(mo->odm);
1137 	if (ctrl) return;
1138 #endif
1139 
1140 	if (mo->odm->scene_ns && mo->odm->scene_ns->owner && (mo->odm->scene_ns->owner->flags & GF_ODM_INHERIT_TIMELINE))
1141 		return;
1142 
1143 	gf_odm_set_speed(mo->odm, speed, GF_TRUE);
1144 }
1145 
1146 GF_EXPORT
1147 Fixed gf_mo_get_current_speed(GF_MediaObject *mo)
1148 {
1149 	return (mo && mo->odm && mo->odm->ck) ? mo->odm->ck->speed : FIX_ONE;
1150 }
1151 
1152 GF_EXPORT
1153 u32 gf_mo_get_min_frame_dur(GF_MediaObject *mo)
1154 {
1155 	return mo ? mo->frame_dur : 0;
1156 }
1157 GF_EXPORT
1158 u32 gf_mo_map_timestamp_to_sys_clock(GF_MediaObject *mo, u32 ts)
1159 {
1160 	return (mo && mo->odm)? mo->odm->ck->start_time + ts : 0;
1161 }
1162 
1163 Bool gf_mo_is_buffering(GF_MediaObject *mo)
1164 {
1165 	return (mo && mo->odm && mo->odm->ck->nb_buffering) ? GF_TRUE : GF_FALSE;
1166 }
1167 
1168 GF_EXPORT
1169 Fixed gf_mo_get_speed(GF_MediaObject *mo, Fixed in_speed)
1170 {
1171 	Fixed res = in_speed;
1172 	if (!mo || !mo->odm) return in_speed;
1173 
1174 #ifndef GPAC_DISABLE_VRML
1175 	MediaControlStack *ctrl;
1176 
1177 	/*get control*/
1178 	ctrl = gf_odm_get_mediacontrol(mo->odm);
1179 	if (ctrl) res = ctrl->control->mediaSpeed;
1180 
1181 #endif
1182 
1183 	return res;
1184 }
1185 
1186 GF_EXPORT
1187 Bool gf_mo_get_loop(GF_MediaObject *mo, Bool in_loop)
1188 {
1189 	GF_Clock *ck;
1190 #ifndef GPAC_DISABLE_VRML
1191 	MediaControlStack *ctrl;
1192 #endif
1193 	if (!mo || !mo->odm) return in_loop;
1194 
1195 	/*get control*/
1196 #ifndef GPAC_DISABLE_VRML
1197 	ctrl = gf_odm_get_mediacontrol(mo->odm);
1198 	if (ctrl) in_loop = ctrl->control->loop;
1199 #endif
1200 
1201 	/*otherwise looping is only accepted if not sharing parent scene clock*/
1202 	ck = gf_odm_get_media_clock(mo->odm->parentscene->root_od);
1203 	if (gf_odm_shares_clock(mo->odm, ck)) {
1204 		in_loop = GF_FALSE;
1205 #ifndef GPAC_DISABLE_VRML
1206 		/*
1207 			if (ctrl && ctrl->stream->odm && ctrl->stream->odm->subscene)
1208 					gf_term_invalidate_compositor(mo->odm->term);
1209 		*/
1210 #endif
1211 	}
1212 	return in_loop;
1213 }
1214 
1215 GF_EXPORT
1216 Double gf_mo_get_duration(GF_MediaObject *mo)
1217 {
1218 	Double dur;
1219 	dur = ((Double) (s64)mo->odm->duration)/1000.0;
1220 	return dur;
1221 }
1222 
1223 GF_EXPORT
1224 Bool gf_mo_should_deactivate(GF_MediaObject *mo)
1225 {
1226 	Bool res = GF_FALSE;
1227 #ifndef GPAC_DISABLE_VRML
1228 	MediaControlStack *ctrl;
1229 #endif
1230 
1231 	if (!mo || !mo->odm) return GF_TRUE;
1232 	if (!mo->odm->state) return GF_FALSE;
1233 	//if dynamic scene we can deactivate
1234 	if (mo->odm->parentscene && mo->odm->parentscene->is_dynamic_scene) {
1235 		return GF_TRUE;
1236 	}
1237 
1238 #ifndef GPAC_DISABLE_VRML
1239 	/*get media control and see if object owning control is running*/
1240 	ctrl = gf_odm_get_mediacontrol(mo->odm);
1241 	if (!ctrl) res = GF_TRUE;
1242 	/*if ctrl and ctrl not ruling this mediaObject, deny deactivation*/
1243 	else if (ctrl->stream->odm != mo->odm) res = GF_FALSE;
1244 	/*this is currently under discussion in MPEG. for now we deny deactivation as soon as a mediaControl is here*/
1245 	else if (ctrl->stream->odm->state) res = GF_FALSE;
1246 	/*otherwise allow*/
1247 	else
1248 #endif
1249 		res = GF_TRUE;
1250 
1251 	return res;
1252 }
1253 
1254 GF_EXPORT
1255 Bool gf_mo_is_muted(GF_MediaObject *mo)
1256 {
1257 #ifndef GPAC_DISABLE_VRML
1258 	return mo->odm->media_ctrl ? mo->odm->media_ctrl->control->mute : GF_FALSE;
1259 #else
1260 	return GF_FALSE;
1261 #endif
1262 }
1263 
1264 GF_EXPORT
1265 Bool gf_mo_is_started(GF_MediaObject *mo)
1266 {
1267 	if (mo && mo->odm && gf_clock_is_started(mo->odm->ck)) return GF_TRUE;
1268 	return GF_FALSE;
1269 }
1270 
1271 GF_EXPORT
1272 Bool gf_mo_is_done(GF_MediaObject *mo)
1273 {
1274 	GF_Clock *ck;
1275 	u64 dur;
1276 	if (!mo || !mo->odm) return GF_FALSE;
1277 
1278 	if (! mo->odm->has_seen_eos) return GF_FALSE;
1279 
1280 	if ((mo->odm->type==GF_STREAM_AUDIO) || (mo->odm->type==GF_STREAM_VISUAL)) {
1281 		return GF_TRUE;
1282 	}
1283 
1284 	/*check time - technically this should also apply to video streams since we could extend the duration
1285 	of the last frame - to further test*/
1286 	dur = (mo->odm->subscene && mo->odm->subscene->duration) ? mo->odm->subscene->duration : mo->odm->duration;
1287 	/*codec is done, check by duration*/
1288 	ck = gf_odm_get_media_clock(mo->odm);
1289 	if (gf_clock_time(ck) > dur)
1290 		return GF_TRUE;
1291 
1292 	return GF_FALSE;
1293 }
1294 
1295 /*resyncs clock - only audio objects are allowed to use this*/
1296 GF_EXPORT
1297 void gf_mo_adjust_clock(GF_MediaObject *mo, s32 ms_drift)
1298 {
1299 	if (!mo || !mo->odm) return;
1300 	if (mo->odm->type != GF_STREAM_AUDIO) return;
1301 	gf_clock_set_audio_delay(mo->odm->ck, ms_drift);
1302 }
1303 
1304 GF_EXPORT
1305 void gf_mo_set_flag(GF_MediaObject *mo, GF_MOUserFlags flag, Bool set_on)
1306 {
1307 	if (mo) {
1308 		if (set_on)
1309 			mo->flags |= flag;
1310 		else
1311 			mo->flags &= ~flag;
1312 	}
1313 }
1314 
1315 GF_EXPORT
1316 u32 gf_mo_has_audio(GF_MediaObject *mo)
1317 {
1318 #ifdef FILTER_FIXME
1319 	char *sub_url;
1320 #endif
1321 	u32 i;
1322 	GF_SceneNamespace *ns;
1323 	GF_Scene *scene;
1324 	if (!mo || !mo->odm) return 0;
1325 	if (mo->type != GF_MEDIA_OBJECT_VIDEO) return 0;
1326 	if (!mo->odm->scene_ns) return 2;
1327 
1328 	ns = mo->odm->scene_ns;
1329 	scene = mo->odm->parentscene;
1330 #ifdef FILTER_FIXME
1331 	sub_url = strchr(ns->url, '#');
1332 #endif
1333 	for (i=0; i<gf_list_count(scene->resources); i++) {
1334 		GF_ObjectManager *odm = (GF_ObjectManager *)gf_list_get(scene->resources, i);
1335 		if (odm->scene_ns != ns) continue;
1336 		//object already associated
1337 		if (odm->mo) continue;
1338 
1339 #ifdef FILTER_FIXME
1340 		if (sub_url) {
1341 			char *ext = mo->URLs.count ? mo->URLs.vals[0].url : NULL;
1342 			if (ext) ext = strchr(ext, '#');
1343 			if (!ext || strcmp(sub_url, ext)) continue;
1344 		}
1345 #endif
1346 		/*we have an audio object from the same service not yet bound to the scene, let's use it*/
1347 		if (odm->type == GF_STREAM_AUDIO) return 1;
1348 	}
1349 	return 0;
1350 }
1351 
1352 GF_EXPORT
1353 GF_SceneGraph *gf_mo_get_scenegraph(GF_MediaObject *mo)
1354 {
1355 	if (!mo || !mo->odm || !mo->odm->subscene) return NULL;
1356 	return mo->odm->subscene->graph;
1357 }
1358 
1359 
1360 GF_EXPORT
1361 GF_DOMEventTarget *gf_mo_event_target_add_node(GF_MediaObject *mo, GF_Node *n)
1362 {
1363 #ifndef GPAC_DISABLE_SVG
1364 	GF_DOMEventTarget *target = NULL;
1365 	if (!mo ||!n) return NULL;
1366 	target = gf_dom_event_get_target_from_node(n);
1367 	gf_list_add(mo->evt_targets, target);
1368 	return target;
1369 #else
1370 	return NULL;
1371 #endif
1372 }
1373 
1374 GF_Err gf_mo_event_target_remove_by_index(GF_MediaObject *mo, u32 i)
1375 {
1376 	if (!mo) return GF_BAD_PARAM;
1377 	gf_list_rem(mo->evt_targets, i);
1378 	return GF_OK;
1379 }
1380 
1381 GF_Node *gf_mo_event_target_enum_node(GF_MediaObject *mo, u32 *i)
1382 {
1383 	GF_DOMEventTarget *target;
1384 	if (!mo || !i) return NULL;
1385 	target = (GF_DOMEventTarget *)gf_list_enum(mo->evt_targets, i);
1386 	if (!target) return NULL;
1387 	//if (target->ptr_type != GF_DOM_EVENT_TARGET_NODE) return NULL;
1388 	return (GF_Node *)target->ptr;
1389 }
1390 
1391 s32 gf_mo_event_target_find_by_node(GF_MediaObject *mo, GF_Node *node)
1392 {
1393 	u32 i, count;
1394 	count = gf_list_count(mo->evt_targets);
1395 	for (i = 0; i < count; i++) {
1396 		GF_DOMEventTarget *target = (GF_DOMEventTarget *)gf_list_get(mo->evt_targets, i);
1397 		if (target->ptr == node) {
1398 			return i;
1399 		}
1400 	}
1401 	return -1;
1402 }
1403 
1404 GF_EXPORT
1405 GF_Err gf_mo_event_target_remove_by_node(GF_MediaObject *mo, GF_Node *node)
1406 {
1407 	u32 i, count;
1408 	count = gf_list_count(mo->evt_targets);
1409 	for (i = 0; i < count; i++) {
1410 		GF_DOMEventTarget *target = (GF_DOMEventTarget *)gf_list_get(mo->evt_targets, i);
1411 		if (target->ptr == node) {
1412 			gf_list_del_item(mo->evt_targets, target);
1413 			i--;
1414 			count--;
1415 			//return GF_OK;
1416 		}
1417 	}
1418 	return GF_BAD_PARAM;
1419 }
1420 
1421 GF_EXPORT
1422 GF_Node *gf_event_target_get_node(GF_DOMEventTarget *target)
1423 {
1424 	if (target && (target->ptr_type == GF_DOM_EVENT_TARGET_NODE)) {
1425 		return (GF_Node *)target->ptr;
1426 	}
1427 	return NULL;
1428 }
1429 
1430 GF_EXPORT
1431 GF_DOMEventTarget *gf_mo_event_target_get(GF_MediaObject *mo, u32 i)
1432 {
1433 	GF_DOMEventTarget *target = (GF_DOMEventTarget *)gf_list_get(mo->evt_targets, i);
1434 	return target;
1435 }
1436 
1437 void gf_mo_event_target_reset(GF_MediaObject *mo)
1438 {
1439 	if (mo->evt_targets) gf_list_reset(mo->evt_targets);
1440 }
1441 
1442 u32 gf_mo_event_target_count(GF_MediaObject *mo)
1443 {
1444 	if (!mo) return 0;
1445 	return gf_list_count(mo->evt_targets);
1446 }
1447 
1448 void gf_mo_del(GF_MediaObject *mo)
1449 {
1450 	assert(gf_list_count(mo->evt_targets) == 0);
1451 	gf_list_del(mo->evt_targets);
1452 	if (mo->pck) gf_filter_pck_unref(mo->pck);
1453 	gf_sg_mfurl_del(mo->URLs);
1454 	gf_free(mo);
1455 }
1456 
1457 
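/*fills SRD/VR info for the object from its scene (position and size within the full SRD frame, tiling and
coverage flags); returns GF_TRUE only when the object carries SRD coordinates or a full-frame SRD size*/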
1458 Bool gf_mo_get_srd_info(GF_MediaObject *mo, GF_MediaObjectVRInfo *vr_info)
1459 {
1460 	GF_Scene *scene;
1461 	if (!vr_info || !mo->odm) return GF_FALSE;
1462 
1463 	scene = mo->odm->subscene ? mo->odm->subscene : mo->odm->parentscene;
1464 	memset(vr_info, 0, sizeof(GF_MediaObjectVRInfo));
1465 
1466 	vr_info->srd_x = mo->srd_x;
1467 	vr_info->srd_y = mo->srd_y;
1468 	vr_info->srd_w = mo->srd_w;
1469 	vr_info->srd_h = mo->srd_h;
1470 	vr_info->srd_min_x = scene->srd_min_x;
1471 	vr_info->srd_min_y = scene->srd_min_y;
1472 	vr_info->srd_max_x = scene->srd_max_x;
1473 	vr_info->srd_max_y = scene->srd_max_y;
1474 	vr_info->is_tiled_srd = scene->is_tiled_srd;
1475 	vr_info->has_full_coverage = (scene->srd_type==2) ? GF_TRUE : GF_FALSE;
1476 
1477 	gf_sg_get_scene_size_info(scene->graph, &vr_info->scene_width, &vr_info->scene_height);
1478 
1479 	if (mo->srd_w && mo->srd_h) return GF_TRUE;
1480 	if (mo->srd_full_w && mo->srd_full_h) return GF_TRUE;
1481 	return GF_FALSE;
1482 }
1483 
1484 /*sets quality degradation hint for this media object - quality_degradation is between 0 (no degradation) and 100 (maximum degradation)*/
1485 void gf_mo_hint_quality_degradation(GF_MediaObject *mo, u32 quality_degradation)
1486 {
1487 	if (!mo || !mo->odm || !mo->odm->pid) {
1488 		return;
1489 	}
1490 	if (mo->quality_degradation_hint != quality_degradation) {
1491 		GF_FilterEvent evt;
1492 		GF_FEVT_INIT(evt, GF_FEVT_QUALITY_SWITCH, mo->odm->pid);
1493 		evt.quality_switch.quality_degradation = quality_degradation;
1494 		gf_filter_pid_send_event(mo->odm->pid, &evt);
1495 
1496 		mo->quality_degradation_hint = quality_degradation;
1497 	}
1498 }
1499 
1500 void gf_mo_hint_visible_rect(GF_MediaObject *mo, u32 min_x, u32 max_x, u32 min_y, u32 max_y)
1501 {
1502 	if (!mo || !mo->odm || !mo->odm->pid) {
1503 		return;
1504 	}
1505 
1506 	if ((mo->view_min_x!=min_x) || (mo->view_max_x!=max_x) || (mo->view_min_y!=min_y) || (mo->view_max_y!=max_y)) {
1507 		GF_FilterEvent evt;
1508 		GF_FEVT_INIT(evt, GF_FEVT_VISIBILITY_HINT, mo->odm->pid);
1509 		mo->view_min_x = min_x;
1510 		mo->view_max_x = max_x;
1511 		mo->view_min_y = min_y;
1512 		mo->view_max_y = max_y;
1513 
1514 		evt.visibility_hint.min_x = min_x;
1515 		evt.visibility_hint.max_x = max_x;
1516 		evt.visibility_hint.min_y = min_y;
1517 		evt.visibility_hint.max_y = max_y;
1518 
1519 		gf_filter_pid_send_event(mo->odm->pid, &evt);
1520 	}
1521 }
1522 
1523 void gf_mo_hint_gaze(GF_MediaObject *mo, u32 gaze_x, u32 gaze_y)
1524 {
1525 	if (!mo || !mo->odm || !mo->odm->pid) {
1526 		return;
1527 	}
1528 
1529 	if ((mo->view_min_x!=gaze_x) || (mo->view_min_y!=gaze_y) ) {
1530 		GF_FilterEvent evt;
1531 		GF_FEVT_INIT(evt, GF_FEVT_VISIBILITY_HINT, mo->odm->pid);
1532 		mo->view_min_x = gaze_x;
1533 		mo->view_min_y = gaze_y;
1534 
1535 		evt.visibility_hint.min_x = gaze_x;
1536 		evt.visibility_hint.min_y = gaze_y;
1537 		evt.visibility_hint.is_gaze = GF_TRUE;
1538 
1539 		gf_filter_pid_send_event(mo->odm->pid, &evt);
1540 	}
1541 }
1542 
1543 
1544 
1545