1 /*
2  * Copyright 2003-2020, Björn Ståhl
3  * License: 3-Clause BSD, see COPYING file in arcan source repository.
4  * Reference: http://arcan-fe.com
5  */
6 
7 #include <stdint.h>
8 #include <inttypes.h>
9 #include <stdbool.h>
10 #include <stdlib.h>
11 #include <string.h>
12 #include <unistd.h>
13 #include <stdio.h>
14 #include <fcntl.h>
15 #include <sys/types.h>
16 #include <sys/stat.h>
17 #include <signal.h>
18 #include <stddef.h>
19 #include <math.h>
20 #include <limits.h>
21 #include <assert.h>
22 #include <errno.h>
23 #include <stdalign.h>
24 #include <stdarg.h>
25 
26 #include <pthread.h>
27 #include <semaphore.h>
28 
29 #define CLAMP(x, l, h) (((x) > (h)) ? (h) : (((x) < (l)) ? (l) : (x)))
30 
31 #ifndef ASYNCH_CONCURRENT_THREADS
32 #define ASYNCH_CONCURRENT_THREADS 12
33 #endif
34 
35 #include PLATFORM_HEADER
36 
37 #include "arcan_shmif.h"
38 #include "arcan_shmif_sub.h"
39 #include "arcan_math.h"
40 #include "arcan_general.h"
41 #include "arcan_video.h"
42 #include "arcan_ttf.h"
43 #include "arcan_audio.h"
44 #include "arcan_event.h"
45 #include "arcan_frameserver.h"
46 #include "arcan_renderfun.h"
47 #include "arcan_videoint.h"
48 #include "arcan_3dbase.h"
49 #include "arcan_img.h"
50 
51 #ifndef offsetof
52 #define offsetof(type, member) ((size_t)((char*)&(*(type*)0).member\
53  - (char*)&(*(type*)0)))
54 #endif
55 
56 #ifndef ARCAN_VIDEO_DEFAULT_MIPMAP_STATE
57 #define ARCAN_VIDEO_DEFAULT_MIPMAP_STATE false
58 #endif
59 
60 static surface_properties empty_surface();
61 static sem_handle asynchsynch;
62 
/* these match arcan_vinterpolant enum */
/* NOTE: order is significant -- the enum value is used directly as an
 * index into these tables, keep them in sync with the enum definition */
static arcan_interp_3d_function lut_interp_3d[] = {
	interp_3d_linear,
	interp_3d_sine,
	interp_3d_expin,
	interp_3d_expout,
	interp_3d_expinout,
	interp_3d_smoothstep,
};

/* 1d variant of the table above, same index contract */
static arcan_interp_1d_function lut_interp_1d[] = {
	interp_1d_linear,
	interp_1d_sine,
	interp_1d_expin,
	interp_1d_expout,
	interp_1d_expinout,
	interp_1d_smoothstep
};
81 
/* global video subsystem state / defaults, mutated through the
 * arcan_video_default_* setters further down in this file */
struct arcan_video_display arcan_video_display = {
	.conservative = false,
/* NOTE: the second ARCAN_VTEX_CLAMP carries no designator, so per C
 * initializer rules it positionally initializes the struct member that
 * immediately follows .deftxs (the t-axis wrap mode) -- fragile if the
 * struct layout ever changes */
	.deftxs = ARCAN_VTEX_CLAMP, ARCAN_VTEX_CLAMP,
	.scalemode = ARCAN_VIMAGE_NOPOW2,
	.filtermode = ARCAN_VFILTER_BILINEAR,
	.blendmode = BLEND_FORCE,
	.order3d = ORDER3D_FIRST,
	.suspended = false,
	.msasamples = 4,
	.c_ticks = 1,
	.default_vitemlim = 1024,
	.imageproc = IMAGEPROC_NORMAL,
	.mipmap = ARCAN_VIDEO_DEFAULT_MIPMAP_STATE,
	.dirty = 0,
	.cursor.w = 24,
	.cursor.h = 16
};
99 
/* context stack, pushed/popped by arcan_video_pushcontext/popcontext;
 * index 0 is the default context with the world object pre-initialized */
struct arcan_video_context vcontext_stack[CONTEXT_STACK_LIMIT] = {
	{
		.n_rtargets = 0,
		.vitem_ofs = 1,
		.nalive    = 0,
		.world = {
			.tracetag = "(world)",
			.current  = {
				.opa = 1.0,
				.rotation.quaternion.w = 1.0
			}
		}
	}
};

/* index of the active context within vcontext_stack */
unsigned vcontext_ind = 0;
116 
117 /*
118  * additional internal forwards that do not really belong to videoint.h
119  */
120 static bool detach_fromtarget(struct rendertarget* dst, arcan_vobject* src);
121 static void attach_object(struct rendertarget* dst, arcan_vobject* src);
122 static arcan_errc update_zv(arcan_vobject* vobj, int newzv);
123 static void rebase_transform(struct surface_transform*, int64_t);
124 static size_t process_rendertarget(struct rendertarget*, float);
125 static arcan_vobject* new_vobject(arcan_vobj_id* id,
126 struct arcan_video_context* dctx);
127 static inline void build_modelview(float* dmatr,
128 	float* imatr, surface_properties* prop, arcan_vobject* src);
129 static inline void process_readback(struct rendertarget* tgt, float fract);
130 
/* printf- style debug logger to stderr, compiled to a no-op unless
 * TRACE_ENABLE is defined at build time */
static inline void trace(const char* msg, ...)
{
#ifdef TRACE_ENABLE
	va_list ap;
	va_start(ap, msg);
	vfprintf(stderr, msg, ap);
	va_end(ap);
#endif
}
140 
/* debugging label for an object, never returns NULL */
static const char* video_tracetag(arcan_vobject* src)
{
	if (!src || !src->tracetag)
		return "(unknown)";

	return src->tracetag;
}
145 
146 /* a default more-or-less empty context */
147 static struct arcan_video_context* current_context = vcontext_stack;
148 
/*
 * Decrement the shared backing-store reference count and, when it
 * reaches zero, release both CPU- and GPU- side resources along with
 * the container itself.
 * NOTE(review): the raw buffer / source string release and the memset
 * only run for texture-mapped stores with a live glid; for other
 * states the container is freed directly -- confirm such stores can
 * never carry a raw buffer, otherwise this would leak.
 */
void arcan_vint_drop_vstore(struct agp_vstore* s)
{
	assert(s->refcount);
	s->refcount--;

	if (s->refcount == 0){
		if (s->txmapped != TXSTATE_OFF && s->vinf.text.glid){
			if (s->vinf.text.raw){
				arcan_mem_free(s->vinf.text.raw);
				s->vinf.text.raw = NULL;
			}

/* release the GPU (agp) side of the store */
			agp_drop_vstore(s);

			if (s->vinf.text.source)
				arcan_mem_free(s->vinf.text.source);

			memset(s, '\0', sizeof(struct agp_vstore));
		}

		arcan_mem_free(s);
	}
}
172 
arcan_video_default_texfilter(enum arcan_vfilter_mode mode)173 void arcan_video_default_texfilter(enum arcan_vfilter_mode mode)
174 {
175 	arcan_video_display.filtermode = mode;
176 }
177 
arcan_video_default_imageprocmode(enum arcan_imageproc_mode mode)178 void arcan_video_default_imageprocmode(enum arcan_imageproc_mode mode)
179 {
180 	arcan_video_display.imageproc = mode;
181 }
182 
arcan_vint_findrt_vstore(struct agp_vstore * st)183 struct rendertarget* arcan_vint_findrt_vstore(struct agp_vstore* st)
184 {
185 	if (!st)
186 		return NULL;
187 
188 	for (size_t i = 0; i < current_context->n_rtargets && st; i++)
189 		if (current_context->rtargets[i].color->vstore == st)
190 			return &current_context->rtargets[i];
191 
192 	if (current_context->stdoutp.color &&
193 		st == current_context->stdoutp.color->vstore)
194 		return &current_context->stdoutp;
195 	return NULL;
196 }
197 
/*
 * Resolve the rendertarget (in the active context) whose color
 * attachment is [vobj]; the world object maps to the standard output.
 * Returns NULL when [vobj] is no rendertarget.
 */
struct rendertarget* arcan_vint_findrt(arcan_vobject* vobj)
{
	if (!vobj)
		return NULL;

	struct arcan_video_context* ctx = current_context;

	for (size_t i = 0; i < ctx->n_rtargets; i++){
		if (ctx->rtargets[i].color == vobj)
			return &ctx->rtargets[i];
	}

	return vobj == &ctx->world ? &ctx->stdoutp : NULL;
}
209 
/*
 * Link [child] under [parent], reusing a free slot in the parent's
 * child table or growing the table by 8 entries when it is full.
 * Bumps the parent's external link refcount.
 */
static void addchild(arcan_vobject* parent, arcan_vobject* child)
{
	arcan_vobject** slot = NULL;

/* first try to recycle a hole left by a dropped child */
	for (size_t i = 0; i < parent->childslots && !slot; i++)
		if (!parent->children[i])
			slot = &parent->children[i];

/* no hole: grow the table, copy over the old entries and NULL the
 * newly added tail */
	if (!slot){
		size_t old = parent->childslots;
		arcan_vobject** tbl = arcan_alloc_mem(
			(old + 8) * sizeof(void*),
			ARCAN_MEM_VSTRUCT, 0, ARCAN_MEMALIGN_NATURAL
		);

		if (parent->children){
			memcpy(tbl, parent->children, old * sizeof(void*));
			arcan_mem_free(parent->children);
		}

		for (size_t i = 0; i < 8; i++)
			tbl[old + i] = NULL;

		parent->children = tbl;
		parent->childslots = old + 8;
		slot = &tbl[old];
	}

	parent->extrefc.links++;

	child->parent = parent;
	*slot = child;
}
245 
246 /*
247  * recursively sweep children and
248  * flag their caches for updates as well
249  */
/*
 * recursively sweep children and
 * flag their caches for updates as well
 */
static void invalidate_cache(arcan_vobject* vobj)
{
	FLAG_DIRTY(vobj);

	if (vobj->valid_cache){
		vobj->valid_cache = false;

		for (size_t i = 0; i < vobj->childslots; i++){
			arcan_vobject* ch = vobj->children[i];
			if (ch)
				invalidate_cache(ch);
		}
	}
}
263 
/*
 * Unlink [child] from [parent]'s child table (if present), release
 * the link reference and re-parent the child to the world object.
 */
static void dropchild(arcan_vobject* parent, arcan_vobject* child)
{
	size_t i = 0;
	while (i < parent->childslots && parent->children[i] != child)
		i++;

	if (i < parent->childslots){
		parent->children[i] = NULL;
		parent->extrefc.links--;
		child->parent = &current_context->world;
	}
}
275 
276 /* scan through each cell in use, and either deallocate / wrap with deleteobject
277  * or pause frameserver connections and (conservative) delete resources that can
278  * be recreated later on. */
/*
 * Release (del=true) or suspend (del=false) the GPU- side resources
 * of every live object in [context]. [safe_store] is spared from
 * nulling -- the push path passes the world vstore here since it is
 * carried over into the new context.
 */
static void deallocate_gl_context(
	struct arcan_video_context* context, bool del, struct agp_vstore* safe_store)
{
/* index (0) is always worldid */
	for (size_t i = 1; i < context->vitem_limit; i++){
		if (FL_TEST(&(context->vitems_pool[i]), FL_INUSE)){
			arcan_vobject* current = &(context->vitems_pool[i]);

/* before doing any modification, wait for any async load calls to finish(!),
 * question is IF this should invalidate or not */
			if (current->feed.state.tag == ARCAN_TAG_ASYNCIMGLD ||
				current->feed.state.tag == ARCAN_TAG_ASYNCIMGRD)
				arcan_video_pushasynch(i);

/* for persistant objects, deleteobject will only be "effective" if we're at
 * the stack layer where the object was created */
			if (del)
				arcan_video_deleteobject(i);

/* only non-persistant objects will have their GL objects removed immediately
 * but not for the cases where we share store with the world */
			else if (
				!FL_TEST(current, FL_PRSIST) && !FL_TEST(current, FL_RTGT) &&
				current->vstore != safe_store)
				agp_null_vstore(current->vstore);
		}
	}

/* pool is dynamically sized and size is set on layer push */
	if (del){
		arcan_mem_free(context->vitems_pool);
		context->vitems_pool = NULL;
	}
}
313 
/*
 * Advance the object's frameset to the next frame (wrapping around),
 * count a pending transfer on the owning rendertarget and flag the
 * object dirty so the change is picked up on the next refresh.
 */
static inline void step_active_frame(arcan_vobject* vobj)
{
	if (!vobj->frameset)
		return;

	size_t sz = vobj->frameset->n_frames;

/* guard against modulo-by-zero (UB) on an empty frameset */
	if (!sz)
		return;

	vobj->frameset->index = (vobj->frameset->index + 1) % sz;
	if (vobj->owner)
		vobj->owner->transfc++;

	FLAG_DIRTY(vobj);
}
327 
328 /*
329  * Iterate a saved context, and reallocate all resources associated with it.
330  * Note that this doesn't really consider other forms of gl storage at the
331  * moment, particularly rendertargets(!)
332  *
333  */
/*
 * Rebuild GPU- side resources for [context] after a pop: allocate a
 * fresh pool if none was saved, otherwise rebase queued transforms to
 * the current tick, re-upload or reload backing stores and resume any
 * frameserver feeds.
 */
static void reallocate_gl_context(struct arcan_video_context* context)
{
	arcan_tickv cticks = arcan_video_display.c_ticks;

/* If there's nothing saved, we reallocate */
	if (!context->vitems_pool){
		context->vitem_limit = arcan_video_display.default_vitemlim;
		context->vitem_ofs   = 1;
		context->vitems_pool = arcan_alloc_mem(
			sizeof(struct arcan_vobject) * context->vitem_limit,
				ARCAN_MEM_VSTRUCT, ARCAN_MEM_BZERO, ARCAN_MEMALIGN_NATURAL);
	}
	else for (size_t i = 1; i < context->vitem_limit; i++)
		if (FL_TEST(&(context->vitems_pool[i]), FL_INUSE)){
			arcan_vobject* current = &context->vitems_pool[i];
			surface_transform* ctrans = current->transform;

/* persistent objects were carried across the push/pop, leave as-is */
			if (FL_TEST(current, FL_PRSIST))
				continue;

/* since there may be queued transforms in an already pushed context,
 * we maintain the timing and reset them to match the changes that
 * has already occurred */
			if (ctrans && cticks > context->last_tickstamp){
				rebase_transform(ctrans, cticks - context->last_tickstamp);
			}

/* for conservative memory management mode we need to reallocate
 * static resources. getimage will strdup the source so to avoid leaking,
 * copy and free */
/* NOTE(review): the (char) cast narrows the tag before the comparison
 * -- presumably historical; verify no other tag value aliases
 * ARCAN_TAG_IMAGE after truncation */
			if (arcan_video_display.conservative &&
				(char)current->feed.state.tag == ARCAN_TAG_IMAGE){
					char* fname = strdup( current->vstore->vinf.text.source );
					arcan_mem_free(current->vstore->vinf.text.source);
				arcan_vint_getimage(fname,
					current, (img_cons){.w = current->origw, .h = current->origh}, false);
				arcan_mem_free(fname);
			}
			else
				if (current->vstore->txmapped != TXSTATE_OFF)
					agp_update_vstore(current->vstore, true);

/* wake up any frameserver feed that was paused on push */
			arcan_frameserver* fsrv = current->feed.state.ptr;
			if (current->feed.state.tag == ARCAN_TAG_FRAMESERV && fsrv){
				arcan_frameserver_flush(fsrv);
				arcan_frameserver_resume(fsrv);
				arcan_audio_play(fsrv->aid, false, 0.0, -2); /* -2 == LUA_NOREF */
			}
		}
}
384 
arcan_video_nfreecontexts()385 unsigned arcan_video_nfreecontexts()
386 {
387 		return CONTEXT_STACK_LIMIT - 1 - vcontext_ind;
388 }
389 
rebase_transform(struct surface_transform * current,int64_t ofs)390 static void rebase_transform(struct surface_transform* current, int64_t ofs)
391 {
392 	if (current->move.startt){
393 		current->move.startt += ofs;
394 		current->move.endt   += ofs;
395 	}
396 
397 	if (current->rotate.startt){
398 		current->rotate.startt += ofs;
399 		current->rotate.endt   += ofs;
400 	}
401 
402 	if (current->scale.startt){
403 		current->scale.startt += ofs;
404 		current->scale.endt   += ofs;
405 	}
406 
407 	if (current->next)
408 		rebase_transform(current->next, ofs);
409 }
410 
/*
 * On context push: move every persist-flagged object from [src] into
 * the same slot index of [dst], detaching it from its rendertarget in
 * [src] and attaching it to the standard output of [dst].
 * NOTE(review): the matching dst slot is assumed unoccupied (dst was
 * just memcpy'd from src on push) -- confirm if reused elsewhere.
 */
static void push_transfer_persists(
	struct arcan_video_context* src,
	struct arcan_video_context* dst)
{
	for (size_t i = 1; i < src->vitem_limit - 1; i++){
		arcan_vobject* srcobj = &src->vitems_pool[i];
		arcan_vobject* dstobj = &dst->vitems_pool[i];

		if (!FL_TEST(srcobj, FL_INUSE) || !FL_TEST(srcobj, FL_PRSIST))
			continue;

		detach_fromtarget(srcobj->owner, srcobj);
		memcpy(dstobj, srcobj, sizeof(arcan_vobject));
		dst->nalive++; /* fake allocate */
		dstobj->parent = &dst->world; /* don't cross- reference worlds */
		attach_object(&dst->stdoutp, dstobj);
		trace("vcontext_stack_push() : transfer-attach: %s\n", srcobj->tracetag);
	}
}
430 
431 /*
432  * if an object exists in src, is flagged persist,
433  * and a similar (shadow) object is flagged persist in dst,
434  * update the state in dst with src and detach/remove from src.
435  */
/*
 * On context pop: copy persist-flagged objects from [src] (the layer
 * being dropped) down into the same slot of [dst], keeping the parent
 * that the shadow object in [dst] already had, then scrub the source
 * slot so the pop-side teardown does not double-release it.
 * NOTE(review): only the src side is checked for FL_PRSIST even
 * though the comment above mentions a persist-flagged shadow in dst
 * -- confirm the dst slot always mirrors the flag.
 */
static void pop_transfer_persists(
	struct arcan_video_context* src,
	struct arcan_video_context* dst)
{
	for (size_t i = 1; i < src->vitem_limit - 1; i++){
		arcan_vobject* srcobj = &src->vitems_pool[i];
		arcan_vobject* dstobj = &dst->vitems_pool[i];

		if (!FL_TEST(srcobj, FL_INUSE) || !FL_TEST(srcobj, FL_PRSIST))
			continue;

/* keep the parent relation the shadow object had in dst */
		arcan_vobject* parent = dstobj->parent;

		detach_fromtarget(srcobj->owner, srcobj);
		src->nalive--;

		memcpy(dstobj, srcobj, sizeof(arcan_vobject));
		attach_object(&dst->stdoutp, dstobj);
		dstobj->parent = parent;
		memset(srcobj, '\0', sizeof(arcan_vobject));
	}
}
458 
arcan_vint_drawrt(struct agp_vstore * vs,int x,int y,int w,int h)459 void arcan_vint_drawrt(struct agp_vstore* vs, int x, int y, int w, int h)
460 {
461 	_Alignas(16) float imatr[16];
462 	identity_matrix(imatr);
463 	agp_shader_activate(agp_default_shader(BASIC_2D));
464 	if (!vs)
465 		return;
466 
467 	agp_activate_vstore(vs);
468 	agp_shader_envv(MODELVIEW_MATR, imatr, sizeof(float)*16);
469 	agp_shader_envv(PROJECTION_MATR,
470 		arcan_video_display.window_projection, sizeof(float)*16);
471 
472 	agp_blendstate(BLEND_NONE);
473 	agp_draw_vobj(0, 0, x + w, y + h,
474 		arcan_video_display.mirror_txcos, NULL);
475 
476 	agp_deactivate_vstore();
477 }
478 
/*
 * Apply a blitting hint to a set of texture coordinates: rotate and/or
 * y-flip [txin] into [txout] (4 s,t pairs), and derive output
 * placement/dimensions for HINT_CROP against the source storage size.
 * Always requests 3 black frames via [blackframes].
 */
void arcan_vint_applyhint(arcan_vobject* src, enum blitting_hint hint,
	float* txin, float* txout,
	size_t* outx, size_t* outy,
	size_t* outw, size_t* outh, size_t* blackframes)
{
	memcpy(txout, txin, sizeof(float) * 8);

/* the three rotations are cyclic shifts of the four (s,t) pairs,
 * expressed here as an element offset into the source array */
	size_t rofs = 0;
	if (hint & HINT_ROTATE_CW_90)
		rofs = 2;
	else if (hint & HINT_ROTATE_CCW_90)
		rofs = 6;
	else if (hint & HINT_ROTATE_180)
		rofs = 4;

	if (rofs){
		for (size_t i = 0; i < 8; i++)
			txout[i] = txin[(i + rofs) % 8];
	}

/* y-mirror swaps pairs (0,1)<->(6,7) and (2,3)<->(4,5), which is
 * exactly an xor of each element index with 6 */
	if (hint & HINT_YFLIP){
		float tmp[8];
		memcpy(tmp, txout, sizeof(float) * 8);
		for (size_t i = 0; i < 8; i++)
			txout[i] = tmp[i ^ 6];
	}

/* center against the backing store when it is smaller than the
 * output, otherwise pan by the (negated) overshoot */
	if (hint & HINT_CROP){
		ssize_t dw = *outw - src->vstore->w;
		ssize_t dh = *outh - src->vstore->h;

		if (dw < 0)
			*outx = -dw;
		else {
			*outw = src->vstore->w;
			*outx = dw >> 1;
		}

		if (dh < 0)
			*outy = -dh;
		else {
			*outh = src->vstore->h;
			*outy = dh >> 1;
		}
	}
	else
		*outx = *outy = 0;

	*blackframes = 3;
}
555 
/*
 * Draw (or erase) the software mouse cursor. Erase restores the area
 * under the previously drawn cursor by sampling the corresponding
 * region out of the world store; draw blits the cursor vstore at the
 * current cursor position with forced blending.
 */
void arcan_vint_drawcursor(bool erase)
{
	if (!arcan_video_display.cursor.vstore)
		return;

	float txmatr[8];
	float* txcos = arcan_video_display.cursor_txcos;

/*
 * flip internal cursor position to last drawn cursor position
 */
	if (!erase){
		arcan_video_display.cursor.ox = arcan_video_display.cursor.x;
		arcan_video_display.cursor.oy = arcan_video_display.cursor.y;
	}

	int x1 = arcan_video_display.cursor.ox;
	int y1 = arcan_video_display.cursor.oy;
	int x2 = x1 + arcan_video_display.cursor.w;
	int y2 = y1 + arcan_video_display.cursor.h;
	struct monitor_mode mode = platform_video_dimensions();

	if (erase){
/* build texture coordinates that pick the cursor-sized region out of
 * the world store (t- axis inverted to match GL texture orientation) */
		float s1 = (float)x1 / mode.width;
		float s2 = (float)x2 / mode.width;
		float t1 = 1.0 - ((float)y1 / mode.height);
		float t2 = 1.0 - ((float)y2 / mode.height);

		txmatr[0] = s1;
		txmatr[1] = t1;
		txmatr[2] = s2;
		txmatr[3] = t1;
		txmatr[4] = s2;
		txmatr[5] = t2;
		txmatr[6] = s1;
		txmatr[7] = t2;

		txcos = txmatr;

		agp_blendstate(BLEND_NONE);
		agp_activate_vstore(current_context->world.vstore);
	}
	else{
		agp_blendstate(BLEND_FORCE);
		agp_activate_vstore(arcan_video_display.cursor.vstore);
	}

	float opa = 1.0;
	agp_shader_activate(agp_default_shader(BASIC_2D));
	agp_shader_envv(OBJ_OPACITY, &opa, sizeof(float));
	agp_draw_vobj(x1, y1, x2, y2, txcos, NULL);

	agp_deactivate_vstore();
}
610 
/*
 * Push a new (empty) video context onto the stack: snapshot the
 * current one, suspend its GPU resources, and initialize the new
 * top-of-stack with a fresh object pool and a world object that
 * reuses the previous world vstore. Returns the number of free
 * context slots left, or -1 if the stack is full.
 */
signed arcan_video_pushcontext()
{
	arcan_vobject empty_vobj = {
		.current = {
			.position = {0},
			.opa = 1.0,
			.scale = {.x = 1.0, .y = 1.0, .z = 1.0},
			.rotation.quaternion = default_quat
		},
/* we transfer the vstore over as that will be used as a
 * container for the main display FBO */
		.vstore = current_context->world.vstore
	};

	if (vcontext_ind + 1 == CONTEXT_STACK_LIMIT)
		return -1;

	current_context->last_tickstamp = arcan_video_display.c_ticks;

/* copy everything then manually reset some fields to defaults */
	memcpy(&vcontext_stack[ ++vcontext_ind ], current_context,
		sizeof(struct arcan_video_context));
	deallocate_gl_context(current_context, false, empty_vobj.vstore);
	if (current_context->world.vstore){
		empty_vobj.origw = empty_vobj.vstore->w;
		empty_vobj.origh = empty_vobj.vstore->h;
	}

	current_context = &vcontext_stack[ vcontext_ind ];
	current_context->stdoutp.first = NULL;
	current_context->vitem_ofs = 1;
	current_context->nalive = 0;

/* re-initialize the standard output pipe and the object pool */
	current_context->world = empty_vobj;
	current_context->stdoutp.refreshcnt = 1;
	current_context->stdoutp.refresh = 1;
	current_context->stdoutp.vppcm = current_context->stdoutp.hppcm = 28;
	current_context->stdoutp.color = &current_context->world;
	current_context->stdoutp.max_order = 65536;
	current_context->vitem_limit = arcan_video_display.default_vitemlim;
	current_context->vitems_pool = arcan_alloc_mem(
		sizeof(struct arcan_vobject) * current_context->vitem_limit,
		ARCAN_MEM_VSTRUCT, ARCAN_MEM_BZERO, ARCAN_MEMALIGN_NATURAL
	);

	current_context->rtargets[0].first = NULL;

/* propagate persistent flagged objects upwards */
	push_transfer_persists(
		&vcontext_stack[ vcontext_ind - 1], current_context);
	FLAG_DIRTY(NULL);

	return arcan_video_nfreecontexts();
}
665 
/*
 * Crash/reset recovery: walk the entire context stack, salvage every
 * frameserver-backed object (its vstore, feed function and metadata),
 * optionally pop/flush all contexts, then re-create the survivors in
 * the fresh context and hand each new id to [adopt]. [saved] and
 * [truncated] report how many objects were kept vs dropped for lack
 * of space.
 */
void arcan_video_recoverexternal(bool pop, int* saved,
	int* truncated, recovery_adoptfun adopt, void* tag)
{
	unsigned lastctxa, lastctxc;
	size_t n_ext = 0;

	*saved = 0;
	*truncated = 0;

/* pass, count contexts and disable rendertarget proxies */
	for (size_t i = 0; i <= vcontext_ind; i++){
		struct arcan_video_context* ctx = &vcontext_stack[i];

		for (size_t j = 1; j < ctx->vitem_limit; j++){
			if (FL_TEST(&(ctx->vitems_pool[j]), FL_INUSE)){
				if (ctx->vitems_pool[j].feed.state.tag == ARCAN_TAG_FRAMESERV)
					n_ext++;
			}
		}

		for (size_t j = 0; j < ctx->n_rtargets; j++){
			agp_rendertarget_proxy(ctx->rtargets[j].art, NULL, 0);
		}
	}

/* stack VLA sized by the count above (+1 so zero still is valid) */
	struct {
		struct agp_vstore* gl_store;
		char* tracetag;
		ffunc_ind ffunc;
		vfunc_state state;
		int origw, origh;
		int zv;
	} alim[n_ext+1];

	arcan_aobj_id audbuf[n_ext+1];

	if (n_ext == 0)
		goto clense;

/* clamp number of salvaged objects, save space for WORLDID
 * and if necessary, increase the size for new contexts */
	if (n_ext >= VITEM_CONTEXT_LIMIT - 1)
		n_ext = VITEM_CONTEXT_LIMIT - 1;

	if (n_ext > arcan_video_display.default_vitemlim)
		arcan_video_display.default_vitemlim = n_ext + 1;

/* pass 2, salvage remains */
	int s_ofs = 0;

	for (size_t i = 0; i <= vcontext_ind; i++){
		struct arcan_video_context* ctx = &vcontext_stack[i];

/* only care about frameservers */
		for (size_t j = 1; j < ctx->vitem_limit; j++){
			if (!FL_TEST(&ctx->vitems_pool[j], FL_INUSE) ||
				ctx->vitems_pool[j].feed.state.tag != ARCAN_TAG_FRAMESERV)
				continue;

			arcan_vobject* cobj = &ctx->vitems_pool[j];

/* some feedfunctions are dangerous to try and save */
			if (cobj->feed.ffunc == FFUNC_SOCKVER ||
				cobj->feed.ffunc == FFUNC_SOCKPOLL)
				continue;

/* NOTE(review): fsrv is dereferenced below without a NULL check --
 * presumably a FRAMESERV- tagged object always has a live ptr here,
 * verify against the teardown paths */
			arcan_frameserver* fsrv = cobj->feed.state.ptr;

/* and some might want to opt-out of the whole thing */
			if (fsrv->flags.no_adopt)
				continue;

/* only liberate if we have enough space left */
			if (s_ofs < n_ext){
				alim[s_ofs].state = cobj->feed.state;
				alim[s_ofs].ffunc = cobj->feed.ffunc;
				alim[s_ofs].gl_store = cobj->vstore;
				alim[s_ofs].origw = cobj->origw;
				alim[s_ofs].origh = cobj->origh;
				alim[s_ofs].zv = i + 1;
				alim[s_ofs].tracetag = cobj->tracetag ? strdup(cobj->tracetag) : NULL;

				audbuf[s_ofs] = fsrv->aid;

/* disassociate with cobj (when killed in pop, free wont be called),
 * and increase refcount on storage (won't be killed in pop) */
				cobj->vstore->refcount++;
				cobj->feed.state.tag = ARCAN_TAG_NONE;
				cobj->feed.ffunc = FFUNC_FATAL;
				cobj->feed.state.ptr = NULL;

				s_ofs++;
			}
			else
				(*truncated)++;
		}
	}

/* pop them all, will also create a new fresh
 * context with at least enough space */
clense:
	if (pop){
		lastctxc = arcan_video_popcontext();

		while ( lastctxc != (lastctxa = arcan_video_popcontext()))
			lastctxc = lastctxa;
	}

	if (n_ext == 0)
		return;

/* pass 3, setup new world. a big note here: since we adopt and get a new
 * cellid, internally tracked relations (subsegments tracking parents for
 * instance) will point to an old and broken ID or, even worse, frameservers in
 * different context levels being merged down and subsegments now referring to
 * the wrong parent. This need to be fixed by the FFUNC_ADOPT */
	for (size_t i = 0; i < s_ofs; i++){
		arcan_vobj_id did;
		arcan_vobject* vobj = new_vobject(&did, current_context);
		vobj->vstore = alim[i].gl_store;
		vobj->feed.state = alim[i].state;
		vobj->feed.ffunc = alim[i].ffunc;
		vobj->origw = alim[i].origw;
		vobj->origh = alim[i].origh;
/*		vobj->order = alim[i].zv;
		vobj->blendmode = BLEND_NORMAL; */
		vobj->tracetag = alim[i].tracetag;

/* since the feed function may keep a track of its parent (some do)
 * we also need to support the adopt call */
		arcan_vint_attachobject(did);
		arcan_ffunc_lookup(vobj->feed.ffunc)(FFUNC_ADOPT,
			0, 0, 0, 0, 0, vobj->feed.state, vobj->cellid);

		(*saved)++;
		if (adopt)
			adopt(did, tag);
	}

/* drop audio sources / queued events that belonged to the old world */
	arcan_audio_purge(audbuf, s_ofs);
	arcan_event_purge();
}
808 
arcan_video_findstate(enum arcan_vobj_tags tag,void * ptr)809 arcan_vobj_id arcan_video_findstate(enum arcan_vobj_tags tag, void* ptr)
810 {
811 	for (size_t i = 1; i < current_context->vitem_limit; i++){
812 	if (FL_TEST(&current_context->vitems_pool[i], FL_INUSE)){
813 		arcan_vobject* vobj = &current_context->vitems_pool[i];
814 		if (vobj->feed.state.tag == tag && vobj->feed.state.ptr == ptr)
815 			return i;
816 	}
817 	}
818 
819 	return ARCAN_EID;
820 }
821 
822 /*
823  * the first approach to the _extpop etc. was to create a separate FBO, a vid
824  * in the current context and a view in the next context then run a separate
825  * rendertarget and readback the FBO into a texture.  Now we reuse the
826  * screenshot function into a buffer, use that buffer to create a raw image and
827  * voilà.
828  */
/*
 * Pop the video context like arcan_video_popcontext, but first take a
 * screenshot of the current output and, on success, wrap the buffer
 * in a raw image object in the revealed context with *dst set to the
 * new id (untouched on capture failure). Returns the number of free
 * context slots left.
 */
unsigned arcan_video_extpopcontext(arcan_vobj_id* dst)
{
	av_pixel* dstbuf;
	size_t dsz;

	FLAG_DIRTY(NULL);

/* force a refresh pass so the capture reflects current state */
	arcan_vint_refresh(0.0, &dsz);

/* pass &dstbuf directly, matching sibling arcan_video_extpushcontext
 * (the old (void*) cast was redundant) */
	bool ss = arcan_video_screenshot(&dstbuf, &dsz) == ARCAN_OK;
	int rv = arcan_video_popcontext();

	if (ss){
		struct monitor_mode mode = platform_video_dimensions();
		int w = mode.width;
		int h = mode.height;

		img_cons cons = {.w = w, .h = h, .bpp = sizeof(av_pixel)};

/* on success, ownership of dstbuf transfers to the new object */
		*dst = arcan_video_rawobject(dstbuf, cons, w, h, 1);

		if (*dst == ARCAN_EID){
			arcan_mem_free(dstbuf);
		}
		else{
/* flip y by using texture coordinates */
			arcan_vobject* vobj = arcan_video_getobject(*dst);
			vobj->txcos = arcan_alloc_mem(sizeof(float) * 8,
				ARCAN_MEM_VSTRUCT, 0, ARCAN_MEMALIGN_SIMD);

			arcan_vint_mirrormapping(vobj->txcos, 1.0, 1.0);
		}
	}

	return rv;
}
864 
/*
 * Push the video context like arcan_video_pushcontext, but first take
 * a screenshot of the output and, on success, wrap it in a raw image
 * object in the new context with *dst set to the new id. Returns the
 * push result (free slots, or -1 on a full stack).
 */
signed arcan_video_extpushcontext(arcan_vobj_id* dst)
{
	av_pixel* buf;
	size_t buf_sz;

	FLAG_DIRTY(NULL);
	arcan_vint_refresh(0.0, &buf_sz);
	bool got_ss = arcan_video_screenshot(&buf, &buf_sz) == ARCAN_OK;
	int depth = arcan_video_pushcontext();

	if (!got_ss)
		return depth;

	struct monitor_mode mode = platform_video_dimensions();
	img_cons cons = {
		.w = mode.width,
		.h = mode.height,
		.bpp = sizeof(av_pixel)
	};

	*dst = arcan_video_rawobject(buf, cons, mode.width, mode.height, 1);
	if (*dst == ARCAN_EID){
		arcan_mem_free(buf);
		return depth;
	}

/* flip y through a mirrored texture coordinate set */
	arcan_vobject* vobj = arcan_video_getobject(*dst);
	vobj->txcos = arcan_alloc_mem(sizeof(float) * 8,
		ARCAN_MEM_VSTRUCT, 0, ARCAN_MEMALIGN_SIMD);
	arcan_vint_mirrormapping(vobj->txcos, 1.0, 1.0);

	return depth;
}
897 
arcan_video_popcontext()898 unsigned arcan_video_popcontext()
899 {
900 /* propagate persistent flagged objects downwards */
901 	if (vcontext_ind > 0)
902 		pop_transfer_persists(
903 			current_context, &vcontext_stack[vcontext_ind-1]);
904 
905 	deallocate_gl_context(current_context, true, current_context->world.vstore);
906 
907 	if (vcontext_ind > 0){
908 		vcontext_ind--;
909 		current_context = &vcontext_stack[ vcontext_ind ];
910 	}
911 
912 	reallocate_gl_context(current_context);
913 	FLAG_DIRTY(NULL);
914 
915 	return (CONTEXT_STACK_LIMIT - 1) - vcontext_ind;
916 }
917 
empty_surface()918 static inline surface_properties empty_surface()
919 {
920 	surface_properties res  = {
921 		.rotation.quaternion = default_quat
922 	};
923 	return res;
924 }
925 
/*
 * Linear-probe the context object pool for a free slot, starting at
 * the last allocation offset. *status is set true on success. With
 * [write] set the slot is claimed (alive count, INUSE flag and the
 * next-search offset are updated); otherwise this only probes.
 * NOTE(review): the wrap `i = (i + 1) % (vitem_limit - 1)` keeps i in
 * [0, vitem_limit-2], so the very last pool slot is only reachable
 * through the initial vitem_ofs -- confirm whether that is intended.
 */
static arcan_vobj_id video_allocid(
	bool* status, struct arcan_video_context* ctx, bool write)
{
	unsigned i = ctx->vitem_ofs, c = ctx->vitem_limit;
	*status = false;

	while (c--){
		if (i == 0) /* 0 is protected */
			i = 1;

		if (!FL_TEST(&ctx->vitems_pool[i], FL_INUSE)){
			*status = true;
			if (!write)
				return i;

			ctx->nalive++;
			FL_SET(&ctx->vitems_pool[i], FL_INUSE);
			ctx->vitem_ofs = (ctx->vitem_ofs + 1) >= ctx->vitem_limit ? 1 : i + 1;
			return i;
		}

		i = (i + 1) % (ctx->vitem_limit - 1);
	}

	return ARCAN_EID;
}
952 
arcan_video_resampleobject(arcan_vobj_id vid,arcan_vobj_id did,size_t neww,size_t newh,agp_shader_id shid,bool nocopy)953 arcan_errc arcan_video_resampleobject(arcan_vobj_id vid,
954 	arcan_vobj_id did, size_t neww, size_t newh, agp_shader_id shid,
955 	bool nocopy)
956 {
957 	arcan_vobject* vobj = arcan_video_getobject(vid);
958 	if (!vobj)
959 		return ARCAN_ERRC_NO_SUCH_OBJECT;
960 
961 	if (neww <= 0 || newh <= 0)
962 		return ARCAN_ERRC_OUT_OF_SPACE;
963 
964 	if (vobj->vstore->txmapped != TXSTATE_TEX2D)
965 		return ARCAN_ERRC_UNACCEPTED_STATE;
966 
967 	arcan_vobj_id xfer = arcan_video_nullobject(neww, newh, 0);
968 	if (xfer == ARCAN_EID)
969 		return ARCAN_ERRC_OUT_OF_SPACE;
970 
971 /* dstbuf is now managed by the glstore in xfer */
972 	arcan_video_shareglstore(vid, xfer);
973 	arcan_video_setprogram(xfer, shid);
974 	arcan_video_forceblend(xfer, BLEND_FORCE);
975 
976 	img_cons cons = {.w = neww, .h = newh, .bpp = sizeof(av_pixel)};
977 	arcan_vobj_id dst;
978 	arcan_vobject* dobj;
979 
980 /* if we want to sample into another dstore, some more safeguard checks
981  * are needed so that we don't break other state (textured backend, not
982  * a rendertarget) */
983 	if (did != ARCAN_EID){
984 		arcan_vobject* dvobj = arcan_video_getobject(did);
985 		if (!dvobj){
986 			arcan_video_deleteobject(xfer);
987 			return ARCAN_ERRC_OUT_OF_SPACE;
988 		}
989 
990 		bool is_rtgt = arcan_vint_findrt(dvobj) != NULL;
991 		if (vobj->vstore->txmapped != TXSTATE_TEX2D){
992 			arcan_video_deleteobject(xfer);
993 			return ARCAN_ERRC_UNACCEPTED_STATE;
994 		}
995 
996 /* create another intermediate object to act as our rendertarget as
997  * that is an irreversible state transform which we can't do to did */
998 		arcan_vobj_id rtgt = arcan_video_nullobject(neww, newh, 0);
999 		if (rtgt == ARCAN_EID){
1000 			arcan_video_deleteobject(xfer);
1001 			return ARCAN_ERRC_OUT_OF_SPACE;
1002 		}
1003 
1004 /* and now swap and the rest of the function should behave as normal */
1005 		if (dvobj->vstore->w != neww || dvobj->vstore->h != newh){
1006 			agp_resize_vstore(dvobj->vstore, neww, newh);
1007 		}
1008 		arcan_video_shareglstore(did, rtgt);
1009 		dst = rtgt;
1010 	}
1011 	else{
1012 /* new intermediate storage that the FBO will draw into */
1013 		size_t new_sz = neww * newh * sizeof(av_pixel);
1014 		av_pixel* dstbuf = arcan_alloc_mem(new_sz,
1015 			ARCAN_MEM_VBUFFER, ARCAN_MEM_NONFATAL, ARCAN_MEMALIGN_PAGE);
1016 
1017 		if (!dstbuf){
1018 			arcan_video_deleteobject(xfer);
1019 			return ARCAN_ERRC_OUT_OF_SPACE;
1020 		}
1021 
1022 /* bind that to the destination object */
1023 		dst = arcan_video_rawobject(dstbuf, cons, neww, newh, 1);
1024 		if (dst == ARCAN_EID){
1025 			arcan_mem_free(dstbuf);
1026 			arcan_video_deleteobject(xfer);
1027 			return ARCAN_ERRC_OUT_OF_SPACE;
1028 		}
1029 	}
1030 
1031 /* set up a rendertarget and a proxy transfer object */
1032 	arcan_errc rts = arcan_video_setuprendertarget(
1033 		dst, 0, -1, false, RENDERTARGET_COLOR | RENDERTARGET_RETAIN_ALPHA);
1034 
1035 	if (rts != ARCAN_OK){
1036 		arcan_video_deleteobject(dst);
1037 		arcan_video_deleteobject(xfer);
1038 		return rts;
1039 	}
1040 
1041 /* draw, transfer storages and cleanup, xfer will
1042  * be deleted implicitly when dst cascades */
1043 	arcan_video_attachtorendertarget(dst, xfer, true);
1044 	agp_rendertarget_clearcolor(
1045 		arcan_vint_findrt(arcan_video_getobject(dst))->art, 0.0, 0.0, 0.0, 0.0);
1046 	arcan_video_objectopacity(xfer, 1.0, 0);
1047 	arcan_video_forceupdate(dst, true);
1048 
1049 /* in the call mode where caller specifies destination storage, we don't
1050  * share / override (or update the dimensions of the storage) */
1051 	if (did == ARCAN_EID){
1052 		vobj->origw = neww;
1053 		vobj->origh = newh;
1054 		arcan_video_shareglstore(dst, vid);
1055 		arcan_video_objectscale(vid, 1.0, 1.0, 1.0, 0);
1056 	}
1057 	arcan_video_deleteobject(dst);
1058 
1059 /* readback so we can survive push/pop and restore external */
1060 	if (!nocopy){
1061 		struct agp_vstore* dstore = vobj->vstore;
1062 		agp_readback_synchronous(dstore);
1063 	}
1064 
1065 	return ARCAN_OK;
1066 }
1067 
/*
 * Toggle mipmapping on [vid]. The mipmap state cannot be flipped in-place
 * on an uploaded store, so a CPU-side copy of the raw buffer is taken, the
 * current gl store is dropped and then rebuilt with the new filtermode.
 */
arcan_errc arcan_video_mipmapset(arcan_vobj_id vid, bool enable)
{
	arcan_vobject* vobj = arcan_video_getobject(vid);
	if (!vobj)
		return ARCAN_ERRC_NO_SUCH_OBJECT;

/* only textured stores that still carry a local raw copy can be rebuilt */
	if (vobj->vstore->txmapped != TXSTATE_TEX2D ||
		!vobj->vstore->vinf.text.raw)
		return ARCAN_ERRC_UNACCEPTED_STATE;

/*
 * For both disable and enable, we need to recreate the
 * gl_store and possibly remove the old one.
 */
	void* newbuf = arcan_alloc_fillmem(vobj->vstore->vinf.text.raw,
		vobj->vstore->vinf.text.s_raw,
		ARCAN_MEM_VBUFFER, ARCAN_MEM_NONFATAL,
		ARCAN_MEMALIGN_PAGE
	);

	if (!newbuf)
		return ARCAN_ERRC_OUT_OF_SPACE;

/* NOTE(review): drop_vstore releases a reference, yet vobj->vstore is
 * still written below -- assumes the struct stays valid here; confirm
 * against arcan_vint_drop_vstore's refcount semantics */
	arcan_vint_drop_vstore(vobj->vstore);
	if (enable)
		vobj->vstore->filtermode |= ARCAN_VFILTER_MIPMAP;
	else
		vobj->vstore->filtermode &= ~ARCAN_VFILTER_MIPMAP;

/* hand over the copied buffer and re-upload with the new filtermode */
	vobj->vstore->vinf.text.raw = newbuf;
	agp_update_vstore(vobj->vstore, true);

	return ARCAN_OK;
}
1102 
/*
 * Fill [dst] (8 floats, 4 s/t pairs) with the standard quad texture
 * coordinates, scaled by [st] and [tt]: ul, ur, lr, ll.
 */
void arcan_vint_defaultmapping(float* dst, float st, float tt)
{
	const float txcos[8] = {
		0.0, 0.0,
		st,  0.0,
		st,  tt,
		0.0, tt
	};
	memcpy(dst, txcos, sizeof(txcos));
}
1114 
/*
 * Fill [dst] (8 floats, 4 s/t pairs) with vertically mirrored quad
 * texture coordinates, scaled by [st] and [tt].
 */
void arcan_vint_mirrormapping(float* dst, float st, float tt)
{
	const float txcos[8] = {
		0.0, tt,
		st,  tt,
		st,  0.0,
		0.0, 0.0
	};
	memcpy(dst, txcos, sizeof(txcos));
}
1126 
populate_vstore(struct agp_vstore ** vs)1127 static void populate_vstore(struct agp_vstore** vs)
1128 {
1129 	*vs = arcan_alloc_mem(
1130 		sizeof(struct agp_vstore),
1131 		ARCAN_MEM_VSTRUCT, ARCAN_MEM_BZERO,
1132 		ARCAN_MEMALIGN_NATURAL
1133 	);
1134 
1135 	(*vs)->txmapped   = TXSTATE_TEX2D;
1136 	(*vs)->txu        = arcan_video_display.deftxs;
1137 	(*vs)->txv        = arcan_video_display.deftxt;
1138 	(*vs)->scale      = arcan_video_display.scalemode;
1139 	(*vs)->imageproc  = arcan_video_display.imageproc;
1140 	(*vs)->filtermode = arcan_video_display.filtermode;
1141 
1142 	if (arcan_video_display.mipmap)
1143 		(*vs)->filtermode |= ARCAN_VFILTER_MIPMAP;
1144 
1145 	(*vs)->refcount   = 1;
1146 }
1147 
arcan_vint_nextfree()1148 arcan_vobj_id arcan_vint_nextfree()
1149 {
1150 	bool status;
1151 	arcan_vobj_id id = video_allocid(&status, current_context, false);
1152 	if (!status)
1153 		return ARCAN_EID;
1154 	else
1155 		return id;
1156 }
1157 
1158 /*
1159  * arcan_video_newvobject is used in other parts (3d, renderfun, ...)
1160  * as well, but they wrap to this one as to not expose more of the
1161  * context stack
1162  */
new_vobject(arcan_vobj_id * id,struct arcan_video_context * dctx)1163 static arcan_vobject* new_vobject(
1164 	arcan_vobj_id* id, struct arcan_video_context* dctx)
1165 {
1166 	arcan_vobject* rv = NULL;
1167 
1168 	bool status;
1169 	arcan_vobj_id fid = video_allocid(&status, dctx, true);
1170 
1171 	if (!status)
1172 		return NULL;
1173 
1174 	rv = dctx->vitems_pool + fid;
1175 	rv->order = 0;
1176 	populate_vstore(&rv->vstore);
1177 
1178 	rv->feed.ffunc = FFUNC_FATAL;
1179 	rv->childslots = 0;
1180 	rv->children = NULL;
1181 
1182 	rv->valid_cache = false;
1183 
1184 	rv->blendmode = arcan_video_display.blendmode;
1185 	rv->clip = ARCAN_CLIP_OFF;
1186 
1187 	rv->current.scale.x = 1.0;
1188 	rv->current.scale.y = 1.0;
1189 	rv->current.scale.z = 1.0;
1190 
1191 	rv->current.position.x = 0;
1192 	rv->current.position.y = 0;
1193 	rv->current.position.z = 0;
1194 
1195 	rv->current.rotation.quaternion = default_quat;
1196 
1197 	rv->current.opa = 0.0;
1198 
1199 	rv->cellid = fid;
1200 	assert(rv->cellid > 0);
1201 
1202 	rv->parent = &current_context->world;
1203 	rv->mask = MASK_ORIENTATION | MASK_OPACITY | MASK_POSITION
1204 		| MASK_FRAMESET | MASK_LIVING;
1205 
1206 	if (id != NULL)
1207 		*id = fid;
1208 
1209 	return rv;
1210 }
1211 
arcan_video_newvobject(arcan_vobj_id * id)1212 arcan_vobject* arcan_video_newvobject(arcan_vobj_id* id )
1213 {
1214 	return new_vobject(id, current_context);
1215 }
1216 
arcan_video_getobject(arcan_vobj_id id)1217 arcan_vobject* arcan_video_getobject(arcan_vobj_id id)
1218 {
1219 	arcan_vobject* rc = NULL;
1220 
1221 	if (id > 0 && id < current_context->vitem_limit &&
1222 		FL_TEST(&current_context->vitems_pool[id], FL_INUSE))
1223 		rc = current_context->vitems_pool + id;
1224 	else
1225 		if (id == ARCAN_VIDEO_WORLDID){
1226 			rc = &current_context->world;
1227 		}
1228 
1229 	return rc;
1230 }
1231 
/*
 * Remove [src] from the pipeline of [dst]. Returns true if [src] was found
 * and unlinked, false if it was not attached there. Also clears a possible
 * camtag reference, follows link-chains and updates the external
 * attachment refcounts on both sides.
 */
static bool detach_fromtarget(struct rendertarget* dst, arcan_vobject* src)
{
	arcan_vobject_litem* torem;
	assert(src);

/* already detached? */
	if (!dst){
		return false;
	}

	if (dst->camtag == src->cellid)
		dst->camtag = ARCAN_EID;

/* chain onwards if dst refers to a link target */
	if (dst->link){
		return detach_fromtarget(dst->link, src);
	}

/* or empty set */
	if (!dst->first)
		return false;

/* FIX: a second, identical camtag check used to sit here -- it was a
 * no-op duplicate of the one above and has been removed */

/* find it */
	torem = dst->first;
	while(torem){
		if (torem->elem == src)
			break;

		torem = torem->next;
	}
	if (!torem)
		return false;

/* (1.) remove first */
	if (dst->first == torem){
		dst->first = torem->next;

/* only one element? */
		if (dst->first){
			dst->first->previous = NULL;
		}
	}
/* (2.) remove last */
	else if (torem->next == NULL){
		assert(torem->previous);
		torem->previous->next = NULL;
	}
/* (3.) remove arbitrary */
	else {
		torem->next->previous = torem->previous;
		torem->previous->next = torem->next;
	}

/* (4.) mark as something easy to find in dumps */
	torem->elem = (arcan_vobject*) 0xfeedface;

/* cleanup torem */
	arcan_mem_free(torem);

	if (src->owner == dst)
		src->owner = NULL;

	if (dst->color && dst != &current_context->stdoutp){
		dst->color->extrefc.attachments--;
		src->extrefc.attachments--;

		trace("(detach) (%ld:%s) removed from rendertarget:(%ld:%s),"
			"left: %d, attached to: %d\n", src->cellid, video_tracetag(src),
			dst->color ? dst->color->cellid : -1, video_tracetag(dst->color),
			dst->color->extrefc.attachments, src->extrefc.attachments);

		if (dst->color->extrefc.attachments < 0){
			arcan_warning(
				"[bug] attach-count (%d) < 0", dst->color->extrefc.attachments);
		}
	} else {
		src->extrefc.attachments--;
		trace("(detach) (%ld:%s) removed from stdout, attached to: %d\n",
		src->cellid, video_tracetag(src), src->extrefc.attachments);
	}

	FLAG_DIRTY(NULL);
	return true;
}
1319 
/*
 * If [src] carries rasterized text (TEXT or TEXTARRAY storage) and its
 * stored density differs from that of [rtgt] by more than EPSILON,
 * re-render the saved format string at the target's density. All other
 * storage types are left untouched.
 */
void arcan_vint_reraster(arcan_vobject* src, struct rendertarget* rtgt)
{
	struct agp_vstore* vs = src->vstore;

/* unless the storage is eligible and the density is sufficiently different */
	if (!
		((vs->txmapped && (vs->vinf.text.kind ==
		STORAGE_TEXT || vs->vinf.text.kind == STORAGE_TEXTARRAY)) &&
		((fabs(vs->vinf.text.vppcm - rtgt->vppcm) > EPSILON ||
		 fabs(vs->vinf.text.hppcm - rtgt->hppcm) > EPSILON)))
	)
		return;

/*  in update sourcedescr we guarantee that any vinf that come here with
 *  the TEXT | TEXTARRAY storage type will have a copy of the format string
 *  that led to its creation. This allows us to just reraster into that */
	size_t dw, dh, maxw, maxh;
	uint32_t dsz;
	if (vs->vinf.text.kind == STORAGE_TEXT)
		arcan_renderfun_renderfmtstr(
			vs->vinf.text.source, src->cellid,
			false, NULL, NULL, &dw, &dh, &dsz, &maxw, &maxh, false
		);
	else {
		arcan_renderfun_renderfmtstr_extended(
			(const char**) vs->vinf.text.source_arr, src->cellid,
			false, NULL, NULL, &dw, &dh, &dsz, &maxw, &maxh, false
		);
	}
}
1350 
/*
 * Insert [src] into the render list of [dst], keeping the list sorted by
 * ascending draw order. Follows link-targets, adopts orphaned objects and
 * bumps the external attachment refcounts on both sides.
 */
static void attach_object(struct rendertarget* dst, arcan_vobject* src)
{
/* linked rendertargets share the pipeline of their link target */
	if (dst->link)
		return attach_object(dst->link, src);

	arcan_vobject_litem* new_litem =
		arcan_alloc_mem(sizeof *new_litem,
			ARCAN_MEM_VSTRUCT, 0, ARCAN_MEMALIGN_NATURAL);

	new_litem->next = new_litem->previous = NULL;
	new_litem->elem = src;

/* (pre) if orphaned, assign */
	if (src->owner == NULL){
		src->owner = dst;
	}

/* 2. insert first into empty? */
	if (!dst->first)
		dst->first = new_litem;
	else
/* 3. insert first with n >= 1 */
	if (dst->first->elem->order > src->order){
		new_litem->next = dst->first;
		dst->first = new_litem;
		new_litem->next->previous = new_litem;
	}
/* 4. insert last or arbitrary */
	else {
		bool last;
		arcan_vobject_litem* ipoint = dst->first;

/* 5. scan for insertion point */
		do
			last = (ipoint->elem->order <= src->order);
		while (last && ipoint->next && (ipoint = ipoint->next));

/* 6. insert last? */
		if (last){
			new_litem->previous = ipoint;
			ipoint->next = new_litem;
		}

		else {
/* 7. insert arbitrary */
			ipoint->previous->next = new_litem;
			new_litem->previous = ipoint->previous;
			ipoint->previous = new_litem;
			new_litem->next = ipoint;
		}
	}

	FLAG_DIRTY(src);
	if (dst->color){
		src->extrefc.attachments++;
		dst->color->extrefc.attachments++;
		trace("(attach) (%d:%s) attached to rendertarget:(%ld:%s), "
			"src-count: %d, dst-count: %d\n", src->cellid, video_tracetag(src),
			dst->color ? dst->color->cellid : -1,
			dst->color ? video_tracetag(dst->color) : "(stdout)",
			src->extrefc.attachments, dst->color->extrefc.attachments);
	} else {
		src->extrefc.attachments++;
		trace("(attach) (%d:%s) attached to stdout, count: %d\n", src->cellid,
		video_tracetag(src), src->extrefc.attachments);
	}

/* FIX: removed the unused local 'vs' (src->vstore) that was never read.
 * IF the new attachment point has a different density than the previous,
 * AND the source is of a vector source, RERASTER to match the new target.
 * NOTE(review): the density is taken from the context's current
 * attachment rather than from [dst] -- confirm this is intended when the
 * two differ. */
	struct rendertarget* rtgt = current_context->attachment ?
		current_context->attachment : &current_context->stdoutp;

	arcan_vint_reraster(src, rtgt);
}
1427 
arcan_vint_attachobject(arcan_vobj_id id)1428 arcan_errc arcan_vint_attachobject(arcan_vobj_id id)
1429 {
1430 	arcan_vobject* src = arcan_video_getobject(id);
1431 
1432 	if (!src)
1433 		return ARCAN_ERRC_BAD_RESOURCE;
1434 
1435 	struct rendertarget* rtgt = current_context->attachment ?
1436 		current_context->attachment : &current_context->stdoutp;
1437 
1438 	if (rtgt == src->owner)
1439 		return ARCAN_OK;
1440 
1441 /* make sure that there isn't already one attached */
1442 	trace("(attach-eval-detach)\n");
1443 	if (src->extrefc.attachments)
1444 		detach_fromtarget(src->owner, src);
1445 
1446 	trace("(attach-eval-attach)\n");
1447 	attach_object(rtgt, src);
1448 	trace("(attach-eval-done)\n");
1449 	FLAG_DIRTY(src);
1450 
1451 	return ARCAN_OK;
1452 }
1453 
arcan_vint_dropshape(arcan_vobject * vobj)1454 arcan_errc arcan_vint_dropshape(arcan_vobject* vobj)
1455 {
1456 	if (!vobj->shape)
1457 		return ARCAN_OK;
1458 
1459 	agp_drop_mesh(vobj->shape);
1460 	return ARCAN_OK;
1461 }
1462 
1463 /* run through the chain and delete all occurences at ofs */
/* zero the [ofs, ofs+size) byte slice in every link of the chain */
static void swipe_chain(surface_transform* base, unsigned ofs, unsigned size)
{
	for (surface_transform* cur = base; cur; cur = cur->next)
		memset((char*)cur + ofs, 0, size);
}
1471 
1472 /* copy a transform and at the same time, compact it into
1473  * a better sized buffer */
/*
 * Deep-copy a transform chain, allocating one exactly-sized node per link.
 * NULL input yields NULL. Ownership of the copy goes to the caller.
 */
static surface_transform* dup_chain(surface_transform* base)
{
	if (!base)
		return NULL;

	surface_transform* head = arcan_alloc_mem(sizeof(surface_transform),
		ARCAN_MEM_VSTRUCT, 0, ARCAN_MEMALIGN_NATURAL);

	surface_transform* dst = head;
	for (surface_transform* src = base;; src = src->next){
		memcpy(dst, src, sizeof(surface_transform));

/* last link: terminate instead of chaining a fresh allocation */
		if (!src->next){
			dst->next = NULL;
			break;
		}

		dst->next = arcan_alloc_mem(sizeof(surface_transform),
			ARCAN_MEM_VSTRUCT, 0, ARCAN_MEMALIGN_NATURAL);
		dst = dst->next;
	}

	return head;
}
1500 
arcan_video_inheritorder(arcan_vobj_id id,bool val)1501 arcan_errc arcan_video_inheritorder(arcan_vobj_id id, bool val)
1502 {
1503 	arcan_errc rv = ARCAN_ERRC_NO_SUCH_OBJECT;
1504 	arcan_vobject* vobj = arcan_video_getobject(id);
1505 
1506 	if (vobj && id != ARCAN_VIDEO_WORLDID && vobj->order >= 0){
1507 		rv = ARCAN_OK;
1508 		if (val)
1509 			FL_SET(vobj, FL_ORDOFS);
1510 		else
1511 			FL_CLEAR(vobj, FL_ORDOFS);
1512 		update_zv(vobj, vobj->parent->order);
1513 	}
1514 
1515 	return rv;
1516 }
1517 
/*
 * Read the transform mask of [id]; missing objects and non-positive ids
 * yield an empty mask.
 */
enum arcan_transform_mask arcan_video_getmask(arcan_vobj_id id)
{
	arcan_vobject* vobj = arcan_video_getobject(id);
	return (vobj && id > 0) ? vobj->mask : 0;
}
1528 
1529 
/* debug tracetag of [id]; placeholder string when the object is missing */
const char* const arcan_video_readtag(arcan_vobj_id id)
{
	arcan_vobject* vobj = arcan_video_getobject(id);
	if (!vobj)
		return "(no tag)";

	return vobj->tracetag;
}
1535 
arcan_video_transformmask(arcan_vobj_id id,enum arcan_transform_mask mask)1536 arcan_errc arcan_video_transformmask(arcan_vobj_id id,
1537 	enum arcan_transform_mask mask)
1538 {
1539 	arcan_errc rv = ARCAN_ERRC_NO_SUCH_OBJECT;
1540 	arcan_vobject* vobj = arcan_video_getobject(id);
1541 
1542 	if (vobj && id > FL_INUSE){
1543 		vobj->mask = mask;
1544 		rv = ARCAN_OK;
1545 	}
1546 
1547 	return rv;
1548 }
1549 
arcan_video_linkobjs(arcan_vobj_id srcid,arcan_vobj_id parentid,enum arcan_transform_mask mask,enum parent_anchor anchorp,enum parent_scale scalem)1550 arcan_errc arcan_video_linkobjs(arcan_vobj_id srcid,
1551 	arcan_vobj_id parentid, enum arcan_transform_mask mask,
1552 	enum parent_anchor anchorp, enum parent_scale scalem)
1553 {
1554 	arcan_vobject* src = arcan_video_getobject(srcid);
1555 	arcan_vobject* dst = arcan_video_getobject(parentid);
1556 
1557 /* link to self always means link to world */
1558 	if (srcid == parentid || parentid == 0)
1559 		dst = &current_context->world;
1560 
1561 	if (!src || !dst)
1562 		return ARCAN_ERRC_NO_SUCH_OBJECT;
1563 
1564 	arcan_vobject* current = dst;
1565 
1566 /* traverse destination and make sure we don't create cycles */
1567 	while (current){
1568 		if (current->parent == src)
1569 			return ARCAN_ERRC_CLONE_NOT_PERMITTED;
1570 		else
1571 			current = current->parent;
1572 	}
1573 
1574 /* update anchor, mask and scale */
1575 	src->p_anchor = anchorp;
1576 	src->mask = mask;
1577 	src->p_scale = scalem;
1578 	src->valid_cache = false;
1579 
1580 /* already linked to dst? do nothing */
1581 		if (src->parent == dst)
1582 			return ARCAN_OK;
1583 
1584 /* otherwise, first decrement parent counter */
1585 		else if (src->parent != &current_context->world)
1586 			dropchild(src->parent, src);
1587 
1588 /* create link connection, and update counter */
1589 	if (dst != &current_context->world){
1590 		addchild(dst, src);
1591 		trace("(link) (%d:%s) linked to (%d:%s), count: %d\n",
1592 			src->cellid, src->tracetag == NULL ? "(unknown)" : src->tracetag,
1593 			dst->cellid, dst->tracetag ? "(unknown)" : dst->tracetag,
1594 			src->parent->extrefc.links);
1595 	}
1596 
1597 	if (FL_TEST(src, FL_ORDOFS))
1598 		update_zv(src, src->parent->order);
1599 
1600 /* reset all transformations except blend as they don't make sense until
1601  * redefined relative to their new parent. Blend is a special case in that
1602  * [fade + switch ownership] is often a desired operation */
1603 	swipe_chain(src->transform, offsetof(surface_transform, move),
1604 		sizeof(struct transf_move  ));
1605 	swipe_chain(src->transform, offsetof(surface_transform, scale),
1606 		sizeof(struct transf_scale ));
1607 	swipe_chain(src->transform, offsetof(surface_transform, rotate),
1608 		sizeof(struct transf_rotate));
1609 
1610 	FLAG_DIRTY(NULL);
1611 
1612 	return ARCAN_OK;
1613 }
1614 
/*
 * Bring up the video subsystem: one-time state (semaphore, default
 * texture coordinates, font cache, config flags), then the video platform,
 * AGP, the current context's object pool and the world/stdout pipeline.
 * Returns ARCAN_ERRC_BADVMODE when the platform refuses the mode.
 */
arcan_errc arcan_video_init(uint16_t width, uint16_t height, uint8_t bpp,
	bool fs, bool frames, bool conservative, const char* caption)
{
	static bool firstinit = true;

/* might be called multiple times due to longjmp recover etc. */
	if (firstinit){
		if (-1 == arcan_sem_init(&asynchsynch, ASYNCH_CONCURRENT_THREADS)){
			arcan_warning("video_init couldn't create synchronization handle\n");
		}

		arcan_vint_defaultmapping(arcan_video_display.default_txcos, 1.0, 1.0);
		arcan_vint_defaultmapping(arcan_video_display.cursor_txcos, 1.0, 1.0);
		arcan_vint_mirrormapping(arcan_video_display.mirror_txcos, 1.0, 1.0);
		arcan_video_reset_fontcache();
		firstinit = false;

/* though it should not be the default, the option to turn of the
 * 'block rendertarget drawing if not dirty' optimization may be
 * useful for some cases and for troubleshooting */
		uintptr_t tag;
		cfg_lookup_fun get_config = platform_config_lookup(&tag);
		if (get_config("video_ignore_dirty", 0, NULL, tag)){
			arcan_video_display.ignore_dirty = SIZE_MAX >> 1;
		}
	}

	if (!platform_video_init(width, height, bpp, fs, frames, caption)){
		arcan_warning("platform_video_init() failed.\n");
		return ARCAN_ERRC_BADVMODE;
	}

	agp_init();

	arcan_video_display.in_video = true;
	arcan_video_display.conservative = conservative;

/* allocate the object pool for the current context */
	current_context->world.current.scale.x = 1.0;
	current_context->world.current.scale.y = 1.0;
	current_context->vitem_limit = arcan_video_display.default_vitemlim;
	current_context->vitems_pool = arcan_alloc_mem(
		sizeof(struct arcan_vobject) * current_context->vitem_limit,
		ARCAN_MEM_VSTRUCT, ARCAN_MEM_BZERO, ARCAN_MEMALIGN_NATURAL);

/* size the canvas from whatever the platform reports */
	struct monitor_mode mode = platform_video_dimensions();
	if (mode.width == 0 || mode.height == 0){
		arcan_fatal("(video) platform error, invalid default mode\n");
	}
	arcan_video_resize_canvas(mode.width, mode.height);

/* default state for the standard output pipeline */
	identity_matrix(current_context->stdoutp.base);
	current_context->stdoutp.order3d = arcan_video_display.order3d;
	current_context->stdoutp.refreshcnt = 1;
	current_context->stdoutp.refresh = -1;
	current_context->stdoutp.max_order = 65536;
	current_context->stdoutp.shid = agp_default_shader(BASIC_2D);
	current_context->stdoutp.vppcm = current_context->stdoutp.hppcm;

	arcan_renderfun_outputdensity(
		current_context->stdoutp.hppcm, current_context->stdoutp.vppcm);

/*
 * By default, expected video output display matches canvas 1:1,
 * canvas can be explicitly resized and these two matrices will still
 * make the output correct. For multiple- dynamic monitor configurations,
 * things get hairy; the video platform will be expected to
 * map rendertargets / videoobjects to output displays.
 */
	FLAG_DIRTY(NULL);
	return ARCAN_OK;
}
1686 
/*
 * Resize the world canvas to [neww]x[newh]: (re)build or resize the
 * stdout rendertarget backing store and refresh the orthographic
 * projection matrices from the platform-reported output dimensions.
 */
arcan_errc arcan_video_resize_canvas(size_t neww, size_t newh)
{
	struct monitor_mode mode = platform_video_dimensions();

	if (!arcan_video_display.no_stdout){
/* first call (or lost store): create the world store + FBO */
		if (!current_context->world.vstore || !current_context->stdoutp.art){
			populate_vstore(&current_context->world.vstore);
			current_context->world.vstore->filtermode &= ~ARCAN_VFILTER_MIPMAP;
			agp_empty_vstore(current_context->world.vstore, neww, newh);
			current_context->stdoutp.color = &current_context->world;
			current_context->stdoutp.mode = RENDERTARGET_COLOR_DEPTH_STENCIL;
			current_context->stdoutp.art = agp_setup_rendertarget(
				current_context->world.vstore,
				current_context->stdoutp.mode
			);
		}
		else
			agp_resize_rendertarget(current_context->stdoutp.art, neww, newh);
	}

/* projections track the output display size, not the canvas size */
	build_orthographic_matrix(arcan_video_display.window_projection, 0,
		mode.width, mode.height, 0, 0, 1);

	build_orthographic_matrix(arcan_video_display.default_projection, 0,
		mode.width, mode.height, 0, 0, 1);

	memcpy(current_context->stdoutp.projection,
		arcan_video_display.default_projection, sizeof(float) * 16);

	current_context->world.origw = neww;
	current_context->world.origh = newh;

	FLAG_DIRTY(NULL);
	arcan_video_forceupdate(ARCAN_VIDEO_WORLDID, true);

	return ARCAN_OK;
}
1724 
/*
 * Round [k] up to the next power of two (bit-smearing trick); values that
 * already are powers of two map to themselves, 0 wraps back to 0.
 */
static uint16_t nexthigher(uint16_t k)
{
	uint16_t v = k - 1;
	v |= v >> 1;
	v |= v >> 2;
	v |= v >> 4;
	v |= v >> 8;
	return v + 1;
}
1732 
/*
 * Load and decode the image at [fname] into [dst]'s video store, either
 * at native size, at the dimensions in [forced], or stretched to the next
 * power of two depending on the store's scale mode. [asynchsrc] marks
 * that we run on a loader thread: GPU upload and tag-assignment are then
 * deferred to the thread_loader finalization. Throttled by a semaphore
 * so concurrent asynch loads don't thrash.
 */
arcan_errc arcan_vint_getimage(const char* fname, arcan_vobject* dst,
	img_cons forced, bool asynchsrc)
{
/*
 * with asynchsynch, it's likely that we get a storm of requests and we'd
 * likely suffer thrashing, so limit this.  also, look into using
 * pthread_setschedparam and switch to pthreads exclusively
 */
	arcan_sem_wait(asynchsynch);

	size_t inw, inh;

/* try- open */
	data_source inres = arcan_open_resource(fname);
	if (inres.fd == BADFD){
		arcan_sem_post(asynchsynch);
		return ARCAN_ERRC_BAD_RESOURCE;
	}

/* mmap (preferred) or buffer (mmap not working / useful due to alignment) */
	map_region inmem = arcan_map_resource(&inres, false);
	if (inmem.ptr == NULL){
		arcan_sem_post(asynchsynch);
		arcan_release_resource(&inres);
		return ARCAN_ERRC_BAD_RESOURCE;
	}

	struct arcan_img_meta meta = {0};
	uint32_t* ch_imgbuf = NULL;

	arcan_errc rv = arcan_img_decode(fname, inmem.ptr, inmem.sz,
		&ch_imgbuf, &inw, &inh, &meta, dst->vstore->imageproc == IMAGEPROC_FLIPH);

/* source mapping no longer needed once decoded */
	arcan_release_map(inmem);
	arcan_release_resource(&inres);

	if (ARCAN_OK != rv)
		goto done;

/* convert from the decoder's channel layout to the native av_pixel */
	av_pixel* imgbuf = arcan_img_repack(ch_imgbuf, inw, inh);
	if (!imgbuf){
		rv = ARCAN_ERRC_OUT_OF_SPACE;
		goto done;
	}

	uint16_t neww, newh;

/* store this so we can maintain aspect ratios etc. while still
 * possibly aligning to next power of two */
	dst->origw = inw;
	dst->origh = inh;

	neww = inw;
	newh = inh;

/* the thread_loader will take care of converting the asynchsrc
 * to an image once its completely done */
	if (!asynchsrc)
		dst->feed.state.tag = ARCAN_TAG_IMAGE;

/* need to keep the identification string in order to rebuild
 * on a forced push/pop */
	struct agp_vstore* dstframe = dst->vstore;
	dstframe->vinf.text.source = strdup(fname);

	enum arcan_vimage_mode desm = dst->vstore->scale;

/* compressed formats skip the resample/stretch path entirely */
	if (meta.compressed)
		goto push_comp;

/* the user requested specific dimensions, or we are in a mode where
 * we should manually enfore a stretch to the nearest power of two */
	if (desm == ARCAN_VIMAGE_SCALEPOW2){
		forced.w = nexthigher(neww) == neww ? 0 : nexthigher(neww);
		forced.h = nexthigher(newh) == newh ? 0 : nexthigher(newh);
	}

	if (forced.h > 0 && forced.w > 0){
		neww = desm == ARCAN_VIMAGE_SCALEPOW2 ? nexthigher(forced.w) : forced.w;
		newh = desm == ARCAN_VIMAGE_SCALEPOW2 ? nexthigher(forced.h) : forced.h;
		dst->origw = forced.w;
		dst->origh = forced.h;

		dstframe->vinf.text.s_raw = neww * newh * sizeof(av_pixel);
		dstframe->vinf.text.raw = arcan_alloc_mem(dstframe->vinf.text.s_raw,
			ARCAN_MEM_VBUFFER, 0, ARCAN_MEMALIGN_PAGE);

/* scale into the freshly allocated buffer, then drop the decode buffer */
		arcan_renderfun_stretchblit((char*)imgbuf, inw, inh,
			(uint32_t*) dstframe->vinf.text.raw,
			neww, newh, dst->vstore->imageproc == IMAGEPROC_FLIPH);
		arcan_mem_free(imgbuf);
	}
	else {
/* native size: adopt the decode buffer directly */
		neww = inw;
		newh = inh;
		dstframe->vinf.text.raw = imgbuf;
		dstframe->vinf.text.s_raw = inw * inh * sizeof(av_pixel);
	}

	dst->vstore->w = neww;
	dst->vstore->h = newh;

/*
 * for the asynch case, we need to do this separately as we're in a different
 * thread and forcibly assigning the glcontext to another thread is expensive */

push_comp:
	if (!asynchsrc && dst->vstore->txmapped != TXSTATE_OFF)
		agp_update_vstore(dst->vstore, true);

done:
	arcan_sem_post(asynchsynch);
	return rv;
}
1847 
arcan_video_3dorder(enum arcan_order3d order,arcan_vobj_id rt)1848 arcan_errc arcan_video_3dorder(enum arcan_order3d order, arcan_vobj_id rt)
1849 {
1850 	if (rt != ARCAN_EID){
1851 		arcan_vobject* vobj = arcan_video_getobject(rt);
1852 		if (!vobj)
1853 			return ARCAN_ERRC_NO_SUCH_OBJECT;
1854 
1855 		struct rendertarget* rtgt = arcan_vint_findrt(vobj);
1856 		if (!rtgt)
1857 			return ARCAN_ERRC_NO_SUCH_OBJECT;
1858 
1859 		rtgt->order3d = order;
1860 	}
1861 	else
1862 		arcan_video_display.order3d = order;
1863 	return ARCAN_OK;
1864 }
1865 
/* apply the (fx, fy) factor to every queued scale transform on [dst] */
static void rescale_origwh(arcan_vobject* dst, float fx, float fy)
{
	vector factor = build_vect(fx, fy, 1.0);

	for (surface_transform* cur = dst->transform; cur; cur = cur->next){
		cur->scale.startd = mul_vector(cur->scale.startd, factor);
		cur->scale.endd = mul_vector(cur->scale.endd, factor);
	}
}
1877 
arcan_video_framecyclemode(arcan_vobj_id id,int mode)1878 arcan_errc arcan_video_framecyclemode(arcan_vobj_id id, int mode)
1879 {
1880 	arcan_vobject* vobj = arcan_video_getobject(id);
1881 	if (!vobj)
1882 		return ARCAN_ERRC_NO_SUCH_OBJECT;
1883 
1884 	if (!vobj->frameset)
1885 		return ARCAN_ERRC_UNACCEPTED_STATE;
1886 
1887 	vobj->frameset->ctr = vobj->frameset->mctr = abs(mode);
1888 
1889 	return ARCAN_OK;
1890 }
1891 
arcan_video_cursorpos(int newx,int newy,bool absolute)1892 void arcan_video_cursorpos(int newx, int newy, bool absolute)
1893 {
1894 	if (absolute){
1895 		arcan_video_display.cursor.x = newx;
1896 		arcan_video_display.cursor.y = newy;
1897 	}
1898 	else {
1899 		arcan_video_display.cursor.x += newx;
1900 		arcan_video_display.cursor.y += newy;
1901 	}
1902 }
1903 
arcan_video_cursorsize(size_t w,size_t h)1904 void arcan_video_cursorsize(size_t w, size_t h)
1905 {
1906 	arcan_video_display.cursor.w = w;
1907 	arcan_video_display.cursor.h = h;
1908 }
1909 
/*
 * Use the backing store of [src] as the accelerated cursor image,
 * releasing any previously set store. Only textured, non-world objects
 * are accepted; anything else just clears the current cursor store.
 */
void arcan_video_cursorstore(arcan_vobj_id src)
{
/* drop whatever store the cursor currently references */
	struct agp_vstore* prev = arcan_video_display.cursor.vstore;
	if (prev){
		arcan_vint_drop_vstore(prev);
		arcan_video_display.cursor.vstore = NULL;
	}

	arcan_vobject* vobj = arcan_video_getobject(src);
	if (!vobj || src == ARCAN_VIDEO_WORLDID ||
		vobj->vstore->txmapped != TXSTATE_TEX2D)
		return;

/* texture coordinates are managed separately through _display.cursor_txcos */
	vobj->vstore->refcount++;
	arcan_video_display.cursor.vstore = vobj->vstore;
}
1926 
arcan_video_shareglstore(arcan_vobj_id sid,arcan_vobj_id did)1927 arcan_errc arcan_video_shareglstore(arcan_vobj_id sid, arcan_vobj_id did)
1928 {
1929 	arcan_vobject* src = arcan_video_getobject(sid);
1930 	arcan_vobject* dst = arcan_video_getobject(did);
1931 
1932 	if (!src || !dst || src == dst)
1933 		return ARCAN_ERRC_NO_SUCH_OBJECT;
1934 
1935 /* remove the original target store, substitute in our own */
1936 	arcan_vint_drop_vstore(dst->vstore);
1937 
1938 	struct rendertarget* rtgt = arcan_vint_findrt(dst);
1939 
1940 /* if the source is broken, convert dst to null store (color with bad prg) */
1941 	if (src->vstore->txmapped == TXSTATE_OFF ||
1942 		src->vstore->vinf.text.glid == 0 ||
1943 		FL_TEST(src, FL_PRSIST) ||
1944 		FL_TEST(dst, FL_PRSIST)
1945 	){
1946 
1947 /* but leave rendertarget vstore alone */
1948 		if (rtgt){
1949 			return ARCAN_OK;
1950 		}
1951 
1952 		populate_vstore(&src->vstore);
1953 		struct agp_vstore* store = src->vstore;
1954 		store->txmapped = TXSTATE_OFF;
1955 		src->program = 0;
1956 
1957 		FLAG_DIRTY(dst);
1958 		return ARCAN_OK;
1959 	}
1960 
1961 	dst->vstore = src->vstore;
1962 	dst->vstore->refcount++;
1963 
1964 /* customized texture coordinates unless we should use defaults ... */
1965 	if (src->txcos){
1966 		if (!dst->txcos)
1967 			dst->txcos = arcan_alloc_mem(8 * sizeof(float),
1968 				ARCAN_MEM_VSTRUCT, 0, ARCAN_MEMALIGN_SIMD);
1969 		memcpy(dst->txcos, src->txcos, sizeof(float) * 8);
1970 	}
1971 	else if (dst->txcos){
1972 		arcan_mem_free(dst->txcos);
1973 		dst->txcos = NULL;
1974 	}
1975 
1976 /* for rendertarget, we also need to rebuild attachments and so on, the easiest
1977  * approach for this is to drop the FBO entirely and rebuild with the new store
1978  * as the store affects format. */
1979 	if (rtgt){
1980 		agp_drop_rendertarget(rtgt->art);
1981 		agp_setup_rendertarget(rtgt->color->vstore, rtgt->mode);
1982 		arcan_video_forceupdate(did, true);
1983 	}
1984 
1985 	FLAG_DIRTY(dst);
1986 	return ARCAN_OK;
1987 }
1988 
arcan_video_solidcolor(float origw,float origh,uint8_t r,uint8_t g,uint8_t b,unsigned short zv)1989 arcan_vobj_id arcan_video_solidcolor(float origw, float origh,
1990 	uint8_t r, uint8_t g, uint8_t b, unsigned short zv)
1991 {
1992 	arcan_vobj_id rv = ARCAN_EID;
1993 	arcan_vobject* newvobj = arcan_video_newvobject(&rv);
1994 	if (!newvobj)
1995 		return rv;
1996 
1997 	newvobj->vstore->txmapped = TXSTATE_OFF;
1998 	newvobj->vstore->vinf.col.r = (float)r / 255.0f;
1999 	newvobj->vstore->vinf.col.g = (float)g / 255.0f;
2000 	newvobj->vstore->vinf.col.b = (float)b / 255.0f;
2001 
2002 	newvobj->program = agp_default_shader(COLOR_2D);
2003 
2004 	newvobj->origw = origw;
2005 	newvobj->origh = origh;
2006 	newvobj->order = zv;
2007 
2008 	arcan_vint_attachobject(rv);
2009 
2010 	return rv;
2011 }
2012 
2013 /* solid and null are essentially treated the same, the difference being
2014  * there's no program associated in the vstore for the nullobject */
arcan_video_nullobject(float origw,float origh,unsigned short zv)2015 arcan_vobj_id arcan_video_nullobject(float origw,
2016 	float origh, unsigned short zv)
2017 {
2018 	arcan_vobj_id rv =  arcan_video_solidcolor(origw, origh, 0, 0, 0, zv);
2019 	arcan_vobject* vobj = arcan_video_getobject(rv);
2020 	if (vobj)
2021 		vobj->program = 0;
2022 
2023 	return rv;
2024 }
2025 
arcan_video_rawobject(av_pixel * buf,img_cons cons,float origw,float origh,unsigned short zv)2026 arcan_vobj_id arcan_video_rawobject(av_pixel* buf,
2027 	img_cons cons, float origw, float origh, unsigned short zv)
2028 {
2029 	arcan_vobj_id rv = ARCAN_EID;
2030 	size_t bufs = cons.w * cons.h * cons.bpp;
2031 
2032 	if (cons.bpp != sizeof(av_pixel))
2033 		return ARCAN_EID;
2034 
2035 	arcan_vobject* newvobj = arcan_video_newvobject(&rv);
2036 
2037 	if (!newvobj)
2038 		return ARCAN_EID;
2039 
2040 	struct agp_vstore* ds = newvobj->vstore;
2041 
2042 	ds->w = cons.w;
2043 	ds->h = cons.h;
2044 	ds->bpp = cons.bpp;
2045 	ds->vinf.text.s_raw = bufs;
2046 	ds->vinf.text.raw = buf;
2047 	ds->txmapped = TXSTATE_TEX2D;
2048 
2049 	newvobj->origw = origw;
2050 	newvobj->origh = origh;
2051 	newvobj->order = zv;
2052 
2053 	agp_update_vstore(newvobj->vstore, true);
2054 	arcan_vint_attachobject(rv);
2055 
2056 	return rv;
2057 }
2058 
arcan_video_rendertargetdensity(arcan_vobj_id src,float vppcm,float hppcm,bool reraster,bool rescale)2059 arcan_errc arcan_video_rendertargetdensity(
2060 	arcan_vobj_id src, float vppcm, float hppcm, bool reraster, bool rescale)
2061 {
2062 /* sanity checks */
2063 	arcan_vobject* srcobj = arcan_video_getobject(src);
2064 	if (!srcobj)
2065 		return ARCAN_ERRC_NO_SUCH_OBJECT;
2066 
2067 	struct rendertarget* rtgt = arcan_vint_findrt(srcobj);
2068 	if (!rtgt)
2069 		return ARCAN_ERRC_UNACCEPTED_STATE;
2070 
2071 	if (vppcm < EPSILON)
2072 		vppcm = rtgt->vppcm;
2073 
2074 	if (hppcm < EPSILON)
2075 		hppcm = rtgt->hppcm;
2076 
2077 	if (rtgt->vppcm == vppcm && rtgt->hppcm == hppcm)
2078 		return ARCAN_OK;
2079 
2080 /* reflect the new changes */
2081 	float sfx = hppcm / rtgt->hppcm;
2082 	float sfy = vppcm / rtgt->vppcm;
2083 	arcan_renderfun_outputdensity(rtgt->hppcm, rtgt->vppcm);
2084 
2085 	rtgt->vppcm = vppcm;
2086 	rtgt->hppcm = hppcm;
2087 
2088 	struct arcan_vobject_litem* cent = rtgt->first;
2089 	while(cent){
2090 		struct arcan_vobject* vobj = cent->elem;
2091 		if (vobj->owner != rtgt){
2092 			cent = cent->next;
2093 			continue;
2094 		}
2095 
2096 /* for all vobj- that are attached to this rendertarget AND has it as
2097  * primary, check if it is possible to rebuild a raster representation
2098  * with more accurate density */
2099 		if (reraster)
2100 			arcan_vint_reraster(vobj, rtgt);
2101 
2102 		if (rescale){
2103 			float ox = (float)vobj->origw*vobj->current.scale.x;
2104 			float oy = (float)vobj->origh*vobj->current.scale.y;
2105 			rescale_origwh(vobj,
2106 					sfx / vobj->current.scale.x, sfy / vobj->current.scale.y);
2107 			invalidate_cache(vobj);
2108 		}
2109 		cent = cent->next;
2110 	}
2111 
2112 	FLAG_DIRTY(rtgt);
2113 	return ARCAN_OK;
2114 }
2115 
arcan_video_detachfromrendertarget(arcan_vobj_id did,arcan_vobj_id src)2116 arcan_errc arcan_video_detachfromrendertarget(arcan_vobj_id did,
2117 	arcan_vobj_id src)
2118 {
2119 	arcan_vobject* srcobj = arcan_video_getobject(src);
2120 	arcan_vobject* dstobj = arcan_video_getobject(did);
2121 	if (!srcobj || !dstobj)
2122 		return ARCAN_ERRC_NO_SUCH_OBJECT;
2123 
2124 	if (&current_context->stdoutp == srcobj->owner){
2125 		detach_fromtarget(&current_context->stdoutp, srcobj);
2126 		return ARCAN_OK;
2127 	}
2128 
2129 	for (size_t ind = 0; ind < current_context->n_rtargets; ind++){
2130 		if (current_context->rtargets[ind].color == dstobj &&
2131 			srcobj->owner != &current_context->rtargets[ind])
2132 				detach_fromtarget(&current_context->rtargets[ind], srcobj);
2133 	}
2134 
2135 	return ARCAN_OK;
2136 }
2137 
arcan_video_attachtorendertarget(arcan_vobj_id did,arcan_vobj_id src,bool detach)2138 arcan_errc arcan_video_attachtorendertarget(
2139 	arcan_vobj_id did, arcan_vobj_id src, bool detach)
2140 {
2141 	if (src == ARCAN_VIDEO_WORLDID){
2142 		arcan_warning("arcan_video_attachtorendertarget(), WORLDID attach"
2143 			" not directly supported, use a null-surface with "
2144 			"shared storage instead.");
2145 
2146 		return ARCAN_ERRC_UNACCEPTED_STATE;
2147 	}
2148 
2149 /* don't allow to attach to self, that FBO behavior would be undefined
2150  * and don't allow persist attachments as the other object can go out of
2151  * scope */
2152 	arcan_vobject* dstobj = arcan_video_getobject(did);
2153 	arcan_vobject* srcobj = arcan_video_getobject(src);
2154 	if (!dstobj || !srcobj || dstobj == srcobj)
2155 		return ARCAN_ERRC_NO_SUCH_OBJECT;
2156 
2157 	if (FL_TEST(dstobj, FL_PRSIST) || FL_TEST(srcobj, FL_PRSIST))
2158 		return ARCAN_ERRC_UNACCEPTED_STATE;
2159 
2160 	if (current_context->stdoutp.color == dstobj){
2161 		if (srcobj->owner && detach)
2162 			detach_fromtarget(srcobj->owner, srcobj);
2163 
2164 /* try and detach (most likely fail) to make sure that we don't get duplicates*/
2165 		detach_fromtarget(&current_context->stdoutp, srcobj);
2166 		attach_object(&current_context->stdoutp, srcobj);
2167 
2168 		return ARCAN_OK;
2169 	}
2170 
2171 /* linear search for rendertarget matching the destination id */
2172 	for (size_t ind = 0; ind < current_context->n_rtargets; ind++){
2173 		if (current_context->rtargets[ind].color == dstobj){
2174 /* find whatever rendertarget we're already attached to, and detach */
2175 			if (srcobj->owner && detach)
2176 				detach_fromtarget(srcobj->owner, srcobj);
2177 
2178 /* try and detach (most likely fail) to make sure that we don't get duplicates*/
2179 			detach_fromtarget(&current_context->rtargets[ind], srcobj);
2180 			attach_object(&current_context->rtargets[ind], srcobj);
2181 
2182 			return ARCAN_OK;
2183 		}
2184 	}
2185 
2186 	return ARCAN_ERRC_BAD_ARGUMENT;
2187 }
2188 
arcan_video_defaultattachment(arcan_vobj_id src)2189 arcan_errc arcan_video_defaultattachment(arcan_vobj_id src)
2190 {
2191 	if (src == ARCAN_EID)
2192 		return ARCAN_ERRC_BAD_ARGUMENT;
2193 
2194 	arcan_vobject* vobj = arcan_video_getobject(src);
2195 	if (!vobj)
2196 		return ARCAN_ERRC_NO_SUCH_OBJECT;
2197 
2198 	struct rendertarget* rtgt = arcan_vint_findrt(vobj);
2199 	if (!rtgt)
2200 		return ARCAN_ERRC_UNACCEPTED_STATE;
2201 
2202 	current_context->attachment = rtgt;
2203 	return ARCAN_OK;
2204 }
2205 
arcan_video_currentattachment()2206 arcan_vobj_id arcan_video_currentattachment()
2207 {
2208 	struct rendertarget* rtgt = current_context->attachment;
2209 	if (!rtgt || !rtgt->color)
2210 		return ARCAN_VIDEO_WORLDID;
2211 
2212 	return rtgt->color->cellid;
2213 }
2214 
arcan_video_alterreadback(arcan_vobj_id did,int readback)2215 arcan_errc arcan_video_alterreadback(arcan_vobj_id did, int readback)
2216 {
2217 	if (did == ARCAN_VIDEO_WORLDID){
2218 		current_context->stdoutp.readback = readback;
2219 		return ARCAN_OK;
2220 	}
2221 
2222 	arcan_vobject* vobj = arcan_video_getobject(did);
2223 	if (!vobj)
2224 		return ARCAN_ERRC_NO_SUCH_OBJECT;
2225 
2226 	struct rendertarget* rtgt = arcan_vint_findrt(vobj);
2227 	if (!rtgt)
2228 		return ARCAN_ERRC_UNACCEPTED_STATE;
2229 
2230 	rtgt->readback = readback;
2231 	rtgt->readcnt = abs(readback);
2232 	return ARCAN_OK;
2233 }
2234 
arcan_video_rendertarget_range(arcan_vobj_id did,ssize_t min,ssize_t max)2235 arcan_errc arcan_video_rendertarget_range(
2236 	arcan_vobj_id did, ssize_t min, ssize_t max)
2237 {
2238 	struct rendertarget* rtgt;
2239 
2240 	if (did == ARCAN_VIDEO_WORLDID)
2241 		rtgt = &current_context->stdoutp;
2242 	else {
2243 		arcan_vobject* vobj = arcan_video_getobject(did);
2244 		if (!vobj)
2245 			return ARCAN_ERRC_NO_SUCH_OBJECT;
2246 
2247 		rtgt = arcan_vint_findrt(vobj);
2248 	}
2249 
2250 	if (!rtgt)
2251 		return ARCAN_ERRC_NO_SUCH_OBJECT;
2252 
2253 	if (min < 0 || max < min){
2254 		min = 0;
2255 		max = 65536;
2256 	}
2257 
2258 	rtgt->min_order = min;
2259 	rtgt->max_order = max;
2260 
2261 	return ARCAN_OK;
2262 }
2263 
arcan_video_rendertarget_setnoclear(arcan_vobj_id did,bool value)2264 arcan_errc arcan_video_rendertarget_setnoclear(arcan_vobj_id did, bool value)
2265 {
2266 	struct rendertarget* rtgt;
2267 
2268 	if (did == ARCAN_VIDEO_WORLDID)
2269 		rtgt = &current_context->stdoutp;
2270 	else {
2271 		arcan_vobject* vobj = arcan_video_getobject(did);
2272 		if (!vobj)
2273 			return ARCAN_ERRC_NO_SUCH_OBJECT;
2274 
2275 		rtgt = arcan_vint_findrt(vobj);
2276 	}
2277 
2278 	if (!rtgt)
2279 		return ARCAN_ERRC_NO_SUCH_OBJECT;
2280 
2281 	if (value)
2282 		FL_SET(rtgt, TGTFL_NOCLEAR);
2283 	else
2284 		FL_CLEAR(rtgt, TGTFL_NOCLEAR);
2285 
2286 	return ARCAN_OK;
2287 }
2288 
arcan_video_linkrendertarget(arcan_vobj_id did,arcan_vobj_id tgt_id,int refresh,bool scale,enum rendertarget_mode format)2289 arcan_errc arcan_video_linkrendertarget(arcan_vobj_id did,
2290 	arcan_vobj_id tgt_id, int refresh, bool scale, enum rendertarget_mode format)
2291 {
2292 	arcan_vobject* vobj = arcan_video_getobject(tgt_id);
2293 	if (!vobj)
2294 		return ARCAN_ERRC_NO_SUCH_OBJECT;
2295 
2296 	struct rendertarget* tgt = arcan_vint_findrt(vobj);
2297 	if (!tgt)
2298 		return ARCAN_ERRC_BAD_ARGUMENT;
2299 
2300 	arcan_errc rv = arcan_video_setuprendertarget(did, 0, refresh, scale, format);
2301 	if (rv != ARCAN_OK)
2302 		return rv;
2303 
2304 	vobj = arcan_video_getobject(did);
2305 	struct rendertarget* newtgt = arcan_vint_findrt(vobj);
2306 	newtgt->link = tgt;
2307 	return ARCAN_OK;
2308 }
2309 
arcan_video_setuprendertarget(arcan_vobj_id did,int readback,int refresh,bool scale,enum rendertarget_mode format)2310 arcan_errc arcan_video_setuprendertarget(arcan_vobj_id did,
2311 	int readback, int refresh, bool scale, enum rendertarget_mode format)
2312 {
2313 	arcan_errc rv = ARCAN_ERRC_NO_SUCH_OBJECT;
2314 	arcan_vobject* vobj = arcan_video_getobject(did);
2315 	if (!vobj)
2316 		return rv;
2317 
2318 	bool is_rtgt = arcan_vint_findrt(vobj) != NULL;
2319 	if (is_rtgt){
2320 		arcan_warning("arcan_video_setuprendertarget() source vid"
2321 			"already is a rendertarget\n");
2322 		rv = ARCAN_ERRC_BAD_ARGUMENT;
2323 		return rv;
2324 	}
2325 
2326 /* hard-coded number of render-targets allowed */
2327 	if (current_context->n_rtargets >= RENDERTARGET_LIMIT)
2328 		return ARCAN_ERRC_OUT_OF_SPACE;
2329 
2330 	int ind = current_context->n_rtargets++;
2331 	struct rendertarget* dst = &current_context->rtargets[ ind ];
2332 	*dst = (struct rendertarget){};
2333 
2334 	FL_SET(vobj, FL_RTGT);
2335 	FL_SET(dst, TGTFL_ALIVE);
2336 	dst->color = vobj;
2337 	dst->camtag = ARCAN_EID;
2338 	dst->readback = readback;
2339 	dst->readcnt = abs(readback);
2340 	dst->refresh = refresh;
2341 	dst->refreshcnt = abs(refresh);
2342 	dst->art = agp_setup_rendertarget(vobj->vstore, format);
2343 	dst->shid = agp_default_shader(BASIC_2D);
2344 	dst->mode = format;
2345 	dst->order3d = arcan_video_display.order3d;
2346 	dst->vppcm = dst->hppcm = 28.346456692913385;
2347 	dst->min_order = 0;
2348 	dst->max_order = 65536;
2349 
2350 	static int rendertarget_id;
2351 	rendertarget_id = (rendertarget_id + 1) % (INT_MAX-1);
2352 	dst->id = rendertarget_id;
2353 
2354 	vobj->extrefc.attachments++;
2355 	trace("(setuprendertarget), (%d:%s) defined as rendertarget."
2356 		"attachments: %d\n", vobj->cellid, video_tracetag(vobj),
2357 		vobj->extrefc.attachments);
2358 
2359 /* alter projection so the GL texture gets stored in the way
2360  * the images are rendered in normal mode, with 0,0 being upper left */
2361 	build_orthographic_matrix(
2362 		dst->projection, 0, vobj->origw, 0, vobj->origh, 0, 1);
2363 	identity_matrix(dst->base);
2364 
2365 	struct monitor_mode mode = platform_video_dimensions();
2366 	if (scale){
2367 		float xs = (float)vobj->vstore->w / (float)mode.width;
2368 		float ys = (float)vobj->vstore->h / (float)mode.height;
2369 
2370 /* since we may likely have a differently sized FBO, scale it */
2371 		scale_matrix(dst->base, xs, ys, 1.0);
2372 	}
2373 
2374 	return ARCAN_OK;
2375 }
2376 
arcan_video_setactiveframe(arcan_vobj_id dst,unsigned fid)2377 arcan_errc arcan_video_setactiveframe(arcan_vobj_id dst, unsigned fid)
2378 {
2379 	arcan_vobject* dstvobj = arcan_video_getobject(dst);
2380 
2381 	if (!dstvobj)
2382 		return ARCAN_ERRC_NO_SUCH_OBJECT;
2383 
2384 	if (!dstvobj->frameset)
2385 		return ARCAN_ERRC_UNACCEPTED_STATE;
2386 
2387 	dstvobj->frameset->index = fid < dstvobj->frameset->n_frames ? fid : 0;
2388 
2389 	FLAG_DIRTY(dstvobj);
2390 	return ARCAN_OK;
2391 }
2392 
/*
 * Bind the texture store of [src] as frame [fid] in the frameset of [dst],
 * copying (or defaulting) the texture coordinates so 'sprite-sheet' style
 * sources keep their mapping.
 */
arcan_errc arcan_video_setasframe(arcan_vobj_id dst,
	arcan_vobj_id src, size_t fid)
{
	arcan_vobject* dstvobj = arcan_video_getobject(dst);
	arcan_vobject* srcvobj = arcan_video_getobject(src);

	if (!dstvobj || !srcvobj)
		return ARCAN_ERRC_NO_SUCH_OBJECT;

/* [dst] must have a frameset and [src] must carry a texture store */
	if (dstvobj->frameset == NULL || srcvobj->vstore->txmapped != TXSTATE_TEX2D)
		return ARCAN_ERRC_UNACCEPTED_STATE;

	if (fid >= dstvobj->frameset->n_frames)
		return ARCAN_ERRC_BAD_ARGUMENT;

/* swap in the new store, releasing the reference held on the old one */
	struct frameset_store* store = &dstvobj->frameset->frames[fid];
	if (store->frame != srcvobj->vstore){
		arcan_vint_drop_vstore(store->frame);
		store->frame = srcvobj->vstore;
	}

/* we need texture coordinates to come with in order to support
 * animations using 'sprite-sheet' like features */
	if (srcvobj->txcos)
		memcpy(store->txcos, srcvobj->txcos, sizeof(float)*8);
	else
		arcan_vint_defaultmapping(store->txcos, 1.0, 1.0);

/* NOTE(review): the refcount is bumped even when store->frame already was
 * srcvobj->vstore (no matching drop in that branch above) - verify that
 * repeated calls with the same source aren't expected to leak a reference */
	store->frame->refcount++;

	return ARCAN_OK;
}
2425 
/* bookkeeping handed to the asynchronous image-loader thread; allocated in
 * loadimage_asynch and released in arcan_vint_joinasynch */
struct thread_loader_args {
	arcan_vobject* dst; /* object that receives the decoded image */
	pthread_t self; /* worker thread, joined in arcan_vint_joinasynch */
	arcan_vobj_id dstid; /* id of [dst], echoed in the completion event */
	char* fname; /* heap copy of the source path, freed on join */
	intptr_t tag; /* caller-provided tag, echoed in the completion event */
	img_cons constraints; /* dimension constraints forwarded to the decoder */
	arcan_errc rc; /* decode result, read by the joining thread */
};
2435 
/* worker-thread entry point: decode the image into the destination object,
 * record the result and flag the object as ready for joining */
static void* thread_loader(void* in)
{
	struct thread_loader_args* largs = (struct thread_loader_args*) in;
	arcan_vobject* dst = largs->dst;
	largs->rc = arcan_vint_getimage(largs->fname, dst, largs->constraints, true);
/* transition 'loading' -> 'ready to join', observed by pushasynch /
 * joinasynch on the main thread */
	dst->feed.state.tag = ARCAN_TAG_ASYNCIMGRD;
	return 0;
}
2444 
/*
 * Finish an asynchronous image load on [img]: join the worker thread,
 * synch the result (or a 32x32 zeroed placeholder on decode failure) to
 * the GPU store, and optionally [emit] a LOADED/FAILED event. [force]
 * joins even if the worker has not yet flagged completion (may block).
 */
void arcan_vint_joinasynch(arcan_vobject* img, bool emit, bool force)
{
	if (!force && img->feed.state.tag != ARCAN_TAG_ASYNCIMGRD){
		return;
	}

	struct thread_loader_args* args =
		(struct thread_loader_args*) img->feed.state.ptr;

/* blocks until thread_loader has returned */
	pthread_join(args->self, NULL);

	arcan_event loadev = {
		.category = EVENT_VIDEO,
		.vid.data = args->tag,
		.vid.source = args->dstid
	};

	if (args->rc == ARCAN_OK){
		loadev.vid.kind = EVENT_VIDEO_ASYNCHIMAGE_LOADED;
		loadev.vid.width = img->origw;
		loadev.vid.height = img->origh;
	}
/* copy broken placeholder instead */
	else {
		img->origw = 32;
		img->origh = 32;
		img->vstore->vinf.text.s_raw = 32 * 32 * sizeof(av_pixel);
		img->vstore->vinf.text.raw = arcan_alloc_mem(img->vstore->vinf.text.s_raw,
			ARCAN_MEM_VBUFFER, ARCAN_MEM_BZERO, ARCAN_MEMALIGN_PAGE);

		img->vstore->w = 32;
		img->vstore->h = 32;
		img->vstore->vinf.text.source = strdup(args->fname);
		img->vstore->filtermode = ARCAN_VFILTER_NONE;

		loadev.vid.width = 32;
		loadev.vid.height = 32;
		loadev.vid.kind = EVENT_VIDEO_ASYNCHIMAGE_FAILED;
	}

/* upload whatever raw buffer we ended up with to the GPU store */
	agp_update_vstore(img->vstore, true);

	if (emit)
		arcan_event_enqueue(arcan_event_defaultctx(), &loadev);

/* reclaim loader bookkeeping and return the object to the normal
 * (synchronous) image state */
	arcan_mem_free(args->fname);
	arcan_mem_free(args);
	img->feed.state.ptr = NULL;
	img->feed.state.tag = ARCAN_TAG_IMAGE;
}
2495 
loadimage_asynch(const char * fname,img_cons constraints,intptr_t tag)2496 static arcan_vobj_id loadimage_asynch(const char* fname,
2497 	img_cons constraints, intptr_t tag)
2498 {
2499 	arcan_vobj_id rv = ARCAN_EID;
2500 	arcan_vobject* dstobj = arcan_video_newvobject(&rv);
2501 	if (!dstobj)
2502 		return rv;
2503 
2504 	struct thread_loader_args* args = arcan_alloc_mem(
2505 		sizeof(struct thread_loader_args),
2506 		ARCAN_MEM_THREADCTX, 0, ARCAN_MEMALIGN_NATURAL);
2507 
2508 	args->dstid = rv;
2509 	args->dst = dstobj;
2510 	args->fname = strdup(fname);
2511 	args->tag = tag;
2512 	args->constraints = constraints;
2513 
2514 	dstobj->feed.state.tag = ARCAN_TAG_ASYNCIMGLD;
2515 	dstobj->feed.state.ptr = args;
2516 
2517 	pthread_create(&args->self, NULL, thread_loader, (void*) args);
2518 
2519 	return rv;
2520 }
2521 
arcan_video_pushasynch(arcan_vobj_id source)2522 arcan_errc arcan_video_pushasynch(arcan_vobj_id source)
2523 {
2524 	arcan_vobject* vobj = arcan_video_getobject(source);
2525 
2526 	if (!vobj)
2527 		return ARCAN_ERRC_NO_SUCH_OBJECT;
2528 
2529 	if (vobj->feed.state.tag == ARCAN_TAG_ASYNCIMGLD ||
2530 		vobj->feed.state.tag == ARCAN_TAG_ASYNCIMGRD){
2531 		/* protect us against premature invocation */
2532 		arcan_vint_joinasynch(vobj, false, true);
2533 	}
2534 	else
2535 		return ARCAN_ERRC_UNACCEPTED_STATE;
2536 
2537 	return ARCAN_OK;
2538 }
2539 
loadimage(const char * fname,img_cons constraints,arcan_errc * errcode)2540 static arcan_vobj_id loadimage(const char* fname, img_cons constraints,
2541 	arcan_errc* errcode)
2542 {
2543 	arcan_vobj_id rv = 0;
2544 
2545 	arcan_vobject* newvobj = arcan_video_newvobject(&rv);
2546 	if (newvobj == NULL)
2547 		return ARCAN_EID;
2548 
2549 	arcan_errc rc = arcan_vint_getimage(fname, newvobj, constraints, false);
2550 
2551 	if (rc != ARCAN_OK)
2552 		arcan_video_deleteobject(rv);
2553 
2554 	if (errcode != NULL)
2555 		*errcode = rc;
2556 
2557 	return rv;
2558 }
2559 
arcan_video_feedstate(arcan_vobj_id id)2560 vfunc_state* arcan_video_feedstate(arcan_vobj_id id)
2561 {
2562 	void* rv = NULL;
2563 	arcan_vobject* vobj = arcan_video_getobject(id);
2564 
2565 	if (vobj && id > 0){
2566 		rv = &vobj->feed.state;
2567 	}
2568 
2569 	return rv;
2570 }
2571 
arcan_video_alterfeed(arcan_vobj_id id,ffunc_ind cb,vfunc_state state)2572 arcan_errc arcan_video_alterfeed(arcan_vobj_id id,
2573 	ffunc_ind cb, vfunc_state state)
2574 {
2575 	arcan_vobject* vobj = arcan_video_getobject(id);
2576 
2577 	if (!vobj)
2578 		return ARCAN_ERRC_NO_SUCH_OBJECT;
2579 
2580 	vobj->feed.state = state;
2581 	vobj->feed.ffunc = cb;
2582 
2583 	return ARCAN_OK;
2584 }
2585 
arcan_video_setupfeed(ffunc_ind ffunc,img_cons cons,uint8_t ntus,uint8_t ncpt)2586 static arcan_vobj_id arcan_video_setupfeed(
2587 	ffunc_ind ffunc, img_cons cons, uint8_t ntus, uint8_t ncpt)
2588 {
2589 	if (!ffunc)
2590 		return 0;
2591 
2592 	arcan_vobj_id rv = 0;
2593 	arcan_vobject* newvobj = arcan_video_newvobject(&rv);
2594 
2595 	if (!newvobj || !ffunc)
2596 		return ARCAN_EID;
2597 
2598 	struct agp_vstore* vstor = newvobj->vstore;
2599 /* preset */
2600 	newvobj->origw = cons.w;
2601 	newvobj->origh = cons.h;
2602 	newvobj->vstore->bpp = ncpt == 0 ? sizeof(av_pixel) : ncpt;
2603 	newvobj->vstore->filtermode &= ~ARCAN_VFILTER_MIPMAP;
2604 
2605 	if (newvobj->vstore->scale == ARCAN_VIMAGE_NOPOW2){
2606 		newvobj->vstore->w = cons.w;
2607 		newvobj->vstore->h = cons.h;
2608 	}
2609 	else {
2610 /* For feeds, we don't do the forced- rescale on
2611  * every frame, way too expensive, this behavior only
2612  * occurs if there's a custom set of texture coordinates already */
2613 		newvobj->vstore->w = nexthigher(cons.w);
2614 		newvobj->vstore->h = nexthigher(cons.h);
2615 		float hx = (float)cons.w / (float)newvobj->vstore->w;
2616 		float hy = (float)cons.h / (float)newvobj->vstore->h;
2617 		if (newvobj->txcos)
2618 			arcan_vint_defaultmapping(newvobj->txcos, hx, hy);
2619 	}
2620 
2621 /* allocate */
2622 	vstor->vinf.text.s_raw = newvobj->vstore->w *
2623 		newvobj->vstore->h * newvobj->vstore->bpp;
2624 	vstor->vinf.text.raw = arcan_alloc_mem(vstor->vinf.text.s_raw,
2625 		ARCAN_MEM_VBUFFER, ARCAN_MEM_BZERO, ARCAN_MEMALIGN_PAGE);
2626 
2627 	newvobj->feed.ffunc = ffunc;
2628 	agp_update_vstore(newvobj->vstore, true);
2629 
2630 	return rv;
2631 }
2632 
/* some targets like to change size dynamically (thanks for that),
 * thus, drop the allocated buffers, generate new one and tweak txcos */
arcan_errc arcan_video_resizefeed(arcan_vobj_id id, size_t w, size_t h)
{
	arcan_vobject* vobj = arcan_video_getobject(id);
	if (!vobj)
		return ARCAN_ERRC_NO_SUCH_OBJECT;

/* an in-flight asynch load would race with the resize, force-finish it */
	if (vobj->feed.state.tag == ARCAN_TAG_ASYNCIMGLD ||
		vobj->feed.state.tag == ARCAN_TAG_ASYNCIMGRD)
		arcan_video_pushasynch(id);

/* rescale transformation chain so the on-screen size (origw/h * scale)
 * is preserved across the change in storage dimensions */
	float ox = (float)vobj->origw*vobj->current.scale.x;
	float oy = (float)vobj->origh*vobj->current.scale.y;
	float sfx = ox / (float)w;
	float sfy = oy / (float)h;
	if (vobj->current.scale.x > 0 && vobj->current.scale.y > 0){
		rescale_origwh(vobj,
			sfx / vobj->current.scale.x, sfy / vobj->current.scale.y);
	}

/* "initial" base dimensions, important when dimensions change for objects that
 * have a shared storage elsewhere but where scale differs. */
	vobj->origw = w;
	vobj->origh = h;

	vobj->current.scale.x = sfx;
	vobj->current.scale.y = sfy;
	invalidate_cache(vobj);
/* reallocate / reformat the backing store itself */
	agp_resize_vstore(vobj->vstore, w, h);

	FLAG_DIRTY();
	return ARCAN_OK;
}
2668 
arcan_video_loadimageasynch(const char * rloc,img_cons constraints,intptr_t tag)2669 arcan_vobj_id arcan_video_loadimageasynch(const char* rloc,
2670 	img_cons constraints, intptr_t tag)
2671 {
2672 	arcan_vobj_id rv = loadimage_asynch(rloc, constraints, tag);
2673 
2674 	if (rv > 0){
2675 		arcan_vobject* vobj = arcan_video_getobject(rv);
2676 
2677 		if (vobj){
2678 			vobj->current.rotation.quaternion = default_quat;
2679 			arcan_vint_attachobject(rv);
2680 		}
2681 	}
2682 
2683 	return rv;
2684 }
2685 
arcan_video_loadimage(const char * rloc,img_cons constraints,unsigned short zv)2686 arcan_vobj_id arcan_video_loadimage(const char* rloc,
2687 	img_cons constraints, unsigned short zv)
2688 {
2689 	arcan_vobj_id rv = loadimage((char*) rloc, constraints, NULL);
2690 
2691 /* the asynch version could've been deleted in between,
2692  * so we need to double check */
2693 		if (rv > 0){
2694 		arcan_vobject* vobj = arcan_video_getobject(rv);
2695 		if (vobj){
2696 			vobj->order = zv;
2697 			vobj->current.rotation.quaternion = default_quat;
2698 			arcan_vint_attachobject(rv);
2699 		}
2700 	}
2701 
2702 	return rv;
2703 }
2704 
arcan_video_addfobject(ffunc_ind feed,vfunc_state state,img_cons cons,unsigned short zv)2705 arcan_vobj_id arcan_video_addfobject(
2706 	ffunc_ind feed, vfunc_state state, img_cons cons, unsigned short zv)
2707 {
2708 	arcan_vobj_id rv;
2709 	const int feed_ntus = 1;
2710 
2711 	if ((rv = arcan_video_setupfeed(feed, cons, feed_ntus, cons.bpp)) > 0){
2712 		arcan_vobject* vobj = arcan_video_getobject(rv);
2713 		vobj->order = zv;
2714 		vobj->feed.state = state;
2715 
2716 		if (state.tag == ARCAN_TAG_3DOBJ){
2717 			FL_SET(vobj, FL_FULL3D);
2718 			vobj->order *= -1;
2719 		}
2720 
2721 		arcan_vint_attachobject(rv);
2722 	}
2723 
2724 	return rv;
2725 }
2726 
arcan_video_scaletxcos(arcan_vobj_id id,float sfs,float sft)2727 arcan_errc arcan_video_scaletxcos(arcan_vobj_id id, float sfs, float sft)
2728 {
2729 	arcan_vobject* vobj = arcan_video_getobject(id);
2730 	if (!vobj)
2731 		return ARCAN_ERRC_NO_SUCH_OBJECT;
2732 
2733 	if (!vobj->txcos){
2734 		vobj->txcos = arcan_alloc_mem(8 * sizeof(float),
2735 			ARCAN_MEM_VSTRUCT, 0, ARCAN_MEMALIGN_SIMD);
2736 		if (!vobj->txcos)
2737 			return ARCAN_ERRC_OUT_OF_SPACE;
2738 
2739 		arcan_vint_defaultmapping(vobj->txcos, 1.0, 1.0);
2740 	}
2741 
2742 	vobj->txcos[0] *= sfs;
2743 	vobj->txcos[1] *= sft;
2744 	vobj->txcos[2] *= sfs;
2745 	vobj->txcos[3] *= sft;
2746 	vobj->txcos[4] *= sfs;
2747 	vobj->txcos[5] *= sft;
2748 	vobj->txcos[6] *= sfs;
2749 	vobj->txcos[7] *= sft;
2750 
2751 	FLAG_DIRTY(vobj);
2752 	return ARCAN_OK;
2753 }
2754 
2755 
arcan_video_forceblend(arcan_vobj_id id,enum arcan_blendfunc mode)2756 arcan_errc arcan_video_forceblend(arcan_vobj_id id, enum arcan_blendfunc mode)
2757 {
2758 	arcan_errc rv = ARCAN_ERRC_NO_SUCH_OBJECT;
2759 	arcan_vobject* vobj = arcan_video_getobject(id);
2760 
2761 	if (vobj && id > 0){
2762 		vobj->blendmode = mode;
2763 		FLAG_DIRTY(vobj);
2764 		rv = ARCAN_OK;
2765 	}
2766 
2767 	return rv;
2768 }
2769 
/* Current drawing order of [id], 0 when the object doesn't exist. */
unsigned short arcan_video_getzv(arcan_vobj_id id)
{
	arcan_vobject* vobj = arcan_video_getobject(id);
	return vobj ? vobj->order : 0;
}
2781 
/* no parent resolve performed */
/* Re-insert [vobj] into its owner rendertarget at order [newzv]
 * (clamped to [0, 65535]) and recursively shift any children that use
 * relative ordering by the same distance they had to the old order. */
static arcan_errc update_zv(arcan_vobject* vobj, int newzv)
{
	struct rendertarget* owner = vobj->owner;

	if (!owner)
		return ARCAN_ERRC_UNACCEPTED_STATE;

	newzv = newzv < 0 ? 0 : newzv;
	newzv = newzv > 65535 ? 65535 : newzv;

/*
 * attach also works like an insertion sort where
 * the insertion criterion is <= order, to aid dynamic
 * corruption checks, this could be further optimized
 * by using the fact that we're simply "sliding" in the
 * same chain.
 */
	int oldv = vobj->order;
	detach_fromtarget(owner, vobj);
	vobj->order = newzv;

/* 3d objects are kept in the negative order range */
	if (vobj->feed.state.tag == ARCAN_TAG_3DOBJ)
		vobj->order *= -1;

	attach_object(owner, vobj);

/*
 * unfortunately, we need to do this recursively AND
 * take account for the fact that we may relatively speaking shrink
 * the distance between our orderv vs. parent
 */
	for (size_t i = 0; i < vobj->childslots; i++)
		if (vobj->children[i] && FL_TEST(vobj->children[i], FL_ORDOFS)){
			int distance = vobj->children[i]->order - oldv;
			update_zv(vobj->children[i], newzv + distance);
		}

	return ARCAN_OK;
}
2822 
2823 /* change zval (see arcan_video_addobject) for a particular object.
2824  * return value is an error code */
arcan_video_setzv(arcan_vobj_id id,int newzv)2825 arcan_errc arcan_video_setzv(arcan_vobj_id id, int newzv)
2826 {
2827 	arcan_vobject* vobj = arcan_video_getobject(id);
2828 
2829 	if (!vobj)
2830 		return ARCAN_ERRC_NO_SUCH_OBJECT;
2831 
2832 /* calculate order relative to parent if that's toggled
2833  * clip to 16bit US and ignore if the parent is a 3dobj */
2834 	if (FL_TEST(vobj, FL_ORDOFS))
2835 		newzv = newzv + vobj->parent->order;
2836 
2837 /*
2838  * Then propagate to any child that might've inherited
2839  */
2840 	update_zv(vobj, newzv);
2841 
2842 	return ARCAN_OK;
2843 }
2844 
2845 /* forcibly kill videoobject after n cycles,
2846  * which will reset a counter that upon expiration invocates
2847  * arcan_video_deleteobject(arcan_vobj_id id)
2848  */
arcan_video_setlife(arcan_vobj_id id,unsigned lifetime)2849 arcan_errc arcan_video_setlife(arcan_vobj_id id, unsigned lifetime)
2850 {
2851 	arcan_errc rv = ARCAN_ERRC_NO_SUCH_OBJECT;
2852 	arcan_vobject* vobj = arcan_video_getobject(id);
2853 
2854 	if (vobj && id > 0){
2855 		if (lifetime == 0){
2856 			vobj->lifetime = -1;
2857 		}
2858 		else
2859 /* make sure the object is flagged as alive */
2860 			vobj->mask |= MASK_LIVING;
2861 
2862 		vobj->lifetime = lifetime;
2863 		rv = ARCAN_OK;
2864 	}
2865 
2866 	return rv;
2867 }
2868 
/*
 * Cancel pending transforms on [id]. [mask] selects which transform
 * classes (MASK_OPACITY/POSITION/ORIENTATION/SCALE) to zap, 0 meaning all.
 * When [left] is provided it receives the remaining ticks of the first
 * blend/move/rotate/scale slots before they are cancelled.
 */
arcan_errc arcan_video_zaptransform(
	arcan_vobj_id id, int mask, unsigned left[4])
{
	arcan_vobject* vobj = arcan_video_getobject(id);

	if (!vobj)
		return ARCAN_ERRC_NO_SUCH_OBJECT;

	surface_transform* current = vobj->transform;
	surface_transform** last = &vobj->transform;

	unsigned ct = arcan_video_display.c_ticks;

/* only set if the data is actually needed */
	if (left){
		if (current){
			left[0] = ct > current->blend.endt ? 0 : current->blend.endt - ct;
			left[1] = ct > current->move.endt ? 0 : current->move.endt - ct;
			left[2] = ct > current->rotate.endt ? 0 : current->rotate.endt - ct;
			left[3] = ct > current->scale.endt ? 0 : current->scale.endt - ct;
		}
		else{
/* NOTE(review): with no transform chain all four slots report 4, not 0 -
 * presumably a sentinel for 'nothing pending'; confirm against callers */
			left[0] = left[1] = left[2] = left[3] = 4;
		}
	}

/* if we don't set a mask, zap the entire chain - otherwise walk the chain,
 * remove the slots that match the mask, and then drop ones that has become empty */
	if (!mask)
		mask = ~mask;

	while (current){
		current->blend.endt  *= !!!(mask & MASK_OPACITY); /* int to 0|1, then invert result */
		current->move.endt   *= !!!(mask & MASK_POSITION);
		current->rotate.endt *= !!!(mask & MASK_ORIENTATION);
		current->scale.endt  *= !!!(mask & MASK_SCALE);

		current->blend.startt  *= !!!(mask & MASK_OPACITY); /* int to 0|1, then invert result */
		current->move.startt   *= !!!(mask & MASK_POSITION);
		current->rotate.startt *= !!!(mask & MASK_ORIENTATION);
		current->scale.startt  *= !!!(mask & MASK_SCALE);

/* any transform alive? then don't free the transform */
		bool used =
			!!(current->blend.endt | current->move.endt |
			current->rotate.endt | current->scale.endt);

		if (!used){

/* relink previous valid and point to next, this might be null but since wain
 * from &vobj->transform that will reset the head as well so no weird aliasing */
			if (*last == current)
				*last = current->next;

			surface_transform* next = current->next;
			arcan_mem_free(current);
			current = next;
		}
		else {
			last = &current->next;
			current = current->next;
		}
	}

	invalidate_cache(vobj);
	return ARCAN_OK;
}
2936 
/*
 * Attach [tag] to the last pending transform of each class selected in
 * [mask]; when that transform finishes, the tag is emitted with the
 * corresponding CHAIN_OVER event. For each class, the tag lands on the
 * last chain entry that has the slot active (startt set) while its
 * successor does not.
 */
arcan_errc arcan_video_tagtransform(arcan_vobj_id id,
	intptr_t tag, enum arcan_transform_mask mask)
{
	arcan_vobject* vobj = arcan_video_getobject(id);

	if (!vobj)
		return ARCAN_ERRC_NO_SUCH_OBJECT;

	if (!vobj->transform)
		return ARCAN_ERRC_UNACCEPTED_STATE;

/* reject masks with bits outside the known transform classes */
	if ((mask & ~MASK_TRANSFORMS) > 0)
		return ARCAN_ERRC_BAD_ARGUMENT;

	surface_transform* current = vobj->transform;

/* each class is cleared from [mask] once tagged, so the walk stops as
 * soon as every requested slot has been handled */
	while(current && mask > 0){
		if ((mask & MASK_POSITION) > 0){
			if (current->move.startt &&
			(!current->next || !current->next->move.startt)){
				mask &= ~MASK_POSITION;
				current->move.tag = tag;
			}
		}

		if ((mask & MASK_SCALE) > 0){
			if (current->scale.startt &&
			(!current->next || !current->next->scale.startt)){
				mask &= ~MASK_SCALE;
				current->scale.tag = tag;
			}
		}

		if ((mask & MASK_ORIENTATION) > 0){
			if (current->rotate.startt &&
			(!current->next || !current->next->rotate.startt)){
				mask &= ~MASK_ORIENTATION;
				current->rotate.tag = tag;
			}
		}

		if ((mask & MASK_OPACITY) > 0){
			if (current->blend.startt &&
			(!current->next || !current->next->blend.startt)){
				mask &= ~MASK_OPACITY;
				current->blend.tag = tag;
			}
		}

		current = current->next;
	}

	return ARCAN_OK;
}
2991 
static void emit_transform_event(arcan_vobj_id src,
	enum arcan_transform_mask slot, intptr_t tag)
{
/* notify listeners that a tagged transform in [slot] has completed */
	struct arcan_event ev = {
		.category = EVENT_VIDEO,
		.vid.kind = EVENT_VIDEO_CHAIN_OVER,
		.vid.data = tag,
		.vid.source = src,
		.vid.slot = slot
	};

	arcan_event_enqueue(arcan_event_defaultctx(), &ev);
}
3005 
arcan_video_instanttransform(arcan_vobj_id id,int mask,enum tag_transform_methods method)3006 arcan_errc arcan_video_instanttransform(
3007 	arcan_vobj_id id, int mask, enum tag_transform_methods method)
3008 {
3009 	arcan_vobject* vobj = arcan_video_getobject(id);
3010 	if (!vobj)
3011 		return ARCAN_ERRC_NO_SUCH_OBJECT;
3012 
3013 	if (!vobj->transform)
3014 		return ARCAN_OK;
3015 
3016 /* step through the list of transforms */
3017 	surface_transform* current = vobj->transform;
3018 
3019 /* determine if any tag events should be produced or not, and if so, if we want
3020  * all of them, or only the last. The last case is more complicated as there
3021  * might be a ->next allocated for another transform so also need to check the
3022  * time */
3023 	if (!mask)
3024 		mask = ~mask;
3025 
3026 	bool at_last;
3027 	struct surface_transform** last = &vobj->transform;
3028 
3029 	while (current){
3030 		if (current->move.startt && (mask & MASK_POSITION)){
3031 			vobj->current.position = current->move.endp;
3032 				current->move.startt = 0;
3033 
3034 			at_last = (method == TAG_TRANSFORM_LAST) &&
3035 				!( current->next && current->next->move.startt );
3036 
3037 			if (current->move.tag && (method == TAG_TRANSFORM_ALL || at_last))
3038 				emit_transform_event(vobj->cellid, MASK_POSITION, current->move.tag);
3039 		}
3040 
3041 		if (current->blend.startt && (mask & MASK_OPACITY)){
3042 			vobj->current.opa = current->blend.endopa;
3043 			current->blend.startt = 0;
3044 
3045 			at_last = (method == TAG_TRANSFORM_LAST) &&
3046 				!( current->next && current->next->blend.startt );
3047 
3048 			if (current->blend.tag && (method == TAG_TRANSFORM_ALL || at_last))
3049 				emit_transform_event(vobj->cellid, MASK_OPACITY, current->blend.tag);
3050 		}
3051 
3052 		if (current->rotate.startt && (mask & MASK_ORIENTATION)){
3053 			vobj->current.rotation = current->rotate.endo;
3054 			current->rotate.startt = 0;
3055 
3056 			at_last = (method == TAG_TRANSFORM_LAST) &&
3057 				!( current->next && current->next->rotate.startt );
3058 
3059 			if (current->rotate.tag && (method == TAG_TRANSFORM_LAST || at_last))
3060 				emit_transform_event(
3061 					vobj->cellid, MASK_ORIENTATION, current->rotate.tag);
3062 		}
3063 
3064 		if (current->scale.startt && (mask & MASK_SCALE)){
3065 			vobj->current.scale = current->scale.endd;
3066 			current->scale.startt = 0;
3067 
3068 			at_last = (method == TAG_TRANSFORM_LAST) &&
3069 				!( current->next && current->next->scale.startt );
3070 
3071 			if (current->scale.tag && (method == TAG_TRANSFORM_LAST || at_last))
3072 				emit_transform_event(vobj->cellid, MASK_SCALE, current->scale.tag);
3073 		}
3074 
3075 /* see also: zaptransform */
3076 		bool used =
3077 			!!(current->blend.startt | current->move.startt |
3078 			current->rotate.startt | current->scale.startt);
3079 
3080 		if (!used){
3081 			if (*last == current){
3082 				*last = current->next;
3083 			}
3084 
3085 			if (vobj->transform == current)
3086 				vobj->transform = current->next;
3087 
3088 			surface_transform* tokill = current;
3089 			current = current->next;
3090 			arcan_mem_free(tokill);
3091 		}
3092 		else {
3093 			last = &current->next;
3094 			current = current->next;
3095 		}
3096 	}
3097 
3098 	invalidate_cache(vobj);
3099 	return ARCAN_OK;
3100 }
3101 
arcan_video_objecttexmode(arcan_vobj_id id,enum arcan_vtex_mode modes,enum arcan_vtex_mode modet)3102 arcan_errc arcan_video_objecttexmode(arcan_vobj_id id,
3103 	enum arcan_vtex_mode modes, enum arcan_vtex_mode modet)
3104 {
3105 	arcan_vobject* src = arcan_video_getobject(id);
3106 	arcan_errc rv = ARCAN_ERRC_NO_SUCH_OBJECT;
3107 
3108 	if (src){
3109 		src->vstore->txu = modes;
3110 		src->vstore->txv = modet;
3111 		agp_update_vstore(src->vstore, false);
3112 		FLAG_DIRTY(src);
3113 	}
3114 
3115 	return rv;
3116 }
3117 
arcan_video_objectfilter(arcan_vobj_id id,enum arcan_vfilter_mode mode)3118 arcan_errc arcan_video_objectfilter(arcan_vobj_id id,
3119 	enum arcan_vfilter_mode mode)
3120 {
3121 	arcan_vobject* src = arcan_video_getobject(id);
3122 	arcan_errc rv = ARCAN_ERRC_NO_SUCH_OBJECT;
3123 
3124 /* fake an upload with disabled filteroptions */
3125 	if (src){
3126 		src->vstore->filtermode = mode;
3127 		agp_update_vstore(src->vstore, false);
3128 	}
3129 
3130 	return rv;
3131 }
3132 
arcan_video_transformcycle(arcan_vobj_id sid,bool flag)3133 arcan_errc arcan_video_transformcycle(arcan_vobj_id sid, bool flag)
3134 {
3135 	arcan_vobject* src = arcan_video_getobject(sid);
3136 	if (!src)
3137 		return ARCAN_ERRC_NO_SUCH_OBJECT;
3138 
3139 	if (flag)
3140 		FL_SET(src, FL_TCYCLE);
3141 	else
3142 		FL_CLEAR(src, FL_TCYCLE);
3143 
3144 	return ARCAN_OK;
3145 }
3146 
arcan_video_copyprops(arcan_vobj_id sid,arcan_vobj_id did)3147 arcan_errc arcan_video_copyprops ( arcan_vobj_id sid, arcan_vobj_id did )
3148 {
3149 	if (sid == did)
3150 		return ARCAN_OK;
3151 
3152 	arcan_errc rv = ARCAN_ERRC_NO_SUCH_OBJECT;
3153 
3154 	arcan_vobject* src = arcan_video_getobject(sid);
3155 	arcan_vobject* dst = arcan_video_getobject(did);
3156 
3157 	if (src && dst){
3158 		surface_properties newprop;
3159 		arcan_resolve_vidprop(src, 0.0, &newprop);
3160 
3161 		dst->current = newprop;
3162 /* we need to translate scale */
3163 		if (newprop.scale.x > 0 && newprop.scale.y > 0){
3164 			int dstw = newprop.scale.x * src->origw;
3165 			int dsth = newprop.scale.y * src->origh;
3166 
3167 			dst->current.scale.x = (float) dstw / (float) dst->origw;
3168 			dst->current.scale.y = (float) dsth / (float) dst->origh;
3169 		}
3170 
3171 		rv = ARCAN_OK;
3172 	}
3173 
3174 	return rv;
3175 }
3176 
arcan_video_copytransform(arcan_vobj_id sid,arcan_vobj_id did)3177 arcan_errc arcan_video_copytransform(arcan_vobj_id sid, arcan_vobj_id did)
3178 {
3179 	arcan_vobject* src, (* dst);
3180 
3181 	src = arcan_video_getobject(sid);
3182 	dst = arcan_video_getobject(did);
3183 
3184 	if (!src || !dst || src == dst)
3185 		return ARCAN_ERRC_NO_SUCH_OBJECT;
3186 
3187 /* remove what's happening in destination, move
3188  * pointers from source to dest and done. */
3189 	memcpy(&dst->current, &src->current, sizeof(surface_properties));
3190 
3191 	arcan_video_zaptransform(did, 0, NULL);
3192 	dst->transform = dup_chain(src->transform);
3193 	update_zv(dst, src->order);
3194 
3195 	invalidate_cache(dst);
3196 
3197 /* in order to NOT break resizefeed etc. this copy actually
3198  * requires a modification of the transformation
3199  * chain, as scale is relative origw? */
3200 	dst->origw = src->origw;
3201 	dst->origh = src->origh;
3202 
3203 	return ARCAN_OK;
3204 }
3205 
3206 /*
3207  * quick set and run from gdb or update_object
3208  */
3209 #ifdef DUMP_TRANSFORM
/* debugging aid: walk and print every slot of a transform chain */
static void dump_chain(surface_transform* base)
{
	for (int i = 0; base; i++, base = base->next){
		printf("[transform (%"PRIxPTR") @%d]\n", (uintptr_t)base, i);
		printf("\trotate (%zu / %zu)\n\t\t x: %.2f->%.2f y: %.2f->%.2f z: %.2f->%.2f\n",
			(size_t) base->rotate.startt, (size_t) base->rotate.endt,
			base->rotate.starto.roll, base->rotate.endo.roll,
			base->rotate.starto.pitch, base->rotate.endo.pitch,
			base->rotate.starto.yaw, base->rotate.endo.yaw
		);
/* fix: the scale/blend/move rows used '%2.f' (field width 2, precision 0)
 * for the first value - clearly a typo for '%.2f' as in the rotate row */
		printf("\tscale (%zu / %zu)\n\t\t x: %.2f->%.2f y: %.2f->%.2f z: %.2f->%.2f\n",
			(size_t) base->scale.startt, (size_t) base->scale.endt,
			base->scale.startd.x, base->scale.endd.x,
			base->scale.startd.y, base->scale.endd.y,
			base->scale.startd.z, base->scale.endd.z
		);
		printf("\tblend (%zu / %zu)\n\t\t opacity: %.2f->%.2f\n",
			(size_t) base->blend.startt, (size_t) base->blend.endt,
			base->blend.startopa, base->blend.endopa
		);
		printf("\tmove (%zu / %zu)\n\t\t x: %.2f->%.2f y: %.2f->%.2f z: %.2f->%.2f\n",
			(size_t) base->move.startt, (size_t) base->move.endt,
			base->move.startp.x, base->move.endp.x,
			base->move.startp.y, base->move.endp.y,
			base->move.startp.z, base->move.endp.z
		);
	}
}
3238 #endif
3239 
arcan_video_transfertransform(arcan_vobj_id sid,arcan_vobj_id did)3240 arcan_errc arcan_video_transfertransform(arcan_vobj_id sid, arcan_vobj_id did)
3241 {
3242 	arcan_errc rv = arcan_video_copytransform(sid, did);
3243 
3244 	if (rv == ARCAN_OK){
3245 		arcan_vobject* src = arcan_video_getobject(sid);
3246 		arcan_video_zaptransform(sid, 0, NULL);
3247 		src->transform = NULL;
3248 	}
3249 
3250 	return rv;
3251 }
3252 
3253 /* remove a video object that is also a rendertarget (FBO) output */
static void drop_rtarget(arcan_vobject* vobj)
{
/* check if vobj is indeed a rendertarget */
	struct rendertarget* dst = NULL;
	int cascade_c = 0;
	arcan_vobject** pool;

	unsigned dstind;

/* linear search for the vobj among rendertargets */
	for (dstind = 0; dstind < current_context->n_rtargets; dstind++){
		if (current_context->rtargets[dstind].color == vobj){
			dst = &current_context->rtargets[dstind];
			break;
		}
	}

/* not a rendertarget - nothing to do here */
	if (!dst)
		return;

/* don't leave the context's default attachment pointer dangling */
	if (current_context->attachment == dst)
		current_context->attachment = NULL;

/* found one, disassociate with the context */
	current_context->n_rtargets--;
/* NOTE(review): this guard only fires if n_rtargets is a signed type; if it
 * is unsigned the underflow would pass silently - confirm its declaration */
	if (current_context->n_rtargets < 0){
		arcan_warning(
			"[bug] rtgt count (%d) < 0\n", current_context->n_rtargets);
	}

	if (vobj->tracetag)
		arcan_warning("(arcan_video_deleteobject(reference-pass) -- "
			"remove rendertarget (%s)\n", vobj->tracetag);

/* kill GPU resources */
	if (dst->art)
		agp_drop_rendertarget(dst->art);
	dst->art = NULL;

/* create a temporary copy of all the elements in the rendertarget,
 * this will be a noop for a linked rendertarget */
	arcan_vobject_litem* current = dst->first;
	size_t pool_sz = (dst->color->extrefc.attachments) * sizeof(arcan_vobject*);
	pool = arcan_alloc_mem(pool_sz, ARCAN_MEM_VSTRUCT, ARCAN_MEM_TEMPORARY,
		ARCAN_MEMALIGN_NATURAL);

/* note the contents of the rendertarget as "detached" from the source vobj */
	while (current){
		arcan_vobject* base = current->elem;
		pool[cascade_c++] = base;

/* rtarget has one less attachment, and base is attached to one less */
		vobj->extrefc.attachments--;
		base->extrefc.attachments--;

		trace("(deleteobject::drop_rtarget) remove attached (%d:%s) from"
			"	rendertarget (%d:%s), left: %d:%d\n",
			current->elem->cellid, video_tracetag(current->elem), vobj->cellid,
			video_tracetag(vobj),vobj->extrefc.attachments,base->extrefc.attachments);

		if (base->extrefc.attachments < 0){
			arcan_warning(
				"[bug] obj-attach-refc (%d) < 0\n", base->extrefc.attachments);
		}

		if (vobj->extrefc.attachments < 0){
			arcan_warning(
				"[bug] rtgt-ext-refc (%d) < 0\n", vobj->extrefc.attachments);
		}

/* cleanup and unlink before moving on; the dropped links are poisoned with
 * recognizable patterns so use-after-free stands out in a debugger */
		arcan_vobject_litem* last = current;
		current->elem = (arcan_vobject*) 0xfacefeed;
		current = current->next;
		last->next = (struct arcan_vobject_litem*) 0xdeadbeef;
		arcan_mem_free(last);
	}

/* compact the context array of rendertargets */
	if (dstind+1 < RENDERTARGET_LIMIT)
		memmove(&current_context->rtargets[dstind],
			&current_context->rtargets[dstind+1],
			sizeof(struct rendertarget) * (RENDERTARGET_LIMIT - 1 - dstind));

/* always kill the last element */
	memset(&current_context->rtargets[RENDERTARGET_LIMIT- 1], 0,
		sizeof(struct rendertarget));

/* self-reference gone */
	vobj->extrefc.attachments--;
	trace("(deleteobject::drop_rtarget) remove self reference from "
		"rendertarget (%d:%s)\n", vobj->cellid, video_tracetag(vobj));
	if (vobj->extrefc.attachments != 0){
		arcan_warning("[bug] vobj refc (%d) != 0\n", vobj->extrefc.attachments);
	}

/* sweep the list of rendertarget children, and see if we have the
 * responsibility of cleaning it up */
	for (size_t i = 0; i < cascade_c; i++)
		if (pool[i] && FL_TEST(pool[i], FL_INUSE) &&
		(pool[i]->owner == dst || !FL_TEST(pool[i]->owner, TGTFL_ALIVE))){
			pool[i]->owner = NULL;

/* cascade or push to stdout as new owner */
			if ((pool[i]->mask & MASK_LIVING) > 0)
				arcan_video_deleteobject(pool[i]->cellid);
			else
				attach_object(&current_context->stdoutp, pool[i]);
		}

/* lastly, remove any dangling references/links, converting those rendertargets
 * to normal/empty ones */
	cascade_c = 0;
	for (dstind = 0; dstind < current_context->n_rtargets; dstind++){
		if (current_context->rtargets[dstind].link == dst){
			current_context->rtargets[dstind].link = NULL;
		}
	}

	arcan_mem_free(pool);
}
3375 
static void drop_frameset(arcan_vobject* vobj)
{
/* release every frame's store reference, then the backing array and the
 * set itself; stores are refcounted so shared ones survive the drop */
	struct vobject_frameset* set = vobj->frameset;
	if (!set)
		return;

	for (size_t i = 0; i < set->n_frames; i++)
		arcan_vint_drop_vstore(set->frames[i].frame);

	arcan_mem_free(set->frames);
	set->frames = NULL;

	arcan_mem_free(set);
	vobj->frameset = NULL;
}
3389 
3390 /* by far, the most involved and dangerous function in this .o,
3391  * hence the many safe-guards checks and tracing output,
3392  * the simplest of objects (just an image or whatnot) should have
3393  * a minimal cost, with everything going up from there.
3394  * Things to consider:
3395  * persistence (existing in multiple stack layers, only allowed to be deleted
3396  * IF it doesn't exist at a lower layer
3397  * rendertargets (objects that gets rendered to)
3398  * links (objects linked to others to be deleted in a cascading fashion)
3399  *
3400  * an object can belong to either a parent object (ultimately, WORLD),
3401  * one or more rendertargets, at the same time,
3402  * and these deletions should also sustain a full context wipe
3403  */
arcan_errc arcan_video_deleteobject(arcan_vobj_id id)
{
	arcan_vobject* vobj = arcan_video_getobject(id);

	int cascade_c = 0;

/* some objects can't be deleted */
	if (!vobj || id == ARCAN_VIDEO_WORLDID || id == ARCAN_EID)
		return ARCAN_ERRC_NO_SUCH_OBJECT;

/* when a persist is defined in a lower layer, we know that the lowest layer
 * is the last on to have the persistflag) */
	if (FL_TEST(vobj, FL_PRSIST) &&
		(vcontext_ind > 0 && FL_TEST(
			&vcontext_stack[vcontext_ind - 1].vitems_pool[
			vobj->cellid], FL_PRSIST))
	)
		return ARCAN_ERRC_UNACCEPTED_STATE;

/* step one, disassociate from ALL rendertargets,  */
	detach_fromtarget(&current_context->stdoutp, vobj);
	for (unsigned int i = 0; i < current_context->n_rtargets &&
		vobj->extrefc.attachments; i++)
		detach_fromtarget(&current_context->rtargets[i], vobj);

/* step two, disconnect from parent, WORLD references doesn't count */
	if (vobj->parent && vobj->parent != &current_context->world)
		dropchild(vobj->parent, vobj);

/* vobj might be a rendertarget itself, so detach all its
 * possible members, free FBO/PBO resources etc. */
	drop_rtarget(vobj);
	drop_frameset(vobj);

/* populate a pool of cascade deletions */
	unsigned sum = vobj->extrefc.links;

/* VLA sized by the link count; +1 so a zero-link object still has storage */
	arcan_vobject* pool[ (sum + 1) ];

	if (sum)
		memset(pool, 0, sizeof(pool));

/* drop all children, add those that should be deleted to the pool */
	for (size_t i = 0; i < vobj->childslots; i++){
		arcan_vobject* cur = vobj->children[i];
		if (!cur)
			continue;

/* the last constraint should be guaranteed, but safety first */
		if ((cur->mask & MASK_LIVING) > 0 && cascade_c < sum+1)
			pool[cascade_c++] = cur;

		dropchild(vobj, cur);
	}

	arcan_mem_free(vobj->children);
	vobj->childslots = 0;

	current_context->nalive--;

/* time to drop all associated resources */
	arcan_video_zaptransform(id, 0, NULL);
	arcan_mem_free(vobj->txcos);

/* full- object specific clean-up */
	if (vobj->feed.ffunc){
		arcan_ffunc_lookup(vobj->feed.ffunc)(FFUNC_DESTROY,
			0, 0, 0, 0, 0, vobj->feed.state, vobj->cellid);
		vobj->feed.state.ptr = NULL;
		vobj->feed.ffunc = FFUNC_FATAL;
		vobj->feed.state.tag = ARCAN_TAG_NONE;
	}

/* finish any still-pending asynchronous image load before the store goes */
	if (vobj->feed.state.tag == ARCAN_TAG_ASYNCIMGLD)
		arcan_video_pushasynch(id);

/* video storage, will take care of refcounting in case of shared storage */
	arcan_vint_drop_vstore(vobj->vstore);
	vobj->vstore = NULL;

/* by this point all external references should be gone; anything else
 * indicates a refcounting bug elsewhere */
	if (vobj->extrefc.attachments|vobj->extrefc.links){
		arcan_warning("[BUG] Broken reference counters for expiring objects, "
			"%d, %d, tracetag? (%s)\n", vobj->extrefc.attachments,
			vobj->extrefc.links, vobj->tracetag ? vobj->tracetag : "(NO TAG)"
		);
#ifdef _DEBUG
		abort();
#endif
	}

	arcan_mem_free(vobj->tracetag);
	arcan_vint_dropshape(vobj);

/* lots of default values are assumed to be 0, so reset the
 * entire object to be sure. will help leak detectors as well */
	memset(vobj, 0, sizeof(arcan_vobject));

/* recurse into the cascade pool, deleting the children that were marked;
 * NOTE(review): size_t i is compared against the int cascade_c - safe while
 * cascade_c >= 0, but worth aligning the types */
	for (size_t i = 0; i < cascade_c; i++){
		if (!pool[i])
			continue;

		trace("(deleteobject) cascade pool entry (%d), %d:%s\n", i, pool[i]->cellid,
			pool[i]->tracetag ? pool[i]->tracetag : "(NO TAG)");

		if (FL_TEST(pool[i], FL_INUSE))
			arcan_video_deleteobject(pool[i]->cellid);
	}

	return ARCAN_OK;
}
3514 
arcan_video_override_mapping(arcan_vobj_id id,float * newmapping)3515 arcan_errc arcan_video_override_mapping(arcan_vobj_id id, float* newmapping)
3516 {
3517 	arcan_vobject* vobj = arcan_video_getobject(id);
3518 	arcan_errc rv = ARCAN_ERRC_NO_SUCH_OBJECT;
3519 
3520 	if (vobj && id > 0){
3521 		if (vobj->txcos)
3522 			arcan_mem_free(vobj->txcos);
3523 
3524 		vobj->txcos = arcan_alloc_fillmem(newmapping,
3525 			sizeof(float) * 8, ARCAN_MEM_VSTRUCT, 0, ARCAN_MEMALIGN_SIMD);
3526 
3527 		rv = ARCAN_OK;
3528 		FLAG_DIRTY(vobj);
3529 	}
3530 
3531 	return rv;
3532 }
3533 
arcan_video_retrieve_mapping(arcan_vobj_id id,float * dst)3534 arcan_errc arcan_video_retrieve_mapping(arcan_vobj_id id, float* dst)
3535 {
3536 	arcan_vobject* vobj = arcan_video_getobject(id);
3537 	arcan_errc rv = ARCAN_ERRC_NO_SUCH_OBJECT;
3538 
3539 	if (vobj && dst && id > 0){
3540 		float* sptr = vobj->txcos ?
3541 			vobj->txcos : arcan_video_display.default_txcos;
3542 		memcpy(dst, sptr, sizeof(float) * 8);
3543 		rv = ARCAN_OK;
3544 	}
3545 
3546 	return rv;
3547 }
3548 
arcan_vobj_id arcan_video_findparent(arcan_vobj_id id, arcan_vobj_id ref)
{
	arcan_vobject* vobj = arcan_video_getobject(id);

/* missing object, no parent or a detached parent all count as 'none' */
	if (!vobj || !vobj->parent || !vobj->parent->owner)
		return ARCAN_EID;

/* plain query: return the immediate parent */
	if (ref == ARCAN_EID)
		return vobj->parent->cellid;

/* ancestry test: climb towards the root looking for [ref] */
	for (arcan_vobject* cur = vobj->parent; cur; cur = cur->parent){
		if (cur->cellid == ref)
			return cur->cellid;
	}

	return ARCAN_EID;
}
3570 
arcan_vobj_id arcan_video_findchild(arcan_vobj_id parentid, unsigned ofs)
{
	arcan_vobject* vobj = arcan_video_getobject(parentid);
	if (!vobj)
		return ARCAN_EID;

/* children are stored sparsely; return the ofs:th occupied slot */
	for (size_t i = 0; i < vobj->childslots; i++){
		arcan_vobject* ch = vobj->children[i];
		if (!ch)
			continue;

		if (ofs == 0)
			return ch->cellid;

		ofs--;
	}

	return ARCAN_EID;
}
3590 
/* depth-limited depth-first search for [match] in the tree rooted at
 * [node]; a limit of -1 means unbounded */
static bool recsweep(arcan_vobject* node, arcan_vobject* match, int limit)
{
	if (node == NULL)
		return false;

	if (limit != -1){
		if (limit <= 0)
			return false;
		limit--;
	}

	if (node == match)
		return true;

	bool found = false;
	for (size_t i = 0; !found && i < node->childslots; i++)
		found = recsweep(node->children[i], match, limit);

	return found;
}
3605 
bool arcan_video_isdescendant(arcan_vobj_id vid,
	arcan_vobj_id parent, int limit)
{
/* resolve both endpoints, then let the recursive sweep do the work */
	arcan_vobject* root = arcan_video_getobject(parent);
	arcan_vobject* node = arcan_video_getobject(vid);

	return (root && node) ? recsweep(root, node, limit) : false;
}
3617 
arcan_video_objectrotate(arcan_vobj_id id,float ang,arcan_tickv time)3618 arcan_errc arcan_video_objectrotate(arcan_vobj_id id,
3619 	float ang, arcan_tickv time)
3620 {
3621 	return arcan_video_objectrotate3d(id, ang, 0.0, 0.0, time);
3622 }
3623 
/* schedule (or, with tv == 0, immediately apply) a rotation to the
 * absolute roll/pitch/yaw angles, appending to any pending rotations */
arcan_errc arcan_video_objectrotate3d(arcan_vobj_id id,
	float roll, float pitch, float yaw, arcan_tickv tv)
{
	arcan_vobject* vobj = arcan_video_getobject(id);

	if (!vobj)
		return ARCAN_ERRC_NO_SUCH_OBJECT;

	invalidate_cache(vobj);

/* clear chains for rotate attribute previous rotate objects */
	if (tv == 0){
		swipe_chain(vobj->transform, offsetof(surface_transform, rotate),
			sizeof(struct transf_rotate));
		vobj->current.rotation.roll  = roll;
		vobj->current.rotation.pitch = pitch;
		vobj->current.rotation.yaw   = yaw;
		vobj->current.rotation.quaternion = build_quat_taitbryan(roll,pitch,yaw);

		return ARCAN_OK;
	}

	surface_orientation bv  = vobj->current.rotation;
	surface_transform* base = vobj->transform;
	surface_transform* last = base;

/* figure out the starting angle: walk to the first slot without a
 * pending rotate, chaining from where the previous step will end */
	while (base && base->rotate.startt){
		bv = base->rotate.endo;

		last = base;
		base = base->next;
	}

/* no free slot at the tail? extend the chain (or create it outright) */
	if (!base){
		if (last)
			base = last->next = arcan_alloc_mem(sizeof(surface_transform),
							ARCAN_MEM_VSTRUCT, ARCAN_MEM_BZERO, ARCAN_MEMALIGN_NATURAL);
		else
			base = last = arcan_alloc_mem(sizeof(surface_transform),
				ARCAN_MEM_VSTRUCT, ARCAN_MEM_BZERO, ARCAN_MEMALIGN_NATURAL);
	}

	if (!vobj->transform)
		vobj->transform = base;

/* queue after the last pending rotation, or start now when idle */
	base->rotate.startt = last->rotate.endt < arcan_video_display.c_ticks ?
		arcan_video_display.c_ticks : last->rotate.endt;
	base->rotate.endt   = base->rotate.startt + tv;
	base->rotate.starto = bv;

	base->rotate.endo.roll  = roll;
	base->rotate.endo.pitch = pitch;
	base->rotate.endo.yaw   = yaw;
	base->rotate.endo.quaternion = build_quat_taitbryan(roll, pitch, yaw);
/* let the owning rendertarget know it has pending transforms to process */
	if (vobj->owner)
		vobj->owner->transfc++;

/* pick interpolator based on whether any axis delta exceeds 180 degrees */
	base->rotate.interp = (fabsf(bv.roll - roll) > 180.0 ||
		fabsf(bv.pitch - pitch) > 180.0 || fabsf(bv.yaw - yaw) > 180.0) ?
		nlerp_quat180 : nlerp_quat360;

	return ARCAN_OK;
}
3688 
arcan_video_allocframes(arcan_vobj_id id,unsigned char capacity,enum arcan_framemode mode)3689 arcan_errc arcan_video_allocframes(arcan_vobj_id id,
3690 	unsigned char capacity, enum arcan_framemode mode)
3691 {
3692 	arcan_vobject* target = arcan_video_getobject(id);
3693 	arcan_errc rv = ARCAN_ERRC_NO_SUCH_OBJECT;
3694 
3695 	if (!target)
3696 		return rv;
3697 
3698 /* similar restrictions as with sharestore */
3699 	if (target->vstore->txmapped != TXSTATE_TEX2D)
3700 		return ARCAN_ERRC_UNACCEPTED_STATE;
3701 
3702 	if (FL_TEST(target, FL_PRSIST))
3703 		return ARCAN_ERRC_CLONE_NOT_PERMITTED;
3704 
3705 /* special case, de-allocate */
3706 	if (capacity <= 1){
3707 		drop_frameset(target);
3708 		return ARCAN_OK;
3709 	}
3710 
3711 /* only permit framesets to grow */
3712 	if (target->frameset){
3713 		if (target->frameset->n_frames > capacity)
3714 			return ARCAN_ERRC_UNACCEPTED_STATE;
3715 	}
3716 	else
3717 		target->frameset = arcan_alloc_mem(sizeof(struct vobject_frameset),
3718 			ARCAN_MEM_VSTRUCT, ARCAN_MEM_BZERO, ARCAN_MEMALIGN_NATURAL);
3719 
3720 	target->frameset->n_frames = capacity;
3721 	target->frameset->frames = arcan_alloc_mem(
3722 			sizeof(struct frameset_store) * capacity,
3723 			ARCAN_MEM_VSTRUCT, ARCAN_MEM_BZERO, ARCAN_MEMALIGN_NATURAL
3724 	);
3725 
3726 	for (size_t i = 0; i < capacity; i++){
3727 		target->frameset->frames[i].frame = target->vstore;
3728 		arcan_vint_defaultmapping(target->frameset->frames[i].txcos, 1.0, 1.0);
3729 
3730 		target->vstore->refcount++;
3731 	}
3732 
3733 	target->frameset->mode = mode;
3734 
3735 	return ARCAN_OK;
3736 }
3737 
arcan_video_origoshift(arcan_vobj_id id,float sx,float sy,float sz)3738 arcan_errc arcan_video_origoshift(arcan_vobj_id id,
3739 	float sx, float sy, float sz)
3740 {
3741 	arcan_vobject* vobj = arcan_video_getobject(id);
3742 
3743 	if (!vobj)
3744 		return ARCAN_ERRC_NO_SUCH_OBJECT;
3745 
3746 	invalidate_cache(vobj);
3747 	FL_SET(vobj, FL_ORDOFS);
3748 	vobj->origo_ofs.x = sx;
3749 	vobj->origo_ofs.y = sy;
3750 	vobj->origo_ofs.z = sz;
3751 
3752 	return ARCAN_OK;
3753 }
3754 
3755 /* alter object opacity, range 0..1 */
arcan_errc arcan_video_objectopacity(arcan_vobj_id id,
	float opa, unsigned int tv)
{
	arcan_errc rv = ARCAN_ERRC_NO_SUCH_OBJECT;
	arcan_vobject* vobj = arcan_video_getobject(id);
/* clamp to the valid 0..1 range before use */
	opa = CLAMP(opa, 0.0, 1.0);

	if (vobj){
		rv = ARCAN_OK;
		invalidate_cache(vobj);

		/* clear chains for the blend attribute
		 * if time is set to override and be immediate */
		if (tv == 0){
			swipe_chain(vobj->transform, offsetof(surface_transform, blend),
				sizeof(struct transf_blend));
			vobj->current.opa = opa;
		}
		else { /* find endpoint to attach at */
			float bv = vobj->current.opa;

			surface_transform* base = vobj->transform;
			surface_transform* last = base;

/* walk to the first slot without a pending blend, chaining from the
 * opacity that the previous step will end at */
			while (base && base->blend.startt){
				bv = base->blend.endopa;
				last = base;
				base = base->next;
			}

/* no free slot at the tail? extend the chain (or create it outright) */
			if (!base){
				if (last)
					base = last->next =
						arcan_alloc_mem(sizeof(surface_transform), ARCAN_MEM_VSTRUCT,
							ARCAN_MEM_BZERO, ARCAN_MEMALIGN_NATURAL);
				else
					base = last =
						arcan_alloc_mem(sizeof(surface_transform), ARCAN_MEM_VSTRUCT,
							ARCAN_MEM_BZERO, ARCAN_MEMALIGN_NATURAL);
			}

			if (!vobj->transform)
				vobj->transform = base;

/* let the owning rendertarget know it has pending transforms */
			if (vobj->owner)
				vobj->owner->transfc++;

/* queue after the last pending blend, or start now when idle */
			base->blend.startt = last->blend.endt < arcan_video_display.c_ticks ?
				arcan_video_display.c_ticks : last->blend.endt;
			base->blend.endt = base->blend.startt + tv;
			base->blend.startopa = bv;
/* EPSILON nudge on the endpoint - presumably so it doesn't compare as
 * exactly zero downstream; TODO confirm against the opacity culling */
			base->blend.endopa = opa + EPSILON;
			base->blend.interp = ARCAN_VINTER_LINEAR;
		}
	}

	return rv;
}
3814 
arcan_video_blendinterp(arcan_vobj_id id,enum arcan_vinterp inter)3815 arcan_errc arcan_video_blendinterp(arcan_vobj_id id, enum arcan_vinterp inter)
3816 {
3817 	arcan_vobject* vobj = arcan_video_getobject(id);
3818 	if (!vobj)
3819 		return ARCAN_ERRC_NO_SUCH_OBJECT;
3820 
3821 	if (!vobj->transform)
3822 		return ARCAN_ERRC_UNACCEPTED_STATE;
3823 
3824 	surface_transform* base = vobj->transform;
3825 
3826 	while (base && base->blend.startt &&
3827 		base->next && base->next->blend.startt)
3828 			base = base->next;
3829 
3830 	assert(base);
3831 	base->blend.interp = inter;
3832 
3833 	return ARCAN_OK;
3834 }
3835 
arcan_video_scaleinterp(arcan_vobj_id id,enum arcan_vinterp inter)3836 arcan_errc arcan_video_scaleinterp(arcan_vobj_id id, enum arcan_vinterp inter)
3837 {
3838 	arcan_vobject* vobj = arcan_video_getobject(id);
3839 	if (!vobj)
3840 		return ARCAN_ERRC_NO_SUCH_OBJECT;
3841 
3842 	if (!vobj->transform)
3843 		return ARCAN_ERRC_UNACCEPTED_STATE;
3844 
3845 	surface_transform* base = vobj->transform;
3846 
3847 	while (base && base->scale.startt &&
3848 		base->next && base->next->scale.startt)
3849 			base = base->next;
3850 
3851 	assert(base);
3852 	base->scale.interp = inter;
3853 
3854 	return ARCAN_OK;
3855 }
3856 
arcan_video_moveinterp(arcan_vobj_id id,enum arcan_vinterp inter)3857 arcan_errc arcan_video_moveinterp(arcan_vobj_id id, enum arcan_vinterp inter)
3858 {
3859 	arcan_vobject* vobj = arcan_video_getobject(id);
3860 	if (!vobj)
3861 		return ARCAN_ERRC_NO_SUCH_OBJECT;
3862 
3863 	if (!vobj->transform)
3864 		return ARCAN_ERRC_UNACCEPTED_STATE;
3865 
3866 	surface_transform* base = vobj->transform;
3867 
3868 	while (base && base->move.startt &&
3869 		base->next && base->next->move.startt)
3870 			base = base->next;
3871 
3872 	assert(base);
3873 	base->move.interp = inter;
3874 
3875 	return ARCAN_OK;
3876 }
3877 
3878 /* linear transition from current position to a new desired position,
3879  * if time is 0 the move will be instantaneous (and not generate an event)
3880  * otherwise time denotes how many ticks it should take to move the object
3881  * from its start position to it's final.
3882  * An event will in this case be generated */
arcan_errc arcan_video_objectmove(arcan_vobj_id id, float newx,
	float newy, float newz, unsigned int tv)
{
	arcan_vobject* vobj = arcan_video_getobject(id);

	if (!vobj)
		return ARCAN_ERRC_NO_SUCH_OBJECT;

	invalidate_cache(vobj);

/* clear chains for the move attribute
 * if time is set to override and be immediate */
	if (tv == 0){
		swipe_chain(vobj->transform, offsetof(surface_transform, move),
			sizeof(struct transf_move));
		vobj->current.position.x = newx;
		vobj->current.position.y = newy;
		vobj->current.position.z = newz;
		return ARCAN_OK;
	}

/* find endpoint to attach at */
	surface_transform* base = vobj->transform;
	surface_transform* last = base;

/* figure out the coordinates which the transformation is chained to */
	point bwp = vobj->current.position;

	while (base && base->move.startt){
		bwp = base->move.endp;

		last = base;
		base = base->next;
	}

/* no free slot at the tail? extend the chain (or create it outright) */
	if (!base){
		if (last)
			base = last->next =
				arcan_alloc_mem(sizeof(surface_transform), ARCAN_MEM_VSTRUCT,
					ARCAN_MEM_BZERO, ARCAN_MEMALIGN_NATURAL);
		else
			base = last =
				arcan_alloc_mem(sizeof(surface_transform), ARCAN_MEM_VSTRUCT,
					ARCAN_MEM_BZERO, ARCAN_MEMALIGN_NATURAL);
	}

	point newp = {newx, newy, newz};

	if (!vobj->transform)
		vobj->transform = base;

/* queue after the last pending move, or start now when idle */
	base->move.startt = last->move.endt < arcan_video_display.c_ticks ?
		arcan_video_display.c_ticks : last->move.endt;
	base->move.endt   = base->move.startt + tv;
	base->move.interp = ARCAN_VINTER_LINEAR;
	base->move.startp = bwp;
	base->move.endp   = newp;
/* let the owning rendertarget know it has pending transforms */
	if (vobj->owner)
		vobj->owner->transfc++;

	return ARCAN_OK;
}
3945 
3946 /* scale the video object to match neww and newh, with stepx or
3947  * stepy at 0 it will be instantaneous,
3948  * otherwise it will move at stepx % of delta-size each tick
3949  * return value is an errorcode, run through char* arcan_verror(int8_t) */
arcan_video_objectscale(arcan_vobj_id id,float wf,float hf,float df,unsigned tv)3950 arcan_errc arcan_video_objectscale(arcan_vobj_id id, float wf,
3951 	float hf, float df, unsigned tv)
3952 {
3953 	arcan_errc rv = ARCAN_ERRC_NO_SUCH_OBJECT;
3954 	arcan_vobject* vobj = arcan_video_getobject(id);
3955 
3956 	if (vobj){
3957 		const int immediately = 0;
3958 		rv = ARCAN_OK;
3959 		invalidate_cache(vobj);
3960 
3961 		if (tv == immediately){
3962 			swipe_chain(vobj->transform, offsetof(surface_transform, scale),
3963 				sizeof(struct transf_scale));
3964 
3965 			vobj->current.scale.x = wf;
3966 			vobj->current.scale.y = hf;
3967 			vobj->current.scale.z = df;
3968 		}
3969 		else {
3970 			surface_transform* base = vobj->transform;
3971 			surface_transform* last = base;
3972 
3973 /* figure out the coordinates which the transformation is chained to */
3974 			scalefactor bs = vobj->current.scale;
3975 
3976 			while (base && base->scale.startt){
3977 				bs = base->scale.endd;
3978 
3979 				last = base;
3980 				base = base->next;
3981 			}
3982 
3983 			if (!base){
3984 				if (last)
3985 					base = last->next = arcan_alloc_mem(sizeof(surface_transform),
3986 						ARCAN_MEM_VSTRUCT, ARCAN_MEM_BZERO, ARCAN_MEMALIGN_NATURAL);
3987 				else
3988 					base = last = arcan_alloc_mem(sizeof(surface_transform),
3989 						ARCAN_MEM_VSTRUCT, ARCAN_MEM_BZERO, ARCAN_MEMALIGN_NATURAL);
3990 			}
3991 
3992 			if (!vobj->transform)
3993 				vobj->transform = base;
3994 
3995 			base->scale.startt = last->scale.endt < arcan_video_display.c_ticks ?
3996 				arcan_video_display.c_ticks : last->scale.endt;
3997 			base->scale.endt = base->scale.startt + tv;
3998 			base->scale.interp = ARCAN_VINTER_LINEAR;
3999 			base->scale.startd = bs;
4000 			base->scale.endd.x = wf;
4001 			base->scale.endd.y = hf;
4002 			base->scale.endd.z = df;
4003 
4004 			if (vobj->owner)
4005 				vobj->owner->transfc++;
4006 		}
4007 	}
4008 
4009 	return rv;
4010 }
4011 
4012 /*
4013  * fill out vertices / txcos, return number of elements to draw
4014  */
tesselate_2d(size_t n_s,size_t n_t)4015 static struct agp_mesh_store tesselate_2d(size_t n_s, size_t n_t)
4016 {
4017 	struct agp_mesh_store res = {
4018 		.depth_func = AGP_DEPTH_LESS
4019 	};
4020 
4021 	float step_s = 2.0 / (n_s-1);
4022 	float step_t = 2.0 / (n_t-1);
4023 
4024 /* use same buffer for both vertices and txcos, can't reuse the same values
4025  * though initially similar, the user might want to modify */
4026 	res.shared_buffer_sz = sizeof(float) * n_s * n_t * 4;
4027 	void* sbuf = arcan_alloc_mem(res.shared_buffer_sz,
4028 		ARCAN_MEM_MODELDATA, ARCAN_MEM_NONFATAL, ARCAN_MEMALIGN_PAGE);
4029 	res.shared_buffer = (uint8_t*) sbuf;
4030 	float* vertices = sbuf;
4031 	float* txcos = &vertices[n_s*n_t*2];
4032 
4033 	if (!vertices)
4034 		return res;
4035 
4036 	unsigned* indices = arcan_alloc_mem(sizeof(unsigned)*(n_s-1)*(n_t-1)*6,
4037 		ARCAN_MEM_MODELDATA, ARCAN_MEM_NONFATAL, ARCAN_MEMALIGN_PAGE);
4038 
4039 	if (!indices){
4040 		arcan_mem_free(vertices);
4041 		return res;
4042 	}
4043 
4044 /* populate txco/vertices */
4045 	for (size_t y = 0; y < n_t; y++){
4046 		for (size_t x = 0; x < n_s; x++){
4047 			size_t ofs = (y * n_s + x) * 2;
4048 			vertices[ofs + 0] = (float)x * step_s - 1.0;
4049 			vertices[ofs + 1] = (float)y * step_t - 1.0;
4050 			txcos[ofs + 0] = (float)x / (float)n_s;
4051 			txcos[ofs + 1] = (float)y / (float)n_t;
4052 		}
4053 	}
4054 
4055 /* get the indices */
4056 	size_t ofs = 0;
4057 	#define GETVERT(X,Y)( ( (X) * n_s) + Y)
4058 	for (size_t y = 0; y < n_t-1; y++)
4059 		for (size_t x = 0; x < n_s-1; x++){
4060 		indices[ofs++] = GETVERT(x, y);
4061 		indices[ofs++] = GETVERT(x, y+1);
4062 		indices[ofs++] = GETVERT(x+1, y+1);
4063 		indices[ofs++] = GETVERT(x, y);
4064 		indices[ofs++] = GETVERT(x+1, y+1);
4065 		indices[ofs++] = GETVERT(x+1, y);
4066 	}
4067 
4068 	res.verts = vertices;
4069 	res.txcos = txcos;
4070 	res.indices = indices;
4071 	res.n_vertices = n_s * n_t;
4072 	res.vertex_size = 2;
4073 	res.n_indices = (n_s-1) * (n_t-1) * 6;
4074 	res.type = AGP_MESH_TRISOUP;
4075 
4076 	return res;
4077 }
4078 
/*
 * Attach (or drop) a tesselated n_s * n_t grid mesh to [dst].
 * n_s == 0 or n_t == 0: query only, current mesh is returned in *store.
 * n_s == 1 or n_t == 1: drop any existing mesh, revert to a flat quad.
 * otherwise: (re-)build the mesh, *store receives it on success.
 */
arcan_errc arcan_video_defineshape(arcan_vobj_id dst,
	size_t n_s, size_t n_t, struct agp_mesh_store** store, bool depth)
{
	arcan_vobject* vobj = arcan_video_getobject(dst);
	arcan_errc rv = ARCAN_ERRC_NO_SUCH_OBJECT;
	if (!vobj){
		if (store)
			*store = NULL;
		return ARCAN_ERRC_NO_SUCH_OBJECT;
	}

/* query path, do not touch the current shape */
	if (n_s == 0 || n_t == 0){
		if (store)
			*store = vobj->shape;
		return ARCAN_OK;
	}

/* replacing or removing: release GPU-side mesh data first,
 * a degenerate (1-wide/1-tall) grid means 'remove' */
	if (vobj->shape || n_s == 1 || n_t == 1){
		agp_drop_mesh(vobj->shape);
		if (n_s == 1 || n_t == 1){
			vobj->shape = NULL;
			if (store)
				*store = vobj->shape;
			return ARCAN_OK;
		}
	}
	else
		vobj->shape = arcan_alloc_mem(sizeof(struct agp_mesh_store),
			ARCAN_MEM_MODELDATA, ARCAN_MEM_BZERO |
			ARCAN_MEM_NONFATAL, ARCAN_MEMALIGN_NATURAL
		);

/* NONFATAL allocation above may have failed */
	if (!vobj->shape){
		if (store)
			*store = NULL;
		return ARCAN_ERRC_OUT_OF_SPACE;
	}

/* we now KNOW that s > 1 and t > 1, that shape is valid -
 * time to build the mesh */
	struct agp_mesh_store ns = tesselate_2d(n_s, n_t);
	if (!ns.verts){
		if (vobj->shape){
			arcan_vint_dropshape(vobj);
		}
		if (store)
			*store = NULL;
		return ARCAN_ERRC_OUT_OF_SPACE;
	}

/* NOTE(review): the 'depth' argument lands in the 'nodepth' field,
 * verify the intended polarity against callers */
	*(vobj->shape) = ns;
	vobj->shape->nodepth = depth;

/* dirty- flag here if we support meshing into VBO */
	if (store)
		*store = vobj->shape;
	return ARCAN_OK;
}
4137 
/* called whenever a cell in update has a time that reaches 0,
 * shift the [ofs, ofs+count) slice (one transform kind, e.g. 'move')
 * one node towards the head of the chain, then free/delink the tail
 * node if it no longer holds any pending transform at all */
static void compact_transformation(arcan_vobject* base,
	unsigned int ofs, unsigned int count)
{
	if (!base || !base->transform) return;

	surface_transform* last = NULL;
	surface_transform* work = base->transform;
/* copy the next transformation */

	while (work && work->next){
		assert(work != work->next);
		memcpy((char*)(work) + ofs, (char*)(work->next) + ofs, count);
		last = work;
		work = work->next;
	}

/* reset the last one */
	memset((char*) work + ofs, 0, count);

/* if it is now empty, free and delink
 * (startt of every kind is zero <=> node carries nothing) */
	if (!(work->blend.startt | work->scale.startt |
		work->move.startt | work->rotate.startt )){

		arcan_mem_free(work);
		if (last)
			last->next = NULL;
		else
			base->transform = NULL;
	}
}
4169 
arcan_video_setprogram(arcan_vobj_id id,agp_shader_id shid)4170 arcan_errc arcan_video_setprogram(arcan_vobj_id id, agp_shader_id shid)
4171 {
4172 	arcan_vobject* vobj = arcan_video_getobject(id);
4173 	arcan_errc rv = ARCAN_ERRC_NO_SUCH_OBJECT;
4174 
4175 	if (vobj && agp_shader_valid(shid)){
4176 		FLAG_DIRTY(vobj);
4177 		vobj->program = shid;
4178 		rv = ARCAN_OK;
4179 	}
4180 
4181 	return rv;
4182 }
4183 
lerp_fract(float startt,float endt,float ts)4184 static inline float lerp_fract(float startt, float endt, float ts)
4185 {
4186 	float rv = (EPSILON + (ts - startt)) / (endt - startt);
4187 	rv = rv > 1.0 ? 1.0 : rv;
4188 	return rv;
4189 }
4190 
/* This is run for each active rendertarget and once for each object by
 * generating a cookie (stamp) so that objects that exist in multiple
 * rendertargets do not get updated several times.
 *
 * It returns the number of transforms applied to the object. */
static int update_object(arcan_vobject* ci, unsigned long long stamp)
{
	int upd = 0;

/* update parent if this has not already been updated this cycle */
	if (ci->last_updated < stamp &&
		ci->parent && ci->parent != &current_context->world &&
		ci->parent->last_updated != stamp){
		upd += update_object(ci->parent, stamp);
	}

	ci->last_updated = stamp;

	if (!ci->transform)
		return upd;

/* each transform kind below follows the same pattern: interpolate for
 * this stamp, and when the timeslot is exhausted snap to the endpoint,
 * optionally re-queue (FL_TCYCLE loops animations), emit the tagged
 * event and compact the chain one step */
	if (ci->transform->blend.startt){
		upd++;
		float fract = lerp_fract(ci->transform->blend.startt,
			ci->transform->blend.endt, stamp);

		ci->current.opa = lut_interp_1d[ci->transform->blend.interp](
			ci->transform->blend.startopa,
			ci->transform->blend.endopa, fract
		);

		if (fract > 1.0-EPSILON){
			ci->current.opa = ci->transform->blend.endopa;

			if (FL_TEST(ci, FL_TCYCLE)){
				arcan_video_objectopacity(ci->cellid, ci->transform->blend.endopa,
					ci->transform->blend.endt - ci->transform->blend.startt);
				if (ci->transform->blend.interp > 0)
					arcan_video_blendinterp(ci->cellid, ci->transform->blend.interp);
			}

			if (ci->transform->blend.tag)
				emit_transform_event(ci->cellid,
					MASK_OPACITY, ci->transform->blend.tag);

			compact_transformation(ci,
				offsetof(surface_transform, blend),
				sizeof(struct transf_blend));
		}
	}

/* compact_transformation above may have freed the head node, hence
 * the ci->transform re-check before every remaining slot */
	if (ci->transform && ci->transform->move.startt){
		upd++;
		float fract = lerp_fract(ci->transform->move.startt,
			ci->transform->move.endt, stamp);

		ci->current.position = lut_interp_3d[ci->transform->move.interp](
				ci->transform->move.startp,
				ci->transform->move.endp, fract
			);

		if (fract > 1.0-EPSILON){
			ci->current.position = ci->transform->move.endp;

			if (FL_TEST(ci, FL_TCYCLE)){
				arcan_video_objectmove(ci->cellid,
					ci->transform->move.endp.x,
					ci->transform->move.endp.y,
					ci->transform->move.endp.z,
					ci->transform->move.endt - ci->transform->move.startt
				);

				if (ci->transform->move.interp > 0)
					arcan_video_moveinterp(ci->cellid, ci->transform->move.interp);
			}

			if (ci->transform->move.tag)
				emit_transform_event(ci->cellid,
					MASK_POSITION, ci->transform->move.tag);

			compact_transformation(ci,
				offsetof(surface_transform, move),
				sizeof(struct transf_move));
		}
	}

	if (ci->transform && ci->transform->scale.startt){
		upd++;
		float fract = lerp_fract(ci->transform->scale.startt,
			ci->transform->scale.endt, stamp);
		ci->current.scale = lut_interp_3d[ci->transform->scale.interp](
			ci->transform->scale.startd,
			ci->transform->scale.endd, fract
		);

		if (fract > 1.0-EPSILON){
			ci->current.scale = ci->transform->scale.endd;

			if (FL_TEST(ci, FL_TCYCLE)){
				arcan_video_objectscale(ci->cellid, ci->transform->scale.endd.x,
					ci->transform->scale.endd.y,
					ci->transform->scale.endd.z,
					ci->transform->scale.endt - ci->transform->scale.startt);

				if (ci->transform->scale.interp > 0)
					arcan_video_scaleinterp(ci->cellid, ci->transform->scale.interp);
			}

			if (ci->transform->scale.tag)
				emit_transform_event(ci->cellid, MASK_SCALE, ci->transform->scale.tag);

			compact_transformation(ci,
				offsetof(surface_transform, scale),
				sizeof(struct transf_scale));
		}
	}

/* rotation differs: it interpolates quaternions via a function pointer
 * stored in the transform itself rather than the interp lut */
	if (ci->transform && ci->transform->rotate.startt){
		upd++;
		float fract = lerp_fract(ci->transform->rotate.startt,
			ci->transform->rotate.endt, stamp);

/* close enough */
		if (fract > 1.0-EPSILON){
			ci->current.rotation = ci->transform->rotate.endo;
			if (FL_TEST(ci, FL_TCYCLE))
				arcan_video_objectrotate3d(ci->cellid,
					ci->transform->rotate.endo.roll,
					ci->transform->rotate.endo.pitch,
					ci->transform->rotate.endo.yaw,
					ci->transform->rotate.endt - ci->transform->rotate.startt
				);

			if (ci->transform->rotate.tag)
				emit_transform_event(ci->cellid,
					MASK_ORIENTATION, ci->transform->rotate.tag);

			compact_transformation(ci,
				offsetof(surface_transform, rotate),
				sizeof(struct transf_rotate));
		}
		else {
			ci->current.rotation.quaternion =
				ci->transform->rotate.interp(
					ci->transform->rotate.starto.quaternion,
					ci->transform->rotate.endo.quaternion, fract
				);
		}
	}

	return upd;
}
4343 
static void expire_object(arcan_vobject* obj){
/* tick down the lifetime counter, when it hits zero queue an EXPIRE
 * event so the consumer can react before the object is destroyed */
	if (!obj->lifetime || --obj->lifetime != 0)
		return;

	arcan_event expire_ev = {
		.category = EVENT_VIDEO,
		.vid.kind = EVENT_VIDEO_EXPIRE
	};
	expire_ev.vid.source = obj->cellid;

#ifdef _DEBUG
	if (obj->tracetag){
		arcan_warning("arcan_event(EXPIRE) -- "
			"traced object expired (%s)\n", obj->tracetag);
	}
#endif

	arcan_event_enqueue(arcan_event_defaultctx(), &expire_ev);
}
4364 
static inline bool process_counter(
	struct rendertarget* tgt, int* field, int base, float fract)
{
/* base == 0: manually clocked target, never fires */
	if (base == 0)
		return false;

/* count down, on expiry re-arm with |base| - the sign of base only
 * selects between tick- and frame- driven clocking at the call site */
	if (--(*field) > 0)
		return false;

	*field = abs(base);
	return true;
}
4382 
process_readback(struct rendertarget * tgt,float fract)4383 static inline void process_readback(struct rendertarget* tgt, float fract)
4384 {
4385 	if (!FL_TEST(tgt, TGTFL_READING) &&
4386 		process_counter(tgt, &tgt->readcnt, tgt->readback, fract)){
4387 		agp_request_readback(tgt->color->vstore);
4388 		FL_SET(tgt, TGTFL_READING);
4389 	}
4390 }
4391 
/*
 * return number of actual objects that were updated / dirty,
 * move/process/etc. and possibly dispatch draw commands if needed
 */
static int tick_rendertarget(struct rendertarget* tgt)
{
	tgt->transfc = 0;
	arcan_vobject_litem* current = tgt->first;

	while (current){
		arcan_vobject* elem = current->elem;

/* fold in any pending asynchronous image loads */
		arcan_vint_joinasynch(elem, true, false);

/* stamp-guard: objects attached to several rendertargets only get
 * their transform chains advanced once per tick */
		if (elem->last_updated != arcan_video_display.c_ticks)
			tgt->transfc += update_object(elem, arcan_video_display.c_ticks);

/* forward the tick to the feed/frameserver callback, if any */
		if (elem->feed.ffunc)
			arcan_ffunc_lookup(elem->feed.ffunc)
				(FFUNC_TICK, 0, 0, 0, 0, 0, elem->feed.state, elem->cellid);

/* mode > 0, cycle activate frame every 'n' ticks */
		if (elem->frameset && elem->frameset->mctr != 0){
			elem->frameset->ctr--;
			if (elem->frameset->ctr == 0){
				step_active_frame(elem);
				elem->frameset->ctr = abs( elem->frameset->mctr );
			}
		}

/* lifetime-tracked objects count down towards an EXPIRE event */
		if ((elem->mask & MASK_LIVING) > 0)
			expire_object(elem);

		current = current->next;
	}

/* tick-clocked rendertargets redraw here when their counter expires */
	if (tgt->refresh > 0 && process_counter(tgt,
		&tgt->refreshcnt, tgt->refresh, 0.0)){
		tgt->transfc += process_rendertarget(tgt, 0.0);
		tgt->dirtyc = 0;
	}

/* negative readback values are handled on the tick clock here
 * (presumably positive ones are frame-driven elsewhere - verify) */
	if (tgt->readback < 0)
		process_readback(tgt, 0.0);

	return tgt->transfc;
}
4439 
/* advance the video subsystem [steps] ticks, store the accumulated
 * number of dirty/updated objects in [njobs] (if provided) and return
 * the wall-clock time the processing took */
unsigned arcan_video_tick(unsigned steps, unsigned* njobs)
{
	if (steps == 0)
		return 0;

	unsigned now = arcan_frametime();
	uint32_t tsd = arcan_video_display.c_ticks;
/* refresh the per-tick random cookie */
	arcan_random((void*)&arcan_video_display.cookie, 8);

#ifdef SHADER_TIME_PERIOD
	tsd = tsd % SHADER_TIME_PERIOD;
#endif

	do {
/* the world object carries context-global transforms */
		arcan_video_display.dirty +=
			update_object(&current_context->world, arcan_video_display.c_ticks);

		arcan_video_display.dirty +=
			agp_shader_envv(TIMESTAMP_D, &tsd, sizeof(uint32_t));

/* all offscreen rendertargets first, then the display output */
		for (size_t i = 0; i < current_context->n_rtargets; i++)
			arcan_video_display.dirty +=
				tick_rendertarget(&current_context->rtargets[i]);

		arcan_video_display.dirty +=
			tick_rendertarget(&current_context->stdoutp);

/*
 * we don't want c_ticks running too high (the tick is monotonic, but not
 * continous) as lots of float operations are relying on this as well, this
 * will cause transformations that are scheduled across the boundary to behave
 * oddly until reset. A fix would be to rebase if that is a problem.
 */
		arcan_video_display.c_ticks =
			(arcan_video_display.c_ticks + 1) % (INT32_MAX / 3);

		steps = steps - 1;
	} while (steps);

	if (njobs)
		*njobs = arcan_video_display.dirty;

	return arcan_frametime() - now;
}
4484 
arcan_video_clipto(arcan_vobj_id id,arcan_vobj_id clip_tgt)4485 arcan_errc arcan_video_clipto(arcan_vobj_id id, arcan_vobj_id clip_tgt)
4486 {
4487 	arcan_vobject* vobj = arcan_video_getobject(id);
4488 	if (!vobj)
4489 		return ARCAN_ERRC_NO_SUCH_OBJECT;
4490 
4491 	vobj->clip_src = clip_tgt;
4492 
4493 	return ARCAN_OK;
4494 }
4495 
arcan_video_setclip(arcan_vobj_id id,enum arcan_clipmode mode)4496 arcan_errc arcan_video_setclip(arcan_vobj_id id, enum arcan_clipmode mode)
4497 {
4498 	arcan_vobject* vobj = arcan_video_getobject(id);
4499 	if (!vobj)
4500 		return ARCAN_ERRC_NO_SUCH_OBJECT;
4501 
4502 	vobj->clip = mode;
4503 
4504 	return ARCAN_OK;
4505 }
4506 
arcan_video_persistobject(arcan_vobj_id id)4507 arcan_errc arcan_video_persistobject(arcan_vobj_id id)
4508 {
4509 	arcan_vobject* vobj = arcan_video_getobject(id);
4510 
4511 	if (!vobj)
4512 		return ARCAN_ERRC_NO_SUCH_OBJECT;
4513 
4514 	if (!vobj->frameset &&
4515 		vobj->vstore->refcount == 1 &&
4516 		vobj->parent == &current_context->world){
4517 		FL_SET(vobj, FL_PRSIST);
4518 
4519 		return ARCAN_OK;
4520 	}
4521 	else
4522 		return ARCAN_ERRC_UNACCEPTED_STATE;
4523 }
4524 
arcan_video_visible(arcan_vobj_id id)4525 bool arcan_video_visible(arcan_vobj_id id)
4526 {
4527 	bool rv = false;
4528 	arcan_vobject* vobj= arcan_video_getobject(id);
4529 
4530 	if (vobj && id > 0)
4531 		return vobj->current.opa > EPSILON;
4532 
4533 	return rv;
4534 }
4535 
/* take sprops, apply them to the coordinates in vobj with proper
 * masking (or force to ignore mask), store the results in dprops -
 * [lerp] is the sub-tick fraction used for smooth interpolation */
static void apply(arcan_vobject* vobj, surface_properties* dprops,
	surface_properties* sprops, float lerp, bool force)
{
	*dprops = vobj->current;

/* only the head of the transform chain is sampled here, queued entries
 * take over as earlier ones complete (see update_object) */
	if (vobj->transform){
		surface_transform* tf = vobj->transform;
		unsigned ct = arcan_video_display.c_ticks;

		if (tf->move.startt)
			dprops->position = lut_interp_3d[tf->move.interp](
				tf->move.startp,
				tf->move.endp,
				lerp_fract(tf->move.startt, tf->move.endt, (float)ct + lerp)
			);

		if (tf->scale.startt)
			dprops->scale = lut_interp_3d[tf->scale.interp](
				tf->scale.startd,
				tf->scale.endd,
				lerp_fract(tf->scale.startt, tf->scale.endt, (float)ct + lerp)
			);

		if (tf->blend.startt)
			dprops->opa = lut_interp_1d[tf->blend.interp](
				tf->blend.startopa,
				tf->blend.endopa,
				lerp_fract(tf->blend.startt, tf->blend.endt, (float)ct + lerp)
			);

		if (tf->rotate.startt){
			dprops->rotation.quaternion = tf->rotate.interp(
				tf->rotate.starto.quaternion, tf->rotate.endo.quaternion,
				lerp_fract(tf->rotate.startt, tf->rotate.endt,
					(float)ct + lerp)
			);

/* keep the euler representation in sync with the quaternion */
			vector ang = angle_quat(dprops->rotation.quaternion);
			dprops->rotation.roll  = ang.x;
			dprops->rotation.pitch = ang.y;
			dprops->rotation.yaw   = ang.z;
		}

/* NOTE(review): sprops == NULL is only tolerated on this path,
 * callers without a transform chain must provide it */
		if (!sprops)
			return;
	}

/* translate to sprops */
	if (force || (vobj->mask & MASK_POSITION) > 0)
		dprops->position = add_vector(dprops->position, sprops->position);

	if (force || (vobj->mask & MASK_ORIENTATION) > 0){
		dprops->rotation.yaw   += sprops->rotation.yaw;
		dprops->rotation.pitch += sprops->rotation.pitch;
		dprops->rotation.roll  += sprops->rotation.roll;
		if (FL_TEST(vobj, FL_FULL3D)){
			dprops->rotation.quaternion = mul_quat(
				sprops->rotation.quaternion, dprops->rotation.quaternion );
		}
	}

	if (force || (vobj->mask & MASK_OPACITY) > 0){
		dprops->opa *= sprops->opa;
	}
}
4603 
/*
 * Caching works as follows;
 * Any object that has a parent with an ongoing transformation
 * has its valid_cache property set to false
 * upon changing it to true a copy is made and stored in prop_cache
 * and a resolve- pass is performed with its results stored in prop_matr
 * which is then re-used every rendercall.
 * Queueing a transformation immediately invalidates the cache.
 */
void arcan_resolve_vidprop(
	arcan_vobject* vobj, float lerp, surface_properties* props)
{
/* fast path: previously resolved and nothing changed since */
	if (vobj->valid_cache)
		*props = vobj->prop_cache;

/* walk the chain up to the parent, resolve recursively - there might be an
 * early out detection here if all transforms are masked though the value of
 * that is questionable without more real-world data */
	else if (vobj->parent && vobj->parent != &current_context->world){
		surface_properties dprop = empty_surface();
		arcan_resolve_vidprop(vobj->parent, lerp, &dprop);

/* now apply the parent chain to ourselves */
		apply(vobj, props, &dprop, lerp, false);

		if (vobj->p_scale){
/* resolve parent scaled size, then our own delta, apply that and then back
 * to object-local scale factor */
			if (vobj->p_scale & SCALEM_WIDTH){
				float pw = vobj->parent->origw * dprop.scale.x;
				float mw_d = vobj->origw + ((vobj->origw * props->scale.x) - vobj->origw);
				pw += mw_d - 1;
				props->scale.x = pw / (float)vobj->origw;
			}
			if (vobj->p_scale & SCALEM_HEIGHT){
				float ph = vobj->parent->origh * dprop.scale.y;
				float mh_d = vobj->origh + ((vobj->origh * props->scale.y) - vobj->origh);
				ph += mh_d - 1;
				props->scale.y = ph / (float)vobj->origh;
			}
		}

/* anchor ignores normal position mask */
		switch(vobj->p_anchor){
		case ANCHORP_UR:
			props->position.x += (float)vobj->parent->origw * dprop.scale.x;
		break;
		case ANCHORP_LR:
			props->position.y += (float)vobj->parent->origh * dprop.scale.y;
			props->position.x += (float)vobj->parent->origw * dprop.scale.x;
		break;
		case ANCHORP_LL:
			props->position.y += (float)vobj->parent->origh * dprop.scale.y;
		break;
		case ANCHORP_CR:
			props->position.y += (float)vobj->parent->origh * dprop.scale.y * 0.5;
			props->position.x += (float)vobj->parent->origw * dprop.scale.x;
		break;
/* the centered anchors share their midpoint math */
		case ANCHORP_C:
		case ANCHORP_UC:
		case ANCHORP_CL:
		case ANCHORP_LC:{
			float mid_y = (vobj->parent->origh * dprop.scale.y) * 0.5;
			float mid_x = (vobj->parent->origw * dprop.scale.x) * 0.5;
			if (vobj->p_anchor == ANCHORP_UC ||
				vobj->p_anchor == ANCHORP_LC || vobj->p_anchor == ANCHORP_C)
				props->position.x += mid_x;

			if (vobj->p_anchor == ANCHORP_CL || vobj->p_anchor == ANCHORP_C)
				props->position.y += mid_y;

			if (vobj->p_anchor == ANCHORP_LC)
				props->position.y += vobj->parent->origh * dprop.scale.y;
		}
/* fall through - UL/default add nothing */
		case ANCHORP_UL:
		default:
		break;
		}
	}
	else
		apply(vobj, props, &current_context->world.current, lerp, true);

/* the cache evaluation here is a bit shallow - there are differences between
 * in-frame caching (multiple resolves of related objects within the same
 * frame) and time-stable (no ongoing transformations queued) - likely that big
 * gains can be have with in-frame caching as well */
	arcan_vobject* current = vobj;
	bool can_cache = true;
	while (current && can_cache){
		if (current->transform){
			can_cache = false;
			break;
		}
		current = current->parent;
	}

/* populate the cache and pre-bake the modelview matrix */
	if (can_cache && vobj->owner && !vobj->valid_cache){
		surface_properties dprop = *props;
		vobj->prop_cache  = *props;
		vobj->valid_cache = true;
		build_modelview(vobj->prop_matr, vobj->owner->base, &dprop, vobj);
	}
	else
		;
}
4709 
static void calc_cp_area(arcan_vobject* vobj, point* ul, point* lr)
{
/* grow [ul, lr] to cover the resolved screen-space area of vobj and,
 * recursively, all of its parents up to (not including) world */
	surface_properties props;
	arcan_resolve_vidprop(vobj, 0.0, &props);

	float x2 = (props.position.x + props.scale.x * vobj->origw);
	float y2 = (props.position.y + props.scale.y * vobj->origh);

	if (props.position.x < ul->x)
		ul->x = props.position.x;
	if (props.position.y < ul->y)
		ul->y = props.position.y;

	lr->x = props.position.x > x2 ? props.position.x : x2;
	lr->y = props.position.y > y2 ? props.position.y : y2;

	if (vobj->parent && vobj->parent != &current_context->world)
		calc_cp_area(vobj->parent, ul, lr);
}
4726 
/* build the per-object modelview matrix [dmatr] from the rendertarget
 * base matrix [imatr] and the resolved properties, [prop] is mutated:
 * scale becomes half-extents and position the centerpoint */
static inline void build_modelview(float* dmatr,
	float* imatr, surface_properties* prop, arcan_vobject* src)
{
	float _Alignas(16) omatr[16];
	float _Alignas(16) tmatr[16];

/* now position represents centerpoint in screen coordinates */
	prop->scale.x *= (float)src->origw * 0.5f;
	prop->scale.y *= (float)src->origh * 0.5f;

	prop->position.x += prop->scale.x;
	prop->position.y += prop->scale.y;

	src->rotate_state =
		fabsf(prop->rotation.roll)  > EPSILON ||
		fabsf(prop->rotation.pitch) > EPSILON ||
		fabsf(prop->rotation.yaw)   > EPSILON;

	memcpy(tmatr, imatr, sizeof(float) * 16);

	if (src->rotate_state){
		if (FL_TEST(src, FL_FULL3D))
			matr_quatf(norm_quat (prop->rotation.quaternion), omatr);
		else
			matr_rotatef(DEG2RAD(prop->rotation.roll), omatr);
	}
	else {
/* no rotation: fall back to identity so the origo-offset path below
 * never multiplies with an uninitialized matrix (previously UB when
 * origo_ofs was set on a non-rotated object) */
		memset(omatr, '\0', sizeof(omatr));
		omatr[0] = omatr[5] = omatr[10] = omatr[15] = 1.0f;
	}

	point oofs = src->origo_ofs;

/* rotate around user-defined point rather than own center
 * (NOTE(review): negative offsets don't trigger this path, and for a
 * rotating object the final multiply below recomputes dmatr without
 * the -origo translate - preserved as-is, verify intent upstream) */
	if (oofs.x > EPSILON || oofs.y > EPSILON){
		translate_matrix(tmatr,
			prop->position.x + src->origo_ofs.x,
			prop->position.y + src->origo_ofs.y, 0.0);

		multiply_matrix(dmatr, tmatr, omatr);
		translate_matrix(dmatr, -src->origo_ofs.x, -src->origo_ofs.y, 0.0);
	}
	else
		translate_matrix(tmatr, prop->position.x, prop->position.y, 0.0);

	if (src->rotate_state)
		multiply_matrix(dmatr, tmatr, omatr);
	else
		memcpy(dmatr, tmatr, sizeof(float) * 16);
}
4773 
static inline float time_ratio(arcan_tickv start, arcan_tickv stop)
{
/* fraction of the [start, stop] window elapsed at the current tick,
 * an unset (0) start yields 1.0 (treated as completed) */
	if (start == 0)
		return 1.0;

	return (float)(arcan_video_display.c_ticks - start) /
		(float)(stop - start);
}
4779 
/* push the per-object uniforms (opacity, the three size pairs and the
 * transform-progress ratios) to the shader layer before drawing [src] */
static void update_shenv(arcan_vobject* src, surface_properties* prop)
{
	agp_shader_envv(OBJ_OPACITY, &prop->opa, sizeof(float));

/* logical (initial) object size */
	float sz_i[2] = {src->origw, src->origh};
	agp_shader_envv(SIZE_INPUT, sz_i, sizeof(float)*2);

/* presentation size - scale holds half-extents at this point */
	float sz_o[2] = {prop->scale.x * 2.0, prop->scale.y * 2.0};
	agp_shader_envv(SIZE_OUTPUT, sz_o, sizeof(float)*2);

/* backing store size */
	float sz_s[2] = {src->vstore->w, src->vstore->h};
	agp_shader_envv(SIZE_STORAGE, sz_s, sizeof(float)*2);

/* progress (0..1 via time_ratio) of each pending transform kind,
 * forced to 1.0 when there is no chain at all */
	if (src->transform){
		struct surface_transform* trans = src->transform;
		float ev = time_ratio(trans->move.startt, trans->move.endt);
		agp_shader_envv(TRANS_MOVE, &ev, sizeof(float));

		ev = time_ratio(trans->rotate.startt, trans->rotate.endt);
		agp_shader_envv(TRANS_ROTATE, &ev, sizeof(float));

		ev = time_ratio(trans->scale.startt, trans->scale.endt);
		agp_shader_envv(TRANS_SCALE, &ev, sizeof(float));

		ev = time_ratio(trans->blend.startt, trans->blend.endt);
		agp_shader_envv(TRANS_BLEND, &ev, sizeof(float));
	}
	else {
		float ev = 1.0;
		agp_shader_envv(TRANS_MOVE, &ev, sizeof(float));
		agp_shader_envv(TRANS_ROTATE, &ev, sizeof(float));
		agp_shader_envv(TRANS_SCALE, &ev, sizeof(float));
		agp_shader_envv(TRANS_BLEND, &ev, sizeof(float));
	}
}
4815 
/* prepare the modelview matrix and shader environment for drawing
 * [src] into [dst], *mv receives either the cached pre-baked matrix or
 * a freshly built one (in static scratch storage - not reentrant) */
static inline void setup_surf(struct rendertarget* dst,
	surface_properties* prop, arcan_vobject* src, float** mv)
{
/* just temporary storage/scratch */
	static float _Alignas(16) dmatr[16];

/* object still loading asynchronously, nothing to draw yet */
	if (src->feed.state.tag == ARCAN_TAG_ASYNCIMGLD)
		return;

/* currently, we only cache the primary rendertarget, and the better option is
 * to actually remove secondary attachments etc. now that we have order-peeling
 * and sharestorage there should really just be 1:1 between src and dst */
	if (src->valid_cache && dst == src->owner){
/* mirror the scale/position rewrite build_modelview would perform,
 * since prop_matr was baked from the already-rewritten values */
		prop->scale.x *= src->origw * 0.5f;
		prop->scale.y *= src->origh * 0.5f;
		prop->position.x += prop->scale.x;
		prop->position.y += prop->scale.y;
		*mv = src->prop_matr;
	}
	else {
		build_modelview(dmatr, dst->base, prop, src);
		*mv = dmatr;
	}
	update_shenv(src, prop);
}
4841 
/*
 * Prepare modelview matrix and shader environment for drawing a
 * shape- (mesh) backed object. Unlike setup_surf there is no cached-
 * matrix fast path, and the matrix gets an explicit scale applied.
 *
 * Fix: dropped the unused local 'oldprop' which copied the entire
 * surface_properties struct on every call for no effect.
 */
static inline void setup_shape_surf(struct rendertarget* dst,
	surface_properties* prop, arcan_vobject* src, float** mv)
{
/* temporary scratch, same pattern as setup_surf */
	static float _Alignas(16) dmatr[16];

/* asynch- loading images have no drawable store yet */
	if (src->feed.state.tag == ARCAN_TAG_ASYNCIMGLD)
		return;

	build_modelview(dmatr, dst->base, prop, src);
	*mv = dmatr;
	scale_matrix(*mv, prop->scale.x, prop->scale.y, 1.0);
	update_shenv(src, prop);
}
4855 
/*
 * Draw [src] as a single-colored quad (r,g,b forwarded through the
 * "obj_col" uniform), used for untextured stores and stencil fills.
 */
static inline void draw_colorsurf(struct rendertarget* dst,
	surface_properties prop, arcan_vobject* src,
	float r, float g, float b, float* txcos)
{
	float color[3] = {r, g, b};
	float* mvm = NULL;

	setup_surf(dst, &prop, src, &mvm);
	agp_shader_forceunif("obj_col", shdrvec3, color);

/* quad is centered at the origin with half-extents in prop.scale */
	agp_draw_vobj(
		-prop.scale.x, -prop.scale.y,
		 prop.scale.x,  prop.scale.y,
		txcos, mvm
	);
}
4869 
4870 /*
4871  * When we deal with multiple AGP implementations, it probably makes sense
4872  * to move some of these steps to that layer, as there might be more backend
4873  * specific ways that are faster (particularly for software that can have
4874  * many fastpaths)
4875  */
/*
 * Draw [src] as a textured surface: either a full mesh ('shape') or
 * a plain quad, with [txcos] as texture coordinates and the resolved
 * position/scale in [prop].
 */
static inline void draw_texsurf(struct rendertarget* dst,
	surface_properties prop, arcan_vobject* src, float* txcos)
{
	float* mvm = NULL;
/*
 * Shape is treated mostly as a simplified 3D model but with an ortographic
 * projection and no hierarchy of meshes etc. we still need to switch to 3D
 * mode so we get a depth buffer to work with as there might be vertex- stage Z
 * displacement. This switch is slightly expensive (depth-buffer clear) though
 * used for such fringe cases that it's only a problem when measured as such.
 */
	if (src->shape){
		if (!src->shape->nodepth)
			agp_pipeline_hint(PIPELINE_3D);

		setup_shape_surf(dst, &prop, src, &mvm);
		agp_shader_envv(MODELVIEW_MATR, mvm, sizeof(float) * 16);
		agp_submit_mesh(src->shape, MESH_FACING_BOTH);

/* restore the 2D pipeline state for the remainder of the pass */
		if (!src->shape->nodepth)
			agp_pipeline_hint(PIPELINE_2D);
	}
	else {
		setup_surf(dst, &prop, src, &mvm);
/* center-origin quad, half-extents in prop.scale */
		agp_draw_vobj(
				(-prop.scale.x),
				(-prop.scale.y),
				( prop.scale.x),
				( prop.scale.y), txcos, mvm);
	}
}
4907 
4908 /*
4909  * Perform an explicit poll pass of the object in question.
4910  * Assumes [dst] is valid.
4911  *
4912  * [step] will commit- the buffer, rotate frame-store and have the object
4913  * cookie tagged with the current update
4914  */
/*
 * Poll [dst]s feed function and, when a new frame is available and
 * [step] is set, commit it: cycle the active frame-store (when a
 * frameset counter is used) and issue the FFUNC_RENDER upload.
 * Update-cookie tracking prevents committing the same feed twice
 * within one round.
 */
static void ffunc_process(arcan_vobject* dst, bool step)
{
	if (!dst->feed.ffunc)
		return;

	TRACE_MARK_ONESHOT("video", "feed-poll", TRACE_SYS_DEFAULT, dst->cellid, 0, dst->tracetag);
	int frame_status = arcan_ffunc_lookup(dst->feed.ffunc)(
		FFUNC_POLL, 0, 0, 0, 0, 0, dst->feed.state, dst->cellid);

	if (frame_status == FRV_GOTFRAME){
/* there is an edge condition from the conductor where it wants to 'pump' the
 * feeds but not induce any video buffer transfers (audio is ok) as we still
 * have buffers in flight and can't buffer more */
		if (!step)
			return;

/* this feed has already been updated during the current round so we can't
 * continue without risking graphics-layer undefined behavior (mutating stores
 * while pending asynch tasks), mark the rendertarget as dirty and move on */
		FLAG_DIRTY(dst);
		if (dst->feed.pcookie == arcan_video_display.cookie){
			dst->owner->transfc++;
			return;
		}
		dst->feed.pcookie = arcan_video_display.cookie;

/* cycle active frame store (depending on how often we want to
 * track history frames, might not be every time) */
		if (dst->frameset && dst->frameset->mctr != 0){
			dst->frameset->ctr--;

			if (dst->frameset->ctr == 0){
/* mctr may be negative (direction encoding), counter itself is positive */
				dst->frameset->ctr = abs( dst->frameset->mctr );
				step_active_frame(dst);
			}
		}

/* this will queue the new frame upload, unlocking any external provider
 * and so on, see frameserver.c and the different vfunc handlers there */
		TRACE_MARK_ENTER("video", "feed-render", TRACE_SYS_DEFAULT, dst->cellid, 0, dst->tracetag);
		arcan_ffunc_lookup(dst->feed.ffunc)(FFUNC_RENDER,
			dst->vstore->vinf.text.raw, dst->vstore->vinf.text.s_raw,
			dst->vstore->w, dst->vstore->h,
			dst->vstore->vinf.text.glid,
			dst->feed.state, dst->cellid
		);
		TRACE_MARK_EXIT("video", "feed-render", TRACE_SYS_DEFAULT, dst->cellid, 0, dst->tracetag);

/* for statistics, mark an upload */
		arcan_video_display.dirty++;
		dst->owner->uploadc++;
		dst->owner->transfc++;
	}

	return;
}
4971 
arcan_vint_pollfeed(arcan_vobj_id vid,bool step)4972 arcan_errc arcan_vint_pollfeed(arcan_vobj_id vid, bool step)
4973 {
4974 	arcan_vobject* vobj = arcan_video_getobject(vid);
4975 	if (!vobj)
4976 		return ARCAN_ERRC_NO_SUCH_OBJECT;
4977 
4978 /* this will always invalidate, so calling this multiple times per
4979  * frame is implementation defined behavior */
4980 	ffunc_process(vobj, step);
4981 
4982 	return ARCAN_OK;
4983 }
4984 
4985 /*
4986  * For large Ns this approach 'should' be rather dumb in the sense
4987  * that we could arguably well just have a set of descriptors and
4988  * check the ones that have been signalled. On the other hand, they
4989  * will be read almost immediately after this, and with that in mind,
4990  * we would possibly gain more by just having a big-array(TM){for
4991  * all cases where n*obj_size < data_cache_size} as that hit/miss is
4992  * really all that matters now.
4993  */
/*
 * Walk a pipeline list and poll+step every object that has a feed
 * function attached.
 */
static void poll_list(arcan_vobject_litem* current)
{
	for (arcan_vobject_litem* cur = current;
		cur && cur->elem; cur = cur->next){
		arcan_vobject* vobj = cur->elem;
		if (vobj->feed.ffunc)
			ffunc_process(vobj, true);
	}
}
5005 
arcan_video_pollfeed()5006 void arcan_video_pollfeed()
5007 {
5008  for (off_t ind = 0; ind < current_context->n_rtargets; ind++)
5009 		arcan_vint_pollreadback(&current_context->rtargets[ind]);
5010 	arcan_vint_pollreadback(&current_context->stdoutp);
5011 
5012 	for (size_t i = 0; i < current_context->n_rtargets; i++)
5013 		poll_list(current_context->rtargets[i].first);
5014 
5015 	poll_list(current_context->stdoutp.first);
5016 }
5017 
/*
 * Return the object that [vobj] should be clipped against: an
 * explicitly set clip_src takes precedence, otherwise the parent.
 * Returns NULL when the effective source is the world object.
 */
static arcan_vobject* get_clip_source(arcan_vobject* vobj)
{
/* explicit clipping reference set and not pointing at world? */
	if (vobj->clip_src && vobj->clip_src != ARCAN_VIDEO_WORLDID){
		arcan_vobject* explicit_src = arcan_video_getobject(vobj->clip_src);
		if (explicit_src)
			return explicit_src;
	}

/* fall back to the parent, with world mapping to 'no source' */
	return vobj->parent == &current_context->world ? NULL : vobj->parent;
}
5032 
/*
 * Fill the stencil buffer used to clip [celem]: shallow clipping
 * draws only its direct clip source, deep clipping draws the whole
 * parent chain up to (excluding) the world object.
 */
static inline void populate_stencil(
	struct rendertarget* tgt, arcan_vobject* celem, float fract)
{
	agp_prepare_stencil();

/* note that the stencil buffer setup currently forces the default shader, this
 * might not be desired if some vertex transform is desired in the clipping */
	agp_shader_activate(tgt->shid);

	if (celem->clip == ARCAN_CLIP_SHALLOW){
		celem = get_clip_source(celem);
		if (celem){
			surface_properties pprops = empty_surface();
			arcan_resolve_vidprop(celem, fract, &pprops);
/* white fill is enough, only the stencil mask matters */
			draw_colorsurf(tgt, pprops, celem, 1.0, 1.0, 1.0, NULL);
		}
	}
	else
/* deep -> draw all objects that aren't clipping to parent,
 * terminate when a shallow clip- object is found */
		while (celem->parent != &current_context->world){
			surface_properties pprops = empty_surface();
			arcan_resolve_vidprop(celem->parent, fract, &pprops);

			if (celem->parent->clip == ARCAN_CLIP_OFF)
				draw_colorsurf(tgt, pprops, celem->parent, 1.0, 1.0, 1.0, NULL);

			else if (celem->parent->clip == ARCAN_CLIP_SHALLOW){
				draw_colorsurf(tgt, pprops, celem->parent, 1.0, 1.0, 1.0, NULL);
				break;
			}

			celem = celem->parent;
		}

	agp_activate_stencil();
}
5070 
arcan_video_rendertargetid(arcan_vobj_id did,int * inid,int * outid)5071 arcan_errc arcan_video_rendertargetid(arcan_vobj_id did, int* inid, int* outid)
5072 {
5073 	arcan_vobject* vobj = arcan_video_getobject(did);
5074 	if (!vobj)
5075 		return ARCAN_ERRC_NO_SUCH_OBJECT;
5076 
5077 	struct rendertarget* tgt = arcan_vint_findrt(vobj);
5078 	if (!tgt)
5079 		return ARCAN_ERRC_UNACCEPTED_STATE;
5080 
5081 	if (inid){
5082 		tgt->id = *inid;
5083 		FLAG_DIRTY(vobj);
5084 	}
5085 
5086 	if (outid)
5087 		*outid = tgt->id;
5088 
5089 	return ARCAN_OK;
5090 }
5091 
/*
 * Activate the frameset of [elem] for multitexturing, with frame
 * [ind] on the first texture unit and earlier frames (wrapping
 * backwards through the set) on the following units.
 */
void arcan_vint_bindmulti(arcan_vobject* elem, size_t ind)
{
	struct vobject_frameset* set = elem->frameset;
	size_t sz = set->n_frames;

/* Build a temporary array of storage- info references for multi-
 * build. Note that this does not respect texture coordinates */
	struct agp_vstore* elems[sz];

/* walk backwards from [ind], wrapping at the start of the set */
	for (size_t i = 0; i < sz; i++, ind = (ind > 0 ? ind - 1 : sz - 1))
		elems[i] = set->frames[ind].frame;

	agp_activate_vstore_multi(elems, sz);
}
5106 
/*
 * Dispatch drawing of [vobj] based on its store type (colored or
 * 2D-textured) after applying its blend state. Returns 1 when a
 * draw call was issued, 0 otherwise.
 */
static int draw_vobj(struct rendertarget* tgt,
	arcan_vobject* vobj, surface_properties* dprops, float* txcos)
{
/* fully opaque 'normal' blending degenerates to no blending at all */
	bool fully_opaque =
		vobj->blendmode == BLEND_NORMAL && dprops->opa > 1.0 - EPSILON;
	agp_blendstate(fully_opaque ? BLEND_NONE : vobj->blendmode);

	struct agp_vstore* vstore = vobj->vstore;

/* untextured store: draw as a colored surface, but only when a
 * custom program is set */
	if (vstore->txmapped == TXSTATE_OFF){
		if (vobj->program == 0)
			return 0;

		draw_colorsurf(tgt, *dprops, vobj, vstore->vinf.col.r,
			vstore->vinf.col.g, vstore->vinf.col.b, txcos);
		return 1;
	}

/* other states (cube/3d textures) are not drawn from this path */
	if (vstore->txmapped != TXSTATE_TEX2D)
		return 0;

	draw_texsurf(tgt, *dprops, vobj, txcos);
	return 1;
}
5130 
5131 /*
5132  * Apply clipping without using the stencil buffer, cheaper but with some
5133  * caveats of its own. Will work particularly bad for partial clipping with
5134  * customized texture coordinates.
5135  */
/*
 * Clip [elem] against [clip_src] by adjusting position/scale and
 * texture coordinates rather than using the stencil buffer.
 * Returns false when [elem] is fully outside the clip region (skip
 * drawing), true otherwise ([dprops]/[txcos] possibly modified).
 */
static inline bool setup_shallow_texclip(
	arcan_vobject* elem,
	arcan_vobject* clip_src,
	float** txcos, surface_properties* dprops, float fract)
{
/* scratch copy of the texture coordinates, single-threaded use */
	static float cliptxbuf[8];

	surface_properties pprops = empty_surface();
	arcan_resolve_vidprop(clip_src, fract, &pprops);

/* clip-source rectangle in screen space */
	float p_x = pprops.position.x;
	float p_y = pprops.position.y;
	float p_w = pprops.scale.x * clip_src->origw;
	float p_h = pprops.scale.y * clip_src->origh;
	float p_xw = p_x + p_w;
	float p_yh = p_y + p_h;

/* candidate (drawn object) rectangle in screen space */
	float cp_x = dprops->position.x;
	float cp_y = dprops->position.y;
	float cp_w = dprops->scale.x * elem->origw;
	float cp_h = dprops->scale.y * elem->origh;
	float cp_xw = cp_x + cp_w;
	float cp_yh = cp_y + cp_h;

/* fully outside? skip drawing */
	if (cp_xw < p_x || cp_yh < p_y ||	cp_x > p_xw || cp_y > p_yh){
		return false;
	}

/* fully contained? don't do anything */
	else if (	cp_x >= p_x && cp_xw <= p_xw && cp_y >= p_y && cp_yh <= p_yh ){
		return true;
	}

/* partial overlap: crop each edge and shift the matching texture
 * coordinates proportionally (layout: ul, ur, lr, ll pairs) */
	memcpy(cliptxbuf, *txcos, sizeof(float) * 8);
	float xrange = cliptxbuf[2] - cliptxbuf[0];
	float yrange = cliptxbuf[7] - cliptxbuf[1];

	if (cp_x < p_x){
		float sl = ((p_x - cp_x) / elem->origw) * xrange;
		cp_w -= p_x - cp_x;
		cliptxbuf[0] += sl;
		cliptxbuf[6] += sl;
		cp_x = p_x;
	}

	if (cp_y < p_y){
		float su = ((p_y - cp_y) / elem->origh) * yrange;
		cp_h -= p_y - cp_y;
		cliptxbuf[1] += su;
		cliptxbuf[3] += su;
		cp_y = p_y;
	}

	if (cp_x + cp_w > p_xw){
		float sr = ((cp_x + cp_w) - p_xw) / elem->origw * xrange;
		cp_w -= (cp_x + cp_w) - p_xw;
		cliptxbuf[2] -= sr;
		cliptxbuf[4] -= sr;
	}

	if (cp_y + cp_h > p_yh){
		float sd = ((cp_y + cp_h) - p_yh) / elem->origh * yrange;
		cp_h -= (cp_y + cp_h) - p_yh;
		cliptxbuf[5] -= sd;
		cliptxbuf[7] -= sd;
	}

/* dprops modifications should be moved to a scaled draw */
	dprops->position.x = cp_x;
	dprops->position.y = cp_y;
	dprops->scale.x = cp_w / elem->origw;
	dprops->scale.y = cp_h / elem->origh;

/* this is expensive, we should instead temporarily offset */
	elem->valid_cache = false;
	*txcos = cliptxbuf;
	return true;
}
5215 
/* rendertarget currently bound for drawing, per render thread */
_Thread_local static struct rendertarget* current_rendertarget;

/* expose the rendertarget currently being processed (NULL outside a pass) */
struct rendertarget* arcan_vint_current_rt()
{
	return current_rendertarget;
}
5221 
/*
 * Draw every object in the pipeline of [tgt] (or of the rendertarget
 * it links to), with [fract] as interpolation state. Returns the
 * number of objects processed, 0 when the pass was skipped due to a
 * clean dirty state.
 */
static size_t process_rendertarget(struct rendertarget* tgt, float fract)
{
	arcan_vobject_litem* current;

/* If the rendertarget links to the pipeline of another, inherit the dirty
 * state from that. This comes from define-linktarget, though it might also be
 * that an extended view (merge two rendertargets) would be useful. If that
 * turns out to be the case, simply run link first then ourselves. */
	if (tgt->link){
		current = tgt->link->first;
		tgt->dirtyc += tgt->link->dirtyc;
		tgt->transfc += tgt->link->transfc;
	}
	else
		current = tgt->first;

/* If there are no ongoing transformations, or the platform has flagged that we
 * need to redraw everything, and there are no actual changes to the rtgt pipe
 * (FLAG_DIRTY) then early out. This does not cover content update from
 * external sources directly as those are set during ffunc_process/pollfeed */
	if (
		!arcan_video_display.dirty &&
		!arcan_video_display.ignore_dirty &&
		!tgt->dirtyc && !tgt->transfc)
		return 0;

	tgt->uploadc = 0;

/* this does not really swap the stores unless they are actually different, it
 * is cheaper to do it here than shareglstore as the search for vobj to rtgt is
 * expensive */
	if (tgt->color)
		agp_rendertarget_swapstore(tgt->art, tgt->color->vstore);

	current_rendertarget = tgt;
	agp_activate_rendertarget(tgt->art);
	agp_shader_envv(RTGT_ID, &tgt->id, sizeof(int));
	agp_shader_envv(OBJ_OPACITY, &(float){1.0}, sizeof(float));

	if (!FL_TEST(tgt, TGTFL_NOCLEAR))
		agp_rendertarget_clear();

/* when global dirty-ignore is set, always count this pass as producing */
	size_t pc = arcan_video_display.ignore_dirty ? 1 : 0;

/* first, handle all 3d work (which may require multiple passes etc.) */
	if (tgt->order3d == ORDER3D_FIRST && current && current->elem->order < 0){
		current = arcan_3d_refresh(tgt->camtag, current, fract);
		pc++;
	}

/* skip a possible 3d pipeline */
	while (current && current->elem->order < 0)
		current = current->next;

	if (!current)
		goto end3d;

/* make sure we're in a decent state for 2D */
	agp_pipeline_hint(PIPELINE_2D);

	agp_shader_activate(agp_default_shader(BASIC_2D));
	agp_shader_envv(PROJECTION_MATR, tgt->projection, sizeof(float)*16);

	while (current && current->elem->order >= 0){
		arcan_vobject* elem = current->elem;

/* respect the order window [min_order, max_order] of the target */
		if (current->elem->order < tgt->min_order){
			current = current->next;
			continue;
		}

		if (current->elem->order > tgt->max_order)
			break;

/* calculate coordinate system translations, world cannot be masked */
		surface_properties dprops = empty_surface();
		arcan_resolve_vidprop(elem, fract, &dprops);

/* don't waste time on objects that aren't supposed to be visible */
		if ( dprops.opa <= EPSILON || elem == tgt->color){
			current = current->next;
			continue;
		}

/* enable clipping using stencil buffer, we need to reset the state of the
 * stencil buffer between draw calls so track if it's enabled or not */
		bool clipped = false;

/*
 * texture coordinates that will be passed to the draw call, clipping and other
 * effects may maintain a local copy and manipulate these
 */
		float* txcos = elem->txcos;
		float** dstcos = &txcos;

		if ( (elem->mask & MASK_MAPPING) > 0)
			txcos = elem->parent != &current_context->world ?
				elem->parent->txcos : elem->txcos;

		if (!txcos)
			txcos = arcan_video_display.default_txcos;

/* depending on frameset- mode, we may need to split the frameset up into
 * multitexturing, or switch the txcos with the ones that may be used for
 * clipping, but mapping TU indices to current shader must be done before.
 * To not skip on the early-out-on-clipping and not incur additional state
 * change costs, only do it in this edge case. */
		agp_shader_id shid = tgt->shid;
		if (!tgt->force_shid && elem->program)
			shid = elem->program;
		agp_shader_activate(shid);

		if (elem->frameset){
			if (elem->frameset->mode == ARCAN_FRAMESET_MULTITEXTURE){
				arcan_vint_bindmulti(elem, elem->frameset->index);
			}
			else{
				struct frameset_store* ds =
					&elem->frameset->frames[elem->frameset->index];
				txcos = ds->txcos;
				agp_activate_vstore(ds->frame);
			}
		}
		else
			agp_activate_vstore(elem->vstore);

/* fast-path out if no clipping */
		arcan_vobject* clip_src;
		current = current->next;

		if (elem->clip == ARCAN_CLIP_OFF || !(clip_src = get_clip_source(elem))){
			pc += draw_vobj(tgt, elem, &dprops, *dstcos);
			continue;
		}

/* fast-path, shallow non-rotated clipping */
		if (elem->clip == ARCAN_CLIP_SHALLOW &&
			!elem->rotate_state && !clip_src->rotate_state){

/* this will tweak the output object size and texture coordinates */
			if (!setup_shallow_texclip(elem, clip_src, dstcos, &dprops, fract)){
				continue;
			}

			pc += draw_vobj(tgt, elem, &dprops, *dstcos);
			continue;
		}

/* slow path: stencil-buffer based clipping */
		populate_stencil(tgt, elem, fract);
		pc += draw_vobj(tgt, elem, &dprops, *dstcos);
		agp_disable_stencil();
	}

/* reset and try the 3d part again if requested */
end3d:
	current = tgt->first;
	if (current && current->elem->order < 0 && tgt->order3d == ORDER3D_LAST){
		agp_shader_activate(agp_default_shader(BASIC_2D));
		current = arcan_3d_refresh(tgt->camtag, current, fract);
		if (current != tgt->first)
			pc++;
	}

/* remember which update round this target was drawn in */
	if (pc){
		tgt->frame_cookie = arcan_video_display.cookie;
	}
	return pc;
}
5390 
arcan_video_forceread(arcan_vobj_id sid,bool local,av_pixel ** dptr,size_t * dsize)5391 arcan_errc arcan_video_forceread(
5392 	arcan_vobj_id sid, bool local, av_pixel** dptr, size_t* dsize)
5393 {
5394 /*
5395  * more involved than one may think, the store doesn't have to be representative
5396  * in case of rendertargets, and for streaming readbacks of those we already
5397  * have readback toggles etc. Thus this function is only for "one-off" reads
5398  * where a blocking behavior may be accepted, especially outside a main
5399  * renderloop as this will possibly stall the pipeline
5400  */
5401 
5402 	arcan_vobject* vobj = arcan_video_getobject(sid);
5403 	struct agp_vstore* dstore = vobj->vstore;
5404 
5405 	if (!vobj || !dstore)
5406 		return ARCAN_ERRC_NO_SUCH_OBJECT;
5407 
5408 	if (dstore->txmapped != TXSTATE_TEX2D)
5409 		return ARCAN_ERRC_UNACCEPTED_STATE;
5410 
5411 	*dsize = sizeof(av_pixel) * dstore->w * dstore->h;
5412 	*dptr  = arcan_alloc_mem(*dsize, ARCAN_MEM_VBUFFER,
5413 		ARCAN_MEM_TEMPORARY | ARCAN_MEM_NONFATAL, ARCAN_MEMALIGN_PAGE);
5414 
5415 	if (local && dstore->vinf.text.raw && dstore->vinf.text.s_raw > 0){
5416 		memcpy(dptr, dstore->vinf.text.raw, *dsize);
5417 	}
5418 	else {
5419 		av_pixel* temp = dstore->vinf.text.raw;
5420 		dstore->vinf.text.raw = *dptr;
5421 		agp_readback_synchronous(dstore);
5422 		dstore->vinf.text.raw = temp;
5423 	}
5424 
5425 	return ARCAN_OK;
5426 }
5427 
arcan_video_disable_worldid()5428 void arcan_video_disable_worldid()
5429 {
5430 	if (current_context->stdoutp.art){
5431 		agp_drop_rendertarget(current_context->stdoutp.art);
5432 		current_context->stdoutp.art = NULL;
5433 	}
5434 	arcan_video_display.no_stdout = true;
5435 }
5436 
/* accessor for the world (stdoutp) accelerated rendertarget, may be
 * NULL after arcan_video_disable_worldid */
struct agp_rendertarget* arcan_vint_worldrt()
{
	return current_context->stdoutp.art;
}
5441 
/* accessor for the backing store of the world output object.
 * NOTE(review): assumes stdoutp.color is always set when this is
 * called - no NULL check here, verify against callers */
struct agp_vstore* arcan_vint_world()
{
	return current_context->stdoutp.color->vstore;
}
5446 
/*
 * Immediately re-process the rendertarget that [vid] is bound to,
 * outside the normal refresh cycle. With [forcedirty] the full pass
 * runs regardless of dirty state; pending readbacks are serviced.
 */
arcan_errc arcan_video_forceupdate(arcan_vobj_id vid, bool forcedirty)
{
	arcan_vobject* vobj = arcan_video_getobject(vid);
	if (!vobj)
		return ARCAN_ERRC_NO_SUCH_OBJECT;

	struct rendertarget* tgt = arcan_vint_findrt(vobj);
	if (!tgt)
		return ARCAN_ERRC_UNACCEPTED_STATE;

/* remember / retain platform decay */
	size_t id = arcan_video_display.ignore_dirty;
	if (forcedirty){
		FLAG_DIRTY(vobj);
/* full pass regardless of there being any updates or not */
		arcan_video_display.ignore_dirty = 1;
	}
	else {
		arcan_video_display.ignore_dirty = 0;
	}

/* use the last known interpolation state for the pass */
	process_rendertarget(tgt, arcan_video_display.c_lerp);
	tgt->dirtyc = 0;

/* restore decay counter and unbind so later passes start clean */
	arcan_video_display.ignore_dirty = id;
	current_rendertarget = NULL;
	agp_activate_rendertarget(NULL);

	if (tgt->readback != 0){
		process_readback(tgt, arcan_video_display.c_lerp);
		arcan_vint_pollreadback(tgt);
	}

	return ARCAN_OK;
}
5482 
arcan_video_screenshot(av_pixel ** dptr,size_t * dsize)5483 arcan_errc arcan_video_screenshot(av_pixel** dptr, size_t* dsize)
5484 {
5485 	struct monitor_mode mode = platform_video_dimensions();
5486 	*dsize = sizeof(char) * mode.width * mode.height * sizeof(av_pixel);
5487 
5488 	*dptr = arcan_alloc_mem(*dsize, ARCAN_MEM_VBUFFER,
5489 		ARCAN_MEM_TEMPORARY | ARCAN_MEM_NONFATAL, ARCAN_MEMALIGN_PAGE);
5490 
5491 	if (!(*dptr)){
5492 		*dsize = 0;
5493 		return ARCAN_ERRC_OUT_OF_SPACE;
5494 	}
5495 
5496 	agp_save_output(mode.width, mode.height, *dptr, *dsize);
5497 
5498 	return ARCAN_OK;
5499 }
5500 
5501 /* Check outstanding readbacks, map and feed onwards, ideally we should synch
5502  * this with a fence - but the platform GL etc. versioning restricts things for
5503  * the time being. Threaded- dispatch from the conductor is the right way
5504  * forward */
/*
 * Check if an outstanding asynchronous readback on [tgt] has
 * completed and, if so, forward the buffer to the bound feed
 * function and release the readback state.
 */
void arcan_vint_pollreadback(struct rendertarget* tgt)
{
	if (!FL_TEST(tgt, TGTFL_READING))
		return;

	arcan_vobject* vobj = tgt->color;

/* don't check the readback unless the client is ready, should possibly have a
 * timeout for this as well so we don't hold GL resources with an unwilling /
 * broken client, it's a hard tradeoff as streaming video encode might deal
 * well with the dropped frame at this stage, while a variable rate interactive
 * source may lose data */
	arcan_vfunc_cb ffunc = NULL;
	if (vobj->feed.ffunc){
		ffunc = arcan_ffunc_lookup(vobj->feed.ffunc);
		if (FRV_GOTFRAME == ffunc(
			FFUNC_POLL, NULL, 0, 0, 0, 0, vobj->feed.state, vobj->cellid))
			return;
	}

/* now we can check the readback, it is not safe to call poll, get results
 * and then call poll again, we have to release once retrieved */
	struct asynch_readback_meta rbb = agp_poll_readback(vobj->vstore);

/* no data yet, try again on a later pass */
	if (rbb.ptr == NULL)
		return;

/* the ffunc might've disappeared, so disable the readback state */
	if (!vobj->feed.ffunc)
		tgt->readback = 0;
	else{
		arcan_ffunc_lookup(vobj->feed.ffunc)(
			FFUNC_READBACK, rbb.ptr, rbb.w * rbb.h * sizeof(av_pixel),
			rbb.w, rbb.h, 0, vobj->feed.state, vobj->cellid
		);
	}

/* buffer must be released exactly once after retrieval */
	rbb.release(rbb.tag);
	FL_CLEAR(tgt, TGTFL_READING);
}
5545 
/*
 * Advance one rendertarget for this refresh cycle: possibly skip it
 * when its sole consumer (a frameserver) has not synched yet,
 * otherwise process it if its refresh counter says so. Returns the
 * number of transfers/updates performed.
 */
static size_t steptgt(float fract, struct rendertarget* tgt)
{
/* A special case here are rendertargets where the color output store
 * is explicitly bound only to a frameserver. This requires that:
 * 1. The frameserver is still waiting to synch
 * 2. The object (rendertarget color vobj) is invisible
 * 3. The backing store has a single consumer
 */
	struct arcan_vobject* dst = tgt->color;
	if (dst && dst->current.opa < EPSILON && dst->vstore->refcount == 1 &&
		dst->feed.state.tag == ARCAN_TAG_FRAMESERV &&
		arcan_ffunc_lookup(dst->feed.ffunc)
			(FFUNC_POLL, 0, 0, 0, 0, 0, dst->feed.state, dst->cellid) == FRV_GOTFRAME)
	{
		return 1;
	}

	size_t transfc = 0;
/* negative refresh means frame- based (vs. tick- based) clocking */
	if (tgt->refresh < 0 && process_counter(
		tgt, &tgt->refreshcnt, tgt->refresh, fract)){
		transfc += process_rendertarget(tgt, fract);
		tgt->dirtyc = 0;

/* may need to readback even if we haven't updated as it may
 * be used as clock (though optimization possibility of using buffer) */
		process_readback(tgt, fract);
	}

	return transfc;
}
5576 
/*
 * Run one full refresh pass over all rendertargets (world last),
 * with [fract] as interpolation state. The accumulated dirty count
 * is returned through [ndirty]; the return value is the wall-clock
 * time in milliseconds spent in the pass.
 */
unsigned arcan_vint_refresh(float fract, size_t* ndirty)
{
	long long int pre = arcan_timemillis();
	TRACE_MARK_ENTER("video", "refresh", TRACE_SYS_DEFAULT, 0, 0, "");

	size_t transfc = 0;

/* we track last interp. state in order to handle forcerefresh */
	arcan_video_display.c_lerp = fract;
	arcan_random((void*)&arcan_video_display.cookie, 8);

/* active shaders with counter counts towards dirty */
	transfc += agp_shader_envv(FRACT_TIMESTAMP_F, &fract, sizeof(float));

/* the user/developer or the platform can decide that all dirty tracking should
 * be enabled - we do that with a global counter and then 'fake' a transform */
	if (arcan_video_display.ignore_dirty > 0){
		transfc++;
		arcan_video_display.ignore_dirty--;
	}

/* Right now there is an explicit 'first come first update' kind of
 * order except for worldid as everything else might be composed there.
 *
 * The option would be to build the dependency graph between rendertargets
 * and account for cycles, but has so far not shown worth it. */
	size_t tgt_dirty = 0;
	for (size_t ind = 0; ind < current_context->n_rtargets; ind++){
		struct rendertarget* tgt = &current_context->rtargets[ind];

		const char* tag = tgt->color ? tgt->color->tracetag : NULL;
		TRACE_MARK_ENTER("video", "process-rendertarget", TRACE_SYS_DEFAULT, ind, 0, tag);
			tgt_dirty = steptgt(fract, tgt);
			transfc += tgt_dirty;
		TRACE_MARK_EXIT("video", "process-rendertarget", TRACE_SYS_DEFAULT, ind, tgt_dirty, tag);
	}

/* reset the bound rendertarget, otherwise we may be in an undefined
 * state if world isn't dirty or with pending transfers */
	current_rendertarget = NULL;
	agp_activate_rendertarget(NULL);

	TRACE_MARK_ENTER("video", "process-world-rendertarget", TRACE_SYS_DEFAULT, 0, 0, "world");
		tgt_dirty = steptgt(fract, &current_context->stdoutp);
		transfc += tgt_dirty;
	TRACE_MARK_EXIT("video", "process-world-rendertarget", TRACE_SYS_DEFAULT, 0, tgt_dirty, "world");
	*ndirty = transfc + arcan_video_display.dirty;
	arcan_video_display.dirty = 0;

/* This is part of another dirty workaround when n buffers are needed by the
 * video platform for a flip to reach the display and we want the same contents
 * in every buffer stage at the cost of rendering */
	if (*ndirty && arcan_video_display.ignore_dirty == 0){
		arcan_video_display.ignore_dirty = platform_video_decay();
	}

	long long int post = arcan_timemillis();
	TRACE_MARK_EXIT("video", "refresh", TRACE_SYS_DEFAULT, 0, 0, "");
	return post - pre;
}
5637 
/* set the global default scale mode applied to newly created images */
void arcan_video_default_scalemode(enum arcan_vimage_mode newmode)
{
	arcan_video_display.scalemode = newmode;
}
5642 
/* set the global default blend function applied to newly created objects */
void arcan_video_default_blendmode(enum arcan_blendfunc newmode)
{
	arcan_video_display.blendmode = newmode;
}
5647 
/* set the global default texture wrap modes (s- and t- axis) used
 * for newly created backing stores */
void arcan_video_default_texmode(enum arcan_vtex_mode modes,
	enum arcan_vtex_mode modet)
{
	arcan_video_display.deftxs = modes;
	arcan_video_display.deftxt = modet;
}
5654 
/*
 * Resolve the four screen-space corner points of [id] into [res]
 * (must hold at least 4 vectors), order: ul, ur, lr, ll. Applies
 * roll rotation around the surface center when present. 3D objects
 * are rejected with UNACCEPTED_STATE.
 */
arcan_errc arcan_video_screencoords(arcan_vobj_id id, vector* res)
{
	arcan_vobject* vobj = arcan_video_getobject(id);

	if (!vobj)
		return ARCAN_ERRC_NO_SUCH_OBJECT;

	if (vobj->feed.state.tag == ARCAN_TAG_3DOBJ)
		return ARCAN_ERRC_UNACCEPTED_STATE;

	surface_properties prop;

/* use the cached resolve when valid, otherwise resolve at the last
 * known interpolation state */
	if (vobj->valid_cache)
		prop = vobj->prop_cache;
	else {
		prop = empty_surface();
		arcan_resolve_vidprop(vobj, arcan_video_display.c_lerp, &prop);
	}

	float w = (float)vobj->origw * prop.scale.x;
	float h = (float)vobj->origh * prop.scale.y;

/* axis-aligned corners first */
	res[0].x = prop.position.x;
	res[0].y = prop.position.y;
	res[1].x = res[0].x + w;
	res[1].y = res[0].y;
	res[2].x = res[1].x;
	res[2].y = res[1].y + h;
	res[3].x = res[0].x;
	res[3].y = res[2].y;

/* then rotate each corner around the center point when rolled */
	if (fabsf(prop.rotation.roll) > EPSILON){
		float ang = DEG2RAD(prop.rotation.roll);
		float sinv = sinf(ang);
		float cosv = cosf(ang);

		float cpx = res[0].x + 0.5 * w;
		float cpy = res[0].y + 0.5 * h;

		for (size_t i = 0; i < 4; i++){
			float rx = cosv * (res[i].x - cpx) - sinv * (res[i].y-cpy) + cpx;
			float ry = sinv * (res[i].x - cpx) + cosv * (res[i].y-cpy) + cpy;
			res[i].x = rx;
			res[i].y = ry;
		}
	}

	return ARCAN_OK;
}
5704 
/* signed area test: sign tells on which side of the edge (p1,p2) the
 * point p3 lies (cross-product of the two edge vectors) */
static inline int isign(int p1_x, int p1_y,
	int p2_x, int p2_y, int p3_x, int p3_y)
{
	int ax = p1_x - p3_x;
	int ay = p1_y - p3_y;
	int bx = p2_x - p3_x;
	int by = p2_y - p3_y;

	return ax * by - bx * ay;
}
5710 
/* point-in-triangle: (x, y) is inside when it lies on the same side of
 * all three edges of the triangle packed as t = {x0,y0,x1,y1,x2,y2} */
static inline bool itri(int x, int y, int t[6])
{
	bool s1 = isign(x, y, t[0], t[1], t[2], t[3]) < 0;
	bool s2 = isign(x, y, t[2], t[3], t[4], t[5]) < 0;
	bool s3 = isign(x, y, t[4], t[5], t[0], t[1]) < 0;

	return s1 == s2 && s2 == s3;
}
5721 
bool arcan_video_hittest(arcan_vobj_id id, int x, int y)
{
/* Test whether the screen coordinate (x, y) falls within the projected
 * surface of [id]. 3D objects fall back to a bounding-box intersection
 * through the standard output camera. */
	vector corners[4];
	arcan_vobject* vobj = arcan_video_getobject(id);

	if (arcan_video_screencoords(id, corners) != ARCAN_OK){
		if (vobj && vobj->feed.state.tag == ARCAN_TAG_3DOBJ)
			return arcan_3d_obj_bb_intersect(
				current_context->stdoutp.camtag, id, x, y);

		return false;
	}

/* unrotated objects reduce to a simple axis-aligned box test */
	if (!vobj->rotate_state)
		return
			x >= corners[0].x && y >= corners[0].y &&
			x <= corners[2].x && y <= corners[2].y;

/* rotated: split the quad into two triangles and test each */
	int tri_a[] = {
		corners[0].x, corners[0].y,
		corners[1].x, corners[1].y,
		corners[2].x, corners[2].y
	};

	int tri_b[] = {
		corners[2].x, corners[2].y,
		corners[3].x, corners[3].y,
		corners[0].x, corners[0].y
	};

	return itri(x, y, tri_a) || itri(x, y, tri_b);
}
5754 
arcan_video_sliceobject(arcan_vobj_id sid,enum arcan_slicetype type,size_t base,size_t n_slices)5755 arcan_errc arcan_video_sliceobject(arcan_vobj_id sid,
5756 	enum arcan_slicetype type, size_t base, size_t n_slices)
5757 {
5758 	arcan_vobject* src = arcan_video_getobject(sid);
5759 	if (!src)
5760 		return ARCAN_ERRC_NO_SUCH_OBJECT;
5761 
5762 	return (agp_slice_vstore(src->vstore, n_slices, base,
5763 		type == ARCAN_CUBEMAP ? TXSTATE_CUBE : TXSTATE_TEX3D))
5764 		? ARCAN_OK : ARCAN_ERRC_UNACCEPTED_STATE;
5765 }
5766 
arcan_video_updateslices(arcan_vobj_id sid,size_t n_slices,arcan_vobj_id * slices)5767 arcan_errc arcan_video_updateslices(
5768 	arcan_vobj_id sid, size_t n_slices, arcan_vobj_id* slices)
5769 {
5770 	arcan_vobject* src = arcan_video_getobject(sid);
5771 	if (!src || n_slices > 4096)
5772 		return ARCAN_ERRC_NO_SUCH_OBJECT;
5773 
5774 	struct agp_vstore* vstores[n_slices];
5775 	for (size_t i = 0; i < sid; i++){
5776 		arcan_vobject* slot = arcan_video_getobject(slices[i]);
5777 		if (!slot){
5778 			vstores[i] = NULL;
5779 			continue;
5780 		}
5781 		vstores[i] = slot->vstore;
5782 	}
5783 
5784 	return (agp_slice_synch(src->vstore, n_slices, vstores)) ?
5785 			ARCAN_OK : ARCAN_ERRC_UNACCEPTED_STATE;
5786 }
5787 
/* An object counts as visible when its opacity is above EPSILON and,
 * while MASK_OPACITY is set, its ancestors are visible as well. */
static inline bool obj_visible(arcan_vobject* vobj)
{
	bool visible = vobj->current.opa > EPSILON;

/* NOTE(review): each iteration re-samples the *current* node's opacity
 * before stepping to the parent, so the first test is redundant and the
 * topmost ancestor's opacity is never examined, and the mask checked is
 * that of the child. Looks like an off-by-one in the chain walk -
 * confirm intended semantics before changing. */
	while (visible && vobj->parent && (vobj->mask & MASK_OPACITY) > 0){
		visible = vobj->current.opa > EPSILON;
		vobj = vobj->parent;
	}

	return visible;
}
5799 
size_t arcan_video_rpick(arcan_vobj_id rt,
	arcan_vobj_id* dst, size_t lim, int x, int y)
{
/* Pick objects in the pipeline of rendertarget [rt] that cover (x, y),
 * walking in reverse pipeline order (topmost drawn first). At most
 * [lim] ids are written into [dst]; returns the number found. */
	size_t found = 0;
	arcan_vobject* vobj = arcan_video_getobject(rt);
	struct rendertarget* tgt = arcan_vint_findrt(vobj);

	if (!lim || !tgt || !tgt->first)
		return 0;

/* seek to the tail of the pipeline */
	arcan_vobject_litem* item = tgt->first;
	while (item->next)
		item = item->next;

/* then step backwards, gathering pickable, visible hits */
	for (; item && found < lim; item = item->previous){
		arcan_vobject* cur = item->elem;

		if ((cur->mask & MASK_UNPICKABLE) != 0)
			continue;

		if (obj_visible(cur) && arcan_video_hittest(cur->cellid, x, y))
			dst[found++] = cur->cellid;
	}

	return found;
}
5828 
size_t arcan_video_pick(arcan_vobj_id rt,
	arcan_vobj_id* dst, size_t lim, int x, int y)
{
/* Pick objects in the pipeline of rendertarget [rt] that cover (x, y),
 * walking in pipeline order (bottom-most drawn first). At most [lim]
 * ids are written into [dst]; returns the number found. */
	size_t found = 0;
	arcan_vobject* vobj = arcan_video_getobject(rt);
	struct rendertarget* tgt = arcan_vint_findrt(vobj);

	if (!lim || !tgt || !tgt->first)
		return 0;

	for (arcan_vobject_litem* item = tgt->first;
		item && found < lim; item = item->next){
		arcan_vobject* cur = item->elem;

		if (!cur->cellid || (cur->mask & MASK_UNPICKABLE))
			continue;

		if (obj_visible(cur) && arcan_video_hittest(cur->cellid, x, y))
			dst[found++] = cur->cellid;
	}

	return found;
}
5852 
arcan_video_storage_properties(arcan_vobj_id id)5853 img_cons arcan_video_storage_properties(arcan_vobj_id id)
5854 {
5855 	img_cons res = {.w = 0, .h = 0, .bpp = 0};
5856 	arcan_vobject* vobj = arcan_video_getobject(id);
5857 
5858 	if (vobj && vobj->vstore){
5859 		res.w = vobj->vstore->w;
5860 		res.h = vobj->vstore->h;
5861 		res.bpp = vobj->vstore->bpp;
5862 	}
5863 
5864 	return res;
5865 }
5866 
5867 /* image dimensions at load time, without
5868  * any transformations being applied */
arcan_video_initial_properties(arcan_vobj_id id)5869 surface_properties arcan_video_initial_properties(arcan_vobj_id id)
5870 {
5871 	surface_properties res = empty_surface();
5872 	arcan_vobject* vobj = arcan_video_getobject(id);
5873 
5874 	if (vobj && id > 0){
5875 		res.scale.x = vobj->origw;
5876 		res.scale.y = vobj->origh;
5877 	}
5878 
5879 	return res;
5880 }
5881 
arcan_video_resolve_properties(arcan_vobj_id id)5882 surface_properties arcan_video_resolve_properties(arcan_vobj_id id)
5883 {
5884 	surface_properties res = empty_surface();
5885 	arcan_vobject* vobj = arcan_video_getobject(id);
5886 
5887 	if (vobj && id > 0){
5888 		arcan_resolve_vidprop(vobj, 0.0, &res);
5889 		res.scale.x *= vobj->origw;
5890 		res.scale.y *= vobj->origh;
5891 	}
5892 
5893 	return res;
5894 }
5895 
arcan_video_current_properties(arcan_vobj_id id)5896 surface_properties arcan_video_current_properties(arcan_vobj_id id)
5897 {
5898 	surface_properties rv = empty_surface();
5899 	arcan_vobject* vobj = arcan_video_getobject(id);
5900 
5901 	if (vobj){
5902 		rv = vobj->current;
5903 		rv.scale.x *= vobj->origw;
5904 		rv.scale.y *= vobj->origh;
5905 	}
5906 
5907 	return rv;
5908 }
5909 
/* Predict the properties of [id] as they will be [ticks] ticks from
 * now by walking the pending transform chains. ticks == 0 short-
 * circuits to the current state, ticks == (unsigned)-1 means "after all
 * queued transforms have completed". Scale is returned in pixels. */
surface_properties arcan_video_properties_at(arcan_vobj_id id, unsigned ticks)
{
	if (ticks == 0)
		return arcan_video_current_properties(id);

/* sentinel: resolve to the final state of every chain */
	bool fullprocess = ticks == (unsigned int) -1;

	surface_properties rv = empty_surface();
	arcan_vobject* vobj = arcan_video_getobject(id);

	if (vobj){
		rv = vobj->current;
/* if there's no transform defined, then the ticks will be the same */
		if (vobj->transform){
/* translate ticks from relative to absolute */
			if (!fullprocess)
				ticks += arcan_video_display.c_ticks;

/* check if there is a transform for each individual attribute, and find
 * the one that defines a timeslot within the range of the desired value */
			surface_transform* current = vobj->transform;
			if (current->move.startt){
				while ( (current->move.endt < ticks || fullprocess) && current->next
					&& current->next->move.startt)
					current = current->next;

/* past the end: final position; exactly at the start: start position;
 * otherwise interpolate within the slot */
				if (current->move.endt <= ticks)
					rv.position = current->move.endp;
				else if (current->move.startt == ticks)
					rv.position = current->move.startp;
				else{ /* need to interpolate */
					float fract = lerp_fract(current->move.startt,
						current->move.endt, ticks);
					rv.position = lut_interp_3d[current->move.interp](
						current->move.startp,
						current->move.endp, fract
					);
				}
			}

/* same pattern for scale */
			current = vobj->transform;
			if (current->scale.startt){
				while ( (current->scale.endt < ticks || fullprocess) &&
					current->next && current->next->scale.startt)
					current = current->next;

				if (current->scale.endt <= ticks)
					rv.scale = current->scale.endd;
				else if (current->scale.startt == ticks)
					rv.scale = current->scale.startd;
				else{
					float fract = lerp_fract(current->scale.startt,
						current->scale.endt, ticks);
					rv.scale = lut_interp_3d[current->scale.interp](
						current->scale.startd,
						current->scale.endd, fract
					);
				}
			}

/* same pattern for opacity (1d interpolation) */
			current = vobj->transform;
			if (current->blend.startt){
				while ( (current->blend.endt < ticks || fullprocess) &&
					current->next && current->next->blend.startt)
					current = current->next;

				if (current->blend.endt <= ticks)
					rv.opa = current->blend.endopa;
				else if (current->blend.startt == ticks)
					rv.opa = current->blend.startopa;
				else{
					float fract = lerp_fract(current->blend.startt,
						current->blend.endt, ticks);
					rv.opa = lut_interp_1d[current->blend.interp](
						current->blend.startopa,
						current->blend.endopa,
						fract
					);
				}
			}

/* same pattern for rotation (quaternion slerp/interp function stored
 * per-transform rather than through the lut tables) */
			current = vobj->transform;
			if (current->rotate.startt){
				while ( (current->rotate.endt < ticks || fullprocess) &&
					current->next && current->next->rotate.startt)
					current = current->next;

				if (current->rotate.endt <= ticks)
					rv.rotation = current->rotate.endo;
				else if (current->rotate.startt == ticks)
					rv.rotation = current->rotate.starto;
				else{
					float fract = lerp_fract(current->rotate.startt,
						current->rotate.endt, ticks);

					rv.rotation.quaternion = current->rotate.interp(
						current->rotate.starto.quaternion,
						current->rotate.endo.quaternion, fract
					);
				}
			}
		}

/* normalized scale factors -> pixels */
		rv.scale.x *= vobj->origw;
		rv.scale.y *= vobj->origh;
	}

	return rv;
}
6019 
bool arcan_video_prepare_external(bool keep_events)
{
/* Save the current video stack and release the display so an external
 * process can take over; paired with arcan_video_restore_external. */
	if (arcan_video_pushcontext() == -1)
		return false;

/* this still leaves rendertargets alive, normally this is ok but if the
 * platform swaps gpus, contexts whatever in the meanwhile, it is not! */
	if (!keep_events)
		arcan_event_deinit(arcan_event_defaultctx(), false);

	platform_video_prepare_external();

	return true;
}
6034 
invalidate_rendertargets()6035 static void invalidate_rendertargets()
6036 {
6037 /* passs one, rebuild all the rendertargets */
6038 	for (size_t i = 0; i < current_context->n_rtargets; i++){
6039 		struct rendertarget* tgt = &current_context->rtargets[i];
6040 		if (!tgt->art)
6041 			continue;
6042 
6043 		arcan_mem_free(current_context->rtargets[i].art);
6044 		tgt->art = NULL;
6045 
6046 		if (!tgt->color)
6047 			continue;
6048 
6049 		tgt->art = agp_setup_rendertarget(tgt->color->vstore, tgt->mode);
6050 	}
6051 
6052 /* pass two, force update - back to forth to cover dependencies */
6053 	for (ssize_t i = current_context->n_rtargets - 1; i >= 0; i--){
6054 		struct rendertarget* tgt = &current_context->rtargets[i];
6055 		if (!tgt->color)
6056 			continue;
6057 		arcan_video_forceupdate(tgt->color->cellid, true);
6058 	}
6059 }
6060 
arcan_video_maxorder(arcan_vobj_id rt,uint16_t * ov)6061 arcan_errc arcan_video_maxorder(arcan_vobj_id rt, uint16_t* ov)
6062 {
6063 	arcan_vobject* vobj = arcan_video_getobject(rt);
6064 	if (!vobj)
6065 		return ARCAN_ERRC_NO_SUCH_OBJECT;
6066 
6067 	struct rendertarget* tgt = arcan_vint_findrt(vobj);
6068 	if (!tgt)
6069 		return ARCAN_ERRC_UNACCEPTED_STATE;
6070 
6071 	arcan_vobject_litem* current = current_context->stdoutp.first;
6072 	uint16_t order = 0;
6073 
6074 	while (current){
6075 		if (current->elem && current->elem->order > order &&
6076 			current->elem->order < 65531)
6077 			order = current->elem->order;
6078 
6079 		current = current->next;
6080 	}
6081 
6082 	*ov = order;
6083 	return ARCAN_OK;
6084 }
6085 
arcan_video_contextusage(unsigned * used)6086 unsigned arcan_video_contextusage(unsigned* used)
6087 {
6088 	if (used){
6089 		*used = 0;
6090 		for (unsigned i = 1; i < current_context->vitem_limit-1; i++)
6091 			if (FL_TEST(&current_context->vitems_pool[i], FL_INUSE))
6092 				(*used)++;
6093 	}
6094 
6095 	return current_context->vitem_limit-1;
6096 }
6097 
arcan_video_contextsize(unsigned newlim)6098 bool arcan_video_contextsize(unsigned newlim)
6099 {
6100 	if (newlim <= 1 || newlim >= VITEM_CONTEXT_LIMIT)
6101 		return false;
6102 
6103 /* this change isn't allowed when the shrink/expand operation would
6104  * change persistent objects in the stack */
6105 	if (newlim < arcan_video_display.default_vitemlim)
6106 		for (unsigned i = 1; i < current_context->vitem_limit-1; i++)
6107 			if (FL_TEST(&current_context->vitems_pool[i], FL_INUSE|FL_PRSIST))
6108 				return false;
6109 
6110 	arcan_video_display.default_vitemlim = newlim;
6111 	return true;
6112 }
6113 
arcan_video_restore_external(bool keep_events)6114 void arcan_video_restore_external(bool keep_events)
6115 {
6116 	if (!keep_events)
6117 		arcan_event_init( arcan_event_defaultctx() );
6118 
6119 	arcan_event ev = {
6120 		.category = EVENT_VIDEO,
6121 		.vid.kind = EVENT_VIDEO_DISPLAY_RESET,
6122 	};
6123 	arcan_event_enqueue(arcan_event_defaultctx(), &ev);
6124 	platform_video_restore_external();
6125 
6126 	platform_video_query_displays();
6127 	agp_shader_rebuild_all();
6128 	arcan_video_popcontext();
6129 	invalidate_rendertargets();
6130 }
6131 
flag_ctxfsrv_dms(struct arcan_video_context * ctx)6132 static void flag_ctxfsrv_dms(struct arcan_video_context* ctx)
6133 {
6134 	if (!ctx)
6135 		return;
6136 
6137 	for (size_t i = 1; i < ctx->vitem_limit; i++){
6138 		if (!FL_TEST(&(ctx->vitems_pool[i]), FL_INUSE))
6139 			continue;
6140 
6141 		arcan_vobject* current = &ctx->vitems_pool[i];
6142 		if (current->feed.state.tag ==
6143 			ARCAN_TAG_FRAMESERV && current->feed.state.ptr){
6144 			struct arcan_frameserver* fsrv = current->feed.state.ptr;
6145 			fsrv->flags.no_dms_free = true;
6146 		}
6147 	}
6148 }
6149 
6150 extern void platform_video_shutdown();
/* Tear down the entire video subsystem: pop/flush every context layer,
 * release GL resources, fonts and finally the platform display. When
 * [release_fsrv] is false, frameservers survive their object teardown. */
void arcan_video_shutdown(bool release_fsrv)
{
/* subsystem active or not */
	if (arcan_video_display.in_video == false)
		return;

	arcan_video_display.in_video = false;

/* This will effectively make sure that all external launchers, frameservers
 * etc. gets killed off. If we should release frameservers, individually set
 * their dms flag. */
	if (!release_fsrv)
		flag_ctxfsrv_dms(current_context);

	unsigned lastctxa, lastctxc = arcan_video_popcontext();

/* A bit ugly, the upper context slot gets reallocated on pop as a cheap way
 * of letting the caller 'flush', but since we want to interleave with calls
 * to flag_ctxfsrv_dms, we need to be a bit careful. This approach costs an
 * extra full- iteration, but it's in the shutdown stage - the big time waste
 * here is resetting screen resolution etc. */
	if (!release_fsrv)
		flag_ctxfsrv_dms(current_context);

	while ( lastctxc != (lastctxa = arcan_video_popcontext()) ){
		lastctxc = lastctxa;
/* NOTE(review): this condition can never hold - lastctxc was assigned
 * from lastctxa on the previous line, so flag_ctxfsrv_dms is never
 * reached here. Presumably the intent was to flag each newly exposed
 * context when !release_fsrv; confirm before simplifying. */
		if (lastctxc != lastctxa && !release_fsrv)
			flag_ctxfsrv_dms(current_context);
	}

	agp_shader_flush();
	deallocate_gl_context(current_context, true, NULL);
	arcan_video_reset_fontcache();
	TTF_Quit();
	platform_video_shutdown();
}
6187 
arcan_video_tracetag(arcan_vobj_id id,const char * const message)6188 arcan_errc arcan_video_tracetag(arcan_vobj_id id, const char*const message)
6189 {
6190 	arcan_errc rv = ARCAN_ERRC_NO_SUCH_OBJECT;
6191 	arcan_vobject* vobj = arcan_video_getobject(id);
6192 
6193 	if (vobj){
6194 		if (vobj->tracetag)
6195 			arcan_mem_free(vobj->tracetag);
6196 
6197 		vobj->tracetag = strdup(message);
6198 		rv = ARCAN_OK;
6199 	}
6200 
6201 	return rv;
6202 }
6203 
/* Replace the source descriptor of a text vstore with the string(s) in
 * [data]. Ownership of data->message / data->array transfers to the
 * store; any previously held source text is freed first. */
static void update_sourcedescr(struct agp_vstore* ds,
	struct arcan_rstrarg* data)
{
/* only text-backed stores may reach this path */
	assert(ds->vinf.text.kind != STORAGE_IMAGE_URI);

/* release the old descriptor: single string, or NULL-terminated array */
	if (ds->vinf.text.kind == STORAGE_TEXT){
		arcan_mem_free(ds->vinf.text.source);
	}
	else if (ds->vinf.text.kind == STORAGE_TEXTARRAY){
		char** work = ds->vinf.text.source_arr;
		while(*work){
			arcan_mem_free(*work);
			work++;
		}
		arcan_mem_free(ds->vinf.text.source_arr);
	}

/* then take ownership of the incoming one(s) */
	if (data->multiple){
		ds->vinf.text.kind = STORAGE_TEXTARRAY;
		ds->vinf.text.source_arr = data->array;
	}
	else {
		ds->vinf.text.kind = STORAGE_TEXT;
		ds->vinf.text.source = data->message;
	}
}
6230 
/* Render a (possibly multi-part) format string into the store of [src],
 * or into a newly allocated object when [src] == ARCAN_EID. On success
 * the id of the text object is returned; on failure ARCAN_EID is
 * returned with *errc (if provided) set to the reason. Line metadata is
 * written through [n_lines] / [lineheights]. */
arcan_vobj_id arcan_video_renderstring(arcan_vobj_id src,
	struct arcan_rstrarg data, unsigned int* n_lines,
	struct renderline_meta** lineheights,arcan_errc* errc)
{
#define FAIL(CODE){ if (errc) *errc = CODE; return ARCAN_EID; }
	arcan_vobject* vobj;
	arcan_vobj_id rv = src;

/* the world object can never act as a text store. fix: this previously
 * returned the raw error code as a vobj id without setting *errc */
	if (src == ARCAN_VIDEO_WORLDID){
		FAIL(ARCAN_ERRC_UNACCEPTED_STATE);
	}

	size_t maxw, maxh, w, h;
	struct agp_vstore* ds;
	uint32_t dsz;

/* rasterize at the density of the current attachment point */
	struct rendertarget* dst = current_context->attachment ?
		current_context->attachment : &current_context->stdoutp;
	arcan_renderfun_outputdensity(dst->hppcm, dst->vppcm);

	if (src == ARCAN_EID){
		vobj = arcan_video_newvobject(&rv);
		if (!vobj)
			FAIL(ARCAN_ERRC_OUT_OF_SPACE);

#define ARGLST src, false, n_lines, \
lineheights, &w, &h, &dsz, &maxw, &maxh, false

		ds = vobj->vstore;

		vobj->feed.state.tag = ARCAN_TAG_TEXT;
		vobj->blendmode = BLEND_FORCE;

		ds->vinf.text.raw = data.multiple ?
			arcan_renderfun_renderfmtstr_extended((const char**)data.array, ARGLST) :
			arcan_renderfun_renderfmtstr(data.message, ARGLST);

		if (ds->vinf.text.raw == NULL){
			arcan_video_deleteobject(rv);
			FAIL(ARCAN_ERRC_BAD_ARGUMENT);
		}

		ds->vinf.text.vppcm = dst->vppcm;
		ds->vinf.text.hppcm = dst->hppcm;
		ds->vinf.text.kind = STORAGE_TEXT;
		ds->vinf.text.s_raw = dsz;
		ds->w = w;
		ds->h = h;

/* transfer sync is done separately here */
		agp_update_vstore(ds, true);
		arcan_vint_attachobject(rv);
	}
	else {
/* update-in-place path: only valid on objects already tagged as text */
		vobj = arcan_video_getobject(src);

		if (!vobj)
			FAIL(ARCAN_ERRC_NO_SUCH_OBJECT);
		if (vobj->feed.state.tag != ARCAN_TAG_TEXT)
			FAIL(ARCAN_ERRC_UNACCEPTED_STATE);

		ds = vobj->vstore;

		if (data.multiple)
			arcan_renderfun_renderfmtstr_extended((const char**)data.array, ARGLST);
		else
			arcan_renderfun_renderfmtstr(data.message, ARGLST);

		invalidate_cache(vobj);
		arcan_video_objectscale(vobj->cellid, 1.0, 1.0, 1.0, 0);
	}

	vobj->origw = maxw;
	vobj->origh = maxh;

/* hand over ownership of the source string(s) to the store */
	update_sourcedescr(ds, &data);

/*
 * POT but not all used,
	vobj->txcos = arcan_alloc_mem(8 * sizeof(float),
		ARCAN_MEM_VSTRUCT, 0, ARCAN_MEMALIGN_SIMD);
	float wv = (float)maxw / (float)vobj->vstore->w;
	float hv = (float)maxh / (float)vobj->vstore->h;
	arcan_vint_defaultmapping(vobj->txcos, wv, hv);
 */
#undef ARGLST
#undef FAIL
	return rv;
}
6321