1 /*
2 * This file is part of mpv.
3 *
4 * mpv is free software; you can redistribute it and/or
5 * modify it under the terms of the GNU Lesser General Public
6 * License as published by the Free Software Foundation; either
7 * version 2.1 of the License, or (at your option) any later version.
8 *
9 * mpv is distributed in the hope that it will be useful,
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 * GNU Lesser General Public License for more details.
13 *
14 * You should have received a copy of the GNU Lesser General Public
15 * License along with mpv. If not, see <http://www.gnu.org/licenses/>.
16 */
17
18 #include <stdio.h>
19 #include <stdlib.h>
20 #include <string.h>
21 #include <assert.h>
22 #include <stdbool.h>
23 #include <pthread.h>
24 #include <math.h>
25
26 #include "mpv_talloc.h"
27
28 #include "config.h"
29 #include "osdep/atomic.h"
30 #include "osdep/timer.h"
31 #include "osdep/threads.h"
32 #include "misc/dispatch.h"
33 #include "misc/rendezvous.h"
34 #include "options/options.h"
35 #include "misc/bstr.h"
36 #include "vo.h"
37 #include "aspect.h"
38 #include "dr_helper.h"
39 #include "input/input.h"
40 #include "options/m_config.h"
41 #include "common/msg.h"
42 #include "common/global.h"
43 #include "common/stats.h"
44 #include "video/hwdec.h"
45 #include "video/mp_image.h"
46 #include "sub/osd.h"
47 #include "osdep/io.h"
48 #include "osdep/threads.h"
49
50 extern const struct vo_driver video_out_mediacodec_embed;
51 extern const struct vo_driver video_out_x11;
52 extern const struct vo_driver video_out_vdpau;
53 extern const struct vo_driver video_out_xv;
54 extern const struct vo_driver video_out_gpu;
55 extern const struct vo_driver video_out_libmpv;
56 extern const struct vo_driver video_out_null;
57 extern const struct vo_driver video_out_image;
58 extern const struct vo_driver video_out_lavc;
59 extern const struct vo_driver video_out_caca;
60 extern const struct vo_driver video_out_drm;
61 extern const struct vo_driver video_out_direct3d;
62 extern const struct vo_driver video_out_sdl;
63 extern const struct vo_driver video_out_vaapi;
64 extern const struct vo_driver video_out_wlshm;
65 extern const struct vo_driver video_out_rpi;
66 extern const struct vo_driver video_out_tct;
67 extern const struct vo_driver video_out_sixel;
68
// NULL-terminated list of all compiled-in VO drivers, in autoprobe order.
// Entries before video_out_null are candidates for automatic selection;
// everything after it must be requested explicitly by name.
const struct vo_driver *const video_out_drivers[] =
{
    &video_out_libmpv,
#if HAVE_ANDROID
    &video_out_mediacodec_embed,
#endif
    &video_out_gpu,
#if HAVE_VDPAU
    &video_out_vdpau,
#endif
#if HAVE_DIRECT3D
    &video_out_direct3d,
#endif
#if HAVE_WAYLAND && HAVE_MEMFD_CREATE
    &video_out_wlshm,
#endif
#if HAVE_XV
    &video_out_xv,
#endif
#if HAVE_SDL2_VIDEO
    &video_out_sdl,
#endif
#if HAVE_VAAPI_X11 && HAVE_GPL
    &video_out_vaapi,
#endif
#if HAVE_X11
    &video_out_x11,
#endif
    &video_out_null,
    // should not be auto-selected
    &video_out_image,
    &video_out_tct,
#if HAVE_CACA
    &video_out_caca,
#endif
#if HAVE_DRM
    &video_out_drm,
#endif
#if HAVE_RPI_MMAL
    &video_out_rpi,
#endif
#if HAVE_SIXEL
    &video_out_sixel,
#endif
    &video_out_lavc,
    NULL
};
116
// Private per-VO state, shared between the player core and the VO thread.
struct vo_internal {
    pthread_t thread;                   // the dedicated VO thread
    struct mp_dispatch_queue *dispatch; // runs callbacks on the VO thread
    struct dr_helper *dr_helper;        // direct-rendering image allocator (optional)

    // --- The following fields are protected by lock
    pthread_mutex_t lock;
    pthread_cond_t wakeup;

    bool need_wakeup;   // a wakeup was requested; skip the next wait
    bool terminate;     // VO thread should exit its loop

    bool hasframe;          // at least one frame was queued since reconfig
    bool hasframe_rendered; // at least one frame was actually drawn
    bool request_redraw;    // redraw request from player to VO
    bool want_redraw;       // redraw request from VO to player
    bool send_reset;        // send VOCTRL_RESET
    bool paused;
    bool wakeup_on_done;    // wake the core once display of the frame finishes
    int queued_events;      // event mask for the user
    int internal_events;    // event mask for us

    int64_t nominal_vsync_interval; // from reported/override display FPS (us)

    int64_t vsync_interval;         // interval currently assumed for timing (us)
    int64_t *vsync_samples;         // recent measured flip-to-flip durations
    int num_vsync_samples;          // number of entries in vsync_samples
    int64_t num_total_vsync_samples; // lifetime sample count (never decremented)
    int64_t prev_vsync;             // display time of the previous flip
    int64_t base_vsync;             // ideal (drift-corrected) vsync reference time
    int drop_point;                 // samples considered for drop detection
    double estimated_vsync_interval; // average of vsync_samples (us)
    double estimated_vsync_jitter;   // stddev relative to vsync_interval; -1 if unknown
    bool expecting_vsync;           // last flip was display-synced
    int64_t num_successive_vsyncs;  // consecutive display-synced flips

    int64_t flip_queue_offset; // queue flip events at most this much in advance
    int64_t timing_offset; // same (but from options; not VO configured)

    int64_t delayed_count;  // detected delayed/skipped vsyncs
    int64_t drop_count;     // frames dropped by the VO
    bool dropped_frame;     // the previous frame was dropped

    struct vo_frame *current_frame; // last frame queued to the VO

    int64_t wakeup_pts; // time at which to pull frame from decoder

    bool rendering; // true if an image is being rendered
    struct vo_frame *frame_queued; // should be drawn next
    int req_frames; // VO's requested value of num_frames
    uint64_t current_frame_id; // increments with each queued frame

    double display_fps;          // FPS currently used for timing decisions
    double reported_display_fps; // FPS as last reported by the VO backend

    struct stats_ctx *stats;
};
174
175 extern const struct m_sub_options gl_video_conf;
176
177 static void forget_frames(struct vo *vo);
178 static void *vo_thread(void *ptr);
179
get_desc(struct m_obj_desc * dst,int index)180 static bool get_desc(struct m_obj_desc *dst, int index)
181 {
182 if (index >= MP_ARRAY_SIZE(video_out_drivers) - 1)
183 return false;
184 const struct vo_driver *vo = video_out_drivers[index];
185 *dst = (struct m_obj_desc) {
186 .name = vo->name,
187 .description = vo->description,
188 .priv_size = vo->priv_size,
189 .priv_defaults = vo->priv_defaults,
190 .options = vo->options,
191 .options_prefix = vo->options_prefix,
192 .global_opts = vo->global_opts,
193 .hidden = vo->encode,
194 .p = vo,
195 };
196 return true;
197 }
198
// For the vo option
// Describes how --vo=... entries are parsed and matched against drivers.
const struct m_obj_list vo_obj_list = {
    .get_desc = get_desc,
    .description = "video outputs",
    // Historic names kept working as aliases for current drivers.
    .aliases = {
        {"gl", "gpu"},
        {"direct3d_shaders", "direct3d"},
        {"opengl", "gpu"},
        {"opengl-cb", "libmpv"},
        {0}
    },
    .allow_unknown_entries = true,
    .allow_trailer = true,
    .disallow_positional_parameters = true,
    .use_global_options = true,
};
215
// Dispatch-queue wakeup hook: forwards the poke to the VO thread.
static void dispatch_wakeup_cb(void *ptr)
{
    vo_wakeup((struct vo *)ptr);
}
221
222 // Initialize or update options from vo->opts
read_opts(struct vo * vo)223 static void read_opts(struct vo *vo)
224 {
225 struct vo_internal *in = vo->in;
226
227 pthread_mutex_lock(&in->lock);
228 in->timing_offset = (uint64_t)(vo->opts->timing_offset * 1e6);
229 pthread_mutex_unlock(&in->lock);
230 }
231
update_opts(void * p)232 static void update_opts(void *p)
233 {
234 struct vo *vo = p;
235
236 if (m_config_cache_update(vo->opts_cache)) {
237 read_opts(vo);
238
239 if (vo->driver->control) {
240 vo->driver->control(vo, VOCTRL_VO_OPTS_CHANGED, NULL);
241 // "Legacy" update of video position related options.
242 // Unlike VOCTRL_VO_OPTS_CHANGED, often not propagated to backends.
243 vo->driver->control(vo, VOCTRL_SET_PANSCAN, NULL);
244 }
245 }
246
247 if (vo->gl_opts_cache && m_config_cache_update(vo->gl_opts_cache)) {
248 // "Legacy" update of video GL renderer related options.
249 if (vo->driver->control)
250 vo->driver->control(vo, VOCTRL_UPDATE_RENDER_OPTS, NULL);
251 }
252
253 if (m_config_cache_update(vo->eq_opts_cache)) {
254 // "Legacy" update of video equalizer related options.
255 if (vo->driver->control)
256 vo->driver->control(vo, VOCTRL_SET_EQUALIZER, NULL);
257 }
258 }
259
// Does not include thread- and VO uninit.
// Frees everything vo_create() allocated; the VO thread must already be gone
// (or never started), since we touch vo->in without locking.
static void dealloc_vo(struct vo *vo)
{
    forget_frames(vo); // implicitly synchronized

    // These must be free'd before vo->in->dispatch.
    talloc_free(vo->opts_cache);
    talloc_free(vo->gl_opts_cache);
    talloc_free(vo->eq_opts_cache);

    pthread_mutex_destroy(&vo->in->lock);
    pthread_cond_destroy(&vo->in->wakeup);
    // vo is the talloc parent of everything else (log, vo->in, dispatch).
    talloc_free(vo);
}
274
// Create a VO instance by driver name, start its thread, and wait for the
// thread's preinit to complete. Returns NULL on any failure.
// probing: if true, the driver may fail quietly (autoprobe mode).
static struct vo *vo_create(bool probing, struct mpv_global *global,
                            struct vo_extra *ex, char *name)
{
    assert(ex->wakeup_cb);

    struct mp_log *log = mp_log_new(NULL, global->log, "vo");
    struct m_obj_desc desc;
    if (!m_obj_list_find(&desc, &vo_obj_list, bstr0(name))) {
        mp_msg(log, MSGL_ERR, "Video output %s not found!\n", name);
        talloc_free(log);
        return NULL;
    };
    struct vo *vo = talloc_ptrtype(NULL, vo);
    *vo = (struct vo) {
        .log = mp_log_new(vo, log, name),
        .driver = desc.p,
        .global = global,
        .encode_lavc_ctx = ex->encode_lavc_ctx,
        .input_ctx = ex->input_ctx,
        .osd = ex->osd,
        .monitor_par = 1,
        .extra = *ex,
        .probing = probing,
        .in = talloc(vo, struct vo_internal),
    };
    // Reparent the temporary log so it's released together with vo.
    talloc_steal(vo, log);
    *vo->in = (struct vo_internal) {
        .dispatch = mp_dispatch_create(vo),
        .req_frames = 1,
        .estimated_vsync_jitter = -1, // -1 == no estimate yet
        .stats = stats_ctx_create(vo, global, "vo"),
    };
    mp_dispatch_set_wakeup_fn(vo->in->dispatch, dispatch_wakeup_cb, vo);
    pthread_mutex_init(&vo->in->lock, NULL);
    pthread_cond_init(&vo->in->wakeup, NULL);

    vo->opts_cache = m_config_cache_alloc(NULL, global, &vo_sub_opts);
    vo->opts = vo->opts_cache->opts;

    m_config_cache_set_dispatch_change_cb(vo->opts_cache, vo->in->dispatch,
                                          update_opts, vo);

    vo->gl_opts_cache = m_config_cache_alloc(NULL, global, &gl_video_conf);
    m_config_cache_set_dispatch_change_cb(vo->gl_opts_cache, vo->in->dispatch,
                                          update_opts, vo);

    vo->eq_opts_cache = m_config_cache_alloc(NULL, global, &mp_csp_equalizer_conf);
    m_config_cache_set_dispatch_change_cb(vo->eq_opts_cache, vo->in->dispatch,
                                          update_opts, vo);

    mp_input_set_mouse_transform(vo->input_ctx, NULL, NULL);
    // An encoding driver requires an encoding context and vice versa.
    if (vo->driver->encode != !!vo->encode_lavc_ctx)
        goto error;
    vo->priv = m_config_group_from_desc(vo, vo->log, global, &desc, name);
    if (!vo->priv)
        goto error;

    if (pthread_create(&vo->in->thread, NULL, vo_thread, vo))
        goto error;
    // Rendezvous with the VO thread; a negative value means preinit failed.
    if (mp_rendezvous(vo, 0) < 0) { // init barrier
        pthread_join(vo->in->thread, NULL);
        goto error;
    }
    return vo;

error:
    dealloc_vo(vo);
    return NULL;
}
344
// Create the best available VO: first try the user's --vo list in order,
// then autoprobe the compiled-in drivers. Returns NULL if nothing works.
struct vo *init_best_video_out(struct mpv_global *global, struct vo_extra *ex)
{
    struct mp_vo_opts *opts = mp_get_config_group(NULL, global, &vo_sub_opts);
    struct m_obj_settings *vo_list = opts->video_driver_list;
    struct vo *vo = NULL;
    // first try the preferred drivers, with their optional subdevice param:
    if (vo_list && vo_list[0].name) {
        for (int n = 0; vo_list[n].name; n++) {
            // Something like "-vo name," allows fallback to autoprobing.
            if (strlen(vo_list[n].name) == 0)
                goto autoprobe;
            // Probing mode only while further user entries remain to try.
            bool p = !!vo_list[n + 1].name;
            vo = vo_create(p, global, ex, vo_list[n].name);
            if (vo)
                goto done;
        }
        goto done;
    }
autoprobe:
    // now try the rest...
    for (int i = 0; video_out_drivers[i]; i++) {
        const struct vo_driver *driver = video_out_drivers[i];
        // Entries from video_out_null onward are never auto-selected.
        if (driver == &video_out_null)
            break;
        vo = vo_create(true, global, ex, (char *)driver->name);
        if (vo)
            goto done;
    }
done:
    talloc_free(opts);
    return vo;
}
377
terminate_vo(void * p)378 static void terminate_vo(void *p)
379 {
380 struct vo *vo = p;
381 struct vo_internal *in = vo->in;
382 in->terminate = true;
383 }
384
vo_destroy(struct vo * vo)385 void vo_destroy(struct vo *vo)
386 {
387 struct vo_internal *in = vo->in;
388 mp_dispatch_run(in->dispatch, terminate_vo, vo);
389 pthread_join(vo->in->thread, NULL);
390 dealloc_vo(vo);
391 }
392
393 // Wakeup the playloop to queue new video frames etc.
wakeup_core(struct vo * vo)394 static void wakeup_core(struct vo *vo)
395 {
396 vo->extra.wakeup_cb(vo->extra.wakeup_ctx);
397 }
398
399 // Drop timing information on discontinuities like seeking.
400 // Always called locked.
reset_vsync_timings(struct vo * vo)401 static void reset_vsync_timings(struct vo *vo)
402 {
403 struct vo_internal *in = vo->in;
404 in->drop_point = 0;
405 in->base_vsync = 0;
406 in->expecting_vsync = false;
407 in->num_successive_vsyncs = 0;
408 }
409
vsync_stddef(struct vo * vo,int64_t ref_vsync)410 static double vsync_stddef(struct vo *vo, int64_t ref_vsync)
411 {
412 struct vo_internal *in = vo->in;
413 double jitter = 0;
414 for (int n = 0; n < in->num_vsync_samples; n++) {
415 double diff = in->vsync_samples[n] - ref_vsync;
416 jitter += diff * diff;
417 }
418 return sqrt(jitter / in->num_vsync_samples);
419 }
420
421 #define MAX_VSYNC_SAMPLES 200
422
// Check if we should switch to measured average display FPS if it seems
// "better" then the system-reported one. (Note that small differences are
// handled as drift instead.)
static void check_estimated_display_fps(struct vo *vo)
{
    struct vo_internal *in = vo->in;

    bool use_estimated = false;
    // Require a reasonable amount of history, and an estimate within a sane
    // 20..99 Hz band before even considering the measured value.
    if (in->num_total_vsync_samples >= MAX_VSYNC_SAMPLES / 2 &&
        in->estimated_vsync_interval <= 1e6 / 20.0 &&
        in->estimated_vsync_interval >= 1e6 / 99.0)
    {
        // Reject the estimate if any single sample deviates from it by 25%
        // or more (outliers indicate unreliable timing).
        for (int n = 0; n < in->num_vsync_samples; n++) {
            if (fabs(in->vsync_samples[n] - in->estimated_vsync_interval)
                >= in->estimated_vsync_interval / 4)
                goto done;
        }
        // Prefer the measured interval only if its jitter is clearly lower
        // than the jitter around the nominal (reported) interval.
        double mjitter = vsync_stddef(vo, in->estimated_vsync_interval);
        double njitter = vsync_stddef(vo, in->nominal_vsync_interval);
        if (mjitter * 1.01 < njitter)
            use_estimated = true;
        done: ;
    }
    // Log only when the decision actually flips relative to the current
    // choice (currently nominal <=> switching to estimated, and vice versa).
    if (use_estimated == (in->vsync_interval == in->nominal_vsync_interval)) {
        if (use_estimated) {
            MP_VERBOSE(vo, "adjusting display FPS to a value closer to %.3f Hz\n",
                       1e6 / in->estimated_vsync_interval);
        } else {
            MP_VERBOSE(vo, "switching back to assuming display fps = %.3f Hz\n",
                       1e6 / in->nominal_vsync_interval);
        }
    }
    in->vsync_interval = use_estimated ? (int64_t)in->estimated_vsync_interval
                                       : in->nominal_vsync_interval;
}
458
// Attempt to detect vsyncs delayed/skipped by the driver. This tries to deal
// with strong jitter too, because some drivers have crap vsync timing.
// Always called locked, after a new sample was recorded.
static void vsync_skip_detection(struct vo *vo)
{
    struct vo_internal *in = vo->in;

    int window = 4;
    // Walk backwards through the most recent samples, accumulating the
    // discrepancy between real flip times (t_r) and the ideal timeline (t_e).
    int64_t t_r = in->prev_vsync, t_e = in->base_vsync, diff = 0, desync_early = 0;
    for (int n = 0; n < in->drop_point; n++) {
        diff += t_r - t_e;
        t_r -= in->vsync_samples[n];
        t_e -= in->vsync_interval;
        // Snapshot the average desync over just the newest `window` samples.
        if (n == window + 1)
            desync_early = diff / window;
    }
    int64_t desync = diff / in->num_vsync_samples;
    // If the recent desync deviates strongly from the long-term desync,
    // assume the driver delayed or skipped a vsync.
    if (in->drop_point > window * 2 &&
        llabs(desync - desync_early) >= in->vsync_interval * 3 / 4)
    {
        // Assume a drop. An underflow can technically speaking not be a drop
        // (it's up to the driver what this is supposed to mean), but no reason
        // to treat it differently.
        in->base_vsync = in->prev_vsync; // resync the ideal timeline
        in->delayed_count += 1;
        in->drop_point = 0; // restart detection from scratch
        MP_STATS(vo, "vo-delayed");
    }
    if (in->drop_point > 10)
        in->base_vsync += desync / 10;  // smooth out drift
}
489
// Always called locked.
// Record the display time of the flip that just happened, update the vsync
// interval estimate/jitter, and run drop/skip detection.
static void update_vsync_timing_after_swap(struct vo *vo,
                                           struct vo_vsync_info *vsync)
{
    struct vo_internal *in = vo->in;

    int64_t vsync_time = vsync->last_queue_display_time;
    int64_t prev_vsync = in->prev_vsync;
    in->prev_vsync = vsync_time;

    // Non-display-synced flip: timing statistics are meaningless, start over.
    if (!in->expecting_vsync) {
        reset_vsync_timings(vo);
        return;
    }

    // Skip the first 2 vsyncs of a streak; their intervals are unreliable.
    in->num_successive_vsyncs++;
    if (in->num_successive_vsyncs <= 2)
        return;

    // Sliding window: drop the oldest sample once the buffer is full, and
    // insert the newest interval at the front.
    if (in->num_vsync_samples >= MAX_VSYNC_SAMPLES)
        in->num_vsync_samples -= 1;
    MP_TARRAY_INSERT_AT(in, in->vsync_samples, in->num_vsync_samples, 0,
                        vsync_time - prev_vsync);
    in->drop_point = MPMIN(in->drop_point + 1, in->num_vsync_samples);
    in->num_total_vsync_samples += 1;
    // Advance (or initialize) the ideal drift-corrected vsync reference.
    if (in->base_vsync) {
        in->base_vsync += in->vsync_interval;
    } else {
        in->base_vsync = vsync_time;
    }

    double avg = 0;
    for (int n = 0; n < in->num_vsync_samples; n++)
        avg += in->vsync_samples[n];
    in->estimated_vsync_interval = avg / in->num_vsync_samples;
    // Jitter as stddev relative to the currently assumed interval.
    in->estimated_vsync_jitter =
        vsync_stddef(vo, in->vsync_interval) / in->vsync_interval;

    check_estimated_display_fps(vo);
    vsync_skip_detection(vo);

    MP_STATS(vo, "value %f jitter", in->estimated_vsync_jitter);
    MP_STATS(vo, "value %f vsync-diff", in->vsync_samples[0] / 1e6);
}
534
// to be called from VO thread only
// Refresh the display FPS (from the driver or the user override) and derive
// the nominal vsync interval from it; notifies the core on changes.
static void update_display_fps(struct vo *vo)
{
    struct vo_internal *in = vo->in;
    pthread_mutex_lock(&in->lock);
    if (in->internal_events & VO_EVENT_WIN_STATE) {
        in->internal_events &= ~(unsigned)VO_EVENT_WIN_STATE;

        // Drop the lock while calling into the driver (it may block or call
        // back into us); reacquire before touching shared state again.
        pthread_mutex_unlock(&in->lock);

        double fps = 0;
        vo->driver->control(vo, VOCTRL_GET_DISPLAY_FPS, &fps);

        pthread_mutex_lock(&in->lock);

        in->reported_display_fps = fps;
    }

    // User override (--override-display-fps) beats the reported value.
    double display_fps = vo->opts->override_display_fps;
    if (display_fps <= 0)
        display_fps = in->reported_display_fps;

    if (in->display_fps != display_fps) {
        in->nominal_vsync_interval = display_fps > 0 ? 1e6 / display_fps : 0;
        // Keep the interval >= 1 us so later divisions stay well-defined.
        in->vsync_interval = MPMAX(in->nominal_vsync_interval, 1);
        in->display_fps = display_fps;

        MP_VERBOSE(vo, "Assuming %f FPS for display sync.\n", display_fps);

        // make sure to update the player
        in->queued_events |= VO_EVENT_WIN_STATE;
        wakeup_core(vo);
    }

    pthread_mutex_unlock(&in->lock);
}
571
check_vo_caps(struct vo * vo)572 static void check_vo_caps(struct vo *vo)
573 {
574 int rot = vo->params->rotate;
575 if (rot) {
576 bool ok = rot % 90 ? false : (vo->driver->caps & VO_CAP_ROTATE90);
577 if (!ok) {
578 MP_WARN(vo, "Video is flagged as rotated by %d degrees, but the "
579 "video output does not support this.\n", rot);
580 }
581 }
582 }
583
// Dispatch callback (VO thread): reconfigure the VO for new image params.
// p is {struct vo*, struct mp_image* (carrying the params), int* result}.
static void run_reconfig(void *p)
{
    void **pp = p;
    struct vo *vo = pp[0];
    struct mp_image *img = pp[1];
    int *ret = pp[2];

    struct mp_image_params *params = &img->params;

    struct vo_internal *in = vo->in;

    MP_VERBOSE(vo, "reconfig to %s\n", mp_image_params_to_str(params));

    // Pick up any pending option changes before reconfiguring.
    update_opts(vo);

    mp_image_params_get_dsize(params, &vo->dwidth, &vo->dheight);

    talloc_free(vo->params);
    vo->params = talloc_dup(vo, params);

    // reconfig2 (takes a full image) is preferred over the older reconfig.
    if (vo->driver->reconfig2) {
        *ret = vo->driver->reconfig2(vo, img);
    } else {
        *ret = vo->driver->reconfig(vo, vo->params);
    }
    vo->config_ok = *ret >= 0;
    if (vo->config_ok) {
        check_vo_caps(vo);
    } else {
        // Failed reconfig leaves the VO unconfigured.
        talloc_free(vo->params);
        vo->params = NULL;
    }

    // Any previously displayed/queued frames are invalid now.
    pthread_mutex_lock(&in->lock);
    talloc_free(in->current_frame);
    in->current_frame = NULL;
    forget_frames(vo);
    reset_vsync_timings(vo);
    pthread_mutex_unlock(&in->lock);

    update_display_fps(vo);
}
626
vo_reconfig(struct vo * vo,struct mp_image_params * params)627 int vo_reconfig(struct vo *vo, struct mp_image_params *params)
628 {
629 int ret;
630 struct mp_image dummy = {0};
631 mp_image_set_params(&dummy, params);
632 void *p[] = {vo, &dummy, &ret};
633 mp_dispatch_run(vo->in->dispatch, run_reconfig, p);
634 return ret;
635 }
636
vo_reconfig2(struct vo * vo,struct mp_image * img)637 int vo_reconfig2(struct vo *vo, struct mp_image *img)
638 {
639 int ret;
640 void *p[] = {vo, img, &ret};
641 mp_dispatch_run(vo->in->dispatch, run_reconfig, p);
642 return ret;
643 }
644
run_control(void * p)645 static void run_control(void *p)
646 {
647 void **pp = p;
648 struct vo *vo = pp[0];
649 int request = (intptr_t)pp[1];
650 void *data = pp[2];
651 update_opts(vo);
652 int ret = vo->driver->control(vo, request, data);
653 if (pp[3])
654 *(int *)pp[3] = ret;
655 }
656
vo_control(struct vo * vo,int request,void * data)657 int vo_control(struct vo *vo, int request, void *data)
658 {
659 int ret;
660 void *p[] = {vo, (void *)(intptr_t)request, data, &ret};
661 mp_dispatch_run(vo->in->dispatch, run_control, p);
662 return ret;
663 }
664
// Run vo_control() without waiting for a reply.
// (Only works for some VOCTRLs.)
void vo_control_async(struct vo *vo, int request, void *data)
{
    // Result slot (p[3]) is NULL: run_control will not write a reply.
    void *p[4] = {vo, (void *)(intptr_t)request, NULL, NULL};
    void **d = talloc_memdup(NULL, p, sizeof(p));

    // Since the call outlives this function, any payload must be copied into
    // the talloc'ed argument block. Each supported request is handled below;
    // anything else is a programming error.
    switch (request) {
    case VOCTRL_UPDATE_PLAYBACK_STATE:
        d[2] = talloc_dup(d, (struct voctrl_playback_state *)data);
        break;
    case VOCTRL_KILL_SCREENSAVER:
    case VOCTRL_RESTORE_SCREENSAVER:
        break; // no payload to copy
    default:
        abort(); // requires explicit support
    }

    // d (and its children) is freed by the dispatch queue after the call.
    mp_dispatch_enqueue_autofree(vo->in->dispatch, run_control, d);
}
685
// must be called locked
// Discard queued frames and per-stream frame state (used on reconfig/reset).
static void forget_frames(struct vo *vo)
{
    struct vo_internal *in = vo->in;
    in->hasframe = false;
    in->hasframe_rendered = false;
    in->drop_count = 0;
    in->delayed_count = 0;
    talloc_free(in->frame_queued);
    in->frame_queued = NULL;
    // Skip past any frame IDs that might still be referenced externally.
    in->current_frame_id += VO_MAX_REQ_FRAMES + 1;
    // don't unref current_frame; we always want to be able to redraw it
    if (in->current_frame) {
        in->current_frame->num_vsyncs = 0; // but reset future repeats
        in->current_frame->display_synced = false; // mark discontinuity
    }
}
703
704 // VOs which have no special requirements on UI event loops etc. can set the
705 // vo_driver.wait_events callback to this (and leave vo_driver.wakeup unset).
706 // This function must not be used or called for other purposes.
vo_wait_default(struct vo * vo,int64_t until_time)707 void vo_wait_default(struct vo *vo, int64_t until_time)
708 {
709 struct vo_internal *in = vo->in;
710
711 pthread_mutex_lock(&in->lock);
712 if (!in->need_wakeup) {
713 struct timespec ts = mp_time_us_to_timespec(until_time);
714 pthread_cond_timedwait(&in->wakeup, &in->lock, &ts);
715 }
716 pthread_mutex_unlock(&in->lock);
717 }
718
719 // Called unlocked.
wait_vo(struct vo * vo,int64_t until_time)720 static void wait_vo(struct vo *vo, int64_t until_time)
721 {
722 struct vo_internal *in = vo->in;
723
724 if (vo->driver->wait_events) {
725 vo->driver->wait_events(vo, until_time);
726 } else {
727 vo_wait_default(vo, until_time);
728 }
729 pthread_mutex_lock(&in->lock);
730 in->need_wakeup = false;
731 pthread_mutex_unlock(&in->lock);
732 }
733
wakeup_locked(struct vo * vo)734 static void wakeup_locked(struct vo *vo)
735 {
736 struct vo_internal *in = vo->in;
737
738 pthread_cond_broadcast(&in->wakeup);
739 if (vo->driver->wakeup)
740 vo->driver->wakeup(vo);
741 in->need_wakeup = true;
742 }
743
744 // Wakeup VO thread, and make it check for new events with VOCTRL_CHECK_EVENTS.
745 // To be used by threaded VO backends.
vo_wakeup(struct vo * vo)746 void vo_wakeup(struct vo *vo)
747 {
748 struct vo_internal *in = vo->in;
749
750 pthread_mutex_lock(&in->lock);
751 wakeup_locked(vo);
752 pthread_mutex_unlock(&in->lock);
753 }
754
// Whether a frame is still being displayed, queued, or rendered.
// Must be called locked.
static bool still_displaying(struct vo *vo)
{
    struct vo_internal *in = vo->in;
    int64_t now = mp_time_us();
    int64_t frame_end = 0;
    if (in->current_frame) {
        frame_end = in->current_frame->pts + MPMAX(in->current_frame->duration, 0);
        // Display-synced frames ignore wall-clock timing: they are "still
        // displaying" exactly while repeat vsyncs remain.
        if (in->current_frame->display_synced)
            frame_end = in->current_frame->num_vsyncs > 0 ? INT64_MAX : 0;
    }
    return (now < frame_end || in->rendering || in->frame_queued) && in->hasframe;
}
767
768 // Return true if there is still a frame being displayed (or queued).
vo_still_displaying(struct vo * vo)769 bool vo_still_displaying(struct vo *vo)
770 {
771 pthread_mutex_lock(&vo->in->lock);
772 bool res = still_displaying(vo);
773 pthread_mutex_unlock(&vo->in->lock);
774 return res;
775 }
776
777 // Make vo issue a wakeup once vo_still_displaying() becomes true.
vo_request_wakeup_on_done(struct vo * vo)778 void vo_request_wakeup_on_done(struct vo *vo)
779 {
780 struct vo_internal *in = vo->in;
781 pthread_mutex_lock(&vo->in->lock);
782 if (still_displaying(vo)) {
783 in->wakeup_on_done = true;
784 } else {
785 wakeup_core(vo);
786 }
787 pthread_mutex_unlock(&vo->in->lock);
788 }
789
// Whether vo_queue_frame() can be called. If the VO is not ready yet, the
// function will return false, and the VO will call the wakeup callback once
// it's ready.
// next_pts is the exact time when the next frame should be displayed. If the
// VO is ready, but the time is too "early", return false, and call the wakeup
// callback once the time is right.
// If next_pts is negative, disable any timing and draw the frame as fast as
// possible.
bool vo_is_ready_for_frame(struct vo *vo, int64_t next_pts)
{
    struct vo_internal *in = vo->in;
    pthread_mutex_lock(&in->lock);
    // Some drivers block frame submission until explicitly unblocked.
    bool blocked = vo->driver->initially_blocked &&
                   !(in->internal_events & VO_EVENT_INITIAL_UNBLOCK);
    // Ready only if configured, no frame already queued, and the current
    // frame has no repeat vsyncs left.
    bool r = vo->config_ok && !in->frame_queued && !blocked &&
             (!in->current_frame || in->current_frame->num_vsyncs < 1);
    if (r && next_pts >= 0) {
        // Don't show the frame too early - it would basically freeze the
        // display by disallowing OSD redrawing or VO interaction.
        // Actually render the frame at earliest the given offset before target
        // time.
        next_pts -= in->timing_offset;
        next_pts -= in->flip_queue_offset;
        int64_t now = mp_time_us();
        if (next_pts > now)
            r = false;
        if (!in->wakeup_pts || next_pts < in->wakeup_pts) {
            in->wakeup_pts = next_pts;
            // If we have to wait, update the vo thread's timer.
            if (!r)
                wakeup_locked(vo);
        }
    }
    pthread_mutex_unlock(&in->lock);
    return r;
}
826
// Direct the VO thread to put the currently queued image on the screen.
// vo_is_ready_for_frame() must have returned true before this call.
// Ownership of frame is handed to the vo.
void vo_queue_frame(struct vo *vo, struct vo_frame *frame)
{
    struct vo_internal *in = vo->in;
    pthread_mutex_lock(&in->lock);
    // Same preconditions vo_is_ready_for_frame() checked; the caller must
    // not have violated the protocol in between.
    assert(vo->config_ok && !in->frame_queued &&
           (!in->current_frame || in->current_frame->num_vsyncs < 1));
    in->hasframe = true;
    frame->frame_id = ++(in->current_frame_id);
    in->frame_queued = frame;
    // Display-synced frames are pulled immediately; others at end-of-display.
    in->wakeup_pts = frame->display_synced
                   ? 0 : frame->pts + MPMAX(frame->duration, 0);
    wakeup_locked(vo);
    pthread_mutex_unlock(&in->lock);
}
844
845 // If a frame is currently being rendered (or queued), wait until it's done.
846 // Otherwise, return immediately.
vo_wait_frame(struct vo * vo)847 void vo_wait_frame(struct vo *vo)
848 {
849 struct vo_internal *in = vo->in;
850 pthread_mutex_lock(&in->lock);
851 while (in->frame_queued || in->rendering)
852 pthread_cond_wait(&in->wakeup, &in->lock);
853 pthread_mutex_unlock(&in->lock);
854 }
855
// Wait until realtime is >= ts
// called without lock
static void wait_until(struct vo *vo, int64_t target)
{
    struct vo_internal *in = vo->in;
    struct timespec ts = mp_time_us_to_timespec(target);
    pthread_mutex_lock(&in->lock);
    while (target > mp_time_us()) {
        // Abort early during interactive resizing so the VO stays responsive.
        if (in->queued_events & VO_EVENT_LIVE_RESIZING)
            break;
        // Nonzero return means timeout (or error); either way, stop waiting.
        if (pthread_cond_timedwait(&in->wakeup, &in->lock, &ts))
            break;
    }
    pthread_mutex_unlock(&in->lock);
}
871
// Render one frame (new or repeated) on the VO thread: pick the frame,
// apply frame-drop heuristics, draw, wait for the flip target time, flip,
// and update vsync statistics. Returns true if more display-synced frames
// should be rendered without waiting.
static bool render_frame(struct vo *vo)
{
    struct vo_internal *in = vo->in;
    struct vo_frame *frame = NULL;
    bool more_frames = false;

    update_display_fps(vo);

    pthread_mutex_lock(&in->lock);

    // Prefer a newly queued frame; otherwise repeat the current one, but
    // only if it's display-synced with repeat vsyncs remaining.
    if (in->frame_queued) {
        talloc_free(in->current_frame);
        in->current_frame = in->frame_queued;
        in->frame_queued = NULL;
    } else if (in->paused || !in->current_frame || !in->hasframe ||
               (in->current_frame->display_synced && in->current_frame->num_vsyncs < 1) ||
               !in->current_frame->display_synced)
    {
        goto done;
    }

    frame = vo_frame_ref(in->current_frame);
    assert(frame);

    // Display-synced frames carry no wall-clock timing.
    if (frame->display_synced) {
        frame->pts = 0;
        frame->duration = -1;
    }

    int64_t now = mp_time_us();
    int64_t pts = frame->pts;
    int64_t duration = frame->duration;
    int64_t end_time = pts + duration;

    // Time at which we should flip_page on the VO.
    int64_t target = frame->display_synced ? 0 : pts - in->flip_queue_offset;

    // "normal" strict drop threshold.
    in->dropped_frame = duration >= 0 && end_time < now;

    // Refine the drop decision: never drop display-synced frames, frames on
    // drivers that drop internally, or frames not marked droppable.
    in->dropped_frame &= !frame->display_synced;
    in->dropped_frame &= !(vo->driver->caps & VO_CAP_FRAMEDROP);
    in->dropped_frame &= frame->can_drop;
    // Even if we're hopelessly behind, rather degrade to 10 FPS playback,
    // instead of just freezing the display forever.
    in->dropped_frame &= now - in->prev_vsync < 100 * 1000;
    in->dropped_frame &= in->hasframe_rendered;

    // Setup parameters for the next time this frame is drawn. ("frame" is the
    // frame currently drawn, while in->current_frame is the potentially next.)
    in->current_frame->repeat = true;
    if (frame->display_synced) {
        in->current_frame->vsync_offset += in->current_frame->vsync_interval;
        in->dropped_frame |= in->current_frame->num_vsyncs < 1;
    }
    if (in->current_frame->num_vsyncs > 0)
        in->current_frame->num_vsyncs -= 1;

    // Always render when paused (it's typically the last frame for a while).
    in->dropped_frame &= !in->paused;

    bool use_vsync = in->current_frame->display_synced && !in->paused;
    if (use_vsync && !in->expecting_vsync) // first DS frame in a row
        in->prev_vsync = now;
    in->expecting_vsync = use_vsync;

    if (in->dropped_frame) {
        in->drop_count += 1;
        wakeup_core(vo);
    } else {
        in->rendering = true;
        in->hasframe_rendered = true;
        // Snapshot so we can detect drops requested while we render unlocked.
        int64_t prev_drop_count = vo->in->drop_count;
        // Can the core queue new video now? Non-display-sync uses a separate
        // timer instead, but possibly benefits from preparing a frame early.
        bool can_queue = !in->frame_queued &&
            (in->current_frame->num_vsyncs < 1 || !use_vsync);
        // Drop the lock around the (potentially slow) draw/flip calls.
        pthread_mutex_unlock(&in->lock);

        if (can_queue)
            wakeup_core(vo);

        stats_time_start(in->stats, "video-draw");

        if (vo->driver->draw_frame) {
            vo->driver->draw_frame(vo, frame);
        } else {
            vo->driver->draw_image(vo, mp_image_new_ref(frame->current));
        }

        stats_time_end(in->stats, "video-draw");

        // Delay the flip until the target display time (no-op for target 0).
        wait_until(vo, target);

        stats_time_start(in->stats, "video-flip");

        vo->driver->flip_page(vo);

        struct vo_vsync_info vsync = {
            .last_queue_display_time = -1,
            .skipped_vsyncs = -1,
        };
        if (vo->driver->get_vsync)
            vo->driver->get_vsync(vo, &vsync);

        // Make up some crap if presentation feedback is missing.
        if (vsync.last_queue_display_time < 0)
            vsync.last_queue_display_time = mp_time_us();

        stats_time_end(in->stats, "video-flip");

        pthread_mutex_lock(&in->lock);
        // A drop requested during rendering marks this frame as dropped.
        in->dropped_frame = prev_drop_count < vo->in->drop_count;
        in->rendering = false;

        update_vsync_timing_after_swap(vo, &vsync);
    }

    // NORETAIN drivers can't redraw the last frame; don't keep it around.
    if (vo->driver->caps & VO_CAP_NORETAIN) {
        talloc_free(in->current_frame);
        in->current_frame = NULL;
    }

    if (in->dropped_frame) {
        MP_STATS(vo, "drop-vo");
    } else {
        // A successful render satisfies any pending redraw request.
        in->request_redraw = false;
    }

    // More display-synced content pending? Tell the caller to loop.
    if (in->current_frame && in->current_frame->num_vsyncs &&
        in->current_frame->display_synced)
        more_frames = true;

    if (in->frame_queued && in->frame_queued->display_synced)
        more_frames = true;

    pthread_cond_broadcast(&in->wakeup); // for vo_wait_frame()

done:
    talloc_free(frame);
    // Signal the core if it asked to be woken once display finished.
    if (in->wakeup_on_done && !still_displaying(vo)) {
        in->wakeup_on_done = false;
        wakeup_core(vo);
    }
    pthread_mutex_unlock(&in->lock);

    return more_frames;
}
1020
do_redraw(struct vo * vo)1021 static void do_redraw(struct vo *vo)
1022 {
1023 struct vo_internal *in = vo->in;
1024
1025 if (!vo->config_ok || (vo->driver->caps & VO_CAP_NORETAIN))
1026 return;
1027
1028 pthread_mutex_lock(&in->lock);
1029 in->request_redraw = false;
1030 bool full_redraw = in->dropped_frame;
1031 struct vo_frame *frame = NULL;
1032 if (!vo->driver->untimed)
1033 frame = vo_frame_ref(in->current_frame);
1034 if (frame)
1035 in->dropped_frame = false;
1036 struct vo_frame dummy = {0};
1037 if (!frame)
1038 frame = &dummy;
1039 frame->redraw = !full_redraw; // unconditionally redraw if it was dropped
1040 frame->repeat = false;
1041 frame->still = true;
1042 frame->pts = 0;
1043 frame->duration = -1;
1044 pthread_mutex_unlock(&in->lock);
1045
1046 if (vo->driver->draw_frame) {
1047 vo->driver->draw_frame(vo, frame);
1048 } else if ((full_redraw || vo->driver->control(vo, VOCTRL_REDRAW_FRAME, NULL) < 1)
1049 && frame->current)
1050 {
1051 vo->driver->draw_image(vo, mp_image_new_ref(frame->current));
1052 }
1053
1054 vo->driver->flip_page(vo);
1055
1056 if (frame != &dummy)
1057 talloc_free(frame);
1058 }
1059
get_image_vo(void * ctx,int imgfmt,int w,int h,int stride_align)1060 static struct mp_image *get_image_vo(void *ctx, int imgfmt, int w, int h,
1061 int stride_align)
1062 {
1063 struct vo *vo = ctx;
1064 return vo->driver->get_image(vo, imgfmt, w, h, stride_align);
1065 }
1066
vo_thread(void * ptr)1067 static void *vo_thread(void *ptr)
1068 {
1069 struct vo *vo = ptr;
1070 struct vo_internal *in = vo->in;
1071 bool vo_paused = false;
1072
1073 mpthread_set_name("vo");
1074
1075 if (vo->driver->get_image) {
1076 in->dr_helper = dr_helper_create(in->dispatch, get_image_vo, vo);
1077 dr_helper_acquire_thread(in->dr_helper);
1078 }
1079
1080 int r = vo->driver->preinit(vo) ? -1 : 0;
1081 mp_rendezvous(vo, r); // init barrier
1082 if (r < 0)
1083 goto done;
1084
1085 read_opts(vo);
1086 update_display_fps(vo);
1087 vo_event(vo, VO_EVENT_WIN_STATE);
1088
1089 while (1) {
1090 mp_dispatch_queue_process(vo->in->dispatch, 0);
1091 if (in->terminate)
1092 break;
1093 stats_event(in->stats, "iterations");
1094 vo->driver->control(vo, VOCTRL_CHECK_EVENTS, NULL);
1095 bool working = render_frame(vo);
1096 int64_t now = mp_time_us();
1097 int64_t wait_until = now + (working ? 0 : (int64_t)1e9);
1098
1099 pthread_mutex_lock(&in->lock);
1100 if (in->wakeup_pts) {
1101 if (in->wakeup_pts > now) {
1102 wait_until = MPMIN(wait_until, in->wakeup_pts);
1103 } else {
1104 in->wakeup_pts = 0;
1105 wakeup_core(vo);
1106 }
1107 }
1108 if (vo->want_redraw && !in->want_redraw) {
1109 in->want_redraw = true;
1110 wakeup_core(vo);
1111 }
1112 vo->want_redraw = false;
1113 bool redraw = in->request_redraw;
1114 bool send_reset = in->send_reset;
1115 in->send_reset = false;
1116 bool send_pause = in->paused != vo_paused;
1117 vo_paused = in->paused;
1118 pthread_mutex_unlock(&in->lock);
1119
1120 if (send_reset)
1121 vo->driver->control(vo, VOCTRL_RESET, NULL);
1122 if (send_pause)
1123 vo->driver->control(vo, vo_paused ? VOCTRL_PAUSE : VOCTRL_RESUME, NULL);
1124 if (wait_until > now && redraw) {
1125 do_redraw(vo); // now is a good time
1126 continue;
1127 }
1128 if (vo->want_redraw) // might have been set by VOCTRLs
1129 wait_until = 0;
1130
1131 wait_vo(vo, wait_until);
1132 }
1133 forget_frames(vo); // implicitly synchronized
1134 talloc_free(in->current_frame);
1135 in->current_frame = NULL;
1136 vo->driver->uninit(vo);
1137 done:
1138 TA_FREEP(&in->dr_helper);
1139 return NULL;
1140 }
1141
vo_set_paused(struct vo * vo,bool paused)1142 void vo_set_paused(struct vo *vo, bool paused)
1143 {
1144 struct vo_internal *in = vo->in;
1145 pthread_mutex_lock(&in->lock);
1146 if (in->paused != paused) {
1147 in->paused = paused;
1148 if (in->paused && in->dropped_frame) {
1149 in->request_redraw = true;
1150 wakeup_core(vo);
1151 }
1152 reset_vsync_timings(vo);
1153 wakeup_locked(vo);
1154 }
1155 pthread_mutex_unlock(&in->lock);
1156 }
1157
vo_get_drop_count(struct vo * vo)1158 int64_t vo_get_drop_count(struct vo *vo)
1159 {
1160 pthread_mutex_lock(&vo->in->lock);
1161 int64_t r = vo->in->drop_count;
1162 pthread_mutex_unlock(&vo->in->lock);
1163 return r;
1164 }
1165
vo_increment_drop_count(struct vo * vo,int64_t n)1166 void vo_increment_drop_count(struct vo *vo, int64_t n)
1167 {
1168 pthread_mutex_lock(&vo->in->lock);
1169 vo->in->drop_count += n;
1170 pthread_mutex_unlock(&vo->in->lock);
1171 }
1172
1173 // Make the VO redraw the OSD at some point in the future.
vo_redraw(struct vo * vo)1174 void vo_redraw(struct vo *vo)
1175 {
1176 struct vo_internal *in = vo->in;
1177 pthread_mutex_lock(&in->lock);
1178 if (!in->request_redraw) {
1179 in->request_redraw = true;
1180 in->want_redraw = false;
1181 wakeup_locked(vo);
1182 }
1183 pthread_mutex_unlock(&in->lock);
1184 }
1185
vo_want_redraw(struct vo * vo)1186 bool vo_want_redraw(struct vo *vo)
1187 {
1188 struct vo_internal *in = vo->in;
1189 pthread_mutex_lock(&in->lock);
1190 bool r = in->want_redraw;
1191 pthread_mutex_unlock(&in->lock);
1192 return r;
1193 }
1194
vo_seek_reset(struct vo * vo)1195 void vo_seek_reset(struct vo *vo)
1196 {
1197 struct vo_internal *in = vo->in;
1198 pthread_mutex_lock(&in->lock);
1199 forget_frames(vo);
1200 reset_vsync_timings(vo);
1201 in->send_reset = true;
1202 wakeup_locked(vo);
1203 pthread_mutex_unlock(&in->lock);
1204 }
1205
1206 // Whether at least 1 frame was queued or rendered since last seek or reconfig.
vo_has_frame(struct vo * vo)1207 bool vo_has_frame(struct vo *vo)
1208 {
1209 return vo->in->hasframe;
1210 }
1211
run_query_format(void * p)1212 static void run_query_format(void *p)
1213 {
1214 void **pp = p;
1215 struct vo *vo = pp[0];
1216 uint8_t *list = pp[1];
1217 for (int format = IMGFMT_START; format < IMGFMT_END; format++)
1218 list[format - IMGFMT_START] = vo->driver->query_format(vo, format);
1219 }
1220
1221 // For each item in the list (allocated as uint8_t[IMGFMT_END - IMGFMT_START]),
1222 // set the supported format flags.
vo_query_formats(struct vo * vo,uint8_t * list)1223 void vo_query_formats(struct vo *vo, uint8_t *list)
1224 {
1225 void *p[] = {vo, list};
1226 mp_dispatch_run(vo->in->dispatch, run_query_format, p);
1227 }
1228
1229 // Calculate the appropriate source and destination rectangle to
1230 // get a correctly scaled picture, including pan-scan.
1231 // out_src: visible part of the video
1232 // out_dst: area of screen covered by the video source rectangle
1233 // out_osd: OSD size, OSD margins, etc.
1234 // Must be called from the VO thread only.
vo_get_src_dst_rects(struct vo * vo,struct mp_rect * out_src,struct mp_rect * out_dst,struct mp_osd_res * out_osd)1235 void vo_get_src_dst_rects(struct vo *vo, struct mp_rect *out_src,
1236 struct mp_rect *out_dst, struct mp_osd_res *out_osd)
1237 {
1238 if (!vo->params) {
1239 *out_src = *out_dst = (struct mp_rect){0};
1240 *out_osd = (struct mp_osd_res){0};
1241 return;
1242 }
1243 mp_get_src_dst_rects(vo->log, vo->opts, vo->driver->caps, vo->params,
1244 vo->dwidth, vo->dheight, vo->monitor_par,
1245 out_src, out_dst, out_osd);
1246 }
1247
1248 // flip_page[_timed] will be called offset_us microseconds too early.
1249 // (For vo_vdpau, which does its own timing.)
1250 // num_req_frames set the requested number of requested vo_frame.frames.
1251 // (For vo_gpu interpolation.)
vo_set_queue_params(struct vo * vo,int64_t offset_us,int num_req_frames)1252 void vo_set_queue_params(struct vo *vo, int64_t offset_us, int num_req_frames)
1253 {
1254 struct vo_internal *in = vo->in;
1255 pthread_mutex_lock(&in->lock);
1256 in->flip_queue_offset = offset_us;
1257 in->req_frames = MPCLAMP(num_req_frames, 1, VO_MAX_REQ_FRAMES);
1258 pthread_mutex_unlock(&in->lock);
1259 }
1260
vo_get_num_req_frames(struct vo * vo)1261 int vo_get_num_req_frames(struct vo *vo)
1262 {
1263 struct vo_internal *in = vo->in;
1264 pthread_mutex_lock(&in->lock);
1265 int res = in->req_frames;
1266 pthread_mutex_unlock(&in->lock);
1267 return res;
1268 }
1269
vo_get_vsync_interval(struct vo * vo)1270 int64_t vo_get_vsync_interval(struct vo *vo)
1271 {
1272 struct vo_internal *in = vo->in;
1273 pthread_mutex_lock(&in->lock);
1274 int64_t res = vo->in->vsync_interval > 1 ? vo->in->vsync_interval : -1;
1275 pthread_mutex_unlock(&in->lock);
1276 return res;
1277 }
1278
1279 // Returns duration of a display refresh in seconds.
vo_get_estimated_vsync_interval(struct vo * vo)1280 double vo_get_estimated_vsync_interval(struct vo *vo)
1281 {
1282 struct vo_internal *in = vo->in;
1283 pthread_mutex_lock(&in->lock);
1284 double res = in->estimated_vsync_interval / 1e6;
1285 pthread_mutex_unlock(&in->lock);
1286 return res;
1287 }
1288
vo_get_estimated_vsync_jitter(struct vo * vo)1289 double vo_get_estimated_vsync_jitter(struct vo *vo)
1290 {
1291 struct vo_internal *in = vo->in;
1292 pthread_mutex_lock(&in->lock);
1293 double res = in->estimated_vsync_jitter;
1294 pthread_mutex_unlock(&in->lock);
1295 return res;
1296 }
1297
// Get the time in seconds at after which the currently rendering frame will
// end. Returns positive values if the frame is yet to be finished, negative
// values if it already finished.
// This can only be called while no new frame is queued (after
// vo_is_ready_for_frame). Returns 0 for non-display synced frames, or if the
// deadline for continuous display was missed.
double vo_get_delay(struct vo *vo)
{
    struct vo_internal *in = vo->in;
    pthread_mutex_lock(&in->lock);
    assert (!in->frame_queued);
    int64_t res = 0;
    if (in->base_vsync && in->vsync_interval > 1 && in->current_frame) {
        res = in->base_vsync;
        // If a frame is being rendered right now, it occupies one extra vsync.
        int extra = !!in->rendering;
        res += (in->current_frame->num_vsyncs + extra) * in->vsync_interval;
        if (!in->current_frame->display_synced)
            res = 0;
    }
    pthread_mutex_unlock(&in->lock);
    // res is an absolute time in microseconds; convert to seconds from now.
    return res ? (res - mp_time_us()) / 1e6 : 0;
}
1320
vo_discard_timing_info(struct vo * vo)1321 void vo_discard_timing_info(struct vo *vo)
1322 {
1323 struct vo_internal *in = vo->in;
1324 pthread_mutex_lock(&in->lock);
1325 reset_vsync_timings(vo);
1326 pthread_mutex_unlock(&in->lock);
1327 }
1328
vo_get_delayed_count(struct vo * vo)1329 int64_t vo_get_delayed_count(struct vo *vo)
1330 {
1331 struct vo_internal *in = vo->in;
1332 pthread_mutex_lock(&in->lock);
1333 int64_t res = vo->in->delayed_count;
1334 pthread_mutex_unlock(&in->lock);
1335 return res;
1336 }
1337
vo_get_display_fps(struct vo * vo)1338 double vo_get_display_fps(struct vo *vo)
1339 {
1340 struct vo_internal *in = vo->in;
1341 pthread_mutex_lock(&in->lock);
1342 double res = vo->in->display_fps;
1343 pthread_mutex_unlock(&in->lock);
1344 return res;
1345 }
1346
// Set specific event flags, and wakeup the playback core if needed.
// vo_query_and_reset_events() can retrieve the events again.
void vo_event(struct vo *vo, int event)
{
    struct vo_internal *in = vo->in;
    pthread_mutex_lock(&in->lock);
    // Wake the core only if this adds user-visible event bits that are not
    // already pending (avoids redundant wakeups).
    if ((in->queued_events & event & VO_EVENTS_USER) != (event & VO_EVENTS_USER))
        wakeup_core(vo);
    if (event)
        wakeup_locked(vo);
    in->queued_events |= event;   // consumed by vo_query_and_reset_events()
    in->internal_events |= event; // presumably consumed on the VO thread — see users of internal_events
    pthread_mutex_unlock(&in->lock);
}
1361
1362 // Check event flags set with vo_event(). Return the mask of events that was
1363 // set and included in the events parameter. Clear the returned events.
vo_query_and_reset_events(struct vo * vo,int events)1364 int vo_query_and_reset_events(struct vo *vo, int events)
1365 {
1366 struct vo_internal *in = vo->in;
1367 pthread_mutex_lock(&in->lock);
1368 int r = in->queued_events & events;
1369 in->queued_events &= ~(unsigned)r;
1370 pthread_mutex_unlock(&in->lock);
1371 return r;
1372 }
1373
vo_get_current_frame(struct vo * vo)1374 struct mp_image *vo_get_current_frame(struct vo *vo)
1375 {
1376 struct vo_internal *in = vo->in;
1377 pthread_mutex_lock(&in->lock);
1378 struct mp_image *r = NULL;
1379 if (vo->in->current_frame)
1380 r = mp_image_new_ref(vo->in->current_frame->current);
1381 pthread_mutex_unlock(&in->lock);
1382 return r;
1383 }
1384
vo_get_current_vo_frame(struct vo * vo)1385 struct vo_frame *vo_get_current_vo_frame(struct vo *vo)
1386 {
1387 struct vo_internal *in = vo->in;
1388 pthread_mutex_lock(&in->lock);
1389 struct vo_frame *r = vo_frame_ref(vo->in->current_frame);
1390 pthread_mutex_unlock(&in->lock);
1391 return r;
1392 }
1393
vo_get_image(struct vo * vo,int imgfmt,int w,int h,int stride_align)1394 struct mp_image *vo_get_image(struct vo *vo, int imgfmt, int w, int h,
1395 int stride_align)
1396 {
1397 if (vo->driver->get_image_ts)
1398 return vo->driver->get_image_ts(vo, imgfmt, w, h, stride_align);
1399 if (vo->in->dr_helper)
1400 return dr_helper_get_image(vo->in->dr_helper, imgfmt, w, h, stride_align);
1401 return NULL;
1402 }
1403
destroy_frame(void * p)1404 static void destroy_frame(void *p)
1405 {
1406 struct vo_frame *frame = p;
1407 for (int n = 0; n < frame->num_frames; n++)
1408 talloc_free(frame->frames[n]);
1409 }
1410
1411 // Return a new reference to the given frame. The image pointers are also new
1412 // references. Calling talloc_free() on the frame unrefs all currently set
1413 // image references. (Assuming current==frames[0].)
vo_frame_ref(struct vo_frame * frame)1414 struct vo_frame *vo_frame_ref(struct vo_frame *frame)
1415 {
1416 if (!frame)
1417 return NULL;
1418
1419 struct vo_frame *new = talloc_ptrtype(NULL, new);
1420 talloc_set_destructor(new, destroy_frame);
1421 *new = *frame;
1422 for (int n = 0; n < frame->num_frames; n++) {
1423 new->frames[n] = mp_image_new_ref(frame->frames[n]);
1424 if (!new->frames[n])
1425 abort(); // OOM on tiny allocs
1426 }
1427 new->current = new->num_frames ? new->frames[0] : NULL;
1428 return new;
1429 }
1430
1431 /*
1432 * lookup an integer in a table, table must have 0 as the last key
1433 * param: key key to search for
1434 * returns translation corresponding to key or "to" value of last mapping
1435 * if not found.
1436 */
lookup_keymap_table(const struct mp_keymap * map,int key)1437 int lookup_keymap_table(const struct mp_keymap *map, int key)
1438 {
1439 while (map->from && map->from != key)
1440 map++;
1441 return map->to;
1442 }
1443