/*
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "config.h"

#include <stdint.h>
#include <string.h>

#include <vdpau/vdpau.h>

#include "buffer.h"
#include "common.h"
#include "hwcontext.h"
#include "hwcontext_internal.h"
#include "hwcontext_vdpau.h"
#include "mem.h"
#include "pixfmt.h"
#include "pixdesc.h"

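/* Per-device private data: VDPAU entry points resolved once at init time,
 * plus, for each supported chroma type, the pixel formats that can be used
 * for transfers to and from video surfaces. */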
typedef struct VDPAUDeviceContext {
    VdpVideoSurfaceQueryGetPutBitsYCbCrCapabilities *get_transfer_caps;
    VdpVideoSurfaceGetBitsYCbCr *get_data;
    VdpVideoSurfacePutBitsYCbCr *put_data;
    VdpVideoSurfaceCreate *surf_create;
    VdpVideoSurfaceDestroy *surf_destroy;

    enum AVPixelFormat *pix_fmts[3];
    int nb_pix_fmts[3];
} VDPAUDeviceContext;

typedef struct VDPAUFramesContext {
    VdpVideoSurfaceGetBitsYCbCr *get_data;
    VdpVideoSurfacePutBitsYCbCr *put_data;
    VdpChromaType chroma_type;
    int chroma_idx;

    const enum AVPixelFormat *pix_fmts;
    int nb_pix_fmts;
} VDPAUFramesContext;

typedef struct VDPAUPixFmtMap {
    VdpYCbCrFormat vdpau_fmt;
    enum AVPixelFormat pix_fmt;
} VDPAUPixFmtMap;

static const VDPAUPixFmtMap pix_fmts_420[] = {
    { VDP_YCBCR_FORMAT_NV12, AV_PIX_FMT_NV12 },
    { VDP_YCBCR_FORMAT_YV12, AV_PIX_FMT_YUV420P },
    { 0, AV_PIX_FMT_NONE, },
};

static const VDPAUPixFmtMap pix_fmts_422[] = {
    { VDP_YCBCR_FORMAT_NV12, AV_PIX_FMT_NV16 },
    { VDP_YCBCR_FORMAT_YV12, AV_PIX_FMT_YUV422P },
    { VDP_YCBCR_FORMAT_UYVY, AV_PIX_FMT_UYVY422 },
    { VDP_YCBCR_FORMAT_YUYV, AV_PIX_FMT_YUYV422 },
    { 0, AV_PIX_FMT_NONE, },
};

static const VDPAUPixFmtMap pix_fmts_444[] = {
    { VDP_YCBCR_FORMAT_YV12, AV_PIX_FMT_YUV444P },
    { 0, AV_PIX_FMT_NONE, },
};

static const struct {
    VdpChromaType chroma_type;
    enum AVPixelFormat frames_sw_format;
    const VDPAUPixFmtMap *map;
} vdpau_pix_fmts[] = {
    { VDP_CHROMA_TYPE_420, AV_PIX_FMT_YUV420P, pix_fmts_420 },
    { VDP_CHROMA_TYPE_422, AV_PIX_FMT_YUV422P, pix_fmts_422 },
    { VDP_CHROMA_TYPE_444, AV_PIX_FMT_YUV444P, pix_fmts_444 },
};

static int count_pixfmts(const VDPAUPixFmtMap *map)
{
    int count = 0;
    while (map->pix_fmt != AV_PIX_FMT_NONE) {
        map++;
        count++;
    }
    return count;
}

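/* Query, for every chroma type, which YCbCr get/put-bits formats the driver
 * supports and cache the matching AVPixelFormat lists (each terminated by
 * AV_PIX_FMT_NONE) in the device private data. */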
static int vdpau_init_pixmfts(AVHWDeviceContext *ctx)
{
    AVVDPAUDeviceContext *hwctx = ctx->hwctx;
    VDPAUDeviceContext *priv = ctx->internal->priv;
    int i;

    for (i = 0; i < FF_ARRAY_ELEMS(priv->pix_fmts); i++) {
        const VDPAUPixFmtMap *map = vdpau_pix_fmts[i].map;
        int nb_pix_fmts;

        nb_pix_fmts = count_pixfmts(map);
        priv->pix_fmts[i] = av_malloc_array(nb_pix_fmts + 1, sizeof(*priv->pix_fmts[i]));
        if (!priv->pix_fmts[i])
            return AVERROR(ENOMEM);

        nb_pix_fmts = 0;
        while (map->pix_fmt != AV_PIX_FMT_NONE) {
            VdpBool supported;
            VdpStatus err = priv->get_transfer_caps(hwctx->device, vdpau_pix_fmts[i].chroma_type,
                                                    map->vdpau_fmt, &supported);
            if (err == VDP_STATUS_OK && supported)
                priv->pix_fmts[i][nb_pix_fmts++] = map->pix_fmt;
            map++;
        }
        priv->pix_fmts[i][nb_pix_fmts++] = AV_PIX_FMT_NONE;
        priv->nb_pix_fmts[i] = nb_pix_fmts;
    }

    return 0;
}

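/* Resolve a VDPAU entry point through get_proc_address(); fail with
 * AVERROR_UNKNOWN if the driver does not provide it. */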
#define GET_CALLBACK(id, result)                                            \
do {                                                                        \
    void *tmp;                                                              \
    err = hwctx->get_proc_address(hwctx->device, id, &tmp);                 \
    if (err != VDP_STATUS_OK) {                                             \
        av_log(ctx, AV_LOG_ERROR, "Error getting the " #id " callback.\n"); \
        return AVERROR_UNKNOWN;                                             \
    }                                                                       \
    result = tmp;                                                           \
} while (0)

static int vdpau_device_init(AVHWDeviceContext *ctx)
{
    AVVDPAUDeviceContext *hwctx = ctx->hwctx;
    VDPAUDeviceContext *priv = ctx->internal->priv;
    VdpStatus err;
    int ret;

    GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_QUERY_GET_PUT_BITS_Y_CB_CR_CAPABILITIES,
                 priv->get_transfer_caps);
    GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_GET_BITS_Y_CB_CR, priv->get_data);
    GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_PUT_BITS_Y_CB_CR, priv->put_data);
    GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_CREATE, priv->surf_create);
    GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_DESTROY, priv->surf_destroy);

    ret = vdpau_init_pixmfts(ctx);
    if (ret < 0) {
        av_log(ctx, AV_LOG_ERROR, "Error querying the supported pixel formats\n");
        return ret;
    }

    return 0;
}

static void vdpau_device_uninit(AVHWDeviceContext *ctx)
{
    VDPAUDeviceContext *priv = ctx->internal->priv;
    int i;

    for (i = 0; i < FF_ARRAY_ELEMS(priv->pix_fmts); i++)
        av_freep(&priv->pix_fmts[i]);
}

static int vdpau_frames_get_constraints(AVHWDeviceContext *ctx,
                                        const void *hwconfig,
                                        AVHWFramesConstraints *constraints)
{
    VDPAUDeviceContext *priv = ctx->internal->priv;
    int nb_sw_formats = 0;
    int i;

    constraints->valid_sw_formats = av_malloc_array(FF_ARRAY_ELEMS(vdpau_pix_fmts) + 1,
                                                    sizeof(*constraints->valid_sw_formats));
    if (!constraints->valid_sw_formats)
        return AVERROR(ENOMEM);

    for (i = 0; i < FF_ARRAY_ELEMS(vdpau_pix_fmts); i++) {
        if (priv->nb_pix_fmts[i] > 1)
            constraints->valid_sw_formats[nb_sw_formats++] = vdpau_pix_fmts[i].frames_sw_format;
    }
    constraints->valid_sw_formats[nb_sw_formats] = AV_PIX_FMT_NONE;

    constraints->valid_hw_formats = av_malloc_array(2, sizeof(*constraints->valid_hw_formats));
    if (!constraints->valid_hw_formats)
        return AVERROR(ENOMEM);

    constraints->valid_hw_formats[0] = AV_PIX_FMT_VDPAU;
    constraints->valid_hw_formats[1] = AV_PIX_FMT_NONE;

    return 0;
}

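/* Pool buffers carry a VdpVideoSurface handle directly in the AVBufferRef
 * data pointer; freeing a buffer destroys the underlying surface. */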
static void vdpau_buffer_free(void *opaque, uint8_t *data)
{
    AVHWFramesContext *ctx = opaque;
    VDPAUDeviceContext *device_priv = ctx->device_ctx->internal->priv;
    VdpVideoSurface surf = (VdpVideoSurface)(uintptr_t)data;

    device_priv->surf_destroy(surf);
}

static AVBufferRef *vdpau_pool_alloc(void *opaque, int size)
{
    AVHWFramesContext *ctx = opaque;
    VDPAUFramesContext *priv = ctx->internal->priv;
    AVVDPAUDeviceContext *device_hwctx = ctx->device_ctx->hwctx;
    VDPAUDeviceContext *device_priv = ctx->device_ctx->internal->priv;

    AVBufferRef *ret;
    VdpVideoSurface surf;
    VdpStatus err;

    err = device_priv->surf_create(device_hwctx->device, priv->chroma_type,
                                   ctx->width, ctx->height, &surf);
    if (err != VDP_STATUS_OK) {
        av_log(ctx, AV_LOG_ERROR, "Error allocating a VDPAU video surface\n");
        return NULL;
    }

    ret = av_buffer_create((uint8_t*)(uintptr_t)surf, sizeof(surf),
                           vdpau_buffer_free, ctx, AV_BUFFER_FLAG_READONLY);
    if (!ret) {
        device_priv->surf_destroy(surf);
        return NULL;
    }

    return ret;
}

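/* Pick the chroma type matching the sw_format of the frames context and,
 * unless the caller supplied a pool, set up an internal surface pool. */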
static int vdpau_frames_init(AVHWFramesContext *ctx)
{
    VDPAUDeviceContext *device_priv = ctx->device_ctx->internal->priv;
    VDPAUFramesContext *priv = ctx->internal->priv;

    int i;

    for (i = 0; i < FF_ARRAY_ELEMS(vdpau_pix_fmts); i++) {
        if (vdpau_pix_fmts[i].frames_sw_format == ctx->sw_format) {
            priv->chroma_type = vdpau_pix_fmts[i].chroma_type;
            priv->chroma_idx = i;
            priv->pix_fmts = device_priv->pix_fmts[i];
            priv->nb_pix_fmts = device_priv->nb_pix_fmts[i];
            break;
        }
    }
    if (priv->nb_pix_fmts < 2) {
        av_log(ctx, AV_LOG_ERROR, "Unsupported sw format: %s\n",
               av_get_pix_fmt_name(ctx->sw_format));
        return AVERROR(ENOSYS);
    }

    if (!ctx->pool) {
        ctx->internal->pool_internal = av_buffer_pool_init2(sizeof(VdpVideoSurface), ctx,
                                                            vdpau_pool_alloc, NULL);
        if (!ctx->internal->pool_internal)
            return AVERROR(ENOMEM);
    }

    priv->get_data = device_priv->get_data;
    priv->put_data = device_priv->put_data;

    return 0;
}

static int vdpau_get_buffer(AVHWFramesContext *ctx, AVFrame *frame)
{
    frame->buf[0] = av_buffer_pool_get(ctx->pool);
    if (!frame->buf[0])
        return AVERROR(ENOMEM);

    frame->data[3] = frame->buf[0]->data;
    frame->format = AV_PIX_FMT_VDPAU;
    frame->width = ctx->width;
    frame->height = ctx->height;

    return 0;
}

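/* Transfers between video surfaces and system memory; the set of usable
 * software formats was determined per chroma type at device init. */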
static int vdpau_transfer_get_formats(AVHWFramesContext *ctx,
                                      enum AVHWFrameTransferDirection dir,
                                      enum AVPixelFormat **formats)
{
    VDPAUFramesContext *priv = ctx->internal->priv;

    enum AVPixelFormat *fmts;

    if (priv->nb_pix_fmts == 1) {
        av_log(ctx, AV_LOG_ERROR,
               "No target formats are supported for this chroma type\n");
        return AVERROR(ENOSYS);
    }

    fmts = av_malloc_array(priv->nb_pix_fmts, sizeof(*fmts));
    if (!fmts)
        return AVERROR(ENOMEM);

    memcpy(fmts, priv->pix_fmts, sizeof(*fmts) * (priv->nb_pix_fmts));
    *formats = fmts;

    return 0;
}

static int vdpau_transfer_data_from(AVHWFramesContext *ctx, AVFrame *dst,
                                    const AVFrame *src)
{
    VDPAUFramesContext *priv = ctx->internal->priv;
    VdpVideoSurface surf = (VdpVideoSurface)(uintptr_t)src->data[3];

    void *data[3];
    uint32_t linesize[3];

    const VDPAUPixFmtMap *map;
    VdpYCbCrFormat vdpau_format;
    VdpStatus err;
    int i;

    for (i = 0; i < FF_ARRAY_ELEMS(data) && dst->data[i]; i++) {
        data[i] = dst->data[i];
        if (dst->linesize[i] < 0 || dst->linesize[i] > UINT32_MAX) {
            av_log(ctx, AV_LOG_ERROR,
                   "The linesize %d cannot be represented as uint32\n",
                   dst->linesize[i]);
            return AVERROR(ERANGE);
        }
        linesize[i] = dst->linesize[i];
    }

    map = vdpau_pix_fmts[priv->chroma_idx].map;
    for (i = 0; map[i].pix_fmt != AV_PIX_FMT_NONE; i++) {
        if (map[i].pix_fmt == dst->format) {
            vdpau_format = map[i].vdpau_fmt;
            break;
        }
    }
    if (map[i].pix_fmt == AV_PIX_FMT_NONE) {
        av_log(ctx, AV_LOG_ERROR,
               "Unsupported target pixel format: %s\n",
               av_get_pix_fmt_name(dst->format));
        return AVERROR(EINVAL);
    }

    /* FFmpeg's planar formats store U before V, but VDPAU's YV12 layout
     * expects the V plane first, so swap the chroma plane pointers. */
    if (vdpau_format == VDP_YCBCR_FORMAT_YV12)
        FFSWAP(void*, data[1], data[2]);

    err = priv->get_data(surf, vdpau_format, data, linesize);
    if (err != VDP_STATUS_OK) {
        av_log(ctx, AV_LOG_ERROR, "Error retrieving the data from a VDPAU surface\n");
        return AVERROR_UNKNOWN;
    }

    return 0;
}

static int vdpau_transfer_data_to(AVHWFramesContext *ctx, AVFrame *dst,
                                  const AVFrame *src)
{
    VDPAUFramesContext *priv = ctx->internal->priv;
    VdpVideoSurface surf = (VdpVideoSurface)(uintptr_t)dst->data[3];

    const void *data[3];
    uint32_t linesize[3];

    const VDPAUPixFmtMap *map;
    VdpYCbCrFormat vdpau_format;
    VdpStatus err;
    int i;

    for (i = 0; i < FF_ARRAY_ELEMS(data) && src->data[i]; i++) {
        data[i] = src->data[i];
        if (src->linesize[i] < 0 || src->linesize[i] > UINT32_MAX) {
            av_log(ctx, AV_LOG_ERROR,
                   "The linesize %d cannot be represented as uint32\n",
                   src->linesize[i]);
            return AVERROR(ERANGE);
        }
        linesize[i] = src->linesize[i];
    }

    map = vdpau_pix_fmts[priv->chroma_idx].map;
    for (i = 0; map[i].pix_fmt != AV_PIX_FMT_NONE; i++) {
        if (map[i].pix_fmt == src->format) {
            vdpau_format = map[i].vdpau_fmt;
            break;
        }
    }
    if (map[i].pix_fmt == AV_PIX_FMT_NONE) {
        av_log(ctx, AV_LOG_ERROR,
               "Unsupported source pixel format: %s\n",
               av_get_pix_fmt_name(src->format));
        return AVERROR(EINVAL);
    }

    /* Same U/V swap as in the download path: VDPAU's YV12 wants V before U. */
    if (vdpau_format == VDP_YCBCR_FORMAT_YV12)
        FFSWAP(const void*, data[1], data[2]);

    err = priv->put_data(surf, vdpau_format, data, linesize);
    if (err != VDP_STATUS_OK) {
        av_log(ctx, AV_LOG_ERROR, "Error uploading the data to a VDPAU surface\n");
        return AVERROR_UNKNOWN;
    }

    return 0;
}

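/* Standalone device creation is only implemented for X11. */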
#if HAVE_VDPAU_X11
#include <vdpau/vdpau_x11.h>
#include <X11/Xlib.h>

typedef struct VDPAUDevicePriv {
    VdpDeviceDestroy *device_destroy;
    Display *dpy;
} VDPAUDevicePriv;

static void vdpau_device_free(AVHWDeviceContext *ctx)
{
    AVVDPAUDeviceContext *hwctx = ctx->hwctx;
    VDPAUDevicePriv *priv = ctx->user_opaque;

    if (priv->device_destroy)
        priv->device_destroy(hwctx->device);
    if (priv->dpy)
        XCloseDisplay(priv->dpy);
    av_freep(&priv);
}

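/* Open the X11 display named by "device" (the default display if NULL) and
 * create a VDPAU device on its default screen. */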
static int vdpau_device_create(AVHWDeviceContext *ctx, const char *device,
                               AVDictionary *opts, int flags)
{
    AVVDPAUDeviceContext *hwctx = ctx->hwctx;

    VDPAUDevicePriv *priv;
    VdpStatus err;
    VdpGetInformationString *get_information_string;
    const char *display, *vendor;

    priv = av_mallocz(sizeof(*priv));
    if (!priv)
        return AVERROR(ENOMEM);

    ctx->user_opaque = priv;
    ctx->free = vdpau_device_free;

    priv->dpy = XOpenDisplay(device);
    if (!priv->dpy) {
        av_log(ctx, AV_LOG_ERROR, "Cannot open the X11 display %s.\n",
               XDisplayName(device));
        return AVERROR_UNKNOWN;
    }
    display = XDisplayString(priv->dpy);

    err = vdp_device_create_x11(priv->dpy, XDefaultScreen(priv->dpy),
                                &hwctx->device, &hwctx->get_proc_address);
    if (err != VDP_STATUS_OK) {
        av_log(ctx, AV_LOG_ERROR, "VDPAU device creation on X11 display %s failed.\n",
               display);
        return AVERROR_UNKNOWN;
    }

    GET_CALLBACK(VDP_FUNC_ID_GET_INFORMATION_STRING, get_information_string);
    GET_CALLBACK(VDP_FUNC_ID_DEVICE_DESTROY, priv->device_destroy);

    get_information_string(&vendor);
    av_log(ctx, AV_LOG_VERBOSE, "Successfully created a VDPAU device (%s) on "
           "X11 display %s\n", vendor, display);

    return 0;
}
#endif

const HWContextType ff_hwcontext_type_vdpau = {
    .type = AV_HWDEVICE_TYPE_VDPAU,
    .name = "VDPAU",

    .device_hwctx_size = sizeof(AVVDPAUDeviceContext),
    .device_priv_size = sizeof(VDPAUDeviceContext),
    .frames_priv_size = sizeof(VDPAUFramesContext),

#if HAVE_VDPAU_X11
    .device_create = vdpau_device_create,
#endif
    .device_init = vdpau_device_init,
    .device_uninit = vdpau_device_uninit,
    .frames_get_constraints = vdpau_frames_get_constraints,
    .frames_init = vdpau_frames_init,
    .frames_get_buffer = vdpau_get_buffer,
    .transfer_get_formats = vdpau_transfer_get_formats,
    .transfer_data_to = vdpau_transfer_data_to,
    .transfer_data_from = vdpau_transfer_data_from,

    .pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_VDPAU, AV_PIX_FMT_NONE },
};