FFmpeg  4.2.2
hwcontext_vdpau.c
/*
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "config.h"

#include <stdint.h>
#include <string.h>

#include <vdpau/vdpau.h>

#include "buffer.h"
#include "common.h"
#include "hwcontext.h"
#include "hwcontext_internal.h"
#include "hwcontext_vdpau.h"
#include "mem.h"
#include "pixfmt.h"
#include "pixdesc.h"

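/*
 * VDPAU backend for the generic hwcontext API: sets up a VDPAU device,
 * manages a pool of VdpVideoSurface objects exposed as AV_PIX_FMT_VDPAU
 * frames, and implements download/upload between those surfaces and
 * system-memory frames.
 */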
typedef struct VDPAUDeviceContext {
    VdpVideoSurfaceQueryGetPutBitsYCbCrCapabilities *get_transfer_caps;
    VdpVideoSurfaceGetBitsYCbCr                     *get_data;
    VdpVideoSurfacePutBitsYCbCr                     *put_data;
    VdpVideoSurfaceCreate                           *surf_create;
    VdpVideoSurfaceDestroy                          *surf_destroy;

    enum AVPixelFormat *pix_fmts[3];
    int              nb_pix_fmts[3];
} VDPAUDeviceContext;

typedef struct VDPAUFramesContext {
    VdpVideoSurfaceGetBitsYCbCr *get_data;
    VdpVideoSurfacePutBitsYCbCr *put_data;
    VdpChromaType chroma_type;
    int chroma_idx;

    const enum AVPixelFormat *pix_fmts;
    int                       nb_pix_fmts;
} VDPAUFramesContext;

typedef struct VDPAUPixFmtMap {
    VdpYCbCrFormat     vdpau_fmt;
    enum AVPixelFormat pix_fmt;
} VDPAUPixFmtMap;

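/*
 * Per-chroma-type tables mapping VDPAU YCbCr surface formats to the
 * FFmpeg pixel formats usable for transfers; each list is terminated
 * by AV_PIX_FMT_NONE.
 */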
static const VDPAUPixFmtMap pix_fmts_420[] = {
    { VDP_YCBCR_FORMAT_NV12, AV_PIX_FMT_NV12    },
    { VDP_YCBCR_FORMAT_YV12, AV_PIX_FMT_YUV420P },
    { 0,                     AV_PIX_FMT_NONE,   },
};

static const VDPAUPixFmtMap pix_fmts_422[] = {
    { VDP_YCBCR_FORMAT_NV12, AV_PIX_FMT_NV16    },
    { VDP_YCBCR_FORMAT_YV12, AV_PIX_FMT_YUV422P },
    { VDP_YCBCR_FORMAT_UYVY, AV_PIX_FMT_UYVY422 },
    { VDP_YCBCR_FORMAT_YUYV, AV_PIX_FMT_YUYV422 },
    { 0,                     AV_PIX_FMT_NONE,   },
};

static const VDPAUPixFmtMap pix_fmts_444[] = {
#ifdef VDP_YCBCR_FORMAT_Y_U_V_444
    { VDP_YCBCR_FORMAT_Y_U_V_444, AV_PIX_FMT_YUV444P },
#endif
    { 0,                          AV_PIX_FMT_NONE,   },
};

static const struct {
    VdpChromaType chroma_type;
    enum AVPixelFormat frames_sw_format;
    const VDPAUPixFmtMap *map;
} vdpau_pix_fmts[] = {
    { VDP_CHROMA_TYPE_420, AV_PIX_FMT_YUV420P, pix_fmts_420 },
    { VDP_CHROMA_TYPE_422, AV_PIX_FMT_YUV422P, pix_fmts_422 },
    { VDP_CHROMA_TYPE_444, AV_PIX_FMT_YUV444P, pix_fmts_444 },
};

static int count_pixfmts(const VDPAUPixFmtMap *map)
{
    int count = 0;
    while (map->pix_fmt != AV_PIX_FMT_NONE) {
        map++;
        count++;
    }
    return count;
}

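/*
 * For every chroma type, ask the driver which of the candidate YCbCr
 * formats are supported for get/put-bits transfers and store the matching
 * pixel formats (terminated by AV_PIX_FMT_NONE) in the device private data.
 */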
static int vdpau_init_pixmfts(AVHWDeviceContext *ctx)
{
    AVVDPAUDeviceContext *hwctx = ctx->hwctx;
    VDPAUDeviceContext   *priv  = ctx->internal->priv;
    int i;

    for (i = 0; i < FF_ARRAY_ELEMS(priv->pix_fmts); i++) {
        const VDPAUPixFmtMap *map = vdpau_pix_fmts[i].map;
        int nb_pix_fmts;

        nb_pix_fmts = count_pixfmts(map);
        priv->pix_fmts[i] = av_malloc_array(nb_pix_fmts + 1, sizeof(*priv->pix_fmts[i]));
        if (!priv->pix_fmts[i])
            return AVERROR(ENOMEM);

        nb_pix_fmts = 0;
        while (map->pix_fmt != AV_PIX_FMT_NONE) {
            VdpBool supported;
            VdpStatus err = priv->get_transfer_caps(hwctx->device, vdpau_pix_fmts[i].chroma_type,
                                                    map->vdpau_fmt, &supported);
            if (err == VDP_STATUS_OK && supported)
                priv->pix_fmts[i][nb_pix_fmts++] = map->pix_fmt;
            map++;
        }
        priv->pix_fmts[i][nb_pix_fmts++] = AV_PIX_FMT_NONE;
        priv->nb_pix_fmts[i]             = nb_pix_fmts;
    }

    return 0;
}

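/*
 * Resolve a VDPAU entry point through get_proc_address(); expects `hwctx',
 * `ctx' and a VdpStatus `err' variable in the calling scope.
 */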
#define GET_CALLBACK(id, result)                                                \
do {                                                                            \
    void *tmp;                                                                  \
    err = hwctx->get_proc_address(hwctx->device, id, &tmp);                     \
    if (err != VDP_STATUS_OK) {                                                 \
        av_log(ctx, AV_LOG_ERROR, "Error getting the " #id " callback.\n");     \
        return AVERROR_UNKNOWN;                                                 \
    }                                                                           \
    result = tmp;                                                               \
} while (0)

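/*
 * Fetch the VDPAU callbacks used by this backend and probe the supported
 * transfer formats. Runs after the caller (or device_create below) has
 * filled in the VdpDevice and VdpGetProcAddress pointers.
 */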
static int vdpau_device_init(AVHWDeviceContext *ctx)
{
    AVVDPAUDeviceContext *hwctx = ctx->hwctx;
    VDPAUDeviceContext   *priv  = ctx->internal->priv;
    VdpStatus             err;
    int                   ret;

    GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_QUERY_GET_PUT_BITS_Y_CB_CR_CAPABILITIES,
                 priv->get_transfer_caps);
    GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_GET_BITS_Y_CB_CR, priv->get_data);
    GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_PUT_BITS_Y_CB_CR, priv->put_data);
    GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_CREATE,           priv->surf_create);
    GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_DESTROY,          priv->surf_destroy);

    ret = vdpau_init_pixmfts(ctx);
    if (ret < 0) {
        av_log(ctx, AV_LOG_ERROR, "Error querying the supported pixel formats\n");
        return ret;
    }

    return 0;
}

static void vdpau_device_uninit(AVHWDeviceContext *ctx)
{
    VDPAUDeviceContext *priv = ctx->internal->priv;
    int i;

    for (i = 0; i < FF_ARRAY_ELEMS(priv->pix_fmts); i++)
        av_freep(&priv->pix_fmts[i]);
}

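/*
 * Report AV_PIX_FMT_VDPAU as the only hardware format and, as software
 * formats, the frames_sw_format of every chroma type for which at least
 * one transfer format was found.
 */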
static int vdpau_frames_get_constraints(AVHWDeviceContext *ctx,
                                        const void *hwconfig,
                                        AVHWFramesConstraints *constraints)
{
    VDPAUDeviceContext *priv = ctx->internal->priv;
    int nb_sw_formats = 0;
    int i;

    constraints->valid_sw_formats = av_malloc_array(FF_ARRAY_ELEMS(vdpau_pix_fmts) + 1,
                                                    sizeof(*constraints->valid_sw_formats));
    if (!constraints->valid_sw_formats)
        return AVERROR(ENOMEM);

    for (i = 0; i < FF_ARRAY_ELEMS(vdpau_pix_fmts); i++) {
        if (priv->nb_pix_fmts[i] > 1)
            constraints->valid_sw_formats[nb_sw_formats++] = vdpau_pix_fmts[i].frames_sw_format;
    }
    constraints->valid_sw_formats[nb_sw_formats] = AV_PIX_FMT_NONE;

    constraints->valid_hw_formats = av_malloc_array(2, sizeof(*constraints->valid_hw_formats));
    if (!constraints->valid_hw_formats)
        return AVERROR(ENOMEM);

    constraints->valid_hw_formats[0] = AV_PIX_FMT_VDPAU;
    constraints->valid_hw_formats[1] = AV_PIX_FMT_NONE;

    return 0;
}

static void vdpau_buffer_free(void *opaque, uint8_t *data)
{
    AVHWFramesContext          *ctx = opaque;
    VDPAUDeviceContext *device_priv = ctx->device_ctx->internal->priv;
    VdpVideoSurface            surf = (VdpVideoSurface)(uintptr_t)data;

    device_priv->surf_destroy(surf);
}

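/*
 * Buffer-pool allocator: creates a VdpVideoSurface of the pool's chroma type
 * and dimensions and wraps its handle in an AVBufferRef whose free callback
 * destroys the surface.
 */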
static AVBufferRef *vdpau_pool_alloc(void *opaque, int size)
{
    AVHWFramesContext             *ctx = opaque;
    VDPAUFramesContext           *priv = ctx->internal->priv;
    AVVDPAUDeviceContext *device_hwctx = ctx->device_ctx->hwctx;
    VDPAUDeviceContext    *device_priv = ctx->device_ctx->internal->priv;

    AVBufferRef *ret;
    VdpVideoSurface surf;
    VdpStatus err;

    err = device_priv->surf_create(device_hwctx->device, priv->chroma_type,
                                   ctx->width, ctx->height, &surf);
    if (err != VDP_STATUS_OK) {
        av_log(ctx, AV_LOG_ERROR, "Error allocating a VDPAU video surface\n");
        return NULL;
    }

    ret = av_buffer_create((uint8_t*)(uintptr_t)surf, sizeof(surf),
                           vdpau_buffer_free, ctx, AV_BUFFER_FLAG_READONLY);
    if (!ret) {
        device_priv->surf_destroy(surf);
        return NULL;
    }

    return ret;
}

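/*
 * Pick the chroma type matching sw_format, cache the per-device transfer
 * format list and, unless the caller provided a pool, create an internal
 * surface pool.
 */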
static int vdpau_frames_init(AVHWFramesContext *ctx)
{
    VDPAUDeviceContext *device_priv = ctx->device_ctx->internal->priv;
    VDPAUFramesContext        *priv = ctx->internal->priv;

    int i;

    for (i = 0; i < FF_ARRAY_ELEMS(vdpau_pix_fmts); i++) {
        if (vdpau_pix_fmts[i].frames_sw_format == ctx->sw_format) {
            priv->chroma_type = vdpau_pix_fmts[i].chroma_type;
            priv->chroma_idx  = i;
            priv->pix_fmts    = device_priv->pix_fmts[i];
            priv->nb_pix_fmts = device_priv->nb_pix_fmts[i];
            break;
        }
    }
    if (priv->nb_pix_fmts < 2) {
        av_log(ctx, AV_LOG_ERROR, "Unsupported sw format: %s\n",
               av_get_pix_fmt_name(ctx->sw_format));
        return AVERROR(ENOSYS);
    }

    if (!ctx->pool) {
        ctx->internal->pool_internal = av_buffer_pool_init2(sizeof(VdpVideoSurface), ctx,
                                                            vdpau_pool_alloc, NULL);
        if (!ctx->internal->pool_internal)
            return AVERROR(ENOMEM);
    }

    priv->get_data = device_priv->get_data;
    priv->put_data = device_priv->put_data;

    return 0;
}

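/* Hand out a pooled surface; data[3] carries the VdpVideoSurface handle. */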
static int vdpau_get_buffer(AVHWFramesContext *ctx, AVFrame *frame)
{
    frame->buf[0] = av_buffer_pool_get(ctx->pool);
    if (!frame->buf[0])
        return AVERROR(ENOMEM);

    frame->data[3] = frame->buf[0]->data;
    frame->format  = AV_PIX_FMT_VDPAU;
    frame->width   = ctx->width;
    frame->height  = ctx->height;

    return 0;
}

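/*
 * Return a copy of the pixel-format list determined at frames_init time
 * (the same list is valid for both transfer directions).
 */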
static int vdpau_transfer_get_formats(AVHWFramesContext *ctx,
                                      enum AVHWFrameTransferDirection dir,
                                      enum AVPixelFormat **formats)
{
    VDPAUFramesContext *priv = ctx->internal->priv;

    enum AVPixelFormat *fmts;

    if (priv->nb_pix_fmts == 1) {
        av_log(ctx, AV_LOG_ERROR,
               "No target formats are supported for this chroma type\n");
        return AVERROR(ENOSYS);
    }

    fmts = av_malloc_array(priv->nb_pix_fmts, sizeof(*fmts));
    if (!fmts)
        return AVERROR(ENOMEM);

    memcpy(fmts, priv->pix_fmts, sizeof(*fmts) * (priv->nb_pix_fmts));
    *formats = fmts;

    return 0;
}

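/*
 * Download: read back a VDPAU surface into the system-memory frame dst,
 * swapping the U/V planes where VDPAU's plane order differs from FFmpeg's.
 */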
static int vdpau_transfer_data_from(AVHWFramesContext *ctx, AVFrame *dst,
                                    const AVFrame *src)
{
    VDPAUFramesContext *priv = ctx->internal->priv;
    VdpVideoSurface     surf = (VdpVideoSurface)(uintptr_t)src->data[3];

    void *data[3];
    uint32_t linesize[3];

    const VDPAUPixFmtMap *map;
    VdpYCbCrFormat vdpau_format;
    VdpStatus err;
    int i;

    for (i = 0; i < FF_ARRAY_ELEMS(data) && dst->data[i]; i++) {
        data[i] = dst->data[i];
        if (dst->linesize[i] < 0 || dst->linesize[i] > UINT32_MAX) {
            av_log(ctx, AV_LOG_ERROR,
                   "The linesize %d cannot be represented as uint32\n",
                   dst->linesize[i]);
            return AVERROR(ERANGE);
        }
        linesize[i] = dst->linesize[i];
    }

    map = vdpau_pix_fmts[priv->chroma_idx].map;
    for (i = 0; map[i].pix_fmt != AV_PIX_FMT_NONE; i++) {
        if (map[i].pix_fmt == dst->format) {
            vdpau_format = map[i].vdpau_fmt;
            break;
        }
    }
    if (map[i].pix_fmt == AV_PIX_FMT_NONE) {
        av_log(ctx, AV_LOG_ERROR,
               "Unsupported target pixel format: %s\n",
               av_get_pix_fmt_name(dst->format));
        return AVERROR(EINVAL);
    }

    if ((vdpau_format == VDP_YCBCR_FORMAT_YV12)
#ifdef VDP_YCBCR_FORMAT_Y_U_V_444
        || (vdpau_format == VDP_YCBCR_FORMAT_Y_U_V_444)
#endif
        )
        FFSWAP(void*, data[1], data[2]);

    err = priv->get_data(surf, vdpau_format, data, linesize);
    if (err != VDP_STATUS_OK) {
        av_log(ctx, AV_LOG_ERROR, "Error retrieving the data from a VDPAU surface\n");
        return AVERROR_UNKNOWN;
    }

    return 0;
}

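/*
 * Upload: write the system-memory frame src into a VDPAU surface, with the
 * same U/V plane swap as the download path.
 */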
static int vdpau_transfer_data_to(AVHWFramesContext *ctx, AVFrame *dst,
                                  const AVFrame *src)
{
    VDPAUFramesContext *priv = ctx->internal->priv;
    VdpVideoSurface     surf = (VdpVideoSurface)(uintptr_t)dst->data[3];

    const void *data[3];
    uint32_t linesize[3];

    const VDPAUPixFmtMap *map;
    VdpYCbCrFormat vdpau_format;
    VdpStatus err;
    int i;

    for (i = 0; i < FF_ARRAY_ELEMS(data) && src->data[i]; i++) {
        data[i] = src->data[i];
        if (src->linesize[i] < 0 || src->linesize[i] > UINT32_MAX) {
            av_log(ctx, AV_LOG_ERROR,
                   "The linesize %d cannot be represented as uint32\n",
                   src->linesize[i]);
            return AVERROR(ERANGE);
        }
        linesize[i] = src->linesize[i];
    }

    map = vdpau_pix_fmts[priv->chroma_idx].map;
    for (i = 0; map[i].pix_fmt != AV_PIX_FMT_NONE; i++) {
        if (map[i].pix_fmt == src->format) {
            vdpau_format = map[i].vdpau_fmt;
            break;
        }
    }
    if (map[i].pix_fmt == AV_PIX_FMT_NONE) {
        av_log(ctx, AV_LOG_ERROR,
               "Unsupported source pixel format: %s\n",
               av_get_pix_fmt_name(src->format));
        return AVERROR(EINVAL);
    }

    if ((vdpau_format == VDP_YCBCR_FORMAT_YV12)
#ifdef VDP_YCBCR_FORMAT_Y_U_V_444
        || (vdpau_format == VDP_YCBCR_FORMAT_Y_U_V_444)
#endif
        )
        FFSWAP(const void*, data[1], data[2]);

    err = priv->put_data(surf, vdpau_format, data, linesize);
    if (err != VDP_STATUS_OK) {
        av_log(ctx, AV_LOG_ERROR, "Error uploading the data to a VDPAU surface\n");
        return AVERROR_UNKNOWN;
    }

    return 0;
}

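/*
 * Standalone device creation is only available through X11; with an
 * externally supplied VdpDevice (allocating the device context and calling
 * init directly) the code above works without it.
 */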
#if HAVE_VDPAU_X11
#include <vdpau/vdpau_x11.h>
#include <X11/Xlib.h>

typedef struct VDPAUDevicePriv {
    VdpDeviceDestroy *device_destroy;
    Display *dpy;
} VDPAUDevicePriv;

static void vdpau_device_free(AVHWDeviceContext *ctx)
{
    AVVDPAUDeviceContext *hwctx = ctx->hwctx;
    VDPAUDevicePriv       *priv = ctx->user_opaque;

    if (priv->device_destroy)
        priv->device_destroy(hwctx->device);
    if (priv->dpy)
        XCloseDisplay(priv->dpy);
    av_freep(&priv);
}

static int vdpau_device_create(AVHWDeviceContext *ctx, const char *device,
                               AVDictionary *opts, int flags)
{
    AVVDPAUDeviceContext *hwctx = ctx->hwctx;

    VDPAUDevicePriv *priv;
    VdpStatus err;
    VdpGetInformationString *get_information_string;
    const char *display, *vendor;

    priv = av_mallocz(sizeof(*priv));
    if (!priv)
        return AVERROR(ENOMEM);

    ctx->user_opaque = priv;
    ctx->free        = vdpau_device_free;

    priv->dpy = XOpenDisplay(device);
    if (!priv->dpy) {
        av_log(ctx, AV_LOG_ERROR, "Cannot open the X11 display %s.\n",
               XDisplayName(device));
        return AVERROR_UNKNOWN;
    }
    display = XDisplayString(priv->dpy);

    err = vdp_device_create_x11(priv->dpy, XDefaultScreen(priv->dpy),
                                &hwctx->device, &hwctx->get_proc_address);
    if (err != VDP_STATUS_OK) {
        av_log(ctx, AV_LOG_ERROR, "VDPAU device creation on X11 display %s failed.\n",
               display);
        return AVERROR_UNKNOWN;
    }

    GET_CALLBACK(VDP_FUNC_ID_GET_INFORMATION_STRING, get_information_string);
    GET_CALLBACK(VDP_FUNC_ID_DEVICE_DESTROY,         priv->device_destroy);

    get_information_string(&vendor);
    av_log(ctx, AV_LOG_VERBOSE, "Successfully created a VDPAU device (%s) on "
           "X11 display %s\n", vendor, display);

    return 0;
}
#endif

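/* Callback table registered with the generic hwcontext layer. */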
const HWContextType ff_hwcontext_type_vdpau = {
    .type                   = AV_HWDEVICE_TYPE_VDPAU,
    .name                   = "VDPAU",

    .device_hwctx_size      = sizeof(AVVDPAUDeviceContext),
    .device_priv_size       = sizeof(VDPAUDeviceContext),
    .frames_priv_size       = sizeof(VDPAUFramesContext),

#if HAVE_VDPAU_X11
    .device_create          = vdpau_device_create,
#endif
    .device_init            = vdpau_device_init,
    .device_uninit          = vdpau_device_uninit,
    .frames_get_constraints = vdpau_frames_get_constraints,
    .frames_init            = vdpau_frames_init,
    .frames_get_buffer      = vdpau_get_buffer,
    .transfer_get_formats   = vdpau_transfer_get_formats,
    .transfer_data_to       = vdpau_transfer_data_to,
    .transfer_data_from     = vdpau_transfer_data_from,

    .pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_VDPAU, AV_PIX_FMT_NONE },
};
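
/*
 * Usage sketch (not part of this file): applications reach this backend
 * through the public hwcontext API rather than calling these callbacks
 * directly. A minimal, error-handling-free flow might look like the
 * following; the frame dimensions and sw_format are illustrative only, and
 * hw_frame/sw_frame are assumed to be AVFrames allocated by the caller.
 *
 *     AVBufferRef *device_ref = NULL, *frames_ref = NULL;
 *     AVHWFramesContext *frames;
 *
 *     av_hwdevice_ctx_create(&device_ref, AV_HWDEVICE_TYPE_VDPAU,
 *                            NULL, NULL, 0);           // vdpau_device_create/init
 *     frames_ref        = av_hwframe_ctx_alloc(device_ref);
 *     frames            = (AVHWFramesContext*)frames_ref->data;
 *     frames->format    = AV_PIX_FMT_VDPAU;
 *     frames->sw_format = AV_PIX_FMT_YUV420P;
 *     frames->width     = 1920;
 *     frames->height    = 1080;
 *     av_hwframe_ctx_init(frames_ref);                 // vdpau_frames_init
 *     av_hwframe_get_buffer(frames_ref, hw_frame, 0);  // vdpau_get_buffer
 *     av_hwframe_transfer_data(sw_frame, hw_frame, 0); // vdpau_transfer_data_from
 */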