FFmpeg 4.3
vf_deinterlace_vaapi.c
/*
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include <string.h>

#include "libavutil/avassert.h"
#include "libavutil/common.h"
#include "libavutil/mem.h"
#include "libavutil/opt.h"
#include "libavutil/pixdesc.h"

#include "avfilter.h"
#include "formats.h"
#include "internal.h"
#include "video.h"
#include "vaapi_vpp.h"

#define MAX_REFERENCES 8

typedef struct DeintVAAPIContext {
    VAAPIVPPContext vpp_ctx; // must be the first field

    int                mode;
    int                field_rate;
    int                auto_enable;

    VAProcFilterCapDeinterlacing
                       deint_caps[VAProcDeinterlacingCount];
    int             nb_deint_caps;
    VAProcPipelineCaps pipeline_caps;

    int                queue_depth;
    int                queue_count;
    AVFrame           *frame_queue[MAX_REFERENCES];
    int                extra_delay_for_timestamps;
} DeintVAAPIContext;

static const char *deint_vaapi_mode_name(int mode)
{
    switch (mode) {
#define D(name) case VAProcDeinterlacing ## name: return #name
        D(Bob);
        D(Weave);
        D(MotionAdaptive);
        D(MotionCompensated);
#undef D
    default:
        return "Invalid";
    }
}

static void deint_vaapi_pipeline_uninit(AVFilterContext *avctx)
{
    DeintVAAPIContext *ctx = avctx->priv;
    int i;

    for (i = 0; i < ctx->queue_count; i++)
        av_frame_free(&ctx->frame_queue[i]);
    ctx->queue_count = 0;

    ff_vaapi_vpp_pipeline_uninit(avctx);
}

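/*
 * Query the driver's deinterlacing capabilities, select or validate the
 * requested algorithm, create the deinterlacing filter parameter buffer,
 * and size the reference-frame queue from the pipeline capabilities.
 */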
static int deint_vaapi_build_filter_params(AVFilterContext *avctx)
{
    VAAPIVPPContext *vpp_ctx = avctx->priv;
    DeintVAAPIContext  *ctx = avctx->priv;
    VAStatus vas;
    VAProcFilterParameterBufferDeinterlacing params;
    int i;

    ctx->nb_deint_caps = VAProcDeinterlacingCount;
    vas = vaQueryVideoProcFilterCaps(vpp_ctx->hwctx->display,
                                     vpp_ctx->va_context,
                                     VAProcFilterDeinterlacing,
                                     &ctx->deint_caps,
                                     &ctx->nb_deint_caps);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to query deinterlacing "
               "caps: %d (%s).\n", vas, vaErrorStr(vas));
        return AVERROR(EIO);
    }

    if (ctx->mode == VAProcDeinterlacingNone) {
        for (i = 0; i < ctx->nb_deint_caps; i++) {
            if (ctx->deint_caps[i].type > ctx->mode)
                ctx->mode = ctx->deint_caps[i].type;
        }
        av_log(avctx, AV_LOG_VERBOSE, "Picking %d (%s) as default "
               "deinterlacing mode.\n", ctx->mode,
               deint_vaapi_mode_name(ctx->mode));
    } else {
        for (i = 0; i < ctx->nb_deint_caps; i++) {
            if (ctx->deint_caps[i].type == ctx->mode)
                break;
        }
        if (i >= ctx->nb_deint_caps) {
            av_log(avctx, AV_LOG_ERROR, "Deinterlacing mode %d (%s) is "
                   "not supported.\n", ctx->mode,
                   deint_vaapi_mode_name(ctx->mode));
            return AVERROR(EINVAL);
        }
    }

    params.type      = VAProcFilterDeinterlacing;
    params.algorithm = ctx->mode;
    params.flags     = 0;

    vas = ff_vaapi_vpp_make_param_buffers(avctx,
                                          VAProcFilterParameterBufferType,
                                          &params,
                                          sizeof(params),
                                          1);
    if (vas)
        return vas;

    vas = vaQueryVideoProcPipelineCaps(vpp_ctx->hwctx->display,
                                       vpp_ctx->va_context,
                                       &vpp_ctx->filter_buffers[0], 1,
                                       &ctx->pipeline_caps);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to query pipeline "
               "caps: %d (%s).\n", vas, vaErrorStr(vas));
        return AVERROR(EIO);
    }

    ctx->extra_delay_for_timestamps = ctx->field_rate == 2 &&
        ctx->pipeline_caps.num_backward_references == 0;

    ctx->queue_depth = ctx->pipeline_caps.num_backward_references +
                       ctx->pipeline_caps.num_forward_references +
                       ctx->extra_delay_for_timestamps + 1;
    if (ctx->queue_depth > MAX_REFERENCES) {
        av_log(avctx, AV_LOG_ERROR, "Pipeline requires too many "
               "references (%u forward, %u back).\n",
               ctx->pipeline_caps.num_forward_references,
               ctx->pipeline_caps.num_backward_references);
        return AVERROR(ENOSYS);
    }

    return 0;
}

static int deint_vaapi_config_output(AVFilterLink *outlink)
{
    AVFilterLink    *inlink = outlink->src->inputs[0];
    AVFilterContext *avctx  = outlink->src;
    DeintVAAPIContext  *ctx = avctx->priv;
    int err;

    err = ff_vaapi_vpp_config_output(outlink);
    if (err < 0)
        return err;
    outlink->time_base  = av_mul_q(inlink->time_base,
                                   (AVRational) { 1, ctx->field_rate });
    outlink->frame_rate = av_mul_q(inlink->frame_rate,
                                   (AVRational) { ctx->field_rate, 1 });

    return 0;
}

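/*
 * Per-frame entry point: incoming frames are buffered in frame_queue until
 * enough forward/backward reference surfaces are available, then the
 * current frame is rendered once per output frame (twice when rate=field),
 * with the deinterlacing field flags derived from top_field_first.
 */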
static int deint_vaapi_filter_frame(AVFilterLink *inlink, AVFrame *input_frame)
{
    AVFilterContext   *avctx = inlink->dst;
    AVFilterLink    *outlink = avctx->outputs[0];
    VAAPIVPPContext *vpp_ctx = avctx->priv;
    DeintVAAPIContext   *ctx = avctx->priv;
    AVFrame *output_frame    = NULL;
    VASurfaceID input_surface;
    VASurfaceID backward_references[MAX_REFERENCES];
    VASurfaceID forward_references[MAX_REFERENCES];
    VAProcPipelineParameterBuffer params;
    VAProcFilterParameterBufferDeinterlacing *filter_params;
    VAStatus vas;
    void *filter_params_addr = NULL;
    int err, i, field, current_frame_index;

    av_log(avctx, AV_LOG_DEBUG, "Filter input: %s, %ux%u (%"PRId64").\n",
           av_get_pix_fmt_name(input_frame->format),
           input_frame->width, input_frame->height, input_frame->pts);

    if (ctx->queue_count < ctx->queue_depth) {
        ctx->frame_queue[ctx->queue_count++] = input_frame;
        if (ctx->queue_count < ctx->queue_depth) {
            // Need more reference surfaces before we can continue.
            return 0;
        }
    } else {
        av_frame_free(&ctx->frame_queue[0]);
        for (i = 0; i + 1 < ctx->queue_count; i++)
            ctx->frame_queue[i] = ctx->frame_queue[i + 1];
        ctx->frame_queue[i] = input_frame;
    }

    current_frame_index = ctx->pipeline_caps.num_forward_references;

    input_frame = ctx->frame_queue[current_frame_index];
    input_surface = (VASurfaceID)(uintptr_t)input_frame->data[3];
    for (i = 0; i < ctx->pipeline_caps.num_forward_references; i++)
        forward_references[i] = (VASurfaceID)(uintptr_t)
            ctx->frame_queue[current_frame_index - i - 1]->data[3];
    for (i = 0; i < ctx->pipeline_caps.num_backward_references; i++)
        backward_references[i] = (VASurfaceID)(uintptr_t)
            ctx->frame_queue[current_frame_index + i + 1]->data[3];

    av_log(avctx, AV_LOG_DEBUG, "Using surface %#x for "
           "deinterlace input.\n", input_surface);
    av_log(avctx, AV_LOG_DEBUG, "Backward references:");
    for (i = 0; i < ctx->pipeline_caps.num_backward_references; i++)
        av_log(avctx, AV_LOG_DEBUG, " %#x", backward_references[i]);
    av_log(avctx, AV_LOG_DEBUG, "\n");
    av_log(avctx, AV_LOG_DEBUG, "Forward references:");
    for (i = 0; i < ctx->pipeline_caps.num_forward_references; i++)
        av_log(avctx, AV_LOG_DEBUG, " %#x", forward_references[i]);
    av_log(avctx, AV_LOG_DEBUG, "\n");

    for (field = 0; field < ctx->field_rate; field++) {
        output_frame = ff_get_video_buffer(outlink, vpp_ctx->output_width,
                                           vpp_ctx->output_height);
        if (!output_frame) {
            err = AVERROR(ENOMEM);
            goto fail;
        }

        err = av_frame_copy_props(output_frame, input_frame);
        if (err < 0)
            goto fail;

        err = ff_vaapi_vpp_init_params(avctx, &params,
                                       input_frame, output_frame);
        if (err < 0)
            goto fail;

        if (!ctx->auto_enable || input_frame->interlaced_frame) {
            vas = vaMapBuffer(vpp_ctx->hwctx->display, vpp_ctx->filter_buffers[0],
                              &filter_params_addr);
            if (vas != VA_STATUS_SUCCESS) {
                av_log(avctx, AV_LOG_ERROR, "Failed to map filter parameter "
                       "buffer: %d (%s).\n", vas, vaErrorStr(vas));
                err = AVERROR(EIO);
                goto fail;
            }
            filter_params = filter_params_addr;
            filter_params->flags = 0;
            if (input_frame->top_field_first) {
                filter_params->flags |= field ? VA_DEINTERLACING_BOTTOM_FIELD : 0;
            } else {
                filter_params->flags |= VA_DEINTERLACING_BOTTOM_FIELD_FIRST;
                filter_params->flags |= field ? 0 : VA_DEINTERLACING_BOTTOM_FIELD;
            }
            filter_params_addr = NULL;
            vas = vaUnmapBuffer(vpp_ctx->hwctx->display, vpp_ctx->filter_buffers[0]);
            if (vas != VA_STATUS_SUCCESS)
                av_log(avctx, AV_LOG_ERROR, "Failed to unmap filter parameter "
                       "buffer: %d (%s).\n", vas, vaErrorStr(vas));

            params.filters     = &vpp_ctx->filter_buffers[0];
            params.num_filters = 1;

            params.forward_references = forward_references;
            params.num_forward_references =
                ctx->pipeline_caps.num_forward_references;
            params.backward_references = backward_references;
            params.num_backward_references =
                ctx->pipeline_caps.num_backward_references;

        } else {
            params.filters     = NULL;
            params.num_filters = 0;
        }

        err = ff_vaapi_vpp_render_picture(avctx, &params, output_frame);
        if (err < 0)
            goto fail;

        if (ctx->field_rate == 2) {
            if (field == 0)
                output_frame->pts = 2 * input_frame->pts;
            else
                output_frame->pts = input_frame->pts +
                    ctx->frame_queue[current_frame_index + 1]->pts;
        }
        output_frame->interlaced_frame = 0;

        av_log(avctx, AV_LOG_DEBUG, "Filter output: %s, %ux%u (%"PRId64").\n",
               av_get_pix_fmt_name(output_frame->format),
               output_frame->width, output_frame->height, output_frame->pts);

        err = ff_filter_frame(outlink, output_frame);
        if (err < 0)
            break;
    }

    return err;

fail:
    if (filter_params_addr)
        vaUnmapBuffer(vpp_ctx->hwctx->display, vpp_ctx->filter_buffers[0]);
    av_frame_free(&output_frame);
    return err;
}

static av_cold int deint_vaapi_init(AVFilterContext *avctx)
{
    VAAPIVPPContext *vpp_ctx = avctx->priv;

    ff_vaapi_vpp_ctx_init(avctx);
    vpp_ctx->pipeline_uninit     = deint_vaapi_pipeline_uninit;
    vpp_ctx->build_filter_params = deint_vaapi_build_filter_params;
    vpp_ctx->output_format       = AV_PIX_FMT_NONE;

    return 0;
}

#define OFFSET(x) offsetof(DeintVAAPIContext, x)
#define FLAGS (AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM)
static const AVOption deint_vaapi_options[] = {
    { "mode", "Deinterlacing mode",
      OFFSET(mode), AV_OPT_TYPE_INT, { .i64 = VAProcDeinterlacingNone },
      VAProcDeinterlacingNone, VAProcDeinterlacingCount - 1, FLAGS, "mode" },
    { "default", "Use the highest-numbered (and therefore possibly most advanced) deinterlacing algorithm",
      0, AV_OPT_TYPE_CONST, { .i64 = VAProcDeinterlacingNone }, 0, 0, FLAGS, "mode" },
    { "bob", "Use the bob deinterlacing algorithm",
      0, AV_OPT_TYPE_CONST, { .i64 = VAProcDeinterlacingBob }, 0, 0, FLAGS, "mode" },
    { "weave", "Use the weave deinterlacing algorithm",
      0, AV_OPT_TYPE_CONST, { .i64 = VAProcDeinterlacingWeave }, 0, 0, FLAGS, "mode" },
    { "motion_adaptive", "Use the motion adaptive deinterlacing algorithm",
      0, AV_OPT_TYPE_CONST, { .i64 = VAProcDeinterlacingMotionAdaptive }, 0, 0, FLAGS, "mode" },
    { "motion_compensated", "Use the motion compensated deinterlacing algorithm",
      0, AV_OPT_TYPE_CONST, { .i64 = VAProcDeinterlacingMotionCompensated }, 0, 0, FLAGS, "mode" },

    { "rate", "Generate output at frame rate or field rate",
      OFFSET(field_rate), AV_OPT_TYPE_INT, { .i64 = 1 }, 1, 2, FLAGS, "rate" },
    { "frame", "Output at frame rate (one frame of output for each field-pair)",
      0, AV_OPT_TYPE_CONST, { .i64 = 1 }, 0, 0, FLAGS, "rate" },
    { "field", "Output at field rate (one frame of output for each field)",
      0, AV_OPT_TYPE_CONST, { .i64 = 2 }, 0, 0, FLAGS, "rate" },

    { "auto", "Only deinterlace fields, passing frames through unchanged",
      OFFSET(auto_enable), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 1, FLAGS },

    { NULL },
};

static const AVClass deint_vaapi_class = {
    .class_name = "deinterlace_vaapi",
    .item_name  = av_default_item_name,
    .option     = deint_vaapi_options,
    .version    = LIBAVUTIL_VERSION_INT,
};

static const AVFilterPad deint_vaapi_inputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .filter_frame = &deint_vaapi_filter_frame,
        .config_props = &ff_vaapi_vpp_config_input,
    },
    { NULL }
};

static const AVFilterPad deint_vaapi_outputs[] = {
    {
        .name = "default",
        .type = AVMEDIA_TYPE_VIDEO,
        .config_props = &deint_vaapi_config_output,
    },
    { NULL }
};

AVFilter ff_vf_deinterlace_vaapi = {
    .name           = "deinterlace_vaapi",
    .description    = NULL_IF_CONFIG_SMALL("Deinterlacing of VAAPI surfaces"),
    .priv_size      = sizeof(DeintVAAPIContext),
    .init           = &deint_vaapi_init,
    .uninit         = &ff_vaapi_vpp_ctx_uninit,
    .query_formats  = &ff_vaapi_vpp_query_formats,
    .inputs         = deint_vaapi_inputs,
    .outputs        = deint_vaapi_outputs,
    .priv_class     = &deint_vaapi_class,
    .flags_internal = FF_FILTER_FLAG_HWFRAME_AWARE,
};
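The options table above maps directly onto the ffmpeg command line. As a rough usage sketch only (the DRM device path, input file, and encoder below are assumptions and will differ per system):

  ffmpeg -hwaccel vaapi -hwaccel_device /dev/dri/renderD128 \
         -hwaccel_output_format vaapi -i input.ts \
         -vf 'deinterlace_vaapi=mode=motion_adaptive:rate=field:auto=1' \
         -c:v h264_vaapi output.mp4

With rate=field the filter emits one output frame per field and doubles the output frame rate (see deint_vaapi_config_output()); with auto=1, frames not flagged as interlaced are still rendered through the VAAPI pipeline but without the deinterlacing filter attached.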