FFmpeg
vf_amf_common.c
Go to the documentation of this file.
1 /*
2  * This file is part of FFmpeg.
3  *
4  * FFmpeg is free software; you can redistribute it and/or
5  * modify it under the terms of the GNU Lesser General Public
6  * License as published by the Free Software Foundation; either
7  * version 2.1 of the License, or (at your option) any later version.
8  *
9  * FFmpeg is distributed in the hope that it will be useful,
10  * but WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12  * Lesser General Public License for more details.
13  *
14  * You should have received a copy of the GNU Lesser General Public
15  * License along with FFmpeg; if not, write to the Free Software
16  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17  */
18 
#include "vf_amf_common.h"

#include "libavutil/avassert.h"
#include "avfilter.h"
#include "avfilter_internal.h"
#include "formats.h"
#include "libavutil/mem.h"
#include "libavutil/imgutils.h"

#include "AMF/components/VideoDecoderUVD.h"
#include "libavutil/hwcontext_amf.h"
#include "libavutil/hwcontext_amf_internal.h"
#include "scale_eval.h"

#if CONFIG_DXVA2
#include <d3d9.h>
#endif

#if CONFIG_D3D11VA
#include <d3d11.h>
#endif
40 
42 {
43  AMFFilterContext *ctx = avctx->priv;
44 
45  if (!strcmp(ctx->format_str, "same")) {
46  ctx->format = AV_PIX_FMT_NONE;
47  } else {
48  ctx->format = av_get_pix_fmt(ctx->format_str);
49  if (ctx->format == AV_PIX_FMT_NONE) {
50  av_log(avctx, AV_LOG_ERROR, "Unrecognized pixel format: %s\n", ctx->format_str);
51  return AVERROR(EINVAL);
52  }
53  }
54 
55  return 0;
56 }
57 
59 {
60  AMFFilterContext *ctx = avctx->priv;
61 
62  if (ctx->component) {
63  ctx->component->pVtbl->Terminate(ctx->component);
64  ctx->component->pVtbl->Release(ctx->component);
65  ctx->component = NULL;
66  }
67 
68  if (ctx->master_display)
69  av_freep(&ctx->master_display);
70 
71  if (ctx->light_meta)
72  av_freep(&ctx->light_meta);
73 
74  av_buffer_unref(&ctx->amf_device_ref);
75  av_buffer_unref(&ctx->hwdevice_ref);
76  av_buffer_unref(&ctx->hwframes_in_ref);
77  av_buffer_unref(&ctx->hwframes_out_ref);
78 }
79 
81 {
82  AVFilterContext *avctx = inlink->dst;
83  AMFFilterContext *ctx = avctx->priv;
84  AVFilterLink *outlink = avctx->outputs[0];
85  AMF_RESULT res;
86  AMFSurface *surface_in;
87  AMFSurface *surface_out;
88  AMFData *data_out = NULL;
89  enum AVColorSpace out_colorspace;
90  enum AVColorRange out_color_range;
91 
92  AVFrame *out = NULL;
93  int ret = 0;
94 
95  if (!ctx->component)
96  return AVERROR(EINVAL);
97 
98  ret = amf_avframe_to_amfsurface(avctx, in, &surface_in);
99  if (ret < 0)
100  goto fail;
101 
102  res = ctx->component->pVtbl->SubmitInput(ctx->component, (AMFData*)surface_in);
103  surface_in->pVtbl->Release(surface_in); // release surface after use
104  AMF_GOTO_FAIL_IF_FALSE(avctx, res == AMF_OK, AVERROR_UNKNOWN, "SubmitInput() failed with error %d\n", res);
105  res = ctx->component->pVtbl->QueryOutput(ctx->component, &data_out);
106  AMF_GOTO_FAIL_IF_FALSE(avctx, res == AMF_OK, AVERROR_UNKNOWN, "QueryOutput() failed with error %d\n", res);
107 
108  if (data_out) {
109  AMFGuid guid = IID_AMFSurface();
110  res = data_out->pVtbl->QueryInterface(data_out, &guid, (void**)&surface_out); // query for buffer interface
111  data_out->pVtbl->Release(data_out);
112  AMF_RETURN_IF_FALSE(avctx, res == AMF_OK, AVERROR_UNKNOWN, "QueryInterface(IID_AMFSurface) failed with error %d\n", res);
113  } else {
114  return AVERROR(EAGAIN);
115  }
116 
117  out = amf_amfsurface_to_avframe(avctx, surface_out);
118 
119  ret = av_frame_copy_props(out, in);
120  av_frame_unref(in);
121 
122  out_colorspace = AVCOL_SPC_UNSPECIFIED;
123 
124  if (ctx->color_profile != AMF_VIDEO_CONVERTER_COLOR_PROFILE_UNKNOWN) {
125  switch(ctx->color_profile) {
126  case AMF_VIDEO_CONVERTER_COLOR_PROFILE_601:
127  out_colorspace = AVCOL_SPC_SMPTE170M;
128  break;
129  case AMF_VIDEO_CONVERTER_COLOR_PROFILE_709:
130  out_colorspace = AVCOL_SPC_BT709;
131  break;
132  case AMF_VIDEO_CONVERTER_COLOR_PROFILE_2020:
133  out_colorspace = AVCOL_SPC_BT2020_NCL;
134  break;
135  case AMF_VIDEO_CONVERTER_COLOR_PROFILE_JPEG:
136  out_colorspace = AVCOL_SPC_RGB;
137  break;
138  default:
139  out_colorspace = AVCOL_SPC_UNSPECIFIED;
140  break;
141  }
142  out->colorspace = out_colorspace;
143  }
144 
145  out_color_range = AVCOL_RANGE_UNSPECIFIED;
146  if (ctx->out_color_range == AMF_COLOR_RANGE_FULL)
147  out_color_range = AVCOL_RANGE_JPEG;
148  else if (ctx->out_color_range == AMF_COLOR_RANGE_STUDIO)
149  out_color_range = AVCOL_RANGE_MPEG;
150 
151  if (ctx->out_color_range != AMF_COLOR_RANGE_UNDEFINED)
152  out->color_range = out_color_range;
153 
154  if (ctx->out_primaries != AMF_COLOR_PRIMARIES_UNDEFINED)
155  out->color_primaries = ctx->out_primaries;
156 
157  if (ctx->out_trc != AMF_COLOR_TRANSFER_CHARACTERISTIC_UNDEFINED)
158  out->color_trc = ctx->out_trc;
159 
160 
161  if (ret < 0)
162  goto fail;
163 
164  out->hw_frames_ctx = av_buffer_ref(ctx->hwframes_out_ref);
165  if (!out->hw_frames_ctx) {
166  ret = AVERROR(ENOMEM);
167  goto fail;
168  }
169 
170  av_frame_free(&in);
171  return ff_filter_frame(outlink, out);
172 fail:
173  av_frame_free(&in);
174  av_frame_free(&out);
175  return ret;
176 }
177 
178 
179 
181  const enum AVPixelFormat *input_pix_fmts,
182  const enum AVPixelFormat *output_pix_fmts)
183 {
184  int err;
185  AVFilterFormats *input_formats;
186  AVFilterFormats *output_formats;
187 
188  //in case if hw_device_ctx is set to DXVA2 we change order of pixel formats to set DXVA2 be chosen by default
189  //The order is ignored if hw_frames_ctx is not NULL on the config_output stage
190  if (avctx->hw_device_ctx) {
191  AVHWDeviceContext *device_ctx = (AVHWDeviceContext*)avctx->hw_device_ctx->data;
192 
193  switch (device_ctx->type) {
194  #if CONFIG_D3D11VA
196  {
197  static const enum AVPixelFormat output_pix_fmts_d3d11[] = {
200  };
201  output_pix_fmts = output_pix_fmts_d3d11;
202  }
203  break;
204  #endif
205  #if CONFIG_DXVA2
207  {
208  static const enum AVPixelFormat output_pix_fmts_dxva2[] = {
211  };
212  output_pix_fmts = output_pix_fmts_dxva2;
213  }
214  break;
215  #endif
217  break;
218  default:
219  {
220  av_log(avctx, AV_LOG_ERROR, "Unsupported device : %s\n", av_hwdevice_get_type_name(device_ctx->type));
221  return AVERROR(EINVAL);
222  }
223  break;
224  }
225  }
226 
227  input_formats = ff_make_pixel_format_list(output_pix_fmts);
228  if (!input_formats) {
229  return AVERROR(ENOMEM);
230  }
231  output_formats = ff_make_pixel_format_list(output_pix_fmts);
232  if (!output_formats) {
233  return AVERROR(ENOMEM);
234  }
235 
236  if ((err = ff_formats_ref(input_formats, &avctx->inputs[0]->outcfg.formats)) < 0)
237  return err;
238 
239  if ((err = ff_formats_ref(output_formats, &avctx->outputs[0]->incfg.formats)) < 0)
240  return err;
241  return 0;
242 }
243 
245  AMFSurface* surface)
246 {
247  AMFPlane *plane;
248  uint8_t *dst_data[4];
249  int dst_linesize[4];
250  int planes;
251  int i;
252 
253  planes = (int)surface->pVtbl->GetPlanesCount(surface);
254  av_assert0(planes < FF_ARRAY_ELEMS(dst_data));
255 
256  for (i = 0; i < planes; i++) {
257  plane = surface->pVtbl->GetPlaneAt(surface, i);
258  dst_data[i] = plane->pVtbl->GetNative(plane);
259  dst_linesize[i] = plane->pVtbl->GetHPitch(plane);
260  }
261  av_image_copy(dst_data, dst_linesize,
262  (const uint8_t**)frame->data, frame->linesize, frame->format,
263  frame->width, frame->height);
264 
265  return 0;
266 }
267 
268 int amf_init_filter_config(AVFilterLink *outlink, enum AVPixelFormat *in_format)
269 {
270  int err;
271  AMF_RESULT res;
272  AVFilterContext *avctx = outlink->src;
273  AVFilterLink *inlink = avctx->inputs[0];
274  AMFFilterContext *ctx = avctx->priv;
275  AVHWFramesContext *hwframes_out;
276  AVHWDeviceContext *hwdev_ctx;
277  enum AVPixelFormat in_sw_format = inlink->format;
278  enum AVPixelFormat out_sw_format = ctx->format;
280  FilterLink *outl = ff_filter_link(outlink);
281  double w_adj = 1.0;
282 
283  if ((err = ff_scale_eval_dimensions(avctx,
284  ctx->w_expr, ctx->h_expr,
285  inlink, outlink,
286  &ctx->width, &ctx->height)) < 0)
287  return err;
288 
289  if (ctx->reset_sar && inlink->sample_aspect_ratio.num)
290  w_adj = (double) inlink->sample_aspect_ratio.num / inlink->sample_aspect_ratio.den;
291 
292  ff_scale_adjust_dimensions(inlink, &ctx->width, &ctx->height,
293  ctx->force_original_aspect_ratio, ctx->force_divisible_by, w_adj);
294 
295  av_buffer_unref(&ctx->amf_device_ref);
296  av_buffer_unref(&ctx->hwframes_in_ref);
297  av_buffer_unref(&ctx->hwframes_out_ref);
298  ctx->local_context = 0;
299  if (inl->hw_frames_ctx) {
301  if (av_av_to_amf_format(frames_ctx->sw_format) == AMF_SURFACE_UNKNOWN) {
302  av_log(avctx, AV_LOG_ERROR, "Format of input frames context (%s) is not supported by AMF.\n",
303  av_get_pix_fmt_name(frames_ctx->sw_format));
304  return AVERROR(EINVAL);
305  }
306 
307  err = av_hwdevice_ctx_create_derived(&ctx->amf_device_ref, AV_HWDEVICE_TYPE_AMF, frames_ctx->device_ref, 0);
308  if (err < 0)
309  return err;
310 
311  ctx->hwframes_in_ref = av_buffer_ref(inl->hw_frames_ctx);
312  if (!ctx->hwframes_in_ref)
313  return AVERROR(ENOMEM);
314 
315  in_sw_format = frames_ctx->sw_format;
316  } else if (avctx->hw_device_ctx) {
317  err = av_hwdevice_ctx_create_derived(&ctx->amf_device_ref, AV_HWDEVICE_TYPE_AMF, avctx->hw_device_ctx, 0);
318  if (err < 0)
319  return err;
320  ctx->hwdevice_ref = av_buffer_ref(avctx->hw_device_ctx);
321  if (!ctx->hwdevice_ref)
322  return AVERROR(ENOMEM);
323  } else {
324  res = av_hwdevice_ctx_create(&ctx->amf_device_ref, AV_HWDEVICE_TYPE_AMF, NULL, NULL, 0);
325  AMF_RETURN_IF_FALSE(avctx, res == 0, res, "Failed to create hardware device context (AMF) : %s\n", av_err2str(res));
326 
327  }
328  if(out_sw_format == AV_PIX_FMT_NONE){
329  if(outlink->format == AV_PIX_FMT_AMF_SURFACE)
330  out_sw_format = in_sw_format;
331  else
332  out_sw_format = outlink->format;
333  }
334  ctx->hwframes_out_ref = av_hwframe_ctx_alloc(ctx->amf_device_ref);
335  if (!ctx->hwframes_out_ref)
336  return AVERROR(ENOMEM);
337  hwframes_out = (AVHWFramesContext*)ctx->hwframes_out_ref->data;
338  hwdev_ctx = (AVHWDeviceContext*)ctx->amf_device_ref->data;
339  if (hwdev_ctx->type == AV_HWDEVICE_TYPE_AMF)
340  {
341  ctx->amf_device_ctx = hwdev_ctx->hwctx;
342  }
343  hwframes_out->format = AV_PIX_FMT_AMF_SURFACE;
344  hwframes_out->sw_format = out_sw_format;
345 
346  if (inlink->format == AV_PIX_FMT_AMF_SURFACE) {
347  *in_format = in_sw_format;
348  } else {
349  *in_format = inlink->format;
350  }
351  outlink->w = ctx->width;
352  outlink->h = ctx->height;
353 
354  if (ctx->reset_sar)
355  outlink->sample_aspect_ratio = (AVRational){1, 1};
356  else if (inlink->sample_aspect_ratio.num) {
357  outlink->sample_aspect_ratio = av_mul_q((AVRational){outlink->h * inlink->w, outlink->w * inlink->h}, inlink->sample_aspect_ratio);
358  } else
359  outlink->sample_aspect_ratio = inlink->sample_aspect_ratio;
360 
361  hwframes_out->width = outlink->w;
362  hwframes_out->height = outlink->h;
363 
364  err = av_hwframe_ctx_init(ctx->hwframes_out_ref);
365  if (err < 0)
366  return err;
367 
368  outl->hw_frames_ctx = av_buffer_ref(ctx->hwframes_out_ref);
369  if (!outl->hw_frames_ctx) {
370  return AVERROR(ENOMEM);
371  }
372  return 0;
373 }
374 
375 void amf_free_amfsurface(void *opaque, uint8_t *data)
376 {
377  AMFSurface *surface = (AMFSurface*)data;
378  surface->pVtbl->Release(surface);
379 }
380 
381 AVFrame *amf_amfsurface_to_avframe(AVFilterContext *avctx, AMFSurface* pSurface)
382 {
384  AMFFilterContext *ctx = avctx->priv;
385 
386  if (!frame)
387  return NULL;
388 
389  if (ctx->hwframes_out_ref) {
390  AVHWFramesContext *hwframes_out = (AVHWFramesContext *)ctx->hwframes_out_ref->data;
391  if (hwframes_out->format == AV_PIX_FMT_AMF_SURFACE) {
392  int ret = av_hwframe_get_buffer(ctx->hwframes_out_ref, frame, 0);
393  if (ret < 0) {
394  av_log(avctx, AV_LOG_ERROR, "Get hw frame failed.\n");
396  return NULL;
397  }
398  frame->data[0] = (uint8_t *)pSurface;
399  frame->buf[1] = av_buffer_create((uint8_t *)pSurface, sizeof(AMFSurface),
401  (void*)avctx,
403  } else { // FIXME: add processing of other hw formats
404  av_log(ctx, AV_LOG_ERROR, "Unknown pixel format\n");
405  return NULL;
406  }
407  } else {
408 
409  switch (pSurface->pVtbl->GetMemoryType(pSurface))
410  {
411  #if CONFIG_D3D11VA
412  case AMF_MEMORY_DX11:
413  {
414  AMFPlane *plane0 = pSurface->pVtbl->GetPlaneAt(pSurface, 0);
415  frame->data[0] = plane0->pVtbl->GetNative(plane0);
416  frame->data[1] = (uint8_t*)(intptr_t)0;
417 
418  frame->buf[0] = av_buffer_create(NULL,
419  0,
421  pSurface,
423  }
424  break;
425  #endif
426  #if CONFIG_DXVA2
427  case AMF_MEMORY_DX9:
428  {
429  AMFPlane *plane0 = pSurface->pVtbl->GetPlaneAt(pSurface, 0);
430  frame->data[3] = plane0->pVtbl->GetNative(plane0);
431 
432  frame->buf[0] = av_buffer_create(NULL,
433  0,
435  pSurface,
437  }
438  break;
439  #endif
440  default:
441  {
442  av_log(avctx, AV_LOG_ERROR, "Unsupported memory type : %d\n", pSurface->pVtbl->GetMemoryType(pSurface));
443  return NULL;
444  }
445  }
446  }
447 
448  return frame;
449 }
450 
451 int amf_avframe_to_amfsurface(AVFilterContext *avctx, const AVFrame *frame, AMFSurface** ppSurface)
452 {
453  AMFVariantStruct var = { 0 };
454  AMFFilterContext *ctx = avctx->priv;
455  AMFSurface *surface;
456  AMF_RESULT res;
457  int hw_surface = 0;
458 
459  switch (frame->format) {
460 #if CONFIG_D3D11VA
461  case AV_PIX_FMT_D3D11:
462  {
463  static const GUID AMFTextureArrayIndexGUID = { 0x28115527, 0xe7c3, 0x4b66, { 0x99, 0xd3, 0x4f, 0x2a, 0xe6, 0xb4, 0x7f, 0xaf } };
464  ID3D11Texture2D *texture = (ID3D11Texture2D*)frame->data[0]; // actual texture
465  int index = (intptr_t)frame->data[1]; // index is a slice in texture array is - set to tell AMF which slice to use
466  texture->lpVtbl->SetPrivateData(texture, &AMFTextureArrayIndexGUID, sizeof(index), &index);
467 
468  res = ctx->amf_device_ctx->context->pVtbl->CreateSurfaceFromDX11Native(ctx->amf_device_ctx->context, texture, &surface, NULL); // wrap to AMF surface
469  AMF_RETURN_IF_FALSE(avctx, res == AMF_OK, AVERROR(ENOMEM), "CreateSurfaceFromDX11Native() failed with error %d\n", res);
470  hw_surface = 1;
471  }
472  break;
473 #endif
475  {
476  surface = (AMFSurface*)frame->data[0]; // actual surface
477  surface->pVtbl->Acquire(surface); // returned surface has to be to be ref++
478  hw_surface = 1;
479  }
480  break;
481 
482 #if CONFIG_DXVA2
484  {
485  IDirect3DSurface9 *texture = (IDirect3DSurface9 *)frame->data[3]; // actual texture
486 
487  res = ctx->amf_device_ctx->context->pVtbl->CreateSurfaceFromDX9Native(ctx->amf_device_ctx->context, texture, &surface, NULL); // wrap to AMF surface
488  AMF_RETURN_IF_FALSE(avctx, res == AMF_OK, AVERROR(ENOMEM), "CreateSurfaceFromDX9Native() failed with error %d\n", res);
489  hw_surface = 1;
490  }
491  break;
492 #endif
493  default:
494  {
495  AMF_SURFACE_FORMAT amf_fmt = av_av_to_amf_format(frame->format);
496  res = ctx->amf_device_ctx->context->pVtbl->AllocSurface(ctx->amf_device_ctx->context, AMF_MEMORY_HOST, amf_fmt, frame->width, frame->height, &surface);
497  AMF_RETURN_IF_FALSE(avctx, res == AMF_OK, AVERROR(ENOMEM), "AllocSurface() failed with error %d\n", res);
498  amf_copy_surface(avctx, frame, surface);
499  }
500  break;
501  }
502 
503  // If AMFSurface comes from other AMF components, it may have various
504  // properties already set. These properties can be used by other AMF
505  // components to perform their tasks. In the context of the AMF video
506  // filter, that other component could be an AMFVideoConverter. By default,
507  // AMFVideoConverter will use HDR related properties assigned to a surface
508  // by an AMFDecoder. If frames (surfaces) originated from any other source,
509  // i.e. from hevcdec, assign those properties from avframe; do not
510  // overwrite these properties if they already have a value.
511  res = surface->pVtbl->GetProperty(surface, AMF_VIDEO_DECODER_COLOR_TRANSFER_CHARACTERISTIC, &var);
512 
513  if (res == AMF_NOT_FOUND && frame->color_trc != AVCOL_TRC_UNSPECIFIED)
514  // Note: as of now(Feb 2026), most AV and AMF enums are interchangeable.
515  // TBD: can enums change their values in the future?
516  // For better future-proofing it's better to have dedicated
517  // enum mapping functions.
518  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_DECODER_COLOR_TRANSFER_CHARACTERISTIC, frame->color_trc);
519 
520  res = surface->pVtbl->GetProperty(surface, AMF_VIDEO_DECODER_COLOR_PRIMARIES, &var);
521  if (res == AMF_NOT_FOUND && frame->color_primaries != AVCOL_PRI_UNSPECIFIED)
522  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_DECODER_COLOR_PRIMARIES, frame->color_primaries);
523 
524  res = surface->pVtbl->GetProperty(surface, AMF_VIDEO_DECODER_COLOR_RANGE, &var);
525  if (res == AMF_NOT_FOUND && frame->color_range != AVCOL_RANGE_UNSPECIFIED)
526  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_DECODER_COLOR_RANGE, frame->color_range);
527 
528  // Color range for older drivers
529  if (frame->color_range == AVCOL_RANGE_JPEG) {
530  AMF_ASSIGN_PROPERTY_BOOL(res, surface, AMF_VIDEO_DECODER_FULL_RANGE_COLOR, 1);
531  } else if (frame->color_range != AVCOL_RANGE_UNSPECIFIED)
532  AMF_ASSIGN_PROPERTY_BOOL(res, surface, AMF_VIDEO_DECODER_FULL_RANGE_COLOR, 0);
533 
534  // Color profile for newer drivers
535  res = surface->pVtbl->GetProperty(surface, AMF_VIDEO_DECODER_COLOR_PROFILE, &var);
536  if (res == AMF_NOT_FOUND && frame->color_range != AVCOL_RANGE_UNSPECIFIED && frame->colorspace != AVCOL_SPC_UNSPECIFIED) {
537  amf_int64 color_profile = color_profile = av_amf_get_color_profile(frame->color_range, frame->colorspace);
538 
539  if (color_profile != AMF_VIDEO_CONVERTER_COLOR_PROFILE_UNKNOWN)
540  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_DECODER_COLOR_PROFILE, color_profile);
541  }
542 
543  if (ctx->in_trc == AMF_COLOR_TRANSFER_CHARACTERISTIC_SMPTE2084 && (ctx->master_display || ctx->light_meta)) {
544  AMFBuffer *hdrmeta_buffer = NULL;
545  res = ctx->amf_device_ctx->context->pVtbl->AllocBuffer(ctx->amf_device_ctx->context, AMF_MEMORY_HOST, sizeof(AMFHDRMetadata), &hdrmeta_buffer);
546  if (res == AMF_OK) {
547  AMFHDRMetadata *hdrmeta = (AMFHDRMetadata*)hdrmeta_buffer->pVtbl->GetNative(hdrmeta_buffer);
548 
549  av_amf_display_mastering_meta_to_hdrmeta(ctx->master_display, hdrmeta);
550  av_amf_light_metadata_to_hdrmeta(ctx->light_meta, hdrmeta);
551  AMF_ASSIGN_PROPERTY_INTERFACE(res, surface, AMF_VIDEO_DECODER_HDR_METADATA, hdrmeta_buffer);
552  }
553  } else if (frame->color_trc == AVCOL_TRC_SMPTE2084) {
554  res = surface->pVtbl->GetProperty(surface, AMF_VIDEO_DECODER_HDR_METADATA, &var);
555  if (res == AMF_NOT_FOUND) {
556  AMFBuffer *hdrmeta_buffer = NULL;
557  res = ctx->amf_device_ctx->context->pVtbl->AllocBuffer(ctx->amf_device_ctx->context, AMF_MEMORY_HOST, sizeof(AMFHDRMetadata), &hdrmeta_buffer);
558  if (res == AMF_OK) {
559  AMFHDRMetadata *hdrmeta = (AMFHDRMetadata*)hdrmeta_buffer->pVtbl->GetNative(hdrmeta_buffer);
560 
561  if (av_amf_extract_hdr_metadata(frame, hdrmeta) == 0)
562  AMF_ASSIGN_PROPERTY_INTERFACE(res, surface, AMF_VIDEO_DECODER_HDR_METADATA, hdrmeta_buffer);
563  hdrmeta_buffer->pVtbl->Release(hdrmeta_buffer);
564  }
565  }
566  }
567 
568  if (frame->crop_left || frame->crop_right || frame->crop_top || frame->crop_bottom) {
569  size_t crop_x = frame->crop_left;
570  size_t crop_y = frame->crop_top;
571  size_t crop_w = frame->width - (frame->crop_left + frame->crop_right);
572  size_t crop_h = frame->height - (frame->crop_top + frame->crop_bottom);
573  AVFilterLink *outlink = avctx->outputs[0];
574  if (crop_x || crop_y) {
575  if (crop_w == outlink->w && crop_h == outlink->h) {
576  AMFData *cropped_buffer = NULL;
577  res = surface->pVtbl->Duplicate(surface, surface->pVtbl->GetMemoryType(surface), &cropped_buffer);
578  AMF_RETURN_IF_FALSE(avctx, res == AMF_OK, AVERROR(ENOMEM), "Duplicate() failed with error %d\n", res);
579  surface->pVtbl->Release(surface);
580  surface = (AMFSurface*)cropped_buffer;
581  }
582  else
583  surface->pVtbl->SetCrop(surface, (amf_int32)crop_x, (amf_int32)crop_y, (amf_int32)crop_w, (amf_int32)crop_h);
584  }
585  else
586  surface->pVtbl->SetCrop(surface, (amf_int32)crop_x, (amf_int32)crop_y, (amf_int32)crop_w, (amf_int32)crop_h);
587  }
588  else if (hw_surface) {
589  // input HW surfaces can be vertically aligned by 16; tell AMF the real size
590  surface->pVtbl->SetCrop(surface, 0, 0, frame->width, frame->height);
591  }
592 
593  surface->pVtbl->SetPts(surface, frame->pts);
594  *ppSurface = surface;
595  return 0;
596 }
AVHWDeviceContext::hwctx
void * hwctx
The format-specific data, allocated and freed by libavutil along with this context.
Definition: hwcontext.h:88
AVPixelFormat
AVPixelFormat
Pixel format.
Definition: pixfmt.h:71
amf_avframe_to_amfsurface
int amf_avframe_to_amfsurface(AVFilterContext *avctx, const AVFrame *frame, AMFSurface **ppSurface)
Definition: vf_amf_common.c:451
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
out
static FILE * out
Definition: movenc.c:55
ff_filter_frame
int ff_filter_frame(AVFilterLink *link, AVFrame *frame)
Send a frame of data to the next filter.
Definition: avfilter.c:1067
AVBufferRef::data
uint8_t * data
The data buffer.
Definition: buffer.h:90
AVHWFramesContext::format
enum AVPixelFormat format
The pixel format identifying the underlying HW surface type.
Definition: hwcontext.h:200
inlink
The exact code depends on how similar the blocks are and how related they are to the and needs to apply these operations to the correct inlink or outlink if there are several Macros are available to factor that when no extra processing is inlink
Definition: filter_design.txt:212
av_frame_free
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:64
av_hwframe_ctx_init
int av_hwframe_ctx_init(AVBufferRef *ref)
Finalize the context before use.
Definition: hwcontext.c:337
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:427
AVCOL_RANGE_JPEG
@ AVCOL_RANGE_JPEG
Full range content.
Definition: pixfmt.h:777
av_hwframe_ctx_alloc
AVBufferRef * av_hwframe_ctx_alloc(AVBufferRef *device_ref_in)
Allocate an AVHWFramesContext tied to a given device context.
Definition: hwcontext.c:263
AVCOL_TRC_UNSPECIFIED
@ AVCOL_TRC_UNSPECIFIED
Definition: pixfmt.h:669
ff_make_pixel_format_list
av_warn_unused_result AVFilterFormats * ff_make_pixel_format_list(const enum AVPixelFormat *fmts)
Create a list of supported pixel formats.
data
const char data[16]
Definition: mxf.c:149
AVCOL_SPC_RGB
@ AVCOL_SPC_RGB
order of coefficients is actually GBR, also IEC 61966-2-1 (sRGB), YZX and ST 428-1
Definition: pixfmt.h:701
ff_scale_eval_dimensions
int ff_scale_eval_dimensions(void *log_ctx, const char *w_expr, const char *h_expr, AVFilterLink *inlink, AVFilterLink *outlink, int *ret_w, int *ret_h)
Parse and evaluate string expressions for width and height.
Definition: scale_eval.c:57
AVFilterContext::hw_device_ctx
AVBufferRef * hw_device_ctx
For filters which will create hardware frames, sets the device the filter should create them in.
Definition: avfilter.h:356
amf_setup_input_output_formats
int amf_setup_input_output_formats(AVFilterContext *avctx, const enum AVPixelFormat *input_pix_fmts, const enum AVPixelFormat *output_pix_fmts)
Definition: vf_amf_common.c:180
av_buffer_ref
AVBufferRef * av_buffer_ref(const AVBufferRef *buf)
Create a new reference to an AVBuffer.
Definition: buffer.c:103
AMF_RETURN_IF_FALSE
#define AMF_RETURN_IF_FALSE(avctx, exp, ret_value,...)
Error handling helper.
Definition: amfenc.h:169
AVERROR_UNKNOWN
#define AVERROR_UNKNOWN
Unknown error, typically from an external library.
Definition: error.h:73
AVHWFramesContext::width
int width
The allocated dimensions of the frames in this pool.
Definition: hwcontext.h:220
AV_PIX_FMT_AMF_SURFACE
@ AV_PIX_FMT_AMF_SURFACE
HW acceleration through AMF.
Definition: pixfmt.h:477
av_amf_display_mastering_meta_to_hdrmeta
int av_amf_display_mastering_meta_to_hdrmeta(const AVMasteringDisplayMetadata *display_meta, AMFHDRMetadata *hdrmeta)
Definition: hwcontext_amf.c:186
AVFilterFormats
A list of supported formats for one end of a filter link.
Definition: formats.h:64
formats.h
amf_free_amfsurface
void amf_free_amfsurface(void *opaque, uint8_t *data)
Definition: vf_amf_common.c:375
AVFilterContext::priv
void * priv
private data for use by the filter
Definition: avfilter.h:289
fail
#define fail()
Definition: checkasm.h:220
AV_HWDEVICE_TYPE_D3D11VA
@ AV_HWDEVICE_TYPE_D3D11VA
Definition: hwcontext.h:35
av_av_to_amf_format
enum AMF_SURFACE_FORMAT av_av_to_amf_format(enum AVPixelFormat fmt)
Definition: hwcontext_amf.c:133
AVHWDeviceContext
This struct aggregates all the (hardware/vendor-specific) "high-level" state, i.e.
Definition: hwcontext.h:63
av_frame_alloc
AVFrame * av_frame_alloc(void)
Allocate an AVFrame and set its fields to default values.
Definition: frame.c:52
avassert.h
AV_LOG_ERROR
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:210
FF_ARRAY_ELEMS
#define FF_ARRAY_ELEMS(a)
Definition: sinewin_tablegen.c:29
av_amf_get_color_profile
enum AMF_VIDEO_CONVERTER_COLOR_PROFILE_ENUM av_amf_get_color_profile(enum AVColorRange color_range, enum AVColorSpace color_space)
Definition: hwcontext_amf.c:155
AVHWFramesContext::height
int height
Definition: hwcontext.h:220
AV_PIX_FMT_DXVA2_VLD
@ AV_PIX_FMT_DXVA2_VLD
HW decoding through DXVA2, Picture.data[3] contains a LPDIRECT3DSURFACE9 pointer.
Definition: pixfmt.h:134
AV_BUFFER_FLAG_READONLY
#define AV_BUFFER_FLAG_READONLY
Always treat the buffer as read-only, even when it has only one reference.
Definition: buffer.h:114
AVCOL_SPC_SMPTE170M
@ AVCOL_SPC_SMPTE170M
also ITU-R BT601-6 525 / ITU-R BT1358 525 / ITU-R BT1700 NTSC / functionally identical to above
Definition: pixfmt.h:707
ff_formats_ref
int ff_formats_ref(AVFilterFormats *f, AVFilterFormats **ref)
Add *ref as a new reference to formats.
Definition: formats.c:755
vf_amf_common.h
av_assert0
#define av_assert0(cond)
assert() equivalent, that is always enabled.
Definition: avassert.h:42
AV_HWDEVICE_TYPE_AMF
@ AV_HWDEVICE_TYPE_AMF
Definition: hwcontext.h:41
AMFFilterContext
Definition: vf_amf_common.h:29
ctx
static AVFormatContext * ctx
Definition: movenc.c:49
AMF_GOTO_FAIL_IF_FALSE
#define AMF_GOTO_FAIL_IF_FALSE(avctx, exp, ret_value,...)
Definition: hwcontext_amf_internal.h:34
hwcontext_amf.h
av_hwdevice_get_type_name
const char * av_hwdevice_get_type_name(enum AVHWDeviceType type)
Get the string name of an AVHWDeviceType.
Definition: hwcontext.c:120
AVCOL_PRI_UNSPECIFIED
@ AVCOL_PRI_UNSPECIFIED
Definition: pixfmt.h:639
if
if(ret)
Definition: filter_design.txt:179
NULL
#define NULL
Definition: coverity.c:32
AVHWFramesContext::sw_format
enum AVPixelFormat sw_format
The pixel format identifying the actual data layout of the hardware frames.
Definition: hwcontext.h:213
av_frame_copy_props
int av_frame_copy_props(AVFrame *dst, const AVFrame *src)
Copy only "metadata" fields from src to dst.
Definition: frame.c:599
av_buffer_unref
void av_buffer_unref(AVBufferRef **buf)
Free a given reference and automatically free the buffer if there are no more references to it.
Definition: buffer.c:139
AV_HWDEVICE_TYPE_DXVA2
@ AV_HWDEVICE_TYPE_DXVA2
Definition: hwcontext.h:32
AVRational
Rational number (pair of numerator and denominator).
Definition: rational.h:58
AVHWFramesContext::device_ref
AVBufferRef * device_ref
A reference to the parent AVHWDeviceContext.
Definition: hwcontext.h:129
AVFilterContext::inputs
AVFilterLink ** inputs
array of pointers to input links
Definition: avfilter.h:282
planes
static const struct @562 planes[]
double
double
Definition: af_crystalizer.c:132
avfilter_internal.h
AVCOL_RANGE_UNSPECIFIED
@ AVCOL_RANGE_UNSPECIFIED
Definition: pixfmt.h:743
index
int index
Definition: gxfenc.c:90
ff_filter_link
static FilterLink * ff_filter_link(AVFilterLink *link)
Definition: filters.h:199
av_buffer_create
AVBufferRef * av_buffer_create(uint8_t *data, size_t size, void(*free)(void *opaque, uint8_t *data), void *opaque, int flags)
Create an AVBuffer from an existing array.
Definition: buffer.c:55
AVCOL_TRC_SMPTE2084
@ AVCOL_TRC_SMPTE2084
SMPTE ST 2084 for 10-, 12-, 14- and 16-bit systems.
Definition: pixfmt.h:683
i
#define i(width, name, range_min, range_max)
Definition: cbs_h264.c:63
av_err2str
#define av_err2str(errnum)
Convenience macro, the return value should be used only directly in function arguments but never stan...
Definition: error.h:122
av_amf_extract_hdr_metadata
int av_amf_extract_hdr_metadata(const AVFrame *frame, AMFHDRMetadata *hdrmeta)
Definition: hwcontext_amf.c:233
scale_eval.h
amf_copy_surface
int amf_copy_surface(AVFilterContext *avctx, const AVFrame *frame, AMFSurface *surface)
Definition: vf_amf_common.c:244
AV_PIX_FMT_D3D11
@ AV_PIX_FMT_D3D11
Hardware surfaces for Direct3D11.
Definition: pixfmt.h:336
amf_filter_filter_frame
int amf_filter_filter_frame(AVFilterLink *inlink, AVFrame *in)
Definition: vf_amf_common.c:80
amf_filter_uninit
void amf_filter_uninit(AVFilterContext *avctx)
Definition: vf_amf_common.c:58
av_hwdevice_ctx_create_derived
int av_hwdevice_ctx_create_derived(AVBufferRef **dst_ref_ptr, enum AVHWDeviceType type, AVBufferRef *src_ref, int flags)
Create a new device of the specified type from an existing device.
Definition: hwcontext.c:718
AVCOL_SPC_BT2020_NCL
@ AVCOL_SPC_BT2020_NCL
ITU-R BT2020 non-constant luminance system.
Definition: pixfmt.h:711
AVColorSpace
AVColorSpace
YUV colorspace type.
Definition: pixfmt.h:700
av_frame_unref
void av_frame_unref(AVFrame *frame)
Unreference all the buffers referenced by frame and reset the frame fields.
Definition: frame.c:496
AVCOL_SPC_UNSPECIFIED
@ AVCOL_SPC_UNSPECIFIED
Definition: pixfmt.h:703
amf_filter_init
int amf_filter_init(AVFilterContext *avctx)
Definition: vf_amf_common.c:41
AVCOL_RANGE_MPEG
@ AVCOL_RANGE_MPEG
Narrow or limited range content.
Definition: pixfmt.h:760
AVHWFramesContext
This struct describes a set or pool of "hardware" frames (i.e.
Definition: hwcontext.h:118
ret
ret
Definition: filter_design.txt:187
AVHWDeviceContext::type
enum AVHWDeviceType type
This field identifies the underlying API used for hardware access.
Definition: hwcontext.h:75
frame
these buffered frames must be flushed immediately if a new input produces new the filter must not call request_frame to get more It must just process the frame or queue it The task of requesting more frames is left to the filter s request_frame method or the application If a filter has several the filter must be ready for frames arriving randomly on any input any filter with several inputs will most likely require some kind of queuing mechanism It is perfectly acceptable to have a limited queue and to drop frames when the inputs are too unbalanced request_frame For filters that do not use the this method is called when a frame is wanted on an output For a it should directly call filter_frame on the corresponding output For a if there are queued frames already one of these frames should be pushed If the filter should request a frame on one of its repeatedly until at least one frame has been pushed Return or at least make progress towards producing a frame
Definition: filter_design.txt:265
av_hwdevice_ctx_create
int av_hwdevice_ctx_create(AVBufferRef **pdevice_ref, enum AVHWDeviceType type, const char *device, AVDictionary *opts, int flags)
Open a device of the specified type and create an AVHWDeviceContext for it.
Definition: hwcontext.c:615
av_get_pix_fmt
enum AVPixelFormat av_get_pix_fmt(const char *name)
Return the pixel format corresponding to name.
Definition: pixdesc.c:3388
hwcontext_amf_internal.h
AV_PIX_FMT_NONE
@ AV_PIX_FMT_NONE
Definition: pixfmt.h:72
avfilter.h
amf_init_filter_config
int amf_init_filter_config(AVFilterLink *outlink, enum AVPixelFormat *in_format)
Definition: vf_amf_common.c:268
av_mul_q
AVRational av_mul_q(AVRational b, AVRational c)
Multiply two rationals.
Definition: rational.c:80
AVFilterContext
An instance of a filter.
Definition: avfilter.h:274
mem.h
AVFilterFormatsConfig::formats
AVFilterFormats * formats
List of supported formats (pixel or sample).
Definition: avfilter.h:126
av_freep
#define av_freep(p)
Definition: tableprint_vlc.h:35
amf_amfsurface_to_avframe
AVFrame * amf_amfsurface_to_avframe(AVFilterContext *avctx, AMFSurface *pSurface)
Definition: vf_amf_common.c:381
imgutils.h
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:27
av_amf_light_metadata_to_hdrmeta
int av_amf_light_metadata_to_hdrmeta(const AVContentLightMetadata *light_meta, AMFHDRMetadata *hdrmeta)
Definition: hwcontext_amf.c:222
av_image_copy
void av_image_copy(uint8_t *const dst_data[4], const int dst_linesizes[4], const uint8_t *const src_data[4], const int src_linesizes[4], enum AVPixelFormat pix_fmt, int width, int height)
Copy image in src_data to dst_data.
Definition: imgutils.c:422
AVCOL_SPC_BT709
@ AVCOL_SPC_BT709
also ITU-R BT1361 / IEC 61966-2-4 xvYCC709 / derived in SMPTE RP 177 Annex B
Definition: pixfmt.h:702
AVColorRange
AVColorRange
Visual content value range.
Definition: pixfmt.h:742
av_hwframe_get_buffer
int av_hwframe_get_buffer(AVBufferRef *hwframe_ref, AVFrame *frame, int flags)
Allocate a new frame attached to the given AVHWFramesContext.
Definition: hwcontext.c:506
ff_scale_adjust_dimensions
int ff_scale_adjust_dimensions(AVFilterLink *inlink, int *ret_w, int *ret_h, int force_original_aspect_ratio, int force_divisible_by, double w_adj)
Transform evaluated width and height obtained from ff_scale_eval_dimensions into actual target width ...
Definition: scale_eval.c:122
av_get_pix_fmt_name
const char * av_get_pix_fmt_name(enum AVPixelFormat pix_fmt)
Return the short name for a pixel format, NULL in case pix_fmt is unknown.
Definition: pixdesc.c:3376
AVFilterContext::outputs
AVFilterLink ** outputs
array of pointers to output links
Definition: avfilter.h:286