vf_amf_common.c
1 /*
2  * This file is part of FFmpeg.
3  *
4  * FFmpeg is free software; you can redistribute it and/or
5  * modify it under the terms of the GNU Lesser General Public
6  * License as published by the Free Software Foundation; either
7  * version 2.1 of the License, or (at your option) any later version.
8  *
9  * FFmpeg is distributed in the hope that it will be useful,
10  * but WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12  * Lesser General Public License for more details.
13  *
14  * You should have received a copy of the GNU Lesser General Public
15  * License along with FFmpeg; if not, write to the Free Software
16  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17  */
18 
19 #include "vf_amf_common.h"
20 
21 #include "libavutil/avassert.h"
22 #include "avfilter.h"
23 #include "avfilter_internal.h"
24 #include "formats.h"
25 #include "libavutil/mem.h"
26 #include "libavutil/imgutils.h"
27 
28 #include "libavutil/hwcontext_amf.h"
29 #include "libavutil/hwcontext_amf_internal.h"
30 #include "AMF/components/ColorSpace.h"
31 #include "scale_eval.h"
32 
33 #if CONFIG_DXVA2
34 #include <d3d9.h>
35 #endif
36 
37 #if CONFIG_D3D11VA
38 #include <d3d11.h>
39 #endif
40 
42 {
43  AMFFilterContext *ctx = avctx->priv;
44 
45  if (!strcmp(ctx->format_str, "same")) {
46  ctx->format = AV_PIX_FMT_NONE;
47  } else {
48  ctx->format = av_get_pix_fmt(ctx->format_str);
49  if (ctx->format == AV_PIX_FMT_NONE) {
50  av_log(avctx, AV_LOG_ERROR, "Unrecognized pixel format: %s\n", ctx->format_str);
51  return AVERROR(EINVAL);
52  }
53  }
54 
55  return 0;
56 }
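
amf_filter_init() only parses the "format" option string: the literal value "same" keeps the input software format, anything else must name a valid pixel format. As a rough illustration, a filter built on AMFFilterContext could expose that option as sketched below; the option table, flags and names are hypothetical, not part of this file, and assume libavutil/opt.h is included.

static const AVOption example_amf_options[] = {
    { "format", "Output pixel format ('same' keeps the input software format)",
      offsetof(AMFFilterContext, format_str), AV_OPT_TYPE_STRING, { .str = "same" },
      0, 0, AV_OPT_FLAG_FILTERING_PARAM | AV_OPT_FLAG_VIDEO_PARAM },
    { NULL }
};
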
57 
58 void amf_filter_uninit(AVFilterContext *avctx)
59 {
60  AMFFilterContext *ctx = avctx->priv;
61 
62  if (ctx->component) {
63  ctx->component->pVtbl->Terminate(ctx->component);
64  ctx->component->pVtbl->Release(ctx->component);
65  ctx->component = NULL;
66  }
67 
68  av_buffer_unref(&ctx->amf_device_ref);
69  av_buffer_unref(&ctx->hwdevice_ref);
70  av_buffer_unref(&ctx->hwframes_in_ref);
71  av_buffer_unref(&ctx->hwframes_out_ref);
72 }
73 
74 int amf_filter_filter_frame(AVFilterLink *inlink, AVFrame *in)
75 {
76  AVFilterContext *avctx = inlink->dst;
77  AMFFilterContext *ctx = avctx->priv;
78  AVFilterLink *outlink = avctx->outputs[0];
79  AMF_RESULT res;
80  AMFSurface *surface_in;
81  AMFSurface *surface_out;
82  AMFData *data_out = NULL;
83  enum AVColorSpace out_colorspace;
84  enum AVColorRange out_color_range;
85 
86  AVFrame *out = NULL;
87  int ret = 0;
88 
89  if (!ctx->component)
90  return AVERROR(EINVAL);
91 
92  ret = amf_avframe_to_amfsurface(avctx, in, &surface_in);
93  if (ret < 0)
94  goto fail;
95 
96  res = ctx->component->pVtbl->SubmitInput(ctx->component, (AMFData*)surface_in);
97  surface_in->pVtbl->Release(surface_in); // release surface after use
98  AMF_GOTO_FAIL_IF_FALSE(avctx, res == AMF_OK, AVERROR_UNKNOWN, "SubmitInput() failed with error %d\n", res);
99  res = ctx->component->pVtbl->QueryOutput(ctx->component, &data_out);
100  AMF_GOTO_FAIL_IF_FALSE(avctx, res == AMF_OK, AVERROR_UNKNOWN, "QueryOutput() failed with error %d\n", res);
101 
102  if (data_out) {
103  AMFGuid guid = IID_AMFSurface();
104  data_out->pVtbl->QueryInterface(data_out, &guid, (void**)&surface_out); // query for the AMFSurface interface
105  data_out->pVtbl->Release(data_out);
106  }
107 
108  out = amf_amfsurface_to_avframe(avctx, surface_out);
109 
110  ret = av_frame_copy_props(out, in);
111  av_frame_unref(in);
112 
113  out_colorspace = AVCOL_SPC_UNSPECIFIED;
114 
115  if (ctx->color_profile != AMF_VIDEO_CONVERTER_COLOR_PROFILE_UNKNOWN) {
116  switch(ctx->color_profile) {
117  case AMF_VIDEO_CONVERTER_COLOR_PROFILE_601:
118  out_colorspace = AVCOL_SPC_SMPTE170M;
119  break;
120  case AMF_VIDEO_CONVERTER_COLOR_PROFILE_709:
121  out_colorspace = AVCOL_SPC_BT709;
122  break;
123  case AMF_VIDEO_CONVERTER_COLOR_PROFILE_2020:
124  out_colorspace = AVCOL_SPC_BT2020_NCL;
125  break;
126  case AMF_VIDEO_CONVERTER_COLOR_PROFILE_JPEG:
127  out_colorspace = AVCOL_SPC_RGB;
128  break;
129  default:
130  out_colorspace = AVCOL_SPC_UNSPECIFIED;
131  break;
132  }
133  out->colorspace = out_colorspace;
134  }
135 
136  out_color_range = AVCOL_RANGE_UNSPECIFIED;
137  if (ctx->color_range == AMF_COLOR_RANGE_FULL)
138  out_color_range = AVCOL_RANGE_JPEG;
139  else if (ctx->color_range == AMF_COLOR_RANGE_STUDIO)
140  out_color_range = AVCOL_RANGE_MPEG;
141 
142  if (ctx->color_range != AMF_COLOR_RANGE_UNDEFINED)
143  out->color_range = out_color_range;
144 
145  if (ctx->primaries != AMF_COLOR_PRIMARIES_UNDEFINED)
146  out->color_primaries = ctx->primaries;
147 
148  if (ctx->trc != AMF_COLOR_TRANSFER_CHARACTERISTIC_UNDEFINED)
149  out->color_trc = ctx->trc;
150 
151 
152  if (ret < 0)
153  goto fail;
154 
155  out->hw_frames_ctx = av_buffer_ref(ctx->hwframes_out_ref);
156  if (!out->hw_frames_ctx) {
157  ret = AVERROR(ENOMEM);
158  goto fail;
159  }
160 
161  if (inlink->sample_aspect_ratio.num) {
162  outlink->sample_aspect_ratio = av_mul_q((AVRational){outlink->h * inlink->w, outlink->w * inlink->h}, inlink->sample_aspect_ratio);
163  } else
164  outlink->sample_aspect_ratio = inlink->sample_aspect_ratio;
165 
166  av_frame_free(&in);
167  return ff_filter_frame(outlink, out);
168 fail:
169  av_frame_free(&in);
170  av_frame_free(&out);
171  return ret;
172 }
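
amf_filter_filter_frame() is written to sit directly on a filter's input pad. A minimal, hypothetical pad table wiring it up might look like the sketch below; the pad name is illustrative, and a real filter would also set .config_props on its output pad (see the config_output sketch further down).

static const AVFilterPad example_amf_inputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .filter_frame = amf_filter_filter_frame,
    },
};
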
173 
174 
175 
176 int amf_setup_input_output_formats(AVFilterContext *avctx,
177  const enum AVPixelFormat *input_pix_fmts,
178  const enum AVPixelFormat *output_pix_fmts)
179 {
180  int err;
181  AVFilterFormats *input_formats;
182  AVFilterFormats *output_formats;
183 
184  // If hw_device_ctx is set to DXVA2, reorder the pixel formats so that DXVA2 is chosen by default.
185  // The order is ignored if hw_frames_ctx is not NULL at the config_output stage.
186  if (avctx->hw_device_ctx) {
187  AVHWDeviceContext *device_ctx = (AVHWDeviceContext*)avctx->hw_device_ctx->data;
188 
189  switch (device_ctx->type) {
190 #if CONFIG_D3D11VA
191  case AV_HWDEVICE_TYPE_D3D11VA:
192  {
193  static const enum AVPixelFormat output_pix_fmts_d3d11[] = {
194  AV_PIX_FMT_D3D11,
195  AV_PIX_FMT_NONE,
196  };
197  output_pix_fmts = output_pix_fmts_d3d11;
198  }
199  break;
200  #endif
201 #if CONFIG_DXVA2
202  case AV_HWDEVICE_TYPE_DXVA2:
203  {
204  static const enum AVPixelFormat output_pix_fmts_dxva2[] = {
205  AV_PIX_FMT_DXVA2_VLD,
206  AV_PIX_FMT_NONE,
207  };
208  output_pix_fmts = output_pix_fmts_dxva2;
209  }
210  break;
211 #endif
212  case AV_HWDEVICE_TYPE_AMF:
213  break;
214  default:
215  {
216  av_log(avctx, AV_LOG_ERROR, "Unsupported device : %s\n", av_hwdevice_get_type_name(device_ctx->type));
217  return AVERROR(EINVAL);
218  }
219  break;
220  }
221  }
222 
223  input_formats = ff_make_format_list(input_pix_fmts);
224  if (!input_formats) {
225  return AVERROR(ENOMEM);
226  }
227  output_formats = ff_make_format_list(output_pix_fmts);
228  if (!output_formats) {
229  return AVERROR(ENOMEM);
230  }
231 
232  if ((err = ff_formats_ref(input_formats, &avctx->inputs[0]->outcfg.formats)) < 0)
233  return err;
234 
235  if ((err = ff_formats_ref(output_formats, &avctx->outputs[0]->incfg.formats)) < 0)
236  return err;
237  return 0;
238 }
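
amf_setup_input_output_formats() expects AV_PIX_FMT_NONE-terminated lists and registers them on the filter's single input and output link. A hypothetical query-formats callback using it could look like the sketch below; the format lists and function names are illustrative, not taken from this file.

static const enum AVPixelFormat example_input_fmts[] = {
    AV_PIX_FMT_NV12, AV_PIX_FMT_YUV420P, AV_PIX_FMT_AMF_SURFACE, AV_PIX_FMT_NONE,
};
static const enum AVPixelFormat example_output_fmts[] = {
    AV_PIX_FMT_AMF_SURFACE, AV_PIX_FMT_NV12, AV_PIX_FMT_NONE,
};

static int example_amf_query_formats(AVFilterContext *avctx)
{
    return amf_setup_input_output_formats(avctx, example_input_fmts, example_output_fmts);
}
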
239 
240 int amf_copy_surface(AVFilterContext *avctx, const AVFrame *frame,
241  AMFSurface* surface)
242 {
243  AMFPlane *plane;
244  uint8_t *dst_data[4];
245  int dst_linesize[4];
246  int planes;
247  int i;
248 
249  planes = (int)surface->pVtbl->GetPlanesCount(surface);
250  av_assert0(planes < FF_ARRAY_ELEMS(dst_data));
251 
252  for (i = 0; i < planes; i++) {
253  plane = surface->pVtbl->GetPlaneAt(surface, i);
254  dst_data[i] = plane->pVtbl->GetNative(plane);
255  dst_linesize[i] = plane->pVtbl->GetHPitch(plane);
256  }
257  av_image_copy(dst_data, dst_linesize,
258  (const uint8_t**)frame->data, frame->linesize, frame->format,
259  frame->width, frame->height);
260 
261  return 0;
262 }
263 
264 int amf_init_filter_config(AVFilterLink *outlink, enum AVPixelFormat *in_format)
265 {
266  int err;
267  AMF_RESULT res;
268  AVFilterContext *avctx = outlink->src;
269  AVFilterLink *inlink = avctx->inputs[0];
270  AMFFilterContext *ctx = avctx->priv;
271  AVHWFramesContext *hwframes_out;
272  AVHWDeviceContext *hwdev_ctx;
273  enum AVPixelFormat in_sw_format = inlink->format;
274  enum AVPixelFormat out_sw_format = ctx->format;
275  FilterLink *inl = ff_filter_link(inlink);
276  FilterLink *outl = ff_filter_link(outlink);
277 
278  if ((err = ff_scale_eval_dimensions(avctx,
279  ctx->w_expr, ctx->h_expr,
280  inlink, outlink,
281  &ctx->width, &ctx->height)) < 0)
282  return err;
283 
284  ff_scale_adjust_dimensions(inlink, &ctx->width, &ctx->height,
285  ctx->force_original_aspect_ratio, ctx->force_divisible_by);
286 
287  av_buffer_unref(&ctx->amf_device_ref);
288  av_buffer_unref(&ctx->hwframes_in_ref);
289  av_buffer_unref(&ctx->hwframes_out_ref);
290  ctx->local_context = 0;
291  if (inl->hw_frames_ctx) {
292  AVHWFramesContext *frames_ctx = (AVHWFramesContext*)inl->hw_frames_ctx->data;
293  if (av_av_to_amf_format(frames_ctx->sw_format) == AMF_SURFACE_UNKNOWN) {
294  av_log(avctx, AV_LOG_ERROR, "Format of input frames context (%s) is not supported by AMF.\n",
295  av_get_pix_fmt_name(frames_ctx->sw_format));
296  return AVERROR(EINVAL);
297  }
298 
299  err = av_hwdevice_ctx_create_derived(&ctx->amf_device_ref, AV_HWDEVICE_TYPE_AMF, frames_ctx->device_ref, 0);
300  if (err < 0)
301  return err;
302 
303  ctx->hwframes_in_ref = av_buffer_ref(inl->hw_frames_ctx);
304  if (!ctx->hwframes_in_ref)
305  return AVERROR(ENOMEM);
306 
307  in_sw_format = frames_ctx->sw_format;
308  } else if (avctx->hw_device_ctx) {
309  err = av_hwdevice_ctx_create_derived(&ctx->amf_device_ref, AV_HWDEVICE_TYPE_AMF, avctx->hw_device_ctx, 0);
310  if (err < 0)
311  return err;
312  ctx->hwdevice_ref = av_buffer_ref(avctx->hw_device_ctx);
313  if (!ctx->hwdevice_ref)
314  return AVERROR(ENOMEM);
315  } else {
316  res = av_hwdevice_ctx_create(&ctx->amf_device_ref, AV_HWDEVICE_TYPE_AMF, NULL, NULL, 0);
317  AMF_RETURN_IF_FALSE(avctx, res == 0, res, "Failed to create hardware device context (AMF) : %s\n", av_err2str(res));
318 
319  }
320  if(out_sw_format == AV_PIX_FMT_NONE){
321  if(outlink->format == AV_PIX_FMT_AMF_SURFACE)
322  out_sw_format = in_sw_format;
323  else
324  out_sw_format = outlink->format;
325  }
326  ctx->hwframes_out_ref = av_hwframe_ctx_alloc(ctx->amf_device_ref);
327  if (!ctx->hwframes_out_ref)
328  return AVERROR(ENOMEM);
329  hwframes_out = (AVHWFramesContext*)ctx->hwframes_out_ref->data;
330  hwdev_ctx = (AVHWDeviceContext*)ctx->amf_device_ref->data;
331  if (hwdev_ctx->type == AV_HWDEVICE_TYPE_AMF)
332  {
333  ctx->amf_device_ctx = hwdev_ctx->hwctx;
334  }
335  hwframes_out->format = AV_PIX_FMT_AMF_SURFACE;
336  hwframes_out->sw_format = out_sw_format;
337 
338  if (inlink->format == AV_PIX_FMT_AMF_SURFACE) {
339  *in_format = in_sw_format;
340  } else {
341  *in_format = inlink->format;
342  }
343  outlink->w = ctx->width;
344  outlink->h = ctx->height;
345 
346  hwframes_out->width = outlink->w;
347  hwframes_out->height = outlink->h;
348 
349  err = av_hwframe_ctx_init(ctx->hwframes_out_ref);
350  if (err < 0)
351  return err;
352 
353  outl->hw_frames_ctx = av_buffer_ref(ctx->hwframes_out_ref);
354  if (!outl->hw_frames_ctx) {
355  return AVERROR(ENOMEM);
356  }
357  return 0;
358 }
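
amf_init_filter_config() does the link-level work: it evaluates the size expressions, derives or creates an AMF device, and publishes an output AVHWFramesContext. A hypothetical output-pad config_props callback built on it might look like the sketch below; the function name and log line are illustrative, not part of this file.

static int example_amf_config_output(AVFilterLink *outlink)
{
    AVFilterContext *avctx = outlink->src;
    enum AVPixelFormat in_format;
    int err = amf_init_filter_config(outlink, &in_format);
    if (err < 0)
        return err;
    av_log(avctx, AV_LOG_VERBOSE, "AMF filter output: %dx%d, input format %s\n",
           outlink->w, outlink->h, av_get_pix_fmt_name(in_format));
    /* a real filter would create and initialize its AMF component here */
    return 0;
}
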
359 
360 void amf_free_amfsurface(void *opaque, uint8_t *data)
361 {
362  AMFSurface *surface = (AMFSurface*)data;
363  surface->pVtbl->Release(surface);
364 }
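
amf_free_amfsurface() is the AVBuffer free callback used to tie an AMFSurface's lifetime to a frame reference: in the AV_PIX_FMT_AMF_SURFACE path below, the surface pointer travels in the buffer's data field and is released when the last reference is dropped. A minimal sketch of that pairing, using a hypothetical helper that mirrors the frame->buf[1] call in amf_amfsurface_to_avframe():

static AVBufferRef *example_wrap_amf_surface(AMFSurface *surface)
{
    /* the surface pointer is carried as the buffer data; opaque is unused */
    return av_buffer_create((uint8_t *)surface, sizeof(AMFSurface),
                            amf_free_amfsurface, NULL, AV_BUFFER_FLAG_READONLY);
}
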
365 
366 AVFrame *amf_amfsurface_to_avframe(AVFilterContext *avctx, AMFSurface* pSurface)
367 {
368  AVFrame *frame = av_frame_alloc();
369  AMFFilterContext *ctx = avctx->priv;
370 
371  if (!frame)
372  return NULL;
373 
374  if (ctx->hwframes_out_ref) {
375  AVHWFramesContext *hwframes_out = (AVHWFramesContext *)ctx->hwframes_out_ref->data;
376  if (hwframes_out->format == AV_PIX_FMT_AMF_SURFACE) {
377  int ret = av_hwframe_get_buffer(ctx->hwframes_out_ref, frame, 0);
378  if (ret < 0) {
379  av_log(avctx, AV_LOG_ERROR, "Get hw frame failed.\n");
380  av_frame_free(&frame);
381  return NULL;
382  }
383  frame->data[0] = (uint8_t *)pSurface;
384  frame->buf[1] = av_buffer_create((uint8_t *)pSurface, sizeof(AMFSurface),
385  amf_free_amfsurface,
386  (void*)avctx,
387  AV_BUFFER_FLAG_READONLY);
388  } else { // FIXME: add processing of other hw formats
389  av_log(ctx, AV_LOG_ERROR, "Unknown pixel format\n");
390  return NULL;
391  }
392  } else {
393 
394  switch (pSurface->pVtbl->GetMemoryType(pSurface))
395  {
396  #if CONFIG_D3D11VA
397  case AMF_MEMORY_DX11:
398  {
399  AMFPlane *plane0 = pSurface->pVtbl->GetPlaneAt(pSurface, 0);
400  frame->data[0] = plane0->pVtbl->GetNative(plane0);
401  frame->data[1] = (uint8_t*)(intptr_t)0;
402 
403  frame->buf[0] = av_buffer_create(NULL,
404  0,
405  amf_free_amfsurface,
406  pSurface,
407  AV_BUFFER_FLAG_READONLY);
408  }
409  break;
410  #endif
411  #if CONFIG_DXVA2
412  case AMF_MEMORY_DX9:
413  {
414  AMFPlane *plane0 = pSurface->pVtbl->GetPlaneAt(pSurface, 0);
415  frame->data[3] = plane0->pVtbl->GetNative(plane0);
416 
417  frame->buf[0] = av_buffer_create(NULL,
418  0,
419  amf_free_amfsurface,
420  pSurface,
421  AV_BUFFER_FLAG_READONLY);
422  }
423  break;
424  #endif
425  default:
426  {
427  av_log(avctx, AV_LOG_ERROR, "Unsupported memory type : %d\n", pSurface->pVtbl->GetMemoryType(pSurface));
428  return NULL;
429  }
430  }
431  }
432 
433  return frame;
434 }
435 
436 int amf_avframe_to_amfsurface(AVFilterContext *avctx, const AVFrame *frame, AMFSurface** ppSurface)
437 {
438  AMFFilterContext *ctx = avctx->priv;
439  AMFSurface *surface;
440  AMF_RESULT res;
441  int hw_surface = 0;
442 
443  switch (frame->format) {
444 #if CONFIG_D3D11VA
445  case AV_PIX_FMT_D3D11:
446  {
447  static const GUID AMFTextureArrayIndexGUID = { 0x28115527, 0xe7c3, 0x4b66, { 0x99, 0xd3, 0x4f, 0x2a, 0xe6, 0xb4, 0x7f, 0xaf } };
448  ID3D11Texture2D *texture = (ID3D11Texture2D*)frame->data[0]; // actual texture
449  int index = (intptr_t)frame->data[1]; // the index of the slice in the texture array; set it to tell AMF which slice to use
450  texture->lpVtbl->SetPrivateData(texture, &AMFTextureArrayIndexGUID, sizeof(index), &index);
451 
452  res = ctx->amf_device_ctx->context->pVtbl->CreateSurfaceFromDX11Native(ctx->amf_device_ctx->context, texture, &surface, NULL); // wrap to AMF surface
453  AMF_RETURN_IF_FALSE(avctx, res == AMF_OK, AVERROR(ENOMEM), "CreateSurfaceFromDX11Native() failed with error %d\n", res);
454  hw_surface = 1;
455  }
456  break;
457 #endif
458  case AV_PIX_FMT_AMF_SURFACE:
459  {
460  surface = (AMFSurface*)frame->data[0]; // actual surface
461  surface->pVtbl->Acquire(surface); // the returned surface's reference count has to be incremented
462  hw_surface = 1;
463  }
464  break;
465 
466 #if CONFIG_DXVA2
467  case AV_PIX_FMT_DXVA2_VLD:
468  {
469  IDirect3DSurface9 *texture = (IDirect3DSurface9 *)frame->data[3]; // actual texture
470 
471  res = ctx->amf_device_ctx->context->pVtbl->CreateSurfaceFromDX9Native(ctx->amf_device_ctx->context, texture, &surface, NULL); // wrap to AMF surface
472  AMF_RETURN_IF_FALSE(avctx, res == AMF_OK, AVERROR(ENOMEM), "CreateSurfaceFromDX9Native() failed with error %d\n", res);
473  hw_surface = 1;
474  }
475  break;
476 #endif
477  default:
478  {
479  AMF_SURFACE_FORMAT amf_fmt = av_av_to_amf_format(frame->format);
480  res = ctx->amf_device_ctx->context->pVtbl->AllocSurface(ctx->amf_device_ctx->context, AMF_MEMORY_HOST, amf_fmt, frame->width, frame->height, &surface);
481  AMF_RETURN_IF_FALSE(avctx, res == AMF_OK, AVERROR(ENOMEM), "AllocSurface() failed with error %d\n", res);
482  amf_copy_surface(avctx, frame, surface);
483  }
484  break;
485  }
486 
487  if (frame->crop_left || frame->crop_right || frame->crop_top || frame->crop_bottom) {
488  size_t crop_x = frame->crop_left;
489  size_t crop_y = frame->crop_top;
490  size_t crop_w = frame->width - (frame->crop_left + frame->crop_right);
491  size_t crop_h = frame->height - (frame->crop_top + frame->crop_bottom);
492  AVFilterLink *outlink = avctx->outputs[0];
493  if (crop_x || crop_y) {
494  if (crop_w == outlink->w && crop_h == outlink->h) {
495  AMFData *cropped_buffer = NULL;
496  res = surface->pVtbl->Duplicate(surface, surface->pVtbl->GetMemoryType(surface), &cropped_buffer);
497  AMF_RETURN_IF_FALSE(avctx, res == AMF_OK, AVERROR(ENOMEM), "Duplicate() failed with error %d\n", res);
498  surface->pVtbl->Release(surface);
499  surface = (AMFSurface*)cropped_buffer;
500  }
501  else
502  surface->pVtbl->SetCrop(surface, (amf_int32)crop_x, (amf_int32)crop_y, (amf_int32)crop_w, (amf_int32)crop_h);
503  }
504  else
505  surface->pVtbl->SetCrop(surface, (amf_int32)crop_x, (amf_int32)crop_y, (amf_int32)crop_w, (amf_int32)crop_h);
506  }
507  else if (hw_surface) {
508  // input HW surfaces can be vertically aligned by 16; tell AMF the real size
509  surface->pVtbl->SetCrop(surface, 0, 0, frame->width, frame->height);
510  }
511 
512  surface->pVtbl->SetPts(surface, frame->pts);
513  *ppSurface = surface;
514  return 0;
515 }