qsv.c
1 /*
2  * Intel MediaSDK QSV encoder/decoder shared code
3  *
4  * This file is part of FFmpeg.
5  *
6  * FFmpeg is free software; you can redistribute it and/or
7  * modify it under the terms of the GNU Lesser General Public
8  * License as published by the Free Software Foundation; either
9  * version 2.1 of the License, or (at your option) any later version.
10  *
11  * FFmpeg is distributed in the hope that it will be useful,
12  * but WITHOUT ANY WARRANTY; without even the implied warranty of
13  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14  * Lesser General Public License for more details.
15  *
16  * You should have received a copy of the GNU Lesser General Public
17  * License along with FFmpeg; if not, write to the Free Software
18  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19  */
20 
21 #include <mfxvideo.h>
22 #include <mfxjpeg.h>
23 
24 #include <stdio.h>
25 #include <string.h>
26 
27 #include "libavutil/avstring.h"
28 #include "libavutil/common.h"
29 #include "libavutil/error.h"
30 #include "libavutil/hwcontext.h"
31 #include "libavutil/hwcontext_qsv.h"
32 #include "libavutil/imgutils.h"
33 #include "libavutil/avassert.h"
34 
35 #include "avcodec.h"
36 #include "qsv_internal.h"
37 
38 #define MFX_IMPL_VIA_MASK(impl) (0x0f00 & (impl))
39 #define QSV_HAVE_USER_PLUGIN !QSV_ONEVPL
40 #define QSV_HAVE_AUDIO !QSV_ONEVPL
41 
42 #include "mfxvp8.h"
43 
44 #if QSV_HAVE_USER_PLUGIN
45 #include <mfxplugin.h>
46 #endif
47 
48 #if QSV_ONEVPL
49 #include <mfxdispatcher.h>
50 #else
51 #define MFXUnload(a) do { } while(0)
52 #endif
53 
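/**
 * Map an FFmpeg codec ID to the libmfx codec FourCC used by QSV.
 * Returns AVERROR(ENOSYS) for codecs without a QSV implementation.
 */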
54 int ff_qsv_codec_id_to_mfx(enum AVCodecID codec_id)
55 {
56  switch (codec_id) {
57  case AV_CODEC_ID_H264:
58  return MFX_CODEC_AVC;
59  case AV_CODEC_ID_HEVC:
60  return MFX_CODEC_HEVC;
61  case AV_CODEC_ID_MPEG1VIDEO:
62  case AV_CODEC_ID_MPEG2VIDEO:
63  return MFX_CODEC_MPEG2;
64  case AV_CODEC_ID_VC1:
65  return MFX_CODEC_VC1;
66  case AV_CODEC_ID_VP8:
67  return MFX_CODEC_VP8;
68  case AV_CODEC_ID_MJPEG:
69  return MFX_CODEC_JPEG;
70  case AV_CODEC_ID_VP9:
71  return MFX_CODEC_VP9;
72 #if QSV_VERSION_ATLEAST(1, 34)
73  case AV_CODEC_ID_AV1:
74  return MFX_CODEC_AV1;
75 #endif
76 
77  default:
78  break;
79  }
80 
81  return AVERROR(ENOSYS);
82 }
83 
84 static const struct {
85  int mfx_iopattern;
86  const char *desc;
87 } qsv_iopatterns[] = {
88  {MFX_IOPATTERN_IN_VIDEO_MEMORY, "input is video memory surface" },
89  {MFX_IOPATTERN_IN_SYSTEM_MEMORY, "input is system memory surface" },
90 #if QSV_HAVE_OPAQUE
91  {MFX_IOPATTERN_IN_OPAQUE_MEMORY, "input is opaque memory surface" },
92 #endif
93  {MFX_IOPATTERN_OUT_VIDEO_MEMORY, "output is video memory surface" },
94  {MFX_IOPATTERN_OUT_SYSTEM_MEMORY, "output is system memory surface" },
95 #if QSV_HAVE_OPAQUE
96  {MFX_IOPATTERN_OUT_OPAQUE_MEMORY, "output is opaque memory surface" },
97 #endif
98 };
99 
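/**
 * Log a human-readable description of an MFX IOPattern value at verbose level.
 */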
100 int ff_qsv_print_iopattern(void *log_ctx, int mfx_iopattern,
101  const char *extra_string)
102 {
103  const char *desc = NULL;
104 
105  for (int i = 0; i < FF_ARRAY_ELEMS(qsv_iopatterns); i++) {
106  if (qsv_iopatterns[i].mfx_iopattern == mfx_iopattern) {
107  desc = qsv_iopatterns[i].desc;
108  }
109  }
110  if (!desc)
111  desc = "unknown iopattern";
112 
113  av_log(log_ctx, AV_LOG_VERBOSE, "%s: %s\n", extra_string, desc);
114  return 0;
115 }
116 
117 static const struct {
118  mfxStatus mfxerr;
119  int averr;
120  const char *desc;
121 } qsv_errors[] = {
122  { MFX_ERR_NONE, 0, "success" },
123  { MFX_ERR_UNKNOWN, AVERROR_UNKNOWN, "unknown error" },
124  { MFX_ERR_NULL_PTR, AVERROR(EINVAL), "NULL pointer" },
125  { MFX_ERR_UNSUPPORTED, AVERROR(ENOSYS), "unsupported" },
126  { MFX_ERR_MEMORY_ALLOC, AVERROR(ENOMEM), "failed to allocate memory" },
127  { MFX_ERR_NOT_ENOUGH_BUFFER, AVERROR(ENOMEM), "insufficient input/output buffer" },
128  { MFX_ERR_INVALID_HANDLE, AVERROR(EINVAL), "invalid handle" },
129  { MFX_ERR_LOCK_MEMORY, AVERROR(EIO), "failed to lock the memory block" },
130  { MFX_ERR_NOT_INITIALIZED, AVERROR_BUG, "not initialized" },
131  { MFX_ERR_NOT_FOUND, AVERROR(ENOSYS), "specified object was not found" },
132  /* the following 3 errors should always be handled explicitly, so those "mappings"
133  * are for completeness only */
134  { MFX_ERR_MORE_DATA, AVERROR_UNKNOWN, "expect more data at input" },
135  { MFX_ERR_MORE_SURFACE, AVERROR_UNKNOWN, "expect more surface at output" },
136  { MFX_ERR_MORE_BITSTREAM, AVERROR_UNKNOWN, "expect more bitstream at output" },
137  { MFX_ERR_ABORTED, AVERROR_UNKNOWN, "operation aborted" },
138  { MFX_ERR_DEVICE_LOST, AVERROR(EIO), "device lost" },
139  { MFX_ERR_INCOMPATIBLE_VIDEO_PARAM, AVERROR(EINVAL), "incompatible video parameters" },
140  { MFX_ERR_INVALID_VIDEO_PARAM, AVERROR(EINVAL), "invalid video parameters" },
141  { MFX_ERR_UNDEFINED_BEHAVIOR, AVERROR_BUG, "undefined behavior" },
142  { MFX_ERR_DEVICE_FAILED, AVERROR(EIO), "device failed" },
143 #if QSV_HAVE_AUDIO
144  { MFX_ERR_INCOMPATIBLE_AUDIO_PARAM, AVERROR(EINVAL), "incompatible audio parameters" },
145  { MFX_ERR_INVALID_AUDIO_PARAM, AVERROR(EINVAL), "invalid audio parameters" },
146 #endif
147  { MFX_ERR_GPU_HANG, AVERROR(EIO), "GPU Hang" },
148  { MFX_ERR_REALLOC_SURFACE, AVERROR_UNKNOWN, "need bigger surface for output" },
149 
150  { MFX_WRN_IN_EXECUTION, 0, "operation in execution" },
151  { MFX_WRN_DEVICE_BUSY, 0, "device busy" },
152  { MFX_WRN_VIDEO_PARAM_CHANGED, 0, "video parameters changed" },
153  { MFX_WRN_PARTIAL_ACCELERATION, 0, "partial acceleration" },
154  { MFX_WRN_INCOMPATIBLE_VIDEO_PARAM, 0, "incompatible video parameters" },
155  { MFX_WRN_VALUE_NOT_CHANGED, 0, "value is saturated" },
156  { MFX_WRN_OUT_OF_RANGE, 0, "value out of range" },
157  { MFX_WRN_FILTER_SKIPPED, 0, "filter skipped" },
158 #if QSV_HAVE_AUDIO
159  { MFX_WRN_INCOMPATIBLE_AUDIO_PARAM, 0, "incompatible audio parameters" },
160 #endif
161 
162 #if QSV_VERSION_ATLEAST(1, 31)
163  { MFX_ERR_NONE_PARTIAL_OUTPUT, 0, "partial output" },
164 #endif
165 };
166 
167 /**
168  * Convert a libmfx error code into an FFmpeg error code.
169  */
170 static int qsv_map_error(mfxStatus mfx_err, const char **desc)
171 {
172  int i;
173  for (i = 0; i < FF_ARRAY_ELEMS(qsv_errors); i++) {
174  if (qsv_errors[i].mfxerr == mfx_err) {
175  if (desc)
176  *desc = qsv_errors[i].desc;
177  return qsv_errors[i].averr;
178  }
179  }
180  if (desc)
181  *desc = "unknown error";
182  return AVERROR_UNKNOWN;
183 }
184 
185 int ff_qsv_print_error(void *log_ctx, mfxStatus err,
186  const char *error_string)
187 {
188  const char *desc;
189  int ret = qsv_map_error(err, &desc);
190  av_log(log_ctx, AV_LOG_ERROR, "%s: %s (%d)\n", error_string, desc, err);
191  return ret;
192 }
193 
194 int ff_qsv_print_warning(void *log_ctx, mfxStatus err,
195  const char *warning_string)
196 {
197  const char *desc;
198  int ret = qsv_map_error(err, &desc);
199  av_log(log_ctx, AV_LOG_WARNING, "%s: %s (%d)\n", warning_string, desc, err);
200  return ret;
201 }
202 
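/**
 * Map an MFX FourCC to the corresponding AVPixelFormat,
 * or AV_PIX_FMT_NONE if the FourCC is not handled.
 */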
203 enum AVPixelFormat ff_qsv_map_fourcc(uint32_t fourcc)
204 {
205  switch (fourcc) {
206  case MFX_FOURCC_NV12: return AV_PIX_FMT_NV12;
207  case MFX_FOURCC_P010: return AV_PIX_FMT_P010;
208  case MFX_FOURCC_P8: return AV_PIX_FMT_PAL8;
209  case MFX_FOURCC_A2RGB10: return AV_PIX_FMT_X2RGB10;
210  case MFX_FOURCC_RGB4: return AV_PIX_FMT_BGRA;
211  case MFX_FOURCC_YUY2: return AV_PIX_FMT_YUYV422;
212  case MFX_FOURCC_Y210: return AV_PIX_FMT_Y210;
213  case MFX_FOURCC_AYUV: return AV_PIX_FMT_VUYX;
214  case MFX_FOURCC_Y410: return AV_PIX_FMT_XV30;
215 #if QSV_VERSION_ATLEAST(1, 31)
216  case MFX_FOURCC_P016: return AV_PIX_FMT_P012;
217  case MFX_FOURCC_Y216: return AV_PIX_FMT_Y212;
218  case MFX_FOURCC_Y416: return AV_PIX_FMT_XV36;
219 #endif
220  }
221  return AV_PIX_FMT_NONE;
222 }
223 
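/**
 * Map an AVPixelFormat to the MFX FourCC and Shift value to use for the
 * surfaces; returns the pixel format the surfaces will actually hold,
 * or AVERROR(ENOSYS) if the format is unsupported.
 */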
224 int ff_qsv_map_pixfmt(enum AVPixelFormat format, uint32_t *fourcc, uint16_t *shift)
225 {
226  switch (format) {
227  case AV_PIX_FMT_YUV420P:
228  case AV_PIX_FMT_YUVJ420P:
229  case AV_PIX_FMT_NV12:
230  *fourcc = MFX_FOURCC_NV12;
231  *shift = 0;
232  return AV_PIX_FMT_NV12;
233  case AV_PIX_FMT_YUV420P10:
234  case AV_PIX_FMT_P010:
235  *fourcc = MFX_FOURCC_P010;
236  *shift = 1;
237  return AV_PIX_FMT_P010;
238  case AV_PIX_FMT_X2RGB10:
239  *fourcc = MFX_FOURCC_A2RGB10;
240  *shift = 1;
241  return AV_PIX_FMT_X2RGB10;
242  case AV_PIX_FMT_BGRA:
243  *fourcc = MFX_FOURCC_RGB4;
244  *shift = 0;
245  return AV_PIX_FMT_BGRA;
246  case AV_PIX_FMT_YUV422P:
247  case AV_PIX_FMT_YUYV422:
248  *fourcc = MFX_FOURCC_YUY2;
249  *shift = 0;
250  return AV_PIX_FMT_YUYV422;
251  case AV_PIX_FMT_YUV422P10:
252  case AV_PIX_FMT_Y210:
253  *fourcc = MFX_FOURCC_Y210;
254  *shift = 1;
255  return AV_PIX_FMT_Y210;
256  case AV_PIX_FMT_VUYX:
257  *fourcc = MFX_FOURCC_AYUV;
258  *shift = 0;
259  return AV_PIX_FMT_VUYX;
260  case AV_PIX_FMT_XV30:
261  *fourcc = MFX_FOURCC_Y410;
262  *shift = 0;
263  return AV_PIX_FMT_XV30;
264 #if QSV_VERSION_ATLEAST(1, 31)
265  case AV_PIX_FMT_P012:
266  *fourcc = MFX_FOURCC_P016;
267  *shift = 1;
268  return AV_PIX_FMT_P012;
269  case AV_PIX_FMT_Y212:
270  *fourcc = MFX_FOURCC_Y216;
271  *shift = 1;
272  return AV_PIX_FMT_Y212;
273  case AV_PIX_FMT_XV36:
274  *fourcc = MFX_FOURCC_Y416;
275  *shift = 1;
276  return AV_PIX_FMT_XV36;
277 #endif
278  default:
279  return AVERROR(ENOSYS);
280  }
281 }
282 
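/**
 * Point the data fields of an mfxFrameSurface1 at the planes of a system
 * memory AVFrame; no pixel data is copied.
 */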
283 int ff_qsv_map_frame_to_surface(const AVFrame *frame, mfxFrameSurface1 *surface)
284 {
285  switch (frame->format) {
286  case AV_PIX_FMT_NV12:
287  case AV_PIX_FMT_P010:
288  case AV_PIX_FMT_P012:
289  surface->Data.Y = frame->data[0];
290  surface->Data.UV = frame->data[1];
291  /* The SDK checks Data.V when using system memory for VP9 encoding */
292  surface->Data.V = surface->Data.UV + 1;
293  break;
294  case AV_PIX_FMT_X2RGB10:
295  case AV_PIX_FMT_BGRA:
296  surface->Data.B = frame->data[0];
297  surface->Data.G = frame->data[0] + 1;
298  surface->Data.R = frame->data[0] + 2;
299  surface->Data.A = frame->data[0] + 3;
300  break;
301  case AV_PIX_FMT_YUYV422:
302  surface->Data.Y = frame->data[0];
303  surface->Data.U = frame->data[0] + 1;
304  surface->Data.V = frame->data[0] + 3;
305  break;
306 
307  case AV_PIX_FMT_Y210:
308  case AV_PIX_FMT_Y212:
309  surface->Data.Y16 = (mfxU16 *)frame->data[0];
310  surface->Data.U16 = (mfxU16 *)frame->data[0] + 1;
311  surface->Data.V16 = (mfxU16 *)frame->data[0] + 3;
312  break;
313 
314  case AV_PIX_FMT_VUYX:
315  surface->Data.V = frame->data[0];
316  surface->Data.U = frame->data[0] + 1;
317  surface->Data.Y = frame->data[0] + 2;
318  // Only set Data.A to a valid address, the SDK doesn't
319  // use the value from the frame.
320  surface->Data.A = frame->data[0] + 3;
321  break;
322 
323  case AV_PIX_FMT_XV30:
324  surface->Data.U = frame->data[0];
325  break;
326 
327  case AV_PIX_FMT_XV36:
328  surface->Data.U = frame->data[0];
329  surface->Data.Y = frame->data[0] + 2;
330  surface->Data.V = frame->data[0] + 4;
331  // Only set Data.A to a valid address, the SDK doesn't
332  // use the value from the frame.
333  surface->Data.A = frame->data[0] + 6;
334  break;
335 
336  default:
337  return AVERROR(ENOSYS);
338  }
339  surface->Data.PitchLow = frame->linesize[0];
340 
341  return 0;
342 }
343 
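/**
 * Find the index of the QSVMid whose handle pair matches the MemId of the
 * given frame's surface; returns AVERROR_BUG if no match is found.
 */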
344 int ff_qsv_find_surface_idx(QSVFramesContext *ctx, QSVFrame *frame)
345 {
346  int i;
347  for (i = 0; i < ctx->nb_mids; i++) {
348  QSVMid *mid = &ctx->mids[i];
349  mfxHDLPair *pair = (mfxHDLPair*)frame->surface.Data.MemId;
350  if ((mid->handle_pair->first == pair->first) &&
351  (mid->handle_pair->second == pair->second))
352  return i;
353  }
354  return AVERROR_BUG;
355 }
356 
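/**
 * Translate an MFX PicStruct value into the corresponding AVFieldOrder.
 */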
357 enum AVFieldOrder ff_qsv_map_picstruct(int mfx_pic_struct)
358 {
359  enum AVFieldOrder field = AV_FIELD_UNKNOWN;
360  switch (mfx_pic_struct & 0xF) {
361  case MFX_PICSTRUCT_PROGRESSIVE:
362  field = AV_FIELD_PROGRESSIVE;
363  break;
364  case MFX_PICSTRUCT_FIELD_TFF:
365  field = AV_FIELD_TT;
366  break;
367  case MFX_PICSTRUCT_FIELD_BFF:
368  field = AV_FIELD_BB;
369  break;
370  }
371 
372  return field;
373 }
374 
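/**
 * Translate an MFX FrameType value into the corresponding AVPictureType.
 */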
375 enum AVPictureType ff_qsv_map_pictype(int mfx_pic_type)
376 {
377  enum AVPictureType type;
378  switch (mfx_pic_type & 0x7) {
379  case MFX_FRAMETYPE_I:
380  if (mfx_pic_type & MFX_FRAMETYPE_S)
381  type = AV_PICTURE_TYPE_SI;
382  else
383  type = AV_PICTURE_TYPE_I;
384  break;
385  case MFX_FRAMETYPE_B:
386  type = AV_PICTURE_TYPE_B;
387  break;
388  case MFX_FRAMETYPE_P:
389  if (mfx_pic_type & MFX_FRAMETYPE_S)
390  type = AV_PICTURE_TYPE_SP;
391  else
392  type = AV_PICTURE_TYPE_P;
393  break;
394  case MFX_FRAMETYPE_UNKNOWN:
395  type = AV_PICTURE_TYPE_NONE;
396  break;
397  default:
398  av_assert0(0);
399  }
400 
401  return type;
402 }
403 
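/*
 * Load the user plugins given as a ':'-separated list of hexadecimal UIDs
 * into the session. A no-op when plugin support is not available.
 */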
404 static int qsv_load_plugins(mfxSession session, const char *load_plugins,
405  void *logctx)
406 {
407 #if QSV_HAVE_USER_PLUGIN
408  if (!load_plugins || !*load_plugins)
409  return 0;
410 
411  while (*load_plugins) {
412  mfxPluginUID uid;
413  mfxStatus ret;
414  int i, err = 0;
415 
416  char *plugin = av_get_token(&load_plugins, ":");
417  if (!plugin)
418  return AVERROR(ENOMEM);
419  if (strlen(plugin) != 2 * sizeof(uid.Data)) {
420  av_log(logctx, AV_LOG_ERROR, "Invalid plugin UID length\n");
421  err = AVERROR(EINVAL);
422  goto load_plugin_fail;
423  }
424 
425  for (i = 0; i < sizeof(uid.Data); i++) {
426  err = sscanf(plugin + 2 * i, "%2hhx", uid.Data + i);
427  if (err != 1) {
428  av_log(logctx, AV_LOG_ERROR, "Invalid plugin UID\n");
429  err = AVERROR(EINVAL);
430  goto load_plugin_fail;
431  }
432 
433  }
434 
435  ret = MFXVideoUSER_Load(session, &uid, 1);
436  if (ret < 0) {
437  char errorbuf[128];
438  snprintf(errorbuf, sizeof(errorbuf),
439  "Could not load the requested plugin '%s'", plugin);
440  err = ff_qsv_print_error(logctx, ret, errorbuf);
441  goto load_plugin_fail;
442  }
443 
444  if (*load_plugins)
445  load_plugins++;
446 load_plugin_fail:
447  av_freep(&plugin);
448  if (err < 0)
449  return err;
450  }
451 #endif
452 
453  return 0;
454 
455 }
456 
457 //This code is only required for Linux since a display handle is required.
458 //For Windows the session is complete and ready to use.
459 
460 #ifdef AVCODEC_QSV_LINUX_SESSION_HANDLE
461 static int ff_qsv_set_display_handle(AVCodecContext *avctx, QSVSession *qs)
462 {
463  AVDictionary *child_device_opts = NULL;
464  AVVAAPIDeviceContext *hwctx;
465  int ret;
466 
467  av_dict_set(&child_device_opts, "kernel_driver", "i915", 0);
468  av_dict_set(&child_device_opts, "driver", "iHD", 0);
469 
470  ret = av_hwdevice_ctx_create(&qs->va_device_ref, AV_HWDEVICE_TYPE_VAAPI, NULL, child_device_opts, 0);
471  av_dict_free(&child_device_opts);
472  if (ret < 0) {
473  av_log(avctx, AV_LOG_ERROR, "Failed to create a VAAPI device.\n");
474  return ret;
475  } else {
476  qs->va_device_ctx = (AVHWDeviceContext*)qs->va_device_ref->data;
477  hwctx = qs->va_device_ctx->hwctx;
478 
479  ret = MFXVideoCORE_SetHandle(qs->session,
480  (mfxHandleType)MFX_HANDLE_VA_DISPLAY, (mfxHDL)hwctx->display);
481  if (ret < 0) {
482  return ff_qsv_print_error(avctx, ret, "Error setting the display handle");
483  }
484  }
485 
486  return 0;
487 }
488 #endif //AVCODEC_QSV_LINUX_SESSION_HANDLE
489 
490 #if QSV_ONEVPL
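/* Create a oneVPL loader whose configuration filters on the requested
 * implementation type and minimum API version. */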
491 static int qsv_new_mfx_loader(AVCodecContext *avctx,
492  mfxIMPL implementation,
493  mfxVersion *pver,
494  void **ploader)
495 {
496  mfxStatus sts;
497  mfxLoader loader = NULL;
498  mfxConfig cfg;
499  mfxVariant impl_value;
500 
501  loader = MFXLoad();
502  if (!loader) {
503  av_log(avctx, AV_LOG_ERROR, "Error creating a MFX loader\n");
504  goto fail;
505  }
506 
507  /* Create configurations for implementation */
508  cfg = MFXCreateConfig(loader);
509  if (!cfg) {
510  av_log(avctx, AV_LOG_ERROR, "Error creating an MFX configuration\n");
511  goto fail;
512  }
513 
514  impl_value.Type = MFX_VARIANT_TYPE_U32;
515  impl_value.Data.U32 = (implementation == MFX_IMPL_SOFTWARE) ?
516  MFX_IMPL_TYPE_SOFTWARE : MFX_IMPL_TYPE_HARDWARE;
517  sts = MFXSetConfigFilterProperty(cfg,
518  (const mfxU8 *)"mfxImplDescription.Impl", impl_value);
519  if (sts != MFX_ERR_NONE) {
520  av_log(avctx, AV_LOG_ERROR, "Error adding a MFX configuration "
521  "property: %d\n", sts);
522  goto fail;
523  }
524 
525  impl_value.Type = MFX_VARIANT_TYPE_U32;
526  impl_value.Data.U32 = pver->Version;
527  sts = MFXSetConfigFilterProperty(cfg,
528  (const mfxU8 *)"mfxImplDescription.ApiVersion.Version",
529  impl_value);
530  if (sts != MFX_ERR_NONE) {
531  av_log(avctx, AV_LOG_ERROR, "Error adding a MFX configuration "
532  "property: %d\n", sts);
533  goto fail;
534  }
535 
536  *ploader = loader;
537 
538  return 0;
539 
540 fail:
541  if (loader)
542  MFXUnload(loader);
543 
544  *ploader = NULL;
545  return AVERROR_UNKNOWN;
546 }
547 
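/* Walk the implementations exposed by the loader and create a session from
 * the first one that succeeds. */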
548 static int qsv_create_mfx_session_from_loader(void *ctx, mfxLoader loader, mfxSession *psession)
549 {
550  mfxStatus sts;
551  mfxSession session = NULL;
552  uint32_t impl_idx = 0;
553 
554  while (1) {
555  /* Enumerate all implementations */
556  mfxImplDescription *impl_desc;
557 
558  sts = MFXEnumImplementations(loader, impl_idx,
559  MFX_IMPLCAPS_IMPLDESCSTRUCTURE,
560  (mfxHDL *)&impl_desc);
561  /* Failed to find an available implementation */
562  if (sts == MFX_ERR_NOT_FOUND)
563  break;
564  else if (sts != MFX_ERR_NONE) {
565  impl_idx++;
566  continue;
567  }
568 
569  sts = MFXCreateSession(loader, impl_idx, &session);
570  MFXDispReleaseImplDescription(loader, impl_desc);
571  if (sts == MFX_ERR_NONE)
572  break;
573 
574  impl_idx++;
575  }
576 
577  if (sts != MFX_ERR_NONE) {
578  av_log(ctx, AV_LOG_ERROR, "Error creating a MFX session: %d.\n", sts);
579  goto fail;
580  }
581 
582  *psession = session;
583 
584  return 0;
585 
586 fail:
587  if (session)
588  MFXClose(session);
589 
590  *psession = NULL;
591  return AVERROR_UNKNOWN;
592 }
593 
594 static int qsv_create_mfx_session(AVCodecContext *avctx,
595  mfxIMPL implementation,
596  mfxVersion *pver,
597  int gpu_copy,
598  mfxSession *psession,
599  void **ploader)
600 {
601  mfxLoader loader = NULL;
602 
603  /* Don't create a new MFX loader if the input loader is valid */
604  if (*ploader == NULL) {
605  av_log(avctx, AV_LOG_VERBOSE,
606  "Use Intel(R) oneVPL to create MFX session, the required "
607  "implementation version is %d.%d\n",
608  pver->Major, pver->Minor);
609 
610  if (qsv_new_mfx_loader(avctx, implementation, pver, (void **)&loader))
611  goto fail;
612 
613  av_assert0(loader);
614  } else {
615  av_log(avctx, AV_LOG_VERBOSE,
616  "Use Intel(R) oneVPL to create MFX session with the specified MFX loader\n");
617 
618  loader = *ploader;
619  }
620 
621  if (qsv_create_mfx_session_from_loader(avctx, loader, psession))
622  goto fail;
623 
624  if (!*ploader)
625  *ploader = loader;
626 
627  return 0;
628 
629 fail:
630  if (!*ploader && loader)
631  MFXUnload(loader);
632 
633  return AVERROR_UNKNOWN;
634 }
635 
636 #else
637 
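/* Pre-oneVPL path: create the session directly with MFXInitEx(). */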
638 static int qsv_create_mfx_session(AVCodecContext *avctx,
639  mfxIMPL implementation,
640  mfxVersion *pver,
641  int gpu_copy,
642  mfxSession *psession,
643  void **ploader)
644 {
645  mfxInitParam init_par = { MFX_IMPL_AUTO_ANY };
646  mfxSession session = NULL;
647  mfxStatus sts;
648 
649  av_log(avctx, AV_LOG_VERBOSE,
650  "Use Intel(R) Media SDK to create MFX session, the required "
651  "implementation version is %d.%d\n",
652  pver->Major, pver->Minor);
653 
654  *psession = NULL;
655  *ploader = NULL;
656 
657  init_par.GPUCopy = gpu_copy;
658  init_par.Implementation = implementation;
659  init_par.Version = *pver;
660  sts = MFXInitEx(init_par, &session);
661  if (sts < 0)
662  return ff_qsv_print_error(avctx, sts,
663  "Error initializing a MFX session");
664  else if (sts > 0) {
665  ff_qsv_print_warning(avctx, sts,
666  "Warning in MFX initialization");
667  return AVERROR_UNKNOWN;
668  }
669 
670  *psession = session;
671 
672  return 0;
673 }
674 
675 #endif
676 
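/**
 * Create an internal MFX session owned by the codec, trying each candidate
 * implementation in turn, loading the requested plugins and logging whether
 * a software or hardware implementation was selected.
 */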
677 int ff_qsv_init_internal_session(AVCodecContext *avctx, QSVSession *qs,
678  const char *load_plugins, int gpu_copy)
679 {
680  mfxIMPL impls[] = {
681 #if CONFIG_D3D11VA
682  MFX_IMPL_AUTO_ANY | MFX_IMPL_VIA_D3D11,
683 #endif
684  MFX_IMPL_AUTO_ANY
685  };
686  mfxIMPL impl;
687  mfxVersion ver = { { QSV_VERSION_MINOR, QSV_VERSION_MAJOR } };
688 
689  const char *desc;
690  int ret;
691 
692  for (int i = 0; i < FF_ARRAY_ELEMS(impls); i++) {
693  ret = qsv_create_mfx_session(avctx, impls[i], &ver, gpu_copy, &qs->session,
694  &qs->loader);
695 
696  if (ret == 0)
697  break;
698 
699  if (i == FF_ARRAY_ELEMS(impls) - 1)
700  return ret;
701  else
702  av_log(avctx, AV_LOG_ERROR, "The current mfx implementation is not "
703  "supported, trying the next mfx implementation.\n");
704  }
705 
706 #ifdef AVCODEC_QSV_LINUX_SESSION_HANDLE
707  ret = ff_qsv_set_display_handle(avctx, qs);
708  if (ret < 0)
709  return ret;
710 #endif
711 
712  ret = qsv_load_plugins(qs->session, load_plugins, avctx);
713  if (ret < 0) {
714  av_log(avctx, AV_LOG_ERROR, "Error loading plugins\n");
715  return ret;
716  }
717 
718  ret = MFXQueryIMPL(qs->session, &impl);
719  if (ret != MFX_ERR_NONE)
720  return ff_qsv_print_error(avctx, ret,
721  "Error querying the session attributes");
722 
723  switch (MFX_IMPL_BASETYPE(impl)) {
724  case MFX_IMPL_SOFTWARE:
725  desc = "software";
726  break;
727  case MFX_IMPL_HARDWARE:
728  case MFX_IMPL_HARDWARE2:
729  case MFX_IMPL_HARDWARE3:
730  case MFX_IMPL_HARDWARE4:
731  desc = "hardware accelerated";
732  break;
733  default:
734  desc = "unknown";
735  }
736 
737  av_log(avctx, AV_LOG_VERBOSE,
738  "Initialized an internal MFX session using %s implementation\n",
739  desc);
740 
741  return 0;
742 }
743 
744 static void mids_buf_free(void *opaque, uint8_t *data)
745 {
746  AVBufferRef *hw_frames_ref = opaque;
747  av_buffer_unref(&hw_frames_ref);
748  av_freep(&data);
749 }
750 
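/* Build a refcounted array of QSVMid entries, one per surface in the given
 * frames context; the buffer keeps a reference to the frames context alive. */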
751 static AVBufferRef *qsv_create_mids(AVBufferRef *hw_frames_ref)
752 {
753  AVHWFramesContext *frames_ctx = (AVHWFramesContext*)hw_frames_ref->data;
754  AVQSVFramesContext *frames_hwctx = frames_ctx->hwctx;
755  int nb_surfaces = frames_hwctx->nb_surfaces;
756 
757  AVBufferRef *mids_buf, *hw_frames_ref1;
758  QSVMid *mids;
759  int i;
760 
761  hw_frames_ref1 = av_buffer_ref(hw_frames_ref);
762  if (!hw_frames_ref1)
763  return NULL;
764 
765  mids = av_calloc(nb_surfaces, sizeof(*mids));
766  if (!mids) {
767  av_buffer_unref(&hw_frames_ref1);
768  return NULL;
769  }
770 
771  mids_buf = av_buffer_create((uint8_t*)mids, nb_surfaces * sizeof(*mids),
772  mids_buf_free, hw_frames_ref1, 0);
773  if (!mids_buf) {
774  av_buffer_unref(&hw_frames_ref1);
775  av_freep(&mids);
776  return NULL;
777  }
778 
779  for (i = 0; i < nb_surfaces; i++) {
780  QSVMid *mid = &mids[i];
781  mid->handle_pair = (mfxHDLPair*)frames_hwctx->surfaces[i].Data.MemId;
782  mid->hw_frames_ref = hw_frames_ref1;
783  }
784 
785  return mids_buf;
786 }
787 
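/* Fill an mfxFrameAllocResponse from an existing QSVMid array. */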
788 static int qsv_setup_mids(mfxFrameAllocResponse *resp, AVBufferRef *hw_frames_ref,
789  AVBufferRef *mids_buf)
790 {
791  AVHWFramesContext *frames_ctx = (AVHWFramesContext*)hw_frames_ref->data;
792  AVQSVFramesContext *frames_hwctx = frames_ctx->hwctx;
793  QSVMid *mids = (QSVMid*)mids_buf->data;
794  int nb_surfaces = frames_hwctx->nb_surfaces;
795  int i;
796 
797  // the allocated size of the array is two larger than the number of
798  // surfaces, we store the references to the frames context and the
799  // QSVMid array there
800  resp->mids = av_calloc(nb_surfaces + 2, sizeof(*resp->mids));
801  if (!resp->mids)
802  return AVERROR(ENOMEM);
803 
804  for (i = 0; i < nb_surfaces; i++)
805  resp->mids[i] = &mids[i];
806  resp->NumFrameActual = nb_surfaces;
807 
808  resp->mids[resp->NumFrameActual] = (mfxMemId)av_buffer_ref(hw_frames_ref);
809  if (!resp->mids[resp->NumFrameActual]) {
810  av_freep(&resp->mids);
811  return AVERROR(ENOMEM);
812  }
813 
814  resp->mids[resp->NumFrameActual + 1] = av_buffer_ref(mids_buf);
815  if (!resp->mids[resp->NumFrameActual + 1]) {
816  av_buffer_unref((AVBufferRef**)&resp->mids[resp->NumFrameActual]);
817  av_freep(&resp->mids);
818  return AVERROR(ENOMEM);
819  }
820 
821  return 0;
822 }
823 
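/* mfxFrameAllocator.Alloc callback: external requests are served from the
 * caller-supplied frames context, internal requests from a newly created
 * hwframes context. */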
824 static mfxStatus qsv_frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req,
825  mfxFrameAllocResponse *resp)
826 {
827  QSVFramesContext *ctx = pthis;
828  int ret;
829 
830  /* this should only be called from an encoder or decoder and
831  * only allocates video memory frames */
832  if (!(req->Type & (MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET |
833  MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET)) ||
834  !(req->Type & (MFX_MEMTYPE_FROM_DECODE | MFX_MEMTYPE_FROM_ENCODE)))
835  return MFX_ERR_UNSUPPORTED;
836 
837  if (req->Type & MFX_MEMTYPE_EXTERNAL_FRAME) {
838  /* external frames -- fill from the caller-supplied frames context */
839  AVHWFramesContext *frames_ctx = (AVHWFramesContext*)ctx->hw_frames_ctx->data;
840  AVQSVFramesContext *frames_hwctx = frames_ctx->hwctx;
841  mfxFrameInfo *i = &req->Info;
842  mfxFrameInfo *i1 = &frames_hwctx->surfaces[0].Info;
843 
844  if (i->Width > i1->Width || i->Height > i1->Height ||
845  i->FourCC != i1->FourCC || i->ChromaFormat != i1->ChromaFormat) {
846  av_log(ctx->logctx, AV_LOG_ERROR, "Mismatching surface properties in an "
847  "allocation request: %dx%d %d %d vs %dx%d %d %d\n",
848  i->Width, i->Height, i->FourCC, i->ChromaFormat,
849  i1->Width, i1->Height, i1->FourCC, i1->ChromaFormat);
850  return MFX_ERR_UNSUPPORTED;
851  }
852 
853  ret = qsv_setup_mids(resp, ctx->hw_frames_ctx, ctx->mids_buf);
854  if (ret < 0) {
855  av_log(ctx->logctx, AV_LOG_ERROR,
856  "Error filling an external frame allocation request\n");
857  return MFX_ERR_MEMORY_ALLOC;
858  }
859  } else if (req->Type & MFX_MEMTYPE_INTERNAL_FRAME) {
860  /* internal frames -- allocate a new hw frames context */
861  AVHWFramesContext *ext_frames_ctx = (AVHWFramesContext*)ctx->hw_frames_ctx->data;
862  mfxFrameInfo *i = &req->Info;
863 
864  AVBufferRef *frames_ref, *mids_buf;
865  AVHWFramesContext *frames_ctx;
866  AVQSVFramesContext *frames_hwctx;
867 
868  frames_ref = av_hwframe_ctx_alloc(ext_frames_ctx->device_ref);
869  if (!frames_ref)
870  return MFX_ERR_MEMORY_ALLOC;
871 
872  frames_ctx = (AVHWFramesContext*)frames_ref->data;
873  frames_hwctx = frames_ctx->hwctx;
874 
875  frames_ctx->format = AV_PIX_FMT_QSV;
876  frames_ctx->sw_format = ff_qsv_map_fourcc(i->FourCC);
877  frames_ctx->width = i->Width;
878  frames_ctx->height = i->Height;
879  frames_ctx->initial_pool_size = req->NumFrameSuggested;
880 
881  frames_hwctx->frame_type = req->Type;
882 
883  ret = av_hwframe_ctx_init(frames_ref);
884  if (ret < 0) {
885  av_log(ctx->logctx, AV_LOG_ERROR,
886  "Error initializing a frames context for an internal frame "
887  "allocation request\n");
888  av_buffer_unref(&frames_ref);
889  return MFX_ERR_MEMORY_ALLOC;
890  }
891 
892  mids_buf = qsv_create_mids(frames_ref);
893  if (!mids_buf) {
894  av_buffer_unref(&frames_ref);
895  return MFX_ERR_MEMORY_ALLOC;
896  }
897 
898  ret = qsv_setup_mids(resp, frames_ref, mids_buf);
899  av_buffer_unref(&mids_buf);
900  av_buffer_unref(&frames_ref);
901  if (ret < 0) {
902  av_log(ctx->logctx, AV_LOG_ERROR,
903  "Error filling an internal frame allocation request\n");
904  return MFX_ERR_MEMORY_ALLOC;
905  }
906  } else {
907  return MFX_ERR_UNSUPPORTED;
908  }
909 
910  return MFX_ERR_NONE;
911 }
912 
913 static mfxStatus qsv_frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
914 {
915  av_buffer_unref((AVBufferRef**)&resp->mids[resp->NumFrameActual]);
916  av_buffer_unref((AVBufferRef**)&resp->mids[resp->NumFrameActual + 1]);
917  av_freep(&resp->mids);
918  return MFX_ERR_NONE;
919 }
920 
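/* mfxFrameAllocator.Lock callback: map the video memory surface into system
 * memory with av_hwframe_map() and expose the mapped planes to libmfx. */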
921 static mfxStatus qsv_frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
922 {
923  QSVMid *qsv_mid = mid;
924  AVHWFramesContext *hw_frames_ctx = (AVHWFramesContext*)qsv_mid->hw_frames_ref->data;
925  AVQSVFramesContext *hw_frames_hwctx = hw_frames_ctx->hwctx;
926  int ret;
927 
928  if (qsv_mid->locked_frame)
929  return MFX_ERR_UNDEFINED_BEHAVIOR;
930 
931  /* Allocate a system memory frame that will hold the mapped data. */
932  qsv_mid->locked_frame = av_frame_alloc();
933  if (!qsv_mid->locked_frame)
934  return MFX_ERR_MEMORY_ALLOC;
935  qsv_mid->locked_frame->format = hw_frames_ctx->sw_format;
936 
937  /* wrap the provided handle in a hwaccel AVFrame */
938  qsv_mid->hw_frame = av_frame_alloc();
939  if (!qsv_mid->hw_frame)
940  goto fail;
941 
942  qsv_mid->hw_frame->data[3] = (uint8_t*)&qsv_mid->surf;
943  qsv_mid->hw_frame->format = AV_PIX_FMT_QSV;
944 
945  // doesn't really matter what buffer is used here
946  qsv_mid->hw_frame->buf[0] = av_buffer_alloc(1);
947  if (!qsv_mid->hw_frame->buf[0])
948  goto fail;
949 
950  qsv_mid->hw_frame->width = hw_frames_ctx->width;
951  qsv_mid->hw_frame->height = hw_frames_ctx->height;
952 
953  qsv_mid->hw_frame->hw_frames_ctx = av_buffer_ref(qsv_mid->hw_frames_ref);
954  if (!qsv_mid->hw_frame->hw_frames_ctx)
955  goto fail;
956 
957  qsv_mid->surf.Info = hw_frames_hwctx->surfaces[0].Info;
958  qsv_mid->surf.Data.MemId = qsv_mid->handle_pair;
959 
960  /* map the data to the system memory */
961  ret = av_hwframe_map(qsv_mid->locked_frame, qsv_mid->hw_frame,
962  AV_HWFRAME_MAP_DIRECT);
963  if (ret < 0)
964  goto fail;
965 
966  ptr->Pitch = qsv_mid->locked_frame->linesize[0];
967  ptr->Y = qsv_mid->locked_frame->data[0];
968  ptr->U = qsv_mid->locked_frame->data[1];
969  ptr->V = qsv_mid->locked_frame->data[1] + 1;
970 
971  return MFX_ERR_NONE;
972 fail:
973  av_frame_free(&qsv_mid->hw_frame);
974  av_frame_free(&qsv_mid->locked_frame);
975  return MFX_ERR_MEMORY_ALLOC;
976 }
977 
978 static mfxStatus qsv_frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
979 {
980  QSVMid *qsv_mid = mid;
981 
982  av_frame_free(&qsv_mid->locked_frame);
983  av_frame_free(&qsv_mid->hw_frame);
984 
985  return MFX_ERR_NONE;
986 }
987 
988 static mfxStatus qsv_frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
989 {
990  QSVMid *qsv_mid = (QSVMid*)mid;
991  mfxHDLPair *pair_dst = (mfxHDLPair*)hdl;
992  mfxHDLPair *pair_src = (mfxHDLPair*)qsv_mid->handle_pair;
993 
994  pair_dst->first = pair_src->first;
995 
996  if (pair_src->second != (mfxMemId)MFX_INFINITE)
997  pair_dst->second = pair_src->second;
998  return MFX_ERR_NONE;
999 }
1000 
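/**
 * Create a new MFX session sharing the device (VAAPI/D3D9/D3D11 handle) of
 * the session contained in the given QSV hardware device context.
 */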
1001 int ff_qsv_init_session_device(AVCodecContext *avctx, mfxSession *psession,
1002  AVBufferRef *device_ref, const char *load_plugins,
1003  int gpu_copy)
1004 {
1005  AVHWDeviceContext *device_ctx = (AVHWDeviceContext*)device_ref->data;
1006  AVQSVDeviceContext *device_hwctx = device_ctx->hwctx;
1007  mfxSession parent_session = device_hwctx->session;
1008  void *loader = device_hwctx->loader;
1009  mfxHDL handle = NULL;
1010  int hw_handle_supported = 0;
1011 
1012  mfxSession session;
1013  mfxVersion ver;
1014  mfxIMPL impl;
1015  mfxHandleType handle_type;
1016  mfxStatus err;
1017  int ret;
1018 
1019  err = MFXQueryIMPL(parent_session, &impl);
1020  if (err == MFX_ERR_NONE)
1021  err = MFXQueryVersion(parent_session, &ver);
1022  if (err != MFX_ERR_NONE)
1023  return ff_qsv_print_error(avctx, err,
1024  "Error querying the session attributes");
1025 
1026  if (MFX_IMPL_VIA_VAAPI == MFX_IMPL_VIA_MASK(impl)) {
1027  handle_type = MFX_HANDLE_VA_DISPLAY;
1028  hw_handle_supported = 1;
1029  } else if (MFX_IMPL_VIA_D3D11 == MFX_IMPL_VIA_MASK(impl)) {
1030  handle_type = MFX_HANDLE_D3D11_DEVICE;
1031  hw_handle_supported = 1;
1032  } else if (MFX_IMPL_VIA_D3D9 == MFX_IMPL_VIA_MASK(impl)) {
1033  handle_type = MFX_HANDLE_D3D9_DEVICE_MANAGER;
1034  hw_handle_supported = 1;
1035  }
1036 
1037  if (hw_handle_supported) {
1038  err = MFXVideoCORE_GetHandle(parent_session, handle_type, &handle);
1039  if (err != MFX_ERR_NONE) {
1040  return ff_qsv_print_error(avctx, err,
1041  "Error getting handle session");
1042  }
1043  }
1044  if (!handle) {
1045  av_log(avctx, AV_LOG_VERBOSE, "No supported hw handle could be retrieved "
1046  "from the session\n");
1047  }
1048 
1049  ret = qsv_create_mfx_session(avctx, impl, &ver, gpu_copy, &session,
1050  &loader);
1051  if (ret)
1052  return ret;
1053 
1054  if (handle) {
1055  err = MFXVideoCORE_SetHandle(session, handle_type, handle);
1056  if (err != MFX_ERR_NONE)
1057  return ff_qsv_print_error(avctx, err,
1058  "Error setting a HW handle");
1059  }
1060 
1061  if (QSV_RUNTIME_VERSION_ATLEAST(ver, 1, 25)) {
1062  err = MFXJoinSession(parent_session, session);
1063  if (err != MFX_ERR_NONE)
1064  return ff_qsv_print_error(avctx, err,
1065  "Error joining session");
1066  }
1067 
1068  ret = qsv_load_plugins(session, load_plugins, avctx);
1069  if (ret < 0) {
1070  av_log(avctx, AV_LOG_ERROR, "Error loading plugins\n");
1071  return ret;
1072  }
1073 
1074  *psession = session;
1075  return 0;
1076 }
1077 
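/**
 * Create an MFX session for the given frames context and, unless opaque
 * memory is used, install the frame allocator callbacks defined above.
 */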
1078 int ff_qsv_init_session_frames(AVCodecContext *avctx, mfxSession *psession,
1079  QSVFramesContext *qsv_frames_ctx,
1080  const char *load_plugins, int opaque, int gpu_copy)
1081 {
1082  mfxFrameAllocator frame_allocator = {
1083  .pthis = qsv_frames_ctx,
1084  .Alloc = qsv_frame_alloc,
1085  .Lock = qsv_frame_lock,
1086  .Unlock = qsv_frame_unlock,
1087  .GetHDL = qsv_frame_get_hdl,
1088  .Free = qsv_frame_free,
1089  };
1090 
1091  AVHWFramesContext *frames_ctx = (AVHWFramesContext*)qsv_frames_ctx->hw_frames_ctx->data;
1092  AVQSVFramesContext *frames_hwctx = frames_ctx->hwctx;
1093 
1094  mfxSession session;
1095  mfxStatus err;
1096 
1097  int ret;
1098 
1099  ret = ff_qsv_init_session_device(avctx, &session,
1100  frames_ctx->device_ref, load_plugins, gpu_copy);
1101  if (ret < 0)
1102  return ret;
1103 
1104  if (!opaque) {
1105  qsv_frames_ctx->logctx = avctx;
1106 
1107  /* allocate the memory ids for the external frames */
1108  av_buffer_unref(&qsv_frames_ctx->mids_buf);
1109  qsv_frames_ctx->mids_buf = qsv_create_mids(qsv_frames_ctx->hw_frames_ctx);
1110  if (!qsv_frames_ctx->mids_buf)
1111  return AVERROR(ENOMEM);
1112  qsv_frames_ctx->mids = (QSVMid*)qsv_frames_ctx->mids_buf->data;
1113  qsv_frames_ctx->nb_mids = frames_hwctx->nb_surfaces;
1114 
1115  err = MFXVideoCORE_SetFrameAllocator(session, &frame_allocator);
1116  if (err != MFX_ERR_NONE)
1117  return ff_qsv_print_error(avctx, err,
1118  "Error setting a frame allocator");
1119  }
1120 
1121  *psession = session;
1122  return 0;
1123 }
1124 
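/**
 * Close the session and release the loader and (on Linux) the VAAPI device
 * created by ff_qsv_init_internal_session().
 */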
1125 int ff_qsv_close_internal_session(QSVSession *qs)
1126 {
1127  if (qs->session) {
1128  MFXClose(qs->session);
1129  qs->session = NULL;
1130  }
1131 
1132  if (qs->loader) {
1133  MFXUnload(qs->loader);
1134  qs->loader = NULL;
1135  }
1136 
1137 #ifdef AVCODEC_QSV_LINUX_SESSION_HANDLE
1138  av_buffer_unref(&qs->va_device_ref);
1139 #endif
1140  return 0;
1141 }
1142 
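/**
 * Attach an extra parameter buffer to the frame, unless a buffer with the
 * same BufferId is already attached or there is no space left.
 */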
1143 void ff_qsv_frame_add_ext_param(AVCodecContext *avctx, QSVFrame *frame,
1144  mfxExtBuffer * param)
1145 {
1146  int i;
1147 
1148  for (i = 0; i < frame->num_ext_params; i++) {
1149  mfxExtBuffer *ext_buffer = frame->ext_param[i];
1150 
1151  if (ext_buffer->BufferId == param->BufferId) {
1152  av_log(avctx, AV_LOG_WARNING, "A buffer with the same type has already "
1153  "been added\n");
1154  return;
1155  }
1156  }
1157 
1158  if (frame->num_ext_params < QSV_MAX_FRAME_EXT_PARAMS) {
1159  frame->ext_param[frame->num_ext_params] = param;
1160  frame->num_ext_params++;
1161  frame->surface.Data.NumExtParam = frame->num_ext_params;
1162  } else {
1163  av_log(avctx, AV_LOG_WARNING, "Ignoring this extra buffer because there is "
1164  "not enough space\n");
1165  }
1166 
1167 
1168 }