Go to the documentation of this file.
28 #include "AMF/components/VideoDecoderUVD.h"
45 if (!strcmp(
ctx->format_str,
"same")) {
63 ctx->component->pVtbl->Terminate(
ctx->component);
64 ctx->component->pVtbl->Release(
ctx->component);
68 if (
ctx->master_display)
86 AMFSurface *surface_in;
87 AMFSurface *surface_out;
88 AMFData *data_out =
NULL;
102 res =
ctx->component->pVtbl->SubmitInput(
ctx->component, (AMFData*)surface_in);
103 surface_in->pVtbl->Release(surface_in);
105 res =
ctx->component->pVtbl->QueryOutput(
ctx->component, &data_out);
109 AMFGuid guid = IID_AMFSurface();
110 res = data_out->pVtbl->QueryInterface(data_out, &guid, (
void**)&surface_out);
111 data_out->pVtbl->Release(data_out);
124 if (
ctx->color_profile != AMF_VIDEO_CONVERTER_COLOR_PROFILE_UNKNOWN) {
125 switch(
ctx->color_profile) {
126 case AMF_VIDEO_CONVERTER_COLOR_PROFILE_601:
129 case AMF_VIDEO_CONVERTER_COLOR_PROFILE_709:
132 case AMF_VIDEO_CONVERTER_COLOR_PROFILE_2020:
135 case AMF_VIDEO_CONVERTER_COLOR_PROFILE_JPEG:
142 out->colorspace = out_colorspace;
146 if (
ctx->out_color_range == AMF_COLOR_RANGE_FULL)
148 else if (
ctx->out_color_range == AMF_COLOR_RANGE_STUDIO)
151 if (
ctx->out_color_range != AMF_COLOR_RANGE_UNDEFINED)
152 out->color_range = out_color_range;
154 if (
ctx->out_primaries != AMF_COLOR_PRIMARIES_UNDEFINED)
155 out->color_primaries =
ctx->out_primaries;
157 if (
ctx->out_trc != AMF_COLOR_TRANSFER_CHARACTERISTIC_UNDEFINED)
158 out->color_trc =
ctx->out_trc;
165 if (!
out->hw_frames_ctx) {
193 switch (device_ctx->
type) {
201 output_pix_fmts = output_pix_fmts_d3d11;
212 output_pix_fmts = output_pix_fmts_dxva2;
228 if (!input_formats) {
232 if (!output_formats) {
248 uint8_t *dst_data[4];
253 planes = (int)surface->pVtbl->GetPlanesCount(surface);
257 plane = surface->pVtbl->GetPlaneAt(surface,
i);
258 dst_data[
i] = plane->pVtbl->GetNative(plane);
259 dst_linesize[
i] = plane->pVtbl->GetHPitch(plane);
286 &
ctx->width, &
ctx->height)) < 0)
289 if (
ctx->reset_sar &&
inlink->sample_aspect_ratio.num)
293 ctx->force_original_aspect_ratio,
ctx->force_divisible_by, w_adj);
298 ctx->local_context = 0;
302 av_log(avctx,
AV_LOG_ERROR,
"Format of input frames context (%s) is not supported by AMF.\n",
312 if (!
ctx->hwframes_in_ref)
321 if (!
ctx->hwdevice_ref)
330 out_sw_format = in_sw_format;
332 out_sw_format = outlink->
format;
335 if (!
ctx->hwframes_out_ref)
341 ctx->amf_device_ctx = hwdev_ctx->
hwctx;
347 *in_format = in_sw_format;
349 *in_format =
inlink->format;
351 outlink->
w =
ctx->width;
352 outlink->
h =
ctx->height;
356 else if (
inlink->sample_aspect_ratio.num) {
361 hwframes_out->
width = outlink->
w;
362 hwframes_out->
height = outlink->
h;
377 AMFSurface *surface = (AMFSurface*)
data;
378 surface->pVtbl->Release(surface);
389 if (
ctx->hwframes_out_ref) {
398 frame->data[0] = (uint8_t *)pSurface;
409 switch (pSurface->pVtbl->GetMemoryType(pSurface))
412 case AMF_MEMORY_DX11:
414 AMFPlane *plane0 = pSurface->pVtbl->GetPlaneAt(pSurface, 0);
415 frame->data[0] = plane0->pVtbl->GetNative(plane0);
416 frame->data[1] = (uint8_t*)(intptr_t)0;
429 AMFPlane *plane0 = pSurface->pVtbl->GetPlaneAt(pSurface, 0);
430 frame->data[3] = plane0->pVtbl->GetNative(plane0);
442 av_log(avctx,
AV_LOG_ERROR,
"Unsupported memory type : %d\n", pSurface->pVtbl->GetMemoryType(pSurface));
453 AMFVariantStruct var = { 0 };
459 switch (
frame->format) {
463 static const GUID AMFTextureArrayIndexGUID = { 0x28115527, 0xe7c3, 0x4b66, { 0x99, 0xd3, 0x4f, 0x2a, 0xe6, 0xb4, 0x7f, 0xaf } };
464 ID3D11Texture2D *texture = (ID3D11Texture2D*)
frame->data[0];
466 texture->lpVtbl->SetPrivateData(texture, &AMFTextureArrayIndexGUID,
sizeof(
index), &
index);
468 res =
ctx->amf_device_ctx->context->pVtbl->CreateSurfaceFromDX11Native(
ctx->amf_device_ctx->context, texture, &surface,
NULL);
476 surface = (AMFSurface*)
frame->data[0];
477 surface->pVtbl->Acquire(surface);
485 IDirect3DSurface9 *texture = (IDirect3DSurface9 *)
frame->data[3];
487 res =
ctx->amf_device_ctx->context->pVtbl->CreateSurfaceFromDX9Native(
ctx->amf_device_ctx->context, texture, &surface,
NULL);
496 res =
ctx->amf_device_ctx->context->pVtbl->AllocSurface(
ctx->amf_device_ctx->context, AMF_MEMORY_HOST, amf_fmt,
frame->width,
frame->height, &surface);
511 res = surface->pVtbl->GetProperty(surface, AMF_VIDEO_DECODER_COLOR_TRANSFER_CHARACTERISTIC, &var);
518 AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_DECODER_COLOR_TRANSFER_CHARACTERISTIC,
frame->color_trc);
520 res = surface->pVtbl->GetProperty(surface, AMF_VIDEO_DECODER_COLOR_PRIMARIES, &var);
522 AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_DECODER_COLOR_PRIMARIES,
frame->color_primaries);
524 res = surface->pVtbl->GetProperty(surface, AMF_VIDEO_DECODER_COLOR_RANGE, &var);
526 AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_DECODER_COLOR_RANGE,
frame->color_range);
530 AMF_ASSIGN_PROPERTY_BOOL(res, surface, AMF_VIDEO_DECODER_FULL_RANGE_COLOR, 1);
532 AMF_ASSIGN_PROPERTY_BOOL(res, surface, AMF_VIDEO_DECODER_FULL_RANGE_COLOR, 0);
535 res = surface->pVtbl->GetProperty(surface, AMF_VIDEO_DECODER_COLOR_PROFILE, &var);
539 if (color_profile != AMF_VIDEO_CONVERTER_COLOR_PROFILE_UNKNOWN)
540 AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_DECODER_COLOR_PROFILE, color_profile);
543 if (
ctx->in_trc == AMF_COLOR_TRANSFER_CHARACTERISTIC_SMPTE2084 && (
ctx->master_display ||
ctx->light_meta)) {
544 AMFBuffer *hdrmeta_buffer =
NULL;
545 res =
ctx->amf_device_ctx->context->pVtbl->AllocBuffer(
ctx->amf_device_ctx->context, AMF_MEMORY_HOST,
sizeof(AMFHDRMetadata), &hdrmeta_buffer);
547 AMFHDRMetadata *hdrmeta = (AMFHDRMetadata*)hdrmeta_buffer->pVtbl->GetNative(hdrmeta_buffer);
551 AMF_ASSIGN_PROPERTY_INTERFACE(res, surface, AMF_VIDEO_DECODER_HDR_METADATA, hdrmeta_buffer);
554 res = surface->pVtbl->GetProperty(surface, AMF_VIDEO_DECODER_HDR_METADATA, &var);
555 if (res == AMF_NOT_FOUND) {
556 AMFBuffer *hdrmeta_buffer =
NULL;
557 res =
ctx->amf_device_ctx->context->pVtbl->AllocBuffer(
ctx->amf_device_ctx->context, AMF_MEMORY_HOST,
sizeof(AMFHDRMetadata), &hdrmeta_buffer);
559 AMFHDRMetadata *hdrmeta = (AMFHDRMetadata*)hdrmeta_buffer->pVtbl->GetNative(hdrmeta_buffer);
562 AMF_ASSIGN_PROPERTY_INTERFACE(res, surface, AMF_VIDEO_DECODER_HDR_METADATA, hdrmeta_buffer);
563 hdrmeta_buffer->pVtbl->Release(hdrmeta_buffer);
569 size_t crop_x =
frame->crop_left;
570 size_t crop_y =
frame->crop_top;
572 size_t crop_h =
frame->height - (
frame->crop_top +
frame->crop_bottom);
574 if (crop_x || crop_y) {
575 if (crop_w == outlink->
w && crop_h == outlink->
h) {
576 AMFData *cropped_buffer =
NULL;
577 res = surface->pVtbl->Duplicate(surface, surface->pVtbl->GetMemoryType(surface), &cropped_buffer);
579 surface->pVtbl->Release(surface);
580 surface = (AMFSurface*)cropped_buffer;
583 surface->pVtbl->SetCrop(surface, (amf_int32)crop_x, (amf_int32)crop_y, (amf_int32)crop_w, (amf_int32)crop_h);
586 surface->pVtbl->SetCrop(surface, (amf_int32)crop_x, (amf_int32)crop_y, (amf_int32)crop_w, (amf_int32)crop_h);
588 else if (hw_surface) {
590 surface->pVtbl->SetCrop(surface, 0, 0,
frame->width,
frame->height);
593 surface->pVtbl->SetPts(surface,
frame->pts);
594 *ppSurface = surface;
void * hwctx
The format-specific data, allocated and freed by libavutil along with this context.
AVPixelFormat
Pixel format.
int amf_avframe_to_amfsurface(AVFilterContext *avctx, const AVFrame *frame, AMFSurface **ppSurface)
In a filter, the word “frame” indicates either a video frame or a group of audio samples, as stored in an AVFrame structure. For format negotiation, each input and each output declares the list of supported formats — for video that means pixel formats; for audio that means channel layouts and sample formats. These lists are references to shared objects: when the negotiation mechanism computes the intersection of the formats supported at each end of a link, all references to both lists are replaced with a reference to the intersection, and when a single format is eventually chosen for a link amongst the remaining ones, all references to the list are updated. That means that if a filter requires that its input and output have the same format amongst a supported list, all it has to do is use a reference to the same list of formats in query_formats. query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism to try again later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
int ff_filter_frame(AVFilterLink *link, AVFrame *frame)
Send a frame of data to the next filter.
uint8_t * data
The data buffer.
enum AVPixelFormat format
The pixel format identifying the underlying HW surface type.
The exact code depends on how similar the blocks are and how related they are to the rest of the filter; it needs to apply these operations to the correct inlink or outlink if there are several. Macros are available to factor that out when no extra processing is needed on the inlink.
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
int av_hwframe_ctx_init(AVBufferRef *ref)
Finalize the context before use.
This structure describes decoded (raw) audio or video data.
@ AVCOL_RANGE_JPEG
Full range content.
AVBufferRef * av_hwframe_ctx_alloc(AVBufferRef *device_ref_in)
Allocate an AVHWFramesContext tied to a given device context.
@ AVCOL_SPC_RGB
order of coefficients is actually GBR, also IEC 61966-2-1 (sRGB), YZX and ST 428-1
int ff_scale_eval_dimensions(void *log_ctx, const char *w_expr, const char *h_expr, AVFilterLink *inlink, AVFilterLink *outlink, int *ret_w, int *ret_h)
Parse and evaluate string expressions for width and height.
AVBufferRef * hw_device_ctx
For filters which will create hardware frames, sets the device the filter should create them in.
int amf_setup_input_output_formats(AVFilterContext *avctx, const enum AVPixelFormat *input_pix_fmts, const enum AVPixelFormat *output_pix_fmts)
AVBufferRef * av_buffer_ref(const AVBufferRef *buf)
Create a new reference to an AVBuffer.
#define AMF_RETURN_IF_FALSE(avctx, exp, ret_value,...)
Error handling helper.
#define AVERROR_UNKNOWN
Unknown error, typically from an external library.
int width
The allocated dimensions of the frames in this pool.
A link between two filters.
AVFilterFormatsConfig outcfg
Lists of supported formats / etc.
@ AV_PIX_FMT_AMF_SURFACE
HW acceleration through AMF.
Link properties exposed to filter code, but not external callers.
int av_amf_display_mastering_meta_to_hdrmeta(const AVMasteringDisplayMetadata *display_meta, AMFHDRMetadata *hdrmeta)
void amf_free_amfsurface(void *opaque, uint8_t *data)
void * priv
private data for use by the filter
@ AV_HWDEVICE_TYPE_D3D11VA
enum AMF_SURFACE_FORMAT av_av_to_amf_format(enum AVPixelFormat fmt)
This struct aggregates all the (hardware/vendor-specific) "high-level" state, i.e.
AVFrame * av_frame_alloc(void)
Allocate an AVFrame and set its fields to default values.
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
#define FF_ARRAY_ELEMS(a)
enum AMF_VIDEO_CONVERTER_COLOR_PROFILE_ENUM av_amf_get_color_profile(enum AVColorRange color_range, enum AVColorSpace color_space)
@ AV_PIX_FMT_DXVA2_VLD
HW decoding through DXVA2, Picture.data[3] contains a LPDIRECT3DSURFACE9 pointer.
#define AV_BUFFER_FLAG_READONLY
Always treat the buffer as read-only, even when it has only one reference.
@ AVCOL_SPC_SMPTE170M
also ITU-R BT601-6 525 / ITU-R BT1358 525 / ITU-R BT1700 NTSC / functionally identical to above
AVRational sample_aspect_ratio
agreed upon sample aspect ratio
#define av_assert0(cond)
assert() equivalent, that is always enabled.
static AVFormatContext * ctx
#define AMF_GOTO_FAIL_IF_FALSE(avctx, exp, ret_value,...)
const char * av_hwdevice_get_type_name(enum AVHWDeviceType type)
Get the string name of an AVHWDeviceType.
enum AVPixelFormat sw_format
The pixel format identifying the actual data layout of the hardware frames.
int av_frame_copy_props(AVFrame *dst, const AVFrame *src)
Copy only "metadata" fields from src to dst.
void av_buffer_unref(AVBufferRef **buf)
Free a given reference and automatically free the buffer if there are no more references to it.
Rational number (pair of numerator and denominator).
AVBufferRef * device_ref
A reference to the parent AVHWDeviceContext.
AVFilterLink ** inputs
array of pointers to input links
static const struct @562 planes[]
@ AVCOL_RANGE_UNSPECIFIED
static FilterLink * ff_filter_link(AVFilterLink *link)
AVBufferRef * av_buffer_create(uint8_t *data, size_t size, void(*free)(void *opaque, uint8_t *data), void *opaque, int flags)
Create an AVBuffer from an existing array.
@ AVCOL_TRC_SMPTE2084
SMPTE ST 2084 for 10-, 12-, 14- and 16-bit systems.
#define i(width, name, range_min, range_max)
#define av_err2str(errnum)
Convenience macro; the return value should be used only directly in function arguments but never stand-alone.
int format
agreed upon media format
int av_amf_extract_hdr_metadata(const AVFrame *frame, AMFHDRMetadata *hdrmeta)
AVFilterContext * src
source filter
AVFilterFormatsConfig incfg
Lists of supported formats / etc.
int amf_copy_surface(AVFilterContext *avctx, const AVFrame *frame, AMFSurface *surface)
@ AV_PIX_FMT_D3D11
Hardware surfaces for Direct3D11.
int amf_filter_filter_frame(AVFilterLink *inlink, AVFrame *in)
AVBufferRef * hw_frames_ctx
For hwaccel pixel formats, this should be a reference to the AVHWFramesContext describing the frames.
void amf_filter_uninit(AVFilterContext *avctx)
int av_hwdevice_ctx_create_derived(AVBufferRef **dst_ref_ptr, enum AVHWDeviceType type, AVBufferRef *src_ref, int flags)
Create a new device of the specified type from an existing device.
@ AVCOL_SPC_BT2020_NCL
ITU-R BT2020 non-constant luminance system.
int w
agreed upon image width
AVColorSpace
YUV colorspace type.
void av_frame_unref(AVFrame *frame)
Unreference all the buffers referenced by frame and reset the frame fields.
int amf_filter_init(AVFilterContext *avctx)
@ AVCOL_RANGE_MPEG
Narrow or limited range content.
This struct describes a set or pool of "hardware" frames (i.e.
enum AVHWDeviceType type
This field identifies the underlying API used for hardware access.
These buffered frames must be flushed immediately if a new input produces new output; the filter must not call request_frame to get more — it must just process the frame or queue it. The task of requesting more frames is left to the filter's request_frame method or to the application. If a filter has several inputs, it must be ready for frames arriving randomly on any input; any filter with several inputs will most likely require some kind of queuing mechanism. It is perfectly acceptable to have a limited queue and to drop frames when the inputs are too unbalanced. request_frame: for filters that do not use the activate callback, this method is called when a frame is wanted on an output. For a source, it should directly call filter_frame on the corresponding output. For a filter, if there are queued frames already, one of these frames should be pushed; if the filter should request a frame on one of its inputs, it does so repeatedly until at least one frame has been pushed. Return as soon as a frame is pushed, or at least make progress towards producing a frame.
int av_hwdevice_ctx_create(AVBufferRef **pdevice_ref, enum AVHWDeviceType type, const char *device, AVDictionary *opts, int flags)
Open a device of the specified type and create an AVHWDeviceContext for it.
enum AVPixelFormat av_get_pix_fmt(const char *name)
Return the pixel format corresponding to name.
int h
agreed upon image height
int amf_init_filter_config(AVFilterLink *outlink, enum AVPixelFormat *in_format)
AVRational av_mul_q(AVRational b, AVRational c)
Multiply two rationals.
AVFrame * amf_amfsurface_to_avframe(AVFilterContext *avctx, AMFSurface *pSurface)
int av_amf_light_metadata_to_hdrmeta(const AVContentLightMetadata *light_meta, AMFHDRMetadata *hdrmeta)
void av_image_copy(uint8_t *const dst_data[4], const int dst_linesizes[4], const uint8_t *const src_data[4], const int src_linesizes[4], enum AVPixelFormat pix_fmt, int width, int height)
Copy image in src_data to dst_data.
@ AVCOL_SPC_BT709
also ITU-R BT1361 / IEC 61966-2-4 xvYCC709 / derived in SMPTE RP 177 Annex B
AVColorRange
Visual content value range.
int av_hwframe_get_buffer(AVBufferRef *hwframe_ref, AVFrame *frame, int flags)
Allocate a new frame attached to the given AVHWFramesContext.
int ff_scale_adjust_dimensions(AVFilterLink *inlink, int *ret_w, int *ret_h, int force_original_aspect_ratio, int force_divisible_by, double w_adj)
Transform evaluated width and height obtained from ff_scale_eval_dimensions into actual target width ...
const char * av_get_pix_fmt_name(enum AVPixelFormat pix_fmt)
Return the short name for a pixel format, NULL in case pix_fmt is unknown.
AVFilterLink ** outputs
array of pointers to output links