#include "AMF/components/ColorSpace.h"
/* amf_filter_init(): a format string of "same" keeps the input pixel format */
if (!strcmp(ctx->format_str, "same")) {
/* amf_filter_uninit(): shut down and release the AMF component */
ctx->component->pVtbl->Terminate(ctx->component);
ctx->component->pVtbl->Release(ctx->component);
/* amf_filter_filter_frame(): surfaces exchanged with the AMF component */
AMFSurface *surface_in;
AMFSurface *surface_out;
AMFData *data_out = NULL;
/* hand the input surface to the component, then fetch the processed output */
res = ctx->component->pVtbl->SubmitInput(ctx->component, (AMFData*)surface_in);
surface_in->pVtbl->Release(surface_in);

res = ctx->component->pVtbl->QueryOutput(ctx->component, &data_out);

/* the returned AMFData is narrowed to its AMFSurface interface */
AMFGuid guid = IID_AMFSurface();
data_out->pVtbl->QueryInterface(data_out, &guid, (void**)&surface_out);
data_out->pVtbl->Release(data_out);
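The listing omits the result checks around SubmitInput/QueryOutput. A minimal sketch of that handshake is shown below, assuming the standard AMF C interface (AMF_RESULT, AMF_OK, AMF_REPEAT); the helper name submit_and_poll is purely illustrative and not part of this file.

/* Sketch only: submit one surface and poll until the converter returns its
 * output. AMF_REPEAT from QueryOutput means "no output available yet". */
static AMF_RESULT submit_and_poll(AMFComponent *component, AMFSurface *surface_in,
                                  AMFSurface **surface_out)
{
    AMFData   *data_out = NULL;
    AMF_RESULT res;

    res = component->pVtbl->SubmitInput(component, (AMFData*)surface_in);
    surface_in->pVtbl->Release(surface_in);   /* the component holds its own reference */
    if (res != AMF_OK)
        return res;

    /* a converter produces one output per input, so a simple poll loop suffices */
    do {
        res = component->pVtbl->QueryOutput(component, &data_out);
    } while (res == AMF_REPEAT);

    if (res == AMF_OK && data_out) {
        AMFGuid guid = IID_AMFSurface();
        data_out->pVtbl->QueryInterface(data_out, &guid, (void**)surface_out);
        data_out->pVtbl->Release(data_out);
    }
    return res;
}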
/* map the user-selected AMF color profile onto an output colorspace */
if (ctx->color_profile != AMF_VIDEO_CONVERTER_COLOR_PROFILE_UNKNOWN) {
    switch (ctx->color_profile) {
    case AMF_VIDEO_CONVERTER_COLOR_PROFILE_601:
        ...
    case AMF_VIDEO_CONVERTER_COLOR_PROFILE_709:
        ...
    case AMF_VIDEO_CONVERTER_COLOR_PROFILE_2020:
        ...
    case AMF_VIDEO_CONVERTER_COLOR_PROFILE_JPEG:
        ...
    out->colorspace = out_colorspace;
/* color range, primaries and transfer characteristics are only overridden
   when set explicitly in the filter options */
if (ctx->color_range == AMF_COLOR_RANGE_FULL)
    ...
else if (ctx->color_range == AMF_COLOR_RANGE_STUDIO)
    ...
if (ctx->color_range != AMF_COLOR_RANGE_UNDEFINED)
    out->color_range = out_color_range;

if (ctx->primaries != AMF_COLOR_PRIMARIES_UNDEFINED)
    out->color_primaries = ctx->primaries;

if (ctx->trc != AMF_COLOR_TRANSFER_CHARACTERISTIC_UNDEFINED)
    out->color_trc = ctx->trc;
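The case bodies and the range assignments are elided in the excerpt above. Going only by the AVCOL_SPC_* and AVCOL_RANGE_* values referenced elsewhere on this page, a plausible reconstruction of the mapping looks like the following sketch; the exact bodies in the file may differ.

/* Sketch, not the verbatim file contents. */
enum AVColorSpace out_colorspace  = AVCOL_SPC_UNSPECIFIED;
enum AVColorRange out_color_range = AVCOL_RANGE_UNSPECIFIED;

switch (ctx->color_profile) {
case AMF_VIDEO_CONVERTER_COLOR_PROFILE_601:
    out_colorspace = AVCOL_SPC_SMPTE170M;   /* BT.601 */
    break;
case AMF_VIDEO_CONVERTER_COLOR_PROFILE_709:
    out_colorspace = AVCOL_SPC_BT709;
    break;
case AMF_VIDEO_CONVERTER_COLOR_PROFILE_2020:
    out_colorspace = AVCOL_SPC_BT2020_NCL;
    break;
case AMF_VIDEO_CONVERTER_COLOR_PROFILE_JPEG:
    out_colorspace = AVCOL_SPC_SMPTE170M;   /* assumption: treated as full-range BT.601 */
    break;
default:
    break;
}

if (ctx->color_range == AMF_COLOR_RANGE_FULL)
    out_color_range = AVCOL_RANGE_JPEG;     /* full range */
else if (ctx->color_range == AMF_COLOR_RANGE_STUDIO)
    out_color_range = AVCOL_RANGE_MPEG;     /* limited range */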
/* filter_frame() continued: output frames-context check and sample aspect
   ratio update for the new dimensions */
if (!out->hw_frames_ctx) {
    ...
if (inlink->sample_aspect_ratio.num) {
/* amf_setup_input_output_formats(): pick the output pixel format list that
   matches the hardware device type, then build the per-link format lists */
switch (device_ctx->type) {
    ...
    output_pix_fmts = output_pix_fmts_d3d11;
    ...
    output_pix_fmts = output_pix_fmts_dxva2;
    ...
if (!input_formats) {
    ...
if (!output_formats) {
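A sketch of the device-type dispatch is shown below, mirroring the switch in the excerpt; the contents of the two format arrays are assumptions, and only identifiers referenced on this page (AV_HWDEVICE_TYPE_D3D11VA, AV_PIX_FMT_D3D11, AV_PIX_FMT_DXVA2_VLD, av_hwdevice_get_type_name) plus the standard AV_HWDEVICE_TYPE_DXVA2 are used.

/* Sketch only: choose the advertised output formats per device type. */
static const enum AVPixelFormat output_pix_fmts_d3d11[] = {
    AV_PIX_FMT_D3D11,     AV_PIX_FMT_NONE
};
static const enum AVPixelFormat output_pix_fmts_dxva2[] = {
    AV_PIX_FMT_DXVA2_VLD, AV_PIX_FMT_NONE
};
const enum AVPixelFormat *output_pix_fmts;

switch (device_ctx->type) {
case AV_HWDEVICE_TYPE_D3D11VA:
    output_pix_fmts = output_pix_fmts_d3d11;
    break;
case AV_HWDEVICE_TYPE_DXVA2:
    output_pix_fmts = output_pix_fmts_dxva2;
    break;
default:
    av_log(avctx, AV_LOG_ERROR, "Unsupported device type: %s\n",
           av_hwdevice_get_type_name(device_ctx->type));
    return AVERROR(EINVAL);
}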
/* amf_copy_surface(): collect destination plane pointers and pitches from the
   AMF surface so the frame data can be copied into it */
uint8_t *dst_data[4];
    ...
planes = (int)surface->pVtbl->GetPlanesCount(surface);
    ...
plane           = surface->pVtbl->GetPlaneAt(surface, i);
dst_data[i]     = plane->pVtbl->GetNative(plane);
dst_linesize[i] = plane->pVtbl->GetHPitch(plane);
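After the plane loop, the copy itself presumably uses av_image_copy(), whose prototype is listed at the bottom of this page; a minimal sketch under that assumption:

/* Sketch: copy the software frame into the host-memory AMF surface.
 * dst_data[] / dst_linesize[] were filled from GetNative()/GetHPitch() above. */
av_image_copy(dst_data, dst_linesize,
              (const uint8_t **)frame->data, frame->linesize,
              frame->format, frame->width, frame->height);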
/* amf_init_filter_config(): trailing argument lines of the
   ff_scale_eval_dimensions() / ff_scale_adjust_dimensions() calls that compute
   the target size (the leading lines of both calls are not in this excerpt) */
                             &ctx->width, &ctx->height)) < 0)
    ...
                             ctx->force_original_aspect_ratio, ctx->force_divisible_by);
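Using the prototypes given further down this page, the two calls plausibly look like the following sketch; the option field names w_expr and h_expr are assumptions.

/* Sketch: evaluate the w/h expressions against the input link, then adjust the
 * result for aspect-ratio preservation and divisibility constraints. */
if ((ret = ff_scale_eval_dimensions(avctx, ctx->w_expr, ctx->h_expr,
                                    inlink, outlink,
                                    &ctx->width, &ctx->height)) < 0)
    return ret;

ff_scale_adjust_dimensions(inlink, &ctx->width, &ctx->height,
                           ctx->force_original_aspect_ratio,
                           ctx->force_divisible_by);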
ctx->local_context = 0;
    ...
av_log(avctx, AV_LOG_ERROR,
       "Format of input frames context (%s) is not supported by AMF.\n",
       ...
/* keep references to the input frames context and the AMF device */
if (!ctx->hwframes_in_ref)
    ...
if (!ctx->hwdevice_ref)
    ...

/* "same" output format reuses the input sw_format, otherwise the negotiated
   outlink format is used */
out_sw_format = in_sw_format;
    ...
out_sw_format = outlink->format;
    ...
if (!ctx->hwframes_out_ref)
    ...
ctx->amf_device_ctx = hwdev_ctx->hwctx;
*in_format = in_sw_format;
    ...
*in_format = inlink->format;

/* propagate the evaluated dimensions to the output link and the output
   hardware frames context */
outlink->w = ctx->width;
outlink->h = ctx->height;

hwframes_out->width  = outlink->w;
hwframes_out->height = outlink->h;
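The allocation and initialization of that output frames context is not visible in the excerpt. A minimal sketch of the usual sequence, assuming the av_hwframe_ctx_alloc()/av_hwframe_ctx_init() API listed below and the field names used above:

/* Sketch: create the output AVHWFramesContext from the device reference. */
ctx->hwframes_out_ref = av_hwframe_ctx_alloc(ctx->hwdevice_ref);
if (!ctx->hwframes_out_ref)
    return AVERROR(ENOMEM);

hwframes_out            = (AVHWFramesContext *)ctx->hwframes_out_ref->data;
hwframes_out->format    = outlink->format;   /* negotiated hw pixel format on the output link */
hwframes_out->sw_format = out_sw_format;
hwframes_out->width     = outlink->w;
hwframes_out->height    = outlink->h;

if ((ret = av_hwframe_ctx_init(ctx->hwframes_out_ref)) < 0)
    return ret;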
/* amf_free_amfsurface(): AVBufferRef free callback that drops the AMF reference */
AMFSurface *surface = (AMFSurface*)data;
surface->pVtbl->Release(surface);
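This callback only makes sense together with av_buffer_create(); the sketch below shows how an output surface can be tied to the lifetime of the AVFrame that wraps it, assuming the buffer API listed further down. Whether amf_amfsurface_to_avframe() does exactly this is an assumption.

/* Sketch: keep the AMFSurface alive for as long as the wrapping AVFrame. */
pSurface->pVtbl->Acquire(pSurface);   /* this reference is now owned by the AVBuffer */
frame->buf[0] = av_buffer_create((uint8_t *)pSurface, sizeof(*pSurface),
                                 amf_free_amfsurface, avctx,
                                 AV_BUFFER_FLAG_READONLY);
if (!frame->buf[0]) {
    pSurface->pVtbl->Release(pSurface);
    av_frame_free(&frame);
    return NULL;
}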
/* amf_amfsurface_to_avframe(): wrap an output AMFSurface in an AVFrame; with an
   AMF frames context in use, data[0] carries the surface pointer itself */
if (ctx->hwframes_out_ref) {
    ...
    frame->data[0] = (uint8_t *)pSurface;
/* otherwise expose the native handle that the AMF surface wraps */
switch (pSurface->pVtbl->GetMemoryType(pSurface))
{
case AMF_MEMORY_DX11:
{
    AMFPlane *plane0 = pSurface->pVtbl->GetPlaneAt(pSurface, 0);
    frame->data[0] = plane0->pVtbl->GetNative(plane0);   /* ID3D11Texture2D */
    frame->data[1] = (uint8_t*)(intptr_t)0;              /* texture array index */
    ...
    AMFPlane *plane0 = pSurface->pVtbl->GetPlaneAt(pSurface, 0);
    frame->data[3] = plane0->pVtbl->GetNative(plane0);   /* IDirect3DSurface9 (DXVA2) */
    ...
    av_log(avctx, AV_LOG_ERROR,
           "Unsupported memory type : %d\n", pSurface->pVtbl->GetMemoryType(pSurface));
/* amf_avframe_to_amfsurface(): import the incoming AVFrame as an AMFSurface */
switch (frame->format) {
    ...
/* D3D11 frames: tag the texture with its array slice, then wrap it */
static const GUID AMFTextureArrayIndexGUID = { 0x28115527, 0xe7c3, 0x4b66, { 0x99, 0xd3, 0x4f, 0x2a, 0xe6, 0xb4, 0x7f, 0xaf } };
ID3D11Texture2D *texture = (ID3D11Texture2D*)frame->data[0];
    ...
texture->lpVtbl->SetPrivateData(texture, &AMFTextureArrayIndexGUID, sizeof(index), &index);

res = ctx->amf_device_ctx->context->pVtbl->CreateSurfaceFromDX11Native(ctx->amf_device_ctx->context,
                                                                       texture, &surface, NULL);
    ...
/* frames that already carry an AMF surface only need an extra reference */
surface = (AMFSurface*)frame->data[0];
surface->pVtbl->Acquire(surface);
/* DXVA2 frames carry their IDirect3DSurface9 in data[3] */
IDirect3DSurface9 *texture = (IDirect3DSurface9 *)frame->data[3];
    ...
res = ctx->amf_device_ctx->context->pVtbl->CreateSurfaceFromDX9Native(ctx->amf_device_ctx->context,
                                                                      texture, &surface, NULL);
    ...
/* software frames: allocate a host-memory AMF surface to copy into */
res = ctx->amf_device_ctx->context->pVtbl->AllocSurface(ctx->amf_device_ctx->context,
                                                        AMF_MEMORY_HOST, amf_fmt,
                                                        frame->width, frame->height, &surface);
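For that host-memory path the frame data still has to land in the freshly allocated surface, presumably via amf_copy_surface() (declared below); a short sketch under that assumption:

/* Sketch: after AllocSurface() succeeds, copy the software frame's pixels
 * into the host surface. */
if (res == AMF_OK)
    ret = amf_copy_surface(avctx, frame, surface);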
/* honor the AVFrame cropping metadata */
size_t crop_x = frame->crop_left;
size_t crop_y = frame->crop_top;
    ...
size_t crop_h = frame->height - (frame->crop_top + frame->crop_bottom);
if (crop_x || crop_y) {
    if (crop_w == outlink->w && crop_h == outlink->h) {
        /* duplicate the surface so the crop window can be set without
           modifying the caller's surface */
        AMFData *cropped_buffer = NULL;
        res = surface->pVtbl->Duplicate(surface, surface->pVtbl->GetMemoryType(surface), &cropped_buffer);
        ...
        surface->pVtbl->Release(surface);
        surface = (AMFSurface*)cropped_buffer;
        ...
        surface->pVtbl->SetCrop(surface, (amf_int32)crop_x, (amf_int32)crop_y, (amf_int32)crop_w, (amf_int32)crop_h);
    ...
        surface->pVtbl->SetCrop(surface, (amf_int32)crop_x, (amf_int32)crop_y, (amf_int32)crop_w, (amf_int32)crop_h);
else if (hw_surface) {
    /* no crop requested: reset any crop rectangle on the hardware surface */
    surface->pVtbl->SetCrop(surface, 0, 0, frame->width, frame->height);
    ...
surface->pVtbl->SetPts(surface, frame->pts);
*ppSurface = surface;
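Taken together, the helpers documented on this page implement a fairly standard hardware filter_frame path. The following condensed sketch shows how they plausibly combine inside amf_filter_filter_frame(); the private context type name (AMFFilterContext) and the exact error handling are assumptions.

/* Condensed sketch, not the verbatim function. */
static int filter_frame_sketch(AVFilterLink *inlink, AVFrame *in)
{
    AVFilterContext  *avctx   = inlink->dst;
    AVFilterLink     *outlink = avctx->outputs[0];
    AMFFilterContext *ctx     = avctx->priv;     /* private struct name assumed */
    AMFSurface *surface_in  = NULL;
    AMFSurface *surface_out = NULL;
    AMFData    *data_out    = NULL;
    AMFGuid     guid        = IID_AMFSurface();
    AVFrame    *out         = NULL;
    AMF_RESULT  res;
    int ret;

    /* 1. wrap or upload the incoming frame as an AMF surface (also sets the PTS) */
    if ((ret = amf_avframe_to_amfsurface(avctx, in, &surface_in)) < 0)
        goto fail;

    /* 2. run it through the AMF component */
    res = ctx->component->pVtbl->SubmitInput(ctx->component, (AMFData*)surface_in);
    surface_in->pVtbl->Release(surface_in);
    if (res != AMF_OK) {
        ret = AVERROR_UNKNOWN;
        goto fail;
    }
    do {
        res = ctx->component->pVtbl->QueryOutput(ctx->component, &data_out);
    } while (res == AMF_REPEAT);
    data_out->pVtbl->QueryInterface(data_out, &guid, (void**)&surface_out);
    data_out->pVtbl->Release(data_out);

    /* 3. hand the processed surface to the next filter as an AVFrame */
    out = amf_amfsurface_to_avframe(avctx, surface_out);
    if (!out) {
        ret = AVERROR(ENOMEM);
        goto fail;
    }
    av_frame_copy_props(out, in);
    av_frame_free(&in);
    return ff_filter_frame(outlink, out);

fail:
    av_frame_free(&in);
    return ret;
}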
void * hwctx
The format-specific data, allocated and freed by libavutil along with this context.
AVPixelFormat
Pixel format.
int amf_avframe_to_amfsurface(AVFilterContext *avctx, const AVFrame *frame, AMFSurface **ppSurface)
In filter documentation the word "frame" indicates either a video frame or a group of audio samples, as stored in an AVFrame structure. Format negotiation: for each input and each output, a filter lists the formats it supports (pixel formats for video; channel layouts and sample formats for audio) as references to shared lists. When the negotiation mechanism computes the intersection of the formats supported at each end of a link, all references to both lists are replaced with a reference to the intersection, and when a single format is eventually chosen for a link among the remaining ones, all references to the list are updated. This means that a filter requiring its input and output to share a format among a supported list only has to use a reference to the same list of formats. query_formats can leave some formats unset and return AVERROR(EAGAIN) to make the negotiation mechanism try again later; filters with complex requirements can use this to derive the formats supported on one link from the format negotiated on another. Frame references, ownership and permissions are documented separately.
int ff_filter_frame(AVFilterLink *link, AVFrame *frame)
Send a frame of data to the next filter.
uint8_t * data
The data buffer.
enum AVPixelFormat format
The pixel format identifying the underlying HW surface type.
The exact code depends on how similar the blocks are and how related they are to the semantics of filter_frame, and it needs to apply these operations to the correct inlink or outlink when there are several. Macros are available to factor that out when no extra processing is needed on the inlink.
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
int av_hwframe_ctx_init(AVBufferRef *ref)
Finalize the context before use.
This structure describes decoded (raw) audio or video data.
@ AVCOL_RANGE_JPEG
Full range content.
AVBufferRef * av_hwframe_ctx_alloc(AVBufferRef *device_ref_in)
Allocate an AVHWFramesContext tied to a given device context.
@ AVCOL_SPC_RGB
order of coefficients is actually GBR, also IEC 61966-2-1 (sRGB), YZX and ST 428-1
int ff_scale_eval_dimensions(void *log_ctx, const char *w_expr, const char *h_expr, AVFilterLink *inlink, AVFilterLink *outlink, int *ret_w, int *ret_h)
Parse and evaluate string expressions for width and height.
AVBufferRef * hw_device_ctx
For filters which will create hardware frames, sets the device the filter should create them in.
int amf_setup_input_output_formats(AVFilterContext *avctx, const enum AVPixelFormat *input_pix_fmts, const enum AVPixelFormat *output_pix_fmts)
AVBufferRef * av_buffer_ref(const AVBufferRef *buf)
Create a new reference to an AVBuffer.
#define AMF_RETURN_IF_FALSE(avctx, exp, ret_value,...)
Error handling helper.
#define AVERROR_UNKNOWN
Unknown error, typically from an external library.
int width
The allocated dimensions of the frames in this pool.
A link between two filters.
AVFilterFormatsConfig outcfg
Lists of supported formats / etc.
@ AV_PIX_FMT_AMF_SURFACE
HW acceleration through AMF.
Link properties exposed to filter code, but not external callers.
void amf_free_amfsurface(void *opaque, uint8_t *data)
void * priv
private data for use by the filter
@ AV_HWDEVICE_TYPE_D3D11VA
enum AMF_SURFACE_FORMAT av_av_to_amf_format(enum AVPixelFormat fmt)
This struct aggregates all the (hardware/vendor-specific) "high-level" state, i.e. state that is not tied to a concrete processing configuration.
AVFrame * av_frame_alloc(void)
Allocate an AVFrame and set its fields to default values.
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
#define FF_ARRAY_ELEMS(a)
@ AV_PIX_FMT_DXVA2_VLD
HW decoding through DXVA2, Picture.data[3] contains a LPDIRECT3DSURFACE9 pointer.
#define AV_BUFFER_FLAG_READONLY
Always treat the buffer as read-only, even when it has only one reference.
@ AVCOL_SPC_SMPTE170M
also ITU-R BT601-6 525 / ITU-R BT1358 525 / ITU-R BT1700 NTSC / functionally identical to above
AVRational sample_aspect_ratio
agreed upon sample aspect ratio
#define av_assert0(cond)
assert() equivalent, that is always enabled.
#define AMF_GOTO_FAIL_IF_FALSE(avctx, exp, ret_value,...)
const char * av_hwdevice_get_type_name(enum AVHWDeviceType type)
Get the string name of an AVHWDeviceType.
enum AVPixelFormat sw_format
The pixel format identifying the actual data layout of the hardware frames.
int av_frame_copy_props(AVFrame *dst, const AVFrame *src)
Copy only "metadata" fields from src to dst.
void av_buffer_unref(AVBufferRef **buf)
Free a given reference and automatically free the buffer if there are no more references to it.
Rational number (pair of numerator and denominator).
AVBufferRef * device_ref
A reference to the parent AVHWDeviceContext.
AVFilterLink ** inputs
array of pointers to input links
@ AVCOL_RANGE_UNSPECIFIED
static FilterLink * ff_filter_link(AVFilterLink *link)
AVBufferRef * av_buffer_create(uint8_t *data, size_t size, void(*free)(void *opaque, uint8_t *data), void *opaque, int flags)
Create an AVBuffer from an existing array.
#define av_err2str(errnum)
Convenience macro, the return value should be used only directly in function arguments but never stand-alone.
int format
agreed upon media format
AVFilterContext * src
source filter
AVFilterFormatsConfig incfg
Lists of supported formats / etc.
int amf_copy_surface(AVFilterContext *avctx, const AVFrame *frame, AMFSurface *surface)
@ AV_PIX_FMT_D3D11
Hardware surfaces for Direct3D11.
int amf_filter_filter_frame(AVFilterLink *inlink, AVFrame *in)
AVBufferRef * hw_frames_ctx
For hwaccel pixel formats, this should be a reference to the AVHWFramesContext describing the frames.
void amf_filter_uninit(AVFilterContext *avctx)
int av_hwdevice_ctx_create_derived(AVBufferRef **dst_ref_ptr, enum AVHWDeviceType type, AVBufferRef *src_ref, int flags)
Create a new device of the specified type from an existing device.
@ AVCOL_SPC_BT2020_NCL
ITU-R BT2020 non-constant luminance system.
int w
agreed upon image width
AVColorSpace
YUV colorspace type.
void av_frame_unref(AVFrame *frame)
Unreference all the buffers referenced by frame and reset the frame fields.
int amf_filter_init(AVFilterContext *avctx)
@ AVCOL_RANGE_MPEG
Narrow or limited range content.
This struct describes a set or pool of "hardware" frames (i.e. those with data not located in normal system memory).
enum AVHWDeviceType type
This field identifies the underlying API used for hardware access.
Buffered frames must be flushed immediately if a new input produces new ones; the filter must not call request_frame to get more, it must just process the frame or queue it. The task of requesting more frames is left to the filter's request_frame method or to the application. If a filter has several inputs, it must be ready for frames arriving randomly on any of them; any filter with several inputs will most likely require some kind of queuing mechanism, and it is perfectly acceptable to have a limited queue and to drop frames when the inputs are too unbalanced. request_frame: for filters that do not use the activate callback, this method is called when a frame is wanted on an output. For a source, it should directly call filter_frame on the corresponding output. For a filter, if there are queued frames already, one of them should be pushed; otherwise the filter should request a frame on one of its inputs, repeatedly until at least one frame has been pushed, or at least make progress towards producing a frame.
int av_hwdevice_ctx_create(AVBufferRef **pdevice_ref, enum AVHWDeviceType type, const char *device, AVDictionary *opts, int flags)
Open a device of the specified type and create an AVHWDeviceContext for it.
int ff_scale_adjust_dimensions(AVFilterLink *inlink, int *ret_w, int *ret_h, int force_original_aspect_ratio, int force_divisible_by)
Transform evaluated width and height obtained from ff_scale_eval_dimensions into actual target width ...
enum AVPixelFormat av_get_pix_fmt(const char *name)
Return the pixel format corresponding to name.
static const struct { ... } planes[]
int h
agreed upon image height
int amf_init_filter_config(AVFilterLink *outlink, enum AVPixelFormat *in_format)
AVRational av_mul_q(AVRational b, AVRational c)
Multiply two rationals.
AVFrame * amf_amfsurface_to_avframe(AVFilterContext *avctx, AMFSurface *pSurface)
void av_image_copy(uint8_t *const dst_data[4], const int dst_linesizes[4], const uint8_t *const src_data[4], const int src_linesizes[4], enum AVPixelFormat pix_fmt, int width, int height)
Copy image in src_data to dst_data.
@ AVCOL_SPC_BT709
also ITU-R BT1361 / IEC 61966-2-4 xvYCC709 / derived in SMPTE RP 177 Annex B
AVColorRange
Visual content value range.
int av_hwframe_get_buffer(AVBufferRef *hwframe_ref, AVFrame *frame, int flags)
Allocate a new frame attached to the given AVHWFramesContext.
const char * av_get_pix_fmt_name(enum AVPixelFormat pix_fmt)
Return the short name for a pixel format, NULL in case pix_fmt is unknown.
AVFilterLink ** outputs
array of pointers to output links