#include "config_components.h"

#define AMF_AV_FRAME_REF L"av_frame_ref"
#define PTS_PROP L"PtsProp"
/* amf_save_hdr_metadata(): convert FFmpeg mastering-display and content-light
 * side data into AMF's fixed-point AMFHDRMetadata representation. */
const unsigned int luma_den = 10000;
hdrmeta->maxMasteringLuminance =
    (amf_uint32)(luma_den * av_q2d(display_meta->max_luminance));
hdrmeta->minMasteringLuminance =
    FFMIN((amf_uint32)(luma_den * av_q2d(display_meta->min_luminance)), hdrmeta->maxMasteringLuminance);

const unsigned int chroma_den = 50000;
hdrmeta->redPrimary[0] =
    FFMIN((amf_uint16)(chroma_den * av_q2d(display_meta->display_primaries[0][0])), chroma_den);
hdrmeta->redPrimary[1] =
    FFMIN((amf_uint16)(chroma_den * av_q2d(display_meta->display_primaries[0][1])), chroma_den);
hdrmeta->greenPrimary[0] =
    FFMIN((amf_uint16)(chroma_den * av_q2d(display_meta->display_primaries[1][0])), chroma_den);
hdrmeta->greenPrimary[1] =
    FFMIN((amf_uint16)(chroma_den * av_q2d(display_meta->display_primaries[1][1])), chroma_den);
hdrmeta->bluePrimary[0] =
    FFMIN((amf_uint16)(chroma_den * av_q2d(display_meta->display_primaries[2][0])), chroma_den);
hdrmeta->bluePrimary[1] =
    FFMIN((amf_uint16)(chroma_den * av_q2d(display_meta->display_primaries[2][1])), chroma_den);
hdrmeta->whitePoint[0] =
    FFMIN((amf_uint16)(chroma_den * av_q2d(display_meta->white_point[0])), chroma_den);
hdrmeta->whitePoint[1] =
    FFMIN((amf_uint16)(chroma_den * av_q2d(display_meta->white_point[1])), chroma_den);

hdrmeta->maxContentLightLevel = (amf_uint16)light_meta->MaxCLL;
hdrmeta->maxFrameAverageLightLevel = (amf_uint16)light_meta->MaxFALL;
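/*
 * Illustration (not part of the original file): the scaling above follows the
 * CTA-861.3 / SMPTE ST 2086 conventions -- luminance in 0.0001 cd/m^2 units
 * (luma_den = 10000) and chromaticity in 0.00002 units (chroma_den = 50000).
 * A minimal stand-alone sketch; the input values are made-up examples.
 */
#include <stdint.h>
#include <stdio.h>

int main(void)
{
    const unsigned int luma_den   = 10000;
    const unsigned int chroma_den = 50000;

    double max_luminance = 1000.0; /* cd/m^2, as from av_q2d(max_luminance) */
    double red_x         = 0.708;  /* BT.2020 red primary x coordinate */

    uint32_t max_mastering = (uint32_t)(luma_den * max_luminance);
    uint16_t red_primary_x = (uint16_t)(chroma_den * red_x);

    printf("maxMasteringLuminance = %u\n", max_mastering); /* prints 10000000 */
    /* Note the cast truncates, exactly as in the code above; the real code
     * additionally clamps to chroma_den with FFMIN. */
    printf("redPrimary[0]         = %u\n", red_primary_x);
    return 0;
}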
#define FFMPEG_AMF_WRITER_ID L"ffmpeg_amf"
/* amf_init_encoder(): duplicate wide-string property names per encoder instance. */
if (!ctx->pts_property_name)
    return AVERROR(ENOMEM);

ctx->av_frame_property_name = av_memdup(name, alloc_size * sizeof(wchar_t));
if (!ctx->av_frame_property_name)
    return AVERROR(ENOMEM);
AMF_RETURN_IF_FALSE(ctx, amf_device_ctx->version >= AMF_MAKE_FULL_VERSION(1, 4, 32, 0),
                    AVERROR_UNKNOWN,
                    "10-bit encoder is not supported by AMD GPU driver versions lower than 23.30.\n");
res = amf_device_ctx->factory->pVtbl->CreateComponent(amf_device_ctx->factory, amf_device_ctx->context,
                                                      codec_id, &ctx->encoder);

ctx->submitted_frame = 0;
ctx->encoded_frame = 0;
/* ff_amf_encode_close(): tear down the encoder component. */
ctx->encoder->pVtbl->Terminate(ctx->encoder);
ctx->encoder->pVtbl->Release(ctx->encoder);
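/*
 * Sketch (not verbatim from the file): the teardown idiom for COM-style AMF
 * objects -- guard against a failed init, and clear the pointer after Release
 * so a repeated close is harmless.
 */
if (ctx->encoder) {
    ctx->encoder->pVtbl->Terminate(ctx->encoder);
    ctx->encoder->pVtbl->Release(ctx->encoder);
    ctx->encoder = NULL;
}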
if (ctx->output_list) {
/* amf_copy_surface(): gather destination plane pointers and pitches from the
 * AMF surface so the AVFrame planes can be copied into it. */
uint8_t *dst_data[4] = {0};
int dst_linesize[4] = {0};

planes = (int)surface->pVtbl->GetPlanesCount(surface);

plane = surface->pVtbl->GetPlaneAt(surface, i);
dst_data[i] = plane->pVtbl->GetNative(plane);
dst_linesize[i] = plane->pVtbl->GetHPitch(plane);
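/*
 * Sketch in the spirit of amf_copy_surface(), assembled from the calls visible
 * in this listing: map each AMF plane, then let av_image_copy2() do the
 * per-plane copy. Simplified -- the real function also validates plane counts
 * and return codes.
 */
static int copy_frame_to_surface(AVCodecContext *avctx, const AVFrame *frame,
                                 AMFSurface *surface)
{
    uint8_t  *dst_data[4]     = {0};
    int       dst_linesize[4] = {0};
    int       planes, i;
    AMFPlane *plane;

    planes = (int)surface->pVtbl->GetPlanesCount(surface);
    av_assert0(planes <= FF_ARRAY_ELEMS(dst_data));

    for (i = 0; i < planes; i++) {
        plane           = surface->pVtbl->GetPlaneAt(surface, i);
        dst_data[i]     = plane->pVtbl->GetNative(plane);
        dst_linesize[i] = plane->pVtbl->GetHPitch(plane);
    }

    av_image_copy2(dst_data, dst_linesize, frame->data, frame->linesize,
                   avctx->pix_fmt, avctx->width, avctx->height);
    return 0;
}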
AMFVariantStruct var = {0};

/* amf_copy_buffer(): flag keyframes, per codec. */
buffer->pVtbl->GetProperty(buffer, AMF_VIDEO_ENCODER_OUTPUT_DATA_TYPE, &var);        /* H.264 */
if (var.int64Value == AMF_VIDEO_ENCODER_OUTPUT_DATA_TYPE_IDR) {

buffer->pVtbl->GetProperty(buffer, AMF_VIDEO_ENCODER_HEVC_OUTPUT_DATA_TYPE, &var);   /* HEVC */
if (var.int64Value == AMF_VIDEO_ENCODER_HEVC_OUTPUT_DATA_TYPE_IDR) {

buffer->pVtbl->GetProperty(buffer, AMF_VIDEO_ENCODER_AV1_OUTPUT_FRAME_TYPE, &var);   /* AV1 */
if (var.int64Value == AMF_VIDEO_ENCODER_AV1_OUTPUT_FRAME_TYPE_KEY) {

pkt->pts = var.int64Value;
/* amf_copy_buffer(): derive the DTS delay once B-frames (or the adaptive
 * mini-GOP mode) can reorder output. */
if ((ctx->max_b_frames > 0 || ctx->pa_adaptive_mini_gop == 1) && ctx->dts_delay == 0) {

    "timestamp_list is empty while max_b_frames = %d\n", avctx->max_b_frames);

ctx->dts_delay = timestamp_last - timestamp;
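/*
 * Stand-alone illustration (made-up numbers): with B-frames the first packet
 * appears only after several inputs are queued, and dts_delay is the span
 * between the newest and oldest timestamps still queued at that moment. Each
 * packet then gets dts = pts - dts_delay so that dts <= pts always holds. A
 * plain array stands in for the AVFifo timestamp_list.
 */
#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

int main(void)
{
    int64_t timestamp_list[] = {0, 1, 2};   /* queued input pts, 2 B-frames deep */
    size_t  n = sizeof(timestamp_list) / sizeof(timestamp_list[0]);

    int64_t timestamp      = timestamp_list[0];     /* oldest queued pts */
    int64_t timestamp_last = timestamp_list[n - 1]; /* newest queued pts */
    int64_t dts_delay      = timestamp_last - timestamp;

    printf("dts_delay = %" PRId64 "\n", dts_delay); /* prints 2 */
    return 0;
}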
/* ff_amf_encode_init(): allocate bookkeeping lists. */
if (!ctx->timestamp_list) {

if (!ctx->output_list)

ctx->hwsurfaces_in_queue = 0;
/* amf_set_property_buffer(): wrap an AMFBuffer in a variant (via its generic
 * AMFInterface) and attach it to an object as a property. */
AMFVariantStruct var;
res = AMFVariantInit(&var);

AMFGuid guid_AMFInterface = IID_AMFInterface();
AMFInterface *amf_interface;
res = val->pVtbl->QueryInterface(val, &guid_AMFInterface, (void**)&amf_interface);

res = AMFVariantAssignInterface(&var, amf_interface);
amf_interface->pVtbl->Release(amf_interface);

res = object->pVtbl->SetProperty(object, name, var);

AMFVariantClear(&var);
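/*
 * Hypothetical call site for amf_set_property_buffer() (the property name here
 * is invented for the sketch): attach a metadata buffer to a surface and warn
 * on failure.
 */
AMF_RESULT prop_res = amf_set_property_buffer(surface, L"SketchMetadata", hdrmeta_buffer);
if (prop_res != AMF_OK)
    av_log(avctx, AV_LOG_WARNING, "Attaching the metadata buffer failed\n");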
/* amf_store_attached_frame_ref(): stash the AVFrame reference on the surface. */
AMF_RESULT res = AMF_FAIL;

AMF_ASSIGN_PROPERTY_INT64(res, surface, ctx->av_frame_property_name, data);
/* amf_release_attached_frame_ref(): retrieve and drop the stored reference. */
AMFVariantStruct var = {0};
AMF_RESULT res = buffer->pVtbl->GetProperty(buffer, ctx->av_frame_property_name, &var);
if (res == AMF_OK && var.int64Value) {
/* amf_submit_frame(): wrap the incoming AVFrame in an AMFSurface, per pixel format. */
switch (frame->format) {

/* AV_PIX_FMT_D3D11: frame->data[0] is a texture, tagged with an array slice index. */
static const GUID AMFTextureArrayIndexGUID = { 0x28115527, 0xe7c3, 0x4b66, { 0x99, 0xd3, 0x4f, 0x2a, 0xe6, 0xb4, 0x7f, 0xaf } };
ID3D11Texture2D *texture = (ID3D11Texture2D*)frame->data[0];

texture->lpVtbl->SetPrivateData(texture, &AMFTextureArrayIndexGUID, sizeof(index), &index);
res = amf_device_ctx->context->pVtbl->CreateSurfaceFromDX11Native(amf_device_ctx->context, texture, &surface, NULL);

/* AV_PIX_FMT_DXVA2_VLD: frame->data[3] is an IDirect3DSurface9. */
IDirect3DSurface9 *texture = (IDirect3DSurface9 *)frame->data[3];
res = amf_device_ctx->context->pVtbl->CreateSurfaceFromDX9Native(amf_device_ctx->context, texture, &surface, NULL);

/* AV_PIX_FMT_AMF_SURFACE: the frame already carries an AMFSurface. */
surface = (AMFSurface*)frame->data[0];
surface->pVtbl->Acquire(surface);

/* Software frames: allocate a host surface and copy into it. */
res = amf_device_ctx->context->pVtbl->AllocSurface(amf_device_ctx->context, AMF_MEMORY_HOST,
                                                   ctx->format, avctx->width, avctx->height, &surface);
ctx->hwsurfaces_in_queue++;

surface->pVtbl->SetCrop(surface, 0, 0, frame->width, frame->height);
/* Attach HDR metadata to the encoder, using the property that matches the codec. */
AMFBuffer *hdrmeta_buffer = NULL;
res = amf_device_ctx->context->pVtbl->AllocBuffer(amf_device_ctx->context, AMF_MEMORY_HOST,
                                                  sizeof(AMFHDRMetadata), &hdrmeta_buffer);

AMFHDRMetadata *hdrmeta = (AMFHDRMetadata*)hdrmeta_buffer->pVtbl->GetNative(hdrmeta_buffer);

AMF_ASSIGN_PROPERTY_INTERFACE(res, ctx->encoder, AMF_VIDEO_ENCODER_INPUT_HDR_METADATA, hdrmeta_buffer);       /* H.264 */
break;
AMF_ASSIGN_PROPERTY_INTERFACE(res, ctx->encoder, AMF_VIDEO_ENCODER_HEVC_INPUT_HDR_METADATA, hdrmeta_buffer);  /* HEVC */
break;
AMF_ASSIGN_PROPERTY_INTERFACE(res, ctx->encoder, AMF_VIDEO_ENCODER_AV1_INPUT_HDR_METADATA, hdrmeta_buffer);   /* AV1 */
break;

hdrmeta_buffer->pVtbl->Release(hdrmeta_buffer);
surface->pVtbl->SetPts(surface, frame->pts);

AMF_ASSIGN_PROPERTY_INT64(res, surface, ctx->pts_property_name, frame->pts);
/* H.264: AUD insertion and forced picture types. */
AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_INSERT_AUD, !!ctx->aud);
switch (frame->pict_type) {

if (ctx->forced_idr) {
    AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_INSERT_SPS, 1);
    AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_INSERT_PPS, 1);
    AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_FORCE_PICTURE_TYPE, AMF_VIDEO_ENCODER_PICTURE_TYPE_IDR);

AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_FORCE_PICTURE_TYPE, AMF_VIDEO_ENCODER_PICTURE_TYPE_I);

AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_FORCE_PICTURE_TYPE, AMF_VIDEO_ENCODER_PICTURE_TYPE_P);

AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_FORCE_PICTURE_TYPE, AMF_VIDEO_ENCODER_PICTURE_TYPE_B);
/* HEVC: same idea, with a single header-insertion flag instead of SPS/PPS. */
AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_HEVC_INSERT_AUD, !!ctx->aud);
switch (frame->pict_type) {

if (ctx->forced_idr) {
    AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_HEVC_INSERT_HEADER, 1);
    AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_HEVC_FORCE_PICTURE_TYPE, AMF_VIDEO_ENCODER_HEVC_PICTURE_TYPE_IDR);

AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_HEVC_FORCE_PICTURE_TYPE, AMF_VIDEO_ENCODER_HEVC_PICTURE_TYPE_I);

AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_HEVC_FORCE_PICTURE_TYPE, AMF_VIDEO_ENCODER_HEVC_PICTURE_TYPE_P);
/* AV1: force a key frame (with sequence header) or an intra-only frame. */
if (ctx->forced_idr) {
    AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_AV1_FORCE_INSERT_SEQUENCE_HEADER, 1);
    AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_AV1_FORCE_FRAME_TYPE, AMF_VIDEO_ENCODER_AV1_FORCE_FRAME_TYPE_KEY);

AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_AV1_FORCE_FRAME_TYPE, AMF_VIDEO_ENCODER_AV1_FORCE_FRAME_TYPE_INTRA_ONLY);
/* Submit; on AMF_INPUT_FULL hand the surface back to the caller for resubmission. */
res = ctx->encoder->pVtbl->SubmitInput(ctx->encoder, (AMFData*)surface);
if (res == AMF_INPUT_FULL) {
    *surface_resubmit = surface;

surface->pVtbl->Release(surface);

ctx->submitted_frame++;
if (ctx->submitted_frame <= ctx->encoded_frame + output_delay)

/* amf_query_output(): pull one AMFBuffer from the encoder. */
AMF_RESULT ret = ctx->encoder->pVtbl->QueryOutput(ctx->encoder, &data);

AMFGuid guid = IID_AMFBuffer();

ctx->hwsurfaces_in_queue--;
ctx->encoded_frame++;
/* ff_amf_receive_packet(): common receive path for all AMF encoders. */
AMFSurface *surface = NULL;

AMF_RESULT res_query;

if (ctx->submitted_frame <= ctx->encoded_frame + output_delay)

/* A flush request (empty input frame) starts draining the encoder. */
if (!frame->buf[0]) {
    if (!ctx->delayed_drain) {
        res = ctx->encoder->pVtbl->Drain(ctx->encoder);
        if (res == AMF_INPUT_FULL) {
            ctx->delayed_drain = 1;
/* Retry a drain that was previously rejected with AMF_INPUT_FULL. */
if (ctx->delayed_drain) {
    res = ctx->encoder->pVtbl->Drain(ctx->encoder);
    if (res != AMF_INPUT_FULL) {
        ctx->delayed_drain = 0;

av_log(avctx, AV_LOG_WARNING,
       "Data acquired but delayed drain submission got AMF_INPUT_FULL - should not happen\n");
} else if (ctx->delayed_drain || (ctx->eof && res_query != AMF_EOF) ||
           (ctx->hwsurfaces_in_queue >= ctx->hwsurfaces_in_queue_max) || surface) {

    /* Nothing came out yet but progress is required: keep polling. */
    if (!ctx->query_timeout_supported || avpkt->data || avpkt->buf) {

} while (block_and_wait);
if (res_query == AMF_EOF) {

/* Resubmit the surface that was rejected with AMF_INPUT_FULL earlier. */
res = ctx->encoder->pVtbl->SubmitInput(ctx->encoder, (AMFData*)surface);
if (res != AMF_INPUT_FULL)

if (!ctx->query_timeout_supported)

} while (res == AMF_INPUT_FULL);

surface->pVtbl->Release(surface);
if (res == AMF_INPUT_FULL) {
    av_log(avctx, AV_LOG_WARNING,
           "Data acquired but delayed SubmitInput returned AMF_INPUT_FULL - should not happen\n");

ctx->submitted_frame++;
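/*
 * Condensed sketch of the ff_amf_receive_packet() control flow, pieced together
 * from the fragments above -- simplified, with error handling trimmed; not the
 * verbatim FFmpeg implementation.
 */
do {
    block_and_wait = 0;
    res_query = amf_query_output(avctx, &buffer);      /* poll the encoder */
    if (buffer) {
        amf_copy_buffer(avctx, avpkt, buffer);         /* a packet came out */
        buffer->pVtbl->Release(buffer);
        buffer = NULL;
    } else if (ctx->delayed_drain || (ctx->eof && res_query != AMF_EOF) ||
               (ctx->hwsurfaces_in_queue >= ctx->hwsurfaces_in_queue_max) ||
               surface) {
        block_and_wait = 1;                            /* must make progress */
        if (!ctx->query_timeout_supported)
            av_usleep(1000);                           /* avoid busy-waiting */
    }
} while (block_and_wait);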
/* ff_amf_get_color_profile(): map the FFmpeg colorspace/range to an AMF profile. */
amf_int64 color_profile = AMF_VIDEO_CONVERTER_COLOR_PROFILE_UNKNOWN;

/* Full (JPEG) range: */
color_profile = AMF_VIDEO_CONVERTER_COLOR_PROFILE_FULL_601;
color_profile = AMF_VIDEO_CONVERTER_COLOR_PROFILE_FULL_709;
color_profile = AMF_VIDEO_CONVERTER_COLOR_PROFILE_FULL_2020;

/* Limited (MPEG) range: */
color_profile = AMF_VIDEO_CONVERTER_COLOR_PROFILE_601;
color_profile = AMF_VIDEO_CONVERTER_COLOR_PROFILE_709;
color_profile = AMF_VIDEO_CONVERTER_COLOR_PROFILE_2020;

return color_profile;
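/*
 * Reconstruction sketch of the mapping above with the switch structure made
 * explicit; the branch layout is inferred from the fragments, not copied
 * verbatim.
 */
static int get_color_profile_sketch(AVCodecContext *avctx)
{
    amf_int64 color_profile = AMF_VIDEO_CONVERTER_COLOR_PROFILE_UNKNOWN;

    if (avctx->color_range == AVCOL_RANGE_JPEG) {
        switch (avctx->colorspace) {                    /* full range */
        case AVCOL_SPC_SMPTE170M:
            color_profile = AMF_VIDEO_CONVERTER_COLOR_PROFILE_FULL_601;
            break;
        case AVCOL_SPC_BT709:
            color_profile = AMF_VIDEO_CONVERTER_COLOR_PROFILE_FULL_709;
            break;
        case AVCOL_SPC_BT2020_NCL:
        case AVCOL_SPC_BT2020_CL:
            color_profile = AMF_VIDEO_CONVERTER_COLOR_PROFILE_FULL_2020;
            break;
        }
    } else {
        switch (avctx->colorspace) {                    /* limited range */
        case AVCOL_SPC_SMPTE170M:
            color_profile = AMF_VIDEO_CONVERTER_COLOR_PROFILE_601;
            break;
        case AVCOL_SPC_BT709:
            color_profile = AMF_VIDEO_CONVERTER_COLOR_PROFILE_709;
            break;
        case AVCOL_SPC_BT2020_NCL:
        case AVCOL_SPC_BT2020_CL:
            color_profile = AMF_VIDEO_CONVERTER_COLOR_PROFILE_2020;
            break;
        }
    }
    return (int)color_profile;
}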