#include <VideoToolbox/VideoToolbox.h>

/* cv_pix_fmts[]: mapping between CoreVideo pixel formats and AVPixelFormat
 * (CoreVideo format, full-range flag, AVPixelFormat). */
#ifdef kCFCoreFoundationVersionNumber10_7
    { kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,  false, AV_PIX_FMT_NV12 },
    { kCVPixelFormatType_420YpCbCr8BiPlanarFullRange,   true,  AV_PIX_FMT_NV12 },
    /* ... */
#if HAVE_KCVPIXELFORMATTYPE_420YPCBCR10BIPLANARVIDEORANGE
    /* ... */
#if HAVE_KCVPIXELFORMATTYPE_422YPCBCR8BIPLANARVIDEORANGE
    { kCVPixelFormatType_422YpCbCr8BiPlanarVideoRange,  false, AV_PIX_FMT_NV16 },
    { kCVPixelFormatType_422YpCbCr8BiPlanarFullRange,   true,  AV_PIX_FMT_NV16 },
    /* ... */
#if HAVE_KCVPIXELFORMATTYPE_422YPCBCR10BIPLANARVIDEORANGE
    { kCVPixelFormatType_422YpCbCr10BiPlanarVideoRange, false, AV_PIX_FMT_P210 },
    { kCVPixelFormatType_422YpCbCr10BiPlanarFullRange,  true,  AV_PIX_FMT_P210 },
    /* ... */
#if HAVE_KCVPIXELFORMATTYPE_422YPCBCR16BIPLANARVIDEORANGE
    { kCVPixelFormatType_422YpCbCr16BiPlanarVideoRange, false, AV_PIX_FMT_P216 },
    /* ... */
#if HAVE_KCVPIXELFORMATTYPE_444YPCBCR8BIPLANARVIDEORANGE
    { kCVPixelFormatType_444YpCbCr8BiPlanarVideoRange,  false, AV_PIX_FMT_NV24 },
    { kCVPixelFormatType_444YpCbCr8BiPlanarFullRange,   true,  AV_PIX_FMT_NV24 },
    /* ... */
#if HAVE_KCVPIXELFORMATTYPE_444YPCBCR10BIPLANARVIDEORANGE
    { kCVPixelFormatType_444YpCbCr10BiPlanarVideoRange, false, AV_PIX_FMT_P410 },
    { kCVPixelFormatType_444YpCbCr10BiPlanarFullRange,  true,  AV_PIX_FMT_P410 },
    /* ... */
#if HAVE_KCVPIXELFORMATTYPE_444YPCBCR16BIPLANARVIDEORANGE
    { kCVPixelFormatType_444YpCbCr16BiPlanarVideoRange, false, AV_PIX_FMT_P416 },
/* The same availability guards select the entries of supported_formats[]: */
#ifdef kCFCoreFoundationVersionNumber10_7
#if HAVE_KCVPIXELFORMATTYPE_420YPCBCR10BIPLANARVIDEORANGE
#if HAVE_KCVPIXELFORMATTYPE_422YPCBCR8BIPLANARVIDEORANGE
#if HAVE_KCVPIXELFORMATTYPE_422YPCBCR10BIPLANARVIDEORANGE
#if HAVE_KCVPIXELFORMATTYPE_422YPCBCR16BIPLANARVIDEORANGE
#if HAVE_KCVPIXELFORMATTYPE_444YPCBCR8BIPLANARVIDEORANGE
#if HAVE_KCVPIXELFORMATTYPE_444YPCBCR10BIPLANARVIDEORANGE
#if HAVE_KCVPIXELFORMATTYPE_444YPCBCR16BIPLANARVIDEORANGE
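The table above backs the public mapping helpers declared in libavutil/hwcontext_videotoolbox.h. A small usage sketch, not part of this file: the helper names sw_format_for() and cv_format_for_nv12_full_range() and the NV12 fallback are illustrative, and it assumes av_map_videotoolbox_format_to_pixfmt() returns AV_PIX_FMT_NONE for formats missing from the table.

#include <stdint.h>
#include <stdbool.h>
#include <CoreVideo/CoreVideo.h>
#include "libavutil/hwcontext_videotoolbox.h"
#include "libavutil/pixfmt.h"

/* Pick the AVPixelFormat matching a CVPixelBuffer, defaulting to NV12. */
static enum AVPixelFormat sw_format_for(CVPixelBufferRef buf)
{
    enum AVPixelFormat fmt =
        av_map_videotoolbox_format_to_pixfmt(CVPixelBufferGetPixelFormatType(buf));
    return fmt != AV_PIX_FMT_NONE ? fmt : AV_PIX_FMT_NV12;
}

/* Reverse direction: full-range NV12 corresponds to
 * kCVPixelFormatType_420YpCbCr8BiPlanarFullRange per the table above. */
static uint32_t cv_format_for_nv12_full_range(void)
{
    return av_map_videotoolbox_format_from_pixfmt2(AV_PIX_FMT_NV12, true);
}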
static int vt_frames_get_constraints(AVHWDeviceContext *ctx,
                                     const void *hwconfig,
                                     AVHWFramesConstraints *constraints)
static int vt_pool_alloc(AVHWFramesContext *ctx)
{
    CFMutableDictionaryRef attributes, iosurface_properties;
    /* ... */
    attributes = CFDictionaryCreateMutable(
        /* ... */
        &kCFTypeDictionaryKeyCallBacks,
        &kCFTypeDictionaryValueCallBacks);

    /* Requested pixel format, width and height go into the pool's
     * pixel buffer attributes. */
    pixfmt = CFNumberCreate(NULL, kCFNumberSInt32Type, &cv_pixfmt);
    CFDictionarySetValue(attributes, kCVPixelBufferPixelFormatTypeKey, pixfmt);
    /* ... */

    iosurface_properties = CFDictionaryCreateMutable(
        /* ... */
        &kCFTypeDictionaryKeyCallBacks,
        &kCFTypeDictionaryValueCallBacks);
    CFDictionarySetValue(attributes, kCVPixelBufferIOSurfacePropertiesKey, iosurface_properties);
    CFRelease(iosurface_properties);

    w = CFNumberCreate(NULL, kCFNumberSInt32Type, &ctx->width);
    h = CFNumberCreate(NULL, kCFNumberSInt32Type, &ctx->height);
    CFDictionarySetValue(attributes, kCVPixelBufferWidthKey,  w);
    CFDictionarySetValue(attributes, kCVPixelBufferHeightKey, h);
    /* ... */

    err = CVPixelBufferPoolCreate(/* ... */);
    /* ... */
    CFRelease(attributes);

    if (err == kCVReturnSuccess)
        /* ... */
/* videotoolbox_buffer_release(): drop the CVPixelBuffer backing an AVBuffer. */
    CVPixelBufferRelease((CVPixelBufferRef)data);

/* vt_pool_alloc_buffer(): take a fresh CVPixelBuffer from the pool and wrap it. */
    CVPixelBufferRef pixbuf;
    /* ... */
    err = CVPixelBufferPoolCreatePixelBuffer(/* ... */);
    /* ... */
    if (err != kCVReturnSuccess) {
        /* ... */
    }
    /* ... */
    CVPixelBufferRelease(pixbuf);

/* vt_frames_uninit(): */
    CVPixelBufferPoolRelease(fctx->pool);
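A condensed sketch of the same pool setup, assuming only the pieces shown above (a width, a height and a CoreVideo pixel format code); the dictionary capacity, error logging and IOSurface properties used by the real allocator are omitted, and make_pool() is an illustrative name.

#include <stdint.h>
#include <CoreVideo/CoreVideo.h>

static CVPixelBufferPoolRef make_pool(int width, int height, uint32_t cv_pixfmt)
{
    CVPixelBufferPoolRef pool = NULL;
    CFNumberRef w   = CFNumberCreate(NULL, kCFNumberSInt32Type, &width);
    CFNumberRef h   = CFNumberCreate(NULL, kCFNumberSInt32Type, &height);
    CFNumberRef fmt = CFNumberCreate(NULL, kCFNumberSInt32Type, &cv_pixfmt);
    CFMutableDictionaryRef attrs =
        CFDictionaryCreateMutable(NULL, 0, &kCFTypeDictionaryKeyCallBacks,
                                  &kCFTypeDictionaryValueCallBacks);

    CFDictionarySetValue(attrs, kCVPixelBufferWidthKey,           w);
    CFDictionarySetValue(attrs, kCVPixelBufferHeightKey,          h);
    CFDictionarySetValue(attrs, kCVPixelBufferPixelFormatTypeKey, fmt);

    /* allocator, pool attributes, pixel buffer attributes, out pool */
    if (CVPixelBufferPoolCreate(NULL, NULL, attrs, &pool) != kCVReturnSuccess)
        pool = NULL;

    CFRelease(attrs);
    CFRelease(w); CFRelease(h); CFRelease(fmt);
    return pool;
}

Buffers then come out of the pool with CVPixelBufferPoolCreatePixelBuffer(NULL, pool, &pixbuf), which is essentially what vt_pool_alloc_buffer() above wraps into an AVBufferRef via av_buffer_create().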
/* vt_transfer_get_formats(): */
    fmts[0] = ctx->sw_format;
/* vt_unmap(): the lock flags used at map time are stashed in hwmap->priv. */
    CVPixelBufferRef pixbuf = (CVPixelBufferRef)hwmap->source->data[3];
    /* ... */
    CVPixelBufferUnlockBaseAddress(pixbuf, (uintptr_t)hwmap->priv);
static int vt_pixbuf_set_par(void *log_ctx, CVPixelBufferRef pixbuf, const AVFrame *src)
{
    CFMutableDictionaryRef par = NULL;
    /* ... */
    if (avpar.num == 0) {
        CVBufferRemoveAttachment(pixbuf, kCVImageBufferPixelAspectRatioKey);
        /* ... */
    }
    /* ... */
    num = CFNumberCreate(kCFAllocatorDefault, /* ... */);
    den = CFNumberCreate(kCFAllocatorDefault, /* ... */);
    par = CFDictionaryCreateMutable(kCFAllocatorDefault, /* ... */
                                    &kCFCopyStringDictionaryKeyCallBacks,
                                    &kCFTypeDictionaryValueCallBacks);
    if (!par || !num || !den) {
        if (par) CFRelease(par);
        if (num) CFRelease(num);
        if (den) CFRelease(den);
        /* ... */
    }
    CFDictionarySetValue(par, kCVImageBufferPixelAspectRatioHorizontalSpacingKey, num);
    CFDictionarySetValue(par, kCVImageBufferPixelAspectRatioVerticalSpacingKey,   den);
    CVBufferSetAttachment(pixbuf, kCVImageBufferPixelAspectRatioKey, par,
                          kCVAttachmentMode_ShouldPropagate);
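A standalone sketch of the same pixel-aspect-ratio attachment; the helper name set_par() and the plain int parameters are illustrative, and the reduction of the ratio done in the file is skipped.

#include <CoreVideo/CoreVideo.h>

static void set_par(CVPixelBufferRef pixbuf, int par_num, int par_den)
{
    CFNumberRef num = CFNumberCreate(kCFAllocatorDefault, kCFNumberIntType, &par_num);
    CFNumberRef den = CFNumberCreate(kCFAllocatorDefault, kCFNumberIntType, &par_den);
    CFMutableDictionaryRef par =
        CFDictionaryCreateMutable(kCFAllocatorDefault, 2,
                                  &kCFCopyStringDictionaryKeyCallBacks,
                                  &kCFTypeDictionaryValueCallBacks);
    if (par && num && den) {
        CFDictionarySetValue(par, kCVImageBufferPixelAspectRatioHorizontalSpacingKey, num);
        CFDictionarySetValue(par, kCVImageBufferPixelAspectRatioVerticalSpacingKey,   den);
        CVBufferSetAttachment(pixbuf, kCVImageBufferPixelAspectRatioKey, par,
                              kCVAttachmentMode_ShouldPropagate);
    }
    if (par) CFRelease(par);
    if (num) CFRelease(num);
    if (den) CFRelease(den);
}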
/* av_map_videotoolbox_chroma_loc_from_av(): AVChromaLocation -> CoreVideo constant */
    case AVCHROMA_LOC_LEFT:       return kCVImageBufferChromaLocation_Left;
    case AVCHROMA_LOC_CENTER:     return kCVImageBufferChromaLocation_Center;
    case AVCHROMA_LOC_TOP:        return kCVImageBufferChromaLocation_Top;
    case AVCHROMA_LOC_BOTTOM:     return kCVImageBufferChromaLocation_Bottom;
    case AVCHROMA_LOC_TOPLEFT:    return kCVImageBufferChromaLocation_TopLeft;
    case AVCHROMA_LOC_BOTTOMLEFT: return kCVImageBufferChromaLocation_BottomLeft;
/* vt_pixbuf_set_chromaloc(): attach or clear the top-field chroma siting. */
    CVBufferSetAttachment(pixbuf, kCVImageBufferChromaLocationTopFieldKey,
                          /* ... */,
                          kCVAttachmentMode_ShouldPropagate);
    /* ... */
    CVBufferRemoveAttachment(pixbuf, kCVImageBufferChromaLocationTopFieldKey);
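Usage sketch for the chroma-siting attachment, assuming a frame and its CVPixelBuffer are at hand; tag_chroma_loc() is an illustrative name.

#include <CoreVideo/CoreVideo.h>
#include "libavutil/frame.h"
#include "libavutil/hwcontext_videotoolbox.h"

static void tag_chroma_loc(CVPixelBufferRef pixbuf, const AVFrame *frame)
{
    CFStringRef loc = av_map_videotoolbox_chroma_loc_from_av(frame->chroma_location);
    if (loc)
        CVBufferSetAttachment(pixbuf, kCVImageBufferChromaLocationTopFieldKey,
                              loc, kCVAttachmentMode_ShouldPropagate);
    else
        CVBufferRemoveAttachment(pixbuf, kCVImageBufferChromaLocationTopFieldKey);
}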
CFStringRef av_map_videotoolbox_color_matrix_from_av(enum AVColorSpace space)
{
    switch (space) {
    case AVCOL_SPC_BT2020_CL:
    case AVCOL_SPC_BT2020_NCL:
#if HAVE_KCVIMAGEBUFFERYCBCRMATRIX_ITU_R_2020
        if (__builtin_available(macOS 10.11, iOS 9, *))
            /* ... */
#endif
        return CFSTR("ITU_R_2020");
    case AVCOL_SPC_BT470BG:
    case AVCOL_SPC_SMPTE170M: return kCVImageBufferYCbCrMatrix_ITU_R_601_4;
    case AVCOL_SPC_BT709:     return kCVImageBufferYCbCrMatrix_ITU_R_709_2;
    case AVCOL_SPC_SMPTE240M: return kCVImageBufferYCbCrMatrix_SMPTE_240M_1995;
    default:
#if HAVE_KCVIMAGEBUFFERTRANSFERFUNCTION_ITU_R_2100_HLG
        if (__builtin_available(macOS 10.13, iOS 11, tvOS 11, watchOS 4, *))
            return CVYCbCrMatrixGetStringForIntegerCodePoint(space);
#endif
CFStringRef av_map_videotoolbox_color_primaries_from_av(enum AVColorPrimaries pri)
{
    switch (pri) {
    case AVCOL_PRI_BT2020:
#if HAVE_KCVIMAGEBUFFERCOLORPRIMARIES_ITU_R_2020
        if (__builtin_available(macOS 10.11, iOS 9, *))
            /* ... */
#endif
        return CFSTR("ITU_R_2020");
    case AVCOL_PRI_BT709:     return kCVImageBufferColorPrimaries_ITU_R_709_2;
    case AVCOL_PRI_SMPTE170M: return kCVImageBufferColorPrimaries_SMPTE_C;
    case AVCOL_PRI_BT470BG:   return kCVImageBufferColorPrimaries_EBU_3213;
    default:
#if HAVE_KCVIMAGEBUFFERTRANSFERFUNCTION_ITU_R_2100_HLG
        if (__builtin_available(macOS 10.13, iOS 11, tvOS 11, watchOS 4, *))
            return CVColorPrimariesGetStringForIntegerCodePoint(pri);
#endif
CFStringRef av_map_videotoolbox_color_trc_from_av(enum AVColorTransferCharacteristic trc)
{
    switch (trc) {
    case AVCOL_TRC_SMPTE2084:
#if HAVE_KCVIMAGEBUFFERTRANSFERFUNCTION_SMPTE_ST_2084_PQ
        if (__builtin_available(macOS 10.13, iOS 11, *))
            return kCVImageBufferTransferFunction_SMPTE_ST_2084_PQ;
#endif
        return CFSTR("SMPTE_ST_2084_PQ");

    case AVCOL_TRC_BT2020_10:
    case AVCOL_TRC_BT2020_12:
#if HAVE_KCVIMAGEBUFFERTRANSFERFUNCTION_ITU_R_2020
        if (__builtin_available(macOS 10.11, iOS 9, *))
            /* ... */
#endif
        return CFSTR("ITU_R_2020");

    case AVCOL_TRC_BT709:
        return kCVImageBufferTransferFunction_ITU_R_709_2;
    /* ... */
        return kCVImageBufferTransferFunction_SMPTE_240M_1995;

    case AVCOL_TRC_SMPTE428:
#if HAVE_KCVIMAGEBUFFERTRANSFERFUNCTION_SMPTE_ST_428_1
        if (__builtin_available(macOS 10.12, iOS 10, *))
            return kCVImageBufferTransferFunction_SMPTE_ST_428_1;
#endif
        return CFSTR("SMPTE_ST_428_1");

    case AVCOL_TRC_ARIB_STD_B67:
#if HAVE_KCVIMAGEBUFFERTRANSFERFUNCTION_ITU_R_2100_HLG
        if (__builtin_available(macOS 10.13, iOS 11, *))
            return kCVImageBufferTransferFunction_ITU_R_2100_HLG;
#endif
        return CFSTR("ITU_R_2100_HLG");

    case AVCOL_TRC_GAMMA22:
        return kCVImageBufferTransferFunction_UseGamma;
    case AVCOL_TRC_GAMMA28:
        return kCVImageBufferTransferFunction_UseGamma;

    default:
#if HAVE_KCVIMAGEBUFFERTRANSFERFUNCTION_ITU_R_2100_HLG
        if (__builtin_available(macOS 10.13, iOS 11, tvOS 11, watchOS 4, *))
            return CVTransferFunctionGetStringForIntegerCodePoint(trc);
#endif
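The three public converters compose naturally when tagging a buffer from an AVFrame. A sketch: tag_color_description() is an illustrative name, and untranslatable values are simply skipped here rather than removed and warned about as the file does.

#include <CoreVideo/CoreVideo.h>
#include "libavutil/frame.h"
#include "libavutil/hwcontext_videotoolbox.h"

static void tag_color_description(CVPixelBufferRef pixbuf, const AVFrame *frame)
{
    CFStringRef mtx = av_map_videotoolbox_color_matrix_from_av(frame->colorspace);
    CFStringRef pri = av_map_videotoolbox_color_primaries_from_av(frame->color_primaries);
    CFStringRef trc = av_map_videotoolbox_color_trc_from_av(frame->color_trc);

    if (mtx)
        CVBufferSetAttachment(pixbuf, kCVImageBufferYCbCrMatrixKey,
                              mtx, kCVAttachmentMode_ShouldPropagate);
    if (pri)
        CVBufferSetAttachment(pixbuf, kCVImageBufferColorPrimariesKey,
                              pri, kCVAttachmentMode_ShouldPropagate);
    if (trc)
        CVBufferSetAttachment(pixbuf, kCVImageBufferTransferFunctionKey,
                              trc, kCVAttachmentMode_ShouldPropagate);
}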
static CFDictionaryRef vt_cv_buffer_copy_attachments(CVBufferRef buffer,
                                                     CVAttachmentMode attachment_mode)
{
    CFDictionaryRef dict;

    /* Prefer CVBufferCopyAttachments() when both the SDK and the runtime have it. */
#if (TARGET_OS_OSX && defined(__MAC_12_0) && __MAC_OS_X_VERSION_MAX_ALLOWED >= __MAC_12_0) || \
    (TARGET_OS_IOS && defined(__IPHONE_15_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_15_0) || \
    (TARGET_OS_TV && defined(__TVOS_15_0) && __TV_OS_VERSION_MAX_ALLOWED >= __TVOS_15_0)
    if (__builtin_available(macOS 12.0, iOS 15.0, tvOS 15.0, *))
        return CVBufferCopyAttachments(buffer, attachment_mode);
#endif

    /* Otherwise fall back to CVBufferGetAttachments() and copy the result. */
#if (TARGET_OS_OSX && (!defined(__MAC_12_0) || __MAC_OS_X_VERSION_MIN_REQUIRED < __MAC_12_0)) || \
    (TARGET_OS_IOS && (!defined(__IPHONE_15_0) || __IPHONE_OS_VERSION_MIN_REQUIRED < __IPHONE_15_0)) || \
    (TARGET_OS_TV && (!defined(__TVOS_15_0) || __TV_OS_VERSION_MIN_REQUIRED < __TVOS_15_0))
    dict = CVBufferGetAttachments(buffer, attachment_mode);
    return dict ? CFDictionaryCreateCopy(NULL, dict) : NULL;
#endif
}
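Call-site sketch: the helper returns an owned dictionary on both paths (CVBufferCopyAttachments already copies; the legacy CVBufferGetAttachments result is copied explicitly), so the caller always releases it. inspect_attachments() is an illustrative name.

#include <CoreVideo/CoreVideo.h>

static void inspect_attachments(CVPixelBufferRef pixbuf)
{
    CFDictionaryRef attachments =
        vt_cv_buffer_copy_attachments((CVBufferRef)pixbuf, kCVAttachmentMode_ShouldPropagate);
    if (attachments) {
        /* inspect or forward the propagated attachments here */
        CFRelease(attachments);
    }
}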
static int vt_pixbuf_set_colorspace(void *log_ctx, CVPixelBufferRef pixbuf, const AVFrame *src)
{
    CGColorSpaceRef colorspace = NULL;
    CFStringRef colormatrix = NULL, colorpri = NULL, colortrc = NULL;
    /* ... */
    CVBufferSetAttachment(pixbuf, kCVImageBufferYCbCrMatrixKey,
                          colormatrix, kCVAttachmentMode_ShouldPropagate);
    /* ... */
    CVBufferRemoveAttachment(pixbuf, kCVImageBufferYCbCrMatrixKey);
    /* ... */
    av_log(log_ctx, AV_LOG_WARNING, "Color space %s is not supported.\n",
           av_color_space_name(src->colorspace));
    /* ... */
    CVBufferSetAttachment(pixbuf, kCVImageBufferColorPrimariesKey,
                          colorpri, kCVAttachmentMode_ShouldPropagate);
    /* ... */
    CVBufferRemoveAttachment(pixbuf, kCVImageBufferColorPrimariesKey);
    /* ... */
    av_log(log_ctx, AV_LOG_WARNING, "Color primaries %s is not supported.\n",
           av_color_primaries_name(src->color_primaries));
    /* ... */
    CVBufferSetAttachment(pixbuf, kCVImageBufferTransferFunctionKey,
                          colortrc, kCVAttachmentMode_ShouldPropagate);
    /* ... */
    CVBufferRemoveAttachment(pixbuf, kCVImageBufferTransferFunctionKey);
    /* ... */
    av_log(log_ctx, AV_LOG_WARNING, "Color transfer function %s is not supported.\n",
           av_color_transfer_name(src->color_trc));
    /* ... */
    CFNumberRef gamma_level = CFNumberCreate(NULL, kCFNumberFloat32Type, &gamma);
    CVBufferSetAttachment(pixbuf, kCVImageBufferGammaLevelKey,
                          gamma_level, kCVAttachmentMode_ShouldPropagate);
    CFRelease(gamma_level);
    /* ... */
    CVBufferRemoveAttachment(pixbuf, kCVImageBufferGammaLevelKey);

#if (TARGET_OS_OSX && __MAC_OS_X_VERSION_MAX_ALLOWED >= 100800) || \
    (TARGET_OS_IOS && __IPHONE_OS_VERSION_MAX_ALLOWED >= 100000)
    if (__builtin_available(macOS 10.8, iOS 10, *)) {
        CFDictionaryRef attachments = /* ... */;
        /* ... */
        colorspace = CVImageBufferCreateColorSpaceFromAttachments(attachments);
        CFRelease(attachments);
    }
#endif
    /* ... */
    CVBufferSetAttachment(pixbuf, kCVImageBufferCGColorSpaceKey,
                          colorspace, kCVAttachmentMode_ShouldPropagate);
    CFRelease(colorspace);
    /* ... */
    CVBufferRemoveAttachment(pixbuf, kCVImageBufferCGColorSpaceKey);
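A trimmed sketch of that colour-space derivation step, assuming the matrix, primaries and transfer attachments have already been set on pixbuf and reusing the file's vt_cv_buffer_copy_attachments() helper; attach_cgcolorspace() is an illustrative name.

#include <TargetConditionals.h>
#include <CoreVideo/CoreVideo.h>

static void attach_cgcolorspace(CVPixelBufferRef pixbuf)
{
#if (TARGET_OS_OSX && __MAC_OS_X_VERSION_MAX_ALLOWED >= 100800) || \
    (TARGET_OS_IOS && __IPHONE_OS_VERSION_MAX_ALLOWED >= 100000)
    if (__builtin_available(macOS 10.8, iOS 10, *)) {
        CFDictionaryRef attachments =
            vt_cv_buffer_copy_attachments((CVBufferRef)pixbuf, kCVAttachmentMode_ShouldPropagate);
        if (attachments) {
            CGColorSpaceRef cs = CVImageBufferCreateColorSpaceFromAttachments(attachments);
            CFRelease(attachments);
            if (cs) {
                CVBufferSetAttachment(pixbuf, kCVImageBufferCGColorSpaceKey,
                                      cs, kCVAttachmentMode_ShouldPropagate);
                CGColorSpaceRelease(cs);
            }
        }
    }
#endif
}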
static int vt_map_frame(AVHWFramesContext *ctx, AVFrame *dst, const AVFrame *src, int flags)
{
    CVPixelBufferRef pixbuf = (CVPixelBufferRef)src->data[3];
    OSType pixel_format = CVPixelBufferGetPixelFormatType(pixbuf);
    uint32_t map_flags = 0;
    /* ... */
    if (CVPixelBufferGetWidth(pixbuf)  != ctx->width ||
        CVPixelBufferGetHeight(pixbuf) != ctx->height) {
        /* ... */
    }
    /* ... */
    map_flags = kCVPixelBufferLock_ReadOnly;
    /* ... */
    err = CVPixelBufferLockBaseAddress(pixbuf, map_flags);
    if (err != kCVReturnSuccess) {
        /* ... */
    }
    /* ... */
    if (CVPixelBufferIsPlanar(pixbuf)) {
        int planes = CVPixelBufferGetPlaneCount(pixbuf);
        for (i = 0; i < planes; i++) {
            dst->data[i]     = CVPixelBufferGetBaseAddressOfPlane(pixbuf, i);
            dst->linesize[i] = CVPixelBufferGetBytesPerRowOfPlane(pixbuf, i);
        }
    } else {
        dst->data[0]     = CVPixelBufferGetBaseAddress(pixbuf);
        dst->linesize[0] = CVPixelBufferGetBytesPerRow(pixbuf);
    }
    /* The lock flags are stashed so vt_unmap() can pass them back: */
    ret = ff_hwframe_map_create(/* ... */, vt_unmap,
                                (void *)(uintptr_t)map_flags);
    /* ... */
    CVPixelBufferUnlockBaseAddress(pixbuf, map_flags);
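The same lock-and-read pattern in isolation, for a frame whose data[3] carries the CVPixelBufferRef (as AV_PIX_FMT_VIDEOTOOLBOX frames do); read_planes() and the unconditional read-only lock are illustrative.

#include <stdint.h>
#include <CoreVideo/CoreVideo.h>
#include "libavutil/frame.h"

static void read_planes(const AVFrame *frame)
{
    CVPixelBufferRef pixbuf = (CVPixelBufferRef)frame->data[3];

    if (CVPixelBufferLockBaseAddress(pixbuf, kCVPixelBufferLock_ReadOnly) != kCVReturnSuccess)
        return;

    if (CVPixelBufferIsPlanar(pixbuf)) {
        size_t planes = CVPixelBufferGetPlaneCount(pixbuf);
        for (size_t i = 0; i < planes; i++) {
            uint8_t *base   = CVPixelBufferGetBaseAddressOfPlane(pixbuf, i);
            size_t   stride = CVPixelBufferGetBytesPerRowOfPlane(pixbuf, i);
            (void)base; (void)stride;   /* consume the rows of plane i here */
        }
    } else {
        uint8_t *base   = CVPixelBufferGetBaseAddress(pixbuf);
        size_t   stride = CVPixelBufferGetBytesPerRow(pixbuf);
        (void)base; (void)stride;
    }

    CVPixelBufferUnlockBaseAddress(pixbuf, kCVPixelBufferLock_ReadOnly);
}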
    /* vt_transfer_data_from(): the temporary mapping uses dst's layout */
    map->format = dst->format;
    /* ... */
    map->height = dst->height;

    /* vt_transfer_data_to(): the temporary mapping uses src's layout */
    map->format = src->format;
    /* ... */
    map->height = src->height;

    /* vt_map_from(): */
    dst->height = src->height;
/* vt_device_create(): */
    if (device && device[0]) {
        /* ... */
    }

const HWContextType ff_hwcontext_type_videotoolbox = {
    /* ... */
    .name = "videotoolbox",
    /* ... */
};
AVBufferPool * pool_internal
#define AV_LOG_WARNING
Something somehow does not look correct.
AVPixelFormat
Pixel format.
CFStringRef av_map_videotoolbox_color_trc_from_av(enum AVColorTransferCharacteristic trc)
Convert an AVColorTransferCharacteristic to a VideoToolbox/CoreVideo color transfer function string.
AVColorTransferCharacteristic
Color Transfer Characteristic.
AVFrame * source
A reference to the original source of the mapping.
CFStringRef av_map_videotoolbox_color_matrix_from_av(enum AVColorSpace space)
Convert an AVColorSpace to a VideoToolbox/CoreVideo color matrix string.
static int vt_transfer_data_from(AVHWFramesContext *hwfc, AVFrame *dst, const AVFrame *src)
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g. extended_data.
AVFrame
This structure describes decoded (raw) audio or video data.
enum AVPixelFormat pix_fmt
@ AVCOL_RANGE_JPEG
Full range content.
int av_vt_pixbuf_set_attachments(void *log_ctx, CVPixelBufferRef pixbuf, const AVFrame *src)
@ AVCOL_SPC_RGB
order of coefficients is actually GBR, also IEC 61966-2-1 (sRGB), YZX and ST 428-1
@ AVCOL_TRC_BT2020_12
ITU-R BT2020 for 12-bit system.
static int vt_frames_init(AVHWFramesContext *ctx)
@ AV_PIX_FMT_BGRA
packed BGRA 8:8:8:8, 32bpp, BGRABGRA...
int ff_hwframe_map_create(AVBufferRef *hwframe_ref, AVFrame *dst, const AVFrame *src, void(*unmap)(AVHWFramesContext *ctx, HWMapDescriptor *hwmap), void *priv)
@ AV_HWDEVICE_TYPE_VIDEOTOOLBOX
AVColorPrimaries
Chromaticity coordinates of the source primaries.
static int vt_map_frame(AVHWFramesContext *ctx, AVFrame *dst, const AVFrame *src, int flags)
void * priv
Hardware-specific private data associated with the mapping.
enum AVPixelFormat * valid_hw_formats
A list of possible values for format in the hw_frames_ctx, terminated by AV_PIX_FMT_NONE.
#define AVERROR_UNKNOWN
Unknown error, typically from an external library.
int width
The allocated dimensions of the frames in this pool.
CVPixelBufferPoolRef pool
@ AVCOL_SPC_BT2020_CL
ITU-R BT2020 constant luminance system.
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
const HWContextType ff_hwcontext_type_videotoolbox
AVHWFramesConstraints
This struct describes the constraints on hardware frames attached to a given device with a hardware-specific configuration.
static int vt_pixbuf_set_attachments(void *log_ctx, CVPixelBufferRef pixbuf, const AVFrame *src)
@ AVCOL_SPC_BT470BG
also ITU-R BT601-6 625 / ITU-R BT1358 625 / ITU-R BT1700 625 PAL & SECAM / IEC 61966-2-4 xvYCC601
const char * av_color_space_name(enum AVColorSpace space)
@ AVCOL_TRC_GAMMA28
also ITU-R BT470BG
AVBufferPool * av_buffer_pool_init2(size_t size, void *opaque, AVBufferRef *(*alloc)(void *opaque, size_t size), void(*pool_free)(void *opaque))
Allocate and initialize a buffer pool with a more complex allocator.
static int vt_device_create(AVHWDeviceContext *ctx, const char *device, AVDictionary *opts, int flags)
int av_reduce(int *dst_num, int *dst_den, int64_t num, int64_t den, int64_t max)
Reduce a fraction.
static void vt_frames_uninit(AVHWFramesContext *ctx)
@ AVCOL_TRC_GAMMA22
also ITU-R BT470M / ITU-R BT1700 625 PAL & SECAM
AVHWDeviceContext
This struct aggregates all the (hardware/vendor-specific) "high-level" state, i.e. state that is not tied to a concrete processing configuration.
AVFrame * av_frame_alloc(void)
Allocate an AVFrame and set its fields to default values.
CFStringRef av_map_videotoolbox_color_primaries_from_av(enum AVColorPrimaries pri)
Convert an AVColorPrimaries to a VideoToolbox/CoreVideo color primaries string.
static FFHWFramesContext * ffhwframesctx(AVHWFramesContext *ctx)
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
#define FF_ARRAY_ELEMS(a)
enum AVPixelFormat * valid_sw_formats
A list of possible values for sw_format in the hw_frames_ctx, terminated by AV_PIX_FMT_NONE.
static int vt_pixbuf_set_chromaloc(void *log_ctx, CVPixelBufferRef pixbuf, const AVFrame *src)
AVBufferRef * av_buffer_pool_get(AVBufferPool *pool)
Allocate a new AVBuffer, reusing an old buffer from the pool when available.
static enum AVPixelFormat supported_formats[]
@ AVCOL_SPC_SMPTE170M
also ITU-R BT601-6 525 / ITU-R BT1358 525 / ITU-R BT1700 NTSC / functionally identical to above
static int vt_transfer_get_formats(AVHWFramesContext *ctx, enum AVHWFrameTransferDirection dir, enum AVPixelFormat **formats)
static enum AVPixelFormat pix_fmts[]
@ AV_PIX_FMT_YUV420P
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
@ AVCOL_PRI_BT470BG
also ITU-R BT601-6 625 / ITU-R BT1358 625 / ITU-R BT1700 625 PAL & SECAM
static int vt_pixbuf_set_colorspace(void *log_ctx, CVPixelBufferRef pixbuf, const AVFrame *src)
@ AVCOL_PRI_SMPTE170M
also ITU-R BT601-6 525 / ITU-R BT1358 525 / ITU-R BT1700 NTSC
static const struct { ... } planes[]
enum AVPixelFormat sw_format
The pixel format identifying the actual data layout of the hardware frames.
int av_frame_copy_props(AVFrame *dst, const AVFrame *src)
Copy only "metadata" fields from src to dst.
static int vt_map_from(AVHWFramesContext *hwfc, AVFrame *dst, const AVFrame *src, int flags)
uint32_t av_map_videotoolbox_format_from_pixfmt(enum AVPixelFormat pix_fmt)
Convert an AVPixelFormat to a VideoToolbox (actually CoreVideo) format.
enum AVColorRange color_range
@ AVCHROMA_LOC_LEFT
MPEG-2/4 4:2:0, H.264 default for 4:2:0.
AVRational
Rational number (pair of numerator and denominator).
@ AVCHROMA_LOC_TOPLEFT
ITU-R 601, SMPTE 274M 296M S314M(DV 4:1:1), mpeg2 4:2:2.
@ AVCOL_PRI_BT709
also ITU-R BT1361 / IEC 61966-2-4 / SMPTE RP 177 Annex B
const char * av_color_primaries_name(enum AVColorPrimaries primaries)
@ AVCOL_TRC_BT2020_10
ITU-R BT2020 for 10-bit system.
static AVBufferRef * vt_pool_alloc_buffer(void *opaque, size_t size)
@ AVCOL_RANGE_UNSPECIFIED
static CFDictionaryRef vt_cv_buffer_copy_attachments(CVBufferRef buffer, CVAttachmentMode attachment_mode)
Copy all attachments for the specified mode from the given buffer.
AVBufferRef * av_buffer_create(uint8_t *data, size_t size, void(*free)(void *opaque, uint8_t *data), void *opaque, int flags)
Create an AVBuffer from an existing array.
@ AVCOL_PRI_BT2020
ITU-R BT2020.
@ AVCOL_TRC_SMPTE2084
SMPTE ST 2084 for 10-, 12-, 14- and 16-bit systems.
static int vt_pixbuf_set_par(void *log_ctx, CVPixelBufferRef pixbuf, const AVFrame *src)
int av_frame_copy(AVFrame *dst, const AVFrame *src)
Copy the frame data from src to dst.
#define AV_PIX_FMT_AYUV64
@ AV_PIX_FMT_NV16
interleaved chroma YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
static int vt_frames_get_constraints(AVHWDeviceContext *ctx, const void *hwconfig, AVHWFramesConstraints *constraints)
#define AVERROR_EXTERNAL
Generic error in an external library.
static void vt_unmap(AVHWFramesContext *ctx, HWMapDescriptor *hwmap)
static int vt_transfer_data_to(AVHWFramesContext *hwfc, AVFrame *dst, const AVFrame *src)
@ AVCOL_TRC_BT709
also ITU-R BT1361
AVChromaLocation
Location of chroma samples.
@ AV_PIX_FMT_AYUV
packed AYUV 4:4:4:4, 32bpp (1 Cr & Cb sample per 1x1 Y & A samples), AYUVAYUV...
@ AVCOL_SPC_SMPTE240M
derived from 170M primaries and D65 white point, 170M is derived from BT470 System M's primaries
@ AV_PIX_FMT_VIDEOTOOLBOX
hardware decoding through Videotoolbox
#define i(width, name, range_min, range_max)
@ AVCOL_SPC_BT2020_NCL
ITU-R BT2020 non-constant luminance system.
#define av_malloc_array(a, b)
AVColorSpace
YUV colorspace type.
@ AV_PIX_FMT_NV24
planar YUV 4:4:4, 24bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (first byte U and the following byte V)
@ AVCOL_RANGE_MPEG
Narrow or limited range content.
AVHWFrameTransferDirection
AVHWFramesContext
This struct describes a set or pool of "hardware" frames (i.e. those with data not located in normal system memory).
static int vt_get_buffer(AVHWFramesContext *ctx, AVFrame *frame)
static void videotoolbox_buffer_release(void *opaque, uint8_t *data)
enum AVPixelFormat pixfmt
@ AV_HWFRAME_MAP_OVERWRITE
The mapped frame will be overwritten completely in subsequent operations, so the current frame data need not be loaded.
@ AV_PIX_FMT_NV12
planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (first byte U and the following byte V)
@ AV_HWFRAME_MAP_WRITE
The mapping must be writeable.
enum AVPixelFormat av_map_videotoolbox_format_to_pixfmt(uint32_t cv_fmt)
Convert a VideoToolbox (actually CoreVideo) format to AVPixelFormat.
@ AV_PIX_FMT_UYVY422
packed YUV 4:2:2, 16bpp, Cb Y0 Cr Y1
@ AVCOL_TRC_ARIB_STD_B67
ARIB STD-B67, known as "Hybrid log-gamma".
@ AVCHROMA_LOC_CENTER
MPEG-1 4:2:0, JPEG 4:2:0, H.263 4:2:0.
CFStringRef av_map_videotoolbox_chroma_loc_from_av(enum AVChromaLocation loc)
Convert an AVChromaLocation to a VideoToolbox/CoreVideo chroma location string.
AVBufferRef
A reference to a data buffer.
uint32_t av_map_videotoolbox_format_from_pixfmt2(enum AVPixelFormat pix_fmt, bool full_range)
Same as av_map_videotoolbox_format_from_pixfmt function, but can map and return full range pixel formats via a flag.
static const struct { ... } cv_pix_fmts[]
#define flags(name, subs,...)
static uint32_t vt_format_from_pixfmt(enum AVPixelFormat pix_fmt, enum AVColorRange range)
@ AVCOL_TRC_SMPTE428
SMPTE ST 428-1.
@ AVCOL_SPC_BT709
also ITU-R BT1361 / IEC 61966-2-4 xvYCC709 / derived in SMPTE RP 177 Annex B
AVColorRange
Visual content value range.
@ AV_HWFRAME_MAP_READ
The mapping must be readable.
const char * av_color_transfer_name(enum AVColorTransferCharacteristic transfer)
@ AVCHROMA_LOC_BOTTOMLEFT
static int vt_pool_alloc(AVHWFramesContext *ctx)
AVVTFramesContext p
The public AVVTFramesContext.
const char * av_get_pix_fmt_name(enum AVPixelFormat pix_fmt)
Return the short name for a pixel format, NULL in case pix_fmt is unknown.
#define av_fourcc2str(fourcc)
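Putting the pieces together from the caller's perspective, a hedged end-to-end sketch: the 1280x720 size and the NV12 software format are arbitrary, error handling is abbreviated, and videotoolbox_roundtrip() is an illustrative name. av_hwframe_get_buffer() ends up in the pool code above, and av_hwframe_map() lands in vt_map_frame()/vt_unmap().

#include "libavutil/buffer.h"
#include "libavutil/error.h"
#include "libavutil/frame.h"
#include "libavutil/hwcontext.h"

static int videotoolbox_roundtrip(AVBufferRef *device_ref)
{
    AVBufferRef *frames_ref = av_hwframe_ctx_alloc(device_ref);
    AVHWFramesContext *fctx;
    AVFrame *hw = NULL, *cpu = NULL;
    int ret;

    if (!frames_ref)
        return AVERROR(ENOMEM);
    fctx            = (AVHWFramesContext *)frames_ref->data;
    fctx->format    = AV_PIX_FMT_VIDEOTOOLBOX;
    fctx->sw_format = AV_PIX_FMT_NV12;
    fctx->width     = 1280;
    fctx->height    = 720;

    ret = av_hwframe_ctx_init(frames_ref);
    if (ret < 0)
        goto done;

    hw  = av_frame_alloc();
    cpu = av_frame_alloc();
    if (!hw || !cpu) {
        ret = AVERROR(ENOMEM);
        goto done;
    }

    ret = av_hwframe_get_buffer(frames_ref, hw, 0);
    if (ret < 0)
        goto done;

    ret = av_hwframe_map(cpu, hw, AV_HWFRAME_MAP_READ);
    /* on success cpu->data[]/cpu->linesize[] point into the locked CVPixelBuffer */

done:
    av_frame_free(&cpu);
    av_frame_free(&hw);
    av_buffer_unref(&frames_ref);
    return ret;
}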