FFmpeg
vf_colordetect.c
Go to the documentation of this file.
1 /*
2  * Copyright (c) 2025 Niklas Haas
3  *
4  * This file is part of FFmpeg.
5  *
6  * FFmpeg is free software; you can redistribute it and/or
7  * modify it under the terms of the GNU Lesser General Public
8  * License as published by the Free Software Foundation; either
9  * version 2.1 of the License, or (at your option) any later version.
10  *
11  * FFmpeg is distributed in the hope that it will be useful,
12  * but WITHOUT ANY WARRANTY; without even the implied warranty of
13  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14  * Lesser General Public License for more details.
15  *
16  * You should have received a copy of the GNU Lesser General Public
17  * License along with FFmpeg; if not, write to the Free Software
18  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19  */
20 
21 /**
22  * @file
23  * Video color space detector, tries to auto-detect YUV range and alpha mode.
24  */
25 
26 #include <stdbool.h>
27 #include <stdatomic.h>
28 
29 #include "config.h"
30 
31 #include "libavutil/mem.h"
32 #include "libavutil/opt.h"
33 #include "libavutil/pixdesc.h"
34 
35 #include "avfilter.h"
36 #include "filters.h"
37 #include "formats.h"
38 #include "video.h"
39 
40 #include "vf_colordetectdsp.h"
41 
/**
 * Bit flags selecting which properties to detect; used as the "mode" option.
 * NOTE(review): constant values reconstructed from their use as
 * AV_OPT_TYPE_FLAGS bit masks — confirm against upstream.
 */
enum ColorDetectMode {
    COLOR_DETECT_COLOR_RANGE = 1 << 0,
    COLOR_DETECT_ALPHA_MODE  = 1 << 1,
};
46 
47 typedef struct ColorDetectContext {
48  const AVClass *class;
50  unsigned mode;
51 
54  int depth;
55  int idx_a;
56  int mpeg_min;
57  int mpeg_max;
58 
59  atomic_int detected_range; // enum AVColorRange
60  atomic_int detected_alpha; // enum FFAlphaDetect
62 
#define OFFSET(x) offsetof(ColorDetectContext, x)
#define FLAGS AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_FILTERING_PARAM

/* "mode" is a bit mask of ColorDetectMode values; the default of -1 sets all
 * bits, i.e. every supported detector is enabled. */
static const AVOption colordetect_options[] = {
    { "mode", "Image properties to detect", OFFSET(mode), AV_OPT_TYPE_FLAGS, {.i64 = -1}, 0, UINT_MAX, FLAGS, .unit = "mode" },
    { "color_range", "Detect (YUV) color range", 0, AV_OPT_TYPE_CONST, {.i64 = COLOR_DETECT_COLOR_RANGE}, 0, 0, FLAGS, .unit = "mode" },
    { "alpha_mode", "Detect alpha mode", 0, AV_OPT_TYPE_CONST, {.i64 = COLOR_DETECT_ALPHA_MODE }, 0, 0, FLAGS, .unit = "mode" },
    { "all", "Detect all supported properties", 0, AV_OPT_TYPE_CONST, {.i64 = -1}, 0, 0, FLAGS, .unit = "mode" },
    { NULL }
};

AVFILTER_DEFINE_CLASS(colordetect);
75 
76 static int query_format(const AVFilterContext *ctx,
77  AVFilterFormatsConfig **cfg_in,
78  AVFilterFormatsConfig **cfg_out)
79 {
80  int want_flags = AV_PIX_FMT_FLAG_PLANAR;
81  int reject_flags = AV_PIX_FMT_FLAG_PAL | AV_PIX_FMT_FLAG_HWACCEL |
84 
85  if (HAVE_BIGENDIAN) {
86  want_flags |= AV_PIX_FMT_FLAG_BE;
87  } else {
88  reject_flags |= AV_PIX_FMT_FLAG_BE;
89  }
90 
91  AVFilterFormats *formats = ff_formats_pixdesc_filter(want_flags, reject_flags);
92  return ff_set_common_formats2(ctx, cfg_in, cfg_out, formats);
93 }
94 
96 {
97  AVFilterContext *ctx = inlink->dst;
98  ColorDetectContext *s = ctx->priv;
100  const int depth = desc->comp[0].depth;
101  const int mpeg_min = 16 << (depth - 8);
102  const int mpeg_max = 235 << (depth - 8);
103  if (depth > 16) /* not currently possible; prevent future bugs */
104  return AVERROR(ENOTSUP);
105 
106  s->desc = desc;
107  s->depth = depth;
108  s->mpeg_min = mpeg_min;
109  s->mpeg_max = mpeg_max;
110  s->nb_threads = ff_filter_get_nb_threads(ctx);
111 
112  if (desc->flags & AV_PIX_FMT_FLAG_RGB) {
113  atomic_init(&s->detected_range, AVCOL_RANGE_JPEG);
114  } else {
115  atomic_init(&s->detected_range, AVCOL_RANGE_UNSPECIFIED);
116  }
117 
118  if (desc->flags & AV_PIX_FMT_FLAG_ALPHA) {
119  s->idx_a = desc->comp[desc->nb_components - 1].plane;
120  atomic_init(&s->detected_alpha, FF_ALPHA_UNDETERMINED);
121  } else {
122  atomic_init(&s->detected_alpha, FF_ALPHA_NONE);
123  }
124 
125  ff_color_detect_dsp_init(&s->dsp, depth, inlink->color_range);
126  return 0;
127 }
128 
129 static int detect_range(AVFilterContext *ctx, void *arg,
130  int jobnr, int nb_jobs)
131 {
132  ColorDetectContext *s = ctx->priv;
133  const AVFrame *in = arg;
134  const ptrdiff_t stride = in->linesize[0];
135  const int y_start = (in->height * jobnr) / nb_jobs;
136  const int y_end = (in->height * (jobnr + 1)) / nb_jobs;
137  const int h_slice = y_end - y_start;
138 
139  if (s->dsp.detect_range(in->data[0] + y_start * stride, stride,
140  in->width, h_slice, s->mpeg_min, s->mpeg_max))
141  atomic_store_explicit(&s->detected_range, AVCOL_RANGE_JPEG,
142  memory_order_relaxed);
143 
144  return 0;
145 }
146 
/**
 * Slice-threaded worker: compare color values against the alpha plane in one
 * horizontal band and accumulate the result into s->detected_alpha.
 */
static int detect_alpha(AVFilterContext *ctx, void *arg,
                        int jobnr, int nb_jobs)
{
    ColorDetectContext *s = ctx->priv;
    const AVFrame *in = arg;
    const int w = in->width;
    const int h = in->height;
    const int y_start = (h * jobnr) / nb_jobs;     /* first row of this slice */
    const int y_end = (h * (jobnr + 1)) / nb_jobs; /* one past the last row */
    const int h_slice = y_end - y_start;

    /* For RGB all three color planes are checked against alpha; otherwise
     * only the first plane is. */
    const int nb_planes = (s->desc->flags & AV_PIX_FMT_FLAG_RGB) ? 3 : 1;
    const ptrdiff_t alpha_stride = in->linesize[s->idx_a];
    const uint8_t *alpha = in->data[s->idx_a] + y_start * alpha_stride;

    /**
     * To check if a value is out of range, we need to compare the color value
     * against the maximum possible color for a given alpha value.
     *   x > ((mpeg_max - mpeg_min) / pixel_max) * a + mpeg_min
     *
     * This simplifies to:
     *   (x - mpeg_min) * pixel_max > (mpeg_max - mpeg_min) * a
     * = alpha_max * x - offset > mpeg_range * a in the below formula.
     *
     * We subtract an additional offset of (1 << (depth - 1)) to account for
     * rounding errors in the value of `x`.
     */
    const int alpha_max = (1 << s->depth) - 1;
    const int mpeg_range = s->mpeg_max - s->mpeg_min;
    const int offset = alpha_max * s->mpeg_min + (1 << (s->depth - 1));

    int ret = 0;
    for (int i = 0; i < nb_planes; i++) {
        const ptrdiff_t stride = in->linesize[i];
        ret = s->dsp.detect_alpha(in->data[i] + y_start * stride, stride,
                                  alpha, alpha_stride, w, h_slice, alpha_max,
                                  mpeg_range, offset);
        /* Publish this plane's result and merge back the previous global
         * state: fetch_or returns the prior value, so `ret` also picks up
         * detections made by other slices/planes, enabling the early exit
         * below as soon as straight alpha has been proven anywhere. */
        ret |= atomic_fetch_or_explicit(&s->detected_alpha, ret, memory_order_relaxed);
        if (ret == FF_ALPHA_STRAIGHT)
            break;
    }

    return 0;
}
191 
193 {
194  AVFilterContext *ctx = inlink->dst;
195  ColorDetectContext *s = ctx->priv;
196  const int nb_threads = FFMIN(inlink->h, s->nb_threads);
197 
198  enum AVColorRange detected_range = atomic_load_explicit(&s->detected_range, memory_order_relaxed);
199  if (s->mode & COLOR_DETECT_COLOR_RANGE && detected_range == AVCOL_RANGE_UNSPECIFIED)
200  ff_filter_execute(ctx, detect_range, in, NULL, nb_threads);
201 
202  enum FFAlphaDetect detected_alpha = atomic_load_explicit(&s->detected_alpha, memory_order_relaxed);
203  if (s->mode & COLOR_DETECT_ALPHA_MODE && detected_alpha != FF_ALPHA_NONE &&
204  detected_alpha != FF_ALPHA_STRAIGHT)
205  ff_filter_execute(ctx, detect_alpha, in, NULL, nb_threads);
206 
207  return ff_filter_frame(inlink->dst->outputs[0], in);
208 }
209 
211 {
212  ColorDetectContext *s = ctx->priv;
213  if (!s->mode)
214  return;
215 
216  av_log(ctx, AV_LOG_INFO, "Detected color properties:\n");
217  if (s->mode & COLOR_DETECT_COLOR_RANGE) {
218  enum AVColorRange detected_range = atomic_load_explicit(&s->detected_range,
219  memory_order_relaxed);
220  av_log(ctx, AV_LOG_INFO, " Color range: %s\n",
221  detected_range == AVCOL_RANGE_JPEG ? "JPEG / full range"
222  : "undetermined");
223  }
224 
225  if (s->mode & COLOR_DETECT_ALPHA_MODE) {
226  enum FFAlphaDetect detected_alpha = atomic_load_explicit(&s->detected_alpha,
227  memory_order_relaxed);
228  av_log(ctx, AV_LOG_INFO, " Alpha mode: %s\n",
229  detected_alpha == FF_ALPHA_NONE ? "none" :
230  detected_alpha == FF_ALPHA_STRAIGHT ? "straight" :
231  detected_alpha == FF_ALPHA_TRANSPARENT ? "undetermined"
232  : "opaque");
233  }
234 }
235 
237 {
238  AVFilterLink *inlink = ctx->inputs[0];
239  AVFilterLink *outlink = ctx->outputs[0];
240  AVFrame *frame;
241  int64_t pts;
242  int ret;
243 
244  ret = ff_outlink_get_status(outlink);
245  if (ret) {
248  return 0;
249  }
250 
252  if (ret < 0) {
253  return ret;
254  } else if (ret) {
255  return filter_frame(inlink, frame);
256  }
257 
259  ff_outlink_set_status(outlink, ret, pts);
261  return 0;
262  }
263 
265  return FFERROR_NOT_READY;
266 }
267 
/* Single video input; config_props caches the format properties and resets
 * the detection state. */
static const AVFilterPad colordetect_inputs[] = {
    {
        .name = "default",
        .type = AVMEDIA_TYPE_VIDEO,
        .config_props = config_input,
    },
};
275 
277  .p.name = "colordetect",
278  .p.description = NULL_IF_CONFIG_SMALL("Detect video color properties."),
279  .p.priv_class = &colordetect_class,
281  .priv_size = sizeof(ColorDetectContext),
285  .activate = activate,
286 };
formats
formats
Definition: signature.h:47
ColorDetectContext::mode
unsigned mode
Definition: vf_colordetect.c:50
ColorDetectContext::desc
const AVPixFmtDescriptor * desc
Definition: vf_colordetect.c:52
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
opt.h
ff_filter_frame
int ff_filter_frame(AVFilterLink *link, AVFrame *frame)
Send a frame of data to the next filter.
Definition: avfilter.c:1067
av_pix_fmt_desc_get
const AVPixFmtDescriptor * av_pix_fmt_desc_get(enum AVPixelFormat pix_fmt)
Definition: pixdesc.c:3456
FFERROR_NOT_READY
return FFERROR_NOT_READY
Definition: filter_design.txt:204
ff_set_common_formats2
int ff_set_common_formats2(const AVFilterContext *ctx, AVFilterFormatsConfig **cfg_in, AVFilterFormatsConfig **cfg_out, AVFilterFormats *formats)
Definition: formats.c:1124
int64_t
long long int64_t
Definition: coverity.c:34
inlink
The exact code depends on how similar the blocks are and how related they are to the and needs to apply these operations to the correct inlink or outlink if there are several Macros are available to factor that when no extra processing is inlink
Definition: filter_design.txt:212
ColorDetectContext::idx_a
int idx_a
Definition: vf_colordetect.c:55
AV_PIX_FMT_FLAG_FLOAT
#define AV_PIX_FMT_FLAG_FLOAT
The pixel format contains IEEE-754 floating point values.
Definition: pixdesc.h:158
detect_range
static int detect_range(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
Definition: vf_colordetect.c:129
FILTER_INPUTS
#define FILTER_INPUTS(array)
Definition: filters.h:263
mode
Definition: swscale.c:56
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:427
pixdesc.h
AVFrame::width
int width
Definition: frame.h:499
w
uint8_t w
Definition: llviddspenc.c:38
AVCOL_RANGE_JPEG
@ AVCOL_RANGE_JPEG
Full range content.
Definition: pixfmt.h:777
AVOption
AVOption.
Definition: opt.h:429
atomic_int
intptr_t atomic_int
Definition: stdatomic.h:55
AVFilter::name
const char * name
Filter name.
Definition: avfilter.h:220
video.h
ff_vf_colordetect
const FFFilter ff_vf_colordetect
Definition: vf_colordetect.c:276
AVFrame::data
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: frame.h:448
AVFilterFormats
A list of supported formats for one end of a filter link.
Definition: formats.h:64
formats.h
config_input
static int config_input(AVFilterLink *inlink)
Definition: vf_colordetect.c:95
ff_inlink_consume_frame
int ff_inlink_consume_frame(AVFilterLink *link, AVFrame **rframe)
Take a frame from the link's FIFO and update the link's stats.
Definition: avfilter.c:1517
AV_PIX_FMT_FLAG_HWACCEL
#define AV_PIX_FMT_FLAG_HWACCEL
Pixel format is an HW accelerated format.
Definition: pixdesc.h:128
pts
static int64_t pts
Definition: transcode_aac.c:644
AVFILTER_DEFINE_CLASS
AVFILTER_DEFINE_CLASS(colordetect)
AVFilterPad
A filter pad used for either input or output.
Definition: filters.h:39
colordetect_inputs
static const AVFilterPad colordetect_inputs[]
Definition: vf_colordetect.c:268
report_detected_props
static av_cold void report_detected_props(AVFilterContext *ctx)
Definition: vf_colordetect.c:210
av_cold
#define av_cold
Definition: attributes.h:106
vf_colordetectdsp.h
ff_video_default_filterpad
const AVFilterPad ff_video_default_filterpad[1]
An AVFilterPad array whose only entry has name "default" and is of type AVMEDIA_TYPE_VIDEO.
Definition: video.c:37
FFFilter
Definition: filters.h:266
FFAlphaDetect
FFAlphaDetect
Definition: vf_colordetectdsp.h:31
ff_outlink_set_status
static void ff_outlink_set_status(AVFilterLink *link, int status, int64_t pts)
Set the status field of a link from the source filter.
Definition: filters.h:628
s
#define s(width, name)
Definition: cbs_vp9.c:198
filters.h
COLOR_DETECT_COLOR_RANGE
@ COLOR_DETECT_COLOR_RANGE
Definition: vf_colordetect.c:43
AV_PIX_FMT_FLAG_ALPHA
#define AV_PIX_FMT_FLAG_ALPHA
The pixel format has an alpha channel.
Definition: pixdesc.h:147
ctx
AVFormatContext * ctx
Definition: movenc.c:49
FILTER_OUTPUTS
#define FILTER_OUTPUTS(array)
Definition: filters.h:264
arg
const char * arg
Definition: jacosubdec.c:67
AVClass
Describe the class of an AVClass context structure.
Definition: log.h:76
NULL
#define NULL
Definition: coverity.c:32
ColorDetectContext::detected_alpha
atomic_int detected_alpha
Definition: vf_colordetect.c:60
ColorDetectMode
ColorDetectMode
Definition: vf_colordetect.c:42
FF_ALPHA_TRANSPARENT
@ FF_ALPHA_TRANSPARENT
alpha < alpha_max
Definition: vf_colordetectdsp.h:34
detect_alpha
static int detect_alpha(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
Definition: vf_colordetect.c:147
FF_ALPHA_NONE
@ FF_ALPHA_NONE
Definition: vf_colordetectdsp.h:32
ColorDetectContext
Definition: vf_colordetect.c:47
ff_inlink_acknowledge_status
int ff_inlink_acknowledge_status(AVFilterLink *link, int *rstatus, int64_t *rpts)
Test and acknowledge the change of status on the link.
Definition: avfilter.c:1464
AVCOL_RANGE_UNSPECIFIED
@ AVCOL_RANGE_UNSPECIFIED
Definition: pixfmt.h:743
atomic_load_explicit
#define atomic_load_explicit(object, order)
Definition: stdatomic.h:96
AVFilterFormatsConfig
Lists of formats / etc.
Definition: avfilter.h:121
OFFSET
#define OFFSET(x)
Definition: vf_colordetect.c:63
ColorDetectContext::nb_threads
int nb_threads
Definition: vf_colordetect.c:53
ff_inlink_set_status
void ff_inlink_set_status(AVFilterLink *link, int status)
Set the status on an input link.
Definition: avfilter.c:1629
NULL_IF_CONFIG_SMALL
#define NULL_IF_CONFIG_SMALL(x)
Return NULL if CONFIG_SMALL is true, otherwise the argument without modification.
Definition: internal.h:94
AV_PIX_FMT_FLAG_RGB
#define AV_PIX_FMT_FLAG_RGB
The pixel format contains RGB-like data (as opposed to YUV/grayscale).
Definition: pixdesc.h:136
AV_PIX_FMT_FLAG_BITSTREAM
#define AV_PIX_FMT_FLAG_BITSTREAM
All values of a component are bit-wise packed end to end.
Definition: pixdesc.h:124
query_format
static int query_format(const AVFilterContext *ctx, AVFilterFormatsConfig **cfg_in, AVFilterFormatsConfig **cfg_out)
Definition: vf_colordetect.c:76
offset
it s the only field you need to keep assuming you have a context There is some magic you don t need to care about around this just let it vf offset
Definition: writing_filters.txt:86
FF_FILTER_FORWARD_WANTED
FF_FILTER_FORWARD_WANTED(outlink, inlink)
AV_LOG_INFO
#define AV_LOG_INFO
Standard information.
Definition: log.h:221
AV_PIX_FMT_FLAG_BAYER
#define AV_PIX_FMT_FLAG_BAYER
The pixel format is following a Bayer pattern.
Definition: pixdesc.h:152
ff_formats_pixdesc_filter
AVFilterFormats * ff_formats_pixdesc_filter(unsigned want, unsigned rej)
Construct a formats list containing all pixel formats with certain properties.
Definition: formats.c:617
i
#define i(width, name, range_min, range_max)
Definition: cbs_h2645.c:256
ColorDetectContext::depth
int depth
Definition: vf_colordetect.c:54
ff_filter_get_nb_threads
int ff_filter_get_nb_threads(AVFilterContext *ctx)
Get number of threads for current filter instance.
Definition: avfilter.c:845
atomic_store_explicit
#define atomic_store_explicit(object, desired, order)
Definition: stdatomic.h:90
FILTER_QUERY_FUNC2
#define FILTER_QUERY_FUNC2(func)
Definition: filters.h:240
colordetect_options
static const AVOption colordetect_options[]
Definition: vf_colordetect.c:66
FFMIN
#define FFMIN(a, b)
Definition: macros.h:49
AV_PIX_FMT_FLAG_BE
#define AV_PIX_FMT_FLAG_BE
Pixel format is big-endian.
Definition: pixdesc.h:116
AVFilterPad::name
const char * name
Pad name.
Definition: filters.h:45
stride
#define stride
Definition: h264pred_template.c:536
ret
ret
Definition: filter_design.txt:187
ColorDetectContext::dsp
FFColorDetectDSPContext dsp
Definition: vf_colordetect.c:49
frame
these buffered frames must be flushed immediately if a new input produces new the filter must not call request_frame to get more It must just process the frame or queue it The task of requesting more frames is left to the filter s request_frame method or the application If a filter has several the filter must be ready for frames arriving randomly on any input any filter with several inputs will most likely require some kind of queuing mechanism It is perfectly acceptable to have a limited queue and to drop frames when the inputs are too unbalanced request_frame For filters that do not use the this method is called when a frame is wanted on an output For a it should directly call filter_frame on the corresponding output For a if there are queued frames already one of these frames should be pushed If the filter should request a frame on one of its repeatedly until at least one frame has been pushed Return or at least make progress towards producing a frame
Definition: filter_design.txt:265
AVFrame::height
int height
Definition: frame.h:499
filter_frame
static int filter_frame(AVFilterLink *inlink, AVFrame *in)
Definition: vf_colordetect.c:192
AV_PIX_FMT_FLAG_XYZ
#define AV_PIX_FMT_FLAG_XYZ
The pixel format contains XYZ-like data (as opposed to YUV/RGB/grayscale).
Definition: pixdesc.h:163
ff_filter_execute
int ff_filter_execute(AVFilterContext *ctx, avfilter_action_func *func, void *arg, int *ret, int nb_jobs)
Definition: avfilter.c:1693
avfilter.h
atomic_fetch_or_explicit
#define atomic_fetch_or_explicit(object, operand, order)
Definition: stdatomic.h:155
AVFILTER_FLAG_METADATA_ONLY
#define AVFILTER_FLAG_METADATA_ONLY
The filter is a "metadata" filter - it does not modify the frame data in any way.
Definition: avfilter.h:183
AV_PIX_FMT_FLAG_PLANAR
#define AV_PIX_FMT_FLAG_PLANAR
At least one pixel component is not in the first data plane.
Definition: pixdesc.h:132
ColorDetectContext::mpeg_max
int mpeg_max
Definition: vf_colordetect.c:57
ff_color_detect_dsp_init
static av_cold void ff_color_detect_dsp_init(FFColorDetectDSPContext *dsp, int depth, enum AVColorRange color_range)
Definition: vf_colordetectdsp.h:199
ff_outlink_get_status
int ff_outlink_get_status(AVFilterLink *link)
Get the status on an output link.
Definition: avfilter.c:1645
AVFilterContext
An instance of a filter.
Definition: avfilter.h:274
AVFILTER_FLAG_SLICE_THREADS
#define AVFILTER_FLAG_SLICE_THREADS
The filter supports multithreading by splitting frames into multiple parts and processing them concur...
Definition: avfilter.h:167
desc
const char * desc
Definition: libsvtav1.c:78
AVMEDIA_TYPE_VIDEO
@ AVMEDIA_TYPE_VIDEO
Definition: avutil.h:200
FFFilter::p
AVFilter p
The public AVFilter.
Definition: filters.h:270
mem.h
AVPixFmtDescriptor
Descriptor that unambiguously describes how the bits of a pixel are stored in the up to 4 data planes...
Definition: pixdesc.h:69
COLOR_DETECT_ALPHA_MODE
@ COLOR_DETECT_ALPHA_MODE
Definition: vf_colordetect.c:44
ColorDetectContext::detected_range
atomic_int detected_range
Definition: vf_colordetect.c:59
alpha
static const int16_t alpha[]
Definition: ilbcdata.h:55
FFColorDetectDSPContext
Definition: vf_colordetectdsp.h:39
AV_OPT_TYPE_FLAGS
@ AV_OPT_TYPE_FLAGS
Underlying C type is unsigned int.
Definition: opt.h:255
AVFrame::linesize
int linesize[AV_NUM_DATA_POINTERS]
For video, a positive or negative value, which is typically indicating the size in bytes of each pict...
Definition: frame.h:472
activate
static int activate(AVFilterContext *ctx)
Definition: vf_colordetect.c:236
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:27
h
h
Definition: vp9dsp_template.c:2070
atomic_init
#define atomic_init(obj, value)
Definition: stdatomic.h:33
AVColorRange
AVColorRange
Visual content value range.
Definition: pixfmt.h:742
FF_ALPHA_UNDETERMINED
@ FF_ALPHA_UNDETERMINED
Definition: vf_colordetectdsp.h:33
AV_PIX_FMT_FLAG_PAL
#define AV_PIX_FMT_FLAG_PAL
Pixel format has a palette in data[1], values are indexes in this palette.
Definition: pixdesc.h:120
AV_OPT_TYPE_CONST
@ AV_OPT_TYPE_CONST
Special option type for declaring named constants.
Definition: opt.h:299
ColorDetectContext::mpeg_min
int mpeg_min
Definition: vf_colordetect.c:56
FLAGS
#define FLAGS
Definition: vf_colordetect.c:64
FF_ALPHA_STRAIGHT
@ FF_ALPHA_STRAIGHT
alpha < pixel
Definition: vf_colordetectdsp.h:35