FFmpeg
vf_colormap.c
/*
 * Copyright (c) 2022 Paul B Mahol
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
/**
 * @file
 * Compute a look-up table from a map of colors.
 */
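/*
 * Illustrative use (hypothetical file names and option values, not taken from
 * the FFmpeg documentation): the filter takes three video inputs -- the
 * stream to process plus a "source" and a "target" map laid out as a grid of
 * color patches -- and remaps the colors of the first input accordingly:
 *
 *   ffmpeg -i input.mkv -i source_map.png -i target_map.png \
 *          -lavfi "[0:v][1:v][2:v]colormap=patch_size=64x64:type=absolute:kernel=euclidean" \
 *          output.mkv
 */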
#include "libavutil/attributes.h"
#include "libavutil/avassert.h"
#include "libavutil/common.h"
#include "libavutil/opt.h"
#include "avfilter.h"
#include "filters.h"
#include "framesync.h"
#include "video.h"

#define MAX_SIZE 64
enum KernelType {
    EUCLIDEAN,
    WEUCLIDEAN,
    NB_KERNELS,
};
typedef struct ColorMapContext {
    const AVClass *class;
    int w, h;
    int size;
    int nb_maps;
    int changed[2];

    float source[MAX_SIZE][4];
    float ttarget[MAX_SIZE][4];
    float target[MAX_SIZE][4];
    float icoeff[4][4];
    float coeff[MAX_SIZE][4];

    int target_type;
    int kernel_type;
    float (*kernel)(const float *x, const float *y);

    FFFrameSync fs;

    double A[(MAX_SIZE + 4) * (MAX_SIZE + 4)];
    double b[MAX_SIZE + 4];
    int pivot[MAX_SIZE + 4];
} ColorMapContext;
#define OFFSET(x) offsetof(ColorMapContext, x)
#define FLAGS AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_RUNTIME_PARAM

static const AVOption colormap_options[] = {
    { "patch_size", "set patch size", OFFSET(w), AV_OPT_TYPE_IMAGE_SIZE, {.str = "64x64"}, 0, 0, FLAGS },
    { "nb_patches", "set number of patches", OFFSET(size), AV_OPT_TYPE_INT, {.i64 = 0}, 0, MAX_SIZE, FLAGS },
    { "type", "set the target type used", OFFSET(target_type), AV_OPT_TYPE_INT, {.i64=1}, 0, 1, FLAGS, .unit = "type" },
    { "relative", "the target colors are relative", 0, AV_OPT_TYPE_CONST, {.i64=0}, 0, 1, FLAGS, .unit = "type" },
    { "absolute", "the target colors are absolute", 0, AV_OPT_TYPE_CONST, {.i64=1}, 0, 1, FLAGS, .unit = "type" },
    { "kernel", "set the kernel used for measuring color difference", OFFSET(kernel_type), AV_OPT_TYPE_INT, {.i64=0}, 0, NB_KERNELS-1, FLAGS, .unit = "kernel" },
    { "euclidean", "square root of sum of squared differences", 0, AV_OPT_TYPE_CONST, {.i64=EUCLIDEAN}, 0, 0, FLAGS, .unit = "kernel" },
    { "weuclidean", "weighted square root of sum of squared differences", 0, AV_OPT_TYPE_CONST, {.i64=WEUCLIDEAN}, 0, 0, FLAGS, .unit = "kernel" },
    { NULL }
};
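/*
 * In-place Gaussian elimination (LU factorization) with partial pivoting on a
 * dense, row-major n x n matrix.  The chosen pivot rows are recorded in p and
 * the elimination multipliers are stored negated in place of the zeroed
 * entries, so the result can be fed to gauss_solve_triangular() below.
 * Returns 1 on success, 0 if a zero pivot is met (singular matrix).
 */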
static int gauss_make_triangular(double *A, int *p, int n)
{
    p[n - 1] = n - 1;
    for (int k = 0; k < n; k++) {
        double t1;
        int m = k;

        for (int i = k + 1; i < n; i++)
            if (fabs(A[k + n * i]) > fabs(A[k + n * m]))
                m = i;
        p[k] = m;
        t1 = A[k + n * m];
        A[k + n * m] = A[k + n * k];
        A[k + n * k] = t1;
        if (t1 != 0) {
            for (int i = k + 1; i < n; i++)
                A[k + n * i] /= -t1;
            if (k != m)
                for (int i = k + 1; i < n; i++) {
                    double t2 = A[i + n * m];
                    A[i + n * m] = A[i + n * k];
                    A[i + n * k] = t2;
                }
            for (int j = k + 1; j < n; j++)
                for (int i = k + 1; i < n; i++)
                    A[i + n * j] += A[k + j * n] * A[i + k * n];
        } else {
            return 0;
        }
    }

    return 1;
}
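/*
 * Back end of the solver: apply the row exchanges and stored multipliers from
 * gauss_make_triangular() in a forward pass, then back-substitute against the
 * upper triangle.  The solution overwrites b.
 */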
static void gauss_solve_triangular(const double *A, const int *p, double *b, int n)
{
    for (int k = 0; k < n - 1; k++) {
        int m = p[k];
        double t = b[m];
        b[m] = b[k];
        b[k] = t;
        for (int i = k + 1; i < n; i++)
            b[i] += A[k + n * i] * t;
    }

    for (int k = n - 1; k > 0; k--) {
        double t = b[k] /= A[k + n * k];
        for (int i = 0; i < k; i++)
            b[i] -= A[k + n * i] * t;
    }

    b[0] /= A[0 + 0 * n];
}
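/*
 * Convenience wrapper for the small (n <= 3) exactly determined systems built
 * in build_map().  Both A and b are modified in place; returns 0 on success,
 * non-zero if the matrix is singular.
 *
 * Worked example with made-up numbers (not part of the filter):
 *
 *     double A[2 * 2] = { 1, 2,      // x + 2y = 5
 *                         1, 3 };    // x + 3y = 7
 *     double b[2]     = { 5, 7 };
 *     gauss_solve(A, b, 2);          // now b[0] == 1, b[1] == 2
 */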
static int gauss_solve(double *A, double *b, int n)
{
    int p[3] = { 0 };

    av_assert2(n <= FF_ARRAY_ELEMS(p));

    if (!gauss_make_triangular(A, p, n))
        return 1;

    gauss_solve_triangular(A, p, b, n);

    return 0;
}

#define P2(x) ((x)*(x))
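/* Plain Euclidean distance between two colors; components are R, G, B floats. */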
static float euclidean_kernel(const float *x, const float *y)
{
    const float d2 = P2(x[0]-y[0]) +
                     P2(x[1]-y[1]) +
                     P2(x[2]-y[2]);
    return sqrtf(d2);
}
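/*
 * Weighted Euclidean distance: the red and blue weights vary with the mean
 * red level of the two colors, close in spirit to the well-known "redmean"
 * low-cost approximation of perceptual color difference, here applied to
 * [0,1] float components.
 */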
static float weuclidean_kernel(const float *x, const float *y)
{
    const float rm = (x[0] + y[0]) * 0.5f;
    const float d2 = P2(x[0]-y[0]) * (2.f + rm) +
                     P2(x[1]-y[1]) * 4.f +
                     P2(x[2]-y[2]) * (3.f - rm);
    return sqrtf(d2);
}
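/*
 * Rebuild the color mapping from the currently imported patches.  Target
 * colors are used as-is ("absolute") or added to the source colors
 * ("relative").  Then, per output channel:
 *
 * - with 1 to 4 color pairs, a small exactly determined system is solved for
 *   the affine coefficients (constant term plus per-channel terms) stored in
 *   icoeff;
 * - with more than 4 pairs, a scattered-data interpolation system in the
 *   style of radial basis function / thin-plate spline fitting is solved:
 *
 *       [ K   P ] [ w ]   [ t ]
 *       [ P^T 0 ] [ a ] = [ 0 ]
 *
 *   with K[i][j] = kernel(source[i], source[j]) and P holding 1 plus the
 *   three source components per row; the per-pair weights w land in coeff
 *   and the affine part a in icoeff.
 */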
static void build_map(AVFilterContext *ctx)
{
    ColorMapContext *s = ctx->priv;

    for (int j = 0; j < s->nb_maps; j++) {
        s->target[j][0] = s->target_type == 0 ? s->source[j][0] + s->ttarget[j][0] : s->ttarget[j][0];
        s->target[j][1] = s->target_type == 0 ? s->source[j][1] + s->ttarget[j][1] : s->ttarget[j][1];
        s->target[j][2] = s->target_type == 0 ? s->source[j][2] + s->ttarget[j][2] : s->ttarget[j][2];
    }

    for (int c = 0; c < 3; c++) {
        for (int j = 0; j < s->nb_maps; j++)
            s->coeff[j][c] = 0.f;

        for (int j = 0; j < 4; j++) {
            s->icoeff[j][c] = 0;
            s->icoeff[j][c] = 0;
            s->icoeff[j][c] = 0;
        }

        s->icoeff[c+1][c] = 1.f;

        switch (s->nb_maps) {
        case 1:
            {
                float div = fabsf(s->source[0][c]) < 1e-6f ? 1e-6f : s->source[0][c];
                s->icoeff[c][1+c] = s->target[0][c] / div;
            }
            break;
        case 2:
            {
                double A[2 * 2] = { 1, s->source[0][c],
                                    1, s->source[1][c] };
                double b[2] = { s->target[0][c], s->target[1][c] };

                if (gauss_solve(A, b, 2))
                    continue;

                s->icoeff[0  ][c] = b[0];
                s->icoeff[1+c][c] = b[1];
            }
            break;
        case 3:
            {
                const uint8_t idx[3][3] = {{ 0, 1, 2 },
                                           { 1, 0, 2 },
                                           { 2, 0, 1 }};
                const uint8_t didx[3][4] = {{ 0, 1, 2, 2 },
                                            { 0, 2, 1, 2 },
                                            { 0, 2, 2, 1 }};
                const int C0 = idx[c][0];
                const int C1 = idx[c][1];
                const int C2 = idx[c][2];
                double A[3 * 3] = { 1, s->source[0][C0], s->source[0][C1] + s->source[0][C2],
                                    1, s->source[1][C0], s->source[1][C1] + s->source[1][C2],
                                    1, s->source[2][C0], s->source[2][C1] + s->source[2][C2] };
                double b[3] = { s->target[0][c], s->target[1][c], s->target[2][c] };

                if (gauss_solve(A, b, 3))
                    continue;

                s->icoeff[0][c] = b[didx[c][0]];
                s->icoeff[1][c] = b[didx[c][1]];
                s->icoeff[2][c] = b[didx[c][2]];
                s->icoeff[3][c] = b[didx[c][3]];
            }
            break;
        case 4:
            {
                double A[4 * 4] = { 1, s->source[0][0], s->source[0][1], s->source[0][2],
                                    1, s->source[1][0], s->source[1][1], s->source[1][2],
                                    1, s->source[2][0], s->source[2][1], s->source[2][2],
                                    1, s->source[3][0], s->source[3][1], s->source[3][2] };
                double b[4] = { s->target[0][c], s->target[1][c], s->target[2][c], s->target[3][c] };
                int pivot[4];

                if (!gauss_make_triangular(A, pivot, 4))
                    continue;
                gauss_solve_triangular(A, pivot, b, 4);

                s->icoeff[0][c] = b[0];
                s->icoeff[1][c] = b[1];
                s->icoeff[2][c] = b[2];
                s->icoeff[3][c] = b[3];
            }
            break;
        default:
            {
                const int N = s->nb_maps;
                const int N4 = N + 4;
                double *A = s->A;
                double *b = s->b;
                int *pivot = s->pivot;

                for (int j = 0; j < N; j++)
                    for (int i = j; i < N; i++)
                        A[j*N4+i] = A[i*N4+j] = s->kernel(s->source[i], s->source[j]);

                for (int i = 0; i < N; i++)
                    A[i*N4+N+0] = A[(N+0)*N4+i] = 1;
                for (int i = 0; i < N; i++)
                    A[i*N4+N+1] = A[(N+1)*N4+i] = s->source[i][0];
                for (int i = 0; i < N; i++)
                    A[i*N4+N+2] = A[(N+2)*N4+i] = s->source[i][1];
                for (int i = 0; i < N; i++)
                    A[i*N4+N+3] = A[(N+3)*N4+i] = s->source[i][2];

                for (int j = N; j < N4; j++)
                    for (int i = N; i < N4; i++)
                        A[j * N4 + i] = 0.;

                if (gauss_make_triangular(A, pivot, N4)) {
                    for (int i = 0; i < N; i++)
                        b[i] = s->target[i][c];
                    for (int i = N; i < N + 4; i++)
                        b[i] = 0;

                    gauss_solve_triangular(A, pivot, b, N4);

                    for (int i = 0; i < N; i++)
                        s->coeff[i][c] = b[i];

                    for (int i = 0; i < 4; i++)
                        s->icoeff[i][c] = b[N + i];
                }
            }
        }
    }
}
typedef struct ThreadData {
    AVFrame *in, *out;
} ThreadData;
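/*
 * Slice worker: jobnr/nb_jobs select a band of rows.  Frames are planar float
 * RGB (plane 0 = G, 1 = B, 2 = R); linesizes are divided by 4 to step in
 * floats.  Each pixel first goes through the affine map in icoeff; when more
 * than four color pairs are in use, kernel-weighted contributions from coeff
 * are added on top.
 */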
static int colormap_slice(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
{
    ColorMapContext *s = ctx->priv;
    ThreadData *td = arg;
    AVFrame *in = td->in;
    AVFrame *out = td->out;
    const int maps = s->nb_maps;
    const int width = out->width;
    const int height = out->height;
    const int slice_start = (height * jobnr) / nb_jobs;
    const int slice_end = (height * (jobnr + 1)) / nb_jobs;
    const int sr_linesize = in->linesize[2] / 4;
    const int dr_linesize = out->linesize[2] / 4;
    const int sg_linesize = in->linesize[0] / 4;
    const int dg_linesize = out->linesize[0] / 4;
    const int sb_linesize = in->linesize[1] / 4;
    const int db_linesize = out->linesize[1] / 4;
    const float *sr = (float *)in->data[2] + slice_start * sr_linesize;
    const float *sg = (float *)in->data[0] + slice_start * sg_linesize;
    const float *sb = (float *)in->data[1] + slice_start * sb_linesize;
    float *r = (float *)out->data[2] + slice_start * dr_linesize;
    float *g = (float *)out->data[0] + slice_start * dg_linesize;
    float *b = (float *)out->data[1] + slice_start * db_linesize;
    float (*kernel)(const float *x, const float *y) = s->kernel;
    const float *icoeff[4] = { s->icoeff[0], s->icoeff[1], s->icoeff[2], s->icoeff[3] };

    for (int y = slice_start; y < slice_end; y++) {
        for (int x = 0; x < width; x++) {
            const float input[3] = { sr[x], sg[x], sb[x] };
            float srv, sgv, sbv;
            float rv, gv, bv;

            srv = sr[x];
            sgv = sg[x];
            sbv = sb[x];

            rv = icoeff[0][0];
            gv = icoeff[0][1];
            bv = icoeff[0][2];

            rv += icoeff[1][0] * srv + icoeff[2][0] * sgv + icoeff[3][0] * sbv;
            gv += icoeff[1][1] * srv + icoeff[2][1] * sgv + icoeff[3][1] * sbv;
            bv += icoeff[1][2] * srv + icoeff[2][2] * sgv + icoeff[3][2] * sbv;

            for (int z = 0; z < maps && maps > 4; z++) {
                const float *coeff = s->coeff[z];
                const float cr = coeff[0];
                const float cg = coeff[1];
                const float cb = coeff[2];
                const float f = kernel(input, s->source[z]);

                rv += f * cr;
                gv += f * cg;
                bv += f * cb;
            }

            r[x] = rv;
            g[x] = gv;
            b[x] = bv;
        }

        sg += sg_linesize;
        g += dg_linesize;
        sb += sb_linesize;
        b += db_linesize;
        sr += sr_linesize;
        r += dr_linesize;
    }

    return 0;
}
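/*
 * Sample one color per patch from a map frame: the pixel at the center of
 * every patch_size cell is read, for up to MAX_SIZE patches, and stored in
 * R,G,B order in either the source table (second input) or the target table
 * (third input).  Any change marks that map as dirty so the mapping is
 * rebuilt; the number of usable pairs follows the source map, clamped to
 * nb_patches when that option is set.
 */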
static int import_map(AVFilterLink *inlink, AVFrame *in)
{
    AVFilterContext *ctx = inlink->dst;
    ColorMapContext *s = ctx->priv;
    const int is_target = FF_INLINK_IDX(inlink) > 1;
    const int pw = s->w;
    const int pw2 = s->w / 2;
    const int ph = s->h;
    const int ph2 = s->h / 2;
    int changed = 0;
    int idx;

    for (int plane = 0; plane < 3; plane++) {
        const int c = plane == 0 ? 1 : plane == 1 ? 2 : 0;

        idx = 0;
        for (int y = ph2; y < in->height && idx < MAX_SIZE; y += ph) {
            const float *src = (const float *)(in->data[plane] + y * in->linesize[plane]);

            for (int x = pw2; x < in->width && idx < MAX_SIZE; x += pw) {
                float value = src[x];

                if (is_target) {
                    if (s->ttarget[idx][c] != value)
                        changed = 1;
                    s->ttarget[idx][c] = value;
                } else {
                    if (s->source[idx][c] != value)
                        changed = 1;
                    s->source[idx][c] = value;
                }

                idx++;
            }
        }
    }

    if (changed)
        s->changed[is_target] = 1;
    if (!s->size)
        s->size = FFMIN(idx, MAX_SIZE);
    if (!is_target)
        s->nb_maps = FFMIN(idx, s->size);

    return 0;
}
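/*
 * framesync event callback: fetch the main frame (owned) and the current
 * source/target map frames (borrowed), re-import the maps, rebuild the
 * mapping if either map changed, then remap the frame with slice threading
 * unless the filter is disabled by the timeline.
 */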
static int process_frame(FFFrameSync *fs)
{
    AVFilterContext *ctx = fs->parent;
    ColorMapContext *s = fs->opaque;
    AVFilterLink *outlink = ctx->outputs[0];
    AVFrame *in, *out, *source, *target;
    ThreadData td;
    int ret;

    switch (s->kernel_type) {
    case EUCLIDEAN:
        s->kernel = euclidean_kernel;
        break;
    case WEUCLIDEAN:
        s->kernel = weuclidean_kernel;
        break;
    default:
        return AVERROR_BUG;
    }

    if ((ret = ff_framesync_get_frame(&s->fs, 0, &in, 1)) < 0 ||
        (ret = ff_framesync_get_frame(&s->fs, 1, &source, 0)) < 0 ||
        (ret = ff_framesync_get_frame(&s->fs, 2, &target, 0)) < 0)
        return ret;

    import_map(ctx->inputs[1], source);
    import_map(ctx->inputs[2], target);

    if (s->changed[0] || s->changed[1]) {
        build_map(ctx);
        s->changed[0] = s->changed[1] = 0;
    }

    if (!ctx->is_disabled) {
        if (av_frame_is_writable(in)) {
            out = in;
        } else {
            out = ff_get_video_buffer(outlink, outlink->w, outlink->h);
            if (!out) {
                av_frame_free(&in);
                return AVERROR(ENOMEM);
            }
            av_frame_copy_props(out, in);
        }

        td.in = in;
        td.out = out;
        ff_filter_execute(ctx, colormap_slice, &td, NULL,
                          FFMIN(outlink->h, ff_filter_get_nb_threads(ctx)));

        if (out != in)
            av_frame_free(&in);
    } else {
        out = in;
    }

    out->pts = av_rescale_q(s->fs.pts, s->fs.time_base, outlink->time_base);

    return ff_filter_frame(outlink, out);
}
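/*
 * The output inherits the main input's geometry, SAR and frame rate, and the
 * three inputs are tied together with framesync: all are fully synchronized,
 * each stream's last frame is extended indefinitely (EXT_INFINITY), and
 * timestamps before a stream's first frame stop output (EXT_STOP).
 */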
static int config_output(AVFilterLink *outlink)
{
    FilterLink *outl = ff_filter_link(outlink);
    AVFilterContext *ctx = outlink->src;
    ColorMapContext *s = ctx->priv;
    AVFilterLink *inlink = ctx->inputs[0];
    FilterLink *inl = ff_filter_link(inlink);
    AVFilterLink *source = ctx->inputs[1];
    AVFilterLink *target = ctx->inputs[2];
    FFFrameSyncIn *in;
    int ret;

    outlink->time_base = inlink->time_base;
    outl->frame_rate = inl->frame_rate;
    outlink->sample_aspect_ratio = inlink->sample_aspect_ratio;
    outlink->w = inlink->w;
    outlink->h = inlink->h;

    if ((ret = ff_framesync_init(&s->fs, ctx, 3)) < 0)
        return ret;

    in = s->fs.in;
    in[0].time_base = inlink->time_base;
    in[1].time_base = source->time_base;
    in[2].time_base = target->time_base;
    in[0].sync = 1;
    in[0].before = EXT_STOP;
    in[0].after = EXT_INFINITY;
    in[1].sync = 1;
    in[1].before = EXT_STOP;
    in[1].after = EXT_INFINITY;
    in[2].sync = 1;
    in[2].before = EXT_STOP;
    in[2].after = EXT_INFINITY;
    s->fs.opaque = s;
    s->fs.on_event = process_frame;

    ret = ff_framesync_configure(&s->fs);
    outlink->time_base = s->fs.time_base;

    return ret;
}
static int activate(AVFilterContext *ctx)
{
    ColorMapContext *s = ctx->priv;
    return ff_framesync_activate(&s->fs);
}

static av_cold void uninit(AVFilterContext *ctx)
{
    ColorMapContext *const s = ctx->priv;

    ff_framesync_uninit(&s->fs);
}
static const AVFilterPad inputs[] = {
    {
        .name = "default",
        .type = AVMEDIA_TYPE_VIDEO,
    },
    {
        .name = "source",
        .type = AVMEDIA_TYPE_VIDEO,
    },
    {
        .name = "target",
        .type = AVMEDIA_TYPE_VIDEO,
    },
};

static const AVFilterPad outputs[] = {
    {
        .name = "default",
        .type = AVMEDIA_TYPE_VIDEO,
        .config_props = config_output,
    },
};
AVFILTER_DEFINE_CLASS(colormap);

const AVFilter ff_vf_colormap = {
    .name            = "colormap",
    .description     = NULL_IF_CONFIG_SMALL("Apply custom Color Maps to video stream."),
    .priv_class      = &colormap_class,
    .priv_size       = sizeof(ColorMapContext),
    .activate        = activate,
    FILTER_INPUTS(inputs),
    FILTER_OUTPUTS(outputs),
    FILTER_PIXFMTS(AV_PIX_FMT_GBRPF32, AV_PIX_FMT_GBRAPF32),
    .flags           = AVFILTER_FLAG_SUPPORT_TIMELINE_INTERNAL |
                       AVFILTER_FLAG_SLICE_THREADS,
    .process_command = ff_filter_process_command,
    .uninit          = uninit,
};