FFmpeg
vf_signalstats.c
Go to the documentation of this file.
1 /*
2  * Copyright (c) 2010 Mark Heath mjpeg0 @ silicontrip dot org
3  * Copyright (c) 2014 Clément Bœsch
4  * Copyright (c) 2014 Dave Rice @dericed
5  *
6  * This file is part of FFmpeg.
7  *
8  * FFmpeg is free software; you can redistribute it and/or
9  * modify it under the terms of the GNU Lesser General Public
10  * License as published by the Free Software Foundation; either
11  * version 2.1 of the License, or (at your option) any later version.
12  *
13  * FFmpeg is distributed in the hope that it will be useful,
14  * but WITHOUT ANY WARRANTY; without even the implied warranty of
15  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16  * Lesser General Public License for more details.
17  *
18  * You should have received a copy of the GNU Lesser General Public
19  * License along with FFmpeg; if not, write to the Free Software
20  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
21  */
22 
23 #include "libavutil/intreadwrite.h"
24 #include "libavutil/mem.h"
25 #include "libavutil/opt.h"
26 #include "libavutil/pixdesc.h"
27 #include "filters.h"
28 #include "internal.h"
29 
30 enum FilterMode {
36 };
37 
38 typedef struct SignalstatsContext {
39  const AVClass *class;
40  int chromah; // height of chroma plane
41  int chromaw; // width of chroma plane
42  int hsub; // horizontal subsampling
43  int vsub; // vertical subsampling
44  int depth; // pixel depth
45  int fs; // pixel count per frame
46  int cfs; // pixel count per frame of chroma planes
47  int outfilter; // FilterMode
48  int filters;
50  uint8_t rgba_color[4];
51  int yuv_color[3];
52  int nb_jobs;
53  int *jobs_rets;
54 
55  int maxsize; // history stats array size
56  int *histy, *histu, *histv, *histsat;
57 
61 
/* Per-slice job context passed to the threaded analysis callbacks. */
typedef struct ThreadData {
    const AVFrame *in; // source frame being analyzed (read-only)
    AVFrame *out;      // frame to highlight ("burn") hits into, or NULL
} ThreadData;
66 
67 typedef struct ThreadDataHueSatMetrics {
68  const AVFrame *src;
71 
72 #define OFFSET(x) offsetof(SignalstatsContext, x)
73 #define FLAGS AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM
74 
static const AVOption signalstats_options[] = {
    /* "stat" is a bitmask of analyses to run; "out" selects at most one
     * analysis whose hits are highlighted in the output video. */
    {"stat", "set statistics filters", OFFSET(filters), AV_OPT_TYPE_FLAGS, {.i64=0}, 0, INT_MAX, FLAGS, .unit = "filters"},
    {"tout", "analyze pixels for temporal outliers", 0, AV_OPT_TYPE_CONST, {.i64=1<<FILTER_TOUT}, 0, 0, FLAGS, .unit = "filters"},
    {"vrep", "analyze video lines for vertical line repetition", 0, AV_OPT_TYPE_CONST, {.i64=1<<FILTER_VREP}, 0, 0, FLAGS, .unit = "filters"},
    {"brng", "analyze for pixels outside of broadcast range", 0, AV_OPT_TYPE_CONST, {.i64=1<<FILTER_BRNG}, 0, 0, FLAGS, .unit = "filters"},
    {"out", "set video filter", OFFSET(outfilter), AV_OPT_TYPE_INT, {.i64=FILTER_NONE}, -1, FILT_NUMB-1, FLAGS, .unit = "out"},
    {"tout", "highlight pixels that depict temporal outliers", 0, AV_OPT_TYPE_CONST, {.i64=FILTER_TOUT}, 0, 0, FLAGS, .unit = "out"},
    {"vrep", "highlight video lines that depict vertical line repetition", 0, AV_OPT_TYPE_CONST, {.i64=FILTER_VREP}, 0, 0, FLAGS, .unit = "out"},
    {"brng", "highlight pixels that are outside of broadcast range", 0, AV_OPT_TYPE_CONST, {.i64=FILTER_BRNG}, 0, 0, FLAGS, .unit = "out"},
    /* "c" and "color" are aliases; value is RGBA, converted to YUV in init() */
    {"c", "set highlight color", OFFSET(rgba_color), AV_OPT_TYPE_COLOR, {.str="yellow"}, .flags=FLAGS},
    {"color", "set highlight color", OFFSET(rgba_color), AV_OPT_TYPE_COLOR, {.str="yellow"}, .flags=FLAGS},
    {NULL}
};
88 
89 AVFILTER_DEFINE_CLASS(signalstats);
90 
92 {
93  uint8_t r, g, b;
94  SignalstatsContext *s = ctx->priv;
95 
96  if (s->outfilter != FILTER_NONE)
97  s->filters |= 1 << s->outfilter;
98 
99  r = s->rgba_color[0];
100  g = s->rgba_color[1];
101  b = s->rgba_color[2];
102  s->yuv_color[0] = (( 66*r + 129*g + 25*b + (1<<7)) >> 8) + 16;
103  s->yuv_color[1] = ((-38*r + -74*g + 112*b + (1<<7)) >> 8) + 128;
104  s->yuv_color[2] = ((112*r + -94*g + -18*b + (1<<7)) >> 8) + 128;
105  return 0;
106 }
107 
109 {
110  SignalstatsContext *s = ctx->priv;
111  av_frame_free(&s->frame_prev);
112  av_frame_free(&s->frame_sat);
113  av_frame_free(&s->frame_hue);
114  av_freep(&s->jobs_rets);
115  av_freep(&s->histy);
116  av_freep(&s->histu);
117  av_freep(&s->histv);
118  av_freep(&s->histsat);
119 }
120 
121 // TODO: add more
122 static const enum AVPixelFormat pix_fmts[] = {
135 };
136 
137 static AVFrame *alloc_frame(enum AVPixelFormat pixfmt, int w, int h)
138 {
140  if (!frame)
141  return NULL;
142 
143  frame->format = pixfmt;
144  frame->width = w;
145  frame->height = h;
146 
147  if (av_frame_get_buffer(frame, 0) < 0) {
149  return NULL;
150  }
151 
152  return frame;
153 }
154 
155 static int config_output(AVFilterLink *outlink)
156 {
157  AVFilterContext *ctx = outlink->src;
158  SignalstatsContext *s = ctx->priv;
159  AVFilterLink *inlink = outlink->src->inputs[0];
161  s->hsub = desc->log2_chroma_w;
162  s->vsub = desc->log2_chroma_h;
163  s->depth = desc->comp[0].depth;
164  s->maxsize = 1 << s->depth;
165  s->histy = av_malloc_array(s->maxsize, sizeof(*s->histy));
166  s->histu = av_malloc_array(s->maxsize, sizeof(*s->histu));
167  s->histv = av_malloc_array(s->maxsize, sizeof(*s->histv));
168  s->histsat = av_malloc_array(s->maxsize, sizeof(*s->histsat));
169 
170  if (!s->histy || !s->histu || !s->histv || !s->histsat)
171  return AVERROR(ENOMEM);
172 
173  outlink->w = inlink->w;
174  outlink->h = inlink->h;
175 
176  s->chromaw = AV_CEIL_RSHIFT(inlink->w, s->hsub);
177  s->chromah = AV_CEIL_RSHIFT(inlink->h, s->vsub);
178 
179  s->fs = inlink->w * inlink->h;
180  s->cfs = s->chromaw * s->chromah;
181 
182  s->nb_jobs = FFMAX(1, FFMIN(inlink->h, ff_filter_get_nb_threads(ctx)));
183  s->jobs_rets = av_malloc_array(s->nb_jobs, sizeof(*s->jobs_rets));
184  if (!s->jobs_rets)
185  return AVERROR(ENOMEM);
186 
187  s->frame_sat = alloc_frame(s->depth > 8 ? AV_PIX_FMT_GRAY16 : AV_PIX_FMT_GRAY8, inlink->w, inlink->h);
188  s->frame_hue = alloc_frame(AV_PIX_FMT_GRAY16, inlink->w, inlink->h);
189  if (!s->frame_sat || !s->frame_hue)
190  return AVERROR(ENOMEM);
191 
192  return 0;
193 }
194 
195 static void burn_frame8(const SignalstatsContext *s, AVFrame *f, int x, int y)
196 {
197  const int chromax = x >> s->hsub;
198  const int chromay = y >> s->vsub;
199  f->data[0][y * f->linesize[0] + x] = s->yuv_color[0];
200  f->data[1][chromay * f->linesize[1] + chromax] = s->yuv_color[1];
201  f->data[2][chromay * f->linesize[2] + chromax] = s->yuv_color[2];
202 }
203 
204 static void burn_frame16(const SignalstatsContext *s, AVFrame *f, int x, int y)
205 {
206  const int chromax = x >> s->hsub;
207  const int chromay = y >> s->vsub;
208  const int mult = 1 << (s->depth - 8);
209  AV_WN16(f->data[0] + y * f->linesize[0] + x * 2, s->yuv_color[0] * mult);
210  AV_WN16(f->data[1] + chromay * f->linesize[1] + chromax * 2, s->yuv_color[1] * mult);
211  AV_WN16(f->data[2] + chromay * f->linesize[2] + chromax * 2, s->yuv_color[2] * mult);
212 }
213 
214 static int filter8_brng(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
215 {
216  ThreadData *td = arg;
217  const SignalstatsContext *s = ctx->priv;
218  const AVFrame *in = td->in;
219  AVFrame *out = td->out;
220  const int w = in->width;
221  const int h = in->height;
222  const int slice_start = (h * jobnr ) / nb_jobs;
223  const int slice_end = (h * (jobnr+1)) / nb_jobs;
224  int x, y, score = 0;
225 
226  for (y = slice_start; y < slice_end; y++) {
227  const int yc = y >> s->vsub;
228  const uint8_t *pluma = &in->data[0][y * in->linesize[0]];
229  const uint8_t *pchromau = &in->data[1][yc * in->linesize[1]];
230  const uint8_t *pchromav = &in->data[2][yc * in->linesize[2]];
231 
232  for (x = 0; x < w; x++) {
233  const int xc = x >> s->hsub;
234  const int luma = pluma[x];
235  const int chromau = pchromau[xc];
236  const int chromav = pchromav[xc];
237  const int filt = luma < 16 || luma > 235 ||
238  chromau < 16 || chromau > 240 ||
239  chromav < 16 || chromav > 240;
240  score += filt;
241  if (out && filt)
242  burn_frame8(s, out, x, y);
243  }
244  }
245  return score;
246 }
247 
248 static int filter16_brng(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
249 {
250  ThreadData *td = arg;
251  const SignalstatsContext *s = ctx->priv;
252  const AVFrame *in = td->in;
253  AVFrame *out = td->out;
254  const int mult = 1 << (s->depth - 8);
255  const int w = in->width;
256  const int h = in->height;
257  const int slice_start = (h * jobnr ) / nb_jobs;
258  const int slice_end = (h * (jobnr+1)) / nb_jobs;
259  int x, y, score = 0;
260 
261  for (y = slice_start; y < slice_end; y++) {
262  const int yc = y >> s->vsub;
263  const uint16_t *pluma = (uint16_t *)&in->data[0][y * in->linesize[0]];
264  const uint16_t *pchromau = (uint16_t *)&in->data[1][yc * in->linesize[1]];
265  const uint16_t *pchromav = (uint16_t *)&in->data[2][yc * in->linesize[2]];
266 
267  for (x = 0; x < w; x++) {
268  const int xc = x >> s->hsub;
269  const int luma = pluma[x];
270  const int chromau = pchromau[xc];
271  const int chromav = pchromav[xc];
272  const int filt = luma < 16 * mult || luma > 235 * mult ||
273  chromau < 16 * mult || chromau > 240 * mult ||
274  chromav < 16 * mult || chromav > 240 * mult;
275  score += filt;
276  if (out && filt)
277  burn_frame16(s, out, x, y);
278  }
279  }
280  return score;
281 }
282 
/* Decide whether the middle sample y is an outlier relative to its two
 * neighbours x and z: the mean distance from y to each neighbour must
 * exceed the neighbours' mutual distance by more than 4.
 *
 * Parameters are int (not uint8_t) so that >8-bit samples passed from
 * filter16_tout are not silently truncated to their low 8 bits; 8-bit
 * callers see identical behavior. */
static int filter_tout_outlier(int x, int y, int z)
{
    return ((abs(x - y) + abs(z - y)) / 2) - abs(z - x) > 4; // make 4 configurable?
}
287 
/* TOUT slice worker, 8-bit path: count luma pixels that are temporal/spatial
 * outliers versus the lines above and below, optionally burning each hit
 * into td->out. Returns the hit count for this slice. */
static int filter8_tout(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
{
    ThreadData *td = arg;
    const SignalstatsContext *s = ctx->priv;
    const AVFrame *in = td->in;
    AVFrame *out = td->out;
    const int w = in->width;
    const int h = in->height;
    const int slice_start = (h *  jobnr   ) / nb_jobs;
    const int slice_end   = (h * (jobnr+1)) / nb_jobs;
    const uint8_t *p = in->data[0];
    int lw = in->linesize[0];
    int x, y, score = 0, filt;

    for (y = slice_start; y < slice_end; y++) {

        /* top and bottom line have no vertical neighbours: skip them */
        if (y - 1 < 0 || y + 1 >= h)
            continue;

        // detect two pixels above and below (to eliminate interlace artefacts)
        // should check that video format is infact interlaced.

/* NOTE: these macros capture the locals p, lw, x and y by name; they are
 * also reused unchanged by filter16_tout below, so those names must stay. */
#define FILTER(i, j) \
    filter_tout_outlier(p[(y-j) * lw + x + i], \
                        p[ y    * lw + x + i], \
                        p[(y+j) * lw + x + i])

#define FILTER3(j) (FILTER(-1, j) && FILTER(0, j) && FILTER(1, j))

        if (y - 2 >= 0 && y + 2 < h) {
            /* interior line: require outliers against both the +/-1 and +/-2 lines */
            for (x = 1; x < w - 1; x++) {
                filt = FILTER3(2) && FILTER3(1);
                score += filt;
                if (filt && out)
                    burn_frame8(s, out, x, y);
            }
        } else {
            /* near the edge: only the +/-1 lines are available */
            for (x = 1; x < w - 1; x++) {
                filt = FILTER3(1);
                score += filt;
                if (filt && out)
                    burn_frame8(s, out, x, y);
            }
        }
    }
    return score;
}
335 
/* TOUT slice worker, >8-bit path. Same logic as filter8_tout; it reuses the
 * FILTER/FILTER3 macros defined there, which capture the locals p, lw, x
 * and y by name — do not rename them here.
 * NOTE(review): filter_tout_outlier takes uint8_t arguments, so 16-bit
 * samples appear to be truncated to their low 8 bits here — confirm whether
 * that is intended. */
static int filter16_tout(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
{
    ThreadData *td = arg;
    const SignalstatsContext *s = ctx->priv;
    const AVFrame *in = td->in;
    AVFrame *out = td->out;
    const int w = in->width;
    const int h = in->height;
    const int slice_start = (h *  jobnr   ) / nb_jobs;
    const int slice_end   = (h * (jobnr+1)) / nb_jobs;
    const uint16_t *p = (uint16_t *)in->data[0];
    int lw = in->linesize[0] / 2; // line stride in samples, not bytes
    int x, y, score = 0, filt;

    for (y = slice_start; y < slice_end; y++) {

        /* top and bottom line have no vertical neighbours: skip them */
        if (y - 1 < 0 || y + 1 >= h)
            continue;

        // detect two pixels above and below (to eliminate interlace artefacts)
        // should check that video format is infact interlaced.

        if (y - 2 >= 0 && y + 2 < h) {
            /* interior line: require outliers against both the +/-1 and +/-2 lines */
            for (x = 1; x < w - 1; x++) {
                filt = FILTER3(2) && FILTER3(1);
                score += filt;
                if (filt && out)
                    burn_frame16(s, out, x, y);
            }
        } else {
            /* near the edge: only the +/-1 lines are available */
            for (x = 1; x < w - 1; x++) {
                filt = FILTER3(1);
                score += filt;
                if (filt && out)
                    burn_frame16(s, out, x, y);
            }
        }
    }
    return score;
}
376 
377 #define VREP_START 4
378 
379 static int filter8_vrep(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
380 {
381  ThreadData *td = arg;
382  const SignalstatsContext *s = ctx->priv;
383  const AVFrame *in = td->in;
384  AVFrame *out = td->out;
385  const int w = in->width;
386  const int h = in->height;
387  const int slice_start = (h * jobnr ) / nb_jobs;
388  const int slice_end = (h * (jobnr+1)) / nb_jobs;
389  const uint8_t *p = in->data[0];
390  const int lw = in->linesize[0];
391  int x, y, score = 0;
392 
393  for (y = slice_start; y < slice_end; y++) {
394  const int y2lw = (y - VREP_START) * lw;
395  const int ylw = y * lw;
396  int filt, totdiff = 0;
397 
398  if (y < VREP_START)
399  continue;
400 
401  for (x = 0; x < w; x++)
402  totdiff += abs(p[y2lw + x] - p[ylw + x]);
403  filt = totdiff < w;
404 
405  score += filt;
406  if (filt && out)
407  for (x = 0; x < w; x++)
408  burn_frame8(s, out, x, y);
409  }
410  return score * w;
411 }
412 
413 static int filter16_vrep(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
414 {
415  ThreadData *td = arg;
416  const SignalstatsContext *s = ctx->priv;
417  const AVFrame *in = td->in;
418  AVFrame *out = td->out;
419  const int w = in->width;
420  const int h = in->height;
421  const int slice_start = (h * jobnr ) / nb_jobs;
422  const int slice_end = (h * (jobnr+1)) / nb_jobs;
423  const uint16_t *p = (uint16_t *)in->data[0];
424  const int lw = in->linesize[0] / 2;
425  int x, y, score = 0;
426 
427  for (y = slice_start; y < slice_end; y++) {
428  const int y2lw = (y - VREP_START) * lw;
429  const int ylw = y * lw;
430  int64_t totdiff = 0;
431  int filt;
432 
433  if (y < VREP_START)
434  continue;
435 
436  for (x = 0; x < w; x++)
437  totdiff += abs(p[y2lw + x] - p[ylw + x]);
438  filt = totdiff < w;
439 
440  score += filt;
441  if (filt && out)
442  for (x = 0; x < w; x++)
443  burn_frame16(s, out, x, y);
444  }
445  return score * w;
446 }
447 
/* Per-analysis slice workers; name is used to build the
 * "lavfi.signalstats.<NAME>" metadata key.
 * NOTE(review): presumably indexed by the FilterMode enum values used with
 * the "stat"/"out" options — enum body not visible here, confirm ordering. */
static const struct {
    const char *name;
    int (*process8)(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs);  // 8-bit path
    int (*process16)(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs); // >8-bit path
} filters_def[] = {
    {"TOUT", filter8_tout, filter16_tout},
    {"VREP", filter8_vrep, filter16_vrep},
    {"BRNG", filter8_brng, filter16_brng},
    {NULL}
};
458 
/* Slice worker: per-chroma-pixel saturation and hue, 8-bit path.
 * Saturation = Euclidean distance of (U,V) from the neutral point (128,128),
 * written as 8-bit grey; hue = angle in degrees shifted into [0,360),
 * written as 16-bit grey. */
static int compute_sat_hue_metrics8(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
{
    int i, j;
    /* NOTE(review): one line is missing from this extraction here —
     * presumably "ThreadDataHueSatMetrics *td = arg;" — confirm against
     * the upstream file before relying on this listing. */
    const SignalstatsContext *s = ctx->priv;
    const AVFrame *src = td->src;
    AVFrame *dst_sat = td->dst_sat;
    AVFrame *dst_hue = td->dst_hue;

    const int slice_start = (s->chromah *  jobnr   ) / nb_jobs;
    const int slice_end   = (s->chromah * (jobnr+1)) / nb_jobs;

    const int lsz_u = src->linesize[1];
    const int lsz_v = src->linesize[2];
    const uint8_t *p_u = src->data[1] + slice_start * lsz_u;
    const uint8_t *p_v = src->data[2] + slice_start * lsz_v;

    const int lsz_sat = dst_sat->linesize[0];
    const int lsz_hue = dst_hue->linesize[0];
    uint8_t *p_sat = dst_sat->data[0] + slice_start * lsz_sat;
    uint8_t *p_hue = dst_hue->data[0] + slice_start * lsz_hue;

    for (j = slice_start; j < slice_end; j++) {
        for (i = 0; i < s->chromaw; i++) {
            const int yuvu = p_u[i];
            const int yuvv = p_v[i];
            /* chroma magnitude, truncated to int on store */
            p_sat[i] = hypotf(yuvu - 128, yuvv - 128); // int or round?
            /* hue angle in degrees, mapped to [0, 360) */
            ((int16_t*)p_hue)[i] = fmodf(floorf((180.f / M_PI) * atan2f(yuvu-128, yuvv-128) + 180.f), 360.f);
        }
        p_u += lsz_u;
        p_v += lsz_v;
        p_sat += lsz_sat;
        p_hue += lsz_hue;
    }

    return 0;
}
496 
/* Slice worker: per-chroma-pixel saturation and hue, >8-bit path.
 * Same as compute_sat_hue_metrics8 but the neutral chroma point is the
 * depth-dependent midpoint, and saturation is stored as 16-bit grey. */
static int compute_sat_hue_metrics16(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
{
    int i, j;
    /* NOTE(review): one line is missing from this extraction here —
     * presumably "ThreadDataHueSatMetrics *td = arg;" — confirm against
     * the upstream file before relying on this listing. */
    const SignalstatsContext *s = ctx->priv;
    const AVFrame *src = td->src;
    AVFrame *dst_sat = td->dst_sat;
    AVFrame *dst_hue = td->dst_hue;
    const int mid = 1 << (s->depth - 1); // neutral chroma value at this depth

    const int slice_start = (s->chromah *  jobnr   ) / nb_jobs;
    const int slice_end   = (s->chromah * (jobnr+1)) / nb_jobs;

    const int lsz_u = src->linesize[1] / 2;
    const int lsz_v = src->linesize[2] / 2;
    const uint16_t *p_u = (uint16_t*)src->data[1] + slice_start * lsz_u;
    const uint16_t *p_v = (uint16_t*)src->data[2] + slice_start * lsz_v;

    const int lsz_sat = dst_sat->linesize[0] / 2;
    const int lsz_hue = dst_hue->linesize[0] / 2;
    uint16_t *p_sat = (uint16_t*)dst_sat->data[0] + slice_start * lsz_sat;
    uint16_t *p_hue = (uint16_t*)dst_hue->data[0] + slice_start * lsz_hue;

    for (j = slice_start; j < slice_end; j++) {
        for (i = 0; i < s->chromaw; i++) {
            const int yuvu = p_u[i];
            const int yuvv = p_v[i];
            /* chroma magnitude, truncated to int on store */
            p_sat[i] = hypotf(yuvu - mid, yuvv - mid); // int or round?
            /* hue angle in degrees, mapped to [0, 360) */
            ((int16_t*)p_hue)[i] = fmodf(floorf((180.f / M_PI) * atan2f(yuvu-mid, yuvv-mid) + 180.f), 360.f);
        }
        p_u += lsz_u;
        p_v += lsz_v;
        p_sat += lsz_sat;
        p_hue += lsz_hue;
    }

    return 0;
}
535 
/* Effective bit depth of the sampled values: the number of bit positions
 * that were ever set across the frame's samples (popcount of the OR-mask). */
static unsigned compute_bit_depth(uint16_t mask)
{
    unsigned depth = 0;

    while (mask) {
        mask &= mask - 1; // clear the lowest set bit
        depth++;
    }
    return depth;
}
540 
542 {
543  AVFilterContext *ctx = link->dst;
544  SignalstatsContext *s = ctx->priv;
545  AVFilterLink *outlink = ctx->outputs[0];
546  AVFrame *out = in;
547  int i, j;
548  int w = 0, cw = 0, // in
549  pw = 0, cpw = 0; // prev
550  int fil;
551  char metabuf[128];
552  unsigned int *histy = s->histy,
553  *histu = s->histu,
554  *histv = s->histv,
555  histhue[360] = {0},
556  *histsat = s->histsat;
557  int miny = -1, minu = -1, minv = -1;
558  int maxy = -1, maxu = -1, maxv = -1;
559  int lowy = -1, lowu = -1, lowv = -1;
560  int highy = -1, highu = -1, highv = -1;
561  int minsat = -1, maxsat = -1, lowsat = -1, highsat = -1;
562  int lowp, highp, clowp, chighp;
563  int accy, accu, accv;
564  int accsat, acchue = 0;
565  int medhue, maxhue;
566  int toty = 0, totu = 0, totv = 0, totsat=0;
567  int tothue = 0;
568  int dify = 0, difu = 0, difv = 0;
569  uint16_t masky = 0, masku = 0, maskv = 0;
570  int ret;
571  int filtot[FILT_NUMB] = {0};
572  AVFrame *prev;
573 
574  AVFrame *sat = s->frame_sat;
575  AVFrame *hue = s->frame_hue;
576  const uint8_t *p_sat = sat->data[0];
577  const uint8_t *p_hue = hue->data[0];
578  const int lsz_sat = sat->linesize[0];
579  const int lsz_hue = hue->linesize[0];
580  ThreadDataHueSatMetrics td_huesat = {
581  .src = in,
582  .dst_sat = sat,
583  .dst_hue = hue,
584  };
585 
586  if (!s->frame_prev)
587  s->frame_prev = av_frame_clone(in);
588 
589  prev = s->frame_prev;
590 
591  if (s->outfilter != FILTER_NONE) {
592  out = av_frame_clone(in);
593  if (!out) {
594  av_frame_free(&in);
595  return AVERROR(ENOMEM);
596  }
598  if (ret < 0) {
599  av_frame_free(&out);
600  av_frame_free(&in);
601  return ret;
602  }
603  }
604 
606  NULL, FFMIN(s->chromah, ff_filter_get_nb_threads(ctx)));
607 
608  // Calculate luma histogram and difference with previous frame or field.
609  memset(s->histy, 0, s->maxsize * sizeof(*s->histy));
610  for (j = 0; j < link->h; j++) {
611  for (i = 0; i < link->w; i++) {
612  const int yuv = in->data[0][w + i];
613 
614  masky |= yuv;
615  histy[yuv]++;
616  dify += abs(yuv - prev->data[0][pw + i]);
617  }
618  w += in->linesize[0];
619  pw += prev->linesize[0];
620  }
621 
622  // Calculate chroma histogram and difference with previous frame or field.
623  memset(s->histu, 0, s->maxsize * sizeof(*s->histu));
624  memset(s->histv, 0, s->maxsize * sizeof(*s->histv));
625  memset(s->histsat, 0, s->maxsize * sizeof(*s->histsat));
626  for (j = 0; j < s->chromah; j++) {
627  for (i = 0; i < s->chromaw; i++) {
628  const int yuvu = in->data[1][cw+i];
629  const int yuvv = in->data[2][cw+i];
630 
631  masku |= yuvu;
632  maskv |= yuvv;
633  histu[yuvu]++;
634  difu += abs(yuvu - prev->data[1][cpw+i]);
635  histv[yuvv]++;
636  difv += abs(yuvv - prev->data[2][cpw+i]);
637 
638  histsat[p_sat[i]]++;
639  histhue[((int16_t*)p_hue)[i]]++;
640  }
641  cw += in->linesize[1];
642  cpw += prev->linesize[1];
643  p_sat += lsz_sat;
644  p_hue += lsz_hue;
645  }
646 
647  for (fil = 0; fil < FILT_NUMB; fil ++) {
648  if (s->filters & 1<<fil) {
649  ThreadData td = {
650  .in = in,
651  .out = out != in && s->outfilter == fil ? out : NULL,
652  };
653  memset(s->jobs_rets, 0, s->nb_jobs * sizeof(*s->jobs_rets));
655  &td, s->jobs_rets, s->nb_jobs);
656  for (i = 0; i < s->nb_jobs; i++)
657  filtot[fil] += s->jobs_rets[i];
658  }
659  }
660 
661  // find low / high based on histogram percentile
662  // these only need to be calculated once.
663 
664  lowp = lrint(s->fs * 10 / 100.);
665  highp = lrint(s->fs * 90 / 100.);
666  clowp = lrint(s->cfs * 10 / 100.);
667  chighp = lrint(s->cfs * 90 / 100.);
668 
669  accy = accu = accv = accsat = 0;
670  for (fil = 0; fil < s->maxsize; fil++) {
671  if (miny < 0 && histy[fil]) miny = fil;
672  if (minu < 0 && histu[fil]) minu = fil;
673  if (minv < 0 && histv[fil]) minv = fil;
674  if (minsat < 0 && histsat[fil]) minsat = fil;
675 
676  if (histy[fil]) maxy = fil;
677  if (histu[fil]) maxu = fil;
678  if (histv[fil]) maxv = fil;
679  if (histsat[fil]) maxsat = fil;
680 
681  toty += histy[fil] * fil;
682  totu += histu[fil] * fil;
683  totv += histv[fil] * fil;
684  totsat += histsat[fil] * fil;
685 
686  accy += histy[fil];
687  accu += histu[fil];
688  accv += histv[fil];
689  accsat += histsat[fil];
690 
691  if (lowy == -1 && accy >= lowp) lowy = fil;
692  if (lowu == -1 && accu >= clowp) lowu = fil;
693  if (lowv == -1 && accv >= clowp) lowv = fil;
694  if (lowsat == -1 && accsat >= clowp) lowsat = fil;
695 
696  if (highy == -1 && accy >= highp) highy = fil;
697  if (highu == -1 && accu >= chighp) highu = fil;
698  if (highv == -1 && accv >= chighp) highv = fil;
699  if (highsat == -1 && accsat >= chighp) highsat = fil;
700  }
701 
702  maxhue = histhue[0];
703  medhue = -1;
704  for (fil = 0; fil < 360; fil++) {
705  tothue += histhue[fil] * fil;
706  acchue += histhue[fil];
707 
708  if (medhue == -1 && acchue > s->cfs / 2)
709  medhue = fil;
710  if (histhue[fil] > maxhue) {
711  maxhue = histhue[fil];
712  }
713  }
714 
715  av_frame_free(&s->frame_prev);
716  s->frame_prev = av_frame_clone(in);
717 
718 #define SET_META(key, fmt, val) do { \
719  snprintf(metabuf, sizeof(metabuf), fmt, val); \
720  av_dict_set(&out->metadata, "lavfi.signalstats." key, metabuf, 0); \
721 } while (0)
722 
723  SET_META("YMIN", "%d", miny);
724  SET_META("YLOW", "%d", lowy);
725  SET_META("YAVG", "%g", 1.0 * toty / s->fs);
726  SET_META("YHIGH", "%d", highy);
727  SET_META("YMAX", "%d", maxy);
728 
729  SET_META("UMIN", "%d", minu);
730  SET_META("ULOW", "%d", lowu);
731  SET_META("UAVG", "%g", 1.0 * totu / s->cfs);
732  SET_META("UHIGH", "%d", highu);
733  SET_META("UMAX", "%d", maxu);
734 
735  SET_META("VMIN", "%d", minv);
736  SET_META("VLOW", "%d", lowv);
737  SET_META("VAVG", "%g", 1.0 * totv / s->cfs);
738  SET_META("VHIGH", "%d", highv);
739  SET_META("VMAX", "%d", maxv);
740 
741  SET_META("SATMIN", "%d", minsat);
742  SET_META("SATLOW", "%d", lowsat);
743  SET_META("SATAVG", "%g", 1.0 * totsat / s->cfs);
744  SET_META("SATHIGH", "%d", highsat);
745  SET_META("SATMAX", "%d", maxsat);
746 
747  SET_META("HUEMED", "%d", medhue);
748  SET_META("HUEAVG", "%g", 1.0 * tothue / s->cfs);
749 
750  SET_META("YDIF", "%g", 1.0 * dify / s->fs);
751  SET_META("UDIF", "%g", 1.0 * difu / s->cfs);
752  SET_META("VDIF", "%g", 1.0 * difv / s->cfs);
753 
754  SET_META("YBITDEPTH", "%d", compute_bit_depth(masky));
755  SET_META("UBITDEPTH", "%d", compute_bit_depth(masku));
756  SET_META("VBITDEPTH", "%d", compute_bit_depth(maskv));
757 
758  for (fil = 0; fil < FILT_NUMB; fil ++) {
759  if (s->filters & 1<<fil) {
760  char metaname[128];
761  snprintf(metabuf, sizeof(metabuf), "%g", 1.0 * filtot[fil] / s->fs);
762  snprintf(metaname, sizeof(metaname), "lavfi.signalstats.%s", filters_def[fil].name);
763  av_dict_set(&out->metadata, metaname, metabuf, 0);
764  }
765  }
766 
767  if (in != out)
768  av_frame_free(&in);
769  return ff_filter_frame(outlink, out);
770 }
771 
773 {
774  AVFilterContext *ctx = link->dst;
775  SignalstatsContext *s = ctx->priv;
776  AVFilterLink *outlink = ctx->outputs[0];
777  AVFrame *out = in;
778  int i, j;
779  int w = 0, cw = 0, // in
780  pw = 0, cpw = 0; // prev
781  int fil;
782  char metabuf[128];
783  unsigned int *histy = s->histy,
784  *histu = s->histu,
785  *histv = s->histv,
786  histhue[360] = {0},
787  *histsat = s->histsat;
788  int miny = -1, minu = -1, minv = -1;
789  int maxy = -1, maxu = -1, maxv = -1;
790  int lowy = -1, lowu = -1, lowv = -1;
791  int highy = -1, highu = -1, highv = -1;
792  int minsat = -1, maxsat = -1, lowsat = -1, highsat = -1;
793  int lowp, highp, clowp, chighp;
794  int accy, accu, accv;
795  int accsat, acchue = 0;
796  int medhue, maxhue;
797  int64_t toty = 0, totu = 0, totv = 0, totsat=0;
798  int64_t tothue = 0;
799  int64_t dify = 0, difu = 0, difv = 0;
800  uint16_t masky = 0, masku = 0, maskv = 0;
801 
802  int filtot[FILT_NUMB] = {0};
803  AVFrame *prev;
804  int ret;
805  AVFrame *sat = s->frame_sat;
806  AVFrame *hue = s->frame_hue;
807  const uint16_t *p_sat = (uint16_t *)sat->data[0];
808  const uint16_t *p_hue = (uint16_t *)hue->data[0];
809  const int lsz_sat = sat->linesize[0] / 2;
810  const int lsz_hue = hue->linesize[0] / 2;
811  ThreadDataHueSatMetrics td_huesat = {
812  .src = in,
813  .dst_sat = sat,
814  .dst_hue = hue,
815  };
816 
817  if (!s->frame_prev)
818  s->frame_prev = av_frame_clone(in);
819 
820  prev = s->frame_prev;
821 
822  if (s->outfilter != FILTER_NONE) {
823  out = av_frame_clone(in);
824  if (!out) {
825  av_frame_free(&in);
826  return AVERROR(ENOMEM);
827  }
829  if (ret < 0) {
830  av_frame_free(&out);
831  av_frame_free(&in);
832  return ret;
833  }
834  }
835 
837  NULL, FFMIN(s->chromah, ff_filter_get_nb_threads(ctx)));
838 
839  // Calculate luma histogram and difference with previous frame or field.
840  memset(s->histy, 0, s->maxsize * sizeof(*s->histy));
841  for (j = 0; j < link->h; j++) {
842  for (i = 0; i < link->w; i++) {
843  const int yuv = AV_RN16(in->data[0] + w + i * 2);
844 
845  masky |= yuv;
846  histy[yuv]++;
847  dify += abs(yuv - (int)AV_RN16(prev->data[0] + pw + i * 2));
848  }
849  w += in->linesize[0];
850  pw += prev->linesize[0];
851  }
852 
853  // Calculate chroma histogram and difference with previous frame or field.
854  memset(s->histu, 0, s->maxsize * sizeof(*s->histu));
855  memset(s->histv, 0, s->maxsize * sizeof(*s->histv));
856  memset(s->histsat, 0, s->maxsize * sizeof(*s->histsat));
857  for (j = 0; j < s->chromah; j++) {
858  for (i = 0; i < s->chromaw; i++) {
859  const int yuvu = AV_RN16(in->data[1] + cw + i * 2);
860  const int yuvv = AV_RN16(in->data[2] + cw + i * 2);
861 
862  masku |= yuvu;
863  maskv |= yuvv;
864  histu[yuvu]++;
865  difu += abs(yuvu - (int)AV_RN16(prev->data[1] + cpw + i * 2));
866  histv[yuvv]++;
867  difv += abs(yuvv - (int)AV_RN16(prev->data[2] + cpw + i * 2));
868 
869  histsat[p_sat[i]]++;
870  histhue[((int16_t*)p_hue)[i]]++;
871  }
872  cw += in->linesize[1];
873  cpw += prev->linesize[1];
874  p_sat += lsz_sat;
875  p_hue += lsz_hue;
876  }
877 
878  for (fil = 0; fil < FILT_NUMB; fil ++) {
879  if (s->filters & 1<<fil) {
880  ThreadData td = {
881  .in = in,
882  .out = out != in && s->outfilter == fil ? out : NULL,
883  };
884  memset(s->jobs_rets, 0, s->nb_jobs * sizeof(*s->jobs_rets));
886  &td, s->jobs_rets, s->nb_jobs);
887  for (i = 0; i < s->nb_jobs; i++)
888  filtot[fil] += s->jobs_rets[i];
889  }
890  }
891 
892  // find low / high based on histogram percentile
893  // these only need to be calculated once.
894 
895  lowp = lrint(s->fs * 10 / 100.);
896  highp = lrint(s->fs * 90 / 100.);
897  clowp = lrint(s->cfs * 10 / 100.);
898  chighp = lrint(s->cfs * 90 / 100.);
899 
900  accy = accu = accv = accsat = 0;
901  for (fil = 0; fil < s->maxsize; fil++) {
902  if (miny < 0 && histy[fil]) miny = fil;
903  if (minu < 0 && histu[fil]) minu = fil;
904  if (minv < 0 && histv[fil]) minv = fil;
905  if (minsat < 0 && histsat[fil]) minsat = fil;
906 
907  if (histy[fil]) maxy = fil;
908  if (histu[fil]) maxu = fil;
909  if (histv[fil]) maxv = fil;
910  if (histsat[fil]) maxsat = fil;
911 
912  toty += histy[fil] * fil;
913  totu += histu[fil] * fil;
914  totv += histv[fil] * fil;
915  totsat += histsat[fil] * fil;
916 
917  accy += histy[fil];
918  accu += histu[fil];
919  accv += histv[fil];
920  accsat += histsat[fil];
921 
922  if (lowy == -1 && accy >= lowp) lowy = fil;
923  if (lowu == -1 && accu >= clowp) lowu = fil;
924  if (lowv == -1 && accv >= clowp) lowv = fil;
925  if (lowsat == -1 && accsat >= clowp) lowsat = fil;
926 
927  if (highy == -1 && accy >= highp) highy = fil;
928  if (highu == -1 && accu >= chighp) highu = fil;
929  if (highv == -1 && accv >= chighp) highv = fil;
930  if (highsat == -1 && accsat >= chighp) highsat = fil;
931  }
932 
933  maxhue = histhue[0];
934  medhue = -1;
935  for (fil = 0; fil < 360; fil++) {
936  tothue += histhue[fil] * fil;
937  acchue += histhue[fil];
938 
939  if (medhue == -1 && acchue > s->cfs / 2)
940  medhue = fil;
941  if (histhue[fil] > maxhue) {
942  maxhue = histhue[fil];
943  }
944  }
945 
946  av_frame_free(&s->frame_prev);
947  s->frame_prev = av_frame_clone(in);
948 
949  SET_META("YMIN", "%d", miny);
950  SET_META("YLOW", "%d", lowy);
951  SET_META("YAVG", "%g", 1.0 * toty / s->fs);
952  SET_META("YHIGH", "%d", highy);
953  SET_META("YMAX", "%d", maxy);
954 
955  SET_META("UMIN", "%d", minu);
956  SET_META("ULOW", "%d", lowu);
957  SET_META("UAVG", "%g", 1.0 * totu / s->cfs);
958  SET_META("UHIGH", "%d", highu);
959  SET_META("UMAX", "%d", maxu);
960 
961  SET_META("VMIN", "%d", minv);
962  SET_META("VLOW", "%d", lowv);
963  SET_META("VAVG", "%g", 1.0 * totv / s->cfs);
964  SET_META("VHIGH", "%d", highv);
965  SET_META("VMAX", "%d", maxv);
966 
967  SET_META("SATMIN", "%d", minsat);
968  SET_META("SATLOW", "%d", lowsat);
969  SET_META("SATAVG", "%g", 1.0 * totsat / s->cfs);
970  SET_META("SATHIGH", "%d", highsat);
971  SET_META("SATMAX", "%d", maxsat);
972 
973  SET_META("HUEMED", "%d", medhue);
974  SET_META("HUEAVG", "%g", 1.0 * tothue / s->cfs);
975 
976  SET_META("YDIF", "%g", 1.0 * dify / s->fs);
977  SET_META("UDIF", "%g", 1.0 * difu / s->cfs);
978  SET_META("VDIF", "%g", 1.0 * difv / s->cfs);
979 
980  SET_META("YBITDEPTH", "%d", compute_bit_depth(masky));
981  SET_META("UBITDEPTH", "%d", compute_bit_depth(masku));
982  SET_META("VBITDEPTH", "%d", compute_bit_depth(maskv));
983 
984  for (fil = 0; fil < FILT_NUMB; fil ++) {
985  if (s->filters & 1<<fil) {
986  char metaname[128];
987  snprintf(metabuf, sizeof(metabuf), "%g", 1.0 * filtot[fil] / s->fs);
988  snprintf(metaname, sizeof(metaname), "lavfi.signalstats.%s", filters_def[fil].name);
989  av_dict_set(&out->metadata, metaname, metabuf, 0);
990  }
991  }
992 
993  if (in != out)
994  av_frame_free(&in);
995  return ff_filter_frame(outlink, out);
996 }
997 
999 {
1000  AVFilterContext *ctx = link->dst;
1001  SignalstatsContext *s = ctx->priv;
1002 
1003  if (s->depth > 8)
1004  return filter_frame16(link, in);
1005  else
1006  return filter_frame8(link, in);
1007 }
1008 
1010  {
1011  .name = "default",
1012  .type = AVMEDIA_TYPE_VIDEO,
1013  .filter_frame = filter_frame,
1014  },
1015 };
1016 
1018  {
1019  .name = "default",
1020  .config_props = config_output,
1021  .type = AVMEDIA_TYPE_VIDEO,
1022  },
1023 };
1024 
1026  .name = "signalstats",
1027  .description = "Generate statistics from video analysis.",
1028  .init = init,
1029  .uninit = uninit,
1030  .priv_size = sizeof(SignalstatsContext),
1034  .priv_class = &signalstats_class,
1035  .flags = AVFILTER_FLAG_SLICE_THREADS,
1036 };
td
#define td
Definition: regdef.h:70
AVPixelFormat
AVPixelFormat
Pixel format.
Definition: pixfmt.h:71
filter_frame8
static int filter_frame8(AVFilterLink *link, AVFrame *in)
Definition: vf_signalstats.c:541
r
const char * r
Definition: vf_curves.c:127
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism to try again later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
opt.h
compute_sat_hue_metrics16
static int compute_sat_hue_metrics16(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
Definition: vf_signalstats.c:497
SignalstatsContext::vsub
int vsub
Definition: vf_signalstats.c:43
out
FILE * out
Definition: movenc.c:55
av_frame_get_buffer
int av_frame_get_buffer(AVFrame *frame, int align)
Allocate new buffer(s) for audio or video data.
Definition: frame.c:288
SignalstatsContext::rgba_color
uint8_t rgba_color[4]
Definition: vf_signalstats.c:50
SignalstatsContext::chromah
int chromah
Definition: vf_signalstats.c:40
ff_filter_frame
int ff_filter_frame(AVFilterLink *link, AVFrame *frame)
Send a frame of data to the next filter.
Definition: avfilter.c:1015
init
static av_cold int init(AVFilterContext *ctx)
Definition: vf_signalstats.c:91
av_pix_fmt_desc_get
const AVPixFmtDescriptor * av_pix_fmt_desc_get(enum AVPixelFormat pix_fmt)
Definition: pixdesc.c:2965
SET_META
#define SET_META(key, fmt, val)
FILTER_PIXFMTS_ARRAY
#define FILTER_PIXFMTS_ARRAY(array)
Definition: internal.h:162
floorf
static __device__ float floorf(float a)
Definition: cuda_runtime.h:172
filter8_brng
static int filter8_brng(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
Definition: vf_signalstats.c:214
atan2f
#define atan2f(y, x)
Definition: libm.h:45
inlink
The exact code depends on how similar the blocks are and how related they are to the and needs to apply these operations to the correct inlink or outlink if there are several Macros are available to factor that when no extra processing is inlink
Definition: filter_design.txt:212
AV_RN16
#define AV_RN16(p)
Definition: intreadwrite.h:358
av_frame_free
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:160
SignalstatsContext::fs
int fs
Definition: vf_signalstats.c:45
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:375
pixdesc.h
AVFrame::width
int width
Definition: frame.h:447
SignalstatsContext::nb_jobs
int nb_jobs
Definition: vf_signalstats.c:52
w
uint8_t w
Definition: llviddspenc.c:38
AVOption
AVOption.
Definition: opt.h:346
b
#define b
Definition: input.c:41
AV_PIX_FMT_YUV420P10
#define AV_PIX_FMT_YUV420P10
Definition: pixfmt.h:478
VREP_START
#define VREP_START
Definition: vf_signalstats.c:377
SignalstatsContext::histv
int * histv
Definition: vf_signalstats.c:56
ThreadData::in
const AVFrame * in
Definition: vf_signalstats.c:63
AV_PIX_FMT_YUV440P
@ AV_PIX_FMT_YUV440P
planar YUV 4:4:0 (1 Cr & Cb sample per 1x2 Y samples)
Definition: pixfmt.h:106
AVFILTER_DEFINE_CLASS
AVFILTER_DEFINE_CLASS(signalstats)
FFMAX
#define FFMAX(a, b)
Definition: macros.h:47
AVFilter::name
const char * name
Filter name.
Definition: avfilter.h:170
av_popcount
#define av_popcount
Definition: common.h:153
ThreadData::out
AVFrame * out
Definition: af_adeclick.c:527
filter_frame16
static int filter_frame16(AVFilterLink *link, AVFrame *in)
Definition: vf_signalstats.c:772
AVFrame::data
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: frame.h:396
FLAGS
#define FLAGS
Definition: vf_signalstats.c:73
FilterMode
FilterMode
Definition: vp9.h:64
AV_PIX_FMT_YUV422P9
#define AV_PIX_FMT_YUV422P9
Definition: pixfmt.h:476
SignalstatsContext::histu
int * histu
Definition: vf_signalstats.c:56
filter16_tout
static int filter16_tout(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
Definition: vf_signalstats.c:336
AV_PIX_FMT_GRAY16
#define AV_PIX_FMT_GRAY16
Definition: pixfmt.h:462
filter_tout_outlier
static int filter_tout_outlier(uint8_t x, uint8_t y, uint8_t z)
Definition: vf_signalstats.c:283
pix_fmts
static enum AVPixelFormat pix_fmts[]
Definition: vf_signalstats.c:122
FILT_NUMB
@ FILT_NUMB
Definition: vf_signalstats.c:35
AVFilterPad
A filter pad used for either input or output.
Definition: internal.h:33
av_frame_alloc
AVFrame * av_frame_alloc(void)
Allocate an AVFrame and set its fields to default values.
Definition: frame.c:148
AV_PIX_FMT_YUV444P10
#define AV_PIX_FMT_YUV444P10
Definition: pixfmt.h:481
AV_PIX_FMT_YUVJ411P
@ AV_PIX_FMT_YUVJ411P
planar YUV 4:1:1, 12bpp, (1 Cr & Cb sample per 4x1 Y samples) full scale (JPEG), deprecated in favor ...
Definition: pixfmt.h:283
SignalstatsContext
Definition: vf_signalstats.c:38
uninit
static av_cold void uninit(AVFilterContext *ctx)
Definition: vf_signalstats.c:108
mult
static int16_t mult(Float11 *f1, Float11 *f2)
Definition: g726.c:60
lrint
#define lrint
Definition: tablegen.h:53
FILTER_VREP
@ FILTER_VREP
Definition: vf_signalstats.c:33
av_cold
#define av_cold
Definition: attributes.h:90
AV_PIX_FMT_YUV422P16
#define AV_PIX_FMT_YUV422P16
Definition: pixfmt.h:490
mask
static const uint16_t mask[17]
Definition: lzw.c:38
AV_PIX_FMT_YUVJ422P
@ AV_PIX_FMT_YUVJ422P
planar YUV 4:2:2, 16bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV422P and setting col...
Definition: pixfmt.h:86
alloc_frame
static AVFrame * alloc_frame(enum AVPixelFormat pixfmt, int w, int h)
Definition: vf_signalstats.c:137
SignalstatsContext::yuv_color
int yuv_color[3]
Definition: vf_signalstats.c:51
OFFSET
#define OFFSET(x)
Definition: vf_signalstats.c:72
SignalstatsContext::histsat
int * histsat
Definition: vf_signalstats.c:56
intreadwrite.h
s
#define s(width, name)
Definition: cbs_vp9.c:198
AV_PIX_FMT_YUV444P16
#define AV_PIX_FMT_YUV444P16
Definition: pixfmt.h:491
AV_CEIL_RSHIFT
#define AV_CEIL_RSHIFT(a, b)
Definition: common.h:59
g
const char * g
Definition: vf_curves.c:128
slice_end
static int slice_end(AVCodecContext *avctx, AVFrame *pict)
Handle slice ends.
Definition: mpeg12dec.c:1730
filters
#define filters(fmt, type, inverse, clp, inverset, clip, one, clip_fn, packed)
Definition: af_crystalizer.c:54
filters.h
AV_PIX_FMT_YUV420P9
#define AV_PIX_FMT_YUV420P9
Definition: pixfmt.h:475
AV_PIX_FMT_YUV420P16
#define AV_PIX_FMT_YUV420P16
Definition: pixfmt.h:489
ctx
AVFormatContext * ctx
Definition: movenc.c:49
av_frame_clone
AVFrame * av_frame_clone(const AVFrame *src)
Create a new frame that references the same data as src.
Definition: frame.c:593
AV_PIX_FMT_YUV420P
@ AV_PIX_FMT_YUV420P
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
Definition: pixfmt.h:73
FILTER_INPUTS
#define FILTER_INPUTS(array)
Definition: internal.h:182
link
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a link
Definition: filter_design.txt:23
AV_PIX_FMT_YUVJ444P
@ AV_PIX_FMT_YUVJ444P
planar YUV 4:4:4, 24bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV444P and setting col...
Definition: pixfmt.h:87
ff_inlink_make_frame_writable
int ff_inlink_make_frame_writable(AVFilterLink *link, AVFrame **rframe)
Make sure a frame is writable.
Definition: avfilter.c:1489
arg
const char * arg
Definition: jacosubdec.c:67
AVClass
Describe the class of an AVClass context structure.
Definition: log.h:66
NULL
#define NULL
Definition: coverity.c:32
FILTER_BRNG
@ FILTER_BRNG
Definition: vf_signalstats.c:34
burn_frame16
static void burn_frame16(const SignalstatsContext *s, AVFrame *f, int x, int y)
Definition: vf_signalstats.c:204
filter8_tout
static int filter8_tout(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
Definition: vf_signalstats.c:288
ThreadDataHueSatMetrics::dst_hue
AVFrame * dst_hue
Definition: vf_signalstats.c:69
SignalstatsContext::outfilter
int outfilter
Definition: vf_signalstats.c:47
AV_PIX_FMT_YUVJ420P
@ AV_PIX_FMT_YUVJ420P
planar YUV 4:2:0, 12bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV420P and setting col...
Definition: pixfmt.h:85
AV_OPT_TYPE_COLOR
@ AV_OPT_TYPE_COLOR
Definition: opt.h:250
AVFilterContext::inputs
AVFilterLink ** inputs
array of pointers to input links
Definition: avfilter.h:415
AV_PIX_FMT_YUV440P10
#define AV_PIX_FMT_YUV440P10
Definition: pixfmt.h:480
abs
#define abs(x)
Definition: cuda_runtime.h:35
AV_PIX_FMT_YUV422P10
#define AV_PIX_FMT_YUV422P10
Definition: pixfmt.h:479
AV_PIX_FMT_GRAY8
@ AV_PIX_FMT_GRAY8
Y , 8bpp.
Definition: pixfmt.h:81
ff_vf_signalstats
const AVFilter ff_vf_signalstats
Definition: vf_signalstats.c:1025
SignalstatsContext::hsub
int hsub
Definition: vf_signalstats.c:42
name
const char * name
Definition: vf_signalstats.c:449
signalstats_inputs
static const AVFilterPad signalstats_inputs[]
Definition: vf_signalstats.c:1009
burn_frame8
static void burn_frame8(const SignalstatsContext *s, AVFrame *f, int x, int y)
Definition: vf_signalstats.c:195
f
f
Definition: af_crystalizer.c:121
filter_frame
static int filter_frame(AVFilterLink *link, AVFrame *in)
Definition: vf_signalstats.c:998
SignalstatsContext::histy
int * histy
Definition: vf_signalstats.c:56
for
for(k=2;k<=8;++k)
Definition: h264pred_template.c:425
AV_PIX_FMT_YUV422P12
#define AV_PIX_FMT_YUV422P12
Definition: pixfmt.h:483
FILTER3
#define FILTER3(j)
AV_PIX_FMT_YUV444P12
#define AV_PIX_FMT_YUV444P12
Definition: pixfmt.h:485
signalstats_outputs
static const AVFilterPad signalstats_outputs[]
Definition: vf_signalstats.c:1017
ThreadDataHueSatMetrics::dst_sat
AVFrame * dst_sat
Definition: vf_signalstats.c:69
ThreadDataHueSatMetrics
Definition: vf_signalstats.c:67
M_PI
#define M_PI
Definition: mathematics.h:67
internal.h
compute_sat_hue_metrics8
static int compute_sat_hue_metrics8(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
Definition: vf_signalstats.c:459
SignalstatsContext::filters
int filters
Definition: vf_signalstats.c:48
i
#define i(width, name, range_min, range_max)
Definition: cbs_h2645.c:256
process8
int(* process8)(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
Definition: vf_signalstats.c:450
FILTER_TOUT
@ FILTER_TOUT
Definition: vf_signalstats.c:32
SignalstatsContext::frame_prev
AVFrame * frame_prev
Definition: vf_signalstats.c:49
av_malloc_array
#define av_malloc_array(a, b)
Definition: tableprint_vlc.h:31
ff_filter_get_nb_threads
int ff_filter_get_nb_threads(AVFilterContext *ctx)
Get number of threads for current filter instance.
Definition: avfilter.c:827
SignalstatsContext::maxsize
int maxsize
Definition: vf_signalstats.c:55
config_output
static int config_output(AVFilterLink *outlink)
Definition: vf_signalstats.c:155
ThreadData
Used for passing data between threads.
Definition: dsddec.c:71
process16
int(* process16)(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
Definition: vf_signalstats.c:451
FFMIN
#define FFMIN(a, b)
Definition: macros.h:49
AV_PIX_FMT_YUVJ440P
@ AV_PIX_FMT_YUVJ440P
planar YUV 4:4:0 full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV440P and setting color_range
Definition: pixfmt.h:107
SignalstatsContext::depth
int depth
Definition: vf_signalstats.c:44
filt
static const int8_t filt[NUMTAPS *2]
Definition: af_earwax.c:39
AVFilterPad::name
const char * name
Pad name.
Definition: internal.h:39
SignalstatsContext::chromaw
int chromaw
Definition: vf_signalstats.c:41
AV_PIX_FMT_YUV444P9
#define AV_PIX_FMT_YUV444P9
Definition: pixfmt.h:477
AVFilter
Filter definition.
Definition: avfilter.h:166
ret
ret
Definition: filter_design.txt:187
pixfmt
enum AVPixelFormat pixfmt
Definition: kmsgrab.c:367
frame
these buffered frames must be flushed immediately if a new input produces new the filter must not call request_frame to get more It must just process the frame or queue it The task of requesting more frames is left to the filter s request_frame method or the application If a filter has several the filter must be ready for frames arriving randomly on any input any filter with several inputs will most likely require some kind of queuing mechanism It is perfectly acceptable to have a limited queue and to drop frames when the inputs are too unbalanced request_frame For filters that do not use the this method is called when a frame is wanted on an output For a it should directly call filter_frame on the corresponding output For a if there are queued frames already one of these frames should be pushed If the filter should request a frame on one of its repeatedly until at least one frame has been pushed Return or at least make progress towards producing a frame
Definition: filter_design.txt:264
SignalstatsContext::cfs
int cfs
Definition: vf_signalstats.c:46
AV_PIX_FMT_YUV420P12
#define AV_PIX_FMT_YUV420P12
Definition: pixfmt.h:482
filter16_brng
static int filter16_brng(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
Definition: vf_signalstats.c:248
AV_PIX_FMT_YUV422P14
#define AV_PIX_FMT_YUV422P14
Definition: pixfmt.h:487
AVFrame::height
int height
Definition: frame.h:447
SignalstatsContext::frame_sat
AVFrame * frame_sat
Definition: vf_signalstats.c:58
FILTER_NONE
@ FILTER_NONE
Definition: vf_signalstats.c:31
signalstats_options
static const AVOption signalstats_options[]
Definition: vf_signalstats.c:75
AV_PIX_FMT_NONE
@ AV_PIX_FMT_NONE
Definition: pixfmt.h:72
AV_OPT_TYPE_INT
@ AV_OPT_TYPE_INT
Definition: opt.h:235
filter8_vrep
static int filter8_vrep(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
Definition: vf_signalstats.c:379
slice_start
static int slice_start(SliceContext *sc, VVCContext *s, VVCFrameContext *fc, const CodedBitstreamUnit *unit, const int is_first_slice)
Definition: dec.c:688
AV_PIX_FMT_YUV444P
@ AV_PIX_FMT_YUV444P
planar YUV 4:4:4, 24bpp, (1 Cr & Cb sample per 1x1 Y samples)
Definition: pixfmt.h:78
AVFilterContext
An instance of a filter.
Definition: avfilter.h:407
AVFILTER_FLAG_SLICE_THREADS
#define AVFILTER_FLAG_SLICE_THREADS
The filter supports multithreading by splitting frames into multiple parts and processing them concur...
Definition: avfilter.h:117
desc
const char * desc
Definition: libsvtav1.c:75
AVMEDIA_TYPE_VIDEO
@ AVMEDIA_TYPE_VIDEO
Definition: avutil.h:201
AV_PIX_FMT_YUV422P
@ AV_PIX_FMT_YUV422P
planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
Definition: pixfmt.h:77
mem.h
AVPixFmtDescriptor
Descriptor that unambiguously describes how the bits of a pixel are stored in the up to 4 data planes...
Definition: pixdesc.h:69
filter16_vrep
static int filter16_vrep(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
Definition: vf_signalstats.c:413
FILTER_OUTPUTS
#define FILTER_OUTPUTS(array)
Definition: internal.h:183
av_freep
#define av_freep(p)
Definition: tableprint_vlc.h:34
av_dict_set
int av_dict_set(AVDictionary **pm, const char *key, const char *value, int flags)
Set the given entry in *pm, overwriting an existing entry.
Definition: dict.c:88
src
INIT_CLIP pixel * src
Definition: h264pred_template.c:418
AV_PIX_FMT_YUV411P
@ AV_PIX_FMT_YUV411P
planar YUV 4:1:1, 12bpp, (1 Cr & Cb sample per 4x1 Y samples)
Definition: pixfmt.h:80
AV_OPT_TYPE_FLAGS
@ AV_OPT_TYPE_FLAGS
Definition: opt.h:234
AVFrame::linesize
int linesize[AV_NUM_DATA_POINTERS]
For video, a positive or negative value, which is typically indicating the size in bytes of each pict...
Definition: frame.h:420
SignalstatsContext::jobs_rets
int * jobs_rets
Definition: vf_signalstats.c:53
AV_PIX_FMT_YUV440P12
#define AV_PIX_FMT_YUV440P12
Definition: pixfmt.h:484
h
h
Definition: vp9dsp_template.c:2038
AV_PIX_FMT_YUV444P14
#define AV_PIX_FMT_YUV444P14
Definition: pixfmt.h:488
ff_filter_execute
static av_always_inline int ff_filter_execute(AVFilterContext *ctx, avfilter_action_func *func, void *arg, int *ret, int nb_jobs)
Definition: internal.h:134
compute_bit_depth
static unsigned compute_bit_depth(uint16_t mask)
Definition: vf_signalstats.c:536
int
int
Definition: ffmpeg_filter.c:424
SignalstatsContext::frame_hue
AVFrame * frame_hue
Definition: vf_signalstats.c:59
AV_OPT_TYPE_CONST
@ AV_OPT_TYPE_CONST
Definition: opt.h:244
snprintf
#define snprintf
Definition: snprintf.h:34
ThreadDataHueSatMetrics::src
const AVFrame * src
Definition: vf_signalstats.c:68
AV_PIX_FMT_YUV420P14
#define AV_PIX_FMT_YUV420P14
Definition: pixfmt.h:486
AV_WN16
#define AV_WN16(p, v)
Definition: intreadwrite.h:370
filters_def
static const struct @295 filters_def[]