FFmpeg
avf_aphasemeter.c
/*
 * Copyright (c) 2015 Paul B Mahol
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * audio to video multimedia aphasemeter filter
 */
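
/*
 * Illustrative usage (a sketch, not taken from this file; the option names
 * come from the option table below and the input is assumed to be stereo):
 *
 *     # mono / out-of-phase detection only, no video output:
 *     ffmpeg -i stereo.wav -filter_complex "aphasemeter=video=0:phasing=1" -f null -
 *
 * With the default video=1 the filter exposes a second, video output pad
 * carrying the phase-meter picture.
 */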

#include <float.h>

#include "libavutil/channel_layout.h"
#include "libavutil/intreadwrite.h"
#include "libavutil/opt.h"
#include "libavutil/parseutils.h"
#include "libavutil/timestamp.h"
#include "avfilter.h"
#include "filters.h"
#include "formats.h"
#include "audio.h"
#include "video.h"

typedef struct AudioPhaseMeterContext {
    const AVClass *class;
    AVFrame *in, *out;
    int64_t last_pts;
    int do_video;
    int do_phasing_detection;
    int w, h;
    AVRational frame_rate;
    int contrast[4];
    uint8_t *mpc_str;
    uint8_t mpc[4];
    int draw_median_phase;
    int is_mono;
    int is_out_phase;
    int start_mono_presence;
    int start_out_phase_presence;
    int nb_samples;
    float tolerance;
    float angle;
    float phase;
    AVRational time_base;
    int64_t duration;
    int64_t frame_end;
    int64_t mono_idx[2];
    int64_t out_phase_idx[2];
} AudioPhaseMeterContext;

#define MAX_DURATION (24*60*60*1000000LL)
#define OFFSET(x) offsetof(AudioPhaseMeterContext, x)
#define FLAGS AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM
#define get_duration(index) (index[1] - index[0])

static const AVOption aphasemeter_options[] = {
    { "rate", "set video rate", OFFSET(frame_rate), AV_OPT_TYPE_VIDEO_RATE, {.str="25"}, 0, INT_MAX, FLAGS },
    { "r",    "set video rate", OFFSET(frame_rate), AV_OPT_TYPE_VIDEO_RATE, {.str="25"}, 0, INT_MAX, FLAGS },
    { "size", "set video size", OFFSET(w), AV_OPT_TYPE_IMAGE_SIZE, {.str="800x400"}, 0, 0, FLAGS },
    { "s",    "set video size", OFFSET(w), AV_OPT_TYPE_IMAGE_SIZE, {.str="800x400"}, 0, 0, FLAGS },
    { "rc", "set red contrast",   OFFSET(contrast[0]), AV_OPT_TYPE_INT, {.i64=2}, 0, 255, FLAGS },
    { "gc", "set green contrast", OFFSET(contrast[1]), AV_OPT_TYPE_INT, {.i64=7}, 0, 255, FLAGS },
    { "bc", "set blue contrast",  OFFSET(contrast[2]), AV_OPT_TYPE_INT, {.i64=1}, 0, 255, FLAGS },
    { "mpc", "set median phase color", OFFSET(mpc_str), AV_OPT_TYPE_STRING, {.str = "none"}, 0, 0, FLAGS },
    { "video", "set video output", OFFSET(do_video), AV_OPT_TYPE_BOOL, {.i64 = 1}, 0, 1, FLAGS },
    { "phasing", "set mono and out-of-phase detection output", OFFSET(do_phasing_detection), AV_OPT_TYPE_BOOL, {.i64 = 0}, 0, 1, FLAGS },
    { "tolerance", "set phase tolerance for mono detection", OFFSET(tolerance), AV_OPT_TYPE_FLOAT, {.dbl = 0.}, 0, 1, FLAGS },
    { "t",         "set phase tolerance for mono detection", OFFSET(tolerance), AV_OPT_TYPE_FLOAT, {.dbl = 0.}, 0, 1, FLAGS },
    { "angle", "set angle threshold for out-of-phase detection", OFFSET(angle), AV_OPT_TYPE_FLOAT, {.dbl = 170.}, 90, 180, FLAGS },
    { "a",     "set angle threshold for out-of-phase detection", OFFSET(angle), AV_OPT_TYPE_FLOAT, {.dbl = 170.}, 90, 180, FLAGS },
    { "duration", "set minimum mono or out-of-phase duration in seconds", OFFSET(duration), AV_OPT_TYPE_DURATION, {.i64=2000000}, 0, MAX_DURATION, FLAGS },
    { "d",        "set minimum mono or out-of-phase duration in seconds", OFFSET(duration), AV_OPT_TYPE_DURATION, {.i64=2000000}, 0, MAX_DURATION, FLAGS },
    { NULL }
};
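
/*
 * Option summary (from the table above): rate/size/rc/gc/bc/mpc only affect
 * the video output; "video" toggles that output, and phasing/tolerance/angle/
 * duration drive the mono and out-of-phase detection implemented below.
 * An illustrative option string (values chosen only as an example) could be:
 *     aphasemeter=phasing=1:tolerance=0.001:angle=170:duration=0.5
 */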

AVFILTER_DEFINE_CLASS(aphasemeter);

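/*
 * Format negotiation: both the audio input and the audio output are
 * restricted to packed float stereo; the optional video output, when
 * enabled, is RGBA.
 */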
static int query_formats(const AVFilterContext *ctx,
                         AVFilterFormatsConfig **cfg_in,
                         AVFilterFormatsConfig **cfg_out)
{
    const AudioPhaseMeterContext *s = ctx->priv;
    static const enum AVSampleFormat sample_fmts[] = { AV_SAMPLE_FMT_FLT, AV_SAMPLE_FMT_NONE };
    static const enum AVPixelFormat pix_fmts[] = { AV_PIX_FMT_RGBA, AV_PIX_FMT_NONE };
    static const AVChannelLayout layouts[] = {
        AV_CHANNEL_LAYOUT_STEREO,
        { .nb_channels = 0 },
    };
    AVFilterFormats *formats;
    int ret;

    formats = ff_make_format_list(sample_fmts);
    if ((ret = ff_formats_ref(formats, &cfg_in[0]->formats)) < 0 ||
        (ret = ff_formats_ref(formats, &cfg_out[0]->formats)) < 0)
        return ret;

    ret = ff_set_common_channel_layouts_from_list2(ctx, cfg_in, cfg_out, layouts);
    if (ret < 0)
        return ret;

    if (s->do_video) {
        formats = ff_make_format_list(pix_fmts);
        if ((ret = ff_formats_ref(formats, &cfg_out[1]->formats)) < 0)
            return ret;
    }

    return 0;
}

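/*
 * The minimum detection duration is rescaled from AV_TIME_BASE units to a
 * sample count, which matches the 1/sample_rate time base used by the pts
 * arithmetic below; nb_samples is how many samples make up one output video
 * frame at the requested rate.
 */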
static int config_input(AVFilterLink *inlink)
{
    AVFilterContext *ctx = inlink->dst;
    AudioPhaseMeterContext *s = ctx->priv;
    s->duration = av_rescale(s->duration, inlink->sample_rate, AV_TIME_BASE);

    if (s->do_video)
        s->nb_samples = FFMAX(1, av_rescale(inlink->sample_rate, s->frame_rate.den, s->frame_rate.num));

    return 0;
}

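/*
 * Configure the video output pad: user-chosen fixed size, square pixels,
 * a constant frame rate taken from the "rate" option, and an optional
 * median-phase marker colour parsed from "mpc" ("none" disables it).
 */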
static int config_video_output(AVFilterLink *outlink)
{
    AVFilterContext *ctx = outlink->src;
    AudioPhaseMeterContext *s = ctx->priv;
    FilterLink *l = ff_filter_link(outlink);

    s->last_pts = AV_NOPTS_VALUE;

    outlink->w = s->w;
    outlink->h = s->h;
    outlink->sample_aspect_ratio = (AVRational){1,1};
    l->frame_rate = s->frame_rate;
    outlink->time_base = av_inv_q(l->frame_rate);

    if (!strcmp(s->mpc_str, "none"))
        s->draw_median_phase = 0;
    else if (av_parse_color(s->mpc, s->mpc_str, -1, ctx) >= 0)
        s->draw_median_phase = 1;
    else
        return AVERROR(EINVAL);

    return 0;
}

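/* Map a phase value in [-1, 1] to an x coordinate in [0, w - 1]. */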
static inline int get_x(float phase, int w)
{
    return (phase + 1.f) / 2.f * (w - 1.f);
}

static inline void add_metadata(AVFrame *insamples, const char *key, char *value)
{
    char buf[128];

    snprintf(buf, sizeof(buf), "lavfi.aphasemeter.%s", key);
    av_dict_set(&insamples->metadata, buf, value, 0);
}

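/*
 * Mono detection state machine: a run of frames whose mean phase stays within
 * "tolerance" of +1 is reported once it has lasted at least "duration".
 * Start, end and length are logged and attached as lavfi.aphasemeter.mono_*
 * frame metadata. insamples may be NULL when flushing a run at EOF.
 */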
static inline void update_mono_detection(AVFilterContext *ctx, AVFrame *insamples, int mono_measurement)
{
    AudioPhaseMeterContext *s = ctx->priv;
    int64_t mono_duration;
    if (!s->is_mono && mono_measurement) {
        s->is_mono = 1;
        s->start_mono_presence = 1;
        s->mono_idx[0] = insamples->pts;
    }
    if (s->is_mono && mono_measurement && s->start_mono_presence) {
        s->mono_idx[1] = s->frame_end;
        mono_duration = get_duration(s->mono_idx);
        if (mono_duration >= s->duration) {
            add_metadata(insamples, "mono_start", av_ts2timestr(s->mono_idx[0], &s->time_base));
            av_log(ctx, AV_LOG_INFO, "mono_start: %s\n", av_ts2timestr(s->mono_idx[0], &s->time_base));
            s->start_mono_presence = 0;
        }
    }
    if (s->is_mono && !mono_measurement) {
        s->mono_idx[1] = insamples ? insamples->pts : s->frame_end;
        mono_duration = get_duration(s->mono_idx);
        if (mono_duration >= s->duration) {
            if (insamples) {
                add_metadata(insamples, "mono_end", av_ts2timestr(s->mono_idx[1], &s->time_base));
                add_metadata(insamples, "mono_duration", av_ts2timestr(mono_duration, &s->time_base));
            }
            av_log(ctx, AV_LOG_INFO, "mono_end: %s | mono_duration: %s\n", av_ts2timestr(s->mono_idx[1], &s->time_base), av_ts2timestr(mono_duration, &s->time_base));
        }
        s->is_mono = 0;
    }
}

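/* Same state machine, but for runs whose mean phase falls below the "angle"
 * threshold; reported as lavfi.aphasemeter.out_phase_* metadata. */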
static inline void update_out_phase_detection(AVFilterContext *ctx, AVFrame *insamples, int out_phase_measurement)
{
    AudioPhaseMeterContext *s = ctx->priv;
    int64_t out_phase_duration;
    if (!s->is_out_phase && out_phase_measurement) {
        s->is_out_phase = 1;
        s->start_out_phase_presence = 1;
        s->out_phase_idx[0] = insamples->pts;
    }
    if (s->is_out_phase && out_phase_measurement && s->start_out_phase_presence) {
        s->out_phase_idx[1] = s->frame_end;
        out_phase_duration = get_duration(s->out_phase_idx);
        if (out_phase_duration >= s->duration) {
            add_metadata(insamples, "out_phase_start", av_ts2timestr(s->out_phase_idx[0], &s->time_base));
            av_log(ctx, AV_LOG_INFO, "out_phase_start: %s\n", av_ts2timestr(s->out_phase_idx[0], &s->time_base));
            s->start_out_phase_presence = 0;
        }
    }
    if (s->is_out_phase && !out_phase_measurement) {
        s->out_phase_idx[1] = insamples ? insamples->pts : s->frame_end;
        out_phase_duration = get_duration(s->out_phase_idx);
        if (out_phase_duration >= s->duration) {
            if (insamples) {
                add_metadata(insamples, "out_phase_end", av_ts2timestr(s->out_phase_idx[1], &s->time_base));
                add_metadata(insamples, "out_phase_duration", av_ts2timestr(out_phase_duration, &s->time_base));
            }
            av_log(ctx, AV_LOG_INFO, "out_phase_end: %s | out_phase_duration: %s\n", av_ts2timestr(s->out_phase_idx[1], &s->time_base), av_ts2timestr(out_phase_duration, &s->time_base));
        }
        s->is_out_phase = 0;
    }
}

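/*
 * Per-frame processing: for every stereo sample pair (l, r) the instantaneous
 * phase correlation is 2*l*r / (l*l + r*r), which is +1 for identical (mono)
 * channels, 0 for uncorrelated material and -1 for opposite polarity. Each
 * value increments a histogram bin in the top row of the video, the mean over
 * the frame is exported as lavfi.aphasemeter.phase metadata, and, if enabled,
 * mono / out-of-phase detection and the video output are updated.
 */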
static int filter_frame(AVFilterLink *inlink, AVFrame *in)
{
    AVFilterContext *ctx = inlink->dst;
    AudioPhaseMeterContext *s = ctx->priv;
    AVFilterLink *outlink = s->do_video ? ctx->outputs[1] : NULL;
    AVFilterLink *aoutlink = ctx->outputs[0];
    AVDictionary **metadata;
    const int rc = s->contrast[0];
    const int gc = s->contrast[1];
    const int bc = s->contrast[2];
    float fphase = 0;
    AVFrame *out;
    uint8_t *dst;
    int i, ret;
    int mono_measurement;
    int out_phase_measurement;
    float tolerance = 1.0f - s->tolerance;
    float angle = cosf(s->angle/180.0f*M_PIf);
    int64_t new_pts;

    if (s->do_video && (!s->out || s->out->width  != outlink->w ||
                                   s->out->height != outlink->h)) {
        av_frame_free(&s->out);
        s->out = ff_get_video_buffer(outlink, outlink->w, outlink->h);
        if (!s->out) {
            ret = AVERROR(ENOMEM);
            goto fail;
        }

        out = s->out;
        for (i = 0; i < outlink->h; i++)
            memset(out->data[0] + i * out->linesize[0], 0, outlink->w * 4);
    } else if (s->do_video) {
        ret = ff_inlink_make_frame_writable(outlink, &s->out);
        if (ret < 0)
            goto fail;
        out = s->out;
        /* scroll the history down by one row (rows 10..h-1), then clear the
         * top row so it can hold this frame's histogram */
        for (i = outlink->h - 1; i >= 10; i--)
            memmove(out->data[0] + (i  ) * out->linesize[0],
                    out->data[0] + (i-1) * out->linesize[0],
                    outlink->w * 4);
        for (i = 0; i < outlink->w; i++)
            AV_WL32(out->data[0] + i * 4, 0);
    }

    for (i = 0; i < in->nb_samples; i++) {
        const float *src = (float *)in->data[0] + i * 2;
        /* instantaneous phase correlation 2*l*r / (l*l + r*r);
         * the NaN case (digital silence) is counted as +1 (mono) */
        const float f = src[0] * src[1] / (src[0]*src[0] + src[1] * src[1]) * 2;
        const float phase = isnan(f) ? 1 : f;
        const int x = get_x(phase, s->w);

        if (s->do_video) {
            dst = out->data[0] + x * 4;
            dst[0] = FFMIN(255, dst[0] + rc);
            dst[1] = FFMIN(255, dst[1] + gc);
            dst[2] = FFMIN(255, dst[2] + bc);
            dst[3] = 255;
        }
        fphase += phase;
    }
    fphase /= in->nb_samples;
    s->phase = fphase;

    if (s->do_video) {
        if (s->draw_median_phase) {
            dst = out->data[0] + get_x(fphase, s->w) * 4;
            AV_WL32(dst, AV_RL32(s->mpc));
        }

        /* duplicate the freshly drawn top row so the current histogram is
         * 10 pixels tall */
        for (i = 1; i < 10 && i < outlink->h; i++)
            memcpy(out->data[0] + i * out->linesize[0], out->data[0], outlink->w * 4);
    }

    metadata = &in->metadata;
    if (metadata) {
        uint8_t value[128];

        snprintf(value, sizeof(value), "%f", fphase);
        add_metadata(in, "phase", value);
    }

    if (s->do_phasing_detection) {
        s->time_base = inlink->time_base;
        s->frame_end = in->pts + av_rescale_q(in->nb_samples,
            (AVRational){ 1, in->sample_rate }, inlink->time_base);

        mono_measurement = (tolerance - fphase) < FLT_EPSILON;
        out_phase_measurement = (angle - fphase) > FLT_EPSILON;

        update_mono_detection(ctx, in, mono_measurement);
        update_out_phase_detection(ctx, in, out_phase_measurement);
    }

    if (s->do_video)
        new_pts = av_rescale_q(in->pts, inlink->time_base, outlink->time_base);
    if (s->do_video && new_pts != s->last_pts) {
        AVFrame *clone;

        s->out->pts = s->last_pts = new_pts;
        s->out->duration = 1;

        clone = av_frame_clone(s->out);
        if (!clone) {
            ret = AVERROR(ENOMEM);
            goto fail;
        }
        ret = ff_filter_frame(outlink, clone);
        if (ret < 0)
            goto fail;
    }
    s->in = NULL;
    return ff_filter_frame(aoutlink, in);
fail:
    av_frame_free(&in);
    s->in = NULL;
    return ret;
}

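/*
 * Scheduling callback: forward status/backpressure between the audio input
 * and the output(s), then consume either exactly one video frame's worth of
 * samples (when the video output is enabled) or whatever audio frame is
 * queued, and run filter_frame() on it; otherwise request more input.
 */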
static int activate(AVFilterContext *ctx)
{
    AVFilterLink *inlink = ctx->inputs[0];
    AVFilterLink *outlink = ctx->outputs[0];
    AudioPhaseMeterContext *s = ctx->priv;
    int ret;

    FF_FILTER_FORWARD_STATUS_BACK(outlink, inlink);
    if (s->do_video)
        FF_FILTER_FORWARD_STATUS_BACK(ctx->outputs[1], inlink);

    if (!s->in) {
        if (s->nb_samples > 0)
            ret = ff_inlink_consume_samples(inlink, s->nb_samples, s->nb_samples, &s->in);
        else
            ret = ff_inlink_consume_frame(inlink, &s->in);
        if (ret < 0)
            return ret;
        if (ret > 0)
            return filter_frame(inlink, s->in);
    }

    FF_FILTER_FORWARD_STATUS_ALL(inlink, ctx);
    FF_FILTER_FORWARD_WANTED(outlink, inlink);
    if (s->do_video)
        FF_FILTER_FORWARD_WANTED(ctx->outputs[1], inlink);

    return FFERROR_NOT_READY;
}

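/* At EOF, close any mono / out-of-phase run that is still open (the NULL
 * frame makes the detectors fall back to s->frame_end), then drop the cached
 * video frame. */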
static av_cold void uninit(AVFilterContext *ctx)
{
    AudioPhaseMeterContext *s = ctx->priv;

    if (s->do_phasing_detection) {
        update_mono_detection(ctx, NULL, 0);
        update_out_phase_detection(ctx, NULL, 0);
    }
    av_frame_free(&s->out);
}

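/*
 * The output pads are created at init time rather than declared statically:
 * the audio pass-through pad always exists, and a video pad is appended only
 * when the "video" option is enabled, hence AVFILTER_FLAG_DYNAMIC_OUTPUTS on
 * the filter definition below.
 */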
static av_cold int init(AVFilterContext *ctx)
{
    AudioPhaseMeterContext *s = ctx->priv;
    AVFilterPad pad;
    int ret;

    pad = (AVFilterPad){
        .name = "out0",
        .type = AVMEDIA_TYPE_AUDIO,
    };
    ret = ff_append_outpad(ctx, &pad);
    if (ret < 0)
        return ret;

    if (s->do_video) {
        pad = (AVFilterPad){
            .name = "out1",
            .type = AVMEDIA_TYPE_VIDEO,
            .config_props = config_video_output,
        };
        ret = ff_append_outpad(ctx, &pad);
        if (ret < 0)
            return ret;
    }

    return 0;
}

static const AVFilterPad inputs[] = {
    {
        .name = "default",
        .type = AVMEDIA_TYPE_AUDIO,
        .config_props = config_input,
    },
};

const FFFilter ff_avf_aphasemeter = {
    .p.name        = "aphasemeter",
    .p.description = NULL_IF_CONFIG_SMALL("Convert input audio to phase meter video output."),
    .p.priv_class  = &aphasemeter_class,
    .p.flags       = AVFILTER_FLAG_DYNAMIC_OUTPUTS,
    .init          = init,
    .uninit        = uninit,
    .priv_size     = sizeof(AudioPhaseMeterContext),
    FILTER_INPUTS(inputs),
    .activate      = activate,
    FILTER_QUERY_FUNC2(query_formats),
};