static const uint16_t coef_lf[2] = { 4309, 213 };
static const uint16_t coef_hf[3] = { 5570, 3801, 1016 };
static const uint16_t coef_sp[2] = { 5077, 981 };
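/* Note (added, not from the source): these kernels are 13-bit fixed point.
 * At DC each active kernel sums to 8192 = 1 << 13, which the ">> 13" at the
 * end of every interpolation below cancels, so flat input passes through
 * unchanged; the high-frequency kernel has zero DC gain after its ">> 2". */
_Static_assert(2 * 4309 - 2 * 213 == (1 << 13), "coef_lf: unity DC gain");
_Static_assert(2 * 5077 - 2 * 981 == (1 << 13), "coef_sp: unity DC gain");
_Static_assert(((2 * 5570 - 4 * 3801 + 4 * 1016) >> 2) == 0, "coef_hf: zero DC gain");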
 
#define FILTER_INTRA() \
    for (x = 0; x < w; x++) { \
        interpol = (coef_sp[0] * (cur[mrefs] + cur[prefs]) - coef_sp[1] * (cur[mrefs3] + cur[prefs3])) >> 13; \
        dst[0] = av_clip(interpol, 0, clip_max); \
#define FILTER1() \
    for (x = 0; x < w; x++) { \
        int c = cur[mrefs]; \
        int d = (prev2[0] + next2[0]) >> 1; \
        int e = cur[prefs]; \
        int temporal_diff0 = FFABS(prev2[0] - next2[0]); \
        int temporal_diff1 = (FFABS(prev[mrefs] - c) + FFABS(prev[prefs] - e)) >> 1; \
        int temporal_diff2 = (FFABS(next[mrefs] - c) + FFABS(next[prefs] - e)) >> 1; \
        int diff = FFMAX3(temporal_diff0 >> 1, temporal_diff1, temporal_diff2); \
#define SPAT_CHECK() \
            int b = ((prev2[mrefs2] + next2[mrefs2]) >> 1) - c; \
            int f = ((prev2[prefs2] + next2[prefs2]) >> 1) - e; \
            int dc = d - c; \
            int de = d - e; \
            int max = FFMAX3(de, dc, FFMIN(b, f)); \
            int min = FFMIN3(de, dc, FFMAX(b, f)); \
            diff = FFMAX3(diff, min, -max);
#define FILTER_LINE() \
            SPAT_CHECK() \
            if (FFABS(c - e) > temporal_diff0) { \
                interpol = (((coef_hf[0] * (prev2[0] + next2[0]) \
                    - coef_hf[1] * (prev2[mrefs2] + next2[mrefs2] + prev2[prefs2] + next2[prefs2]) \
                    + coef_hf[2] * (prev2[mrefs4] + next2[mrefs4] + prev2[prefs4] + next2[prefs4])) >> 2) \
                    + coef_lf[0] * (c + e) - coef_lf[1] * (cur[mrefs3] + cur[prefs3])) >> 13; \
            } else { \
                interpol = (coef_sp[0] * (c + e) - coef_sp[1] * (cur[mrefs3] + cur[prefs3])) >> 13; \
            }
#define FILTER_EDGE() \
            if (spat) { \
                SPAT_CHECK() \
            } \
            interpol = (c + e) >> 1;

#define FILTER2() \
            if (interpol > d + diff) \
                interpol = d + diff; \
            else if (interpol < d - diff) \
                interpol = d - diff; \
            dst[0] = av_clip(interpol, 0, clip_max); \
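/* Illustrative helper (added; not a function from vf_bwdif.c): FILTER2's
 * clamp written out as a standalone sketch. 'd' is the temporal average
 * (prev2[0] + next2[0]) >> 1, 'diff' the motion-adaptive tolerance and
 * 'clip_max' the sample maximum, 255 or (1 << depth) - 1. */
static int bwdif_clamp_sketch(int interpol, int d, int diff, int clip_max)
{
    if (interpol > d + diff)
        interpol = d + diff;
    else if (interpol < d - diff)
        interpol = d - diff;

    if (interpol < 0)
        return 0;
    return interpol > clip_max ? clip_max : interpol;
}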
static void filter_intra(void *dst1, void *cur1, int w, int prefs, int mrefs,
                         int prefs3, int mrefs3, int parity, int clip_max)
 
static void filter_line_c(void *dst1, void *prev1, void *cur1, void *next1,
                          int w, int prefs, int mrefs, int prefs2, int mrefs2,
                          int prefs3, int mrefs3, int prefs4, int mrefs4,
                          int parity, int clip_max)
{
 
    uint8_t *dst   = dst1;
    uint8_t *prev  = prev1;
    uint8_t *cur   = cur1;
    uint8_t *next  = next1;
    uint8_t *prev2 = parity ? prev : cur ;
    uint8_t *next2 = parity ? cur  : next;
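/* Note (added, not from the source): 'parity' selects which neighbouring
 * frame is paired with cur for the temporal terms, so that prev2[0] and
 * next2[0] come from the two temporally adjacent fields sharing the parity
 * of the field being reconstructed; their average is the temporal
 * prediction 'd' used by the FILTER macros above. */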
 
static void filter_edge(void *dst1, void *prev1, void *cur1, void *next1,
                        int w, int prefs, int mrefs, int prefs2, int mrefs2,
                        int parity, int clip_max, int spat)
{
 
    uint8_t *dst   = dst1;
    uint8_t *prev  = prev1;
    uint8_t *cur   = cur1;
    uint8_t *next  = next1;
    uint8_t *prev2 = parity ? prev : cur ;
    uint8_t *next2 = parity ? cur  : next;
 
static void filter_intra_16bit(void *dst1, void *cur1, int w, int prefs, int mrefs,
                               int prefs3, int mrefs3, int parity, int clip_max)
{
    uint16_t *dst = dst1;
    uint16_t *cur = cur1;
 
static void filter_line_c_16bit(void *dst1, void *prev1, void *cur1, void *next1,
                                int w, int prefs, int mrefs, int prefs2, int mrefs2,
                                int prefs3, int mrefs3, int prefs4, int mrefs4,
                                int parity, int clip_max)
{
    uint16_t *dst   = dst1;
    uint16_t *prev  = prev1;
    uint16_t *cur   = cur1;
    uint16_t *next  = next1;
    uint16_t *prev2 = parity ? prev : cur ;
    uint16_t *next2 = parity ? cur  : next;
 
static void filter_edge_16bit(void *dst1, void *prev1, void *cur1, void *next1,
                              int w, int prefs, int mrefs, int prefs2, int mrefs2,
                              int parity, int clip_max, int spat)
{
    uint16_t *dst   = dst1;
    uint16_t *prev  = prev1;
    uint16_t *cur   = cur1;
    uint16_t *next  = next1;
    uint16_t *prev2 = parity ? prev : cur ;
    uint16_t *next2 = parity ? cur  : next;
 
    int refs = linesize / df;
    int slice_start = (td->h *  jobnr   ) / nb_jobs;
    int slice_end   = (td->h * (jobnr+1)) / nb_jobs;
 
    for (y = slice_start; y < slice_end; y++) {
        if ((y ^ td->parity) & 1) {
            /* (added note) arguments to the spatial-only s->filter_intra()
             * call, used when no temporal neighbours are available: */
                                y > (df - 1) ? -refs : refs,
                                (y + 3*df) < td->h ? 3 * refs : -refs,
                                y > (3*df - 1) ? -3 * refs : refs,
            } else if ((y < 4) || ((y + 5) > td->h)) {
                /* (added note) arguments to the s->filter_edge() call for
                 * rows near the top/bottom border: */
                               (y + df) < td->h ? refs : -refs,
                               y > (df - 1) ? -refs : refs,
                               refs << 1, -(refs << 1),
                               (y < 2) || ((y + 3) > td->h) ? 0 : 1);
            } else {
                /* (added note) arguments to the full s->filter_line() call
                 * used on interior rows: */
                               refs, -refs, refs << 1, -(refs << 1),
                               3 * refs, -3 * refs, refs << 2, -(refs << 2),
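/* Illustrative program (added, not part of the filter): the row partition
 * produced by the slice_start / slice_end arithmetic above, here for a
 * 100-row plane split across 3 jobs. Every row is covered exactly once
 * because job n+1 starts where job n ends. */
#include <stdio.h>

int main(void)
{
    int h = 100, nb_jobs = 3;

    for (int jobnr = 0; jobnr < nb_jobs; jobnr++) {
        int slice_start = (h *  jobnr     ) / nb_jobs;
        int slice_end   = (h * (jobnr + 1)) / nb_jobs;
        printf("job %d: rows [%d, %d)\n", jobnr, slice_start, slice_end);
    }
    /* prints: rows [0, 33), [33, 66), [66, 100) */
    return 0;
}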
 
        if (i == 1 || i == 2) {
 
    if (bwdif->parity == -1) {
 
    filter(ctx, bwdif->out, tff ^ !is_second, tff);
 
        int64_t cur_pts  = bwdif->cur->pts;
        int64_t next_pts = bwdif->next->pts;

        if (next_pts != AV_NOPTS_VALUE && cur_pts != AV_NOPTS_VALUE) {
            bwdif->out->pts = cur_pts + next_pts;
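/* Worked example (added; the time bases are assumptions, not taken from
 * the source): with input time_base 1/25 and output time_base 1/50, as is
 * typical when one frame is emitted per field:
 *   cur_pts  = 100 in 1/25 units  ->  instant 200 in 1/50 units
 *   next_pts = 101 in 1/25 units  ->  instant 202 in 1/50 units
 * so the second field gets pts = cur_pts + next_pts = 201, i.e. exactly
 * halfway between the two source frames in the finer output time base. */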
 
    } else if (ret < 0) {
 
    if (link->w < 3 || link->h < 3) {
 
    if (s->csp->comp[0].depth > 8) {
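        /* Hedged sketch (added): the body of the depth test above presumably
         * wires the context's function pointers to the 16-bit variants, with
         * the 8-bit functions used otherwise; the actual assignments are not
         * visible in this listing. */
        s->filter_intra = filter_intra_16bit;
        s->filter_line  = filter_line_c_16bit;
        s->filter_edge  = filter_edge_16bit;
    } else {
        s->filter_intra = filter_intra;
        s->filter_line  = filter_line_c;
        s->filter_edge  = filter_edge;
    }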
 
#define OFFSET(x) offsetof(BWDIFContext, x)
#define FLAGS AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_FILTERING_PARAM
#define CONST(name, help, val, unit) { name, help, 0, AV_OPT_TYPE_CONST, {.i64=val}, INT_MIN, INT_MAX, FLAGS, unit }
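/* Illustrative table (added): how OFFSET, FLAGS and CONST combine in an
 * AVOption array. The option names, defaults and the 'mode' context field
 * shown here are assumptions for the example, not a copy of bwdif_options[]. */
static const AVOption bwdif_options_sketch[] = {
    { "mode", "specify the interlacing mode", OFFSET(mode), AV_OPT_TYPE_INT,
      {.i64 = 0}, 0, 1, FLAGS, "mode" },
    CONST("send_frame", "send 1 frame for each frame", 0, "mode"),
    CONST("send_field", "send 1 frame for each field", 1, "mode"),
    { NULL }
};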
    .priv_class    = &bwdif_class,
    .inputs        = avfilter_vf_bwdif_inputs,
    .outputs       = avfilter_vf_bwdif_outputs,
 