FFmpeg
vf_scale.c
1 /*
2  * Copyright (c) 2007 Bobby Bingham
3  *
4  * This file is part of FFmpeg.
5  *
6  * FFmpeg is free software; you can redistribute it and/or
7  * modify it under the terms of the GNU Lesser General Public
8  * License as published by the Free Software Foundation; either
9  * version 2.1 of the License, or (at your option) any later version.
10  *
11  * FFmpeg is distributed in the hope that it will be useful,
12  * but WITHOUT ANY WARRANTY; without even the implied warranty of
13  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14  * Lesser General Public License for more details.
15  *
16  * You should have received a copy of the GNU Lesser General Public
17  * License along with FFmpeg; if not, write to the Free Software
18  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19  */
20 
21 /**
22  * @file
23  * scale video filter
24  */
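/*
 * Typical invocations from the ffmpeg command line look roughly like this
 * (illustrative examples only):
 *
 *   ffmpeg -i in.mp4 -vf "scale=1280:720" out.mp4                 (fixed size)
 *   ffmpeg -i in.mp4 -vf "scale=1280:-2"  out.mp4                 (keep aspect, even height)
 *   ffmpeg -i in.mp4 -vf "scale=w=iw/2:h=ih/2:flags=lanczos" out.mp4
 *
 * Width and height are parsed as expressions over the variables listed in
 * var_names[] below and evaluated in scale_eval_dimensions().
 */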
25 
26 #include <float.h>
27 #include <stdio.h>
28 #include <string.h>
29 
30 #include "avfilter.h"
31 #include "formats.h"
32 #include "framesync.h"
33 #include "internal.h"
34 #include "scale_eval.h"
35 #include "video.h"
36 #include "libavutil/eval.h"
38 #include "libavutil/internal.h"
39 #include "libavutil/mem.h"
40 #include "libavutil/opt.h"
41 #include "libavutil/parseutils.h"
42 #include "libavutil/pixdesc.h"
43 #include "libswscale/swscale.h"
44 
45 static const char *const var_names[] = {
46  "in_w", "iw",
47  "in_h", "ih",
48  "out_w", "ow",
49  "out_h", "oh",
50  "a",
51  "sar",
52  "dar",
53  "hsub",
54  "vsub",
55  "ohsub",
56  "ovsub",
57  "n",
58  "t",
59 #if FF_API_FRAME_PKT
60  "pos",
61 #endif
62  "ref_w", "rw",
63  "ref_h", "rh",
64  "ref_a",
65  "ref_sar",
66  "ref_dar", "rdar",
67  "ref_hsub",
68  "ref_vsub",
69  "ref_n",
70  "ref_t",
71  "ref_pos",
72  /* Legacy variables for scale2ref */
73  "main_w",
74  "main_h",
75  "main_a",
76  "main_sar",
77  "main_dar", "mdar",
78  "main_hsub",
79  "main_vsub",
80  "main_n",
81  "main_t",
82  "main_pos",
83  NULL
84 };
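/*
 * Each name above corresponds, in order, to one entry of enum var_name and one
 * slot of ScaleContext.var_values[], which scale_eval_dimensions() fills in
 * before evaluating the w/h expressions.  Examples of expressions built from
 * these variables (illustrative):
 *
 *   scale=w='min(iw,1280)':h=-2      downscale to at most 1280 px wide
 *   scale=w='trunc(iw*sar)':h=ih     resample anamorphic material to square pixels
 *   scale=w=rw/2:h=rh/2              half the size of a second "ref" input
 *
 * The main_* names are accepted only for compatibility with the deprecated
 * scale2ref filter.
 */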
85 
86 enum var_name {
100 #if FF_API_FRAME_PKT
101  VAR_POS,
102 #endif
124 };
125 
126 enum EvalMode {
127  EVAL_MODE_INIT,
128  EVAL_MODE_FRAME,
129  EVAL_MODE_NB
130 };
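/*
 * With eval=init (the default) the w/h expressions are evaluated once when the
 * output link is configured; with eval=frame they are re-evaluated for every
 * incoming frame, which is what allows per-frame variables such as 'n' and 't',
 * e.g. (illustrative):
 *
 *   scale=w='if(gt(t,10),iw/2,iw)':h=-2:eval=frame
 */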
131 
132 typedef struct ScaleContext {
133  const AVClass *class;
134  struct SwsContext *sws; ///< software scaler context
135  struct SwsContext *isws[2]; ///< software scaler context for interlaced material
136  // context used for forwarding options to sws
137  struct SwsContext *sws_opts;
138  FFFrameSync fs;
139 
140  /**
141  * New dimensions. Special values are:
142  * 0 = original width/height
143  * -1 = keep original aspect
144  * -N = try to keep aspect but make sure it is divisible by N
145  */
146  int w, h;
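 /* Worked example for the special values above, with a 1920x1080 input:
  *   w=1280, h=720  ->  1280x720
  *   w=0,    h=0    ->  1920x1080 (unchanged)
  *   w=-1,   h=720  ->  1280x720 (aspect preserved)
  *   w=-4,   h=500  ->  width near 500*16/9, rounded to a multiple of 4 */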
147  char *size_str;
148  double param[2]; // sws params
149 
150  int hsub, vsub; ///< chroma subsampling
151  int slice_y; ///< top of current output slice
152  int input_is_pal; ///< set to 1 if the input format is paletted
153  int output_is_pal; ///< set to 1 if the output format is paletted
154  int interlaced;
155  int uses_ref;
156 
157  char *w_expr; ///< width expression string
158  char *h_expr; ///< height expression string
159  AVExpr *w_pexpr;
160  AVExpr *h_pexpr;
161  double var_values[VARS_NB];
162 
163  char *flags_str;
164 
165  int in_color_matrix;
166  int out_color_matrix;
167 
168  int in_range;
169  int out_range;
170 
171  int out_h_chr_pos;
172  int out_v_chr_pos;
173  int in_h_chr_pos;
174  int in_v_chr_pos;
175 
176  int force_original_aspect_ratio;
177  int force_divisible_by;
178 
179  int eval_mode; ///< expression evaluation mode
180 
181 } ScaleContext;
182 
183 const AVFilter ff_vf_scale2ref;
184 
185 static int config_props(AVFilterLink *outlink);
186 
187 static int check_exprs(AVFilterContext *ctx)
188 {
189  ScaleContext *scale = ctx->priv;
190  unsigned vars_w[VARS_NB] = { 0 }, vars_h[VARS_NB] = { 0 };
191 
192  if (!scale->w_pexpr && !scale->h_pexpr)
193  return AVERROR(EINVAL);
194 
195  if (scale->w_pexpr)
196  av_expr_count_vars(scale->w_pexpr, vars_w, VARS_NB);
197  if (scale->h_pexpr)
198  av_expr_count_vars(scale->h_pexpr, vars_h, VARS_NB);
199 
200  if (vars_w[VAR_OUT_W] || vars_w[VAR_OW]) {
201  av_log(ctx, AV_LOG_ERROR, "Width expression cannot be self-referencing: '%s'.\n", scale->w_expr);
202  return AVERROR(EINVAL);
203  }
204 
205  if (vars_h[VAR_OUT_H] || vars_h[VAR_OH]) {
206  av_log(ctx, AV_LOG_ERROR, "Height expression cannot be self-referencing: '%s'.\n", scale->h_expr);
207  return AVERROR(EINVAL);
208  }
209 
210  if ((vars_w[VAR_OUT_H] || vars_w[VAR_OH]) &&
211  (vars_h[VAR_OUT_W] || vars_h[VAR_OW])) {
212  av_log(ctx, AV_LOG_WARNING, "Circular references detected for width '%s' and height '%s' - possibly invalid.\n", scale->w_expr, scale->h_expr);
213  }
214 
215  if (vars_w[VAR_REF_W] || vars_h[VAR_REF_W] ||
216  vars_w[VAR_RW] || vars_h[VAR_RW] ||
217  vars_w[VAR_REF_H] || vars_h[VAR_REF_H] ||
218  vars_w[VAR_RH] || vars_h[VAR_RH] ||
219  vars_w[VAR_REF_A] || vars_h[VAR_REF_A] ||
220  vars_w[VAR_REF_SAR] || vars_h[VAR_REF_SAR] ||
221  vars_w[VAR_REF_DAR] || vars_h[VAR_REF_DAR] ||
222  vars_w[VAR_RDAR] || vars_h[VAR_RDAR] ||
223  vars_w[VAR_REF_HSUB] || vars_h[VAR_REF_HSUB] ||
224  vars_w[VAR_REF_VSUB] || vars_h[VAR_REF_VSUB] ||
225  vars_w[VAR_REF_N] || vars_h[VAR_REF_N] ||
226  vars_w[VAR_REF_T] || vars_h[VAR_REF_T] ||
227  vars_w[VAR_REF_POS] || vars_h[VAR_REF_POS]) {
228  scale->uses_ref = 1;
229  }
230 
231  if (ctx->filter != &ff_vf_scale2ref &&
232  (vars_w[VAR_S2R_MAIN_W] || vars_h[VAR_S2R_MAIN_W] ||
233  vars_w[VAR_S2R_MAIN_H] || vars_h[VAR_S2R_MAIN_H] ||
234  vars_w[VAR_S2R_MAIN_A] || vars_h[VAR_S2R_MAIN_A] ||
235  vars_w[VAR_S2R_MAIN_SAR] || vars_h[VAR_S2R_MAIN_SAR] ||
236  vars_w[VAR_S2R_MAIN_DAR] || vars_h[VAR_S2R_MAIN_DAR] ||
237  vars_w[VAR_S2R_MDAR] || vars_h[VAR_S2R_MDAR] ||
238  vars_w[VAR_S2R_MAIN_HSUB] || vars_h[VAR_S2R_MAIN_HSUB] ||
239  vars_w[VAR_S2R_MAIN_VSUB] || vars_h[VAR_S2R_MAIN_VSUB] ||
240  vars_w[VAR_S2R_MAIN_N] || vars_h[VAR_S2R_MAIN_N] ||
241  vars_w[VAR_S2R_MAIN_T] || vars_h[VAR_S2R_MAIN_T] ||
242  vars_w[VAR_S2R_MAIN_POS] || vars_h[VAR_S2R_MAIN_POS]) ) {
243  av_log(ctx, AV_LOG_ERROR, "Expressions with scale2ref variables are not valid in scale filter.\n");
244  return AVERROR(EINVAL);
245  }
246 
247  if (ctx->filter != &ff_vf_scale2ref &&
248  (vars_w[VAR_S2R_MAIN_W] || vars_h[VAR_S2R_MAIN_W] ||
249  vars_w[VAR_S2R_MAIN_H] || vars_h[VAR_S2R_MAIN_H] ||
250  vars_w[VAR_S2R_MAIN_A] || vars_h[VAR_S2R_MAIN_A] ||
251  vars_w[VAR_S2R_MAIN_SAR] || vars_h[VAR_S2R_MAIN_SAR] ||
252  vars_w[VAR_S2R_MAIN_DAR] || vars_h[VAR_S2R_MAIN_DAR] ||
253  vars_w[VAR_S2R_MDAR] || vars_h[VAR_S2R_MDAR] ||
254  vars_w[VAR_S2R_MAIN_HSUB] || vars_h[VAR_S2R_MAIN_HSUB] ||
255  vars_w[VAR_S2R_MAIN_VSUB] || vars_h[VAR_S2R_MAIN_VSUB] ||
256  vars_w[VAR_S2R_MAIN_N] || vars_h[VAR_S2R_MAIN_N] ||
257  vars_w[VAR_S2R_MAIN_T] || vars_h[VAR_S2R_MAIN_T] ||
258  vars_w[VAR_S2R_MAIN_POS] || vars_h[VAR_S2R_MAIN_POS]) ) {
259  av_log(ctx, AV_LOG_ERROR, "Expressions with scale2ref variables are not valid in scale filter.\n");
260  return AVERROR(EINVAL);
261  }
262 
263  if (scale->eval_mode == EVAL_MODE_INIT &&
264  (vars_w[VAR_N] || vars_h[VAR_N] ||
265  vars_w[VAR_T] || vars_h[VAR_T] ||
266 #if FF_API_FRAME_PKT
267  vars_w[VAR_POS] || vars_h[VAR_POS] ||
268 #endif
269  vars_w[VAR_S2R_MAIN_N] || vars_h[VAR_S2R_MAIN_N] ||
270  vars_w[VAR_S2R_MAIN_T] || vars_h[VAR_S2R_MAIN_T] ||
271  vars_w[VAR_S2R_MAIN_POS] || vars_h[VAR_S2R_MAIN_POS]) ) {
272  av_log(ctx, AV_LOG_ERROR, "Expressions with frame variables 'n', 't', 'pos' are not valid in init eval_mode.\n");
273  return AVERROR(EINVAL);
274  }
275 
276  return 0;
277 }
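/*
 * Examples of what the checks above catch (illustrative):
 *   scale=w='ow/2':h=720       rejected: the width expression references ow
 *   scale=w='oh*a':h='ow/a'    accepted, but logged as a possible circular reference
 *   scale=w='n+640':h=ih       rejected with the default eval=init, since 'n' is per-frame
 */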
278 
279 static int scale_parse_expr(AVFilterContext *ctx, char *str_expr, AVExpr **pexpr_ptr, const char *var, const char *args)
280 {
281  ScaleContext *scale = ctx->priv;
282  int ret, is_inited = 0;
283  char *old_str_expr = NULL;
284  AVExpr *old_pexpr = NULL;
285 
286  if (str_expr) {
287  old_str_expr = av_strdup(str_expr);
288  if (!old_str_expr)
289  return AVERROR(ENOMEM);
290  av_opt_set(scale, var, args, 0);
291  }
292 
293  if (*pexpr_ptr) {
294  old_pexpr = *pexpr_ptr;
295  *pexpr_ptr = NULL;
296  is_inited = 1;
297  }
298 
299  ret = av_expr_parse(pexpr_ptr, args, var_names,
300  NULL, NULL, NULL, NULL, 0, ctx);
301  if (ret < 0) {
302  av_log(ctx, AV_LOG_ERROR, "Cannot parse expression for %s: '%s'\n", var, args);
303  goto revert;
304  }
305 
306  ret = check_exprs(ctx);
307  if (ret < 0)
308  goto revert;
309 
310  if (is_inited && (ret = config_props(ctx->outputs[0])) < 0)
311  goto revert;
312 
313  av_expr_free(old_pexpr);
314  old_pexpr = NULL;
315  av_freep(&old_str_expr);
316 
317  return 0;
318 
319 revert:
320  av_expr_free(*pexpr_ptr);
321  *pexpr_ptr = NULL;
322  if (old_str_expr) {
323  av_opt_set(scale, var, old_str_expr, 0);
324  av_free(old_str_expr);
325  }
326  if (old_pexpr)
327  *pexpr_ptr = old_pexpr;
328 
329  return ret;
330 }
331 
332 static av_cold int preinit(AVFilterContext *ctx)
333 {
334  ScaleContext *scale = ctx->priv;
335  int ret;
336 
337  scale->sws_opts = sws_alloc_context();
338  if (!scale->sws_opts)
339  return AVERROR(ENOMEM);
340 
341  // set threads=0, so we can later check whether the user modified it
342  ret = av_opt_set_int(scale->sws_opts, "threads", 0, 0);
343  if (ret < 0)
344  return ret;
345 
346  ff_framesync_preinit(&scale->fs);
347 
348  return 0;
349 }
350 
351 static const int sws_colorspaces[] = {
360  -1
361 };
362 
363 static int do_scale(FFFrameSync *fs);
364 
365 static av_cold int init(AVFilterContext *ctx)
366 {
367  ScaleContext *scale = ctx->priv;
368  int64_t threads;
369  int ret;
370 
371  if (ctx->filter == &ff_vf_scale2ref)
372  av_log(ctx, AV_LOG_WARNING, "scale2ref is deprecated, use scale=rw:rh instead\n");
373 
374  if (scale->size_str && (scale->w_expr || scale->h_expr)) {
376  "Size and width/height expressions cannot be set at the same time.\n");
377  return AVERROR(EINVAL);
378  }
379 
380  if (scale->w_expr && !scale->h_expr)
381  FFSWAP(char *, scale->w_expr, scale->size_str);
382 
383  if (scale->size_str) {
384  char buf[32];
385  if ((ret = av_parse_video_size(&scale->w, &scale->h, scale->size_str)) < 0) {
387  "Invalid size '%s'\n", scale->size_str);
388  return ret;
389  }
390  snprintf(buf, sizeof(buf)-1, "%d", scale->w);
391  av_opt_set(scale, "w", buf, 0);
392  snprintf(buf, sizeof(buf)-1, "%d", scale->h);
393  av_opt_set(scale, "h", buf, 0);
394  }
395  if (!scale->w_expr)
396  av_opt_set(scale, "w", "iw", 0);
397  if (!scale->h_expr)
398  av_opt_set(scale, "h", "ih", 0);
399 
400  ret = scale_parse_expr(ctx, NULL, &scale->w_pexpr, "width", scale->w_expr);
401  if (ret < 0)
402  return ret;
403 
404  ret = scale_parse_expr(ctx, NULL, &scale->h_pexpr, "height", scale->h_expr);
405  if (ret < 0)
406  return ret;
407 
408  if (scale->in_color_matrix != -1 &&
409  !ff_fmt_is_in(scale->in_color_matrix, sws_colorspaces)) {
410  av_log(ctx, AV_LOG_ERROR, "Unsupported input color matrix '%s'\n",
411  av_color_space_name(scale->in_color_matrix));
412  return AVERROR(EINVAL);
413  }
414 
415  if (!ff_fmt_is_in(scale->out_color_matrix, sws_colorspaces)) {
416  av_log(ctx, AV_LOG_ERROR, "Unsupported output color matrix '%s'\n",
417  av_color_space_name(scale->out_color_matrix));
418  return AVERROR(EINVAL);
419  }
420 
421  av_log(ctx, AV_LOG_VERBOSE, "w:%s h:%s flags:'%s' interl:%d\n",
422  scale->w_expr, scale->h_expr, (char *)av_x_if_null(scale->flags_str, ""), scale->interlaced);
423 
424  if (scale->flags_str && *scale->flags_str) {
425  ret = av_opt_set(scale->sws_opts, "sws_flags", scale->flags_str, 0);
426  if (ret < 0)
427  return ret;
428  }
429 
430  for (int i = 0; i < FF_ARRAY_ELEMS(scale->param); i++)
431  if (scale->param[i] != DBL_MAX) {
432  ret = av_opt_set_double(scale->sws_opts, i ? "param1" : "param0",
433  scale->param[i], 0);
434  if (ret < 0)
435  return ret;
436  }
437 
438  // use generic thread-count if the user did not set it explicitly
439  ret = av_opt_get_int(scale->sws_opts, "threads", 0, &threads);
440  if (ret < 0)
441  return ret;
442  if (!threads)
443  av_opt_set_int(scale->sws_opts, "threads", ff_filter_get_nb_threads(ctx), 0);
444 
445  if (ctx->filter != &ff_vf_scale2ref && scale->uses_ref) {
446  AVFilterPad pad = {
447  .name = "ref",
448  .type = AVMEDIA_TYPE_VIDEO,
449  };
450  ret = ff_append_inpad(ctx, &pad);
451  if (ret < 0)
452  return ret;
453  }
454 
455  return 0;
456 }
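/*
 * When the configured expressions use any of the ref_* / rw / rh variables,
 * init() appends a second input pad named "ref"; frames arriving there are
 * only consulted for their size, aspect and timing.  A sketch of a graph
 * driving it (file names are just placeholders):
 *
 *   ffmpeg -i main.mp4 -i reference.mp4 \
 *          -filter_complex "[0:v][1:v]scale=w=rw/2:h=rh/2[out]" -map "[out]" out.mp4
 */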
457 
458 static av_cold void uninit(AVFilterContext *ctx)
459 {
460  ScaleContext *scale = ctx->priv;
461  av_expr_free(scale->w_pexpr);
462  av_expr_free(scale->h_pexpr);
463  scale->w_pexpr = scale->h_pexpr = NULL;
464  ff_framesync_uninit(&scale->fs);
465  sws_freeContext(scale->sws_opts);
466  sws_freeContext(scale->sws);
467  sws_freeContext(scale->isws[0]);
468  sws_freeContext(scale->isws[1]);
469  scale->sws = NULL;
470 }
471 
472 static int query_formats(AVFilterContext *ctx)
473 {
474  ScaleContext *scale = ctx->priv;
475  AVFilterFormats *formats;
476  const AVPixFmtDescriptor *desc;
477  enum AVPixelFormat pix_fmt;
478  int ret;
479 
480  desc = NULL;
481  formats = NULL;
482  while ((desc = av_pix_fmt_desc_next(desc))) {
486  && (ret = ff_add_format(&formats, pix_fmt)) < 0) {
487  return ret;
488  }
489  }
490  if ((ret = ff_formats_ref(formats, &ctx->inputs[0]->outcfg.formats)) < 0)
491  return ret;
492 
493  desc = NULL;
494  formats = NULL;
495  while ((desc = av_pix_fmt_desc_next(desc))) {
499  && (ret = ff_add_format(&formats, pix_fmt)) < 0) {
500  return ret;
501  }
502  }
503  if ((ret = ff_formats_ref(formats, &ctx->outputs[0]->incfg.formats)) < 0)
504  return ret;
505 
506  /* accept all supported inputs, even if user overrides their properties */
508  &ctx->inputs[0]->outcfg.color_spaces)) < 0)
509  return ret;
510 
512  &ctx->inputs[0]->outcfg.color_ranges)) < 0)
513  return ret;
514 
515  /* propagate output properties if overridden */
516  formats = scale->out_color_matrix != AVCOL_SPC_UNSPECIFIED
517  ? ff_make_formats_list_singleton(scale->out_color_matrix)
519  if ((ret = ff_formats_ref(formats, &ctx->outputs[0]->incfg.color_spaces)) < 0)
520  return ret;
521 
522  formats = scale->out_range != AVCOL_RANGE_UNSPECIFIED
525  if ((ret = ff_formats_ref(formats, &ctx->outputs[0]->incfg.color_ranges)) < 0)
526  return ret;
527 
528  return 0;
529 }
530 
531 static int scale_eval_dimensions(AVFilterContext *ctx)
532 {
533  ScaleContext *scale = ctx->priv;
534  const char scale2ref = ctx->filter == &ff_vf_scale2ref;
535  const AVFilterLink *inlink = scale2ref ? ctx->inputs[1] : ctx->inputs[0];
536  const AVFilterLink *outlink = ctx->outputs[0];
537  const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(inlink->format);
538  const AVPixFmtDescriptor *out_desc = av_pix_fmt_desc_get(outlink->format);
539  char *expr;
540  int eval_w, eval_h;
541  int ret;
542  double res;
543  const AVPixFmtDescriptor *main_desc;
544  const AVFilterLink *main_link;
545 
546  if (scale2ref) {
547  main_link = ctx->inputs[0];
548  main_desc = av_pix_fmt_desc_get(main_link->format);
549  }
550 
551  scale->var_values[VAR_IN_W] = scale->var_values[VAR_IW] = inlink->w;
552  scale->var_values[VAR_IN_H] = scale->var_values[VAR_IH] = inlink->h;
553  scale->var_values[VAR_OUT_W] = scale->var_values[VAR_OW] = NAN;
554  scale->var_values[VAR_OUT_H] = scale->var_values[VAR_OH] = NAN;
555  scale->var_values[VAR_A] = (double) inlink->w / inlink->h;
556  scale->var_values[VAR_SAR] = inlink->sample_aspect_ratio.num ?
557  (double) inlink->sample_aspect_ratio.num / inlink->sample_aspect_ratio.den : 1;
558  scale->var_values[VAR_DAR] = scale->var_values[VAR_A] * scale->var_values[VAR_SAR];
559  scale->var_values[VAR_HSUB] = 1 << desc->log2_chroma_w;
560  scale->var_values[VAR_VSUB] = 1 << desc->log2_chroma_h;
561  scale->var_values[VAR_OHSUB] = 1 << out_desc->log2_chroma_w;
562  scale->var_values[VAR_OVSUB] = 1 << out_desc->log2_chroma_h;
563 
564  if (scale2ref) {
565  scale->var_values[VAR_S2R_MAIN_W] = main_link->w;
566  scale->var_values[VAR_S2R_MAIN_H] = main_link->h;
567  scale->var_values[VAR_S2R_MAIN_A] = (double) main_link->w / main_link->h;
568  scale->var_values[VAR_S2R_MAIN_SAR] = main_link->sample_aspect_ratio.num ?
569  (double) main_link->sample_aspect_ratio.num / main_link->sample_aspect_ratio.den : 1;
570  scale->var_values[VAR_S2R_MAIN_DAR] = scale->var_values[VAR_S2R_MDAR] =
571  scale->var_values[VAR_S2R_MAIN_A] * scale->var_values[VAR_S2R_MAIN_SAR];
572  scale->var_values[VAR_S2R_MAIN_HSUB] = 1 << main_desc->log2_chroma_w;
573  scale->var_values[VAR_S2R_MAIN_VSUB] = 1 << main_desc->log2_chroma_h;
574  }
575 
576  if (scale->uses_ref) {
577  const AVFilterLink *reflink = ctx->inputs[1];
578  const AVPixFmtDescriptor *ref_desc = av_pix_fmt_desc_get(reflink->format);
579  scale->var_values[VAR_REF_W] = scale->var_values[VAR_RW] = reflink->w;
580  scale->var_values[VAR_REF_H] = scale->var_values[VAR_RH] = reflink->h;
581  scale->var_values[VAR_REF_A] = (double) reflink->w / reflink->h;
582  scale->var_values[VAR_REF_SAR] = reflink->sample_aspect_ratio.num ?
583  (double) reflink->sample_aspect_ratio.num / reflink->sample_aspect_ratio.den : 1;
584  scale->var_values[VAR_REF_DAR] = scale->var_values[VAR_RDAR] =
585  scale->var_values[VAR_REF_A] * scale->var_values[VAR_REF_SAR];
586  scale->var_values[VAR_REF_HSUB] = 1 << ref_desc->log2_chroma_w;
587  scale->var_values[VAR_REF_VSUB] = 1 << ref_desc->log2_chroma_h;
588  }
589 
590  res = av_expr_eval(scale->w_pexpr, scale->var_values, NULL);
591  eval_w = scale->var_values[VAR_OUT_W] = scale->var_values[VAR_OW] = (int) res == 0 ? inlink->w : (int) res;
592 
593  res = av_expr_eval(scale->h_pexpr, scale->var_values, NULL);
594  if (isnan(res)) {
595  expr = scale->h_expr;
596  ret = AVERROR(EINVAL);
597  goto fail;
598  }
599  eval_h = scale->var_values[VAR_OUT_H] = scale->var_values[VAR_OH] = (int) res == 0 ? inlink->h : (int) res;
600 
601  res = av_expr_eval(scale->w_pexpr, scale->var_values, NULL);
602  if (isnan(res)) {
603  expr = scale->w_expr;
604  ret = AVERROR(EINVAL);
605  goto fail;
606  }
607  eval_w = scale->var_values[VAR_OUT_W] = scale->var_values[VAR_OW] = (int) res == 0 ? inlink->w : (int) res;
608 
609  scale->w = eval_w;
610  scale->h = eval_h;
611 
612  return 0;
613 
614 fail:
616  "Error when evaluating the expression '%s'.\n", expr);
617  return ret;
618 }
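/*
 * Note the evaluation order above: w first (so h may refer to ow), then h,
 * then w once more (so w may refer to oh).  Worked example for a 1920x1080
 * input with w='oh*a' and h='ih/2':
 *   pass 1: w evaluates to NAN (oh is still unset)
 *   h = 1080/2 = 540
 *   pass 2: w = 540 * (1920/1080) = 960   ->  output is 960x540
 */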
619 
620 static int config_props(AVFilterLink *outlink)
621 {
622  AVFilterContext *ctx = outlink->src;
623  AVFilterLink *inlink0 = outlink->src->inputs[0];
624  AVFilterLink *inlink = ctx->filter == &ff_vf_scale2ref ?
625  outlink->src->inputs[1] :
626  outlink->src->inputs[0];
627  enum AVPixelFormat outfmt = outlink->format;
628  const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(inlink->format);
629  const AVPixFmtDescriptor *outdesc = av_pix_fmt_desc_get(outfmt);
630  ScaleContext *scale = ctx->priv;
631  uint8_t *flags_val = NULL;
632  int in_range, in_colorspace;
633  int ret;
634 
635  if ((ret = scale_eval_dimensions(ctx)) < 0)
636  goto fail;
637 
638  outlink->w = scale->w;
639  outlink->h = scale->h;
640 
641  ff_scale_adjust_dimensions(inlink, &outlink->w, &outlink->h,
642  scale->force_original_aspect_ratio,
643  scale->force_divisible_by);
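 /* Example of the adjustment performed above: a 1920x1080 input with
  * scale=640:640:force_original_aspect_ratio=decrease becomes 640x360 here;
  * adding force_divisible_by=N additionally rounds both dimensions to
  * multiples of N. */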
644 
645  if (outlink->w > INT_MAX ||
646  outlink->h > INT_MAX ||
647  (outlink->h * inlink->w) > INT_MAX ||
648  (outlink->w * inlink->h) > INT_MAX)
649  av_log(ctx, AV_LOG_ERROR, "Rescaled value for width or height is too big.\n");
650 
651  /* TODO: make algorithm configurable */
652 
653  scale->input_is_pal = desc->flags & AV_PIX_FMT_FLAG_PAL;
654  if (outfmt == AV_PIX_FMT_PAL8) outfmt = AV_PIX_FMT_BGR8;
655  scale->output_is_pal = av_pix_fmt_desc_get(outfmt)->flags & AV_PIX_FMT_FLAG_PAL;
656 
657  in_range = scale->in_range;
658  if (in_range == AVCOL_RANGE_UNSPECIFIED)
659  in_range = inlink0->color_range;
660 
661  in_colorspace = scale->in_color_matrix;
662  if (in_colorspace == -1 /* auto */)
663  in_colorspace = inlink0->colorspace;
664 
665  if (scale->sws)
666  sws_freeContext(scale->sws);
667  if (scale->isws[0])
668  sws_freeContext(scale->isws[0]);
669  if (scale->isws[1])
670  sws_freeContext(scale->isws[1]);
671  scale->isws[0] = scale->isws[1] = scale->sws = NULL;
672  if (inlink0->w == outlink->w &&
673  inlink0->h == outlink->h &&
674  in_range == outlink->color_range &&
675  in_colorspace == outlink->colorspace &&
676  inlink0->format == outlink->format)
677  ;
678  else {
679  struct SwsContext **swscs[3] = {&scale->sws, &scale->isws[0], &scale->isws[1]};
680  int i;
681 
682  for (i = 0; i < 3; i++) {
683  int in_v_chr_pos = scale->in_v_chr_pos, out_v_chr_pos = scale->out_v_chr_pos;
684  int in_full, out_full, brightness, contrast, saturation;
685  const int *inv_table, *table;
686  struct SwsContext *const s = sws_alloc_context();
687  if (!s)
688  return AVERROR(ENOMEM);
689  *swscs[i] = s;
690 
691  ret = av_opt_copy(s, scale->sws_opts);
692  if (ret < 0)
693  return ret;
694 
695  av_opt_set_int(s, "srcw", inlink0 ->w, 0);
696  av_opt_set_int(s, "srch", inlink0 ->h >> !!i, 0);
697  av_opt_set_int(s, "src_format", inlink0->format, 0);
698  av_opt_set_int(s, "dstw", outlink->w, 0);
699  av_opt_set_int(s, "dsth", outlink->h >> !!i, 0);
700  av_opt_set_int(s, "dst_format", outfmt, 0);
701  if (in_range != AVCOL_RANGE_UNSPECIFIED)
702  av_opt_set_int(s, "src_range",
703  in_range == AVCOL_RANGE_JPEG, 0);
704  if (outlink->color_range != AVCOL_RANGE_UNSPECIFIED)
705  av_opt_set_int(s, "dst_range",
706  outlink->color_range == AVCOL_RANGE_JPEG, 0);
707 
708  /* Override chroma location default settings to have the correct
709  * chroma positions. MPEG chroma positions are used by convention.
710  * Note that this works for both MPEG-1/JPEG and MPEG-2/4 chroma
711  * locations, since they share a vertical alignment */
712  if (desc->log2_chroma_h == 1 && scale->in_v_chr_pos == -513) {
713  in_v_chr_pos = (i == 0) ? 128 : (i == 1) ? 64 : 192;
714  }
715 
716  if (outdesc->log2_chroma_h == 1 && scale->out_v_chr_pos == -513) {
717  out_v_chr_pos = (i == 0) ? 128 : (i == 1) ? 64 : 192;
718  }
719 
720  av_opt_set_int(s, "src_h_chr_pos", scale->in_h_chr_pos, 0);
721  av_opt_set_int(s, "src_v_chr_pos", in_v_chr_pos, 0);
722  av_opt_set_int(s, "dst_h_chr_pos", scale->out_h_chr_pos, 0);
723  av_opt_set_int(s, "dst_v_chr_pos", out_v_chr_pos, 0);
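 /* Chroma positions are expressed in 1/256th of a luma sample: 0 means
  * co-sited with the first luma sample, 128 means centred between two luma
  * samples.  The option default of -513 acts as "not specified", in which
  * case the MPEG-style defaults set above are used: 128 for full frames,
  * 64/192 for the top/bottom field of vertically subsampled formats. */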
724 
725  if ((ret = sws_init_context(s, NULL, NULL)) < 0)
726  return ret;
727 
728  sws_getColorspaceDetails(s, (int **)&inv_table, &in_full,
729  (int **)&table, &out_full,
730  &brightness, &contrast, &saturation);
731 
732  if (scale->in_color_matrix == -1 /* auto */)
733  inv_table = sws_getCoefficients(inlink0->colorspace);
734  else if (scale->in_color_matrix != AVCOL_SPC_UNSPECIFIED)
735  inv_table = sws_getCoefficients(scale->in_color_matrix);
736  if (outlink->colorspace != AVCOL_SPC_UNSPECIFIED)
737  table = sws_getCoefficients(outlink->colorspace);
738  else if (scale->in_color_matrix != AVCOL_SPC_UNSPECIFIED)
739  table = inv_table;
740 
741  sws_setColorspaceDetails(s, inv_table, in_full,
742  table, out_full,
743  brightness, contrast, saturation);
744 
745  if (!scale->interlaced)
746  break;
747  }
748  }
749 
750  if (inlink0->sample_aspect_ratio.num){
751  outlink->sample_aspect_ratio = av_mul_q((AVRational){outlink->h * inlink0->w, outlink->w * inlink0->h}, inlink0->sample_aspect_ratio);
752  } else
753  outlink->sample_aspect_ratio = inlink0->sample_aspect_ratio;
754 
755  if (scale->sws)
756  av_opt_get(scale->sws, "sws_flags", 0, &flags_val);
757 
758  av_log(ctx, AV_LOG_VERBOSE, "w:%d h:%d fmt:%s csp:%s range:%s sar:%d/%d -> w:%d h:%d fmt:%s csp:%s range:%s sar:%d/%d flags:%s\n",
759  inlink ->w, inlink ->h, av_get_pix_fmt_name( inlink->format),
760  av_color_space_name(inlink->colorspace), av_color_range_name(inlink->color_range),
761  inlink->sample_aspect_ratio.num, inlink->sample_aspect_ratio.den,
762  outlink->w, outlink->h, av_get_pix_fmt_name(outlink->format),
763  av_color_space_name(outlink->colorspace), av_color_range_name(outlink->color_range),
764  outlink->sample_aspect_ratio.num, outlink->sample_aspect_ratio.den,
765  flags_val);
766  av_freep(&flags_val);
767 
768  if (ctx->filter != &ff_vf_scale2ref) {
769  ff_framesync_uninit(&scale->fs);
770  ret = ff_framesync_init(&scale->fs, ctx, ctx->nb_inputs);
771  if (ret < 0)
772  return ret;
773  scale->fs.on_event = do_scale;
774  scale->fs.in[0].time_base = ctx->inputs[0]->time_base;
775  scale->fs.in[0].sync = 1;
776  scale->fs.in[0].before = EXT_STOP;
777  scale->fs.in[0].after = EXT_STOP;
778  if (scale->uses_ref) {
779  av_assert0(ctx->nb_inputs == 2);
780  scale->fs.in[1].time_base = ctx->inputs[1]->time_base;
781  scale->fs.in[1].sync = 0;
782  scale->fs.in[1].before = EXT_NULL;
783  scale->fs.in[1].after = EXT_INFINITY;
784  }
785 
786  ret = ff_framesync_configure(&scale->fs);
787  if (ret < 0)
788  return ret;
789  }
790 
791  return 0;
792 
793 fail:
794  return ret;
795 }
796 
797 static int config_props_ref(AVFilterLink *outlink)
798 {
799  AVFilterLink *inlink = outlink->src->inputs[1];
800 
801  outlink->w = inlink->w;
802  outlink->h = inlink->h;
803  outlink->sample_aspect_ratio = inlink->sample_aspect_ratio;
804  outlink->time_base = inlink->time_base;
805  outlink->frame_rate = inlink->frame_rate;
806  outlink->colorspace = inlink->colorspace;
807  outlink->color_range = inlink->color_range;
808 
809  return 0;
810 }
811 
812 static int request_frame(AVFilterLink *outlink)
813 {
814  return ff_request_frame(outlink->src->inputs[0]);
815 }
816 
817 static int request_frame_ref(AVFilterLink *outlink)
818 {
819  return ff_request_frame(outlink->src->inputs[1]);
820 }
821 
822 static void frame_offset(AVFrame *frame, int dir, int is_pal)
823 {
824  for (int i = 0; i < 4 && frame->data[i]; i++) {
825  if (i == 1 && is_pal)
826  break;
827  frame->data[i] += frame->linesize[i] * dir;
828  }
829 }
830 
831 static int scale_field(ScaleContext *scale, AVFrame *dst, AVFrame *src,
832  int field)
833 {
834  int orig_h_src = src->height;
835  int orig_h_dst = dst->height;
836  int ret;
837 
838  // offset the data pointers for the bottom field
839  if (field) {
840  frame_offset(src, 1, scale->input_is_pal);
841  frame_offset(dst, 1, scale->output_is_pal);
842  }
843 
844  // take every second line
845  for (int i = 0; i < 4; i++) {
846  src->linesize[i] *= 2;
847  dst->linesize[i] *= 2;
848  }
849  src->height /= 2;
850  dst->height /= 2;
851 
852  ret = sws_scale_frame(scale->isws[field], dst, src);
853  if (ret < 0)
854  return ret;
855 
856  // undo the changes we made above
857  for (int i = 0; i < 4; i++) {
858  src->linesize[i] /= 2;
859  dst->linesize[i] /= 2;
860  }
861  src->height = orig_h_src;
862  dst->height = orig_h_dst;
863 
864  if (field) {
865  frame_offset(src, -1, scale->input_is_pal);
866  frame_offset(dst, -1, scale->output_is_pal);
867  }
868 
869  return 0;
870 }
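/*
 * scale_field() scales one field at a time: doubling the linesizes while
 * halving the height makes each SwsContext in isws[] see only every second
 * line, i.e. one field; for the bottom field the data pointers are first
 * advanced by one line via frame_offset().  All changes are undone afterwards
 * so the caller still sees full frames.
 */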
871 
872 static int scale_frame(AVFilterLink *link, AVFrame *in, AVFrame **frame_out)
873 {
874  AVFilterContext *ctx = link->dst;
875  ScaleContext *scale = ctx->priv;
876  AVFilterLink *outlink = ctx->outputs[0];
877  AVFrame *out;
878  const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(link->format);
879  char buf[32];
880  int ret;
881  int frame_changed;
882 
883  *frame_out = NULL;
884  if (in->colorspace == AVCOL_SPC_YCGCO)
885  av_log(link->dst, AV_LOG_WARNING, "Detected unsupported YCgCo colorspace.\n");
886 
887  frame_changed = in->width != link->w ||
888  in->height != link->h ||
889  in->format != link->format ||
890  in->sample_aspect_ratio.den != link->sample_aspect_ratio.den ||
891  in->sample_aspect_ratio.num != link->sample_aspect_ratio.num ||
892  in->colorspace != link->colorspace ||
893  in->color_range != link->color_range;
894 
895  if (scale->eval_mode == EVAL_MODE_FRAME || frame_changed) {
896  unsigned vars_w[VARS_NB] = { 0 }, vars_h[VARS_NB] = { 0 };
897 
898  av_expr_count_vars(scale->w_pexpr, vars_w, VARS_NB);
899  av_expr_count_vars(scale->h_pexpr, vars_h, VARS_NB);
900 
901  if (scale->eval_mode == EVAL_MODE_FRAME &&
902  !frame_changed &&
903  ctx->filter != &ff_vf_scale2ref &&
904  !(vars_w[VAR_N] || vars_w[VAR_T]
905 #if FF_API_FRAME_PKT
906  || vars_w[VAR_POS]
907 #endif
908  ) &&
909  !(vars_h[VAR_N] || vars_h[VAR_T]
910 #if FF_API_FRAME_PKT
911  || vars_h[VAR_POS]
912 #endif
913  ) &&
914  scale->w && scale->h)
915  goto scale;
916 
917  if (scale->eval_mode == EVAL_MODE_INIT) {
918  snprintf(buf, sizeof(buf) - 1, "%d", scale->w);
919  av_opt_set(scale, "w", buf, 0);
920  snprintf(buf, sizeof(buf) - 1, "%d", scale->h);
921  av_opt_set(scale, "h", buf, 0);
922 
923  ret = scale_parse_expr(ctx, NULL, &scale->w_pexpr, "width", scale->w_expr);
924  if (ret < 0)
925  return ret;
926 
927  ret = scale_parse_expr(ctx, NULL, &scale->h_pexpr, "height", scale->h_expr);
928  if (ret < 0)
929  return ret;
930  }
931 
932  if (ctx->filter == &ff_vf_scale2ref) {
933  scale->var_values[VAR_S2R_MAIN_N] = link->frame_count_out;
934  scale->var_values[VAR_S2R_MAIN_T] = TS2T(in->pts, link->time_base);
935 #if FF_API_FRAME_PKT
936 FF_DISABLE_DEPRECATION_WARNINGS
937  scale->var_values[VAR_S2R_MAIN_POS] = in->pkt_pos == -1 ? NAN : in->pkt_pos;
938 FF_ENABLE_DEPRECATION_WARNINGS
939 #endif
940  } else {
941  scale->var_values[VAR_N] = link->frame_count_out;
942  scale->var_values[VAR_T] = TS2T(in->pts, link->time_base);
943 #if FF_API_FRAME_PKT
944 FF_DISABLE_DEPRECATION_WARNINGS
945  scale->var_values[VAR_POS] = in->pkt_pos == -1 ? NAN : in->pkt_pos;
946 FF_ENABLE_DEPRECATION_WARNINGS
947 #endif
948  }
949 
950  link->dst->inputs[0]->format = in->format;
951  link->dst->inputs[0]->w = in->width;
952  link->dst->inputs[0]->h = in->height;
953  link->dst->inputs[0]->colorspace = in->colorspace;
954  link->dst->inputs[0]->color_range = in->color_range;
955 
956  link->dst->inputs[0]->sample_aspect_ratio.den = in->sample_aspect_ratio.den;
957  link->dst->inputs[0]->sample_aspect_ratio.num = in->sample_aspect_ratio.num;
958 
959  if ((ret = config_props(outlink)) < 0)
960  return ret;
961  }
962 
963 scale:
964  if (!scale->sws) {
965  *frame_out = in;
966  return 0;
967  }
968 
969  scale->hsub = desc->log2_chroma_w;
970  scale->vsub = desc->log2_chroma_h;
971 
972  out = ff_get_video_buffer(outlink, outlink->w, outlink->h);
973  if (!out) {
974  av_frame_free(&in);
975  return AVERROR(ENOMEM);
976  }
977  *frame_out = out;
978 
979  av_frame_copy_props(out, in);
980  out->width = outlink->w;
981  out->height = outlink->h;
982  out->color_range = outlink->color_range;
983  out->colorspace = outlink->colorspace;
984 
985  if (scale->output_is_pal)
986  avpriv_set_systematic_pal2((uint32_t*)out->data[1], outlink->format == AV_PIX_FMT_PAL8 ? AV_PIX_FMT_BGR8 : outlink->format);
987 
988  av_reduce(&out->sample_aspect_ratio.num, &out->sample_aspect_ratio.den,
989  (int64_t)in->sample_aspect_ratio.num * outlink->h * link->w,
990  (int64_t)in->sample_aspect_ratio.den * outlink->w * link->h,
991  INT_MAX);
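 /* The av_reduce() above rescales the sample aspect ratio so that the
  * display aspect ratio is preserved.  Worked example: a 1440x1080 input
  * with SAR 4:3 (DAR 16:9) scaled to 1920x1080 gets
  *   SAR_out = 4/3 * (1080*1440)/(1920*1080) = 1/1,
  * so the output still displays as 16:9. */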
992 
993  if (scale->interlaced>0 || (scale->interlaced<0 &&
994  (in->flags & AV_FRAME_FLAG_INTERLACED))) {
995  ret = scale_field(scale, out, in, 0);
996  if (ret >= 0)
997  ret = scale_field(scale, out, in, 1);
998  } else {
999  ret = sws_scale_frame(scale->sws, out, in);
1000  }
1001 
1002  av_frame_free(&in);
1003  if (ret < 0)
1004  av_frame_free(frame_out);
1005  return ret;
1006 }
1007 
1008 static int do_scale(FFFrameSync *fs)
1009 {
1010  AVFilterContext *ctx = fs->parent;
1011  ScaleContext *scale = ctx->priv;
1012  AVFilterLink *outlink = ctx->outputs[0];
1013  AVFrame *out, *in = NULL, *ref = NULL;
1014  int ret = 0, frame_changed;
1015 
1016  ret = ff_framesync_get_frame(fs, 0, &in, 1);
1017  if (ret < 0)
1018  goto err;
1019 
1020  if (scale->uses_ref) {
1021  ret = ff_framesync_get_frame(fs, 1, &ref, 0);
1022  if (ret < 0)
1023  goto err;
1024  }
1025 
1026  if (ref) {
1027  AVFilterLink *reflink = ctx->inputs[1];
1028  frame_changed = ref->width != reflink->w ||
1029  ref->height != reflink->h ||
1030  ref->format != reflink->format ||
1031  ref->sample_aspect_ratio.den != reflink->sample_aspect_ratio.den ||
1032  ref->sample_aspect_ratio.num != reflink->sample_aspect_ratio.num ||
1033  ref->colorspace != reflink->colorspace ||
1034  ref->color_range != reflink->color_range;
1035 
1036  if (frame_changed) {
1037  reflink->format = ref->format;
1038  reflink->w = ref->width;
1039  reflink->h = ref->height;
1040  reflink->sample_aspect_ratio.num = ref->sample_aspect_ratio.num;
1041  reflink->sample_aspect_ratio.den = ref->sample_aspect_ratio.den;
1042  reflink->colorspace = ref->colorspace;
1043  reflink->color_range = ref->color_range;
1044 
1045  ret = config_props(outlink);
1046  if (ret < 0)
1047  goto err;
1048  }
1049 
1050  if (scale->eval_mode == EVAL_MODE_FRAME) {
1051  scale->var_values[VAR_REF_N] = reflink->frame_count_out;
1052  scale->var_values[VAR_REF_T] = TS2T(ref->pts, reflink->time_base);
1053 #if FF_API_FRAME_PKT
1054 FF_DISABLE_DEPRECATION_WARNINGS
1055  scale->var_values[VAR_REF_POS] = ref->pkt_pos == -1 ? NAN : ref->pkt_pos;
1056 FF_ENABLE_DEPRECATION_WARNINGS
1057 #endif
1058  }
1059  }
1060 
1061  ret = scale_frame(ctx->inputs[0], in, &out);
1062  if (out) {
1063  out->pts = av_rescale_q(fs->pts, fs->time_base, outlink->time_base);
1064  return ff_filter_frame(outlink, out);
1065  }
1066 
1067 err:
1068  if (ret < 0)
1069  av_frame_free(&in);
1070  return ret;
1071 }
1072 
1073 static int filter_frame(AVFilterLink *link, AVFrame *in)
1074 {
1075  AVFilterContext *ctx = link->dst;
1076  AVFilterLink *outlink = ctx->outputs[0];
1077  AVFrame *out;
1078  int ret;
1079 
1080  ret = scale_frame(link, in, &out);
1081  if (out)
1082  return ff_filter_frame(outlink, out);
1083 
1084  return ret;
1085 }
1086 
1087 static int filter_frame_ref(AVFilterLink *link, AVFrame *in)
1088 {
1089  ScaleContext *scale = link->dst->priv;
1090  AVFilterLink *outlink = link->dst->outputs[1];
1091  int frame_changed;
1092 
1093  frame_changed = in->width != link->w ||
1094  in->height != link->h ||
1095  in->format != link->format ||
1096  in->sample_aspect_ratio.den != link->sample_aspect_ratio.den ||
1097  in->sample_aspect_ratio.num != link->sample_aspect_ratio.num ||
1098  in->colorspace != link->colorspace ||
1099  in->color_range != link->color_range;
1100 
1101  if (frame_changed) {
1102  link->format = in->format;
1103  link->w = in->width;
1104  link->h = in->height;
1105  link->sample_aspect_ratio.num = in->sample_aspect_ratio.num;
1106  link->sample_aspect_ratio.den = in->sample_aspect_ratio.den;
1107  link->colorspace = in->colorspace;
1108  link->color_range = in->color_range;
1109 
1110  config_props_ref(outlink);
1111  }
1112 
1113  if (scale->eval_mode == EVAL_MODE_FRAME) {
1114  scale->var_values[VAR_N] = link->frame_count_out;
1115  scale->var_values[VAR_T] = TS2T(in->pts, link->time_base);
1116 #if FF_API_FRAME_PKT
1117 FF_DISABLE_DEPRECATION_WARNINGS
1118  scale->var_values[VAR_POS] = in->pkt_pos == -1 ? NAN : in->pkt_pos;
1119 FF_ENABLE_DEPRECATION_WARNINGS
1120 #endif
1121  }
1122 
1123  return ff_filter_frame(outlink, in);
1124 }
1125 
1126 static int process_command(AVFilterContext *ctx, const char *cmd, const char *args,
1127  char *res, int res_len, int flags)
1128 {
1129  ScaleContext *scale = ctx->priv;
1130  char *str_expr;
1131  AVExpr **pexpr_ptr;
1132  int ret, w, h;
1133 
1134  w = !strcmp(cmd, "width") || !strcmp(cmd, "w");
1135  h = !strcmp(cmd, "height") || !strcmp(cmd, "h");
1136 
1137  if (w || h) {
1138  str_expr = w ? scale->w_expr : scale->h_expr;
1139  pexpr_ptr = w ? &scale->w_pexpr : &scale->h_pexpr;
1140 
1141  ret = scale_parse_expr(ctx, str_expr, pexpr_ptr, cmd, args);
1142  } else
1143  ret = AVERROR(ENOSYS);
1144 
1145  if (ret < 0)
1146  av_log(ctx, AV_LOG_ERROR, "Failed to process command. Continuing with existing parameters.\n");
1147 
1148  return ret;
1149 }
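/*
 * Only the "width"/"w" and "height"/"h" commands are handled here; anything
 * else returns ENOSYS.  They can be exercised at runtime, for example through
 * the sendcmd filter (sketch, assuming a single scale instance addressed by
 * its filter name):
 *
 *   ffmpeg -i in.mp4 -vf "sendcmd=c='10.0 scale w 640',scale=1280:-2" out.mp4
 *
 * which switches the output width to 640 ten seconds into the stream.
 */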
1150 
1151 static int activate(AVFilterContext *ctx)
1152 {
1153  ScaleContext *scale = ctx->priv;
1154  return ff_framesync_activate(&scale->fs);
1155 }
1156 
1157 static const AVClass *child_class_iterate(void **iter)
1158 {
1159  switch ((uintptr_t) *iter) {
1160  case 0:
1161  *iter = (void*)(uintptr_t) 1;
1162  return sws_get_class();
1163  case 1:
1164  *iter = (void*)(uintptr_t) 2;
1165  return &ff_framesync_class;
1166  }
1167 
1168  return NULL;
1169 }
1170 
1171 static void *child_next(void *obj, void *prev)
1172 {
1173  ScaleContext *s = obj;
1174  if (!prev)
1175  return s->sws_opts;
1176  if (prev == s->sws_opts)
1177  return &s->fs;
1178  return NULL;
1179 }
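/*
 * child_class_iterate()/child_next() expose the embedded SwsContext (and the
 * FFFrameSync) as AVOptions children, so option names that ScaleContext does
 * not define itself can be resolved there when the filter arguments are
 * parsed.  Sketch (assuming option lookup with AV_OPT_SEARCH_CHILDREN, as
 * libavfilter's option parsing does):
 *
 *   scale=w=1280:h=-2:sws_dither=ed:threads=2
 *
 * would set "sws_dither" and "threads" on the underlying SwsContext.
 */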
1180 
1181 #define OFFSET(x) offsetof(ScaleContext, x)
1182 #define FLAGS AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_FILTERING_PARAM
1183 #define TFLAGS AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_RUNTIME_PARAM
1184 
1185 static const AVOption scale_options[] = {
1186  { "w", "Output video width", OFFSET(w_expr), AV_OPT_TYPE_STRING, .flags = TFLAGS },
1187  { "width", "Output video width", OFFSET(w_expr), AV_OPT_TYPE_STRING, .flags = TFLAGS },
1188  { "h", "Output video height", OFFSET(h_expr), AV_OPT_TYPE_STRING, .flags = TFLAGS },
1189  { "height","Output video height", OFFSET(h_expr), AV_OPT_TYPE_STRING, .flags = TFLAGS },
1190  { "flags", "Flags to pass to libswscale", OFFSET(flags_str), AV_OPT_TYPE_STRING, { .str = "" }, .flags = FLAGS },
1191  { "interl", "set interlacing", OFFSET(interlaced), AV_OPT_TYPE_BOOL, {.i64 = 0 }, -1, 1, FLAGS },
1192  { "size", "set video size", OFFSET(size_str), AV_OPT_TYPE_STRING, {.str = NULL}, 0, .flags = FLAGS },
1193  { "s", "set video size", OFFSET(size_str), AV_OPT_TYPE_STRING, {.str = NULL}, 0, .flags = FLAGS },
1194  { "in_color_matrix", "set input YCbCr type", OFFSET(in_color_matrix), AV_OPT_TYPE_INT, { .i64 = -1 }, -1, AVCOL_SPC_NB-1, .flags = FLAGS, .unit = "color" },
1195  { "out_color_matrix", "set output YCbCr type", OFFSET(out_color_matrix), AV_OPT_TYPE_INT, { .i64 = AVCOL_SPC_UNSPECIFIED }, 0, AVCOL_SPC_NB-1, .flags = FLAGS, .unit = "color"},
1196  { "auto", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = -1 }, 0, 0, FLAGS, .unit = "color" },
1197  { "bt601", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = AVCOL_SPC_BT470BG }, 0, 0, FLAGS, .unit = "color" },
1198  { "bt470", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = AVCOL_SPC_BT470BG }, 0, 0, FLAGS, .unit = "color" },
1199  { "smpte170m", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = AVCOL_SPC_BT470BG }, 0, 0, FLAGS, .unit = "color" },
1200  { "bt709", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = AVCOL_SPC_BT709 }, 0, 0, FLAGS, .unit = "color" },
1201  { "fcc", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = AVCOL_SPC_FCC }, 0, 0, FLAGS, .unit = "color" },
1202  { "smpte240m", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = AVCOL_SPC_SMPTE240M }, 0, 0, FLAGS, .unit = "color" },
1203  { "bt2020", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = AVCOL_SPC_BT2020_NCL }, 0, 0, FLAGS, .unit = "color" },
1204  { "in_range", "set input color range", OFFSET( in_range), AV_OPT_TYPE_INT, {.i64 = AVCOL_RANGE_UNSPECIFIED }, 0, 2, FLAGS, .unit = "range" },
1205  { "out_range", "set output color range", OFFSET(out_range), AV_OPT_TYPE_INT, {.i64 = AVCOL_RANGE_UNSPECIFIED }, 0, 2, FLAGS, .unit = "range" },
1206  { "auto", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = AVCOL_RANGE_UNSPECIFIED }, 0, 0, FLAGS, .unit = "range" },
1207  { "unknown", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = AVCOL_RANGE_UNSPECIFIED }, 0, 0, FLAGS, .unit = "range" },
1208  { "full", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = AVCOL_RANGE_JPEG}, 0, 0, FLAGS, .unit = "range" },
1209  { "limited",NULL, 0, AV_OPT_TYPE_CONST, {.i64 = AVCOL_RANGE_MPEG}, 0, 0, FLAGS, .unit = "range" },
1210  { "jpeg", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = AVCOL_RANGE_JPEG}, 0, 0, FLAGS, .unit = "range" },
1211  { "mpeg", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = AVCOL_RANGE_MPEG}, 0, 0, FLAGS, .unit = "range" },
1212  { "tv", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = AVCOL_RANGE_MPEG}, 0, 0, FLAGS, .unit = "range" },
1213  { "pc", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = AVCOL_RANGE_JPEG}, 0, 0, FLAGS, .unit = "range" },
1214  { "in_v_chr_pos", "input vertical chroma position in luma grid/256" , OFFSET(in_v_chr_pos), AV_OPT_TYPE_INT, { .i64 = -513}, -513, 512, FLAGS },
1215  { "in_h_chr_pos", "input horizontal chroma position in luma grid/256", OFFSET(in_h_chr_pos), AV_OPT_TYPE_INT, { .i64 = -513}, -513, 512, FLAGS },
1216  { "out_v_chr_pos", "output vertical chroma position in luma grid/256" , OFFSET(out_v_chr_pos), AV_OPT_TYPE_INT, { .i64 = -513}, -513, 512, FLAGS },
1217  { "out_h_chr_pos", "output horizontal chroma position in luma grid/256", OFFSET(out_h_chr_pos), AV_OPT_TYPE_INT, { .i64 = -513}, -513, 512, FLAGS },
1218  { "force_original_aspect_ratio", "decrease or increase w/h if necessary to keep the original AR", OFFSET(force_original_aspect_ratio), AV_OPT_TYPE_INT, { .i64 = 0}, 0, 2, FLAGS, .unit = "force_oar" },
1219  { "disable", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = 0 }, 0, 0, FLAGS, .unit = "force_oar" },
1220  { "decrease", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = 1 }, 0, 0, FLAGS, .unit = "force_oar" },
1221  { "increase", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = 2 }, 0, 0, FLAGS, .unit = "force_oar" },
1222  { "force_divisible_by", "enforce that the output resolution is divisible by a defined integer when force_original_aspect_ratio is used", OFFSET(force_divisible_by), AV_OPT_TYPE_INT, { .i64 = 1}, 1, 256, FLAGS },
1223  { "param0", "Scaler param 0", OFFSET(param[0]), AV_OPT_TYPE_DOUBLE, { .dbl = DBL_MAX }, -DBL_MAX, DBL_MAX, FLAGS },
1224  { "param1", "Scaler param 1", OFFSET(param[1]), AV_OPT_TYPE_DOUBLE, { .dbl = DBL_MAX }, -DBL_MAX, DBL_MAX, FLAGS },
1225  { "eval", "specify when to evaluate expressions", OFFSET(eval_mode), AV_OPT_TYPE_INT, {.i64 = EVAL_MODE_INIT}, 0, EVAL_MODE_NB-1, FLAGS, .unit = "eval" },
1226  { "init", "eval expressions once during initialization", 0, AV_OPT_TYPE_CONST, {.i64=EVAL_MODE_INIT}, .flags = FLAGS, .unit = "eval" },
1227  { "frame", "eval expressions during initialization and per-frame", 0, AV_OPT_TYPE_CONST, {.i64=EVAL_MODE_FRAME}, .flags = FLAGS, .unit = "eval" },
1228  { NULL }
1229 };
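/*
 * A few option combinations in practice (illustrative):
 *
 *   scale=in_range=tv:out_range=pc                 expand limited-range input to full range
 *   scale=w=1280:h=720:out_color_matrix=bt709      convert the YCbCr matrix to BT.709
 *   scale=w=iw/2:h=ih/2:flags=bicubic:param0=0:param1=0.6   pass tuning parameters to the scaler
 *
 * Options not recognised here are forwarded to the child SwsContext, see
 * child_next() above.
 */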
1230 
1231 static const AVClass scale_class = {
1232  .class_name = "scale",
1233  .item_name = av_default_item_name,
1234  .option = scale_options,
1235  .version = LIBAVUTIL_VERSION_INT,
1236  .category = AV_CLASS_CATEGORY_FILTER,
1237  .child_class_iterate = child_class_iterate,
1238  .child_next = child_next,
1239 };
1240 
1241 static const AVFilterPad avfilter_vf_scale_inputs[] = {
1242  {
1243  .name = "default",
1244  .type = AVMEDIA_TYPE_VIDEO,
1245  },
1246 };
1247 
1248 static const AVFilterPad avfilter_vf_scale_outputs[] = {
1249  {
1250  .name = "default",
1251  .type = AVMEDIA_TYPE_VIDEO,
1252  .config_props = config_props,
1253  },
1254 };
1255 
1256 const AVFilter ff_vf_scale = {
1257  .name = "scale",
1258  .description = NULL_IF_CONFIG_SMALL("Scale the input video size and/or convert the image format."),
1259  .preinit = preinit,
1260  .init = init,
1261  .uninit = uninit,
1262  .priv_size = sizeof(ScaleContext),
1263  .priv_class = &scale_class,
1264  FILTER_INPUTS(avfilter_vf_scale_inputs),
1265  FILTER_OUTPUTS(avfilter_vf_scale_outputs),
1266  FILTER_QUERY_FUNC(query_formats),
1267  .activate = activate,
1268  .process_command = process_command,
1269  .flags = AVFILTER_FLAG_DYNAMIC_INPUTS,
1270 };
1271 
1272 static const AVClass *scale2ref_child_class_iterate(void **iter)
1273 {
1274  const AVClass *c = *iter ? NULL : sws_get_class();
1275  *iter = (void*)(uintptr_t)c;
1276  return c;
1277 }
1278 
1279 static void *scale2ref_child_next(void *obj, void *prev)
1280 {
1281  ScaleContext *s = obj;
1282  if (!prev)
1283  return s->sws_opts;
1284  return NULL;
1285 }
1286 
1287 static const AVClass scale2ref_class = {
1288  .class_name = "scale(2ref)",
1289  .item_name = av_default_item_name,
1290  .option = scale_options,
1291  .version = LIBAVUTIL_VERSION_INT,
1292  .category = AV_CLASS_CATEGORY_FILTER,
1293  .child_class_iterate = scale2ref_child_class_iterate,
1294  .child_next = scale2ref_child_next,
1295 };
1296 
1297 static const AVFilterPad avfilter_vf_scale2ref_inputs[] = {
1298  {
1299  .name = "default",
1300  .type = AVMEDIA_TYPE_VIDEO,
1301  .filter_frame = filter_frame,
1302  },
1303  {
1304  .name = "ref",
1305  .type = AVMEDIA_TYPE_VIDEO,
1306  .filter_frame = filter_frame_ref,
1307  },
1308 };
1309 
1310 static const AVFilterPad avfilter_vf_scale2ref_outputs[] = {
1311  {
1312  .name = "default",
1313  .type = AVMEDIA_TYPE_VIDEO,
1314  .config_props = config_props,
1315  .request_frame= request_frame,
1316  },
1317  {
1318  .name = "ref",
1319  .type = AVMEDIA_TYPE_VIDEO,
1320  .config_props = config_props_ref,
1321  .request_frame= request_frame_ref,
1322  },
1323 };
1324 
1325 const AVFilter ff_vf_scale2ref = {
1326  .name = "scale2ref",
1327  .description = NULL_IF_CONFIG_SMALL("Scale the input video size and/or convert the image format to the given reference."),
1328  .preinit = preinit,
1329  .init = init,
1330  .uninit = uninit,
1331  .priv_size = sizeof(ScaleContext),
1332  .priv_class = &scale2ref_class,
1333  FILTER_INPUTS(avfilter_vf_scale2ref_inputs),
1334  FILTER_OUTPUTS(avfilter_vf_scale2ref_outputs),
1335  FILTER_QUERY_FUNC(query_formats),
1336  .process_command = process_command,
1337 };
filter_frame_ref
static int filter_frame_ref(AVFilterLink *link, AVFrame *in)
Definition: vf_scale.c:1087
ScaleContext::param
double param[2]
Definition: vf_scale.c:148
VAR_S2R_MAIN_SAR
@ VAR_S2R_MAIN_SAR
Definition: vf_scale.c:116
formats
formats
Definition: signature.h:48
ff_get_video_buffer
AVFrame * ff_get_video_buffer(AVFilterLink *link, int w, int h)
Request a picture buffer with a specific set of permissions.
Definition: video.c:112
ScaleContext::fs
FFFrameSync fs
Definition: vf_scale.c:138
VAR_S2R_MAIN_A
@ VAR_S2R_MAIN_A
Definition: vf_scale.c:115
VAR_HSUB
@ VAR_HSUB
Definition: vf_scale.c:94
FF_ENABLE_DEPRECATION_WARNINGS
#define FF_ENABLE_DEPRECATION_WARNINGS
Definition: internal.h:73
config_props_ref
static int config_props_ref(AVFilterLink *outlink)
Definition: vf_scale.c:797
SwsContext::saturation
int saturation
Definition: swscale_internal.h:456
ff_framesync_configure
int ff_framesync_configure(FFFrameSync *fs)
Configure a frame sync structure.
Definition: framesync.c:137
AV_LOG_WARNING
#define AV_LOG_WARNING
Something somehow does not look correct.
Definition: log.h:186
AVFrame::color_range
enum AVColorRange color_range
MPEG vs JPEG YUV range.
Definition: frame.h:653
AVPixelFormat
AVPixelFormat
Pixel format.
Definition: pixfmt.h:71
TFLAGS
#define TFLAGS
Definition: vf_scale.c:1183
ScaleContext::sws_opts
struct SwsContext * sws_opts
Definition: vf_scale.c:137
check_exprs
static int check_exprs(AVFilterContext *ctx)
Definition: vf_scale.c:187
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
opt.h
var_name
var_name
Definition: noise.c:47
ScaleContext::input_is_pal
int input_is_pal
set to 1 if the input format is paletted
Definition: vf_scale.c:152
ff_make_format_list
AVFilterFormats * ff_make_format_list(const int *fmts)
Create a list of supported formats.
Definition: formats.c:436
VAR_REF_POS
@ VAR_REF_POS
Definition: vf_scale.c:112
ff_framesync_uninit
void ff_framesync_uninit(FFFrameSync *fs)
Free all memory currently allocated.
Definition: framesync.c:307
out
FILE * out
Definition: movenc.c:55
sws_isSupportedOutput
#define sws_isSupportedOutput(x)
ScaleContext
Definition: vf_scale.c:132
ff_filter_frame
int ff_filter_frame(AVFilterLink *link, AVFrame *frame)
Send a frame of data to the next filter.
Definition: avfilter.c:1015
av_pix_fmt_desc_get
const AVPixFmtDescriptor * av_pix_fmt_desc_get(enum AVPixelFormat pix_fmt)
Definition: pixdesc.c:2965
query_formats
static int query_formats(AVFilterContext *ctx)
Definition: vf_scale.c:472
ScaleContext::force_divisible_by
int force_divisible_by
Definition: vf_scale.c:177
VAR_REF_N
@ VAR_REF_N
Definition: vf_scale.c:110
ff_framesync_get_frame
int ff_framesync_get_frame(FFFrameSync *fs, unsigned in, AVFrame **rframe, unsigned get)
Get the current frame in an input.
Definition: framesync.c:270
avfilter_vf_scale2ref_outputs
static const AVFilterPad avfilter_vf_scale2ref_outputs[]
Definition: vf_scale.c:1310
FLAGS
#define FLAGS
Definition: vf_scale.c:1182
ScaleContext::flags_str
char * flags_str
Definition: vf_scale.c:163
inlink
The exact code depends on how similar the blocks are and how related they are to the and needs to apply these operations to the correct inlink or outlink if there are several Macros are available to factor that when no extra processing is inlink
Definition: filter_design.txt:212
av_frame_free
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:160
AVFrame::colorspace
enum AVColorSpace colorspace
YUV colorspace type.
Definition: frame.h:664
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:374
pixdesc.h
AVFrame::pts
int64_t pts
Presentation timestamp in time_base units (time when frame should be shown to user).
Definition: frame.h:486
AVFrame::width
int width
Definition: frame.h:446
w
uint8_t w
Definition: llviddspenc.c:38
AVCOL_RANGE_JPEG
@ AVCOL_RANGE_JPEG
Full range content.
Definition: pixfmt.h:686
VAR_A
@ VAR_A
Definition: vf_scale.c:91
request_frame_ref
static int request_frame_ref(AVFilterLink *outlink)
Definition: vf_scale.c:817
av_opt_set_double
int av_opt_set_double(void *obj, const char *name, double val, int search_flags)
Definition: opt.c:804
AVOption
AVOption.
Definition: opt.h:357
AVCOL_SPC_NB
@ AVCOL_SPC_NB
Not part of ABI.
Definition: pixfmt.h:629
scale_parse_expr
static int scale_parse_expr(AVFilterContext *ctx, char *str_expr, AVExpr **pexpr_ptr, const char *var, const char *args)
Definition: vf_scale.c:279
scale2ref_class
static const AVClass scale2ref_class
Definition: vf_scale.c:1287
FILTER_QUERY_FUNC
#define FILTER_QUERY_FUNC(func)
Definition: internal.h:159
table
static const uint16_t table[]
Definition: prosumer.c:203
request_frame
static int request_frame(AVFilterLink *outlink)
Definition: vf_scale.c:812
av_pix_fmt_desc_next
const AVPixFmtDescriptor * av_pix_fmt_desc_next(const AVPixFmtDescriptor *prev)
Iterate over all pixel format descriptors known to libavutil.
Definition: pixdesc.c:2972
VAR_REF_T
@ VAR_REF_T
Definition: vf_scale.c:111
ff_request_frame
int ff_request_frame(AVFilterLink *link)
Request an input frame from the filter at the other end of the link.
Definition: avfilter.c:463
VAR_S2R_MAIN_HSUB
@ VAR_S2R_MAIN_HSUB
Definition: vf_scale.c:118
ScaleContext::var_values
double var_values[VARS_NB]
Definition: vf_scale.c:161
ScaleContext::out_range
int out_range
Definition: vf_scale.c:169
VAR_S2R_MDAR
@ VAR_S2R_MDAR
Definition: vf_scale.c:117
AV_LOG_VERBOSE
#define AV_LOG_VERBOSE
Detailed information.
Definition: log.h:196
AVCOL_SPC_RGB
@ AVCOL_SPC_RGB
order of coefficients is actually GBR, also IEC 61966-2-1 (sRGB), YZX and ST 428-1
Definition: pixfmt.h:610
float.h
EVAL_MODE_FRAME
@ EVAL_MODE_FRAME
Definition: vf_scale.c:128
VAR_S2R_MAIN_H
@ VAR_S2R_MAIN_H
Definition: vf_scale.c:114
AVFrame::flags
int flags
Frame flags, a combination of AV_FRAME_FLAGS.
Definition: frame.h:646
AVFilter::name
const char * name
Filter name.
Definition: avfilter.h:170
FFFrameSync
Frame sync structure.
Definition: framesync.h:168
EXT_INFINITY
@ EXT_INFINITY
Extend the frame to infinity.
Definition: framesync.h:75
ScaleContext::in_h_chr_pos
int in_h_chr_pos
Definition: vf_scale.c:173
VAR_OUT_H
@ VAR_OUT_H
Definition: vf_scale.c:90
video.h
ff_make_formats_list_singleton
AVFilterFormats * ff_make_formats_list_singleton(int fmt)
Equivalent to ff_make_format_list({const int[]}{ fmt, -1 })
Definition: formats.c:530
AVFilterFormats
A list of supported formats for one end of a filter link.
Definition: formats.h:64
formats.h
av_expr_parse
int av_expr_parse(AVExpr **expr, const char *s, const char *const *const_names, const char *const *func1_names, double(*const *funcs1)(void *, double), const char *const *func2_names, double(*const *funcs2)(void *, double, double), int log_offset, void *log_ctx)
Parse an expression.
Definition: eval.c:710
VAR_S2R_MAIN_POS
@ VAR_S2R_MAIN_POS
Definition: vf_scale.c:122
AVCOL_SPC_BT470BG
@ AVCOL_SPC_BT470BG
also ITU-R BT601-6 625 / ITU-R BT1358 625 / ITU-R BT1700 625 PAL & SECAM / IEC 61966-2-4 xvYCC601
Definition: pixfmt.h:615
EXT_STOP
@ EXT_STOP
Completely stop all streams with this one.
Definition: framesync.h:65
ff_append_inpad
int ff_append_inpad(AVFilterContext *f, AVFilterPad *p)
Append a new input/output pad to the filter's list of such pads.
Definition: avfilter.c:127
av_color_space_name
const char * av_color_space_name(enum AVColorSpace space)
Definition: pixdesc.c:3341
VAR_DAR
@ VAR_DAR
Definition: vf_scale.c:93
avfilter_vf_scale_inputs
static const AVFilterPad avfilter_vf_scale_inputs[]
Definition: vf_scale.c:1241
fail
#define fail()
Definition: checkasm.h:184
VARS_NB
@ VARS_NB
Definition: vf_scale.c:123
frame_offset
static void frame_offset(AVFrame *frame, int dir, int is_pal)
Definition: vf_scale.c:822
ScaleContext::isws
struct SwsContext * isws[2]
software scaler context for interlaced material
Definition: vf_scale.c:135
VAR_REF_A
@ VAR_REF_A
Definition: vf_scale.c:105
ScaleContext::eval_mode
int eval_mode
expression evaluation mode
Definition: vf_scale.c:179
EXT_NULL
@ EXT_NULL
Ignore this stream and continue processing the other ones.
Definition: framesync.h:70
VAR_IN_H
@ VAR_IN_H
Definition: vf_scale.c:88
EVAL_MODE_NB
@ EVAL_MODE_NB
Definition: vf_scale.c:129
sws_get_class
const AVClass * sws_get_class(void)
Get the AVClass for swsContext.
Definition: options.c:97
av_opt_set
int av_opt_set(void *obj, const char *name, const char *val, int search_flags)
Definition: opt.c:747
VAR_REF_W
@ VAR_REF_W
Definition: vf_scale.c:103
AVFILTER_FLAG_DYNAMIC_INPUTS
#define AVFILTER_FLAG_DYNAMIC_INPUTS
The number of the filter inputs is not determined just by AVFilter.inputs.
Definition: avfilter.h:106
av_reduce
int av_reduce(int *dst_num, int *dst_den, int64_t num, int64_t den, int64_t max)
Reduce a fraction.
Definition: rational.c:35
av_expr_free
void av_expr_free(AVExpr *e)
Free a parsed expression previously created with av_expr_parse().
Definition: eval.c:358
AVRational::num
int num
Numerator.
Definition: rational.h:59
OFFSET
#define OFFSET(x)
Definition: vf_scale.c:1181
preinit
static av_cold int preinit(AVFilterContext *ctx)
Definition: vf_scale.c:332
AVFilterPad
A filter pad used for either input or output.
Definition: internal.h:33
activate
static int activate(AVFilterContext *ctx)
Definition: vf_scale.c:1151
AV_PIX_FMT_BGR8
@ AV_PIX_FMT_BGR8
packed RGB 3:3:2, 8bpp, (msb)2B 3G 3R(lsb)
Definition: pixfmt.h:90
TS2T
#define TS2T(ts, tb)
Definition: internal.h:259
AV_LOG_ERROR
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:180
FF_ARRAY_ELEMS
#define FF_ARRAY_ELEMS(a)
Definition: sinewin_tablegen.c:29
av_cold
#define av_cold
Definition: attributes.h:90
VAR_REF_H
@ VAR_REF_H
Definition: vf_scale.c:104
scale2ref_child_next
static void * scale2ref_child_next(void *obj, void *prev)
Definition: vf_scale.c:1279
ScaleContext::sws
struct SwsContext * sws
software scaler context
Definition: vf_scale.c:134
s
#define s(width, name)
Definition: cbs_vp9.c:198
VAR_OH
@ VAR_OH
Definition: vf_scale.c:90
VAR_S2R_MAIN_W
@ VAR_S2R_MAIN_W
Definition: vf_scale.c:113
SwsContext::brightness
int brightness
Definition: swscale_internal.h:456
scale_frame
static int scale_frame(AVFilterLink *link, AVFrame *in, AVFrame **frame_out)
Definition: vf_scale.c:872
ScaleContext::slice_y
int slice_y
top of current output slice
Definition: vf_scale.c:151
AVCOL_SPC_SMPTE170M
@ AVCOL_SPC_SMPTE170M
also ITU-R BT601-6 525 / ITU-R BT1358 525 / ITU-R BT1700 NTSC / functionally identical to above
Definition: pixfmt.h:616
pix_fmt
static enum AVPixelFormat pix_fmt
Definition: demux_decode.c:41
AV_OPT_TYPE_DOUBLE
@ AV_OPT_TYPE_DOUBLE
Definition: opt.h:247
av_expr_count_vars
int av_expr_count_vars(AVExpr *e, unsigned *counter, int size)
Track the presence of variables and their number of occurrences in a parsed expression.
Definition: eval.c:782
ff_formats_ref
int ff_formats_ref(AVFilterFormats *f, AVFilterFormats **ref)
Add *ref as a new reference to formats.
Definition: formats.c:679
init
static av_cold int init(AVFilterContext *ctx)
Definition: vf_scale.c:365
VAR_OVSUB
@ VAR_OVSUB
Definition: vf_scale.c:97
av_assert0
#define av_assert0(cond)
assert() equivalent, that is always enabled.
Definition: avassert.h:40
ScaleContext::uses_ref
int uses_ref
Definition: vf_scale.c:155
ctx
AVFormatContext * ctx
Definition: movenc.c:49
process_command
static int process_command(AVFilterContext *ctx, const char *cmd, const char *args, char *res, int res_len, int flags)
Definition: vf_scale.c:1126
av_expr_eval
double av_expr_eval(AVExpr *e, const double *const_values, void *opaque)
Evaluate a previously parsed expression.
Definition: eval.c:792
av_rescale_q
int64_t av_rescale_q(int64_t a, AVRational bq, AVRational cq)
Rescale a 64-bit integer by 2 rational numbers.
Definition: mathematics.c:142
AVExpr
Definition: eval.c:158
field
it s the only field you need to keep assuming you have a context There is some magic you don t need to care about around this field
Definition: writing_filters.txt:78
AVPixFmtDescriptor::log2_chroma_w
uint8_t log2_chroma_w
Amount to shift the luma width right to find the chroma width.
Definition: pixdesc.h:80
SwsContext::contrast
int contrast
Definition: swscale_internal.h:456
ScaleContext::w_pexpr
AVExpr * w_pexpr
Definition: vf_scale.c:159
avpriv_set_systematic_pal2
int avpriv_set_systematic_pal2(uint32_t pal[256], enum AVPixelFormat pix_fmt)
Definition: imgutils.c:178
NAN
#define NAN
Definition: mathematics.h:115
FILTER_INPUTS
#define FILTER_INPUTS(array)
Definition: internal.h:182
link
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a link
Definition: filter_design.txt:23
ScaleContext::out_h_chr_pos
int out_h_chr_pos
Definition: vf_scale.c:171
av_color_range_name
const char * av_color_range_name(enum AVColorRange range)
Definition: pixdesc.c:3281
scale_field
static int scale_field(ScaleContext *scale, AVFrame *dst, AVFrame *src, int field)
Definition: vf_scale.c:831
LIBAVUTIL_VERSION_INT
#define LIBAVUTIL_VERSION_INT
Definition: version.h:85
VAR_REF_DAR
@ VAR_REF_DAR
Definition: vf_scale.c:107
ff_framesync_class
const AVClass ff_framesync_class
Definition: framesync.c:54
AVClass
Describe the class of an AVClass context structure.
Definition: log.h:66
NULL
#define NULL
Definition: coverity.c:32
ScaleContext::out_v_chr_pos
int out_v_chr_pos
Definition: vf_scale.c:172
av_frame_copy_props
int av_frame_copy_props(AVFrame *dst, const AVFrame *src)
Copy only "metadata" fields from src to dst.
Definition: frame.c:709
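A common pattern (sketch; out_w and out_h are placeholder parameters) is to copy the timing, color and metadata fields from the source frame and then set the data-describing fields explicitly, since av_frame_copy_props deliberately leaves width/height/format/data untouched:

#include "libavutil/frame.h"

static int init_output_frame(AVFrame *dst, const AVFrame *src, int out_w, int out_h)
{
    int ret = av_frame_copy_props(dst, src);   /* pts, sample_aspect_ratio, metadata, ... */
    if (ret < 0)
        return ret;
    dst->width  = out_w;
    dst->height = out_h;
    dst->format = src->format;
    return 0;
}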
VAR_POS
@ VAR_POS
Definition: noise.c:56
VAR_T
@ VAR_T
Definition: vf_scale.c:99
fs
#define fs(width, name, subs,...)
Definition: cbs_vp9.c:200
AVRational
Rational number (pair of numerator and denominator).
Definition: rational.h:58
isnan
#define isnan(x)
Definition: libm.h:340
scale2ref_child_class_iterate
static const AVClass * scale2ref_child_class_iterate(void **iter)
Definition: vf_scale.c:1272
ScaleContext::in_range
int in_range
Definition: vf_scale.c:168
AVFilterContext::inputs
AVFilterLink ** inputs
array of pointers to input links
Definition: avfilter.h:415
VAR_IN_W
@ VAR_IN_W
Definition: vf_scale.c:87
av_default_item_name
const char * av_default_item_name(void *ptr)
Return the context name.
Definition: log.c:237
ff_add_format
int ff_add_format(AVFilterFormats **avff, int64_t fmt)
Add fmt to the list of media formats contained in *avff.
Definition: formats.c:505
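A hedged sketch of how ff_add_format and ff_formats_ref (above) are usually combined during format negotiation; the helper name and the choice of pixel formats are illustrative only:

#include "formats.h"               /* libavfilter-internal header */
#include "libavutil/pixfmt.h"

static int add_supported_formats(AVFilterFormats **ref)
{
    AVFilterFormats *formats = NULL;
    int ret;

    if ((ret = ff_add_format(&formats, AV_PIX_FMT_YUV420P)) < 0 ||
        (ret = ff_add_format(&formats, AV_PIX_FMT_RGB24))   < 0)
        return ret;
    return ff_formats_ref(formats, ref);   /* hand the list over to the caller's reference */
}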
parseutils.h
sws_alloc_context
struct SwsContext * sws_alloc_context(void)
Allocate an empty SwsContext.
Definition: utils.c:1213
ff_fmt_is_in
int ff_fmt_is_in(int fmt, const int *fmts)
Tell if an integer is contained in the provided -1-terminated list of integers.
Definition: formats.c:407
ScaleContext::h_pexpr
AVExpr * h_pexpr
Definition: vf_scale.c:160
double
double
Definition: af_crystalizer.c:131
AVCOL_SPC_YCGCO
@ AVCOL_SPC_YCGCO
used by Dirac / VC-2 and H.264 FRext, see ITU-T SG16
Definition: pixfmt.h:618
av_opt_get_int
int av_opt_get_int(void *obj, const char *name, int search_flags, int64_t *out_val)
Definition: opt.c:1217
sws_setColorspaceDetails
int sws_setColorspaceDetails(struct SwsContext *c, const int inv_table[4], int srcRange, const int table[4], int dstRange, int brightness, int contrast, int saturation)
Definition: utils.c:1030
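A hedged sketch of the colorspace API, assuming sws points to an already-initialized SwsContext: sws_getCoefficients supplies the coefficient tables that sws_setColorspaceDetails expects, and 0 / 1<<16 / 1<<16 are the neutral brightness, contrast and saturation values:

#include "libswscale/swscale.h"

static int set_bt601_to_bt709(struct SwsContext *sws)
{
    const int *src_tbl = sws_getCoefficients(SWS_CS_ITU601);   /* input:  BT.601 */
    const int *dst_tbl = sws_getCoefficients(SWS_CS_ITU709);   /* output: BT.709 */
    /* srcRange = 0 and dstRange = 0 select limited (MPEG) range on both sides */
    return sws_setColorspaceDetails(sws, src_tbl, 0, dst_tbl, 0,
                                    0, 1 << 16, 1 << 16);
}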
AVPixFmtDescriptor::flags
uint64_t flags
Combination of AV_PIX_FMT_FLAG_...
Definition: pixdesc.h:94
ff_vf_scale2ref
const AVFilter ff_vf_scale2ref
Definition: vf_scale.c:183
AVCOL_RANGE_UNSPECIFIED
@ AVCOL_RANGE_UNSPECIFIED
Definition: pixfmt.h:652
c
Undefined behavior: in the C language, some operations are undefined, like signed integer overflow, dereferencing freed pointers, or accessing outside allocated space. Undefined behavior must not occur in a C program; it is not safe even if the output of undefined operations is unused. The unsafety may seem nit-picking, but optimizing compilers have in fact optimized code on the assumption that no undefined behavior occurs, and optimizing code based on wrong assumptions can and has in some cases led to effects beyond the output of computations. The signed integer overflow problem in speed-critical code: code which is highly optimized and works with signed integers sometimes has the problem that often the output of the computation does not c...
Definition: undefined.txt:32
ScaleContext::out_color_matrix
int out_color_matrix
Definition: vf_scale.c:166
av_opt_set_int
int av_opt_set_int(void *obj, const char *name, int64_t val, int search_flags)
Definition: opt.c:799
AV_CLASS_CATEGORY_FILTER
@ AV_CLASS_CATEGORY_FILTER
Definition: log.h:36
VAR_IW
@ VAR_IW
Definition: vf_scale.c:87
av_opt_copy
int av_opt_copy(void *dst, const void *src)
Copy options from src object into dest object.
Definition: opt.c:2125
eval.h
VAR_IH
@ VAR_IH
Definition: vf_scale.c:88
VAR_REF_SAR
@ VAR_REF_SAR
Definition: vf_scale.c:106
NULL_IF_CONFIG_SMALL
#define NULL_IF_CONFIG_SMALL(x)
Return NULL if CONFIG_SMALL is true, otherwise the argument without modification.
Definition: internal.h:94
AVClass::child_next
void *(* child_next)(void *obj, void *prev)
Return next AVOptions-enabled child or NULL.
Definition: log.h:131
child_class_iterate
static const AVClass * child_class_iterate(void **iter)
Definition: vf_scale.c:1157
ScaleContext::w
int w
New dimensions.
Definition: vf_scale.c:146
AVFrame::time_base
AVRational time_base
Time base for the timestamps in this frame.
Definition: frame.h:501
VAR_RH
@ VAR_RH
Definition: vf_scale.c:104
AVFrame::pkt_pos
attribute_deprecated int64_t pkt_pos
reordered pos from the last AVPacket that has been input into the decoder
Definition: frame.h:684
AVFrame::format
int format
format of the frame, -1 if unknown or unset. Values correspond to enum AVPixelFormat for video frames,...
Definition: frame.h:461
scale_eval.h
VAR_RW
@ VAR_RW
Definition: vf_scale.c:103
FF_API_FRAME_PKT
#define FF_API_FRAME_PKT
Definition: version.h:109
ScaleContext::hsub
int hsub
Definition: vf_scale.c:150
VAR_OUT_W
@ VAR_OUT_W
Definition: vf_scale.c:89
imgutils_internal.h
ff_all_color_ranges
AVFilterFormats * ff_all_color_ranges(void)
Construct an AVFilterFormats representing all possible color ranges.
Definition: formats.c:647
av_pix_fmt_desc_get_id
enum AVPixelFormat av_pix_fmt_desc_get_id(const AVPixFmtDescriptor *desc)
Definition: pixdesc.c:2984
filter_frame
static int filter_frame(AVFilterLink *link, AVFrame *in)
Definition: vf_scale.c:1073
av_parse_video_size
int av_parse_video_size(int *width_ptr, int *height_ptr, const char *str)
Parse str and put in width_ptr and height_ptr the detected values.
Definition: parseutils.c:150
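For example (a small standalone sketch), av_parse_video_size accepts both explicit "WxH" strings and named abbreviations such as "hd720":

#include "libavutil/parseutils.h"

static int parse_size_example(void)
{
    int w = 0, h = 0;
    int ret = av_parse_video_size(&w, &h, "hd720");    /* w = 1280, h = 720 */
    if (ret < 0)
        return ret;
    return av_parse_video_size(&w, &h, "640x360");     /* w = 640, h = 360 */
}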
sws_isSupportedInput
#define sws_isSupportedInput(x)
internal.h
AVCOL_SPC_SMPTE240M
@ AVCOL_SPC_SMPTE240M
derived from 170M primaries and D65 white point, 170M is derived from BT470 System M's primaries
Definition: pixfmt.h:617
ScaleContext::vsub
int vsub
chroma subsampling
Definition: vf_scale.c:150
sws_scale_frame
int sws_scale_frame(struct SwsContext *c, AVFrame *dst, const AVFrame *src)
Scale source data from src and write the output to dst.
Definition: swscale.c:1187
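A minimal sketch of the frame-based scaling path, assuming a fixed 640x360 YUV420P target and a context created with sws_getContext (error handling simplified; this is not the exact code path the filter uses):

#include "libswscale/swscale.h"
#include "libavutil/frame.h"

static AVFrame *scale_to_640x360(const AVFrame *src)
{
    struct SwsContext *sws = NULL;
    AVFrame *dst = av_frame_alloc();
    if (!dst)
        return NULL;
    dst->format = AV_PIX_FMT_YUV420P;
    dst->width  = 640;
    dst->height = 360;
    if (av_frame_get_buffer(dst, 0) < 0)
        goto fail;
    sws = sws_getContext(src->width, src->height, src->format,
                         dst->width, dst->height, dst->format,
                         SWS_BILINEAR, NULL, NULL, NULL);
    if (!sws || sws_scale_frame(sws, dst, src) < 0)
        goto fail;
    av_frame_copy_props(dst, src);    /* carry over pts, metadata, ... */
    sws_freeContext(sws);
    return dst;
fail:
    sws_freeContext(sws);             /* safe on NULL */
    av_frame_free(&dst);
    return NULL;
}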
config_props
static int config_props(AVFilterLink *outlink)
Definition: vf_scale.c:620
interlaced
uint8_t interlaced
Definition: mxfenc.c:2264
ScaleContext::output_is_pal
int output_is_pal
set to 1 if the output format is paletted
Definition: vf_scale.c:153
VAR_SAR
@ VAR_SAR
Definition: vf_scale.c:92
VAR_RDAR
@ VAR_RDAR
Definition: vf_scale.c:107
sws_isSupportedEndiannessConversion
int sws_isSupportedEndiannessConversion(enum AVPixelFormat pix_fmt)
Definition: utils.c:370
i
#define i(width, name, range_min, range_max)
Definition: cbs_h2645.c:256
AVCOL_SPC_BT2020_NCL
@ AVCOL_SPC_BT2020_NCL
ITU-R BT2020 non-constant luminance system.
Definition: pixfmt.h:620
VAR_S2R_MAIN_N
@ VAR_S2R_MAIN_N
Definition: vf_scale.c:120
internal.h
ff_filter_get_nb_threads
int ff_filter_get_nb_threads(AVFilterContext *ctx)
Get number of threads for current filter instance.
Definition: avfilter.c:827
EvalMode
EvalMode
Definition: af_volume.h:39
AVFilterPad::name
const char * name
Pad name.
Definition: internal.h:39
AVCOL_SPC_UNSPECIFIED
@ AVCOL_SPC_UNSPECIFIED
Definition: pixfmt.h:612
ScaleContext::h_expr
char * h_expr
height expression string
Definition: vf_scale.c:158
AV_FRAME_FLAG_INTERLACED
#define AV_FRAME_FLAG_INTERLACED
A flag to mark frames whose content is interlaced.
Definition: frame.h:633
AVCOL_RANGE_MPEG
@ AVCOL_RANGE_MPEG
Narrow or limited range content.
Definition: pixfmt.h:669
avfilter_vf_scale_outputs
static const AVFilterPad avfilter_vf_scale_outputs[]
Definition: vf_scale.c:1248
AVFilter
Filter definition.
Definition: avfilter.h:166
AV_PIX_FMT_PAL8
@ AV_PIX_FMT_PAL8
8 bits with AV_PIX_FMT_RGB32 palette
Definition: pixfmt.h:84
ret
ret
Definition: filter_design.txt:187
FFSWAP
#define FFSWAP(type, a, b)
Definition: macros.h:52
AVClass::class_name
const char * class_name
The name of the class; usually it is the same name as the context structure type to which the AVClass...
Definition: log.h:71
frame
These buffered frames must be flushed immediately if a new input produces new output. The filter must not call request_frame to get more; it must just process the frame or queue it. The task of requesting more frames is left to the filter's request_frame method or the application. If a filter has several inputs, it must be ready for frames arriving randomly on any input; any filter with several inputs will most likely require some kind of queuing mechanism. It is perfectly acceptable to have a limited queue and to drop frames when the inputs are too unbalanced. request_frame: for filters that do not use the activate callback, this method is called when a frame is wanted on an output. For a source, it should directly call filter_frame on the corresponding output. For a filter, if there are queued frames already, one of these frames should be pushed; otherwise, the filter should request a frame on one of its inputs, repeatedly until at least one frame has been pushed, or at least make progress towards producing a frame.
Definition: filter_design.txt:264
ScaleContext::in_color_matrix
int in_color_matrix
Definition: vf_scale.c:165
VAR_REF_HSUB
@ VAR_REF_HSUB
Definition: vf_scale.c:108
child_next
static void * child_next(void *obj, void *prev)
Definition: vf_scale.c:1171
ff_framesync_init
int ff_framesync_init(FFFrameSync *fs, AVFilterContext *parent, unsigned nb_in)
Initialize a frame sync structure.
Definition: framesync.c:86
AVFrame::sample_aspect_ratio
AVRational sample_aspect_ratio
Sample aspect ratio for the video frame, 0/1 if unknown/unspecified.
Definition: frame.h:481
sws_getColorspaceDetails
int sws_getColorspaceDetails(struct SwsContext *c, int **inv_table, int *srcRange, int **table, int *dstRange, int *brightness, int *contrast, int *saturation)
Definition: utils.c:1189
ff_scale_adjust_dimensions
int ff_scale_adjust_dimensions(AVFilterLink *inlink, int *ret_w, int *ret_h, int force_original_aspect_ratio, int force_divisible_by)
Transform evaluated width and height obtained from ff_scale_eval_dimensions into actual target width ...
Definition: scale_eval.c:113
sws_init_context
av_warn_unused_result int sws_init_context(struct SwsContext *sws_context, SwsFilter *srcFilter, SwsFilter *dstFilter)
Initialize the swscaler context sws_context.
Definition: utils.c:2069
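A hedged sketch of the alloc/configure/init path: sws_alloc_context, then AVOption names such as "srcw", "src_format" and "sws_flags" (these are the option names libswscale registers; verify against your libswscale version), then sws_init_context:

#include "libswscale/swscale.h"
#include "libavutil/opt.h"

static struct SwsContext *make_scaler(int in_w, int in_h, enum AVPixelFormat in_fmt,
                                      int out_w, int out_h, enum AVPixelFormat out_fmt)
{
    struct SwsContext *sws = sws_alloc_context();
    if (!sws)
        return NULL;
    av_opt_set_int(sws, "srcw",       in_w,    0);
    av_opt_set_int(sws, "srch",       in_h,    0);
    av_opt_set_int(sws, "src_format", in_fmt,  0);
    av_opt_set_int(sws, "dstw",       out_w,   0);
    av_opt_set_int(sws, "dsth",       out_h,   0);
    av_opt_set_int(sws, "dst_format", out_fmt, 0);
    av_opt_set_int(sws, "sws_flags",  SWS_BICUBIC, 0);
    if (sws_init_context(sws, NULL, NULL) < 0) {
        sws_freeContext(sws);
        return NULL;
    }
    return sws;
}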
VAR_S2R_MAIN_T
@ VAR_S2R_MAIN_T
Definition: vf_scale.c:121
scale_eval_dimensions
static int scale_eval_dimensions(AVFilterContext *ctx)
Definition: vf_scale.c:531
var_names
static const char *const var_names[]
Definition: vf_scale.c:45
AVFrame::height
int height
Definition: frame.h:446
VAR_S2R_MAIN_DAR
@ VAR_S2R_MAIN_DAR
Definition: vf_scale.c:117
scale_options
static const AVOption scale_options[]
Definition: vf_scale.c:1185
framesync.h
sws_freeContext
void sws_freeContext(struct SwsContext *swsContext)
Free the swscaler context swsContext.
Definition: utils.c:2433
do_scale
static int do_scale(FFFrameSync *fs)
Definition: vf_scale.c:1008
AVRational::den
int den
Denominator.
Definition: rational.h:60
AVCOL_SPC_FCC
@ AVCOL_SPC_FCC
FCC Title 47 Code of Federal Regulations 73.682 (a)(20)
Definition: pixfmt.h:614
AV_OPT_TYPE_INT
@ AV_OPT_TYPE_INT
Definition: opt.h:245
avfilter.h
uninit
static av_cold void uninit(AVFilterContext *ctx)
Definition: vf_scale.c:458
ScaleContext::force_original_aspect_ratio
int force_original_aspect_ratio
Definition: vf_scale.c:176
ref
static int ref[MAX_W *MAX_W]
Definition: jpeg2000dwt.c:112
avfilter_vf_scale2ref_inputs
static const AVFilterPad avfilter_vf_scale2ref_inputs[]
Definition: vf_scale.c:1297
av_mul_q
AVRational av_mul_q(AVRational b, AVRational c)
Multiply two rationals.
Definition: rational.c:80
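For instance, a display aspect ratio is obtained by multiplying the pixel aspect (w:h) by the sample aspect ratio; av_mul_q also reduces the result (sketch, values illustrative):

#include "libavutil/rational.h"

static AVRational display_aspect(int w, int h, AVRational sar)
{
    /* e.g. w = 1920, h = 1080, sar = 1/1  ->  {16, 9} */
    return av_mul_q((AVRational){ w, h }, sar);
}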
AVFilterContext
An instance of a filter.
Definition: avfilter.h:407
VAR_OW
@ VAR_OW
Definition: vf_scale.c:89
FF_DISABLE_DEPRECATION_WARNINGS
#define FF_DISABLE_DEPRECATION_WARNINGS
Definition: internal.h:72
av_strdup
char * av_strdup(const char *s)
Duplicate a string.
Definition: mem.c:272
desc
const char * desc
Definition: libsvtav1.c:79
VAR_VSUB
@ VAR_VSUB
Definition: vf_scale.c:95
AVMEDIA_TYPE_VIDEO
@ AVMEDIA_TYPE_VIDEO
Definition: avutil.h:201
mem.h
sws_getCoefficients
const int * sws_getCoefficients(int colorspace)
Return a pointer to yuv<->rgb coefficients for the given colorspace suitable for sws_setColorspaceDet...
Definition: yuv2rgb.c:61
sws_colorspaces
static const int sws_colorspaces[]
Definition: vf_scale.c:351
AVPixFmtDescriptor
Descriptor that unambiguously describes how the bits of a pixel are stored in the up to 4 data planes...
Definition: pixdesc.h:69
ScaleContext::interlaced
int interlaced
Definition: vf_scale.c:154
av_free
#define av_free(p)
Definition: tableprint_vlc.h:33
scale
static void scale(int *out, const int *in, const int w, const int h, const int shift)
Definition: intra.c:291
VAR_N
@ VAR_N
Definition: vf_scale.c:98
AV_OPT_TYPE_BOOL
@ AV_OPT_TYPE_BOOL
Definition: opt.h:261
FILTER_OUTPUTS
#define FILTER_OUTPUTS(array)
Definition: internal.h:183
av_freep
#define av_freep(p)
Definition: tableprint_vlc.h:34
src
INIT_CLIP pixel * src
Definition: h264pred_template.c:418
scale_class
static const AVClass scale_class
Definition: vf_scale.c:1231
ScaleContext::w_expr
char * w_expr
width expression string
Definition: vf_scale.c:157
EVAL_MODE_INIT
@ EVAL_MODE_INIT
Definition: vf_scale.c:127
flags
#define flags(name, subs,...)
Definition: cbs_av1.c:474
AVFrame::linesize
int linesize[AV_NUM_DATA_POINTERS]
For video, a positive or negative value, which is typically indicating the size in bytes of each pict...
Definition: frame.h:419
av_opt_get
int av_opt_get(void *obj, const char *name, int search_flags, uint8_t **out_val)
Definition: opt.c:1159
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:27
VAR_REF_VSUB
@ VAR_REF_VSUB
Definition: vf_scale.c:109
h
h
Definition: vp9dsp_template.c:2038
ff_framesync_activate
int ff_framesync_activate(FFFrameSync *fs)
Examine the frames in the filter's input and try to produce output.
Definition: framesync.c:358
AV_OPT_TYPE_STRING
@ AV_OPT_TYPE_STRING
Definition: opt.h:249
VAR_OHSUB
@ VAR_OHSUB
Definition: vf_scale.c:96
AVCOL_SPC_BT709
@ AVCOL_SPC_BT709
also ITU-R BT1361 / IEC 61966-2-4 xvYCC709 / derived in SMPTE RP 177 Annex B
Definition: pixfmt.h:611
int
int
Definition: ffmpeg_filter.c:424
SwsContext
Definition: swscale_internal.h:301
AV_PIX_FMT_FLAG_PAL
#define AV_PIX_FMT_FLAG_PAL
Pixel format has a palette in data[1], values are indexes in this palette.
Definition: pixdesc.h:120
AV_OPT_TYPE_CONST
@ AV_OPT_TYPE_CONST
Definition: opt.h:254
ff_vf_scale
const AVFilter ff_vf_scale
Definition: vf_scale.c:1256
snprintf
#define snprintf
Definition: snprintf.h:34
ScaleContext::size_str
char * size_str
Definition: vf_scale.c:147
VAR_S2R_MAIN_VSUB
@ VAR_S2R_MAIN_VSUB
Definition: vf_scale.c:119
AVPixFmtDescriptor::log2_chroma_h
uint8_t log2_chroma_h
Amount to shift the luma height right to find the chroma height.
Definition: pixdesc.h:89
ff_framesync_preinit
void ff_framesync_preinit(FFFrameSync *fs)
Pre-initialize a frame sync structure.
Definition: framesync.c:78
swscale.h
ScaleContext::h
int h
Definition: vf_scale.c:146
av_x_if_null
static void * av_x_if_null(const void *p, const void *x)
Return the default pointer x in case p is NULL.
Definition: avutil.h:312
av_get_pix_fmt_name
const char * av_get_pix_fmt_name(enum AVPixelFormat pix_fmt)
Return the short name for a pixel format, NULL in case pix_fmt is unknown.
Definition: pixdesc.c:2885
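A small sketch combining this with av_x_if_null (listed above): av_get_pix_fmt_name returns NULL for an unknown format, so a fallback string is substituted before logging (the helper name is illustrative):

#include "libavutil/avutil.h"
#include "libavutil/pixdesc.h"

static const char *pix_fmt_name_or_unknown(enum AVPixelFormat fmt)
{
    /* av_x_if_null(p, x) returns x when p is NULL */
    return av_x_if_null(av_get_pix_fmt_name(fmt), "unknown");
}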
ScaleContext::in_v_chr_pos
int in_v_chr_pos
Definition: vf_scale.c:174
SwsContext::param
double param[2]
Input parameters for scaling algorithms that need them.
Definition: swscale_internal.h:344