FFmpeg
vf_signalstats.c
Go to the documentation of this file.
1 /*
2  * Copyright (c) 2010 Mark Heath mjpeg0 @ silicontrip dot org
3  * Copyright (c) 2014 Clément Bœsch
4  * Copyright (c) 2014 Dave Rice @dericed
5  *
6  * This file is part of FFmpeg.
7  *
8  * FFmpeg is free software; you can redistribute it and/or
9  * modify it under the terms of the GNU Lesser General Public
10  * License as published by the Free Software Foundation; either
11  * version 2.1 of the License, or (at your option) any later version.
12  *
13  * FFmpeg is distributed in the hope that it will be useful,
14  * but WITHOUT ANY WARRANTY; without even the implied warranty of
15  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16  * Lesser General Public License for more details.
17  *
18  * You should have received a copy of the GNU Lesser General Public
19  * License along with FFmpeg; if not, write to the Free Software
20  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
21  */
22 
23 #include "libavutil/intreadwrite.h"
24 #include "libavutil/mem.h"
25 #include "libavutil/opt.h"
26 #include "libavutil/pixdesc.h"
27 #include "filters.h"
28 
29 enum FilterMode {
35 };
36 
37 typedef struct SignalstatsContext {
38  const AVClass *class;
39  int chromah; // height of chroma plane
40  int chromaw; // width of chroma plane
41  int hsub; // horizontal subsampling
42  int vsub; // vertical subsampling
43  int depth; // pixel depth
44  int fs; // pixel count per frame
45  int cfs; // pixel count per frame of chroma planes
46  int outfilter; // FilterMode
47  int filters;
49  uint8_t rgba_color[4];
50  int yuv_color[3];
51  int nb_jobs;
52  int *jobs_rets;
53 
54  int maxsize; // history stats array size
55  int *histy, *histu, *histv, *histsat;
56 
60 
/* Per-job payload for the slice-threaded analysis filters:
 * "in" is the frame being analyzed; "out" is the writable frame to burn
 * highlight pixels into, or NULL when no highlighting is requested. */
61 typedef struct ThreadData {
62  const AVFrame *in;
63  AVFrame *out;
64 } ThreadData;
65 
66 typedef struct ThreadDataHueSatMetrics {
67  const AVFrame *src;
70 
71 #define OFFSET(x) offsetof(SignalstatsContext, x)
72 #define FLAGS AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM
73 
74 static const AVOption signalstats_options[] = {
    /* "stat" is a bitmask: each FILTER_* constant enables one analysis. */
75  {"stat", "set statistics filters", OFFSET(filters), AV_OPT_TYPE_FLAGS, {.i64=0}, 0, INT_MAX, FLAGS, .unit = "filters"},
76  {"tout", "analyze pixels for temporal outliers", 0, AV_OPT_TYPE_CONST, {.i64=1<<FILTER_TOUT}, 0, 0, FLAGS, .unit = "filters"},
77  {"vrep", "analyze video lines for vertical line repetition", 0, AV_OPT_TYPE_CONST, {.i64=1<<FILTER_VREP}, 0, 0, FLAGS, .unit = "filters"},
78  {"brng", "analyze for pixels outside of broadcast range", 0, AV_OPT_TYPE_CONST, {.i64=1<<FILTER_BRNG}, 0, 0, FLAGS, .unit = "filters"},
    /* "out" selects at most one filter whose hits are burned into the output. */
79  {"out", "set video filter", OFFSET(outfilter), AV_OPT_TYPE_INT, {.i64=FILTER_NONE}, -1, FILT_NUMB-1, FLAGS, .unit = "out"},
80  {"tout", "highlight pixels that depict temporal outliers", 0, AV_OPT_TYPE_CONST, {.i64=FILTER_TOUT}, 0, 0, FLAGS, .unit = "out"},
81  {"vrep", "highlight video lines that depict vertical line repetition", 0, AV_OPT_TYPE_CONST, {.i64=FILTER_VREP}, 0, 0, FLAGS, .unit = "out"},
82  {"brng", "highlight pixels that are outside of broadcast range", 0, AV_OPT_TYPE_CONST, {.i64=FILTER_BRNG}, 0, 0, FLAGS, .unit = "out"},
    /* "c" and "color" are aliases for the highlight color (default yellow). */
83  {"c", "set highlight color", OFFSET(rgba_color), AV_OPT_TYPE_COLOR, {.str="yellow"}, .flags=FLAGS},
84  {"color", "set highlight color", OFFSET(rgba_color), AV_OPT_TYPE_COLOR, {.str="yellow"}, .flags=FLAGS},
85  {NULL}
86 };
87 
88 AVFILTER_DEFINE_CLASS(signalstats);
89 
/* Filter init: enable the analysis backing the selected "out" highlight
 * filter and precompute the highlight color in YUV.
 * NOTE(review): the signature line (original line 90) was elided in this
 * extraction; the cross-reference lists it as
 * static av_cold int init(AVFilterContext *ctx) -- confirm upstream. */
91 {
92  uint8_t r, g, b;
93  SignalstatsContext *s = ctx->priv;
94 
    /* If a highlight filter was chosen, make sure its analysis actually runs. */
95  if (s->outfilter != FILTER_NONE)
96  s->filters |= 1 << s->outfilter;
97 
    /* Integer RGB->YUV conversion of the highlight color (limited-range,
     * BT.601-style coefficients with +0.5 rounding before the >>8). */
98  r = s->rgba_color[0];
99  g = s->rgba_color[1];
100  b = s->rgba_color[2];
101  s->yuv_color[0] = (( 66*r + 129*g + 25*b + (1<<7)) >> 8) + 16;
102  s->yuv_color[1] = ((-38*r + -74*g + 112*b + (1<<7)) >> 8) + 128;
103  s->yuv_color[2] = ((112*r + -94*g + -18*b + (1<<7)) >> 8) + 128;
104  return 0;
105 }
106 
/* Filter uninit: release the cached frames and all heap buffers allocated in
 * config_output. av_frame_free/av_freep NULL the pointers, so a partially
 * configured context is safe to tear down.
 * NOTE(review): the signature line (original line 107) was elided; the
 * cross-reference lists it as static av_cold void uninit(AVFilterContext *ctx). */
108 {
109  SignalstatsContext *s = ctx->priv;
110  av_frame_free(&s->frame_prev);
111  av_frame_free(&s->frame_sat);
112  av_frame_free(&s->frame_hue);
113  av_freep(&s->jobs_rets);
114  av_freep(&s->histy);
115  av_freep(&s->histu);
116  av_freep(&s->histv);
117  av_freep(&s->histsat);
118 }
119 
120 // TODO: add more
121 static const enum AVPixelFormat pix_fmts[] = {
134 };
135 
/* Allocate a w x h frame of the given pixel format with its data buffers.
 * Returns NULL on any allocation failure. */
136 static AVFrame *alloc_frame(enum AVPixelFormat pixfmt, int w, int h)
137 {
    /* NOTE(review): the allocation of "frame" (original line 138, presumably
     * av_frame_alloc()) was elided in this extraction -- confirm upstream. */
139  if (!frame)
140  return NULL;
141 
142  frame->format = pixfmt;
143  frame->width = w;
144  frame->height = h;
145 
146  if (av_frame_get_buffer(frame, 0) < 0) {
    /* NOTE(review): original line 147 elided here; presumably frees "frame"
     * so the caller only ever sees NULL or a fully allocated frame. */
148  return NULL;
149  }
150 
151  return frame;
152 }
153 
/* Configure the output link: cache subsampling/depth-derived geometry,
 * allocate the per-level histograms and per-job result array, and the
 * scratch saturation/hue frames. Returns 0 or AVERROR(ENOMEM). */
154 static int config_output(AVFilterLink *outlink)
155 {
156  AVFilterContext *ctx = outlink->src;
157  SignalstatsContext *s = ctx->priv;
158  AVFilterLink *inlink = outlink->src->inputs[0];
    /* NOTE(review): the declaration of "desc" (original line 159) was elided;
     * presumably av_pix_fmt_desc_get(inlink->format) -- confirm upstream. */
160  s->hsub = desc->log2_chroma_w;
161  s->vsub = desc->log2_chroma_h;
162  s->depth = desc->comp[0].depth;
    /* One histogram bin per representable sample value. */
163  s->maxsize = 1 << s->depth;
164  s->histy = av_malloc_array(s->maxsize, sizeof(*s->histy));
165  s->histu = av_malloc_array(s->maxsize, sizeof(*s->histu));
166  s->histv = av_malloc_array(s->maxsize, sizeof(*s->histv));
167  s->histsat = av_malloc_array(s->maxsize, sizeof(*s->histsat));
168 
169  if (!s->histy || !s->histu || !s->histv || !s->histsat)
170  return AVERROR(ENOMEM);
171 
172  outlink->w = inlink->w;
173  outlink->h = inlink->h;
174 
    /* Chroma plane dimensions, rounded up for odd luma sizes. */
175  s->chromaw = AV_CEIL_RSHIFT(inlink->w, s->hsub);
176  s->chromah = AV_CEIL_RSHIFT(inlink->h, s->vsub);
177 
    /* Pixel counts used as normalization denominators for the stats. */
178  s->fs = inlink->w * inlink->h;
179  s->cfs = s->chromaw * s->chromah;
180 
    /* At most one job per output row, at least one job overall. */
181  s->nb_jobs = FFMAX(1, FFMIN(inlink->h, ff_filter_get_nb_threads(ctx)));
182  s->jobs_rets = av_malloc_array(s->nb_jobs, sizeof(*s->jobs_rets));
183  if (!s->jobs_rets)
184  return AVERROR(ENOMEM);
185 
    /* Saturation fits in the source depth; hue is always a 16-bit plane
     * because it stores degrees in [0, 360). */
186  s->frame_sat = alloc_frame(s->depth > 8 ? AV_PIX_FMT_GRAY16 : AV_PIX_FMT_GRAY8, inlink->w, inlink->h);
187  s->frame_hue = alloc_frame(AV_PIX_FMT_GRAY16, inlink->w, inlink->h);
188  if (!s->frame_sat || !s->frame_hue)
189  return AVERROR(ENOMEM);
190 
191  return 0;
192 }
193 
194 static void burn_frame8(const SignalstatsContext *s, AVFrame *f, int x, int y)
195 {
196  const int chromax = x >> s->hsub;
197  const int chromay = y >> s->vsub;
198  f->data[0][y * f->linesize[0] + x] = s->yuv_color[0];
199  f->data[1][chromay * f->linesize[1] + chromax] = s->yuv_color[1];
200  f->data[2][chromay * f->linesize[2] + chromax] = s->yuv_color[2];
201 }
202 
203 static void burn_frame16(const SignalstatsContext *s, AVFrame *f, int x, int y)
204 {
205  const int chromax = x >> s->hsub;
206  const int chromay = y >> s->vsub;
207  const int mult = 1 << (s->depth - 8);
208  AV_WN16(f->data[0] + y * f->linesize[0] + x * 2, s->yuv_color[0] * mult);
209  AV_WN16(f->data[1] + chromay * f->linesize[1] + chromax * 2, s->yuv_color[1] * mult);
210  AV_WN16(f->data[2] + chromay * f->linesize[2] + chromax * 2, s->yuv_color[2] * mult);
211 }
212 
213 static int filter8_brng(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
214 {
215  ThreadData *td = arg;
216  const SignalstatsContext *s = ctx->priv;
217  const AVFrame *in = td->in;
218  AVFrame *out = td->out;
219  const int w = in->width;
220  const int h = in->height;
221  const int slice_start = (h * jobnr ) / nb_jobs;
222  const int slice_end = (h * (jobnr+1)) / nb_jobs;
223  int x, y, score = 0;
224 
225  for (y = slice_start; y < slice_end; y++) {
226  const int yc = y >> s->vsub;
227  const uint8_t *pluma = &in->data[0][y * in->linesize[0]];
228  const uint8_t *pchromau = &in->data[1][yc * in->linesize[1]];
229  const uint8_t *pchromav = &in->data[2][yc * in->linesize[2]];
230 
231  for (x = 0; x < w; x++) {
232  const int xc = x >> s->hsub;
233  const int luma = pluma[x];
234  const int chromau = pchromau[xc];
235  const int chromav = pchromav[xc];
236  const int filt = luma < 16 || luma > 235 ||
237  chromau < 16 || chromau > 240 ||
238  chromav < 16 || chromav > 240;
239  score += filt;
240  if (out && filt)
241  burn_frame8(s, out, x, y);
242  }
243  }
244  return score;
245 }
246 
247 static int filter16_brng(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
248 {
249  ThreadData *td = arg;
250  const SignalstatsContext *s = ctx->priv;
251  const AVFrame *in = td->in;
252  AVFrame *out = td->out;
253  const int mult = 1 << (s->depth - 8);
254  const int w = in->width;
255  const int h = in->height;
256  const int slice_start = (h * jobnr ) / nb_jobs;
257  const int slice_end = (h * (jobnr+1)) / nb_jobs;
258  int x, y, score = 0;
259 
260  for (y = slice_start; y < slice_end; y++) {
261  const int yc = y >> s->vsub;
262  const uint16_t *pluma = (uint16_t *)&in->data[0][y * in->linesize[0]];
263  const uint16_t *pchromau = (uint16_t *)&in->data[1][yc * in->linesize[1]];
264  const uint16_t *pchromav = (uint16_t *)&in->data[2][yc * in->linesize[2]];
265 
266  for (x = 0; x < w; x++) {
267  const int xc = x >> s->hsub;
268  const int luma = pluma[x];
269  const int chromau = pchromau[xc];
270  const int chromav = pchromav[xc];
271  const int filt = luma < 16 * mult || luma > 235 * mult ||
272  chromau < 16 * mult || chromau > 240 * mult ||
273  chromav < 16 * mult || chromav > 240 * mult;
274  score += filt;
275  if (out && filt)
276  burn_frame16(s, out, x, y);
277  }
278  }
279  return score;
280 }
281 
/* Temporal-outlier test for three vertically adjacent samples: the middle
 * sample y is an outlier when its mean distance to its neighbors x and z
 * exceeds the neighbors' mutual distance by more than 4. */
static int filter_tout_outlier(uint8_t x, uint8_t y, uint8_t z)
{
    const int mean_dev = (abs(x - y) + abs(z - y)) / 2;

    return mean_dev - abs(z - x) > 4; // threshold of 4; make configurable?
}
286 
/* Slice job: count 8-bit luma pixels that are temporal/vertical outliers
 * relative to the rows above and below (FILTER3 requires three horizontally
 * adjacent columns to all be outliers), optionally highlighting them.
 * NOTE: the FILTER/FILTER3 macros defined here capture p, lw, x and y by
 * name and are deliberately left defined -- filter16_tout reuses them with
 * its own 16-bit p/lw bindings. */
287 static int filter8_tout(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
288 {
289  ThreadData *td = arg;
290  const SignalstatsContext *s = ctx->priv;
291  const AVFrame *in = td->in;
292  AVFrame *out = td->out;
293  const int w = in->width;
294  const int h = in->height;
295  const int slice_start = (h * jobnr ) / nb_jobs;
296  const int slice_end = (h * (jobnr+1)) / nb_jobs;
297  const uint8_t *p = in->data[0];
298  int lw = in->linesize[0];
299  int x, y, score = 0, filt;
300 
301  for (y = slice_start; y < slice_end; y++) {
302 
    /* First and last rows have no vertical neighbors to compare against. */
303  if (y - 1 < 0 || y + 1 >= h)
304  continue;
305 
306  // detect two pixels above and below (to eliminate interlace artefacts)
307  // should check that video format is infact interlaced.
308 
309 #define FILTER(i, j) \
310  filter_tout_outlier(p[(y-j) * lw + x + i], \
311  p[ y * lw + x + i], \
312  p[(y+j) * lw + x + i])
313 
314 #define FILTER3(j) (FILTER(-1, j) && FILTER(0, j) && FILTER(1, j))
315 
    /* Use the distance-2 neighbors too when they exist (interlace-safe test);
     * near the frame edges fall back to distance-1 only. */
316  if (y - 2 >= 0 && y + 2 < h) {
317  for (x = 1; x < w - 1; x++) {
318  filt = FILTER3(2) && FILTER3(1);
319  score += filt;
320  if (filt && out)
321  burn_frame8(s, out, x, y);
322  }
323  } else {
324  for (x = 1; x < w - 1; x++) {
325  filt = FILTER3(1);
326  score += filt;
327  if (filt && out)
328  burn_frame8(s, out, x, y);
329  }
330  }
331  }
332  return score;
333 }
334 
/* Slice job: high-bit-depth variant of filter8_tout. Reuses the FILTER3
 * macro defined in filter8_tout, which here picks up this function's
 * 16-bit "p" and element-based "lw" bindings.
 * NOTE(review): FILTER passes uint16_t samples into filter_tout_outlier,
 * whose parameters are uint8_t -- high-depth samples are truncated; confirm
 * against upstream whether this is intentional. */
335 static int filter16_tout(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
336 {
337  ThreadData *td = arg;
338  const SignalstatsContext *s = ctx->priv;
339  const AVFrame *in = td->in;
340  AVFrame *out = td->out;
341  const int w = in->width;
342  const int h = in->height;
343  const int slice_start = (h * jobnr ) / nb_jobs;
344  const int slice_end = (h * (jobnr+1)) / nb_jobs;
345  const uint16_t *p = (uint16_t *)in->data[0];
    /* linesize is in bytes; divide by 2 to index uint16_t elements. */
346  int lw = in->linesize[0] / 2;
347  int x, y, score = 0, filt;
348 
349  for (y = slice_start; y < slice_end; y++) {
350 
    /* First and last rows have no vertical neighbors to compare against. */
351  if (y - 1 < 0 || y + 1 >= h)
352  continue;
353 
354  // detect two pixels above and below (to eliminate interlace artefacts)
355  // should check that video format is infact interlaced.
356 
    /* Use the distance-2 neighbors too when they exist; near the frame edges
     * fall back to the distance-1 test only. */
357  if (y - 2 >= 0 && y + 2 < h) {
358  for (x = 1; x < w - 1; x++) {
359  filt = FILTER3(2) && FILTER3(1);
360  score += filt;
361  if (filt && out)
362  burn_frame16(s, out, x, y);
363  }
364  } else {
365  for (x = 1; x < w - 1; x++) {
366  filt = FILTER3(1);
367  score += filt;
368  if (filt && out)
369  burn_frame16(s, out, x, y);
370  }
371  }
372  }
373  return score;
374 }
375 
376 #define VREP_START 4
377 
378 static int filter8_vrep(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
379 {
380  ThreadData *td = arg;
381  const SignalstatsContext *s = ctx->priv;
382  const AVFrame *in = td->in;
383  AVFrame *out = td->out;
384  const int w = in->width;
385  const int h = in->height;
386  const int slice_start = (h * jobnr ) / nb_jobs;
387  const int slice_end = (h * (jobnr+1)) / nb_jobs;
388  const uint8_t *p = in->data[0];
389  const int lw = in->linesize[0];
390  int x, y, score = 0;
391 
392  for (y = slice_start; y < slice_end; y++) {
393  const int y2lw = (y - VREP_START) * lw;
394  const int ylw = y * lw;
395  int filt, totdiff = 0;
396 
397  if (y < VREP_START)
398  continue;
399 
400  for (x = 0; x < w; x++)
401  totdiff += abs(p[y2lw + x] - p[ylw + x]);
402  filt = totdiff < w;
403 
404  score += filt;
405  if (filt && out)
406  for (x = 0; x < w; x++)
407  burn_frame8(s, out, x, y);
408  }
409  return score * w;
410 }
411 
412 static int filter16_vrep(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
413 {
414  ThreadData *td = arg;
415  const SignalstatsContext *s = ctx->priv;
416  const AVFrame *in = td->in;
417  AVFrame *out = td->out;
418  const int w = in->width;
419  const int h = in->height;
420  const int slice_start = (h * jobnr ) / nb_jobs;
421  const int slice_end = (h * (jobnr+1)) / nb_jobs;
422  const uint16_t *p = (uint16_t *)in->data[0];
423  const int lw = in->linesize[0] / 2;
424  int x, y, score = 0;
425 
426  for (y = slice_start; y < slice_end; y++) {
427  const int y2lw = (y - VREP_START) * lw;
428  const int ylw = y * lw;
429  int64_t totdiff = 0;
430  int filt;
431 
432  if (y < VREP_START)
433  continue;
434 
435  for (x = 0; x < w; x++)
436  totdiff += abs(p[y2lw + x] - p[ylw + x]);
437  filt = totdiff < w;
438 
439  score += filt;
440  if (filt && out)
441  for (x = 0; x < w; x++)
442  burn_frame16(s, out, x, y);
443  }
444  return score * w;
445 }
446 
/* Analysis filter dispatch table. Entry order must match the FILTER_* enum
 * values: filter_frame indexes it with the same "fil" bit position it tests
 * in s->filters. "name" is also the metadata key suffix
 * (lavfi.signalstats.<name>) under which the score is exported. */
447 static const struct {
448  const char *name;
449  int (*process8)(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs);
450  int (*process16)(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs);
451 } filters_def[] = {
452  {"TOUT", filter8_tout, filter16_tout},
453  {"VREP", filter8_vrep, filter16_vrep},
454  {"BRNG", filter8_brng, filter16_brng},
455  {NULL}
456 };
457 
/* Slice job: fill the scratch saturation (8-bit) and hue (16-bit, degrees in
 * [0,360)) planes from the 8-bit U/V planes of the source frame. */
458 static int compute_sat_hue_metrics8(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
459 {
460  int i, j;
    /* NOTE(review): the declaration of "td" (original line 461, presumably
     * ThreadDataHueSatMetrics *td = arg;) was elided in this extraction. */
462  const SignalstatsContext *s = ctx->priv;
463  const AVFrame *src = td->src;
464  AVFrame *dst_sat = td->dst_sat;
465  AVFrame *dst_hue = td->dst_hue;
466 
467  const int slice_start = (s->chromah * jobnr ) / nb_jobs;
468  const int slice_end = (s->chromah * (jobnr+1)) / nb_jobs;
469 
470  const int lsz_u = src->linesize[1];
471  const int lsz_v = src->linesize[2];
472  const uint8_t *p_u = src->data[1] + slice_start * lsz_u;
473  const uint8_t *p_v = src->data[2] + slice_start * lsz_v;
474 
475  const int lsz_sat = dst_sat->linesize[0];
476  const int lsz_hue = dst_hue->linesize[0];
477  uint8_t *p_sat = dst_sat->data[0] + slice_start * lsz_sat;
478  uint8_t *p_hue = dst_hue->data[0] + slice_start * lsz_hue;
479 
480  for (j = slice_start; j < slice_end; j++) {
481  for (i = 0; i < s->chromaw; i++) {
482  const int yuvu = p_u[i];
483  const int yuvv = p_v[i];
    /* Saturation = chroma vector magnitude around the 128 midpoint;
     * hue = chroma vector angle mapped into [0, 360) degrees. */
484  p_sat[i] = hypotf(yuvu - 128, yuvv - 128); // int or round?
485  ((int16_t*)p_hue)[i] = fmodf(floorf((180.f / M_PI) * atan2f(yuvu-128, yuvv-128) + 180.f), 360.f);
486  }
487  p_u += lsz_u;
488  p_v += lsz_v;
489  p_sat += lsz_sat;
490  p_hue += lsz_hue;
491  }
492 
493  return 0;
494 }
495 
/* Slice job: high-bit-depth variant of compute_sat_hue_metrics8. U/V, the
 * saturation plane and the hue plane are all 16-bit here, and the chroma
 * midpoint is 2^(depth-1) instead of 128. */
496 static int compute_sat_hue_metrics16(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
497 {
498  int i, j;
    /* NOTE(review): the declaration of "td" (original line 499, presumably
     * ThreadDataHueSatMetrics *td = arg;) was elided in this extraction. */
500  const SignalstatsContext *s = ctx->priv;
501  const AVFrame *src = td->src;
502  AVFrame *dst_sat = td->dst_sat;
503  AVFrame *dst_hue = td->dst_hue;
504  const int mid = 1 << (s->depth - 1);
505 
506  const int slice_start = (s->chromah * jobnr ) / nb_jobs;
507  const int slice_end = (s->chromah * (jobnr+1)) / nb_jobs;
508 
    /* linesize values are in bytes; divide by 2 to step uint16_t elements. */
509  const int lsz_u = src->linesize[1] / 2;
510  const int lsz_v = src->linesize[2] / 2;
511  const uint16_t *p_u = (uint16_t*)src->data[1] + slice_start * lsz_u;
512  const uint16_t *p_v = (uint16_t*)src->data[2] + slice_start * lsz_v;
513 
514  const int lsz_sat = dst_sat->linesize[0] / 2;
515  const int lsz_hue = dst_hue->linesize[0] / 2;
516  uint16_t *p_sat = (uint16_t*)dst_sat->data[0] + slice_start * lsz_sat;
517  uint16_t *p_hue = (uint16_t*)dst_hue->data[0] + slice_start * lsz_hue;
518 
519  for (j = slice_start; j < slice_end; j++) {
520  for (i = 0; i < s->chromaw; i++) {
521  const int yuvu = p_u[i];
522  const int yuvv = p_v[i];
    /* Saturation = chroma vector magnitude around the depth midpoint;
     * hue = chroma vector angle mapped into [0, 360) degrees. */
523  p_sat[i] = hypotf(yuvu - mid, yuvv - mid); // int or round?
524  ((int16_t*)p_hue)[i] = fmodf(floorf((180.f / M_PI) * atan2f(yuvu-mid, yuvv-mid) + 180.f), 360.f);
525  }
526  p_u += lsz_u;
527  p_v += lsz_v;
528  p_sat += lsz_sat;
529  p_hue += lsz_hue;
530  }
531 
532  return 0;
533 }
534 
/* Return the number of bits set in "mask". The mask is the OR of every
 * sample seen in a plane, so the popcount estimates the effective bit depth
 * of that plane. Kernighan's loop: each iteration clears the lowest set bit,
 * so it runs once per set bit. */
static unsigned compute_bit_depth(uint16_t mask)
{
    unsigned bits = 0;

    while (mask) {
        mask &= mask - 1; // clear the lowest set bit
        bits++;
    }
    return bits;
}
539 
/* Per-frame driver: computes hue/saturation planes, builds the Y/U/V/sat/hue
 * histograms and temporal differences against the previous frame, runs the
 * enabled analysis filters in slices, then exports everything as
 * lavfi.signalstats.* frame metadata and forwards the frame.
 * NOTE(review): the signature line (original line 540) was elided; the input
 * pad installs this as .filter_frame, so it is presumably
 * static int filter_frame(AVFilterLink *link, AVFrame *in) -- confirm. */
541 {
542  AVFilterContext *ctx = link->dst;
543  SignalstatsContext *s = ctx->priv;
544  AVFilterLink *outlink = ctx->outputs[0];
545  AVFrame *out = in;
    /* Running byte offsets into the luma/chroma planes of the current
     * (w, cw) and previous (pw, cpw) frames; advanced by linesize per row. */
546  int w = 0, cw = 0, // in
547  pw = 0, cpw = 0; // prev
548  int fil;
549  char metabuf[128];
550  unsigned int *histy = s->histy,
551  *histu = s->histu,
552  *histv = s->histv,
553  histhue[360] = {0},
554  *histsat = s->histsat;
    /* Per-channel min/low/high/max; -1 means "not found yet" while scanning. */
555  int miny = -1, minu = -1, minv = -1;
556  int maxy = -1, maxu = -1, maxv = -1;
557  int lowy = -1, lowu = -1, lowv = -1;
558  int highy = -1, highu = -1, highv = -1;
559  int minsat = -1, maxsat = -1, lowsat = -1, highsat = -1;
560  int lowp, highp, clowp, chighp;
561  int accy, accu, accv;
562  int accsat, acchue = 0;
563  int medhue, maxhue;
564  int64_t toty = 0, totu = 0, totv = 0, totsat=0;
565  int64_t tothue = 0;
566  int64_t dify = 0, difu = 0, difv = 0;
    /* OR of all samples per plane, used for the BITDEPTH metadata. */
567  uint16_t masky = 0, masku = 0, maskv = 0;
568 
569  int filtot[FILT_NUMB] = {0};
570  AVFrame *prev;
571  int ret;
572  AVFrame *sat = s->frame_sat;
573  AVFrame *hue = s->frame_hue;
574  const int hbd = s->depth > 8; // high bit depth selects the 16-bit paths
575  ThreadDataHueSatMetrics td_huesat = {
576  .src = in,
577  .dst_sat = sat,
578  .dst_hue = hue,
579  };
580 
    /* First frame: use the input itself as "previous", so DIF stats are 0. */
581  if (!s->frame_prev)
582  s->frame_prev = av_frame_clone(in);
583 
584  prev = s->frame_prev;
585 
    /* When a highlight filter is selected, burn into a writable clone. */
586  if (s->outfilter != FILTER_NONE) {
587  out = av_frame_clone(in);
588  if (!out) {
589  av_frame_free(&in);
590  return AVERROR(ENOMEM);
591  }
    /* NOTE(review): the call that assigns "ret" (original line 592, presumably
     * av_frame_make_writable(out)) was elided in this extraction -- confirm. */
593  if (ret < 0) {
594  av_frame_free(&out);
595  av_frame_free(&in);
596  return ret;
597  }
598  }
599 
    /* Compute the per-pixel saturation and hue planes in slices.
     * NOTE(review): the ff_filter_execute(...) call head (original line 600)
     * was elided; these are its trailing arguments. */
601  : compute_sat_hue_metrics8, &td_huesat,
602  NULL, FFMIN(s->chromah, ff_filter_get_nb_threads(ctx)));
603 
604  memset(s->histy, 0, s->maxsize * sizeof(*s->histy));
605  memset(s->histu, 0, s->maxsize * sizeof(*s->histu));
606  memset(s->histv, 0, s->maxsize * sizeof(*s->histv));
607  memset(s->histsat, 0, s->maxsize * sizeof(*s->histsat));
608 
609  if (hbd) {
610  const uint16_t *p_sat = (uint16_t *)sat->data[0];
611  const uint16_t *p_hue = (uint16_t *)hue->data[0];
612  const int lsz_sat = sat->linesize[0] / 2;
613  const int lsz_hue = hue->linesize[0] / 2;
614  // Calculate luma histogram and difference with previous frame or field.
615  for (int j = 0; j < link->h; j++) {
616  for (int i = 0; i < link->w; i++) {
617  const int yuv = AV_RN16(in->data[0] + w + i * 2);
618 
619  masky |= yuv;
620  histy[yuv]++;
621  dify += abs(yuv - (int)AV_RN16(prev->data[0] + pw + i * 2));
622  }
623  w += in->linesize[0];
624  pw += prev->linesize[0];
625  }
626 
627  // Calculate chroma histogram and difference with previous frame or field.
628  for (int j = 0; j < s->chromah; j++) {
629  for (int i = 0; i < s->chromaw; i++) {
630  const int yuvu = AV_RN16(in->data[1] + cw + i * 2);
631  const int yuvv = AV_RN16(in->data[2] + cw + i * 2);
632 
633  masku |= yuvu;
634  maskv |= yuvv;
635  histu[yuvu]++;
636  difu += abs(yuvu - (int)AV_RN16(prev->data[1] + cpw + i * 2));
637  histv[yuvv]++;
638  difv += abs(yuvv - (int)AV_RN16(prev->data[2] + cpw + i * 2));
639 
640  histsat[p_sat[i]]++;
641  histhue[((int16_t*)p_hue)[i]]++;
642  }
643  cw += in->linesize[1];
644  cpw += prev->linesize[1];
645  p_sat += lsz_sat;
646  p_hue += lsz_hue;
647  }
648  } else {
649  const uint8_t *p_sat = sat->data[0];
650  const uint8_t *p_hue = hue->data[0];
651  const int lsz_sat = sat->linesize[0];
652  const int lsz_hue = hue->linesize[0];
653  // Calculate luma histogram and difference with previous frame or field.
654  for (int j = 0; j < link->h; j++) {
655  for (int i = 0; i < link->w; i++) {
656  const int yuv = in->data[0][w + i];
657 
658  masky |= yuv;
659  histy[yuv]++;
660  dify += abs(yuv - prev->data[0][pw + i]);
661  }
662  w += in->linesize[0];
663  pw += prev->linesize[0];
664  }
665 
666  // Calculate chroma histogram and difference with previous frame or field.
667  for (int j = 0; j < s->chromah; j++) {
668  for (int i = 0; i < s->chromaw; i++) {
669  const int yuvu = in->data[1][cw+i];
670  const int yuvv = in->data[2][cw+i];
671 
672  masku |= yuvu;
673  maskv |= yuvv;
674  histu[yuvu]++;
675  difu += abs(yuvu - prev->data[1][cpw+i]);
676  histv[yuvv]++;
677  difv += abs(yuvv - prev->data[2][cpw+i]);
678 
679  histsat[p_sat[i]]++;
680  histhue[((int16_t*)p_hue)[i]]++;
681  }
682  cw += in->linesize[1];
683  cpw += prev->linesize[1];
684  p_sat += lsz_sat;
685  p_hue += lsz_hue;
686  }
687  }
688 
    /* Run each enabled analysis filter in slices; only the filter selected as
     * "out" gets a non-NULL td.out to burn highlights into. */
689  for (fil = 0; fil < FILT_NUMB; fil ++) {
690  if (s->filters & 1<<fil) {
691  ThreadData td = {
692  .in = in,
693  .out = out != in && s->outfilter == fil ? out : NULL,
694  };
695  memset(s->jobs_rets, 0, s->nb_jobs * sizeof(*s->jobs_rets));
    /* NOTE(review): the ff_filter_execute(...) call head (original line 696)
     * was elided; these are its trailing arguments. Per-job scores land in
     * jobs_rets and are summed below. */
697  &td, s->jobs_rets, s->nb_jobs);
698  for (int i = 0; i < s->nb_jobs; i++)
699  filtot[fil] += s->jobs_rets[i];
700  }
701  }
702 
703  // find low / high based on histogram percentile
704  // these only need to be calculated once.
705 
    /* 10th/90th percentile pixel counts for luma (fs) and chroma (cfs). */
706  lowp = lrint(s->fs * 10 / 100.);
707  highp = lrint(s->fs * 90 / 100.);
708  clowp = lrint(s->cfs * 10 / 100.);
709  chighp = lrint(s->cfs * 90 / 100.);
710 
    /* Single pass over the histograms: min/max, percentile low/high, and
     * value-weighted totals for the averages. */
711  accy = accu = accv = accsat = 0;
712  for (fil = 0; fil < s->maxsize; fil++) {
713  if (miny < 0 && histy[fil]) miny = fil;
714  if (minu < 0 && histu[fil]) minu = fil;
715  if (minv < 0 && histv[fil]) minv = fil;
716  if (minsat < 0 && histsat[fil]) minsat = fil;
717 
718  if (histy[fil]) maxy = fil;
719  if (histu[fil]) maxu = fil;
720  if (histv[fil]) maxv = fil;
721  if (histsat[fil]) maxsat = fil;
722 
723  toty += (uint64_t)histy[fil] * fil;
724  totu += (uint64_t)histu[fil] * fil;
725  totv += (uint64_t)histv[fil] * fil;
726  totsat += (uint64_t)histsat[fil] * fil;
727 
728  accy += histy[fil];
729  accu += histu[fil];
730  accv += histv[fil];
731  accsat += histsat[fil];
732 
733  if (lowy == -1 && accy >= lowp) lowy = fil;
734  if (lowu == -1 && accu >= clowp) lowu = fil;
735  if (lowv == -1 && accv >= clowp) lowv = fil;
736  if (lowsat == -1 && accsat >= clowp) lowsat = fil;
737 
738  if (highy == -1 && accy >= highp) highy = fil;
739  if (highu == -1 && accu >= chighp) highu = fil;
740  if (highv == -1 && accv >= chighp) highv = fil;
741  if (highsat == -1 && accsat >= chighp) highsat = fil;
742  }
743 
    /* Median hue plus hue total for the average; maxhue (modal count) is
     * tracked but not exported in the visible code. */
744  maxhue = histhue[0];
745  medhue = -1;
746  for (fil = 0; fil < 360; fil++) {
747  tothue += (uint64_t)histhue[fil] * fil;
748  acchue += histhue[fil];
749 
750  if (medhue == -1 && acchue > s->cfs / 2)
751  medhue = fil;
752  if (histhue[fil] > maxhue) {
753  maxhue = histhue[fil];
754  }
755  }
756 
    /* Keep a reference to this frame for the next call's DIF statistics. */
757  av_frame_free(&s->frame_prev);
758  s->frame_prev = av_frame_clone(in);
759 
760 #define SET_META(key, fmt, val) do { \
761  snprintf(metabuf, sizeof(metabuf), fmt, val); \
762  av_dict_set(&out->metadata, "lavfi.signalstats." key, metabuf, 0); \
763 } while (0)
764 
    /* Export all statistics as lavfi.signalstats.* frame metadata. */
765  av_dict_set_int(&out->metadata, "lavfi.signalstats.YMIN", miny, 0);
766  av_dict_set_int(&out->metadata, "lavfi.signalstats.YLOW", lowy, 0);
767  SET_META("YAVG", "%g", 1.0 * toty / s->fs);
768  av_dict_set_int(&out->metadata, "lavfi.signalstats.YHIGH", highy, 0);
769  av_dict_set_int(&out->metadata, "lavfi.signalstats.YMAX", maxy, 0);
770 
771  av_dict_set_int(&out->metadata, "lavfi.signalstats.UMIN", minu, 0);
772  av_dict_set_int(&out->metadata, "lavfi.signalstats.ULOW", lowu, 0);
773  SET_META("UAVG", "%g", 1.0 * totu / s->cfs);
774  av_dict_set_int(&out->metadata, "lavfi.signalstats.UHIGH", highu, 0);
775  av_dict_set_int(&out->metadata, "lavfi.signalstats.UMAX", maxu, 0);
776 
777  av_dict_set_int(&out->metadata, "lavfi.signalstats.VMIN", minv, 0);
778  av_dict_set_int(&out->metadata, "lavfi.signalstats.VLOW", lowv, 0);
779  SET_META("VAVG", "%g", 1.0 * totv / s->cfs);
780  av_dict_set_int(&out->metadata, "lavfi.signalstats.VHIGH", highv, 0);
781  av_dict_set_int(&out->metadata, "lavfi.signalstats.VMAX", maxv, 0);
782 
783  av_dict_set_int(&out->metadata, "lavfi.signalstats.SATMIN", minsat, 0);
784  av_dict_set_int(&out->metadata, "lavfi.signalstats.SATLOW", lowsat, 0);
785  SET_META("SATAVG", "%g", 1.0 * totsat / s->cfs);
786  av_dict_set_int(&out->metadata, "lavfi.signalstats.SATHIGH", highsat, 0);
787  av_dict_set_int(&out->metadata, "lavfi.signalstats.SATMAX", maxsat, 0);
788 
789  av_dict_set_int(&out->metadata, "lavfi.signalstats.HUEMED", medhue, 0);
790  SET_META("HUEAVG", "%g", 1.0 * tothue / s->cfs);
791 
792  SET_META("YDIF", "%g", 1.0 * dify / s->fs);
793  SET_META("UDIF", "%g", 1.0 * difu / s->cfs);
794  SET_META("VDIF", "%g", 1.0 * difv / s->cfs);
795 
796  av_dict_set_int(&out->metadata, "lavfi.signalstats.YBITDEPTH", compute_bit_depth(masky), 0);
797  av_dict_set_int(&out->metadata, "lavfi.signalstats.UBITDEPTH", compute_bit_depth(masku), 0);
798  av_dict_set_int(&out->metadata, "lavfi.signalstats.VBITDEPTH", compute_bit_depth(maskv), 0);
799 
    /* One normalized score per enabled analysis filter. */
800  for (fil = 0; fil < FILT_NUMB; fil ++) {
801  if (s->filters & 1<<fil) {
802  char metaname[128];
803  snprintf(metabuf, sizeof(metabuf), "%g", 1.0 * filtot[fil] / s->fs);
804  snprintf(metaname, sizeof(metaname), "lavfi.signalstats.%s", filters_def[fil].name);
805  av_dict_set(&out->metadata, metaname, metabuf, 0);
806  }
807  }
808 
    /* If we worked on a clone, drop the original input. */
809  if (in != out)
810  av_frame_free(&in);
811  return ff_filter_frame(outlink, out);
812 }
813 
/* Single video input pad; all work happens in filter_frame. */
814 static const AVFilterPad signalstats_inputs[] = {
815  {
816  .name = "default",
817  .type = AVMEDIA_TYPE_VIDEO,
818  .filter_frame = filter_frame,
819  },
820 };
821 
823  {
824  .name = "default",
825  .config_props = config_output,
826  .type = AVMEDIA_TYPE_VIDEO,
827  },
828 };
829 
831  .name = "signalstats",
832  .description = "Generate statistics from video analysis.",
833  .init = init,
834  .uninit = uninit,
835  .priv_size = sizeof(SignalstatsContext),
839  .priv_class = &signalstats_class,
841 };
AVPixelFormat
AVPixelFormat
Pixel format.
Definition: pixfmt.h:71
r
const char * r
Definition: vf_curves.c:127
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
opt.h
compute_sat_hue_metrics16
static int compute_sat_hue_metrics16(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
Definition: vf_signalstats.c:496
SignalstatsContext::vsub
int vsub
Definition: vf_signalstats.c:42
FILTER_PIXFMTS_ARRAY
#define FILTER_PIXFMTS_ARRAY(array)
Definition: filters.h:242
out
FILE * out
Definition: movenc.c:55
av_frame_get_buffer
int av_frame_get_buffer(AVFrame *frame, int align)
Allocate new buffer(s) for audio or video data.
Definition: frame.c:292
SignalstatsContext::rgba_color
uint8_t rgba_color[4]
Definition: vf_signalstats.c:49
SignalstatsContext::chromah
int chromah
Definition: vf_signalstats.c:39
ff_filter_frame
int ff_filter_frame(AVFilterLink *link, AVFrame *frame)
Send a frame of data to the next filter.
Definition: avfilter.c:1062
init
static av_cold int init(AVFilterContext *ctx)
Definition: vf_signalstats.c:90
av_pix_fmt_desc_get
const AVPixFmtDescriptor * av_pix_fmt_desc_get(enum AVPixelFormat pix_fmt)
Definition: pixdesc.c:3170
SET_META
#define SET_META(key, fmt, val)
floorf
static __device__ float floorf(float a)
Definition: cuda_runtime.h:172
filter8_brng
static int filter8_brng(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
Definition: vf_signalstats.c:213
atan2f
#define atan2f(y, x)
Definition: libm.h:45
int64_t
long long int64_t
Definition: coverity.c:34
inlink
The exact code depends on how similar the blocks are and how related they are to the and needs to apply these operations to the correct inlink or outlink if there are several Macros are available to factor that when no extra processing is inlink
Definition: filter_design.txt:212
mask
int mask
Definition: mediacodecdec_common.c:154
AV_RN16
#define AV_RN16(p)
Definition: intreadwrite.h:356
av_frame_free
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:162
FILTER_INPUTS
#define FILTER_INPUTS(array)
Definition: filters.h:262
SignalstatsContext::fs
int fs
Definition: vf_signalstats.c:44
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:389
pixdesc.h
AVFrame::width
int width
Definition: frame.h:461
SignalstatsContext::nb_jobs
int nb_jobs
Definition: vf_signalstats.c:51
w
uint8_t w
Definition: llviddspenc.c:38
AVOption
AVOption.
Definition: opt.h:429
b
#define b
Definition: input.c:41
AV_PIX_FMT_YUV420P10
#define AV_PIX_FMT_YUV420P10
Definition: pixfmt.h:502
VREP_START
#define VREP_START
Definition: vf_signalstats.c:376
SignalstatsContext::histv
int * histv
Definition: vf_signalstats.c:55
ThreadData::in
const AVFrame * in
Definition: vf_signalstats.c:62
AV_PIX_FMT_YUV440P
@ AV_PIX_FMT_YUV440P
planar YUV 4:4:0 (1 Cr & Cb sample per 1x2 Y samples)
Definition: pixfmt.h:106
AVFILTER_DEFINE_CLASS
AVFILTER_DEFINE_CLASS(signalstats)
FFMAX
#define FFMAX(a, b)
Definition: macros.h:47
AVFilter::name
const char * name
Filter name.
Definition: avfilter.h:205
av_popcount
#define av_popcount
Definition: common.h:154
ThreadData::out
AVFrame * out
Definition: af_adeclick.c:526
ThreadData::in
AVFrame * in
Definition: af_adecorrelate.c:155
AVFrame::data
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: frame.h:410
FLAGS
#define FLAGS
Definition: vf_signalstats.c:72
FilterMode
FilterMode
Definition: vp9.h:64
slice_end
static int slice_end(AVCodecContext *avctx, AVFrame *pict, int *got_output)
Handle slice ends.
Definition: mpeg12dec.c:1719
filters_def
static const struct @355 filters_def[]
AV_PIX_FMT_YUV422P9
#define AV_PIX_FMT_YUV422P9
Definition: pixfmt.h:500
SignalstatsContext::histu
int * histu
Definition: vf_signalstats.c:55
filter16_tout
static int filter16_tout(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
Definition: vf_signalstats.c:335
AV_PIX_FMT_GRAY16
#define AV_PIX_FMT_GRAY16
Definition: pixfmt.h:486
filter_tout_outlier
static int filter_tout_outlier(uint8_t x, uint8_t y, uint8_t z)
Definition: vf_signalstats.c:282
pix_fmts
static enum AVPixelFormat pix_fmts[]
Definition: vf_signalstats.c:121
FILT_NUMB
@ FILT_NUMB
Definition: vf_signalstats.c:34
AVFilterPad
A filter pad used for either input or output.
Definition: filters.h:38
av_frame_alloc
AVFrame * av_frame_alloc(void)
Allocate an AVFrame and set its fields to default values.
Definition: frame.c:150
AV_PIX_FMT_YUV444P10
#define AV_PIX_FMT_YUV444P10
Definition: pixfmt.h:505
AV_PIX_FMT_YUVJ411P
@ AV_PIX_FMT_YUVJ411P
planar YUV 4:1:1, 12bpp, (1 Cr & Cb sample per 4x1 Y samples) full scale (JPEG), deprecated in favor ...
Definition: pixfmt.h:283
SignalstatsContext
Definition: vf_signalstats.c:37
uninit
static av_cold void uninit(AVFilterContext *ctx)
Definition: vf_signalstats.c:107
mult
static int16_t mult(Float11 *f1, Float11 *f2)
Definition: g726.c:60
lrint
#define lrint
Definition: tablegen.h:53
FILTER_VREP
@ FILTER_VREP
Definition: vf_signalstats.c:32
av_cold
#define av_cold
Definition: attributes.h:90
AV_PIX_FMT_YUV422P16
#define AV_PIX_FMT_YUV422P16
Definition: pixfmt.h:514
AV_PIX_FMT_YUVJ422P
@ AV_PIX_FMT_YUVJ422P
planar YUV 4:2:2, 16bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV422P and setting col...
Definition: pixfmt.h:86
alloc_frame
static AVFrame * alloc_frame(enum AVPixelFormat pixfmt, int w, int h)
Definition: vf_signalstats.c:136
SignalstatsContext::yuv_color
int yuv_color[3]
Definition: vf_signalstats.c:50
OFFSET
#define OFFSET(x)
Definition: vf_signalstats.c:71
SignalstatsContext::histsat
int * histsat
Definition: vf_signalstats.c:55
intreadwrite.h
s
#define s(width, name)
Definition: cbs_vp9.c:198
AV_PIX_FMT_YUV444P16
#define AV_PIX_FMT_YUV444P16
Definition: pixfmt.h:515
AV_CEIL_RSHIFT
#define AV_CEIL_RSHIFT(a, b)
Definition: common.h:60
g
const char * g
Definition: vf_curves.c:128
filters
#define filters(fmt, type, inverse, clp, inverset, clip, one, clip_fn, packed)
Definition: af_crystalizer.c:55
filters.h
AV_PIX_FMT_YUV420P9
#define AV_PIX_FMT_YUV420P9
Definition: pixfmt.h:499
AV_PIX_FMT_YUV420P16
#define AV_PIX_FMT_YUV420P16
Definition: pixfmt.h:513
ctx
AVFormatContext * ctx
Definition: movenc.c:49
av_frame_clone
AVFrame * av_frame_clone(const AVFrame *src)
Create a new frame that references the same data as src.
Definition: frame.c:597
AV_PIX_FMT_YUV420P
@ AV_PIX_FMT_YUV420P
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
Definition: pixfmt.h:73
FILTER_OUTPUTS
#define FILTER_OUTPUTS(array)
Definition: filters.h:263
link
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a link
Definition: filter_design.txt:23
AV_PIX_FMT_YUVJ444P
@ AV_PIX_FMT_YUVJ444P
planar YUV 4:4:4, 24bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV444P and setting col...
Definition: pixfmt.h:87
ff_inlink_make_frame_writable
int ff_inlink_make_frame_writable(AVFilterLink *link, AVFrame **rframe)
Make sure a frame is writable.
Definition: avfilter.c:1538
arg
const char * arg
Definition: jacosubdec.c:67
AVClass
Describe the class of an AVClass context structure.
Definition: log.h:75
NULL
#define NULL
Definition: coverity.c:32
FILTER_BRNG
@ FILTER_BRNG
Definition: vf_signalstats.c:33
burn_frame16
static void burn_frame16(const SignalstatsContext *s, AVFrame *f, int x, int y)
Definition: vf_signalstats.c:203
filter8_tout
static int filter8_tout(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
Definition: vf_signalstats.c:287
ThreadDataHueSatMetrics::dst_hue
AVFrame * dst_hue
Definition: vf_signalstats.c:68
SignalstatsContext::outfilter
int outfilter
Definition: vf_signalstats.c:46
AV_PIX_FMT_YUVJ420P
@ AV_PIX_FMT_YUVJ420P
planar YUV 4:2:0, 12bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV420P and setting col...
Definition: pixfmt.h:85
AV_OPT_TYPE_COLOR
@ AV_OPT_TYPE_COLOR
Underlying C type is uint8_t[4].
Definition: opt.h:323
AVFilterContext::inputs
AVFilterLink ** inputs
array of pointers to input links
Definition: avfilter.h:465
AV_PIX_FMT_YUV440P10
#define AV_PIX_FMT_YUV440P10
Definition: pixfmt.h:504
abs
#define abs(x)
Definition: cuda_runtime.h:35
AV_PIX_FMT_YUV422P10
#define AV_PIX_FMT_YUV422P10
Definition: pixfmt.h:503
AV_PIX_FMT_GRAY8
@ AV_PIX_FMT_GRAY8
Y , 8bpp.
Definition: pixfmt.h:81
ff_vf_signalstats
const AVFilter ff_vf_signalstats
Definition: vf_signalstats.c:830
SignalstatsContext::hsub
int hsub
Definition: vf_signalstats.c:41
name
const char * name
Definition: vf_signalstats.c:448
signalstats_inputs
static const AVFilterPad signalstats_inputs[]
Definition: vf_signalstats.c:814
burn_frame8
static void burn_frame8(const SignalstatsContext *s, AVFrame *f, int x, int y)
Definition: vf_signalstats.c:194
f
f
Definition: af_crystalizer.c:122
filter_frame
static int filter_frame(AVFilterLink *link, AVFrame *in)
Definition: vf_signalstats.c:540
SignalstatsContext::histy
int * histy
Definition: vf_signalstats.c:55
for
for(k=2;k<=8;++k)
Definition: h264pred_template.c:425
AV_PIX_FMT_YUV422P12
#define AV_PIX_FMT_YUV422P12
Definition: pixfmt.h:507
FILTER3
#define FILTER3(j)
AV_PIX_FMT_YUV444P12
#define AV_PIX_FMT_YUV444P12
Definition: pixfmt.h:509
signalstats_outputs
static const AVFilterPad signalstats_outputs[]
Definition: vf_signalstats.c:822
ThreadDataHueSatMetrics::dst_sat
AVFrame * dst_sat
Definition: vf_signalstats.c:68
ThreadDataHueSatMetrics
Definition: vf_signalstats.c:66
M_PI
#define M_PI
Definition: mathematics.h:67
compute_sat_hue_metrics8
static int compute_sat_hue_metrics8(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
Definition: vf_signalstats.c:458
SignalstatsContext::filters
int filters
Definition: vf_signalstats.c:47
i
#define i(width, name, range_min, range_max)
Definition: cbs_h2645.c:256
process8
int(* process8)(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
Definition: vf_signalstats.c:449
FILTER_TOUT
@ FILTER_TOUT
Definition: vf_signalstats.c:31
SignalstatsContext::frame_prev
AVFrame * frame_prev
Definition: vf_signalstats.c:48
av_malloc_array
#define av_malloc_array(a, b)
Definition: tableprint_vlc.h:31
ff_filter_get_nb_threads
int ff_filter_get_nb_threads(AVFilterContext *ctx)
Get number of threads for current filter instance.
Definition: avfilter.c:841
SignalstatsContext::maxsize
int maxsize
Definition: vf_signalstats.c:54
config_output
static int config_output(AVFilterLink *outlink)
Definition: vf_signalstats.c:154
ThreadData
Used for passing data between threads.
Definition: dsddec.c:71
process16
int(* process16)(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
Definition: vf_signalstats.c:450
FFMIN
#define FFMIN(a, b)
Definition: macros.h:49
AV_PIX_FMT_YUVJ440P
@ AV_PIX_FMT_YUVJ440P
planar YUV 4:4:0 full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV440P and setting color_range
Definition: pixfmt.h:107
SignalstatsContext::depth
int depth
Definition: vf_signalstats.c:43
filt
static const int8_t filt[NUMTAPS *2]
Definition: af_earwax.c:40
AVFilterPad::name
const char * name
Pad name.
Definition: filters.h:44
SignalstatsContext::chromaw
int chromaw
Definition: vf_signalstats.c:40
AV_PIX_FMT_YUV444P9
#define AV_PIX_FMT_YUV444P9
Definition: pixfmt.h:501
slice_start
static int slice_start(SliceContext *sc, VVCContext *s, VVCFrameContext *fc, const CodedBitstreamUnit *unit, const int is_first_slice)
Definition: dec.c:738
AVFilter
Filter definition.
Definition: avfilter.h:201
ret
ret
Definition: filter_design.txt:187
pixfmt
enum AVPixelFormat pixfmt
Definition: kmsgrab.c:367
frame
these buffered frames must be flushed immediately if a new input produces new the filter must not call request_frame to get more It must just process the frame or queue it The task of requesting more frames is left to the filter s request_frame method or the application If a filter has several the filter must be ready for frames arriving randomly on any input any filter with several inputs will most likely require some kind of queuing mechanism It is perfectly acceptable to have a limited queue and to drop frames when the inputs are too unbalanced request_frame For filters that do not use the this method is called when a frame is wanted on an output For a it should directly call filter_frame on the corresponding output For a if there are queued frames already one of these frames should be pushed If the filter should request a frame on one of its repeatedly until at least one frame has been pushed Return or at least make progress towards producing a frame
Definition: filter_design.txt:264
SignalstatsContext::cfs
int cfs
Definition: vf_signalstats.c:45
AV_PIX_FMT_YUV420P12
#define AV_PIX_FMT_YUV420P12
Definition: pixfmt.h:506
filter16_brng
static int filter16_brng(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
Definition: vf_signalstats.c:247
AV_PIX_FMT_YUV422P14
#define AV_PIX_FMT_YUV422P14
Definition: pixfmt.h:511
AVFrame::height
int height
Definition: frame.h:461
SignalstatsContext::frame_sat
AVFrame * frame_sat
Definition: vf_signalstats.c:57
ff_filter_execute
int ff_filter_execute(AVFilterContext *ctx, avfilter_action_func *func, void *arg, int *ret, int nb_jobs)
Definition: avfilter.c:1667
FILTER_NONE
@ FILTER_NONE
Definition: vf_signalstats.c:30
signalstats_options
static const AVOption signalstats_options[]
Definition: vf_signalstats.c:74
AV_PIX_FMT_NONE
@ AV_PIX_FMT_NONE
Definition: pixfmt.h:72
AV_OPT_TYPE_INT
@ AV_OPT_TYPE_INT
Underlying C type is int.
Definition: opt.h:259
filter8_vrep
static int filter8_vrep(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
Definition: vf_signalstats.c:378
AV_PIX_FMT_YUV444P
@ AV_PIX_FMT_YUV444P
planar YUV 4:4:4, 24bpp, (1 Cr & Cb sample per 1x1 Y samples)
Definition: pixfmt.h:78
AVFilterContext
An instance of a filter.
Definition: avfilter.h:457
av_dict_set_int
int av_dict_set_int(AVDictionary **pm, const char *key, int64_t value, int flags)
Convenience wrapper for av_dict_set() that converts the value to a string and stores it.
Definition: dict.c:167
AVFILTER_FLAG_SLICE_THREADS
#define AVFILTER_FLAG_SLICE_THREADS
The filter supports multithreading by splitting frames into multiple parts and processing them concur...
Definition: avfilter.h:152
desc
const char * desc
Definition: libsvtav1.c:79
AVMEDIA_TYPE_VIDEO
@ AVMEDIA_TYPE_VIDEO
Definition: avutil.h:201
AV_PIX_FMT_YUV422P
@ AV_PIX_FMT_YUV422P
planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
Definition: pixfmt.h:77
mem.h
AVPixFmtDescriptor
Descriptor that unambiguously describes how the bits of a pixel are stored in the up to 4 data planes...
Definition: pixdesc.h:69
filter16_vrep
static int filter16_vrep(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
Definition: vf_signalstats.c:412
av_freep
#define av_freep(p)
Definition: tableprint_vlc.h:34
av_dict_set
int av_dict_set(AVDictionary **pm, const char *key, const char *value, int flags)
Set the given entry in *pm, overwriting an existing entry.
Definition: dict.c:88
AV_PIX_FMT_YUV411P
@ AV_PIX_FMT_YUV411P
planar YUV 4:1:1, 12bpp, (1 Cr & Cb sample per 4x1 Y samples)
Definition: pixfmt.h:80
AV_OPT_TYPE_FLAGS
@ AV_OPT_TYPE_FLAGS
Underlying C type is unsigned int.
Definition: opt.h:255
AVFrame::linesize
int linesize[AV_NUM_DATA_POINTERS]
For video, a positive or negative value, which is typically indicating the size in bytes of each pict...
Definition: frame.h:434
SignalstatsContext::jobs_rets
int * jobs_rets
Definition: vf_signalstats.c:52
AV_PIX_FMT_YUV440P12
#define AV_PIX_FMT_YUV440P12
Definition: pixfmt.h:508
h
h
Definition: vp9dsp_template.c:2070
AV_PIX_FMT_YUV444P14
#define AV_PIX_FMT_YUV444P14
Definition: pixfmt.h:512
compute_bit_depth
static unsigned compute_bit_depth(uint16_t mask)
Definition: vf_signalstats.c:535
SignalstatsContext::frame_hue
AVFrame * frame_hue
Definition: vf_signalstats.c:58
AV_OPT_TYPE_CONST
@ AV_OPT_TYPE_CONST
Special option type for declaring named constants.
Definition: opt.h:299
snprintf
#define snprintf
Definition: snprintf.h:34
ThreadDataHueSatMetrics::src
const AVFrame * src
Definition: vf_signalstats.c:67
src
#define src
Definition: vp8dsp.c:248
AV_PIX_FMT_YUV420P14
#define AV_PIX_FMT_YUV420P14
Definition: pixfmt.h:510
AV_WN16
#define AV_WN16(p, v)
Definition: intreadwrite.h:368