/*
 * Copyright (c) 2002 Michael Niedermayer <michaelni@gmx.at>
 * Copyright (c) 2012 Jeremy Tran
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with FFmpeg; if not, write to the Free Software Foundation, Inc.,
 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 */

/**
 * @file
 * Apply a smartblur filter to the input video
 * Ported from MPlayer libmpcodecs/vf_smartblur.c by Michael Niedermayer.
 */

#include "libavutil/opt.h"
#include "libavutil/pixdesc.h"
#include "libswscale/swscale.h"

#include "avfilter.h"
#include "formats.h"
#include "internal.h"

#define RADIUS_MIN 0.1
#define RADIUS_MAX 5.0

#define STRENGTH_MIN -1.0
#define STRENGTH_MAX 1.0

#define THRESHOLD_MIN -30
#define THRESHOLD_MAX 30

typedef struct {
    float              radius;
    float              strength;
    int                threshold;
    float              quality;
    struct SwsContext *filter_context;
} FilterParam;

typedef struct {
    const AVClass *class;
    FilterParam    luma;
    FilterParam    chroma;
    int            hsub;
    int            vsub;
    unsigned int   sws_flags;
} SmartblurContext;

#define OFFSET(x) offsetof(SmartblurContext, x)
#define FLAGS AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM

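/*
 * The chroma options accept and default to a value just below the
 * corresponding luma minimum; init() treats anything below the luma
 * minimum as "not set" and falls back to the luma value.
 */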
static const AVOption smartblur_options[] = {
    { "luma_radius",    "set luma radius",    OFFSET(luma.radius),    AV_OPT_TYPE_FLOAT, {.dbl=1.0}, RADIUS_MIN, RADIUS_MAX, .flags=FLAGS },
    { "lr"         ,    "set luma radius",    OFFSET(luma.radius),    AV_OPT_TYPE_FLOAT, {.dbl=1.0}, RADIUS_MIN, RADIUS_MAX, .flags=FLAGS },
    { "luma_strength",  "set luma strength",  OFFSET(luma.strength),  AV_OPT_TYPE_FLOAT, {.dbl=1.0}, STRENGTH_MIN, STRENGTH_MAX, .flags=FLAGS },
    { "ls",             "set luma strength",  OFFSET(luma.strength),  AV_OPT_TYPE_FLOAT, {.dbl=1.0}, STRENGTH_MIN, STRENGTH_MAX, .flags=FLAGS },
    { "luma_threshold", "set luma threshold", OFFSET(luma.threshold), AV_OPT_TYPE_INT,   {.i64=0},   THRESHOLD_MIN, THRESHOLD_MAX, .flags=FLAGS },
    { "lt",             "set luma threshold", OFFSET(luma.threshold), AV_OPT_TYPE_INT,   {.i64=0},   THRESHOLD_MIN, THRESHOLD_MAX, .flags=FLAGS },

    { "chroma_radius",    "set chroma radius",    OFFSET(chroma.radius),    AV_OPT_TYPE_FLOAT, {.dbl=RADIUS_MIN-1},    RADIUS_MIN-1,    RADIUS_MAX,    .flags=FLAGS },
    { "cr",               "set chroma radius",    OFFSET(chroma.radius),    AV_OPT_TYPE_FLOAT, {.dbl=RADIUS_MIN-1},    RADIUS_MIN-1,    RADIUS_MAX,    .flags=FLAGS },
    { "chroma_strength",  "set chroma strength",  OFFSET(chroma.strength),  AV_OPT_TYPE_FLOAT, {.dbl=STRENGTH_MIN-1},  STRENGTH_MIN-1,  STRENGTH_MAX,  .flags=FLAGS },
    { "cs",               "set chroma strength",  OFFSET(chroma.strength),  AV_OPT_TYPE_FLOAT, {.dbl=STRENGTH_MIN-1},  STRENGTH_MIN-1,  STRENGTH_MAX,  .flags=FLAGS },
    { "chroma_threshold", "set chroma threshold", OFFSET(chroma.threshold), AV_OPT_TYPE_INT,   {.i64=THRESHOLD_MIN-1}, THRESHOLD_MIN-1, THRESHOLD_MAX, .flags=FLAGS },
    { "ct",               "set chroma threshold", OFFSET(chroma.threshold), AV_OPT_TYPE_INT,   {.i64=THRESHOLD_MIN-1}, THRESHOLD_MIN-1, THRESHOLD_MAX, .flags=FLAGS },

    { NULL }
};

AVFILTER_DEFINE_CLASS(smartblur);

static av_cold int init(AVFilterContext *ctx)
{
    SmartblurContext *sblur = ctx->priv;

    /* make chroma default to luma values, if not explicitly set */
    if (sblur->chroma.radius < RADIUS_MIN)
        sblur->chroma.radius = sblur->luma.radius;
    if (sblur->chroma.strength < STRENGTH_MIN)
        sblur->chroma.strength = sblur->luma.strength;
    if (sblur->chroma.threshold < THRESHOLD_MIN)
        sblur->chroma.threshold = sblur->luma.threshold;

    sblur->luma.quality = sblur->chroma.quality = 3.0;
    sblur->sws_flags = SWS_BICUBIC;

    av_log(ctx, AV_LOG_VERBOSE,
           "luma_radius:%f luma_strength:%f luma_threshold:%d "
           "chroma_radius:%f chroma_strength:%f chroma_threshold:%d\n",
           sblur->luma.radius, sblur->luma.strength, sblur->luma.threshold,
           sblur->chroma.radius, sblur->chroma.strength, sblur->chroma.threshold);

    return 0;
}

static av_cold void uninit(AVFilterContext *ctx)
{
    SmartblurContext *sblur = ctx->priv;

    sws_freeContext(sblur->luma.filter_context);
    sws_freeContext(sblur->chroma.filter_context);
}

static int query_formats(AVFilterContext *ctx)
{
    static const enum AVPixelFormat pix_fmts[] = {
        AV_PIX_FMT_YUV444P, AV_PIX_FMT_YUV422P,
        AV_PIX_FMT_YUV420P, AV_PIX_FMT_YUV411P,
        AV_PIX_FMT_YUV410P, AV_PIX_FMT_YUV440P,
        AV_PIX_FMT_GRAY8,
        AV_PIX_FMT_NONE
    };

    ff_set_common_formats(ctx, ff_make_format_list(pix_fmts));

    return 0;
}

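/*
 * Build a single-plane swscale context that performs the actual blur:
 * a Gaussian vector of the requested radius/quality is scaled by
 * 'strength' and its center tap is adjusted so the kernel still sums
 * to 1, blending the blurred and the original pixel. A negative
 * strength therefore sharpens instead of blurring.
 */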
static int alloc_sws_context(FilterParam *f, int width, int height, unsigned int flags)
{
    SwsVector *vec;
    SwsFilter sws_filter;

    vec = sws_getGaussianVec(f->radius, f->quality);

    if (!vec)
        return AVERROR(EINVAL);

    sws_scaleVec(vec, f->strength);
    vec->coeff[vec->length / 2] += 1.0 - f->strength;
    sws_filter.lumH = sws_filter.lumV = vec;
    sws_filter.chrH = sws_filter.chrV = NULL;
    f->filter_context = sws_getCachedContext(NULL,
                                             width, height, AV_PIX_FMT_GRAY8,
                                             width, height, AV_PIX_FMT_GRAY8,
                                             flags, &sws_filter, NULL, NULL);

    sws_freeVec(vec);

    if (!f->filter_context)
        return AVERROR(EINVAL);

    return 0;
}

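/*
 * One scaler context per plane type: the luma context works at full
 * resolution, the chroma context at the subsampled resolution given by
 * the pixel format's log2 chroma shifts.
 */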
static int config_props(AVFilterLink *inlink)
{
    SmartblurContext *sblur = inlink->dst->priv;
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(inlink->format);

    sblur->hsub = desc->log2_chroma_w;
    sblur->vsub = desc->log2_chroma_h;

    alloc_sws_context(&sblur->luma, inlink->w, inlink->h, sblur->sws_flags);
    alloc_sws_context(&sblur->chroma,
                      FF_CEIL_RSHIFT(inlink->w, sblur->hsub),
                      FF_CEIL_RSHIFT(inlink->h, sblur->vsub),
                      sblur->sws_flags);

    return 0;
}

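/*
 * Blur one plane in two passes: sws_scale() applies the Gaussian kernel,
 * then the optional threshold pass limits how far a filtered pixel may
 * drift from the original. With a positive threshold, pixels whose
 * difference exceeds 2*threshold are restored, so strong edges are kept
 * and only flat areas stay blurred. With a negative threshold the test
 * is inverted: only pixels near strong edges keep the filtered value,
 * restricting the effect to the outlines.
 */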
static void blur(uint8_t       *dst, const int dst_linesize,
                 const uint8_t *src, const int src_linesize,
                 const int w, const int h, const int threshold,
                 struct SwsContext *filter_context)
{
    int x, y;
    int orig, filtered;
    int diff;
    /* Declare arrays of 4 to get aligned data */
    const uint8_t* const src_array[4] = {src};
    uint8_t *dst_array[4]             = {dst};
    int src_linesize_array[4] = {src_linesize};
    int dst_linesize_array[4] = {dst_linesize};

    sws_scale(filter_context, src_array, src_linesize_array,
              0, h, dst_array, dst_linesize_array);

    if (threshold > 0) {
        for (y = 0; y < h; ++y) {
            for (x = 0; x < w; ++x) {
                orig     = src[x + y * src_linesize];
                filtered = dst[x + y * dst_linesize];
                diff     = orig - filtered;

                if (diff > 0) {
                    if (diff > 2 * threshold)
                        dst[x + y * dst_linesize] = orig;
                    else if (diff > threshold)
                        /* add 'diff' and subtract 'threshold' from 'filtered' */
                        dst[x + y * dst_linesize] = orig - threshold;
                } else {
                    if (-diff > 2 * threshold)
                        dst[x + y * dst_linesize] = orig;
                    else if (-diff > threshold)
                        /* add 'diff' and 'threshold' to 'filtered' */
                        dst[x + y * dst_linesize] = orig + threshold;
                }
            }
        }
    } else if (threshold < 0) {
        for (y = 0; y < h; ++y) {
            for (x = 0; x < w; ++x) {
                orig     = src[x + y * src_linesize];
                filtered = dst[x + y * dst_linesize];
                diff     = orig - filtered;

                if (diff > 0) {
                    if (diff <= -threshold)
                        dst[x + y * dst_linesize] = orig;
                    else if (diff <= -2 * threshold)
                        /* subtract 'diff' and 'threshold' from 'orig' */
                        dst[x + y * dst_linesize] = filtered - threshold;
                } else {
                    if (diff >= threshold)
                        dst[x + y * dst_linesize] = orig;
                    else if (diff >= 2 * threshold)
                        /* add 'threshold' and subtract 'diff' from 'orig' */
                        dst[x + y * dst_linesize] = filtered + threshold;
                }
            }
        }
    }
}

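/*
 * Process the luma plane at full size and, when the format is not
 * grayscale, the two chroma planes at their subsampled size.
 */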
static int filter_frame(AVFilterLink *inlink, AVFrame *inpic)
{
    SmartblurContext *sblur = inlink->dst->priv;
    AVFilterLink *outlink   = inlink->dst->outputs[0];
    AVFrame *outpic;
    int cw = FF_CEIL_RSHIFT(inlink->w, sblur->hsub);
    int ch = FF_CEIL_RSHIFT(inlink->h, sblur->vsub);

    outpic = ff_get_video_buffer(outlink, outlink->w, outlink->h);
    if (!outpic) {
        av_frame_free(&inpic);
        return AVERROR(ENOMEM);
    }
    av_frame_copy_props(outpic, inpic);

    blur(outpic->data[0], outpic->linesize[0],
         inpic->data[0],  inpic->linesize[0],
         inlink->w, inlink->h, sblur->luma.threshold,
         sblur->luma.filter_context);

    if (inpic->data[2]) {
        blur(outpic->data[1], outpic->linesize[1],
             inpic->data[1],  inpic->linesize[1],
             cw, ch, sblur->chroma.threshold,
             sblur->chroma.filter_context);
        blur(outpic->data[2], outpic->linesize[2],
             inpic->data[2],  inpic->linesize[2],
             cw, ch, sblur->chroma.threshold,
             sblur->chroma.filter_context);
    }

    av_frame_free(&inpic);

    return ff_filter_frame(outlink, outpic);
}

static const AVFilterPad smartblur_inputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .filter_frame = filter_frame,
        .config_props = config_props,
    },
    { NULL }
};

static const AVFilterPad smartblur_outputs[] = {
    {
        .name = "default",
        .type = AVMEDIA_TYPE_VIDEO,
    },
    { NULL }
};

AVFilter ff_vf_smartblur = {
    .name          = "smartblur",
    .description   = NULL_IF_CONFIG_SMALL("Blur the input video without impacting the outlines."),
    .priv_size     = sizeof(SmartblurContext),
    .init          = init,
    .uninit        = uninit,
    .query_formats = query_formats,
    .inputs        = smartblur_inputs,
    .outputs       = smartblur_outputs,
    .priv_class    = &smartblur_class,
    .flags         = AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC,
};