/* vf_phase.c */
  1. /*
  2. * Copyright (c) 2004 Ville Saari
  3. *
  4. * This file is part of FFmpeg.
  5. *
  6. * FFmpeg is free software; you can redistribute it and/or
  7. * modify it under the terms of the GNU General Public
  8. * License as published by the Free Software Foundation; either
  9. * version 2 of the License, or (at your option) any later version.
  10. *
  11. * FFmpeg is distributed in the hope that it will be useful,
  12. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  13. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  14. * GNU General Public License for more details.
  15. *
  16. * You should have received a copy of the GNU General Public License along
  17. * with FFmpeg; if not, write to the Free Software
  18. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  19. */
  20. #include "libavutil/avassert.h"
  21. #include "libavutil/imgutils.h"
  22. #include "libavutil/pixdesc.h"
  23. #include "libavutil/opt.h"
  24. #include "avfilter.h"
  25. #include "formats.h"
  26. #include "internal.h"
  27. #include "video.h"
/*
 * Field-phase handling modes. Values up to BOTTOM_FIRST select a fixed
 * phase; the *_ANALYZE values pick the phase per frame by comparing the
 * current frame's fields against the previous frame's.
 */
enum PhaseMode {
    PROGRESSIVE,          // assume progressive content, pass lines through
    TOP_FIRST,            // capture field order is top field first
    BOTTOM_FIRST,         // capture field order is bottom field first
    TOP_FIRST_ANALYZE,    // analyze, biased towards top-field-first
    BOTTOM_FIRST_ANALYZE, // analyze, biased towards bottom-field-first
    ANALYZE,              // analyze t vs b only (progressive excluded)
    FULL_ANALYZE,         // analyze t, b and p with no bias
    AUTO,                 // use the frame's interlacing flags directly
    AUTO_ANALYZE          // use the frame's flags to choose an analyze mode
};
typedef struct PhaseContext {
    const AVClass *class;
    int mode;           ///< PhaseMode selected via the "mode" option
    AVFrame *frame;     ///< previous input frame (reference kept between calls)
    int nb_planes;      ///< number of planes in the input pixel format
    int planeheight[4]; ///< height of each plane in lines
    int linesize[4];    ///< bytes per line to copy for each plane
} PhaseContext;
#define OFFSET(x) offsetof(PhaseContext, x)
#define FLAGS AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM
/* Shorthand for the named constants of the "mode" option below. */
#define CONST(name, help, val, unit) { name, help, 0, AV_OPT_TYPE_CONST, {.i64=val}, 0, 0, FLAGS, unit }

static const AVOption phase_options[] = {
    /* default is AUTO_ANALYZE: trust frame flags, analyze when needed */
    { "mode", "set phase mode", OFFSET(mode), AV_OPT_TYPE_INT, {.i64=AUTO_ANALYZE}, PROGRESSIVE, AUTO_ANALYZE, FLAGS, "mode" },
    CONST("p", "progressive",          PROGRESSIVE,          "mode"),
    CONST("t", "top first",            TOP_FIRST,            "mode"),
    CONST("b", "bottom first",         BOTTOM_FIRST,         "mode"),
    CONST("T", "top first analyze",    TOP_FIRST_ANALYZE,    "mode"),
    CONST("B", "bottom first analyze", BOTTOM_FIRST_ANALYZE, "mode"),
    CONST("u", "analyze",              ANALYZE,              "mode"),
    CONST("U", "full analyze",         FULL_ANALYZE,         "mode"),
    CONST("a", "auto",                 AUTO,                 "mode"),
    CONST("A", "auto analyze",         AUTO_ANALYZE,         "mode"),
    { NULL }
};

AVFILTER_DEFINE_CLASS(phase);
  64. static int query_formats(AVFilterContext *ctx)
  65. {
  66. static const enum AVPixelFormat pix_fmts[] = {
  67. AV_PIX_FMT_YUVA444P, AV_PIX_FMT_YUVA422P, AV_PIX_FMT_YUVA420P,
  68. AV_PIX_FMT_YUVJ444P, AV_PIX_FMT_YUVJ440P, AV_PIX_FMT_YUVJ422P,AV_PIX_FMT_YUVJ420P, AV_PIX_FMT_YUVJ411P,
  69. AV_PIX_FMT_YUV444P, AV_PIX_FMT_YUV440P, AV_PIX_FMT_YUV422P, AV_PIX_FMT_YUV420P, AV_PIX_FMT_YUV411P, AV_PIX_FMT_YUV410P,
  70. AV_PIX_FMT_GBRP, AV_PIX_FMT_GBRAP, AV_PIX_FMT_GRAY8, AV_PIX_FMT_NONE
  71. };
  72. AVFilterFormats *fmts_list = ff_make_format_list(pix_fmts);
  73. if (!fmts_list)
  74. return AVERROR(ENOMEM);
  75. return ff_set_common_formats(ctx, fmts_list);
  76. }
  77. static int config_input(AVFilterLink *inlink)
  78. {
  79. PhaseContext *s = inlink->dst->priv;
  80. const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(inlink->format);
  81. int ret;
  82. if ((ret = av_image_fill_linesizes(s->linesize, inlink->format, inlink->w)) < 0)
  83. return ret;
  84. s->planeheight[1] = s->planeheight[2] = AV_CEIL_RSHIFT(inlink->h, desc->log2_chroma_h);
  85. s->planeheight[0] = s->planeheight[3] = inlink->h;
  86. s->nb_planes = av_pix_fmt_count_planes(inlink->format);
  87. return 0;
  88. }
  89. /*
  90. * This macro interpolates the value of both fields at a point halfway
  91. * between lines and takes the squared difference. In field resolution
  92. * the point is a quarter pixel below a line in one field and a quarter
  93. * pixel above a line in other.
  94. *
  95. * (The result is actually multiplied by 25)
  96. */
  97. #define DIFF(a, as, b, bs) ((t) = ((*(a) - (b)[bs]) << 2) + (a)[(as) << 1] - (b)[-(bs)], (t) * (t))
  98. /*
  99. * Find which field combination has the smallest average squared difference
  100. * between the fields.
  101. */
/*
 * Find which field combination has the smallest average squared difference
 * between the fields.
 *
 * Compares the luma plane of 'new' against 'old' and returns one of
 * PROGRESSIVE, TOP_FIRST or BOTTOM_FIRST. The frames are only read.
 */
static enum PhaseMode analyze_plane(void *ctx, enum PhaseMode mode, AVFrame *old, AVFrame *new)
{
    double bdiff, tdiff, pdiff;

    /* Resolve the automatic modes from the frame's interlacing metadata:
     * AUTO trusts the flags outright, AUTO_ANALYZE only uses them to pick
     * which analysis to run below. */
    if (mode == AUTO) {
        mode = new->interlaced_frame ? new->top_field_first ?
               TOP_FIRST : BOTTOM_FIRST : PROGRESSIVE;
    } else if (mode == AUTO_ANALYZE) {
        mode = new->interlaced_frame ? new->top_field_first ?
               TOP_FIRST_ANALYZE : BOTTOM_FIRST_ANALYZE : FULL_ANALYZE;
    }

    if (mode <= BOTTOM_FIRST) {
        /* Fixed mode: skip analysis; the sentinel values only feed the
         * debug log at the end. */
        bdiff = pdiff = tdiff = 65536.0;
    } else {
        const int ns = new->linesize[0];
        const int os = old->linesize[0];
        const uint8_t *nptr = new->data[0];
        const uint8_t *optr = old->data[0];
        const int h = new->height;
        const int w = new->width;
        int bdif, tdif, pdif;
        double scale;

        /* 'top' toggles each line: whether the current line belongs to the
         * top field. 't' is the scratch variable required by DIFF(). */
        int top = 0, t;
        /* Walk lines 1 .. h-3: DIFF() reads one line above and below, so the
         * first and the last two lines are excluded. */
        const uint8_t *rend, *end = nptr + (h - 2) * ns;

        bdiff = pdiff = tdiff = 0.0;

        nptr += ns;
        optr += os;
        while (nptr < end) {
            pdif = tdif = bdif = 0;

            /* Per mode, accumulate only the candidate combinations:
             * pdif — both fields from the new frame (progressive),
             * tdif — top field shifted back by one frame,
             * bdif — bottom field shifted back by one frame.
             * The DIFF argument order flips with 'top' so the interpolated
             * half-line point always sits between the right pair of lines. */
            switch (mode) {
            case TOP_FIRST_ANALYZE:
                if (top) {
                    for (rend = nptr + w; nptr < rend; nptr++, optr++) {
                        pdif += DIFF(nptr, ns, nptr, ns);
                        tdif += DIFF(nptr, ns, optr, os);
                    }
                } else {
                    for (rend = nptr + w; nptr < rend; nptr++, optr++) {
                        pdif += DIFF(nptr, ns, nptr, ns);
                        tdif += DIFF(optr, os, nptr, ns);
                    }
                }
                break;
            case BOTTOM_FIRST_ANALYZE:
                if (top) {
                    for (rend = nptr + w; nptr < rend; nptr++, optr++) {
                        pdif += DIFF(nptr, ns, nptr, ns);
                        bdif += DIFF(optr, os, nptr, ns);
                    }
                } else {
                    for (rend = nptr + w; nptr < rend; nptr++, optr++) {
                        pdif += DIFF(nptr, ns, nptr, ns);
                        bdif += DIFF(nptr, ns, optr, os);
                    }
                }
                break;
            case ANALYZE:
                if (top) {
                    for (rend = nptr + w; nptr < rend; nptr++, optr++) {
                        tdif += DIFF(nptr, ns, optr, os);
                        bdif += DIFF(optr, os, nptr, ns);
                    }
                } else {
                    for (rend = nptr + w; nptr < rend; nptr++, optr++) {
                        bdif += DIFF(nptr, ns, optr, os);
                        tdif += DIFF(optr, os, nptr, ns);
                    }
                }
                break;
            case FULL_ANALYZE:
                if (top) {
                    for (rend = nptr + w; nptr < rend; nptr++, optr++) {
                        pdif += DIFF(nptr, ns, nptr, ns);
                        tdif += DIFF(nptr, ns, optr, os);
                        bdif += DIFF(optr, os, nptr, ns);
                    }
                } else {
                    for (rend = nptr + w; nptr < rend; nptr++, optr++) {
                        pdif += DIFF(nptr, ns, nptr, ns);
                        bdif += DIFF(nptr, ns, optr, os);
                        tdif += DIFF(optr, os, nptr, ns);
                    }
                }
                break;
            default:
                av_assert0(0);
            }

            /* Fold the per-line int sums into doubles so wide/tall frames
             * cannot overflow the totals. */
            pdiff += (double)pdif;
            tdiff += (double)tdif;
            bdiff += (double)bdif;

            /* Advance to the start of the next line (the inner loops left
             * the pointers w bytes into the current one). */
            nptr += ns - w;
            optr += os - w;
            top ^= 1;
        }

        /* Normalize by the number of compared samples (w * (h-3) points)
         * and by the factor 25 built into DIFF(). */
        scale = 1.0 / (w * (h - 3)) / 25.0;
        pdiff *= scale;
        tdiff *= scale;
        bdiff *= scale;

        /* Exclude combinations the biased modes never measured by giving
         * them an impossibly large score. */
        if (mode == TOP_FIRST_ANALYZE) {
            bdiff = 65536.0;
        } else if (mode == BOTTOM_FIRST_ANALYZE) {
            tdiff = 65536.0;
        } else if (mode == ANALYZE) {
            pdiff = 65536.0;
        }

        /* Pick the combination with the strictly smallest average squared
         * difference; ties fall through to PROGRESSIVE. */
        if (bdiff < pdiff && bdiff < tdiff) {
            mode = BOTTOM_FIRST;
        } else if (tdiff < pdiff && tdiff < bdiff) {
            mode = TOP_FIRST;
        } else {
            mode = PROGRESSIVE;
        }
    }

    av_log(ctx, AV_LOG_DEBUG, "mode=%c tdiff=%f bdiff=%f pdiff=%f\n",
           mode == BOTTOM_FIRST ? 'b' : mode == TOP_FIRST ? 't' : 'p',
           tdiff, bdiff, pdiff);

    return mode;
}
static int filter_frame(AVFilterLink *inlink, AVFrame *in)
{
    AVFilterContext *ctx = inlink->dst;
    AVFilterLink *outlink = ctx->outputs[0];
    PhaseContext *s = ctx->priv;
    enum PhaseMode mode;
    int plane, top, y;
    AVFrame *out;

    if (ctx->is_disabled) {
        av_frame_free(&s->frame);
        /* we keep a reference to the previous frame so the filter can start
         * being useful as soon as it's not disabled, avoiding the 1-frame
         * delay. */
        s->frame = av_frame_clone(in);
        return ff_filter_frame(outlink, in);
    }

    out = ff_get_video_buffer(outlink, outlink->w, outlink->h);
    if (!out) {
        av_frame_free(&in);
        return AVERROR(ENOMEM);
    }
    av_frame_copy_props(out, in);

    if (!s->frame) {
        /* First frame: no reference to analyze against, so the copy loop
         * below effectively passes 'in' through (buf == from). */
        s->frame = in;
        mode = PROGRESSIVE;
    } else {
        mode = analyze_plane(ctx, s->mode, s->frame, in);
    }

    /* Weave the output: for the field being phase-shifted, take each line
     * from the previous frame; for the other field, from the current one.
     * 'top' starts at 1 because line 0 belongs to the top field. */
    for (plane = 0; plane < s->nb_planes; plane++) {
        const uint8_t *buf = s->frame->data[plane];
        const uint8_t *from = in->data[plane];
        uint8_t *to = out->data[plane];

        for (y = 0, top = 1; y < s->planeheight[plane]; y++, top ^= 1) {
            memcpy(to, mode == (top ? BOTTOM_FIRST : TOP_FIRST) ? buf : from, s->linesize[plane]);

            buf += s->frame->linesize[plane];
            from += in->linesize[plane];
            to += out->linesize[plane];
        }
    }

    /* 'in' becomes the new reference; guard against double-free on the
     * first frame where s->frame already aliases 'in'. */
    if (in != s->frame)
        av_frame_free(&s->frame);
    s->frame = in;

    return ff_filter_frame(outlink, out);
}
  263. static av_cold void uninit(AVFilterContext *ctx)
  264. {
  265. PhaseContext *s = ctx->priv;
  266. av_frame_free(&s->frame);
  267. }
/* Single video input: geometry is cached in config_input, frames are
 * processed one at a time in filter_frame. */
static const AVFilterPad phase_inputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .filter_frame = filter_frame,
        .config_props = config_input,
    },
    { NULL }
};

/* Single video output, same dimensions and format as the input. */
static const AVFilterPad phase_outputs[] = {
    {
        .name = "default",
        .type = AVMEDIA_TYPE_VIDEO,
    },
    { NULL }
};
AVFilter ff_vf_phase = {
    .name          = "phase",
    .description   = NULL_IF_CONFIG_SMALL("Phase shift fields."),
    .priv_size     = sizeof(PhaseContext),
    .priv_class    = &phase_class,
    .uninit        = uninit,
    .query_formats = query_formats,
    .inputs        = phase_inputs,
    .outputs       = phase_outputs,
    /* TIMELINE_INTERNAL: filter_frame handles ctx->is_disabled itself so it
     * can keep caching the previous frame while disabled. */
    .flags         = AVFILTER_FLAG_SUPPORT_TIMELINE_INTERNAL,
};