/*
 * Copyright (C) 2012 Michael Niedermayer <michaelni@gmx.at>
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
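
/**
 * @file
 * Interlace detection filter: classifies each frame as top field first,
 * bottom field first, progressive or undetermined by comparing field-wise
 * line differences between the current frame and its neighbours.
 */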
#include "libavutil/cpu.h"
#include "libavutil/common.h"
#include "libavutil/pixdesc.h"
#include "avfilter.h"

#undef NDEBUG
#include <assert.h>
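
/* Number of recent per-frame detection results kept to smooth the
 * multi-frame ("last_type") decision. */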
#define HIST_SIZE 4

typedef enum {
    TFF,
    BFF,
    PROGRSSIVE,
    UNDETERMINED,
} Type;

typedef struct {
    float interlace_threshold;
    float progressive_threshold;
    Type last_type;
    Type prestat[4];
    Type poststat[4];
    uint8_t history[HIST_SIZE];
    AVFilterBufferRef *cur;
    AVFilterBufferRef *next;
    AVFilterBufferRef *prev;
    AVFilterBufferRef *out;
    int (*filter_line)(const uint8_t *prev, const uint8_t *cur, const uint8_t *next, int w);
    const AVPixFmtDescriptor *csp;
} IDETContext;

static const char *type2str(Type type)
{
    switch(type) {
        case TFF         : return "Top Field First   ";
        case BFF         : return "Bottom Field First";
        case PROGRSSIVE  : return "Progressive       ";
        case UNDETERMINED: return "Undetermined      ";
    }
    return NULL;
}
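
/* Sum over one line of |a[x] + c[x] - 2*b[x]|: how strongly line b "combs"
 * against the lines a and c that surround it vertically. */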
static int filter_line_c(const uint8_t *a, const uint8_t *b, const uint8_t *c, int w)
{
    int x;
    int ret=0;

    for(x=0; x<w; x++){
        ret += FFABS((*a++ + *c++) - 2 * *b++);
    }

    return ret;
}

static int filter_line_c_16bit(const uint16_t *a, const uint16_t *b, const uint16_t *c, int w)
{
    int x;
    int ret=0;

    for(x=0; x<w; x++){
        ret += FFABS((*a++ + *c++) - 2 * *b++);
    }

    return ret;
}
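
/* Per-frame detection: for each field parity, accumulate how well lines of
 * the previous and the next frame interpolate between the neighbouring lines
 * of the current frame (alpha[0]/alpha[1]), and how well the current frame
 * matches itself (delta). The ratios of these sums, compared against
 * interlace_threshold and progressive_threshold, give the single-frame type;
 * the multi-frame last_type is only updated once enough history entries
 * agree. */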
static void filter(AVFilterContext *ctx)
{
    IDETContext *idet = ctx->priv;
    int y, i;
    int64_t alpha[2]={0};
    int64_t delta=0;
    Type type, best_type;
    int match = 0;

    for (i = 0; i < idet->csp->nb_components; i++) {
        int w = idet->cur->video->w;
        int h = idet->cur->video->h;
        int refs = idet->cur->linesize[i];

        if (i && i<3) {
            w >>= idet->csp->log2_chroma_w;
            h >>= idet->csp->log2_chroma_h;
        }

        for (y = 2; y < h - 2; y++) {
            uint8_t *prev = &idet->prev->data[i][y*refs];
            uint8_t *cur  = &idet->cur ->data[i][y*refs];
            uint8_t *next = &idet->next->data[i][y*refs];

            alpha[ y   &1] += idet->filter_line(cur-refs, prev, cur+refs, w);
            alpha[(y^1)&1] += idet->filter_line(cur-refs, next, cur+refs, w);
            delta          += idet->filter_line(cur-refs, cur,  cur+refs, w);
        }
    }
#if HAVE_MMX
    __asm__ volatile("emms \n\t" : : : "memory");
#endif

    if      (alpha[0] / (float)alpha[1] > idet->interlace_threshold){
        type = TFF;
    }else if(alpha[1] / (float)alpha[0] > idet->interlace_threshold){
        type = BFF;
    }else if(alpha[1] / (float)delta    > idet->progressive_threshold){
        type = PROGRSSIVE;
    }else{
        type = UNDETERMINED;
    }

    memmove(idet->history+1, idet->history, HIST_SIZE-1);
    idet->history[0] = type;
    best_type = UNDETERMINED;
    for(i=0; i<HIST_SIZE; i++){
        if(idet->history[i] != UNDETERMINED){
            if(best_type == UNDETERMINED)
                best_type = idet->history[i];
            if(idet->history[i] == best_type) {
                match++;
            }else{
                match=0;
                break;
            }
        }
    }
    if(idet->last_type == UNDETERMINED){
        if(match  ) idet->last_type = best_type;
    }else{
        if(match>2) idet->last_type = best_type;
    }

    if      (idet->last_type == TFF){
        idet->cur->video->top_field_first = 1;
        idet->cur->video->interlaced      = 1;
    }else if(idet->last_type == BFF){
        idet->cur->video->top_field_first = 0;
        idet->cur->video->interlaced      = 1;
    }else if(idet->last_type == PROGRSSIVE){
        idet->cur->video->interlaced = 0;
    }

    idet->prestat [           type] ++;
    idet->poststat[idet->last_type] ++;
    av_log(ctx, AV_LOG_DEBUG, "Single frame:%s, Multi frame:%s\n", type2str(type), type2str(idet->last_type));
}
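
/* Shift the prev/cur/next window by one frame and pass the current frame on
 * downstream. Detection needs one frame of look-ahead, so the very first
 * input frame is only buffered. */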
static void start_frame(AVFilterLink *link, AVFilterBufferRef *picref)
{
    AVFilterContext *ctx = link->dst;
    IDETContext *idet = ctx->priv;

    if (idet->prev)
        avfilter_unref_buffer(idet->prev);
    idet->prev = idet->cur;
    idet->cur  = idet->next;
    idet->next = picref;

    if (!idet->cur)
        return;

    if (!idet->prev)
        idet->prev = avfilter_ref_buffer(idet->cur, AV_PERM_READ);

    avfilter_start_frame(ctx->outputs[0], avfilter_ref_buffer(idet->cur, AV_PERM_READ));
}
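
/* Run the detection once a prev/cur/next triple is available; the pixel
 * format descriptor and the 8- vs 16-bit line filter are picked lazily on
 * the first processed frame. */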
static void end_frame(AVFilterLink *link)
{
    AVFilterContext *ctx = link->dst;
    IDETContext *idet = ctx->priv;

    if (!idet->cur)
        return;

    if (!idet->csp)
        idet->csp = &av_pix_fmt_descriptors[link->format];
    if (idet->csp->comp[0].depth_minus1 / 8 == 1)
        idet->filter_line = (void*)filter_line_c_16bit;

    filter(ctx);

    avfilter_draw_slice(ctx->outputs[0], 0, link->h, 1);
    avfilter_end_frame(ctx->outputs[0]);
}
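
/* Keep requesting input until a frame has actually reached idet->cur: with
 * the one-frame look-ahead, the first upstream frame produces no output. */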
static int request_frame(AVFilterLink *link)
{
    AVFilterContext *ctx = link->src;
    IDETContext *idet = ctx->priv;

    do {
        int ret;

        if ((ret = avfilter_request_frame(link->src->inputs[0])))
            return ret;
    } while (!idet->cur);

    return 0;
}
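
/* The frame held back for look-ahead means upstream's poll count does not
 * directly reflect what this filter can output; if upstream reports a frame
 * but nothing is buffered yet, pull it in before answering. */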
static int poll_frame(AVFilterLink *link)
{
    IDETContext *idet = link->src->priv;
    int ret, val;

    val = avfilter_poll_frame(link->src->inputs[0]);

    if (val >= 1 && !idet->next) { //FIXME change API to not require this red tape
        if ((ret = avfilter_request_frame(link->src->inputs[0])) < 0)
            return ret;
        val = avfilter_poll_frame(link->src->inputs[0]);
    }
    assert(idet->next || !val);

    return val;
}
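
/* Log the accumulated single-frame and multi-frame detection tallies and
 * release any buffered frames. */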
static av_cold void uninit(AVFilterContext *ctx)
{
    IDETContext *idet = ctx->priv;

    av_log(ctx, AV_LOG_INFO, "Single frame detection: TFF:%d BFF:%d Progressive:%d Undetermined:%d\n",
           idet->prestat[TFF],
           idet->prestat[BFF],
           idet->prestat[PROGRSSIVE],
           idet->prestat[UNDETERMINED]
    );
    av_log(ctx, AV_LOG_INFO, "Multi frame detection: TFF:%d BFF:%d Progressive:%d Undetermined:%d\n",
           idet->poststat[TFF],
           idet->poststat[BFF],
           idet->poststat[PROGRSSIVE],
           idet->poststat[UNDETERMINED]
    );

    if (idet->prev) avfilter_unref_buffer(idet->prev);
    if (idet->cur ) avfilter_unref_buffer(idet->cur );
    if (idet->next) avfilter_unref_buffer(idet->next);
}

static int query_formats(AVFilterContext *ctx)
{
    static const enum PixelFormat pix_fmts[] = {
        PIX_FMT_YUV420P,
        PIX_FMT_YUV422P,
        PIX_FMT_YUV444P,
        PIX_FMT_YUV410P,
        PIX_FMT_YUV411P,
        PIX_FMT_GRAY8,
        PIX_FMT_YUVJ420P,
        PIX_FMT_YUVJ422P,
        PIX_FMT_YUVJ444P,
        AV_NE( PIX_FMT_GRAY16BE, PIX_FMT_GRAY16LE ),
        PIX_FMT_YUV440P,
        PIX_FMT_YUVJ440P,
        AV_NE( PIX_FMT_YUV420P10BE, PIX_FMT_YUV420P10LE ),
        AV_NE( PIX_FMT_YUV422P10BE, PIX_FMT_YUV422P10LE ),
        AV_NE( PIX_FMT_YUV444P10BE, PIX_FMT_YUV444P10LE ),
        AV_NE( PIX_FMT_YUV420P16BE, PIX_FMT_YUV420P16LE ),
        AV_NE( PIX_FMT_YUV422P16BE, PIX_FMT_YUV422P16LE ),
        AV_NE( PIX_FMT_YUV444P16BE, PIX_FMT_YUV444P16LE ),
        PIX_FMT_YUVA420P,
        PIX_FMT_NONE
    };

    avfilter_set_common_pixel_formats(ctx, avfilter_make_format_list(pix_fmts));

    return 0;
}
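
/* Options are parsed as "interlace_threshold:progressive_threshold";
 * the defaults below are 1.01 and 2.5. */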
static av_cold int init(AVFilterContext *ctx, const char *args, void *opaque)
{
    IDETContext *idet = ctx->priv;

    idet->csp = NULL;
    idet->interlace_threshold   = 1.01;
    idet->progressive_threshold = 2.5;

    if (args) sscanf(args, "%f:%f", &idet->interlace_threshold, &idet->progressive_threshold);

    idet->last_type = UNDETERMINED;
    memset(idet->history, UNDETERMINED, HIST_SIZE);

    idet->filter_line = filter_line_c;

    return 0;
}

static void null_draw_slice(AVFilterLink *link, int y, int h, int slice_dir) { }

AVFilter avfilter_vf_idet = {
    .name          = "idet",
    .description   = NULL_IF_CONFIG_SMALL("Interlace detect Filter."),

    .priv_size     = sizeof(IDETContext),
    .init          = init,
    .uninit        = uninit,
    .query_formats = query_formats,

    .inputs    = (const AVFilterPad[]) {{ .name        = "default",
                                          .type        = AVMEDIA_TYPE_VIDEO,
                                          .start_frame = start_frame,
                                          .draw_slice  = null_draw_slice,
                                          .end_frame   = end_frame,
                                          .rej_perms   = AV_PERM_REUSE2, },
                                        { .name = NULL}},

    .outputs   = (const AVFilterPad[]) {{ .name          = "default",
                                          .type          = AVMEDIA_TYPE_VIDEO,
                                          .poll_frame    = poll_frame,
                                          .request_frame = request_frame, },
                                        { .name = NULL}},
};