/*
 * Copyright (c) 2002-2004 Michael Niedermayer <michaelni@gmx.at>
 * Copyright (c) 2014 Clément Bœsch <u pkh me>
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * Codec debug viewer filter.
 *
 * All the MV drawing code from Michael Niedermayer is extracted from
 * libavcodec/mpegvideo.c.
 *
 * TODO: segmentation
 */
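
/*
 * Typical use is to have the decoder export motion vectors as frame side
 * data and let this filter draw them, e.g. (illustrative command line,
 * file name is a placeholder):
 *
 *     ffplay -flags2 +export_mvs input.mp4 -vf codecview=mv=pf+bf+bb
 *
 * The qp=1 option additionally paints the per-macroblock quantizer into the
 * chroma planes when the decoder exports a QP table.
 */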

#include "libavutil/imgutils.h"
#include "libavutil/motion_vector.h"
#include "libavutil/opt.h"
#include "avfilter.h"
#include "internal.h"

#define MV_P_FOR  (1<<0)
#define MV_B_FOR  (1<<1)
#define MV_B_BACK (1<<2)
#define MV_TYPE_FOR  (1<<0)
#define MV_TYPE_BACK (1<<1)
#define FRAME_TYPE_I (1<<0)
#define FRAME_TYPE_P (1<<1)
#define FRAME_TYPE_B (1<<2)

typedef struct {
    const AVClass *class;
    unsigned mv;
    unsigned frame_type;
    unsigned mv_type;
    int hsub, vsub;
    int qp;
} CodecViewContext;

#define OFFSET(x) offsetof(CodecViewContext, x)
#define FLAGS AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM
#define CONST(name, help, val, unit) { name, help, 0, AV_OPT_TYPE_CONST, {.i64=val}, 0, 0, FLAGS, unit }

static const AVOption codecview_options[] = {
    { "mv", "set motion vectors to visualize", OFFSET(mv), AV_OPT_TYPE_FLAGS, {.i64=0}, 0, INT_MAX, FLAGS, "mv" },
        CONST("pf", "forward predicted MVs of P-frames",  MV_P_FOR,  "mv"),
        CONST("bf", "forward predicted MVs of B-frames",  MV_B_FOR,  "mv"),
        CONST("bb", "backward predicted MVs of B-frames", MV_B_BACK, "mv"),
    { "qp", NULL, OFFSET(qp), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, .flags = FLAGS },
    { "mv_type", "set motion vectors type", OFFSET(mv_type), AV_OPT_TYPE_FLAGS, {.i64=0}, 0, INT_MAX, FLAGS, "mv_type" },
    { "mvt",     "set motion vectors type", OFFSET(mv_type), AV_OPT_TYPE_FLAGS, {.i64=0}, 0, INT_MAX, FLAGS, "mv_type" },
        CONST("fp", "forward predicted MVs",  MV_TYPE_FOR,  "mv_type"),
        CONST("bp", "backward predicted MVs", MV_TYPE_BACK, "mv_type"),
    { "frame_type", "set frame types to visualize motion vectors of", OFFSET(frame_type), AV_OPT_TYPE_FLAGS, {.i64=0}, 0, INT_MAX, FLAGS, "frame_type" },
    { "ft",         "set frame types to visualize motion vectors of", OFFSET(frame_type), AV_OPT_TYPE_FLAGS, {.i64=0}, 0, INT_MAX, FLAGS, "frame_type" },
        CONST("if", "I-frames", FRAME_TYPE_I, "frame_type"),
        CONST("pf", "P-frames", FRAME_TYPE_P, "frame_type"),
        CONST("bf", "B-frames", FRAME_TYPE_B, "frame_type"),
    { NULL }
};

AVFILTER_DEFINE_CLASS(codecview);

static int query_formats(AVFilterContext *ctx)
{
    // TODO: we can probably add way more pixel formats without any other
    // changes; anything with 8-bit luma in first plane should be working
    static const enum AVPixelFormat pix_fmts[] = {AV_PIX_FMT_YUV420P, AV_PIX_FMT_NONE};
    AVFilterFormats *fmts_list = ff_make_format_list(pix_fmts);
    if (!fmts_list)
        return AVERROR(ENOMEM);
    return ff_set_common_formats(ctx, fmts_list);
}
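
/*
 * Clip the segment (sx,sy)->(ex,ey) against [0, maxx] on the first
 * coordinate, interpolating the second coordinate with a 64-bit
 * intermediate to avoid overflow.  Returns 1 when the segment lies
 * entirely outside the range (nothing to draw), 0 otherwise.
 * draw_line() calls this twice, with the roles of x and y swapped,
 * to clip against both the width and the height of the image.
 */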
static int clip_line(int *sx, int *sy, int *ex, int *ey, int maxx)
{
    if (*sx > *ex)
        return clip_line(ex, ey, sx, sy, maxx);

    if (*sx < 0) {
        if (*ex < 0)
            return 1;
        *sy = *ey + (*sy - *ey) * (int64_t)*ex / (*ex - *sx);
        *sx = 0;
    }

    if (*ex > maxx) {
        if (*sx > maxx)
            return 1;
        *ey = *sy + (*ey - *sy) * (int64_t)(maxx - *sx) / (*ex - *sx);
        *ex = maxx;
    }
    return 0;
}

/**
 * Draw a line from (ex, ey) -> (sx, sy).
 * @param w      width of the image
 * @param h      height of the image
 * @param stride stride/linesize of the image
 * @param color  color of the line
 */
static void draw_line(uint8_t *buf, int sx, int sy, int ex, int ey,
                      int w, int h, int stride, int color)
{
    int x, y, fr, f;

    if (clip_line(&sx, &sy, &ex, &ey, w - 1))
        return;
    if (clip_line(&sy, &sx, &ey, &ex, h - 1))
        return;

    sx = av_clip(sx, 0, w - 1);
    sy = av_clip(sy, 0, h - 1);
    ex = av_clip(ex, 0, w - 1);
    ey = av_clip(ey, 0, h - 1);

    buf[sy * stride + sx] += color;

    if (FFABS(ex - sx) > FFABS(ey - sy)) {
        /* mostly horizontal: walk along x, interpolate y */
        if (sx > ex) {
            FFSWAP(int, sx, ex);
            FFSWAP(int, sy, ey);
        }
        buf += sx + sy * stride;
        ex  -= sx;
        f    = ((ey - sy) << 16) / ex;   /* slope in 16.16 fixed point */
        for (x = 0; x <= ex; x++) {
            y  = (x * f) >> 16;          /* integer part of the ideal y */
            fr = (x * f) & 0xFFFF;       /* fractional part, used to blend between the two rows */
            buf[ y      * stride + x] += (color * (0x10000 - fr)) >> 16;
            if (fr) buf[(y + 1) * stride + x] += (color * fr) >> 16;
        }
    } else {
        /* mostly vertical: walk along y, interpolate x */
        if (sy > ey) {
            FFSWAP(int, sx, ex);
            FFSWAP(int, sy, ey);
        }
        buf += sx + sy * stride;
        ey  -= sy;
        if (ey)
            f = ((ex - sx) << 16) / ey;
        else
            f = 0;
        for (y = 0; y <= ey; y++) {
            x  = (y * f) >> 16;
            fr = (y * f) & 0xFFFF;
            buf[y * stride + x    ] += (color * (0x10000 - fr)) >> 16;
            if (fr) buf[y * stride + x + 1] += (color * fr) >> 16;
        }
    }
}

/**
 * Draw an arrow from (ex, ey) -> (sx, sy).
 * @param w      width of the image
 * @param h      height of the image
 * @param stride stride/linesize of the image
 * @param color  color of the arrow
 */
static void draw_arrow(uint8_t *buf, int sx, int sy, int ex,
                       int ey, int w, int h, int stride, int color, int tail, int direction)
{
    int dx, dy;

    if (direction) {
        FFSWAP(int, sx, ex);
        FFSWAP(int, sy, ey);
    }

    sx = av_clip(sx, -100, w + 100);
    sy = av_clip(sy, -100, h + 100);
    ex = av_clip(ex, -100, w + 100);
    ey = av_clip(ey, -100, h + 100);

    dx = ex - sx;
    dy = ey - sy;

    if (dx * dx + dy * dy > 3 * 3) {
        /* (rx, ry) is the line direction rotated by 45 degrees and normalized
         * to a length of ~3 pixels; the two short strokes drawn from (sx, sy)
         * form the arrow head */
        int rx =  dx + dy;
        int ry = -dx + dy;
        int length = sqrt((rx * rx + ry * ry) << 8);

        // FIXME subpixel accuracy
        rx = ROUNDED_DIV(rx * 3 << 4, length);
        ry = ROUNDED_DIV(ry * 3 << 4, length);

        if (tail) {
            rx = -rx;
            ry = -ry;
        }

        draw_line(buf, sx, sy, sx + rx, sy + ry, w, h, stride, color);
        draw_line(buf, sx, sy, sx - ry, sy + rx, w, h, stride, color);
    }
    draw_line(buf, sx, sy, ex, ey, w, h, stride, color);
}
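
/*
 * Overlay the debug information on the incoming frame (which the input pad
 * marks as writable): when the qp option is set, paint the per-macroblock
 * quantizer into both chroma planes; when mv/mv_type is set, draw the motion
 * vectors exported in AV_FRAME_DATA_MOTION_VECTORS side data as arrows on the
 * luma plane.
 */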
static int filter_frame(AVFilterLink *inlink, AVFrame *frame)
{
    AVFilterContext *ctx = inlink->dst;
    CodecViewContext *s = ctx->priv;
    AVFilterLink *outlink = ctx->outputs[0];

    if (s->qp) {
        int qstride, qp_type;
        int8_t *qp_table = av_frame_get_qp_table(frame, &qstride, &qp_type);
        if (qp_table) {
            int x, y;
            const int w = AV_CEIL_RSHIFT(frame->width,  s->hsub);
            const int h = AV_CEIL_RSHIFT(frame->height, s->vsub);
            uint8_t *pu = frame->data[1];
            uint8_t *pv = frame->data[2];
            const int lzu = frame->linesize[1];
            const int lzv = frame->linesize[2];

            for (y = 0; y < h; y++) {
                for (x = 0; x < w; x++) {
                    /* one QP table entry per 16x16 luma macroblock (8x8 in
                     * chroma for the 4:2:0 input); normalize it to the 1..31
                     * qscale range and spread it over ~0..128 so it shows up
                     * as a tint in both chroma planes */
                    const int qp = ff_norm_qscale(qp_table[(y >> 3) * qstride + (x >> 3)], qp_type) * 128/31;
                    pu[x] = pv[x] = qp;
                }
                pu += lzu;
                pv += lzv;
            }
        }
    }

    if (s->mv || s->mv_type) {
        AVFrameSideData *sd = av_frame_get_side_data(frame, AV_FRAME_DATA_MOTION_VECTORS);
        if (sd) {
            int i;
            const AVMotionVector *mvs = (const AVMotionVector *)sd->data;
            const int is_iframe = (s->frame_type & FRAME_TYPE_I) && frame->pict_type == AV_PICTURE_TYPE_I;
            const int is_pframe = (s->frame_type & FRAME_TYPE_P) && frame->pict_type == AV_PICTURE_TYPE_P;
            const int is_bframe = (s->frame_type & FRAME_TYPE_B) && frame->pict_type == AV_PICTURE_TYPE_B;

            for (i = 0; i < sd->size / sizeof(*mvs); i++) {
                const AVMotionVector *mv = &mvs[i];
                const int direction = mv->source > 0;

                if (s->mv_type) {
                    const int is_fp = direction == 0 && (s->mv_type & MV_TYPE_FOR);
                    const int is_bp = direction == 1 && (s->mv_type & MV_TYPE_BACK);

                    if ((!s->frame_type && (is_fp || is_bp)) ||
                        is_iframe && is_fp || is_iframe && is_bp ||
                        is_pframe && is_fp ||
                        is_bframe && is_fp || is_bframe && is_bp)
                        draw_arrow(frame->data[0], mv->dst_x, mv->dst_y, mv->src_x, mv->src_y,
                                   frame->width, frame->height, frame->linesize[0],
                                   100, 0, direction);
                } else if (s->mv)
                    if ((direction == 0 && (s->mv & MV_P_FOR)  && frame->pict_type == AV_PICTURE_TYPE_P) ||
                        (direction == 0 && (s->mv & MV_B_FOR)  && frame->pict_type == AV_PICTURE_TYPE_B) ||
                        (direction == 1 && (s->mv & MV_B_BACK) && frame->pict_type == AV_PICTURE_TYPE_B))
                        draw_arrow(frame->data[0], mv->dst_x, mv->dst_y, mv->src_x, mv->src_y,
                                   frame->width, frame->height, frame->linesize[0],
                                   100, 0, direction);
            }
        }
    }
    return ff_filter_frame(outlink, frame);
}
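
/* Cache the chroma subsampling shifts of the input pixel format; the QP
 * visualization in filter_frame() uses them to size its walk over the
 * chroma planes. */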
static int config_input(AVFilterLink *inlink)
{
    AVFilterContext *ctx = inlink->dst;
    CodecViewContext *s = ctx->priv;
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(inlink->format);
    s->hsub = desc->log2_chroma_w;
    s->vsub = desc->log2_chroma_h;
    return 0;
}

static const AVFilterPad codecview_inputs[] = {
    {
        .name           = "default",
        .type           = AVMEDIA_TYPE_VIDEO,
        .filter_frame   = filter_frame,
        .config_props   = config_input,
        .needs_writable = 1,
    },
    { NULL }
};

static const AVFilterPad codecview_outputs[] = {
    {
        .name = "default",
        .type = AVMEDIA_TYPE_VIDEO,
    },
    { NULL }
};

AVFilter ff_vf_codecview = {
    .name          = "codecview",
    .description   = NULL_IF_CONFIG_SMALL("Visualize information about some codecs."),
    .priv_size     = sizeof(CodecViewContext),
    .query_formats = query_formats,
    .inputs        = codecview_inputs,
    .outputs       = codecview_outputs,
    .priv_class    = &codecview_class,
    .flags         = AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC,
};