uncoded_frame.c

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#include "libavutil/avassert.h"
#include "libavdevice/avdevice.h"
#include "libavfilter/avfilter.h"
#include "libavfilter/buffersink.h"
#include "libavformat/avformat.h"
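
/* One output stream: the sink filter it is fed from, the AVStream it maps
 * to, and the muxer (output device) that receives it. */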
typedef struct {
    AVFormatContext *mux;
    AVStream *stream;
    AVFilterContext *sink;
} Stream;
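
/* Create a buffersink (video) or abuffersink (audio) filter in the graph,
 * link the free output pad idx of filter f to it, and store it in st->sink
 * so frames can later be pulled from the graph. */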
static int create_sink(Stream *st, AVFilterGraph *graph,
                       AVFilterContext *f, int idx)
{
    enum AVMediaType type = avfilter_pad_get_type(f->output_pads, idx);
    const char *sink_name;
    int ret;

    switch (type) {
    case AVMEDIA_TYPE_VIDEO: sink_name = "buffersink";  break;
    case AVMEDIA_TYPE_AUDIO: sink_name = "abuffersink"; break;
    default:
        av_log(NULL, AV_LOG_ERROR, "Stream type not supported\n");
        return AVERROR(EINVAL);
    }
    ret = avfilter_graph_create_filter(&st->sink,
                                       avfilter_get_by_name(sink_name),
                                       NULL, NULL, NULL, graph);
    if (ret < 0)
        return ret;
    ret = avfilter_link(f, idx, st->sink, 0);
    if (ret < 0)
        return ret;
    return 0;
}

int main(int argc, char **argv)
{
    char *in_graph_desc, **out_dev_name;
    int nb_out_dev = 0, nb_streams = 0;
    AVFilterGraph *in_graph = NULL;
    Stream *streams = NULL, *st;
    AVFrame *frame = NULL;
    int i, j, run = 1, ret;

    //av_log_set_level(AV_LOG_DEBUG);

    if (argc < 3) {
        av_log(NULL, AV_LOG_ERROR,
               "Usage: %s filter_graph dev:out [dev2:out2...]\n\n"
               "Examples:\n"
               "%s movie=file.nut:s=v+a xv:- alsa:default\n"
               "%s movie=file.nut:s=v+a uncodedframecrc:pipe:0\n",
               argv[0], argv[0], argv[0]);
        exit(1);
    }
    in_graph_desc = argv[1];
    out_dev_name  = argv + 2;
    nb_out_dev    = argc - 2;

    avdevice_register_all();

    /* Create input graph */
    if (!(in_graph = avfilter_graph_alloc())) {
        ret = AVERROR(ENOMEM);
        av_log(NULL, AV_LOG_ERROR, "Unable to alloc graph: %s\n",
               av_err2str(ret));
        goto fail;
    }
    ret = avfilter_graph_parse_ptr(in_graph, in_graph_desc, NULL, NULL, NULL);
    if (ret < 0) {
        av_log(NULL, AV_LOG_ERROR, "Unable to parse graph: %s\n",
               av_err2str(ret));
        goto fail;
    }
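
    /* Count the free (unconnected) output pads: each one becomes an output
     * stream. Unconnected inputs are an error, nothing would feed them. */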
    nb_streams = 0;
    for (i = 0; i < in_graph->nb_filters; i++) {
        AVFilterContext *f = in_graph->filters[i];
        for (j = 0; j < f->nb_inputs; j++) {
            if (!f->inputs[j]) {
                av_log(NULL, AV_LOG_ERROR, "Graph has unconnected inputs\n");
                ret = AVERROR(EINVAL);
                goto fail;
            }
        }
        for (j = 0; j < f->nb_outputs; j++)
            if (!f->outputs[j])
                nb_streams++;
    }
    if (!nb_streams) {
        av_log(NULL, AV_LOG_ERROR, "Graph has no output stream\n");
        ret = AVERROR(EINVAL);
        goto fail;
    }
    if (nb_out_dev != 1 && nb_out_dev != nb_streams) {
        av_log(NULL, AV_LOG_ERROR,
               "Graph has %d output streams, %d devices given\n",
               nb_streams, nb_out_dev);
        ret = AVERROR(EINVAL);
        goto fail;
    }
    if (!(streams = av_calloc(nb_streams, sizeof(*streams)))) {
        ret = AVERROR(ENOMEM);
        av_log(NULL, AV_LOG_ERROR, "Could not allocate streams\n");
        goto fail;
    }
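
    /* Attach a sink to each free output pad, in the same order as the
     * count above, then configure the complete graph. */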
    st = streams;
    for (i = 0; i < in_graph->nb_filters; i++) {
        AVFilterContext *f = in_graph->filters[i];
        for (j = 0; j < f->nb_outputs; j++) {
            if (!f->outputs[j]) {
                if ((ret = create_sink(st++, in_graph, f, j)) < 0)
                    goto fail;
            }
        }
    }
    av_assert0(st - streams == nb_streams);
    if ((ret = avfilter_graph_config(in_graph, NULL)) < 0) {
        av_log(NULL, AV_LOG_ERROR, "Failed to configure graph\n");
        goto fail;
    }

    /* Create output devices */
    for (i = 0; i < nb_out_dev; i++) {
        char *fmt = NULL, *dev = out_dev_name[i];
        st = &streams[i];
        if ((dev = strchr(dev, ':'))) {
            *(dev++) = 0;
            fmt = out_dev_name[i];
        }
        ret = avformat_alloc_output_context2(&st->mux, NULL, fmt, dev);
        if (ret < 0) {
            av_log(NULL, AV_LOG_ERROR, "Failed to allocate output: %s\n",
                   av_err2str(ret));
            goto fail;
        }
        if (!(st->mux->oformat->flags & AVFMT_NOFILE)) {
            ret = avio_open2(&st->mux->pb, st->mux->url, AVIO_FLAG_WRITE,
                             NULL, NULL);
            if (ret < 0) {
                av_log(st->mux, AV_LOG_ERROR, "Failed to init output: %s\n",
                       av_err2str(ret));
                goto fail;
            }
        }
    }
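
    /* If a single output device was given for several streams, all the
     * remaining streams share the first muxer. */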
    for (; i < nb_streams; i++)
        streams[i].mux = streams[0].mux;

    /* Create output device streams */
    for (i = 0; i < nb_streams; i++) {
        st = &streams[i];
        if (!(st->stream = avformat_new_stream(st->mux, NULL))) {
            ret = AVERROR(ENOMEM);
            av_log(NULL, AV_LOG_ERROR, "Failed to create output stream\n");
            goto fail;
        }
        st->stream->codecpar->codec_type = av_buffersink_get_type(st->sink);
        st->stream->time_base = av_buffersink_get_time_base(st->sink);
        switch (av_buffersink_get_type(st->sink)) {
        case AVMEDIA_TYPE_VIDEO:
            st->stream->codecpar->codec_id = AV_CODEC_ID_RAWVIDEO;
            st->stream->avg_frame_rate =
            st->stream->r_frame_rate   = av_buffersink_get_frame_rate(st->sink);
            st->stream->codecpar->width               = av_buffersink_get_w(st->sink);
            st->stream->codecpar->height              = av_buffersink_get_h(st->sink);
            st->stream->codecpar->sample_aspect_ratio = av_buffersink_get_sample_aspect_ratio(st->sink);
            st->stream->codecpar->format              = av_buffersink_get_format(st->sink);
            break;
        case AVMEDIA_TYPE_AUDIO:
            st->stream->codecpar->channel_layout = av_buffersink_get_channel_layout(st->sink);
            st->stream->codecpar->channels       = av_buffersink_get_channels(st->sink);
            st->stream->codecpar->sample_rate    = av_buffersink_get_sample_rate(st->sink);
            st->stream->codecpar->format         = av_buffersink_get_format(st->sink);
            st->stream->codecpar->codec_id       = av_get_pcm_codec(st->stream->codecpar->format, -1);
            break;
        default:
            av_assert0(!"reached");
        }
    }

    /* Init output devices */
    for (i = 0; i < nb_out_dev; i++) {
        st = &streams[i];
        if ((ret = avformat_write_header(st->mux, NULL)) < 0) {
            av_log(st->mux, AV_LOG_ERROR, "Failed to init output: %s\n",
                   av_err2str(ret));
            goto fail;
        }
    }

    /* Check output devices */
    for (i = 0; i < nb_streams; i++) {
        st = &streams[i];
        ret = av_write_uncoded_frame_query(st->mux, st->stream->index);
        if (ret < 0) {
            av_log(st->mux, AV_LOG_ERROR,
                   "Uncoded frames not supported on stream #%d: %s\n",
                   i, av_err2str(ret));
            goto fail;
        }
    }
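
    /* Main loop: request a frame from the filter graph, then drain every
     * sink and send its frames to the corresponding output device. */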
    while (run) {
        ret = avfilter_graph_request_oldest(in_graph);
        if (ret < 0) {
            if (ret == AVERROR_EOF) {
                run = 0;
            } else {
                av_log(NULL, AV_LOG_ERROR, "Error filtering: %s\n",
                       av_err2str(ret));
                break;
            }
        }
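        /* Even when the graph reports EOF, drain the frames still buffered
         * in the sinks before leaving the loop. */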
        for (i = 0; i < nb_streams; i++) {
            st = &streams[i];
            while (1) {
                if (!frame && !(frame = av_frame_alloc())) {
                    ret = AVERROR(ENOMEM);
                    av_log(NULL, AV_LOG_ERROR, "Could not allocate frame\n");
                    goto fail;
                }
                ret = av_buffersink_get_frame_flags(st->sink, frame,
                                                    AV_BUFFERSINK_FLAG_NO_REQUEST);
                if (ret < 0) {
                    if (ret != AVERROR(EAGAIN) && ret != AVERROR_EOF)
                        av_log(NULL, AV_LOG_WARNING, "Error in sink: %s\n",
                               av_err2str(ret));
                    break;
                }
                if (frame->pts != AV_NOPTS_VALUE)
                    frame->pts = av_rescale_q(frame->pts,
                                              av_buffersink_get_time_base(st->sink),
                                              st->stream->time_base);
                ret = av_interleaved_write_uncoded_frame(st->mux,
                                                         st->stream->index,
                                                         frame);
                frame = NULL;
                if (ret < 0) {
                    av_log(st->mux, AV_LOG_ERROR,
                           "Error writing frame: %s\n", av_err2str(ret));
                    goto fail;
                }
            }
        }
    }
    ret = 0;

    for (i = 0; i < nb_out_dev; i++) {
        st = &streams[i];
        av_write_trailer(st->mux);
    }
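
/* Cleanup, shared by the success and error paths. */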
fail:
    av_frame_free(&frame);
    avfilter_graph_free(&in_graph);
    if (streams) {
        for (i = 0; i < nb_out_dev; i++) {
            st = &streams[i];
            if (st->mux) {
                if (st->mux->pb)
                    avio_closep(&st->mux->pb);
                avformat_free_context(st->mux);
            }
        }
    }
    av_freep(&streams);
    return ret < 0;
}