v4l2.c
/*
 * Video4Linux2 grab interface
 * Copyright (c) 2000,2001 Fabrice Bellard
 * Copyright (c) 2006 Luca Abeni
 *
 * Part of this file is based on the V4L2 video capture example
 * (http://v4l2spec.bytesex.org/v4l2spec/capture.c)
 *
 * Thanks to Michael Niedermayer for providing the mapping between
 * V4L2_PIX_FMT_* and AV_PIX_FMT_*
 *
 *
 * This file is part of Libav.
 *
 * Libav is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * Libav is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with Libav; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
#undef __STRICT_ANSI__ // workaround due to broken kernel headers
#include "config.h"
#include "libavformat/avformat.h"
#include "libavformat/internal.h"
#include <unistd.h>
#include <fcntl.h>
#include <stdatomic.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/time.h>
#include <poll.h>
#if HAVE_SYS_VIDEOIO_H
#include <sys/videoio.h>
#else
#include <linux/videodev2.h>
#endif
#include "libavutil/avassert.h"
#include "libavutil/imgutils.h"
#include "libavutil/internal.h"
#include "libavutil/log.h"
#include "libavutil/opt.h"
#include "libavutil/parseutils.h"
#include "libavutil/pixdesc.h"
#include "libavutil/avstring.h"
#include "libavutil/mathematics.h"

static const int desired_video_buffers = 256;

#define V4L_ALLFORMATS  3
#define V4L_RAWFORMATS  1
#define V4L_COMPFORMATS 2

struct video_data {
    AVClass *class;
    int fd;
    int frame_format; /* V4L2_PIX_FMT_* */
    int width, height;
    int frame_size;
    int timeout;
    int interlaced;
    int top_field_first;

    int buffers;
    atomic_int buffers_queued;
    void **buf_start;
    unsigned int *buf_len;
    char *standard;
    int channel;
    char *video_size;   /**< String describing video size,
                             set by a private option. */
    char *pixel_format; /**< Set by a private option. */
    int list_format;    /**< Set by a private option. */
    char *framerate;    /**< Set by a private option. */
};

struct buff_data {
    struct video_data *s;
    int index;
    int fd;
};

struct fmt_map {
    enum AVPixelFormat ff_fmt;
    enum AVCodecID codec_id;
    uint32_t v4l2_fmt;
};

static struct fmt_map fmt_conversion_table[] = {
    //ff_fmt              codec_id              v4l2_fmt
    { AV_PIX_FMT_YUV420P, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_YUV420  },
    { AV_PIX_FMT_YUV422P, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_YUV422P },
    { AV_PIX_FMT_YUYV422, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_YUYV    },
    { AV_PIX_FMT_UYVY422, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_UYVY    },
    { AV_PIX_FMT_YUV411P, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_YUV411P },
    { AV_PIX_FMT_YUV410P, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_YUV410  },
    { AV_PIX_FMT_RGB555,  AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_RGB555  },
    { AV_PIX_FMT_RGB565,  AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_RGB565  },
    { AV_PIX_FMT_BGR24,   AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_BGR24   },
    { AV_PIX_FMT_RGB24,   AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_RGB24   },
    { AV_PIX_FMT_BGRA,    AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_BGR32   },
    { AV_PIX_FMT_GRAY8,   AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_GREY    },
    { AV_PIX_FMT_NV12,    AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_NV12    },
    { AV_PIX_FMT_NONE,    AV_CODEC_ID_MJPEG,    V4L2_PIX_FMT_MJPEG   },
    { AV_PIX_FMT_NONE,    AV_CODEC_ID_MJPEG,    V4L2_PIX_FMT_JPEG    },
#ifdef V4L2_PIX_FMT_H264
    { AV_PIX_FMT_NONE,    AV_CODEC_ID_H264,     V4L2_PIX_FMT_H264    },
#endif
};

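/* Open the device node given in ctx->filename and check via
 * VIDIOC_QUERYCAP that it is a video capture device supporting the
 * streaming I/O method. Returns the file descriptor on success or a
 * negative AVERROR code on failure. */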
static int device_open(AVFormatContext *ctx)
{
    struct v4l2_capability cap;
    int fd;
    int res, err;
    int flags = O_RDWR;
    char errbuf[128];

    if (ctx->flags & AVFMT_FLAG_NONBLOCK) {
        flags |= O_NONBLOCK;
    }

    fd = avpriv_open(ctx->filename, flags);
    if (fd < 0) {
        err = AVERROR(errno);
        av_strerror(err, errbuf, sizeof(errbuf));
        av_log(ctx, AV_LOG_ERROR, "Cannot open video device %s : %s\n",
               ctx->filename, errbuf);
        return err;
    }

    res = ioctl(fd, VIDIOC_QUERYCAP, &cap);
    if (res < 0) {
        err = AVERROR(errno);
        av_strerror(err, errbuf, sizeof(errbuf));
        av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_QUERYCAP): %s\n",
               errbuf);
        goto fail;
    }

    av_log(ctx, AV_LOG_VERBOSE, "[%d]Capabilities: %x\n",
           fd, cap.capabilities);

    if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
        av_log(ctx, AV_LOG_ERROR, "Not a video capture device.\n");
        err = AVERROR(ENODEV);
        goto fail;
    }

    if (!(cap.capabilities & V4L2_CAP_STREAMING)) {
        av_log(ctx, AV_LOG_ERROR,
               "The device does not support the streaming I/O method.\n");
        err = AVERROR(ENOSYS);
        goto fail;
    }

    return fd;

fail:
    close(fd);
    return err;
}

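/* Negotiate frame size and pixel format with the driver via
 * VIDIOC_S_FMT. The driver may adjust the dimensions (reported back
 * through *width / *height); a changed pixel format is treated as a
 * failure. */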
static int device_init(AVFormatContext *ctx, int *width, int *height,
                       uint32_t pix_fmt)
{
    struct video_data *s = ctx->priv_data;
    int fd = s->fd;
    struct v4l2_format fmt = { .type = V4L2_BUF_TYPE_VIDEO_CAPTURE };
    struct v4l2_pix_format *pix = &fmt.fmt.pix;

    int res;

    pix->width       = *width;
    pix->height      = *height;
    pix->pixelformat = pix_fmt;
    pix->field       = V4L2_FIELD_ANY;

    res = ioctl(fd, VIDIOC_S_FMT, &fmt);

    if ((*width != fmt.fmt.pix.width) || (*height != fmt.fmt.pix.height)) {
        av_log(ctx, AV_LOG_INFO,
               "The V4L2 driver changed the video from %dx%d to %dx%d\n",
               *width, *height, fmt.fmt.pix.width, fmt.fmt.pix.height);
        *width  = fmt.fmt.pix.width;
        *height = fmt.fmt.pix.height;
    }

    if (pix_fmt != fmt.fmt.pix.pixelformat) {
        av_log(ctx, AV_LOG_DEBUG,
               "The V4L2 driver changed the pixel format "
               "from 0x%08X to 0x%08X\n",
               pix_fmt, fmt.fmt.pix.pixelformat);
        res = -1;
    }

    if (fmt.fmt.pix.field == V4L2_FIELD_INTERLACED) {
        av_log(ctx, AV_LOG_DEBUG, "The V4L2 driver is using the interlaced mode\n");
        s->interlaced = 1;
    }

    return res;
}

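/* Guess the field order from the current video standard: NTSC is
 * reported as not top-field-first, everything else (and any failure
 * to query the standard other than an ioctl error) as top-field-first. */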
static int first_field(int fd)
{
    int res;
    v4l2_std_id std;

    res = ioctl(fd, VIDIOC_G_STD, &std);
    if (res < 0) {
        return 0;
    }
    if (std & V4L2_STD_NTSC) {
        return 0;
    }

    return 1;
}

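/* Helpers mapping between AV_PIX_FMT_* / AVCodecID and V4L2_PIX_FMT_*
 * via fmt_conversion_table; in fmt_ff2v4l(), AV_PIX_FMT_NONE and
 * AV_CODEC_ID_NONE act as wildcards. */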
static uint32_t fmt_ff2v4l(enum AVPixelFormat pix_fmt, enum AVCodecID codec_id)
{
    int i;

    for (i = 0; i < FF_ARRAY_ELEMS(fmt_conversion_table); i++) {
        if ((codec_id == AV_CODEC_ID_NONE ||
             fmt_conversion_table[i].codec_id == codec_id) &&
            (pix_fmt == AV_PIX_FMT_NONE ||
             fmt_conversion_table[i].ff_fmt == pix_fmt)) {
            return fmt_conversion_table[i].v4l2_fmt;
        }
    }

    return 0;
}

static enum AVPixelFormat fmt_v4l2ff(uint32_t v4l2_fmt, enum AVCodecID codec_id)
{
    int i;

    for (i = 0; i < FF_ARRAY_ELEMS(fmt_conversion_table); i++) {
        if (fmt_conversion_table[i].v4l2_fmt == v4l2_fmt &&
            fmt_conversion_table[i].codec_id == codec_id) {
            return fmt_conversion_table[i].ff_fmt;
        }
    }

    return AV_PIX_FMT_NONE;
}

static enum AVCodecID fmt_v4l2codec(uint32_t v4l2_fmt)
{
    int i;

    for (i = 0; i < FF_ARRAY_ELEMS(fmt_conversion_table); i++) {
        if (fmt_conversion_table[i].v4l2_fmt == v4l2_fmt) {
            return fmt_conversion_table[i].codec_id;
        }
    }

    return AV_CODEC_ID_NONE;
}

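/* Enumerate and log the frame sizes and formats advertised by the
 * device; used by the -list_formats option. */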
#if HAVE_STRUCT_V4L2_FRMIVALENUM_DISCRETE
static void list_framesizes(AVFormatContext *ctx, int fd, uint32_t pixelformat)
{
    struct v4l2_frmsizeenum vfse = { .pixel_format = pixelformat };

    while (!ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &vfse)) {
        switch (vfse.type) {
        case V4L2_FRMSIZE_TYPE_DISCRETE:
            av_log(ctx, AV_LOG_INFO, " %ux%u",
                   vfse.discrete.width, vfse.discrete.height);
            break;
        case V4L2_FRMSIZE_TYPE_CONTINUOUS:
        case V4L2_FRMSIZE_TYPE_STEPWISE:
            av_log(ctx, AV_LOG_INFO, " {%u-%u, %u}x{%u-%u, %u}",
                   vfse.stepwise.min_width,
                   vfse.stepwise.max_width,
                   vfse.stepwise.step_width,
                   vfse.stepwise.min_height,
                   vfse.stepwise.max_height,
                   vfse.stepwise.step_height);
        }
        vfse.index++;
    }
}
#endif

static void list_formats(AVFormatContext *ctx, int fd, int type)
{
    struct v4l2_fmtdesc vfd = { .type = V4L2_BUF_TYPE_VIDEO_CAPTURE };

    while (!ioctl(fd, VIDIOC_ENUM_FMT, &vfd)) {
        enum AVCodecID codec_id = fmt_v4l2codec(vfd.pixelformat);
        enum AVPixelFormat pix_fmt = fmt_v4l2ff(vfd.pixelformat, codec_id);

        vfd.index++;

        if (!(vfd.flags & V4L2_FMT_FLAG_COMPRESSED) &&
            type & V4L_RAWFORMATS) {
            const char *fmt_name = av_get_pix_fmt_name(pix_fmt);
            av_log(ctx, AV_LOG_INFO, "R : %9s : %20s :",
                   fmt_name ? fmt_name : "Unsupported",
                   vfd.description);
        } else if (vfd.flags & V4L2_FMT_FLAG_COMPRESSED &&
                   type & V4L_COMPFORMATS) {
            const AVCodecDescriptor *desc = avcodec_descriptor_get(codec_id);
            av_log(ctx, AV_LOG_INFO, "C : %9s : %20s :",
                   desc ? desc->name : "Unsupported",
                   vfd.description);
        } else {
            continue;
        }

#ifdef V4L2_FMT_FLAG_EMULATED
        if (vfd.flags & V4L2_FMT_FLAG_EMULATED) {
            av_log(ctx, AV_LOG_WARNING, "%s", "Emulated");
            continue;
        }
#endif
#if HAVE_STRUCT_V4L2_FRMIVALENUM_DISCRETE
        list_framesizes(ctx, fd, vfd.pixelformat);
#endif
        av_log(ctx, AV_LOG_INFO, "\n");
    }
}

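/* Request up to desired_video_buffers capture buffers from the driver
 * and mmap() each of them into the process; buffer pointers and
 * lengths are stored in s->buf_start / s->buf_len. */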
static int mmap_init(AVFormatContext *ctx)
{
    int i, res;
    struct video_data *s = ctx->priv_data;
    struct v4l2_requestbuffers req = {
        .type   = V4L2_BUF_TYPE_VIDEO_CAPTURE,
        .count  = desired_video_buffers,
        .memory = V4L2_MEMORY_MMAP
    };

    res = ioctl(s->fd, VIDIOC_REQBUFS, &req);
    if (res < 0) {
        res = AVERROR(errno);
        if (res == AVERROR(EINVAL)) {
            av_log(ctx, AV_LOG_ERROR, "Device does not support mmap\n");
        } else {
            av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_REQBUFS)\n");
        }
        return res;
    }

    if (req.count < 2) {
        av_log(ctx, AV_LOG_ERROR, "Insufficient buffer memory\n");
        return AVERROR(ENOMEM);
    }
    s->buffers = req.count;
    s->buf_start = av_malloc(sizeof(void *) * s->buffers);
    if (!s->buf_start) {
        av_log(ctx, AV_LOG_ERROR, "Cannot allocate buffer pointers\n");
        return AVERROR(ENOMEM);
    }
    s->buf_len = av_malloc(sizeof(unsigned int) * s->buffers);
    if (!s->buf_len) {
        av_log(ctx, AV_LOG_ERROR, "Cannot allocate buffer sizes\n");
        av_free(s->buf_start);
        return AVERROR(ENOMEM);
    }

    for (i = 0; i < req.count; i++) {
        struct v4l2_buffer buf = {
            .type   = V4L2_BUF_TYPE_VIDEO_CAPTURE,
            .index  = i,
            .memory = V4L2_MEMORY_MMAP
        };

        res = ioctl(s->fd, VIDIOC_QUERYBUF, &buf);
        if (res < 0) {
            res = AVERROR(errno);
            av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_QUERYBUF)\n");
            return res;
        }

        s->buf_len[i] = buf.length;
        if (s->frame_size > 0 && s->buf_len[i] < s->frame_size) {
            av_log(ctx, AV_LOG_ERROR,
                   "Buffer len [%d] = %d != %d\n",
                   i, s->buf_len[i], s->frame_size);
            return -1;
        }
        s->buf_start[i] = mmap(NULL, buf.length,
                               PROT_READ | PROT_WRITE, MAP_SHARED,
                               s->fd, buf.m.offset);
        if (s->buf_start[i] == MAP_FAILED) {
            char errbuf[128];
            res = AVERROR(errno);
            av_strerror(res, errbuf, sizeof(errbuf));
            av_log(ctx, AV_LOG_ERROR, "mmap: %s\n", errbuf);
            return res;
        }
    }

    return 0;
}

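/* AVBuffer free callback for zero-copy packets: once the packet is no
 * longer referenced, re-queue the underlying V4L2 buffer with
 * VIDIOC_QBUF and update the queued-buffer counter. */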
static void mmap_release_buffer(void *opaque, uint8_t *data)
{
    struct v4l2_buffer buf = { 0 };
    int res, fd;
    struct buff_data *buf_descriptor = opaque;
    struct video_data *s = buf_descriptor->s;
    char errbuf[128];

    buf.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;
    buf.index  = buf_descriptor->index;
    fd = buf_descriptor->fd;
    av_free(buf_descriptor);

    res = ioctl(fd, VIDIOC_QBUF, &buf);
    if (res < 0) {
        av_strerror(AVERROR(errno), errbuf, sizeof(errbuf));
        av_log(NULL, AV_LOG_ERROR, "ioctl(VIDIOC_QBUF): %s\n",
               errbuf);
    }

    atomic_fetch_add(&s->buffers_queued, 1);
}

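/* Dequeue one filled buffer (VIDIOC_DQBUF) and wrap it into pkt.
 * Normally the frame is handed out zero-copy and returned to the
 * driver from mmap_release_buffer(); when the number of queued
 * buffers gets low, the data is copied and the buffer is re-queued
 * immediately instead. */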
static int mmap_read_frame(AVFormatContext *ctx, AVPacket *pkt)
{
    struct video_data *s = ctx->priv_data;
    struct v4l2_buffer buf = {
        .type   = V4L2_BUF_TYPE_VIDEO_CAPTURE,
        .memory = V4L2_MEMORY_MMAP
    };
    struct pollfd p = { .fd = s->fd, .events = POLLIN };
    int res;

    res = poll(&p, 1, s->timeout);
    if (res < 0)
        return AVERROR(errno);

    if (!(p.revents & (POLLIN | POLLERR | POLLHUP)))
        return AVERROR(EAGAIN);

    /* FIXME: Some special treatment might be needed in case of loss of signal... */
    while ((res = ioctl(s->fd, VIDIOC_DQBUF, &buf)) < 0 && (errno == EINTR));
    if (res < 0) {
        char errbuf[128];
        if (errno == EAGAIN) {
            pkt->size = 0;
            return AVERROR(EAGAIN);
        }
        res = AVERROR(errno);
        av_strerror(res, errbuf, sizeof(errbuf));
        av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_DQBUF): %s\n",
               errbuf);
        return res;
    }

    if (buf.index >= s->buffers) {
        av_log(ctx, AV_LOG_ERROR, "Invalid buffer index received.\n");
        return AVERROR(EINVAL);
    }
    atomic_fetch_add(&s->buffers_queued, -1);
    // always keep at least one buffer queued
    av_assert0(atomic_load(&s->buffers_queued) >= 1);

    if (s->frame_size > 0 && buf.bytesused != s->frame_size) {
        av_log(ctx, AV_LOG_ERROR,
               "The v4l2 frame is %d bytes, but %d bytes are expected\n",
               buf.bytesused, s->frame_size);
        return AVERROR_INVALIDDATA;
    }

    /* Image is at s->buf_start[buf.index] */
    if (atomic_load(&s->buffers_queued) == FFMAX(s->buffers / 8, 1)) {
        /* when we start getting low on queued buffers, fall back on copying data */
        res = av_new_packet(pkt, buf.bytesused);
        if (res < 0) {
            av_log(ctx, AV_LOG_ERROR, "Error allocating a packet.\n");
            return res;
        }
        memcpy(pkt->data, s->buf_start[buf.index], buf.bytesused);

        res = ioctl(s->fd, VIDIOC_QBUF, &buf);
        if (res < 0) {
            res = AVERROR(errno);
            av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_QBUF)\n");
            av_packet_unref(pkt);
            return res;
        }
        atomic_fetch_add(&s->buffers_queued, 1);
    } else {
        struct buff_data *buf_descriptor;

        pkt->data = s->buf_start[buf.index];
        pkt->size = buf.bytesused;

        buf_descriptor = av_malloc(sizeof(struct buff_data));
        if (!buf_descriptor) {
            /* Something went wrong... Since av_malloc() failed, we cannot even
             * allocate a buffer for memcpying into it
             */
            av_log(ctx, AV_LOG_ERROR, "Failed to allocate a buffer descriptor\n");
            res = ioctl(s->fd, VIDIOC_QBUF, &buf);

            return AVERROR(ENOMEM);
        }
        buf_descriptor->fd    = s->fd;
        buf_descriptor->index = buf.index;
        buf_descriptor->s     = s;

        pkt->buf = av_buffer_create(pkt->data, pkt->size, mmap_release_buffer,
                                    buf_descriptor, 0);
        if (!pkt->buf) {
            av_freep(&buf_descriptor);
            return AVERROR(ENOMEM);
        }
    }
    pkt->pts = buf.timestamp.tv_sec * INT64_C(1000000) + buf.timestamp.tv_usec;

    return s->buf_len[buf.index];
}

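/* Queue every mapped buffer and start streaming (VIDIOC_STREAMON). */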
static int mmap_start(AVFormatContext *ctx)
{
    struct video_data *s = ctx->priv_data;
    enum v4l2_buf_type type;
    int i, res, err;
    char errbuf[128];

    for (i = 0; i < s->buffers; i++) {
        struct v4l2_buffer buf = {
            .type   = V4L2_BUF_TYPE_VIDEO_CAPTURE,
            .index  = i,
            .memory = V4L2_MEMORY_MMAP
        };

        res = ioctl(s->fd, VIDIOC_QBUF, &buf);
        if (res < 0) {
            err = AVERROR(errno);
            av_strerror(err, errbuf, sizeof(errbuf));
            av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_QBUF): %s\n",
                   errbuf);
            return err;
        }
    }
    atomic_store(&s->buffers_queued, s->buffers);

    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    res = ioctl(s->fd, VIDIOC_STREAMON, &type);
    if (res < 0) {
        err = AVERROR(errno);
        av_strerror(err, errbuf, sizeof(errbuf));
        av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_STREAMON): %s\n",
               errbuf);
        return err;
    }

    return 0;
}

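/* Stop streaming and unmap all capture buffers. */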
static void mmap_close(struct video_data *s)
{
    enum v4l2_buf_type type;
    int i;

    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    /* We do not check for the result, because we could
     * not do anything about it anyway...
     */
    ioctl(s->fd, VIDIOC_STREAMOFF, &type);
    for (i = 0; i < s->buffers; i++) {
        munmap(s->buf_start[i], s->buf_len[i]);
    }
    av_free(s->buf_start);
    av_free(s->buf_len);
}

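/* Select the input channel, TV standard and frame rate requested by
 * the private options, and derive the poll() timeout from the
 * resulting average frame rate. */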
static int v4l2_set_parameters(AVFormatContext *s1)
{
    struct video_data *s = s1->priv_data;
    struct v4l2_input input = { 0 };
    struct v4l2_standard standard = { 0 };
    struct v4l2_streamparm streamparm = { 0 };
    struct v4l2_fract *tpf = &streamparm.parm.capture.timeperframe;
    AVRational framerate_q = { 0 };
    int i, ret;

    streamparm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    if (s->framerate &&
        (ret = av_parse_video_rate(&framerate_q, s->framerate)) < 0) {
        av_log(s1, AV_LOG_ERROR, "Could not parse framerate '%s'.\n",
               s->framerate);
        return ret;
    }

    /* set tv video input */
    input.index = s->channel;
    if (ioctl(s->fd, VIDIOC_ENUMINPUT, &input) < 0) {
        av_log(s1, AV_LOG_ERROR, "The V4L2 driver ioctl enum input failed:\n");
        return AVERROR(EIO);
    }

    av_log(s1, AV_LOG_DEBUG, "The V4L2 driver set input_id: %d, input: %s\n",
           s->channel, input.name);
    if (ioctl(s->fd, VIDIOC_S_INPUT, &input.index) < 0) {
        av_log(s1, AV_LOG_ERROR,
               "The V4L2 driver ioctl set input(%d) failed\n",
               s->channel);
        return AVERROR(EIO);
    }

    if (s->standard) {
        av_log(s1, AV_LOG_DEBUG, "The V4L2 driver set standard: %s\n",
               s->standard);
        /* set tv standard */
        for (i = 0; ; i++) {
            standard.index = i;
            if (ioctl(s->fd, VIDIOC_ENUMSTD, &standard) < 0) {
                av_log(s1, AV_LOG_ERROR,
                       "The V4L2 driver ioctl set standard(%s) failed\n",
                       s->standard);
                return AVERROR(EIO);
            }

            if (!av_strcasecmp(standard.name, s->standard)) {
                break;
            }
        }

        av_log(s1, AV_LOG_DEBUG,
               "The V4L2 driver set standard: %s, id: %"PRIu64"\n",
               s->standard, (uint64_t)standard.id);
        if (ioctl(s->fd, VIDIOC_S_STD, &standard.id) < 0) {
            av_log(s1, AV_LOG_ERROR,
                   "The V4L2 driver ioctl set standard(%s) failed\n",
                   s->standard);
            return AVERROR(EIO);
        }
    }

    if (framerate_q.num && framerate_q.den) {
        av_log(s1, AV_LOG_DEBUG, "Setting time per frame to %d/%d\n",
               framerate_q.den, framerate_q.num);
        tpf->numerator   = framerate_q.den;
        tpf->denominator = framerate_q.num;
        if (ioctl(s->fd, VIDIOC_S_PARM, &streamparm) != 0) {
            av_log(s1, AV_LOG_ERROR,
                   "ioctl set time per frame(%d/%d) failed\n",
                   framerate_q.den, framerate_q.num);
            return AVERROR(EIO);
        }

        if (framerate_q.num != tpf->denominator ||
            framerate_q.den != tpf->numerator) {
            av_log(s1, AV_LOG_INFO,
                   "The driver changed the time per frame from "
                   "%d/%d to %d/%d\n",
                   framerate_q.den, framerate_q.num,
                   tpf->numerator, tpf->denominator);
        }
    } else {
        if (ioctl(s->fd, VIDIOC_G_PARM, &streamparm) != 0) {
            char errbuf[128];
            ret = AVERROR(errno);
            av_strerror(ret, errbuf, sizeof(errbuf));
            av_log(s1, AV_LOG_ERROR, "ioctl(VIDIOC_G_PARM): %s\n",
                   errbuf);
            return ret;
        }
    }
    s1->streams[0]->avg_frame_rate.num = tpf->denominator;
    s1->streams[0]->avg_frame_rate.den = tpf->numerator;

    s->timeout = 100 +
        av_rescale_q(1, s1->streams[0]->avg_frame_rate,
                     (AVRational){ 1, 1000 });

    return 0;
}

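/* Try to initialize the device with the requested pixel format; if
 * that fails, fall back to the first entry of fmt_conversion_table
 * (matching the requested codec, if any) that the driver accepts. */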
static uint32_t device_try_init(AVFormatContext *s1,
                                enum AVPixelFormat pix_fmt,
                                int *width,
                                int *height,
                                enum AVCodecID *codec_id)
{
    uint32_t desired_format = fmt_ff2v4l(pix_fmt, s1->video_codec_id);

    if (desired_format == 0 ||
        device_init(s1, width, height, desired_format) < 0) {
        int i;

        desired_format = 0;
        for (i = 0; i < FF_ARRAY_ELEMS(fmt_conversion_table); i++) {
            if (s1->video_codec_id == AV_CODEC_ID_NONE ||
                fmt_conversion_table[i].codec_id == s1->video_codec_id) {
                desired_format = fmt_conversion_table[i].v4l2_fmt;
                if (device_init(s1, width, height, desired_format) >= 0) {
                    break;
                }
                desired_format = 0;
            }
        }
    }

    if (desired_format != 0) {
        *codec_id = fmt_v4l2codec(desired_format);
        av_assert0(*codec_id != AV_CODEC_ID_NONE);
    }

    return desired_format;
}

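/* Demuxer read_header callback: open and configure the device, set up
 * memory-mapped capture and fill in the stream parameters. */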
static int v4l2_read_header(AVFormatContext *s1)
{
    struct video_data *s = s1->priv_data;
    AVStream *st;
    int res = 0;
    uint32_t desired_format;
    enum AVCodecID codec_id;
    enum AVPixelFormat pix_fmt = AV_PIX_FMT_NONE;

    st = avformat_new_stream(s1, NULL);
    if (!st)
        return AVERROR(ENOMEM);

    s->fd = device_open(s1);
    if (s->fd < 0)
        return s->fd;

    if (s->list_format) {
        list_formats(s1, s->fd, s->list_format);
        return AVERROR_EXIT;
    }

    avpriv_set_pts_info(st, 64, 1, 1000000); /* 64 bits pts in us */

    if (s->video_size &&
        (res = av_parse_video_size(&s->width, &s->height, s->video_size)) < 0) {
        av_log(s1, AV_LOG_ERROR, "Could not parse video size '%s'.\n",
               s->video_size);
        return res;
    }

    if (s->pixel_format) {
        AVCodec *codec = avcodec_find_decoder_by_name(s->pixel_format);

        if (codec) {
            s1->video_codec_id = codec->id;
            st->need_parsing = AVSTREAM_PARSE_HEADERS;
        }

        pix_fmt = av_get_pix_fmt(s->pixel_format);

        if (pix_fmt == AV_PIX_FMT_NONE && !codec) {
            av_log(s1, AV_LOG_ERROR, "No such input format: %s.\n",
                   s->pixel_format);
            return AVERROR(EINVAL);
        }
    }

    if (!s->width && !s->height) {
        struct v4l2_format fmt;

        av_log(s1, AV_LOG_VERBOSE,
               "Querying the device for the current frame size\n");
        fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        if (ioctl(s->fd, VIDIOC_G_FMT, &fmt) < 0) {
            char errbuf[128];
            res = AVERROR(errno);
            av_strerror(res, errbuf, sizeof(errbuf));
            av_log(s1, AV_LOG_ERROR, "ioctl(VIDIOC_G_FMT): %s\n",
                   errbuf);
            return res;
        }

        s->width  = fmt.fmt.pix.width;
        s->height = fmt.fmt.pix.height;
        av_log(s1, AV_LOG_VERBOSE,
               "Setting frame size to %dx%d\n", s->width, s->height);
    }

    desired_format = device_try_init(s1, pix_fmt, &s->width, &s->height,
                                     &codec_id);
    if (desired_format == 0) {
        av_log(s1, AV_LOG_ERROR, "Cannot find a proper format for "
               "codec_id %d, pix_fmt %d.\n", s1->video_codec_id, pix_fmt);
        close(s->fd);

        return AVERROR(EIO);
    }

    if ((res = av_image_check_size(s->width, s->height, 0, s1)) < 0)
        return res;

    s->frame_format = desired_format;

    if ((res = v4l2_set_parameters(s1)) < 0)
        return res;

    st->codecpar->format = fmt_v4l2ff(desired_format, codec_id);
    s->frame_size = av_image_get_buffer_size(st->codecpar->format,
                                             s->width, s->height, 1);

    if ((res = mmap_init(s1)) ||
        (res = mmap_start(s1)) < 0) {
        close(s->fd);
        return res;
    }

    s->top_field_first = first_field(s->fd);

    st->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
    st->codecpar->codec_id   = codec_id;
    if (codec_id == AV_CODEC_ID_RAWVIDEO)
        st->codecpar->codec_tag =
            avcodec_pix_fmt_to_codec_tag(st->codecpar->format);
    st->codecpar->width    = s->width;
    st->codecpar->height   = s->height;
    st->codecpar->bit_rate = s->frame_size * av_q2d(st->avg_frame_rate) * 8;

    return 0;
}

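/* Demuxer read_packet callback: fetch one frame and, when the
 * deprecated coded_frame API is still available, propagate the
 * interlacing information to it. */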
static int v4l2_read_packet(AVFormatContext *s1, AVPacket *pkt)
{
#if FF_API_CODED_FRAME && FF_API_LAVF_AVCTX
FF_DISABLE_DEPRECATION_WARNINGS
    struct video_data *s = s1->priv_data;
    AVFrame *frame = s1->streams[0]->codec->coded_frame;
FF_ENABLE_DEPRECATION_WARNINGS
#endif
    int res;

    if ((res = mmap_read_frame(s1, pkt)) < 0) {
        return res;
    }

#if FF_API_CODED_FRAME && FF_API_LAVF_AVCTX
FF_DISABLE_DEPRECATION_WARNINGS
    if (frame && s->interlaced) {
        frame->interlaced_frame = 1;
        frame->top_field_first = s->top_field_first;
    }
FF_ENABLE_DEPRECATION_WARNINGS
#endif

    return pkt->size;
}

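/* Demuxer read_close callback: warn about buffers still owned by the
 * caller, then stop streaming and close the device. */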
static int v4l2_read_close(AVFormatContext *s1)
{
    struct video_data *s = s1->priv_data;

    if (atomic_load(&s->buffers_queued) != s->buffers)
        av_log(s1, AV_LOG_WARNING, "Some buffers are still owned by the caller on "
               "close.\n");

    mmap_close(s);

    close(s->fd);
    return 0;
}

#define OFFSET(x) offsetof(struct video_data, x)
#define DEC AV_OPT_FLAG_DECODING_PARAM
static const AVOption options[] = {
    { "standard",     "TV standard, used only by analog frame grabber",            OFFSET(standard),     AV_OPT_TYPE_STRING, {.str = NULL },            0, 0,       DEC },
    { "channel",      "TV channel, used only by frame grabber",                    OFFSET(channel),      AV_OPT_TYPE_INT,    {.i64 = 0 },               0, INT_MAX, DEC },
    { "video_size",   "A string describing frame size, such as 640x480 or hd720.", OFFSET(video_size),   AV_OPT_TYPE_STRING, {.str = NULL},             0, 0,       DEC },
    { "pixel_format", "Preferred pixel format",                                    OFFSET(pixel_format), AV_OPT_TYPE_STRING, {.str = NULL},             0, 0,       DEC },
    { "input_format", "Preferred pixel format (for raw video) or codec name",      OFFSET(pixel_format), AV_OPT_TYPE_STRING, {.str = NULL},             0, 0,       DEC },
    { "framerate",    "",                                                          OFFSET(framerate),    AV_OPT_TYPE_STRING, {.str = NULL},             0, 0,       DEC },
    { "list_formats", "List available formats and exit",                           OFFSET(list_format),  AV_OPT_TYPE_INT,    {.i64 = 0 },               0, INT_MAX, DEC, "list_formats" },
    { "all",          "Show all available formats",                                OFFSET(list_format),  AV_OPT_TYPE_CONST,  {.i64 = V4L_ALLFORMATS },  0, INT_MAX, DEC, "list_formats" },
    { "raw",          "Show only non-compressed formats",                          OFFSET(list_format),  AV_OPT_TYPE_CONST,  {.i64 = V4L_RAWFORMATS },  0, INT_MAX, DEC, "list_formats" },
    { "compressed",   "Show only compressed formats",                              OFFSET(list_format),  AV_OPT_TYPE_CONST,  {.i64 = V4L_COMPFORMATS }, 0, INT_MAX, DEC, "list_formats" },
    { NULL },
};

static const AVClass v4l2_class = {
    .class_name = "V4L2 indev",
    .item_name  = av_default_item_name,
    .option     = options,
    .version    = LIBAVUTIL_VERSION_INT,
};

AVInputFormat ff_v4l2_demuxer = {
    .name           = "video4linux2",
    .long_name      = NULL_IF_CONFIG_SMALL("Video4Linux2 device grab"),
    .priv_data_size = sizeof(struct video_data),
    .read_header    = v4l2_read_header,
    .read_packet    = v4l2_read_packet,
    .read_close     = v4l2_read_close,
    .flags          = AVFMT_NOFILE,
    .priv_class     = &v4l2_class,
};