/*
 * Copyright (c) 2000,2001 Fabrice Bellard
 * Copyright (c) 2006 Luca Abeni
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * Video4Linux2 grab interface
 *
 * Part of this file is based on the V4L2 video capture example
 * (http://linuxtv.org/downloads/v4l-dvb-apis/capture-example.html)
 *
 * Thanks to Michael Niedermayer for providing the mapping between
 * V4L2_PIX_FMT_* and AV_PIX_FMT_*
 */

#include <stdatomic.h>

#include "libavutil/avassert.h"
#include "libavutil/avstring.h"
#include "libavutil/imgutils.h"
#include "libavutil/mem.h"
#include "libavutil/parseutils.h"
#include "libavutil/pixdesc.h"
#include "libavutil/time.h"
#include "libavcodec/avcodec.h"
#include "libavcodec/codec_desc.h"
#include "libavformat/demux.h"
#include "libavformat/internal.h"
#include "avdevice.h"
#include "timefilter.h"
#include "v4l2-common.h"
#include <dirent.h>

#if CONFIG_LIBV4L2
#include <libv4l2.h>
#endif

static const int desired_video_buffers = 256;

#define V4L_ALLFORMATS  3
#define V4L_RAWFORMATS  1
#define V4L_COMPFORMATS 2

/**
 * Return timestamps to the user exactly as returned by the kernel
 */
#define V4L_TS_DEFAULT  0
/**
 * Autodetect the kind of timestamps returned by the kernel and convert to
 * absolute (wall clock) timestamps.
 */
#define V4L_TS_ABS      1
/**
 * Assume kernel timestamps are from the monotonic clock and convert to
 * absolute timestamps.
 */
#define V4L_TS_MONO2ABS 2

/**
 * Once the kind of timestamps returned by the kernel has been detected,
 * the value of the timefilter (NULL or not) determines whether a conversion
 * takes place.
 */
#define V4L_TS_CONVERT_READY V4L_TS_DEFAULT

struct video_data {
    AVClass *class;
    int fd;
    int pixelformat; /* V4L2_PIX_FMT_* */
    int width, height;
    int frame_size;
    int interlaced;
    int top_field_first;
    int ts_mode;
    TimeFilter *timefilter;
    int64_t last_time_m;

    int multiplanar;
    enum v4l2_buf_type buf_type;

    int buffers;
    atomic_int buffers_queued;
    void **buf_start;
    unsigned int *buf_len;
    char *standard;
    v4l2_std_id std_id;
    int channel;
    char *pixel_format; /**< Set by a private option. */
    int list_format;    /**< Set by a private option. */
    int list_standard;  /**< Set by a private option. */
    char *framerate;    /**< Set by a private option. */

    int use_libv4l2;
    int (*open_f)(const char *file, int oflag, ...);
    int (*close_f)(int fd);
    int (*dup_f)(int fd);
#if defined(__sun) || defined(__BIONIC__) || defined(__musl__) /* POSIX-like */
    int (*ioctl_f)(int fd, int request, ...);
#else
    int (*ioctl_f)(int fd, unsigned long int request, ...);
#endif
    ssize_t (*read_f)(int fd, void *buffer, size_t n);
    void *(*mmap_f)(void *start, size_t length, int prot, int flags, int fd, int64_t offset);
    int (*munmap_f)(void *_start, size_t length);
};

struct buff_data {
    struct video_data *s;
    int index;
};
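
/* Open the device node, install either the plain libc or the libv4l2 I/O
 * wrappers, query the capabilities and pick single- or multi-planar video
 * capture. Requires streaming I/O support. Returns the file descriptor on
 * success or a negative AVERROR code on failure. */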
static int device_open(AVFormatContext *ctx, const char* device_path)
{
    struct video_data *s = ctx->priv_data;
    struct v4l2_capability cap;
    int fd;
    int err;
    int flags = O_RDWR;

#define SET_WRAPPERS(prefix) do {       \
    s->open_f   = prefix ## open;       \
    s->close_f  = prefix ## close;      \
    s->dup_f    = prefix ## dup;        \
    s->ioctl_f  = prefix ## ioctl;      \
    s->read_f   = prefix ## read;       \
    s->mmap_f   = prefix ## mmap;       \
    s->munmap_f = prefix ## munmap;     \
} while (0)

    if (s->use_libv4l2) {
#if CONFIG_LIBV4L2
        SET_WRAPPERS(v4l2_);
#else
        av_log(ctx, AV_LOG_ERROR, "libavdevice is not built with libv4l2 support.\n");
        return AVERROR(EINVAL);
#endif
    } else {
        SET_WRAPPERS();
    }

#define v4l2_open   s->open_f
#define v4l2_close  s->close_f
#define v4l2_dup    s->dup_f
#define v4l2_ioctl  s->ioctl_f
#define v4l2_read   s->read_f
#define v4l2_mmap   s->mmap_f
#define v4l2_munmap s->munmap_f

    if (ctx->flags & AVFMT_FLAG_NONBLOCK) {
        flags |= O_NONBLOCK;
    }

    fd = v4l2_open(device_path, flags, 0);
    if (fd < 0) {
        err = AVERROR(errno);
        av_log(ctx, AV_LOG_ERROR, "Cannot open video device %s: %s\n",
               device_path, av_err2str(err));
        return err;
    }

    if (v4l2_ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0) {
        err = AVERROR(errno);
        av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_QUERYCAP): %s\n",
               av_err2str(err));
        goto fail;
    }

    av_log(ctx, AV_LOG_VERBOSE, "fd:%d capabilities:%x\n",
           fd, cap.capabilities);

    if (cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) {
        s->multiplanar = 0;
        s->buf_type    = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    } else if (cap.capabilities & V4L2_CAP_VIDEO_CAPTURE_MPLANE) {
        s->multiplanar = 1;
        s->buf_type    = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    } else {
        av_log(ctx, AV_LOG_ERROR, "Not a video capture device.\n");
        err = AVERROR(ENODEV);
        goto fail;
    }

    if (!(cap.capabilities & V4L2_CAP_STREAMING)) {
        av_log(ctx, AV_LOG_ERROR,
               "The device does not support the streaming I/O method.\n");
        err = AVERROR(ENOSYS);
        goto fail;
    }

    return fd;

fail:
    v4l2_close(fd);
    return err;
}
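
/* Ask the driver to switch to the requested frame size and pixel format via
 * VIDIOC_S_FMT. The width/height pointers are updated if the driver adjusts
 * the dimensions; a changed pixel format is reported as AVERROR(EINVAL). */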
static int device_init(AVFormatContext *ctx, int *width, int *height,
                       uint32_t pixelformat)
{
    struct video_data *s = ctx->priv_data;
    struct v4l2_format fmt = { .type = s->buf_type };
    int res = 0;

    fmt.fmt.pix.width = *width;
    fmt.fmt.pix.height = *height;
    fmt.fmt.pix.pixelformat = pixelformat;
    fmt.fmt.pix.field = V4L2_FIELD_ANY;

    /* Some drivers will fail and return EINVAL when the pixelformat
       is not supported (even if type field is valid and supported) */
    if (v4l2_ioctl(s->fd, VIDIOC_S_FMT, &fmt) < 0)
        res = AVERROR(errno);

    if ((*width != fmt.fmt.pix.width) || (*height != fmt.fmt.pix.height)) {
        av_log(ctx, AV_LOG_INFO,
               "The V4L2 driver changed the video from %dx%d to %dx%d\n",
               *width, *height, fmt.fmt.pix.width, fmt.fmt.pix.height);
        *width = fmt.fmt.pix.width;
        *height = fmt.fmt.pix.height;
    }

    if (pixelformat != fmt.fmt.pix.pixelformat) {
        av_log(ctx, AV_LOG_DEBUG,
               "The V4L2 driver changed the pixel format "
               "from 0x%08X to 0x%08X\n",
               pixelformat, fmt.fmt.pix.pixelformat);
        res = AVERROR(EINVAL);
    }

    if (fmt.fmt.pix.field == V4L2_FIELD_INTERLACED) {
        av_log(ctx, AV_LOG_DEBUG,
               "The V4L2 driver is using the interlaced mode\n");
        s->interlaced = 1;
    }

    return res;
}
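
/* Derive the field order from the current video standard: returns 0 (bottom
 * field first) if the standard cannot be queried or is NTSC, 1 (top field
 * first) otherwise. Used to set top_field_first. */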
static int first_field(const struct video_data *s)
{
    int res;
    v4l2_std_id std;

    res = v4l2_ioctl(s->fd, VIDIOC_G_STD, &std);
    if (res < 0)
        return 0;
    if (std & V4L2_STD_NTSC)
        return 0;

    return 1;
}

#if HAVE_STRUCT_V4L2_FRMIVALENUM_DISCRETE
static void list_framesizes(AVFormatContext *ctx, uint32_t pixelformat)
{
    const struct video_data *s = ctx->priv_data;
    struct v4l2_frmsizeenum vfse = { .pixel_format = pixelformat };

    while(!v4l2_ioctl(s->fd, VIDIOC_ENUM_FRAMESIZES, &vfse)) {
        switch (vfse.type) {
        case V4L2_FRMSIZE_TYPE_DISCRETE:
            av_log(ctx, AV_LOG_INFO, " %ux%u",
                   vfse.discrete.width, vfse.discrete.height);
            break;
        case V4L2_FRMSIZE_TYPE_CONTINUOUS:
        case V4L2_FRMSIZE_TYPE_STEPWISE:
            av_log(ctx, AV_LOG_INFO, " {%u-%u, %u}x{%u-%u, %u}",
                   vfse.stepwise.min_width,
                   vfse.stepwise.max_width,
                   vfse.stepwise.step_width,
                   vfse.stepwise.min_height,
                   vfse.stepwise.max_height,
                   vfse.stepwise.step_height);
        }
        vfse.index++;
    }
}
#endif
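
/* Print the pixel formats reported by VIDIOC_ENUM_FMT, filtered by the
 * requested type (raw and/or compressed), together with the frame sizes
 * supported for each format. */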
static void list_formats(AVFormatContext *ctx, int type)
{
    const struct video_data *s = ctx->priv_data;
    struct v4l2_fmtdesc vfd = { .type = s->buf_type };

    while(!v4l2_ioctl(s->fd, VIDIOC_ENUM_FMT, &vfd)) {
        enum AVCodecID codec_id = ff_fmt_v4l2codec(vfd.pixelformat);
        enum AVPixelFormat pix_fmt = ff_fmt_v4l2ff(vfd.pixelformat, codec_id);

        vfd.index++;

        if (!(vfd.flags & V4L2_FMT_FLAG_COMPRESSED) &&
            type & V4L_RAWFORMATS) {
            const char *fmt_name = av_get_pix_fmt_name(pix_fmt);
            av_log(ctx, AV_LOG_INFO, "Raw       : %11s : %20s :",
                   fmt_name ? fmt_name : "Unsupported",
                   vfd.description);
        } else if (vfd.flags & V4L2_FMT_FLAG_COMPRESSED &&
                   type & V4L_COMPFORMATS) {
            const AVCodecDescriptor *desc = avcodec_descriptor_get(codec_id);
            av_log(ctx, AV_LOG_INFO, "Compressed: %11s : %20s :",
                   desc ? desc->name : "Unsupported",
                   vfd.description);
        } else {
            continue;
        }

#ifdef V4L2_FMT_FLAG_EMULATED
        if (vfd.flags & V4L2_FMT_FLAG_EMULATED)
            av_log(ctx, AV_LOG_INFO, " Emulated :");
#endif
#if HAVE_STRUCT_V4L2_FRMIVALENUM_DISCRETE
        list_framesizes(ctx, vfd.pixelformat);
#endif
        av_log(ctx, AV_LOG_INFO, "\n");
    }
}

static void list_standards(AVFormatContext *ctx)
{
    int ret;
    struct video_data *s = ctx->priv_data;
    struct v4l2_standard standard;

    if (s->std_id == 0)
        return;

    for (standard.index = 0; ; standard.index++) {
        if (v4l2_ioctl(s->fd, VIDIOC_ENUMSTD, &standard) < 0) {
            ret = AVERROR(errno);
            if (ret == AVERROR(EINVAL)) {
                break;
            } else {
                av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_ENUMSTD): %s\n", av_err2str(ret));
                return;
            }
        }
        av_log(ctx, AV_LOG_INFO, "%2d, %16"PRIx64", %s\n",
               standard.index, (uint64_t)standard.id, standard.name);
    }
}
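
/* Request up to desired_video_buffers MMAP buffers from the driver, query
 * each one with VIDIOC_QUERYBUF and map it into our address space. The
 * mappings and their lengths are stored in buf_start[] / buf_len[]. */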
static int mmap_init(AVFormatContext *ctx)
{
    int i, res;
    struct video_data *s = ctx->priv_data;
    struct v4l2_requestbuffers req = {
        .type   = s->buf_type,
        .count  = desired_video_buffers,
        .memory = V4L2_MEMORY_MMAP
    };

    if (v4l2_ioctl(s->fd, VIDIOC_REQBUFS, &req) < 0) {
        res = AVERROR(errno);
        av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_REQBUFS): %s\n", av_err2str(res));
        return res;
    }

    if (req.count < 2) {
        av_log(ctx, AV_LOG_ERROR, "Insufficient buffer memory\n");
        return AVERROR(ENOMEM);
    }
    s->buffers = req.count;
    s->buf_start = av_malloc_array(s->buffers, sizeof(void *));
    if (!s->buf_start) {
        av_log(ctx, AV_LOG_ERROR, "Cannot allocate buffer pointers\n");
        return AVERROR(ENOMEM);
    }
    s->buf_len = av_malloc_array(s->buffers, sizeof(unsigned int));
    if (!s->buf_len) {
        av_log(ctx, AV_LOG_ERROR, "Cannot allocate buffer sizes\n");
        av_freep(&s->buf_start);
        return AVERROR(ENOMEM);
    }

    for (i = 0; i < req.count; i++) {
        unsigned int buf_length, buf_offset;
        struct v4l2_plane planes[VIDEO_MAX_PLANES];
        struct v4l2_buffer buf = {
            .type     = s->buf_type,
            .index    = i,
            .memory   = V4L2_MEMORY_MMAP,
            .m.planes = s->multiplanar ? planes : NULL,
            .length   = s->multiplanar ? VIDEO_MAX_PLANES : 0,
        };
        if (v4l2_ioctl(s->fd, VIDIOC_QUERYBUF, &buf) < 0) {
            res = AVERROR(errno);
            av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_QUERYBUF): %s\n", av_err2str(res));
            return res;
        }

        if (s->multiplanar) {
            if (buf.length != 1) {
                av_log(ctx, AV_LOG_ERROR, "multiplanar only supported when buf.length == 1\n");
                return AVERROR_PATCHWELCOME;
            }
            buf_length = buf.m.planes[0].length;
            buf_offset = buf.m.planes[0].m.mem_offset;
        } else {
            buf_length = buf.length;
            buf_offset = buf.m.offset;
        }

        s->buf_len[i] = buf_length;
        if (s->frame_size > 0 && s->buf_len[i] < s->frame_size) {
            av_log(ctx, AV_LOG_ERROR,
                   "buf_len[%d] = %d < expected frame size %d\n",
                   i, s->buf_len[i], s->frame_size);
            return AVERROR(ENOMEM);
        }
        s->buf_start[i] = v4l2_mmap(NULL, buf_length,
                                    PROT_READ | PROT_WRITE, MAP_SHARED,
                                    s->fd, buf_offset);

        if (s->buf_start[i] == MAP_FAILED) {
            res = AVERROR(errno);
            av_log(ctx, AV_LOG_ERROR, "mmap: %s\n", av_err2str(res));
            return res;
        }
    }

    return 0;
}
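
/* Hand a buffer back to the driver with VIDIOC_QBUF and increment the count
 * of queued buffers on success. */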
static int enqueue_buffer(struct video_data *s, struct v4l2_buffer *buf)
{
    int res = 0;

    if (v4l2_ioctl(s->fd, VIDIOC_QBUF, buf) < 0) {
        res = AVERROR(errno);
        av_log(NULL, AV_LOG_ERROR, "ioctl(VIDIOC_QBUF): %s\n", av_err2str(res));
    } else {
        atomic_fetch_add(&s->buffers_queued, 1);
    }

    return res;
}
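
/* AVBuffer free callback for zero-copy packets: re-queue the mmap buffer
 * that backed the packet data once the last reference is released, and free
 * the buff_data descriptor. */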
static void mmap_release_buffer(void *opaque, uint8_t *data)
{
    struct v4l2_plane planes[VIDEO_MAX_PLANES];
    struct v4l2_buffer buf = { 0 };
    struct buff_data *buf_descriptor = opaque;
    struct video_data *s = buf_descriptor->s;

    buf.type = s->buf_type;
    buf.memory = V4L2_MEMORY_MMAP;
    buf.index = buf_descriptor->index;
    buf.m.planes = s->multiplanar ? planes : NULL;
    buf.length   = s->multiplanar ? VIDEO_MAX_PLANES : 0;
    av_free(buf_descriptor);

    enqueue_buffer(s, &buf);
}

#if HAVE_CLOCK_GETTIME && defined(CLOCK_MONOTONIC)
static int64_t av_gettime_monotonic(void)
{
    return av_gettime_relative();
}
#endif
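
/* Inspect the first kernel timestamp to decide whether it is absolute or
 * monotonic; in the monotonic case, set up a timefilter so later timestamps
 * can be converted to wall-clock time. */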
static int init_convert_timestamp(AVFormatContext *ctx, int64_t ts)
{
    struct video_data *s = ctx->priv_data;
    int64_t now;

    now = av_gettime();
    if (s->ts_mode == V4L_TS_ABS &&
        ts <= now + 1 * AV_TIME_BASE && ts >= now - 10 * AV_TIME_BASE) {
        av_log(ctx, AV_LOG_INFO, "Detected absolute timestamps\n");
        s->ts_mode = V4L_TS_CONVERT_READY;
        return 0;
    }
#if HAVE_CLOCK_GETTIME && defined(CLOCK_MONOTONIC)
    if (ctx->streams[0]->avg_frame_rate.num) {
        now = av_gettime_monotonic();
        if (s->ts_mode == V4L_TS_MONO2ABS ||
            (ts <= now + 1 * AV_TIME_BASE && ts >= now - 10 * AV_TIME_BASE)) {
            AVRational tb = {AV_TIME_BASE, 1};
            int64_t period = av_rescale_q(1, tb, ctx->streams[0]->avg_frame_rate);
            av_log(ctx, AV_LOG_INFO, "Detected monotonic timestamps, converting\n");
            /* microseconds instead of seconds, MHz instead of Hz */
            s->timefilter = ff_timefilter_new(1, period, 1.0E-6);
            if (!s->timefilter)
                return AVERROR(ENOMEM);
            s->ts_mode = V4L_TS_CONVERT_READY;
            return 0;
        }
    }
#endif
    av_log(ctx, AV_LOG_ERROR, "Unknown timestamps\n");
    return AVERROR(EIO);
}
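
/* Convert a kernel timestamp (in microseconds) to an absolute timestamp:
 * run the detection above while ts_mode is still set, then apply the
 * timefilter if one was created. */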
static int convert_timestamp(AVFormatContext *ctx, int64_t *ts)
{
    struct video_data *s = ctx->priv_data;

    if (s->ts_mode) {
        int r = init_convert_timestamp(ctx, *ts);
        if (r < 0)
            return r;
    }
#if HAVE_CLOCK_GETTIME && defined(CLOCK_MONOTONIC)
    if (s->timefilter) {
        int64_t nowa = av_gettime();
        int64_t nowm = av_gettime_monotonic();
        ff_timefilter_update(s->timefilter, nowa, nowm - s->last_time_m);
        s->last_time_m = nowm;
        *ts = ff_timefilter_eval(s->timefilter, *ts - nowm);
    }
#endif
    return 0;
}
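
/* Dequeue one filled buffer with VIDIOC_DQBUF and turn it into an AVPacket.
 * While plenty of buffers remain queued, the packet references the mmap
 * memory directly (zero copy) and the buffer is re-queued from the AVBuffer
 * callback; once the pool runs low, the data is copied and the buffer is
 * re-queued immediately. */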
static int mmap_read_frame(AVFormatContext *ctx, AVPacket *pkt)
{
    struct video_data *s = ctx->priv_data;
    struct v4l2_plane planes[VIDEO_MAX_PLANES];
    struct v4l2_buffer buf = {
        .type     = s->buf_type,
        .memory   = V4L2_MEMORY_MMAP,
        .m.planes = s->multiplanar ? planes : NULL,
        .length   = s->multiplanar ? VIDEO_MAX_PLANES : 0,
    };
    struct timeval buf_ts;
    unsigned int bytesused;
    int res;

    pkt->size = 0;

    /* FIXME: Some special treatment might be needed in case of loss of signal... */
    while ((res = v4l2_ioctl(s->fd, VIDIOC_DQBUF, &buf)) < 0 && (errno == EINTR));
    if (res < 0) {
        if (errno == EAGAIN)
            return AVERROR(EAGAIN);

        res = AVERROR(errno);
        av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_DQBUF): %s\n",
               av_err2str(res));
        return res;
    }

    buf_ts = buf.timestamp;

    if (buf.index >= s->buffers) {
        av_log(ctx, AV_LOG_ERROR, "Invalid buffer index received.\n");
        return AVERROR(EINVAL);
    }
    atomic_fetch_add(&s->buffers_queued, -1);
    // always keep at least one buffer queued
    av_assert0(atomic_load(&s->buffers_queued) >= 1);

    bytesused = s->multiplanar ? buf.m.planes[0].bytesused : buf.bytesused;

#ifdef V4L2_BUF_FLAG_ERROR
    if (buf.flags & V4L2_BUF_FLAG_ERROR) {
        av_log(ctx, AV_LOG_WARNING,
               "Dequeued v4l2 buffer contains corrupted data (%d bytes).\n",
               bytesused);
        bytesused = 0;
    } else
#endif
    {
        /* CPIA is a compressed format and we don't know the exact number of bytes
         * used by a frame, so set it here as the driver announces it. */
        if (ctx->video_codec_id == AV_CODEC_ID_CPIA)
            s->frame_size = bytesused;

        if (s->frame_size > 0 && bytesused != s->frame_size) {
            av_log(ctx, AV_LOG_WARNING,
                   "Dequeued v4l2 buffer contains %d bytes, but %d were expected. Flags: 0x%08X.\n",
                   bytesused, s->frame_size, buf.flags);
            bytesused = 0;
        }
    }

    /* Image is at s->buf_start[buf.index] */
    if (atomic_load(&s->buffers_queued) == FFMAX(s->buffers / 8, 1)) {
        /* when we start getting low on queued buffers, fall back on copying data */
        res = av_new_packet(pkt, bytesused);
        if (res < 0) {
            av_log(ctx, AV_LOG_ERROR, "Error allocating a packet.\n");
            enqueue_buffer(s, &buf);
            return res;
        }
        memcpy(pkt->data, s->buf_start[buf.index], bytesused);

        res = enqueue_buffer(s, &buf);
        if (res) {
            av_packet_unref(pkt);
            return res;
        }
    } else {
        struct buff_data *buf_descriptor;

        pkt->data = s->buf_start[buf.index];
        pkt->size = bytesused;

        buf_descriptor = av_malloc(sizeof(struct buff_data));
        if (!buf_descriptor) {
            /* Something went wrong... Since av_malloc() failed, we cannot even
             * allocate a buffer for memcpying into it
             */
            av_log(ctx, AV_LOG_ERROR, "Failed to allocate a buffer descriptor\n");
            enqueue_buffer(s, &buf);

            return AVERROR(ENOMEM);
        }
        buf_descriptor->index = buf.index;
        buf_descriptor->s     = s;

        pkt->buf = av_buffer_create(pkt->data, pkt->size, mmap_release_buffer,
                                    buf_descriptor, 0);
        if (!pkt->buf) {
            av_log(ctx, AV_LOG_ERROR, "Failed to create a buffer\n");
            enqueue_buffer(s, &buf);
            av_freep(&buf_descriptor);
            return AVERROR(ENOMEM);
        }
    }
    pkt->pts = buf_ts.tv_sec * INT64_C(1000000) + buf_ts.tv_usec;
    convert_timestamp(ctx, &pkt->pts);

    return pkt->size;
}
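
/* Queue every mapped buffer with VIDIOC_QBUF and start capturing with
 * VIDIOC_STREAMON. */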
static int mmap_start(AVFormatContext *ctx)
{
    struct video_data *s = ctx->priv_data;
    enum v4l2_buf_type type;
    int i, res;

    for (i = 0; i < s->buffers; i++) {
        struct v4l2_plane planes[VIDEO_MAX_PLANES];
        struct v4l2_buffer buf = {
            .type     = s->buf_type,
            .index    = i,
            .memory   = V4L2_MEMORY_MMAP,
            .m.planes = s->multiplanar ? planes : NULL,
            .length   = s->multiplanar ? VIDEO_MAX_PLANES : 0,
        };

        if (v4l2_ioctl(s->fd, VIDIOC_QBUF, &buf) < 0) {
            res = AVERROR(errno);
            av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_QBUF): %s\n",
                   av_err2str(res));
            return res;
        }
    }
    atomic_store(&s->buffers_queued, s->buffers);

    type = s->buf_type;
    if (v4l2_ioctl(s->fd, VIDIOC_STREAMON, &type) < 0) {
        res = AVERROR(errno);
        av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_STREAMON): %s\n",
               av_err2str(res));
        return res;
    }

    return 0;
}
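
/* Stop streaming, unmap all capture buffers and free the bookkeeping
 * arrays. */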
static void mmap_close(struct video_data *s)
{
    enum v4l2_buf_type type;
    int i;

    type = s->buf_type;
    /* We do not check for the result, because we could
     * not do anything about it anyway...
     */
    v4l2_ioctl(s->fd, VIDIOC_STREAMOFF, &type);
    for (i = 0; i < s->buffers; i++) {
        v4l2_munmap(s->buf_start[i], s->buf_len[i]);
    }
    av_freep(&s->buf_start);
    av_freep(&s->buf_len);
}
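
/* Apply the user-selected TV standard and frame rate (VIDIOC_S_STD and
 * VIDIOC_S_PARM), then derive the stream's average frame rate from the time
 * per frame actually in effect. */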
static int v4l2_set_parameters(AVFormatContext *ctx)
{
    struct video_data *s = ctx->priv_data;
    struct v4l2_standard standard = { 0 };
    struct v4l2_streamparm streamparm = { 0 };
    struct v4l2_fract *tpf;
    AVRational framerate_q = { 0 };
    int i, ret;

    if (s->framerate &&
        (ret = av_parse_video_rate(&framerate_q, s->framerate)) < 0) {
        av_log(ctx, AV_LOG_ERROR, "Could not parse framerate '%s'.\n",
               s->framerate);
        return ret;
    }

    if (s->standard) {
        if (s->std_id) {
            ret = 0;
            av_log(ctx, AV_LOG_DEBUG, "Setting standard: %s\n", s->standard);
            /* set tv standard */
            for (i = 0; ; i++) {
                standard.index = i;
                if (v4l2_ioctl(s->fd, VIDIOC_ENUMSTD, &standard) < 0) {
                    ret = AVERROR(errno);
                    break;
                }
                if (!av_strcasecmp(standard.name, s->standard))
                    break;
            }
            if (ret < 0) {
                av_log(ctx, AV_LOG_ERROR, "Unknown or unsupported standard '%s'\n", s->standard);
                return ret;
            }

            if (v4l2_ioctl(s->fd, VIDIOC_S_STD, &standard.id) < 0) {
                ret = AVERROR(errno);
                av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_S_STD): %s\n", av_err2str(ret));
                return ret;
            }
        } else {
            av_log(ctx, AV_LOG_WARNING,
                   "This device does not support any standard\n");
        }
    }

    /* get standard */
    if (v4l2_ioctl(s->fd, VIDIOC_G_STD, &s->std_id) == 0) {
        tpf = &standard.frameperiod;
        for (i = 0; ; i++) {
            standard.index = i;
            if (v4l2_ioctl(s->fd, VIDIOC_ENUMSTD, &standard) < 0) {
                ret = AVERROR(errno);
                if (ret == AVERROR(EINVAL)
#ifdef ENODATA
                    || ret == AVERROR(ENODATA)
#endif
                ) {
                    tpf = &streamparm.parm.capture.timeperframe;
                    break;
                }
                av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_ENUMSTD): %s\n", av_err2str(ret));
                return ret;
            }
            if (standard.id == s->std_id) {
                av_log(ctx, AV_LOG_DEBUG,
                       "Current standard: %s, id: %"PRIx64", frameperiod: %d/%d\n",
                       standard.name, (uint64_t)standard.id, tpf->numerator, tpf->denominator);
                break;
            }
        }
    } else {
        tpf = &streamparm.parm.capture.timeperframe;
    }

    streamparm.type = s->buf_type;
    if (v4l2_ioctl(s->fd, VIDIOC_G_PARM, &streamparm) < 0) {
        ret = AVERROR(errno);
        av_log(ctx, AV_LOG_WARNING, "ioctl(VIDIOC_G_PARM): %s\n", av_err2str(ret));
    } else if (framerate_q.num && framerate_q.den) {
        if (streamparm.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) {
            tpf = &streamparm.parm.capture.timeperframe;

            av_log(ctx, AV_LOG_DEBUG, "Setting time per frame to %d/%d\n",
                   framerate_q.den, framerate_q.num);
            tpf->numerator   = framerate_q.den;
            tpf->denominator = framerate_q.num;

            if (v4l2_ioctl(s->fd, VIDIOC_S_PARM, &streamparm) < 0) {
                ret = AVERROR(errno);
                av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_S_PARM): %s\n",
                       av_err2str(ret));
                return ret;
            }

            if (framerate_q.num != tpf->denominator ||
                framerate_q.den != tpf->numerator) {
                av_log(ctx, AV_LOG_INFO,
                       "The driver changed the time per frame from "
                       "%d/%d to %d/%d\n",
                       framerate_q.den, framerate_q.num,
                       tpf->numerator, tpf->denominator);
            }
        } else {
            av_log(ctx, AV_LOG_WARNING,
                   "The driver does not permit changing the time per frame\n");
        }
    }
    if (tpf->denominator > 0 && tpf->numerator > 0) {
        ctx->streams[0]->avg_frame_rate.num = tpf->denominator;
        ctx->streams[0]->avg_frame_rate.den = tpf->numerator;
        ctx->streams[0]->r_frame_rate = ctx->streams[0]->avg_frame_rate;
    } else
        av_log(ctx, AV_LOG_WARNING, "Time per frame unknown\n");

    return 0;
}
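
/* Negotiate a capture format: first try the pixel format / codec requested
 * by the caller, then fall back to walking ff_fmt_conversion_table until the
 * driver accepts one of the known mappings. */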
static int device_try_init(AVFormatContext *ctx,
                           enum AVPixelFormat pix_fmt,
                           int *width,
                           int *height,
                           uint32_t *desired_format,
                           enum AVCodecID *codec_id)
{
    int ret, i;

    *desired_format = ff_fmt_ff2v4l(pix_fmt, ctx->video_codec_id);

    if (*desired_format) {
        ret = device_init(ctx, width, height, *desired_format);
        if (ret < 0) {
            *desired_format = 0;
            if (ret != AVERROR(EINVAL))
                return ret;
        }
    }

    if (!*desired_format) {
        for (i = 0; ff_fmt_conversion_table[i].codec_id != AV_CODEC_ID_NONE; i++) {
            if (ctx->video_codec_id == AV_CODEC_ID_NONE ||
                ff_fmt_conversion_table[i].codec_id == ctx->video_codec_id) {
                av_log(ctx, AV_LOG_DEBUG, "Trying to set codec:%s pix_fmt:%s\n",
                       avcodec_get_name(ff_fmt_conversion_table[i].codec_id),
                       (char *)av_x_if_null(av_get_pix_fmt_name(ff_fmt_conversion_table[i].ff_fmt), "none"));

                *desired_format = ff_fmt_conversion_table[i].v4l2_fmt;
                ret = device_init(ctx, width, height, *desired_format);
                if (ret >= 0)
                    break;
                else if (ret != AVERROR(EINVAL))
                    return ret;
                *desired_format = 0;
            }
        }

        if (*desired_format == 0) {
            av_log(ctx, AV_LOG_ERROR, "Cannot find a proper format for "
                   "codec '%s' (id %d), pixel format '%s' (id %d)\n",
                   avcodec_get_name(ctx->video_codec_id), ctx->video_codec_id,
                   (char *)av_x_if_null(av_get_pix_fmt_name(pix_fmt), "none"), pix_fmt);
            ret = AVERROR(EINVAL);
        }
    }

    *codec_id = ff_fmt_v4l2codec(*desired_format);
    if (*codec_id == AV_CODEC_ID_NONE)
        av_assert0(ret == AVERROR(EINVAL));
    return ret;
}

static int v4l2_read_probe(const AVProbeData *p)
{
    if (av_strstart(p->filename, "/dev/video", NULL))
        return AVPROBE_SCORE_MAX - 1;
    return 0;
}
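
/* Demuxer read_header callback: open the device, select the input channel,
 * handle the list_formats/list_standards options, negotiate the capture
 * format and frame rate, map the buffers and start streaming, then fill in
 * the stream's codec parameters. */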
static int v4l2_read_header(AVFormatContext *ctx)
{
    struct video_data *s = ctx->priv_data;
    AVStream *st;
    int res = 0;
    uint32_t desired_format;
    enum AVCodecID codec_id = AV_CODEC_ID_NONE;
    enum AVPixelFormat pix_fmt = AV_PIX_FMT_NONE;
    struct v4l2_input input = { 0 };

    st = avformat_new_stream(ctx, NULL);
    if (!st)
        return AVERROR(ENOMEM);

#if CONFIG_LIBV4L2
    /* silence libv4l2 logging. if fopen() fails v4l2_log_file will be NULL
       and errors will get sent to stderr */
    if (s->use_libv4l2)
        v4l2_log_file = fopen("/dev/null", "w");
#endif

    s->fd = device_open(ctx, ctx->url);
    if (s->fd < 0)
        return s->fd;

    if (s->channel != -1) {
        /* set video input */
        av_log(ctx, AV_LOG_DEBUG, "Selecting input_channel: %d\n", s->channel);
        if (v4l2_ioctl(s->fd, VIDIOC_S_INPUT, &s->channel) < 0) {
            res = AVERROR(errno);
            av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_S_INPUT): %s\n", av_err2str(res));
            goto fail;
        }
    } else {
        /* get current video input */
        if (v4l2_ioctl(s->fd, VIDIOC_G_INPUT, &s->channel) < 0) {
            res = AVERROR(errno);
            av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_G_INPUT): %s\n", av_err2str(res));
            goto fail;
        }
    }

    /* enum input */
    input.index = s->channel;
    if (v4l2_ioctl(s->fd, VIDIOC_ENUMINPUT, &input) < 0) {
        res = AVERROR(errno);
        av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_ENUMINPUT): %s\n", av_err2str(res));
        goto fail;
    }
    s->std_id = input.std;
    av_log(ctx, AV_LOG_DEBUG, "Current input_channel: %d, input_name: %s, input_std: %"PRIx64"\n",
           s->channel, input.name, (uint64_t)input.std);

    if (s->list_format) {
        list_formats(ctx, s->list_format);
        res = AVERROR_EXIT;
        goto fail;
    }

    if (s->list_standard) {
        list_standards(ctx);
        res = AVERROR_EXIT;
        goto fail;
    }

    avpriv_set_pts_info(st, 64, 1, 1000000); /* 64 bits pts in us */

    if (s->pixel_format) {
        const AVCodecDescriptor *desc = avcodec_descriptor_get_by_name(s->pixel_format);

        if (desc)
            ctx->video_codec_id = desc->id;

        pix_fmt = av_get_pix_fmt(s->pixel_format);

        if (pix_fmt == AV_PIX_FMT_NONE && !desc) {
            av_log(ctx, AV_LOG_ERROR, "No such input format: %s.\n",
                   s->pixel_format);
            res = AVERROR(EINVAL);
            goto fail;
        }
    }

    if (!s->width && !s->height) {
        struct v4l2_format fmt = { .type = s->buf_type };

        av_log(ctx, AV_LOG_VERBOSE,
               "Querying the device for the current frame size\n");
        if (v4l2_ioctl(s->fd, VIDIOC_G_FMT, &fmt) < 0) {
            res = AVERROR(errno);
            av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_G_FMT): %s\n",
                   av_err2str(res));
            goto fail;
        }

        s->width  = fmt.fmt.pix.width;
        s->height = fmt.fmt.pix.height;
        av_log(ctx, AV_LOG_VERBOSE,
               "Setting frame size to %dx%d\n", s->width, s->height);
    }

    res = device_try_init(ctx, pix_fmt, &s->width, &s->height, &desired_format, &codec_id);
    if (res < 0)
        goto fail;

    /* If no pixel_format was specified, the codec_id was not known up
     * until now. Set video_codec_id in the context, as codec_id will
     * not be available outside this function
     */
    if (codec_id != AV_CODEC_ID_NONE && ctx->video_codec_id == AV_CODEC_ID_NONE)
        ctx->video_codec_id = codec_id;

    if ((res = av_image_check_size(s->width, s->height, 0, ctx)) < 0)
        goto fail;

    s->pixelformat = desired_format;

    if ((res = v4l2_set_parameters(ctx)) < 0)
        goto fail;

    st->codecpar->format = ff_fmt_v4l2ff(desired_format, codec_id);
    if (st->codecpar->format != AV_PIX_FMT_NONE)
        s->frame_size = av_image_get_buffer_size(st->codecpar->format,
                                                 s->width, s->height, 1);

    if ((res = mmap_init(ctx)) ||
        (res = mmap_start(ctx)) < 0)
            goto fail;

    s->top_field_first = first_field(s);

    st->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
    st->codecpar->codec_id = codec_id;
    if (codec_id == AV_CODEC_ID_RAWVIDEO)
        st->codecpar->codec_tag =
            avcodec_pix_fmt_to_codec_tag(st->codecpar->format);
    else if (codec_id == AV_CODEC_ID_H264) {
        avpriv_stream_set_need_parsing(st, AVSTREAM_PARSE_FULL_ONCE);
    }
    if (desired_format == V4L2_PIX_FMT_YVU420)
        st->codecpar->codec_tag = MKTAG('Y', 'V', '1', '2');
    else if (desired_format == V4L2_PIX_FMT_YVU410)
        st->codecpar->codec_tag = MKTAG('Y', 'V', 'U', '9');
    st->codecpar->width = s->width;
    st->codecpar->height = s->height;
    if (st->avg_frame_rate.den)
        st->codecpar->bit_rate = s->frame_size * av_q2d(st->avg_frame_rate) * 8;

    return 0;

fail:
    v4l2_close(s->fd);
    return res;
}

static int v4l2_read_packet(AVFormatContext *ctx, AVPacket *pkt)
{
    int res;

    if ((res = mmap_read_frame(ctx, pkt)) < 0) {
        return res;
    }

    return pkt->size;
}

static int v4l2_read_close(AVFormatContext *ctx)
{
    struct video_data *s = ctx->priv_data;

    if (atomic_load(&s->buffers_queued) != s->buffers)
        av_log(ctx, AV_LOG_WARNING, "Some buffers are still owned by the caller on "
               "close.\n");

    mmap_close(s);

    ff_timefilter_destroy(s->timefilter);
    v4l2_close(s->fd);
    return 0;
}

static int v4l2_is_v4l_dev(const char *name)
{
    return !strncmp(name, "video", 5) ||
           !strncmp(name, "radio", 5) ||
           !strncmp(name, "vbi", 3) ||
           !strncmp(name, "v4l-subdev", 10);
}
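
/* get_device_list callback: scan /dev for V4L2-style device names, open each
 * candidate, query VIDIOC_QUERYCAP and add the node path and card
 * description to the device list. */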
static int v4l2_get_device_list(AVFormatContext *ctx, AVDeviceInfoList *device_list)
{
    struct video_data *s = ctx->priv_data;
    DIR *dir;
    struct dirent *entry;
    int ret = 0;

    if (!device_list)
        return AVERROR(EINVAL);

    dir = opendir("/dev");
    if (!dir) {
        ret = AVERROR(errno);
        av_log(ctx, AV_LOG_ERROR, "Couldn't open the directory: %s\n", av_err2str(ret));
        return ret;
    }
    while ((entry = readdir(dir))) {
        AVDeviceInfo *device = NULL;
        struct v4l2_capability cap;
        int fd = -1, size;
        char device_name[256];

        if (!v4l2_is_v4l_dev(entry->d_name))
            continue;

        size = snprintf(device_name, sizeof(device_name), "/dev/%s", entry->d_name);
        if (size >= sizeof(device_name)) {
            av_log(ctx, AV_LOG_ERROR, "Device name too long.\n");
            ret = AVERROR(ENOSYS);
            break;
        }

        if ((fd = device_open(ctx, device_name)) < 0)
            continue;

        if (v4l2_ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0) {
            ret = AVERROR(errno);
            av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_QUERYCAP): %s\n", av_err2str(ret));
            goto fail;
        }

        device = av_mallocz(sizeof(AVDeviceInfo));
        if (!device) {
            ret = AVERROR(ENOMEM);
            goto fail;
        }
        device->device_name = av_strdup(device_name);
        device->device_description = av_strdup(cap.card);
        if (!device->device_name || !device->device_description) {
            ret = AVERROR(ENOMEM);
            goto fail;
        }

        if ((ret = av_dynarray_add_nofree(&device_list->devices,
                                          &device_list->nb_devices, device)) < 0)
            goto fail;

        v4l2_close(fd);
        continue;

      fail:
        if (device) {
            av_freep(&device->device_name);
            av_freep(&device->device_description);
            av_freep(&device);
        }
        v4l2_close(fd);
        break;
    }
    closedir(dir);
    return ret;
}

#define OFFSET(x) offsetof(struct video_data, x)
#define DEC AV_OPT_FLAG_DECODING_PARAM

static const AVOption options[] = {
    { "standard", "set TV standard, used only by analog frame grabber", OFFSET(standard), AV_OPT_TYPE_STRING, {.str = NULL }, 0, 0, DEC },
    { "channel", "set TV channel, used only by frame grabber", OFFSET(channel), AV_OPT_TYPE_INT, {.i64 = -1 }, -1, INT_MAX, DEC },
    { "video_size", "set frame size", OFFSET(width), AV_OPT_TYPE_IMAGE_SIZE, {.str = NULL}, 0, 0, DEC },
    { "pixel_format", "set preferred pixel format", OFFSET(pixel_format), AV_OPT_TYPE_STRING, {.str = NULL}, 0, 0, DEC },
    { "input_format", "set preferred pixel format (for raw video) or codec name", OFFSET(pixel_format), AV_OPT_TYPE_STRING, {.str = NULL}, 0, 0, DEC },
    { "framerate", "set frame rate", OFFSET(framerate), AV_OPT_TYPE_STRING, {.str = NULL}, 0, 0, DEC },

    { "list_formats", "list available formats and exit", OFFSET(list_format), AV_OPT_TYPE_INT, {.i64 = 0 }, 0, INT_MAX, DEC, .unit = "list_formats" },
    { "all", "show all available formats", OFFSET(list_format), AV_OPT_TYPE_CONST, {.i64 = V4L_ALLFORMATS }, 0, INT_MAX, DEC, .unit = "list_formats" },
    { "raw", "show only non-compressed formats", OFFSET(list_format), AV_OPT_TYPE_CONST, {.i64 = V4L_RAWFORMATS }, 0, INT_MAX, DEC, .unit = "list_formats" },
    { "compressed", "show only compressed formats", OFFSET(list_format), AV_OPT_TYPE_CONST, {.i64 = V4L_COMPFORMATS }, 0, INT_MAX, DEC, .unit = "list_formats" },

    { "list_standards", "list supported standards and exit", OFFSET(list_standard), AV_OPT_TYPE_INT, {.i64 = 0 }, 0, 1, DEC, .unit = "list_standards" },
    { "all", "show all supported standards", OFFSET(list_standard), AV_OPT_TYPE_CONST, {.i64 = 1 }, 0, 0, DEC, .unit = "list_standards" },

    { "timestamps", "set type of timestamps for grabbed frames", OFFSET(ts_mode), AV_OPT_TYPE_INT, {.i64 = 0 }, 0, 2, DEC, .unit = "timestamps" },
    { "ts", "set type of timestamps for grabbed frames", OFFSET(ts_mode), AV_OPT_TYPE_INT, {.i64 = 0 }, 0, 2, DEC, .unit = "timestamps" },
    { "default", "use timestamps from the kernel", OFFSET(ts_mode), AV_OPT_TYPE_CONST, {.i64 = V4L_TS_DEFAULT }, 0, 2, DEC, .unit = "timestamps" },
    { "abs", "use absolute timestamps (wall clock)", OFFSET(ts_mode), AV_OPT_TYPE_CONST, {.i64 = V4L_TS_ABS }, 0, 2, DEC, .unit = "timestamps" },
    { "mono2abs", "force conversion from monotonic to absolute timestamps", OFFSET(ts_mode), AV_OPT_TYPE_CONST, {.i64 = V4L_TS_MONO2ABS }, 0, 2, DEC, .unit = "timestamps" },
    { "use_libv4l2", "use libv4l2 (v4l-utils) conversion functions", OFFSET(use_libv4l2), AV_OPT_TYPE_BOOL, {.i64 = 0}, 0, 1, DEC },
    { NULL },
};

static const AVClass v4l2_class = {
    .class_name = "V4L2 indev",
    .item_name  = av_default_item_name,
    .option     = options,
    .version    = LIBAVUTIL_VERSION_INT,
    .category   = AV_CLASS_CATEGORY_DEVICE_VIDEO_INPUT,
};

const FFInputFormat ff_v4l2_demuxer = {
    .p.name         = "video4linux2,v4l2",
    .p.long_name    = NULL_IF_CONFIG_SMALL("Video4Linux2 device grab"),
    .p.flags        = AVFMT_NOFILE,
    .p.priv_class   = &v4l2_class,
    .priv_data_size = sizeof(struct video_data),
    .read_probe     = v4l2_read_probe,
    .read_header    = v4l2_read_header,
    .read_packet    = v4l2_read_packet,
    .read_close     = v4l2_read_close,
    .get_device_list = v4l2_get_device_list,
};