avfoundation.m 45 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249
  1. /*
  2. * AVFoundation input device
  3. * Copyright (c) 2014 Thilo Borgmann <thilo.borgmann@mail.de>
  4. *
  5. * This file is part of FFmpeg.
  6. *
  7. * FFmpeg is free software; you can redistribute it and/or
  8. * modify it under the terms of the GNU Lesser General Public
  9. * License as published by the Free Software Foundation; either
  10. * version 2.1 of the License, or (at your option) any later version.
  11. *
  12. * FFmpeg is distributed in the hope that it will be useful,
  13. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  14. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  15. * Lesser General Public License for more details.
  16. *
  17. * You should have received a copy of the GNU Lesser General Public
  18. * License along with FFmpeg; if not, write to the Free Software
  19. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  20. */
  21. /**
  22. * @file
  23. * AVFoundation input device
  24. * @author Thilo Borgmann <thilo.borgmann@mail.de>
  25. */
  26. #import <AVFoundation/AVFoundation.h>
  27. #include <pthread.h>
  28. #include "libavutil/channel_layout.h"
  29. #include "libavutil/pixdesc.h"
  30. #include "libavutil/opt.h"
  31. #include "libavutil/avstring.h"
  32. #include "libavformat/internal.h"
  33. #include "libavutil/internal.h"
  34. #include "libavutil/parseutils.h"
  35. #include "libavutil/time.h"
  36. #include "libavutil/imgutils.h"
  37. #include "avdevice.h"
/* Internal time base used for all PTS values produced by this device:
 * microseconds (1/1000000 s). Streams are registered with this time base
 * via avpriv_set_pts_info() in get_video_config()/get_audio_config(). */
static const int avf_time_base = 1000000;

static const AVRational avf_time_base_q = {
    .num = 1,
    .den = avf_time_base
};
/* One entry of the FFmpeg <-> CoreVideo pixel format mapping table. */
struct AVFPixelFormatSpec {
    enum AVPixelFormat ff_id;  // FFmpeg pixel format
    OSType avf_id;             // matching kCVPixelFormatType_* code
};
/* Mapping between FFmpeg pixel formats and the CoreVideo pixel format
 * codes accepted by AVCaptureVideoDataOutput. Terminated by an
 * AV_PIX_FMT_NONE sentinel; iterated linearly in add_video_device(). */
static const struct AVFPixelFormatSpec avf_pixel_formats[] = {
    { AV_PIX_FMT_MONOBLACK,    kCVPixelFormatType_1Monochrome },
    { AV_PIX_FMT_RGB555BE,     kCVPixelFormatType_16BE555 },
    { AV_PIX_FMT_RGB555LE,     kCVPixelFormatType_16LE555 },
    { AV_PIX_FMT_RGB565BE,     kCVPixelFormatType_16BE565 },
    { AV_PIX_FMT_RGB565LE,     kCVPixelFormatType_16LE565 },
    { AV_PIX_FMT_RGB24,        kCVPixelFormatType_24RGB },
    { AV_PIX_FMT_BGR24,        kCVPixelFormatType_24BGR },
    { AV_PIX_FMT_0RGB,         kCVPixelFormatType_32ARGB },
    { AV_PIX_FMT_BGR0,         kCVPixelFormatType_32BGRA },
    { AV_PIX_FMT_0BGR,         kCVPixelFormatType_32ABGR },
    { AV_PIX_FMT_RGB0,         kCVPixelFormatType_32RGBA },
    { AV_PIX_FMT_BGR48BE,      kCVPixelFormatType_48RGB },
    { AV_PIX_FMT_UYVY422,      kCVPixelFormatType_422YpCbCr8 },
    { AV_PIX_FMT_YUVA444P,     kCVPixelFormatType_4444YpCbCrA8R },
    { AV_PIX_FMT_YUVA444P16LE, kCVPixelFormatType_4444AYpCbCr16 },
    { AV_PIX_FMT_YUV444P,      kCVPixelFormatType_444YpCbCr8 },
    { AV_PIX_FMT_YUV422P16,    kCVPixelFormatType_422YpCbCr16 },
    { AV_PIX_FMT_YUV422P10,    kCVPixelFormatType_422YpCbCr10 },
    { AV_PIX_FMT_YUV444P10,    kCVPixelFormatType_444YpCbCr10 },
    { AV_PIX_FMT_YUV420P,      kCVPixelFormatType_420YpCbCr8Planar },
    { AV_PIX_FMT_NV12,         kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange },
    { AV_PIX_FMT_YUYV422,      kCVPixelFormatType_422YpCbCr8_yuvs },
#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1080
    { AV_PIX_FMT_GRAY8,        kCVPixelFormatType_OneComponent8 },
#endif
    { AV_PIX_FMT_NONE, 0 }
};
/**
 * Demuxer private context, shared between the AVFoundation capture
 * callbacks (AVFFrameReceiver/AVFAudioReceiver) and the libavformat
 * read thread.
 */
typedef struct
{
    AVClass*        class;                 // for private options / logging

    int             frames_captured;       // video frames received so far
    int             audio_frames_captured; // audio frames received so far
    pthread_mutex_t frame_lock;            // guards current_frame / current_audio_frame
    id              avf_delegate;          // AVFFrameReceiver (video delegate, retained)
    id              avf_audio_delegate;    // AVFAudioReceiver (audio delegate, retained)

    AVRational      framerate;             // requested capture frame rate
    int             width, height;         // requested video size; 0x0 = accept any

    int             capture_cursor;        // screen capture: draw the cursor
    int             capture_mouse_clicks;  // screen capture: highlight mouse clicks
    int             capture_raw_data;      // pass raw data of muxed devices through
    int             drop_late_frames;      // forwarded to setAlwaysDiscardsLateVideoFrames:
    int             video_is_muxed;        // selected video device is a muxed device
    int             video_is_screen;       // selected video device is a screen

    int             list_devices;          // list available devices and exit
    int             video_device_index;    // requested video device index
    int             video_stream_index;    // AVStream index of the video stream
    int             audio_device_index;    // requested audio device index
    int             audio_stream_index;    // AVStream index of the audio stream

    char            *url;                  // owned copy of s->url; filenames below point into it
    char            *video_filename;       // video device name parsed from url (not owned)
    char            *audio_filename;       // audio device name parsed from url (not owned)

    int             num_video_devices;     // count of video + muxed capture devices

    /* Audio stream parameters derived from the first captured sample buffer
     * (see get_audio_config()). */
    int             audio_channels;
    int             audio_bits_per_sample;
    int             audio_float;
    int             audio_be;
    int             audio_signed_integer;
    int             audio_packed;
    int             audio_non_interleaved;

    int32_t         *audio_buffer;         // staging buffer for non-interleaved audio (owned)
    int             audio_buffer_size;     // size of audio_buffer in bytes

    enum AVPixelFormat pixel_format;       // negotiated output pixel format

    AVCaptureSession         *capture_session;
    AVCaptureVideoDataOutput *video_output;
    AVCaptureAudioDataOutput *audio_output;
    CMSampleBufferRef         current_frame;        // latest video buffer (CFRetain'ed)
    CMSampleBufferRef         current_audio_frame;  // latest audio buffer (CFRetain'ed)

    AVCaptureDevice          *observed_device;      // device watched via KVO for transport-control changes
#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
    AVCaptureDeviceTransportControlsPlaybackMode observed_mode;  // last observed playback mode
#endif
    int              observed_quit;        // set when the observed device stops playing
} AVFContext;
/* Acquire the lock protecting the current_frame/current_audio_frame
 * pointers shared with the capture callbacks. */
static void lock_frames(AVFContext* ctx)
{
    pthread_mutex_lock(&ctx->frame_lock);
}
/* Release the lock taken by lock_frames(). */
static void unlock_frames(AVFContext* ctx)
{
    pthread_mutex_unlock(&ctx->frame_lock);
}
/** FrameReceiver class - delegate for AVCaptureSession
 *
 * Receives video sample buffers from the capture session's data output
 * and stores the most recent one in the shared AVFContext. Also acts as
 * KVO observer for the device's transport-controls playback mode.
 */
@interface AVFFrameReceiver : NSObject
{
    AVFContext* _context;  // raw pointer into the demuxer's priv_data (not owned)
}

- (id)initWithContext:(AVFContext*)context;

- (void) captureOutput:(AVCaptureOutput *)captureOutput
  didOutputSampleBuffer:(CMSampleBufferRef)videoFrame
         fromConnection:(AVCaptureConnection *)connection;

@end
  140. @implementation AVFFrameReceiver
  141. - (id)initWithContext:(AVFContext*)context
  142. {
  143. if (self = [super init]) {
  144. _context = context;
  145. // start observing if a device is set for it
  146. #if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
  147. if (_context->observed_device) {
  148. NSString *keyPath = NSStringFromSelector(@selector(transportControlsPlaybackMode));
  149. NSKeyValueObservingOptions options = NSKeyValueObservingOptionNew;
  150. [_context->observed_device addObserver: self
  151. forKeyPath: keyPath
  152. options: options
  153. context: _context];
  154. }
  155. #endif
  156. }
  157. return self;
  158. }
  159. - (void)dealloc {
  160. // stop observing if a device is set for it
  161. #if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
  162. if (_context->observed_device) {
  163. NSString *keyPath = NSStringFromSelector(@selector(transportControlsPlaybackMode));
  164. [_context->observed_device removeObserver: self forKeyPath: keyPath];
  165. }
  166. #endif
  167. [super dealloc];
  168. }
  169. - (void)observeValueForKeyPath:(NSString *)keyPath
  170. ofObject:(id)object
  171. change:(NSDictionary *)change
  172. context:(void *)context {
  173. if (context == _context) {
  174. #if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
  175. AVCaptureDeviceTransportControlsPlaybackMode mode =
  176. [change[NSKeyValueChangeNewKey] integerValue];
  177. if (mode != _context->observed_mode) {
  178. if (mode == AVCaptureDeviceTransportControlsNotPlayingMode) {
  179. _context->observed_quit = 1;
  180. }
  181. _context->observed_mode = mode;
  182. }
  183. #endif
  184. } else {
  185. [super observeValueForKeyPath: keyPath
  186. ofObject: object
  187. change: change
  188. context: context];
  189. }
  190. }
  191. - (void) captureOutput:(AVCaptureOutput *)captureOutput
  192. didOutputSampleBuffer:(CMSampleBufferRef)videoFrame
  193. fromConnection:(AVCaptureConnection *)connection
  194. {
  195. lock_frames(_context);
  196. if (_context->current_frame != nil) {
  197. CFRelease(_context->current_frame);
  198. }
  199. _context->current_frame = (CMSampleBufferRef)CFRetain(videoFrame);
  200. unlock_frames(_context);
  201. ++_context->frames_captured;
  202. }
  203. @end
/** AudioReceiver class - delegate for AVCaptureSession
 *
 * Receives audio sample buffers from the capture session's data output
 * and stores the most recent one in the shared AVFContext.
 */
@interface AVFAudioReceiver : NSObject
{
    AVFContext* _context;  // raw pointer into the demuxer's priv_data (not owned)
}

- (id)initWithContext:(AVFContext*)context;

- (void) captureOutput:(AVCaptureOutput *)captureOutput
  didOutputSampleBuffer:(CMSampleBufferRef)audioFrame
         fromConnection:(AVCaptureConnection *)connection;

@end
  215. @implementation AVFAudioReceiver
  216. - (id)initWithContext:(AVFContext*)context
  217. {
  218. if (self = [super init]) {
  219. _context = context;
  220. }
  221. return self;
  222. }
  223. - (void) captureOutput:(AVCaptureOutput *)captureOutput
  224. didOutputSampleBuffer:(CMSampleBufferRef)audioFrame
  225. fromConnection:(AVCaptureConnection *)connection
  226. {
  227. lock_frames(_context);
  228. if (_context->current_audio_frame != nil) {
  229. CFRelease(_context->current_audio_frame);
  230. }
  231. _context->current_audio_frame = (CMSampleBufferRef)CFRetain(audioFrame);
  232. unlock_frames(_context);
  233. ++_context->audio_frames_captured;
  234. }
  235. @end
  236. static void destroy_context(AVFContext* ctx)
  237. {
  238. [ctx->capture_session stopRunning];
  239. [ctx->capture_session release];
  240. [ctx->video_output release];
  241. [ctx->audio_output release];
  242. [ctx->avf_delegate release];
  243. [ctx->avf_audio_delegate release];
  244. ctx->capture_session = NULL;
  245. ctx->video_output = NULL;
  246. ctx->audio_output = NULL;
  247. ctx->avf_delegate = NULL;
  248. ctx->avf_audio_delegate = NULL;
  249. av_freep(&ctx->url);
  250. av_freep(&ctx->audio_buffer);
  251. pthread_mutex_destroy(&ctx->frame_lock);
  252. if (ctx->current_frame) {
  253. CFRelease(ctx->current_frame);
  254. }
  255. }
  256. static int parse_device_name(AVFormatContext *s)
  257. {
  258. AVFContext *ctx = (AVFContext*)s->priv_data;
  259. char *save;
  260. ctx->url = av_strdup(s->url);
  261. if (!ctx->url)
  262. return AVERROR(ENOMEM);
  263. if (ctx->url[0] != ':') {
  264. ctx->video_filename = av_strtok(ctx->url, ":", &save);
  265. ctx->audio_filename = av_strtok(NULL, ":", &save);
  266. } else {
  267. ctx->audio_filename = av_strtok(ctx->url, ":", &save);
  268. }
  269. return 0;
  270. }
  271. /**
  272. * Configure the video device.
  273. *
  274. * Configure the video device using a run-time approach to access properties
  275. * since formats, activeFormat are available since iOS >= 7.0 or OSX >= 10.7
  276. * and activeVideoMaxFrameDuration is available since iOS >= 7.0 and OSX >= 10.9.
  277. *
  278. * The NSUndefinedKeyException must be handled by the caller of this function.
  279. *
  280. */
static int configure_video_device(AVFormatContext *s, AVCaptureDevice *video_device)
{
    AVFContext *ctx = (AVFContext*)s->priv_data;
    double framerate = av_q2d(ctx->framerate);
    NSObject *range = nil;
    NSObject *format = nil;
    NSObject *selected_range = nil;
    NSObject *selected_format = nil;

    // try to configure format by formats list
    // might raise an exception if no format list is given
    // (then fallback to default, no configuration)
    @try {
        // Walk the device's format list via KVC/performSelector (run-time
        // access, see function doc above) looking for a format whose
        // dimensions match the requested size (0x0 accepts any) and whose
        // frame-rate ranges contain the requested rate.
        for (format in [video_device valueForKey:@"formats"]) {
            CMFormatDescriptionRef formatDescription;
            CMVideoDimensions dimensions;

            formatDescription = (CMFormatDescriptionRef) [format performSelector:@selector(formatDescription)];
            dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);

            if ((ctx->width == 0 && ctx->height == 0) ||
                (dimensions.width == ctx->width && dimensions.height == ctx->height)) {
                selected_format = format;

                for (range in [format valueForKey:@"videoSupportedFrameRateRanges"]) {
                    double max_framerate;

                    [[range valueForKey:@"maxFrameRate"] getValue:&max_framerate];
                    // accept rates within 0.01 fps of the requested value
                    if (fabs (framerate - max_framerate) < 0.01) {
                        selected_range = range;
                        break;
                    }
                }
            }
        }

        if (!selected_format) {
            av_log(s, AV_LOG_ERROR, "Selected video size (%dx%d) is not supported by the device.\n",
                ctx->width, ctx->height);
            goto unsupported_format;
        }

        if (!selected_range) {
            av_log(s, AV_LOG_ERROR, "Selected framerate (%f) is not supported by the device.\n",
                framerate);
            // muxed devices keep their own timing: fall back to the
            // device default instead of failing hard
            if (ctx->video_is_muxed) {
                av_log(s, AV_LOG_ERROR, "Falling back to default.\n");
            } else {
                goto unsupported_format;
            }
        }

        if ([video_device lockForConfiguration:NULL] == YES) {
            if (selected_format) {
                [video_device setValue:selected_format forKey:@"activeFormat"];
            }
            if (selected_range) {
                // pin min == max frame duration to get a fixed frame rate
                NSValue *min_frame_duration = [selected_range valueForKey:@"minFrameDuration"];
                [video_device setValue:min_frame_duration forKey:@"activeVideoMinFrameDuration"];
                [video_device setValue:min_frame_duration forKey:@"activeVideoMaxFrameDuration"];
            }
        } else {
            av_log(s, AV_LOG_ERROR, "Could not lock device for configuration.\n");
            return AVERROR(EINVAL);
        }
    } @catch(NSException *e) {
        av_log(ctx, AV_LOG_WARNING, "Configuration of video device failed, falling back to default.\n");
    }

    return 0;

unsupported_format:
    // list every size/frame-rate combination the device supports so the
    // user can pick valid -video_size / -framerate values
    av_log(s, AV_LOG_ERROR, "Supported modes:\n");
    for (format in [video_device valueForKey:@"formats"]) {
        CMFormatDescriptionRef formatDescription;
        CMVideoDimensions dimensions;

        formatDescription = (CMFormatDescriptionRef) [format performSelector:@selector(formatDescription)];
        dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);

        for (range in [format valueForKey:@"videoSupportedFrameRateRanges"]) {
            double min_framerate;
            double max_framerate;

            [[range valueForKey:@"minFrameRate"] getValue:&min_framerate];
            [[range valueForKey:@"maxFrameRate"] getValue:&max_framerate];
            av_log(s, AV_LOG_ERROR, " %dx%d@[%f %f]fps\n",
                dimensions.width, dimensions.height,
                min_framerate, max_framerate);
        }
    }
    return AVERROR(EINVAL);
}
  361. static int add_video_device(AVFormatContext *s, AVCaptureDevice *video_device)
  362. {
  363. AVFContext *ctx = (AVFContext*)s->priv_data;
  364. int ret;
  365. NSError *error = nil;
  366. AVCaptureInput* capture_input = nil;
  367. struct AVFPixelFormatSpec pxl_fmt_spec;
  368. NSNumber *pixel_format;
  369. NSDictionary *capture_dict;
  370. dispatch_queue_t queue;
  371. if (ctx->video_device_index < ctx->num_video_devices) {
  372. capture_input = (AVCaptureInput*) [[[AVCaptureDeviceInput alloc] initWithDevice:video_device error:&error] autorelease];
  373. } else {
  374. capture_input = (AVCaptureInput*) video_device;
  375. }
  376. if (!capture_input) {
  377. av_log(s, AV_LOG_ERROR, "Failed to create AV capture input device: %s\n",
  378. [[error localizedDescription] UTF8String]);
  379. return 1;
  380. }
  381. if ([ctx->capture_session canAddInput:capture_input]) {
  382. [ctx->capture_session addInput:capture_input];
  383. } else {
  384. av_log(s, AV_LOG_ERROR, "can't add video input to capture session\n");
  385. return 1;
  386. }
  387. // Attaching output
  388. ctx->video_output = [[AVCaptureVideoDataOutput alloc] init];
  389. if (!ctx->video_output) {
  390. av_log(s, AV_LOG_ERROR, "Failed to init AV video output\n");
  391. return 1;
  392. }
  393. // Configure device framerate and video size
  394. @try {
  395. if ((ret = configure_video_device(s, video_device)) < 0) {
  396. return ret;
  397. }
  398. } @catch (NSException *exception) {
  399. if (![[exception name] isEqualToString:NSUndefinedKeyException]) {
  400. av_log (s, AV_LOG_ERROR, "An error occurred: %s", [exception.reason UTF8String]);
  401. return AVERROR_EXTERNAL;
  402. }
  403. }
  404. // select pixel format
  405. pxl_fmt_spec.ff_id = AV_PIX_FMT_NONE;
  406. for (int i = 0; avf_pixel_formats[i].ff_id != AV_PIX_FMT_NONE; i++) {
  407. if (ctx->pixel_format == avf_pixel_formats[i].ff_id) {
  408. pxl_fmt_spec = avf_pixel_formats[i];
  409. break;
  410. }
  411. }
  412. // check if selected pixel format is supported by AVFoundation
  413. if (pxl_fmt_spec.ff_id == AV_PIX_FMT_NONE) {
  414. av_log(s, AV_LOG_ERROR, "Selected pixel format (%s) is not supported by AVFoundation.\n",
  415. av_get_pix_fmt_name(pxl_fmt_spec.ff_id));
  416. return 1;
  417. }
  418. // check if the pixel format is available for this device
  419. if ([[ctx->video_output availableVideoCVPixelFormatTypes] indexOfObject:[NSNumber numberWithInt:pxl_fmt_spec.avf_id]] == NSNotFound) {
  420. av_log(s, AV_LOG_ERROR, "Selected pixel format (%s) is not supported by the input device.\n",
  421. av_get_pix_fmt_name(pxl_fmt_spec.ff_id));
  422. pxl_fmt_spec.ff_id = AV_PIX_FMT_NONE;
  423. av_log(s, AV_LOG_ERROR, "Supported pixel formats:\n");
  424. for (NSNumber *pxl_fmt in [ctx->video_output availableVideoCVPixelFormatTypes]) {
  425. struct AVFPixelFormatSpec pxl_fmt_dummy;
  426. pxl_fmt_dummy.ff_id = AV_PIX_FMT_NONE;
  427. for (int i = 0; avf_pixel_formats[i].ff_id != AV_PIX_FMT_NONE; i++) {
  428. if ([pxl_fmt intValue] == avf_pixel_formats[i].avf_id) {
  429. pxl_fmt_dummy = avf_pixel_formats[i];
  430. break;
  431. }
  432. }
  433. if (pxl_fmt_dummy.ff_id != AV_PIX_FMT_NONE) {
  434. av_log(s, AV_LOG_ERROR, " %s\n", av_get_pix_fmt_name(pxl_fmt_dummy.ff_id));
  435. // select first supported pixel format instead of user selected (or default) pixel format
  436. if (pxl_fmt_spec.ff_id == AV_PIX_FMT_NONE) {
  437. pxl_fmt_spec = pxl_fmt_dummy;
  438. }
  439. }
  440. }
  441. // fail if there is no appropriate pixel format or print a warning about overriding the pixel format
  442. if (pxl_fmt_spec.ff_id == AV_PIX_FMT_NONE) {
  443. return 1;
  444. } else {
  445. av_log(s, AV_LOG_WARNING, "Overriding selected pixel format to use %s instead.\n",
  446. av_get_pix_fmt_name(pxl_fmt_spec.ff_id));
  447. }
  448. }
  449. // set videoSettings to an empty dict for receiving raw data of muxed devices
  450. if (ctx->capture_raw_data) {
  451. ctx->pixel_format = pxl_fmt_spec.ff_id;
  452. ctx->video_output.videoSettings = @{ };
  453. } else {
  454. ctx->pixel_format = pxl_fmt_spec.ff_id;
  455. pixel_format = [NSNumber numberWithUnsignedInt:pxl_fmt_spec.avf_id];
  456. capture_dict = [NSDictionary dictionaryWithObject:pixel_format
  457. forKey:(id)kCVPixelBufferPixelFormatTypeKey];
  458. [ctx->video_output setVideoSettings:capture_dict];
  459. }
  460. [ctx->video_output setAlwaysDiscardsLateVideoFrames:ctx->drop_late_frames];
  461. #if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
  462. // check for transport control support and set observer device if supported
  463. if (!ctx->video_is_screen) {
  464. int trans_ctrl = [video_device transportControlsSupported];
  465. AVCaptureDeviceTransportControlsPlaybackMode trans_mode = [video_device transportControlsPlaybackMode];
  466. if (trans_ctrl) {
  467. ctx->observed_mode = trans_mode;
  468. ctx->observed_device = video_device;
  469. }
  470. }
  471. #endif
  472. ctx->avf_delegate = [[AVFFrameReceiver alloc] initWithContext:ctx];
  473. queue = dispatch_queue_create("avf_queue", NULL);
  474. [ctx->video_output setSampleBufferDelegate:ctx->avf_delegate queue:queue];
  475. dispatch_release(queue);
  476. if ([ctx->capture_session canAddOutput:ctx->video_output]) {
  477. [ctx->capture_session addOutput:ctx->video_output];
  478. } else {
  479. av_log(s, AV_LOG_ERROR, "can't add video output to capture session\n");
  480. return 1;
  481. }
  482. return 0;
  483. }
  484. static int add_audio_device(AVFormatContext *s, AVCaptureDevice *audio_device)
  485. {
  486. AVFContext *ctx = (AVFContext*)s->priv_data;
  487. NSError *error = nil;
  488. AVCaptureDeviceInput* audio_dev_input = [[[AVCaptureDeviceInput alloc] initWithDevice:audio_device error:&error] autorelease];
  489. dispatch_queue_t queue;
  490. if (!audio_dev_input) {
  491. av_log(s, AV_LOG_ERROR, "Failed to create AV capture input device: %s\n",
  492. [[error localizedDescription] UTF8String]);
  493. return 1;
  494. }
  495. if ([ctx->capture_session canAddInput:audio_dev_input]) {
  496. [ctx->capture_session addInput:audio_dev_input];
  497. } else {
  498. av_log(s, AV_LOG_ERROR, "can't add audio input to capture session\n");
  499. return 1;
  500. }
  501. // Attaching output
  502. ctx->audio_output = [[AVCaptureAudioDataOutput alloc] init];
  503. if (!ctx->audio_output) {
  504. av_log(s, AV_LOG_ERROR, "Failed to init AV audio output\n");
  505. return 1;
  506. }
  507. ctx->avf_audio_delegate = [[AVFAudioReceiver alloc] initWithContext:ctx];
  508. queue = dispatch_queue_create("avf_audio_queue", NULL);
  509. [ctx->audio_output setSampleBufferDelegate:ctx->avf_audio_delegate queue:queue];
  510. dispatch_release(queue);
  511. if ([ctx->capture_session canAddOutput:ctx->audio_output]) {
  512. [ctx->capture_session addOutput:ctx->audio_output];
  513. } else {
  514. av_log(s, AV_LOG_ERROR, "adding audio output to capture session failed\n");
  515. return 1;
  516. }
  517. return 0;
  518. }
  519. static int get_video_config(AVFormatContext *s)
  520. {
  521. AVFContext *ctx = (AVFContext*)s->priv_data;
  522. CVImageBufferRef image_buffer;
  523. CMBlockBufferRef block_buffer;
  524. CGSize image_buffer_size;
  525. AVStream* stream = avformat_new_stream(s, NULL);
  526. if (!stream) {
  527. return 1;
  528. }
  529. // Take stream info from the first frame.
  530. while (ctx->frames_captured < 1) {
  531. CFRunLoopRunInMode(kCFRunLoopDefaultMode, 0.1, YES);
  532. }
  533. lock_frames(ctx);
  534. ctx->video_stream_index = stream->index;
  535. avpriv_set_pts_info(stream, 64, 1, avf_time_base);
  536. image_buffer = CMSampleBufferGetImageBuffer(ctx->current_frame);
  537. block_buffer = CMSampleBufferGetDataBuffer(ctx->current_frame);
  538. if (image_buffer) {
  539. image_buffer_size = CVImageBufferGetEncodedSize(image_buffer);
  540. stream->codecpar->codec_id = AV_CODEC_ID_RAWVIDEO;
  541. stream->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
  542. stream->codecpar->width = (int)image_buffer_size.width;
  543. stream->codecpar->height = (int)image_buffer_size.height;
  544. stream->codecpar->format = ctx->pixel_format;
  545. } else {
  546. stream->codecpar->codec_id = AV_CODEC_ID_DVVIDEO;
  547. stream->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
  548. stream->codecpar->format = ctx->pixel_format;
  549. }
  550. CFRelease(ctx->current_frame);
  551. ctx->current_frame = nil;
  552. unlock_frames(ctx);
  553. return 0;
  554. }
/**
 * Create the audio AVStream and fill its codec parameters from the first
 * captured sample buffer. Blocks (pumping the run loop) until a buffer
 * arrives. Only packed linear PCM (f32, s16, s24, s32) is supported.
 *
 * @return 0 on success, 1 on failure.
 */
static int get_audio_config(AVFormatContext *s)
{
    AVFContext *ctx = (AVFContext*)s->priv_data;
    CMFormatDescriptionRef format_desc;
    AVStream* stream = avformat_new_stream(s, NULL);

    if (!stream) {
        return 1;
    }

    // Take stream info from the first frame.
    while (ctx->audio_frames_captured < 1) {
        CFRunLoopRunInMode(kCFRunLoopDefaultMode, 0.1, YES);
    }

    lock_frames(ctx);

    ctx->audio_stream_index = stream->index;

    avpriv_set_pts_info(stream, 64, 1, avf_time_base);

    format_desc = CMSampleBufferGetFormatDescription(ctx->current_audio_frame);
    const AudioStreamBasicDescription *basic_desc = CMAudioFormatDescriptionGetStreamBasicDescription(format_desc);

    if (!basic_desc) {
        unlock_frames(ctx);
        av_log(s, AV_LOG_ERROR, "audio format not available\n");
        return 1;
    }

    stream->codecpar->codec_type = AVMEDIA_TYPE_AUDIO;
    stream->codecpar->sample_rate = basic_desc->mSampleRate;
    av_channel_layout_default(&stream->codecpar->ch_layout, basic_desc->mChannelsPerFrame);

    // cache the CoreAudio sample layout flags for later use
    ctx->audio_channels = basic_desc->mChannelsPerFrame;
    ctx->audio_bits_per_sample = basic_desc->mBitsPerChannel;
    ctx->audio_float = basic_desc->mFormatFlags & kAudioFormatFlagIsFloat;
    ctx->audio_be = basic_desc->mFormatFlags & kAudioFormatFlagIsBigEndian;
    ctx->audio_signed_integer = basic_desc->mFormatFlags & kAudioFormatFlagIsSignedInteger;
    ctx->audio_packed = basic_desc->mFormatFlags & kAudioFormatFlagIsPacked;
    ctx->audio_non_interleaved = basic_desc->mFormatFlags & kAudioFormatFlagIsNonInterleaved;

    // map packed linear PCM variants onto the matching FFmpeg PCM codec,
    // picking the BE/LE variant from the big-endian flag
    if (basic_desc->mFormatID == kAudioFormatLinearPCM &&
        ctx->audio_float &&
        ctx->audio_bits_per_sample == 32 &&
        ctx->audio_packed) {
        stream->codecpar->codec_id = ctx->audio_be ? AV_CODEC_ID_PCM_F32BE : AV_CODEC_ID_PCM_F32LE;
    } else if (basic_desc->mFormatID == kAudioFormatLinearPCM &&
        ctx->audio_signed_integer &&
        ctx->audio_bits_per_sample == 16 &&
        ctx->audio_packed) {
        stream->codecpar->codec_id = ctx->audio_be ? AV_CODEC_ID_PCM_S16BE : AV_CODEC_ID_PCM_S16LE;
    } else if (basic_desc->mFormatID == kAudioFormatLinearPCM &&
        ctx->audio_signed_integer &&
        ctx->audio_bits_per_sample == 24 &&
        ctx->audio_packed) {
        stream->codecpar->codec_id = ctx->audio_be ? AV_CODEC_ID_PCM_S24BE : AV_CODEC_ID_PCM_S24LE;
    } else if (basic_desc->mFormatID == kAudioFormatLinearPCM &&
        ctx->audio_signed_integer &&
        ctx->audio_bits_per_sample == 32 &&
        ctx->audio_packed) {
        stream->codecpar->codec_id = ctx->audio_be ? AV_CODEC_ID_PCM_S32BE : AV_CODEC_ID_PCM_S32LE;
    } else {
        unlock_frames(ctx);
        av_log(s, AV_LOG_ERROR, "audio format is not supported\n");
        return 1;
    }

    if (ctx->audio_non_interleaved) {
        // allocate a staging buffer sized to the first captured sample
        // buffer for handling non-interleaved data later on
        CMBlockBufferRef block_buffer = CMSampleBufferGetDataBuffer(ctx->current_audio_frame);
        ctx->audio_buffer_size = CMBlockBufferGetDataLength(block_buffer);
        ctx->audio_buffer = av_malloc(ctx->audio_buffer_size);
        if (!ctx->audio_buffer) {
            unlock_frames(ctx);
            av_log(s, AV_LOG_ERROR, "error allocating audio buffer\n");
            return 1;
        }
    }

    CFRelease(ctx->current_audio_frame);
    ctx->current_audio_frame = nil;

    unlock_frames(ctx);

    return 0;
}
  627. static int avf_read_header(AVFormatContext *s)
  628. {
  629. int ret = 0;
  630. NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
  631. uint32_t num_screens = 0;
  632. AVFContext *ctx = (AVFContext*)s->priv_data;
  633. AVCaptureDevice *video_device = nil;
  634. AVCaptureDevice *audio_device = nil;
  635. // Find capture device
  636. NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
  637. NSArray *devices_muxed = [AVCaptureDevice devicesWithMediaType:AVMediaTypeMuxed];
  638. ctx->num_video_devices = [devices count] + [devices_muxed count];
  639. pthread_mutex_init(&ctx->frame_lock, NULL);
  640. #if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
  641. CGGetActiveDisplayList(0, NULL, &num_screens);
  642. #endif
  643. // List devices if requested
  644. if (ctx->list_devices) {
  645. int index = 0;
  646. av_log(ctx, AV_LOG_INFO, "AVFoundation video devices:\n");
  647. for (AVCaptureDevice *device in devices) {
  648. const char *name = [[device localizedName] UTF8String];
  649. index = [devices indexOfObject:device];
  650. av_log(ctx, AV_LOG_INFO, "[%d] %s\n", index, name);
  651. }
  652. for (AVCaptureDevice *device in devices_muxed) {
  653. const char *name = [[device localizedName] UTF8String];
  654. index = [devices count] + [devices_muxed indexOfObject:device];
  655. av_log(ctx, AV_LOG_INFO, "[%d] %s\n", index, name);
  656. }
  657. #if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
  658. if (num_screens > 0) {
  659. CGDirectDisplayID screens[num_screens];
  660. CGGetActiveDisplayList(num_screens, screens, &num_screens);
  661. for (int i = 0; i < num_screens; i++) {
  662. av_log(ctx, AV_LOG_INFO, "[%d] Capture screen %d\n", ctx->num_video_devices + i, i);
  663. }
  664. }
  665. #endif
  666. av_log(ctx, AV_LOG_INFO, "AVFoundation audio devices:\n");
  667. devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio];
  668. for (AVCaptureDevice *device in devices) {
  669. const char *name = [[device localizedName] UTF8String];
  670. int index = [devices indexOfObject:device];
  671. av_log(ctx, AV_LOG_INFO, "[%d] %s\n", index, name);
  672. }
  673. goto fail;
  674. }
  675. // parse input filename for video and audio device
  676. ret = parse_device_name(s);
  677. if (ret)
  678. goto fail;
  679. // check for device index given in filename
  680. if (ctx->video_device_index == -1 && ctx->video_filename) {
  681. sscanf(ctx->video_filename, "%d", &ctx->video_device_index);
  682. }
  683. if (ctx->audio_device_index == -1 && ctx->audio_filename) {
  684. sscanf(ctx->audio_filename, "%d", &ctx->audio_device_index);
  685. }
  686. if (ctx->video_device_index >= 0) {
  687. if (ctx->video_device_index < ctx->num_video_devices) {
  688. if (ctx->video_device_index < [devices count]) {
  689. video_device = [devices objectAtIndex:ctx->video_device_index];
  690. } else {
  691. video_device = [devices_muxed objectAtIndex:(ctx->video_device_index - [devices count])];
  692. ctx->video_is_muxed = 1;
  693. }
  694. } else if (ctx->video_device_index < ctx->num_video_devices + num_screens) {
  695. #if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
  696. CGDirectDisplayID screens[num_screens];
  697. CGGetActiveDisplayList(num_screens, screens, &num_screens);
  698. AVCaptureScreenInput* capture_screen_input = [[[AVCaptureScreenInput alloc] initWithDisplayID:screens[ctx->video_device_index - ctx->num_video_devices]] autorelease];
  699. if (ctx->framerate.num > 0) {
  700. capture_screen_input.minFrameDuration = CMTimeMake(ctx->framerate.den, ctx->framerate.num);
  701. }
  702. #if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1080
  703. if (ctx->capture_cursor) {
  704. capture_screen_input.capturesCursor = YES;
  705. } else {
  706. capture_screen_input.capturesCursor = NO;
  707. }
  708. #endif
  709. if (ctx->capture_mouse_clicks) {
  710. capture_screen_input.capturesMouseClicks = YES;
  711. } else {
  712. capture_screen_input.capturesMouseClicks = NO;
  713. }
  714. video_device = (AVCaptureDevice*) capture_screen_input;
  715. ctx->video_is_screen = 1;
  716. #endif
  717. } else {
  718. av_log(ctx, AV_LOG_ERROR, "Invalid device index\n");
  719. goto fail;
  720. }
  721. } else if (ctx->video_filename &&
  722. strncmp(ctx->video_filename, "none", 4)) {
  723. if (!strncmp(ctx->video_filename, "default", 7)) {
  724. video_device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
  725. } else {
  726. // looking for video inputs
  727. for (AVCaptureDevice *device in devices) {
  728. if (!strncmp(ctx->video_filename, [[device localizedName] UTF8String], strlen(ctx->video_filename))) {
  729. video_device = device;
  730. break;
  731. }
  732. }
  733. // looking for muxed inputs
  734. for (AVCaptureDevice *device in devices_muxed) {
  735. if (!strncmp(ctx->video_filename, [[device localizedName] UTF8String], strlen(ctx->video_filename))) {
  736. video_device = device;
  737. ctx->video_is_muxed = 1;
  738. break;
  739. }
  740. }
  741. #if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
  742. // looking for screen inputs
  743. if (!video_device) {
  744. int idx;
  745. if(sscanf(ctx->video_filename, "Capture screen %d", &idx) && idx < num_screens) {
  746. CGDirectDisplayID screens[num_screens];
  747. CGGetActiveDisplayList(num_screens, screens, &num_screens);
  748. AVCaptureScreenInput* capture_screen_input = [[[AVCaptureScreenInput alloc] initWithDisplayID:screens[idx]] autorelease];
  749. video_device = (AVCaptureDevice*) capture_screen_input;
  750. ctx->video_device_index = ctx->num_video_devices + idx;
  751. ctx->video_is_screen = 1;
  752. if (ctx->framerate.num > 0) {
  753. capture_screen_input.minFrameDuration = CMTimeMake(ctx->framerate.den, ctx->framerate.num);
  754. }
  755. #if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1080
  756. if (ctx->capture_cursor) {
  757. capture_screen_input.capturesCursor = YES;
  758. } else {
  759. capture_screen_input.capturesCursor = NO;
  760. }
  761. #endif
  762. if (ctx->capture_mouse_clicks) {
  763. capture_screen_input.capturesMouseClicks = YES;
  764. } else {
  765. capture_screen_input.capturesMouseClicks = NO;
  766. }
  767. }
  768. }
  769. #endif
  770. }
  771. if (!video_device) {
  772. av_log(ctx, AV_LOG_ERROR, "Video device not found\n");
  773. goto fail;
  774. }
  775. }
  776. // get audio device
  777. if (ctx->audio_device_index >= 0) {
  778. NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio];
  779. if (ctx->audio_device_index >= [devices count]) {
  780. av_log(ctx, AV_LOG_ERROR, "Invalid audio device index\n");
  781. goto fail;
  782. }
  783. audio_device = [devices objectAtIndex:ctx->audio_device_index];
  784. } else if (ctx->audio_filename &&
  785. strncmp(ctx->audio_filename, "none", 4)) {
  786. if (!strncmp(ctx->audio_filename, "default", 7)) {
  787. audio_device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
  788. } else {
  789. NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio];
  790. for (AVCaptureDevice *device in devices) {
  791. if (!strncmp(ctx->audio_filename, [[device localizedName] UTF8String], strlen(ctx->audio_filename))) {
  792. audio_device = device;
  793. break;
  794. }
  795. }
  796. }
  797. if (!audio_device) {
  798. av_log(ctx, AV_LOG_ERROR, "Audio device not found\n");
  799. goto fail;
  800. }
  801. }
  802. // Video nor Audio capture device not found, looking for AVMediaTypeVideo/Audio
  803. if (!video_device && !audio_device) {
  804. av_log(s, AV_LOG_ERROR, "No AV capture device found\n");
  805. goto fail;
  806. }
  807. if (video_device) {
  808. if (ctx->video_device_index < ctx->num_video_devices) {
  809. av_log(s, AV_LOG_DEBUG, "'%s' opened\n", [[video_device localizedName] UTF8String]);
  810. } else {
  811. av_log(s, AV_LOG_DEBUG, "'%s' opened\n", [[video_device description] UTF8String]);
  812. }
  813. }
  814. if (audio_device) {
  815. av_log(s, AV_LOG_DEBUG, "audio device '%s' opened\n", [[audio_device localizedName] UTF8String]);
  816. }
  817. // Initialize capture session
  818. ctx->capture_session = [[AVCaptureSession alloc] init];
  819. if (video_device && add_video_device(s, video_device)) {
  820. goto fail;
  821. }
  822. if (audio_device && add_audio_device(s, audio_device)) {
  823. }
  824. [ctx->capture_session startRunning];
  825. /* Unlock device configuration only after the session is started so it
  826. * does not reset the capture formats */
  827. if (!ctx->video_is_screen) {
  828. [video_device unlockForConfiguration];
  829. }
  830. if (video_device && get_video_config(s)) {
  831. goto fail;
  832. }
  833. // set audio stream
  834. if (audio_device && get_audio_config(s)) {
  835. goto fail;
  836. }
  837. [pool release];
  838. return 0;
  839. fail:
  840. [pool release];
  841. destroy_context(ctx);
  842. if (ret)
  843. return ret;
  844. return AVERROR(EIO);
  845. }
  846. static int copy_cvpixelbuffer(AVFormatContext *s,
  847. CVPixelBufferRef image_buffer,
  848. AVPacket *pkt)
  849. {
  850. AVFContext *ctx = s->priv_data;
  851. int src_linesize[4];
  852. const uint8_t *src_data[4];
  853. int width = CVPixelBufferGetWidth(image_buffer);
  854. int height = CVPixelBufferGetHeight(image_buffer);
  855. int status;
  856. memset(src_linesize, 0, sizeof(src_linesize));
  857. memset(src_data, 0, sizeof(src_data));
  858. status = CVPixelBufferLockBaseAddress(image_buffer, 0);
  859. if (status != kCVReturnSuccess) {
  860. av_log(s, AV_LOG_ERROR, "Could not lock base address: %d (%dx%d)\n", status, width, height);
  861. return AVERROR_EXTERNAL;
  862. }
  863. if (CVPixelBufferIsPlanar(image_buffer)) {
  864. size_t plane_count = CVPixelBufferGetPlaneCount(image_buffer);
  865. int i;
  866. for(i = 0; i < plane_count; i++){
  867. src_linesize[i] = CVPixelBufferGetBytesPerRowOfPlane(image_buffer, i);
  868. src_data[i] = CVPixelBufferGetBaseAddressOfPlane(image_buffer, i);
  869. }
  870. } else {
  871. src_linesize[0] = CVPixelBufferGetBytesPerRow(image_buffer);
  872. src_data[0] = CVPixelBufferGetBaseAddress(image_buffer);
  873. }
  874. status = av_image_copy_to_buffer(pkt->data, pkt->size,
  875. src_data, src_linesize,
  876. ctx->pixel_format, width, height, 1);
  877. CVPixelBufferUnlockBaseAddress(image_buffer, 0);
  878. return status;
  879. }
/**
 * Demuxer read_packet callback: hand the most recently captured video or
 * audio sample buffer to the caller as an AVPacket.
 *
 * The capture delegates store the latest CMSampleBuffer in
 * ctx->current_frame / ctx->current_audio_frame under ctx->frame_lock;
 * this function consumes (and CFReleases) them under the same lock.
 * Returns 0 on success, AVERROR(EAGAIN) when no frame is pending,
 * AVERROR_EOF after the session reported termination, or a negative
 * AVERROR on failure.
 */
static int avf_read_packet(AVFormatContext *s, AVPacket *pkt)
{
    AVFContext* ctx = (AVFContext*)s->priv_data;

    do {
        CVImageBufferRef image_buffer;
        CMBlockBufferRef block_buffer;
        /* Every early return below must unlock first; the matching unlock
         * for the success path is at the bottom of the loop body. */
        lock_frames(ctx);

        if (ctx->current_frame != nil) {
            int status;
            int length = 0;

            /* A video sample carries either a pixel buffer (decoded frames)
             * or a block buffer (raw/muxed data), never both paths below. */
            image_buffer = CMSampleBufferGetImageBuffer(ctx->current_frame);
            block_buffer = CMSampleBufferGetDataBuffer(ctx->current_frame);

            if (image_buffer != nil) {
                length = (int)CVPixelBufferGetDataSize(image_buffer);
            } else if (block_buffer != nil) {
                length = (int)CMBlockBufferGetDataLength(block_buffer);
            } else  {
                unlock_frames(ctx);
                return AVERROR(EINVAL);
            }

            if (av_new_packet(pkt, length) < 0) {
                unlock_frames(ctx);
                return AVERROR(EIO);
            }

            /* Use the output (post-conversion) timing info; pts == dts since
             * capture delivers frames in presentation order. */
            CMItemCount count;
            CMSampleTimingInfo timing_info;

            if (CMSampleBufferGetOutputSampleTimingInfoArray(ctx->current_frame, 1, &timing_info, &count) == noErr) {
                AVRational timebase_q = av_make_q(1, timing_info.presentationTimeStamp.timescale);
                pkt->pts = pkt->dts = av_rescale_q(timing_info.presentationTimeStamp.value, timebase_q, avf_time_base_q);
            }

            pkt->stream_index  = ctx->video_stream_index;
            pkt->flags        |= AV_PKT_FLAG_KEY;

            if (image_buffer) {
                status = copy_cvpixelbuffer(s, image_buffer, pkt);
            } else {
                status = 0;
                OSStatus ret = CMBlockBufferCopyDataBytes(block_buffer, 0, pkt->size, pkt->data);
                if (ret != kCMBlockBufferNoErr) {
                    status = AVERROR(EIO);
                }
             }
            /* Frame consumed: release it so the delegate can store the next one. */
            CFRelease(ctx->current_frame);
            ctx->current_frame = nil;

            if (status < 0) {
                unlock_frames(ctx);
                return status;
            }
        } else if (ctx->current_audio_frame != nil) {
            CMBlockBufferRef block_buffer = CMSampleBufferGetDataBuffer(ctx->current_audio_frame);
            int block_buffer_size         = CMBlockBufferGetDataLength(block_buffer);

            if (!block_buffer || !block_buffer_size) {
                unlock_frames(ctx);
                return AVERROR(EIO);
            }

            /* Non-interleaved audio is staged through ctx->audio_buffer
             * before interleaving; it must fit. */
            if (ctx->audio_non_interleaved && block_buffer_size > ctx->audio_buffer_size) {
                unlock_frames(ctx);
                return AVERROR_BUFFER_TOO_SMALL;
            }

            if (av_new_packet(pkt, block_buffer_size) < 0) {
                unlock_frames(ctx);
                return AVERROR(EIO);
            }

            CMItemCount count;
            CMSampleTimingInfo timing_info;

            if (CMSampleBufferGetOutputSampleTimingInfoArray(ctx->current_audio_frame, 1, &timing_info, &count) == noErr) {
                AVRational timebase_q = av_make_q(1, timing_info.presentationTimeStamp.timescale);
                pkt->pts = pkt->dts = av_rescale_q(timing_info.presentationTimeStamp.value, timebase_q, avf_time_base_q);
            }

            pkt->stream_index  = ctx->audio_stream_index;
            pkt->flags        |= AV_PKT_FLAG_KEY;

            if (ctx->audio_non_interleaved) {
                int sample, c, shift, num_samples;

                /* Copy planar source into the staging buffer first ... */
                OSStatus ret = CMBlockBufferCopyDataBytes(block_buffer, 0, pkt->size, ctx->audio_buffer);
                if (ret != kCMBlockBufferNoErr) {
                    unlock_frames(ctx);
                    return AVERROR(EIO);
                }

                num_samples = pkt->size / (ctx->audio_channels * (ctx->audio_bits_per_sample >> 3));

                /* ... then transform decoded frame into output format:
                 * interleave the per-channel planes sample by sample,
                 * left-shifting so samples occupy the full container width. */
#define INTERLEAVE_OUTPUT(bps)                                         \
                {                                                      \
                    int##bps##_t **src;                                \
                    int##bps##_t *dest;                                \
                    src = av_malloc(ctx->audio_channels * sizeof(int##bps##_t*)); \
                    if (!src) {                                        \
                        unlock_frames(ctx);                            \
                        return AVERROR(EIO);                           \
                    }                                                  \
                                                                       \
                    for (c = 0; c < ctx->audio_channels; c++) {        \
                        src[c] = ((int##bps##_t*)ctx->audio_buffer) + c * num_samples; \
                    }                                                  \
                    dest  = (int##bps##_t*)pkt->data;                  \
                    shift = bps - ctx->audio_bits_per_sample;          \
                    for (sample = 0; sample < num_samples; sample++)   \
                        for (c = 0; c < ctx->audio_channels; c++)      \
                            *dest++ = src[c][sample] << shift;         \
                    av_freep(&src);                                    \
                }

                if (ctx->audio_bits_per_sample <= 16) {
                    INTERLEAVE_OUTPUT(16)
                } else {
                    INTERLEAVE_OUTPUT(32)
                }
            } else {
                /* Interleaved audio: copy straight into the packet. */
                OSStatus ret = CMBlockBufferCopyDataBytes(block_buffer, 0, pkt->size, pkt->data);
                if (ret != kCMBlockBufferNoErr) {
                    unlock_frames(ctx);
                    return AVERROR(EIO);
                }
            }

            CFRelease(ctx->current_audio_frame);
            ctx->current_audio_frame = nil;
        } else {
            /* Nothing captured yet: report EOF after shutdown, otherwise
             * ask the caller to retry. */
            pkt->data = NULL;
            unlock_frames(ctx);
            if (ctx->observed_quit) {
                return AVERROR_EOF;
            } else {
                return AVERROR(EAGAIN);
            }
        }

        unlock_frames(ctx);
    } while (!pkt->data);

    return 0;
}
  1006. static int avf_close(AVFormatContext *s)
  1007. {
  1008. AVFContext* ctx = (AVFContext*)s->priv_data;
  1009. destroy_context(ctx);
  1010. return 0;
  1011. }
/* AVOption table for the AVFoundation input device; all options are
 * decoding (capture-time) parameters selectable via -list_devices,
 * -video_device_index, etc. The table is NULL-terminated. */
static const AVOption options[] = {
    { "list_devices", "list available devices", offsetof(AVFContext, list_devices), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, AV_OPT_FLAG_DECODING_PARAM },
    { "video_device_index", "select video device by index for devices with same name (starts at 0)", offsetof(AVFContext, video_device_index), AV_OPT_TYPE_INT, {.i64 = -1}, -1, INT_MAX, AV_OPT_FLAG_DECODING_PARAM },
    { "audio_device_index", "select audio device by index for devices with same name (starts at 0)", offsetof(AVFContext, audio_device_index), AV_OPT_TYPE_INT, {.i64 = -1}, -1, INT_MAX, AV_OPT_FLAG_DECODING_PARAM },
    { "pixel_format", "set pixel format", offsetof(AVFContext, pixel_format), AV_OPT_TYPE_PIXEL_FMT, {.i64 = AV_PIX_FMT_YUV420P}, 0, INT_MAX, AV_OPT_FLAG_DECODING_PARAM},
    { "framerate", "set frame rate", offsetof(AVFContext, framerate), AV_OPT_TYPE_VIDEO_RATE, {.str = "ntsc"}, 0, INT_MAX, AV_OPT_FLAG_DECODING_PARAM },
    { "video_size", "set video size", offsetof(AVFContext, width), AV_OPT_TYPE_IMAGE_SIZE, {.str = NULL}, 0, 0, AV_OPT_FLAG_DECODING_PARAM },
    { "capture_cursor", "capture the screen cursor", offsetof(AVFContext, capture_cursor), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, AV_OPT_FLAG_DECODING_PARAM },
    { "capture_mouse_clicks", "capture the screen mouse clicks", offsetof(AVFContext, capture_mouse_clicks), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, AV_OPT_FLAG_DECODING_PARAM },
    { "capture_raw_data", "capture the raw data from device connection", offsetof(AVFContext, capture_raw_data), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, AV_OPT_FLAG_DECODING_PARAM },
    { "drop_late_frames", "drop frames that are available later than expected", offsetof(AVFContext, drop_late_frames), AV_OPT_TYPE_BOOL, {.i64=1}, 0, 1, AV_OPT_FLAG_DECODING_PARAM },

    { NULL },
};
/* AVClass wiring the option table above into the generic AVOptions /
 * logging machinery; categorized as a video input device. */
static const AVClass avf_class = {
    .class_name = "AVFoundation indev",
    .item_name  = av_default_item_name,
    .option     = options,
    .version    = LIBAVUTIL_VERSION_INT,
    .category   = AV_CLASS_CATEGORY_DEVICE_VIDEO_INPUT,
};
/* Public demuxer definition for the AVFoundation capture device.
 * AVFMT_NOFILE: input is a device, not a byte stream opened by lavf. */
const AVInputFormat ff_avfoundation_demuxer = {
    .name           = "avfoundation",
    .long_name      = NULL_IF_CONFIG_SMALL("AVFoundation input device"),
    .priv_data_size = sizeof(AVFContext),
    .read_header    = avf_read_header,
    .read_packet    = avf_read_packet,
    .read_close     = avf_close,
    .flags          = AVFMT_NOFILE,
    .priv_class     = &avf_class,
};