  1. /*
  2. * AVFoundation input device
  3. * Copyright (c) 2014 Thilo Borgmann <thilo.borgmann@mail.de>
  4. *
  5. * This file is part of FFmpeg.
  6. *
  7. * FFmpeg is free software; you can redistribute it and/or
  8. * modify it under the terms of the GNU Lesser General Public
  9. * License as published by the Free Software Foundation; either
  10. * version 2.1 of the License, or (at your option) any later version.
  11. *
  12. * FFmpeg is distributed in the hope that it will be useful,
  13. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  14. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  15. * Lesser General Public License for more details.
  16. *
  17. * You should have received a copy of the GNU Lesser General Public
  18. * License along with FFmpeg; if not, write to the Free Software
  19. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  20. */
  21. /**
  22. * @file
  23. * AVFoundation input device
  24. * @author Thilo Borgmann <thilo.borgmann@mail.de>
  25. */
  26. #import <AVFoundation/AVFoundation.h>
  27. #include <pthread.h>
  28. #include "libavutil/pixdesc.h"
  29. #include "libavutil/opt.h"
  30. #include "libavutil/avstring.h"
  31. #include "libavformat/internal.h"
  32. #include "libavutil/internal.h"
  33. #include "libavutil/parseutils.h"
  34. #include "libavutil/time.h"
  35. #include "avdevice.h"
/* Internal timebase for all PTS values produced by this device: microseconds. */
static const int avf_time_base = 1000000;

/* Same timebase as an AVRational, for av_rescale_q()-style conversions. */
static const AVRational avf_time_base_q = {
    .num = 1,
    .den = avf_time_base
};
/* One entry of the FFmpeg <-> CoreVideo pixel format mapping table. */
struct AVFPixelFormatSpec {
    enum AVPixelFormat ff_id;  // FFmpeg pixel format
    OSType avf_id;             // matching kCVPixelFormatType_* constant
};
/* FFmpeg <-> CoreVideo pixel format mapping, terminated by AV_PIX_FMT_NONE.
 * Scanned linearly in add_video_device() both to validate the user-selected
 * format and to translate the device's supported CV format list. */
static const struct AVFPixelFormatSpec avf_pixel_formats[] = {
    { AV_PIX_FMT_MONOBLACK,    kCVPixelFormatType_1Monochrome },
    { AV_PIX_FMT_RGB555BE,     kCVPixelFormatType_16BE555 },
    { AV_PIX_FMT_RGB555LE,     kCVPixelFormatType_16LE555 },
    { AV_PIX_FMT_RGB565BE,     kCVPixelFormatType_16BE565 },
    { AV_PIX_FMT_RGB565LE,     kCVPixelFormatType_16LE565 },
    { AV_PIX_FMT_RGB24,        kCVPixelFormatType_24RGB },
    { AV_PIX_FMT_BGR24,        kCVPixelFormatType_24BGR },
    { AV_PIX_FMT_0RGB,         kCVPixelFormatType_32ARGB },
    { AV_PIX_FMT_BGR0,         kCVPixelFormatType_32BGRA },
    { AV_PIX_FMT_0BGR,         kCVPixelFormatType_32ABGR },
    { AV_PIX_FMT_RGB0,         kCVPixelFormatType_32RGBA },
    /* NOTE(review): BGR48BE paired with a *RGB* CV type looks mismatched
     * (channel order) — confirm against CoreVideo docs before relying on it. */
    { AV_PIX_FMT_BGR48BE,      kCVPixelFormatType_48RGB },
    { AV_PIX_FMT_UYVY422,      kCVPixelFormatType_422YpCbCr8 },
    { AV_PIX_FMT_YUVA444P,     kCVPixelFormatType_4444YpCbCrA8R },
    { AV_PIX_FMT_YUVA444P16LE, kCVPixelFormatType_4444AYpCbCr16 },
    { AV_PIX_FMT_YUV444P,      kCVPixelFormatType_444YpCbCr8 },
    { AV_PIX_FMT_YUV422P16,    kCVPixelFormatType_422YpCbCr16 },
    { AV_PIX_FMT_YUV422P10,    kCVPixelFormatType_422YpCbCr10 },
    { AV_PIX_FMT_YUV444P10,    kCVPixelFormatType_444YpCbCr10 },
    { AV_PIX_FMT_YUV420P,      kCVPixelFormatType_420YpCbCr8Planar },
    { AV_PIX_FMT_NV12,         kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange },
    { AV_PIX_FMT_YUYV422,      kCVPixelFormatType_422YpCbCr8_yuvs },
#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1080
    /* kCVPixelFormatType_OneComponent8 only exists on OS X >= 10.8 */
    { AV_PIX_FMT_GRAY8,        kCVPixelFormatType_OneComponent8 },
#endif
    { AV_PIX_FMT_NONE, 0 }
};
/* Private state of the AVFoundation input device. */
typedef struct
{
    AVClass*        class;                  // AVOption class; must be first

    int             frames_captured;        // video frames received so far
    int             audio_frames_captured;  // audio frames received so far
    int64_t         first_pts;              // av_gettime() at video capture start
    int64_t         first_audio_pts;        // av_gettime() at audio capture start
    pthread_mutex_t frame_lock;             // guards current_frame / current_audio_frame
    pthread_cond_t  frame_wait_cond;        // signalled when a new frame arrives
    id              avf_delegate;           // AVFFrameReceiver (video callback)
    id              avf_audio_delegate;     // AVFAudioReceiver (audio callback)

    AVRational      framerate;              // requested capture framerate (option)
    int             width, height;          // requested capture size, 0 = any (option)

    int             capture_cursor;         // screen capture: draw the cursor (option)
    int             capture_mouse_clicks;   // screen capture: highlight clicks (option)

    int             list_devices;           // list devices and exit (option)
    int             video_device_index;     // selected video device, -1 = by name
    int             video_stream_index;     // index of the created video AVStream
    int             audio_device_index;     // selected audio device, -1 = by name
    int             audio_stream_index;     // index of the created audio AVStream

    char            *video_filename;        // video part of "video:audio" filename
    char            *audio_filename;        // audio part of "video:audio" filename

    int             num_video_devices;      // count of AVMediaTypeVideo devices

    /* Audio format properties derived from the first captured frame. */
    int             audio_channels;
    int             audio_bits_per_sample;
    int             audio_float;
    int             audio_be;
    int             audio_signed_integer;
    int             audio_packed;
    int             audio_non_interleaved;

    int32_t         *audio_buffer;          // scratch buffer for de-interleaving
    int             audio_buffer_size;

    enum AVPixelFormat pixel_format;        // negotiated output pixel format

    AVCaptureSession         *capture_session;
    AVCaptureVideoDataOutput *video_output;
    AVCaptureAudioDataOutput *audio_output;
    CMSampleBufferRef         current_frame;        // pending video frame (owned)
    CMSampleBufferRef         current_audio_frame;  // pending audio frame (owned)
} AVFContext;
/* Acquire the lock guarding the pending frame pointers. */
static void lock_frames(AVFContext* ctx)
{
    pthread_mutex_lock(&ctx->frame_lock);
}
/* Release the lock guarding the pending frame pointers. */
static void unlock_frames(AVFContext* ctx)
{
    pthread_mutex_unlock(&ctx->frame_lock);
}
/** FrameReceiver class - video delegate for AVCaptureSession
 */
@interface AVFFrameReceiver : NSObject
{
    AVFContext* _context;   // non-owning back-pointer to the device context
}

- (id)initWithContext:(AVFContext*)context;

- (void)  captureOutput:(AVCaptureOutput *)captureOutput
  didOutputSampleBuffer:(CMSampleBufferRef)videoFrame
         fromConnection:(AVCaptureConnection *)connection;

@end
  131. @implementation AVFFrameReceiver
  132. - (id)initWithContext:(AVFContext*)context
  133. {
  134. if (self = [super init]) {
  135. _context = context;
  136. }
  137. return self;
  138. }
  139. - (void) captureOutput:(AVCaptureOutput *)captureOutput
  140. didOutputSampleBuffer:(CMSampleBufferRef)videoFrame
  141. fromConnection:(AVCaptureConnection *)connection
  142. {
  143. lock_frames(_context);
  144. if (_context->current_frame != nil) {
  145. CFRelease(_context->current_frame);
  146. }
  147. _context->current_frame = (CMSampleBufferRef)CFRetain(videoFrame);
  148. pthread_cond_signal(&_context->frame_wait_cond);
  149. unlock_frames(_context);
  150. ++_context->frames_captured;
  151. }
  152. @end
/** AudioReceiver class - audio delegate for AVCaptureSession
 */
@interface AVFAudioReceiver : NSObject
{
    AVFContext* _context;   // non-owning back-pointer to the device context
}

- (id)initWithContext:(AVFContext*)context;

- (void)  captureOutput:(AVCaptureOutput *)captureOutput
  didOutputSampleBuffer:(CMSampleBufferRef)audioFrame
         fromConnection:(AVCaptureConnection *)connection;

@end
  164. @implementation AVFAudioReceiver
  165. - (id)initWithContext:(AVFContext*)context
  166. {
  167. if (self = [super init]) {
  168. _context = context;
  169. }
  170. return self;
  171. }
  172. - (void) captureOutput:(AVCaptureOutput *)captureOutput
  173. didOutputSampleBuffer:(CMSampleBufferRef)audioFrame
  174. fromConnection:(AVCaptureConnection *)connection
  175. {
  176. lock_frames(_context);
  177. if (_context->current_audio_frame != nil) {
  178. CFRelease(_context->current_audio_frame);
  179. }
  180. _context->current_audio_frame = (CMSampleBufferRef)CFRetain(audioFrame);
  181. pthread_cond_signal(&_context->frame_wait_cond);
  182. unlock_frames(_context);
  183. ++_context->audio_frames_captured;
  184. }
  185. @end
  186. static void destroy_context(AVFContext* ctx)
  187. {
  188. [ctx->capture_session stopRunning];
  189. [ctx->capture_session release];
  190. [ctx->video_output release];
  191. [ctx->audio_output release];
  192. [ctx->avf_delegate release];
  193. [ctx->avf_audio_delegate release];
  194. ctx->capture_session = NULL;
  195. ctx->video_output = NULL;
  196. ctx->audio_output = NULL;
  197. ctx->avf_delegate = NULL;
  198. ctx->avf_audio_delegate = NULL;
  199. av_freep(&ctx->audio_buffer);
  200. pthread_mutex_destroy(&ctx->frame_lock);
  201. pthread_cond_destroy(&ctx->frame_wait_cond);
  202. if (ctx->current_frame) {
  203. CFRelease(ctx->current_frame);
  204. }
  205. }
  206. static void parse_device_name(AVFormatContext *s)
  207. {
  208. AVFContext *ctx = (AVFContext*)s->priv_data;
  209. char *tmp = av_strdup(s->filename);
  210. char *save;
  211. if (tmp[0] != ':') {
  212. ctx->video_filename = av_strtok(tmp, ":", &save);
  213. ctx->audio_filename = av_strtok(NULL, ":", &save);
  214. } else {
  215. ctx->audio_filename = av_strtok(tmp, ":", &save);
  216. }
  217. }
/**
 * Configure the video device.
 *
 * Configure the video device using a run-time approach to access properties
 * since formats, activeFormat are available since iOS >= 7.0 or OSX >= 10.7
 * and activeVideoMaxFrameDuration is available since iOS >= 7.0 and OSX >= 10.9.
 *
 * The NSUndefinedKeyException must be handled by the caller of this function.
 *
 * @return 0 on success, AVERROR(EINVAL) if no format/framerate matches or
 *         the device cannot be locked for configuration.
 */
static int configure_video_device(AVFormatContext *s, AVCaptureDevice *video_device)
{
    AVFContext *ctx = (AVFContext*)s->priv_data;
    double framerate = av_q2d(ctx->framerate);
    NSObject *range = nil;
    NSObject *format = nil;
    NSObject *selected_range = nil;
    NSObject *selected_format = nil;

    /* Walk all device formats via KVC so this compiles against SDKs that
     * predate the typed -formats property. */
    for (format in [video_device valueForKey:@"formats"]) {
        CMFormatDescriptionRef formatDescription;
        CMVideoDimensions dimensions;

        formatDescription = (CMFormatDescriptionRef) [format performSelector:@selector(formatDescription)];
        dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);

        /* width == 0 && height == 0 means "no size requested": any format
         * matches; otherwise require an exact dimension match. */
        if ((ctx->width == 0 && ctx->height == 0) ||
            (dimensions.width == ctx->width && dimensions.height == ctx->height)) {

            selected_format = format;

            for (range in [format valueForKey:@"videoSupportedFrameRateRanges"]) {
                double max_framerate;

                [[range valueForKey:@"maxFrameRate"] getValue:&max_framerate];
                /* Match the requested rate within 0.01 fps tolerance. */
                if (fabs (framerate - max_framerate) < 0.01) {
                    selected_range = range;
                    break;
                }
            }
        }
    }

    if (!selected_format) {
        av_log(s, AV_LOG_ERROR, "Selected video size (%dx%d) is not supported by the device\n",
            ctx->width, ctx->height);
        goto unsupported_format;
    }

    if (!selected_range) {
        av_log(s, AV_LOG_ERROR, "Selected framerate (%f) is not supported by the device\n",
            framerate);
        goto unsupported_format;
    }

    /* Apply format and frame duration; min == max pins the rate. The caller
     * unlocks the device only after the session has started. */
    if ([video_device lockForConfiguration:NULL] == YES) {
        NSValue *min_frame_duration = [selected_range valueForKey:@"minFrameDuration"];

        [video_device setValue:selected_format forKey:@"activeFormat"];
        [video_device setValue:min_frame_duration forKey:@"activeVideoMinFrameDuration"];
        [video_device setValue:min_frame_duration forKey:@"activeVideoMaxFrameDuration"];
    } else {
        av_log(s, AV_LOG_ERROR, "Could not lock device for configuration");
        return AVERROR(EINVAL);
    }

    return 0;

unsupported_format:
    /* Print every supported size/framerate combination to help the user. */
    av_log(s, AV_LOG_ERROR, "Supported modes:\n");
    for (format in [video_device valueForKey:@"formats"]) {
        CMFormatDescriptionRef formatDescription;
        CMVideoDimensions dimensions;

        formatDescription = (CMFormatDescriptionRef) [format performSelector:@selector(formatDescription)];
        dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);

        for (range in [format valueForKey:@"videoSupportedFrameRateRanges"]) {
            double min_framerate;
            double max_framerate;

            [[range valueForKey:@"minFrameRate"] getValue:&min_framerate];
            [[range valueForKey:@"maxFrameRate"] getValue:&max_framerate];
            av_log(s, AV_LOG_ERROR, " %dx%d@[%f %f]fps\n",
                dimensions.width, dimensions.height,
                min_framerate, max_framerate);
        }
    }
    return AVERROR(EINVAL);
}
/* Attach the given video device (or screen-capture input) to the capture
 * session, negotiate a pixel format and install the video delegate.
 * Returns 0 on success, 1 (or a negative AVERROR) on failure. */
static int add_video_device(AVFormatContext *s, AVCaptureDevice *video_device)
{
    AVFContext *ctx = (AVFContext*)s->priv_data;
    int ret;
    NSError *error  = nil;
    AVCaptureInput* capture_input = nil;
    struct AVFPixelFormatSpec pxl_fmt_spec;
    NSNumber *pixel_format;
    NSDictionary *capture_dict;
    dispatch_queue_t queue;

    /* Indices below num_video_devices are real devices needing a device
     * input; larger indices are AVCaptureScreenInput objects created by the
     * caller, which already are AVCaptureInputs. */
    if (ctx->video_device_index < ctx->num_video_devices) {
        capture_input = (AVCaptureInput*) [[[AVCaptureDeviceInput alloc] initWithDevice:video_device error:&error] autorelease];
    } else {
        capture_input = (AVCaptureInput*) video_device;
    }

    if (!capture_input) {
        av_log(s, AV_LOG_ERROR, "Failed to create AV capture input device: %s\n",
               [[error localizedDescription] UTF8String]);
        return 1;
    }

    if ([ctx->capture_session canAddInput:capture_input]) {
        [ctx->capture_session addInput:capture_input];
    } else {
        av_log(s, AV_LOG_ERROR, "can't add video input to capture session\n");
        return 1;
    }

    // Attaching output
    ctx->video_output = [[AVCaptureVideoDataOutput alloc] init];

    if (!ctx->video_output) {
        av_log(s, AV_LOG_ERROR, "Failed to init AV video output\n");
        return 1;
    }

    // Configure device framerate and video size
    @try {
        if ((ret = configure_video_device(s, video_device)) < 0) {
            return ret;
        }
    } @catch (NSException *exception) {
        /* NSUndefinedKeyException just means the SDK/OS lacks the KVC
         * properties used above; anything else is a real error. */
        if (![[exception name] isEqualToString:NSUndefinedKeyException]) {
          av_log (s, AV_LOG_ERROR, "An error occurred: %s", [exception.reason UTF8String]);
          return AVERROR_EXTERNAL;
        }
    }

    // select pixel format
    pxl_fmt_spec.ff_id = AV_PIX_FMT_NONE;

    for (int i = 0; avf_pixel_formats[i].ff_id != AV_PIX_FMT_NONE; i++) {
        if (ctx->pixel_format == avf_pixel_formats[i].ff_id) {
            pxl_fmt_spec = avf_pixel_formats[i];
            break;
        }
    }

    // check if selected pixel format is supported by AVFoundation
    if (pxl_fmt_spec.ff_id == AV_PIX_FMT_NONE) {
        av_log(s, AV_LOG_ERROR, "Selected pixel format (%s) is not supported by AVFoundation.\n",
               av_get_pix_fmt_name(pxl_fmt_spec.ff_id));
        return 1;
    }

    // check if the pixel format is available for this device
    if ([[ctx->video_output availableVideoCVPixelFormatTypes] indexOfObject:[NSNumber numberWithInt:pxl_fmt_spec.avf_id]] == NSNotFound) {
        av_log(s, AV_LOG_ERROR, "Selected pixel format (%s) is not supported by the input device.\n",
               av_get_pix_fmt_name(pxl_fmt_spec.ff_id));

        pxl_fmt_spec.ff_id = AV_PIX_FMT_NONE;

        /* List what the device does offer, and fall back to the first CV
         * format that also has an FFmpeg mapping. */
        av_log(s, AV_LOG_ERROR, "Supported pixel formats:\n");
        for (NSNumber *pxl_fmt in [ctx->video_output availableVideoCVPixelFormatTypes]) {
            struct AVFPixelFormatSpec pxl_fmt_dummy;
            pxl_fmt_dummy.ff_id = AV_PIX_FMT_NONE;
            for (int i = 0; avf_pixel_formats[i].ff_id != AV_PIX_FMT_NONE; i++) {
                if ([pxl_fmt intValue] == avf_pixel_formats[i].avf_id) {
                    pxl_fmt_dummy = avf_pixel_formats[i];
                    break;
                }
            }

            if (pxl_fmt_dummy.ff_id != AV_PIX_FMT_NONE) {
                av_log(s, AV_LOG_ERROR, "  %s\n", av_get_pix_fmt_name(pxl_fmt_dummy.ff_id));

                // select first supported pixel format instead of user selected (or default) pixel format
                if (pxl_fmt_spec.ff_id == AV_PIX_FMT_NONE) {
                    pxl_fmt_spec = pxl_fmt_dummy;
                }
            }
        }

        // fail if there is no appropriate pixel format or print a warning about overriding the pixel format
        if (pxl_fmt_spec.ff_id == AV_PIX_FMT_NONE) {
            return 1;
        } else {
            av_log(s, AV_LOG_WARNING, "Overriding selected pixel format to use %s instead.\n",
                   av_get_pix_fmt_name(pxl_fmt_spec.ff_id));
        }
    }

    /* Install the negotiated format and the frame delegate on a private
     * serial dispatch queue; late frames are dropped rather than queued. */
    ctx->pixel_format          = pxl_fmt_spec.ff_id;
    pixel_format = [NSNumber numberWithUnsignedInt:pxl_fmt_spec.avf_id];
    capture_dict = [NSDictionary dictionaryWithObject:pixel_format
                                               forKey:(id)kCVPixelBufferPixelFormatTypeKey];

    [ctx->video_output setVideoSettings:capture_dict];
    [ctx->video_output setAlwaysDiscardsLateVideoFrames:YES];

    ctx->avf_delegate = [[AVFFrameReceiver alloc] initWithContext:ctx];

    queue = dispatch_queue_create("avf_queue", NULL);
    [ctx->video_output setSampleBufferDelegate:ctx->avf_delegate queue:queue];
    dispatch_release(queue);

    if ([ctx->capture_session canAddOutput:ctx->video_output]) {
        [ctx->capture_session addOutput:ctx->video_output];
    } else {
        av_log(s, AV_LOG_ERROR, "can't add video output to capture session\n");
        return 1;
    }

    return 0;
}
/* Attach the given audio device to the capture session and install the
 * audio delegate on a private dispatch queue.
 * Returns 0 on success, 1 on failure. */
static int add_audio_device(AVFormatContext *s, AVCaptureDevice *audio_device)
{
    AVFContext *ctx = (AVFContext*)s->priv_data;
    NSError *error  = nil;
    AVCaptureDeviceInput* audio_dev_input = [[[AVCaptureDeviceInput alloc] initWithDevice:audio_device error:&error] autorelease];
    dispatch_queue_t queue;

    if (!audio_dev_input) {
        av_log(s, AV_LOG_ERROR, "Failed to create AV capture input device: %s\n",
               [[error localizedDescription] UTF8String]);
        return 1;
    }

    if ([ctx->capture_session canAddInput:audio_dev_input]) {
        [ctx->capture_session addInput:audio_dev_input];
    } else {
        av_log(s, AV_LOG_ERROR, "can't add audio input to capture session\n");
        return 1;
    }

    // Attaching output
    ctx->audio_output = [[AVCaptureAudioDataOutput alloc] init];

    if (!ctx->audio_output) {
        av_log(s, AV_LOG_ERROR, "Failed to init AV audio output\n");
        return 1;
    }

    ctx->avf_audio_delegate = [[AVFAudioReceiver alloc] initWithContext:ctx];

    /* Deliver audio sample buffers on a private serial queue; the delegate
     * hands them over to the demuxer thread under frame_lock. */
    queue = dispatch_queue_create("avf_audio_queue", NULL);
    [ctx->audio_output setSampleBufferDelegate:ctx->avf_audio_delegate queue:queue];
    dispatch_release(queue);

    if ([ctx->capture_session canAddOutput:ctx->audio_output]) {
        [ctx->capture_session addOutput:ctx->audio_output];
    } else {
        av_log(s, AV_LOG_ERROR, "adding audio output to capture session failed\n");
        return 1;
    }

    return 0;
}
/* Create the video AVStream and fill its parameters (size, pixel format)
 * from the first captured frame. Blocks, spinning the run loop, until at
 * least one frame has arrived. Returns 0 on success, 1 on failure. */
static int get_video_config(AVFormatContext *s)
{
    AVFContext *ctx = (AVFContext*)s->priv_data;
    CVImageBufferRef image_buffer;
    CGSize image_buffer_size;
    AVStream* stream = avformat_new_stream(s, NULL);

    if (!stream) {
        return 1;
    }

    // Take stream info from the first frame.
    while (ctx->frames_captured < 1) {
        CFRunLoopRunInMode(kCFRunLoopDefaultMode, 0.1, YES);
    }

    lock_frames(ctx);

    ctx->video_stream_index = stream->index;

    avpriv_set_pts_info(stream, 64, 1, avf_time_base);

    image_buffer      = CMSampleBufferGetImageBuffer(ctx->current_frame);
    image_buffer_size = CVImageBufferGetEncodedSize(image_buffer);

    stream->codecpar->codec_id   = AV_CODEC_ID_RAWVIDEO;
    stream->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
    stream->codecpar->width      = (int)image_buffer_size.width;
    stream->codecpar->height     = (int)image_buffer_size.height;
    stream->codecpar->format     = ctx->pixel_format;

    /* The probe frame is consumed here; the next frame read by
     * avf_read_packet() will be a fresh one. */
    CFRelease(ctx->current_frame);
    ctx->current_frame = nil;

    unlock_frames(ctx);

    return 0;
}
  462. static int get_audio_config(AVFormatContext *s)
  463. {
  464. AVFContext *ctx = (AVFContext*)s->priv_data;
  465. CMFormatDescriptionRef format_desc;
  466. AVStream* stream = avformat_new_stream(s, NULL);
  467. if (!stream) {
  468. return 1;
  469. }
  470. // Take stream info from the first frame.
  471. while (ctx->audio_frames_captured < 1) {
  472. CFRunLoopRunInMode(kCFRunLoopDefaultMode, 0.1, YES);
  473. }
  474. lock_frames(ctx);
  475. ctx->audio_stream_index = stream->index;
  476. avpriv_set_pts_info(stream, 64, 1, avf_time_base);
  477. format_desc = CMSampleBufferGetFormatDescription(ctx->current_audio_frame);
  478. const AudioStreamBasicDescription *basic_desc = CMAudioFormatDescriptionGetStreamBasicDescription(format_desc);
  479. if (!basic_desc) {
  480. av_log(s, AV_LOG_ERROR, "audio format not available\n");
  481. return 1;
  482. }
  483. stream->codecpar->codec_type = AVMEDIA_TYPE_AUDIO;
  484. stream->codecpar->sample_rate = basic_desc->mSampleRate;
  485. stream->codecpar->channels = basic_desc->mChannelsPerFrame;
  486. stream->codecpar->channel_layout = av_get_default_channel_layout(stream->codecpar->channels);
  487. ctx->audio_channels = basic_desc->mChannelsPerFrame;
  488. ctx->audio_bits_per_sample = basic_desc->mBitsPerChannel;
  489. ctx->audio_float = basic_desc->mFormatFlags & kAudioFormatFlagIsFloat;
  490. ctx->audio_be = basic_desc->mFormatFlags & kAudioFormatFlagIsBigEndian;
  491. ctx->audio_signed_integer = basic_desc->mFormatFlags & kAudioFormatFlagIsSignedInteger;
  492. ctx->audio_packed = basic_desc->mFormatFlags & kAudioFormatFlagIsPacked;
  493. ctx->audio_non_interleaved = basic_desc->mFormatFlags & kAudioFormatFlagIsNonInterleaved;
  494. if (basic_desc->mFormatID == kAudioFormatLinearPCM &&
  495. ctx->audio_float &&
  496. ctx->audio_bits_per_sample == 32 &&
  497. ctx->audio_packed) {
  498. stream->codecpar->codec_id = ctx->audio_be ? AV_CODEC_ID_PCM_F32BE : AV_CODEC_ID_PCM_F32LE;
  499. } else if (basic_desc->mFormatID == kAudioFormatLinearPCM &&
  500. ctx->audio_signed_integer &&
  501. ctx->audio_bits_per_sample == 16 &&
  502. ctx->audio_packed) {
  503. stream->codecpar->codec_id = ctx->audio_be ? AV_CODEC_ID_PCM_S16BE : AV_CODEC_ID_PCM_S16LE;
  504. } else if (basic_desc->mFormatID == kAudioFormatLinearPCM &&
  505. ctx->audio_signed_integer &&
  506. ctx->audio_bits_per_sample == 24 &&
  507. ctx->audio_packed) {
  508. stream->codecpar->codec_id = ctx->audio_be ? AV_CODEC_ID_PCM_S24BE : AV_CODEC_ID_PCM_S24LE;
  509. } else if (basic_desc->mFormatID == kAudioFormatLinearPCM &&
  510. ctx->audio_signed_integer &&
  511. ctx->audio_bits_per_sample == 32 &&
  512. ctx->audio_packed) {
  513. stream->codecpar->codec_id = ctx->audio_be ? AV_CODEC_ID_PCM_S32BE : AV_CODEC_ID_PCM_S32LE;
  514. } else {
  515. av_log(s, AV_LOG_ERROR, "audio format is not supported\n");
  516. return 1;
  517. }
  518. if (ctx->audio_non_interleaved) {
  519. CMBlockBufferRef block_buffer = CMSampleBufferGetDataBuffer(ctx->current_audio_frame);
  520. ctx->audio_buffer_size = CMBlockBufferGetDataLength(block_buffer);
  521. ctx->audio_buffer = av_malloc(ctx->audio_buffer_size);
  522. if (!ctx->audio_buffer) {
  523. av_log(s, AV_LOG_ERROR, "error allocating audio buffer\n");
  524. return 1;
  525. }
  526. }
  527. CFRelease(ctx->current_audio_frame);
  528. ctx->current_audio_frame = nil;
  529. unlock_frames(ctx);
  530. return 0;
  531. }
/* Demuxer entry point: resolve the requested video/audio devices (by index
 * or by name, including "Capture screen N" pseudo-devices on OS X), build
 * the AVCaptureSession, start it and create the output streams.
 * Returns 0 on success, AVERROR(EIO) on any failure. */
static int avf_read_header(AVFormatContext *s)
{
    NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
    int capture_screen      = 0;
    uint32_t num_screens    = 0;
    AVFContext *ctx         = (AVFContext*)s->priv_data;
    AVCaptureDevice *video_device = nil;
    AVCaptureDevice *audio_device = nil;
    // Find capture device
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    ctx->num_video_devices = [devices count];

    ctx->first_pts          = av_gettime();
    ctx->first_audio_pts    = av_gettime();

    pthread_mutex_init(&ctx->frame_lock, NULL);
    pthread_cond_init(&ctx->frame_wait_cond, NULL);

#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
    /* First call only counts the attached displays. */
    CGGetActiveDisplayList(0, NULL, &num_screens);
#endif

    // List devices if requested
    if (ctx->list_devices) {
        int index = 0;
        av_log(ctx, AV_LOG_INFO, "AVFoundation video devices:\n");
        for (AVCaptureDevice *device in devices) {
            const char *name = [[device localizedName] UTF8String];
            index            = [devices indexOfObject:device];
            av_log(ctx, AV_LOG_INFO, "[%d] %s\n", index, name);
            index++;
        }
#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
        /* Screens are listed after the real devices, continuing the index. */
        if (num_screens > 0) {
            CGDirectDisplayID screens[num_screens];
            CGGetActiveDisplayList(num_screens, screens, &num_screens);
            for (int i = 0; i < num_screens; i++) {
                av_log(ctx, AV_LOG_INFO, "[%d] Capture screen %d\n", index + i, i);
            }
        }
#endif

        av_log(ctx, AV_LOG_INFO, "AVFoundation audio devices:\n");
        devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio];
        for (AVCaptureDevice *device in devices) {
            const char *name = [[device localizedName] UTF8String];
            int index  = [devices indexOfObject:device];
            av_log(ctx, AV_LOG_INFO, "[%d] %s\n", index, name);
        }
        /* Listing is not an error, but capture does not proceed. */
        goto fail;
    }

    // parse input filename for video and audio device
    parse_device_name(s);

    // check for device index given in filename
    if (ctx->video_device_index == -1 && ctx->video_filename) {
        sscanf(ctx->video_filename, "%d", &ctx->video_device_index);
    }
    if (ctx->audio_device_index == -1 && ctx->audio_filename) {
        sscanf(ctx->audio_filename, "%d", &ctx->audio_device_index);
    }

    if (ctx->video_device_index >= 0) {
        if (ctx->video_device_index < ctx->num_video_devices) {
            video_device = [devices objectAtIndex:ctx->video_device_index];
        } else if (ctx->video_device_index < ctx->num_video_devices + num_screens) {
#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
            /* Index falls in the screen range: build an AVCaptureScreenInput
             * and treat it as the "video device" from here on. */
            CGDirectDisplayID screens[num_screens];
            CGGetActiveDisplayList(num_screens, screens, &num_screens);
            AVCaptureScreenInput* capture_screen_input = [[[AVCaptureScreenInput alloc] initWithDisplayID:screens[ctx->video_device_index - ctx->num_video_devices]] autorelease];

            if (ctx->framerate.num > 0) {
                capture_screen_input.minFrameDuration = CMTimeMake(ctx->framerate.den, ctx->framerate.num);
            }

#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1080
            if (ctx->capture_cursor) {
                capture_screen_input.capturesCursor = YES;
            } else {
                capture_screen_input.capturesCursor = NO;
            }
#endif

            if (ctx->capture_mouse_clicks) {
                capture_screen_input.capturesMouseClicks = YES;
            } else {
                capture_screen_input.capturesMouseClicks = NO;
            }

            video_device = (AVCaptureDevice*) capture_screen_input;
            capture_screen = 1;
#endif
         } else {
            av_log(ctx, AV_LOG_ERROR, "Invalid device index\n");
            goto fail;
        }
    } else if (ctx->video_filename &&
               strncmp(ctx->video_filename, "none", 4)) {
        if (!strncmp(ctx->video_filename, "default", 7)) {
            video_device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
        } else {
        // looking for video inputs
            for (AVCaptureDevice *device in devices) {
                /* Prefix match on the localized device name. */
                if (!strncmp(ctx->video_filename, [[device localizedName] UTF8String], strlen(ctx->video_filename))) {
                    video_device = device;
                    break;
                }
            }

#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
            // looking for screen inputs
            if (!video_device) {
                int idx;
                if(sscanf(ctx->video_filename, "Capture screen %d", &idx) && idx < num_screens) {
                    CGDirectDisplayID screens[num_screens];
                    CGGetActiveDisplayList(num_screens, screens, &num_screens);
                    AVCaptureScreenInput* capture_screen_input = [[[AVCaptureScreenInput alloc] initWithDisplayID:screens[idx]] autorelease];
                    video_device = (AVCaptureDevice*) capture_screen_input;
                    ctx->video_device_index = ctx->num_video_devices + idx;
                    capture_screen = 1;

                    if (ctx->framerate.num > 0) {
                        capture_screen_input.minFrameDuration = CMTimeMake(ctx->framerate.den, ctx->framerate.num);
                    }

#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1080
                    if (ctx->capture_cursor) {
                        capture_screen_input.capturesCursor = YES;
                    } else {
                        capture_screen_input.capturesCursor = NO;
                    }
#endif

                    if (ctx->capture_mouse_clicks) {
                        capture_screen_input.capturesMouseClicks = YES;
                    } else {
                        capture_screen_input.capturesMouseClicks = NO;
                    }
                }
            }
#endif
        }

        if (!video_device) {
            av_log(ctx, AV_LOG_ERROR, "Video device not found\n");
            goto fail;
        }
    }

    // get audio device
    if (ctx->audio_device_index >= 0) {
        NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio];

        if (ctx->audio_device_index >= [devices count]) {
            av_log(ctx, AV_LOG_ERROR, "Invalid audio device index\n");
            goto fail;
        }

        audio_device = [devices objectAtIndex:ctx->audio_device_index];
    } else if (ctx->audio_filename &&
               strncmp(ctx->audio_filename, "none", 4)) {
        if (!strncmp(ctx->audio_filename, "default", 7)) {
            audio_device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
        } else {
            NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio];

            for (AVCaptureDevice *device in devices) {
                if (!strncmp(ctx->audio_filename, [[device localizedName] UTF8String], strlen(ctx->audio_filename))) {
                    audio_device = device;
                    break;
                }
            }
        }

        if (!audio_device) {
            av_log(ctx, AV_LOG_ERROR, "Audio device not found\n");
             goto fail;
        }
    }

    // Neither video nor audio capture device found, fail
    if (!video_device && !audio_device) {
        av_log(s, AV_LOG_ERROR, "No AV capture device found\n");
        goto fail;
    }

    if (video_device) {
        if (ctx->video_device_index < ctx->num_video_devices) {
            av_log(s, AV_LOG_DEBUG, "'%s' opened\n", [[video_device localizedName] UTF8String]);
        } else {
            av_log(s, AV_LOG_DEBUG, "'%s' opened\n", [[video_device description] UTF8String]);
        }
    }
    if (audio_device) {
        av_log(s, AV_LOG_DEBUG, "audio device '%s' opened\n", [[audio_device localizedName] UTF8String]);
    }

    // Initialize capture session
    ctx->capture_session = [[AVCaptureSession alloc] init];

    if (video_device && add_video_device(s, video_device)) {
        goto fail;
    }
    /* NOTE(review): audio attach failure is silently ignored (empty body),
     * so capture degrades to video-only — confirm this is intentional. */
    if (audio_device && add_audio_device(s, audio_device)) {
    }

    [ctx->capture_session startRunning];

    /* Unlock device configuration only after the session is started so it
     * does not reset the capture formats */
    if (!capture_screen) {
        [video_device unlockForConfiguration];
    }

    if (video_device && get_video_config(s)) {
        goto fail;
    }

    // set audio stream
    if (audio_device && get_audio_config(s)) {
        goto fail;
    }

    [pool release];
    return 0;

fail:
    [pool release];
    destroy_context(ctx);
    return AVERROR(EIO);
}
  732. static int avf_read_packet(AVFormatContext *s, AVPacket *pkt)
  733. {
  734. AVFContext* ctx = (AVFContext*)s->priv_data;
  735. do {
  736. CVImageBufferRef image_buffer;
  737. lock_frames(ctx);
  738. image_buffer = CMSampleBufferGetImageBuffer(ctx->current_frame);
  739. if (ctx->current_frame != nil) {
  740. void *data;
  741. if (av_new_packet(pkt, (int)CVPixelBufferGetDataSize(image_buffer)) < 0) {
  742. return AVERROR(EIO);
  743. }
  744. CMItemCount count;
  745. CMSampleTimingInfo timing_info;
  746. if (CMSampleBufferGetOutputSampleTimingInfoArray(ctx->current_frame, 1, &timing_info, &count) == noErr) {
  747. AVRational timebase_q = av_make_q(1, timing_info.presentationTimeStamp.timescale);
  748. pkt->pts = pkt->dts = av_rescale_q(timing_info.presentationTimeStamp.value, timebase_q, avf_time_base_q);
  749. }
  750. pkt->stream_index = ctx->video_stream_index;
  751. pkt->flags |= AV_PKT_FLAG_KEY;
  752. CVPixelBufferLockBaseAddress(image_buffer, 0);
  753. data = CVPixelBufferGetBaseAddress(image_buffer);
  754. memcpy(pkt->data, data, pkt->size);
  755. CVPixelBufferUnlockBaseAddress(image_buffer, 0);
  756. CFRelease(ctx->current_frame);
  757. ctx->current_frame = nil;
  758. } else if (ctx->current_audio_frame != nil) {
  759. CMBlockBufferRef block_buffer = CMSampleBufferGetDataBuffer(ctx->current_audio_frame);
  760. int block_buffer_size = CMBlockBufferGetDataLength(block_buffer);
  761. if (!block_buffer || !block_buffer_size) {
  762. return AVERROR(EIO);
  763. }
  764. if (ctx->audio_non_interleaved && block_buffer_size > ctx->audio_buffer_size) {
  765. return AVERROR_BUFFER_TOO_SMALL;
  766. }
  767. if (av_new_packet(pkt, block_buffer_size) < 0) {
  768. return AVERROR(EIO);
  769. }
  770. CMItemCount count;
  771. CMSampleTimingInfo timing_info;
  772. if (CMSampleBufferGetOutputSampleTimingInfoArray(ctx->current_audio_frame, 1, &timing_info, &count) == noErr) {
  773. AVRational timebase_q = av_make_q(1, timing_info.presentationTimeStamp.timescale);
  774. pkt->pts = pkt->dts = av_rescale_q(timing_info.presentationTimeStamp.value, timebase_q, avf_time_base_q);
  775. }
  776. pkt->stream_index = ctx->audio_stream_index;
  777. pkt->flags |= AV_PKT_FLAG_KEY;
  778. if (ctx->audio_non_interleaved) {
  779. int sample, c, shift, num_samples;
  780. OSStatus ret = CMBlockBufferCopyDataBytes(block_buffer, 0, pkt->size, ctx->audio_buffer);
  781. if (ret != kCMBlockBufferNoErr) {
  782. return AVERROR(EIO);
  783. }
  784. num_samples = pkt->size / (ctx->audio_channels * (ctx->audio_bits_per_sample >> 3));
  785. // transform decoded frame into output format
  786. #define INTERLEAVE_OUTPUT(bps) \
  787. { \
  788. int##bps##_t **src; \
  789. int##bps##_t *dest; \
  790. src = av_malloc(ctx->audio_channels * sizeof(int##bps##_t*)); \
  791. if (!src) return AVERROR(EIO); \
  792. for (c = 0; c < ctx->audio_channels; c++) { \
  793. src[c] = ((int##bps##_t*)ctx->audio_buffer) + c * num_samples; \
  794. } \
  795. dest = (int##bps##_t*)pkt->data; \
  796. shift = bps - ctx->audio_bits_per_sample; \
  797. for (sample = 0; sample < num_samples; sample++) \
  798. for (c = 0; c < ctx->audio_channels; c++) \
  799. *dest++ = src[c][sample] << shift; \
  800. av_freep(&src); \
  801. }
  802. if (ctx->audio_bits_per_sample <= 16) {
  803. INTERLEAVE_OUTPUT(16)
  804. } else {
  805. INTERLEAVE_OUTPUT(32)
  806. }
  807. } else {
  808. OSStatus ret = CMBlockBufferCopyDataBytes(block_buffer, 0, pkt->size, pkt->data);
  809. if (ret != kCMBlockBufferNoErr) {
  810. return AVERROR(EIO);
  811. }
  812. }
  813. CFRelease(ctx->current_audio_frame);
  814. ctx->current_audio_frame = nil;
  815. } else {
  816. pkt->data = NULL;
  817. pthread_cond_wait(&ctx->frame_wait_cond, &ctx->frame_lock);
  818. }
  819. unlock_frames(ctx);
  820. } while (!pkt->data);
  821. return 0;
  822. }
  823. static int avf_close(AVFormatContext *s)
  824. {
  825. AVFContext* ctx = (AVFContext*)s->priv_data;
  826. destroy_context(ctx);
  827. return 0;
  828. }
/* Private options of the avfoundation input device, exposed via the
 * AVClass below. All options are decoding-side parameters. */
static const AVOption options[] = {
    /* Enumerate devices and exit instead of capturing. */
    { "list_devices", "list available devices", offsetof(AVFContext, list_devices), AV_OPT_TYPE_INT, {.i64=0}, 0, 1, AV_OPT_FLAG_DECODING_PARAM, "list_devices" },
    { "true", "", 0, AV_OPT_TYPE_CONST, {.i64=1}, 0, 0, AV_OPT_FLAG_DECODING_PARAM, "list_devices" },
    { "false", "", 0, AV_OPT_TYPE_CONST, {.i64=0}, 0, 0, AV_OPT_FLAG_DECODING_PARAM, "list_devices" },
    /* -1 (default) means "select by name from the input filename" instead. */
    { "video_device_index", "select video device by index for devices with same name (starts at 0)", offsetof(AVFContext, video_device_index), AV_OPT_TYPE_INT, {.i64 = -1}, -1, INT_MAX, AV_OPT_FLAG_DECODING_PARAM },
    { "audio_device_index", "select audio device by index for devices with same name (starts at 0)", offsetof(AVFContext, audio_device_index), AV_OPT_TYPE_INT, {.i64 = -1}, -1, INT_MAX, AV_OPT_FLAG_DECODING_PARAM },
    { "pixel_format", "set pixel format", offsetof(AVFContext, pixel_format), AV_OPT_TYPE_PIXEL_FMT, {.i64 = AV_PIX_FMT_YUV420P}, 0, INT_MAX, AV_OPT_FLAG_DECODING_PARAM},
    { "framerate", "set frame rate", offsetof(AVFContext, framerate), AV_OPT_TYPE_VIDEO_RATE, {.str = "ntsc"}, 0, INT_MAX, AV_OPT_FLAG_DECODING_PARAM },
    { "video_size", "set video size", offsetof(AVFContext, width), AV_OPT_TYPE_IMAGE_SIZE, {.str = NULL}, 0, 0, AV_OPT_FLAG_DECODING_PARAM },
    /* Screen-capture-only flags (ignored for camera devices). */
    { "capture_cursor", "capture the screen cursor", offsetof(AVFContext, capture_cursor), AV_OPT_TYPE_INT, {.i64=0}, 0, 1, AV_OPT_FLAG_DECODING_PARAM },
    { "capture_mouse_clicks", "capture the screen mouse clicks", offsetof(AVFContext, capture_mouse_clicks), AV_OPT_TYPE_INT, {.i64=0}, 0, 1, AV_OPT_FLAG_DECODING_PARAM },

    { NULL },
};
/* AVClass wiring so the options table above is visible to av_opt_* and
 * the device is categorized as a video input in device listings. */
static const AVClass avf_class = {
    .class_name = "AVFoundation input device",
    .item_name  = av_default_item_name,
    .option     = options,
    .version    = LIBAVUTIL_VERSION_INT,
    .category   = AV_CLASS_CATEGORY_DEVICE_VIDEO_INPUT,
};
/* Registration entry for the avfoundation input device. AVFMT_NOFILE:
 * the "filename" names capture devices, not a real file to open. */
AVInputFormat ff_avfoundation_demuxer = {
    .name           = "avfoundation",
    .long_name      = NULL_IF_CONFIG_SMALL("AVFoundation input device"),
    .priv_data_size = sizeof(AVFContext),
    .read_header    = avf_read_header,
    .read_packet    = avf_read_packet,
    .read_close     = avf_close,
    .flags          = AVFMT_NOFILE,
    .priv_class     = &avf_class,
};