avfoundation.m 48 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
7127812791280128112821283128412851286128712881289129012911292129312941295129612971298129913001301130213031304130513061307
  1. /*
  2. * AVFoundation input device
  3. * Copyright (c) 2014 Thilo Borgmann <thilo.borgmann@mail.de>
  4. *
  5. * This file is part of FFmpeg.
  6. *
  7. * FFmpeg is free software; you can redistribute it and/or
  8. * modify it under the terms of the GNU Lesser General Public
  9. * License as published by the Free Software Foundation; either
  10. * version 2.1 of the License, or (at your option) any later version.
  11. *
  12. * FFmpeg is distributed in the hope that it will be useful,
  13. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  14. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  15. * Lesser General Public License for more details.
  16. *
  17. * You should have received a copy of the GNU Lesser General Public
  18. * License along with FFmpeg; if not, write to the Free Software
  19. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  20. */
  21. /**
  22. * @file
  23. * AVFoundation input device
  24. * @author Thilo Borgmann <thilo.borgmann@mail.de>
  25. */
  26. #import <AVFoundation/AVFoundation.h>
  27. #include <pthread.h>
  28. #include "libavutil/channel_layout.h"
  29. #include "libavutil/mem.h"
  30. #include "libavutil/pixdesc.h"
  31. #include "libavutil/opt.h"
  32. #include "libavutil/avstring.h"
  33. #include "libavformat/demux.h"
  34. #include "libavformat/internal.h"
  35. #include "libavutil/internal.h"
  36. #include "libavutil/parseutils.h"
  37. #include "libavutil/time.h"
  38. #include "libavutil/imgutils.h"
  39. #include "avdevice.h"
/* Internal timebase used for all demuxer timestamps: microseconds. */
static const int avf_time_base = 1000000;

/* The same timebase expressed as an AVRational, for av_rescale_q(). */
static const AVRational avf_time_base_q = {
    .num = 1,
    .den = avf_time_base
};
/** One entry of the FFmpeg <-> CoreVideo pixel format mapping table. */
struct AVFPixelFormatSpec {
    enum AVPixelFormat ff_id;   // FFmpeg pixel format
    OSType             avf_id;  // matching kCVPixelFormatType_* constant
};
/**
 * Mapping table between FFmpeg pixel formats and CoreVideo pixel format
 * types. Terminated by an AV_PIX_FMT_NONE sentinel entry; lookups below
 * iterate until that sentinel.
 */
static const struct AVFPixelFormatSpec avf_pixel_formats[] = {
    { AV_PIX_FMT_MONOBLACK,    kCVPixelFormatType_1Monochrome },
    { AV_PIX_FMT_RGB555BE,     kCVPixelFormatType_16BE555 },
    { AV_PIX_FMT_RGB555LE,     kCVPixelFormatType_16LE555 },
    { AV_PIX_FMT_RGB565BE,     kCVPixelFormatType_16BE565 },
    { AV_PIX_FMT_RGB565LE,     kCVPixelFormatType_16LE565 },
    { AV_PIX_FMT_RGB24,        kCVPixelFormatType_24RGB },
    { AV_PIX_FMT_BGR24,        kCVPixelFormatType_24BGR },
    { AV_PIX_FMT_0RGB,         kCVPixelFormatType_32ARGB },
    { AV_PIX_FMT_BGR0,         kCVPixelFormatType_32BGRA },
    { AV_PIX_FMT_0BGR,         kCVPixelFormatType_32ABGR },
    { AV_PIX_FMT_RGB0,         kCVPixelFormatType_32RGBA },
    { AV_PIX_FMT_BGR48BE,      kCVPixelFormatType_48RGB },
    { AV_PIX_FMT_UYVY422,      kCVPixelFormatType_422YpCbCr8 },
    { AV_PIX_FMT_YUVA444P,     kCVPixelFormatType_4444YpCbCrA8R },
    { AV_PIX_FMT_YUVA444P16LE, kCVPixelFormatType_4444AYpCbCr16 },
    { AV_PIX_FMT_YUV444P,      kCVPixelFormatType_444YpCbCr8 },
    { AV_PIX_FMT_YUV422P16,    kCVPixelFormatType_422YpCbCr16 },
    { AV_PIX_FMT_YUV422P10,    kCVPixelFormatType_422YpCbCr10 },
    { AV_PIX_FMT_YUV444P10,    kCVPixelFormatType_444YpCbCr10 },
    { AV_PIX_FMT_YUV420P,      kCVPixelFormatType_420YpCbCr8Planar },
    { AV_PIX_FMT_NV12,         kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange },
    { AV_PIX_FMT_YUYV422,      kCVPixelFormatType_422YpCbCr8_yuvs },
#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1080
    // kCVPixelFormatType_OneComponent8 only exists on OS X >= 10.8
    { AV_PIX_FMT_GRAY8,        kCVPixelFormatType_OneComponent8 },
#endif
    { AV_PIX_FMT_NONE, 0 }
};
/** Private context of the AVFoundation input device. */
typedef struct
{
    AVClass*        class;

    int             frames_captured;        // # of video frames received so far (written by capture thread)
    int             audio_frames_captured;  // # of audio frames received so far
    pthread_mutex_t frame_lock;             // guards current_frame / current_audio_frame
    id              avf_delegate;           // AVFFrameReceiver instance
    id              avf_audio_delegate;     // AVFAudioReceiver instance

    AVRational      framerate;              // requested capture framerate
    int             width, height;          // requested capture size; 0x0 = accept any

    int             capture_cursor;         // user options (see AVOption table elsewhere in file)
    int             capture_mouse_clicks;
    int             capture_raw_data;
    int             drop_late_frames;
    int             video_is_muxed;         // device delivers muxed (e.g. DV) data
    int             video_is_screen;        // device is a screen-capture input

    int             list_devices;
    int             video_device_index;
    int             video_stream_index;
    int             audio_device_index;
    int             audio_stream_index;

    char            *url;                   // owned copy of s->url, split in place by parse_device_name()
    char            *video_filename;        // points into url; not separately freed
    char            *audio_filename;        // points into url; not separately freed

    int             num_video_devices;

    // Audio format properties derived from the first captured audio frame
    int             audio_channels;
    int             audio_bits_per_sample;
    int             audio_float;
    int             audio_be;
    int             audio_signed_integer;
    int             audio_packed;
    int             audio_non_interleaved;

    int32_t         *audio_buffer;          // scratch buffer for repacking non-interleaved audio
    int             audio_buffer_size;

    enum AVPixelFormat pixel_format;

    AVCaptureSession         *capture_session;
    AVCaptureVideoDataOutput *video_output;
    AVCaptureAudioDataOutput *audio_output;
    CMSampleBufferRef         current_frame;        // most recent video frame, guarded by frame_lock
    CMSampleBufferRef         current_audio_frame;  // most recent audio frame, guarded by frame_lock

    AVCaptureDevice          *observed_device;      // device watched via KVO for transport control changes
#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
    AVCaptureDeviceTransportControlsPlaybackMode observed_mode;
#endif
    int              observed_quit;  // set once the observed device stops playing (EOF signal)
} AVFContext;
/** Acquire the mutex guarding the stored current video/audio frames. */
static void lock_frames(AVFContext* ctx)
{
    pthread_mutex_lock(&ctx->frame_lock);
}
/** Release the mutex guarding the stored current video/audio frames. */
static void unlock_frames(AVFContext* ctx)
{
    pthread_mutex_unlock(&ctx->frame_lock);
}
/** FrameReceiver class - video sample buffer delegate for AVCaptureSession.
 *  Holds a non-owning pointer back to the demuxer's AVFContext.
 */
@interface AVFFrameReceiver : NSObject
{
    AVFContext* _context;
}

- (id)initWithContext:(AVFContext*)context;

- (void) captureOutput:(AVCaptureOutput *)captureOutput
 didOutputSampleBuffer:(CMSampleBufferRef)videoFrame
        fromConnection:(AVCaptureConnection *)connection;

@end
  142. @implementation AVFFrameReceiver
  143. - (id)initWithContext:(AVFContext*)context
  144. {
  145. if (self = [super init]) {
  146. _context = context;
  147. // start observing if a device is set for it
  148. #if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
  149. if (_context->observed_device) {
  150. NSString *keyPath = NSStringFromSelector(@selector(transportControlsPlaybackMode));
  151. NSKeyValueObservingOptions options = NSKeyValueObservingOptionNew;
  152. [_context->observed_device addObserver: self
  153. forKeyPath: keyPath
  154. options: options
  155. context: _context];
  156. }
  157. #endif
  158. }
  159. return self;
  160. }
  161. - (void)dealloc {
  162. // stop observing if a device is set for it
  163. #if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
  164. if (_context->observed_device) {
  165. NSString *keyPath = NSStringFromSelector(@selector(transportControlsPlaybackMode));
  166. [_context->observed_device removeObserver: self forKeyPath: keyPath];
  167. }
  168. #endif
  169. [super dealloc];
  170. }
  171. - (void)observeValueForKeyPath:(NSString *)keyPath
  172. ofObject:(id)object
  173. change:(NSDictionary *)change
  174. context:(void *)context {
  175. if (context == _context) {
  176. #if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
  177. AVCaptureDeviceTransportControlsPlaybackMode mode =
  178. [change[NSKeyValueChangeNewKey] integerValue];
  179. if (mode != _context->observed_mode) {
  180. if (mode == AVCaptureDeviceTransportControlsNotPlayingMode) {
  181. _context->observed_quit = 1;
  182. }
  183. _context->observed_mode = mode;
  184. }
  185. #endif
  186. } else {
  187. [super observeValueForKeyPath: keyPath
  188. ofObject: object
  189. change: change
  190. context: context];
  191. }
  192. }
  193. - (void) captureOutput:(AVCaptureOutput *)captureOutput
  194. didOutputSampleBuffer:(CMSampleBufferRef)videoFrame
  195. fromConnection:(AVCaptureConnection *)connection
  196. {
  197. lock_frames(_context);
  198. if (_context->current_frame != nil) {
  199. CFRelease(_context->current_frame);
  200. }
  201. _context->current_frame = (CMSampleBufferRef)CFRetain(videoFrame);
  202. unlock_frames(_context);
  203. ++_context->frames_captured;
  204. }
  205. @end
/** AudioReceiver class - audio sample buffer delegate for AVCaptureSession.
 *  Holds a non-owning pointer back to the demuxer's AVFContext.
 */
@interface AVFAudioReceiver : NSObject
{
    AVFContext* _context;
}

- (id)initWithContext:(AVFContext*)context;

- (void) captureOutput:(AVCaptureOutput *)captureOutput
 didOutputSampleBuffer:(CMSampleBufferRef)audioFrame
        fromConnection:(AVCaptureConnection *)connection;

@end
  217. @implementation AVFAudioReceiver
  218. - (id)initWithContext:(AVFContext*)context
  219. {
  220. if (self = [super init]) {
  221. _context = context;
  222. }
  223. return self;
  224. }
  225. - (void) captureOutput:(AVCaptureOutput *)captureOutput
  226. didOutputSampleBuffer:(CMSampleBufferRef)audioFrame
  227. fromConnection:(AVCaptureConnection *)connection
  228. {
  229. lock_frames(_context);
  230. if (_context->current_audio_frame != nil) {
  231. CFRelease(_context->current_audio_frame);
  232. }
  233. _context->current_audio_frame = (CMSampleBufferRef)CFRetain(audioFrame);
  234. unlock_frames(_context);
  235. ++_context->audio_frames_captured;
  236. }
  237. @end
  238. static void destroy_context(AVFContext* ctx)
  239. {
  240. [ctx->capture_session stopRunning];
  241. [ctx->capture_session release];
  242. [ctx->video_output release];
  243. [ctx->audio_output release];
  244. [ctx->avf_delegate release];
  245. [ctx->avf_audio_delegate release];
  246. ctx->capture_session = NULL;
  247. ctx->video_output = NULL;
  248. ctx->audio_output = NULL;
  249. ctx->avf_delegate = NULL;
  250. ctx->avf_audio_delegate = NULL;
  251. av_freep(&ctx->url);
  252. av_freep(&ctx->audio_buffer);
  253. pthread_mutex_destroy(&ctx->frame_lock);
  254. if (ctx->current_frame) {
  255. CFRelease(ctx->current_frame);
  256. }
  257. }
  258. static int parse_device_name(AVFormatContext *s)
  259. {
  260. AVFContext *ctx = (AVFContext*)s->priv_data;
  261. char *save;
  262. ctx->url = av_strdup(s->url);
  263. if (!ctx->url)
  264. return AVERROR(ENOMEM);
  265. if (ctx->url[0] != ':') {
  266. ctx->video_filename = av_strtok(ctx->url, ":", &save);
  267. ctx->audio_filename = av_strtok(NULL, ":", &save);
  268. } else {
  269. ctx->audio_filename = av_strtok(ctx->url, ":", &save);
  270. }
  271. return 0;
  272. }
  273. /**
  274. * Configure the video device.
  275. *
  276. * Configure the video device using a run-time approach to access properties
  277. * since formats, activeFormat are available since iOS >= 7.0 or OSX >= 10.7
* and activeVideoMaxFrameDuration is available since iOS >= 7.0 and OSX >= 10.9.
  279. *
  280. * The NSUndefinedKeyException must be handled by the caller of this function.
  281. *
  282. */
/**
 * Select a capture format matching the requested size and framerate and
 * apply it to the device.
 *
 * Properties are accessed via KVC string keys ("formats", "activeFormat",
 * ...) so the code still builds against SDKs where these selectors are not
 * declared; an NSUndefinedKeyException from the @try block means the API is
 * unavailable and the device keeps its default configuration.
 *
 * @return 0 on success or graceful fallback, AVERROR(EINVAL) if the
 *         requested mode is unsupported or the device cannot be locked.
 */
static int configure_video_device(AVFormatContext *s, AVCaptureDevice *video_device)
{
    AVFContext *ctx = (AVFContext*)s->priv_data;
    double framerate = av_q2d(ctx->framerate);
    NSObject *range = nil;
    NSObject *format = nil;
    NSObject *selected_range = nil;
    NSObject *selected_format = nil;

    // try to configure format by formats list
    // might raise an exception if no format list is given
    // (then fallback to default, no configuration)
    @try {
        for (format in [video_device valueForKey:@"formats"]) {
            CMFormatDescriptionRef formatDescription;
            CMVideoDimensions dimensions;

            formatDescription = (CMFormatDescriptionRef) [format performSelector:@selector(formatDescription)];
            dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);

            // 0x0 means "any size": take the first format, then look for a
            // frame rate range whose maximum matches the request.
            if ((ctx->width == 0 && ctx->height == 0) ||
                (dimensions.width == ctx->width && dimensions.height == ctx->height)) {

                selected_format = format;

                for (range in [format valueForKey:@"videoSupportedFrameRateRanges"]) {
                    double max_framerate;

                    [[range valueForKey:@"maxFrameRate"] getValue:&max_framerate];
                    if (fabs (framerate - max_framerate) < 0.01) {
                        selected_range = range;
                        break;
                    }
                }
            }
        }

        if (!selected_format) {
            av_log(s, AV_LOG_ERROR, "Selected video size (%dx%d) is not supported by the device.\n",
                ctx->width, ctx->height);
            goto unsupported_format;
        }

        if (!selected_range) {
            av_log(s, AV_LOG_ERROR, "Selected framerate (%f) is not supported by the device.\n",
                framerate);
            // Muxed devices (e.g. DV) often report no frame rate ranges;
            // keep going with the device default instead of failing.
            if (ctx->video_is_muxed) {
                av_log(s, AV_LOG_ERROR, "Falling back to default.\n");
            } else {
                goto unsupported_format;
            }
        }

        if ([video_device lockForConfiguration:NULL] == YES) {
            if (selected_format) {
                [video_device setValue:selected_format forKey:@"activeFormat"];
            }
            if (selected_range) {
                // Pin min and max frame duration to the same value to get a
                // fixed frame rate.
                NSValue *min_frame_duration = [selected_range valueForKey:@"minFrameDuration"];
                [video_device setValue:min_frame_duration forKey:@"activeVideoMinFrameDuration"];
                [video_device setValue:min_frame_duration forKey:@"activeVideoMaxFrameDuration"];
            }
        } else {
            av_log(s, AV_LOG_ERROR, "Could not lock device for configuration.\n");
            return AVERROR(EINVAL);
        }
    } @catch(NSException *e) {
        av_log(ctx, AV_LOG_WARNING, "Configuration of video device failed, falling back to default.\n");
    }

    return 0;

unsupported_format:
    // Dump every size/frame-rate combination the device advertises to help
    // the user pick a valid mode.
    av_log(s, AV_LOG_ERROR, "Supported modes:\n");
    for (format in [video_device valueForKey:@"formats"]) {
        CMFormatDescriptionRef formatDescription;
        CMVideoDimensions dimensions;

        formatDescription = (CMFormatDescriptionRef) [format performSelector:@selector(formatDescription)];
        dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);

        for (range in [format valueForKey:@"videoSupportedFrameRateRanges"]) {
            double min_framerate;
            double max_framerate;

            [[range valueForKey:@"minFrameRate"] getValue:&min_framerate];
            [[range valueForKey:@"maxFrameRate"] getValue:&max_framerate];
            av_log(s, AV_LOG_ERROR, "  %dx%d@[%f %f]fps\n",
                dimensions.width, dimensions.height,
                min_framerate, max_framerate);
        }
    }
    return AVERROR(EINVAL);
}
/**
 * Attach the given video device to the capture session: create the input,
 * configure format/framerate, negotiate a pixel format and install the
 * frame receiver delegate as the session's video data output.
 *
 * @return 0 on success; 1 or a negative AVERROR code on failure
 *         (callers only test for non-zero).
 */
static int add_video_device(AVFormatContext *s, AVCaptureDevice *video_device)
{
    AVFContext *ctx = (AVFContext*)s->priv_data;
    int ret;
    NSError *error  = nil;
    AVCaptureInput* capture_input = nil;
    struct AVFPixelFormatSpec pxl_fmt_spec;
    NSNumber *pixel_format;
    NSDictionary *capture_dict;
    dispatch_queue_t queue;

    // Indices below num_video_devices are real AVCaptureDevices; screen
    // capture devices are passed in already wrapped as an AVCaptureInput.
    if (ctx->video_device_index < ctx->num_video_devices) {
        capture_input = (AVCaptureInput*) [[[AVCaptureDeviceInput alloc] initWithDevice:video_device error:&error] autorelease];
    } else {
        capture_input = (AVCaptureInput*) video_device;
    }

    if (!capture_input) {
        av_log(s, AV_LOG_ERROR, "Failed to create AV capture input device: %s\n",
               [[error localizedDescription] UTF8String]);
        return 1;
    }

    if ([ctx->capture_session canAddInput:capture_input]) {
        [ctx->capture_session addInput:capture_input];
    } else {
        av_log(s, AV_LOG_ERROR, "can't add video input to capture session\n");
        return 1;
    }

    // Attaching output
    ctx->video_output = [[AVCaptureVideoDataOutput alloc] init];

    if (!ctx->video_output) {
        av_log(s, AV_LOG_ERROR, "Failed to init AV video output\n");
        return 1;
    }

    // Configure device framerate and video size
    @try {
        if ((ret = configure_video_device(s, video_device)) < 0) {
            return ret;
        }
    } @catch (NSException *exception) {
        // NSUndefinedKeyException means the KVC-based configuration API is
        // unavailable on this SDK/device; anything else is a real error.
        if (![[exception name] isEqualToString:NSUndefinedKeyException]) {
          av_log (s, AV_LOG_ERROR, "An error occurred: %s", [exception.reason UTF8String]);
          return AVERROR_EXTERNAL;
        }
    }

    // select pixel format
    pxl_fmt_spec.ff_id = AV_PIX_FMT_NONE;

    for (int i = 0; avf_pixel_formats[i].ff_id != AV_PIX_FMT_NONE; i++) {
        if (ctx->pixel_format == avf_pixel_formats[i].ff_id) {
            pxl_fmt_spec = avf_pixel_formats[i];
            break;
        }
    }

    // check if selected pixel format is supported by AVFoundation
    if (pxl_fmt_spec.ff_id == AV_PIX_FMT_NONE) {
        av_log(s, AV_LOG_ERROR, "Selected pixel format (%s) is not supported by AVFoundation.\n",
               av_get_pix_fmt_name(pxl_fmt_spec.ff_id));
        return 1;
    }

    // check if the pixel format is available for this device
    if ([[ctx->video_output availableVideoCVPixelFormatTypes] indexOfObject:[NSNumber numberWithInt:pxl_fmt_spec.avf_id]] == NSNotFound) {
        av_log(s, AV_LOG_ERROR, "Selected pixel format (%s) is not supported by the input device.\n",
               av_get_pix_fmt_name(pxl_fmt_spec.ff_id));

        pxl_fmt_spec.ff_id = AV_PIX_FMT_NONE;

        av_log(s, AV_LOG_ERROR, "Supported pixel formats:\n");
        for (NSNumber *pxl_fmt in [ctx->video_output availableVideoCVPixelFormatTypes]) {
            struct AVFPixelFormatSpec pxl_fmt_dummy;
            pxl_fmt_dummy.ff_id = AV_PIX_FMT_NONE;
            // Reverse-map the CoreVideo type back to an FFmpeg format so it
            // can be listed (and possibly used as a fallback).
            for (int i = 0; avf_pixel_formats[i].ff_id != AV_PIX_FMT_NONE; i++) {
                if ([pxl_fmt intValue] == avf_pixel_formats[i].avf_id) {
                    pxl_fmt_dummy = avf_pixel_formats[i];
                    break;
                }
            }

            if (pxl_fmt_dummy.ff_id != AV_PIX_FMT_NONE) {
                av_log(s, AV_LOG_ERROR, "  %s\n", av_get_pix_fmt_name(pxl_fmt_dummy.ff_id));

                // select first supported pixel format instead of user selected (or default) pixel format
                if (pxl_fmt_spec.ff_id == AV_PIX_FMT_NONE) {
                    pxl_fmt_spec = pxl_fmt_dummy;
                }
            }
        }

        // fail if there is no appropriate pixel format or print a warning about overriding the pixel format
        if (pxl_fmt_spec.ff_id == AV_PIX_FMT_NONE) {
            return 1;
        } else {
            av_log(s, AV_LOG_WARNING, "Overriding selected pixel format to use %s instead.\n",
                   av_get_pix_fmt_name(pxl_fmt_spec.ff_id));
        }
    }

    // set videoSettings to an empty dict for receiving raw data of muxed devices
    if (ctx->capture_raw_data) {
        ctx->pixel_format          = pxl_fmt_spec.ff_id;
        ctx->video_output.videoSettings = @{ };
    } else {
        ctx->pixel_format          = pxl_fmt_spec.ff_id;
        pixel_format = [NSNumber numberWithUnsignedInt:pxl_fmt_spec.avf_id];
        capture_dict = [NSDictionary dictionaryWithObject:pixel_format
                                                   forKey:(id)kCVPixelBufferPixelFormatTypeKey];

        [ctx->video_output setVideoSettings:capture_dict];
    }
    [ctx->video_output setAlwaysDiscardsLateVideoFrames:ctx->drop_late_frames];

#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
    // check for transport control support and set observer device if supported
    if (!ctx->video_is_screen) {
        int trans_ctrl = [video_device transportControlsSupported];
        AVCaptureDeviceTransportControlsPlaybackMode trans_mode = [video_device transportControlsPlaybackMode];

        if (trans_ctrl) {
            ctx->observed_mode   = trans_mode;
            ctx->observed_device = video_device;
        }
    }
#endif

    ctx->avf_delegate = [[AVFFrameReceiver alloc] initWithContext:ctx];

    queue = dispatch_queue_create("avf_queue", NULL);
    [ctx->video_output setSampleBufferDelegate:ctx->avf_delegate queue:queue];
    dispatch_release(queue);

    if ([ctx->capture_session canAddOutput:ctx->video_output]) {
        [ctx->capture_session addOutput:ctx->video_output];
    } else {
        av_log(s, AV_LOG_ERROR, "can't add video output to capture session\n");
        return 1;
    }

    return 0;
}
/**
 * Attach the given audio device to the capture session and install the
 * audio receiver delegate as the session's audio data output.
 *
 * @return 0 on success, 1 on failure (callers only test for non-zero).
 */
static int add_audio_device(AVFormatContext *s, AVCaptureDevice *audio_device)
{
    AVFContext *ctx = (AVFContext*)s->priv_data;
    NSError *error  = nil;
    AVCaptureDeviceInput* audio_dev_input = [[[AVCaptureDeviceInput alloc] initWithDevice:audio_device error:&error] autorelease];
    dispatch_queue_t queue;

    if (!audio_dev_input) {
        av_log(s, AV_LOG_ERROR, "Failed to create AV capture input device: %s\n",
               [[error localizedDescription] UTF8String]);
        return 1;
    }

    if ([ctx->capture_session canAddInput:audio_dev_input]) {
        [ctx->capture_session addInput:audio_dev_input];
    } else {
        av_log(s, AV_LOG_ERROR, "can't add audio input to capture session\n");
        return 1;
    }

    // Attaching output
    ctx->audio_output = [[AVCaptureAudioDataOutput alloc] init];

    if (!ctx->audio_output) {
        av_log(s, AV_LOG_ERROR, "Failed to init AV audio output\n");
        return 1;
    }

    ctx->avf_audio_delegate = [[AVFAudioReceiver alloc] initWithContext:ctx];

    // The output retains the queue; releasing our reference here is safe.
    queue = dispatch_queue_create("avf_audio_queue", NULL);
    [ctx->audio_output setSampleBufferDelegate:ctx->avf_audio_delegate queue:queue];
    dispatch_release(queue);

    if ([ctx->capture_session canAddOutput:ctx->audio_output]) {
        [ctx->capture_session addOutput:ctx->audio_output];
    } else {
        av_log(s, AV_LOG_ERROR, "adding audio output to capture session failed\n");
        return 1;
    }

    return 0;
}
/**
 * Create the video AVStream and derive its parameters from the first
 * captured frame.
 *
 * Busy-waits on the run loop until the capture delegate has stored a frame,
 * then reads size/format under frame_lock. A frame without an image buffer
 * indicates muxed (DV) data. Consumes (releases) the stored frame.
 *
 * @return 0 on success, 1 if the stream could not be created.
 */
static int get_video_config(AVFormatContext *s)
{
    AVFContext *ctx = (AVFContext*)s->priv_data;
    CVImageBufferRef image_buffer;
    CGSize image_buffer_size;
    AVStream* stream = avformat_new_stream(s, NULL);

    if (!stream) {
        return 1;
    }

    // Take stream info from the first frame.
    while (ctx->frames_captured < 1) {
        CFRunLoopRunInMode(kCFRunLoopDefaultMode, 0.1, YES);
    }

    lock_frames(ctx);

    ctx->video_stream_index = stream->index;

    avpriv_set_pts_info(stream, 64, 1, avf_time_base);

    image_buffer      = CMSampleBufferGetImageBuffer(ctx->current_frame);

    if (image_buffer) {
        image_buffer_size = CVImageBufferGetEncodedSize(image_buffer);

        stream->codecpar->codec_id   = AV_CODEC_ID_RAWVIDEO;
        stream->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
        stream->codecpar->width      = (int)image_buffer_size.width;
        stream->codecpar->height     = (int)image_buffer_size.height;
        stream->codecpar->format     = ctx->pixel_format;
    } else {
        // No CVImageBuffer: the device delivers muxed DV data.
        stream->codecpar->codec_id   = AV_CODEC_ID_DVVIDEO;
        stream->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
        stream->codecpar->format     = ctx->pixel_format;
    }

    CFRelease(ctx->current_frame);
    ctx->current_frame = nil;

    unlock_frames(ctx);

    return 0;
}
/**
 * Create the audio AVStream and derive its parameters from the first
 * captured audio frame.
 *
 * Busy-waits until the audio delegate has stored a frame, then inspects the
 * AudioStreamBasicDescription to map the LPCM layout to an FFmpeg PCM codec
 * ID. For non-interleaved audio a scratch buffer is allocated that the
 * packet reader uses for repacking. Consumes (releases) the stored frame.
 *
 * @return 0 on success, 1 on unsupported format or allocation failure.
 */
static int get_audio_config(AVFormatContext *s)
{
    AVFContext *ctx = (AVFContext*)s->priv_data;
    CMFormatDescriptionRef format_desc;
    AVStream* stream = avformat_new_stream(s, NULL);

    if (!stream) {
        return 1;
    }

    // Take stream info from the first frame.
    while (ctx->audio_frames_captured < 1) {
        CFRunLoopRunInMode(kCFRunLoopDefaultMode, 0.1, YES);
    }

    lock_frames(ctx);

    ctx->audio_stream_index = stream->index;

    avpriv_set_pts_info(stream, 64, 1, avf_time_base);

    format_desc = CMSampleBufferGetFormatDescription(ctx->current_audio_frame);
    const AudioStreamBasicDescription *basic_desc = CMAudioFormatDescriptionGetStreamBasicDescription(format_desc);

    if (!basic_desc) {
        unlock_frames(ctx);
        av_log(s, AV_LOG_ERROR, "audio format not available\n");
        return 1;
    }

    stream->codecpar->codec_type     = AVMEDIA_TYPE_AUDIO;
    stream->codecpar->sample_rate    = basic_desc->mSampleRate;
    av_channel_layout_default(&stream->codecpar->ch_layout, basic_desc->mChannelsPerFrame);

    // Cache the format flags; avf_read_packet uses them for repacking.
    ctx->audio_channels        = basic_desc->mChannelsPerFrame;
    ctx->audio_bits_per_sample = basic_desc->mBitsPerChannel;
    ctx->audio_float           = basic_desc->mFormatFlags & kAudioFormatFlagIsFloat;
    ctx->audio_be              = basic_desc->mFormatFlags & kAudioFormatFlagIsBigEndian;
    ctx->audio_signed_integer  = basic_desc->mFormatFlags & kAudioFormatFlagIsSignedInteger;
    ctx->audio_packed          = basic_desc->mFormatFlags & kAudioFormatFlagIsPacked;
    ctx->audio_non_interleaved = basic_desc->mFormatFlags & kAudioFormatFlagIsNonInterleaved;

    // Map the LPCM layout (float/int, bit depth, endianness) to a PCM codec.
    if (basic_desc->mFormatID == kAudioFormatLinearPCM &&
        ctx->audio_float &&
        ctx->audio_bits_per_sample == 32 &&
        ctx->audio_packed) {
        stream->codecpar->codec_id = ctx->audio_be ? AV_CODEC_ID_PCM_F32BE : AV_CODEC_ID_PCM_F32LE;
    } else if (basic_desc->mFormatID == kAudioFormatLinearPCM &&
        ctx->audio_signed_integer &&
        ctx->audio_bits_per_sample == 16 &&
        ctx->audio_packed) {
        stream->codecpar->codec_id = ctx->audio_be ? AV_CODEC_ID_PCM_S16BE : AV_CODEC_ID_PCM_S16LE;
    } else if (basic_desc->mFormatID == kAudioFormatLinearPCM &&
        ctx->audio_signed_integer &&
        ctx->audio_bits_per_sample == 24 &&
        ctx->audio_packed) {
        stream->codecpar->codec_id = ctx->audio_be ? AV_CODEC_ID_PCM_S24BE : AV_CODEC_ID_PCM_S24LE;
    } else if (basic_desc->mFormatID == kAudioFormatLinearPCM &&
        ctx->audio_signed_integer &&
        ctx->audio_bits_per_sample == 32 &&
        ctx->audio_packed) {
        stream->codecpar->codec_id = ctx->audio_be ? AV_CODEC_ID_PCM_S32BE : AV_CODEC_ID_PCM_S32LE;
    } else {
        unlock_frames(ctx);
        av_log(s, AV_LOG_ERROR, "audio format is not supported\n");
        return 1;
    }

    if (ctx->audio_non_interleaved) {
        // Size the repacking buffer from the first frame; subsequent frames
        // are expected to have the same data length.
        CMBlockBufferRef block_buffer = CMSampleBufferGetDataBuffer(ctx->current_audio_frame);
        ctx->audio_buffer_size        = CMBlockBufferGetDataLength(block_buffer);
        ctx->audio_buffer             = av_malloc(ctx->audio_buffer_size);
        if (!ctx->audio_buffer) {
            unlock_frames(ctx);
            av_log(s, AV_LOG_ERROR, "error allocating audio buffer\n");
            return 1;
        }
    }

    CFRelease(ctx->current_audio_frame);
    ctx->current_audio_frame = nil;

    unlock_frames(ctx);

    return 0;
}
/**
 * Enumerate capture devices for the given media type.
 *
 * On modern SDKs (iOS >= 10.0 / macOS >= 10.15) this builds an explicit
 * device-type list and queries AVCaptureDeviceDiscoverySession; each device
 * type is gated on the deployment target it was introduced in. On older
 * SDKs it falls back to the deprecated devicesWithMediaType:.
 *
 * @return an autoreleased NSArray of AVCaptureDevice, or nil for an
 *         unsupported media type.
 */
static NSArray* getDevicesWithMediaType(AVMediaType mediaType) {
#if ((TARGET_OS_IPHONE && __IPHONE_OS_VERSION_MIN_REQUIRED >= 100000) || (TARGET_OS_OSX && __MAC_OS_X_VERSION_MIN_REQUIRED >= 101500))
    NSMutableArray *deviceTypes = nil;
    if (mediaType == AVMediaTypeVideo) {
        deviceTypes = [NSMutableArray arrayWithArray:@[AVCaptureDeviceTypeBuiltInWideAngleCamera]];
        #if (TARGET_OS_IPHONE && __IPHONE_OS_VERSION_MIN_REQUIRED >= 100000)
            [deviceTypes addObject: AVCaptureDeviceTypeBuiltInDualCamera];
            [deviceTypes addObject: AVCaptureDeviceTypeBuiltInTelephotoCamera];
        #endif
        #if (TARGET_OS_IPHONE && __IPHONE_OS_VERSION_MIN_REQUIRED >= 110100)
            [deviceTypes addObject: AVCaptureDeviceTypeBuiltInTrueDepthCamera];
        #endif
        #if (TARGET_OS_IPHONE && __IPHONE_OS_VERSION_MIN_REQUIRED >= 130000)
            [deviceTypes addObject: AVCaptureDeviceTypeBuiltInTripleCamera];
            [deviceTypes addObject: AVCaptureDeviceTypeBuiltInDualWideCamera];
            [deviceTypes addObject: AVCaptureDeviceTypeBuiltInUltraWideCamera];
        #endif
        #if (TARGET_OS_OSX && __MAC_OS_X_VERSION_MIN_REQUIRED >= 130000)
            [deviceTypes addObject: AVCaptureDeviceTypeDeskViewCamera];
        #endif
        #if (TARGET_OS_IPHONE && __IPHONE_OS_VERSION_MIN_REQUIRED >= 150400)
            [deviceTypes addObject: AVCaptureDeviceTypeBuiltInLiDARDepthCamera];
        #endif
        #if (TARGET_OS_IPHONE && __IPHONE_OS_VERSION_MIN_REQUIRED >= 170000 || (TARGET_OS_OSX && __MAC_OS_X_VERSION_MIN_REQUIRED >= 140000))
            // AVCaptureDeviceTypeExternal replaced ...ExternalUnknown in
            // iOS 17 / macOS 14.
            [deviceTypes addObject: AVCaptureDeviceTypeContinuityCamera];
            [deviceTypes addObject: AVCaptureDeviceTypeExternal];
        #elif (TARGET_OS_OSX && __MAC_OS_X_VERSION_MIN_REQUIRED < 140000)
            [deviceTypes addObject: AVCaptureDeviceTypeExternalUnknown];
        #endif
    } else if (mediaType == AVMediaTypeAudio) {
        #if (TARGET_OS_IPHONE && __IPHONE_OS_VERSION_MIN_REQUIRED >= 170000 || (TARGET_OS_OSX && __MAC_OS_X_VERSION_MIN_REQUIRED >= 140000))
            deviceTypes = [NSMutableArray arrayWithArray:@[AVCaptureDeviceTypeMicrophone]];
        #else
            deviceTypes = [NSMutableArray arrayWithArray:@[AVCaptureDeviceTypeBuiltInMicrophone]];
        #endif
    } else if (mediaType == AVMediaTypeMuxed) {
        #if (TARGET_OS_IPHONE && __IPHONE_OS_VERSION_MIN_REQUIRED >= 170000 || (TARGET_OS_OSX && __MAC_OS_X_VERSION_MIN_REQUIRED >= 140000))
            deviceTypes = [NSMutableArray arrayWithArray:@[AVCaptureDeviceTypeExternal]];
        #elif (TARGET_OS_OSX && __MAC_OS_X_VERSION_MIN_REQUIRED < 140000)
            deviceTypes = [NSMutableArray arrayWithArray:@[AVCaptureDeviceTypeExternalUnknown]];
        #else
            return nil;
        #endif
    } else {
        return nil;
    }

    AVCaptureDeviceDiscoverySession *captureDeviceDiscoverySession =
        [AVCaptureDeviceDiscoverySession
        discoverySessionWithDeviceTypes:deviceTypes
                              mediaType:mediaType
                               position:AVCaptureDevicePositionUnspecified];
    return [captureDeviceDiscoverySession devices];
#else
    // Pre-discovery-session SDKs: use the deprecated enumeration API.
    return [AVCaptureDevice devicesWithMediaType:mediaType];
#endif
}
  683. static int avf_read_header(AVFormatContext *s)
  684. {
  685. int ret = 0;
  686. NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
  687. uint32_t num_screens = 0;
  688. AVFContext *ctx = (AVFContext*)s->priv_data;
  689. AVCaptureDevice *video_device = nil;
  690. AVCaptureDevice *audio_device = nil;
  691. // Find capture device
  692. NSArray *devices = getDevicesWithMediaType(AVMediaTypeVideo);
  693. NSArray *devices_muxed = getDevicesWithMediaType(AVMediaTypeMuxed);
  694. ctx->num_video_devices = [devices count] + [devices_muxed count];
  695. pthread_mutex_init(&ctx->frame_lock, NULL);
  696. #if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
  697. CGGetActiveDisplayList(0, NULL, &num_screens);
  698. #endif
  699. // List devices if requested
  700. if (ctx->list_devices) {
  701. int index = 0;
  702. av_log(ctx, AV_LOG_INFO, "AVFoundation video devices:\n");
  703. for (AVCaptureDevice *device in devices) {
  704. const char *name = [[device localizedName] UTF8String];
  705. index = [devices indexOfObject:device];
  706. av_log(ctx, AV_LOG_INFO, "[%d] %s\n", index, name);
  707. }
  708. for (AVCaptureDevice *device in devices_muxed) {
  709. const char *name = [[device localizedName] UTF8String];
  710. index = [devices count] + [devices_muxed indexOfObject:device];
  711. av_log(ctx, AV_LOG_INFO, "[%d] %s\n", index, name);
  712. }
  713. #if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
  714. if (num_screens > 0) {
  715. CGDirectDisplayID screens[num_screens];
  716. CGGetActiveDisplayList(num_screens, screens, &num_screens);
  717. for (int i = 0; i < num_screens; i++) {
  718. av_log(ctx, AV_LOG_INFO, "[%d] Capture screen %d\n", ctx->num_video_devices + i, i);
  719. }
  720. }
  721. #endif
  722. av_log(ctx, AV_LOG_INFO, "AVFoundation audio devices:\n");
  723. devices = getDevicesWithMediaType(AVMediaTypeAudio);
  724. for (AVCaptureDevice *device in devices) {
  725. const char *name = [[device localizedName] UTF8String];
  726. int index = [devices indexOfObject:device];
  727. av_log(ctx, AV_LOG_INFO, "[%d] %s\n", index, name);
  728. }
  729. goto fail;
  730. }
  731. // parse input filename for video and audio device
  732. ret = parse_device_name(s);
  733. if (ret)
  734. goto fail;
  735. // check for device index given in filename
  736. if (ctx->video_device_index == -1 && ctx->video_filename) {
  737. sscanf(ctx->video_filename, "%d", &ctx->video_device_index);
  738. }
  739. if (ctx->audio_device_index == -1 && ctx->audio_filename) {
  740. sscanf(ctx->audio_filename, "%d", &ctx->audio_device_index);
  741. }
  742. if (ctx->video_device_index >= 0) {
  743. if (ctx->video_device_index < ctx->num_video_devices) {
  744. if (ctx->video_device_index < [devices count]) {
  745. video_device = [devices objectAtIndex:ctx->video_device_index];
  746. } else {
  747. video_device = [devices_muxed objectAtIndex:(ctx->video_device_index - [devices count])];
  748. ctx->video_is_muxed = 1;
  749. }
  750. } else if (ctx->video_device_index < ctx->num_video_devices + num_screens) {
  751. #if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
  752. CGDirectDisplayID screens[num_screens];
  753. CGGetActiveDisplayList(num_screens, screens, &num_screens);
  754. AVCaptureScreenInput* capture_screen_input = [[[AVCaptureScreenInput alloc] initWithDisplayID:screens[ctx->video_device_index - ctx->num_video_devices]] autorelease];
  755. if (ctx->framerate.num > 0) {
  756. capture_screen_input.minFrameDuration = CMTimeMake(ctx->framerate.den, ctx->framerate.num);
  757. }
  758. #if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1080
  759. if (ctx->capture_cursor) {
  760. capture_screen_input.capturesCursor = YES;
  761. } else {
  762. capture_screen_input.capturesCursor = NO;
  763. }
  764. #endif
  765. if (ctx->capture_mouse_clicks) {
  766. capture_screen_input.capturesMouseClicks = YES;
  767. } else {
  768. capture_screen_input.capturesMouseClicks = NO;
  769. }
  770. video_device = (AVCaptureDevice*) capture_screen_input;
  771. ctx->video_is_screen = 1;
  772. #endif
  773. } else {
  774. av_log(ctx, AV_LOG_ERROR, "Invalid device index\n");
  775. goto fail;
  776. }
  777. } else if (ctx->video_filename &&
  778. strncmp(ctx->video_filename, "none", 4)) {
  779. if (!strncmp(ctx->video_filename, "default", 7)) {
  780. video_device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
  781. } else {
  782. // looking for video inputs
  783. for (AVCaptureDevice *device in devices) {
  784. if (!strncmp(ctx->video_filename, [[device localizedName] UTF8String], strlen(ctx->video_filename))) {
  785. video_device = device;
  786. break;
  787. }
  788. }
  789. // looking for muxed inputs
  790. for (AVCaptureDevice *device in devices_muxed) {
  791. if (!strncmp(ctx->video_filename, [[device localizedName] UTF8String], strlen(ctx->video_filename))) {
  792. video_device = device;
  793. ctx->video_is_muxed = 1;
  794. break;
  795. }
  796. }
  797. #if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
  798. // looking for screen inputs
  799. if (!video_device) {
  800. int idx;
  801. if(sscanf(ctx->video_filename, "Capture screen %d", &idx) && idx < num_screens) {
  802. CGDirectDisplayID screens[num_screens];
  803. CGGetActiveDisplayList(num_screens, screens, &num_screens);
  804. AVCaptureScreenInput* capture_screen_input = [[[AVCaptureScreenInput alloc] initWithDisplayID:screens[idx]] autorelease];
  805. video_device = (AVCaptureDevice*) capture_screen_input;
  806. ctx->video_device_index = ctx->num_video_devices + idx;
  807. ctx->video_is_screen = 1;
  808. if (ctx->framerate.num > 0) {
  809. capture_screen_input.minFrameDuration = CMTimeMake(ctx->framerate.den, ctx->framerate.num);
  810. }
  811. #if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1080
  812. if (ctx->capture_cursor) {
  813. capture_screen_input.capturesCursor = YES;
  814. } else {
  815. capture_screen_input.capturesCursor = NO;
  816. }
  817. #endif
  818. if (ctx->capture_mouse_clicks) {
  819. capture_screen_input.capturesMouseClicks = YES;
  820. } else {
  821. capture_screen_input.capturesMouseClicks = NO;
  822. }
  823. }
  824. }
  825. #endif
  826. }
  827. if (!video_device) {
  828. av_log(ctx, AV_LOG_ERROR, "Video device not found\n");
  829. goto fail;
  830. }
  831. }
  832. // get audio device
  833. if (ctx->audio_device_index >= 0) {
  834. NSArray *devices = getDevicesWithMediaType(AVMediaTypeAudio);
  835. if (ctx->audio_device_index >= [devices count]) {
  836. av_log(ctx, AV_LOG_ERROR, "Invalid audio device index\n");
  837. goto fail;
  838. }
  839. audio_device = [devices objectAtIndex:ctx->audio_device_index];
  840. } else if (ctx->audio_filename &&
  841. strncmp(ctx->audio_filename, "none", 4)) {
  842. if (!strncmp(ctx->audio_filename, "default", 7)) {
  843. audio_device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
  844. } else {
  845. NSArray *devices = getDevicesWithMediaType(AVMediaTypeAudio);
  846. for (AVCaptureDevice *device in devices) {
  847. if (!strncmp(ctx->audio_filename, [[device localizedName] UTF8String], strlen(ctx->audio_filename))) {
  848. audio_device = device;
  849. break;
  850. }
  851. }
  852. }
  853. if (!audio_device) {
  854. av_log(ctx, AV_LOG_ERROR, "Audio device not found\n");
  855. goto fail;
  856. }
  857. }
  858. // Video nor Audio capture device not found, looking for AVMediaTypeVideo/Audio
  859. if (!video_device && !audio_device) {
  860. av_log(s, AV_LOG_ERROR, "No AV capture device found\n");
  861. goto fail;
  862. }
  863. if (video_device) {
  864. if (ctx->video_device_index < ctx->num_video_devices) {
  865. av_log(s, AV_LOG_DEBUG, "'%s' opened\n", [[video_device localizedName] UTF8String]);
  866. } else {
  867. av_log(s, AV_LOG_DEBUG, "'%s' opened\n", [[video_device description] UTF8String]);
  868. }
  869. }
  870. if (audio_device) {
  871. av_log(s, AV_LOG_DEBUG, "audio device '%s' opened\n", [[audio_device localizedName] UTF8String]);
  872. }
  873. // Initialize capture session
  874. ctx->capture_session = [[AVCaptureSession alloc] init];
  875. if (video_device && add_video_device(s, video_device)) {
  876. goto fail;
  877. }
  878. if (audio_device && add_audio_device(s, audio_device)) {
  879. }
  880. [ctx->capture_session startRunning];
  881. /* Unlock device configuration only after the session is started so it
  882. * does not reset the capture formats */
  883. if (!ctx->video_is_screen) {
  884. [video_device unlockForConfiguration];
  885. }
  886. if (video_device && get_video_config(s)) {
  887. goto fail;
  888. }
  889. // set audio stream
  890. if (audio_device && get_audio_config(s)) {
  891. goto fail;
  892. }
  893. [pool release];
  894. return 0;
  895. fail:
  896. [pool release];
  897. destroy_context(ctx);
  898. if (ret)
  899. return ret;
  900. return AVERROR(EIO);
  901. }
  902. static int copy_cvpixelbuffer(AVFormatContext *s,
  903. CVPixelBufferRef image_buffer,
  904. AVPacket *pkt)
  905. {
  906. AVFContext *ctx = s->priv_data;
  907. int src_linesize[4];
  908. const uint8_t *src_data[4];
  909. int width = CVPixelBufferGetWidth(image_buffer);
  910. int height = CVPixelBufferGetHeight(image_buffer);
  911. int status;
  912. memset(src_linesize, 0, sizeof(src_linesize));
  913. memset(src_data, 0, sizeof(src_data));
  914. status = CVPixelBufferLockBaseAddress(image_buffer, 0);
  915. if (status != kCVReturnSuccess) {
  916. av_log(s, AV_LOG_ERROR, "Could not lock base address: %d (%dx%d)\n", status, width, height);
  917. return AVERROR_EXTERNAL;
  918. }
  919. if (CVPixelBufferIsPlanar(image_buffer)) {
  920. size_t plane_count = CVPixelBufferGetPlaneCount(image_buffer);
  921. int i;
  922. for(i = 0; i < plane_count; i++){
  923. src_linesize[i] = CVPixelBufferGetBytesPerRowOfPlane(image_buffer, i);
  924. src_data[i] = CVPixelBufferGetBaseAddressOfPlane(image_buffer, i);
  925. }
  926. } else {
  927. src_linesize[0] = CVPixelBufferGetBytesPerRow(image_buffer);
  928. src_data[0] = CVPixelBufferGetBaseAddress(image_buffer);
  929. }
  930. status = av_image_copy_to_buffer(pkt->data, pkt->size,
  931. src_data, src_linesize,
  932. ctx->pixel_format, width, height, 1);
  933. CVPixelBufferUnlockBaseAddress(image_buffer, 0);
  934. return status;
  935. }
  936. static int avf_read_packet(AVFormatContext *s, AVPacket *pkt)
  937. {
  938. AVFContext* ctx = (AVFContext*)s->priv_data;
  939. do {
  940. CVImageBufferRef image_buffer;
  941. CMBlockBufferRef block_buffer;
  942. lock_frames(ctx);
  943. if (ctx->current_frame != nil) {
  944. int status;
  945. int length = 0;
  946. image_buffer = CMSampleBufferGetImageBuffer(ctx->current_frame);
  947. block_buffer = CMSampleBufferGetDataBuffer(ctx->current_frame);
  948. if (image_buffer != nil) {
  949. length = (int)CVPixelBufferGetDataSize(image_buffer);
  950. } else if (block_buffer != nil) {
  951. length = (int)CMBlockBufferGetDataLength(block_buffer);
  952. } else {
  953. unlock_frames(ctx);
  954. return AVERROR(EINVAL);
  955. }
  956. if (av_new_packet(pkt, length) < 0) {
  957. unlock_frames(ctx);
  958. return AVERROR(EIO);
  959. }
  960. CMItemCount count;
  961. CMSampleTimingInfo timing_info;
  962. if (CMSampleBufferGetOutputSampleTimingInfoArray(ctx->current_frame, 1, &timing_info, &count) == noErr) {
  963. AVRational timebase_q = av_make_q(1, timing_info.presentationTimeStamp.timescale);
  964. pkt->pts = pkt->dts = av_rescale_q(timing_info.presentationTimeStamp.value, timebase_q, avf_time_base_q);
  965. }
  966. pkt->stream_index = ctx->video_stream_index;
  967. pkt->flags |= AV_PKT_FLAG_KEY;
  968. if (image_buffer) {
  969. status = copy_cvpixelbuffer(s, image_buffer, pkt);
  970. } else {
  971. status = 0;
  972. OSStatus ret = CMBlockBufferCopyDataBytes(block_buffer, 0, pkt->size, pkt->data);
  973. if (ret != kCMBlockBufferNoErr) {
  974. status = AVERROR(EIO);
  975. }
  976. }
  977. CFRelease(ctx->current_frame);
  978. ctx->current_frame = nil;
  979. if (status < 0) {
  980. unlock_frames(ctx);
  981. return status;
  982. }
  983. } else if (ctx->current_audio_frame != nil) {
  984. CMBlockBufferRef block_buffer = CMSampleBufferGetDataBuffer(ctx->current_audio_frame);
  985. int block_buffer_size = CMBlockBufferGetDataLength(block_buffer);
  986. if (!block_buffer || !block_buffer_size) {
  987. unlock_frames(ctx);
  988. return AVERROR(EIO);
  989. }
  990. if (ctx->audio_non_interleaved && block_buffer_size > ctx->audio_buffer_size) {
  991. unlock_frames(ctx);
  992. return AVERROR_BUFFER_TOO_SMALL;
  993. }
  994. if (av_new_packet(pkt, block_buffer_size) < 0) {
  995. unlock_frames(ctx);
  996. return AVERROR(EIO);
  997. }
  998. CMItemCount count;
  999. CMSampleTimingInfo timing_info;
  1000. if (CMSampleBufferGetOutputSampleTimingInfoArray(ctx->current_audio_frame, 1, &timing_info, &count) == noErr) {
  1001. AVRational timebase_q = av_make_q(1, timing_info.presentationTimeStamp.timescale);
  1002. pkt->pts = pkt->dts = av_rescale_q(timing_info.presentationTimeStamp.value, timebase_q, avf_time_base_q);
  1003. }
  1004. pkt->stream_index = ctx->audio_stream_index;
  1005. pkt->flags |= AV_PKT_FLAG_KEY;
  1006. if (ctx->audio_non_interleaved) {
  1007. int sample, c, shift, num_samples;
  1008. OSStatus ret = CMBlockBufferCopyDataBytes(block_buffer, 0, pkt->size, ctx->audio_buffer);
  1009. if (ret != kCMBlockBufferNoErr) {
  1010. unlock_frames(ctx);
  1011. return AVERROR(EIO);
  1012. }
  1013. num_samples = pkt->size / (ctx->audio_channels * (ctx->audio_bits_per_sample >> 3));
  1014. // transform decoded frame into output format
  1015. #define INTERLEAVE_OUTPUT(bps) \
  1016. { \
  1017. int##bps##_t **src; \
  1018. int##bps##_t *dest; \
  1019. src = av_malloc(ctx->audio_channels * sizeof(int##bps##_t*)); \
  1020. if (!src) { \
  1021. unlock_frames(ctx); \
  1022. return AVERROR(EIO); \
  1023. } \
  1024. \
  1025. for (c = 0; c < ctx->audio_channels; c++) { \
  1026. src[c] = ((int##bps##_t*)ctx->audio_buffer) + c * num_samples; \
  1027. } \
  1028. dest = (int##bps##_t*)pkt->data; \
  1029. shift = bps - ctx->audio_bits_per_sample; \
  1030. for (sample = 0; sample < num_samples; sample++) \
  1031. for (c = 0; c < ctx->audio_channels; c++) \
  1032. *dest++ = src[c][sample] << shift; \
  1033. av_freep(&src); \
  1034. }
  1035. if (ctx->audio_bits_per_sample <= 16) {
  1036. INTERLEAVE_OUTPUT(16)
  1037. } else {
  1038. INTERLEAVE_OUTPUT(32)
  1039. }
  1040. } else {
  1041. OSStatus ret = CMBlockBufferCopyDataBytes(block_buffer, 0, pkt->size, pkt->data);
  1042. if (ret != kCMBlockBufferNoErr) {
  1043. unlock_frames(ctx);
  1044. return AVERROR(EIO);
  1045. }
  1046. }
  1047. CFRelease(ctx->current_audio_frame);
  1048. ctx->current_audio_frame = nil;
  1049. } else {
  1050. pkt->data = NULL;
  1051. unlock_frames(ctx);
  1052. if (ctx->observed_quit) {
  1053. return AVERROR_EOF;
  1054. } else {
  1055. return AVERROR(EAGAIN);
  1056. }
  1057. }
  1058. unlock_frames(ctx);
  1059. } while (!pkt->data);
  1060. return 0;
  1061. }
  1062. static int avf_close(AVFormatContext *s)
  1063. {
  1064. AVFContext* ctx = (AVFContext*)s->priv_data;
  1065. destroy_context(ctx);
  1066. return 0;
  1067. }
/* User-settable private options of the avfoundation input device.
 * All are decoding parameters; device indices start at 0 and default
 * to -1 (unset, resolved from the input name instead). */
static const AVOption options[] = {
    { "list_devices", "list available devices", offsetof(AVFContext, list_devices), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, AV_OPT_FLAG_DECODING_PARAM },
    { "video_device_index", "select video device by index for devices with same name (starts at 0)", offsetof(AVFContext, video_device_index), AV_OPT_TYPE_INT, {.i64 = -1}, -1, INT_MAX, AV_OPT_FLAG_DECODING_PARAM },
    { "audio_device_index", "select audio device by index for devices with same name (starts at 0)", offsetof(AVFContext, audio_device_index), AV_OPT_TYPE_INT, {.i64 = -1}, -1, INT_MAX, AV_OPT_FLAG_DECODING_PARAM },
    { "pixel_format", "set pixel format", offsetof(AVFContext, pixel_format), AV_OPT_TYPE_PIXEL_FMT, {.i64 = AV_PIX_FMT_YUV420P}, 0, INT_MAX, AV_OPT_FLAG_DECODING_PARAM},
    { "framerate", "set frame rate", offsetof(AVFContext, framerate), AV_OPT_TYPE_VIDEO_RATE, {.str = "ntsc"}, 0, INT_MAX, AV_OPT_FLAG_DECODING_PARAM },
    { "video_size", "set video size", offsetof(AVFContext, width), AV_OPT_TYPE_IMAGE_SIZE, {.str = NULL}, 0, 0, AV_OPT_FLAG_DECODING_PARAM },
    { "capture_cursor", "capture the screen cursor", offsetof(AVFContext, capture_cursor), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, AV_OPT_FLAG_DECODING_PARAM },
    { "capture_mouse_clicks", "capture the screen mouse clicks", offsetof(AVFContext, capture_mouse_clicks), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, AV_OPT_FLAG_DECODING_PARAM },
    { "capture_raw_data", "capture the raw data from device connection", offsetof(AVFContext, capture_raw_data), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, AV_OPT_FLAG_DECODING_PARAM },
    { "drop_late_frames", "drop frames that are available later than expected", offsetof(AVFContext, drop_late_frames), AV_OPT_TYPE_BOOL, {.i64=1}, 0, 1, AV_OPT_FLAG_DECODING_PARAM },

    { NULL },
};
/* AVClass binding the options table above to the demuxer's private
 * context, categorised as a video input device for option listing. */
static const AVClass avf_class = {
    .class_name = "AVFoundation indev",
    .item_name  = av_default_item_name,
    .option     = options,
    .version    = LIBAVUTIL_VERSION_INT,
    .category   = AV_CLASS_CATEGORY_DEVICE_VIDEO_INPUT,
};
/* Registration of the avfoundation input device as a demuxer.
 * AVFMT_NOFILE: input is a device name/index, not a file opened by lavf. */
const FFInputFormat ff_avfoundation_demuxer = {
    .p.name         = "avfoundation",
    .p.long_name    = NULL_IF_CONFIG_SMALL("AVFoundation input device"),
    .p.flags        = AVFMT_NOFILE,
    .p.priv_class   = &avf_class,
    .priv_data_size = sizeof(AVFContext),
    .read_header    = avf_read_header,
    .read_packet    = avf_read_packet,
    .read_close     = avf_close,
};