MGVideoManager.mm 12 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358
  1. //
  2. // MGVideoManager.m
  3. // MGLivenessDetection
  4. //
  5. // Created by 张英堂 on 16/3/31.
  6. // Copyright © 2016年 megvii. All rights reserved.
  7. //
  8. #import "MGVideoManager.h"
// Screen width (distinct from viewController.view.frame)
  10. #define MG_WIN_WIDTH [UIScreen mainScreen].bounds.size.width
// Screen height (distinct from viewController.view.frame)
  12. #define MG_WIN_HEIGHT [UIScreen mainScreen].bounds.size.height
@interface MGVideoManager () <AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate>
{
    AVCaptureConnection *_audioConnection;    // connection delivering audio sample buffers
    AVCaptureConnection *_videoConnection;    // connection delivering video sample buffers
    NSDictionary *_audioCompressionSettings;  // recommended writer settings (QuickTime), captured in -initialSession
    AVCaptureDevice *_videoDevice;            // currently selected camera
    dispatch_queue_t _videoQueue;             // queue for video sample-buffer callbacks
}
// NOTE(review): CMFormatDescriptionRef held under `assign` — the property performs
// no CFRetain; lifetime must be managed wherever the setter is called.
@property (nonatomic, assign) CMFormatDescriptionRef outputAudioFormatDescription;
@property (nonatomic, assign) CMFormatDescriptionRef outputVideoFormatDescription;
@property (nonatomic, strong) AVCaptureVideoPreviewLayer *videoPreviewLayer;
@property (nonatomic, copy) NSString *sessionPreset;  // requested preset; defaults to 640x480 (see getter)
@property (nonatomic, copy) NSString *tempVideoPath;  // unused in this chunk — presumably a recording path; verify
@property (nonatomic, assign) BOOL videoRecord;       // YES when recording support was requested at init
@property (nonatomic, assign) BOOL videoSound;        // YES when audio was requested (custom getter also requires videoRecord)
@property (nonatomic, assign) BOOL startRecord;       // toggled by -startRecording / -stopRceording
@end
  30. @implementation MGVideoManager
  31. -(void)dealloc{
  32. _audioConnection = nil;
  33. _videoConnection = nil;
  34. self.videoDelegate = nil;
  35. self.sessionPreset = nil;
  36. }
  37. -(instancetype)initWithPreset:(NSString *)sessionPreset
  38. devicePosition:(AVCaptureDevicePosition)devicePosition
  39. videoRecord:(BOOL)record
  40. videoSound:(BOOL)sound{
  41. self = [super init];
  42. if (self) {
  43. self.sessionPreset = sessionPreset;
  44. _devicePosition = devicePosition;
  45. self.videoRecord = record;
  46. self.videoSound = sound;
  47. _startRecord = NO;
  48. _videoQueue = dispatch_queue_create("com.megvii.face.video", NULL);
  49. }
  50. return self;
  51. }
  52. + (instancetype)videoPreset:(NSString *)sessionPreset
  53. devicePosition:(AVCaptureDevicePosition)devicePosition
  54. videoRecord:(BOOL)record
  55. videoSound:(BOOL)sound{
  56. MGVideoManager *manager = [[MGVideoManager alloc] initWithPreset:sessionPreset
  57. devicePosition:devicePosition
  58. videoRecord:record
  59. videoSound:sound];
  60. return manager;
  61. }
  62. #pragma mark - video 功能开关
  63. - (void)stopRunning{
  64. if (self.videoSession) {
  65. [self.videoSession stopRunning];
  66. }
  67. }
  68. - (void)startRunning{
  69. [self initialSession];
  70. if (self.videoSession) {
  71. [self.videoSession startRunning];
  72. }
  73. }
  74. - (void)startRecording{
  75. [self startRunning];
  76. if (!self.videoRecord) {
  77. return;
  78. }
  79. _startRecord = YES;
  80. }
  81. - (NSString *)stopRceording{
  82. _startRecord = NO;
  83. NSString *tempString = @"no video!";
  84. return tempString;
  85. }
  86. #pragma mark - 初始化video配置
  87. - (NSString *)sessionPreset{
  88. if (nil == _sessionPreset) {
  89. _sessionPreset = AVCaptureSessionPreset640x480;
  90. }
  91. return _sessionPreset;
  92. }
  93. -(AVCaptureVideoPreviewLayer *)videoPreviewLayer{
  94. if (nil == _videoPreviewLayer) {
  95. _videoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.videoSession];
  96. [_videoPreviewLayer setFrame:CGRectMake(0, 0, MG_WIN_WIDTH, MG_WIN_HEIGHT)];
  97. [_videoPreviewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
  98. }
  99. return _videoPreviewLayer;
  100. }
/// Public accessor exposing the (lazily created) preview layer.
- (AVCaptureVideoPreviewLayer *)videoPreview {
    return self.videoPreviewLayer;
}
  104. -(BOOL)videoSound{
  105. if (_videoRecord && _videoSound) {
  106. return YES;
  107. }
  108. return NO;
  109. }
/// Video format description captured by the first video sample-buffer callback
/// (nil until the session has delivered at least one frame).
- (CMFormatDescriptionRef)formatDescription {
    return self.outputVideoFormatDescription;
}
/// Queue on which video sample-buffer delegate callbacks are delivered.
/// NOTE(review): the `get` prefix violates Cocoa naming conventions, but the
/// selector is public API and is therefore left unchanged.
- (dispatch_queue_t)getVideoQueue {
    return _videoQueue;
}
  116. //初始化相机
  117. - (void) initialSession
  118. {
  119. if (self.videoSession == nil) {
  120. /* session */
  121. _videoSession = [[AVCaptureSession alloc] init];
  122. /* 摄像头 */
  123. _videoDevice = [self cameraWithPosition:self.devicePosition];
  124. [self setMaxVideoFrame:60 videoDevice:_videoDevice];
  125. /* input */
  126. NSError *DeviceError;
  127. _videoInput = [[AVCaptureDeviceInput alloc] initWithDevice:_videoDevice error:&DeviceError];
  128. if (DeviceError) {
  129. [self videoError:DeviceError];
  130. return;
  131. }
  132. if ([self.videoSession canAddInput:self.videoInput]) {
  133. [self.videoSession addInput:self.videoInput];
  134. }
  135. /* output */
  136. AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
  137. [output setSampleBufferDelegate:self queue:_videoQueue];
  138. output.videoSettings = @{(id)kCVPixelBufferPixelFormatTypeKey:@(kCVPixelFormatType_32BGRA)};
  139. output.alwaysDiscardsLateVideoFrames = NO;
  140. if ([self.videoSession canAddOutput:output]) {
  141. [self.videoSession addOutput:output];
  142. }
  143. /* sessionPreset */
  144. // 判断最佳分辨率
  145. if ([self.videoSession canSetSessionPreset:AVCaptureSessionPreset1920x1080]) {
  146. [self.videoSession setSessionPreset: AVCaptureSessionPreset1920x1080];
  147. NSLog(@"分辨率 1920*1080");
  148. }else if ([self.videoSession canSetSessionPreset:AVCaptureSessionPreset1280x720]) {
  149. [self.videoSession setSessionPreset: AVCaptureSessionPreset1280x720];
  150. NSLog(@"分辨率 1280*720");
  151. }else if ([self.videoSession canSetSessionPreset:AVCaptureSessionPresetiFrame960x540]) {
  152. [self.videoSession setSessionPreset: AVCaptureSessionPresetiFrame960x540];
  153. NSLog(@"分辨率 960*540");
  154. }else {
  155. [self.videoSession setSessionPreset: AVCaptureSessionPreset640x480];
  156. NSLog(@"分辨率 640*480");
  157. }
  158. // if ([self.videoSession canSetSessionPreset:self.sessionPreset]) {
  159. // [self.videoSession setSessionPreset: self.sessionPreset];
  160. // }else{
  161. // NSError *presetError = [NSError errorWithDomain:NSCocoaErrorDomain code:101 userInfo:@{@"sessionPreset":@"不支持的sessionPreset!"}];
  162. // [self videoError:presetError];
  163. // return;
  164. // }
  165. _videoConnection = [output connectionWithMediaType:AVMediaTypeVideo];
  166. // [_videoConnection setVideoOrientation:AVCaptureVideoOrientationPortrait];
  167. self.videoOrientation = _videoConnection.videoOrientation;
  168. /* 设置声音 */
  169. if (self.videoSound) {
  170. AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
  171. AVCaptureDeviceInput *audioIn = [[AVCaptureDeviceInput alloc] initWithDevice:audioDevice error:nil];
  172. if ( [self.videoSession canAddInput:audioIn] ) {
  173. [self.videoSession addInput:audioIn];
  174. }
  175. AVCaptureAudioDataOutput *audioOut = [[AVCaptureAudioDataOutput alloc] init];
  176. dispatch_queue_t audioCaptureQueue = dispatch_queue_create("com.megvii.audio", DISPATCH_QUEUE_SERIAL );
  177. [audioOut setSampleBufferDelegate:self queue:audioCaptureQueue];
  178. if ( [self.videoSession canAddOutput:audioOut] ) {
  179. [self.videoSession addOutput:audioOut];
  180. }
  181. _audioConnection = [audioOut connectionWithMediaType:AVMediaTypeAudio];
  182. output.alwaysDiscardsLateVideoFrames = YES;
  183. _audioCompressionSettings = [[audioOut recommendedAudioSettingsForAssetWriterWithOutputFileType:AVFileTypeQuickTimeMovie] copy];
  184. }
  185. }
  186. }
/// Stub — recording setup is not implemented in this build; presumably filled
/// in by a recording-enabled variant of the SDK. TODO confirm.
- (void)initVideoRecord:(CMFormatDescriptionRef)formatDescription {
}
  189. //前后摄像头
  190. - (AVCaptureDevice *)cameraWithPosition:(AVCaptureDevicePosition) position {
  191. NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
  192. for (AVCaptureDevice *device in devices) {
  193. if ([device position] == position) {
  194. return device;
  195. }
  196. }
  197. return nil;
  198. }
  199. //前后摄像头的切换
  200. - (void)toggleCamera:(id)sender{
  201. NSUInteger cameraCount = [[AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo] count];
  202. if (cameraCount > 1) {
  203. NSError *error;
  204. AVCaptureDeviceInput *newVideoInput;
  205. AVCaptureDevicePosition position = [[_videoInput device] position];
  206. if (position == AVCaptureDevicePositionBack)
  207. newVideoInput = [[AVCaptureDeviceInput alloc] initWithDevice:[self cameraWithPosition:AVCaptureDevicePositionFront] error:&error];
  208. else if (position == AVCaptureDevicePositionFront)
  209. newVideoInput = [[AVCaptureDeviceInput alloc] initWithDevice:[self cameraWithPosition:AVCaptureDevicePositionBack] error:&error];
  210. else
  211. return;
  212. if (newVideoInput != nil) {
  213. [self.videoSession beginConfiguration];
  214. [self.videoSession removeInput:self.videoInput];
  215. if ([self.videoSession canAddInput:newVideoInput]) {
  216. [self.videoSession addInput:newVideoInput];
  217. _videoInput = newVideoInput;
  218. } else {
  219. [self.videoSession addInput:self.videoInput];
  220. }
  221. [self.videoSession commitConfiguration];
  222. } else if (error) {
  223. [self videoError:error];
  224. }
  225. }
  226. }
  227. // 设置 视频最大帧率
  228. - (void)setMaxVideoFrame:(NSInteger)frame videoDevice:(AVCaptureDevice *)videoDevice{
  229. for(AVCaptureDeviceFormat *vFormat in [videoDevice formats])
  230. {
  231. CMFormatDescriptionRef description= vFormat.formatDescription;
  232. AVFrameRateRange *rateRange = (AVFrameRateRange*)[vFormat.videoSupportedFrameRateRanges objectAtIndex:0];
  233. float maxrate = rateRange.maxFrameRate;
  234. if(maxrate >= frame && CMFormatDescriptionGetMediaSubType(description)==kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
  235. {
  236. if (YES == [videoDevice lockForConfiguration:NULL])
  237. {
  238. videoDevice.activeFormat = vFormat;
  239. [videoDevice setActiveVideoMinFrameDuration:CMTimeMake(1,(int)(frame/3))];
  240. [videoDevice setActiveVideoMaxFrameDuration:CMTimeMake(1,(int)frame)];
  241. [videoDevice unlockForConfiguration];
  242. }
  243. }
  244. }
  245. }
//录像功能
/// Stub — would append `pixelBuffer` to a movie file while recording.
/// Not implemented in this build; the capture callback still routes video
/// buffers here when recording is active.
- (void)appendVideoBuffer:(CMSampleBufferRef)pixelBuffer
{
}
/// Stub — would append `sampleBuffer` to the movie's audio track.
/// Not implemented in this build; the capture callback still routes audio
/// buffers here.
- (void)appendAudioBuffer:(CMSampleBufferRef)sampleBuffer {
}
  252. - (CGAffineTransform)transformFromVideoBufferOrientationToOrientation:(AVCaptureVideoOrientation)orientation withAutoMirroring:(BOOL)mirror
  253. {
  254. CGAffineTransform transform = CGAffineTransformIdentity;
  255. CGFloat orientationAngleOffset = [MGImage angleOffsetFromPortraitOrientationToOrientation:orientation];
  256. CGFloat videoOrientationAngleOffset = [MGImage angleOffsetFromPortraitOrientationToOrientation:self.videoOrientation];
  257. CGFloat angleOffset = orientationAngleOffset - videoOrientationAngleOffset;
  258. transform = CGAffineTransformMakeRotation(angleOffset);
  259. // transform = CGAffineTransformRotate(transform, -M_PI);
  260. if ( _videoDevice.position == AVCaptureDevicePositionFront)
  261. {
  262. if (mirror) {
  263. transform = CGAffineTransformScale(transform, -1, 1);
  264. }else {
  265. transform = CGAffineTransformRotate(transform, M_PI );
  266. }
  267. }
  268. return transform;
  269. }
  270. #pragma mark - delegate
  271. - (void)captureOutput:(AVCaptureOutput *)captureOutput
  272. didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
  273. fromConnection:(AVCaptureConnection *)connection
  274. {
  275. @autoreleasepool {
  276. if (connection == _videoConnection)
  277. {
  278. CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer);
  279. if (self.outputVideoFormatDescription == nil) {
  280. self.outputVideoFormatDescription = formatDescription;
  281. }
  282. if (self.videoDelegate) {
  283. [self.videoDelegate MGCaptureOutput:captureOutput didOutputSampleBuffer:sampleBuffer fromConnection:connection];
  284. }
  285. if (self.videoRecord && _startRecord) {
  286. [self appendVideoBuffer:sampleBuffer];
  287. }
  288. }else if (connection == _audioConnection){
  289. [self appendAudioBuffer:sampleBuffer];
  290. }
  291. }
  292. }
  293. #pragma mark - 视频流出错,抛出异常
  294. - (void)videoError:(NSError *)error{
  295. if (self.videoDelegate && error) {
  296. [self.videoDelegate MGCaptureOutput:nil error:error];
  297. }
  298. }
  299. @end