MGOpenGLRenderer.m

/*
 File: MGOpenGLRenderer.m (adapted from Apple's RosyWriterOpenGLRenderer.m)
 Abstract: The RosyWriter OpenGL effect renderer
 Version: 2.1
 Copyright (C) 2014 Apple Inc. All Rights Reserved.
 */
#import "MGOpenGLRenderer.h"
#import <OpenGLES/ES2/gl.h>
#import <OpenGLES/ES2/glext.h>
#import "GLESUtils.h"
#import "MGHeader.h"
#import "MGFaceModelArray.h"

@interface MGOpenGLRenderer ()
{
    EAGLContext *_oglContext;
    CVOpenGLESTextureCacheRef _textureCache;        // cache for source (camera) textures
    CVOpenGLESTextureCacheRef _renderTextureCache;  // cache for destination (render target) textures
    CVPixelBufferPoolRef _bufferPool;
    CFDictionaryRef _bufferPoolAuxAttributes;
    CMFormatDescriptionRef _outputFormatDescription;

    GLuint _faceProgram;          // landmark point program
    GLint _facePointSize;         // "sizeScale" uniform of _faceProgram
    GLuint _face3DProgram;        // 3D pose-axes program
    GLint _colorSelectorSlot;     // "color_selector" uniform of _face3DProgram
    GLuint _videoProgram;         // video pass-through program
    GLint _frame;                 // "videoframe" sampler uniform of _videoProgram
    GLuint _offscreenBufferHandle;
    GLfloat _videoFrameW;
    GLfloat _videoFrameH;
}
@end

@implementation MGOpenGLRenderer
#pragma mark API

- (instancetype)init
{
    self = [super init];
    if ( self )
    {
        _oglContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
        if ( ! _oglContext ) {
            NSLog( @"Problem with OpenGL context." );
            return nil;
        }
    }
    return self;
}
- (void)dealloc
{
    [self deleteBuffers];
    _oglContext = nil;
}

- (void)deleteBuffers
{
    EAGLContext *oldContext = [EAGLContext currentContext];
    if ( oldContext != _oglContext ) {
        if ( ! [EAGLContext setCurrentContext:_oglContext] ) {
            @throw [NSException exceptionWithName:NSInternalInconsistencyException reason:@"Problem with OpenGL context" userInfo:nil];
            return;
        }
    }
    if ( _offscreenBufferHandle ) {
        glDeleteFramebuffers( 1, &_offscreenBufferHandle );
        _offscreenBufferHandle = 0;
    }
    if ( _videoProgram ) {
        glDeleteProgram( _videoProgram );
        _videoProgram = 0;
    }
    if ( _faceProgram ) {
        glDeleteProgram( _faceProgram );
        _faceProgram = 0;
    }
    if ( _face3DProgram ) {
        glDeleteProgram( _face3DProgram );
        _face3DProgram = 0;
    }
    if ( _textureCache ) {
        CFRelease( _textureCache );
        _textureCache = 0;
    }
    if ( _renderTextureCache ) {
        CFRelease( _renderTextureCache );
        _renderTextureCache = 0;
    }
    if ( _bufferPool ) {
        CFRelease( _bufferPool );
        _bufferPool = NULL;
    }
    if ( _bufferPoolAuxAttributes ) {
        CFRelease( _bufferPoolAuxAttributes );
        _bufferPoolAuxAttributes = NULL;
    }
    if ( _outputFormatDescription ) {
        CFRelease( _outputFormatDescription );
        _outputFormatDescription = NULL;
    }
    if ( oldContext != _oglContext ) {
        [EAGLContext setCurrentContext:oldContext];
    }
}
#pragma mark RosyWriterRenderer

- (BOOL)operatesInPlace
{
    return NO;
}

- (FourCharCode)inputPixelFormat
{
    return kCVPixelFormatType_32BGRA;
}

- (void)prepareForInputWithFormatDescription:(CMFormatDescriptionRef)inputFormatDescription outputRetainedBufferCountHint:(size_t)outputRetainedBufferCountHint
{
    // The input and output dimensions are the same. This renderer doesn't do any scaling.
    CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions( inputFormatDescription );
    [self deleteBuffers];
    if ( ! [self initializeBuffersWithOutputDimensions:dimensions retainedBufferCountHint:outputRetainedBufferCountHint] ) {
        @throw [NSException exceptionWithName:NSInternalInconsistencyException reason:@"Problem preparing renderer." userInfo:nil];
    }
}

- (void)reset
{
    [self deleteBuffers];
}
- (CVPixelBufferRef)drawPixelBuffer:(CMSampleBufferRef)sampleBufferRef custumDrawing:(void (^)(void))draw
{
    CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBufferRef);
    if ( _offscreenBufferHandle == 0 ) {
        @throw [NSException exceptionWithName:NSInternalInconsistencyException reason:@"Uninitialized buffer" userInfo:nil];
        return nil;
    }
    if ( pixelBuffer == NULL ) {
        @throw [NSException exceptionWithName:NSInvalidArgumentException reason:@"NULL pixel buffer" userInfo:nil];
        return nil;
    }
    const CMVideoDimensions srcDimensions = { (int32_t)CVPixelBufferGetWidth(pixelBuffer), (int32_t)CVPixelBufferGetHeight(pixelBuffer) };
    const CMVideoDimensions dstDimensions = CMVideoFormatDescriptionGetDimensions( _outputFormatDescription );
    _videoFrameW = dstDimensions.width;
    _videoFrameH = dstDimensions.height;
    // This renderer does no scaling, so the source and destination dimensions must match.
    if ( dstDimensions.width != srcDimensions.width || dstDimensions.height != srcDimensions.height ) {
        @throw [NSException exceptionWithName:NSInvalidArgumentException reason:@"Invalid pixel buffer dimensions" userInfo:nil];
        return nil;
    }
    if ( CVPixelBufferGetPixelFormatType( pixelBuffer ) != kCVPixelFormatType_32BGRA ) {
        @throw [NSException exceptionWithName:NSInvalidArgumentException reason:@"Invalid pixel buffer format" userInfo:nil];
        return nil;
    }
    EAGLContext *oldContext = [EAGLContext currentContext];
    if ( oldContext != _oglContext ) {
        if ( ! [EAGLContext setCurrentContext:_oglContext] ) {
            @throw [NSException exceptionWithName:NSInternalInconsistencyException reason:@"Problem with OpenGL context" userInfo:nil];
            return nil;
        }
    }
    CVReturn err = noErr;
    CVOpenGLESTextureRef srcTexture = NULL, dstTexture = NULL;
    CVPixelBufferRef dstPixelBuffer = NULL;

    err = CVOpenGLESTextureCacheCreateTextureFromImage( kCFAllocatorDefault,
                                                        _textureCache,
                                                        pixelBuffer,
                                                        NULL,
                                                        GL_TEXTURE_2D,
                                                        GL_RGBA,
                                                        _videoFrameW, _videoFrameH,
                                                        GL_BGRA,
                                                        GL_UNSIGNED_BYTE,
                                                        0,
                                                        &srcTexture );
    if ( ! srcTexture || err ) {
        NSLog( @"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err );
        goto bail;
    }

    err = CVPixelBufferPoolCreatePixelBufferWithAuxAttributes( kCFAllocatorDefault, _bufferPool, _bufferPoolAuxAttributes, &dstPixelBuffer );
    if ( err == kCVReturnWouldExceedAllocationThreshold ) {
        // Flush the texture cache to potentially release the retained buffers and try again to create a pixel buffer
        CVOpenGLESTextureCacheFlush( _renderTextureCache, 0 );
        err = CVPixelBufferPoolCreatePixelBufferWithAuxAttributes( kCFAllocatorDefault, _bufferPool, _bufferPoolAuxAttributes, &dstPixelBuffer );
    }
    if ( err ) {
        if ( err == kCVReturnWouldExceedAllocationThreshold ) {
            NSLog( @"Pool is out of buffers, dropping frame" );
        }
        else {
            NSLog( @"Error at CVPixelBufferPoolCreatePixelBuffer %d", err );
        }
        goto bail;
    }

    err = CVOpenGLESTextureCacheCreateTextureFromImage( kCFAllocatorDefault,
                                                        _renderTextureCache,
                                                        dstPixelBuffer,
                                                        NULL,
                                                        GL_TEXTURE_2D,
                                                        GL_RGBA,
                                                        _videoFrameW, _videoFrameH,
                                                        GL_BGRA,
                                                        GL_UNSIGNED_BYTE,
                                                        0,
                                                        &dstTexture );
    if ( ! dstTexture || err ) {
        NSLog( @"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err );
        goto bail;
    }

    glBindFramebuffer( GL_FRAMEBUFFER, _offscreenBufferHandle );
    glViewport( 0, 0, srcDimensions.width, srcDimensions.height );
    glUseProgram( _videoProgram );

    // Set up our destination pixel buffer as the framebuffer's render target.
    glActiveTexture( GL_TEXTURE0 );
    glBindTexture( CVOpenGLESTextureGetTarget( dstTexture ), CVOpenGLESTextureGetName( dstTexture ) );
    glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR );
    glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR );
    glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE );
    glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE );
    glFramebufferTexture2D( GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, CVOpenGLESTextureGetTarget( dstTexture ), CVOpenGLESTextureGetName( dstTexture ), 0 );

    // Render our source pixel buffer.
    glActiveTexture( GL_TEXTURE1 );
    glBindTexture( CVOpenGLESTextureGetTarget( srcTexture ), CVOpenGLESTextureGetName( srcTexture ) );
    glUniform1i( _frame, 1 );
    glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR );
    glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR );
    glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE );
    glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE );

    glVertexAttribPointer( ATTRIB_VERTEX, 2, GL_FLOAT, 0, 0, squareVertices );
    glEnableVertexAttribArray( ATTRIB_VERTEX );
    glVertexAttribPointer( ATTRIB_TEXTUREPOSITON, 2, GL_FLOAT, 0, 0, textureVertices );
    glEnableVertexAttribArray( ATTRIB_TEXTUREPOSITON );
    glDrawArrays( GL_TRIANGLE_STRIP, 0, 4 );

    glBindTexture( CVOpenGLESTextureGetTarget( srcTexture ), 0 );
    glBindTexture( CVOpenGLESTextureGetTarget( dstTexture ), 0 );

    if ( draw != nil ) {
        draw();
    }
    // Make sure that outstanding GL commands which render to the destination pixel buffer have been submitted.
    // AVAssetWriter, AVSampleBufferDisplayLayer, and GL will block until the rendering is complete when sourcing from this pixel buffer.
    glFlush();

bail:
    if ( oldContext != _oglContext ) {
        [EAGLContext setCurrentContext:oldContext];
    }
    if ( srcTexture ) {
        CFRelease( srcTexture );
    }
    if ( dstTexture ) {
        CFRelease( dstTexture );
    }
    return dstPixelBuffer;
}
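/*
 Usage sketch (an assumption about the call site, not taken from this file): a video
 capture callback might pair -drawPixelBuffer:custumDrawing: with -drawFaceLandMark:
 below. `self.renderer` and `faces` (an MGFaceModelArray produced elsewhere by the
 face tracker) are hypothetical names.

     CVPixelBufferRef rendered = [self.renderer drawPixelBuffer:sampleBuffer
                                                  custumDrawing:^{
         [self.renderer drawFaceLandMark:faces];
     }];
     if ( rendered ) {
         // Display or encode the buffer, then balance the +1 reference returned by the pool.
         CFRelease( rendered );
     }
*/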
- (void)drawFaceLandMark:(MGFaceModelArray *)faces {
    if (!faces || faces.count == 0) return;
    /* Draw the face landmark points */
    glActiveTexture(GL_TEXTURE2);
    glUseProgram(_faceProgram);
    // [self drawRect:faces.detectRect];
    for (int i = 0; i < faces.count; i++) {
        MGFaceInfo *model = [faces modelWithIndex:i];
        [self drawFacePointer:model.points faceRect:model.rect];
    }
    if (self.show3DView == YES) {
        if (faces.count >= 1) {
            /* Draw the 3D face-pose axes layer */
            glActiveTexture(GL_TEXTURE3);
            glUseProgram(_face3DProgram);
            MGFaceInfo *firstInfo = [faces modelWithIndex:0];
            [self drawTriConeX:-firstInfo.pitch Y:-firstInfo.yaw Z:-firstInfo.roll];
        }
    }
}
- (CMFormatDescriptionRef)outputFormatDescription
{
    return _outputFormatDescription;
}
- (void)setupFaceProgram
{
    NSString *vertexShaderPath = [[NSBundle mainBundle] pathForResource:@"FacePointSize"
                                                                 ofType:@"glsl"];
    NSString *fragmentShaderPath = [[NSBundle mainBundle] pathForResource:@"FacePointColor"
                                                                   ofType:@"glsl"];
    GLuint programHandle = [GLESUtils loadProgram:vertexShaderPath
                       withFragmentShaderFilepath:fragmentShaderPath];
    if (programHandle == 0) {
        NSLog(@" >> Error: Failed to setup face program.");
        return;
    }
    _faceProgram = programHandle;
    // Cache the point-size uniform used by -drawFacePointer:faceRect:.
    _facePointSize = glGetUniformLocation(_faceProgram, "sizeScale");
}
- (void)setupFace3Dprogram {
    NSString *vertexShaderPath = [[NSBundle mainBundle] pathForResource:@"Face3DVertex"
                                                                 ofType:@"glsl"];
    NSString *fragmentShaderPath = [[NSBundle mainBundle] pathForResource:@"Face3DFragment"
                                                                   ofType:@"glsl"];
    GLuint programHandle = [GLESUtils loadProgram:vertexShaderPath
                       withFragmentShaderFilepath:fragmentShaderPath];
    if (programHandle == 0) {
        NSLog(@" >> Error: Failed to setup 3D program.");
        return;
    }
    _face3DProgram = programHandle;
    _colorSelectorSlot = glGetUniformLocation(_face3DProgram, "color_selector");
}
- (void)setupVideoProgram {
    // Load vertex and fragment shaders
    GLint attribLocation[NUM_ATTRIBUTES] = {
        ATTRIB_VERTEX, ATTRIB_TEXTUREPOSITON,
    };
    GLchar *attribName[NUM_ATTRIBUTES] = {
        "position", "texturecoordinate",
    };
    const GLchar *vertSrc = [GLESUtils readFile:@"VideoVert.glsl"];
    const GLchar *fragSrc = [GLESUtils readFile:@"VideoFrag.glsl"];
    // shader program
    glueCreateProgram( vertSrc, fragSrc,
                       NUM_ATTRIBUTES, (const GLchar **)&attribName[0], attribLocation,
                       0, 0, 0,
                       &_videoProgram );
    if (_videoProgram == 0) {
        NSLog( @"Problem initializing the program." );
    }
}
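/*
 For reference, a minimal sketch (an assumption, not the shipped shader files) of what
 VideoVert.glsl / VideoFrag.glsl could look like, consistent with the attribute names
 bound above ("position", "texturecoordinate") and the "videoframe" sampler fetched in
 -initializeBuffersWithOutputDimensions:retainedBufferCountHint::

     // VideoVert.glsl
     attribute vec4 position;
     attribute vec2 texturecoordinate;
     varying vec2 coordinate;
     void main()
     {
         gl_Position = position;
         coordinate = texturecoordinate;
     }

     // VideoFrag.glsl
     varying highp vec2 coordinate;
     uniform sampler2D videoframe;
     void main()
     {
         gl_FragColor = texture2D(videoframe, coordinate);
     }
*/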
#pragma mark Internal

- (BOOL)initializeBuffersWithOutputDimensions:(CMVideoDimensions)outputDimensions retainedBufferCountHint:(size_t)clientRetainedBufferCountHint
{
    BOOL success = YES;
    EAGLContext *oldContext = [EAGLContext currentContext];
    if ( oldContext != _oglContext ) {
        if ( ! [EAGLContext setCurrentContext:_oglContext] ) {
            @throw [NSException exceptionWithName:NSInternalInconsistencyException reason:@"Problem with OpenGL context" userInfo:nil];
            return NO;
        }
    }

    glDisable( GL_DEPTH_TEST );
    glGenFramebuffers( 1, &_offscreenBufferHandle );
    glBindFramebuffer( GL_FRAMEBUFFER, _offscreenBufferHandle );

    CVReturn err = CVOpenGLESTextureCacheCreate( kCFAllocatorDefault, NULL, _oglContext, NULL, &_textureCache );
    if ( err ) {
        NSLog( @"Error at CVOpenGLESTextureCacheCreate %d", err );
        success = NO;
        goto bail;
    }
    err = CVOpenGLESTextureCacheCreate( kCFAllocatorDefault, NULL, _oglContext, NULL, &_renderTextureCache );
    if ( err ) {
        NSLog( @"Error at CVOpenGLESTextureCacheCreate %d", err );
        success = NO;
        goto bail;
    }

    [self setupVideoProgram];
    _frame = glueGetUniformLocation(_videoProgram, "videoframe");
    /* Set up the face landmark program */
    [self setupFaceProgram];
    /* Set up the 3D layer program */
    [self setupFace3Dprogram];

    size_t maxRetainedBufferCount = clientRetainedBufferCountHint;
    _bufferPool = createPixelBufferPool( outputDimensions.width, outputDimensions.height, kCVPixelFormatType_32BGRA, (int32_t)maxRetainedBufferCount );
    if ( ! _bufferPool ) {
        NSLog( @"Problem initializing a buffer pool." );
        success = NO;
        goto bail;
    }
    _bufferPoolAuxAttributes = createPixelBufferPoolAuxAttributes( (int32_t)maxRetainedBufferCount );
    preallocatePixelBuffersInPool( _bufferPool, _bufferPoolAuxAttributes );

    CMFormatDescriptionRef outputFormatDescription = NULL;
    CVPixelBufferRef testPixelBuffer = NULL;
    CVPixelBufferPoolCreatePixelBufferWithAuxAttributes( kCFAllocatorDefault, _bufferPool, _bufferPoolAuxAttributes, &testPixelBuffer );
    if ( ! testPixelBuffer ) {
        NSLog( @"Problem creating a pixel buffer." );
        success = NO;
        goto bail;
    }
    CMVideoFormatDescriptionCreateForImageBuffer( kCFAllocatorDefault, testPixelBuffer, &outputFormatDescription );
    _outputFormatDescription = outputFormatDescription;
    CFRelease( testPixelBuffer );

bail:
    if ( ! success ) {
        [self deleteBuffers];
    }
    if ( oldContext != _oglContext ) {
        [EAGLContext setCurrentContext:oldContext];
    }
    return success;
}
static CVPixelBufferPoolRef createPixelBufferPool( int32_t width, int32_t height, FourCharCode pixelFormat, int32_t maxBufferCount )
{
    CVPixelBufferPoolRef outputPool = NULL;
    NSDictionary *sourcePixelBufferOptions = @{ (id)kCVPixelBufferPixelFormatTypeKey : @(pixelFormat),
                                                (id)kCVPixelBufferWidthKey : @(width),
                                                (id)kCVPixelBufferHeightKey : @(height),
                                                (id)kCVPixelFormatOpenGLESCompatibility : @(YES),
                                                (id)kCVPixelBufferIOSurfacePropertiesKey : @{ /*empty dictionary*/ } };
    NSDictionary *pixelBufferPoolOptions = @{ (id)kCVPixelBufferPoolMinimumBufferCountKey : @(maxBufferCount) };
    CVPixelBufferPoolCreate( kCFAllocatorDefault, (__bridge CFDictionaryRef)pixelBufferPoolOptions, (__bridge CFDictionaryRef)sourcePixelBufferOptions, &outputPool );
    return outputPool;
}

static CFDictionaryRef createPixelBufferPoolAuxAttributes( int32_t maxBufferCount )
{
    // CVPixelBufferPoolCreatePixelBufferWithAuxAttributes() will return kCVReturnWouldExceedAllocationThreshold if we have already vended the max number of buffers
    return CFRetain( (__bridge CFTypeRef)(@{ (id)kCVPixelBufferPoolAllocationThresholdKey : @(maxBufferCount) }) );
}
static void preallocatePixelBuffersInPool( CVPixelBufferPoolRef pool, CFDictionaryRef auxAttributes )
{
    // Preallocate buffers in the pool, since this is for real-time display/capture
    NSMutableArray *pixelBuffers = [[NSMutableArray alloc] init];
    while ( 1 )
    {
        CVPixelBufferRef pixelBuffer = NULL;
        OSStatus err = CVPixelBufferPoolCreatePixelBufferWithAuxAttributes( kCFAllocatorDefault, pool, auxAttributes, &pixelBuffer );
        if ( err == kCVReturnWouldExceedAllocationThreshold ) {
            break;
        }
        assert( err == noErr );
        [pixelBuffers addObject:(__bridge id)(pixelBuffer)];
        CFRelease( pixelBuffer );
    }
}
#pragma mark - Draw rectangle

- (void)drawRect:(CGRect)rect {
    if (CGRectIsNull(rect)) return;
    GLfloat lineWidth = _videoFrameH / 480.0 * 3.0;
    glLineWidth(lineWidth);
    GLfloat top = (rect.origin.y - _videoFrameH/2) / (_videoFrameH/2);
    GLfloat left = (_videoFrameW/2 - rect.origin.x) / (_videoFrameW/2);
    GLfloat right = (_videoFrameW/2 - (rect.origin.x + rect.size.width)) / (_videoFrameW/2);
    GLfloat bottom = ((rect.origin.y + rect.size.height) - _videoFrameH/2) / (_videoFrameH/2);
    GLfloat tempFace[] = {
        right, top, 0.0f,    // right top
        left, top, 0.0f,     // left top
        left, bottom, 0.0f,  // left bottom
        right, bottom, 0.0f, // right bottom
    };
    GLubyte indices[] = {
        0, 1, 1, 2, 2, 3, 3, 0
    };
    glVertexAttribPointer( 0, 3, GL_FLOAT, GL_FALSE, 0, tempFace );
    glEnableVertexAttribArray( 0 );
    glDrawElements( GL_LINES, sizeof(indices)/sizeof(GLubyte), GL_UNSIGNED_BYTE, indices );
}
#pragma mark - Draw landmark points

- (void)drawFaceWithRect:(CGRect)rect {
    if (CGRectIsNull(rect)) return;
    GLfloat lineWidth = _videoFrameH / 480.0 * 3.0;
    glLineWidth(lineWidth);
    GLfloat top = [self changeToGLPointT:rect.origin.y];
    GLfloat left = [self changeToGLPointL:rect.origin.x];
    GLfloat right = [self changeToGLPointR:rect.size.width];
    GLfloat bottom = [self changeToGLPointB:rect.size.height];
    GLfloat tempFace[] = {
        bottom, left, 0.0f,
        top, left, 0.0f,
        top, right, 0.0f,
        bottom, right, 0.0f,
    };
    GLubyte indices[] = {
        0, 1, 1, 2, 2, 3, 3, 0
    };
    glVertexAttribPointer( 0, 3, GL_FLOAT, GL_FALSE, 0, tempFace );
    glEnableVertexAttribArray( 0 );
    glDrawElements( GL_LINES, sizeof(indices)/sizeof(GLubyte), GL_UNSIGNED_BYTE, indices );
}
- (void)drawFacePointer:(NSArray *)pointArray faceRect:(CGRect)rect {
    // GLfloat lineWidth = _videoFrameH/480.0;
    const GLfloat lineWidth = rect.size.width / WIN_WIDTH * 1.5;
    glUniform1f(_facePointSize, lineWidth);
    const GLsizei pointCount = (GLsizei)pointArray.count;
    GLfloat tempPoint[pointCount * 3];
    GLubyte indices[pointCount];
    for (int i = 0; i < pointArray.count; i++) {
        CGPoint pointer = [pointArray[i] CGPointValue];
        GLfloat top = [self changeToGLPointT:pointer.x];
        GLfloat left = [self changeToGLPointL:pointer.y];
        tempPoint[i*3+0] = top;
        tempPoint[i*3+1] = left;
        tempPoint[i*3+2] = 0.0f;
        indices[i] = i;
    }
    glVertexAttribPointer( 0, 3, GL_FLOAT, GL_TRUE, 0, tempPoint );
    // Enable attribute index 0 (the pointer set above); GL_VERTEX_ATTRIB_ARRAY_POINTER is a
    // query enum, not a valid attribute index.
    glEnableVertexAttribArray( 0 );
    glDrawElements( GL_POINTS, (GLsizei)sizeof(indices)/sizeof(GLubyte), GL_UNSIGNED_BYTE, indices );
}
- (GLfloat)changeToGLPointT:(CGFloat)x {
    GLfloat tempX = (x - _videoFrameW/2) / (_videoFrameW/2);
    return tempX;
}

- (GLfloat)changeToGLPointL:(CGFloat)y {
    GLfloat tempY = (_videoFrameH/2 - (_videoFrameH - y)) / (_videoFrameH/2);
    return tempY;
}

- (GLfloat)changeToGLPointR:(CGFloat)y {
    GLfloat tempR = (_videoFrameH/2 - y) / (_videoFrameH/2);
    return tempR;
}

- (GLfloat)changeToGLPointB:(CGFloat)y {
    GLfloat tempB = (y - _videoFrameW/2) / (_videoFrameW/2);
    return tempB;
}
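/*
 A minimal worked example of the mapping above, assuming a 640x480 frame
 (_videoFrameW = 640, _videoFrameH = 480); the actual dimensions are whatever
 the output format description reports:

     changeToGLPointT:320 -> (320 - 320) / 320 =  0.0   (horizontal center)
     changeToGLPointT:640 -> (640 - 320) / 320 =  1.0   (right edge)
     changeToGLPointL:  0 -> (240 - 480) / 240 = -1.0   (one vertical edge)
     changeToGLPointL:480 -> (240 -   0) / 240 =  1.0   (the other edge)

 i.e. pixel coordinates are normalized into OpenGL clip-space coordinates in [-1, 1].
*/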
// Rotates the 3-component point at `offset` by `angle` (in radians) in the plane
// spanned by the x_axis and y_axis components.
static void rotatePoint3f(float *points, int offset, float angle/*radians*/, int x_axis, int y_axis) {
    float x = points[offset + x_axis], y = points[offset + y_axis];
    float alpha_x = cosf(angle), alpha_y = sinf(angle);
    points[offset + x_axis] = x * alpha_x - y * alpha_y;
    points[offset + y_axis] = x * alpha_y + y * alpha_x;
}
- (void)drawTriConeX:(float)pitch Y:(float)yaw Z:(float)roll {
    GLfloat lineWidth = _videoFrameH / 480.0 * 2.0;
    glLineWidth(lineWidth);
    // Origin plus the three axis endpoints.
    GLfloat vertices[] = {
        0.0f, 0.0f, 0.0f,
        -1.0f, 0.0f, 0.0f,
        0.0f, -1.0f, 0.0f,
        0.0f, 0.0f, -1.0f
    };
    int n = sizeof(vertices) / sizeof(GLfloat) / 3;
    float a = 0.2;
    GLfloat resize = _videoFrameW / _videoFrameH;
    for (int i = 0; i < n; ++i) {
        rotatePoint3f(vertices, i * 3, yaw, 2, 0);
        rotatePoint3f(vertices, i * 3, pitch, 2, 1);
        rotatePoint3f(vertices, i * 3, roll, 0, 1);
        vertices[i * 3 + 0] = vertices[i * 3 + 0] * a * 1 + 0.8f;
        vertices[i * 3 + 1] = vertices[i * 3 + 1] * a * resize + 0;
        vertices[i * 3 + 2] = vertices[i * 3 + 2] * a * 1 + 0;
    }
    GLubyte indices[] = {0, 1, 0, 2, 0, 3};
    glVertexAttribPointer( 0, 3, GL_FLOAT, GL_TRUE, 0, vertices );
    glEnableVertexAttribArray( 0 ); // attribute index 0, matching the pointer set above
    for (int i = 0; i < 3; ++i) {
        glUniform1f(_colorSelectorSlot, (float)(i + 1));
        glDrawElements(GL_LINES, 2, GL_UNSIGNED_BYTE, indices + i * 2);
    }
}
- (void)setUpOutSampleBuffer:(CGSize)outSize devicePosition:(AVCaptureDevicePosition)devicePosition {
    [EAGLContext setCurrentContext:_oglContext];
    CMVideoDimensions dimensions;
    dimensions.width = outSize.width;
    dimensions.height = outSize.height;
    [self deleteBuffers];
    if ( ! [self initializeBuffersWithOutputDimensions:dimensions retainedBufferCountHint:6] ) {
        @throw [NSException exceptionWithName:NSInternalInconsistencyException reason:@"Problem preparing renderer." userInfo:nil];
    }
}
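/*
 Usage sketch (an assumption about the call site, not taken from this file): when the
 output size is known up front, the renderer can be prepared without a format description:

     [renderer setUpOutSampleBuffer:CGSizeMake(720, 1280)
                     devicePosition:AVCaptureDevicePositionFront];

 Note that this implementation does not currently use the devicePosition parameter.
*/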
@end