//
//  OOFaceRecognizeLoginViewController.swift
//  O2Platform
//
//  Created by FancyLou on 2018/10/15.
//  Copyright © 2018 zoneland. All rights reserved.
//

import UIKit
import CoreMotion
import CocoaLumberjack
import AVFoundation
import Promises
import O2OA_Auth_SDK
  14. class OOFaceRecognizeLoginViewController: UIViewController {
  15. var viewModel:OOLoginViewModel = {
  16. return OOLoginViewModel()
  17. }()
  18. var _detectImageQueue:DispatchQueue!
  19. var previewView:MGOpenGLView!
  20. var motionManager:CMMotionManager!
  21. var videoManager:MGVideoManager!
  22. var renderer:MGOpenGLRenderer!
  23. var markManager:MGFacepp!
  24. var hasVideoFormatDescription:Bool = false
  25. var remoteRecognizingUserFace: Bool = false
  26. var detectMode:MGFppDetectionMode = MGFppDetectionMode.trackingFast
  27. var pointsNum:Int = 81
  28. var detectRect:CGRect!
  29. var videoSize:CGSize!
  30. var currentFaceCount:Int = 0
  31. var orientation:Int = 90
  32. //MARK: - 播放音效
  33. var beepPlayer :AVAudioPlayer!
  34. //MARK: - system override
  35. deinit {
  36. self.previewView = nil
  37. self.renderer = nil
  38. }
  39. override func viewDidLoad() {
  40. super.viewDidLoad()
  41. // Do any additional setup after loading the view.
  42. self.createView()
  43. // 后台线程队列,用于解析解析人脸
  44. self._detectImageQueue = DispatchQueue(label: "com.megvii.image.detect")
  45. //属性初始化处理
  46. self.detectRect = CGRect.null
  47. self.videoSize = CGSize(width: 480, height: 640) //TODO 如何计算最优分辨率
  48. self.videoManager = MGVideoManager.videoPreset(AVCaptureSession.Preset.vga640x480.rawValue, devicePosition: AVCaptureDevice.Position.front, videoRecord: false, videoSound: false)
  49. self.videoManager.videoDelegate = self
  50. let path = Bundle.main.path(forResource: "megviifacepp_0_5_2_model", ofType: "")
  51. do {
  52. let data = try Data.init(contentsOf: URL(fileURLWithPath: path!))
  53. let mark = MGFacepp(model: data, maxFaceCount: 1) { (config) in
  54. config?.minFaceSize = 100 //最小人脸
  55. config?.interval = 40 //检测间隔
  56. config?.orientation = 90
  57. config?.detectionMode = MGFppDetectionMode.trackingFast
  58. config?.detectROI = MGDetectROI(left: 0, top: 0, right: 0, bottom: 0)
  59. config?.pixelFormatType = MGPixelFormatType.PixelFormatTypeRGBA
  60. }
  61. self.markManager = mark
  62. }catch {
  63. DDLogError("face++模型文件无法获取!!!")
  64. }
  65. self.pointsNum = 81
  66. self.detectMode = MGFppDetectionMode.trackingFast
  67. self.renderer = MGOpenGLRenderer()
  68. self.renderer.show3DView = false
  69. self.motionManager = CMMotionManager()
  70. self.motionManager.accelerometerUpdateInterval = 0.3
  71. let devicePosition = self.videoManager.devicePosition
  72. let motionQueue = OperationQueue()
  73. motionQueue.name = "com.megvii.gryo"
  74. self.motionManager.startAccelerometerUpdates(to: motionQueue) { (accelerometerData, error) in
  75. if accelerometerData != nil {
  76. if fabs(accelerometerData!.acceleration.z) > 0.7 {
  77. self.orientation = 90
  78. }else {
  79. if AVCaptureDevice.Position.back == devicePosition {
  80. if fabs(accelerometerData!.acceleration.x) < 0.4 {
  81. self.orientation = 90;
  82. }else if accelerometerData!.acceleration.x > 0.4 {
  83. self.orientation = 180;
  84. }else if accelerometerData!.acceleration.x < -0.4 {
  85. self.orientation = 0;
  86. }
  87. }else {
  88. if fabs(accelerometerData!.acceleration.x) < 0.4 {
  89. self.orientation = 90;
  90. }else if accelerometerData!.acceleration.x > 0.4 {
  91. self.orientation = 0;
  92. }else if accelerometerData!.acceleration.x < -0.4 {
  93. self.orientation = 180;
  94. }
  95. }
  96. if accelerometerData!.acceleration.y > 0.6 {
  97. self.orientation = 270;
  98. }
  99. }
  100. }
  101. }
  102. beepPlayerInit()
  103. }
  104. override func viewWillAppear(_ animated: Bool) {
  105. UIApplication.shared.isIdleTimerDisabled = true //屏幕不锁屏
  106. self.videoManager.startRunning()
  107. self.setUpCameraLayer()
  108. }
  109. override func viewWillDisappear(_ animated: Bool) {
  110. UIApplication.shared.isIdleTimerDisabled = false
  111. self.motionManager.stopAccelerometerUpdates()
  112. self.videoManager.stopRunning()
  113. }
  114. //MARK: - private
  115. /// 音效播放器初始化
  116. private func beepPlayerInit() {
  117. // 建立播放器
  118. let beepPath = Bundle.main.path(forResource: "beep", ofType: "wav")
  119. do {
  120. beepPlayer = try AVAudioPlayer(contentsOf: URL(fileURLWithPath: beepPath!))
  121. // 重複播放次數 設為 0 則是只播放一次 不重複
  122. beepPlayer.numberOfLoops = 0
  123. } catch {
  124. DDLogError("初始化audioPlayer异常,\(error)")
  125. }
  126. }
  127. /// 播放音效
  128. private func playBeepSound() {
  129. beepPlayer.play()
  130. }
  131. private func createView() {
  132. self.view.backgroundColor = UIColor.white
  133. self.title = "人脸识别登录"
  134. let left = UIBarButtonItem(image: UIImage(named: "icon_menu_window_close"), style: .done, target: self, action: #selector(close))
  135. self.navigationItem.leftBarButtonItem = left
  136. }
  137. /// 加载摄像头预览
  138. private func setUpCameraLayer() {
  139. if self.previewView == nil {
  140. self.previewView = MGOpenGLView(frame: CGRect.zero)
  141. self.previewView.autoresizingMask = UIView.AutoresizingMask(rawValue: UIView.AutoresizingMask.flexibleHeight.rawValue | UIView.AutoresizingMask.flexibleWidth.rawValue)
  142. let currentInterfaceOrientation = UIApplication.shared.statusBarOrientation
  143. let or = AVCaptureVideoOrientation.transform(ui: currentInterfaceOrientation)
  144. switch or {
  145. case .landscapeLeft:
  146. DDLogDebug("landscapeLeft.......")
  147. break
  148. case .landscapeRight:
  149. DDLogDebug("landscapeRight.......")
  150. break
  151. case .portrait:
  152. DDLogDebug("portrait.......")
  153. break
  154. case .portraitUpsideDown:
  155. DDLogDebug("portraitUpsideDown.......")
  156. break
  157. }
  158. let transform = self.videoManager.transformFromVideoBufferOrientation(to: or, withAutoMirroring: true)
  159. self.previewView.transform = transform
  160. self.view.insertSubview(self.previewView, at: 0)
  161. DDLogDebug("preview 已经贴上去了!!!!!!")
  162. var bounds = CGRect.zero
  163. bounds.size = self.view.convert(self.view.bounds, to: self.previewView).size
  164. self.previewView.bounds = bounds
  165. self.previewView.center = CGPoint(x: self.view.bounds.size.width/2, y: self.view.bounds.size.height/2)
  166. }
  167. }
  168. @objc private func close() {
  169. self.motionManager.stopAccelerometerUpdates()
  170. self.videoManager.stopRunning()
  171. self.dismiss(animated: true, completion: nil)
  172. }
  173. }
//MARK: - MGVideoDelegate extension
  175. extension OOFaceRecognizeLoginViewController: MGVideoDelegate {
  176. func mgCaptureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
  177. if !self.hasVideoFormatDescription {
  178. self.hasVideoFormatDescription = true
  179. let format = self.videoManager.formatDescription()
  180. self.renderer.prepareForInput(with: format?.takeUnretainedValue(), outputRetainedBufferCountHint: 6)
  181. }
  182. self.rotateAndDetectSampleBuffer(sampleBuffer: sampleBuffer)
  183. }
  184. func mgCaptureOutput(_ captureOutput: AVCaptureOutput!, error: Error!) {
  185. if error != nil {
  186. DDLogError("摄像头数据获取异常,\(error!)")
  187. }
  188. self.showSystemAlert(title: "提示", message: "摄像头不支持!") { (action) in
  189. self.close()
  190. }
  191. }
  192. /// 旋转并且显示
  193. ///
  194. /// - Parameter sampleBuffer: 输出流
  195. private func rotateAndDetectSampleBuffer(sampleBuffer: CMSampleBuffer?) {
  196. if self.markManager.status != .markWorking && sampleBuffer != nil {
  197. var bufferCopy: CMSampleBuffer?
  198. let copy = CMSampleBufferCreateCopy(allocator: kCFAllocatorDefault, sampleBuffer: sampleBuffer!, sampleBufferOut: &bufferCopy)
  199. if copy == noErr {
  200. let item = DispatchWorkItem {
  201. if self.markManager.getConfig()?.orientation != self.orientation.toInt32 {
  202. self.markManager.updateSetting({ (config) in
  203. config?.orientation = self.orientation.toInt32
  204. })
  205. }
  206. if self.detectMode == MGFppDetectionMode.trackingFast {
  207. self.trackSampleBuffer(detectSampleBufferRef: bufferCopy)
  208. }
  209. }
  210. self._detectImageQueue.async(execute: item)
  211. }else {
  212. DDLogError("copy 视频流出错!!!")
  213. }
  214. }
  215. }
  216. private func trackSampleBuffer(detectSampleBufferRef: CMSampleBuffer?) {
  217. guard let sample = detectSampleBufferRef else {
  218. return
  219. }
  220. let imageData = MGImageData.init(sampleBuffer: sample)
  221. self.markManager.beginDetectionFrame()
  222. let array = self.markManager.detect(with: imageData)
  223. // let faceModelArray = MGFaceModelArray()
  224. // faceModelArray.getFaceInfo = false
  225. // faceModelArray.get3DInfo = false
  226. // faceModelArray.detectRect = self.detectRect
  227. if array != nil && array!.count > 0 {
  228. // faceModelArray.faceArray = NSMutableArray.init(array: array!)
  229. let faceInfo = array![0]
  230. //self.markManager.getGetLandmark(faceInfo, isSmooth: true, pointsNumber: self.pointsNum.toInt32)
  231. if !self.remoteRecognizingUserFace {
  232. self.remoteRecognizingUserFace = true
  233. // 生成图片发送到服务器验证身份
  234. let image = MGImage.image(from: sample, orientation: UIImage.Orientation.rightMirrored)
  235. if image != nil {
  236. // 将检测出的人脸框放大
  237. let x = faceInfo.rect.origin.x;
  238. let y = faceInfo.rect.origin.y;
  239. let width = faceInfo.rect.size.width;
  240. let height = faceInfo.rect.size.height;
  241. let rect = CGRect(x: x-width/2, y: y-height/5, w: width*1.8, h: height*1.4);
  242. // 截取人脸部分的图片
  243. let faceImage = MGImage.croppedImage(image, rect: rect)
  244. viewModel.faceRecognize(image: faceImage!)
  245. .then { (userId) in
  246. DDLogInfo("userId:\(userId)")
  247. O2AuthSDK.shared.faceRecognizeLogin(userId: userId, callback: { (result, msg) in
  248. if result {
  249. DispatchQueue.main.async {
  250. self.playBeepSound()
  251. //登录成功,跳转到主页
  252. let destVC = O2MainController.genernateVC()
  253. destVC.selectedIndex = 2 // 首页选中 TODO 图标不亮。。。。。
  254. UIApplication.shared.keyWindow?.rootViewController = destVC
  255. UIApplication.shared.keyWindow?.makeKeyAndVisible()
  256. }
  257. }else {
  258. DDLogError("识别错误。。。。。。。\(msg ?? "")")
  259. self.remoteRecognizingUserFace = false
  260. }
  261. })
  262. }.catch { (error) in
  263. DDLogError("识别错误。。。。。。。\(error)")
  264. self.remoteRecognizingUserFace = false
  265. }
  266. }
  267. }
  268. }
  269. self.markManager.endDetectionFrame()
  270. self.displayWithfaceModel(faceModelArray: nil, detectSampleBufferRef: sample)
  271. }
  272. private func displayWithfaceModel(faceModelArray: MGFaceModelArray?, detectSampleBufferRef: CMSampleBuffer) {
  273. let item = DispatchWorkItem {
  274. let renderedPixelBuffer = self.renderer.drawPixelBuffer(detectSampleBufferRef, custumDrawing: {
  275. // self.renderer.drawFaceLandMark(faceModelArray)
  276. })
  277. if let pixel = renderedPixelBuffer?.takeRetainedValue() {
  278. self.previewView.display(pixel)
  279. }
  280. }
  281. DispatchQueue.main.async(execute: item)
  282. }
  283. }