What this post implements:
Open the front camera, capture a snapshot once stable face information is detected, and jump to another view controller.
Steps:
- Register an account and an App on the official site, download the SDK, and drag it into the project.
- Add a bridging header.
- Add the permission usage description to Info.plist.
- Link the required frameworks.
- Activate the engine.
- Use AVFoundation to implement a CameraController, open the front camera, and define a data-callback delegate.
- Create a VideoCheckViewController and build the page.
- Add a gyroscope check to guarantee photo sharpness.
- Implement the main logic in the delegate callback.
1. Register an account and an App on the official site, download the SDK, and drag it into the project.
Registering the App matters: it requires an enterprise-verified account, which is why neither the official demo nor mine ships with an appID and SDKKey. Fill in the appID your own company applied for.
2. Add a bridging header.
#ifndef bridge_header_h
#define bridge_header_h
#import <ArcSoftFaceEngine/ArcSoftFaceEngine.h>
#import <ArcSoftFaceEngine/ArcSoftFaceEngineDefine.h>
#import <ArcSoftFaceEngine/amcomdef.h>
#import <ArcSoftFaceEngine/merror.h>
#import "Utility.h"
#import "ASFVideoProcessor.h"
#endif /* bridge_header_h */
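Xcode only picks this header up if the target points at it. If the file was not generated for you, set the path by hand; the exact path below is an assumption based on the file name above:

Build Settings > Swift Compiler - General > Objective-C Bridging Header
    $(SRCROOT)/EWArcSoft-Swift/bridge_header.h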
3. Add the permission usage description to Info.plist
Remember: if the usage description is missing, activating the engine fails with error 94217 (unknown error).
<key>NSCameraUsageDescription</key>
<string>The app needs access to your camera</string>
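The key only supplies the prompt text; the app still has to request access at runtime before the session delivers frames. A minimal sketch (the function name and call site are mine):

import AVFoundation

/// Ask for camera access up front; the system prompt shows the
/// NSCameraUsageDescription string declared above.
func requestCameraAccess(_ completion: @escaping (Bool) -> Void) {
    switch AVCaptureDevice.authorizationStatus(for: .video) {
    case .authorized:
        completion(true)
    case .notDetermined:
        AVCaptureDevice.requestAccess(for: .video) { granted in
            DispatchQueue.main.async { completion(granted) }
        }
    default:
        /// .denied or .restricted: direct the user to Settings.
        completion(false)
    }
}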
4. Link the required frameworks
ArcSoftFaceEngine.framework is ArcSoft's main SDK;
libstdc++.6.0.0.tbd is a dependency it requires.
CoreMotion drives the gyroscope; leave it out if you don't need it. (The resulting target setup is sketched below.)
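For reference, this is roughly what the target's framework list ends up looking like. Whether ArcSoftFaceEngine needs to be embedded depends on how the SDK ships, so treat this as an assumption:

Target > General > Frameworks, Libraries, and Embedded Content
    ArcSoftFaceEngine.framework
    libstdc++.6.0.0.tbd
    CoreMotion.framework   (only if you use the gyroscope)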
5. Activate the engine
Fill in your appid and sdkkey:
@objc private func onClickActivateButton() {
    let appid = ""
    let sdkkey = ""
    let engine = ArcSoftFaceEngine()
    let mr = engine.active(withAppId: appid, sdkKey: sdkkey)
    if mr == ASF_MOK || mr == MERR_ASF_ALREADY_ACTIVATED {
        /// Add the action before presenting, so the alert actually shows the button.
        let alertController = UIAlertController(title: "SDK activated", message: "", preferredStyle: .alert)
        alertController.addAction(UIAlertAction(title: "OK", style: .cancel, handler: nil))
        self.present(alertController, animated: true, completion: nil)
    } else {
        let result = "SDK activation failed: \(mr)"
        let alertController = UIAlertController(title: result, message: "", preferredStyle: .alert)
        alertController.addAction(UIAlertAction(title: "OK", style: .cancel, handler: nil))
        self.present(alertController, animated: true, completion: nil)
    }
}
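For completeness, a hypothetical wiring of that action to a button (names and layout are mine):

/// Somewhere in viewDidLoad: a button that triggers activation.
let activateButton = UIButton(type: .system)
activateButton.frame = CGRect(x: 0, y: 0, width: 180, height: 44)
activateButton.center = view.center
activateButton.setTitle("Activate SDK", for: .normal)
activateButton.addTarget(self, action: #selector(onClickActivateButton), for: .touchUpInside)
view.addSubview(activateButton)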
6. Use AVFoundation to implement a CameraController: open the front camera and define a data-callback delegate.
//
// EWCameraController.swift
// EWArcSoft-Swift
//
// Created by Ethan.Wang on 2018/12/21.
// Copyright © 2018 Ethan. All rights reserved.
//
import UIKit
import AVFoundation
protocol EWCameraControllerDelegate: AnyObject {
    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection)
}
class EWCameraController: NSObject {
    /// weak to avoid a retain cycle with the owning view controller.
    public weak var delegate: EWCameraControllerDelegate?
    public var previewLayer: AVCaptureVideoPreviewLayer?
    /// AVCaptureSession is the core AVFoundation class for capturing video and audio; it coordinates the input and output streams.
    private let captureSession = AVCaptureSession()
    /// AVCaptureConnection links a media type received by the session to an output.
    private var videoConnection: AVCaptureConnection?
    /// The capture device.
    private var captureDevice: AVCaptureDevice!
    public func setUpCaptureSession(videoOrientation: AVCaptureVideoOrientation) {
        captureSession.beginConfiguration()
        /// sessionPreset controls the quality (and therefore bitrate) of the output stream.
        captureSession.sessionPreset = AVCaptureSession.Preset.photo
        /// Find an input device. builtInWideAngleCamera is the general-purpose camera, AVMediaType.video means video, and .front selects the front camera (use .back for the rear one).
        let availableDevices = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera], mediaType: AVMediaType.video, position: .front).devices
        /// Take the front camera.
        captureDevice = availableDevices.first
        do {
            /// Use the front camera as the session's input.
            let captureDeviceInput = try AVCaptureDeviceInput(device: captureDevice)
            if captureSession.canAddInput(captureDeviceInput) {
                captureSession.addInput(captureDeviceInput)
            }
        } catch {
            print(error.localizedDescription)
        }
        /// The video preview layer, i.e. the live camera view.
        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        /// How the preview fills its frame: scale to fill.
        previewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
        /// Configure the output stream.
        let dataOutput = AVCaptureVideoDataOutput()
        /// Pixel format of the output frames.
        dataOutput.videoSettings = [(kCVPixelBufferPixelFormatTypeKey as NSString): NSNumber(value: kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)] as [String: Any]
        /// Whether to drop new frames that arrive while an older frame is still being processed. true is the default; setting it to false drives memory usage up sharply.
        dataOutput.alwaysDiscardsLateVideoFrames = true
        /// Add the output to the session.
        if captureSession.canAddOutput(dataOutput) {
            captureSession.addOutput(dataOutput)
        }
        /// Deliver the sample-buffer delegate callbacks on a background queue.
        let queue = DispatchQueue(label: "com.brianadvent.captureQueue")
        dataOutput.setSampleBufferDelegate(self, queue: queue)
        videoConnection = dataOutput.connection(with: .video)
        guard videoConnection != nil else {
            return
        }
        /// Mirror the output; without this the captured image is flipped along the vertical axis.
        if videoConnection!.isVideoMirroringSupported {
            videoConnection?.isVideoMirrored = true
        }
        /// Match the video orientation to the interface orientation.
        if videoConnection!.isVideoOrientationSupported {
            videoConnection?.videoOrientation = videoOrientation
        }
        if captureSession.canSetSessionPreset(.iFrame1280x720) {
            captureSession.sessionPreset = .iFrame1280x720
        }
        captureSession.commitConfiguration()
    }
    public func startCaptureSession() {
        if !captureSession.isRunning {
            captureSession.startRunning()
        }
    }
    public func stopCaptureSession() {
        captureSession.stopRunning()
    }
}
extension EWCameraController: AVCaptureVideoDataOutputSampleBufferDelegate {
    /// Output-stream delegate method, called for every frame.
    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        if connection == videoConnection {
            self.delegate?.captureOutput(output, didOutput: sampleBuffer, from: connection)
        }
    }
}
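Before building the full page in step 7, the controller can be smoke-tested from any view controller with just a few lines (a sketch, assuming portrait orientation):

let camera = EWCameraController()
camera.setUpCaptureSession(videoOrientation: .portrait)
if let layer = camera.previewLayer {
    layer.frame = view.bounds
    view.layer.addSublayer(layer)
}
camera.startCaptureSession()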
7. Create a VideoCheckViewController and build the page.
import UIKit
import AVFoundation
import CoreMotion

/// The face-recognition page controller.
class EWVideoCheckViewController: UIViewController {
    /// Gyroscope manager.
    private let motionManager: CMMotionManager = CMMotionManager()
    /// Whether a photo has already been taken; gates the delegate callback.
    private var takePhoto: Bool = false
    /// Circular image view that shows the captured result.
    private let showImageView: UIImageView = {
        let imageView = UIImageView(frame: CGRect(x: (UIScreen.main.bounds.size.width - 230)/2, y: 231, width: 230, height: 230))
        imageView.layer.cornerRadius = 115
        imageView.layer.masksToBounds = true
        imageView.contentMode = .center
        return imageView
    }()
    /// Semi-transparent background view shown after the photo is taken.
    private lazy var imageBackView: UIImageView = {
        let imageView = UIImageView(frame: UIScreen.main.bounds)
        imageView.backgroundColor = UIColor.black.withAlphaComponent(0.7)
        imageView.isHidden = true
        imageView.addSubview(showImageView)
        return imageView
    }()
    /// Scanning-frame image view.
    private let scanningImageView: UIImageView = {
        let imageView = UIImageView(frame: CGRect(x: (UIScreen.main.bounds.size.width - 230)/2, y: 231, width: 230, height: 230))
        imageView.image = UIImage(named: "scanning")
        return imageView
    }()
    /// Camera controller.
    private var cameraController: EWCameraController = EWCameraController()
    /// ArcSoft's face-analysis processor.
    private var videoProcessor: ASFVideoProcessor = ASFVideoProcessor()
    /// Views used as overlays for every detected face rect.
    private var allFaceRectViewArray: [UIView] = []
    override func viewDidLoad() {
        super.viewDidLoad()
        /// Start the gyroscope.
        startMotionManager()
        /// Hand the interface orientation to the camera controller.
        let uiOrientation = UIApplication.shared.statusBarOrientation
        cameraController.delegate = self
        cameraController.setUpCaptureSession(videoOrientation: AVCaptureVideoOrientation(ui: uiOrientation))
        /// Put the camera preview layer on screen. A sublayer's frame lives in the superlayer's coordinate space, hence bounds.
        guard self.cameraController.previewLayer != nil else { return }
        self.view.layer.addSublayer(self.cameraController.previewLayer!)
        self.cameraController.previewLayer?.frame = self.view.layer.bounds
        /// Initialize the ArcSoft processor.
        videoProcessor.initProcessor()
        /// Rippling animation view.
        let animationView = EWRippleAnimationView(frame: CGRect(x: 0, y: 0, width: self.showImageView.frame.size.width, height: self.showImageView.frame.size.height))
        animationView.center = self.showImageView.center
        self.imageBackView.addSubview(animationView)
        self.imageBackView.bringSubviewToFront(self.showImageView)
        self.view.addSubview(imageBackView)
        self.view.addSubview(scanningImageView)
    }
    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        self.cameraController.startCaptureSession()
    }
    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)
        self.cameraController.stopCaptureSession()
    }
    /// Convert a face rect from ArcSoft's coordinates into a CGRect in view coordinates.
    private func dataFaceRectToViewFaceRect(faceRect: MRECT) -> CGRect {
        var frameRect: CGRect = CGRect.zero
        let viewFrame = self.view.frame
        let faceWidth = faceRect.right - faceRect.left
        let faceHeight = faceRect.bottom - faceRect.top
        frameRect.size.width = viewFrame.width/CGFloat(IMAGE_WIDTH)*CGFloat(faceWidth)
        frameRect.size.height = viewFrame.height/CGFloat(IMAGE_HEIGHT)*CGFloat(faceHeight)
        frameRect.origin.x = viewFrame.width/CGFloat(IMAGE_WIDTH)*CGFloat(faceRect.left)
        frameRect.origin.y = viewFrame.height/CGFloat(IMAGE_HEIGHT)*CGFloat(faceRect.top)
        return frameRect
    }
    /// Start gyroscope updates.
    private func startMotionManager() {
        motionManager.startGyroUpdates()
    }
    /// Extract a UIImage from a CMSampleBuffer.
    private func getImageFromSampleBuffer(buffer: CMSampleBuffer) -> UIImage? {
        if let pixelBuffer = CMSampleBufferGetImageBuffer(buffer) {
            let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
            let context = CIContext()
            let imageRect = CGRect(x: 0, y: 0, width: CVPixelBufferGetWidth(pixelBuffer), height: CVPixelBufferGetHeight(pixelBuffer))
            if let image = context.createCGImage(ciImage, from: imageRect) {
                return UIImage(cgImage: image, scale: UIScreen.main.scale, orientation: .up)
            }
        }
        return nil
    }
}
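viewDidLoad above calls AVCaptureVideoOrientation(ui:), which is not a system initializer; the demo presumably ships a small extension along these lines (a sketch; the same-named cases of the two enums correspond directly):

extension AVCaptureVideoOrientation {
    /// Map a UIInterfaceOrientation onto the capture orientation (assumed helper).
    init(ui: UIInterfaceOrientation) {
        switch ui {
        case .landscapeLeft:      self = .landscapeLeft
        case .landscapeRight:     self = .landscapeRight
        case .portraitUpsideDown: self = .portraitUpsideDown
        default:                  self = .portrait
        }
    }
}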
8. The gyroscope check from the step list has no section of its own: startMotionManager() in step 7 starts the updates, and the delegate callback below reads the rotation rate.
9. Implement the main logic in the delegate callback.
extension EWVideoCheckViewController: EWCameraControllerDelegate {
    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        if takePhoto == false {
            /// Convert the buffer and hand the data to ArcSoft's face processor.
            guard let cameraData = Utility.getCameraData(from: sampleBuffer) else { return }
            guard let faceInfoArray = self.videoProcessor.process(cameraData) as? [ASFVideoFaceInfo] else {
                Utility.freeCameraData(cameraData)
                return
            }
            DispatchQueue.main.async { [weak self] in
                guard let weakSelf = self else {
                    Utility.freeCameraData(cameraData)
                    return
                }
                /// Keep one overlay view per detected face so the rects can be checked.
                if weakSelf.allFaceRectViewArray.count < faceInfoArray.count {
                    for _ in faceInfoArray {
                        let view = UIView()
                        weakSelf.view.addSubview(view)
                        weakSelf.allFaceRectViewArray.append(view)
                    }
                }
                for (index, _) in faceInfoArray.enumerated() {
                    let faceRectView: UIView = weakSelf.allFaceRectViewArray[index]
                    let faceInfo: ASFVideoFaceInfo = faceInfoArray[index]
                    faceRectView.frame = weakSelf.dataFaceRectToViewFaceRect(faceRect: faceInfo.faceRect)
                    /// Check face3DAngle to make sure the face looks straight at the camera.
                    guard faceInfo.face3DAngle != nil else {
                        break
                    }
                    guard faceInfo.face3DAngle.status == 0 else {
                        break
                    }
                    guard abs(faceInfo.face3DAngle.rollAngle) <= 10,
                          abs(faceInfo.face3DAngle.yawAngle) <= 10,
                          abs(faceInfo.face3DAngle.pitchAngle) <= 10 else {
                        break
                    }
                    /// Check the face view's frame to make sure the face sits inside the scanning frame.
                    guard CGRect(x: 30, y: 150, width: UIScreen.main.bounds.size.width - 60, height: UIScreen.main.bounds.size.height - 300).contains(faceRectView.frame) else {
                        break
                    }
                    /// Check the gyroscope's rotation rate to make sure the phone is held as steady as possible.
                    guard let newestGyro = weakSelf.motionManager.gyroData else {
                        break
                    }
                    /// abs() matters here: the rate is signed, so a plain `<` check would pass during fast rotation in the negative direction.
                    guard abs(newestGyro.rotationRate.x) < 0.000005,
                          abs(newestGyro.rotationRate.y) < 0.000005,
                          abs(newestGyro.rotationRate.z) < 0.000005 else {
                        break
                    }
                    /// Every condition is met: take the photo.
                    weakSelf.takePhoto = true
                    /// Convert the sample buffer into a UIImage.
                    let resultImage = weakSelf.getImageFromSampleBuffer(buffer: sampleBuffer)
                    /// Reveal the preview view and put the result image into it.
                    weakSelf.imageBackView.isHidden = false
                    weakSelf.showImageView.image = resultImage
                    /// Shrink with an animation, then jump to the new page once it finishes.
                    UIView.animate(withDuration: 1.3, animations: {
                        weakSelf.showImageView.transform = CGAffineTransform(scaleX: 0.7, y: 0.7)
                    }, completion: { (finished) in
                        let vc = EWShowImageViewController()
                        vc.image = resultImage
                        DispatchQueue.main.asyncAfter(deadline: DispatchTime.now() + 2.0, execute: {
                            weakSelf.present(vc, animated: false, completion: nil)
                        })
                    })
                }
                /// Free the memory!!! Important!!! Freeing at the end of the async block keeps the data alive exactly as long as it is used and frees it exactly once per path.
                Utility.freeCameraData(cameraData)
            }
        }
    }
}
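EWShowImageViewController itself is not shown in this post; a minimal sketch, assuming it does nothing more than display the image it is handed:

/// Hypothetical destination page: shows the captured photo full screen.
class EWShowImageViewController: UIViewController {
    public var image: UIImage?
    override func viewDidLoad() {
        super.viewDidLoad()
        view.backgroundColor = .black
        let imageView = UIImageView(frame: view.bounds)
        imageView.contentMode = .scaleAspectFit
        imageView.image = image
        view.addSubview(imageView)
    }
}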
Other issues.
1. The official demo doesn't build.
The official demo has two problems: the ArcSoftFaceEngine and libstdc++.6.0.0.tbd frameworks are not added to the target, and the activation method ships without an appid and sdkkey, so you have to add your own by hand.
2. Activation fails with error 94217 (unknown error).
This happens when the camera usage description is missing from Info.plist.
3. After activation, opening the camera makes the app's memory climb until it crashes.
Check whether func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {}
actually calls
Utility.freeCameraData(cameraData)
Demo: EWArcSoft-Swift
After a successful run, watch the memory gauge: the data must be freed promptly so no leak builds up.
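If you process the frame synchronously on the capture queue instead of hopping to the main queue, defer makes the get/free pairing hard to forget. A sketch under that assumption (the callback in step 9 dispatches to the main queue and must therefore free inside its async block instead):

/// Hypothetical synchronous variant: defer runs on every exit path.
func processFrame(_ sampleBuffer: CMSampleBuffer) {
    guard let cameraData = Utility.getCameraData(from: sampleBuffer) else { return }
    defer { Utility.freeCameraData(cameraData) }
    guard let faces = videoProcessor.process(cameraData) as? [ASFVideoFaceInfo],
          !faces.isEmpty else { return }
    /// ... inspect faces here ...
}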
The core of the integration is the AVFoundation-based camera plus ArcSoft's own framework processing the data the camera hands back. I also relied on AVFoundation when I built a custom camera earlier; it really is important and worth studying in depth.
Objective-C version: OC ArcSoft Face Recognition SDK Integration
Questions and discussion are welcome.