Version History

Version | Date
---|---
V1.0 | 2021.03.12 (Friday)
Preface

iOS 11+ and macOS 10.13+ introduced the Vision framework, which provides face detection, object detection, object tracking and similar capabilities built on top of Core ML; you could call it one piece of applied AI. Over the next several posts we'll analyze the Vision framework in detail. If you're interested, see the articles below, followed by a minimal sketch of the basic request pattern.
1. Vision Framework Detailed Analysis (Part 1): Basic Overview (1)
2. Vision Framework Detailed Analysis (Part 2): Vision-Based Face Detection (1)
3. Vision Framework Detailed Analysis (Part 3): Vision-Based Face Detection (2)
4. Vision Framework Detailed Analysis (Part 4): Photo Stacking in iOS with Vision and Metal (1)
5. Vision Framework Detailed Analysis (Part 5): Photo Stacking in iOS with Vision and Metal (2)
6. Vision Framework Detailed Analysis (Part 6): Vision-Based Saliency Analysis (1)
7. Vision Framework Detailed Analysis (Part 7): Vision-Based Saliency Analysis (2)
8. Vision Framework Detailed Analysis (Part 8): Vision-Based QR Scanning (1)
9. Vision Framework Detailed Analysis (Part 9): Vision-Based QR Scanning (2)
10. Vision Framework Detailed Analysis (Part 10): Vision-Based Body Detect and Hand Pose (1)
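As promised above, here is a minimal sketch of the request-and-handler pattern that every Vision feature in this series builds on. The `detectFaces` helper and its use of `VNDetectFaceRectanglesRequest` are our own illustration, not part of the sample project:

```swift
import UIKit
import Vision

// A minimal sketch of the Vision workflow: build a request, run it through
// an image request handler, then read the typed observations back.
func detectFaces(in image: CGImage) {
  let request = VNDetectFaceRectanglesRequest { request, _ in
    guard let observations = request.results as? [VNFaceObservation] else {
      return
    }
    // Bounding boxes are normalized to 0...1 with the origin at the bottom-left.
    for observation in observations {
      print("Face at \(observation.boundingBox)")
    }
  }
  let handler = VNImageRequestHandler(cgImage: image, options: [:])
  try? handler.perform([request])
}
```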
Source Code

1. Swift

First, let's take a look at the project structure:
![](https://img.haomeiwen.com/i3691932/8fca7760bf0193d0.png)
Then comes the source code itself.
1. AppMain.swift
```swift
import SwiftUI
@main
struct AppMain: App {
var body: some Scene {
WindowGroup {
TabView {
ContentView()
.tabItem {
Label("StarCount", systemImage: "line.horizontal.star.fill.line.horizontal")
}
}
}
}
}
```
2. ContentView.swift
```swift
import SwiftUI
struct ContentView: View {
@State private var overlayPoints: [CGPoint] = []
@StateObject private var gameLogicController = GameLogicController()
var body: some View {
ZStack {
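      // The camera view reports detected fingertip points; they drive both
      // the orange overlay and the game logic's star-count check.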
CameraView {
overlayPoints = $0
gameLogicController.checkStarsCount($0.count)
}
.overlay(
FingersOverlay(with: overlayPoints)
.foregroundColor(.orange)
)
.edgesIgnoringSafeArea(.all)
StarAnimator(makeItRain: $gameLogicController.makeItRain) {
gameLogicController.didRainStars(count: $0)
}
}
.onAppear {
gameLogicController.start()
}
.overlay(
successBadge
.animation(.default)
)
}
@ViewBuilder
private var successBadge: some View {
if let number = gameLogicController.successBadge {
Image(systemName: "\(number).circle.fill")
.resizable()
.imageScale(.large)
.foregroundColor(.white)
.frame(width: 200, height: 200)
.shadow(radius: 5)
} else {
EmptyView()
}
}
}
struct ContentView_Previews: PreviewProvider {
static var previews: some View {
ContentView()
}
}
```
3. FingersOverlay.swift
```swift
import SwiftUI
struct FingersOverlay: Shape {
let points: [CGPoint]
private let pointsPath = UIBezierPath()
init(with points: [CGPoint]) {
self.points = points
}
func path(in rect: CGRect) -> Path {
for point in points {
pointsPath.move(to: point)
pointsPath.addArc(withCenter: point, radius: 5, startAngle: 0, endAngle: 2 * .pi, clockwise: true)
}
return Path(pointsPath.cgPath)
}
}
```
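One thing to note: `FingersOverlay` keeps a mutable `UIBezierPath` inside a value-type `Shape`, so arcs accumulate if `path(in:)` runs more than once on the same instance. Here is a sketch of an equivalent overlay built purely with SwiftUI's `Path` value type; the `FingersOverlayAlt` name is ours, not the project's:

```swift
import SwiftUI

// A sketch of the same overlay using SwiftUI's Path value type, which avoids
// sharing mutable UIBezierPath state between draw passes.
struct FingersOverlayAlt: Shape {
  let points: [CGPoint]

  func path(in rect: CGRect) -> Path {
    var path = Path()
    for point in points {
      // A dot of radius 5 centered on each fingertip.
      path.addEllipse(in: CGRect(x: point.x - 5, y: point.y - 5, width: 10, height: 10))
    }
    return path
  }
}
```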
4. GameLogicController.swift
```swift
import Combine
import Foundation
final class GameLogicController: ObservableObject {
private var goalCount = 0
@Published var makeItRain = false
@Published private(set) var successBadge: Int?
private var shouldEvaluateResult = true
func start() {
makeItRain = true
}
func didRainStars(count: Int) {
goalCount = count
}
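  // When the number of visible fingertips matches the number of fallen stars,
  // lock evaluation, show the badge for three seconds, then start a new round.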
func checkStarsCount(_ count: Int) {
if !shouldEvaluateResult {
return
}
if count == goalCount {
shouldEvaluateResult = false
successBadge = count
DispatchQueue.main.asyncAfter(deadline: .now() + 3) {
self.successBadge = nil
self.makeItRain = true
self.shouldEvaluateResult = true
}
}
}
}
```
5. CameraViewController.swift
```swift
import UIKit
import AVFoundation
import Vision
final class CameraViewController: UIViewController {
// swiftlint:disable:next force_cast
private var cameraView: CameraPreview { view as! CameraPreview }
private let videoDataOutputQueue = DispatchQueue(
label: "CameraFeedOutput",
qos: .userInteractive
)
private var cameraFeedSession: AVCaptureSession?
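  // Ask Vision for at most two hands per frame to keep per-frame work cheap.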
private let handPoseRequest: VNDetectHumanHandPoseRequest = {
let request = VNDetectHumanHandPoseRequest()
request.maximumHandCount = 2
return request
}()
var pointsProcessorHandler: (([CGPoint]) -> Void)?
override func loadView() {
view = CameraPreview()
}
override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
do {
if cameraFeedSession == nil {
try setupAVSession()
cameraView.previewLayer.session = cameraFeedSession
cameraView.previewLayer.videoGravity = .resizeAspectFill
}
cameraFeedSession?.startRunning()
} catch {
print(error.localizedDescription)
}
}
override func viewWillDisappear(_ animated: Bool) {
cameraFeedSession?.stopRunning()
super.viewWillDisappear(animated)
}
func setupAVSession() throws {
// Select a front facing camera, make an input.
guard let videoDevice = AVCaptureDevice.default(
.builtInWideAngleCamera,
for: .video,
position: .front)
else {
throw AppError.captureSessionSetup(
reason: "Could not find a front facing camera."
)
}
guard let deviceInput = try? AVCaptureDeviceInput(
device: videoDevice
) else {
throw AppError.captureSessionSetup(
reason: "Could not create video device input."
)
}
let session = AVCaptureSession()
session.beginConfiguration()
session.sessionPreset = AVCaptureSession.Preset.high
// Add a video input.
guard session.canAddInput(deviceInput) else {
throw AppError.captureSessionSetup(
reason: "Could not add video device input to the session"
)
}
session.addInput(deviceInput)
let dataOutput = AVCaptureVideoDataOutput()
if session.canAddOutput(dataOutput) {
session.addOutput(dataOutput)
// Add a video data output.
dataOutput.alwaysDiscardsLateVideoFrames = true
dataOutput.setSampleBufferDelegate(self, queue: videoDataOutputQueue)
} else {
throw AppError.captureSessionSetup(
reason: "Could not add video data output to the session"
)
}
session.commitConfiguration()
cameraFeedSession = session
}
func processPoints(_ fingerTips: [CGPoint]) {
// Convert points from AVFoundation coordinates to UIKit coordinates.
let convertedPoints = fingerTips.map {
cameraView.previewLayer.layerPointConverted(fromCaptureDevicePoint: $0)
}
pointsProcessorHandler?(convertedPoints)
}
}
extension CameraViewController: AVCaptureVideoDataOutputSampleBufferDelegate {
func captureOutput(
_ output: AVCaptureOutput,
didOutput sampleBuffer: CMSampleBuffer,
from connection: AVCaptureConnection
) {
var fingerTips: [CGPoint] = []
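    // Whatever happens below, hand the (possibly empty) result to the UI
    // on the main queue once this frame has been processed.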
defer {
DispatchQueue.main.sync {
self.processPoints(fingerTips)
}
}
let handler = VNImageRequestHandler(
cmSampleBuffer: sampleBuffer,
orientation: .up,
options: [:]
)
do {
// Perform VNDetectHumanHandPoseRequest
try handler.perform([handPoseRequest])
// Continue only when at least one hand was detected in the frame. We're interested in a maximum of two hands.
guard
let results = handPoseRequest.results?.prefix(2),
!results.isEmpty
else {
return
}
var recognizedPoints: [VNRecognizedPoint] = []
try results.forEach { observation in
// Get points for all fingers.
let fingers = try observation.recognizedPoints(.all)
// Look for tip points.
if let thumbTipPoint = fingers[.thumbTip] {
recognizedPoints.append(thumbTipPoint)
}
if let indexTipPoint = fingers[.indexTip] {
recognizedPoints.append(indexTipPoint)
}
if let middleTipPoint = fingers[.middleTip] {
recognizedPoints.append(middleTipPoint)
}
if let ringTipPoint = fingers[.ringTip] {
recognizedPoints.append(ringTipPoint)
}
if let littleTipPoint = fingers[.littleTip] {
recognizedPoints.append(littleTipPoint)
}
}
fingerTips = recognizedPoints.filter {
// Ignore low confidence points.
$0.confidence > 0.9
}
.map {
// Convert points from Vision coordinates to AVFoundation coordinates.
CGPoint(x: $0.location.x, y: 1 - $0.location.y)
}
} catch {
cameraFeedSession?.stopRunning()
print(error.localizedDescription)
}
}
}
```
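A note on the coordinate spaces above: Vision reports normalized points with the origin in the lower-left corner, while AVFoundation's capture-device space puts the origin in the upper-left, which is why `captureOutput` flips `y`. A small helper that makes the flip explicit (our own illustration, not part of the project):

```swift
import CoreGraphics

// Vision: normalized, origin at bottom-left. AVFoundation capture-device
// space: normalized, origin at top-left. Only the y-axis needs flipping.
func captureDevicePoint(fromVisionPoint point: CGPoint) -> CGPoint {
  CGPoint(x: point.x, y: 1 - point.y)
}
```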
6. CameraPreview.swift
```swift
import UIKit
import AVFoundation
final class CameraPreview: UIView {
var previewLayer: AVCaptureVideoPreviewLayer {
// swiftlint:disable:next force_cast
layer as! AVCaptureVideoPreviewLayer
}
override class var layerClass: AnyClass {
AVCaptureVideoPreviewLayer.self
}
}
```
7. CameraView.swift
```swift
import SwiftUI
struct CameraView: UIViewControllerRepresentable {
var pointsProcessorHandler: (([CGPoint]) -> Void)?
func makeUIViewController(context: Context) -> CameraViewController {
let cvc = CameraViewController()
cvc.pointsProcessorHandler = pointsProcessorHandler
return cvc
}
func updateUIViewController(
_ uiViewController: CameraViewController,
context: Context
) {
}
}
```
8. StarAnimator.swift
```swift
import SwiftUI
struct StarAnimator: UIViewRepresentable {
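  // Bridges the UIKit star animator into SwiftUI; the coordinator resets the
  // binding and forwards the dropped-star count once the rain starts.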
final class Coordinator: NSObject, StarAnimatorDelegate {
var parent: StarAnimator
init(_ parent: StarAnimator) {
self.parent = parent
}
func didStartRaining(count: Int) {
parent.makeItRain = false
parent.numberOfStarsHandler(count)
}
}
@Binding var makeItRain: Bool
var numberOfStarsHandler: (Int) -> Void
func makeUIView(context: Context) -> StarAnimatorView {
let uivc = StarAnimatorView()
uivc.delegate = context.coordinator
return uivc
}
func updateUIView(_ uiView: StarAnimatorView, context: Context) {
if makeItRain {
uiView.rain(shouldClearCanvas: true)
}
}
func makeCoordinator() -> Coordinator {
Coordinator(self)
}
}
```
9. StarAnimatorView.swift
```swift
import UIKit
protocol StarAnimatorDelegate: AnyObject {
func didStartRaining(count: Int)
}
final class StarAnimatorView: UIView {
private enum Constants {
static let animationDelay: TimeInterval = 0.1
static let defaultAnimationDuration: TimeInterval = 0.5
static let cleanupDelay: TimeInterval = 5
static let starWidth: CGFloat = 16
static let maxStarCount: Int = 10
}
weak var delegate: StarAnimatorDelegate?
private var cleanupWorkItem: DispatchWorkItem?
private lazy var animator: UIDynamicAnimator = {
UIDynamicAnimator(referenceView: self)
}()
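  // Boundaries along the bottom, right and left edges keep falling stars on screen.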
private lazy var collision: UICollisionBehavior = {
let behavior = UICollisionBehavior(items: [])
behavior.addBoundary(
withIdentifier: "bottom" as NSCopying,
from: CGPoint(x: 0, y: bounds.size.height),
to: CGPoint(x: bounds.size.width, y: bounds.size.height)
)
behavior.addBoundary(
withIdentifier: "right" as NSCopying,
from: CGPoint(x: bounds.size.width, y: 0),
to: CGPoint(x: bounds.size.width, y: bounds.size.height)
)
behavior.addBoundary(
withIdentifier: "left" as NSCopying,
from: CGPoint(x: 0, y: 0),
to: CGPoint(x: 0, y: bounds.size.height)
)
animator.addBehavior(behavior)
return behavior
}()
private lazy var gravity: UIGravityBehavior = {
let behavior = UIGravityBehavior(items: [])
behavior.magnitude = 0.4
animator.addBehavior(behavior)
return behavior
}()
private var rotationBehaviors: [UIDynamicItemBehavior] = []
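  // Drops a random number of stars from the top edge, schedules cleanup,
  // and reports the dropped count to the delegate.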
func rain(shouldClearCanvas: Bool) {
if frame.size.width == 0 {
return
}
if shouldClearCanvas {
cleanup()
}
cleanupWorkItem?.cancel()
cleanupWorkItem = DispatchWorkItem { [weak self] in
self?.cleanup()
}
if let item = cleanupWorkItem {
DispatchQueue.main.asyncAfter(deadline: .now() + Constants.cleanupDelay, execute: item)
}
let starsCount = Int.random(in: 1...Constants.maxStarCount)
for i in 0..<starsCount {
let star = RoundedCollisionImageView(
frame: CGRect(
x: CGFloat.random(in: 0 ..< frame.size.width - Constants.starWidth),
y: -Constants.starWidth,
width: Constants.starWidth,
height: Constants.starWidth
)
)
star.tintColor = .systemYellow
star.alpha = 0
star.image = UIImage(systemName: "star.fill")
star.preferredSymbolConfiguration =
.init(scale: [UIImage.SymbolScale.large, .medium, .small].randomElement() ?? .medium)
star.sizeToFit()
star.layer.shadowColor = UIColor.white.cgColor
star.layer.shadowOpacity = 0.3
star.layer.shadowOffset = .zero
addSubview(star)
gravity.addItem(star)
collision.addItem(star)
let dynamicBehavior = UIDynamicItemBehavior(items: [star])
dynamicBehavior.elasticity = 0.4
dynamicBehavior.addAngularVelocity(CGFloat.random(in: -10...10), for: star)
rotationBehaviors.append(dynamicBehavior)
animator.addBehavior(dynamicBehavior)
UIView.animate(withDuration: Constants.defaultAnimationDuration, delay: Constants.animationDelay * Double(i)) {
star.alpha = 1
}
}
print("Dropped Stars: \(starsCount)")
delegate?.didStartRaining(count: starsCount)
}
@objc private func cleanup() {
subviews.forEach {
$0.removeFromSuperview()
}
rotationBehaviors.forEach {
animator.removeBehavior($0)
}
rotationBehaviors.removeAll()
while !gravity.items.isEmpty {
gravity.removeItem(gravity.items[0])
}
while !collision.items.isEmpty {
collision.removeItem(collision.items[0])
}
}
}
final private class RoundedCollisionImageView: UIImageView {
override var collisionBoundsType: UIDynamicItemCollisionBoundsType {
.ellipse
}
}
```
10. AppError.swift
```swift
enum AppError: Error {
case captureSessionSetup(reason: String)
}
```
Afterword

This article mainly covered Vision-based Body Detect and Hand Pose. If you found it interesting, please give it a like or a follow~
![](https://img.haomeiwen.com/i3691932/7ea261f02e53220d.png)