2018-02-08
Author: 叶孤城___ | Published 2018-02-08 12:59
    //
    //  Camera.swift
    //  CameaDemo
    //
    //  Created by zang qilong on 2018/2/8.
    //  Copyright © 2018 zang qilong. All rights reserved.
    //
    
    import UIKit
    import AVFoundation
    
    /// Receives each captured video frame on the camera's video queue.
    protocol CameraDelegate: AnyObject {
        func didOutputSample(sampleBuffer: CMSampleBuffer)
    }
    
    public enum PhysicalVideoDeviceType {
        case backCamera
        case frontCamera
        
        func captureDevicePosition() -> AVCaptureDevice.Position {
            switch self {
            case .backCamera: return .back
            case .frontCamera: return .front
            }
        }
        
        func device() -> AVCaptureDevice? {
            return AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: captureDevicePosition())
        }
    }
    
    enum GLESImageKitError: Error {
        case deviceNotFound
    }
    
    class Camera: NSObject {
        let sessionQueue = DispatchQueue(label: "com.glesImageKit.sessionQueue")
        let videoQueue = DispatchQueue(label: "com.glesImageKit.videoQueue")
        let audioQueue = DispatchQueue(label: "com.glesImageKit.audioQueue")
        
        let session = AVCaptureSession()
        
        var devicePosition: PhysicalVideoDeviceType = .backCamera
        var cameraDevice: AVCaptureDevice
        var cameraDeviceInput: AVCaptureDeviceInput
        let videoDataOutput: AVCaptureVideoDataOutput
        let videoConnection: AVCaptureConnection
        
        let microphoneDevice: AVCaptureDevice
        let audioDeviceInput: AVCaptureDeviceInput
        let audioDataOutput: AVCaptureAudioDataOutput
        let audioConnection: AVCaptureConnection
        
        // Gates frame delivery so the delegate never processes two frames at once.
        let semaphore = DispatchSemaphore(value: 1)
        
        // Weak to avoid a retain cycle between Camera and its consumer.
        weak var delegate: CameraDelegate?
        
        init(sessionPreset: AVCaptureSession.Preset, videoDeviceType: PhysicalVideoDeviceType = .backCamera) throws {
            
            session.beginConfiguration()
            /// Video Device Initialize
            if let videoDevice = videoDeviceType.device() {
                self.devicePosition = videoDeviceType
                self.cameraDevice = videoDevice
            } else {
                throw GLESImageKitError.deviceNotFound
            }
            
            cameraDeviceInput = try AVCaptureDeviceInput(device: cameraDevice)
            
            /// Audio Device Initialize
            if let audioDevice = AVCaptureDevice.default(.builtInMicrophone, for: .audio, position: .unspecified) {
                self.microphoneDevice = audioDevice
            } else {
                throw GLESImageKitError.deviceNotFound
            }
            
            audioDeviceInput = try AVCaptureDeviceInput(device: microphoneDevice)
            
            if session.canAddInput(cameraDeviceInput) {
                session.addInput(cameraDeviceInput)
            } else {
                print("can't add camera device")
            }
            
            if session.canAddInput(audioDeviceInput) {
                session.addInput(audioDeviceInput)
            } else {
                print("can't add audio device")
            }
            
            videoDataOutput = AVCaptureVideoDataOutput()
            // BGRA is the most convenient pixel format for CPU- or GPU-side processing.
            videoDataOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32BGRA)]
            videoDataOutput.alwaysDiscardsLateVideoFrames = false
            
            if session.canAddOutput(videoDataOutput) {
                session.addOutput(videoDataOutput)
            }
            // The connection only exists once the output has been added to the session.
            videoConnection = videoDataOutput.connection(with: AVMediaType.video)!
            
            audioDataOutput = AVCaptureAudioDataOutput()
            if session.canAddOutput(audioDataOutput) {
                session.addOutput(audioDataOutput)
            }
            
            audioConnection = audioDataOutput.connection(with: AVMediaType.audio)!
            
            session.sessionPreset = sessionPreset
            session.commitConfiguration()
            
            super.init()
            
            videoDataOutput.setSampleBufferDelegate(self, queue: videoQueue)
            audioDataOutput.setSampleBufferDelegate(self, queue: audioQueue)
        }
        
        func startCapture() {
            sessionQueue.async {
                if !self.session.isRunning {
                    self.session.startRunning()
                }
            }
        }
        
        func stopCapture() {
            sessionQueue.async {
                if self.session.isRunning {
                    self.session.stopRunning()
                }
            }
        }
        
        func switchCamera() throws {
            // Work out the target position first; only commit it once the
            // replacement device and input have been created successfully.
            let newPosition: PhysicalVideoDeviceType = (devicePosition == .backCamera) ? .frontCamera : .backCamera
            
            guard let device = newPosition.device() else {
                throw GLESImageKitError.deviceNotFound
            }
            
            let inputDevice = try AVCaptureDeviceInput(device: device)
            session.beginConfiguration()
            session.removeInput(self.cameraDeviceInput)
            self.devicePosition = newPosition
            self.cameraDevice = device
            self.cameraDeviceInput = inputDevice
            session.addInput(inputDevice)
            session.commitConfiguration()
        }
        
    }
    
    extension Camera: AVCaptureVideoDataOutputSampleBufferDelegate {
        func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
            if connection == videoConnection {
                // Skip this frame if the delegate is still busy with the previous
                // one, rather than blocking the capture queue.
                guard semaphore.wait(timeout: .now()) == .success else { return }
                delegate?.didOutputSample(sampleBuffer: sampleBuffer)
                semaphore.signal()
            }
        }
    }
    
    extension Camera: AVCaptureAudioDataOutputSampleBufferDelegate {
        func captureOutput(_ output: AVCaptureOutput, didDrop sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
            // Audio is captured but not processed in this demo.
        }
    }
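
That is the whole capture pipeline. To actually consume frames, some class adopts CameraDelegate (it must be a class, since the delegate reference is weak). Here is a minimal sketch of a consumer; the FrameLogger name and the CIImage conversion are illustrative, not part of the demo:

    import AVFoundation
    import CoreImage
    
    // Hypothetical consumer: wraps each BGRA frame in a CIImage.
    // didOutputSample runs on the camera's videoQueue, so any UI work
    // must hop back to the main thread.
    class FrameLogger: CameraDelegate {
        func didOutputSample(sampleBuffer: CMSampleBuffer) {
            guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
            let image = CIImage(cvPixelBuffer: pixelBuffer)
            print("captured frame: \(image.extent.size)")
        }
    }

Keep a strong reference to the consumer somewhere (for example as a property on the view controller), because Camera only holds it weakly.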
    
    
    //
    //  ViewController.swift
    //  CameaDemo
    //
    //  Created by zang qilong on 2018/2/8.
    //  Copyright © 2018 zang qilong. All rights reserved.
    //
    
    import UIKit
    import AVFoundation
    
    class ViewController: UIViewController {
        
        var camera: Camera!
        var previewLayer: AVCaptureVideoPreviewLayer!
    
        override func viewDidLoad() {
            super.viewDidLoad()
            do {
                camera = try Camera(sessionPreset: AVCaptureSession.Preset.photo)
            } catch {
                print(error)
                return // No camera available; nothing to preview.
            }
            
            previewLayer = AVCaptureVideoPreviewLayer(session: camera.session)
            previewLayer.frame = self.view.bounds
            self.view.layer.addSublayer(previewLayer)
        }
        
        override func viewDidAppear(_ animated: Bool) {
            super.viewDidAppear(animated)
            camera.startCapture()
        }
    
        override func didReceiveMemoryWarning() {
            super.didReceiveMemoryWarning()
            // Dispose of any resources that can be recreated.
        }
    
        @IBAction func switchCamera() {
            do {
                try camera.switchCamera()
            } catch {
                print(error)
            }
        }
    }
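
One thing the demo glosses over: since iOS 10 the app must declare NSCameraUsageDescription (and NSMicrophoneUsageDescription, since the session also captures audio) in Info.plist, and it is worth requesting access explicitly before starting capture. A sketch of how viewDidAppear could do that; placing the request here is my assumption, not part of the original code:

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        // Without permission, startCapture() would only deliver black frames.
        AVCaptureDevice.requestAccess(for: .video) { granted in
            guard granted else { return }
            DispatchQueue.main.async {
                self.camera.startCapture()
            }
        }
    }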
    
    
    
