美文网首页
AVFoundation 框架

AVFoundation 框架

作者: 小凡凡520 | 来源:发表于2020-02-12 14:04 被阅读0次
    一、视频播放
    • AVPlayerViewController
    import UIKit
    import AVKit
    
    class HHViewController: UIViewController {

        /// Presents a modal `AVPlayerViewController` and starts playback of a
        /// bundled media file once the presentation animation completes.
        override func viewDidLoad() {
            super.viewDidLoad()

            // Do any additional setup after loading the view.

            // NOTE: resource name/extension are placeholders in this demo.
            guard let path = Bundle.main.path(forResource: "", ofType: "") else { return }

            let playerVC = AVPlayerViewController()
            playerVC.player = AVPlayer(url: URL(fileURLWithPath: path))
            self.present(playerVC, animated: true) {
                playerVC.player?.play()
            }
        }
    }
    
    // 界面自定义
    
    import UIKit
    import AVKit
    
    class HHViewController: UIViewController {

        /// Plays a bundled media file by attaching an `AVPlayerLayer`
        /// directly to this view's layer tree (fully custom UI).
        override func viewDidLoad() {
            super.viewDidLoad()

            // Do any additional setup after loading the view.

            // NOTE: resource name/extension are placeholders in this demo.
            guard let path = Bundle.main.path(forResource: "", ofType: "") else { return }

            let player = AVPlayer(url: URL(fileURLWithPath: path))
            let playerLayer = AVPlayerLayer(player: player)
            playerLayer.frame = self.view.frame
            self.view.layer.addSublayer(playerLayer)

            player.play()
        }
    }
    
    
    二、视频录制
    import UIKit
    import AVFoundation
    
    class TestViewController: UIViewController, AVCaptureFileOutputRecordingDelegate {

        @IBOutlet weak var imageView: UIImageView!

        // Held as a property so the session outlives `test(_:)`.
        private var session: AVCaptureSession!

        override func viewDidLoad() {
            super.viewDidLoad()

            // Do any additional setup after loading the view.
        }

        /// Stops the running capture session, which finishes the recording.
        @IBAction func end(_ sender: Any) {
            // Optional chaining: safe even if tapped before test(_:) ran
            // (the original force-unwrapped and would crash).
            session?.stopRunning()
        }

        /// Configures a camera + microphone capture session with a live
        /// preview and starts recording to Documents/1.mp4.
        @IBAction func test(_ sender: Any) {
            session = AVCaptureSession()

            guard let videoDevice = AVCaptureDevice.default(for: .video),
                  let audioDevice = AVCaptureDevice.default(for: .audio) else {
                return
            }

            do {
                // BUG FIX: do/catch instead of try! so a permission or
                // configuration failure doesn't crash the app.
                let videoInput = try AVCaptureDeviceInput(device: videoDevice)
                let audioInput = try AVCaptureDeviceInput(device: audioDevice)
                let movieOutput = AVCaptureMovieFileOutput()

                if session.canAddInput(videoInput) {
                    session.addInput(videoInput)
                }
                if session.canAddInput(audioInput) {
                    session.addInput(audioInput)
                }
                if session.canAddOutput(movieOutput) {
                    session.addOutput(movieOutput)
                }

                // Record into Documents/1.mp4. The destination must NOT exist
                // when startRecording(to:) is called — AVCaptureMovieFileOutput
                // refuses to record over an existing file. BUG FIX: the
                // original also *created* an empty file here, which would make
                // the recording fail; only remove any stale file instead.
                // (No forced `as!` — String bridges to NSString with a plain `as`.)
                let outputFilePath = (NSHomeDirectory() as NSString).appending("/Documents/1.mp4")
                if FileManager.default.fileExists(atPath: outputFilePath) {
                    try FileManager.default.removeItem(atPath: outputFilePath)
                }

                let previewLayer = AVCaptureVideoPreviewLayer(session: session)
                previewLayer.frame = self.view.bounds
                self.view.layer.addSublayer(previewLayer)

                session.startRunning()

                // Convert with fileURLWithPath — bundle-style URL conversion
                // can produce a URL the capture output cannot open.
                let fileURL = URL(fileURLWithPath: outputFilePath)
                // Write sample buffers to the file as they are captured.
                movieOutput.startRecording(to: fileURL, recordingDelegate: self)
            } catch {
                print("Failed to start recording: \(error)")
            }
        }

        /// Called once recording to the file has actually begun.
        func fileOutput(_ output: AVCaptureFileOutput, didStartRecordingTo fileURL: URL, from connections: [AVCaptureConnection]) {
            print("didStartRecordingTo")
        }

        /// Called when recording finishes (or fails — check `error`).
        func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
            print("didFinishRecordingTo")
        }
    }
    
    三、生成视频缩略图
    • AVURLAsset
      该类是AVAsset的子类,AVAsset类专门用于获取多媒体的相关信息,包括获取多媒体的画面、声音等信息。而AVURLAsset子类的作用则是根据NSURL来初始化AVAsset对象。
    • AVAssetImageGenerator
      该类专门用于截取视频指定帧的画面。
    import UIKit
    import AVFoundation
    
    class HHViewController: UIViewController {

        override func viewDidLoad() {
            super.viewDidLoad()

            // Do any additional setup after loading the view.
        }

        /// Returns a thumbnail for the video at the given URL, or nil when a
        /// frame cannot be extracted.
        ///
        /// - Parameter vidoURL: URL of the video. (Label kept misspelled for
        ///   source compatibility with existing callers.)
        /// - Returns: the frame at t = 0 with the track's preferred transform
        ///   applied, or nil on failure.
        func imageWithVideo(vidoURL: URL) -> UIImage? {
            // AVURLAsset exposes the media's tracks and metadata.
            let asset = AVURLAsset(url: vidoURL)
            // AVAssetImageGenerator extracts still frames from an asset.
            let generator = AVAssetImageGenerator(asset: asset)
            // Honour the video track's orientation so the thumbnail is upright.
            generator.appliesPreferredTrackTransform = true

            // Request the frame at time 0 (0/10 s).
            let requestedTime = CMTimeMake(value: 0, timescale: 10)
            // BUG FIX: the original seeded this with timescale 0, an invalid
            // CMTime; .zero is the proper out-parameter placeholder.
            var actualTime = CMTime.zero
            do {
                let cgImage = try generator.copyCGImage(at: requestedTime, actualTime: &actualTime)
                return UIImage(cgImage: cgImage)
            } catch {
                // BUG FIX: don't swallow the failure silently like the
                // original `catch {}` did.
                print("Thumbnail generation failed: \(error)")
                return nil
            }
        }
    }
    
    
    四、自定义相机
    import UIKit
    import AVFoundation
    
    class TestViewController: UIViewController {

        @IBOutlet weak var imageView: UIImageView!

        override func viewDidLoad() {
            super.viewDidLoad()

            // Do any additional setup after loading the view.
        }

        @IBAction func end(_ sender: Any) {

        }

        /// Builds a photo-capture session, shows a live preview, then takes a
        /// still image after one second and displays it in `imageView`.
        @IBAction func test(_ sender: Any) {

            // Create the capture session.
            let captureSession = AVCaptureSession()
            // BUG FIX: the original only *queried* canSetSessionPreset and
            // discarded the result — the preset was never actually applied.
            if captureSession.canSetSessionPreset(.photo) {
                captureSession.sessionPreset = .photo
            }

            // Camera input.
            guard let videoDevice = AVCaptureDevice.default(for: .video) else {
                return
            }
            do {
                let videoInput = try AVCaptureDeviceInput(device: videoDevice)
                if captureSession.canAddInput(videoInput) {
                    captureSession.addInput(videoInput)
                }

                // Still-image output. NOTE: AVCaptureStillImageOutput is
                // deprecated since iOS 10 — AVCapturePhotoOutput is the
                // modern replacement.
                let imageOutput = AVCaptureStillImageOutput()
                imageOutput.outputSettings = [
                    AVVideoCodecKey: AVVideoCodecJPEG
                ]
                if captureSession.canAddOutput(imageOutput) {
                    captureSession.addOutput(imageOutput)
                }

                // Live preview layer (retains the session, keeping it alive).
                let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
                previewLayer.frame = self.view.bounds
                self.view.layer.addSublayer(previewLayer)

                captureSession.startRunning()

                // Give the camera a moment to warm up before capturing.
                DispatchQueue.main.asyncAfter(deadline: .now() + 1) {
                    guard let connection = imageOutput.connection(with: .video) else {
                        return
                    }
                    // Capture one frame and display it, then tear the
                    // preview down.
                    imageOutput.captureStillImageAsynchronously(from: connection) { sampleBuffer, _ in
                        guard let sampleBuffer = sampleBuffer,
                              let data = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer) else {
                            return
                        }
                        DispatchQueue.main.async {
                            self.imageView.image = UIImage(data: data)
                            previewLayer.removeFromSuperlayer()
                        }
                    }
                }
            } catch {
                print("Capture setup failed: \(error)")
            }
        }
    }
    
    五、回放
    六、编辑
    七、顺序播放多个音频
    八、音频
    import UIKit
    import AVFoundation
    
    class HHViewController: UIViewController, AVCaptureAudioDataOutputSampleBufferDelegate {

        private var previewLayer: AVCaptureVideoPreviewLayer?
        // BUG FIX: the session was a local in viewDidLoad, so it was
        // deallocated the moment the method returned and no audio was ever
        // captured. Keep a strong reference for the controller's lifetime.
        private var captureSession: AVCaptureSession?

        override func viewDidLoad() {
            super.viewDidLoad()

            // Do any additional setup after loading the view.

            // Create the capture session.
            let captureSession = AVCaptureSession()
            // BUG FIX: apply the preset instead of discarding the result of
            // the capability check.
            if captureSession.canSetSessionPreset(.photo) {
                captureSession.sessionPreset = .photo
            }
            self.captureSession = captureSession

            // Microphone input + audio sample-buffer output.
            guard let audioDevice = AVCaptureDevice.default(for: .audio) else {
                return
            }
            do {
                let audioInput = try AVCaptureDeviceInput(device: audioDevice)
                if captureSession.canAddInput(audioInput) {
                    captureSession.addInput(audioInput)
                }

                let audioOutput = AVCaptureAudioDataOutput()
                // Buffers arrive on the main queue here for simplicity; move
                // heavy processing to a background queue in real code.
                audioOutput.setSampleBufferDelegate(self, queue: DispatchQueue.main)
                if captureSession.canAddOutput(audioOutput) {
                    captureSession.addOutput(audioOutput)
                }

                // BUG FIX: the original never started the session.
                captureSession.startRunning()
            } catch {
                print("Audio capture setup failed: \(error)")
            }
        }

        /// Delegate callback — receives each captured audio sample buffer.
        func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {

        }
    }
    
    九、视频
    import UIKit
    import AVFoundation
    
    class HHViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {

        private var previewLayer: AVCaptureVideoPreviewLayer?
        // BUG FIX: the original used a local session that was deallocated
        // when viewDidLoad returned, so no frames were ever delivered. Keep
        // a strong reference for the controller's lifetime.
        private var captureSession: AVCaptureSession?

        override func viewDidLoad() {
            super.viewDidLoad()

            // Do any additional setup after loading the view.

            // Create the capture session.
            let captureSession = AVCaptureSession()
            // BUG FIX: apply the preset; the original discarded the result
            // of canSetSessionPreset.
            if captureSession.canSetSessionPreset(.photo) {
                captureSession.sessionPreset = .photo
            }
            self.captureSession = captureSession

            // Camera input + per-frame video output.
            guard let videoDevice = AVCaptureDevice.default(for: .video) else {
                return
            }
            do {
                let videoInput = try AVCaptureDeviceInput(device: videoDevice)
                if captureSession.canAddInput(videoInput) {
                    captureSession.addInput(videoInput)
                }

                let videoOutput = AVCaptureVideoDataOutput()
                // Drop frames that arrive while the delegate is still busy.
                videoOutput.alwaysDiscardsLateVideoFrames = true
                videoOutput.setSampleBufferDelegate(self, queue: DispatchQueue.main)
                if captureSession.canAddOutput(videoOutput) {
                    captureSession.addOutput(videoOutput)
                }

                // BUG FIX: the original never started the session.
                captureSession.startRunning()
            } catch {
                print("Video capture setup failed: \(error)")
            }
        }

        /// Delegate callback — receives each captured video frame.
        func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {

        }
    }
    
    十、操作相机
    import UIKit
    import AVFoundation
    
    class HHViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {

        private var previewLayer: AVCaptureVideoPreviewLayer?
        // BUG FIX: the session must outlive viewDidLoad; as a local it was
        // deallocated immediately and no frames were ever delivered.
        private var captureSession: AVCaptureSession?

        override func viewDidLoad() {
            super.viewDidLoad()

            // Do any additional setup after loading the view.

            let captureSession = AVCaptureSession()
            // BUG FIX: actually apply the preset (the original discarded the
            // result of canSetSessionPreset).
            if captureSession.canSetSessionPreset(.photo) {
                captureSession.sessionPreset = .photo
            }
            self.captureSession = captureSession

            guard let videoDevice = AVCaptureDevice.default(for: .video) else {
                return
            }
            do {
                // Initial (default) camera input.
                let videoInput = try AVCaptureDeviceInput(device: videoDevice)
                if captureSession.canAddInput(videoInput) {
                    captureSession.addInput(videoInput)
                }

                // Per-frame video output.
                let videoOutput = AVCaptureVideoDataOutput()
                videoOutput.alwaysDiscardsLateVideoFrames = true
                videoOutput.setSampleBufferDelegate(self, queue: DispatchQueue.main)
                if captureSession.canAddOutput(videoOutput) {
                    captureSession.addOutput(videoOutput)
                }

                // Swap to the back camera inside a begin/commitConfiguration
                // pair so the input change is applied atomically.
                guard let backDevice = change() else {
                    return
                }
                let backInput = try AVCaptureDeviceInput(device: backDevice)
                captureSession.beginConfiguration()
                captureSession.removeInput(videoInput)
                if captureSession.canAddInput(backInput) {
                    captureSession.addInput(backInput)
                }
                captureSession.commitConfiguration()

                // BUG FIX: the original never started the session.
                captureSession.startRunning()
            } catch {
                print("Camera setup failed: \(error)")
            }
        }

        /// Delegate callback — receives each captured video frame.
        func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {

        }

        /// Returns the back wide-angle camera, or nil when unavailable.
        func change() -> AVCaptureDevice? {
            // AVCaptureDevice.devices(for:) is deprecated since iOS 10;
            // a discovery session is the supported replacement.
            let discovery = AVCaptureDevice.DiscoverySession(
                deviceTypes: [.builtInWideAngleCamera],
                mediaType: .video,
                position: .back
            )
            return discovery.devices.first
        }
    }
    
    十一、补光
    十二、二维码识别
    import UIKit
    import AVFoundation
    
    class TestViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {

        private var session: AVCaptureSession!

        override func viewDidLoad() {
            super.viewDidLoad()

            // Do any additional setup after loading the view.
        }

        /// Stops scanning.
        @IBAction func end(_ sender: Any) {
            // Optional chaining: safe even if tapped before test(_:) ran
            // (the original force-unwrapped the IUO and would crash).
            session?.stopRunning()
        }

        /// Starts a QR-code scanning session with a full-screen preview.
        @IBAction func test(_ sender: Any) {

            // Session.
            session = AVCaptureSession()

            // Camera device.
            guard let device = AVCaptureDevice.default(for: .video) else {
                return
            }
            // Input. BUG FIX: try? instead of try! so a permission or
            // configuration failure doesn't crash.
            guard let input = try? AVCaptureDeviceInput(device: device) else {
                return
            }
            if session.canAddInput(input) {
                session.addInput(input)
            }

            // Metadata output. metadataObjectTypes must be set AFTER the
            // output has been attached to the session.
            let output = AVCaptureMetadataOutput()
            if session.canAddOutput(output) {
                session.addOutput(output)
            }
            output.metadataObjectTypes = [.qr]
            output.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)

            // Live preview.
            let previewLayer = AVCaptureVideoPreviewLayer(session: session)
            previewLayer.frame = self.view.bounds
            self.view.layer.addSublayer(previewLayer)

            // Start scanning.
            session.startRunning()
        }

        /// Delegate callback — prints the payload of every recognized QR code.
        func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
            for metadata in metadataObjects where metadata.type == .qr {
                // BUG FIX: unwrap stringValue so we print the payload itself
                // rather than Optional("...").
                if let code = metadata as? AVMetadataMachineReadableCodeObject,
                   let payload = code.stringValue {
                    print(payload)
                }
            }
        }
    }
    
    十三、直播
    import UIKit
    import AVFoundation
    
    class TestViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {

        private var session: AVCaptureSession!
        @IBOutlet weak var imageView: UIImageView!

        override func viewDidLoad() {
            super.viewDidLoad()

            // Do any additional setup after loading the view.

            // Pre-rotate the image view so the camera frames appear upright.
            // BUG FIX: M_PI_2 is deprecated in Swift — use CGFloat.pi / 2.
            imageView.transform = CGAffineTransform(rotationAngle: .pi / 2)
        }

        /// Stops the capture session.
        @IBAction func end(_ sender: Any) {
            // Optional chaining: safe if tapped before test(_:) ran.
            session?.stopRunning()
        }

        /// Starts a 720p capture session and mirrors every captured frame
        /// into `imageView` — a minimal "live" pipeline.
        @IBAction func test(_ sender: Any) {

            // Default camera input. BUG FIX: try? instead of try! so a
            // configuration failure doesn't crash.
            guard let frontDevice = AVCaptureDevice.default(for: .video),
                  let frontCameraInput = try? AVCaptureDeviceInput(device: frontDevice) else {
                return
            }

            // Per-frame video output, delivered on the main queue.
            let videoOutput = AVCaptureVideoDataOutput()
            videoOutput.setSampleBufferDelegate(self, queue: DispatchQueue.main)
            // Request bi-planar 4:2:0 YCbCr pixel buffers.
            videoOutput.videoSettings = [
                kCVPixelBufferPixelFormatTypeKey as String:
                    NSNumber(value: kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange)
            ]

            session = AVCaptureSession()
            session.sessionPreset = .hd1280x720
            // Attach input and output.
            if session.canAddInput(frontCameraInput) {
                session.addInput(frontCameraInput)
            }
            if session.canAddOutput(videoOutput) {
                session.addOutput(videoOutput)
            }

            // Full-screen live preview inserted behind all other layers.
            let previewLayer = AVCaptureVideoPreviewLayer(session: session)
            previewLayer.videoGravity = .resizeAspectFill
            previewLayer.frame = CGRect(x: 0, y: 0, width: self.view.frame.width, height: self.view.frame.height)
            self.view.layer.insertSublayer(previewLayer, at: 0)

            session.startRunning()
        }

        /// Converts every captured frame to a UIImage and displays it.
        func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
            // Pull the pixel buffer out of the sample buffer.
            guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
                return
            }
            // CVImageBuffer -> CIImage -> UIImage.
            let frame = UIImage(ciImage: CIImage(cvImageBuffer: imageBuffer))
            // Update UI on the main queue.
            DispatchQueue.main.async {
                self.imageView.image = frame
            }
        }
    }
    
    十四、聚焦
    • AVCaptureFocusModeLocked
      设置一个固定的聚焦点
    • AVCaptureFocusModeAutoFocus
      首次自动对焦然后锁定一个聚焦点
    • AVCaptureFocusModeContinuousAutoFocus
      指当场景改变,相机会自动重新对焦到画面的中心点
    十五、曝光模式
    • AVCaptureExposureModeContinuousAutoExposure
      自动调节曝光模式
    • AVCaptureExposureModeLocked
      锁定当前曝光等级,不再自动调节(可配合 exposurePointOfInterest 指定曝光参考点)
    十六、闪光灯模式
    • AVCaptureFlashModeOff
      永不开启
    • AVCaptureFlashModeOn
      总是开启
    • AVCaptureFlashModeAuto
      自动开启,根据光线判断
    十七、手电筒模式
    • AVCaptureTorchModeOff
    • AVCaptureTorchModeOn
    • AVCaptureTorchModeAuto
    十八、设置设备方向
    AVCaptureConnection *captureConnection = <#A capture connection#>;
    if ([captureConnection isVideoOrientationSupported])
    {
        AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationLandscapeLeft;
        [captureConnection setVideoOrientation:orientation];
    }
    
    十九、获取输出流
    • AVCaptureMovieFileOutput
      将数据写入文件
    • AVCaptureVideoDataOutput
      将视频数据以回调形式输出视频帧
    • AVCaptureAudioDataOutput
      将音频数据以回调形式输出音频帧
    • AVCaptureStillImageOutput
      捕捉静态图片
    二十、音频/视频的合成

    资源参考

    相关文章

      网友评论

          本文标题:AVFoundation 框架

          本文链接:https://www.haomeiwen.com/subject/pfmebftx.html