Creation
//
// SHCamera.swift
// SHCamera
//
// Created by Mac on 2020/7/25.
// Copyright © 2020 Mac. All rights reserved.
//
import AVKit
import UIKit
class SHCamera: NSObject, AVCaptureAudioDataOutputSampleBufferDelegate {
/// Recording is paused until startRecord() is called
var isPuse:Bool = true
var exportModel:VideoExport = VideoExport.init()
var path = VideoExport.getCurrentFilePath()
let session = AVCaptureSession.init()
/// Preview view
var preview:UIView?
/// Video data output
let videoOutput:AVCaptureVideoDataOutput = AVCaptureVideoDataOutput.init()
/// Audio data output
let audioOutput:AVCaptureAudioDataOutput = AVCaptureAudioDataOutput.init()
var videoConnection: AVCaptureConnection?
var audioConnection: AVCaptureConnection?
var videoDataOutputQueue = DispatchQueue.init(label: "VideoData")
var audioDataOutputQueue = DispatchQueue.init(label: "AudioData")
func camera(view:UIView?) {
self.preview = view
if let view = self.preview {
let layer = AVCaptureVideoPreviewLayer.init(session: session)
layer.frame = view.bounds
// Insert the preview layer once, below any existing sublayers
view.layer.insertSublayer(layer, at: 0)
}
self.addDeviceAudioInput()
self.addDeviceVideoInput()
self.addOutAudioDevice()
self.addOutVideoDevice()
self.exportModel.initCommon(videoSize: CGSize.init(width: 960, height: 540), outputUrl: path)
}
func addDeviceVideoInput() {
let videoInput = self.getDevice(mediaType: AVMediaType.video, position: AVCaptureDevice.Position.back)
if let videoIn = videoInput {
if let device = try? AVCaptureDeviceInput.init(device: videoIn) {
if session.canAddInput(device) {
session.addInput(device)
}
}
}
}
func addDeviceAudioInput() {
let audioDevice = self.getDevice(mediaType: AVMediaType.audio, position: AVCaptureDevice.Position.unspecified)
if let audio = audioDevice {
if let input = try? AVCaptureDeviceInput.init(device: audio) {
if session.canAddInput(input) {
session.addInput(input)
}
}
}
}
func addOutVideoDevice() {
videoOutput.setSampleBufferDelegate(self, queue: videoDataOutputQueue)
if session.canAddOutput(videoOutput) {
session.addOutput(videoOutput)
}
self.videoConnection = videoOutput.connection(with: AVMediaType.video)
}
func addOutAudioDevice() {
audioOutput.setSampleBufferDelegate(self, queue: audioDataOutputQueue)
if session.canAddOutput(audioOutput) {
session.addOutput(audioOutput)
}
self.audioConnection = audioOutput.connection(with: AVMediaType.audio)
}
func getDevice(mediaType:AVMediaType,position:AVCaptureDevice.Position) -> AVCaptureDevice? {
if mediaType == .video {
let discovery = AVCaptureDevice.DiscoverySession.init(deviceTypes: [.builtInWideAngleCamera], mediaType: mediaType, position: position)
return discovery.devices.first
}
if mediaType == .audio {
let discovery = AVCaptureDevice.DiscoverySession.init(deviceTypes: [.builtInMicrophone], mediaType: mediaType, position: position)
return discovery.devices.first
}
return nil
}
func startRunning() {
// startRunning() blocks until the session starts; consider calling it off the main thread
session.startRunning()
}
func startRecord() {
self.isPuse = false
}
/// Pause recording
func puse() {
self.isPuse = true
}
}
extension SHCamera: AVCaptureVideoDataOutputSampleBufferDelegate {
func captureOutput(_ output: AVCaptureOutput, didDrop sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
print("didDrop")
}
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
if self.isPuse == true {
return
}
objc_sync_enter(self)
// Release the lock on every exit path, including the early return below
defer { objc_sync_exit(self) }
if let assetWriter = self.exportModel.assetWriter {
if assetWriter.status != .writing && assetWriter.status != .unknown {
return
}
}
if let assetWriter = self.exportModel.assetWriter, assetWriter.status == AVAssetWriter.Status.unknown {
assetWriter.startWriting()
assetWriter.startSession(atSourceTime: CMSampleBufferGetPresentationTimeStamp(sampleBuffer))
}
if connection == self.videoConnection,self.isPuse == false {
print("didOutput videoOutput")
videoDataOutputQueue.async {
if let videoWriterInput = self.exportModel.assetWriterVideoInput , videoWriterInput.isReadyForMoreMediaData {
videoWriterInput.append(sampleBuffer)
}
}
}
if connection == self.audioConnection,self.isPuse == false {
print("didOutput audioOutput")
audioDataOutputQueue.async {
if let audioWriterInput = self.exportModel.assetWriterAudioInput , audioWriterInput.isReadyForMoreMediaData {
audioWriterInput.append(sampleBuffer)
}
}
}
}
func finishWriting(ok:@escaping ()->Void) {
self.exportModel.finishWriting(ok: ok)
}
}
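The class above assumes the app already has camera and microphone access. A minimal sketch of requesting it with AVCaptureDevice.requestAccess, to be called before camera(view:) and startRunning() (the helper name requestCaptureAccess is only an illustration, not part of the original code):

import AVFoundation

// Request camera and microphone access up front; without both grants the
// session delivers no sample buffers. (Illustrative helper, not from the post.)
func requestCaptureAccess(_ completion: @escaping (Bool) -> Void) {
    AVCaptureDevice.requestAccess(for: .video) { videoGranted in
        AVCaptureDevice.requestAccess(for: .audio) { audioGranted in
            DispatchQueue.main.async {
                completion(videoGranted && audioGranted)
            }
        }
    }
}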
Export class
//
// VideoExport.swift
// SHCamera
//
// Created by Mac on 2020/7/25.
// Copyright © 2020 Mac. All rights reserved.
//
import AVKit
import UIKit
class VideoExport: NSObject {
var assetWriter:AVAssetWriter? = nil
var outputUrl:String = ""
var assetWriterVideoInput:AVAssetWriterInput?
var assetWriterAudioInput:AVAssetWriterInput?
class func getSaveDic() -> String {
// Cache directory path
var cachePath = NSSearchPathForDirectoriesInDomains(FileManager.SearchPathDirectory.cachesDirectory, FileManager.SearchPathDomainMask.userDomainMask, true).last!
cachePath = "\(cachePath)/SHMedia"
if FileManager.default.fileExists(atPath: cachePath) == false {
try? FileManager.default.createDirectory(atPath: cachePath, withIntermediateDirectories: true, attributes: nil)
}
return cachePath
}
class func getCurrentFilePath() -> String {
let dic = self.getSaveDic()
return "\(dic)/\(Int(Date.init().timeIntervalSince1970)).mov"
}
func initCommon(videoSize:CGSize,outputUrl:String) {
self.outputUrl = outputUrl
guard let assetWriter = try? AVAssetWriter.init(outputURL: URL.init(fileURLWithPath: outputUrl), fileType: .mov) else { return }
self.assetWriter = assetWriter
// Average bit rate in bits per second (the commented line is the rule-of-thumb alternative)
let averageBitRate = 1000000
// let averageBitRate = 3 * Int(videoSize.width * videoSize.height)
let compressionProperties = [
AVVideoProfileLevelKey: AVVideoProfileLevelH264MainAutoLevel,
AVVideoAllowFrameReorderingKey: false,
AVVideoExpectedSourceFrameRateKey: 30,
AVVideoMaxKeyFrameIntervalKey: 30,
AVVideoAverageBitRateKey: averageBitRate
] as [String : Any]
let outputSettings = [
AVVideoCodecKey: AVVideoCodecType.h264,
AVVideoWidthKey: videoSize.width,
AVVideoHeightKey: videoSize.height ,
AVVideoCompressionPropertiesKey: compressionProperties
] as [String : Any]
let assetWriterVideoInput = AVAssetWriterInput.init(mediaType: .video, outputSettings: outputSettings)
assetWriterVideoInput.expectsMediaDataInRealTime = true
if assetWriter.canAdd(assetWriterVideoInput) {
assetWriter.add(assetWriterVideoInput)
}
let audioOutputSettings = [
AVFormatIDKey: kAudioFormatMPEG4AAC,
AVEncoderBitRatePerChannelKey: 28000,
AVSampleRateKey: 22050,
AVNumberOfChannelsKey: 1]
let assetWriterAudioInput = AVAssetWriterInput(mediaType: .audio, outputSettings: audioOutputSettings)
assetWriterAudioInput.expectsMediaDataInRealTime = true
if assetWriter.canAdd(assetWriterAudioInput) {
assetWriter.add(assetWriterAudioInput)
}
self.assetWriterVideoInput = assetWriterVideoInput
self.assetWriterAudioInput = assetWriterAudioInput
}
func startWrite() {
self.assetWriter?.startWriting()
}
func finishWriting(ok:@escaping ()->Void) {
self.assetWriter?.finishWriting(completionHandler: {
ok()
})
}
func addVideo(sample:CMSampleBuffer) {
self.assetWriterVideoInput?.append(sample)
}
func addAudioVideo(sample:CMSampleBuffer) {
self.assetWriterAudioInput?.append(sample)
}
}
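One detail worth knowing about AVAssetWriter: it does not overwrite an existing file, so writing fails if something already sits at outputUrl. getCurrentFilePath() uses a timestamp, which avoids collisions in practice, but if you ever reuse a fixed path, clear it first. A minimal sketch (the helper removeExistingFile is an assumption, not from the post):

import Foundation

// Delete any leftover file at the output path before creating the writer,
// since AVAssetWriter refuses to write over an existing file.
func removeExistingFile(atPath path: String) {
    if FileManager.default.fileExists(atPath: path) {
        try? FileManager.default.removeItem(atPath: path)
    }
}

// e.g.
// removeExistingFile(atPath: path)
// exportModel.initCommon(videoSize: CGSize(width: 960, height: 540), outputUrl: path)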
Usage
//
// ViewController.swift
// SHCamera
//
// Created by Mac on 2020/7/25.
// Copyright © 2020 Mac. All rights reserved.
//
import AVKit
import UIKit
class ViewController: UIViewController {
var camera:SHCamera = SHCamera.init()
override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view.
self.camera.camera(view: self.view)
self.camera.startRunning()
}
@IBAction func wanchengClick(_ sender: Any) {
// self.camera.path
self.camera.finishWriting {
PhotoSaveTool.saveVideo(filePath: self.camera.path) { (succ, err) in
print("succ \(succ)")
}
}
}
@IBAction func zantingLuzhi(_ sender: Any) {
if self.camera.isPuse {
self.camera.startRecord()
}else{
self.camera.puse()
}
}
}
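For this controller to run on a device, the Info.plist also needs the usage-description keys for everything it touches: NSCameraUsageDescription, NSMicrophoneUsageDescription, and NSPhotoLibraryAddUsageDescription (or NSPhotoLibraryUsageDescription) for the save step; a missing key crashes the app the first time the corresponding permission prompt would appear.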
Save to the photo album
//
// PhotoSaveTool.swift
// SHCamera
//
// Created by Mac on 2020/7/25.
// Copyright © 2020 Mac. All rights reserved.
//
import Photos
import UIKit
class PhotoSaveTool: NSObject {
class func saveVideo(filePath:String,complete:@escaping (Bool,Error?)->Void) {
PHPhotoLibrary.shared().performChanges({
PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: URL(fileURLWithPath: filePath))
}) { (success, error) in
DispatchQueue.main.async {
complete(success,error)
}
}
}
func autho() {
PHPhotoLibrary.requestAuthorization { (status) in
DispatchQueue.main.async {
if status == PHAuthorizationStatus.denied {
// Denied: direct the user to Settings to grant photo access
} else {
// Authorized: saving to the photo library can proceed
}
}
}
}
}
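A small usage sketch for the tool (this wiring is an assumption, not shown in the post): request photo-library authorization first, then hand over the recorded file, for example inside the finishWriting completion:

// Hypothetical call site, e.g. in ViewController after finishWriting completes
PHPhotoLibrary.requestAuthorization { status in
    guard status == .authorized else { return }
    PhotoSaveTool.saveVideo(filePath: self.camera.path) { success, error in
        print("saved: \(success), error: \(String(describing: error))")
    }
}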