Saving the ARKit Rendered Frame and the Camera Background Simultaneously

Author: 梁间 | Published 2018-12-05 17:45
Goal

Save the following two streams at 25 frames per second, keeping them in one-to-one correspondence: the final ARKit rendering that includes the virtual objects, and the ARKit background (the raw camera image) without the virtual objects.

Capturing the Data
@IBOutlet var arView: ARSCNView!

Extracting the final rendered frame, including the virtual objects:

let arImage = self.arView.snapshot()

Extracting the background without the virtual objects:

let bgImageBuffer = self.arView.session.currentFrame!.capturedImage
let bgImage = bgImageBuffer.uiImage

Converting CVPixelBuffer to UIImage

import UIKit
import VideoToolbox

extension CVPixelBuffer {
    // Convert a pixel buffer (e.g. ARFrame.capturedImage) into a UIImage.
    var uiImage: UIImage {
        var cgImage: CGImage?
        VTCreateCGImageFromCVPixelBuffer(self, options: nil, imageOut: &cgImage)
        return UIImage(cgImage: cgImage!)
    }
}
Saving as Images

To keep the two sets of data in one-to-one correspondence, we name the images with a shared timestamp.

func saveImage(arImage: UIImage, bgImage: UIImage) {
    // Both files share the same millisecond timestamp so they stay paired.
    let timeInterval: TimeInterval = Date().timeIntervalSince1970
    let millisecond = CLongLong(round(timeInterval * 1000))
    let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
    let arImagePath = documentsURL.appendingPathComponent("\(millisecond)_ar.png")
    let bgImagePath = documentsURL.appendingPathComponent("\(millisecond)_bg.png")

    if let pngArImageData = arImage.pngData() {
        try? pngArImageData.write(to: arImagePath)
    }
    if let pngBgImageData = bgImage.pngData() {
        try? pngBgImageData.write(to: bgImagePath)
    }
}
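
To hit the 25 fps target, the capture has to be driven on a fixed schedule. Below is a minimal sketch of one way to do that (my own arrangement, not verified in this post; saveQueue and startCapturing are names introduced purely for illustration): a Timer fires at 25 Hz on the main thread, and the PNG encoding and writing are pushed onto a background queue.

private let saveQueue = DispatchQueue(label: "image.save.queue", qos: .utility)

func startCapturing() {
    // Fire at 25 Hz on the main run loop; snapshot() must be called on the main thread.
    Timer.scheduledTimer(withTimeInterval: 1.0 / 25.0, repeats: true) { [weak self] _ in
        guard let self = self,
              let frame = self.arView.session.currentFrame else { return }
        let arImage = self.arView.snapshot()       // rendered frame with virtual objects
        let bgImage = frame.capturedImage.uiImage  // camera background without virtual objects
        // Encode and persist off the main thread.
        self.saveQueue.async {
            self.saveImage(arImage: arImage, bgImage: bgImage)
        }
    }
}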

Saving as images hits a performance bottleneck: both the image processing and the persistence are time-consuming. Even with background threads and similar tricks, about 3 frames per second (i.e., 6 saved images) is the limit on an iPad. Of course, this may just be a limit of my own skill; if anyone has a better approach, please let me know.

Saving as Video

For recording ARKit video there is a very good project, ARVideoKit, which can serve as a reference.

Create a class, WritAR, to write the video:

import Foundation
import AVFoundation

class WritAR: NSObject{
    
    private var assetWriter: AVAssetWriter!
    private var videoInput: AVAssetWriterInput!
    private var pixelBufferInput: AVAssetWriterInputPixelBufferAdaptor!
    private var videoOutputSettings: Dictionary<String, AnyObject>!
    private var audioSettings: [String: Any]?
    
    private var mp4FileName:String!
    
    var startingVideoTime: CMTime?
    var isWritingWithoutError: Bool?
    
    init(fileName:String, width:Int, height:Int){
        
        // Remove any previous recording with the same file name.
        let homePath = NSHomeDirectory()
        let fileManager = FileManager.default
        let output = homePath + "/Documents/" + fileName
        do {
            try fileManager.removeItem(atPath: output)
        } catch {
            print("delete failed!")
        }
        
        let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
        mp4FileName = fileName
        let outputUrl = documentsURL.appendingPathComponent(mp4FileName)
        do {
            assetWriter = try AVAssetWriter(outputURL: outputUrl, fileType: AVFileType.mp4)
        } catch {
            return
        }
        
        // H.264 video at the requested resolution.
        videoOutputSettings = [
            AVVideoCodecKey: AVVideoCodecType.h264 as AnyObject,
            AVVideoWidthKey: width as AnyObject,
            AVVideoHeightKey: height as AnyObject
        ]
        
        // Pixel buffer attributes for the input adaptor.
        let attributes: [String: Bool] = [
            kCVPixelBufferCGImageCompatibilityKey as String: true,
            kCVPixelBufferCGBitmapContextCompatibilityKey as String: true
        ]
        
        videoInput = AVAssetWriterInput(mediaType: .video, outputSettings: videoOutputSettings)
        
        videoInput.expectsMediaDataInRealTime = true
        pixelBufferInput = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: videoInput, sourcePixelBufferAttributes: attributes)
        
        if assetWriter.canAdd(videoInput) {
            assetWriter.add(videoInput)
        } else {
            print("asser writer error")
        }
    }
    
    
    func finish() {
        // finishWriting is asynchronous; the file is complete once the handler runs.
        self.videoInput.markAsFinished()
        self.assetWriter.finishWriting {
            print("finish")
        }
    }
    
    func insert(pixel buffer: CVPixelBuffer, with time: CMTime) {
        // Start the writer and session lazily on the first frame.
        if assetWriter.status == .unknown {
            print("unknown")
            guard startingVideoTime == nil else {
                isWritingWithoutError = false
                return
            }
            startingVideoTime = time
            if assetWriter.startWriting() {
                print("startWriting")
                assetWriter.startSession(atSourceTime: startingVideoTime!)
                isWritingWithoutError = true
            } else {
                isWritingWithoutError = false
            }
        } else if assetWriter.status == .failed {
            print("failed")
            isWritingWithoutError = false
            return
        }
        
        if videoInput.isReadyForMoreMediaData {
           // print(time)
            pixelBufferInput.append(buffer, withPresentationTime: time)
            isWritingWithoutError = true
        }
     
    }
    
}

Declarations

    private var arWrite: WritAR!
    private var bgWrite: WritAR!
    private var frameTime: CMTime!
    private var frameTimeValue = 0   // frame counter used when writing frames below

Initialization

    arWrite = WritAR(fileName: "ar.mp4", width: 1125, height: 2436)  // iPhone X rendered-frame size
    bgWrite = WritAR(fileName: "bg.mp4", width: 1920, height: 1440)  // iPhone X camera-frame size
    self.frameTime = CMTimeMake(value: 1, timescale: 25)
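
The sizes above are hard-coded for the iPhone X. A short sketch of how they could be derived at runtime instead (my own assumption, not part of the original setup):

// Derive the sizes at runtime instead of hard-coding iPhone X values (untested on other devices).
let scale = UIScreen.main.scale
let renderWidth = Int(arView.bounds.width * scale)    // size of the rendered snapshot
let renderHeight = Int(arView.bounds.height * scale)

if let buffer = arView.session.currentFrame?.capturedImage {
    let bgWidth = CVPixelBufferGetWidth(buffer)       // size of the camera image
    let bgHeight = CVPixelBufferGetHeight(buffer)
    arWrite = WritAR(fileName: "ar.mp4", width: renderWidth, height: renderHeight)
    bgWrite = WritAR(fileName: "bg.mp4", width: bgWidth, height: bgHeight)
}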

Writing Frames

if self.frameTimeValue == 0 {
    self.arWrite.insert(pixel: arImageBuffer!, with: CMTime.zero)
    self.bgWrite.insert(pixel: bgImageBuffer, with: CMTime.zero)
} else {
    let value = self.frameTimeValue - 1
    let lastTime = CMTimeMake(value: Int64(value), timescale: self.frameTime.timescale)
    let presentTime = CMTimeAdd(lastTime, self.frameTime)
    self.arWrite.insert(pixel: arImageBuffer!, with: presentTime)
    self.bgWrite.insert(pixel: bgImageBuffer, with: presentTime)
}

self.frameTimeValue += 1
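
The snippet above passes arImageBuffer, a CVPixelBuffer version of the rendered frame, while bgImageBuffer is simply currentFrame.capturedImage. The post does not show how arImageBuffer is produced; below is a minimal sketch (my own assumption, with pixelBuffer(from:) as an illustrative helper name) of converting the UIImage returned by arView.snapshot() into a CVPixelBuffer:

import UIKit
import CoreVideo

// Sketch (assumption, not from the original post): render a UIImage into a BGRA CVPixelBuffer.
func pixelBuffer(from image: UIImage) -> CVPixelBuffer? {
    guard let cgImage = image.cgImage else { return nil }
    let width = cgImage.width
    let height = cgImage.height
    let attrs: [String: Any] = [
        kCVPixelBufferCGImageCompatibilityKey as String: true,
        kCVPixelBufferCGBitmapContextCompatibilityKey as String: true
    ]
    var buffer: CVPixelBuffer?
    let status = CVPixelBufferCreate(kCFAllocatorDefault, width, height,
                                     kCVPixelFormatType_32BGRA,
                                     attrs as CFDictionary, &buffer)
    guard status == kCVReturnSuccess, let pixelBuffer = buffer else { return nil }

    CVPixelBufferLockBaseAddress(pixelBuffer, [])
    defer { CVPixelBufferUnlockBaseAddress(pixelBuffer, []) }

    // Draw the image into the pixel buffer's backing memory.
    guard let context = CGContext(data: CVPixelBufferGetBaseAddress(pixelBuffer),
                                  width: width, height: height,
                                  bitsPerComponent: 8,
                                  bytesPerRow: CVPixelBufferGetBytesPerRow(pixelBuffer),
                                  space: CGColorSpaceCreateDeviceRGB(),
                                  bitmapInfo: CGImageAlphaInfo.premultipliedFirst.rawValue
                                             | CGBitmapInfo.byteOrder32Little.rawValue) else {
        return nil
    }
    context.draw(cgImage, in: CGRect(x: 0, y: 0, width: width, height: height))
    return pixelBuffer
}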

Finishing

    arWrite.finish()
    bgWrite.finish()

With this approach I can save roughly 20 frames per second, but the resulting video is still not smooth enough. Testing shows the performance bottleneck is in the data capture:

let arImage = self.arView.snapshot()

This call is quite time-consuming, and I am still looking for a replacement.
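
One possible direction is to render the scene offscreen with SCNRenderer instead of calling snapshot(). This is an untested sketch, not something verified to be faster in this post:

import SceneKit
import Metal
import UIKit

// Untested sketch: mirror the ARSCNView's scene into an offscreen SCNRenderer.
let renderer = SCNRenderer(device: MTLCreateSystemDefaultDevice(), options: nil)
renderer.scene = arView.scene
renderer.pointOfView = arView.pointOfView   // follow the AR camera

// Render the current frame into a UIImage at the desired size.
let renderSize = CGSize(width: 1125, height: 2436)
let arImage = renderer.snapshot(atTime: CACurrentMediaTime(),
                                with: renderSize,
                                antialiasingMode: .none)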

Extracting Frame Images from the Video

This step is performed on macOS.

    @IBAction func mp4ToPng(button: NSButton) {
        let filePath = Bundle.main.path(forResource: "ar", ofType: "mp4")
        let videoURL = URL(fileURLWithPath: filePath!)
        let avAsset = AVAsset(url: videoURL)
        
        let generator = AVAssetImageGenerator(asset: avAsset)
        generator.appliesPreferredTrackTransform = true
        // Request exact frames rather than the nearest keyframe.
        generator.requestedTimeToleranceAfter = CMTime.zero
        generator.requestedTimeToleranceBefore = CMTime.zero
        
        var value: Int64 = 0   // frame index at 25 fps
        while true {
            let time = CMTimeMake(value: value, timescale: 25)
            var actualTime = CMTime.zero
            
            do {
                let imageRef: CGImage = try generator.copyCGImage(at: time, actualTime: &actualTime)
                let frameImg = NSImage(cgImage: imageRef, size: NSSize(width: 1125, height: 2436))
                
                let homePath = NSHomeDirectory()
                let imagePath = homePath + "/Documents/ar_\(value).png"
                
                print(imagePath)
                print(frameImg.savePNG(to: URL(fileURLWithPath: imagePath)))
            } catch {
                // copyCGImage throws once we read past the end of the video.
                break
            }
            
            value += 1
        }
    }

Converting NSImage to PNG

import Cocoa

extension NSBitmapImageRep {
    var png: Data? {
        return representation(using: .png, properties: [:])
    }
}
extension Data {
    var bitmap: NSBitmapImageRep? {
        return NSBitmapImageRep(data: self)
    }
}
extension NSImage {
    var png: Data? {
        return tiffRepresentation?.bitmap?.png
    }
    func savePNG(to url: URL) -> Bool {
        do {
            try png?.write(to: url)
            return true
        } catch {
            print(error)
            return false
        }
    }
}
