SHRenderView.swift
import AVKit
import OpenGLES
import UIKit
class SHRenderView: UIView {
enum NUM_UNIFORMS:Int {
case UNIFORM_Y = 0
case UNIFORM_UV = 1
case UNIFORM_COLOR_CONVERSION_MATRIX = 2
}
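// One case per uniform in Shader.fsh: the Y sampler, the UV sampler, and
// the YUV-to-RGB conversion matrix.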
var attrib_vertex:GLuint = 0
var attrib_textcoord:GLuint = 1
var _frameBufferHandle:GLuint = 0
var _colorBufferHandle:GLuint = 0
var _backingWidth:GLint = 0
var _backingHeight:GLint = 0
var uniforms:[NUM_UNIFORMS:GLint] = [NUM_UNIFORMS:GLint]();
var _preferredConversion:[GLfloat] = [GLfloat]()
let kColorConversion709:[GLfloat] = [
1.164, 1.164, 1.164,
0.0, -0.213, 2.112,
1.793, -0.533, 0.0,
]
let kColorConversion601:[GLfloat] = [
1.164, 1.164, 1.164,
0.0, -0.392, 2.017,
1.596, -0.813, 0.0,
]
let kColorConversion601FullRange:[GLfloat] = [
1.0, 1.0, 1.0,
0.0, -0.343, 1.765,
1.4, -0.711, 0.0,
]
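// The matrices above are the standard BT.709 and BT.601 YCbCr-to-RGB
// coefficients (video range, plus a full-range BT.601 variant), laid out
// column-major as glUniformMatrix3fv expects.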
var myProgram:GLuint?
var context:EAGLContext?
var _lumaTexture:CVOpenGLESTexture?;
var _chromaTexture:CVOpenGLESTexture?;
var _videoTextureCache:CVOpenGLESTextureCache?;
var isFullYUVRange:Bool = false
// Only a CAEAGLLayer-backed layer supports OpenGL ES drawing
override class var layerClass : AnyClass {
return CAEAGLLayer.self
}
override init(frame: CGRect) {
super.init(frame: frame)
self.initCommon()
}
required init?(coder: NSCoder) {
super.init(coder: coder)
self.initCommon()
}
func setupGL() {
// Make this the current OpenGL context
if !EAGLContext.setCurrent(context) {
print("Failed to set current OpenGL context")
return
}
self.setupBuffers()
self.setupProgram()
if (_videoTextureCache == nil) {
let err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, nil, self.context!, nil, &_videoTextureCache);
if (err != noErr) {
NSLog("Error at CVOpenGLESTextureCacheCreate %d", err);
return;
}
}
}
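// Note: setupGL() should run after the view is in the hierarchy with its
// final size, because setupBuffers() sizes the renderbuffer from the layer.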
func initCommon() {
self.contentScaleFactor = UIScreen.main.scale
self.setupLayer()
self.setupContext()
// setupProgram() is deliberately left to setupGL(); calling it here as
// well would load and link the program twice.
_preferredConversion = kColorConversion709
}
func setupBuffers() {
glDisable(GLenum(GL_DEPTH_TEST));
glEnableVertexAttribArray(attrib_vertex);
glVertexAttribPointer(
GLuint(attrib_vertex),
2,
GLenum(GL_FLOAT),
GLboolean(GL_FALSE),
GLsizei(MemoryLayout<GLfloat>.size * 2), UnsafeRawPointer(bitPattern:0 ))
glEnableVertexAttribArray(attrib_textcoord);
glVertexAttribPointer(
GLuint(attrib_textcoord),
2,
GLenum(GL_FLOAT),
GLboolean(GL_FALSE),
GLsizei(MemoryLayout<GLfloat>.size * 2), UnsafeRawPointer(bitPattern:0 ))
glGenFramebuffers(1, &_frameBufferHandle);
glBindFramebuffer(GLenum(GL_FRAMEBUFFER), _frameBufferHandle);
glGenRenderbuffers(1, &_colorBufferHandle);
glBindRenderbuffer(GLenum(GL_RENDERBUFFER), _colorBufferHandle);
context?.renderbufferStorage(Int(GL_RENDERBUFFER), from: layer as? CAEAGLLayer)
glGetRenderbufferParameteriv(GLenum(GL_RENDERBUFFER), GLenum(GL_RENDERBUFFER_WIDTH), &_backingWidth);
glGetRenderbufferParameteriv(GLenum(GL_RENDERBUFFER), GLenum(GL_RENDERBUFFER_HEIGHT), &_backingHeight);
glFramebufferRenderbuffer(GLenum(GL_FRAMEBUFFER), GLenum(GL_COLOR_ATTACHMENT0), GLenum(GL_RENDERBUFFER), _colorBufferHandle);
if (glCheckFramebufferStatus(GLenum(GL_FRAMEBUFFER)) != GL_FRAMEBUFFER_COMPLETE) {
NSLog("Failed to make complete framebuffer object %x", glCheckFramebufferStatus(GLenum(GL_FRAMEBUFFER)));
}
}
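// Not in the original post: if the view can be resized after setupGL(),
// the renderbuffer storage must be re-created at the new layer size.
// A minimal sketch of that, assuming setupGL() has already run:
override func layoutSubviews() {
super.layoutSubviews()
guard _colorBufferHandle != 0 else { return }
_ = EAGLContext.setCurrent(context)
glBindRenderbuffer(GLenum(GL_RENDERBUFFER), _colorBufferHandle)
context?.renderbufferStorage(Int(GL_RENDERBUFFER), from: layer as? CAEAGLLayer)
glGetRenderbufferParameteriv(GLenum(GL_RENDERBUFFER), GLenum(GL_RENDERBUFFER_WIDTH), &_backingWidth)
glGetRenderbufferParameteriv(GLenum(GL_RENDERBUFFER), GLenum(GL_RENDERBUFFER_HEIGHT), &_backingHeight)
}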
func setupProgram() {
self.myProgram = GLESUtils.loanProgram(verShaderFileName: "Shader.vsh", fragShaderFileName: "Shader.fsh")
guard let myProgram = myProgram else {
return
}
// Bind attribute locations (these only take effect at link time).
glBindAttribLocation(myProgram, attrib_vertex, "position")
glBindAttribLocation(myProgram, attrib_textcoord, "texCoord")
// Get uniform locations.
uniforms[.UNIFORM_Y] = glGetUniformLocation(myProgram, "SamplerY")
uniforms[.UNIFORM_UV] = glGetUniformLocation(myProgram, "SamplerUV")
uniforms[.UNIFORM_COLOR_CONVERSION_MATRIX] = glGetUniformLocation(myProgram, "colorConversionMatrix")
glUseProgram(myProgram)
// Texture unit 0 feeds SamplerY; unit 1 feeds SamplerUV.
glUniform1i(uniforms[.UNIFORM_Y]!, 0)
glUniform1i(uniforms[.UNIFORM_UV]!, 1)
glUniformMatrix3fv(uniforms[.UNIFORM_COLOR_CONVERSION_MATRIX]!, 1, GLboolean(GL_FALSE), _preferredConversion)
}
fileprivate func setupLayer() {
let eagLayer = layer as? CAEAGLLayer
// CALayer is transparent by default; it must be made opaque to be visible
eagLayer?.isOpaque = true
// Drawable properties: do not retain the backing after presenting, and use the RGBA8 color format
eagLayer?.drawableProperties = [kEAGLDrawablePropertyRetainedBacking: false, kEAGLDrawablePropertyColorFormat: kEAGLColorFormatRGBA8]
}
fileprivate func setupContext() {
// Specify the OpenGL ES API version; OpenGL ES 2.0 is used here
context = EAGLContext(api: .openGLES2)
if context == nil {
print("Failed to initialize OpenGL ES 2.0 context")
return
}
// Make this the current context
if !EAGLContext.setCurrent(context) {
print("Failed to set current OpenGL context")
return
}
}
func display(_ pixelBuffer:CVPixelBuffer) {
self.cleanUpTextures()
let frameWidth = CVPixelBufferGetWidth(pixelBuffer)
let frameHeight = CVPixelBufferGetHeight(pixelBuffer)
guard let cache = _videoTextureCache else {
NSLog("No video texture cache");
return;
}
if (EAGLContext.current() != context) {
EAGLContext.setCurrent(context)
}
/*
Use the color attachment of the pixel buffer to determine the
appropriate color conversion matrix.
*/
let colorAttachments = CVBufferGetAttachment(pixelBuffer, kCVImageBufferYCbCrMatrixKey, nil)
// CVBufferGetAttachment follows the Get rule, so take the value unretained.
if let v = colorAttachments?.takeUnretainedValue(), CFEqual(v, kCVImageBufferYCbCrMatrix_ITU_R_601_4) {
_preferredConversion = isFullYUVRange ? kColorConversion601FullRange : kColorConversion601
} else {
_preferredConversion = kColorConversion709
}
/*
CVOpenGLESTextureCacheCreateTextureFromImage creates a GLES texture
optimally from a CVPixelBufferRef. Create Y and UV textures from the
pixel buffer; they are sampled when drawing into the framebuffer.
*/
// Y-plane.
glActiveTexture(GLenum(GL_TEXTURE0));
var err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
cache,
pixelBuffer,
nil,
GLenum(GL_TEXTURE_2D),
GL_LUMINANCE,
GLsizei(frameWidth),
GLsizei(frameHeight),
GLenum(GL_LUMINANCE),
GLenum(GL_UNSIGNED_BYTE),
0,
&_lumaTexture);
print("_lumaTexture \(_lumaTexture)")
if (err != noErr) {
NSLog("==Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
}
glBindTexture(GLenum(GL_TEXTURE_2D), CVOpenGLESTextureGetName(_lumaTexture!));
glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_MIN_FILTER), GL_LINEAR);
glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_MAG_FILTER), GL_LINEAR);
glTexParameterf(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_S), GLfloat(GL_CLAMP_TO_EDGE));
glTexParameterf(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_T), GLfloat(GL_CLAMP_TO_EDGE));
// UV-plane.
glActiveTexture(GLenum(GL_TEXTURE1));
err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
cache,
pixelBuffer,
nil,
GLenum(GL_TEXTURE_2D),
GL_LUMINANCE_ALPHA,
GLsizei(Double(frameWidth) / 2.0),
GLsizei(Double(frameHeight) / 2.0),
GLenum(GL_LUMINANCE_ALPHA),
GLenum(GL_UNSIGNED_BYTE),
1,
&_chromaTexture);
if err != noErr || _chromaTexture == nil {
NSLog("Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err)
return
}
glBindTexture(GLenum(GL_TEXTURE_2D), CVOpenGLESTextureGetName(_chromaTexture!));
glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_MIN_FILTER), GL_LINEAR);
glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_MAG_FILTER), GL_LINEAR);
glTexParameterf(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_S), GLfloat(GL_CLAMP_TO_EDGE));
glTexParameterf(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_T), GLfloat(GL_CLAMP_TO_EDGE));
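// Texture unit 0 now holds the Y plane and unit 1 the UV plane, matching
// the glUniform1i bindings made in setupProgram().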
glBindFramebuffer(GLenum(GL_FRAMEBUFFER), _frameBufferHandle);
// Set the view port to the entire view.
glViewport(0, 0, _backingWidth, _backingHeight);
glClearColor(0.1, 0.0, 0.0, 1.0);
glClear(GLbitfield(GL_COLOR_BUFFER_BIT));
// Use shader program.
glUseProgram(self.myProgram!);
glUniformMatrix3fv(uniforms[.UNIFORM_COLOR_CONVERSION_MATRIX]!, 1, GLboolean(GL_FALSE), _preferredConversion);
// Set up the quad vertices with respect to the aspect ratio of the video frame.
let vertexSamplingRect = AVMakeRect(aspectRatio: CGSize(width: frameWidth, height: frameHeight), insideRect: self.layer.bounds);
// Compute normalized quad coordinates to draw the frame into.
var normalizedSamplingSize = CGSize(width: 0.0, height: 0.0);
let cropScaleAmount = CGSize(width: vertexSamplingRect.size.width/self.layer.bounds.size.width, height: vertexSamplingRect.size.height/self.layer.bounds.size.height);
// Normalize the quad vertices.
if (cropScaleAmount.width > cropScaleAmount.height) {
normalizedSamplingSize.width = 1.0;
normalizedSamplingSize.height = cropScaleAmount.height/cropScaleAmount.width;
}
else {
normalizedSamplingSize.width = cropScaleAmount.width/cropScaleAmount.height;
normalizedSamplingSize.height = 1.0;
}
/*
The quad vertex data defines the region of the 2D plane onto which we
draw our pixel buffers. Vertex data formed using (-1,-1) and (1,1) as
the bottom-left and top-right coordinates respectively covers the
entire screen. (In the original listing the vertex and texture arrays
were named and wired the wrong way around; they are swapped back here.)
*/
let quadVertexData:[GLfloat] = [
GLfloat(-1 * normalizedSamplingSize.width), GLfloat(-1 * normalizedSamplingSize.height),
GLfloat(normalizedSamplingSize.width), GLfloat(-1 * normalizedSamplingSize.height),
GLfloat(-1 * normalizedSamplingSize.width), GLfloat(normalizedSamplingSize.height),
GLfloat(normalizedSamplingSize.width), GLfloat(normalizedSamplingSize.height),
];
// Texture coordinates map the full Y/UV textures onto the quad.
let quadTextureData:[GLfloat] = [
0, 0,
1, 0,
0, 1,
1, 1,
];
// Update the vertex data.
glVertexAttribPointer(attrib_vertex, 2, GLenum(GL_FLOAT), 0, 0, quadVertexData);
glEnableVertexAttribArray(attrib_vertex);
glVertexAttribPointer(attrib_textcoord, 2, GLenum(GL_FLOAT), 0, 0, quadTextureData);
glEnableVertexAttribArray(attrib_textcoord);
glDrawArrays(GLenum(GL_TRIANGLE_STRIP), 0, 4);
glBindRenderbuffer(GLenum(GL_RENDERBUFFER), _colorBufferHandle);
if (EAGLContext.current() == context) {
context?.presentRenderbuffer(Int(GL_RENDERBUFFER));
}
}
func cleanUpTextures() {
_lumaTexture = nil
_chromaTexture = nil
if let cache = _videoTextureCache {
CVOpenGLESTextureCacheFlush(cache, 0);
}
}
}
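GLESUtils is used above but never shown in the post (the method name loanProgram is kept as-is; it is presumably a typo for loadProgram). As a rough, hypothetical stand-in for what it is assumed to do: load both shader sources from the bundle, compile them, link a program, and return nil on failure. Note that SHRenderView.setupProgram calls glBindAttribLocation after this returns, i.e. after linking; for those bindings to stick, the real GLESUtils either binds the attributes itself before linking, or the program would need re-linking.
import Foundation
import OpenGLES

class GLESUtils {
    // Hypothetical stand-in: load, compile, and link the two bundled shaders.
    static func loanProgram(verShaderFileName: String, fragShaderFileName: String) -> GLuint? {
        guard let vsh = compileShader(file: verShaderFileName, type: GLenum(GL_VERTEX_SHADER)),
              let fsh = compileShader(file: fragShaderFileName, type: GLenum(GL_FRAGMENT_SHADER)) else { return nil }
        let program = glCreateProgram()
        glAttachShader(program, vsh)
        glAttachShader(program, fsh)
        glLinkProgram(program)
        var linked: GLint = 0
        glGetProgramiv(program, GLenum(GL_LINK_STATUS), &linked)
        glDeleteShader(vsh)
        glDeleteShader(fsh)
        if linked == GL_FALSE {
            glDeleteProgram(program)
            return nil
        }
        return program
    }

    private static func compileShader(file: String, type: GLenum) -> GLuint? {
        guard let path = Bundle.main.path(forResource: file, ofType: nil),
              let source = try? String(contentsOfFile: path, encoding: .utf8) else { return nil }
        let shader = glCreateShader(type)
        var cSource = (source as NSString).utf8String
        glShaderSource(shader, 1, &cSource, nil)
        glCompileShader(shader)
        var compiled: GLint = 0
        glGetShaderiv(shader, GLenum(GL_COMPILE_STATUS), &compiled)
        if compiled == GL_FALSE {
            glDeleteShader(shader)
            return nil
        }
        return shader
    }
}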
SHCamera.swift
//
// SHCamera.swift
// SHCamera
//
// Created by Mac on 2020/7/25.
// Copyright © 2020 Mac. All rights reserved.
//
import AVKit
import OpenGLES
import UIKit
class SHCamera: NSObject, AVCaptureAudioDataOutputSampleBufferDelegate {
var isPaused:Bool = true
var exportModel:VideoExport = VideoExport.init()
var path = VideoExport.getCurrentFilePath()
lazy var moT: GLuint = {
return self.generateTexture()
}()
let session = AVCaptureSession.init()
/// Preview view
var preview:UIView?
/// Video data output
let videoOutput:AVCaptureVideoDataOutput = AVCaptureVideoDataOutput.init()
/// Audio data output
let audioOutput:AVCaptureAudioDataOutput = AVCaptureAudioDataOutput.init()
var videoConnection: AVCaptureConnection?
var audioConnection: AVCaptureConnection?
var videoDataOutputQueue = DispatchQueue.init(label: "VideoData")
var audioDataOutputQueue = DispatchQueue.init(label: "AudioData")
let frameRenderingSemaphore = DispatchSemaphore(value:1)
var renderView:SHRenderView = SHRenderView.init(frame: UIScreen.main.bounds)
func camera(view:UIView?) {
self.preview = view
if let view = self.preview {
// An AVCaptureVideoPreviewLayer could be inserted here instead; this
// version renders the frames itself through SHRenderView.
view.insertSubview(renderView, at: 0)
renderView.frame = view.bounds
renderView.isFullYUVRange = false
renderView.setupGL()
}
self.addDeviceAudioInput()
self.addDeviceVideoInput()
self.addOutAudioDevice()
self.addOutVideoDevice()
self.exportModel.initCommon(videoSize: CGSize.init(width: 960, height: 540), outputUrl: path)
}
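// Note: sessionPreset is not configured here; callers wanting a specific
// capture resolution (for example .hd1280x720) would set it on `session`
// before startRunning().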
func addDeviceVideoInput() {
let videoDevice = self.getDevice(mediaType: AVMediaType.video, position: AVCaptureDevice.Position.back)
if let videoIn = videoDevice {
if let input = try? AVCaptureDeviceInput.init(device: videoIn) {
if session.canAddInput(input) {
session.addInput(input)
}
}
}
}
func addDeviceAudioInput() {
let audioDevice = self.getDevice(mediaType: AVMediaType.audio, position: AVCaptureDevice.Position.unspecified)
if let audioIn = audioDevice {
if let input = try? AVCaptureDeviceInput.init(device: audioIn) {
if session.canAddInput(input) {
session.addInput(input)
}
}
}
}
func addOutVideoDevice() {
videoOutput.setSampleBufferDelegate(self, queue: videoDataOutputQueue)
if session.canAddOutput(videoOutput) {
session.addOutput(videoOutput)
}
self.videoConnection = videoOutput.connection(with: AVMediaType.video)
if self.videoConnection?.isVideoOrientationSupported ?? false {
self.videoConnection?.videoOrientation = .portrait
}
}
func addOutAudioDevice() {
audioOutput.setSampleBufferDelegate(self, queue: audioDataOutputQueue)
if session.canAddOutput(audioOutput) {
session.addOutput(audioOutput)
}
self.audioConnection = audioOutput.connection(with: AVMediaType.audio)
}
func getDevice(mediaType:AVMediaType,position:AVCaptureDevice.Position) -> AVCaptureDevice? {
if mediaType == .video {
let discovery = AVCaptureDevice.DiscoverySession.init(deviceTypes: [.builtInWideAngleCamera], mediaType: mediaType, position: position)
return discovery.devices.first
}
if mediaType == .audio {
let discovery = AVCaptureDevice.DiscoverySession.init(deviceTypes: [.builtInMicrophone], mediaType: mediaType, position: position)
return discovery.devices.first
}
return nil
}
func startRunning() {
session.startRunning()
}
func startRecord() {
self.isPaused = false
}
/// Pause recording
func pause() {
self.isPaused = true
}
public func generateTexture(minFilter:Int32 = GL_LINEAR, magFilter:Int32 = GL_LINEAR, wrapS:Int32 = GL_CLAMP_TO_EDGE, wrapT:Int32 = GL_CLAMP_TO_EDGE) -> GLuint {
var texture:GLuint = 0
glActiveTexture(GLenum(GL_TEXTURE1))
glGenTextures(1, &texture)
glBindTexture(GLenum(GL_TEXTURE_2D), texture)
glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_MIN_FILTER), minFilter)
glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_MAG_FILTER), magFilter)
glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_S), wrapS)
glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_T), wrapT)
glBindTexture(GLenum(GL_TEXTURE_2D), 0)
return texture
}
}
extension SHCamera: AVCaptureVideoDataOutputSampleBufferDelegate {
func captureOutput(_ output: AVCaptureOutput, didDrop sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
print("didDrop")
}
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
if output == videoOutput {
// Video frame: hand the pixel buffer to the GL view on the main queue.
// (An earlier experiment uploaded the buffer into self.moT with
// glTexImage2D instead; see generateTexture().)
guard let cameraFrame = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
DispatchQueue.main.async {
self.renderView.display(cameraFrame)
}
}
// The recording path below is kept from the original but disabled. The
// frameRenderingSemaphore property could gate it to drop frames when the
// writer falls behind.
//
// if self.isPaused == true {
//     return
// }
// objc_sync_enter(self)
// if let assetWriter = self.exportModel.assetWriter {
//     if assetWriter.status != .writing && assetWriter.status != .unknown {
//         return
//     }
//     if assetWriter.status == .unknown {
//         assetWriter.startWriting()
//         assetWriter.startSession(atSourceTime: CMSampleBufferGetPresentationTimeStamp(sampleBuffer))
//     }
// }
// if connection == self.videoConnection, self.isPaused == false {
//     videoDataOutputQueue.async {
//         if let videoWriterInput = self.exportModel.assetWriterVideoInput, videoWriterInput.isReadyForMoreMediaData {
//             videoWriterInput.append(sampleBuffer)
//         }
//     }
// }
// if connection == self.audioConnection, self.isPaused == false {
//     audioDataOutputQueue.async {
//         if let audioWriterInput = self.exportModel.assetWriterAudioInput, audioWriterInput.isReadyForMoreMediaData {
//             audioWriterInput.append(sampleBuffer)
//         }
//     }
// }
// objc_sync_exit(self)
}
func finishWriting(ok:@escaping ()->Void) {
self.exportModel.finishWriting(ok: ok)
}
}
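To tie the pieces together, usage from a view controller would look roughly like this sketch (hypothetical: it assumes NSCameraUsageDescription and NSMicrophoneUsageDescription are present in Info.plist and that the camera/microphone permissions have been granted):
import UIKit

class CameraViewController: UIViewController {
    let camera = SHCamera()

    override func viewDidLoad() {
        super.viewDidLoad()
        // Inserts the GL render view and builds the capture pipeline.
        camera.camera(view: self.view)
        camera.startRunning()
    }

    func record() {
        camera.startRecord()
    }

    func stop() {
        camera.pause()
        camera.finishWriting {
            print("export finished")
        }
    }
}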
Shader.vsh
attribute vec4 position;
attribute vec2 texCoord;
varying vec2 texCoordVarying;
void main()
{
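// Flip y so the camera frame is not rendered upside-down in UIKit's
// top-left-origin coordinate space.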
gl_Position = vec4(position.x,-position.y,position.z,1.0);
texCoordVarying = texCoord;
}
Shader.fsh
precision mediump float;
varying highp vec2 texCoordVarying;
uniform sampler2D SamplerY;
uniform sampler2D SamplerUV;
uniform mat3 colorConversionMatrix;
void main()
{
mediump vec3 yuv;
lowp vec3 rgb;
// For video-range input, 16/255 would be subtracted from Y here; the
// subtraction is left disabled, so luma is treated as full range.
yuv.x = texture2D(SamplerY, texCoordVarying).r; // - (16.0/255.0);
yuv.yz = texture2D(SamplerUV, texCoordVarying).ra - vec2(0.5, 0.5);
rgb = colorConversionMatrix * yuv;
gl_FragColor = vec4(rgb, 1);
// Debug: uncomment to output solid red instead.
// gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0);
}
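As a quick sanity check of the conversion, the following Swift snippet performs the same matrix multiply on the CPU for one sample, using the full-range BT.601 matrix from SHRenderView (a hypothetical mid-gray pixel with neutral chroma):
// Full-range BT.601 matrix, column-major as in SHRenderView.
let m: [Float] = [
    1.0, 1.0, 1.0,
    0.0, -0.343, 1.765,
    1.4, -0.711, 0.0,
]
// yuv with the 0.5 chroma offset already subtracted; neutral chroma is (0, 0).
let yuv: [Float] = [0.5, 0.0, 0.0]
let r = m[0] * yuv[0] + m[3] * yuv[1] + m[6] * yuv[2]
let g = m[1] * yuv[0] + m[4] * yuv[1] + m[7] * yuv[2]
let b = m[2] * yuv[0] + m[5] * yuv[1] + m[8] * yuv[2]
print(r, g, b) // prints 0.5 0.5 0.5: neutral chroma comes out gray, as expected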