Implementing a Split-Screen Video Capture Effect with OpenGL ES on iOS

Author: 小而白 | Published 2021-09-13 00:10

Without further ado, here is the effect we are going for:


Figure: split-screen effect (分屏效果图.gif)

To achieve this effect, the steps are, briefly:
1. Capture video with AVFoundation
2. Upload each captured frame as a texture and map it onto the upper and lower halves of the screen

Audio/Video Capture

#import "ViewController.h"
#import <AVFoundation/AVFoundation.h>
#import "openGLManager.h"

@interface ViewController ()<AVCaptureVideoDataOutputSampleBufferDelegate,AVCaptureAudioDataOutputSampleBufferDelegate>

@property (strong, nonatomic) AVCaptureDeviceInput       *cameraInput;//camera input
@property (strong, nonatomic) AVCaptureDeviceInput       *audioMicInput;//microphone input
@property (strong, nonatomic) AVCaptureSession           *recordSession;//capture session
@property (strong, nonatomic) dispatch_queue_t           captureQueue;//sample buffer delegate queue
@property (strong, nonatomic) AVCaptureConnection        *audioConnection;//audio connection
@property (strong, nonatomic) AVCaptureConnection        *videoConnection;//video connection
@property (strong, nonatomic) AVCaptureVideoDataOutput   *videoOutput;//video data output
@property (strong, nonatomic) AVCaptureAudioDataOutput   *audioOutput;//audio data output


@property (atomic, assign) BOOL isCapturing;//recording in progress
@property (atomic, assign) CGFloat currentRecordTime;//current recording duration

@property (nonatomic,strong) openGLManager *glManager;


@end

@implementation ViewController

- (void)viewDidLoad {
    [super viewDidLoad];
    // Do any additional setup after loading the view.
        
    self.glManager = [[openGLManager alloc] initWithFrame:self.view.bounds];
    [self.view addSubview:self.glManager];
    
    [self recordSession];
    [self sessionLayerRunning];
    [self.glManager setupFilter: @"2Screen"];
}


- (void)sessionLayerRunning{
    
    dispatch_async(dispatch_get_main_queue(), ^{
        if (![self.recordSession isRunning]) {
            [self.recordSession startRunning];
        }
    });
}

- (void)sessionLayerStop{
    
    dispatch_async(dispatch_get_main_queue(), ^{
        if ([self.recordSession isRunning]) {
            [self.recordSession stopRunning];
        }
    });
}

- (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection{
    
    if (connection == self.videoConnection) {
        CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        [self.glManager renderBuffer:pixelBuffer];
    }
}

//capture session (lazily created)
- (AVCaptureSession *)recordSession {
    if (_recordSession == nil) {
        _recordSession = [[AVCaptureSession alloc] init];
        //add the camera input (front camera, see the cameraInput getter)
        if ([_recordSession canAddInput:self.cameraInput]) {
            [_recordSession addInput:self.cameraInput];
        }
        //add the microphone input
        if ([_recordSession canAddInput:self.audioMicInput]) {
            [_recordSession addInput:self.audioMicInput];
        }
        //add the video data output
        if ([_recordSession canAddOutput:self.videoOutput]) {
            [_recordSession addOutput:self.videoOutput];
        }
        //add the audio data output
        if ([_recordSession canAddOutput:self.audioOutput]) {
            [_recordSession addOutput:self.audioOutput];
        }
        
        //set the video orientation
        self.videoConnection.videoOrientation = AVCaptureVideoOrientationPortrait;
        
        //session preset (resolution)
        if ([self.recordSession canSetSessionPreset:AVCaptureSessionPreset1280x720]){
            self.recordSession.sessionPreset = AVCaptureSessionPreset1280x720;
        }
        
        //automatic white balance (the device must be locked for configuration first)
        AVCaptureDevice *camera = self.cameraInput.device;
        if ([camera isWhiteBalanceModeSupported:AVCaptureWhiteBalanceModeAutoWhiteBalance]
            && [camera lockForConfiguration:nil]) {
            [camera setWhiteBalanceMode:AVCaptureWhiteBalanceModeAutoWhiteBalance];
            [camera unlockForConfiguration];
        }
        
    }
    return _recordSession;
}

//camera device for the given position
- (AVCaptureDevice *)cameraWithPosition:(AVCaptureDevicePosition)position {
    //all default devices capable of capturing video
    //(devicesWithMediaType: is deprecated since iOS 10; AVCaptureDeviceDiscoverySession is the modern replacement)
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    //return the device at the requested position
    for (AVCaptureDevice *device in devices) {
        if ([device position] == position) {
            return device;
        }
    }
    return nil;
}

//camera input (front camera)
- (AVCaptureDeviceInput *)cameraInput {
    if (_cameraInput == nil) {
        NSError *error;
        _cameraInput = [[AVCaptureDeviceInput alloc] initWithDevice:[self cameraWithPosition:AVCaptureDevicePositionFront] error:&error];
    }
    return _cameraInput;
}

//microphone input
- (AVCaptureDeviceInput *)audioMicInput {
    if (_audioMicInput == nil) {
        AVCaptureDevice *mic = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
        NSError *error;
        _audioMicInput = [AVCaptureDeviceInput deviceInputWithDevice:mic error:&error];
    }
    return _audioMicInput;
}


//capture queue
- (dispatch_queue_t)captureQueue {
    if (_captureQueue == nil) {
        _captureQueue = dispatch_queue_create(NULL, DISPATCH_QUEUE_SERIAL);
    }
    return _captureQueue;
}

//video data output
//Note: the shader samples a single RGBA texture, so kCVPixelFormatType_32BGRA is used here;
//kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange would require a YUV-to-RGB conversion shader instead
- (AVCaptureVideoDataOutput *)videoOutput {
    if (!_videoOutput) {
        _videoOutput = [[AVCaptureVideoDataOutput alloc] init];
        [_videoOutput setSampleBufferDelegate:self queue:self.captureQueue];
        NSDictionary* setcapSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                        [NSNumber numberWithInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey,
                                        nil];
        _videoOutput.videoSettings = setcapSettings;
        _videoOutput.alwaysDiscardsLateVideoFrames = YES;
    }
    return _videoOutput;
}

//audio data output
- (AVCaptureAudioDataOutput *)audioOutput {
    if (_audioOutput == nil) {
        _audioOutput = [[AVCaptureAudioDataOutput alloc] init];
        [_audioOutput setSampleBufferDelegate:self queue:self.captureQueue];
    }
    return _audioOutput;
}

//video connection
- (AVCaptureConnection *)videoConnection {
    if (!_videoConnection) {
        _videoConnection = [self.videoOutput connectionWithMediaType:AVMediaTypeVideo];
    }
    return _videoConnection;
}

//audio connection
- (AVCaptureConnection *)audioConnection {
    if (_audioConnection == nil) {
        _audioConnection = [self.audioOutput connectionWithMediaType:AVMediaTypeAudio];
    }
    return _audioConnection;
}

@end
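
One thing the capture code above glosses over: on a real device the session only delivers frames once the user has granted camera (and microphone) access, and Info.plist must contain the corresponding usage-description keys (NSCameraUsageDescription / NSMicrophoneUsageDescription). A minimal sketch of gating startup on the permission prompt (my addition, not part of the original article) could look like this:

[AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
    if (granted) {
        // permission granted: safe to start the capture session
        dispatch_async(dispatch_get_main_queue(), ^{
            [self sessionLayerRunning];
        });
    }
}];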

OpenGL Video Preview

#import "openGLManager.h"
#import <GLKit/GLKit.h>
#import <OpenGLES/ES3/glext.h>

@interface openGLManager (){
    // renderbuffer and framebuffer objects
    GLuint renderBuffer, frameBuffer;
    //backing buffer width and height
    GLint backingWidth, backingHeight;

    CVOpenGLESTextureRef texture;

    CVOpenGLESTextureCacheRef textureCache;//Core Video texture cache
    
    //shader attribute/uniform handles
    GLuint positionSlot,textureSlot,textureCoordSlot;
    
    GLuint         vertShader, fragShader;
    
    NSMutableArray  *attributes;
    NSMutableArray  *uniforms;

}

@property (nonatomic, strong) EAGLContext *context;

@property (nonatomic, strong) CAEAGLLayer* myLayer;

// start timestamp
@property (nonatomic, assign) NSTimeInterval startTimeInterval;
// shader program
@property (nonatomic, assign) GLuint program;
// vertex buffer
@property (nonatomic, assign) GLuint vertexBuffer;
// texture ID
@property (nonatomic, assign) GLuint textureID;

@property(readwrite, copy, nonatomic) NSString *vertexShaderLog;
@property(readwrite, copy, nonatomic) NSString *fragmentShaderLog;

@end

@implementation openGLManager

+ (Class)layerClass {
    return [CAEAGLLayer class];
}

- (instancetype)initWithFrame:(CGRect)frame {
    self = [super initWithFrame:frame];
    if (self) {
        [self initFilter];
    }
    return self;
}

- (void)initFilter{
    [self setupContext];
    [self setupLayer];
    [self setupCoreVideoTextureCache];
    [self loadShaders:@"Normal"];
    [self bindRender];
}

- (void)setupFilter:(NSString*)filterName{
    [self loadShaders:filterName];
}

- (void)renderBuffer:(CVPixelBufferRef)pixelBuffer {
    CVPixelBufferLockBaseAddress(pixelBuffer, 0);
    
    if ([EAGLContext currentContext] != self.context) {
        [EAGLContext setCurrentContext:self.context];
    }
    
    glUseProgram(self.program);

    [self setDisplayFramebuffer];
    
    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT);
    
    [self cleanUpTexture];
    
    glActiveTexture(GL_TEXTURE4);
    // Create a CVOpenGLESTexture from the CVImageBuffer
    size_t frameWidth = CVPixelBufferGetWidth(pixelBuffer);
    size_t frameHeight = CVPixelBufferGetHeight(pixelBuffer);
    CVReturn ret = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
                                                                textureCache,
                                                                pixelBuffer,
                                                                NULL,
                                                                GL_TEXTURE_2D,
                                                                GL_RGBA,
                                                                (GLsizei)frameWidth,
                                                                (GLsizei)frameHeight,
                                                                GL_BGRA,
                                                                GL_UNSIGNED_BYTE,
                                                                0,
                                                                &texture);
    if (ret) {
        NSLog(@"CVOpenGLESTextureCacheCreateTextureFromImage ret: %d", ret);
    }
    glBindTexture(CVOpenGLESTextureGetTarget(texture), CVOpenGLESTextureGetName(texture));
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

    glUniform1i(textureSlot, 4);


    static const GLfloat imageVertices[] = {
        -1.0f, -1.0f,
        1.0f, -1.0f,
        -1.0f,  1.0f,
        1.0f,  1.0f,
    };
    
    static const GLfloat noRotationTextureCoordinates[] = {
        0.0f, 1.0f,
        1.0f, 1.0f,
        0.0f, 0.0f,
        1.0f, 0.0f,
    };
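    // Note: compared with imageVertices, the t axis of the texture coordinates above is
    // flipped so that row 0 of the pixel buffer (the top of the camera frame) ends up at
    // the top of the screen, compensating for OpenGL's bottom-up coordinate system.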
    
    glVertexAttribPointer(positionSlot, 2, GL_FLOAT, 0, 0, imageVertices);
    glVertexAttribPointer(textureCoordSlot, 2, GL_FLOAT, 0, 0, noRotationTextureCoordinates);
    
    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
    
    [self presentFramebuffer];
        
    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
    
}

- (void)destroyDisplayFramebuffer {
    if ([EAGLContext currentContext] != self.context) {
        [EAGLContext setCurrentContext:self.context];
    }
    
    if (frameBuffer) {
        glDeleteFramebuffers(1, &frameBuffer);
        frameBuffer = 0;
    }
    
    if (renderBuffer) {
        glDeleteRenderbuffers(1, &renderBuffer);
        renderBuffer = 0;
    }
}

- (void)cleanUpTexture {
    if(texture) {
        CFRelease(texture);
        texture = NULL;
    }
    CVOpenGLESTextureCacheFlush(textureCache, 0);
}

- (void)setDisplayFramebuffer {
    if (!frameBuffer) {
        [self bindRender];
    }
    glBindFramebuffer(GL_FRAMEBUFFER, frameBuffer);
    glViewport(0, 0, backingWidth, backingHeight);
}

- (void)presentFramebuffer {
    glBindRenderbuffer(GL_RENDERBUFFER, renderBuffer);
    
    [self.context presentRenderbuffer:GL_RENDERBUFFER];
}

//rendering context
- (void)setupContext{
    self.context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES3];
    if (!self.context) {
        self.context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
    }
    
    [EAGLContext setCurrentContext:self.context];
}
//configure the layer (CAEAGLLayer)
- (void)setupLayer{
    
    self.contentScaleFactor = [[UIScreen mainScreen] scale];
    self.myLayer = (CAEAGLLayer *)self.layer;
    self.myLayer.opaque = YES; //CALayers are transparent by default; transparency is costly to composite, so turn it off
    self.myLayer.drawableProperties = [NSDictionary dictionaryWithObjectsAndKeys:
                                   // do not retain the drawable contents after they are presented
                                   @(NO),kEAGLDrawablePropertyRetainedBacking,
                                   kEAGLColorFormatRGBA8,kEAGLDrawablePropertyColorFormat,
                                   nil];
        
}
//efficient texture cache for rendering video frames
- (void)setupCoreVideoTextureCache
{
    CVReturn result = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, self.context, NULL, &textureCache);
    if (result != kCVReturnSuccess) {
        NSLog(@"CVOpenGLESTextureCacheCreate fail %d",result);
    }
    
}
//create and bind the framebuffer and renderbuffer
- (void)bindRender{
    
    if ([EAGLContext currentContext] != self.context) {
        [EAGLContext setCurrentContext:self.context];
    }
    
    glGenFramebuffers(1, &frameBuffer);
    glBindFramebuffer(GL_FRAMEBUFFER, frameBuffer);
    
    glGenRenderbuffers(1, &renderBuffer);
    glBindRenderbuffer(GL_RENDERBUFFER, renderBuffer);
    
    [self.context renderbufferStorage:GL_RENDERBUFFER fromDrawable:self.myLayer];
    
    glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &backingWidth);
    glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &backingHeight);
    
    if ( (backingWidth == 0) || (backingHeight == 0) ) {
        NSLog(@"Backing width: 0 || height: 0");

        [self destroyDisplayFramebuffer];
        return;
    }
    
    NSLog(@"Backing width: %d, height: %d", backingWidth, backingHeight);
    
    glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, renderBuffer);
    
}

//load, compile and link the shader program
- (void)loadShaders:(NSString*)shaderFilename{
    if ([EAGLContext currentContext] != self.context) {
        [EAGLContext setCurrentContext:self.context];
    }
    
    
    self.program = glCreateProgram();
    attributes = [[NSMutableArray alloc] init];
    uniforms = [[NSMutableArray alloc] init];
    
    NSString *vertShaderPathname = [[NSBundle mainBundle] pathForResource:shaderFilename ofType:@"vsh"];
    NSString *vertexShaderString = [NSString stringWithContentsOfFile:vertShaderPathname encoding:NSUTF8StringEncoding error:nil];

    NSString *fragShaderPathname = [[NSBundle mainBundle] pathForResource:shaderFilename ofType:@"fsh"];
    NSString *fragmentShaderString = [NSString stringWithContentsOfFile:fragShaderPathname encoding:NSUTF8StringEncoding error:nil];

    if (![self compileShader:&vertShader type:GL_VERTEX_SHADER string:vertexShaderString]) {
        NSLog(@"Failed to compile vertex shader");
    }
    
    // Create and compile fragment shader
    if (![self compileShader:&fragShader  type:GL_FRAGMENT_SHADER string:fragmentShaderString]) {
        NSLog(@"Failed to compile fragment shader");
    }
    
    glAttachShader(self.program, vertShader);
    glAttachShader(self.program, fragShader);
    
    [self addAttribute:@"Position"];
    [self addAttribute:@"textureCoordinate"];
    
    if (![self link]) {
        NSString *fragLog = [self fragmentShaderLog];
        NSLog(@"Fragment shader compile log: %@", fragLog);
        NSString *vertLog = [self vertexShaderLog];
        NSLog(@"Vertex shader compile log: %@", vertLog);
        NSAssert(NO, @"Filter shader link failed");
    }
    
    positionSlot = [self attributeIndex:@"Position"];
    textureCoordSlot = [self attributeIndex:@"textureCoordinate"];
    textureSlot = [self uniformIndex:@"Texture"]; // the fragment shader declares its sampler uniform as "Texture"
    
    glUseProgram(self.program);

    glEnableVertexAttribArray(positionSlot);
    glEnableVertexAttribArray(textureCoordSlot);
}

- (GLuint)attributeIndex:(NSString *)attributeName {
    return (GLuint)[attributes indexOfObject:attributeName];
}
- (GLuint)uniformIndex:(NSString *)uniformName {
    return glGetUniformLocation(self.program, [uniformName UTF8String]);
}

- (BOOL)link {

    GLint status;
    glLinkProgram(self.program);
    glGetProgramiv(self.program, GL_LINK_STATUS, &status);
    if (status == GL_FALSE)
        return NO;
    
    if (vertShader) {
        glDeleteShader(vertShader);
        vertShader = 0;
    }
    if (fragShader) {
        glDeleteShader(fragShader);
        fragShader = 0;
    }
    return YES;
}

- (void)addAttribute:(NSString *)attributeName {
    if (![attributes containsObject:attributeName]) {
        [attributes addObject:attributeName];
        glBindAttribLocation(self.program, (GLuint)[attributes indexOfObject:attributeName], [attributeName UTF8String]);
    }
}

- (BOOL)compileShader:(GLuint *)shader type:(GLenum)type string:(NSString *)shaderString {
//    CFAbsoluteTime startTime = CFAbsoluteTimeGetCurrent();

    GLint status;
    const GLchar *source;
    
    source = (GLchar *)[shaderString UTF8String];
    if (!source) {
        NSLog(@"Failed to load vertex shader");
        return NO;
    }
    
    *shader = glCreateShader(type);
    glShaderSource(*shader, 1, &source, NULL);
    glCompileShader(*shader);
    
    glGetShaderiv(*shader, GL_COMPILE_STATUS, &status);

    if (status != GL_TRUE) {
        GLint logLength;
        glGetShaderiv(*shader, GL_INFO_LOG_LENGTH, &logLength);
        if (logLength > 0) {
            GLchar *log = (GLchar *)malloc(logLength);
            glGetShaderInfoLog(*shader, logLength, &logLength, log);
            if (shader == &vertShader) {
                self.vertexShaderLog = [NSString stringWithFormat:@"%s", log];
            } else {
                self.fragmentShaderLog = [NSString stringWithFormat:@"%s", log];
            }

            free(log);
        }
    }
    return status == GL_TRUE;
}

@end
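
The manager never tears down its GL objects. A sketch of the cleanup it arguably should do, added inside the @implementation (an assumption of mine, not part of the original code):

- (void)dealloc {
    [self destroyDisplayFramebuffer];   // also makes self.context current
    [self cleanUpTexture];
    if (textureCache) {
        CFRelease(textureCache);
        textureCache = NULL;
    }
    if (self.program) {
        glDeleteProgram(self.program);
        self.program = 0;
    }
}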

The OpenGL rendering flow above is mostly boilerplate; the part worth focusing on is the programmable-pipeline GLSL code.

Figure: programmable rendering pipeline (可编程渲染管线.png)
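
Incidentally, initFilter first loads a shader pair named "Normal" that the article never lists. A plausible pass-through version (my assumption; the author's actual files may differ) would be:

Normal.vsh:

attribute vec4 Position;
attribute vec2 textureCoordinate;
varying vec2 varyTextureCoord;

void main() {
    gl_Position = Position;
    varyTextureCoord = textureCoordinate;
}

Normal.fsh:

precision highp float;
uniform sampler2D Texture;
varying highp vec2 varyTextureCoord;

void main() {
    gl_FragColor = texture2D(Texture, varyTextureCoord);
}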

Vertex Shader

The vertex shader is read from the file "2Screen.vsh":

attribute vec4 Position;
attribute vec2 textureCoordinate; // must match the attribute name bound in loadShaders:
varying vec2 varyTextureCoord;

void main() {
    gl_Position = Position;
    varyTextureCoord = textureCoordinate;
}

Fragment Shader

The fragment shader is read from the file "2Screen.fsh":

precision highp float;
uniform sampler2D Texture;
varying highp vec2 varyTextureCoord;

void main() {
    vec2 uv = varyTextureCoord.xy;
    float y;
    if (uv.y >= 0.0 && uv.y <= 0.5) {
        y = uv.y + 0.25;
    }else {
        y = uv.y - 0.25;
    }
    gl_FragColor = texture2D(Texture, vec2(uv.x, y));
}

The heart of the split-screen effect is really just the fragment shader above: the middle band of the captured frame (y in the range 0.25–0.75) is sampled for both the upper and the lower half of the screen.
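
An equivalent, branch-free way to write the same mapping (my rewrite, not from the article) folds both halves onto the 0.25–0.75 band with mod:

    float y = mod(uv.y, 0.5) + 0.25;   // both halves sample the 0.25-0.75 band
    gl_FragColor = texture2D(Texture, vec2(uv.x, y));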

Exercise

We have implemented a 2-row, 1-column equal split. Can you extend it to an m-row, n-column grid (with m and n integers greater than or equal to 1)? Give it a try; one possible sketch follows below.
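
If you get stuck, here is one possible fragment shader for an m x n grid in which every cell shows the whole frame (a sketch only; aspect-ratio handling such as the centre-band trick above is left out):

precision highp float;
uniform sampler2D Texture;
varying highp vec2 varyTextureCoord;

const float rows = 3.0;   // m
const float cols = 2.0;   // n

void main() {
    // scale the coordinate up and wrap it, so each cell repeats the full texture
    vec2 uv = fract(varyTextureCoord * vec2(cols, rows));
    gl_FragColor = texture2D(Texture, uv);
}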

