Decoded video frames can be rendered to the screen with OpenGL ES to implement video playback. OpenGL ES draws into a CAEAGLLayer, so we subclass it with a custom class, SQLayer, that encapsulates video rendering.
SQLayer exposes a pixelBuffer property that receives each decoded frame:
@property CVPixelBufferRef pixelBuffer;
- (id)initWithFrame:(CGRect)frame;
- (void)resetRenderBuffer;
Using OpenGL ES mostly comes down to writing a vertex shader and a fragment shader. The quad we draw has to be computed from the aspect ratio of the incoming frame and the layer's own frame: AVMakeRectWithAspectRatioInsideRect returns the largest rectangle that fits inside the layer's bounds while keeping the frame's aspect ratio unchanged. The vertex coordinates we write are not screen coordinates but object (clip-space) coordinates in the range (-1, 1), so the rectangle must be converted into that range, i.e. normalized.
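To make the two steps concrete, here is a minimal sketch of the aspect-fit and normalization math. The function name NormalizedQuadSize is ours; the same computation appears later inside displayPixelBuffer:.

#import <AVFoundation/AVFoundation.h>

static CGSize NormalizedQuadSize(CGSize contentSize, CGRect viewBounds) {
    // Largest rect that fits the layer while preserving the frame's aspect ratio.
    CGRect fit = AVMakeRectWithAspectRatioInsideRect(contentSize, viewBounds);
    // Per-axis scale of that rect relative to the layer...
    CGSize crop = CGSizeMake(fit.size.width / viewBounds.size.width,
                             fit.size.height / viewBounds.size.height);
    // ...then stretch the larger axis to fill (-1, 1) and shrink the other
    // proportionally. The result is the half-extent of the quad in clip space.
    if (crop.width > crop.height) {
        return CGSizeMake(1.0, crop.height / crop.width);
    }
    return CGSizeMake(crop.width / crop.height, 1.0);
}

For example, a 1920x1080 frame in a square layer yields {1.0, 0.5625}: the quad spans the full width and 56.25% of the height.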
YUV
Our video is compressed and encoded as YUV:
- YUV (YCbCr) is the color encoding method adopted by television systems
- Y is luminance, i.e. the grayscale value; it is the base signal
- U and V are chrominance; they describe hue and saturation
Common formats (compared with RGB 8:8:8):
- YUV 4:2:0 (YCbCr 4:2:0): half the size of RGB
- YUV 4:2:2 (YCbCr 4:2:2): one third smaller than RGB
- YUV 4:4:4 (YCbCr 4:4:4): the same size as RGB
YUV 4:2:0 is the most common today. The name does not mean 2 Cb samples and 0 Cr samples per pixel; it means each scan line stores only one of the two chroma components, sampled at a 2:1 ratio. If one line is 4:2:0, the next line is 4:0:2, so Cb and Cr alternate line by line.
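To make the savings concrete, here is the byte count per 1920x1080 frame at 8 bits per sample; the arithmetic follows directly from the sampling ratios above:

size_t w = 1920, h = 1080;
size_t rgb    = w * h * 3;        // 6,220,800 bytes (8:8:8)
size_t yuv444 = w * h * 3;        // 6,220,800 bytes, same as RGB
size_t yuv422 = w * h * 2;        // 4,147,200 bytes, one third smaller
size_t yuv420 = w * h * 3 / 2;    // 3,110,400 bytes, half of RGB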
In the fragment shader, RGB is recovered from YUV with either the SDTV or the HDTV conversion matrix (laid out column-major, as glUniformMatrix3fv expects):
// BT.601, which is the standard for SDTV.
static const GLfloat kColorConversion601[] = {
    1.164,  1.164, 1.164,
    0.0,   -0.392, 2.017,
    1.596, -0.813, 0.0,
};
// BT.709, which is the standard for HDTV.
static const GLfloat kColorConversion709[] = {
    1.164,  1.164, 1.164,
    0.0,   -0.213, 2.112,
    1.793, -0.533, 0.0,
};
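As a quick sanity check on the BT.601 numbers: video-range white is Y=235, Cb=Cr=128. The shader first subtracts the offsets, giving y = (235-16)/255 ≈ 0.8588 and u = v = 0; the matrix then yields R = G = B = 1.164 × 0.8588 ≈ 1.0, i.e. pure white, and video-range black (Y=16) maps to 0 the same way.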
Note also that YUV is decoded into two textures: a Y (luma) texture holding the grayscale image, and a UV texture that fills in the color. The shader samples both, subtracts the video-range offsets (16/255 from Y, 0.5 from each of Cb and Cr), and multiplies by the conversion matrix.
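The two-plane layout corresponds to Core Video's bi-planar ("NV12") pixel formats, where plane 0 is Y and plane 1 is interleaved CbCr. A decoder can be asked for that format explicitly; a minimal sketch, assuming a VTDecompressionSession is created elsewhere:

// Destination attributes requesting a bi-planar Y + interleaved CbCr buffer.
// This is what makes CVPixelBufferGetPlaneCount(pixelBuffer) == 2 below.
NSDictionary *attrs = @{
    (id)kCVPixelBufferPixelFormatTypeKey :
        @(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange), // NV12, video range
    (id)kCVPixelBufferOpenGLESCompatibilityKey : @YES       // usable as GL textures
};
// attrs would be passed as the destination image buffer attributes when
// creating the VTDecompressionSession (session setup itself is omitted).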
The full source is as follows:
//
// SQLayer.h
// CPDemo
//
// Created by Sem on 2020/8/12.
// Copyright © 2020 SEM. All rights reserved.
//
#import <QuartzCore/QuartzCore.h>
#include <CoreVideo/CoreVideo.h>
#import <UIKit/UIKit.h>
NS_ASSUME_NONNULL_BEGIN
@interface SQLayer : CAEAGLLayer
// Latest decoded frame; setting this property renders the frame immediately.
@property CVPixelBufferRef pixelBuffer;
- (id)initWithFrame:(CGRect)frame;
// Recreates the framebuffer/renderbuffer, e.g. after the layer is resized.
- (void)resetRenderBuffer;
@end
NS_ASSUME_NONNULL_END
//
// SQLayer.m
// CPDemo
//
// Created by Sem on 2020/8/12.
// Copyright © 2020 SEM. All rights reserved.
//
#import <AVFoundation/AVFoundation.h>
#include <OpenGLES/EAGL.h>
#include <OpenGLES/ES2/gl.h>
#include <OpenGLES/ES2/glext.h>
#import "SQLayer.h"
// Uniform index.
enum
{
    UNIFORM_Y,
    UNIFORM_UV,
    UNIFORM_ROTATION_ANGLE,
    UNIFORM_COLOR_CONVERSION_MATRIX,
    NUM_UNIFORMS
};
GLint uniforms[NUM_UNIFORMS];
// Attribute index.
enum
{
    ATTRIB_VERTEX,
    ATTRIB_TEXCOORD,
    NUM_ATTRIBUTES
};
// BT.601, which is the standard for SDTV (column-major, for glUniformMatrix3fv).
static const GLfloat kColorConversion601[] = {
    1.164,  1.164, 1.164,
    0.0,   -0.392, 2.017,
    1.596, -0.813, 0.0,
};
// BT.709, which is the standard for HDTV (column-major, for glUniformMatrix3fv).
static const GLfloat kColorConversion709[] = {
    1.164,  1.164, 1.164,
    0.0,   -0.213, 2.112,
    1.793, -0.533, 0.0,
};
@interface SQLayer ()
{
    GLint _backingWidth;
    GLint _backingHeight;
    EAGLContext *_context;
    CVOpenGLESTextureRef _lumaTexture;
    CVOpenGLESTextureRef _chromaTexture;
    GLuint _frameBufferHandle;
    GLuint _colorBufferHandle;
    const GLfloat *_preferredConversion;
}
@property GLuint program;
@end

@implementation SQLayer
@synthesize pixelBuffer = _pixelBuffer;
- (CVPixelBufferRef)pixelBuffer
{
    return _pixelBuffer;
}

// Setting the pixel buffer releases the previous frame, retains the new one,
// and renders it right away.
- (void)setPixelBuffer:(CVPixelBufferRef)pixelBuffer
{
    if (_pixelBuffer) {
        CVPixelBufferRelease(_pixelBuffer);
    }
    _pixelBuffer = CVPixelBufferRetain(pixelBuffer);
    int frameWidth = (int)CVPixelBufferGetWidth(_pixelBuffer);
    int frameHeight = (int)CVPixelBufferGetHeight(_pixelBuffer);
    [self displayPixelBuffer:_pixelBuffer width:frameWidth height:frameHeight];
}
- (void)displayPixelBuffer:(CVPixelBufferRef)pixelBuffer width:(uint32_t)frameWidth height:(uint32_t)frameHeight
{
    if (!_context || ![EAGLContext setCurrentContext:_context]) {
        return;
    }
    if (pixelBuffer == NULL) {
        NSLog(@"Pixel buffer is null");
        return;
    }
    CVReturn err;
    size_t planeCount = CVPixelBufferGetPlaneCount(pixelBuffer);
    // Pick the conversion matrix that matches the buffer's color attachment.
    // The attachment may be missing, so check for NULL before comparing.
    CFTypeRef colorAttachments = CVBufferGetAttachment(pixelBuffer, kCVImageBufferYCbCrMatrixKey, NULL);
    if (colorAttachments != NULL &&
        CFStringCompare(colorAttachments, kCVImageBufferYCbCrMatrix_ITU_R_601_4, 0) == kCFCompareEqualTo) {
        _preferredConversion = kColorConversion601;
    }
    else {
        _preferredConversion = kColorConversion709;
    }
    /*
     CVOpenGLESTextureCacheCreateTextureFromImage will create GLES texture optimally from CVPixelBufferRef.
     */
    /*
     Create Y and UV textures from the pixel buffer. These textures will be drawn on the frame buffer Y-plane.
     */
    // Note: the texture cache is created and released on every frame here;
    // a production implementation would create it once and reuse it.
    CVOpenGLESTextureCacheRef _videoTextureCache;
    err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, _context, NULL, &_videoTextureCache);
    if (err != noErr) {
        NSLog(@"Error at CVOpenGLESTextureCacheCreate %d", err);
        return;
    }
    // Y (luma) plane -> texture unit 0, one red channel per pixel.
    glActiveTexture(GL_TEXTURE0);
    err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, _videoTextureCache, pixelBuffer, NULL, GL_TEXTURE_2D, GL_RED_EXT, frameWidth, frameHeight, GL_RED_EXT, GL_UNSIGNED_BYTE, 0, &_lumaTexture);
    if (err) {
        NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
    }
    glBindTexture(CVOpenGLESTextureGetTarget(_lumaTexture), CVOpenGLESTextureGetName(_lumaTexture));
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    if (planeCount == 2) {
        // UV (chroma) plane -> texture unit 1, two channels (Cb, Cr) at half
        // the luma resolution, per the 4:2:0 subsampling described above.
        glActiveTexture(GL_TEXTURE1);
        err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
                                                           _videoTextureCache,
                                                           pixelBuffer,
                                                           NULL,
                                                           GL_TEXTURE_2D,
                                                           GL_RG_EXT,
                                                           frameWidth / 2,
                                                           frameHeight / 2,
                                                           GL_RG_EXT,
                                                           GL_UNSIGNED_BYTE,
                                                           1,
                                                           &_chromaTexture);
        if (err) {
            NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
        }
        glBindTexture(CVOpenGLESTextureGetTarget(_chromaTexture), CVOpenGLESTextureGetName(_chromaTexture));
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    }
    glBindFramebuffer(GL_FRAMEBUFFER, _frameBufferHandle);
    glViewport(0, 0, _backingWidth, _backingHeight);
    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT);
    // Use shader program.
    glUseProgram(self.program);
    // glUniform1f(uniforms[UNIFORM_LUMA_THRESHOLD], 1);
    // glUniform1f(uniforms[UNIFORM_CHROMA_THRESHOLD], 1);
    glUniform1f(uniforms[UNIFORM_ROTATION_ANGLE], 0);
    glUniformMatrix3fv(uniforms[UNIFORM_COLOR_CONVERSION_MATRIX], 1, GL_FALSE, _preferredConversion);
    // Set up the quad vertices with respect to the orientation and aspect ratio of the video.
    CGRect viewBounds = self.bounds;
    CGSize contentSize = CGSizeMake(frameWidth, frameHeight);
    // The largest rectangle that fits the bounds while keeping the frame's aspect ratio.
    CGRect vertexSamplingRect = AVMakeRectWithAspectRatioInsideRect(contentSize, viewBounds);
    // Compute normalized quad coordinates to draw the frame into.
    CGSize normalizedSamplingSize = CGSizeMake(0.0, 0.0);
    CGSize cropScaleAmount = CGSizeMake(vertexSamplingRect.size.width / viewBounds.size.width,
                                        vertexSamplingRect.size.height / viewBounds.size.height);
    // Normalize the quad vertices.
    if (cropScaleAmount.width > cropScaleAmount.height) {
        normalizedSamplingSize.width = 1.0;
        normalizedSamplingSize.height = cropScaleAmount.height / cropScaleAmount.width;
    }
    else {
        normalizedSamplingSize.width = cropScaleAmount.width / cropScaleAmount.height;
        normalizedSamplingSize.height = 1.0;
    }
    // The quad is symmetric about the origin in clip space, so the normalized
    // size acts as a half-extent on each axis.
    GLfloat quadVertexData[] = {
        -1 * normalizedSamplingSize.width, -1 * normalizedSamplingSize.height,
             normalizedSamplingSize.width, -1 * normalizedSamplingSize.height,
        -1 * normalizedSamplingSize.width,      normalizedSamplingSize.height,
             normalizedSamplingSize.width,      normalizedSamplingSize.height,
    };
    glVertexAttribPointer(ATTRIB_VERTEX, 2, GL_FLOAT, 0, 0, quadVertexData);
    glEnableVertexAttribArray(ATTRIB_VERTEX);
    // The texture coordinates are vertically flipped relative to the vertex
    // order to match the pixel buffer's top-down row order.
    CGRect textureSamplingRect = CGRectMake(0, 0, 1, 1);
    GLfloat quadTextureData[] = {
        CGRectGetMinX(textureSamplingRect), CGRectGetMaxY(textureSamplingRect),
        CGRectGetMaxX(textureSamplingRect), CGRectGetMaxY(textureSamplingRect),
        CGRectGetMinX(textureSamplingRect), CGRectGetMinY(textureSamplingRect),
        CGRectGetMaxX(textureSamplingRect), CGRectGetMinY(textureSamplingRect)
    };
    glVertexAttribPointer(ATTRIB_TEXCOORD, 2, GL_FLOAT, 0, 0, quadTextureData);
    glEnableVertexAttribArray(ATTRIB_TEXCOORD);
    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
    glBindRenderbuffer(GL_RENDERBUFFER, _colorBufferHandle);
    [_context presentRenderbuffer:GL_RENDERBUFFER];
    [self cleanUpTextures];
    CVOpenGLESTextureCacheFlush(_videoTextureCache, 0);
    if (_videoTextureCache) {
        CFRelease(_videoTextureCache);
    }
}
- (instancetype)initWithFrame:(CGRect)frame
{
    self = [super init];
    if (self) {
        CGFloat scale = [[UIScreen mainScreen] scale];
        self.contentsScale = scale;
        // Whether the layer contains fully opaque content. Defaults to NO.
        self.opaque = YES;
        /*
         kEAGLDrawablePropertyRetainedBacking: whether the drawable surface
         retains its contents after they are displayed. Defaults to NO.
         */
        self.drawableProperties = @{ kEAGLDrawablePropertyRetainedBacking : [NSNumber numberWithBool:YES] };
        // Set the layer's frame.
        [self setFrame:frame];
        // Create the OpenGL ES 2.0 context used for drawing.
        _context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
        if (!_context) {
            return nil;
        }
        // Default the conversion to BT.709, the HDTV standard.
        _preferredConversion = kColorConversion709;
        [self setupGL];
    }
    return self;
}
// OpenGL-related setup.
- (void)setupGL
{
    if (!_context || ![EAGLContext setCurrentContext:_context]) {
        return;
    }
    [self setupBuffers];
    [self loadShaders];
    glUseProgram(self.program);
    glUniform1i(uniforms[UNIFORM_Y], 0);
    glUniform1i(uniforms[UNIFORM_UV], 1);
    // preferredRotation is a float uniform, so it must be set with glUniform1f.
    glUniform1f(uniforms[UNIFORM_ROTATION_ANGLE], 0);
    glUniformMatrix3fv(uniforms[UNIFORM_COLOR_CONVERSION_MATRIX], 1, GL_FALSE, _preferredConversion);
}
- (void)setupBuffers
{
    glDisable(GL_DEPTH_TEST);
    // The attribute pointers set here are placeholders; displayPixelBuffer:
    // re-specifies them with real vertex data before each draw.
    glEnableVertexAttribArray(ATTRIB_VERTEX);
    glVertexAttribPointer(ATTRIB_VERTEX, 2, GL_FLOAT, GL_FALSE, 2 * sizeof(GLfloat), 0);
    glEnableVertexAttribArray(ATTRIB_TEXCOORD);
    glVertexAttribPointer(ATTRIB_TEXCOORD, 2, GL_FLOAT, GL_FALSE, 2 * sizeof(GLfloat), 0);
    [self createBuffers];
}

- (void)createBuffers
{
    glGenFramebuffers(1, &_frameBufferHandle);
    glBindFramebuffer(GL_FRAMEBUFFER, _frameBufferHandle);
    glGenRenderbuffers(1, &_colorBufferHandle);
    glBindRenderbuffer(GL_RENDERBUFFER, _colorBufferHandle);
    // The renderbuffer's storage comes from the layer itself.
    [_context renderbufferStorage:GL_RENDERBUFFER fromDrawable:self];
    glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &_backingWidth);
    glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &_backingHeight);
    glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, _colorBufferHandle);
    if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) {
        NSLog(@"Failed to make complete framebuffer object %x", glCheckFramebufferStatus(GL_FRAMEBUFFER));
    }
}
#pragma mark - OpenGL ES 2 shader compilation
// Fragment shader source.
const GLchar *shader_fsh = (const GLchar *)
    "precision mediump float;"
    "varying highp vec2 texCoordVarying;"
    "uniform sampler2D SamplerY;"
    "uniform sampler2D SamplerUV;"
    "uniform mat3 colorConversionMatrix;"
    "void main()"
    "{"
    "    mediump vec3 yuv;"
    "    lowp vec3 rgb;"
    // Subtract constants so the video range starts at 0.
    "    yuv.x = (texture2D(SamplerY, texCoordVarying).r - (16.0/255.0));"
    "    yuv.yz = (texture2D(SamplerUV, texCoordVarying).rg - vec2(0.5, 0.5));"
    "    rgb = colorConversionMatrix * yuv;"
    "    gl_FragColor = vec4(rgb, 1);"
    "}";
// Vertex shader source.
const GLchar *shader_vsh = (const GLchar *)
    "attribute vec4 position;"
    "attribute vec2 texCoord;"
    "uniform float preferredRotation;"
    "varying vec2 texCoordVarying;"
    "void main()"
    "{"
    "    mat4 rotationMatrix = mat4(cos(preferredRotation), -sin(preferredRotation), 0.0, 0.0,"
    "                               sin(preferredRotation),  cos(preferredRotation), 0.0, 0.0,"
    "                               0.0, 0.0, 1.0, 0.0,"
    "                               0.0, 0.0, 0.0, 1.0);"
    "    gl_Position = position * rotationMatrix;"
    "    texCoordVarying = texCoord;"
    "}";
- (BOOL)loadShaders
{
    GLuint vertShader = 0, fragShader = 0;
    self.program = glCreateProgram();
    if (![self compileShaderString:&vertShader type:GL_VERTEX_SHADER shaderString:shader_vsh]) {
        NSLog(@"Failed to compile vertex shader");
        return NO;
    }
    if (![self compileShaderString:&fragShader type:GL_FRAGMENT_SHADER shaderString:shader_fsh]) {
        NSLog(@"Failed to compile fragment shader");
        return NO;
    }
    glAttachShader(self.program, vertShader);
    glAttachShader(self.program, fragShader);
    // Bind attribute locations. This must be done before linking.
    glBindAttribLocation(self.program, ATTRIB_VERTEX, "position");
    glBindAttribLocation(self.program, ATTRIB_TEXCOORD, "texCoord");
    // Link the program.
    if (![self linkProgram:self.program]) {
        NSLog(@"Failed to link program: %d", self.program);
        if (vertShader) {
            glDeleteShader(vertShader);
            vertShader = 0;
        }
        if (fragShader) {
            glDeleteShader(fragShader);
            fragShader = 0;
        }
        if (self.program) {
            glDeleteProgram(self.program);
            self.program = 0;
        }
        return NO;
    }
    // Get uniform locations.
    uniforms[UNIFORM_Y] = glGetUniformLocation(self.program, "SamplerY");
    uniforms[UNIFORM_UV] = glGetUniformLocation(self.program, "SamplerUV");
    // uniforms[UNIFORM_LUMA_THRESHOLD] = glGetUniformLocation(self.program, "lumaThreshold");
    // uniforms[UNIFORM_CHROMA_THRESHOLD] = glGetUniformLocation(self.program, "chromaThreshold");
    uniforms[UNIFORM_ROTATION_ANGLE] = glGetUniformLocation(self.program, "preferredRotation");
    uniforms[UNIFORM_COLOR_CONVERSION_MATRIX] = glGetUniformLocation(self.program, "colorConversionMatrix");
    // Release vertex and fragment shaders.
    if (vertShader) {
        glDetachShader(self.program, vertShader);
        glDeleteShader(vertShader);
    }
    if (fragShader) {
        glDetachShader(self.program, fragShader);
        glDeleteShader(fragShader);
    }
    return YES;
}
- (BOOL)compileShaderString:(GLuint *)shader type:(GLenum)type shaderString:(const GLchar *)shaderString
{
    *shader = glCreateShader(type);
    glShaderSource(*shader, 1, &shaderString, NULL);
    glCompileShader(*shader);
#if defined(DEBUG)
    GLint logLength;
    glGetShaderiv(*shader, GL_INFO_LOG_LENGTH, &logLength);
    if (logLength > 0) {
        GLchar *log = (GLchar *)malloc(logLength);
        glGetShaderInfoLog(*shader, logLength, &logLength, log);
        NSLog(@"Shader compile log:\n%s", log);
        free(log);
    }
#endif
    GLint status = 0;
    glGetShaderiv(*shader, GL_COMPILE_STATUS, &status);
    if (status == 0) {
        glDeleteShader(*shader);
        return NO;
    }
    return YES;
}
- (BOOL)compileShader:(GLuint *)shader type:(GLenum)type URL:(NSURL *)URL
{
    NSError *error;
    NSString *sourceString = [[NSString alloc] initWithContentsOfURL:URL encoding:NSUTF8StringEncoding error:&error];
    if (sourceString == nil) {
        NSLog(@"Failed to load shader source: %@", [error localizedDescription]);
        return NO;
    }
    const GLchar *source = (GLchar *)[sourceString UTF8String];
    return [self compileShaderString:shader type:type shaderString:source];
}
- (BOOL)linkProgram:(GLuint)prog
{
    GLint status;
    glLinkProgram(prog);
#if defined(DEBUG)
    GLint logLength;
    glGetProgramiv(prog, GL_INFO_LOG_LENGTH, &logLength);
    if (logLength > 0) {
        GLchar *log = (GLchar *)malloc(logLength);
        glGetProgramInfoLog(prog, logLength, &logLength, log);
        NSLog(@"Program link log:\n%s", log);
        free(log);
    }
#endif
    glGetProgramiv(prog, GL_LINK_STATUS, &status);
    if (status == 0) {
        return NO;
    }
    return YES;
}
- (BOOL)validateProgram:(GLuint)prog
{
    GLint logLength, status;
    glValidateProgram(prog);
    glGetProgramiv(prog, GL_INFO_LOG_LENGTH, &logLength);
    if (logLength > 0) {
        GLchar *log = (GLchar *)malloc(logLength);
        glGetProgramInfoLog(prog, logLength, &logLength, log);
        NSLog(@"Program validate log:\n%s", log);
        free(log);
    }
    glGetProgramiv(prog, GL_VALIDATE_STATUS, &status);
    if (status == 0) {
        return NO;
    }
    return YES;
}
- (void)dealloc
{
    if (!_context || ![EAGLContext setCurrentContext:_context]) {
        return;
    }
    [self cleanUpTextures];
    if (_pixelBuffer) {
        CVPixelBufferRelease(_pixelBuffer);
    }
    if (self.program) {
        glDeleteProgram(self.program);
        self.program = 0;
    }
    // Under ARC the context just needs to be nilled out.
    _context = nil;
}
- (void)cleanUpTextures
{
    if (_lumaTexture) {
        CFRelease(_lumaTexture);
        _lumaTexture = NULL;
    }
    if (_chromaTexture) {
        CFRelease(_chromaTexture);
        _chromaTexture = NULL;
    }
}

- (void)resetRenderBuffer
{
    if (!_context || ![EAGLContext setCurrentContext:_context]) {
        return;
    }
    [self releaseBuffers];
    [self createBuffers];
}

- (void)releaseBuffers
{
    if (_frameBufferHandle) {
        glDeleteFramebuffers(1, &_frameBufferHandle);
        _frameBufferHandle = 0;
    }
    if (_colorBufferHandle) {
        glDeleteRenderbuffers(1, &_colorBufferHandle);
        _colorBufferHandle = 0;
    }
}
@end
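With the class in place, driving it is straightforward. Here is a hypothetical usage sketch; the surrounding view controller, the decoder, and decodedPixelBuffer are placeholders, not part of the source above:

// Attach an SQLayer to a view and feed it decoded frames.
SQLayer *playerLayer = [[SQLayer alloc] initWithFrame:self.view.bounds];
[self.view.layer addSublayer:playerLayer];

// In the decoder's output callback (e.g. a VTDecompressionSession callback),
// hand each decoded CVPixelBufferRef to the layer on the main thread; the
// setter retains the buffer and renders it immediately.
dispatch_async(dispatch_get_main_queue(), ^{
    playerLayer.pixelBuffer = decodedPixelBuffer;
});

// If the layer is resized (for example on device rotation), rebuild the
// render buffers so the backing store matches the new size:
[playerLayer resetRenderBuffer];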