美文网首页
iOS OpenGL图像实现

iOS OpenGL图像实现

作者: andy_tu | 来源:发表于2016-12-14 11:08 被阅读0次

1.创建CAEAGLLayer

CAEAGLLayer *eaglLayer = (CAEAGLLayer *)self.layer;

eaglLayer.opaque = YES;

eaglLayer.drawableProperties = [NSDictionary dictionaryWithObjectsAndKeys:

[NSNumber numberWithBool:NO], kEAGLDrawablePropertyRetainedBacking,

kEAGLColorFormatRGB565, kEAGLDrawablePropertyColorFormat,

//[NSNumber numberWithBool:YES], kEAGLDrawablePropertyRetainedBacking,

nil];

2.创建上下文

_glContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];

//[self debugGlError];

if (!_glContext || ![EAGLContext setCurrentContext:_glContext])

{

    return NO;

}

3.设置frame和renderbuffer

[self createFrameAndRenderBuffer];

4.设置纹理

[self setupYUVTexture];

/// Creates the framebuffer/renderbuffer pair backing this view's CAEAGLLayer
/// and attaches the renderbuffer as the color attachment.
/// @return YES when the framebuffer is complete and ready to draw into.
- (BOOL)createFrameAndRenderBuffer
{
    glGenFramebuffers(1, &_framebuffer);
    glGenRenderbuffers(1, &_renderBuffer);
    glBindFramebuffer(GL_FRAMEBUFFER, _framebuffer);
    glBindRenderbuffer(GL_RENDERBUFFER, _renderBuffer);

    // Allocate renderbuffer storage directly from the layer's drawable.
    if (![_glContext renderbufferStorage:GL_RENDERBUFFER fromDrawable:(CAEAGLLayer *)self.layer])
    {
        // Storage attach failed; the completeness check below reports it.
        NSLog(@"attach渲染缓冲区失败");
    }

    glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, _renderBuffer);

    // Query the status once and reuse it instead of calling twice.
    GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
    if (status != GL_FRAMEBUFFER_COMPLETE)
    {
        NSLog(@"创建缓冲区错误0x%x", status);
        return NO;
    }
    return YES;
}

/// Creates (or recreates) the three single-channel textures holding the
/// Y, U and V planes. Each plane is bound to its own texture unit
/// (Y -> GL_TEXTURE0, U -> GL_TEXTURE1, V -> GL_TEXTURE2) with linear
/// filtering and edge clamping.
- (void)setupYUVTexture
{
    if (_textureYUV[TEXY])
    {
        // Drop previously created planes before generating fresh ones.
        glDeleteTextures(3, _textureYUV);
    }

    glGenTextures(3, _textureYUV);
    if (!_textureYUV[TEXY] || !_textureYUV[TEXU] || !_textureYUV[TEXV])
    {
        NSLog(@"<<<<<<<<<<<<纹理创建失败!>>>>>>>>>>>>");
        return;
    }

    // Same unit-to-plane assignment and sampling state as before, expressed
    // once instead of three copies.
    const GLuint planes[3] = { _textureYUV[TEXY], _textureYUV[TEXU], _textureYUV[TEXV] };
    for (int unit = 0; unit < 3; unit++)
    {
        glActiveTexture(GL_TEXTURE0 + unit);
        glBindTexture(GL_TEXTURE_2D, planes[unit]);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    }
}

/// Draws the currently uploaded YUV planes over a full-screen quad and
/// presents the renderbuffer. Assumes the shader program and the
/// ATTRIB_VERTEX / ATTRIB_TEXTURE attribute slots were configured elsewhere.
- (void)render
{
    //[EAGLContext setCurrentContext:_glContext];

    CGSize size = self.bounds.size;

    NSString *isStretch = [[UserClient shareUserClient] getVideoStretch];
    BOOL isVideoPreView = [[UserClient shareUserClient] videoPreviewView];

    if ([isStretch isEqualToString:@"NO"] && isVideoPreView) {
        // Letterbox: preserve the video aspect ratio, centered horizontally,
        // leaving a 1px border top/bottom.
        CGFloat videoW = _videoW * size.height / _videoH;
        CGFloat videoX = (size.width - videoW) * 0.5;
        glViewport(videoX * _viewScale, 1, videoW * _viewScale, size.height * _viewScale - 2);
    } else {
        // Stretch to fill the whole view, minus a 1px border on each side.
        glViewport(1, 1, size.width * _viewScale - 2, size.height * _viewScale - 2);
    }

    // Full-screen quad as a triangle strip.
    static const GLfloat squareVertices[] = {
        -1.0f, -1.0f,
         1.0f, -1.0f,
        -1.0f,  1.0f,
         1.0f,  1.0f,
    };

    // Texture coordinates flipped vertically so the image is upright.
    static const GLfloat coordVertices[] = {
        0.0f, 1.0f,
        1.0f, 1.0f,
        0.0f, 0.0f,
        1.0f, 0.0f,
    };

    // Update attribute values.
    glVertexAttribPointer(ATTRIB_VERTEX, 2, GL_FLOAT, 0, 0, squareVertices);
    glEnableVertexAttribArray(ATTRIB_VERTEX);
    glVertexAttribPointer(ATTRIB_TEXTURE, 2, GL_FLOAT, 0, 0, coordVertices);
    glEnableVertexAttribArray(ATTRIB_TEXTURE);

    // Draw.
    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);

    glBindRenderbuffer(GL_RENDERBUFFER, _renderBuffer);
    if (_glContext) {
        // Only present while the app is active; presenting OpenGL content in
        // the background can get the process killed by the system.
        if ([UIApplication sharedApplication].applicationState == UIApplicationStateActive) {
            [_glContext presentRenderbuffer:GL_RENDERBUFFER];
        }
    }
}

/// Uploads one planar YUV 4:2:0 frame into the three plane textures and,
/// if all uploads succeed, renders it.
/// @param data Pointer to a full frame: a w*h luma plane followed by two
///             (w/2)*(h/2) chroma planes (w*h*3/2 bytes total).
/// @param w Frame width in pixels.
/// @param h Frame height in pixels.
- (void)displayYUV420pData:(void *)data width:(NSInteger)w height:(NSInteger)h
{
    @synchronized(self)
    {
        if (w != _videoW || h != _videoH)
        {
            // Frame size changed: (re)allocate the plane textures first.
            [self setVideoSize:w height:h];
        }
        [EAGLContext setCurrentContext:_glContext];

        // Byte pointer for standards-conforming plane-offset arithmetic
        // (arithmetic on void* is a compiler extension).
        const uint8_t *bytes = (const uint8_t *)data;

        if (_textureYUV[TEXY]) {
            glBindTexture(GL_TEXTURE_2D, _textureYUV[TEXY]);
        }
        glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, w, h, GL_RED_EXT, GL_UNSIGNED_BYTE, bytes);

        // NOTE(review): these chroma offsets are the reverse of the ones used
        // in -setVideoSize:height: — here TEXU reads from w*h*5/4 and TEXV
        // from w*h. For standard I420 data U starts at w*h and V at w*h*5/4;
        // confirm the decoder's plane order (this matches YV12).
        if (_textureYUV[TEXU]) {
            glBindTexture(GL_TEXTURE_2D, _textureYUV[TEXU]);
        }
        glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, w / 2, h / 2, GL_RED_EXT, GL_UNSIGNED_BYTE, bytes + w * h * 5 / 4);

        if (_textureYUV[TEXV]) {
            glBindTexture(GL_TEXTURE_2D, _textureYUV[TEXV]);
        }
        glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, w / 2, h / 2, GL_RED_EXT, GL_UNSIGNED_BYTE, bytes + w * h);

        // Only draw when every upload succeeded.
        GLenum err = glGetError();
        if (err == GL_NO_ERROR)
        {
            [self render];
        }
    }

#ifdef DEBUG
    // Report any lingering GL error and print a once-per-second frame count.
    GLenum err = glGetError();
    if (err != GL_NO_ERROR)
    {
        printf("GL_ERROR=======>%d\n", err);
    }

    struct timeval nowtime;
    gettimeofday(&nowtime, NULL);
    if (nowtime.tv_sec != _time.tv_sec)
    {
        printf("视频%zd帧率:%zd\n", self.tag, _frameRate);
        memcpy(&_time, &nowtime, sizeof(struct timeval));
        _frameRate = 1;
    }
    else
    {
        _frameRate++;
    }
#endif
}

/// Records the new video dimensions and (re)allocates the three plane
/// textures at the matching sizes, initialized from a zero-filled buffer.
/// @param width  New frame width in pixels.
/// @param height New frame height in pixels.
- (void)setVideoSize:(GLuint)width height:(GLuint)height
{
    _videoW = width;
    _videoH = height;

    // One full 4:2:0 frame: Y (w*h) + two quarter-size chroma planes = w*h*3/2.
    size_t frameSize = (size_t)width * height * 3 / 2;
    uint8_t *blackData = malloc(frameSize);
    if (!blackData)
    {
        // BUG FIX: the original guarded only the memset (missing braces) and
        // then uploaded from the failed pointer anyway. Bail out instead.
        return;
    }
    // NOTE(review): all-zero chroma renders green, not black — 0x80 chroma
    // would be true black; kept as 0x0 to preserve existing behavior.
    memset(blackData, 0x0, frameSize);

    [EAGLContext setCurrentContext:_glContext];

    glBindTexture(GL_TEXTURE_2D, _textureYUV[TEXY]);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RED_EXT, width, height, 0, GL_RED_EXT, GL_UNSIGNED_BYTE, blackData);

    glBindTexture(GL_TEXTURE_2D, _textureYUV[TEXU]);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RED_EXT, width / 2, height / 2, 0, GL_RED_EXT, GL_UNSIGNED_BYTE, blackData + width * height);

    glBindTexture(GL_TEXTURE_2D, _textureYUV[TEXV]);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RED_EXT, width / 2, height / 2, 0, GL_RED_EXT, GL_UNSIGNED_BYTE, blackData + width * height * 5 / 4);

    free(blackData);
}

/// Clears the drawable to opaque black and presents the empty frame.
/// Safe to call before the GL context exists (does nothing in that case).
- (void)clearFrame
{
    @synchronized(self)
    {
        //if ([self window])
        if (_glContext) // only touch GL once a context has been created
        {
            [EAGLContext setCurrentContext:_glContext];
            glClearColor(0.0, 0.0, 0.0, 1.0);
            glClear(GL_COLOR_BUFFER_BIT);
            glBindRenderbuffer(GL_RENDERBUFFER, _renderBuffer);
            [_glContext presentRenderbuffer:GL_RENDERBUFFER];
        }
    }
}

/// Tears down GL resources owned by this view. ARC releases the context
/// itself; under ARC, -dealloc must not call [super dealloc].
- (void)dealloc
{
    if (_program) {
        glDeleteTextures(3, _textureYUV);
        glDeleteProgram(_program);
    }
    if (_glContext) {
        // nil, not NULL, for an Objective-C object pointer; ARC handles release.
        _glContext = nil;
    }
    // NOTE(review): spelling matches the project's method name ("destory").
    [self destoryFrameAndRenderBuffer];
    [EAGLContext setCurrentContext:nil];
}

相关文章

  • IOS OpenGL图像实现

    1.创建CAEAGLLayer CAEAGLLayer*eaglLayer = (CAEAGLLayer*)sel...

  • 简析OpenGL ES 3.0 Sobel Filter实现边缘

    文档iOS OpenGL ES 3.0 数据可视化 4:纹理映射实现2维图像与视频渲染简介使用索贝尔算子(Sobe...

  • OpenGL学习--基础学习

    首先知道OpenGL/OpenGL ES/Metal 都是利用GPU芯片高效渲染图形图像,图形API 是iOS开发...

  • OpenGL 之 GPUImage 源码分析

    GPUImage 是 iOS 上一个基于 OpenGL 进行图像处理的开源框架,后来有人借鉴它的想法实现了一个 A...

  • 初识GLKit

    GLKit 框架是为了简化iOS上OpenGL ES的开发,提供的基于OpenGL ES的iOS框架。 实现思路:...

  • Core Graphics 绘图&2d形变

    绘图 iOS图像处理之Core Graphics和OpenGL ES初见iOS支持两套图形API族:Core Gr...

  • iOS动画(CoreAnimation)

    一、iOS核心动画介绍 CoreAnimation框架是基于OpenGL与CoreGraphics图像处理框架的一...

  • OpenGL开篇

    和视频、图像相关的底层大多数都是OpenGL ES的实现,苹果在CoreAnimation也是用的OpenGL E...

  • iOS核心动画

    iOS核心动画 核心动画框架 CoreAnimation框架是基于OpenGL与CoreGraphics图像处理框...

  • iOS图像:OpenGL(上)

    原创:知识探索型文章创作不易,请珍惜,之后会持续更新,不断完善个人比较喜欢做笔记和写总结,毕竟好记性不如烂笔头哈哈...

网友评论

      本文标题:IOS OpenGL图像实现

      本文链接:https://www.haomeiwen.com/subject/apdimttx.html