iOS FFmpeg H.264 Decoding

Author: copy_farmer | Published 2020-09-04 15:25

    1. **Integrate FFmpeg locally**

    (Reference: iOS集成FFmpeg及视频格式转码)

    2. **Integrate via CocoaPods** (less work, but some build parameters cannot be changed)

      pod 'FFmpeg'
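
    For reference, a minimal Podfile might look like the following sketch (the platform version and target name are illustrative, not from the original post):

    platform :ios, '9.0'
    
    target 'MyPlayerApp' do
      pod 'FFmpeg'
    end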
    

    3. **Define the YUV structs** (DecodeH264Data_YUV.h)

    #ifndef _DECODEH264DATA_YUV_
    #define _DECODEH264DATA_YUV_
    
    #pragma pack(push, 1)
    
    // One plane of decoded pixel data.
    typedef struct H264FrameDef
    {
        unsigned int    length;      // size of dataBuffer in bytes
        unsigned char*  dataBuffer;  // raw plane bytes
        
    }H264Frame;
    
    // A decoded YUV420p frame: a full-resolution luma plane plus two
    // quarter-resolution chroma planes.
    typedef struct  H264YUVDef
    {
        unsigned int    width;
        unsigned int    height;
        H264Frame       luma;     // Y
        H264Frame       chromaB;  // U (Cb)
        H264Frame       chromaR;  // V (Cr)
        
    }H264YUV_Frame;
    
    
    #pragma pack(pop)
    
    #endif
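
    For a width x height YUV420p picture, luma holds width*height bytes and each chroma plane holds (width/2)*(height/2) bytes. A small sanity-check sketch (the helper name is illustrative, not from the original post):

    // Fill in the expected plane lengths for a given picture size
    // (YUV420p layout, matching the structs above).
    static void initYUVFrameSizes(H264YUV_Frame *frame, unsigned int w, unsigned int h)
    {
        frame->width          = w;
        frame->height         = h;
        frame->luma.length    = w * h;             // e.g. 640*360 = 230400
        frame->chromaB.length = (w / 2) * (h / 2); // e.g. 320*180 = 57600
        frame->chromaR.length = (w / 2) * (h / 2);
    }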
    

    4. **The decoder** (H264Decoder.h / H264Decoder.m)

    #import <Foundation/Foundation.h>
    #include <libavcodec/avcodec.h>
    #include <libavformat/avformat.h>
    #include <libswscale/swscale.h>
    #import "DecodeH264Data_YUV.h"
    
    
    @protocol H264DecoderDelegate <NSObject>
    
    @optional
    - (void)updateDecodedH264FrameData: (H264YUV_Frame*)yuvFrame;
    @end
    
    @interface H264Decoder : NSObject
    {
        int pictureWidth;
        AVCodec*    pCodec;
        AVCodecContext* pCodecCtx;
        AVFrame*    pVideoFrame;
    }
    
    @property (nonatomic, strong) NSData *sps;
    @property (nonatomic, strong) NSData *pps;
    @property (nonatomic, weak) id<H264DecoderDelegate> delegate;
    @property (nonatomic, assign) BOOL hasIFrame;
    
    + (H264Decoder *)sharedInstance;
    - (id)init;
    - (void)decodeSpsPps:(NSData *)sps pps:(NSData *)pps;
    - (int)decodeH264IFrames:(NSData *)h264Data;
    - (int)decodeH264Frames:(NSData *)data;
    
    @end
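
    A minimal call-site sketch (spsData, ppsData, naluData and videoQueue are illustrative names, not from the original post):

    H264Decoder *decoder = [H264Decoder sharedInstance];
    decoder.delegate = self; // self adopts H264DecoderDelegate
    
    // Feed SPS and PPS once, before any slice data.
    [decoder decodeSpsPps:spsData pps:ppsData];
    
    // Then feed each NAL unit. Decode off the main thread: the decoder
    // dispatch_syncs its delegate callback onto the main queue.
    dispatch_async(videoQueue, ^{
        [decoder decodeH264Frames:naluData];
    });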
    
    
    #import "H264Decoder.h"
    
    typedef enum
    {
        NALUTypeSliceNoneIDR = 1,
        NALUTypeSliceIDR = 5,
        NALUTypeSPS = 7,
        NALUTypePPS = 8
    } NALUType;
    
    @interface H264Decoder ()
    {
        BOOL _needFilter;
    }
    
    @end
    
    @implementation H264Decoder
    
    + (H264Decoder *)sharedInstance {
        static dispatch_once_t once;
        static id instance;
        dispatch_once(&once, ^{
            instance = [[self alloc] init];
        });
        return instance;
    }
    
    - (id) init
    {
        if(self=[super init])
        {
            _needFilter = NO;
            pCodec      =NULL;
            pCodecCtx   =NULL;
            pVideoFrame =NULL;
            
            pictureWidth = 0;
            
            // Deprecated (and a no-op) on FFmpeg 4.x, but required by
            // the older API this code targets.
            av_register_all();
         
            pCodec=avcodec_find_decoder(AV_CODEC_ID_H264);
            if(!pCodec)
            {
                printf("Codec not found\n");
            }
            pCodecCtx=avcodec_alloc_context3(pCodec);
            if(!pCodecCtx)
            {
                printf("Failed to allocate codec context\n");
            }
            
            // On FFmpeg >= 4.0 these constants are spelled
            // AV_CODEC_FLAG2_FAST / AV_CODEC_FLAG_LOW_DELAY.
            pCodecCtx->flags2 |= CODEC_FLAG2_FAST;
            pCodecCtx->thread_count = 2;
            pCodecCtx->thread_type = FF_THREAD_FRAME;
            pCodecCtx->flags|=CODEC_FLAG_LOW_DELAY;
            
            avcodec_open2(pCodecCtx, pCodec, NULL);
            
            pVideoFrame=av_frame_alloc();
            
        }
        
        return self;
    }
    
    
    -(void)decodeSpsPps:(NSData *)sps pps:(NSData *)pps
    {
        self.sps = sps;
        self.pps = pps;
        
        [self decodeSpsPpsData:sps];
        [self decodeSpsPpsData:pps];
    }
    
    // Assumes a 4-byte Annex B start code (00 00 00 01); the NAL unit
    // type sits in the low 5 bits of the following byte.
    - (int)getNALUType:(NSData *)NALU
    {
        uint8_t * bytes = (uint8_t *) NALU.bytes;
        return bytes[4] & 0x1F;
    }
    
    - (int)decodeH264IFrames:(NSData *)h264Data
    {
        _needFilter = YES;
        if([h264Data length] < 5)
        {
            return 0;
        }
        int type = [self getNALUType: h264Data];
        if(type == NALUTypeSliceIDR)
        {
            //NSLog(@"decodeH264IFrames");
            return [self decodeH264Frames:h264Data];
        }
        else
        {
            return 0;
        }
        
    }
    
    
    - (int)decodeH264Frames:(NSData *)h264Data
    {
        if(_needFilter)
        {
            int type = [self getNALUType: h264Data];
            if(type == NALUTypeSliceIDR)
            {
                _needFilter = NO;
            }
            else
            {
                return 0;
            }
        }
        
        int got_picture = [self decodeH264Data:h264Data];
        
    
        // Track the picture width so a mid-stream resolution change can
        // be detected; return -1 so the caller can rebuild its pipeline.
        if((pictureWidth != 0) && (pictureWidth != pCodecCtx->width))
        {
            pictureWidth = pCodecCtx->width;
            return -1;
        }
        pictureWidth = pCodecCtx->width;
        
        // Copy out the decoded YUV 420 planes and hand them to the delegate.
        if(got_picture)
        {
    
            unsigned int lumaLength= (pCodecCtx->height)*(MIN(pVideoFrame->linesize[0], pCodecCtx->width));
            unsigned int chromBLength=((pCodecCtx->height)/2)*(MIN(pVideoFrame->linesize[1], (pCodecCtx->width)/2));
            unsigned int chromRLength=((pCodecCtx->height)/2)*(MIN(pVideoFrame->linesize[2], (pCodecCtx->width)/2));
            
            H264YUV_Frame    yuvFrame;
            memset(&yuvFrame, 0, sizeof(H264YUV_Frame));
            
            yuvFrame.luma.length = lumaLength;
            yuvFrame.chromaB.length = chromBLength;
            yuvFrame.chromaR.length =chromRLength;
            
            yuvFrame.luma.dataBuffer=(unsigned char*)malloc(lumaLength);
            yuvFrame.chromaB.dataBuffer=(unsigned char*)malloc(chromBLength);
            yuvFrame.chromaR.dataBuffer=(unsigned char*)malloc(chromRLength);
            
            copyDecodedFrame(pVideoFrame->data[0],yuvFrame.luma.dataBuffer,pVideoFrame->linesize[0],
                             pCodecCtx->width,pCodecCtx->height);
            copyDecodedFrame(pVideoFrame->data[1], yuvFrame.chromaB.dataBuffer,pVideoFrame->linesize[1],
                             pCodecCtx->width / 2,pCodecCtx->height / 2);
            copyDecodedFrame(pVideoFrame->data[2], yuvFrame.chromaR.dataBuffer,pVideoFrame->linesize[2],
                             pCodecCtx->width / 2,pCodecCtx->height / 2);
            
            yuvFrame.width=pCodecCtx->width;
            yuvFrame.height=pCodecCtx->height;
            __weak __typeof__(self) weakSelf = self;
            // Caution: dispatch_sync to the main queue deadlocks if the
            // decoder itself runs on the main thread; drive it from a
            // background queue.
            dispatch_sync(dispatch_get_main_queue(), ^{
                
                [weakSelf updateYUVFrameOnMainThread:(H264YUV_Frame*)&yuvFrame];
            });
            
            free(yuvFrame.luma.dataBuffer);
            free(yuvFrame.chromaB.dataBuffer);
            free(yuvFrame.chromaR.dataBuffer);
            
        }
    
        return 0;
    }
    
    // Copy one plane row by row, dropping FFmpeg's per-row padding
    // (linesize can be wider than the visible width).
    void copyDecodedFrame(unsigned char *src, unsigned char *dst,int linesize, int width, int height)
    {
        width = MIN(linesize, width);
        for (NSUInteger i = 0; i < height; ++i)
        {
            memcpy(dst, src, width);
            dst += width;
            src += linesize;
        }
    }
    
    - (void)updateYUVFrameOnMainThread:(H264YUV_Frame*)yuvFrame
    {
        if(yuvFrame!=NULL)
        {
            if([self.delegate respondsToSelector:@selector(updateDecodedH264FrameData:)])
            {
                [self.delegate updateDecodedH264FrameData:yuvFrame];
            }
        }
    }
    
    -(int)decodeSpsPpsData:(NSData *)data
    {
        @synchronized (self) {
            // Prepend a 4-byte Annex B start code (00 00 00 01) to the
            // raw SPS/PPS NAL unit before handing it to the decoder.
            uint8_t *ch = malloc([data length] + 4);
            ch[0]=0;
            ch[1]=0;
            ch[2]=0;
            ch[3]=1;
            
            memcpy(ch + 4, [data bytes], [data length]);
            
            AVPacket packet;
            av_init_packet(&packet);
            
            packet.data = ch;
            packet.size = (int)[data length] + 4;
            
            int got_picture = 0;
            avcodec_decode_video2(pCodecCtx, pVideoFrame, &got_picture, &packet);
            av_packet_unref(&packet);
            free(ch);
            return got_picture;
        }
    
    }
    -(int)decodeH264Data:(NSData *)data
    {
        @synchronized (self) {
            // The incoming NAL unit is assumed to carry a 4-byte AVCC
            // length prefix; overwrite it with an Annex B start code.
            // Work on a copy: NSData's backing buffer must not be mutated.
            uint8_t *ch = (uint8_t *)malloc([data length]);
            memcpy(ch, [data bytes], [data length]);
            ch[0]=0;
            ch[1]=0;
            ch[2]=0;
            ch[3]=1;
            
            AVPacket packet;
            av_init_packet(&packet);
            
            packet.data = ch;
            packet.size = (int)[data length];
            
            int got_picture = 0;
            avcodec_decode_video2(pCodecCtx, pVideoFrame, &got_picture, &packet);
            av_packet_unref(&packet);
            free(ch);
            return got_picture;
        }
        
    }
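
    Note that av_register_all and avcodec_decode_video2 are deprecated in newer FFmpeg releases (3.1/4.0 onward). Against a current FFmpeg, the decode step above maps onto the send/receive API roughly as follows; this is a sketch under that assumption, not a drop-in replacement:

    // FFmpeg >= 3.1: the same decode step with the send/receive API.
    AVPacket packet;
    av_init_packet(&packet);
    packet.data = ch;
    packet.size = (int)[data length];
    
    int got_picture = 0;
    if (avcodec_send_packet(pCodecCtx, &packet) == 0) {
        // avcodec_receive_frame returns 0 when a frame is ready;
        // AVERROR(EAGAIN) just means the decoder needs more input.
        if (avcodec_receive_frame(pCodecCtx, pVideoFrame) == 0) {
            got_picture = 1;
        }
    }
    av_packet_unref(&packet);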
    

    5. **Render the YUV frames (OpenGL ES)**

    #import <UIKit/UIKit.h>
    #import <OpenGLES/EAGL.h>
    #import <OpenGLES/ES2/gl.h>
    #import "DecodeH264Data_YUV.h"
    
    
    @interface OpenGLGLRenderer_YUV : NSObject
    {
        
        GLint _uniformSamplers[3];
        GLuint _textures[3];
    }
    
    - (BOOL) isValid;
    - (NSString *)fragmentShader;
    - (void) resolveUniforms: (GLuint) program;
    - (void) setFrame: (H264YUV_Frame *) frame;
    - (BOOL) prepareRender;
    
    @end
    
    
    
    @interface OpenGLFrameView : UIView
    {
        EAGLContext     *_context;
        GLuint          _framebuffer;
        GLuint          _renderbuffer;
        GLint           _backingWidth;
        GLint           _backingHeight;
        GLuint          _program;
        GLint           _uniformMatrix;
        GLfloat         _vertices[8];
        
        CGPoint      beginPoint;
        
        OpenGLGLRenderer_YUV* _renderer;
    }
    
    
    - (id)initWithFrame:(CGRect)frame;
    - (void)render:(H264YUV_Frame *)frame;
    - (UIImage*)snapshotPicture;
    @end
    // OpenGLFrameView.m
    #define PT_DELAY 1.5
    
    #import "OpenGLFrameView.h"
    #import <QuartzCore/QuartzCore.h>
    #import <OpenGLES/EAGLDrawable.h>
    #import <OpenGLES/EAGL.h>
    #import <OpenGLES/ES2/gl.h>
    #import <OpenGLES/ES2/glext.h>
    #import "ZOStatusTool.h"
    
    
    #pragma mark - shaders
    
    #define STRINGIZE(x) #x
    #define STRINGIZE2(x) STRINGIZE(x)
    #define SHADER_STRING(text) @ STRINGIZE2(text)
    
    NSString *const vertexShaderString = SHADER_STRING
    (
        attribute vec4 position;
        attribute vec2 texcoord;
        uniform mat4 modelViewProjectionMatrix;
        varying vec2 v_texcoord;
     
        void main()
        {
            gl_Position = modelViewProjectionMatrix * position;
            v_texcoord = texcoord.xy;
        }
     
     );
    
    NSString *const rgbFragmentShaderString = SHADER_STRING
    (
        varying highp vec2 v_texcoord;
        uniform sampler2D s_texture;
     
        void main()
        {
            gl_FragColor = texture2D(s_texture, v_texcoord);
        }
     
     );
    
    NSString *const yuvFragmentShaderString = SHADER_STRING
    (
        varying highp vec2 v_texcoord;
        uniform sampler2D s_texture_y;
        uniform sampler2D s_texture_u;
        uniform sampler2D s_texture_v;
     
        void main()
        {
            highp float y = texture2D(s_texture_y, v_texcoord).r;
            highp float u = texture2D(s_texture_u, v_texcoord).r - 0.5;
            highp float v = texture2D(s_texture_v, v_texcoord).r - 0.5;
         
            highp float r = y + 1.402 * v;
            highp float g = y - 0.344 * u - 0.714 * v;
            highp float b = y + 1.772 * u;
         
            gl_FragColor = vec4(r,g,b,1.0);
        }
     
     );
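
    The constants 1.402, 0.344, 0.714 and 1.772 in the YUV fragment shader are the approximate BT.601 full-range YUV-to-RGB conversion coefficients; streams encoded with BT.709 would need slightly different values.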
    
    static BOOL validateProgram(GLuint prog)
    {
        GLint status;
        
        glValidateProgram(prog);
        
        
        glGetProgramiv(prog, GL_VALIDATE_STATUS, &status);
        if (status == GL_FALSE) {
            printf("Validate program failed %d\n", prog);
            return NO;
        }
        
        return YES;
    }
    
    static GLuint compileShader(GLenum type, NSString *shaderString)
    {
        GLint status;
        const GLchar *sources = (GLchar *)shaderString.UTF8String;
        
        GLuint shader = glCreateShader(type);
        if (shader == 0)  // glCreateShader returns 0 on failure
        {
            printf("Create shader failed %d\n", type);
            return 0;
        }
        
        glShaderSource(shader, 1, &sources, NULL);
        glCompileShader(shader);
        
    
        glGetShaderiv(shader, GL_COMPILE_STATUS, &status);
        if (status == GL_FALSE)
        {
            glDeleteShader(shader);
            printf("Compile shader failed\n");
            return 0;
        }
        
        return shader;
    }
    
    static void mat4f_LoadOrtho(float left, float right, float bottom, float top, float near, float far, float* mout)
    {
        float r_l = right - left;
        float t_b = top - bottom;
        float f_n = far - near;
        float tx = - (right + left) / (right - left);
        float ty = - (top + bottom) / (top - bottom);
        float tz = - (far + near) / (far - near);
        
        mout[0] = 2.0f / r_l;
        mout[1] = 0.0f;
        mout[2] = 0.0f;
        mout[3] = 0.0f;
        
        mout[4] = 0.0f;
        mout[5] = 2.0f / t_b;
        mout[6] = 0.0f;
        mout[7] = 0.0f;
        
        mout[8] = 0.0f;
        mout[9] = 0.0f;
        mout[10] = -2.0f / f_n;
        mout[11] = 0.0f;
        
        mout[12] = tx;
        mout[13] = ty;
        mout[14] = tz;
        mout[15] = 1.0f;
    }
    
    
    #pragma mark - frame renderers
    
    
    
    @implementation OpenGLGLRenderer_YUV
    
    - (BOOL) isValid
    {
        return (_textures[0] != 0);
    }
    
    - (NSString *) fragmentShader
    {
        return yuvFragmentShaderString;
    }
    
    - (void) resolveUniforms: (GLuint) program
    {
        _uniformSamplers[0] = glGetUniformLocation(program, "s_texture_y");
        _uniformSamplers[1] = glGetUniformLocation(program, "s_texture_u");
        _uniformSamplers[2] = glGetUniformLocation(program, "s_texture_v");
    }
    
    - (void) setFrame: (H264YUV_Frame *)frame
    {
        H264YUV_Frame *yuvFrame = (H264YUV_Frame *)frame;
        
        
        assert(yuvFrame->luma.length == yuvFrame->width * yuvFrame->height);
        assert(yuvFrame->chromaB.length == (yuvFrame->width * yuvFrame->height) / 4);
        assert(yuvFrame->chromaR.length == (yuvFrame->width * yuvFrame->height) / 4);
        
        
        const NSUInteger frameWidth = yuvFrame->width;
        const NSUInteger frameHeight = yuvFrame->height;
        
        glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
        
        if (0 == _textures[0])
        {
            glGenTextures(3, _textures);
        }
        
        const UInt8 *pixels[3] = {(UInt8 *)yuvFrame->luma.dataBuffer, (UInt8 *)yuvFrame->chromaB.dataBuffer, (UInt8 *)yuvFrame->chromaR.dataBuffer};
        
        const NSUInteger widths[3]  = { frameWidth, frameWidth / 2, frameWidth / 2 };
        const NSUInteger heights[3] = { frameHeight, frameHeight / 2, frameHeight / 2 };
        
        for (int i = 0; i < 3; ++i)
        {
            
            glBindTexture(GL_TEXTURE_2D, _textures[i]);
            
            glTexImage2D(GL_TEXTURE_2D,
                         0,
                         GL_LUMINANCE,
                         (int)widths[i],
                         (int)heights[i],
                         0,
                         GL_LUMINANCE,
                         GL_UNSIGNED_BYTE,
                         pixels[i]);
            
            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
        }
    }
    
    - (BOOL) prepareRender
    {
        if (_textures[0] == 0)
            return NO;
        
        for (int i = 0; i < 3; ++i) {
            glActiveTexture(GL_TEXTURE0 + i);
            glBindTexture(GL_TEXTURE_2D, _textures[i]);
            glUniform1i(_uniformSamplers[i], i);
        }
        
        return YES;
    }
    
    - (void)dealloc
    {
        //printf("----------------Release openGL ES Rennder--------------------->>>>>>\n");
        if (_textures[0])
        {
            glDeleteTextures(3, _textures);
        }
    }
    
    
    @end
    
    #pragma mark - gl view
    
    enum {
        ATTRIBUTE_VERTEX,
        ATTRIBUTE_TEXCOORD,
    };
    
    @implementation OpenGLFrameView
    
    + (Class) layerClass
    {
        return [CAEAGLLayer class];
    }
    
    - (id) initWithFrame:(CGRect)frame
    {
    
        if (self= [super initWithFrame:frame])
        {
            
            _renderer = [[OpenGLGLRenderer_YUV alloc] init];
            
            self.contentScaleFactor = [[UIScreen mainScreen] scale];
            CAEAGLLayer *eaglLayer = (CAEAGLLayer*) self.layer;
            eaglLayer.opaque = YES;
            eaglLayer.drawableProperties = [NSDictionary dictionaryWithObjectsAndKeys:
                                            [NSNumber numberWithBool:YES], kEAGLDrawablePropertyRetainedBacking,
                                            kEAGLColorFormatRGBA8, kEAGLDrawablePropertyColorFormat,
                                            nil];
            
            _context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
            
            if (!_context ||
                ![EAGLContext setCurrentContext:_context])
            {
                
                printf("Setup EAGLContext failed \n");
                self = nil;
                return nil;
            }
            
            glGenFramebuffers(1, &_framebuffer);
            glGenRenderbuffers(1, &_renderbuffer);
            glBindFramebuffer(GL_FRAMEBUFFER, _framebuffer);
            glBindRenderbuffer(GL_RENDERBUFFER, _renderbuffer);
            [_context renderbufferStorage:GL_RENDERBUFFER fromDrawable:(CAEAGLLayer*)self.layer];
            glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &_backingWidth);
            glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &_backingHeight);
            glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, _renderbuffer);
            
            GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
            if (status != GL_FRAMEBUFFER_COMPLETE)
            {
                
                printf(" Make complete framebuffer object failed %x\n", status);
                self = nil;
                return nil;
            }
            
            GLenum glError = glGetError();
            if (GL_NO_ERROR != glError)
            {
                
                printf("failed to setup GL %x\n", glError);
                self = nil;
                return nil;
            }
            
            if (![self loadShaders])
            {
                
                self = nil;
                return nil;
            }
            
            _vertices[0] = -1.0f;
            _vertices[1] = -1.0f;
            _vertices[2] =  1.0f;
            _vertices[3] = -1.0f;
            _vertices[4] = -1.0f;
            _vertices[5] =  1.0f;
            _vertices[6] =  1.0f;
            _vertices[7] =  1.0f;
            
            //printf("setup OpenGL ES success\n");
        }
    
        return self;
    }
    
    - (void)dealloc
    {
        //printf("----------------Release openGL ES --------------------->>>>>>\n");
        if(_renderer!=nil)
        {
            _renderer= nil;
        }
        if (_framebuffer) {
            glDeleteFramebuffers(1, &_framebuffer);
            _framebuffer = 0;
        }
        
        if (_renderbuffer) {
            glDeleteRenderbuffers(1, &_renderbuffer);
            _renderbuffer = 0;
        }
        
        if (_program) {
            glDeleteProgram(_program);
            _program = 0;
        }
        
        if ([EAGLContext currentContext] == _context) {
            [EAGLContext setCurrentContext:nil];
            _context = nil;
        }
        
    }
    
    
    - (void)layoutSubviews
    {
        glBindRenderbuffer(GL_RENDERBUFFER, _renderbuffer);
        [_context renderbufferStorage:GL_RENDERBUFFER fromDrawable:(CAEAGLLayer*)self.layer];
        glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &_backingWidth);
        glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &_backingHeight);
        
        GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
        if (status != GL_FRAMEBUFFER_COMPLETE)
        {
            
           //printf("Make complete framebuffer object failed %x\n", status);
            
        }
        else
        {
            
           //printf("Setup GL framebuffer success %d:%d\n", _backingWidth, _backingHeight);
        }
        
        [self updateVertices];
        [self render: nil];
    }
    
    - (void)setContentMode:(UIViewContentMode)contentMode
    {
        [super setContentMode:contentMode];
        [self updateVertices];
        if (_renderer.isValid){
            [self render:nil];
        }
    }
    
    - (BOOL)loadShaders
    {
        BOOL result = NO;
        GLuint vertShader = 0, fragShader = 0;
        
        _program = glCreateProgram();
        
        vertShader = compileShader(GL_VERTEX_SHADER, vertexShaderString);
        if (!vertShader)
            goto exit;
        
        fragShader = compileShader(GL_FRAGMENT_SHADER,  _renderer.fragmentShader);
        if (!fragShader)
            goto exit;
        
        glAttachShader(_program, vertShader);
        glAttachShader(_program, fragShader);
        glBindAttribLocation(_program, ATTRIBUTE_VERTEX, "position");
        glBindAttribLocation(_program, ATTRIBUTE_TEXCOORD, "texcoord");
        
        glLinkProgram(_program);
        
        GLint status;
        glGetProgramiv(_program, GL_LINK_STATUS, &status);
        if (status == GL_FALSE) {
            printf("Link program failed %d\n", _program);
            goto exit;
        }
        
        result = validateProgram(_program);
        
        _uniformMatrix = glGetUniformLocation(_program, "modelViewProjectionMatrix");
        [_renderer resolveUniforms:_program];
        
    exit:
        
        if (vertShader){
            glDeleteShader(vertShader);
        }
        if (fragShader){
            glDeleteShader(fragShader);
        }
        
        if (result) {
            
            //printf("Setup GL programm ok\n");
            
        } else {
            
            glDeleteProgram(_program);
            _program = 0;
        }
        
        return result;
    }
    
    
    - (void)updateVertices
    {
        const BOOL fit      = (self.contentMode == UIViewContentModeScaleAspectFit);
        // Note: assumes a fixed 640x360 source; use the decoded frame's
        // dimensions instead if the stream resolution can vary.
        const float width   = 640;
        const float height  = 360;
        const float dH      = (float)_backingHeight / height;
        const float dW      = (float)_backingWidth  / width;
        const float dd      = fit ? MIN(dH, dW) : MAX(dH, dW);
        const float h       = (height * dd / (float)_backingHeight);
        const float w       = (width  * dd / (float)_backingWidth );
        
        _vertices[0] = - w;
        _vertices[1] = - h;
        _vertices[2] =   w;
        _vertices[3] = - h;
        _vertices[4] = - w;
        _vertices[5] =   h;
        _vertices[6] =   w;
        _vertices[7] =   h;
    }
    
    - (void)render: (H264YUV_Frame *)frame
    {
        
        static const GLfloat texCoords[] =
        {
            0.0f, 1.0f,
            1.0f, 1.0f,
            0.0f, 0.0f,
            1.0f, 0.0f,
        };
        
        [EAGLContext setCurrentContext:_context];
        
        glBindFramebuffer(GL_FRAMEBUFFER, _framebuffer);
        glViewport(0, 0, _backingWidth, _backingHeight);
        
        //glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
        
        //glClearColor(1.0f, 1.0f, 1.0f, 0.0f);
        
        
        //glClear(GL_COLOR_BUFFER_BIT);
        
        glUseProgram(_program);
        
        if (frame)
        {
            [_renderer setFrame:frame];
        }
        
        if ([_renderer prepareRender])
        {
            
            GLfloat modelviewProj[16];
            mat4f_LoadOrtho(-1.0f, 1.0f, -1.0f, 1.0f, -1.0f, 1.0f, modelviewProj);
            glUniformMatrix4fv(_uniformMatrix, 1, GL_FALSE, modelviewProj);
            
            glVertexAttribPointer(ATTRIBUTE_VERTEX, 2, GL_FLOAT, 0, 0, _vertices);
            glEnableVertexAttribArray(ATTRIBUTE_VERTEX);
            glVertexAttribPointer(ATTRIBUTE_TEXCOORD, 2, GL_FLOAT, 0, 0, texCoords);
            glEnableVertexAttribArray(ATTRIBUTE_TEXCOORD);
            
            glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
            
            if ([[ZOStatusTool shareStatusTool] singleShot]) {
                glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
                glClear(GL_COLOR_BUFFER_BIT);
            }
        }
        
        glBindRenderbuffer(GL_RENDERBUFFER, _renderbuffer);
        [_context presentRenderbuffer:GL_RENDERBUFFER];
    }
    
    
    
    
    - (UIImage*)snapshotPicture
    {
    
        // Bind the colour renderbuffer (not the framebuffer id) before reading pixels.
        glBindRenderbuffer(GL_RENDERBUFFER, _renderbuffer);
        
        glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &_backingWidth);
        glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &_backingHeight);
        
        NSInteger x = 0, y = 0, width = _backingWidth, height = _backingHeight;
        
        NSInteger dataLength = width * height * 4;
        GLubyte *data = (GLubyte*)malloc(dataLength * sizeof(GLubyte));
        
        
        glPixelStorei(GL_PACK_ALIGNMENT, 4);
        glReadPixels(x, y, width, height, GL_RGBA, GL_UNSIGNED_BYTE, data);
        
        CGDataProviderRef ref = CGDataProviderCreateWithData(NULL, data, dataLength, NULL);
        CGColorSpaceRef colorspace = CGColorSpaceCreateDeviceRGB();
        
        // 32 bpp RGBA needs an alpha flag; byte order alone is not a valid bitmapInfo here.
        CGImageRef iref = CGImageCreate(width, height, 8, 32, width * 4, colorspace,
                                        kCGBitmapByteOrderDefault | kCGImageAlphaPremultipliedLast,
                                        ref, NULL, NO, kCGRenderingIntentDefault);
        
        
        NSInteger widthInPoints, heightInPoints;
        
        if (NULL != UIGraphicsBeginImageContextWithOptions)
        {
            
            CGFloat scale = 1;
            widthInPoints = width / scale;
            heightInPoints = height / scale;
            UIGraphicsBeginImageContextWithOptions(CGSizeMake(widthInPoints, heightInPoints), NO, scale);
        }
        else
        {
            
            widthInPoints = width;
            heightInPoints = height;
            UIGraphicsBeginImageContext(CGSizeMake(widthInPoints, heightInPoints));
            
        }
        CGContextRef cgcontext = UIGraphicsGetCurrentContext();
        
        CGContextSetBlendMode(cgcontext, kCGBlendModeCopy);
        CGContextDrawImage(cgcontext, CGRectMake(0.0, 0.0, widthInPoints, heightInPoints), iref);
        
        UIImage *retImage = UIGraphicsGetImageFromCurrentImageContext();
    
        UIGraphicsEndImageContext();
        
        free(data);
        CFRelease(ref);
        CFRelease(colorspace);
        CGImageRelease(iref);
    
        return retImage;
        
    }
    @end
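
    To glue the two halves together, a view controller can adopt H264DecoderDelegate and forward each decoded frame to the view. A minimal sketch (self.glView is an illustrative property holding an OpenGLFrameView):

    - (void)updateDecodedH264FrameData:(H264YUV_Frame *)yuvFrame
    {
        // The decoder dispatch_syncs this callback onto the main queue
        // and frees the YUV buffers right after it returns, so render:
        // must consume the frame synchronously (it uploads via glTexImage2D).
        [self.glView render:yuvFrame];
    }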
    
