Displaying YUV Video with OpenGL ES on Android

Author: gylmy | Published 2018-04-07 22:27
In an earlier project I displayed an H.264 video stream with this pipeline: receive the stream data, decode it, convert the YUV420 output to RGB565, then display it with OpenGL ES. The color-space conversion is expensive. At D1 resolution virtually any phone keeps up, but at 720p or 1080p low-end devices fall behind and decoding starts to lag. If OpenGL ES renders the YUV planes directly, skipping the CPU conversion entirely, throughput roughly doubles.
Because this is a live stream, the details of receiving the data are omitted; the Java side looks roughly like this:
import android.content.Context;
import android.util.AttributeSet;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;

public class VideoSurfaceView extends SurfaceView implements SurfaceHolder.Callback, Runnable {
    private static final String TAG = "VideoSurfaceView";

    public SurfaceHolder surfaceHolder;
    private volatile boolean quit = false;
    private boolean init = false;
    private int type, width, height; // stream type and video dimensions, known from the stream protocol

    public VideoSurfaceView(Context context) {
        super(context);
        init();
    }

    public VideoSurfaceView(Context context, AttributeSet attributeSet) {
        super(context, attributeSet);
        init();
    }

    private void init() {
        surfaceHolder = getHolder();
        surfaceHolder.addCallback(this);
    }

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
        quit = true; // stop the render thread when the surface goes away
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
        Log.e(TAG, "surface changed");
        // surfaceChanged can fire more than once; guarding against starting
        // a second thread is omitted here for brevity
        new Thread(this).start();
    }

    @Override
    public void run() {
        Log.e(TAG, "surface run");
        while (!quit) {
            if (!init) {
                // width and height are the video's width and height
                jopenglInit(VideoSurfaceView.this.surfaceHolder.getSurface(), width, height);
                // initialize the decoder on the JNI side; since this is a live
                // stream, the parameters come from the stream itself
                jvideo_decode_init(type, width, height);
                init = true;
            }
            if (/* a frame of stream data has arrived */) {
                jvideo_decode_frame(buf, len); // the received stream data
            }
        }
    }

    // native methods implemented in the JNI layer; declared static to match
    // the jclass parameter in the C signatures further down
    private static native int jopenglInit(Surface surface, int width, int height);
    private static native int jvideo_decode_init(int type, int width, int height);
    private static native int jvideo_decode_frame(byte[] buf, int len);
}
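Because the native functions below take (JNIEnv *, jclass, ...) without the Java_ name mangling, they have to be hooked up with RegisterNatives. A minimal sketch of that glue, assuming a hypothetical class name com/example/VideoSurfaceView (the real package is not shown in the article):

#include <jni.h>

// the three native functions defined further down in the article
int jopenglInit(JNIEnv *env, jclass obj, jobject surface, jint w, jint h);
int jvideo_decode_init(JNIEnv *env, jclass obj, jint type, jint w, jint h);
int jvideo_decode_frame(JNIEnv *env, jclass obj, jbyteArray data, int len);

static JNINativeMethod gMethods[] = {
    {"jopenglInit",         "(Landroid/view/Surface;II)I", (void *) jopenglInit},
    {"jvideo_decode_init",  "(III)I",                      (void *) jvideo_decode_init},
    {"jvideo_decode_frame", "([BI)I",                      (void *) jvideo_decode_frame},
};

jint JNI_OnLoad(JavaVM *vm, void *reserved) {
    JNIEnv *env;
    if (vm->GetEnv((void **) &env, JNI_VERSION_1_6) != JNI_OK)
        return JNI_ERR;
    jclass clazz = env->FindClass("com/example/VideoSurfaceView");
    if (clazz == NULL || env->RegisterNatives(clazz, gMethods, 3) < 0)
        return JNI_ERR;
    return JNI_VERSION_1_6;
}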

The JNI side.

Global variables:


#define GET_STR(x) #x
const char *vertexShaderString = GET_STR(
                                         attribute vec4 aPosition;
                                         attribute vec2 aTexCoord;
                                         varying vec2 vTexCoord;
                                         void main() {
                                             vTexCoord=vec2(aTexCoord.x,1.0-aTexCoord.y);
                                             gl_Position = aPosition;
                                         }
                                         );
const char *fragmentShaderString = GET_STR(
                                           precision mediump float;
                                           varying vec2 vTexCoord;
                                           uniform sampler2D yTexture;
                                           uniform sampler2D uTexture;
                                           uniform sampler2D vTexture;
                                           void main() {
                                               vec3 yuv;
                                               vec3 rgb;
                                               yuv.r = texture2D(yTexture, vTexCoord).r;
                                               yuv.g = texture2D(uTexture, vTexCoord).r - 0.5;
                                               yuv.b = texture2D(vTexture, vTexCoord).r - 0.5;
                                               rgb = mat3(1.0,       1.0,         1.0,
                                                          0.0,       -0.39465,  2.03211,
                                                          1.13983, -0.58060,  0.0) * yuv;
                                               gl_FragColor = vec4(rgb, 1.0);
                                           }
                                           );
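GLSL mat3 constructors are column-major, so the matrix above is the full-range BT.601 YUV-to-RGB conversion. Written out per channel, the fragment shader computes:

R = Y + 1.13983 * (V - 0.5)
G = Y - 0.39465 * (U - 0.5) - 0.58060 * (V - 0.5)
B = Y + 2.03211 * (U - 0.5)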

GLuint yTextureId;
GLuint uTextureId;
GLuint vTextureId;

static EGLConfig eglConf;
static EGLSurface eglWindow;
static EGLContext eglCtx;
static EGLDisplay eglDisp;

Initialization (jopenglInit):

int jopenglInit(JNIEnv *env, jclass obj, jobject surface, jint cnxwidth, jint cnxheight) {
    /**
     * Initialize EGL
     **/
    LOGE("jopenglInit begin");
    int windowWidth;
    int windowHeight;
    ANativeWindow *nativeWindow = ANativeWindow_fromSurface(env, surface);
    
    EGLint configSpec[] = { EGL_RED_SIZE, 8,
        EGL_GREEN_SIZE, 8,
        EGL_BLUE_SIZE, 8,
        EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT, // request a config usable with an ES2 context
        EGL_SURFACE_TYPE, EGL_WINDOW_BIT, EGL_NONE };
    
    eglDisp = eglGetDisplay(EGL_DEFAULT_DISPLAY);
    EGLint eglMajVers, eglMinVers;
    EGLint numConfigs;
    eglInitialize(eglDisp, &eglMajVers, &eglMinVers);
    eglChooseConfig(eglDisp, configSpec, &eglConf, 1, &numConfigs);
    
    eglWindow = eglCreateWindowSurface(eglDisp, eglConf,nativeWindow, NULL);
    
    eglQuerySurface(eglDisp,eglWindow,EGL_WIDTH,&windowWidth);
    eglQuerySurface(eglDisp,eglWindow,EGL_HEIGHT,&windowHeight);
    const EGLint ctxAttr[] = {
        EGL_CONTEXT_CLIENT_VERSION, 2,
        EGL_NONE
    };
    eglCtx = eglCreateContext(eglDisp, eglConf,EGL_NO_CONTEXT, ctxAttr);
    
    eglMakeCurrent(eglDisp, eglWindow, eglWindow, eglCtx);
    
    LOGE("window width is %d, height is %d ,%s(%d) \n",windowWidth,windowHeight, __FUNCTION__, __LINE__);
    /**
     * OpenGL setup; this must happen after EGL has been initialized
     **/
    // heap-allocated and intentionally never freed: client-side vertex arrays
    // are read again at every glDrawArrays call
    float *vertexData = new float[12]{
        1.0f, -1.0f, 0.0f,
        -1.0f, -1.0f, 0.0f,
        1.0f, 1.0f, 0.0f,
        -1.0f, 1.0f, 0.0f
    };

    float *textureVertexData = new float[8]{
        1.0f, 0.0f, // bottom right
        0.0f, 0.0f, // bottom left
        1.0f, 1.0f, // top right
        0.0f, 1.0f  // top left
    };
    ShaderUtils *shaderUtils = new ShaderUtils();
    
    GLuint programId = shaderUtils->createProgram(vertexShaderString,fragmentShaderString );
    delete shaderUtils;
    GLuint aPositionHandle = (GLuint) glGetAttribLocation(programId, "aPosition");
    GLuint aTextureCoordHandle = (GLuint) glGetAttribLocation(programId, "aTexCoord");
    
    GLuint textureSamplerHandleY = (GLuint) glGetUniformLocation(programId, "yTexture");
    GLuint textureSamplerHandleU = (GLuint) glGetUniformLocation(programId, "uTexture");
    GLuint textureSamplerHandleV = (GLuint) glGetUniformLocation(programId, "vTexture");
    
    
    
    // no transform matrix is used, so the viewport is fitted to the video manually
    int videoWidth = cnxwidth;   // video width
    int videoHeight = cnxheight; // video height
    
    
    int left,top,viewWidth,viewHeight;
    if(windowHeight > windowWidth){
        left = 0;
        viewWidth = windowWidth;
        viewHeight = (int)(videoHeight*1.0f/videoWidth*viewWidth);
        top = (windowHeight - viewHeight)/2;
    }else{
        top = 0;
        viewHeight = windowHeight;
        viewWidth = (int)(videoWidth*1.0f/videoHeight*viewHeight);
        left = (windowWidth - viewWidth)/2;
    }
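    // Worked example: a 1080x1920 portrait window showing 1280x720 video takes
    // the first branch above: viewWidth = 1080, viewHeight = 1080*720/1280 = 607,
    // top = (1920-607)/2 = 656, i.e. the video is letterboxed vertically.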
     LOGE("left is %d, top is %d ,viewWidth is %d,viewheight is %d,%s(%d) \n",left,top,viewWidth,viewHeight, __FUNCTION__, __LINE__);
    glViewport(left, top, viewWidth, viewHeight);
    
    glUseProgram(programId);
    glEnableVertexAttribArray(aPositionHandle);
    glVertexAttribPointer(aPositionHandle, 3, GL_FLOAT, GL_FALSE,
                          12, vertexData);
    glEnableVertexAttribArray(aTextureCoordHandle);
    glVertexAttribPointer(aTextureCoordHandle,2,GL_FLOAT,GL_FALSE,8,textureVertexData);
    /***
     * Create the three empty Y/U/V textures
     ***/

    glGenTextures(1, &yTextureId);
    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D, yTextureId);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    // GLES2 requires CLAMP_TO_EDGE wrapping for non-power-of-two textures
    // (e.g. 720p); with the default GL_REPEAT the texture is incomplete and
    // samples black
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

    glUniform1i(textureSamplerHandleY, 0); // yTexture samples texture unit 0

    glGenTextures(1, &uTextureId);
    glActiveTexture(GL_TEXTURE1);
    glBindTexture(GL_TEXTURE_2D, uTextureId);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

    glUniform1i(textureSamplerHandleU, 1); // uTexture samples texture unit 1

    glGenTextures(1, &vTextureId);
    glActiveTexture(GL_TEXTURE2);
    glBindTexture(GL_TEXTURE_2D, vTextureId);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

    glUniform1i(textureSamplerHandleV, 2); // vTexture samples texture unit 2

    return 1;
}
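The article never releases the EGL state; when the render thread exits, something along these lines (a sketch built on the globals above; jopenglRelease is a hypothetical name) avoids leaking the context and surface:

void jopenglRelease() {
    GLuint ids[3] = { yTextureId, uTextureId, vTextureId };
    glDeleteTextures(3, ids); // must happen while the context is still current
    eglMakeCurrent(eglDisp, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT);
    eglDestroyContext(eglDisp, eglCtx);
    eglDestroySurface(eglDisp, eglWindow);
    eglTerminate(eglDisp);
}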

Decoder initialization is the usual FFmpeg boilerplate, so the original does not include it.
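For completeness, a minimal sketch of what that initialization could look like against the same FFmpeg generation as the avcodec_decode_video2 call below; the _video struct, its ContextV and Frame422 fields, and the choice of the H.264 decoder are assumptions taken from the decode function:

int jvideo_decode_init(JNIEnv *env, jclass obj, jint type, jint width, jint height) {
    avcodec_register_all(); // required before FFmpeg 4.0, a no-op afterwards

    AVCodec *codec = avcodec_find_decoder(AV_CODEC_ID_H264);
    if (codec == NULL) return -1;

    _video.ContextV = avcodec_alloc_context3(codec);
    _video.ContextV->width = width;
    _video.ContextV->height = height;
    if (avcodec_open2(_video.ContextV, codec, NULL) < 0) return -1;

    _video.Frame422 = av_frame_alloc(); // receives the decoded YUV planes
    return _video.Frame422 != NULL ? 0 : -1;
}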

Decoding and rendering:

int jvideo_decode_frame(JNIEnv *env, jclass obj, jbyteArray data, int Len)
{
    LOGE("%s(%d)\n", __FUNCTION__, __LINE__);
    AVPacket packet;
    av_init_packet(&packet); // was missing: initializes the packet's side fields
    jbyte *Buf = (jbyte *) env->GetByteArrayElements(data, 0);
    packet.data = (uint8_t *) Buf;
    packet.size = Len;

    int gotPicture = 0;
    int size = 0;

    while (packet.size > 0)
    {
        size = avcodec_decode_video2(_video.ContextV, _video.Frame422, &gotPicture, &packet);
        if (size < 0) break; // decode error: drop the rest of this packet
        if (gotPicture == 0)
        {
            LOGE("%s(%d)\n", __FUNCTION__, __LINE__);
            // the decoder is still buffering; feed the packet once more, then give up
            size = avcodec_decode_video2(_video.ContextV, _video.Frame422, &gotPicture, &packet);
            break;
        }
        packet.size -= size;
        packet.data += size;
        LOGE("decode size is %d, gotpicture is %d, %s(%d)\n", size, gotPicture, __FUNCTION__, __LINE__);
    }
    // was missing: release the Java array or it leaks on every frame
    env->ReleaseByteArrayElements(data, Buf, JNI_ABORT);
    if (gotPicture <= 0) return 0;

    // Upload the three decoded planes. GLES2 has no GL_UNPACK_ROW_LENGTH, so
    // linesize[] (the padded row stride) is used as the texture width; if
    // FFmpeg pads the rows, a thin strip of garbage can appear at the right edge.
    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D, yTextureId);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, _video.Frame422->linesize[0], _video.Frame422->height, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, _video.Frame422->data[0]);

    glActiveTexture(GL_TEXTURE1);
    glBindTexture(GL_TEXTURE_2D, uTextureId);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, _video.Frame422->linesize[1], _video.Frame422->height / 2, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, _video.Frame422->data[1]);

    glActiveTexture(GL_TEXTURE2);
    glBindTexture(GL_TEXTURE_2D, vTextureId);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, _video.Frame422->linesize[2], _video.Frame422->height / 2, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, _video.Frame422->data[2]);
    
    
    /***
     * Textures updated; now draw
     ***/
    glClear(GL_DEPTH_BUFFER_BIT | GL_COLOR_BUFFER_BIT);
    
    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
    
    eglSwapBuffers(eglDisp, eglWindow);
  

  return (gotPicture > 0);
}
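As an aside, avcodec_decode_video2 was deprecated in FFmpeg 3.1; on current FFmpeg the decode loop above collapses into the send/receive pair. A sketch, reusing the same assumed _video fields and packet setup:

int gotPicture = 0;
if (avcodec_send_packet(_video.ContextV, &packet) == 0) {
    // one packet may yield zero or more frames; keep the most recent one
    while (avcodec_receive_frame(_video.ContextV, _video.Frame422) == 0) {
        gotPicture = 1; // a complete frame is now in _video.Frame422
    }
}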

Note that the GL initialization and the rendering must run on the same thread: the EGL context is only current on the thread that called eglMakeCurrent.

The shader utility class:

#ifndef shaderUtils_hpp
#define shaderUtils_hpp
#include <GLES2/gl2.h>

class ShaderUtils {
public:
    GLuint createProgram(const char *vertexSource, const char *fragmentSource);
    
    GLuint loadShader(GLenum shaderType, const char *source);
};
#endif /* shaderUtils_hpp */

#include <malloc.h>
#include <android/log.h>
#include "shaderUtils.h"

#define SHADER_LOG(...) __android_log_print(ANDROID_LOG_ERROR, "ShaderUtils", __VA_ARGS__)

GLuint ShaderUtils::createProgram(const char *vertexSource, const char *fragmentSource) {
    GLuint vertexShader = loadShader(GL_VERTEX_SHADER, vertexSource);
    if (!vertexShader) {
        return 0;
    }
    GLuint pixelShader = loadShader(GL_FRAGMENT_SHADER, fragmentSource);
    if (!pixelShader) {
        glDeleteShader(vertexShader); // was leaked on this error path
        return 0;
    }

    GLuint program = glCreateProgram();
    if (program != 0) {
        glAttachShader(program, vertexShader);
        glAttachShader(program, pixelShader);
        glLinkProgram(program);
        GLint linkStatus = 0;
        glGetProgramiv(program, GL_LINK_STATUS, &linkStatus);
        if (!linkStatus) {
            GLint info_length = 0;
            glGetProgramiv(program, GL_INFO_LOG_LENGTH, &info_length);
            if (info_length) {
                char *buf = (char *) malloc(info_length * sizeof(char));
                if (buf) {
                    glGetProgramInfoLog(program, info_length, NULL, buf);
                    SHADER_LOG("link error: %s", buf); // the original fetched the log but never printed it
                    free(buf);
                }
            }
            glDeleteProgram(program);
            program = 0;
        }
    }
    // once linking has succeeded or failed, the shader objects are no longer needed
    glDeleteShader(vertexShader);
    glDeleteShader(pixelShader);
    return program;
}
GLuint ShaderUtils::loadShader(GLenum shaderType, const char *source) {
    GLuint shader = glCreateShader(shaderType);
    if (shader != 0) {
        glShaderSource(shader, 1, &source, NULL);
        glCompileShader(shader);
        GLint compiled = 0;
        glGetShaderiv(shader, GL_COMPILE_STATUS, &compiled);
        if (!compiled) {
            GLint info_length = 0;
            glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &info_length);
            if (info_length) {
                char *buf = (char *) malloc(info_length * sizeof(char));
                if (buf) {
                    glGetShaderInfoLog(shader, info_length, NULL, buf);
                    SHADER_LOG("compile error: %s", buf);
                    free(buf);
                }
            }
            glDeleteShader(shader);
            shader = 0;
        }
    }
    return shader;
}

The actual result:

[image: screenshot of the rendered video]

Reference:

android jni基于ffmpeg,opengles,egl的yuv视频播放功能 (YUV video playback on Android via JNI with FFmpeg, OpenGL ES and EGL)
