美文网首页Android OpenGL
OpenGL ES之绘制YUV视频

OpenGL ES之绘制YUV视频

作者: 放羊娃华振 | 来源:发表于2022-10-04 11:11 被阅读0次

一、概述

画面绘制是一个播放器的重要组成部分,这个demo可以帮助我们简单了解视频是怎么被渲染出来的。主要的功能就是:读取已经处理好的YUV420P数据,之后传送到OpenGL ES中进行渲染。
二、读取YUV420P的数据
想要读取yuv420p的数据就必须先了解它的数据结构,我把结构画个示意图,如下:


image.png

所以读取的时候,从0位置开始,根据视频帧画面的尺寸依次读取即可,代码如下:

// Open the packed YUV420P file from assets. Frame layout: w*h bytes of Y,
// then (w/2)*(h/2) bytes of U, then (w/2)*(h/2) bytes of V.
is = getResources().getAssets().open("yuv_video.yuv");
// InputStream.read(byte[]) may return fewer bytes than requested, which would
// silently shift every following plane; DataInputStream.readFully() loops
// until the buffer is completely filled and throws EOFException at end of
// stream. (Requires java.io.DataInputStream / java.io.EOFException imports.)
DataInputStream dis = new DataInputStream(is);

byte[] y = new byte[w * h];
byte[] u = new byte[w * h / 4];
byte[] v = new byte[w * h / 4];

try {
    while (true) {
        dis.readFully(y);
        dis.readFully(u);
        dis.readFully(v);
        // Hand the frame to the renderer and trigger a redraw.
        mYuvRenderer.setYUVData(w, h, y, u, v);
        mGLSurfaceView.requestRender();
        Thread.sleep(50); // crude ~20 fps pacing
    }
} catch (EOFException eof) {
    // Reached end of file (possibly mid-frame): playback finished.
    Log.d("dzh", "已经完成");
} finally {
    dis.close(); // also closes the underlying asset stream
}

以上代码用于读取处理好的yuv数据,还有一个细节就是通过setYUVData把每帧数据传送给renderer。

三、实现着色器的编写

顶点着色器:

// Vertex shader: draws a full-screen quad. The position is already in
// normalized device coordinates, so it is passed through unchanged; the
// texture coordinate is forwarded for per-fragment interpolation.
attribute vec4 v_Position; // quad vertex position in NDC
attribute vec2 f_Position; // texture coordinate for this vertex
varying vec2 ft_Position;  // interpolated coordinate consumed by the fragment shader
void main() {
    ft_Position = f_Position;
    gl_Position = v_Position;
}

片元着色器:

// Fragment shader: samples the three planar YUV textures and converts to RGB.
// Each plane is uploaded as a single-channel texture (see the renderer's
// GL_LUMINANCE uploads), so .x holds the sample value in [0,1].
precision mediump float;
varying vec2 ft_Position;    // interpolated texture coordinate
uniform sampler2D sampler_y; // Y (luma) plane, full resolution
uniform sampler2D sampler_u; // U (Cb) plane, half resolution per axis
uniform sampler2D sampler_v; // V (Cr) plane, half resolution per axis
void main() {
    float y,u,v;
    y = texture2D(sampler_y,ft_Position).x;
    // U/V are stored biased by 128, so recenter them around 0.
    u = texture2D(sampler_u,ft_Position).x- 128./255.;
    v = texture2D(sampler_v,ft_Position).x- 128./255.;

    // NOTE(review): coefficients look like a full-range BT.601 YUV->RGB
    // approximation — confirm against the source material's color space.
    vec3 rgb;
    rgb.r = y + 1.403 * v;
    rgb.g = y - 0.344 * u - 0.714 * v;
    rgb.b = y + 1.770 * u;

    gl_FragColor = vec4(rgb,1);
}

yuv视频渲染Renderer的完整代码:

package com.stormdzh.openglanimation.renderer;

import android.content.Context;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.opengl.Matrix;

import com.stormdzh.openglanimation.R;
import com.stormdzh.openglanimation.util.LogUtil;
import com.stormdzh.openglanimation.util.shader.ShaderUtil;

import java.nio.Buffer;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;

import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

/**
 * @Description: YUV-着色器
 * @Author: dzh
 * @CreateDate: 2020-06-16 18:28
 */
/**
 * Renders raw YUV420P frames with OpenGL ES 2.0.
 *
 * <p>Each frame's Y, U and V planes are uploaded as three single-channel
 * (GL_LUMINANCE) textures; the fragment shader performs the YUV-to-RGB
 * conversion. Frames are handed over from a reader thread via
 * {@link #setYUVData(int, int, byte[], byte[], byte[])} and consumed on the
 * GL thread in {@link #onDrawFrame(GL10)}.
 */
public class YuvRenderer implements GLSurfaceView.Renderer {

    // Fixed: was "LineRenderer", a copy-paste leftover from another renderer.
    private String TAG = "YuvRenderer";

    private Context mContext;

    // Full-screen quad in NDC, drawn as a 4-vertex triangle strip (2 floats each).
    private float[] vertexData = {
            1f, 1f,
            -1f, 1f,
            1f, -1f,
            -1f, -1f
    };
    private FloatBuffer vertexBuffer;

    private FloatBuffer textureBuffer;
    // Texture coordinates paired with the vertex order above. The vertical flip
    // puts the first byte row of the YUV frame at the top of the screen.
    private final float[] textureVertexData = {
            1f, 0f,
            0f, 0f,
            1f, 1f,
            0f, 1f
    };

    // One GL texture name per plane: [0]=Y, [1]=U, [2]=V.
    private int[] texture_yuv;

    private int program;

    // Attribute locations.
    private int vPosition;
    private int fPosition;

    // Sampler uniform locations, one per plane.
    private int sampler_y;
    private int sampler_u;
    private int sampler_v;

    // NOTE(review): the shaders in this article declare no "u_Matrix" uniform,
    // so this lookup returns -1; kept only for compatibility.
    private int u_matrix;

    // Pending frame handed over from the reader thread. Declared volatile so
    // writes in setYUVData() are visible to the GL thread without locking.
    volatile int w;
    volatile int h;
    volatile Buffer y;
    volatile Buffer u;
    volatile Buffer v;

    public YuvRenderer(Context context) {
        this.mContext = context;
        // Copy vertex data into direct, native-order buffers (4 bytes per float)
        // so GL can read them without JNI copying.
        vertexBuffer = ByteBuffer.allocateDirect(vertexData.length * 4)
                .order(ByteOrder.nativeOrder())
                .asFloatBuffer()
                .put(vertexData);
        vertexBuffer.position(0);

        textureBuffer = ByteBuffer.allocateDirect(textureVertexData.length * 4)
                .order(ByteOrder.nativeOrder())
                .asFloatBuffer()
                .put(textureVertexData);
        textureBuffer.position(0);
    }

    @Override
    public void onSurfaceCreated(GL10 gl10, EGLConfig eglConfig) {
        LogUtil.i(TAG, "onSurfaceCreated");

        // Compile/link the YUV shader program and cache all locations.
        String vertexSource = ShaderUtil.readRawText(mContext, R.raw.vertex_yuv_simple_shader);
        String fragmentSource = ShaderUtil.readRawText(mContext, R.raw.fragment_yuv_shader);
        program = ShaderUtil.creteProgram(vertexSource, fragmentSource);
        if (program > 0) {
            vPosition = GLES20.glGetAttribLocation(program, "v_Position");
            fPosition = GLES20.glGetAttribLocation(program, "f_Position");
            u_matrix = GLES20.glGetUniformLocation(program, "u_Matrix");

            sampler_y = GLES20.glGetUniformLocation(program, "sampler_y");
            sampler_u = GLES20.glGetUniformLocation(program, "sampler_u");
            sampler_v = GLES20.glGetUniformLocation(program, "sampler_v");

            // One texture per plane, linear filtering, clamped at the edges.
            texture_yuv = new int[3];
            GLES20.glGenTextures(3, texture_yuv, 0);
            for (int i = 0; i < 3; i++) {
                GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texture_yuv[i]);
                GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
                GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
                GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
                GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
                GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
            }
        }
    }

    @Override
    public void onSurfaceChanged(GL10 gl10, int width, int height) {
        LogUtil.i(TAG, "onSurfaceChanged");
        // Viewport covers the whole surface: x, y, width, height.
        GLES20.glViewport(0, 0, width, height);
    }

    @Override
    public void onDrawFrame(GL10 gl10) {
        LogUtil.i(TAG, "onDrawFrame");
        // Set the clear color BEFORE clearing. The original called glClear first,
        // so the red background only took effect from the second frame on.
        GLES20.glClearColor(1, 0, 0, 1);
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);

        // Snapshot the pending frame so a concurrent setYUVData() cannot swap
        // planes out from under us mid-upload.
        Buffer planeY = y;
        Buffer planeU = u;
        Buffer planeV = v;

        if (w > 0 && h > 0) {
            GLES20.glUseProgram(program);

            // Feed the quad geometry and texture coordinates (stride = 2 floats).
            GLES20.glEnableVertexAttribArray(vPosition);
            GLES20.glVertexAttribPointer(vPosition, 2, GLES20.GL_FLOAT, false, 2 * 4, vertexBuffer);

            GLES20.glEnableVertexAttribArray(fPosition);
            GLES20.glVertexAttribPointer(fPosition, 2, GLES20.GL_FLOAT, false, 2 * 4, textureBuffer);

            // The planes are tightly packed; the default unpack alignment of 4
            // distorts single-channel uploads whose row width is not a multiple of 4.
            GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1);

            if (planeY != null) {
                GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
                GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texture_yuv[0]);
                GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, w, h, 0,
                        GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, planeY);
                GLES20.glUniform1i(sampler_y, 0);
            }

            // Chroma planes are subsampled 2x2 (YUV420P), hence w/2 x h/2.
            if (planeU != null) {
                GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
                GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texture_yuv[1]);
                GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, w / 2, h / 2, 0,
                        GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, planeU);
                GLES20.glUniform1i(sampler_u, 1);
            }

            if (planeV != null) {
                GLES20.glActiveTexture(GLES20.GL_TEXTURE2);
                GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texture_yuv[2]);
                GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, w / 2, h / 2, 0,
                        GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, planeV);
                GLES20.glUniform1i(sampler_v, 2);
            }

            // Drop the references so each frame is uploaded at most once; the
            // textures retain the last uploaded image for subsequent redraws.
            y = null;
            u = null;
            v = null;
        }

        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
    }

    /**
     * Hands one decoded YUV420P frame to the renderer. May be called from any
     * thread; the caller should invoke GLSurfaceView.requestRender() afterwards.
     *
     * @param w  frame width in pixels
     * @param h  frame height in pixels
     * @param by Y plane, w*h bytes
     * @param bu U plane, (w/2)*(h/2) bytes
     * @param bv V plane, (w/2)*(h/2) bytes
     */
    public void setYUVData(int w, int h, byte[] by, byte[] bu, byte[] bv) {
        this.w = w;
        this.h = h;
        this.y = ByteBuffer.wrap(by);
        this.u = ByteBuffer.wrap(bu);
        this.v = ByteBuffer.wrap(bv);
    }
}

相关文章

网友评论

    本文标题:OpenGL ES之绘制YUV视频

    本文链接:https://www.haomeiwen.com/subject/zznbkrtx.html