Android Camera RGB -> NV21 Data Display

Author: 若无初见 | Published 2020-11-23 20:33

    First, read the RGB component bytes and store them as packed pixel values in an int[] array:

    public byte[] bgr2YUV(byte[] pixels, int width, int height) {

        int size = pixels.length;
        if (size == 0) {
            return null;
        }

        // A BGR byte array should normally be a multiple of 3 bytes long;
        // if it is not, the trailing partial pixel is simply left black (0).
        int extra = (size % 3 != 0) ? 1 : 0;
        int[] color = new int[size / 3 + extra];
        int blue, green, red;
        int pixelCount = size / 3;
        for (int i = 0; i < pixelCount; ++i) {
            // The input order is BGR (as the method name says), so the first byte of each triple is blue
            blue = convertByteToInt(pixels[i * 3]);
            green = convertByteToInt(pixels[i * 3 + 1]);
            red = convertByteToInt(pixels[i * 3 + 2]);

            // Pack the components into one int; rgb2YCbCr420() below reads
            // R from the low byte, G from the middle byte and B from the high byte
            color[i] = (blue << 16) | (green << 8) | red;
        }

        return rgb2YCbCr420(color, width, height);
    }


    /**
     * Convert a byte to an int, treating the byte as an unsigned value (0-255).
     * @param data the byte to convert
     * @return the unsigned value
     */
    public static int convertByteToInt(byte data) {

        int highBits = (data >> 4) & 0x0F;
        int lowBits = data & 0x0F;
        return highBits * 16 + lowBits;
    }
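
    The nibble arithmetic in convertByteToInt is just an unsigned-byte conversion; an equivalent, simpler form (my own simplification, not from the original post) is:

    public static int convertByteToInt(byte data) {
        // Masking with 0xFF treats the byte as unsigned (0-255)
        return data & 0xFF;
    }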
    

    Next, convert the packed color values to NV21 (YUV420SP: a full Y plane followed by interleaved VU):

    public static byte[] rgb2YCbCr420(int[] pixels, int width, int height) {

        int len = width * height;

        // Size of the NV21 buffer: Y takes len bytes, U and V take len / 4 bytes each
        byte[] yuv = new byte[len * 3 / 2];

        int y, u, v;

        for (int i = 0; i < height; i++) {
            for (int j = 0; j < width; j++) {

                int rgb = pixels[i * width + j];

                // The pixel was packed as (B << 16) | (G << 8) | R, so extract with shifts
                int r = rgb & 0xFF;
                int g = (rgb >> 8) & 0xFF;
                int b = (rgb >> 16) & 0xFF;

                // RGB -> YUV conversion
//                y = ((66 * r + 129 * g + 25 * b + 128) >> 8) + 16;
//                u = ((-38 * r - 74 * g + 112 * b + 128) >> 8) + 128;
//                v = ((112 * r - 94 * g - 18 * b + 128) >> 8) + 128;
                y = ((76 * r + 150 * g + 29 * b + 128) >> 8) + 16;
                u = ((-43 * r - 84 * g + 127 * b + 128) >> 8) + 128;
                v = ((127 * r - 106 * g - 21 * b + 128) >> 8) + 128;

                // Clamp to valid ranges
                y = y < 16 ? 16 : (y > 255 ? 255 : y);
                u = u < 0 ? 0 : (u > 255 ? 255 : u);
                v = v < 0 ? 0 : (v > 255 ? 255 : v);

                // Write Y for every pixel, and an interleaved V/U pair per 2x2 block (NV21: V first, then U)
                yuv[i * width + j] = (byte) y;
                yuv[len + (i >> 1) * width + (j & ~1)] = (byte) v;
                yuv[len + (i >> 1) * width + (j & ~1) + 1] = (byte) u;
            }
        }

        return yuv;
    }
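
    For reference, the index arithmetic above follows the NV21 memory layout: the first width * height bytes are the Y plane, followed by width * height / 2 bytes of interleaved V/U, one pair per 2x2 pixel block. A small illustrative restatement of that indexing (the helper names are mine, not from the original code):

    // Illustrative only: NV21 indices for pixel (row, col) in a width x height frame.
    static int nv21YIndex(int width, int row, int col) {
        return row * width + col;                        // Y plane: one byte per pixel
    }

    static int nv21VIndex(int width, int height, int row, int col) {
        int len = width * height;
        return len + (row / 2) * width + (col & ~1);     // V of the 2x2 block; U is at this index + 1
    }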
    

    Then use OpenGL ES to render the data onto a GLSurfaceView. Note that the view below takes the Y, U and V planes separately, so the NV21 buffer produced above has to be split first (see the sketch after this class):

    public class JfGLSurfaceView extends GLSurfaceView {

        private JfRender jfRender;

        public JfGLSurfaceView(Context context) {
            this(context, null);
        }

        public JfGLSurfaceView(Context context, AttributeSet attrs) {
            super(context, attrs);
            // Required when using OpenGL ES 2.0
            setEGLContextClientVersion(2);
            jfRender = new JfRender(context);
            setRenderer(jfRender);
            // Only render when requestRender() is called
            setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
        }

        public void feedYuvData(int width, int height, byte[] y, byte[] u, byte[] v) {
            if (jfRender != null) {
                jfRender.setYUVRenderData(width, height, y, u, v);
                requestRender();
            }
        }
    }
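
    feedYuvData() expects three separate planes, while rgb2YCbCr420() returns a single NV21 buffer. A minimal sketch of splitting that buffer into planes (splitNv21 is my own helper, not part of the original post):

    public static void splitNv21(byte[] nv21, int width, int height,
                                 byte[] yOut, byte[] uOut, byte[] vOut) {
        int len = width * height;
        // The Y plane is the first width * height bytes
        System.arraycopy(nv21, 0, yOut, 0, len);
        // After the Y plane, V and U bytes alternate (V comes first in NV21)
        for (int i = 0; i < len / 4; i++) {
            vOut[i] = nv21[len + i * 2];
            uOut[i] = nv21[len + i * 2 + 1];
        }
    }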
    

    JfRender

    package com.wzw.bsdlib.view;
    
    import android.content.Context;
    import android.opengl.GLES20;
    import android.opengl.GLSurfaceView;
    import android.util.Log;
    
    
    import com.wzw.bsdlib.R;
    
    import java.nio.ByteBuffer;
    import java.nio.ByteOrder;
    import java.nio.FloatBuffer;
    
    import javax.microedition.khronos.egl.EGLConfig;
    import javax.microedition.khronos.opengles.GL10;
    
    public class JfRender implements GLSurfaceView.Renderer {
        private Context context;
        private static final String TAG = "JfRender";
        private final float[] vertexData = { // vertex coordinates

                -1f, -1f,
                1f, -1f,
                -1f, 1f,
                1f, 1f

        };

        private final float[] textureData = { // texture coordinates
                0f, 1f,
                1f, 1f,
                0f, 0f,
                1f, 0f
        };

        private FloatBuffer vertexBuffer;
        private FloatBuffer textureBuffer;
        private int program_yuv;
        private int avPosition_yuv;
        private int afPosition_yuv;

        private int sampler_y;
        private int sampler_u;
        private int sampler_v;
        private int[] textureId_yuv;

        // frame data used for rendering
        private int width_yuv;
        private int height_yuv;
        private ByteBuffer y;
        private ByteBuffer u;
        private ByteBuffer v;

        public JfRender(Context context) {
            this.context = context;
            // store the vertex coordinates in a direct native-order buffer
            vertexBuffer = ByteBuffer.allocateDirect(vertexData.length * 4)
                    .order(ByteOrder.nativeOrder())
                    .asFloatBuffer()
                    .put(vertexData);
            vertexBuffer.position(0);

            // store the texture coordinates
            textureBuffer = ByteBuffer.allocateDirect(textureData.length * 4)
                    .order(ByteOrder.nativeOrder())
                    .asFloatBuffer()
                    .put(textureData);
            textureBuffer.position(0);
        }
        @Override
        public void onSurfaceCreated(GL10 gl, EGLConfig config) {
            initRenderYUV();
        }
    
        @Override
        public void onSurfaceChanged(GL10 gl, int width, int height) {
            GLES20.glViewport(0, 0, width, height);
        }
    
        @Override
        public void onDrawFrame(GL10 gl) {
            // Set the clear color to black, then clear the screen
            GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
            GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
            renderYUV();
        }
    
    
        /**
         * Initialization: compile the shaders, look up attribute/uniform locations and create the textures.
         */
        private void initRenderYUV() {
            String vertexSource = JfShaderUtil.readRawTxt(context, R.raw.vertex_shader);
            String fragmentSource = JfShaderUtil.readRawTxt(context, R.raw.fragment_shader);
            // create the shader program
            program_yuv = JfShaderUtil.createProgram(vertexSource, fragmentSource);

            // look up the attributes declared in the shaders
            avPosition_yuv = GLES20.glGetAttribLocation(program_yuv, "av_Position");
            afPosition_yuv = GLES20.glGetAttribLocation(program_yuv, "af_Position");

            sampler_y = GLES20.glGetUniformLocation(program_yuv, "sampler_y");
            sampler_u = GLES20.glGetUniformLocation(program_yuv, "sampler_u");
            sampler_v = GLES20.glGetUniformLocation(program_yuv, "sampler_v");

            // create the three textures (Y, U, V)
            textureId_yuv = new int[3];
            GLES20.glGenTextures(3, textureId_yuv, 0);

            for (int i = 0; i < 3; i++) {
                // bind the texture
                GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId_yuv[i]);
                // set wrap and filter modes; GLES 2.0 only guarantees non-power-of-two
                // textures (typical camera sizes) with CLAMP_TO_EDGE wrapping
                GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
                GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
                GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
                GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
            }
            Log.e(TAG, "initRenderYUV");
        }
    
        public void setYUVRenderData(int width, int height, byte[] y, byte[] u, byte[] v)
        {
            this.width_yuv = width;
            this.height_yuv = height;
            this.y = ByteBuffer.wrap(y);
            this.u = ByteBuffer.wrap(u);
            this.v = ByteBuffer.wrap(v);
        }
    
        /**
         * Render one YUV frame.
         */
        private void renderYUV() {
            if (width_yuv > 0 && height_yuv > 0 && y != null && u != null && v != null) {
                GLES20.glUseProgram(program_yuv); // use the shader program

                GLES20.glEnableVertexAttribArray(avPosition_yuv); // enable the vertex attribute array
                GLES20.glVertexAttribPointer(avPosition_yuv, 2, GLES20.GL_FLOAT, false, 8, vertexBuffer); // feed the vertex coordinates

                GLES20.glEnableVertexAttribArray(afPosition_yuv);
                GLES20.glVertexAttribPointer(afPosition_yuv, 2, GLES20.GL_FLOAT, false, 8, textureBuffer);

                // upload the Y plane (full resolution, single-channel LUMINANCE texture)
                GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
                GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId_yuv[0]);
                GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, width_yuv, height_yuv, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, y);

                // upload the U and V planes (quarter resolution)
                GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
                GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId_yuv[1]);
                GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, width_yuv / 2, height_yuv / 2, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, u);

                GLES20.glActiveTexture(GLES20.GL_TEXTURE2);
                GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId_yuv[2]);
                GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, width_yuv / 2, height_yuv / 2, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, v);

                // bind texture units 0/1/2 to the three samplers
                GLES20.glUniform1i(sampler_y, 0);
                GLES20.glUniform1i(sampler_u, 1);
                GLES20.glUniform1i(sampler_v, 2);

                // release the references; glTexImage2D has already copied the pixel data
                y.clear();
                u.clear();
                v.clear();
                y = null;
                u = null;
                v = null;

                GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
            }
        }
    }
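
    JfShaderUtil is referenced above but was not listed in the original post. A minimal sketch of what it needs to provide, using the standard GLES20 shader-compilation calls (the original implementation may differ in details):

    import android.content.Context;
    import android.opengl.GLES20;
    import android.util.Log;

    import java.io.BufferedReader;
    import java.io.IOException;
    import java.io.InputStreamReader;

    public class JfShaderUtil {

        // Read a raw text resource (e.g. R.raw.vertex_shader) into a String.
        public static String readRawTxt(Context context, int rawId) {
            StringBuilder sb = new StringBuilder();
            try (BufferedReader reader = new BufferedReader(
                    new InputStreamReader(context.getResources().openRawResource(rawId)))) {
                String line;
                while ((line = reader.readLine()) != null) {
                    sb.append(line).append("\n");
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
            return sb.toString();
        }

        // Compile one shader of the given type (GL_VERTEX_SHADER or GL_FRAGMENT_SHADER).
        private static int loadShader(int type, String source) {
            int shader = GLES20.glCreateShader(type);
            GLES20.glShaderSource(shader, source);
            GLES20.glCompileShader(shader);
            int[] compiled = new int[1];
            GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
            if (compiled[0] == 0) {
                Log.e("JfShaderUtil", "shader compile error: " + GLES20.glGetShaderInfoLog(shader));
                GLES20.glDeleteShader(shader);
                return 0;
            }
            return shader;
        }

        // Link a vertex shader and a fragment shader into a program.
        public static int createProgram(String vertexSource, String fragmentSource) {
            int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
            int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
            int program = GLES20.glCreateProgram();
            GLES20.glAttachShader(program, vertexShader);
            GLES20.glAttachShader(program, fragmentShader);
            GLES20.glLinkProgram(program);
            int[] linked = new int[1];
            GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linked, 0);
            if (linked[0] == 0) {
                Log.e("JfShaderUtil", "program link error: " + GLES20.glGetProgramInfoLog(program));
                GLES20.glDeleteProgram(program);
                return 0;
            }
            return program;
        }
    }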
    
    
    

    The fragment_shader file (under res/raw):

    precision mediump float;
    varying vec2 v_texPosition;
    uniform sampler2D sampler_y;
    uniform sampler2D sampler_u;
    uniform sampler2D sampler_v;
    void main() {
        float y,u,v;
        y = texture2D(sampler_y,v_texPosition).r;
        u = texture2D(sampler_u,v_texPosition).r- 0.5;
        v = texture2D(sampler_v,v_texPosition).r- 0.5;
    
        vec3 rgb;
        rgb.r = y + 1.403 * v;
        rgb.g = y - 0.344 * u - 0.714 * v;
        rgb.b = y + 1.770 * u;
    
        gl_FragColor = vec4(rgb,1);
    }
    

    The vertex_shader file:

    attribute vec4 av_Position;
    attribute vec2 af_Position;
    varying vec2 v_texPosition;
    void main() {
        v_texPosition = af_Position;
        gl_Position = av_Position;
    }
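
    Putting the pieces together, a hypothetical call site for one frame (bgrFrame, jfGLSurfaceView and splitNv21 are assumed names; splitNv21 is the helper sketched after JfGLSurfaceView above):

    // bgrFrame holds width * height BGR pixels, 3 bytes each
    byte[] nv21 = bgr2YUV(bgrFrame, width, height);

    byte[] yPlane = new byte[width * height];
    byte[] uPlane = new byte[width * height / 4];
    byte[] vPlane = new byte[width * height / 4];
    splitNv21(nv21, width, height, yPlane, uPlane, vPlane);

    jfGLSurfaceView.feedYuvData(width, height, yPlane, uPlane, vPlane);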
    
