美文网首页
Android SurfaceView 和 TextureView

Android SurfaceView 和 TextureView

作者: 雁过留声_泪落无痕 | 来源:发表于2021-09-10 16:00 被阅读0次
  1. SurfaceView 预览相机,适用于单纯预览,或者一边预览一边将数据发送到服务器
/**
 * Example 1: camera preview on a SurfaceView. Suitable for plain preview,
 * or for previewing while streaming frames to a server.
 */
public class SurfaceViewCameraActivity extends AppCompatActivity
        implements SurfaceHolder.Callback, Camera.PreviewCallback {

    private static final String TAG = "SurfaceViewCamera";

    private Camera mCamera;
    private SurfaceView mSurfaceView;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        mSurfaceView = new SurfaceView(this);
        // Track the surface lifecycle so the camera is opened/released with it.
        mSurfaceView.getHolder().addCallback(this);

        setContentView(mSurfaceView);
    }

    @Override
    public void surfaceCreated(@NonNull SurfaceHolder holder) {
        mCamera = Camera.open();
        if (mCamera == null) {
            // No back-facing camera, or it is held by another process.
            Log.e(TAG, "Camera.open() returned null");
            return;
        }

        try {
            mCamera.setPreviewCallback(this);
            mCamera.setPreviewDisplay(holder);
            mCamera.startPreview();
        } catch (IOException ioe) {
            // Don't swallow silently: log, then release so the camera isn't leaked.
            Log.e(TAG, "Failed to start camera preview", ioe);
            mCamera.release();
            mCamera = null;
        }
    }

    @Override
    public void surfaceChanged(@NonNull SurfaceHolder holder, int format, int width, int height) {
        // Nothing to do: the camera keeps rendering into the same surface.
    }

    @Override
    public void surfaceDestroyed(@NonNull SurfaceHolder holder) {
        // Guard against the failure paths above where mCamera may be null.
        if (mCamera != null) {
            mCamera.setPreviewCallback(null);
            mCamera.stopPreview();
            mCamera.release();
            mCamera = null;
        }
    }

    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        // encode and upload to server.
    }

}
  2. TextureView 预览相机,同 1
/**
 * Example 2: camera preview on a TextureView (same use case as example 1).
 */
public class TextureViewCameraActivity extends AppCompatActivity
        implements TextureView.SurfaceTextureListener, Camera.PreviewCallback {

    private static final String TAG = "TextureViewCamera";

    private Camera mCamera;
    private TextureView mTextureView;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        mTextureView = new TextureView(this);
        mTextureView.setSurfaceTextureListener(this);

        setContentView(mTextureView);
    }

    @Override
    public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
        mCamera = Camera.open();
        if (mCamera == null) {
            // No back-facing camera, or it is held by another process.
            Log.e(TAG, "Camera.open() returned null");
            return;
        }

        try {
            mCamera.setPreviewCallback(this);
            mCamera.setPreviewTexture(surface);
            mCamera.startPreview();
        } catch (IOException ioe) {
            // Don't swallow silently: log, then release so the camera isn't leaked.
            Log.e(TAG, "Failed to start camera preview", ioe);
            mCamera.release();
            mCamera = null;
        }
    }

    @Override
    public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
        // Ignored, Camera does all the work for us
    }

    @Override
    public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
        // Guard against the failure paths above where mCamera may be null.
        if (mCamera != null) {
            mCamera.setPreviewCallback(null);
            mCamera.stopPreview();
            mCamera.release();
            mCamera = null;
        }
        // true => TextureView releases the SurfaceTexture for us.
        return true;
    }

    @Override
    public void onSurfaceTextureUpdated(SurfaceTexture surface) {
        // Invoked every time there's a new Camera preview frame
    }

    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        // encode and upload to server.
    }

}
  1. 相机数据先到单独的 SurfaceTexture(可以直接得到一个纹理,这里并没有直接用这个纹理,而只是借用了 SurfaceTexture 的 Buffer,否则 Camera 的数据没地方送,导致 onPreviewFrame() 回调不会得到调用),再到 SurfaceView,适用于先得到相机数据并将数据进行处理后再进行预览,比如加滤镜效果。
    这里的示例是使用的 lockCanvas 得到 Canvas 对象进行绘制到 SurfaceView 中去的。也就是先拿到 data 数据,再对数据进行处理并转换为 Bitmap,然后通过 Canvas 绘制到 SurfaceView 或者 TextureView 上。

流程:data[] => SurfaceTexture 的 buffer => 数据处理 => 转换为 Bitmap => 绘制到 SurfaceView/TextureView 上

/**
 * Example 3: camera frames go to a standalone SurfaceTexture first (only its buffer
 * queue is borrowed so that onPreviewFrame() fires; the GL texture itself is unused),
 * then the processed frame is drawn onto a SurfaceView via lockCanvas(). Useful when
 * the raw data must be processed before display, e.g. applying a filter.
 *
 * Pipeline: data[] => SurfaceTexture buffer => process => Bitmap => draw on SurfaceView.
 */
public class SurfaceTexture2SurfaceViewCameraActivity extends AppCompatActivity
        implements SurfaceHolder.Callback, Camera.PreviewCallback {

    private static final String TAG = "ST2SurfaceViewCamera";
    private static final int JPEG_QUALITY = 80;

    private Camera mCamera;
    private SurfaceView mSurfaceView;
    private SurfaceTexture mSurfaceTexture;

    private Rect mOriginalRect;   // source rect in camera preview coordinates
    private Rect mRotatedRect;    // destination rect in view coordinates
    private int mWidth;           // camera preview width
    private int mHeight;          // camera preview height
    private Bitmap mBitmap;

    private byte[] mBuffer;       // single reusable callback buffer (same array as "data")

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        mSurfaceView = new SurfaceView(this);
        mSurfaceView.getHolder().addCallback(this);

        setContentView(mSurfaceView);
    }

    @Override
    public void surfaceCreated(@NonNull SurfaceHolder holder) {
        // The texture name (1) is never rendered; this SurfaceTexture only gives the
        // camera a buffer queue so that onPreviewFrame() gets invoked.
        mSurfaceTexture = new SurfaceTexture(1);
        mCamera = Camera.open();
        if (mCamera == null) {
            Log.e(TAG, "Camera.open() returned null");
            return;
        }
        try {
            mCamera.setPreviewTexture(mSurfaceTexture);
        } catch (IOException e) {
            Log.e(TAG, "setPreviewTexture failed", e);
        }

        Camera.Parameters parameters = mCamera.getParameters();
        mWidth = parameters.getPreviewSize().width;
        mHeight = parameters.getPreviewSize().height;

        // One full frame in the preview format (NV21 by default => 12 bits per pixel).
        int bufferSize = mWidth * mHeight
                * ImageFormat.getBitsPerPixel(parameters.getPreviewFormat()) / 8;
        mBuffer = new byte[bufferSize];

        mCamera.addCallbackBuffer(mBuffer);
        mCamera.setPreviewCallbackWithBuffer(this);
        mCamera.startPreview();
    }

    @Override
    public void surfaceChanged(@NonNull SurfaceHolder holder, int format, int width, int height) {
        // Nothing to do.
    }

    @Override
    public void surfaceDestroyed(@NonNull SurfaceHolder holder) {
        // Guard against the failure paths above where mCamera may be null.
        if (mCamera != null) {
            mCamera.setPreviewCallbackWithBuffer(null);
            mCamera.stopPreview();
            mCamera.release();
            mCamera = null;
        }
        if (mSurfaceTexture != null) {
            mSurfaceTexture.release();
            mSurfaceTexture = null;
        }
    }

    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        if (null == mOriginalRect) {
            mOriginalRect = new Rect(0, 0, mWidth, mHeight);
        }
        // NV21 -> JPEG -> Bitmap: convenient but expensive; acceptable for a demo.
        YuvImage image = new YuvImage(data, ImageFormat.NV21, mWidth, mHeight, null);
        ByteArrayOutputStream stream = new ByteArrayOutputStream();
        image.compressToJpeg(mOriginalRect, JPEG_QUALITY, stream);
        mBitmap = BitmapFactory.decodeByteArray(stream.toByteArray(), 0, stream.size());
        try {
            stream.close();
        } catch (IOException e) {
            Log.e(TAG, "Failed to close JPEG stream", e);
        }
        if (mBitmap == null) {
            // Corrupt frame: hand the buffer back and wait for the next one.
            camera.addCallbackBuffer(data);
            return;
        }

        // Compensate for the display rotation (the camera sensor is landscape-oriented).
        int rotation = getWindowManager().getDefaultDisplay().getRotation();
        int degree = -90 * rotation;

        Matrix matrix = new Matrix();
        matrix.postRotate(degree + 90);
        mBitmap = Bitmap.createBitmap(mBitmap, 0, 0, mBitmap.getWidth(), mBitmap.getHeight(), matrix, true);
        mBitmap = Bitmap.createScaledBitmap(mBitmap, mSurfaceView.getWidth(), mSurfaceView.getHeight(), true);

        if (mRotatedRect == null) {
            mRotatedRect = new Rect(0, 0, mBitmap.getWidth(), mBitmap.getHeight());
        }
        // lockCanvas() returns null while the surface is being destroyed.
        Canvas canvas = mSurfaceView.getHolder().lockCanvas();
        if (canvas != null) {
            canvas.drawBitmap(mBitmap, null, mRotatedRect, null);
            mSurfaceView.getHolder().unlockCanvasAndPost(canvas);
        }

        // Return the buffer only AFTER we finished reading "data" (data == mBuffer);
        // re-adding it up front lets the camera overwrite the frame mid-processing.
        camera.addCallbackBuffer(data);
    }

}
  1. 相机数据先到单独的 SurfaceTexture,同 3
/**
 * Example 4: same as example 3, but the processed frames are drawn onto a
 * TextureView instead of a SurfaceView.
 *
 * Pipeline: data[] => SurfaceTexture buffer => process => Bitmap => draw on TextureView.
 */
public class SurfaceTexture2TextureViewCameraActivity extends AppCompatActivity
        implements TextureView.SurfaceTextureListener, Camera.PreviewCallback {

    private static final String TAG = "ST2TextureViewCamera";
    private static final int JPEG_QUALITY = 80;

    private Camera mCamera;
    private TextureView mTextureView;
    private SurfaceTexture mSurfaceTexture;

    private Rect mOriginalRect;   // source rect in camera preview coordinates
    private Rect mRotatedRect;    // destination rect in view coordinates
    private int mWidth;           // camera preview width
    private int mHeight;          // camera preview height
    private Bitmap mBitmap;

    private byte[] mBuffer;       // single reusable callback buffer (same array as "data")

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        mTextureView = new TextureView(this);
        mTextureView.setSurfaceTextureListener(this);

        setContentView(mTextureView);
    }

    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        if (null == mOriginalRect) {
            mOriginalRect = new Rect(0, 0, mWidth, mHeight);
        }
        // NV21 -> JPEG -> Bitmap: convenient but expensive; acceptable for a demo.
        YuvImage image = new YuvImage(data, ImageFormat.NV21, mWidth, mHeight, null);
        ByteArrayOutputStream stream = new ByteArrayOutputStream();
        image.compressToJpeg(mOriginalRect, JPEG_QUALITY, stream);
        mBitmap = BitmapFactory.decodeByteArray(stream.toByteArray(), 0, stream.size());
        try {
            stream.close();
        } catch (IOException e) {
            Log.e(TAG, "Failed to close JPEG stream", e);
        }
        if (mBitmap == null) {
            // Corrupt frame: hand the buffer back and wait for the next one.
            camera.addCallbackBuffer(data);
            return;
        }

        // Compensate for the display rotation (the camera sensor is landscape-oriented).
        int rotation = getWindowManager().getDefaultDisplay().getRotation();
        int degree = -90 * rotation;

        Matrix matrix = new Matrix();
        matrix.postRotate(degree + 90);
        mBitmap = Bitmap.createBitmap(mBitmap, 0, 0, mBitmap.getWidth(), mBitmap.getHeight(), matrix, true);
        mBitmap = Bitmap.createScaledBitmap(mBitmap, mTextureView.getWidth(), mTextureView.getHeight(), true);

        if (mRotatedRect == null) {
            mRotatedRect = new Rect(0, 0, mBitmap.getWidth(), mBitmap.getHeight());
        }
        // lockCanvas() returns null while the view is unavailable.
        Canvas canvas = mTextureView.lockCanvas();
        if (canvas != null) {
            canvas.drawBitmap(mBitmap, null, mRotatedRect, null);
            mTextureView.unlockCanvasAndPost(canvas);
        }

        // Return the buffer only AFTER we finished reading "data" (data == mBuffer);
        // re-adding it up front lets the camera overwrite the frame mid-processing.
        camera.addCallbackBuffer(data);
    }

    @Override
    public void onSurfaceTextureAvailable(@NonNull SurfaceTexture surface, int width, int height) {
        // The texture name (1) is never rendered; this SurfaceTexture only gives the
        // camera a buffer queue so that onPreviewFrame() gets invoked.
        mSurfaceTexture = new SurfaceTexture(1);
        mCamera = Camera.open();
        if (mCamera == null) {
            Log.e(TAG, "Camera.open() returned null");
            return;
        }
        try {
            mCamera.setPreviewTexture(mSurfaceTexture);
        } catch (IOException e) {
            Log.e(TAG, "setPreviewTexture failed", e);
        }

        Camera.Parameters parameters = mCamera.getParameters();
        mWidth = parameters.getPreviewSize().width;
        mHeight = parameters.getPreviewSize().height;

        // One full frame in the preview format (NV21 by default => 12 bits per pixel).
        int bufferSize = mWidth * mHeight
                * ImageFormat.getBitsPerPixel(parameters.getPreviewFormat()) / 8;
        mBuffer = new byte[bufferSize];

        mCamera.addCallbackBuffer(mBuffer);
        mCamera.setPreviewCallbackWithBuffer(this);
        mCamera.startPreview();
    }

    @Override
    public void onSurfaceTextureSizeChanged(@NonNull SurfaceTexture surface, int width, int height) {
        // Nothing to do.
    }

    @Override
    public boolean onSurfaceTextureDestroyed(@NonNull SurfaceTexture surface) {
        // Guard against the failure paths above where mCamera may be null.
        if (mCamera != null) {
            mCamera.setPreviewCallbackWithBuffer(null);
            mCamera.stopPreview();
            mCamera.release();
            mCamera = null;
        }
        if (mSurfaceTexture != null) {
            mSurfaceTexture.release();
            mSurfaceTexture = null;
        }
        // true => TextureView releases its own SurfaceTexture. Returning false without
        // ever calling surface.release() (as the original did) leaks the buffer queue.
        return true;
    }

    @Override
    public void onSurfaceTextureUpdated(@NonNull SurfaceTexture surface) {
        // Invoked for every new frame posted to the TextureView; nothing to do.
    }
}
  1. 其实无非就是想要对原始数据进行处理再进行绘制显示出来,所以也可以直接使用 GLSurfaceView 作为 Camera 的渲染目标(TextureView 需要自行创建 EGL 环境,如新建一个类 GLTextureView),把 data 数据传到 OpenGL 处理(调用 GLES20.glTexImage2D 方法传递数据),处理完毕(调用 GLES20.glDrawArrays 方法)即可自动更新到 GLSurfaceView 上。

流程:data[] => SurfaceTexture 的 buffer => OpenGL 处理并绘制到 GLSurfaceView/GLTextureView 上

/**
 * Processes the raw camera data with OpenGL and draws it straight onto a
 * GLSurfaceView. The data is still converted into a texture first — but here
 * the texture is generated manually from the camera bytes, see
 * {@link android.opengl.GLES20#glTexImage2D}
 */
public class HandleDataByOpenGLCameraActivity extends AppCompatActivity
        implements GLSurfaceView.Renderer, Camera.PreviewCallback {

    private Camera mCamera;
    private GLSurfaceView mGLSurfaceView;
    private SurfaceTexture mSurfaceTexture;   // frame sink so the camera has somewhere to push data
    private IntBuffer mGLRgbBuffer;           // RGBA pixels converted from the NV21 frame
    private DirectDrawer mDrawer;             // created lazily on the GL thread

    private int mWidth;   // camera preview width
    private int mHeight;  // camera preview height

    // Guards mGLRgbBuffer: written on the camera callback thread, read on the GL thread.
    private final Object mLock = new Object();

    @Override
    protected void onCreate(@Nullable Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        initGLSurfaceView();
        setContentView(mGLSurfaceView);
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        // NOTE(review): mCamera is opened in onSurfaceCreated(); if the GL surface was
        // never created this will NPE — verify the activity lifecycle in real use.
        mCamera.setPreviewCallback(null);
        mCamera.stopPreview();
        mCamera.release();
    }

    /** Called on the GL thread once the EGL context is ready: opens the camera and starts preview. */
    @Override
    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
        GLES20.glClearColor(0f, 0f, 0f, 0f);

        mCamera = Camera.open();
        final Camera.Size previewSize = mCamera.getParameters().getPreviewSize();
        mWidth = previewSize.width;
        mHeight = previewSize.height;

        try {
            // A real GL texture name is generated here, but it is only used to give the
            // SurfaceTexture (and thus the camera) a buffer queue; it is never drawn.
            int[] textures = new int[1];
            GLES20.glGenTextures(1, textures, 0);
            mSurfaceTexture = new SurfaceTexture(textures[0]);
            mCamera.setPreviewTexture(mSurfaceTexture);
        } catch (IOException ioe) {
            // Something bad happened
        }
        mCamera.startPreview();
        mCamera.setPreviewCallback(this);
    }

    @Override
    public void onSurfaceChanged(GL10 gl, int width, int height) {
        GLES20.glViewport(0, 0, width, height);
    }

    /** Uploads the latest converted frame as a 2D texture and draws a full-screen quad. */
    @Override
    public void onDrawFrame(GL10 gl) {
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);

        if (mDrawer == null) {
            // Must be constructed on the GL thread: init() issues GL calls.
            mDrawer = new DirectDrawer();
            mDrawer.init();
        }

        synchronized (mLock) {
            // Upload the RGBA data held in mGLRgbBuffer into the bound 2D texture.
            // NOTE(review): mGLRgbBuffer may still be null before the first camera frame;
            // glTexImage2D then allocates an uninitialized texture — confirm acceptable.
            GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, mWidth, mHeight,
                    0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, mGLRgbBuffer);
            mDrawer.draw();
            // NOTE(review): requesting a render from inside onDrawFrame makes the
            // RENDERMODE_WHEN_DIRTY view redraw continuously — presumably intentional
            // here as onPreviewFrame() never calls requestRender(); verify.
            mGLSurfaceView.requestRender();
        }
    }

    /** Camera callback thread: converts the NV21 frame to RGBA for the GL thread. */
    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        if (mGLRgbBuffer == null) {
            mGLRgbBuffer = IntBuffer.allocate(mWidth * mHeight);
        }

        synchronized (mLock) {
            // Convert the raw NV21 data to RGBA and store it in mGLRgbBuffer.
            GPUImageNativeLibrary.YUVtoRBGA(data, mWidth, mHeight, mGLRgbBuffer.array());
        }
    }

    /** Builds the GLSurfaceView: GLES 2.0 context, RGBA8888, render-on-demand. */
    private void initGLSurfaceView() {
        mGLSurfaceView = new GLSurfaceView(this);
        mGLSurfaceView.setEGLContextClientVersion(2);
        mGLSurfaceView.setEGLConfigChooser(8, 8, 8, 8, 16, 0);
        mGLSurfaceView.getHolder().setFormat(PixelFormat.RGBA_8888);
        mGLSurfaceView.setRenderer(this);
        mGLSurfaceView.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
        mGLSurfaceView.requestRender();
    }

    /** Debug helper: logs the current GL error code with a stack trace. */
    public static void checkError() {
        int errorCode = GLES20.glGetError();
        Log.d("hehe", "errorCode: " + errorCode, new Throwable());
    }

    /**
     * Based on VideoDumpView.java; note that this variant samples a regular
     * 2D texture (sampler2D), not an external OES texture.
     */
    private static class DirectDrawer {
        private final String vertexShaderCode =
                "attribute vec4 vPosition;\n" +
                        "attribute vec2 inputTextureCoordinate;\n" +
                        "varying vec2 textureCoordinate;\n" +
                        "void main() {\n" +
                        "  gl_Position = vPosition;\n" +
                        "  textureCoordinate = inputTextureCoordinate;\n" +
                        "}\n";

        private final String fragmentShaderCode =
                "precision mediump float;\n" +
                        "varying vec2 textureCoordinate;\n" +
                        "uniform sampler2D s_texture;\n" +
                        "void main() {\n" +
                        "  gl_FragColor = texture2D( s_texture, textureCoordinate );\n" +
                        "}\n";

        private int texture;                        // 2D texture name created in createTexture()
        private FloatBuffer vertexBuffer;           // quad vertex positions
        private FloatBuffer textureVerticesBuffer;  // quad texture coordinates
        private ShortBuffer drawListBuffer;         // triangle index order

        private int mProgram;
        private int mPositionHandle;
        private int mTextureCoordHandle;

        private static final short DRAW_ORDER[] = {0, 1, 2, 0, 2, 3};
        private static final int COORDS_PER_VERTEX = 2;
        private static final int VERTEX_STRIDE = COORDS_PER_VERTEX * 4; // 4 bytes per vertex

        // Full-screen quad in normalized device coordinates.
        private static float SQUARE_COORDS[] = {
                -1.0f, 1.0f,
                -1.0f, -1.0f,
                1.0f, -1.0f,
                1.0f, 1.0f,
        };
        // Texture coordinates (flipped vertically relative to the quad).
        private static float TEXTURE_VERTICES[] = {
                0.0f, 1.0f,
                1.0f, 1.0f,
                1.0f, 0.0f,
                0.0f, 0.0f,
        };

        /** Must be called on the GL thread before draw(): creates texture and program. */
        public void init() {
            createTexture();
            createProgram();
        }

        /** Draws the full-screen textured quad using the currently uploaded texture. */
        public void draw() {
            GLES20.glUseProgram(mProgram);
            // Bind the texture
            GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texture);
            // Vertex positions
            mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
            GLES20.glEnableVertexAttribArray(mPositionHandle);
            GLES20.glVertexAttribPointer(mPositionHandle, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, VERTEX_STRIDE, vertexBuffer);
            // Texture coordinates
            mTextureCoordHandle = GLES20.glGetAttribLocation(mProgram, "inputTextureCoordinate");
            GLES20.glEnableVertexAttribArray(mTextureCoordHandle);
            GLES20.glVertexAttribPointer(mTextureCoordHandle, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, VERTEX_STRIDE, textureVerticesBuffer);
            // Draw
            GLES20.glDrawElements(GLES20.GL_TRIANGLES, DRAW_ORDER.length, GLES20.GL_UNSIGNED_SHORT, drawListBuffer);
            // Clean up attribute state
            GLES20.glDisableVertexAttribArray(mPositionHandle);
            GLES20.glDisableVertexAttribArray(mTextureCoordHandle);
        }

        // Compile a shader. NOTE(review): compile status is never checked;
        // a shader error would only surface later as a blank render.
        private int loadShader(int type, String shaderCode) {
            int shader = GLES20.glCreateShader(type);
            GLES20.glShaderSource(shader, shaderCode);
            GLES20.glCompileShader(shader);
            return shader;
        }

        /** Creates and configures the 2D texture that glTexImage2D uploads into. */
        private void createTexture() {
            int[] texture = new int[1];
            GLES20.glGenTextures(1, texture, 0);
            GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texture[0]);
            GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, (float) GLES20.GL_LINEAR);
            GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, (float) GLES20.GL_LINEAR);
            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
            this.texture = texture[0];
        }

        /** Fills the vertex/index/texcoord buffers and links the shader program. */
        private void createProgram() {
            // Vertex coordinates
            ByteBuffer bb = ByteBuffer.allocateDirect(SQUARE_COORDS.length * 4);
            bb.order(ByteOrder.nativeOrder());
            vertexBuffer = bb.asFloatBuffer();
            vertexBuffer.put(SQUARE_COORDS);
            vertexBuffer.position(0);
            // Vertex draw order
            ByteBuffer dlb = ByteBuffer.allocateDirect(DRAW_ORDER.length * 2);
            dlb.order(ByteOrder.nativeOrder());
            drawListBuffer = dlb.asShortBuffer();
            drawListBuffer.put(DRAW_ORDER);
            drawListBuffer.position(0);
            // Texture coordinates
            ByteBuffer bb2 = ByteBuffer.allocateDirect(TEXTURE_VERTICES.length * 4);
            bb2.order(ByteOrder.nativeOrder());
            textureVerticesBuffer = bb2.asFloatBuffer();
            textureVerticesBuffer.put(TEXTURE_VERTICES);
            textureVerticesBuffer.position(0);
            // Compile shaders and link the program
            int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode);
            int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode);
            mProgram = GLES20.glCreateProgram();
            GLES20.glAttachShader(mProgram, vertexShader);
            GLES20.glAttachShader(mProgram, fragmentShader);
            GLES20.glLinkProgram(mProgram);
        }
    }

    private static class GPUImageNativeLibrary {

        /**
         * Converts an NV21 (YUV420SP) frame to packed ARGB ints using an
         * integer-only approximation of the ITU-R BT.601 conversion.
         * rgbData must hold at least width*height ints.
         */
        public static void YUVtoRBGA(byte[] yuv, int width, int height, int[] rgbData) {
            int sz;
            int i;
            int j;
            int Y;
            int Cr = 0;
            int Cb = 0;
            int pixPtr = 0;
            int jDiv2 = 0;
            int R = 0;
            int G = 0;
            int B = 0;
            int cOff;
            int w = width;
            int h = height;
            sz = w * h;

            for (j = 0; j < h; j++) {
                pixPtr = j * w;
                jDiv2 = j >> 1;
                for (i = 0; i < w; i++) {
                    Y = yuv[pixPtr];
                    if (Y < 0) Y += 255;
                    // Chroma is subsampled 2x2: reload Cb/Cr only on even columns.
                    if ((i & 0x1) != 1) {
                        cOff = sz + jDiv2 * w + (i >> 1) * 2;
                        Cb = yuv[cOff];
                        if (Cb < 0) Cb += 127;
                        else Cb -= 128;
                        Cr = yuv[cOff + 1];
                        if (Cr < 0) Cr += 127;
                        else Cr -= 128;
                    }

                    //ITU-R BT.601 conversion
                    //
                    //R = 1.164*(Y-16) + 2.018*(Cr-128);
                    //G = 1.164*(Y-16) - 0.813*(Cb-128) - 0.391*(Cr-128);
                    //B = 1.164*(Y-16) + 1.596*(Cb-128);
                    //
                    Y = Y + (Y >> 3) + (Y >> 5) + (Y >> 7);
                    R = Y + (Cr << 1) + (Cr >> 6);
                    if (R < 0) R = 0;
                    else if (R > 255) R = 255;
                    G = Y - Cb + (Cb >> 3) + (Cb >> 4) - (Cr >> 1) + (Cr >> 3);
                    if (G < 0) G = 0;
                    else if (G > 255) G = 255;
                    B = Y + Cb + (Cb >> 1) + (Cb >> 4) + (Cb >> 5);
                    if (B < 0) B = 0;
                    else if (B > 255) B = 255;
                    // Pack as 0xAARRGGBB with full alpha.
                    rgbData[pixPtr++] = 0xff000000 + (R << 16) + (G << 8) + B;
                }
            }
        }
    }

}

附: SurfaceTexture 的使用,参考 TextureView.java 源码和 VideoDumpView.java 源码(/frameworks/base/media/tests/MediaDump/src/com/android/mediadump/)

  1. 既然用到了 SurfaceTexture,其实可以直接处理得到的纹理,而不需要处理其原始数据
/**
 * Because a SurfaceTexture is used, the raw camera data has already been turned
 * into a texture, so the raw bytes never need to be touched: OpenGL processes the
 * texture produced by the SurfaceTexture directly and draws it onto a GLSurfaceView.
 */
public class HandleTextureByOpenGLCameraActivity extends AppCompatActivity
        implements GLSurfaceView.Renderer {

    private Camera mCamera;
    private GLSurfaceView mGLSurfaceView;
    private SurfaceTexture mSurfaceTexture;  // receives camera frames as an external OES texture
    private DirectDrawer mDrawer;            // draws the external texture as a full-screen quad

    @Override
    protected void onCreate(@Nullable Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        initGLSurfaceView();
        setContentView(mGLSurfaceView);
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        // NOTE(review): mCamera is opened in onSurfaceCreated(); if the GL surface was
        // never created this will NPE — verify the activity lifecycle in real use.
        mCamera.stopPreview();
        mCamera.release();
    }

    /** GL thread: creates the OES texture, wires the camera to it, starts preview. */
    @Override
    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
        int textureId = createTexture();
        mDrawer = new DirectDrawer(textureId);
        mSurfaceTexture = new SurfaceTexture(textureId);
        // Redraw only when the camera produces a new frame (RENDERMODE_WHEN_DIRTY).
        mSurfaceTexture.setOnFrameAvailableListener(new SurfaceTexture.OnFrameAvailableListener() {
            @Override
            public void onFrameAvailable(SurfaceTexture surfaceTexture) {
                mGLSurfaceView.requestRender();
            }
        });

        mCamera = Camera.open();
        try {
            mCamera.setPreviewTexture(mSurfaceTexture);
            mCamera.startPreview();
        } catch (IOException ioe) {
            // Something bad happened
        }
    }

    @Override
    public void onSurfaceChanged(GL10 gl, int width, int height) {

    }

    /** GL thread: latches the newest camera frame into the OES texture, then draws it. */
    @Override
    public void onDrawFrame(GL10 gl) {
        mSurfaceTexture.updateTexImage();
        mDrawer.draw();
    }

    /** Builds the GLSurfaceView: GLES 2.0 context, RGBA8888, render-on-demand. */
    private void initGLSurfaceView() {
        mGLSurfaceView = new GLSurfaceView(this);
        mGLSurfaceView.setEGLContextClientVersion(2);
        mGLSurfaceView.setEGLConfigChooser(8, 8, 8, 8, 16, 0);
        mGLSurfaceView.getHolder().setFormat(PixelFormat.RGBA_8888);
        mGLSurfaceView.setRenderer(this);
        mGLSurfaceView.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
        mGLSurfaceView.requestRender();
    }

    /** Creates and configures an external OES texture for the SurfaceTexture to fill. */
    private int createTexture() {
        int[] texture = new int[1];
        GLES20.glGenTextures(1, texture, 0);
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, texture[0]);
        GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR);
        GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                GL10.GL_TEXTURE_WRAP_S, GL10.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                GL10.GL_TEXTURE_WRAP_T, GL10.GL_CLAMP_TO_EDGE);
        return texture[0];
    }

    /**
     * Based on VideoDumpView.java; note that this variant samples an EXTERNAL
     * texture (samplerExternalOES), which is what SurfaceTexture produces.
     */
    private static class DirectDrawer {
        private final String vertexShaderCode =
                "attribute vec4 vPosition;" +
                        "attribute vec2 inputTextureCoordinate;" +
                        "varying vec2 textureCoordinate;" +
                        "void main()" +
                        "{" +
                        "  gl_Position = vPosition;" +
                        "  textureCoordinate = inputTextureCoordinate;" +
                        "}";

        private final String fragmentShaderCode =
                "#extension GL_OES_EGL_image_external : require\n" +
                        "precision mediump float;" +
                        "varying vec2 textureCoordinate;\n" +
                        "uniform samplerExternalOES s_texture;\n" +
                        "void main() {" +
                        "  gl_FragColor = texture2D( s_texture, textureCoordinate );\n" +
                        "}";

        private FloatBuffer vertexBuffer;           // quad vertex positions
        private FloatBuffer textureVerticesBuffer;  // quad texture coordinates
        private ShortBuffer drawListBuffer;         // triangle index order
        private final int mProgram;
        private int mPositionHandle;
        private int mTextureCoordHandle;

        private short drawOrder[] = {0, 1, 2, 0, 2, 3};

        private static final int COORDS_PER_VERTEX = 2;

        private final int vertexStride = COORDS_PER_VERTEX * 4; // 4 bytes per vertex

        // Full-screen quad in normalized device coordinates.
        static float squareCoords[] = {
                -1.0f, 1.0f,
                -1.0f, -1.0f,
                1.0f, -1.0f,
                1.0f, 1.0f,
        };

        // Texture coordinates (flipped vertically relative to the quad).
        static float textureVertices[] = {
                0.0f, 1.0f,
                1.0f, 1.0f,
                1.0f, 0.0f,
                0.0f, 0.0f,
        };

        private int texture;  // external OES texture name, supplied by the caller

        /** Must run on the GL thread: fills buffers, compiles shaders, links the program. */
        public DirectDrawer(int texture) {
            this.texture = texture;
            // Vertex coordinates
            ByteBuffer bb = ByteBuffer.allocateDirect(squareCoords.length * 4);
            bb.order(ByteOrder.nativeOrder());
            vertexBuffer = bb.asFloatBuffer();
            vertexBuffer.put(squareCoords);
            vertexBuffer.position(0);
            // Vertex draw order
            ByteBuffer dlb = ByteBuffer.allocateDirect(drawOrder.length * 2);
            dlb.order(ByteOrder.nativeOrder());
            drawListBuffer = dlb.asShortBuffer();
            drawListBuffer.put(drawOrder);
            drawListBuffer.position(0);
            // Texture coordinates
            ByteBuffer bb2 = ByteBuffer.allocateDirect(textureVertices.length * 4);
            bb2.order(ByteOrder.nativeOrder());
            textureVerticesBuffer = bb2.asFloatBuffer();
            textureVerticesBuffer.put(textureVertices);
            textureVerticesBuffer.position(0);
            // Compile shaders and link the program
            int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode);
            int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode);
            mProgram = GLES20.glCreateProgram();
            GLES20.glAttachShader(mProgram, vertexShader);
            GLES20.glAttachShader(mProgram, fragmentShader);
            GLES20.glLinkProgram(mProgram);
        }

        /** Draws the full-screen quad sampling the external OES texture. */
        public void draw() {
            GLES20.glUseProgram(mProgram);
            // Bind the texture
            GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
            GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, texture);
            // Vertex positions
            mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
            GLES20.glEnableVertexAttribArray(mPositionHandle);
            GLES20.glVertexAttribPointer(mPositionHandle, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, vertexStride, vertexBuffer);
            // Texture coordinates
            mTextureCoordHandle = GLES20.glGetAttribLocation(mProgram, "inputTextureCoordinate");
            GLES20.glEnableVertexAttribArray(mTextureCoordHandle);
            GLES20.glVertexAttribPointer(mTextureCoordHandle, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, vertexStride, textureVerticesBuffer);
            // Draw
            GLES20.glDrawElements(GLES20.GL_TRIANGLES, drawOrder.length, GLES20.GL_UNSIGNED_SHORT, drawListBuffer);
            // Clean up attribute state
            GLES20.glDisableVertexAttribArray(mPositionHandle);
            GLES20.glDisableVertexAttribArray(mTextureCoordHandle);
        }

        // Compile a shader. NOTE(review): compile status is never checked;
        // a shader error would only surface later as a blank render.
        private int loadShader(int type, String shaderCode) {
            int shader = GLES20.glCreateShader(type);
            GLES20.glShaderSource(shader, shaderCode);
            GLES20.glCompileShader(shader);
            return shader;
        }
    }

}

相关文章

网友评论

      本文标题:Android SurfaceView 和 TextureView

      本文链接:https://www.haomeiwen.com/subject/hccnwltx.html