Author: PC8067 | Published 2021-03-12 20:54

    AndroidCamera
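
AndroidCamera wraps the legacy android.hardware.Camera API. It opens the camera on a dedicated HandlerThread, picks the closest supported preview size and fps range (using WebRTC-style CaptureFormat helpers), registers NV21 callback buffers, and delivers each frame, together with its rotation and facing, through the PreviewCallback interface.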

    package com.example.glview;
    
    import android.content.Context;
    import android.graphics.ImageFormat;
    import android.graphics.SurfaceTexture;
    import android.hardware.Camera;
    import android.os.Build;
    import android.os.Handler;
    import android.os.HandlerThread;
    import android.util.Log;
    import android.view.Surface;
    import android.view.WindowManager;
    
    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.Comparator;
    import java.util.List;
    
    import static java.lang.Math.abs;
    
    public class AndroidCamera {
    
        private static final int MAGIC_TEXTURE_ID = 10;
    
        private final static String TAG = "AndroidCamera";
    
        private HandlerThread mHandlerThread;
        private Handler mHandler;
        private Camera mCameraDevice;
    
        private static final int NUM_CAPTURE_BUFFERS = 4;
        private int cameraID = 0;
        private int fps = 30;
        private int width = 1280;
        private int height = 720;
        private int mRotation = 0;
    
        private boolean mIsOpenCamera = false;
    
        private SurfaceTexture mSurfaceTexture;
    
        private PreviewCallback mPreviewCallback;
    
        private Context mContext;
        private boolean mIsFront = false;
    
        public interface PreviewCallback {
            void onPreviewFrame(byte[] data, int width, int height, int rotation, boolean front, Camera camera);
        }
    
        public AndroidCamera(Context context, PreviewCallback callback) {
            this.mPreviewCallback = callback;
            this.mContext = context;
        }
    
        private void handleOpenCamera() {
            mIsOpenCamera = false;
            Log.e(TAG, "handleOpenCamera Camera.open = " + cameraID + ", threadid=" + Thread.currentThread());
            mCameraDevice = Camera.open(cameraID);
            Camera.Parameters parameters;
            try {
                parameters = mCameraDevice.getParameters();
            } catch (Exception e) {
                Log.e(TAG, "getParameters exception = " + e);
                return;
            }
            parameters.setPreviewFormat(ImageFormat.NV21);
    
            CaptureFormat captureFormat = findClosestCaptureFormat(parameters, width, height, fps);
            Log.e(TAG, "captureFormat = " + captureFormat);
            final Size pictureSize = findClosestPictureSize(parameters, width, height);
    
            int[] range = adaptPreviewFps(fps, parameters.getSupportedPreviewFpsRange());
            captureFormat.framerate.min = range[0];
            captureFormat.framerate.max = range[1];
            Log.e(TAG, "captureFormat = " + captureFormat);
            updateCameraParameters(mCameraDevice, parameters, captureFormat, pictureSize, false);
    
            width = captureFormat.width;
            height = captureFormat.height;
    //                setOrientation(cameraID, false, mCameraDevice);
            setCameraDisplayOrientation(mContext, cameraID, mCameraDevice);
    
            mIsOpenCamera = true;
            Log.e(TAG, "handleOpenCamera END mIsOpenCamera = " + mIsOpenCamera);
        }
    
        public synchronized void openCamera() {
            if (mIsOpenCamera) return;
            mHandlerThread = new HandlerThread("AndroidCamera");
            mHandlerThread.start();
            mHandler = new Handler(mHandlerThread.getLooper());
            mHandler.post(new Runnable() {
                @Override
                public void run() {
                    handleOpenCamera();
                }
            });
        }
    
        private void handleStartCamera(SurfaceTexture surfaceTexture) {
            Log.e(TAG, "handleStartCamera = " + mCameraDevice + ", mIsOpenCamera=" + mIsOpenCamera);
            if (mCameraDevice == null || !mIsOpenCamera) return;
    
            int previewFormat = mCameraDevice.getParameters().getPreviewFormat();
            int bitsPerPixel = ImageFormat.getBitsPerPixel(previewFormat);
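            // NV21 uses 12 bits per pixel, so this works out to width * height * 3 / 2 bytes per frame.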
            int bufferSize = (width * height * bitsPerPixel) / 8;
            Log.e(TAG, "startCamera width= " + width + ", height=" + height + ", bufferSize=" + bufferSize);
            for (int i = 0; i < NUM_CAPTURE_BUFFERS; i++) {
                mCameraDevice.addCallbackBuffer(new byte[bufferSize]);
            }
            Log.e(TAG, "startCamera setPreviewCallbackWithBuffer");
            mCameraDevice.setPreviewCallbackWithBuffer(new Camera.PreviewCallback() {
                @Override
                public void onPreviewFrame(byte[] data, Camera camera) {
                    if (data == null || data.length <= 0) return;
    //                Log.e(TAG, "onPreviewFrame : " + Thread.currentThread());
                    mRotation = getFrameOrientation(mContext, cameraID);
                    if (mPreviewCallback != null) {
                        mPreviewCallback.onPreviewFrame(data, width, height, mRotation, mIsFront, camera);
                    }
                    if(mCameraDevice != null){
                        mCameraDevice.addCallbackBuffer(data);
                    }
                    long t1 = System.currentTimeMillis();
                    if (time == 0) {
                        time = t1;
                    }
    //                Log.e(TAG, "data.length=" + data.length + ", time=" + (t1 - time));
                    time = t1;
                }
            });
            try {
                if (surfaceTexture != null) {
                    mCameraDevice.setPreviewTexture(surfaceTexture);
                } else {
                    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
                        mSurfaceTexture = new SurfaceTexture(MAGIC_TEXTURE_ID);
                        mCameraDevice.setPreviewTexture(mSurfaceTexture);
                    } else {
                        mCameraDevice.setPreviewDisplay(null);
                    }
                }
            } catch (IOException e) {
                Log.e(TAG, "e = " + e.toString());
                e.printStackTrace();
            }
            mCameraDevice.startPreview();
            Log.e(TAG, "handleStartCamera END");
        }
    
        private long time = 0;
        public synchronized void startCamera(final SurfaceTexture surfaceTexture) {
            mHandler.post(new Runnable() {
                @Override
                public void run() {
                    handleStartCamera(surfaceTexture);
                }
            });
        }
    
        private void handleStopCamera() {
            Log.e(TAG, "handleStopCamera = " + mCameraDevice + ", mIsOpenCamera=" + mIsOpenCamera);
            if (mCameraDevice == null || !mIsOpenCamera) return;
            mCameraDevice.setPreviewCallbackWithBuffer(null);
            mCameraDevice.setPreviewCallback(null);
            mCameraDevice.stopPreview();
            Log.e(TAG, "handleStopCamera END");
        }
    
        public synchronized void stopCamera() {
            mHandler.post(new Runnable() {
                @Override
                public void run() {
                    handleStopCamera();
                }
            });
        }
    
        private void handleReleaseCamera() {
            Log.e(TAG, "handleReleaseCamera = " + mCameraDevice + ", mIsOpenCamera=" + mIsOpenCamera);
            if (mCameraDevice == null || !mIsOpenCamera) return;
            mCameraDevice.release();
            mCameraDevice = null;
            mIsOpenCamera = false;
            Log.e(TAG, "handleReleaseCamera END");
        }
    
        public synchronized void releaseCamera() {
            if (mHandler != null) {
                mHandler.post(new Runnable() {
                    @Override
                    public void run() {
                        handleReleaseCamera();
                    }
                });
            }
    
            if (mHandlerThread != null) {
                mHandlerThread.quitSafely();
                try {
                    mHandlerThread.join();
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }
                mHandlerThread = null;
            }
    
            mHandler = null;
        }
    
        public synchronized void switchCamera() {
            mHandler.post(new Runnable() {
                @Override
                public void run() {
                    Log.e(TAG, "switchCamera = " + mCameraDevice + ", mIsOpenCamera=" + mIsOpenCamera);
                    handleStopCamera();
                    handleReleaseCamera();
                    cameraID = (cameraID == 0) ? 1 : 0;
                    handleOpenCamera();
                    handleStartCamera(mSurfaceTexture);
                    Log.e(TAG, "switchCamera END = " + mCameraDevice + ", mIsOpenCamera=" + mIsOpenCamera);
                }
            });
        }
    
        public void setCameraDisplayOrientation(Context context, int cameraId, android.hardware.Camera camera) {
            android.hardware.Camera.CameraInfo info = new android.hardware.Camera.CameraInfo();
            android.hardware.Camera.getCameraInfo(cameraId, info);
            final WindowManager wm = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
            int rotation = wm.getDefaultDisplay().getRotation();
            int degrees = 0;
            switch (rotation) {
                case Surface.ROTATION_0: degrees = 0; break;
                case Surface.ROTATION_90: degrees = 90; break;
                case Surface.ROTATION_180: degrees = 180; break;
                case Surface.ROTATION_270: degrees = 270; break;
            }
            int result;
            if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
                result = (info.orientation + degrees) % 360;
                result = (360 - result) % 360;  // compensate the mirror
            } else {  // back-facing
                result = (info.orientation - degrees + 360) % 360;
            }
            Log.e("@@@@@@", "setCameraDisplayOrientation result=" + result + ", degrees=" + degrees + ", info.orientation=" + info.orientation);
            camera.setDisplayOrientation(result);
        }
    
        private static void setOrientation(int cameraID, boolean isLandscape, Camera camera) {
            int orientation = getDisplayOrientation(cameraID);
            if (isLandscape) {
                orientation = orientation - 90;
            }
            camera.setDisplayOrientation(orientation);
        }
    
        private static int getDisplayOrientation(int cameraId) {
            Camera.CameraInfo info = new Camera.CameraInfo();
            Camera.getCameraInfo(cameraId, info);
            int result;
            if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
                result = (info.orientation) % 360;
                result = (360 - result) % 360;  // compensate the mirror
            } else {  // back-facing
                result = (info.orientation + 360) % 360;
            }
            Log.e("@@@@@@", "setCameraDisplayOrientation result=" + result + ", info.orientation=" + info.orientation);
            return result;
        }
    
        private static int[] adaptPreviewFps(int expectedFps, List<int[]> fpsRanges) {
            expectedFps *= 1000;
            int[] closestRange = fpsRanges.get(0);
            int measure = Math.abs(closestRange[0] - expectedFps) + Math.abs(closestRange[1] - expectedFps);
            for (int[] range : fpsRanges) {
                if (range[0] <= expectedFps && range[1] >= expectedFps) {
                    int curMeasure = Math.abs(range[0] - expectedFps) + Math.abs(range[1] - expectedFps);
                    if (curMeasure < measure) {
                        closestRange = range;
                        measure = curMeasure;
                    }
                }
            }
            return closestRange;
        }
    
        public static Camera.Size getOptimalPreviewSize(Camera camera, int width, int height) {
            Camera.Size optimalSize = null;
            try {
                double minHeightDiff = Double.MAX_VALUE;
                double minWidthDiff = Double.MAX_VALUE;
                List<Camera.Size> sizes = camera.getParameters().getSupportedPreviewSizes();
                if (sizes == null) return null;
                // Find the smallest width difference among the supported sizes
                for (Camera.Size size : sizes) {
                    if (Math.abs(size.width - width) < minWidthDiff) {
                        minWidthDiff = Math.abs(size.width - width);
                    }
                }
                // Among sizes with the smallest width difference, find the one with the smallest height difference
                for (Camera.Size size : sizes) {
                    if (Math.abs(size.width - width) == minWidthDiff) {
                        if (Math.abs(size.height - height) < minHeightDiff) {
                            optimalSize = size;
                            minHeightDiff = Math.abs(size.height - height);
                        }
                    }
                }
            } catch (Exception e) {
                Log.e(TAG, e.toString());
            }
            return optimalSize;
        }
    
        private static void updateCameraParameters(android.hardware.Camera camera,
                                                   android.hardware.Camera.Parameters parameters, CaptureFormat captureFormat, Size pictureSize,
                                                   boolean captureToTexture) {
            final List<String> focusModes = parameters.getSupportedFocusModes();
    
            parameters.setPreviewFpsRange(captureFormat.framerate.min, captureFormat.framerate.max);
            parameters.setPreviewSize(captureFormat.width, captureFormat.height);
            parameters.setPictureSize(pictureSize.width, pictureSize.height);
            if (!captureToTexture) {
                parameters.setPreviewFormat(captureFormat.imageFormat);
            }
    
            if (parameters.isVideoStabilizationSupported()) {
                parameters.setVideoStabilization(true);
            }
            if (focusModes.contains(android.hardware.Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
                parameters.setFocusMode(android.hardware.Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
            }
            camera.setParameters(parameters);
        }
    
        private static Size findClosestPictureSize(
                android.hardware.Camera.Parameters parameters, int width, int height) {
            return getClosestSupportedSize(convertSizes(parameters.getSupportedPictureSizes()), width, height);
        }
    
        private static CaptureFormat findClosestCaptureFormat(
                android.hardware.Camera.Parameters parameters, int width, int height, int framerate) {
            // Find closest supported format for |width| x |height| @ |framerate|.
            final List<CaptureFormat.FramerateRange> supportedFramerates = convertFramerates(parameters.getSupportedPreviewFpsRange());
            Log.e(TAG, "Available fps ranges: " + supportedFramerates);
    
            final CaptureFormat.FramerateRange fpsRange = getClosestSupportedFramerateRange(supportedFramerates, framerate);
    
            final Size previewSize = getClosestSupportedSize(convertSizes(parameters.getSupportedPreviewSizes()), width, height);
            Log.e(TAG, "Available previewSize: " + previewSize);
            return new CaptureFormat(previewSize.width, previewSize.height, fpsRange);
        }
    
        static List<CaptureFormat.FramerateRange> convertFramerates(List<int[]> arrayRanges) {
            final List<CaptureFormat.FramerateRange> ranges = new ArrayList<CaptureFormat.FramerateRange>();
            for (int[] range : arrayRanges) {
                ranges.add(new CaptureFormat.FramerateRange(
                        range[android.hardware.Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
                        range[android.hardware.Camera.Parameters.PREVIEW_FPS_MAX_INDEX]));
            }
            return ranges;
        }
    
        private static abstract class ClosestComparator<T> implements Comparator<T> {
            // Difference between supported and requested parameter.
            abstract int diff(T supportedParameter);
    
            @Override
            public int compare(T t1, T t2) {
                return diff(t1) - diff(t2);
            }
        }
    
        // Prefer a fps range with an upper bound close to |framerate|. Also prefer a fps range with a low
        // lower bound, to allow the framerate to fluctuate based on lighting conditions.
        public static CaptureFormat.FramerateRange getClosestSupportedFramerateRange(
                List<CaptureFormat.FramerateRange> supportedFramerates, final int requestedFps) {
            return Collections.min(
                    supportedFramerates, new ClosestComparator<CaptureFormat.FramerateRange>() {
                        // Progressive penalty if the upper bound is further away than |MAX_FPS_DIFF_THRESHOLD|
                        // from requested.
                        private static final int MAX_FPS_DIFF_THRESHOLD = 5000;
                        private static final int MAX_FPS_LOW_DIFF_WEIGHT = 1;
                        private static final int MAX_FPS_HIGH_DIFF_WEIGHT = 3;
    
                        // Progressive penalty if the lower bound is bigger than |MIN_FPS_THRESHOLD|.
                        private static final int MIN_FPS_THRESHOLD = 8000;
                        private static final int MIN_FPS_LOW_VALUE_WEIGHT = 1;
                        private static final int MIN_FPS_HIGH_VALUE_WEIGHT = 4;
    
                        // Use one weight for small |value| less than |threshold|, and another weight above.
                        private int progressivePenalty(int value, int threshold, int lowWeight, int highWeight) {
                            return (value < threshold) ? value * lowWeight
                                    : threshold * lowWeight + (value - threshold) * highWeight;
                        }
    
                        @Override
                        int diff(CaptureFormat.FramerateRange range) {
                            final int minFpsError = progressivePenalty(
                                    range.min, MIN_FPS_THRESHOLD, MIN_FPS_LOW_VALUE_WEIGHT, MIN_FPS_HIGH_VALUE_WEIGHT);
                            final int maxFpsError = progressivePenalty(Math.abs(requestedFps * 1000 - range.max),
                                    MAX_FPS_DIFF_THRESHOLD, MAX_FPS_LOW_DIFF_WEIGHT, MAX_FPS_HIGH_DIFF_WEIGHT);
                            return minFpsError + maxFpsError;
                        }
                    });
        }
    
        // Convert from android.hardware.Camera.Size to Size.
        static List<Size> convertSizes(List<android.hardware.Camera.Size> cameraSizes) {
            final List<Size> sizes = new ArrayList<Size>();
            for (android.hardware.Camera.Size size : cameraSizes) {
                sizes.add(new Size(size.width, size.height));
            }
            return sizes;
        }
    
        public static Size getClosestSupportedSize(
                List<Size> supportedSizes, final int requestedWidth, final int requestedHeight) {
            return Collections.min(supportedSizes, new ClosestComparator<Size>() {
                @Override
                int diff(Size size) {
                    return abs(requestedWidth - size.width) + abs(requestedHeight - size.height);
                }
            });
        }
    
        public static class CaptureFormat {
            // Class to represent a framerate range. The framerate varies because of lighting conditions.
            // The values are multiplied by 1000, so 1000 represents one frame per second.
            public static class FramerateRange {
                public int min;
                public int max;
    
                public FramerateRange(int min, int max) {
                    this.min = min;
                    this.max = max;
                }
    
                @Override
                public String toString() {
                    return "[" + (min / 1000.0f) + ":" + (max / 1000.0f) + "]";
                }
    
                @Override
                public boolean equals(Object other) {
                    if (!(other instanceof FramerateRange)) {
                        return false;
                    }
                    final FramerateRange otherFramerate = (FramerateRange) other;
                    return min == otherFramerate.min && max == otherFramerate.max;
                }
    
                @Override
                public int hashCode() {
                    // Use prime close to 2^16 to avoid collisions for normal values less than 2^16.
                    return 1 + 65537 * min + max;
                }
            }
    
            public final int width;
            public final int height;
            public final FramerateRange framerate;
    
            // TODO(hbos): If VideoCapturer.startCapture is updated to support other image formats then this
            // needs to be updated and VideoCapturer.getSupportedFormats need to return CaptureFormats of
            // all imageFormats.
            public final int imageFormat = ImageFormat.NV21;
    
            public CaptureFormat(int width, int height, int minFramerate, int maxFramerate) {
                this.width = width;
                this.height = height;
                this.framerate = new FramerateRange(minFramerate, maxFramerate);
            }
    
            public CaptureFormat(int width, int height, FramerateRange framerate) {
                this.width = width;
                this.height = height;
                this.framerate = framerate;
            }
    
            // Calculates the frame size of this capture format.
            public int frameSize() {
                return frameSize(width, height, imageFormat);
            }
    
            // Calculates the frame size of the specified image format. Currently only
            // supporting ImageFormat.NV21.
            // The size is width * height * number of bytes per pixel.
            // http://developer.android.com/reference/android/hardware/Camera.html#addCallbackBuffer(byte[])
            public static int frameSize(int width, int height, int imageFormat) {
                if (imageFormat != ImageFormat.NV21) {
                    throw new UnsupportedOperationException("Don't know how to calculate "
                            + "the frame size of non-NV21 image formats.");
                }
                return (width * height * ImageFormat.getBitsPerPixel(imageFormat)) / 8;
            }
    
            @Override
            public String toString() {
                return width + "x" + height + "@" + framerate;
            }
    
            @Override
            public boolean equals(Object other) {
                if (!(other instanceof CaptureFormat)) {
                    return false;
                }
                final CaptureFormat otherFormat = (CaptureFormat) other;
                return width == otherFormat.width && height == otherFormat.height
                        && framerate.equals(otherFormat.framerate);
            }
    
            @Override
            public int hashCode() {
                return 1 + (width * 65497 + height) * 251 + framerate.hashCode();
            }
        }
    
        private int getFrameOrientation(Context context, int cameraID) {
            if (context == null) return -1;
            if (cameraID < 0) return -1;
            Camera.CameraInfo info = new Camera.CameraInfo();
            Camera.getCameraInfo(cameraID, info);
    
            WindowManager wm = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
            int rotation = wm.getDefaultDisplay().getRotation();
            int degrees = 0;
            switch (rotation) {
                case Surface.ROTATION_0: degrees = 0; break;
                case Surface.ROTATION_90: degrees = 90; break;
                case Surface.ROTATION_180: degrees = 180; break;
                case Surface.ROTATION_270: degrees = 270; break;
            }
            int result;
            if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
                result = (info.orientation + degrees) % 360;
                result = (360 - result) % 360;  // compensate the mirror
                mIsFront = true;
            } else {  // back-facing
                result = (info.orientation - degrees + 360) % 360;
                mIsFront = false;
            }
            Log.e("@@@@@@", "setCameraDisplayOrientation result=" + result + ", degrees=" + degrees + ", info.orientation=" + info.orientation);
            return result;
        }
    }
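
To see how the framerate selection behaves, here is a small illustrative sketch. FpsRangeDemo is hypothetical (it assumes it sits in the same package as AndroidCamera), and the ranges are made-up values in units of fps * 1000, matching what getSupportedPreviewFpsRange returns:

    import java.util.ArrayList;
    import java.util.List;

    public class FpsRangeDemo {
        public static void main(String[] args) {
            List<AndroidCamera.CaptureFormat.FramerateRange> ranges = new ArrayList<>();
            ranges.add(new AndroidCamera.CaptureFormat.FramerateRange(15000, 30000)); // [15, 30] fps
            ranges.add(new AndroidCamera.CaptureFormat.FramerateRange(24000, 30000)); // [24, 30] fps

            AndroidCamera.CaptureFormat.FramerateRange best =
                    AndroidCamera.getClosestSupportedFramerateRange(ranges, 30);

            // Both ranges hit the requested 30 fps upper bound exactly (maxFpsError = 0), but the
            // lower bound is penalized progressively above MIN_FPS_THRESHOLD (8000):
            //   [15, 30]: 8000 * 1 + (15000 - 8000) * 4 = 36000
            //   [24, 30]: 8000 * 1 + (24000 - 8000) * 4 = 72000
            // The wider range wins, leaving room for the fps to drop in low light.
            System.out.println(best); // prints [15.0:30.0]
        }
    }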
    

    DYGLView
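
DYGLView is a thin GLSurfaceView subclass: it requests an OpenGL ES 2.0 context, installs a DYGLViewRenderer, and draws only on demand (RENDERMODE_WHEN_DIRTY). The AttributeSet constructor lets it be inflated from layout XML; updateYUV hands a fresh NV21 frame to the renderer and schedules a redraw.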

    package com.example.glview;
    
    import android.content.Context;
    import android.opengl.GLSurfaceView;
    import android.util.AttributeSet;
    
    public class DYGLView extends GLSurfaceView {
    
        private DYGLViewRenderer mGLViewRenderer;
    
        public DYGLView(Context context) {
            super(context);
            initRenderer(context);
        }
    
        public DYGLView(Context context, AttributeSet attrs) {
            super(context, attrs);
            initRenderer(context);
        }
    
        private void initRenderer(Context context) {
            setEGLContextClientVersion(2);
            mGLViewRenderer = new DYGLViewRenderer(context);
            setRenderer(mGLViewRenderer);
            setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
        }
    
        public void updateYUV(byte[] nv21, int width, int height, int rotation, int orientation) {
            mGLViewRenderer.updateYUV(nv21, width, height, rotation, orientation);
            requestRender();
        }
    
    }
    
    

    DYGLViewRenderer
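
DYGLViewRenderer uploads each NV21 frame as two textures: a full-resolution LUMINANCE texture for the Y plane and a half-resolution LUMINANCE_ALPHA texture for the interleaved VU plane. The fragment shader converts YUV to RGB with BT.601-style coefficients, R = Y + 1.13983V, G = Y - 0.39465U - 0.58060V, B = Y + 2.03211U (U and V centered at 0.5). The vertex shader applies a rotation matrix built from the frame rotation, and fragmentShader1, the variant actually compiled, mirrors the x coordinate when the orientation is 0 or 180.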

    package com.example.glview;
    
    import android.content.Context;
    import android.opengl.GLES20;
    import android.opengl.GLSurfaceView;
    import android.opengl.Matrix;
    import android.util.Log;
    
    import com.example.glview.gles.GLESUtils;
    
    import java.nio.ByteBuffer;
    import java.nio.ByteOrder;
    import java.nio.FloatBuffer;
    
    import javax.microedition.khronos.egl.EGLConfig;
    import javax.microedition.khronos.opengles.GL10;
    
    public class DYGLViewRenderer implements GLSurfaceView.Renderer {
    
        private Context context;
    
        //Our vertex shader code; nothing special
        private String vertexShader =
                "attribute vec4 a_position;                         \n" +
                "uniform mat4 textureTransform;                     \n" +
                "attribute vec2 a_texCoord;                         \n" +
                "varying vec2 v_texCoord;                           \n" +
    
                "void main(){                                       \n" +
                "   gl_Position = textureTransform * a_position;     \n" +
                "   v_texCoord = a_texCoord;\n" +
                "}                                                  \n";
    
        //Our fragment shader code; takes Y,U,V values for each pixel and calculates R,G,B colors,
        //effectively performing the YUV to RGB conversion
        private String fragmentShader = "precision highp float;                           \n" +
                "varying vec2 v_texCoord;                         \n" +
                "uniform sampler2D y_texture;                     \n" +
                "uniform sampler2D uv_texture;                    \n" +
                "uniform float rotation;                          \n" +
                "void main (void){                                \n" +
                "   float r, g, b, y, u, v;                       \n" +
                "   float m, n;\n" +
                "   if(90.0 == rotation) {\n" +
                "       m = v_texCoord.x;\n" +
                "       n = v_texCoord.y;\n" +
                "   }else if (180.0 == rotation) {\n" +
                "       m = v_texCoord.x;\n" +
                "       n = v_texCoord.y;\n" +
                "   }else if (270.0 == rotation) {\n" +
                "       m = 1.0 - v_texCoord.x;\n" +
                "       n = v_texCoord.y;\n" +
                "   }else {\n" +
                "       m = v_texCoord.x;\n" +
                "       n = v_texCoord.y;\n" +
                "   }\n" +
                "   y = texture2D(y_texture, vec2(m, n)).r;       \n" +
                "   u = texture2D(uv_texture, vec2(m, n)).a - 0.5;\n" +
                "   v = texture2D(uv_texture, vec2(m, n)).r - 0.5;\n" +
                "   r = y + 1.13983*v;                            \n" +
                "   g = y - 0.39465*u - 0.58060*v;                \n" +
                "   b = y + 2.03211*u;                            \n" +
                "   gl_FragColor = vec4(r, g, b, 1.0);            \n" +
                "}  ";
    
        private String fragmentShader1 = "precision highp float;                           \n" +
                "varying vec2 v_texCoord;                         \n" +
                "uniform sampler2D y_texture;                     \n" +
                "uniform sampler2D uv_texture;                    \n" +
                "uniform float orientation;                          \n" +
                "void main (void){                                \n" +
                "   float r, g, b, y, u, v;                       \n" +
                "   float m = v_texCoord.x;\n" +
                "   float n = v_texCoord.y;\n" +
                "   if(orientation == 0.0 || orientation == 180.0) {" +
                "       m = 1.0 - v_texCoord.x;\n" +
                "       n = v_texCoord.y;\n" +
                "   }\n" +
                "   y = texture2D(y_texture, vec2(m, n)).r;       \n" +
                "   u = texture2D(uv_texture, vec2(m, n)).a - 0.5;\n" +
                "   v = texture2D(uv_texture, vec2(m, n)).r - 0.5;\n" +
                "   r = y + 1.13983*v;                            \n" +
                "   g = y - 0.39465*u - 0.58060*v;                \n" +
                "   b = y + 2.03211*u;                            \n" +
                "   gl_FragColor = vec4(r, g, b, 1.0);            \n" +
                "}  ";
    
        private int mProgramId = -1;
        private int mAttribPosition = -1;
        private int mAttribTexCoord = -1;
        private int mUniformTextureY = -1;
        private int mUniformTextureUV = -1;
        private int mUniformOrientation = -1;
    
        private FloatBuffer mVertexBuffer;
        private FloatBuffer mTextureBuffer;
        private int[] mInputTextures;
    
        private int mSurfaceViewWidth = 0;
        private int mSurfaceViewHeight = 0;
    
        protected int mGLUnifTransformHandle = -1;
    
        private ByteBuffer mYBuffer;
        private ByteBuffer mUVBuffer;
        private volatile int mNV21Width;
        private volatile int mNV21Height;
        private volatile int mNV21Rotation;
        private volatile int mScreenOrientation;
    
        private float[] mMatrix = GLESUtils.createIdentityMtx();
    
        public DYGLViewRenderer(Context context) {
            this.context = context;
        }
    
        @Override
        public void onSurfaceCreated(GL10 gl, EGLConfig config) {
            mProgramId = createProgram(vertexShader, fragmentShader1);
    
            mGLUnifTransformHandle = GLES20.glGetUniformLocation(mProgramId, "textureTransform");
            mAttribPosition = GLES20.glGetAttribLocation(mProgramId, "a_position");
            mAttribTexCoord = GLES20.glGetAttribLocation(mProgramId, "a_texCoord");
    
            mUniformTextureY = GLES20.glGetUniformLocation(mProgramId, "y_texture");
            mUniformTextureUV = GLES20.glGetUniformLocation(mProgramId, "uv_texture");
            mUniformOrientation = GLES20.glGetUniformLocation(mProgramId, "orientation");
    
            if (mVertexBuffer == null) {
                float[] VERTEX = {
                        -1.0f, -1.0f,
                        1.0f, -1.0f,
                        -1.0f, 1.0f,
                        1.0f, 1.0f,
                };
                mVertexBuffer = ByteBuffer.allocateDirect(VERTEX.length * 4).order(ByteOrder.nativeOrder()).asFloatBuffer();
                mVertexBuffer.put(VERTEX).position(0);
            }
    
            if (mTextureBuffer == null) {
    //            float[] TEXTURE = {
    //                    1.0f, 1.0f,
    //                    1.0f, 0.0f,
    //                    0.0f, 1.0f,
    //                    0.0f, 0.0f
    //            };
                float[] TEXTURE = {
                        0.0f, 1.0f,
                        1.0f, 1.0f,
                        0.0f, 0.0f,
                        1.0f, 0.0f
                };
                mTextureBuffer = ByteBuffer.allocateDirect(TEXTURE.length * 4).order(ByteOrder.nativeOrder()).asFloatBuffer();
                mTextureBuffer.put(TEXTURE).position(0);
            }
    
            if (mInputTextures == null) {
                mInputTextures = new int[2];
                GLES20.glGenTextures(2, mInputTextures, 0);
            }
        }
    
        @Override
        public void onSurfaceChanged(GL10 gl, int width, int height) {
            mSurfaceViewWidth = width;
            mSurfaceViewHeight = height;
        }
    
        private long startTime = -1;
        @Override
        public void onDrawFrame(GL10 gl) {
            // Clear the screen; it can be cleared to any specified color
            GLES20.glClearColor(0, 0, 0, 0);
            GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
    
            if (mYBuffer == null || mUVBuffer == null || mNV21Width == 0 || mNV21Height == 0) return;
    
            long t1 = System.currentTimeMillis();
            if (startTime == -1) {
                startTime = t1;
            }
            Log.e("#####", "time = " + (t1 - startTime));
            startTime = t1;
    
            GLES20.glViewport(0, 0, mSurfaceViewWidth, mSurfaceViewHeight);
    
            GLES20.glUseProgram(mProgramId);
    
            mVertexBuffer.position(0);
            GLES20.glVertexAttribPointer(mAttribPosition, 2, GLES20.GL_FLOAT, false, 0, mVertexBuffer);
            GLES20.glEnableVertexAttribArray(mAttribPosition);
    
            mTextureBuffer.position(0);
            GLES20.glVertexAttribPointer(mAttribTexCoord, 2, GLES20.GL_FLOAT, false, 0, mTextureBuffer);
            GLES20.glEnableVertexAttribArray(mAttribTexCoord);
    
            synchronized (this) {
    
                int width = mNV21Width;
                int height = mNV21Height;
                // Y plane: full resolution, one byte per pixel
                GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
                GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mInputTextures[0]);
                GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, width, height, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, mYBuffer);
                GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
                GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
                GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
                GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
                mYBuffer.clear();
                GLES20.glUniform1i(mUniformTextureY, 0);
    
                // Interleaved VU plane (NV21), uploaded as LUMINANCE_ALPHA at half resolution: V maps to .r, U to .a
                GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
                GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mInputTextures[1]);
                GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE_ALPHA, width / 2, height / 2, 0, GLES20.GL_LUMINANCE_ALPHA, GLES20.GL_UNSIGNED_BYTE, mUVBuffer);
                GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
                GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
                GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
                GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
                mUVBuffer.clear();
                Log.e("#####", "width=" + width + ", height=" + height + ", mNV21Rotation=" + mNV21Rotation + ", mScreenOrientation=" + mScreenOrientation);
                GLES20.glUniform1i(mUniformTextureUV, 1);
    
                GLES20.glUniform1f(mUniformOrientation, mScreenOrientation);
            }
    
            if (mGLUnifTransformHandle != -1) {
                Matrix.setIdentityM(mMatrix, 0);
                Matrix.rotateM(mMatrix, 0, -mNV21Rotation, 0.0f, 0.0f, 1.0f);
                printfMat(mMatrix);
                GLES20.glUniformMatrix4fv(mGLUnifTransformHandle, 1, false, mMatrix, 0);
            }
    
            GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    
            GLES20.glDisableVertexAttribArray(mAttribPosition);
            GLES20.glDisableVertexAttribArray(mAttribTexCoord);
    
            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
        }
    
        private void printfMat(float[] mat) {
            if (mat != null && mat.length > 0) {
                StringBuilder builder = new StringBuilder();
                for (int i = 0; i < mat.length; i++) {
                    builder.append(mat[i]).append(',');
                    if ((i + 1) % 4 == 0) {
                        builder.append("\n");
                    }
                }
                builder.append("\n");
                Log.e("$$$$$$$", "" + builder.toString());
            }
        }
    
        public void updateYUV(byte[] nv21Buffer, int width, int height, int rotation, int orientation) {
            synchronized (this) {
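                // NV21 layout: width*height bytes of Y, then width*height/2 bytes of interleaved V/U.
                // Note: the buffers are reallocated on every frame here; a production version could reuse them.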
                mYBuffer = ByteBuffer.allocate(width * height);
                mYBuffer.put(nv21Buffer, 0, width*height);
                mYBuffer.position(0);
    
                mUVBuffer = ByteBuffer.allocate(width*height/2);
                mUVBuffer.put(nv21Buffer, width*height, width*height/2);
                mUVBuffer.position(0);
    
                mNV21Width = width;
                mNV21Height = height;
                mNV21Rotation = rotation;
                mScreenOrientation = orientation;
            }
        }
    
        // Creates the shader program and returns the program id
        private int createProgram(String vsi, String fsi) {
    
            int vShader = GLES20.glCreateShader(GLES20.GL_VERTEX_SHADER); // create a vertex shader
            GLES20.glShaderSource(vShader, vsi); // load the vertex shader source
            GLES20.glCompileShader(vShader); // compile
    
            int[] status = new int[1];
            GLES20.glGetShaderiv(vShader, GLES20.GL_COMPILE_STATUS, status, 0); // query compile status
            if (status[0] != GLES20.GL_TRUE) { // check whether compilation succeeded
                throw new IllegalStateException("Vertex shader compilation failed: " + GLES20.glGetShaderInfoLog(vShader));
            }
    
            int fShader = GLES20.glCreateShader(GLES20.GL_FRAGMENT_SHADER); // create a fragment shader
            GLES20.glShaderSource(fShader, fsi); // load the fragment shader source
            GLES20.glCompileShader(fShader);
            GLES20.glGetShaderiv(fShader, GLES20.GL_COMPILE_STATUS, status, 0);
            if (status[0] != GLES20.GL_TRUE) {
                throw new IllegalStateException("Fragment shader compilation failed: " + GLES20.glGetShaderInfoLog(fShader));
            }
    
            // Create the program object
            int mProgram = GLES20.glCreateProgram();
            GLES20.glAttachShader(mProgram, vShader); // attach both shaders
            GLES20.glAttachShader(mProgram, fShader);
            GLES20.glLinkProgram(mProgram); // link
            // Query the link status to check for success
            GLES20.glGetProgramiv(mProgram, GLES20.GL_LINK_STATUS, status, 0);
            if (status[0] != GLES20.GL_TRUE) {
                throw new IllegalStateException("link program:" + GLES20.glGetProgramInfoLog(mProgram));
            }
    
            GLES20.glDeleteShader(vShader);
            GLES20.glDeleteShader(fShader);
    
            return mProgram;
    
        }
    }
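
Finally, a minimal wiring sketch showing how the three classes fit together. PreviewActivity is hypothetical (it is not part of the original post), and passing the frame rotation through as the shader's orientation uniform is an assumption that may need adjusting per device:

    package com.example.glview;

    import android.app.Activity;
    import android.hardware.Camera;
    import android.os.Bundle;

    public class PreviewActivity extends Activity {

        private DYGLView mGLView;
        private AndroidCamera mCamera;

        @Override
        protected void onCreate(Bundle savedInstanceState) {
            super.onCreate(savedInstanceState);
            mGLView = new DYGLView(this);
            setContentView(mGLView);

            mCamera = new AndroidCamera(this, new AndroidCamera.PreviewCallback() {
                @Override
                public void onPreviewFrame(byte[] data, int width, int height,
                                           int rotation, boolean front, Camera camera) {
                    // Runs on the camera HandlerThread; updateYUV copies the NV21 bytes
                    // before AndroidCamera recycles the callback buffer, so this is safe.
                    mGLView.updateYUV(data, width, height, rotation, rotation);
                }
            });
            mCamera.openCamera();
            mCamera.startCamera(null); // null falls back to the internal dummy SurfaceTexture
        }

        @Override
        protected void onResume() {
            super.onResume();
            mGLView.onResume();
        }

        @Override
        protected void onPause() {
            mGLView.onPause();
            super.onPause();
        }

        @Override
        protected void onDestroy() {
            mCamera.stopCamera();
            mCamera.releaseCamera();
            super.onDestroy();
        }
    }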
    
    
