美文网首页
Camera中通过startPreview + SurfaceT

Camera中通过startPreview + SurfaceT

作者: jternence | 来源:发表于2016-09-30 17:31 被阅读0次

    今天刚好是2016-09-30,假期前最后一天,上班提不起多大的精神,不过想起马上可以回家好好休息七天,心里还是挺激动的。趁现在有点时间,想把最近整理出来的一个小demo分享出来,主要是通过Camera预览功能采集数据,然后通过opengl进行渲染,最后保存至Mp4文件中。主要用了四个类实现此功能:SurfaceTextureRender、SurfaceTextureWrapper、WindowInputSurface、EncodeManager,下面分别会对这些类进行详细的讲解。
    1、SurfaceTextureRender,主要是自定义渲染render,源码如下:

    /
    *
    * Created by jsp on 2016/9/29.
    */
    
    public classSurfaceTextureRender {
    
    private static finalStringTAG="SurfaceTextureRender";
    
    private static final intFLOAT_SIZE_BYTES=4;
    
    private static final intTRIANGLE_VERTICES_DATA_STRIDE_BYTES=5*FLOAT_SIZE_BYTES;
    
    private static final intTRIANGLE_VERTICES_DATA_POS_OFFSET=0;
    
    private static final intTRIANGLE_VERTICES_DATA_UV_OFFSET=3;
    
    private final float[]mTriangleVerticesData= {
    
    // X, Y, Z, U, V
    
    -1.0f,-1.0f,0,0.f,0.f,
    
    1.0f,-1.0f,0,1.f,0.f,
    
    -1.0f,1.0f,0,0.f,1.f,
    
    1.0f,1.0f,0,1.f,1.f,
    
    };
    
    privateFloatBuffermTriangleVertices;
    
    private static finalStringVERTEX_SHADER=
    
    "uniform mat4 uMVPMatrix;\n"+
    
    "uniform mat4 uSTMatrix;\n"+
    
    "attribute vec4 aPosition;\n"+
    
    "attribute vec4 aTextureCoord;\n"+
    
    "varying vec2 vTextureCoord;\n"+
    
    "void main() {\n"+
    
    "    gl_Position = uMVPMatrix * aPosition;\n"+
    
    "    vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n"+
    
    "}\n";
    
    //fragment shader
    
    private static finalStringFRAGMENT_SHADER=
    
    "#extension GL_OES_EGL_image_external : require\n"+
    
    "precision mediump float;\n"+// highp here doesn't seem to matter
    
    "varying vec2 vTextureCoord;\n"+
    
    "uniform samplerExternalOES sTexture;\n"+
    
    "void main() {\n"+
    
    "    gl_FragColor = texture2D(sTexture, vTextureCoord);\n"+
    
    "}\n";
    
    //交换改变fragment shader
    
    public static finalStringSWAPPED_FRAGMENT_SHADER=
    
    "#extension GL_OES_EGL_image_external : require\n"+
    
    "precision mediump float;\n"+
    
    "varying vec2 vTextureCoord;\n"+
    
    "uniform samplerExternalOES sTexture;\n"+
    
    "void main() {\n"+
    
    "  gl_FragColor = texture2D(sTexture, vTextureCoord).gbra;\n"+
    
    "}\n";
    
    private float[]mMVPMatrix=new float[16];
    
    private float[]mSTMatrix=new float[16];
    
    private intmProgram;
    
    private intmTextureID= -12345;
    
    private intmuMVPMatrixHandle;
    
    private intmuSTMatrixHandle;
    
    private intmaPositionHandle;
    
    private intmaTextureHandle;
    
    publicSurfaceTextureRender() {
    
    mTriangleVertices= ByteBuffer.allocateDirect(
    
    mTriangleVerticesData.length*FLOAT_SIZE_BYTES)
    
    .order(ByteOrder.nativeOrder()).asFloatBuffer();
    
    mTriangleVertices.put(mTriangleVerticesData).position(0);
    
    Matrix.setIdentityM(mSTMatrix,0);
    
    }
    
    /**
    
    *返回纹理id
    
    *@return
    
    */
    
    public final intgetTextureId() {
    
    returnmTextureID;
    
    }
    
    /**
    
    *开始渲染
    
    *@paramst
    
    */
    
    public final void drawFrame(SurfaceTexture st) {
    
    checkGlError("onDrawFrame start");
    
    st.getTransformMatrix(mSTMatrix);
    
    // (optional) clear to green so we can see if we're failing to set pixels
    
    GLES20.glClearColor(0.0f,1.0f,0.0f,1.0f);
    
    GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT| GLES20.GL_COLOR_BUFFER_BIT);
    
    GLES20.glUseProgram(mProgram);
    
    checkGlError("glUseProgram");
    
    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
    
    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,mTextureID);
    
    mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
    
    GLES20.glVertexAttribPointer(maPositionHandle,3,GLES20.GL_FLOAT, false,
    
    TRIANGLE_VERTICES_DATA_STRIDE_BYTES,mTriangleVertices);
    
    checkGlError("glVertexAttribPointer maPosition");
    
    GLES20.glEnableVertexAttribArray(maPositionHandle);
    
    checkGlError("glEnableVertexAttribArray maPositionHandle");
    
    mTriangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
    
    GLES20.glVertexAttribPointer(maTextureHandle,2,GLES20.GL_FLOAT, false,
    
    TRIANGLE_VERTICES_DATA_STRIDE_BYTES,mTriangleVertices);
    
    checkGlError("glVertexAttribPointer maTextureHandle");
    
    GLES20.glEnableVertexAttribArray(maTextureHandle);
    
    checkGlError("glEnableVertexAttribArray maTextureHandle");
    
    Matrix.setIdentityM(mMVPMatrix,0);
    
    GLES20.glUniformMatrix4fv(muMVPMatrixHandle,1, false,mMVPMatrix,0);
    
    GLES20.glUniformMatrix4fv(muSTMatrixHandle,1, false,mSTMatrix,0);
    
    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP,0,4);
    
    checkGlError("glDrawArrays");
    
    // IMPORTANT: on some devices, if you are sharing the external texture between two
    
    // contexts, one context may not see updates to the texture unless you un-bind and
    
    // re-bind it.  If you're not using shared EGL contexts, you don't need to bind
    
    // texture 0 here.
    
    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,0);
    
    }
    
    /**
    
    *初始化gl资源
    
    */
    
    public final voidsurfaceCreated() {
    
    mProgram= createProgram(VERTEX_SHADER,FRAGMENT_SHADER);
    
    if(mProgram==0) {
    
    throw newRuntimeException("failed creating program");
    
    }
    
    maPositionHandle= GLES20.glGetAttribLocation(mProgram,"aPosition");
    
    checkLocation(maPositionHandle,"aPosition");
    
    maTextureHandle= GLES20.glGetAttribLocation(mProgram,"aTextureCoord");
    
    checkLocation(maTextureHandle,"aTextureCoord");
    
    muMVPMatrixHandle= GLES20.glGetUniformLocation(mProgram,"uMVPMatrix");
    
    checkLocation(muMVPMatrixHandle,"uMVPMatrix");
    
    muSTMatrixHandle= GLES20.glGetUniformLocation(mProgram,"uSTMatrix");
    
    checkLocation(muSTMatrixHandle,"uSTMatrix");
    
    int[] textures =new int[1];
    
    GLES20.glGenTextures(1,textures,0);
    
    mTextureID= textures[0];
    
    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,mTextureID);
    
    checkGlError("glBindTexture mTextureID");
    
    GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,GLES20.GL_TEXTURE_MIN_FILTER,
    
    GLES20.GL_NEAREST);
    
    GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,GLES20.GL_TEXTURE_MAG_FILTER,
    
    GLES20.GL_LINEAR);
    
    GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,GLES20.GL_TEXTURE_WRAP_S,
    
    GLES20.GL_CLAMP_TO_EDGE);
    
    GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,GLES20.GL_TEXTURE_WRAP_T,
    
    GLES20.GL_CLAMP_TO_EDGE);
    
    checkGlError("glTexParameter");
    
    }
    
    /**
    
    *重新修改Fragment Shader
    
    *@paramfragmentShader
    
    */
    
    public final voidchangeFragmentShader(String fragmentShader) {
    
    if(fragmentShader ==null) {
    
    fragmentShader =FRAGMENT_SHADER;
    
    }
    
    GLES20.glDeleteProgram(mProgram);
    
    mProgram= createProgram(VERTEX_SHADER,fragmentShader);
    
    if(mProgram==0) {
    
    throw newRuntimeException("failed creating program");
    
    }
    
    }
    
    /**
    
    *设置shader
    
    *@paramshaderType
    
    *@paramsource
    
    *@return
    
    */
    
    private intloadShader(intshaderType,String source) {
    
    intshader = GLES20.glCreateShader(shaderType);
    
    checkGlError("glCreateShader type="+ shaderType);
    
    GLES20.glShaderSource(shader,source);
    
    GLES20.glCompileShader(shader);
    
    int[] compiled =new int[1];
    
    GLES20.glGetShaderiv(shader,GLES20.GL_COMPILE_STATUS,compiled,0);
    
    if(compiled[0] ==0) {
    
    Log.e(TAG,"Could not compile shader "+ shaderType +":");
    
    Log.e(TAG," "+ GLES20.glGetShaderInfoLog(shader));
    
    GLES20.glDeleteShader(shader);
    
    shader =0;
    
    }
    
    returnshader;
    
    }
    
    /**
    
    *创建gl program
    
    *@paramvertexSource
    
    *@paramfragmentSource
    
    *@return
    
    */
    
    private intcreateProgram(String vertexSource,String fragmentSource) {
    
    intvertexShader = loadShader(GLES20.GL_VERTEX_SHADER,vertexSource);
    
    if(vertexShader ==0) {
    
    return0;
    
    }
    
    intpixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER,fragmentSource);
    
    if(pixelShader ==0) {
    
    return0;
    
    }
    
    intprogram = GLES20.glCreateProgram();
    
    if(program ==0) {
    
    Log.e(TAG,"Could not create program");
    
    }
    
    GLES20.glAttachShader(program,vertexShader);
    
    checkGlError("glAttachShader");
    
    GLES20.glAttachShader(program,pixelShader);
    
    checkGlError("glAttachShader");
    
    GLES20.glLinkProgram(program);
    
    int[] linkStatus =new int[1];
    
    GLES20.glGetProgramiv(program,GLES20.GL_LINK_STATUS,linkStatus,0);
    
    if(linkStatus[0] != GLES20.GL_TRUE) {
    
    Log.e(TAG,"Could not link program: ");
    
    Log.e(TAG,GLES20.glGetProgramInfoLog(program));
    
    GLES20.glDeleteProgram(program);
    
    program =0;
    
    }
    
    returnprogram;
    
    }
    
    public voidcheckGlError(String op) {
    
    interror;
    
    while((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
    
    Log.e(TAG,op +": glError "+ error);
    
    throw newRuntimeException(op +": glError "+ error);
    
    }
    
    }
    
    public final voidcheckLocation(intlocation,String label) {
    
        if(location <0) {
    
            throw newRuntimeException("Unable to locate '"+ label +"' in program");
    
        }
      }
    
    }
    
    
    2、SurfaceTextureWrapper,主要是用作自定义纹理与Camera取出来的数据进行包装;
    
    /**
     * Created by jsp on 2016/9/29.
     *
     * Couples a {@link SurfaceTextureRender}-owned OES texture with a
     * {@link SurfaceTexture} that the camera feeds, and provides blocking
     * frame synchronization between the camera callback thread and the
     * render/encode thread.
     */
    public class SurfaceTextureWrapper
            implements SurfaceTexture.OnFrameAvailableListener{
        private SurfaceTexture  mSurfaceTexture;
        private SurfaceTextureRender mESTextureRender;

        // Guards mFrameAvailable; onFrameAvailable() and awaitNewImage() run on
        // different threads.
        private Object mSyncObject = new Object();

        private boolean mFrameAvailable;

        public SurfaceTextureWrapper(){
            mESTextureRender       = new SurfaceTextureRender();
            mESTextureRender.surfaceCreated();

            // Hand the renderer's texture id straight to the SurfaceTexture so the
            // camera frames land in that OES texture.
            mSurfaceTexture        = new SurfaceTexture(
                    mESTextureRender.getTextureId());

            mSurfaceTexture.setOnFrameAvailableListener(this);

        }

        @Override
        public void onFrameAvailable(SurfaceTexture surfaceTexture) {
            synchronized (mSyncObject) {
                if (mFrameAvailable) {
                    // The consumer has not drained the previous frame yet.
                    throw new RuntimeException("mFrameAvailable already set, frame could be dropped");
                }
                mFrameAvailable = true;
                mSyncObject.notifyAll();
            }
        }

        /**
         * Renders the latched frame onto the current EGL surface.
         */
        public final void drawImage() {
            mESTextureRender.drawFrame(mSurfaceTexture);
        }

        /**
         * Blocks until the next camera frame arrives, then latches it with
         * updateTexImage(). Must be called on the thread owning the GL context.
         */
        public final void awaitNewImage() {
    //        final int TIMEOUT_MS = 2500;
            synchronized (mSyncObject) {
                while (!mFrameAvailable) {
                    try {
                        // Wait for onFrameAvailable() to signal us.  Use a timeout to avoid
                        // stalling the test if it doesn't arrive.
    //                    mSyncObject.wait(TIMEOUT_MS);
                        mSyncObject.wait();
                        if (!mFrameAvailable) {
                            // TODO: if "spurious wakeup", continue while loop
                            throw new RuntimeException("Camera frame wait timed out");
                        }
                    } catch (InterruptedException ie) {
                        throw new RuntimeException(ie);
                    }
                }
                mFrameAvailable = false;
            }
            // Latch the data.
            mESTextureRender.checkGlError("before updateTexImage");
            // Pull the new frame into the OES texture.
            mSurfaceTexture.updateTexImage();
        }

        /**
         * Returns the SurfaceTexture the camera should preview into.
         * @return the wrapped SurfaceTexture
         */
        public final SurfaceTexture getSurfaceTexture(){
            return mSurfaceTexture;
        }

        /**
         * Swaps the renderer's fragment shader.
         * @param fragmentShader new shader source, or null for the default
         */
        public final void changeFragmentShader(String fragmentShader) {
            mESTextureRender.changeFragmentShader(fragmentShader);
        }

        /**
         * Releases resources. Fix: the original only nulled the references, which
         * leaked the SurfaceTexture's native buffer queue until finalization —
         * release it explicitly.
         */
        public final void release(){
            if (mSurfaceTexture != null) {
                mSurfaceTexture.release();
                mSurfaceTexture = null;
            }
            mESTextureRender= null;
        }

    }
    
    3、WindowInputSurface EGL包装类,里面封装了EGL的初始化、创建Surface、纹理映射等操作;
    /**
     * 窗口输出表面
     *
     * Wraps an encoder input Surface in an EGL display/context/window-surface
     * triple so GLES rendering can be pushed into MediaCodec. Uses the
     * EGL_RECORDABLE_ANDROID config attribute required for video recording.
     */
    public class WindowInputSurface {
        // EGL_RECORDABLE_ANDROID constant (not exposed by EGL14).
        private static final int EGL_RECORDABLE_ANDROID = 0x3142;

        // EGL display handle
        private EGLDisplay mEGLDisplay            = EGL14.EGL_NO_DISPLAY;
        // EGL rendering context
        private EGLContext mEGLContext            = EGL14.EGL_NO_CONTEXT;
        // EGL window surface
        private EGLSurface mEGLSurface            = EGL14.EGL_NO_SURFACE;

        // Desired config: RGBA8888, ES2, recordable.
        private static final int[] mAttribList    = {
                EGL14.EGL_RED_SIZE, 8,
                EGL14.EGL_GREEN_SIZE, 8,
                EGL14.EGL_BLUE_SIZE, 8,
                EGL14.EGL_ALPHA_SIZE, 8,
                EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
                EGL_RECORDABLE_ANDROID, 1,
                EGL14.EGL_NONE
        };

        // Context attributes: request an ES 2 context.
        private static final int[] mContextAttlist = {
                EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
                EGL14.EGL_NONE
        };

        // Window-surface attributes (none).
        private static final int[] mSurfaceAttribs = {
                EGL14.EGL_NONE
        };

        private Surface    mSurface;

        public WindowInputSurface(Surface mSurface) {
            this.mSurface = mSurface;
            init();
        }

        // Creates display, context and window surface.
        private final void init(){
            // Get the default display.
            mEGLDisplay         = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
            if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
                throw new RuntimeException("unable to get EGL14 display");
            }

            // Initialize EGL on that display.
            int[] version       = new int[2];
            if (!EGL14.eglInitialize(mEGLDisplay, version, 0, version, 1)) {
                throw new RuntimeException("unable to initialize EGL14");
            }

            // Pick a config matching our attribute list.
            // Fix: the original never checked the eglChooseConfig result, so a
            // missing config surfaced later as an opaque eglCreateContext crash.
            EGLConfig[] configs = new EGLConfig[1];
            int[] numConfigs    = new int[1];
            if (!EGL14.eglChooseConfig(mEGLDisplay, mAttribList, 0, configs, 0, configs.length,
                    numConfigs, 0) || numConfigs[0] <= 0) {
                throw new RuntimeException("unable to find RGB888+recordable ES2 EGL config");
            }

            checkEglError("eglCreateContext RGB888+recordable ES2");

            mEGLContext         = EGL14.eglCreateContext(
                    mEGLDisplay, configs[0], EGL14.EGL_NO_CONTEXT,
                    mContextAttlist, 0);

            checkEglError("eglCreateContext");

            mEGLSurface         = EGL14.eglCreateWindowSurface(mEGLDisplay, configs[0], mSurface,
                    mSurfaceAttribs, 0);
            checkEglError("eglCreateWindowSurface");

        }

        /**
         * Makes this surface/context current on the calling thread.
         */
        public final void makeCurrent() {
            EGL14.eglMakeCurrent(mEGLDisplay,
                    mEGLSurface, mEGLSurface, mEGLContext);
            checkEglError("eglMakeCurrent");
        }

        /**
         * Publishes the rendered frame to the encoder.
         * @return true on success
         */
        public final boolean swapBuffers() {
            boolean result = EGL14.eglSwapBuffers(mEGLDisplay, mEGLSurface);
            checkEglError("eglSwapBuffers");
            return result;
        }

        /**
         * Sets the presentation timestamp for the next frame.
         * @param nsecs timestamp in nanoseconds
         */
        public final void setPresentationTime(long nsecs) {
            EGLExt.eglPresentationTimeANDROID(mEGLDisplay, mEGLSurface, nsecs);
            checkEglError("eglPresentationTimeANDROID");
        }

        /**
         * Tears down EGL state and releases the wrapped Surface.
         */
        public final void release() {
            if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
                // Unbind before destroying, as required by EGL.
                EGL14.eglMakeCurrent(mEGLDisplay,
                        EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE,
                        EGL14.EGL_NO_CONTEXT);
                EGL14.eglDestroySurface(mEGLDisplay, mEGLSurface);
                EGL14.eglDestroyContext(mEGLDisplay, mEGLContext);
                EGL14.eglReleaseThread();
                EGL14.eglTerminate(mEGLDisplay);
            }
            mSurface.release();
            mEGLDisplay = EGL14.EGL_NO_DISPLAY;
            mEGLContext = EGL14.EGL_NO_CONTEXT;
            mEGLSurface = EGL14.EGL_NO_SURFACE;
            mSurface = null;
        }

        // Throws if the last EGL call left an error code.
        private final void checkEglError(String msg) {
            int error;
            if ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) {
                throw new RuntimeException(msg + ": EGL error: 0x" + Integer.toHexString(error));
            }
        }
    }
    
    4、EncodeManager采集数据、渲染生成Mp4核心类;
    /**
     * 编解码管理器
     *
     * Drives the whole pipeline on a worker thread: opens the camera, configures an
     * H.264 surface encoder + MP4 muxer, renders camera frames into the encoder's
     * input surface via GLES, and drains encoded output into the muxer.
     */
    public class EncodeManager implements Runnable{
        private static final String TAG             = "EncodeManager";

        // Encoded frame width
        private static final int ENCODER_WIDTH      = 640;
        // Encoded frame height
        private static final int ENCODER_HEIGHT     = 480;
        // Target bit rate (bits/second)
        private static final int ENCODER_BIT_RATE   = 6000000;
        // Video MIME type (H.264)
        private static final String MIME_TYPE       = "video/avc";
        // Frames per second
        private static final int FRAME_RATE         = 30;
        // Seconds between I-frames
        private static final int IFRAME_INTERVAL    = 5;
        // Output directory for the MP4 file
        private static final File OUTPUT_DIR        = Environment.getExternalStorageDirectory();

        private MediaCodec.BufferInfo mBufferInfo;
        private MediaCodec            mEncoder;
        private MediaMuxer            mMediaMuxer;
        private WindowInputSurface    mWindowInputSurface;
        private SurfaceTextureWrapper mSurfaceTexture;
        private Camera                mCamera;

        private Thread                mStartPreviewThread;

        // Fix: written by the caller thread (stop()) and read by the worker loop;
        // without volatile the worker might never observe the stop request.
        private volatile boolean      mStartPreview;

        // Muxer video track index
        private int                   mTrackIndex;
        // True once the muxer has been started (a track was added)
        private boolean               mMuxerStarted;

        private MediaCallback         mMediaCallback;

        private String                mOutputPath;
        public EncodeManager(MediaCallback mediaCallback) {
            mMediaCallback = mediaCallback;
        }

        /**
         * Starts preview + recording on a new worker thread. Entry point.
         */
        public final void start(){
            mStartPreview       = true;
            mStartPreviewThread = new Thread(this);
            mStartPreviewThread.start();
        }

        /**
         * Requests the worker loop to stop recording.
         */
        public final void stop(){
            mStartPreview       = false;
            Log.v(TAG,"开始停止录像");
        }


        @Override
        public void run() {
            try{
                // Open the camera (front-facing preferred).
                openCamera();
                // Configure encoder, muxer and EGL input surface.
                initEncoder();
                // Make the encoder's EGL surface current on this thread.
                mWindowInputSurface.makeCurrent();
                // Wire the camera preview into our OES texture.
                attachSurfaceTexture();
                // Start the camera preview stream.
                mCamera.startPreview();
                int frameCount = 0;
                mMediaCallback.onStart();
                while (mStartPreview){
                    drainEncoder(false);
                    // Demo effect: toggle the swapped-channel shader every 15 frames.
                    if ((frameCount % 15) == 0) {
                        String fragmentShader = null;
                        if ((frameCount & 0x01) != 0) {
                            fragmentShader = SurfaceTextureRender.SWAPPED_FRAGMENT_SHADER;
                        }
                        mSurfaceTexture.changeFragmentShader(fragmentShader);
                    }
                    frameCount++;
                    // Block until the camera delivers the next frame.
                    mSurfaceTexture.awaitNewImage();
                    // Render the frame into the encoder's input surface.
                    mSurfaceTexture.drawImage();
                    // Stamp the frame with the camera timestamp.
                    mWindowInputSurface.setPresentationTime(
                            mSurfaceTexture.getSurfaceTexture().getTimestamp());
                    mWindowInputSurface.swapBuffers();
                    Log.v(TAG,"window input surface swap buffer success ");
                }

                // Stop requested: flush the encoder with end-of-stream.
                if(!mStartPreview){
                    Log.v(TAG,"stop preview start >>>> ");
                    drainEncoder(true);
                    Log.v(TAG,"停止录像成功");
                }
            }catch (Exception e){
                FileLoger.LOGE(TAG,"启动发生异常--->>" + e.getMessage());
            }finally {
                release();
                // One recording session finished.
                mMediaCallback.onComplete();
            }
        }

        /**
         * Drains all pending output from the encoder into the muxer.
         * @param endOfStream true to signal EOS and drain until the EOS flag appears
         */
        private void drainEncoder(boolean endOfStream){
            final int TIMEOUT_USEC = 10000;
            if (endOfStream) {
                // Tell the surface encoder no more frames are coming.
                mEncoder.signalEndOfInputStream();
            }

            ByteBuffer[] outputBuffers = mEncoder.getOutputBuffers();

            boolean keepDraining       = true;

            while(keepDraining){
                int bufferIndex        = mEncoder.dequeueOutputBuffer(
                        mBufferInfo, TIMEOUT_USEC);
                switch (bufferIndex){
                    case MediaCodec.INFO_TRY_AGAIN_LATER:
                        // No output yet; only keep spinning when waiting for EOS.
                        if (!endOfStream) {
                            keepDraining = false;
                        }
                        break;
                    case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
                        outputBuffers  = mEncoder.getOutputBuffers();
                        break;
                    case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
                        // Expected exactly once, before any encoded data.
                        if (mMuxerStarted) {
                            throw new RuntimeException("format changed twice");
                        }
                        MediaFormat newFormat = mEncoder.getOutputFormat();
                        // now that we have the Magic Goodies, start the muxer
                        mTrackIndex           = mMediaMuxer.addTrack(newFormat);
                        mMediaMuxer.start();
                        mMuxerStarted         = true;
                        break;
                    default:
                        if(bufferIndex >=0){
                            ByteBuffer buffer = outputBuffers[bufferIndex];
                            if (buffer == null) {
                                throw new RuntimeException("encoderOutputBuffer " + bufferIndex +
                                        " was null");
                            }

                            // Codec config (SPS/PPS) already went to the muxer via the
                            // format; don't write it as sample data.
                            if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                                mBufferInfo.size = 0;
                            }

                            if (mBufferInfo.size != 0) {
                                if (!mMuxerStarted) {
                                    throw new RuntimeException("muxer hasn't started");
                                }
                                // adjust the ByteBuffer values to match BufferInfo (not needed?)
                                buffer.position(mBufferInfo.offset);
                                buffer.limit(mBufferInfo.offset + mBufferInfo.size);
                                // Write one encoded sample.
                                mMediaMuxer.writeSampleData(mTrackIndex, buffer, mBufferInfo);
                            }

                            mEncoder.releaseOutputBuffer(bufferIndex, false);
                        }

                        if ((mBufferInfo.flags &
                                MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                            keepDraining = false;
                        }
                        break;
                }
            }
        }

        /**
         * Opens a camera, preferring a front-facing one, and applies preview size.
         */
        private void openCamera(){
            if (mCamera != null) {
                throw new RuntimeException("camera already initialized");
            }
            Camera.CameraInfo info = new Camera.CameraInfo();
            // Try to find a front-facing camera (e.g. for videoconferencing).
            int numCameras = Camera.getNumberOfCameras();
            for (int i = 0; i < numCameras; i++) {
                Camera.getCameraInfo(i, info);
                if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
                    mCamera = Camera.open(i);
                    break;
                }
            }
            if (mCamera == null) {
                mCamera = Camera.open();    // opens first back-facing camera
            }

            if (mCamera == null) {
                throw new RuntimeException("Unable to open camera");
            }

            Camera.Parameters parms = mCamera.getParameters();
            choosePreviewSize(parms);
            // leave the frame rate set to default
            mCamera.setParameters(parms);

        }

        /**
         * Picks the encoder size if supported, otherwise the camera's preferred
         * video preview size.
         */
        private void choosePreviewSize(Camera.Parameters parms) {
            Camera.Size ppsfv = parms.getPreferredPreviewSizeForVideo();
            for (Camera.Size size : parms.getSupportedPreviewSizes()) {
                if (size.width == ENCODER_WIDTH
                        && size.height == ENCODER_HEIGHT) {
                    parms.setPreviewSize(ENCODER_WIDTH, ENCODER_HEIGHT);
                    return;
                }
            }

            if (ppsfv != null) {
                parms.setPreviewSize(ppsfv.width, ppsfv.height);
            }
        }

        /**
         * Configures the H.264 surface encoder, its EGL input surface, and the
         * MP4 muxer.
         * Fix: MediaCodec.createEncoderByType declares IOException, which the
         * original signature did not propagate and therefore could not compile.
         * @throws IOException if the encoder cannot be created
         */
        private void initEncoder() throws IOException {
            mBufferInfo        = new MediaCodec.BufferInfo();
            MediaFormat format = MediaFormat.createVideoFormat(
                    MIME_TYPE,ENCODER_WIDTH, ENCODER_HEIGHT);
            format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                    MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
            format.setInteger(MediaFormat.KEY_BIT_RATE, ENCODER_BIT_RATE);
            format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
            format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
            mEncoder = MediaCodec.createEncoderByType(MIME_TYPE);
            mEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
            // The input surface must be created after configure() and before start().
            mWindowInputSurface = new WindowInputSurface(
                    mEncoder.createInputSurface());
            mEncoder.start();
            // Output file path, e.g. /sdcard/640x480.mp4
            mOutputPath         = new File(OUTPUT_DIR,
                     ENCODER_WIDTH + "x" + ENCODER_HEIGHT + ".mp4").toString();
            try {
                mMediaMuxer     = new MediaMuxer(mOutputPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
            } catch (IOException ioe) {
                throw new RuntimeException("MediaMuxer creation failed", ioe);
            }
            mTrackIndex = -1;
            mMuxerStarted = false;
        }

        public final String getOutPath(){
            return mOutputPath;
        }

        /**
         * Binds the camera preview to our renderer-owned SurfaceTexture.
         */
        private void attachSurfaceTexture() throws IOException {
            mSurfaceTexture   = new SurfaceTextureWrapper();
            SurfaceTexture st = mSurfaceTexture.getSurfaceTexture();
            mCamera.setPreviewTexture(st);
        }

        /**
         * Releases camera, encoder, EGL surface, muxer and texture resources.
         */
        private void release(){
            // Release the camera first so no more frames arrive.
            if(null != mCamera){
                mCamera.stopPreview();
                mCamera.release();
                mCamera = null;
            }

            // Release the encoder.
            if (mEncoder != null) {
                mEncoder.stop();
                mEncoder.release();
                mEncoder = null;
            }

            // Release the EGL input surface.
            if (mWindowInputSurface != null) {
                mWindowInputSurface.release();
                mWindowInputSurface = null;
            }

            // Release the muxer. Fix: stop() throws IllegalStateException if the
            // muxer was never started (e.g. failure before the first frame), which
            // would mask the original error during cleanup.
            if (mMediaMuxer != null) {
                if (mMuxerStarted) {
                    mMediaMuxer.stop();
                }
                mMediaMuxer.release();
                mMediaMuxer = null;
                mMuxerStarted = false;
            }

            // Release the texture wrapper.
            if (mSurfaceTexture != null) {
                mSurfaceTexture.release();
                mSurfaceTexture = null;
            }
        }
    }
    

    以上就是全部核心的代码,稍后有时间了会再详细的介绍下里面所需要用到的知识,流媒体,opengl等,敬请期待。

    相关文章

      网友评论

          本文标题:Camera中通过startPreview + SurfaceT

          本文链接:https://www.haomeiwen.com/subject/eufryttx.html