Using MediaCodec with OpenGL for Video Recording: An Analysis


Author: 一枚懒人 | Published 2023-10-15 23:39

1: Overview

On Android, MediaCodec is used to record video, and the video data usually comes from the Camera. Those camera frames, however, are often rendered through OpenGL first, either to apply filters or to do further processing before pushing the stream to a server. Both scenarios require combining OpenGL with MediaCodec to implement recording.

Put simply, to have MediaCodec record video that OpenGL has already processed, two steps are key. First, when creating the MediaCodec encoder, obtain its input Surface and pass that Surface as the native window when creating an EGL surface. Second, create the OpenGL environment on the encoder thread with an EGL context that shares the foreground OpenGL context. Then, each time the foreground finishes rendering a frame for display, the encoder thread draws the same data once more with OpenGL into the encoder's surface; swapping that surface delivers the frame to the encoder, which encodes it and outputs H.264/H.265 data for the next stage.
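
The helper classes EglCoreTest and WindowEglSurface that appear later are not listed in the article. As a rough orientation only, here is a minimal sketch, using the plain EGL14 API, of what they presumably do: create an EGL context that shares with the foreground context, and wrap the encoder's input Surface as the render target. All class, method, and variable names in this sketch are illustrative, not the article's.

```java
import android.opengl.EGL14;
import android.opengl.EGLConfig;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLExt;
import android.opengl.EGLSurface;
import android.view.Surface;

// Minimal sketch (assumption, not the article's EglCoreTest/WindowEglSurface):
// shared-context EGL setup on the encoder thread, targeting MediaCodec's input Surface.
final class EncoderEglSketch {

    static EGLSurface makeEncoderSurfaceCurrent(EGLContext sharedContext, Surface codecInputSurface) {
        EGLDisplay display = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
        int[] version = new int[2];
        EGL14.eglInitialize(display, version, 0, version, 1);

        // EGL_RECORDABLE_ANDROID (0x3142) marks configs usable with MediaCodec surfaces.
        int[] configAttribs = {
                EGL14.EGL_RED_SIZE, 8, EGL14.EGL_GREEN_SIZE, 8, EGL14.EGL_BLUE_SIZE, 8,
                EGL14.EGL_ALPHA_SIZE, 8,
                EGL14.EGL_RENDERABLE_TYPE, EGLExt.EGL_OPENGL_ES3_BIT_KHR,
                0x3142 /* EGL_RECORDABLE_ANDROID */, 1,
                EGL14.EGL_NONE
        };
        EGLConfig[] configs = new EGLConfig[1];
        int[] numConfigs = new int[1];
        EGL14.eglChooseConfig(display, configAttribs, 0, configs, 0, 1, numConfigs, 0);

        // Share the foreground context so the camera texture written by the preview
        // thread is also visible to the encoder thread.
        int[] contextAttribs = { EGL14.EGL_CONTEXT_CLIENT_VERSION, 3, EGL14.EGL_NONE };
        EGLContext encoderContext =
                EGL14.eglCreateContext(display, configs[0], sharedContext, contextAttribs, 0);

        // Wrap MediaCodec's input Surface as an EGL window surface and make it current;
        // each swapBuffers() afterwards pushes one frame into the encoder.
        EGLSurface encoderSurface = EGL14.eglCreateWindowSurface(
                display, configs[0], codecInputSurface, new int[]{ EGL14.EGL_NONE }, 0);
        EGL14.eglMakeCurrent(display, encoderSurface, encoderSurface, encoderContext);
        return encoderSurface;
    }
}
```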

2: Foreground OpenGL rendering and MediaCodec encoding

2.1: Implementation flow in brief

1. Create a GLSurfaceView and set its Renderer.
2. Create the OpenGL program that displays the camera data, plus the program for each camera filter to be added.
3. Create MediaCodec, configure its parameters, and create the Surface that MediaCodec uses for input.
4. Create the shared EGLContext environment and the OpenGL thread used for encoding.
5. Open the camera, start the preview, and for every frame render it to the foreground GLSurfaceView while recording it into an H.264 file (see the camera-setup sketch after this list).
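
The article never lists initCamera(), which the CodecRender class below calls from onSurfaceCreated(). As a rough sketch of step 5's camera setup, assuming the legacy android.hardware.Camera API and field names (mCamera, surfaceTexture, TAG) that the article does not confirm:

```java
// Hypothetical sketch of initCamera() (not the article's implementation): open the
// camera and feed its preview frames into the SurfaceTexture wrapping the OES texture.
private void initCamera() {
    try {
        mCamera = Camera.open(Camera.CameraInfo.CAMERA_FACING_BACK);
        Camera.Parameters params = mCamera.getParameters();
        // Example size only; a real implementation should pick from getSupportedPreviewSizes().
        params.setPreviewSize(1280, 720);
        mCamera.setParameters(params);
        // surfaceTexture is created around the camera OES texture in onSurfaceCreated().
        mCamera.setPreviewTexture(surfaceTexture);
        mCamera.startPreview();
    } catch (IOException e) {
        Log.e(TAG, "initCamera failed", e);
    }
}
```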

2.2: Code for the key steps

```java
public class CodecRender implements GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener {

    public CodecRender(Context context, Handler handler, GLSurfaceView surfaceView) {
        mContext = context;
        mHandler = handler;
        // Step 1: the foreground GLSurfaceView used for display; set this class as its Renderer
        mPreviewSurface = surfaceView;
        mPreviewSurface.setEGLContextClientVersion(3);
        mPreviewSurface.setRenderer(this);
        mPreviewSurface.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
        // Step 3: create the MediaCodec controller and configure its parameters
        mCodecManager = new CodecController(context);
        mRecordStatus = RECORDER_OFF;
        createVertexArray();
    }

    @Override
    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
        if (mHandler == null) {
            return;
        }
        GLES30.glDisable(GL10.GL_DITHER);
        GLES30.glClearColor(1.0f, 0.0f, 0.0f, 0.0f);
        GLES30.glEnable(GL10.GL_CULL_FACE);
        GLES30.glEnable(GL10.GL_DEPTH_TEST);

        // Step 2: create the OpenGL program that displays the camera data
        // (the basic camera filter)
        mCameraBaseFilter = new CameraBaseFilter(mContext);
        mCameraBaseFilter.init();

        // Create the camera (OES) texture
        cameraTextureId = GLesUtils.createCameraTexture();
        Log.i(TAG, "onSurfaceCreated : cameraTextureId " + cameraTextureId);
        if (cameraTextureId != -1) {
            surfaceTexture = new SurfaceTexture(cameraTextureId);
            surfaceTexture.setOnFrameAvailableListener(this);
        }
        // Initialize the camera
        initCamera();
    }
}
```
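
The block above shows only the constructor and onSurfaceCreated(). The remaining Renderer callbacks are not listed in the article; below is a minimal sketch of how they might look, reusing names from the article where possible (mPreviewSurface, surfaceTexture, cameraTextureId, mCameraBaseFilter, mCodecManager) and assuming the rest (mSTMatrix, glVertexBuffer, glTextureBuffer, mRecordStatus/RECORDER_ON). This is not the author's original code.

```java
// Hypothetical sketch: the Renderer callbacks omitted above (several names are assumptions).
private final float[] mSTMatrix = new float[16];

@Override
public void onFrameAvailable(SurfaceTexture surfaceTexture) {
    // A new camera frame is ready: request a render (RENDERMODE_WHEN_DIRTY).
    mPreviewSurface.requestRender();
}

@Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
    GLES30.glViewport(0, 0, width, height);
}

@Override
public void onDrawFrame(GL10 gl) {
    GLES30.glClear(GLES30.GL_COLOR_BUFFER_BIT | GLES30.GL_DEPTH_BUFFER_BIT);
    // Pull the latest camera frame into the OES texture and fetch its transform matrix.
    surfaceTexture.updateTexImage();
    surfaceTexture.getTransformMatrix(mSTMatrix);

    // Foreground display: draw the OES texture with the base camera filter.
    mCameraBaseFilter.setTextureTransformMatrix(mSTMatrix);
    mCameraBaseFilter.onDrawFrame(cameraTextureId, glVertexBuffer, glTextureBuffer);

    // Hand the same texture over to the encoder thread (steps 4/5).
    if (mRecordStatus == RECORDER_ON) {
        mCodecManager.setTextureId(cameraTextureId);
        mCodecManager.frameAvailable(surfaceTexture);
    }
}
```

When recording is started, the render thread would also pass EGL14.eglGetCurrentContext() in the RecorderConfig so that the encoder thread (handlerStartRecording below) can create its shared context.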

The CameraBaseFilter class, which creates the camera's shader program:

```java
    // Basic camera filter: renders each camera frame for the foreground display. It samples
    // the camera's OES texture and can also output into an ordinary 2D texture attached to an FBO.
    public class CameraBaseFilter extends BaseFilter {
    private final static String TAG = "CameraBaseFilter";
    public CameraBaseFilter(Context context) {
    super(GLesUtils.readTextFileFromResource(context, R.raw.base_fliter_normal_vertex),
    GLesUtils.readTextFileFromResource(context, R.raw.base_filter_nomal_oes_fragement));
    }

    private int textureTransformLocation; // uniform handle of the texture-transform (MVP) matrix in the GLSL shader
    protected void onInit() {
    super.onInit();
    textureTransformLocation = GLES30.glGetUniformLocation(getProgramId(), "textureTransform");
    updateVertexArray();
    }

    private void updateVertexArray(){
    mVertexBuffer = ByteBuffer.allocateDirect(TextureRotateUtil.VERTEX.length * 4)
    .order(ByteOrder.nativeOrder())
    .asFloatBuffer();
    mVertexBuffer.put(TextureRotateUtil.VERTEX).position(0);

    mTextureBuffer = ByteBuffer.allocateDirect(TextureRotateUtil.TEXTURE_ROTATE_90.length * 4)
    .order(ByteOrder.nativeOrder())
    .asFloatBuffer();
    mTextureBuffer.put(TextureRotateUtil.getRotateTexture(Rotation.ROTATION_90, false, true))
    .position(0);
    }

    @Override
    public int onDrawFrame(int textureId, FloatBuffer vertexBuffer, FloatBuffer textureBuffer) {
    if (!hasInitialized()) {
    return GLesUtils.NOT_INIT;
    }
    // Log.d(TAG,"getProgramId() :" +getProgramId());
    GLES30.glUseProgram(getProgramId());
    runPendingOnDrawTask();
    if(count == 0){
    Log.d(TAG,"onDrawFrame getProgramId() :" +getProgramId());
    Log.d(TAG,"onDrawFrame textureTransformLocation() :" +
    Arrays.toString(textureTransformMatrix));
    Log.d(TAG,"onDrawFrame mInputWidth :" +
    mInputWidth + ",mInputHeight:" + mInputHeight);

    }
    count++;
    // enable the vertex position attribute
    vertexBuffer.position(0);
    GLES30.glVertexAttribPointer(mAttributePosition,
    2, GLES30.GL_FLOAT, false, 0, vertexBuffer);
    GLES30.glEnableVertexAttribArray(mAttributePosition);

    // enable the texture-coordinate attribute
    textureBuffer.position(0);
    GLES30.glVertexAttribPointer(mAttributeTextureCoordinate,
    2, GLES30.GL_FLOAT, false, 0, textureBuffer);
    GLES30.glEnableVertexAttribArray(mAttributeTextureCoordinate);

    // set the texture transform (MVP) matrix
    GLES30.glUniformMatrix4fv(textureTransformLocation,
    1, false, textureTransformMatrix, 0);

    // bind the input texture; here it is the OES texture created for the camera preview
    if (textureId != GLesUtils.NO_TEXTURE) {
    GLES30.glActiveTexture(GLES30.GL_TEXTURE0);
    GLES30.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId);
    GLES30.glUniform1i(mUniformTexture, 0);
    }

    // issue the draw call; afterwards disable the attribute arrays and unbind the texture
    GLES30.glDrawArrays(GLES30.GL_TRIANGLE_STRIP, 0, 4);
    GLES30.glDisableVertexAttribArray(mAttributePosition);
    GLES30.glDisableVertexAttribArray(mAttributeTextureCoordinate);
    GLES30.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);

    return GLesUtils.ON_DRAWN;
    }

    // Draw onto the FBO; binding/unbinding the FBO is handled outside this method
    public int onDrawToFramebuffer(final int textureId){

    GLES30.glUseProgram(getProgramId());

    mVertexBuffer.position(0);
    GLES30.glVertexAttribPointer(mAttributePosition, 2, GLES30.GL_FLOAT, false, 0, mVertexBuffer);
    GLES30.glEnableVertexAttribArray(mAttributePosition);
    mTextureBuffer.position(0);
    GLES30.glVertexAttribPointer(mAttributeTextureCoordinate, 2, GLES30.GL_FLOAT, false, 0, mTextureBuffer);
    GLES30.glEnableVertexAttribArray(mAttributeTextureCoordinate);
    GLES30.glUniformMatrix4fv(textureTransformLocation, 1, false, textureTransformMatrix, 0);

    if (textureId != GLesUtils.NO_TEXTURE) {
    GLES30.glActiveTexture(GLES30.GL_TEXTURE0);
    GLES30.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId);
    GLES30.glUniform1i(mUniformTexture, 0);
    }

    GLES30.glDrawArrays(GLES30.GL_TRIANGLE_STRIP, 0, 4);

    GLES30.glDisableVertexAttribArray(mAttributePosition);
    GLES30.glDisableVertexAttribArray(mAttributeTextureCoordinate);
    GLES30.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
    return frameBufferTexture[0];
    }

    public void initFrameBuffer(int width, int height){
    // if an FBO already exists with a different size, destroy it first
    if (frameBuffer != null && (frameWidth != width || frameHeight != height))
    destroyFrameBuffer();

    // create the FBO
    if (frameBuffer == null) {
    // the incoming width and height are the preview dimensions
    frameWidth = width;
    frameHeight = height;

    frameBuffer = new int[1];
    frameBufferTexture = new int[1];
    // generate the FBO
    GLES30.glGenFramebuffers(1, frameBuffer, 0);

    // generate the texture that will be attached to the FBO
    GLES30.glGenTextures(1, frameBufferTexture, 0);
    Log.i(TAG,"initFrameBuffer:" +frameBufferTexture[0] );
    GLES30.glBindTexture(GLES30.GL_TEXTURE_2D, frameBufferTexture[0]);
    GLES30.glTexParameterf(GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_MIN_FILTER, GLES30.GL_LINEAR);
    GLES30.glTexParameterf(GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_MAG_FILTER, GLES30.GL_LINEAR);
    GLES30.glTexParameterf(GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_WRAP_S, GLES30.GL_CLAMP_TO_EDGE);
    GLES30.glTexParameterf(GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_WRAP_T, GLES30.GL_CLAMP_TO_EDGE);

    // allocate storage for the FBO's color texture
    GLES30.glTexImage2D(GLES30.GL_TEXTURE_2D, 0, GLES30.GL_RGBA, width, height,
    0, GLES30.GL_RGBA, GLES30.GL_UNSIGNED_BYTE, null);
    // bind the FBO
    GLES30.glBindFramebuffer(GLES30.GL_FRAMEBUFFER, frameBuffer[0]);
    // attach the texture to the FBO as its color attachment
    GLES30.glFramebufferTexture2D(GLES30.GL_FRAMEBUFFER, GLES30.GL_COLOR_ATTACHMENT0,
    GLES30.GL_TEXTURE_2D, frameBufferTexture[0], 0);

    if (GLES30.glCheckFramebufferStatus(GLES30.GL_FRAMEBUFFER)!= GLES30.GL_FRAMEBUFFER_COMPLETE) {
    Log.e(TAG,"glCheckFramebufferStatus not GL_FRAMEBUFFER_COMPLETE");
    return ;
    }else {
    Log.i(TAG,"glCheckFramebufferStatus GL_FRAMEBUFFER_COMPLETE");
    }
    GLES30.glBindFramebuffer(GLES30.GL_FRAMEBUFFER, 0);
    GLES30.glBindTexture(GLES30.GL_TEXTURE_2D, 0);

    }

    }

    private int[] frameBuffer = null;
    private int[] frameBufferTexture = null;
    private int frameWidth = -1;
    private int frameHeight = -1;
    public void destroyFrameBuffer() {
    Log.i(TAG,"destroyFrameBuffer");
    if (frameBufferTexture != null) {
    GLES30.glDeleteTextures(1, frameBufferTexture, 0);
    frameBufferTexture = null;
    }
    if (frameBuffer != null) {
    GLES30.glDeleteFramebuffers(1, frameBuffer, 0);
    frameBuffer = null;
    }
    frameWidth = -1;
    frameHeight = -1;
    }
    }
```
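
The shader sources referenced above (R.raw.base_fliter_normal_vertex and R.raw.base_filter_nomal_oes_fragement) are not included in the article. They are presumably a standard external-OES pair along the following lines; apart from textureTransform, which matches the uniform looked up in onInit(), the attribute and sampler names here are assumptions:

```java
// Sketch of what the shader pair presumably looks like (the real .raw files are not
// shown in the article; names other than "textureTransform" are assumptions).
static final String VERTEX_SHADER =
        "attribute vec4 position;\n" +
        "attribute vec4 inputTextureCoordinate;\n" +
        "uniform mat4 textureTransform;\n" +            // matches textureTransformLocation above
        "varying vec2 textureCoordinate;\n" +
        "void main() {\n" +
        "    gl_Position = position;\n" +
        "    textureCoordinate = (textureTransform * inputTextureCoordinate).xy;\n" +
        "}\n";

static final String OES_FRAGMENT_SHADER =
        "#extension GL_OES_EGL_image_external : require\n" +
        "precision mediump float;\n" +
        "varying vec2 textureCoordinate;\n" +
        "uniform samplerExternalOES inputImageTexture;\n" + // the camera OES texture
        "void main() {\n" +
        "    gl_FragColor = texture2D(inputImageTexture, textureCoordinate);\n" +
        "}\n";
```

The transform matrix multiplied into the texture coordinates is the one obtained from SurfaceTexture.getTransformMatrix(), which corrects the orientation and cropping of the camera buffer.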

Creating the MediaCodec and its related configuration:

```java
    public class CodecController implements Runnable {
    private static final String TAG = "CodecController";
    Context mContext ;
    RecorderHandler mRecordHandler = null;
    AVRecorderCore mEncodeCore ;

    public CodecController(Context context) {
    mContext = context;

    }

    private final Object mReadyFence = new Object();
    private boolean mReady;
    private boolean mRunning;

    public void startRecode(RecorderConfig config){
    Log.i(TAG,"startRecode with config");
    synchronized (mReadyFence) {
    if (mRunning) {
    return;
    }
    mRunning = true;
    new Thread(this, TAG).start();
    while (!mReady) {
    try {
    Log.i(TAG,"startRecode : mReadyFence wait " );
    mReadyFence.wait();
    Log.i(TAG,"startRecode : mReadyFence wait end " );
    } catch (InterruptedException e) {
    e.printStackTrace();
    }
    }
    }

    Message message = Message.obtain();
    message.what = MSG_START_RECORDING;
    message.obj = config;
    mRecordHandler.sendMessage(message);
    }

    public void stopRecording(){
    mRecordHandler.sendMessage(mRecordHandler.obtainMessage(MSG_STOP_RECORDING));
    mRecordHandler.sendMessage(mRecordHandler.obtainMessage(MSG_QUIT_RECORDING));
    }

    public void setTextureId(int textureId ){
    synchronized (mReadyFence) {
    if (!mReady)
    return;
    }
    mRecordHandler.sendMessage(mRecordHandler.obtainMessage(MSG_SET_TEXTURE_ID, textureId, 0, null));
    }

    private float[] mTransformMatrix ;
    public void frameAvailable(SurfaceTexture surfaceTexture){
    synchronized (mReadyFence) {
    if (!mReady)
    return;
    }
    if (mTransformMatrix == null) {
    mTransformMatrix = new float[16];
    }
    surfaceTexture.getTransformMatrix(mTransformMatrix);
    long timestamp = surfaceTexture.getTimestamp();
    if (timestamp == 0) {
    return;
    }
    // TODO: timestamp handling; the 64-bit nanosecond timestamp is split into two ints here and reassembled in handleMessage
    mRecordHandler.sendMessage(mRecordHandler.obtainMessage(MSG_FRAME_AVAILABLE,
    (int) (timestamp >> 32), (int) timestamp, mTransformMatrix));

    // TODO: how to hand this over to the other (encoder) thread
    // mEncodeCore.frameAvailable(surfaceTexture);
    }

    private int mVideoWidth = -1;
    private int mVideoHeight = -1;
    private CameraBaseFilter mCameraFilter = null;
    private BeautyFilterType type = BeautyFilterType.NONE;
    private BaseFilter mAddFilter = null;
    private WindowEglSurface mWindowSurface;
    private EglCoreTest mEglCore;
    // TODO: a custom EGL setup is not used here
    private void handlerStartRecording(RecorderConfig config){

    Log.i(TAG,"handlerStartRecording: width-->" +config.mWidth +
    "height:" +config.mHeight + ",bitrate -->" +config.mBitrate + ",file: " +config.mOutputFile);

    mEncodeCore = new AVRecorderCore(mContext,config.mWidth,config.mHeight,
    config.mBitrate,config.mOutputFile);
    mVideoWidth = config.mWidth;
    mVideoHeight = config.mHeight;

    mEglCore = new EglCoreTest(config.mEglContext, EglCoreTest.FLAG_RECORDABLE);
    mWindowSurface = new WindowEglSurface(mEglCore,mEncodeCore.getInputSurface(),false);
    mWindowSurface.makeCurrent();

    mCameraFilter = new CameraBaseFilter(mContext);
    mCameraFilter.init();
    Log.i(TAG,"handlerStartRecording: type" +type);
    mAddFilter = getBaseFilter(type);
    if (mAddFilter != null) {
    mAddFilter.init();
    mAddFilter.onOutputSizeChanged(mVideoWidth, mVideoHeight);
    mAddFilter.onInputSizeChanged(mPreviewWidth, mPreviewHeight);
    }else {
    Log.i(TAG,"handlerStartRecording: mAddFilter is null" );
    }

    synchronized (mReadyFence){
    mReady = true;
    mReadyFence.notify();
    }
    }

    Object drawLock = new Object();
    public void setFilterType(BeautyFilterType type){
    Log.i(TAG,"setFilterType:" + type.toString() );

    synchronized (drawLock){
    if(mAddFilter != null){
    mAddFilter.destroy();
    mAddFilter = null;
    }
    mAddFilter = getBaseFilter(type);

    mAddFilter.init();
    mAddFilter.onOutputSizeChanged(mVideoWidth, mVideoHeight);
    mAddFilter.onInputSizeChanged(mPreviewWidth, mPreviewHeight);
    }
    this.type = type;
    }
    public void setFilterType(BaseFilter filter){
    Log.i(TAG,"setFilterType params " + filter );

    synchronized (drawLock){

    mAddFilter = filter;

    mAddFilter.init();
    mAddFilter.onOutputSizeChanged(mVideoWidth, mVideoHeight);
    mAddFilter.onInputSizeChanged(mPreviewWidth, mPreviewHeight);
    }
    }

    private void handleFrameAvailable(float[] transform, long timestamp){
    mEncodeCore.drainEncoder(false);
    mCameraFilter.setTextureTransformMatrix(transform);
    if (mAddFilter == null) {
    mCameraFilter.onDrawFrame(mTextureId, glVertexBuffer, glTextureBuffer);
    } else {
    Log.i(TAG,"handleFrameAvailable:" + mTextureId);
    synchronized (drawLock){
    mAddFilter.onDrawFrame(mTextureId, glVertexBuffer, glTextureBuffer);
    }
    }

    mWindowSurface.setPresentationTime(timestamp);
    mWindowSurface.swapBuffers();
    }
    private void handlerStopRecording(){
    Log.i(TAG,"handlerStopRecording");
    mEncodeCore.drainEncoder(true);
    releaseRecorder();
    }
    private void releaseRecorder() {
    mEncodeCore.release();
    if (mWindowSurface != null) {
    mWindowSurface.release();
    mWindowSurface = null;
    }
    if (mCameraFilter != null) {
    mCameraFilter.destroy();
    mCameraFilter = null;
    }
    if (mAddFilter != null) {
    mAddFilter.destroy();
    mAddFilter = null;
    type = BeautyFilterType.NONE;
    }
    if (mEglCore != null) {
    mEglCore.release();
    mEglCore = null;
    }
    }

    private volatile int mTextureId = -1;
    private void handleSetTexture(int id) {
    if(mTextureId != id){
    Log.i(TAG,"handleSetTexture: old " + mTextureId + ",new texture id " +id );
    }
    mTextureId = id;
    }

    // width and height of the foreground preview
    private int mPreviewWidth = -1;
    private int mPreviewHeight = -1;
    public void setPreviewSize(int width, int height){
    mPreviewWidth = width;
    mPreviewHeight = height;
    }
    private FloatBuffer glVertexBuffer;
    private FloatBuffer glTextureBuffer;

    public void setTextureBuffer(FloatBuffer glTextureBuffer) {
    this.glTextureBuffer = glTextureBuffer;
    }

    public void setVertexBuffer(FloatBuffer gLVertexBuffer) {
    this.glVertexBuffer = gLVertexBuffer;
    }

    @Override
    public void run() {
    Looper.prepare();
    synchronized (mReadyFence) {
    mRecordHandler = new RecorderHandler(this);
    mReady = true;
    mReadyFence.notify();
    Log.i(TAG,"run mReadyFence notify");
    }
    Looper.loop();
    synchronized (mReadyFence) {
    mReady = false;
    mRunning = false;
    mRecordHandler = null;
    }
    }

    public static class RecorderConfig{
    final int mWidth;
    final int mHeight;
    final int mBitrate;
    final File mOutputFile;
    final EGLContext mEglContext;

    public RecorderConfig(int width, int height, int bitrate, File outputFile, EGLContext eglContext) {
    this.mWidth = width;
    this.mHeight = height;
    this.mBitrate = bitrate;
    this.mOutputFile = outputFile;
    this.mEglContext = eglContext;
    }

    // public RecorderConfig(int width, int height, int bitrate, File outputFile) {
    // this.mWidth = width;
    // this.mHeight = height;
    // this.mBitrate = bitrate;
    // this.mOutputFile = outputFile;
    // }

    }

    private final static int MSG_START_RECORDING = 0;
    private final static int MSG_STOP_RECORDING = 1;
    private final static int MSG_FRAME_AVAILABLE = 2;
    private final static int MSG_SET_TEXTURE_ID = 3;
    private final static int MSG_UPDATE_SHARED_CONTEXT = 4;
    private final static int MSG_QUIT_RECORDING = 5;

    private static class RecorderHandler extends Handler {
    private final WeakReference<CodecController> mWeakRecorder;

    public RecorderHandler(CodecController manager) {
    this.mWeakRecorder = new WeakReference<>(manager);
    }

    @Override
    public void handleMessage(@NonNull Message msg) {
    Object obj = msg.obj;
    CodecController recorder = mWeakRecorder.get();
    if (recorder == null) {
    return;
    }

    switch (msg.what) {
    case MSG_START_RECORDING:
    recorder.handlerStartRecording((RecorderConfig)obj);
    break;
    case MSG_SET_TEXTURE_ID:
    recorder.handleSetTexture(msg.arg1);
    break;
    case MSG_FRAME_AVAILABLE:
    long timestamp = (((long) msg.arg1) << 32) |
    (((long) msg.arg2) & 0xffffffffL);
    recorder.handleFrameAvailable((float[]) obj, timestamp);
    break;
    case MSG_STOP_RECORDING:
    recorder.handlerStopRecording();
    break;
    case MSG_QUIT_RECORDING:
    Looper.myLooper().quit();
    break;
    }
    }
    }

    private BaseFilter getBaseFilter(BeautyFilterType type){
    Log.i(TAG,"getBaseFilter:" + type.toString()) ;
    BaseFilter result = null;
    if(type == BeautyFilterType.BLUR){
    result = new CameraBlurFilter(mContext);
    }else if(type == BeautyFilterType.COLOR_INVERT){
    result = new CameraColorInvertFilter(mContext);
    }else if(type == BeautyFilterType.WEAK_PIXEL_INCLUSION){
    result = new CameraWeakPixInclusion(mContext);
    }
    return result;
    }

    // Core encoder class: configures MediaCodec, runs the encoder, and drains the encoded output
    public class AVRecorderCore {

    private static final String TAG = "AVRecorderCore";
    MediaCodec mCodec ;
    MediaCodec.BufferInfo mBufferInfo ;
    private final static String MIME_TYPE = "video/avc";
    private final static int FRAME_RATE = 30;
    private final static int IFRAME_INTERVAL = 30;
    private Surface mInputSurface = null;
    private Object objLock = new Object();
    private MediaMuxer mMuxer;
    private int trackerID = 0;
    private boolean isStartMuxer = false;
    public AVRecorderCore(Context context) {
    // initCodec();
    }
    private Context mContext;
    public AVRecorderCore(Context context,int width, int height, int bitrate, File outputFile) {
    mContext = context;
    initCodec(width,height,bitrate,outputFile.getAbsolutePath(),null);
    }
    public void initCodec(int width, int height, int bitrate, String filePath, Surface surface){
    Log.i(TAG,"VideoRecorderCore begin");
    mBufferInfo = new MediaCodec.BufferInfo();
    MediaFormat mediaFormat = MediaFormat.createVideoFormat(MIME_TYPE, width, height);
    mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
    mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitrate);
    mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
    mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);

    try {
    mCodec = MediaCodec.createEncoderByType(MIME_TYPE);
    } catch (IOException e) {

    e.printStackTrace();
    }

    mCodec.configure(mediaFormat,null,null,MediaCodec.CONFIGURE_FLAG_ENCODE);
    mInputSurface = mCodec.createInputSurface();
    mCodec.start();

    try {
    mMuxer = new MediaMuxer(filePath,MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
    } catch (IOException e) {
    e.printStackTrace();
    }
    trackerID = -1;
    isStartMuxer = false;
    Log.i(TAG,"VideoRecorderCore end");
    }

    public Surface getInputSurface() {
    return mInputSurface;
    }

    public void setTextureId(int textureId ){

    }
    public void frameAvailable(SurfaceTexture surfaceTexture){

    }

    private final static int TIMEOUT_USEC = 20000;
    public void drainEncoder(boolean endOfStream) {
    if (endOfStream) {
    mCodec.signalEndOfInputStream();
    }

    ByteBuffer[] outputBuffers = mCodec.getOutputBuffers();
    while (true) {
    int encodeStatus = mCodec.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
    Log.d(TAG,"drainEncoder encodeStatus: " +encodeStatus);
    if (encodeStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
    if (!endOfStream) {
    break;
    }
    }else if (encodeStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
    outputBuffers = mCodec.getOutputBuffers();
    }else if (encodeStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
    if (isStartMuxer) {
    throw new RuntimeException("format has changed!");
    }
    MediaFormat newFormat = mCodec.getOutputFormat();
    trackerID = mMuxer.addTrack(newFormat);
    mMuxer.start();
    isStartMuxer = true;
    }else if (encodeStatus < 0) {
    Log.e(TAG, "error encodeStatus=" + encodeStatus);
    }else {
    ByteBuffer data = outputBuffers[encodeStatus];
    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
    mBufferInfo.size = 0;
    }
    if (mBufferInfo.size != 0) {
    if (!isStartMuxer) {
    throw new RuntimeException("muxer hasn't started");
    }
    data.position(mBufferInfo.offset);
    data.limit(mBufferInfo.offset + mBufferInfo.size);
    mMuxer.writeSampleData(trackerID, data, mBufferInfo);
    }
    mCodec.releaseOutputBuffer(encodeStatus, false);
    Log.i(TAG,"drainEncoder mBufferInfo.flags :" +mBufferInfo.flags +
    " MediaCodec.BUFFER_FLAG_END_OF_STREAM:" + MediaCodec.BUFFER_FLAG_END_OF_STREAM +
    ",result :" +(mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM));
    // end of stream
    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
    break;
    }
    }
    }
    }

    public void release() {
    if (mCodec != null) {
    mCodec.stop();
    mCodec.release();
    mCodec = null;
    }
    if (mMuxer != null) {
    mMuxer.stop();
    mMuxer.release();
    mMuxer = null;
    }
    }
    }
    }
```
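
One API note on drainEncoder(): it relies on MediaCodec.getOutputBuffers(), which has been deprecated since API 21. The logic stays the same with the per-index accessor, roughly as sketched below (not the article's code); the INFO_OUTPUT_BUFFERS_CHANGED branch is then no longer needed:

```java
// Sketch: draining with the API 21+ per-index accessor, inside the same while(true) loop.
int index = mCodec.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
if (index >= 0) {
    ByteBuffer data = mCodec.getOutputBuffer(index);    // replaces outputBuffers[index]
    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
        mBufferInfo.size = 0;                            // csd is carried by the format change
    }
    if (mBufferInfo.size != 0 && isStartMuxer) {
        data.position(mBufferInfo.offset);
        data.limit(mBufferInfo.offset + mBufferInfo.size);
        mMuxer.writeSampleData(trackerID, data, mBufferInfo);
    }
    mCodec.releaseOutputBuffer(index, false);
}
```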

2.3: Relationships between the main classes

(Figure: class relationships for OpenGL + MediaCodec video recording)

3: Conclusion

The core idea of combining OpenGL and MediaCodec for video recording is to hand MediaCodec's input Surface to an OpenGL environment. In the common case, if there is no foreground display, you have to build the EGL environment yourself and create an off-screen EGL surface; otherwise you share the EGL context with the foreground, build the MediaCodec-side OpenGL environment on top of it, and record the video from there.
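
An off-screen EGL surface, as mentioned above, can be created as a small pbuffer; a sketch with EGL14, reusing the display/config/context names from the earlier EGL sketch (illustrative only):

```java
// Sketch: a 1x1 pbuffer as an off-screen EGL surface (no foreground display needed).
int[] pbufferAttribs = { EGL14.EGL_WIDTH, 1, EGL14.EGL_HEIGHT, 1, EGL14.EGL_NONE };
EGLSurface offscreen = EGL14.eglCreatePbufferSurface(display, configs[0], pbufferAttribs, 0);
EGL14.eglMakeCurrent(display, offscreen, offscreen, encoderContext);
```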

