MediaCodec + SurfaceView for Camera

Author: Forever_Lobster | Published 2020-04-20 14:31

Part 1: Encoding the preview data

The camera preview frames are converted to I420, rotated 90°, and fed to a MediaCodec H.264 ("video/avc") encoder configured for 720x1280.

private static final int FRAME_RATE = 25;

    // 0.5 bits per pixel: 0.5 * 25 fps * 720 * 1280 ≈ 11.5 Mbps
    private int calcBitRate() {
        final int bitrate = (int) (0.5f * FRAME_RATE * 720 * 1280);
        Log.i(TAG, String.format("bitrate=%5.2f[Mbps]", bitrate / 1000f / 1000f));
        return bitrate;
    }

  private void initMediaCodec() {

        try {
            mMediaCodec = MediaCodec.createEncoderByType(VCODEC);
            bufferInfo = new MediaCodec.BufferInfo();
            presentationTimeUs = System.nanoTime() / 1000;

            // note: createFormat() ignores the last two arguments and uses calcBitRate() / FRAME_RATE instead
            MediaFormat format = createFormat(720, 1280, bitrate, framerate);

            /*format.setInteger(MediaFormat.KEY_BIT_RATE, bitrate);
            format.setInteger(MediaFormat.KEY_FRAME_RATE, framerate);
            matchedColorFormat = chooseColorFormat();
            format.setInteger(MediaFormat.KEY_COLOR_FORMAT, matchedColorFormat);
            format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);*/
            mMediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
            mMediaCodec.start();
            Log.v(TAG, "encoder start");
        } catch (Exception e) {
            Log.e(TAG, "MediaCodec start failed.", e);
        }
    }

    private MediaFormat createFormat(int width, int height, int bitRate, int frameRate) {
        MediaFormat format = new MediaFormat();
        format.setInteger(MediaFormat.KEY_WIDTH, width);
        format.setInteger(MediaFormat.KEY_HEIGHT, height);
        format.setString(MediaFormat.KEY_MIME, "video/avc");
        format.setInteger(MediaFormat.KEY_BIT_RATE, calcBitRate());
        // must be present to configure the encoder, but does not impact the actual frame rate, which is variable
        format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
            format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar); // the raw input frames must already be in this format
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 10);
        // display the very first frame, and recover from bad quality when no new frames
        format.setLong(MediaFormat.KEY_REPEAT_PREVIOUS_FRAME_AFTER, 0); // µs
        return format;
    }
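    // The commented-out configuration above references a chooseColorFormat() helper that the
    // article does not show. A minimal sketch of such a helper (an assumption, not the author's
    // code): ask the "video/avc" encoder which raw YUV input formats it actually supports instead
    // of hard-coding COLOR_FormatYUV420Planar, since some devices only accept the semi-planar variant.
    // (Requires java.util.Arrays and android.media.MediaCodecList.)
    private int chooseColorFormat() {
        MediaCodecInfo codecInfo = null;
        for (int i = 0; i < MediaCodecList.getCodecCount(); i++) {
            MediaCodecInfo info = MediaCodecList.getCodecInfoAt(i);
            if (info.isEncoder() && Arrays.asList(info.getSupportedTypes()).contains("video/avc")) {
                codecInfo = info;
                break;
            }
        }
        if (codecInfo == null) {
            return MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar;
        }
        MediaCodecInfo.CodecCapabilities caps = codecInfo.getCapabilitiesForType("video/avc");
        for (int colorFormat : caps.colorFormats) {
            if (colorFormat == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar
                    || colorFormat == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar) {
                return colorFormat;
            }
        }
        // fall back to the format the article hard-codes
        return MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar;
    }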
    // Rotates an NV21-style frame (interleaved VU plane) 90° clockwise; the output is imageHeight x imageWidth.
    public static byte[] rotateYUVDegree90(byte[] data, int imageWidth, int imageHeight) {
        byte[] yuv = new byte[imageWidth * imageHeight * 3 / 2];
        // Rotate the Y luma
        int i = 0;
        for (int x = 0; x < imageWidth; x++) {
            for (int y = imageHeight - 1; y >= 0; y--) {
                yuv[i] = data[y * imageWidth + x];
                i++;
            }
        }
        // Rotate the U and V color components
        i = imageWidth * imageHeight * 3 / 2 - 1;
        for (int x = imageWidth - 1; x > 0; x = x - 2) {
            for (int y = 0; y < imageHeight / 2; y++) {
                yuv[i] = data[(imageWidth * imageHeight) + (y * imageWidth) + x];
                i--;
                yuv[i] = data[(imageWidth * imageHeight) + (y * imageWidth) + (x - 1)];
                i--;
            }
        }
        return yuv;
    }
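    // Hypothetical usage inside onPreviewFrame (an illustration, not the article's active path,
    // which uses YuvUtil instead): a 1280x720 NV21 preview frame becomes 720x1280 after rotation,
    // and the returned array holds 720 * 1280 * 3 / 2 bytes, matching the encoder's configured size.
    //
    //     byte[] rotated = rotateYUVDegree90(data, 1280, 720);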

    //byte[] mBytes;
    Camera.PreviewCallback previewCallback = new Camera.PreviewCallback() {
        byte[] mPpsSps = new byte[0];
        int i = 0;

        @TargetApi(Build.VERSION_CODES.JELLY_BEAN)
        @Override
        public void onPreviewFrame(byte[] data, Camera camera) {
            Log.v(TAG, "onPreviewFrame--i=" + ++i);
            if (data == null) {
                return;
            }
            Camera.Size previewSize = camera.getParameters().getPreviewSize();

            // COLOR_FormatYUV420Planar = I420 (a.k.a. YU12) = 19
            // convert the camera preview frame to I420
            // NOTE: the hard-coded dimensions here must match the actual preview size from camera.getParameters()
            byte[] src_Bytes = new byte[data.length];

            YuvUtil.yuvYV12ToI420(data, 720, 1280, src_Bytes);

            // rotate the I420 frame 90° so the portrait 720x1280 encoder receives upright frames
            byte[] dst_Bytes = new byte[src_Bytes.length];
            YuvUtil.yuvRotateI420(src_Bytes, 1280, 720, dst_Bytes, 90);

            //byte[] dst_Bytes;
            //dst_Bytes = rotateYUVDegree90(src_Bytes, 1280, 720);


            /*mYUVType = new Type.Builder(mRenderScript, Element.U8(mRenderScript)).setX(dst_Bytes.length);
            mInputAllocation = Allocation.createTyped(mRenderScript, mYUVType.create(), Allocation.USAGE_SCRIPT);

            mRGBAType = new Type.Builder(mRenderScript, Element.RGBA_8888(mRenderScript)).setX(720).setY(1280);
            mOutputAllocation = Allocation.createTyped(mRenderScript, mRGBAType.create(), Allocation.USAGE_SCRIPT);
            mInputAllocation.copyFrom(dst_Bytes);
            mScriptIntrinsicYuvToRGB.setInput(mInputAllocation);
            mScriptIntrinsicYuvToRGB.forEach(mOutputAllocation);
            Bitmap bmpout = Bitmap.createBitmap(720, 1280, Bitmap.Config.ARGB_8888);
            mOutputAllocation.copyTo(bmpout);
            saveBitmap(mContext, bmpout);*/


            if (dst_Bytes == null) {

                return;
            }
            // encode the rotated frame
            try {
                ByteBuffer[] inputBuffers = mMediaCodec.getInputBuffers();
                ByteBuffer[] outputBuffers = mMediaCodec.getOutputBuffers();
                // -1 blocks until an input buffer is free
                int inputBufferIndex = mMediaCodec.dequeueInputBuffer(-1);

                if (inputBufferIndex >= 0) {
                    ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
                    inputBuffer.clear();
                    inputBuffer.put(dst_Bytes, 0, dst_Bytes.length);
                    // use a monotonically increasing timestamp; a constant 0 PTS can confuse rate control
                    long pts = System.nanoTime() / 1000 - presentationTimeUs;
                    mMediaCodec.queueInputBuffer(inputBufferIndex, 0, dst_Bytes.length, pts, 0);
                }
                }
                int outputBufferIndex = mMediaCodec.dequeueOutputBuffer(bufferInfo, 0);
                if (outputBufferIndex >= 0) {
                    ByteBuffer outputBuffer = outputBuffers[outputBufferIndex];

                    byte[] b = new byte[bufferInfo.size];
                    outputBuffer.limit(bufferInfo.offset + bufferInfo.size);
                    outputBuffer.position(bufferInfo.offset);
                    // get(dst, dstOffset, length): copy bufferInfo.size bytes into b starting at index 0
                    outputBuffer.get(b, 0, bufferInfo.size);

                    // hand the encoded H.264 frame to the consumer (the decoder below)
                    callBack.onFrame(previewSize.width, previewSize.height, b, mCameraId);
                    mMediaCodec.releaseOutputBuffer(outputBufferIndex, false);
                }
                /*int outputBufferIndex = mMediaCodec.dequeueOutputBuffer(bufferInfo,0);

                while (outputBufferIndex >= 0)
                {
                    ByteBuffer outputBuffer = outputBuffers[outputBufferIndex];
                    byte[] outData = new byte[bufferInfo.size];
                    outputBuffer.get(outData);

                    //hand off the encoded frame
                    callBack.onFrame(previewSize.width, previewSize.height, outData, mCameraId);
                    mMediaCodec.releaseOutputBuffer(outputBufferIndex, false);
                    outputBufferIndex = mMediaCodec.dequeueOutputBuffer(bufferInfo, 0);
                }*/

                /*if (outPutBufId >= 0) {
                    mMediaCodec.releaseOutputBuffer(outPutBufId, false);
                }*/
            } catch (IllegalStateException e) {
                Log.e(TAG, "dequeueOutputBuffer error");
            }
        }

    };
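The article does not show how previewCallback is attached to the camera. A minimal sketch, assuming the legacy android.hardware.Camera API and a 1280x720 NV21 preview (the sizes used above), with mPreviewHolder as a hypothetical SurfaceHolder for the on-screen preview:

    // Assumed camera setup (not from the article): open the camera, request a 1280x720 NV21
    // preview, and register the preview callback defined above.
    Camera camera = Camera.open(mCameraId);
    Camera.Parameters params = camera.getParameters();
    params.setPreviewSize(1280, 720);          // must match the sizes hard-coded in onPreviewFrame
    params.setPreviewFormat(ImageFormat.NV21); // the camera's default preview format
    camera.setParameters(params);
    try {
        camera.setPreviewDisplay(mPreviewHolder); // hypothetical SurfaceHolder for the on-screen preview
    } catch (IOException e) {
        Log.e(TAG, "setPreviewDisplay failed", e);
    }
    camera.setPreviewCallback(previewCallback);
    camera.startPreview();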

Part 2: Rendering the decoded data to a SurfaceView

The encoded H.264 frames delivered through onFrame are fed to a second MediaCodec, which decodes them and renders directly to the SurfaceView's Surface.

1. Initialize the decoder

private MediaCodec mDecoder;
    private MediaCodec.BufferInfo mBufferInfo;
    private void initDecoder() {
        try {
            mDecoder = MediaCodec.createDecoderByType("video/avc");
            MediaFormat mMediaFormat = MediaFormat.createVideoFormat("video/avc",
                    720, 1280);
            // passing the SurfaceView's Surface here makes the decoder render frames directly
            // to it when releaseOutputBuffer(index, true) is called
            mDecoder.configure(mMediaFormat, mSurfaceView.getHolder().getSurface(),
                    null, 0);
            mDecoder.start();
            mBufferInfo = new MediaCodec.BufferInfo();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
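The decoder's output Surface must exist before configure() is called. The article does not show where initDecoder() is invoked; a sketch assuming it is driven by the SurfaceHolder callbacks:

    // Assumed wiring (not from the article): create the decoder once the SurfaceView's
    // surface is available, and release it when the surface goes away.
    mSurfaceView.getHolder().addCallback(new SurfaceHolder.Callback() {
        @Override
        public void surfaceCreated(SurfaceHolder holder) {
            initDecoder();
        }

        @Override
        public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
        }

        @Override
        public void surfaceDestroyed(SurfaceHolder holder) {
            if (mDecoder != null) {
                mDecoder.stop();
                mDecoder.release();
                mDecoder = null;
            }
        }
    });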

2. Decode

private CloudCameraService.FrameCallback callBack = new CloudCameraService.FrameCallback() {
        int i = 0;

        @Override
        public void onFrame(int previewWidth, int previewHeight, byte[] data, int cameraId) {
            /*mCameraId = cameraId;
            mBytes = data;
            if (mPreviewBitmapWidth == 0 || mPreviewBitmapHeight == 0) {
                mPreviewBitmapWidth = previewWidth;
                mPreviewBitmapHeight = previewHeight;

            }
            if (mCameraPreview != null) {
                mCameraPreview.invalidate();
            }

            takePicture(data);*/

            // get the decoder's input buffers
            ByteBuffer[] inputBuffers = mDecoder.getInputBuffers();

            // dequeue an empty input buffer; the timeout is 0 for "don't wait",
            // -1 for "block until one is available", otherwise a duration in microseconds
            int inputBufferIndex = mDecoder.dequeueInputBuffer(-1);

            // copy the frame into the input buffer
            if (inputBufferIndex >= 0) {
                ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
                inputBuffer.clear();
                // the H.264-encoded data received from the encoder callback
                inputBuffer.put(data, 0, data.length);
                mDecoder.queueInputBuffer(inputBufferIndex, 0, data.length, System.nanoTime() / 1000, 0);
            }
            int outputBufferIndex = mDecoder.dequeueOutputBuffer(mBufferInfo, 0);
            if (outputBufferIndex >= 0) {
                // release the buffer back to the codec; passing true renders it to the Surface
                mDecoder.releaseOutputBuffer(outputBufferIndex, true);
            } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                // the actual output resolution is available here and can be used to fix the aspect ratio
                //fixHW();
            }
        }
    };
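Note that the decoder is configured without csd-0/csd-1, so it relies on the encoder's codec-config buffer (the SPS/PPS) arriving through onFrame like any other frame; H.264 decoders generally accept these in-band parameter sets. If that buffer were not forwarded, one alternative (a sketch, with sps and pps as hypothetical byte arrays holding the parameter-set NAL units) is to pass them at configure time:

    // Sketch (assumption, not the article's code): hand SPS/PPS to the decoder explicitly
    // instead of relying on in-band parameter sets.
    MediaFormat mMediaFormat = MediaFormat.createVideoFormat("video/avc", 720, 1280);
    mMediaFormat.setByteBuffer("csd-0", ByteBuffer.wrap(sps)); // hypothetical byte[] sps
    mMediaFormat.setByteBuffer("csd-1", ByteBuffer.wrap(pps)); // hypothetical byte[] pps
    mDecoder.configure(mMediaFormat, mSurfaceView.getHolder().getSurface(), null, 0);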
