Reading video frames with ffmpeg + ndk21 and converting them to a Bitmap

Author: liouville | Published 2020-12-13 18:02

    Pipeline: ffmpeg reads a video frame's YUV data -> the JNI layer creates a Bitmap and obtains a pointer to its pixel buffer -> the YUV is converted into the Bitmap's pixel data (ARGB_8888)
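
    Tying the three steps together, the native entry point looks roughly like this (a sketch only; the class, method, and parameter names are placeholders, not from the original article):

    extern "C" JNIEXPORT jobject JNICALL
    Java_com_example_FrameGrabber_nativeGetFrameAt(JNIEnv *env, jobject /* thiz */,
                                                   jstring path, jlong timeMills) {
        // 1. open the file and decode one frame of YUV (part I)
        // 2. create a Bitmap and lock its pixel buffer (part II)
        // 3. convert the YUV into the locked RGBA buffer, unlock, return (part III)
        return nullptr; // placeholder; the sections below fill in the body
    }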

    I. Reading a video frame's YUV with ffmpeg

    Only video frames in yuv420p format are handled here.
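
    A quick format guard makes that assumption explicit; a minimal sketch, to be placed where the decoded frame is received (LOGW is the article's own macro):

        if (frame->format != AV_PIX_FMT_YUV420P) {
            LOGW("unsupported pixel format: %s",
                 av_get_pix_fmt_name((AVPixelFormat) frame->format));
            return;
        }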

    Initialize the AVFormatContext

        const char *cstr = videoPath.c_str();
    
        LOGD("inputFmtContext = %p", iFmtContext);
        
        
        //Open the AVFormatContext, the context used for demuxing
        int ret = avformat_open_input(&iFmtContext, cstr, nullptr, nullptr);
    
        if (ret != 0) {
            LOGE("avformat_open_input file %s failed,%s", cstr, av_err2str(ret));
            return;
        }
    
        LOGI("av_find_best_stream file %s success", cstr);
    
        //Read stream info so that codecpar is fully populated
        avformat_find_stream_info(iFmtContext, nullptr);
        
        //Find the index of the video stream in iFmtContext's internal stream array
        int videoIndex = av_find_best_stream(iFmtContext, AVMEDIA_TYPE_VIDEO, -1, -1, nullptr, 0);
    
        if (videoIndex < 0) {
            LOGE("av_find_best_stream file %s failed,%d", cstr, videoIndex);
            return;
        }
    
        videoStream = iFmtContext->streams[videoIndex];
        LOGD("video stream index = %d,duration = %lu,real duration = %f", videoIndex,
             videoStream->duration, videoStream->duration * timeBaseToDuration(videoStream->time_base));
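
    The timeBaseToDuration helper used above is not shown in the article; presumably it just converts an AVRational time base into seconds, equivalent to av_q2d:

    //Assumed implementation: seconds represented by one tick of the time base
    static double timeBaseToDuration(AVRational timeBase) {
        return timeBase.num / (double) timeBase.den; // same as av_q2d(timeBase)
    }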
    

    Open the decoder and read out the YUV

    
        if (!iCodecContext) {
    
           //Look up a decoder for the stream's codec
            AVCodec *avCodec = avcodec_find_decoder(videoStream->codecpar->codec_id);
            if (!avCodec) {
                LOGW("getFrameAt avcodec_find_decoder failed");
                return nullptr;
            }
    
            LOGD2(LOG_TAG, "codec name:%s", avCodec->name);
            iCodecContext = avcodec_alloc_context3(avCodec);
            if (!iCodecContext) {
                LOGW("getFrameAt avcodec_alloc_context3 failed");
                return nullptr;
            }
    
           //Copy the decoding parameters from the AVStream
            int err = avcodec_parameters_to_context(iCodecContext, videoStream->codecpar);
            if (err < 0) {
                LOGW("getFrameAt avcodec_parameters_to_context failed,err:%s", av_err2str(err));
                return nullptr;
            }
    
            err = avcodec_open2(iCodecContext, avCodec, nullptr);
            if (err < 0) {
                LOGW("getFrameAt avcodec_open2 failed,err:%s", av_err2str(err));
                return nullptr;
            }
        }
    
        LOGI("codec init success!!!");
    
        // Structure holding compressed (undecoded) data
        AVPacket *packet = av_packet_alloc();
    
        //Structure holding decoded frame data
        AVFrame *frame = av_frame_alloc();
    
        int64_t frameNum = 0;
    
    
        int length = 0;
        int read = 0;
    
      //Seek to the requested timestamp; timeMills (milliseconds) is converted into stream time-base units
        int seek = av_seek_frame(iFmtContext, videoStream->index,
                                 timeMills / 1000.0 / timeBaseToDuration(videoStream->time_base),
                                 AVSEEK_FLAG_BACKWARD);
    
        if (seek < 0) {
            LOGW("seek failed,code:%d", seek);
            goto end;
        }
    
        while (!(read = av_read_frame(iFmtContext, packet))) {
    
            LOGD2(LOG_TAG, "packet index:%d", packet->stream_index);
            if (packet->stream_index == videoStream->index) {
                //LOGD("read frame:%" PRId64 ,frameNum);
                //Send the packet to the decoder for decoding
                int code = avcodec_send_packet(iCodecContext, packet);
                if (code != 0) {
                    LOGW("avcodec_send_packet failed");
                    av_packet_unref(packet);
                    break;
                }
    
                frameNum++;
                int ret = 0;
                int num = 0;
              
                //Fetch the decoded frame from the decoder
                if ((ret = avcodec_receive_frame(iCodecContext, frame)) == AVERROR(EAGAIN)) {
                    LOGD("avcodec_receive_frame ret:%d,", ret);
                    av_packet_unref(packet); //unreference the packet before reading the next one
                    continue;
                }
    
                if (!ret) {
                    num++;
                    LOGD("single codec return:%d,ret:%d", num, ret);
                    LOGD("frame width: %d,height: %d", frame->width, frame->height);
    
                    // writeSingleFrame2File(frame);
                    // yuv4202RGB(frame);
                  
                    //At this point frame holds one frame of YUV video data
                    yuv420ToRgb(frame, rgb);
    
                }
                if (ret < 0) {
                    LOGW("avcodec_receive_frame err:%d,%s", ret, av_err2str(ret));
                }
    
    
                av_packet_unref(packet);
    
                break;
    
            }
        }
    
        LOGD("frame num:%" PRId64 ",frame read:%" PRId64 ",read %d", videoStream->nb_frames, frameNum,
             read);
    
        end:
        av_packet_free(&packet);
        av_frame_free(&frame);
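
    Note that the loop above assumes at most one decoded frame per packet. In general avcodec_receive_frame should be called in its own loop until it reports that more input is needed; a minimal sketch of the canonical send/receive pattern (error handling trimmed):

        if (avcodec_send_packet(iCodecContext, packet) == 0) {
            int rc;
            while ((rc = avcodec_receive_frame(iCodecContext, frame)) == 0) {
                yuv420ToRgb(frame, rgb); // consume each decoded frame
            }
            // rc is AVERROR(EAGAIN) when the decoder needs more input,
            // AVERROR_EOF after flushing, or another negative value on error
        }
        av_packet_unref(packet);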
    
    
    
    

    II. Creating a Bitmap in the JNI layer and getting a pointer to its pixel data

    1. Create a Bitmap from native code

    static jobject createBitmap(JNIEnv *env, int width, int height) {
        jclass bitmapCls = env->FindClass("android/graphics/Bitmap");
    
        if (!bitmapCls) {
            LOGW("bitmapCls failed");
            return nullptr;
        }
        jmethodID createBitmapFunction = env->GetStaticMethodID(bitmapCls,"createBitmap",
                                                                "(IILandroid/graphics/Bitmap$Config;)Landroid/graphics/Bitmap;");
    
        if (!createBitmapFunction) {
            LOGW("createBitmapFunction failed");
            return nullptr;
        }
    
        jstring configName = env->NewStringUTF("ARGB_8888");
        jclass bitmapConfigClass = env->FindClass("android/graphics/Bitmap$Config");
        if (!bitmapConfigClass) {
            LOGW("bitmapConfigClass failed");
            return nullptr;
        }
        jmethodID valueOfBitmapConfigFunction = env->GetStaticMethodID(
                bitmapConfigClass, "valueOf",
                "(Ljava/lang/String;)Landroid/graphics/Bitmap$Config;");
    
        if (!valueOfBitmapConfigFunction) {
            LOGW("valueOfBitmapConfigFunction failed");
            return nullptr;
        }
    
        LOGI("valueOfBitmapConfigFunction success");
    
        jobject bitmapConfig = env->CallStaticObjectMethod(bitmapConfigClass,
                                                           valueOfBitmapConfigFunction,configName);
    
        jobject bitmap = env->CallStaticObjectMethod(bitmapCls,
                                                     createBitmapFunction,
                                                     width,
                                                     height, bitmapConfig);
    
        return bitmap;
    }
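
    As an aside, the valueOf("ARGB_8888") round trip can be avoided by reading the enum constant directly from Bitmap$Config; a small sketch (not in the original article, error checks omitted):

        jclass configCls = env->FindClass("android/graphics/Bitmap$Config");
        jfieldID argb8888 = env->GetStaticFieldID(
                configCls, "ARGB_8888", "Landroid/graphics/Bitmap$Config;");
        jobject bitmapConfig = env->GetStaticObjectField(configCls, argb8888);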
    
    

    2. Get a pointer to the Bitmap's pixel data
    This step requires linking the NDK's built-in native library jnigraphics.
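
    With a CMake-based build (an assumption; the article does not show its build files) that means something like:

        # jnigraphics ships with the NDK and provides the AndroidBitmap_* API
        # ("native-lib" is a placeholder for your own library target)
        target_link_libraries(native-lib jnigraphics log)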

      jobject bitmap = createBitmap(env, width, height);
    
        int ret;
        uint8_t *rgbData = nullptr;
        //After AndroidBitmap_lockPixels succeeds, rgbData points at the bitmap's pixel data
        if ((ret = AndroidBitmap_lockPixels(env, bitmap, (void**)&rgbData)) < 0) {
            LOGW("AndroidBitmap_lockPixels() failed ! error=%d", ret);
            return nullptr;
        }
    
    
        LOGD("AndroidBitmap_lockPixels ret=%d", ret);
    
        reader->getFrameAt(time_mills,&rgbData);
        LOGD("getFrameAt end");
         //TODO
        AndroidBitmap_unlockPixels(env, bitmap);
    
      //Return the bitmap to the Java layer
       return bitmap;
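
    A check worth adding before the AndroidBitmap_lockPixels call above (not in the original): yuv420ToRgb below assumes RGBA bytes with a stride of exactly width * 4, and AndroidBitmap_getInfo can confirm both (ARGB_8888 bitmaps report ANDROID_BITMAP_FORMAT_RGBA_8888 natively):

        AndroidBitmapInfo info;
        if (AndroidBitmap_getInfo(env, bitmap, &info) < 0 ||
            info.format != ANDROID_BITMAP_FORMAT_RGBA_8888 ||
            info.stride != info.width * 4) {
            LOGW("unexpected bitmap layout: format=%d stride=%u", info.format, info.stride);
            return nullptr;
        }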
    
    

    III. Converting the YUV data into the Bitmap's pixel data

    
    static void yuv420ToRgb(AVFrame *frame, uint8_t **rgb) {
        int img_width = frame->width;
        int img_height = frame->height;
        //int buffer_len = frame->width * frame->height;
        //uint8_t *buffer = static_cast<uint8_t *>(malloc(sizeof(uint8_t) * buffer_len * 4));
        int channels = 4;
    
        uint8_t *buffer = *rgb;
    
        for (int y = 0; y < img_height; y++) {
            for (int x = 0; x < img_width; x++) {
    
                //linesize[0] is the number of bytes used to store one row of Y samples; due to alignment padding it is usually larger than the image width (e.g. for a test video linesize[0] was 864 while img_width was 854)
                int indexY = y * frame->linesize[0] + x;
                int indexU = y / 2 * frame->linesize[1] + x / 2;
                int indexV = y / 2 * frame->linesize[2] + x / 2;
                uint8_t Y = frame->data[0][indexY];
                uint8_t U = frame->data[1][indexU];
                uint8_t V = frame->data[2][indexV];
    
                // Standard YUV420-to-RGB conversion formulas (full-range BT.601 coefficients)
                int R = Y + 1.402 * (V - 128);  // the result may fall outside 0~255, so R cannot be a uint8_t yet
                int G = Y - 0.34413 * (U - 128) - 0.71414 * (V - 128);
                int B = Y + 1.772 * (U - 128);
                R = (R < 0) ? 0 : R;
                G = (G < 0) ? 0 : G;
                B = (B < 0) ? 0 : B;
                R = (R > 255) ? 255 : R;
                G = (G > 255) ? 255 : G;
                B = (B > 255) ? 255 : B;
                //ARGB_8888 bitmaps are laid out in memory as R,G,B,A bytes per pixel
                buffer[(y * img_width + x) * channels + 0] = (uint8_t) R;
                buffer[(y * img_width + x) * channels + 1] = (uint8_t) G;
                buffer[(y * img_width + x) * channels + 2] = (uint8_t) B;
                //Fill in the alpha channel; the Android bitmap expects it
                buffer[(y * img_width + x) * channels + 3] = 0xff;
            }
        }
    }
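
    The per-pixel floating-point loop above is easy to follow but slow; FFmpeg's own libswscale performs the same conversion (and handles pixel formats other than yuv420p) much faster. A minimal sketch of the equivalent call, writing into the buffer obtained from AndroidBitmap_lockPixels (not part of the original article):

    extern "C" {
    #include <libswscale/swscale.h>
    }

    static void yuv420ToRgbaSws(AVFrame *frame, uint8_t *rgba) {
        //Conversion context: source size/format -> same size in RGBA
        SwsContext *sws = sws_getContext(
                frame->width, frame->height, (AVPixelFormat) frame->format,
                frame->width, frame->height, AV_PIX_FMT_RGBA,
                SWS_BILINEAR, nullptr, nullptr, nullptr);
        uint8_t *dst[4] = {rgba, nullptr, nullptr, nullptr};
        int dstLinesize[4] = {frame->width * 4, 0, 0, 0};
        sws_scale(sws, frame->data, frame->linesize, 0, frame->height,
                  dst, dstLinesize);
        sws_freeContext(sws);
    }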
    
