H.264 Decoding with FFmpeg

Author: Bison | Published 2017-07-11 17:24

    Create a new project, import the FFmpeg-iOS library built earlier (by compiling FFmpeg on the Mac to produce FFmpeg-iOS), and then add the system libraries it depends on:

    AudioToolbox.framework
    CoreMedia.framework
    VideoToolbox.framework
    libiconv.tbd
    libbz2.tbd
    libz.tbd
    

    If the build fails with 'libavcodec/avcodec.h' file not found, add the following path to Header Search Paths:

    $(PROJECT_DIR)/FFmpeg-iOS/include
    

    Then import the headers:

    /*---------- headers required for decoding ----------*/
    #include <libavcodec/avcodec.h>
    #include <libavformat/avformat.h>
    #include <libavutil/imgutils.h>
    #include <libswscale/swscale.h>
    /*----------------------------------------------------*/
    

    If the project builds and runs without errors, the decode-and-render journey can begin. Let's start with decoding.

    H.264 Decoding

    The rough flow FFmpeg follows to decode a video file:

    1. Register all container formats and codecs: av_register_all()

        static dispatch_once_t onceToken;
        dispatch_once(&onceToken, ^{
            av_register_all();
        });
    

    2. Open the file: avformat_open_input()

        pFormatContext = avformat_alloc_context();
        NSString *fileName = [[NSBundle mainBundle] pathForResource:@"zjd.h264" ofType:nil];
        if (fileName == nil)
        {
            NSLog(@"Couldn't find the file in the bundle.");
            return;
        }
        // [1] the AVFormatContext, filled in on success; [2] URL of the input audio/video stream;
        // [3] forces a specific AVInputFormat -- normally NULL so FFmpeg auto-detects the format;
        // [4] extra options, normally NULL.
        if (avformat_open_input(&pFormatContext, [fileName UTF8String], NULL, NULL) != 0)
        {
            NSLog(@"Couldn't open the file.");
            return;
        }
    

    3. Retrieve the stream information from the file: avformat_find_stream_info()

        if (avformat_find_stream_info(pFormatContext, NULL) < 0) {
            NSLog(@"Couldn't find stream information.");
            return;
        }
    

    4. Loop over all the streams and find the one whose type is AVMEDIA_TYPE_VIDEO

        int videoStream = -1;
        for (int i = 0; i < pFormatContext->nb_streams; i++)
        {
            if (pFormatContext->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
            {
                videoStream = i;
                break;
            }
        }
        if (videoStream == -1) {
            NSLog(@"Didn't find a video stream.");
            return;
        }
    

    5. Find the matching decoder: avcodec_find_decoder()

        AVCodec *pCodec = avcodec_find_decoder(AV_CODEC_ID_H264);
        if (pCodec == NULL) {
            NSLog(@"H.264 decoder not found.");
            return;
        }
        pCodecCtx = avcodec_alloc_context3(pCodec);
        avcodec_parameters_to_context(pCodecCtx, pFormatContext->streams[videoStream]->codecpar);
    

    6. Open the decoder: avcodec_open2()

    avcodec_open2(pCodecCtx, pCodec, NULL);
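
    The return value is ignored in the line above; the complete example at the end of this article checks it, roughly like this:

        if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
            NSLog(@"Couldn't open codec.");
            return;
        }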
    

    7. Allocate memory for the decoded frame: av_frame_alloc()

    pFrame = av_frame_alloc();
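
    In the complete example a second frame, pFrameYUV, together with a raw buffer is also allocated to hold the picture after it has been converted to YUV420P (this snippet is lifted from the full method further down):

        pFrameYUV = av_frame_alloc();
        out_buffer = (unsigned char *)av_malloc(av_image_get_buffer_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 1));
        av_image_fill_arrays(pFrameYUV->data, pFrameYUV->linesize, out_buffer,
                             AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 1);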
    

    8. Keep pulling packets out of the stream: av_read_frame()

        AVPacket packet;
        av_init_packet(&packet);

        if (av_read_frame(pFormatContext, &packet) >= 0)
        {   // a packet was read successfully

        } else {
            // nothing was read (end of stream or error)
        }
    

    9. Check the packet type and, for video packets, call avcodec_decode_video2()

        // decodes one frame of video: takes a compressed AVPacket as input and produces a decoded AVFrame
        avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);
        av_packet_unref(&packet);
        if (frameFinished) // a frame was decoded successfully
        {

        }
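
    Putting steps 8 and 9 together, the read/decode loop typically looks like the sketch below; the complete method at the end of this article does the same thing and additionally converts every frame with sws_scale before writing it out:

        while (av_read_frame(pFormatContext, &packet) >= 0) {
            if (packet.stream_index == videoStream) {
                int frameFinished = 0;
                avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);
                if (frameFinished) {
                    // pFrame->data now holds the decoded picture (e.g. the YUV planes)
                    NSLog(@"Decoded a %dx%d frame", pFrame->width, pFrame->height);
                }
            }
            av_packet_unref(&packet);
        }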
    

    10. When decoding is finished, release the decoder: avcodec_close()

    avcodec_close(pCodecCtx);
    

    11. Close the input file: avformat_close_input()

    avformat_close_input(&pFormatContext);
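
    Together with the frames allocated earlier, the teardown in the complete example looks like this:

        av_frame_free(&pFrameYUV);
        av_frame_free(&pFrame);
        avcodec_close(pCodecCtx);
        avformat_close_input(&pFormatContext);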
    

    Finally, here is the whole thing wrapped into one method:

    
    /* input_str  path of the input file
     * output_str path of the output file
     * return     information about the decoding run
     */

    #pragma mark - FFmpeg-based video decoder
    - (NSString *)decoder:(NSString *)input_str output_str:(NSString *)output_str{
        AVFormatContext *pFormatCtx;
        int             i, videoindex;
        AVCodecContext  *pCodecCtx;
        AVCodec         *pCodec;
        AVFrame *pFrame,*pFrameYUV;
        uint8_t *out_buffer;
        AVPacket *packet;
        int y_size;
        int ret, got_picture;
        struct SwsContext *img_convert_ctx;
        FILE *fp_yuv;
        int frame_cnt;
        clock_t time_start, time_finish;
        double  time_duration = 0.0;
        
        char input_str_full[500]={0};
        char output_str_full[500]={0};
        char info[1000]={0};
        
    //    NSString *input_str= [NSString stringWithFormat:@"resource.bundle/sintel.mov"];
    //    NSString *output_str= [NSString stringWithFormat:@"resource.bundle/test.yuv"];
        
        NSString *input_nsstr=[[[NSBundle mainBundle]resourcePath] stringByAppendingPathComponent:input_str];
        NSString *output_nsstr=[[[NSBundle mainBundle]resourcePath] stringByAppendingPathComponent:output_str];
        
        sprintf(input_str_full,"%s",[input_nsstr UTF8String]);
        sprintf(output_str_full,"%s",[output_nsstr UTF8String]);
        
        printf("Input Path:%s\n",input_str_full);
        printf("Output Path:%s\n",output_str_full);
        // register all codecs and formats
        av_register_all();
        // initialize network support (needed when the input is a network stream)
        avformat_network_init();
        // allocate the AVFormatContext
        pFormatCtx = avformat_alloc_context();
        
        if(avformat_open_input(&pFormatCtx,input_str_full,NULL,NULL)!=0){
            printf("Couldn't open input stream.\n");
            return nil;
        }
        if(avformat_find_stream_info(pFormatCtx,NULL)<0){
            printf("Couldn't find stream information.\n");
            return nil;
        }
        videoindex=-1;
        for(i=0; i<pFormatCtx->nb_streams; i++)
            if(pFormatCtx->streams[i]->codecpar->codec_type==AVMEDIA_TYPE_VIDEO){
                videoindex=i;
                break;
            }
        if(videoindex==-1){
            printf("Couldn't find a video stream.\n");
            return nil;
        }
        pCodecCtx = avcodec_alloc_context3(NULL);
        if (pCodecCtx == NULL)
        {
            printf("Could not allocate AVCodecContext\n");
            return nil;
        }
        avcodec_parameters_to_context(pCodecCtx, pFormatCtx->streams[videoindex]->codecpar);
        
        pCodec = avcodec_find_decoder(pCodecCtx->codec_id);  // find the decoder matching the stream's codec_id
        if(pCodec==NULL){
            printf("Couldn't find Codec.\n");
            return nil;
        }
        if(avcodec_open2(pCodecCtx, pCodec,NULL)<0){
            printf("Couldn't open codec.\n");
            return nil;
        }
        
        pFrame=av_frame_alloc();
        pFrameYUV=av_frame_alloc();
        out_buffer=(unsigned char *)av_malloc(av_image_get_buffer_size(AV_PIX_FMT_YUV420P,  pCodecCtx->width, pCodecCtx->height,1));
        av_image_fill_arrays(pFrameYUV->data, pFrameYUV->linesize,out_buffer,
                             AV_PIX_FMT_YUV420P,pCodecCtx->width, pCodecCtx->height,1);
        packet=(AVPacket *)av_malloc(sizeof(AVPacket));
        
        img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
                                         pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);
        
        
        sprintf(info,   "[Input     ]%s\n", [input_str UTF8String]);
        sprintf(info, "%s[Output    ]%s\n",info,[output_str UTF8String]);
        sprintf(info, "%s[Format    ]%s\n",info, pFormatCtx->iformat->name);
        sprintf(info, "%s[Codec     ]%s\n",info, pCodecCtx->codec->name);
        sprintf(info, "%s[Resolution]%dx%d\n",info, pCodecCtx->width,pCodecCtx->height);
        
        
        fp_yuv=fopen(output_str_full,"wb+");
        if(fp_yuv==NULL){
            printf("Cannot open output file.\n");
            return nil;
        }
        
        frame_cnt=0;
        time_start = clock();
        
        while(av_read_frame(pFormatCtx, packet)>=0){
            if(packet->stream_index==videoindex){
                ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);
                if(ret < 0){
                    printf("Decode Error.\n");
                    return nil;
                }
                if(got_picture){
                    sws_scale(img_convert_ctx, (const uint8_t* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height,
                              pFrameYUV->data, pFrameYUV->linesize);
                    
                    y_size=pCodecCtx->width*pCodecCtx->height;
                    fwrite(pFrameYUV->data[0],1,y_size,fp_yuv);    //Y
                    fwrite(pFrameYUV->data[1],1,y_size/4,fp_yuv);  //U
                    fwrite(pFrameYUV->data[2],1,y_size/4,fp_yuv);  //V
                    //Output info
                    char pictype_str[10]={0};
                    switch(pFrame->pict_type){
                        case AV_PICTURE_TYPE_I:sprintf(pictype_str,"I");break;
                        case AV_PICTURE_TYPE_P:sprintf(pictype_str,"P");break;
                        case AV_PICTURE_TYPE_B:sprintf(pictype_str,"B");break;
                        default:sprintf(pictype_str,"Other");break;
                    }
                    printf("Frame Index: %5d. Type:%s\n",frame_cnt,pictype_str);
                    frame_cnt++;
                }
            }
            av_packet_unref(packet);
        }
        //flush decoder
        //FIX: Flush Frames remained in Codec
        while (1) {
            ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);
            if (ret < 0)
                break;
            if (!got_picture)
                break;
            sws_scale(img_convert_ctx, (const uint8_t* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height,
                      pFrameYUV->data, pFrameYUV->linesize);
            int y_size=pCodecCtx->width*pCodecCtx->height;
            fwrite(pFrameYUV->data[0],1,y_size,fp_yuv);    //Y
            fwrite(pFrameYUV->data[1],1,y_size/4,fp_yuv);  //U
            fwrite(pFrameYUV->data[2],1,y_size/4,fp_yuv);  //V
            //Output info
            char pictype_str[10]={0};
            switch(pFrame->pict_type){
                case AV_PICTURE_TYPE_I:sprintf(pictype_str,"I");break;
                case AV_PICTURE_TYPE_P:sprintf(pictype_str,"P");break;
                case AV_PICTURE_TYPE_B:sprintf(pictype_str,"B");break;
                default:sprintf(pictype_str,"Other");break;
            }
            printf("Frame Index: %5d. Type:%s\n",frame_cnt,pictype_str);
            frame_cnt++;
        }
        time_finish = clock();
        time_duration=(double)(time_finish - time_start);
        
        sprintf(info, "%s[Time      ]%fus\n",info,time_duration);
        sprintf(info, "%s[Count     ]%d\n",info,frame_cnt);
        
        sws_freeContext(img_convert_ctx);
        
        fclose(fp_yuv);
        
        av_frame_free(&pFrameYUV);
        av_frame_free(&pFrame);
        avcodec_close(pCodecCtx);
        avformat_close_input(&pFormatCtx);
        
        NSString * info_ns = [NSString stringWithFormat:@"%s", info];
        
    //    NSLog(@"Decode info: %@", info_ns);
        NSLog(@"Output file path: %@", output_nsstr);
        
        return info_ns;
        
    }
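
    Calling the wrapped method is then a one-liner; the file names below are only an example (any H.264 or containerized video file added to the app bundle will do):

        NSString *info = [self decoder:@"zjd.h264" output_str:@"zjd.yuv"];
        NSLog(@"%@", info);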
    

    Reference: Lei Xiaohua (雷神)'s FFmpeg decoding tutorial

    LBffmpegDemo download link
