FFmpeg Video Decoding and Playback

Author: 若非长得丑怎会做逗比 | Published 2016-06-18 23:31

    1. Principle

    • Use FFmpeg to decode the video into individual frames (images), then display each frame at an interval derived from the frame rate (as sketched below).
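
    The sketch below shows what that pipeline looks like as plain C calls against the FFmpeg 2.x/3.x API used throughout this article: open the file, find the video stream, decode packets into frames, then hand each frame off for conversion and display. It is only an orientation sketch (no error handling, resources only partially released); the complete Objective-C wrapper is built in section 3.

     #include <libavcodec/avcodec.h>
     #include <libavformat/avformat.h>

     // Orientation sketch only: the same call sequence as the SJMoiveObject class below.
     static void decodeAllFrames(const char *path) {
         av_register_all();                                        // register demuxers/decoders
         AVFormatContext *fmt = NULL;
         avformat_open_input(&fmt, path, NULL, NULL);              // open the video file
         avformat_find_stream_info(fmt, NULL);                     // probe the streams
         AVCodec *codec = NULL;
         int videoIndex = av_find_best_stream(fmt, AVMEDIA_TYPE_VIDEO, -1, -1, &codec, 0);
         AVCodecContext *ctx = fmt->streams[videoIndex]->codec;    // decoder context of the video stream
         avcodec_open2(ctx, codec, NULL);                          // open the decoder
         AVFrame *frame = av_frame_alloc();
         AVPacket pkt;
         int gotFrame = 0;
         while (av_read_frame(fmt, &pkt) >= 0) {                   // read compressed packets
             if (pkt.stream_index == videoIndex) {
                 avcodec_decode_video2(ctx, frame, &gotFrame, &pkt);
                 if (gotFrame) {
                     // `frame` now holds one decoded picture (usually YUV);
                     // convert it to RGB with sws_scale and show it every 1/fps seconds.
                 }
             }
             av_packet_unref(&pkt);
         }
         avformat_close_input(&fmt);
     }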

    2. How to integrate FFmpeg

    • Download the build script (FFmpeg build script)
    • Compile it by following the README at the link above
    • Integrate it into the project: create a new Xcode project and add the compiled static libraries and header files
    • Import the dependent system libraries (an example configuration follows this list)
    • Set the Header Search Paths; the path must be correct, otherwise the compiler will not find the headers
    • Build once with Command+B to make sure the project compiles
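
    For reference, a typical configuration after building FFmpeg for iOS looks roughly like the following. The folder names and paths are placeholders and depend on where you copied the compiled output, and the exact set of .tbd system libraries can vary with the build options.

     Link Binary With Libraries (Build Phases):
         libavcodec.a  libavformat.a  libavutil.a  libswscale.a  libswresample.a
         libz.tbd  libbz2.tbd  libiconv.tbd        (system libraries FFmpeg commonly depends on)

     Header Search Paths (Build Settings):
         $(SRCROOT)/FFmpeg-iOS/include              (placeholder: wherever the built headers were copied)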

    3. Writing the code

    • Create a new Objective-C class
     //
     //  SJMoiveObject.h
     //  SJLiveVideo
     //
     //  Created by king on 16/6/16.
     //  Copyright © 2016 king. All rights reserved.
     //

     #import <UIKit/UIKit.h>
     #import "NSString+Extions.h"
     #include <libavcodec/avcodec.h>
     #include <libavformat/avformat.h>
     #include <libswscale/swscale.h>

     @interface SJMoiveObject : NSObject

     /* The most recently decoded frame as a UIImage */
     @property (nonatomic, strong, readonly) UIImage *currentImage;

     /* Width and height of the source video frames */
     @property (nonatomic, assign, readonly) int sourceWidth, sourceHeight;

     /* Output image size. Defaults to the source size. */
     @property (nonatomic, assign) int outputWidth, outputHeight;

     /* Duration of the video, in seconds */
     @property (nonatomic, assign, readonly) double duration;

     /* Current position in the video, in seconds */
     @property (nonatomic, assign, readonly) double currentTime;

     /* Frame rate of the video */
     @property (nonatomic, assign, readonly) double fps;

     /* Initialize with the path of a video file. */
     - (instancetype)initWithVideo:(NSString *)moviePath;

     /* Read the next frame from the video stream. Returns NO if no frame could be read (end of video). */
     - (BOOL)stepFrame;

     /* Seek to the nearest keyframe at the specified time. */
     - (void)seekTime:(double)seconds;

    @end
    
    • Implementation file
     //
     //  SJMoiveObject.m
     //  SJLiveVideo
     //
     //  Created by king on 16/6/16.
     //  Copyright © 2016 king. All rights reserved.
     //

    #import "SJMoiveObject.h"
    ​
    @implementation SJMoiveObject
    {
       AVFormatContext     *SJFormatCtx;
       AVCodecContext     *SJCodecCtx;
       AVFrame             *SJFrame;
       AVStream           *stream;
       AVPacket           packet;
       AVPicture           picture;
       int                 videoStream;
       double             fps;
    }
    ​
     #pragma mark ------------------------------------
     #pragma mark Initialization
     - (instancetype)initWithVideo:(NSString *)moviePath {

        if (!(self = [super init])) return nil;
        AVCodec *pCodec;
        // Register all codecs and (de)muxers
        avcodec_register_all();
        av_register_all();
        avformat_network_init();
        // Open the video file
        if (avformat_open_input(&SJFormatCtx, [moviePath UTF8String], NULL, NULL) != 0) {
            av_log(NULL, AV_LOG_ERROR, "Failed to open the file\n");
            goto initError;
        }
        // Retrieve the stream information
        if (avformat_find_stream_info(SJFormatCtx, NULL) < 0) {
            av_log(NULL, AV_LOG_ERROR, "Failed to find stream information\n");
            goto initError;
        }
        // Find the best (first) video stream
        if ((videoStream = av_find_best_stream(SJFormatCtx, AVMEDIA_TYPE_VIDEO, -1, -1, &pCodec, 0)) < 0) {
            av_log(NULL, AV_LOG_ERROR, "No video stream found\n");
            goto initError;
        }
        // Get a pointer to the codec context of the video stream
        stream     = SJFormatCtx->streams[videoStream];
        SJCodecCtx = stream->codec;
     #if DEBUG
        av_dump_format(SJFormatCtx, videoStream, [moviePath UTF8String], 0);
     #endif
        if (stream->avg_frame_rate.den && stream->avg_frame_rate.num) {
            fps = av_q2d(stream->avg_frame_rate);
        } else { fps = 30; }
        // Find the decoder for the video stream
        pCodec = avcodec_find_decoder(SJCodecCtx->codec_id);
        if (pCodec == NULL) {
            av_log(NULL, AV_LOG_ERROR, "No suitable decoder found\n");
            goto initError;
        }
        // Open the decoder
        if (avcodec_open2(SJCodecCtx, pCodec, NULL) < 0) {
            av_log(NULL, AV_LOG_ERROR, "Failed to open the decoder\n");
            goto initError;
        }
        // Allocate the video frame
        SJFrame = av_frame_alloc();
        _outputWidth  = SJCodecCtx->width;
        _outputHeight = SJCodecCtx->height;
        return self;
     initError:
        return nil;
     }
    ​
    - (void)seekTime:(double)seconds {
       AVRational timeBase = SJFormatCtx->streams[videoStream]->time_base;
       int64_t targetFrame = (int64_t)((double)timeBase.den / timeBase.num * seconds);
       avformat_seek_file(SJFormatCtx,
                           videoStream,
                           0,
                           targetFrame,
                           targetFrame,
                           AVSEEK_FLAG_FRAME);
       avcodec_flush_buffers(SJCodecCtx);
    }
     - (BOOL)stepFrame {
        int frameFinished = 0;
        // Release the packet kept from the previous call before reading new ones,
        // otherwise each av_read_frame leaks the previous packet's buffer.
        av_packet_unref(&packet);
        while (!frameFinished && av_read_frame(SJFormatCtx, &packet) >= 0) {
            if (packet.stream_index == videoStream) {
                avcodec_decode_video2(SJCodecCtx,
                                      SJFrame,
                                      &frameFinished,
                                      &packet);
            }
            if (!frameFinished) {
                // This packet did not produce a frame (or belongs to another stream); free it.
                // The packet that completes a frame is kept so -currentTime can read its pts.
                av_packet_unref(&packet);
            }
        }
        return frameFinished != 0;
     }
     #pragma mark ------------------------------------
     #pragma mark Property accessors
    -(void)setOutputWidth:(int)newValue {
       if (_outputWidth == newValue) return;
       _outputWidth = newValue;
    }
    -(void)setOutputHeight:(int)newValue {
       if (_outputHeight == newValue) return;
       _outputHeight = newValue;
    }
    -(UIImage *)currentImage {
       if (!SJFrame->data[0]) return nil;
       return [self imageFromAVPicture];
    }
    -(double)duration {
       return (double)SJFormatCtx->duration / AV_TIME_BASE;
    }
    - (double)currentTime {
       AVRational timeBase = SJFormatCtx->streams[videoStream]->time_base;
       return packet.pts * (double)timeBase.num / timeBase.den;
    }
    - (int)sourceWidth {
       return SJCodecCtx->width;
    }
    - (int)sourceHeight {
       return SJCodecCtx->height;
    }
    - (double)fps {
       return fps;
    }
     #pragma mark --------------------------
     #pragma mark - Internal methods
    - (UIImage *)imageFromAVPicture
    {
       avpicture_free(&picture);
       avpicture_alloc(&picture, AV_PIX_FMT_RGB24, _outputWidth, _outputHeight);
        struct SwsContext *imgConvertCtx = sws_getContext(SJFrame->width,
                                                           SJFrame->height,
                                                           SJCodecCtx->pix_fmt, // use the decoder's actual pixel format instead of assuming YUV420P
                                                           _outputWidth,
                                                           _outputHeight,
                                                           AV_PIX_FMT_RGB24,
                                                           SWS_FAST_BILINEAR,
                                                           NULL,
                                                           NULL,
                                                           NULL);
       if(imgConvertCtx == nil) return nil;
       sws_scale(imgConvertCtx,
                 SJFrame->data,
                 SJFrame->linesize,
                 0,
                 SJFrame->height,
                 picture.data,
                 picture.linesize);
       sws_freeContext(imgConvertCtx);
       
       CGBitmapInfo bitmapInfo = kCGBitmapByteOrderDefault;
       CFDataRef data = CFDataCreate(kCFAllocatorDefault,
                                     picture.data[0],
                                     picture.linesize[0] * _outputHeight);
       
       CGDataProviderRef provider = CGDataProviderCreateWithCFData(data);
       CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
       CGImageRef cgImage = CGImageCreate(_outputWidth,
                                           _outputHeight,
                                           8,
                                           24,
                                           picture.linesize[0],
                                           colorSpace,
                                           bitmapInfo,
                                           provider,
                                           NULL,
                                           NO,
                                           kCGRenderingIntentDefault);
       UIImage *image = [UIImage imageWithCGImage:cgImage];
       CGImageRelease(cgImage);
       CGColorSpaceRelease(colorSpace);
       CGDataProviderRelease(provider);
       CFRelease(data);
       
       return image;
    }
    ​
     #pragma mark --------------------------
     #pragma mark - Cleanup
     - (void)dealloc {
        // Free the RGB picture buffer
        avpicture_free(&picture);
        // Release the last packet
        av_packet_unref(&packet);
        // Free the YUV frame
        av_frame_free(&SJFrame);
        // Close the decoder
        if (SJCodecCtx) avcodec_close(SJCodecCtx);
        // Close the file
        if (SJFormatCtx) avformat_close_input(&SJFormatCtx);
     }
    @end
    
    • For convenience, drag a UIImageView, a UILabel for the current time, and a button into the storyboard, and connect the outlets and actions
     //
     //  ViewController.m
     //  SJLiveVideo
     //
     //  Created by king on 16/6/14.
     //  Copyright © 2016 king. All rights reserved.
     //
    ​
    #import "ViewController.h"
    #import "SJMoiveObject.h"
    ​
     @interface ViewController ()
     @property (weak, nonatomic) IBOutlet UIImageView *ImageView;
     // Label showing the current playback time (used in -displayNextFrame:)
     @property (weak, nonatomic) IBOutlet UILabel *TimerLabel;
     @property (weak, nonatomic) IBOutlet UIButton *playBtn;
     @property (nonatomic, strong) SJMoiveObject *video;
     @end

     @implementation ViewController

     @synthesize ImageView, playBtn, video;
    ​
    - (void)viewDidLoad {
       [super viewDidLoad];
       
    //   self.video = [[SJMoiveObject alloc] initWithVideo:[NSString bundlePath:@"Dalshabet.mp4"]];
    //   self.video = [[SJMoiveObject alloc] initWithVideo:@"/Users/king/Desktop/Stellar.mp4"];
    //   self.video = [[SJMoiveObject alloc] initWithVideo:@"/Users/king/Downloads/Worth it - Fifth Harmony ft.Kid Ink - May J Lee Choreography.mp4"];
       self.video = [[SJMoiveObject alloc] initWithVideo:@"/Users/king/Downloads/4K.mp4"];
    //   self.video = [[SJMoiveObject alloc] initWithVideo:@"http://wvideo.spriteapp.cn/video/2016/0328/56f8ec01d9bfe_wpd.mp4"];
        // Set the output image size (defaults to the source size)
     //   video.outputWidth = 1920;
     //   video.outputHeight = 1080;

        NSLog(@"video duration: %f", video.duration);
        NSLog(@"source size: %d x %d", video.sourceWidth, video.sourceHeight);
        NSLog(@"output size: %d x %d", video.outputWidth, video.outputHeight);
       
       int tns, thh, tmm, tss;
       tns = video.duration;
       thh = tns / 3600;
       tmm = (tns % 3600) / 60;
       tss = tns % 60;
       
       NSLog(@"fps --> %.2f", video.fps);
       NSLog(@"%02d:%02d:%02d",thh,tmm,tss);
    }
    ​
     - (IBAction)PlayClick:(UIButton *)sender {

        [playBtn setEnabled:NO];
        // Start playback from the specified time
        [video seekTime:0.0];
        // Create a timer that fires once per frame
        [NSTimer scheduledTimerWithTimeInterval:1.0 / video.fps
                                         target:self
                                       selector:@selector(displayNextFrame:)
                                       userInfo:nil
                                        repeats:YES];
     }
    ​
    - (IBAction)TimerCilick:(id)sender {
       
       NSLog(@"current time: %f s",video.currentTime);
    }
    ​
    -(void)displayNextFrame:(NSTimer *)timer {
       self.TimerLabel.text = [self dealTime:video.currentTime];
       if (![video stepFrame]) {
            // If there is no next frame to read, invalidate the timer
           [timer invalidate];
           [playBtn setEnabled:YES];
           return;
       }
       ImageView.image = video.currentImage;
    }
    ​
    - (NSString *)dealTime:(double)time {
       
       int tns, thh, tmm, tss;
       tns = time;
       thh = tns / 3600;
       tmm = (tns % 3600) / 60;
       tss = tns % 60;
       return [NSString stringWithFormat:@"%02d:%02d:%02d",thh,tmm,tss];
    }
    @end
    

    4. Run the program and tap Play


    My blog
    My Weibo
    Baidu Cloud download (password: aqnp)
