Learning FFmpeg: Building a Mac Player (3): Playing YUV Data Directly (CoreImage)

Author: SunBye | Published 2018-08-02 18:01

    The previous article used AVFilter to convert the decoded YUV data to RGB before displaying it. This article skips that conversion and displays the YUV data directly with CoreImage.
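
    The repacking shown below assumes the decoder outputs planar 4:2:0 frames (AV_PIX_FMT_YUV420P), which is what gets rearranged into the biplanar NV12 layout that kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange expects. A minimal guard along these lines (not part of the original code) would make that assumption explicit right before the copy step:

    // Hypothetical sanity check: the UV interleaving below is only valid for
    // planar 4:2:0. Other decoder outputs (e.g. AV_PIX_FMT_NV12, which is
    // already biplanar) would need different handling.
    if (frame->format != AV_PIX_FMT_YUV420P) {
        NSLog(@"unexpected pixel format %s", av_get_pix_fmt_name((enum AVPixelFormat)frame->format));
        av_frame_free(&frame);
        return;
    }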

    The decoding code

    Remove the AVFilter-related code from the previous article; decoding now looks like this:
    
    - (void)decodeVideo {
        dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{  // decode on a global queue
            AVPacket * packet = av_packet_alloc();
            if (av_read_frame(self->pFormatCtx, packet) >= 0) {
                if (packet->stream_index == self->videoIndex) {  // only decode packets from the video stream
                    // Since FFmpeg 3.x, avcodec_send_packet and avcodec_receive_frame are used
                    // as a pair for decoding, both audio and video; avcodec_decode_video2 and
                    // avcodec_decode_audio4 are deprecated.
                    int ret = avcodec_send_packet(self->pCodecCtx, packet);
                    if (ret < 0) {
                        NSLog(@"send packet error");
                        av_packet_free(&packet);
                        return;
                    }
                    AVFrame * frame = av_frame_alloc();
                    ret = avcodec_receive_frame(self->pCodecCtx, frame);
                    if (ret < 0) {
                        // AVERROR(EAGAIN) just means the decoder needs more input before it
                        // can emit a frame; either way there is no frame to show this time.
                        NSLog(@"receive frame error");
                        av_frame_free(&frame);
                        av_packet_free(&packet);
                        return;
                    }
                    // frame->data holds the decoded YUV planes: data[0] is Y, data[1] is U,
                    // data[2] is V; linesize[] gives the stride (bytes per row) of each plane.
                    float time = packet->pts * av_q2d(self->pFormatCtx->streams[self->videoIndex]->time_base);  // timestamp of the current frame, in seconds
                    av_packet_free(&packet);
    
                    CVReturn theError;
                    if (!self->pixelBufferPool){  // lazily create a pixel buffer pool, so pixel buffers can be reused across frames
                        NSMutableDictionary* attributes = [NSMutableDictionary dictionary];
                        [attributes setObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] forKey:(NSString*)kCVPixelBufferPixelFormatTypeKey];
                        [attributes setObject:[NSNumber numberWithInt:frame->width] forKey: (NSString*)kCVPixelBufferWidthKey];
                        [attributes setObject:[NSNumber numberWithInt:frame->height] forKey: (NSString*)kCVPixelBufferHeightKey];
                        [attributes setObject:@(frame->linesize[0]) forKey:(NSString*)kCVPixelBufferBytesPerRowAlignmentKey];
                        [attributes setObject:[NSDictionary dictionary] forKey:(NSString*)kCVPixelBufferIOSurfacePropertiesKey];
                        theError = CVPixelBufferPoolCreate(kCFAllocatorDefault, NULL, (__bridge CFDictionaryRef) attributes, &self->pixelBufferPool);
                        if (theError != kCVReturnSuccess){
                            NSLog(@"CVPixelBufferPoolCreate Failed");
                        }
                    }
    
                    CVPixelBufferRef pixelBuffer = nil;
                    theError = CVPixelBufferPoolCreatePixelBuffer(NULL, self->pixelBufferPool, &pixelBuffer);
                    if(theError != kCVReturnSuccess){
                        NSLog(@"CVPixelBufferPoolCreatePixelBuffer Failed");
                    }
    
                    theError = CVPixelBufferLockBaseAddress(pixelBuffer, 0);
                    if (theError != kCVReturnSuccess) {
                        NSLog(@"lock error");
                    }
                    /*
                     In the CVPixelBuffer the Y data lives in plane 0 and the UV data in plane 1,
                     laid out like this:
                     frame->data[0]  .........   YYYYYYYYY
                     frame->data[1]  .........   UUUUUUUU
                     frame->data[2]  .........   VVVVVVVVV
                     PixelBuffer->Plane0 .......  YYYYYYYY
                     PixelBuffer->Plane1 .......  UVUVUVUVUV
                     So the Y data is copied straight into plane 0, and the U and V data are
                     interleaved into plane 1.
                     */
                    size_t bytesPerRowY = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0);
                    size_t bytesPerRowUV = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1);
                    // base address of plane 0 (Y); copy row by row, since the pool's
                    // bytes-per-row is not guaranteed to equal frame->linesize[0]
                    uint8_t * base = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0);
                    for (int row = 0; row < frame->height; row++) {
                        memcpy(base + row * bytesPerRowY, frame->data[0] + row * frame->linesize[0], frame->width);
                    }
                    // base address of plane 1 (interleaved UV)
                    base = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1);
                    // interleave the U and V data into dstData, then copy it onto plane 1
                    uint32_t size = frame->linesize[1] * frame->height / 2;
                    uint8_t * dstData = new uint8_t[2 * size];
                    for (uint32_t i = 0; i < 2 * size; i++){
                        if (i % 2 == 0){
                            dstData[i] = frame->data[1][i / 2];  // U sample
                        }else {
                            dstData[i] = frame->data[2][i / 2];  // V sample
                        }
                    }
                    for (int row = 0; row < frame->height / 2; row++) {
                        memcpy(base + row * bytesPerRowUV, dstData + row * 2 * frame->linesize[1], frame->width);
                    }
                    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
                    av_frame_free(&frame);
                    delete [] dstData;  // allocated with new[], so it must be released with delete[], not free()
    
                    CIImage *coreImage = [CIImage imageWithCVPixelBuffer:pixelBuffer];
                    // avoid creating a CIContext every frame: context is an instance variable initialized once as self->context = [CIContext contextWithOptions:nil];
                    CGImageRef videoImage = [self->context createCGImage:coreImage
                                                                       fromRect:CGRectMake(0, 0, self->pCodecCtx->width, self->pCodecCtx->height)];
                    NSImage * image = [[NSImage alloc] initWithCGImage:videoImage size:NSSizeFromCGSize(CGSizeMake(self->pCodecCtx->width, self->pCodecCtx->height))];
                    CVPixelBufferRelease(pixelBuffer);
                    CGImageRelease(videoImage);
                    dispatch_async(dispatch_get_main_queue(), ^{
                        self.label.stringValue = [NSString stringWithFormat:@"%.2d:%.2d", (int)time/60, (int)time%60];
                        self.imageView.image = image;
                        self.slider.floatValue = time / (float)self->videoDuration;
                    });
                } else {
                    av_packet_free(&packet);  // not a video packet, discard it
                }
            } else {
                // av_read_frame returned < 0: end of file (or a read error), clean up
                av_packet_free(&packet);
                avcodec_free_context(&self->pCodecCtx);
                avformat_close_input(&self->pFormatCtx);  // this also frees the format context
                dispatch_async(dispatch_get_main_queue(), ^{
                    [self->timer invalidate];  // invalidate the timer on the run loop that scheduled it
                });
            }
        });
    }
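
    decodeVideo decodes and displays exactly one frame per call, so something must invoke it once per frame interval. The article doesn't show that part; below is a minimal sketch, assuming the one-time CIContext initialization mentioned in the comment above and a timer driving the calls (startPlayback and the 25 fps interval are hypothetical; context and timer are the instance variables the method references):

    - (void)startPlayback {  // hypothetical entry point, not shown in the article
        // create the CIContext once so decodeVideo can reuse it for every frame
        self->context = [CIContext contextWithOptions:nil];
        // call decodeVideo roughly once per frame; 25 fps is assumed here
        self->timer = [NSTimer scheduledTimerWithTimeInterval:1.0 / 25.0
                                                       target:self
                                                     selector:@selector(decodeVideo)
                                                     userInfo:nil
                                                      repeats:YES];
    }

    A fixed interval ignores the stream's actual frame durations: the time computed from packet->pts drives the label and the slider, but accurate pacing (and audio sync) would need more than a plain timer.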
    
