FFmpeg - Playing YUV and Video Frame Format Conversion

Author: lieon | Published 2021-07-17 00:32

    Playing YUV

    • Read one YUV video frame at a time, driven by a timer that fires at the stream's frame rate
    - (void)play {
        // Interval between two frames, derived from the frame rate
        NSTimeInterval interval = 1.0 / _yuv.fps;
        __weak typeof(self)weakSelf = self;
        self.timer = [NSTimer timerWithTimeInterval:interval repeats:true block:^(NSTimer * _Nonnull timer) {
            [weakSelf timerAction];
        }];
        // Render the first frame immediately, then let the run loop drive the rest
        [self.timer fire];
        [[NSRunLoop mainRunLoop] addTimer:self.timer forMode:NSRunLoopCommonModes];
    }
    
    • Convert the YUV frame to RGB data
    • Build a CGImage from the RGB data
    • Draw the CGImage on the view
    
    - (void)setYUV:(YuvParam*)yuv {
        _yuv = yuv;
        // Open the raw YUV file for reading
        self.file = [NSFileHandle fileHandleForReadingAtPath:_yuv.filename];
        // Size in bytes of one frame (imageSize is an instance variable)
        imageSize = av_image_get_buffer_size((AVPixelFormat)_yuv.pixelFomat, (int)_yuv.width, (int)_yuv.height, 1);
        // Size of the view the video is drawn into
        CGFloat width = self.bounds.size.width;
        CGFloat height = self.bounds.size.height;
        // Compute an aspect-fit rect centered in the view
        CGFloat dx = 0;
        CGFloat dy = 0;
        CGFloat dw = _yuv.width;
        CGFloat dh = _yuv.height;
        // Scale down only if the video is larger than the view
        if (dw > width || dh > height) {
            if (dw * height > width * dh) { // video aspect ratio > view aspect ratio: fit to width
                dh = width * dh / dw;
                dw = width;
            } else {                        // otherwise: fit to height
                dw = height * dw / dh;
                dh = height;
            }
        }
        // Center the scaled rect (playerRect is an instance variable)
        dx = (width - dw) * 0.5;
        dy = (height - dh) * 0.5;
        playerRect = CGRectMake(dx, dy, dw, dh);
    }
    
    // Frees the converted RGB buffer (malloc'd by convertRawVideo) once the data provider no longer needs it
    static void releaseRGBPixels(void *info, const void *data, size_t size) {
        free((void *)data);
    }
    
    - (CGImageRef)generateImage:(const RawVideoFrame &)output {
        int width = output.width;
        int height = output.height;
        size_t bufferLength = (size_t)width * height * 3;   // RGB24: 3 bytes per pixel
        char *buffer = output.pixels;
        // The provider takes ownership of the buffer and frees it via the callback
        CGDataProviderRef provider = CGDataProviderCreateWithData(NULL, buffer, bufferLength, releaseRGBPixels);
        size_t bitsPerComponent = 8;
        size_t bitsPerPixel = 24;
        size_t bytesPerRow = 3 * width;
        
        CGColorSpaceRef colorSpaceRef = CGColorSpaceCreateDeviceRGB();
        if (colorSpaceRef == NULL) {
            CGDataProviderRelease(provider);
            return NULL;
        }
        
        CGBitmapInfo bitmapInfo = kCGImageAlphaNone;
        CGColorRenderingIntent renderingIntent = kCGRenderingIntentDefault;
        
        CGImageRef iref = CGImageCreate(width,
                                        height,
                                        bitsPerComponent,
                                        bitsPerPixel,
                                        bytesPerRow,
                                        colorSpaceRef,
                                        bitmapInfo,
                                        provider,   // data provider
                                        NULL,       // decode array
                                        NO,         // should interpolate
                                        renderingIntent);
        // CGImageCreate retains the provider and color space, so drop our references
        CGDataProviderRelease(provider);
        CGColorSpaceRelease(colorSpaceRef);
        return iref;   // caller is responsible for CGImageRelease
    }
    
    - (void)timerAction {
        // Read exactly one frame's worth of bytes from the file
        NSData *imageData = [self.file readDataOfLength:imageSize];
        if (imageData.length > 0) {
            RawVideoFrame input = {
                (char*)imageData.bytes,
                static_cast<int>(_yuv.width),
                static_cast<int>(_yuv.height),
                (AVPixelFormat)_yuv.pixelFomat,
            };
            RawVideoFrame output = {
                nullptr,
                static_cast<int>(_yuv.width),
                static_cast<int>(_yuv.height),
                AV_PIX_FMT_RGB24
            };
            // Convert the YUV frame to packed RGB24
            [FFMpegs convertRawVideo:&input output:&output];
            // Wrap the RGB data in a CGImage and hand it to the layer
            CGImageRef iref = [self generateImage:output];
            self.playLayer.contents = (__bridge id)iref;
            self.playLayer.frame = playerRect;
            // The layer retains the contents; release our reference
            CGImageRelease(iref);
        } else {
            // End of file: stop the timer
            [self.timer invalidate];
            self.timer = nil;
        }
    }
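    
    playLayer is used above but its setup is not shown in the excerpt. A minimal sketch of a lazy getter that would work with this code (the property name is taken from the usage; everything else is an assumption):
    
    #import <QuartzCore/QuartzCore.h>
    
    // Assumed declaration elsewhere: @property (nonatomic, strong) CALayer *playLayer;
    - (CALayer *)playLayer {
        if (!_playLayer) {
            _playLayer = [CALayer layer];
            // Stretch the CGImage contents to fill the computed playerRect
            _playLayer.contentsGravity = kCAGravityResize;
            [self.layer addSublayer:_playLayer];
        }
        return _playLayer;
    }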
    
    

    Video Frame Format Conversion

    • Create the conversion context
    • Allocate the input and output buffers
    • Compute the input and output frame sizes
    • Perform the conversion
    • Read back the converted data
    
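    The method below calls into libswscale and libavutil and relies on an END error-handling macro that the excerpt does not show. The headers are FFmpeg's standard ones; the macro is an assumption reconstructed from how it is used (log the failing call and jump to the end: cleanup label when ret is negative):
    
    extern "C" {
    #include <libavutil/imgutils.h>
    #include <libswscale/swscale.h>
    }
    
    // Assumed helper: bail out to the cleanup label if the previous FFmpeg call failed
    #define END(func) \
        if (ret < 0) { \
            NSLog(@"%s error: %d", #func, ret); \
            goto end; \
        }
    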
    + (void)convertRawVideo:(RawVideoFrame*)input
                      output:(RawVideoFrame*)output {
        // Conversion context
        SwsContext *ctx = nullptr;
        // Input/output buffers: one pointer per plane (e.g. Y, U, V, alpha)
        uint8_t *inData[4] = {nullptr}, *outData[4] = {nullptr};
        // Bytes per row (linesize) of each plane
        int inStrides[4], outStrides[4];
        // Size in bytes of one input/output frame
        int inFrameSize, outFrameSize;
        int ret = 0;
        // Create the scaling/conversion context
        ctx = sws_getContext(input->width, input->height, input->format,
                             output->width, output->height, output->format,
                             SWS_BILINEAR, nullptr, nullptr, nullptr);
        if (!ctx) {
            NSLog(@"sws_getContext error");
            goto end;
        }
        // Allocate the input buffer (align = 1 keeps the planes contiguous)
        ret = av_image_alloc(inData, inStrides,
                             input->width,
                             input->height,
                             input->format, 1);
        END(av_image_alloc);
        // Allocate the output buffer
        ret = av_image_alloc(outData, outStrides,
                             output->width,
                             output->height,
                             output->format, 1);
        END(av_image_alloc);
        
        // Size in bytes of one frame in each format
        inFrameSize = av_image_get_buffer_size(input->format, input->width, input->height, 1);
        outFrameSize = av_image_get_buffer_size(output->format, output->width, output->height, 1);
        
        // Copy the caller's frame into the input buffer; a single memcpy works
        // because the planes were allocated contiguously (align = 1)
        memcpy(inData[0], input->pixels, inFrameSize);
        
        // Convert
        sws_scale(ctx,
                  inData, inStrides, 0, input->height,
                  outData, outStrides);
        // Hand the converted frame back to the caller, who owns the malloc'd buffer
        output->frameSize = outFrameSize;
        output->pixels = (char*)malloc(outFrameSize);
        memcpy(output->pixels, outData[0], outFrameSize);
    
    end:
        NSLog(@"end");
        av_freep(&inData[0]);
        av_freep(&outData[0]);
        sws_freeContext(ctx);
    }
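    
    A minimal standalone use of the converter, assuming one raw YUV420P frame of 640x480 already sits in yuvBytes (the variable name and dimensions here are illustrative). The caller owns output.pixels and must free it; in the playback path above that free happens through the CGDataProvider release callback instead:
    
    RawVideoFrame input = {
        yuvBytes,                 // one raw YUV420P frame
        640, 480,
        AV_PIX_FMT_YUV420P
    };
    RawVideoFrame output = { nullptr, 640, 480, AV_PIX_FMT_RGB24 };
    [FFMpegs convertRawVideo:&input output:&output];
    // ... use output.pixels (output.frameSize bytes of packed RGB24) ...
    free(output.pixels);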
    
