[iOS] Converting Video Resolution with libyuv (NV12 Format)

Author: 沃小沃 | Published 2019-01-10 11:07

    libyuv download:
    Link: https://pan.baidu.com/s/1DTInjVivZHQW0PnzJYQK5g Extraction code: cy4b

    I dug through a lot of material online before finding a method that reliably converts the resolution of NV12 data. Some steps I can't fully explain, but the approach does work, so treat it as a reference. I'm also recording it for myself, because the process was genuinely painful: from scaling the NV12 data, to converting the resulting YUV back into a CVPixelBufferRef, and then into a CMSampleBufferRef.

    The pipeline: start from a CMSampleBufferRef (here its pixel format is 420f, i.e. NV12, i.e. kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) and extract the raw NV12 data. Using libyuv, first convert NV12 to I420, scale the I420 frame to the target resolution, then convert the result back to NV12. Three libyuv functions are involved: NV12ToI420, I420Scale, and I420ToNV12.
    My scenario is the system screen-recording API, which delivers a CMSampleBufferRef video stream at 1080×1920, while I need 720×1280.
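
    To see why the intermediate buffers below are laid out the way they are, it helps to compare the two plane layouts. Here is a minimal sketch of the size arithmetic for a tightly packed w×h frame (the helper function is mine, not from the original code); real CVPixelBuffers usually pad each row, which is why the actual code works with the per-plane strides instead:

        // Minimal sketch: tightly packed plane sizes for a w×h YUV 4:2:0 frame.
        // NV12 = one Y plane + one interleaved UV plane at half vertical resolution.
        // I420 = one Y plane + separate U and V planes at half resolution each.
        static void LogYuvPlaneSizes(int w, int h) {
            size_t y_size  = (size_t)w * h;  // Y: full resolution in both formats
            size_t uv_size = y_size / 2;     // NV12 chroma: (w) × (h/2) interleaved UV bytes
            size_t u_size  = y_size / 4;     // I420 U plane: (w/2) × (h/2)
            size_t v_size  = y_size / 4;     // I420 V plane: (w/2) × (h/2)
            // Both formats total w*h*3/2 bytes; only the chroma layout differs:
            // NV12: [ Y... ][ UVUVUV... ]    I420: [ Y... ][ U... ][ V... ]
            NSLog(@"y=%zu nv12_uv=%zu i420_u=%zu i420_v=%zu", y_size, uv_size, u_size, v_size);
        }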

    [[RPScreenRecorder sharedRecorder] startCaptureWithHandler:^(CMSampleBufferRef _Nonnull sampleBuffer, RPSampleBufferType bufferType, NSError * _Nullable error) {

        if (bufferType == RPSampleBufferTypeVideo && sampleBuffer != nil) {

            // Scale the NV12 frame, then rewrap it as a CMSampleBufferRef.
            CVPixelBufferRef pixel = [self convertVideoSampleBufferToYuvData:sampleBuffer];
            CMSampleBufferRef sample = [self pixelBufferToSampleBuffer:pixel];
            [self setwangyiBuffer:sample];
        }
    } completionHandler:^(NSError * _Nullable error) {}];
    

    Here [[RPScreenRecorder sharedRecorder] startCaptureWithHandler:...] is the system (ReplayKit) screen-recording API, which hands you the recorded video stream directly. CVPixelBufferRef pixel = [self convertVideoSampleBufferToYuvData:sampleBuffer]; calls my own conversion method, in which the target size is shareSWidth = 720 and shareSHeight = 1280.
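
    The original post does not show where shareSWidth and shareSHeight are defined; presumably they are file-level constants (or instance variables) along these lines:

        // Assumed declarations: the original post only states the values.
        static const int shareSWidth  = 720;   // target width
        static const int shareSHeight = 1280;  // target height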

    // Conversion
    - (CVPixelBufferRef)convertVideoSampleBufferToYuvData:(CMSampleBufferRef)videoSample {
        
    //    1. Unpack the source NV12 planes.
        // CVPixelBufferRef is a typedef of CVImageBufferRef; the two are used interchangeably.
        // Get the image buffer backing the CMSampleBuffer.
        CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(videoSample);
        // Lock the base address before reading the pixel data.
        CVPixelBufferLockBaseAddress(pixelBuffer, 0);
        // Image width in pixels
        size_t pixelWidth = CVPixelBufferGetWidth(pixelBuffer);
        // Image height in pixels
        size_t pixelHeight = CVPixelBufferGetHeight(pixelBuffer);
        // Y plane of the source buffer
        uint8_t *y_frame = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0);
        // Interleaved UV plane of the source buffer
        uint8_t *uv_frame = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1);
        
        // Y stride (bytes per row; may include padding)
        size_t plane1_stride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0);
        // UV stride
        size_t plane2_stride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1);
        // Y plane size in bytes
        size_t plane1_size = plane1_stride * CVPixelBufferGetHeightOfPlane(pixelBuffer, 0);
        // UV plane size in bytes
        size_t plane2_size = plane2_stride * CVPixelBufferGetHeightOfPlane(pixelBuffer, 1);
        // Total YUV size (bytes to allocate)
        size_t frame_size = plane1_size + plane2_size;

        // Allocate frame_size bytes to hold the converted I420 data.
        uint8_t *buffer = (uint8_t *)malloc(frame_size);
        // buffer is the start of the block; the first plane1_size bytes hold Y.
        uint8_t *dst_u = buffer + plane1_size;
        // dst_u is the start of the U data, which is plane1_size/4 bytes long
        // (this offset arithmetic assumes the Y and UV strides are equal,
        // which holds for the NV12 buffers delivered here).
        uint8_t *dst_v = dst_u + plane1_size / 4;
        
        
        // Let libyuv convert NV12 -> I420.
        libyuv::NV12ToI420(y_frame, (int)plane1_stride,
                           uv_frame, (int)plane2_stride,
                           buffer, (int)plane1_stride,
                           dst_u, (int)plane2_stride / 2,
                           dst_v, (int)plane2_stride / 2,
                           (int)pixelWidth, (int)pixelHeight);
        
        
    //    2. Scale the I420 frame to the target resolution.
        // Size of the scaled I420 buffer
        int scale_yuvBufSize = shareSWidth * shareSHeight * 3 / 2;
        uint8_t *scale_yuvBuf = (uint8_t *)malloc(scale_yuvBufSize);

        // Chroma stride of the scaled frame
        const int scale_uv_stride = (shareSWidth + 1) / 2;

        // Plane lengths of the scaled frame
        const int scale_y_length = shareSWidth * shareSHeight;
        int scale_uv_length = scale_uv_stride * ((shareSHeight + 1) / 2); // per-plane chroma length (not used below)

        uint8_t *scale_Y_data_Dst = scale_yuvBuf;
        uint8_t *scale_U_data_Dst = scale_yuvBuf + scale_y_length;
        uint8_t *scale_V_data_Dst = scale_U_data_Dst + scale_y_length / 4;
    
    
        libyuv::I420Scale(buffer, (int)plane1_stride,
                          dst_u, (int)plane2_stride / 2,
                          dst_v, (int)plane2_stride / 2,
                          (int)pixelWidth, (int)pixelHeight,
                          scale_Y_data_Dst, shareSWidth,
                          scale_U_data_Dst, scale_uv_stride,
                          scale_V_data_Dst, scale_uv_stride,
                          shareSWidth, shareSHeight,
                          libyuv::kFilterNone);
    
    
    //    3. Convert the scaled I420 back to NV12.
        uint8_t *dst_y = (uint8_t *)malloc((shareSWidth * shareSHeight * 3) >> 1);
        int dst_Stride_Y = shareSWidth;
        uint8_t *dst_uv = dst_y + shareSWidth * shareSHeight;
        // NV12's interleaved UV plane has the same byte stride as the Y plane (for even widths).
        int dst_Stride_uv = shareSWidth;

        libyuv::I420ToNV12(scale_Y_data_Dst, shareSWidth,
                           scale_U_data_Dst, scale_uv_stride,
                           scale_V_data_Dst, scale_uv_stride,
                           dst_y, dst_Stride_Y,
                           dst_uv, dst_Stride_uv,
                           shareSWidth, shareSHeight);
    
        CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
        free(buffer);
        free(scale_yuvBuf);
    
        // Wrap the scaled NV12 data in a new CVPixelBufferRef.
        NSDictionary *pixelAttributes = @{(id)kCVPixelBufferIOSurfacePropertiesKey : @{}};
        CVPixelBufferRef pixelBuffer1 = NULL;
        CVReturn result = CVPixelBufferCreate(kCFAllocatorDefault,
                                              shareSWidth, shareSHeight,
                                              kCVPixelFormatType_420YpCbCr8BiPlanarFullRange,
                                              (__bridge CFDictionaryRef)pixelAttributes, &pixelBuffer1);
        if (result != kCVReturnSuccess) {
            // Check before writing into the planes, not after.
            NSLog(@"Unable to create cvpixelbuffer %d", result);
            free(dst_y);
            return NULL;
        }

        CVPixelBufferLockBaseAddress(pixelBuffer1, 0);
        // Copy row by row: the new buffer's bytes-per-row may be padded beyond
        // shareSWidth, in which case a single flat memcpy would misalign every row.
        uint8_t *yDestPlane = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer1, 0);
        size_t yDestStride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer1, 0);
        for (int row = 0; row < shareSHeight; row++) {
            memcpy(yDestPlane + row * yDestStride, dst_y + row * shareSWidth, shareSWidth);
        }
        uint8_t *uvDestPlane = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer1, 1);
        size_t uvDestStride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer1, 1);
        for (int row = 0; row < shareSHeight / 2; row++) {
            memcpy(uvDestPlane + row * uvDestStride, dst_uv + row * shareSWidth, shareSWidth);
        }
        CVPixelBufferUnlockBaseAddress(pixelBuffer1, 0);
        free(dst_y);
    //    CVPixelBufferRelease(pixelBuffer1); // intentionally not released here; pixelBufferToSampleBuffer: releases it
    
        return pixelBuffer1;
    }
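
    As a quick sanity check on the result, one can inspect the returned buffer's format and dimensions. This is an illustrative sketch, not code from the original post:

        CVPixelBufferRef scaled = [self convertVideoSampleBufferToYuvData:sampleBuffer];
        if (scaled) {
            // Expect 720×1280 and '420f' (kCVPixelFormatType_420YpCbCr8BiPlanarFullRange).
            OSType fmt = CVPixelBufferGetPixelFormatType(scaled);
            NSLog(@"scaled %zux%zu fmt='%c%c%c%c'",
                  CVPixelBufferGetWidth(scaled), CVPixelBufferGetHeight(scaled),
                  (char)(fmt >> 24), (char)(fmt >> 16), (char)(fmt >> 8), (char)fmt);
        }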
    
    
    Wrapping the pixel buffer back into a CMSampleBufferRef with a timestamp:
    -(CMSampleBufferRef)pixelBufferToSampleBuffer:(CVPixelBufferRef)pixelBuffer
    {
    
        CMSampleBufferRef sampleBuffer;
        CMTime frameTime = CMTimeMakeWithSeconds([[NSDate date] timeIntervalSince1970], 1000000000);
        // Fields are {duration, presentationTimeStamp, decodeTimeStamp}; only the PTS matters here.
        CMSampleTimingInfo timing = {kCMTimeInvalid, frameTime, kCMTimeInvalid};
        CMVideoFormatDescriptionRef videoInfo = NULL;
        CMVideoFormatDescriptionCreateForImageBuffer(NULL, pixelBuffer, &videoInfo);
        
        OSStatus status = CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, true, NULL, NULL, videoInfo, &timing, &sampleBuffer);
        if (status != noErr) {
            NSLog(@"Failed to create sample buffer with error %zd.", status);
        }
        // Balances the +1 retain from CVPixelBufferCreate in the conversion method.
        CVPixelBufferRelease(pixelBuffer);
        if (videoInfo) {
            CFRelease(videoInfo);
        }
        
        return sampleBuffer;
    }
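
    CMSampleBufferCreateForImageBuffer returns a +1 reference, so the caller owns the sample buffer this method returns. In the capture handler above, that means releasing it after handing it off. A sketch, assuming setwangyiBuffer: retains whatever it needs:

        CVPixelBufferRef pixel = [self convertVideoSampleBufferToYuvData:sampleBuffer];
        CMSampleBufferRef sample = [self pixelBufferToSampleBuffer:pixel];
        if (sample) {
            [self setwangyiBuffer:sample];
            CFRelease(sample); // release the +1 reference from CMSampleBufferCreateForImageBuffer
        }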
    

    References
    https://blog.csdn.net/sinat_36684217/article/details/75117920
    https://www.jianshu.com/p/050234c5fff2
    https://www.jianshu.com/p/68e05ad85490
    https://www.jianshu.com/p/dac9857b34d0
