- (uint8_t *)convertVideoSmapleBufferToYuvData:(CMSampleBufferRef)videoSample {
    // Get the YUV data: CMSampleBufferGetImageBuffer returns the CVImageBufferRef,
    // which holds the pointer to the YUV420 (NV12) pixel data.
    CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(videoSample);
    // Lock the base address before touching the pixel data.
    CVPixelBufferLockBaseAddress(pixelBuffer, 0);
    // Image width in pixels
    size_t pixelWidth = CVPixelBufferGetWidth(pixelBuffer);
    // Image height in pixels
    size_t pixelHeight = CVPixelBufferGetHeight(pixelBuffer);
    // Number of bytes in the Y plane
    size_t y_size = pixelWidth * pixelHeight;
    // Number of bytes in the interleaved UV plane (half of Y for 4:2:0)
    size_t uv_size = y_size / 2;
    // Allocate the output buffer; the caller is responsible for free()ing it.
    uint8_t *yuv_frame = malloc(y_size + uv_size);
    // Copy the Y plane out of the CVImageBufferRef.
    // Note: the single memcpy assumes the plane has no row padding, i.e.
    // CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0) == pixelWidth.
    uint8_t *y_frame = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0);
    memcpy(yuv_frame, y_frame, y_size);
    // Copy the interleaved UV plane out of the CVImageBufferRef.
    uint8_t *uv_frame = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1);
    memcpy(yuv_frame + y_size, uv_frame, uv_size);
    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
    // Return the raw buffer; alternatively, hand ownership to an NSData:
    // return [NSData dataWithBytesNoCopy:yuv_frame length:y_size + uv_size];
    return yuv_frame;
}
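
If Core Video pads the rows (CVPixelBufferGetBytesPerRowOfPlane reports a stride larger than the pixel width), the single memcpy above would copy padding bytes as image data. Below is a minimal, hedged sketch of a row-by-row copy that skips the padding; CopyPlaneRows is a hypothetical helper name, not part of the original post.

// Stride-aware copy of one pixel-buffer plane, row by row (illustrative sketch).
// dst receives rowBytes * rowCount tightly packed bytes; the source stride may be larger.
static void CopyPlaneRows(uint8_t *dst, CVPixelBufferRef pixelBuffer,
                          size_t planeIndex, size_t rowBytes, size_t rowCount) {
    uint8_t *src  = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, planeIndex);
    size_t stride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, planeIndex);
    for (size_t row = 0; row < rowCount; row++) {
        memcpy(dst + row * rowBytes, src + row * stride, rowBytes);
    }
}

Inside the method above, the two memcpy calls could then be replaced by CopyPlaneRows(yuv_frame, pixelBuffer, 0, pixelWidth, pixelHeight) for the Y plane and CopyPlaneRows(yuv_frame + y_size, pixelBuffer, 1, pixelWidth, pixelHeight / 2) for the UV plane, since an NV12 UV plane has pixelWidth bytes per row and half the number of rows.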
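
For context, here is a minimal usage sketch, assuming the method lives in a class that also serves as the AVCaptureVideoDataOutput sample buffer delegate; only the system APIs are real, everything else is illustrative. The key point is that the returned buffer was malloc()ed inside the method, so the caller must free it.

// Hedged usage sketch: handle a captured frame, extract the NV12 bytes, then release them.
- (void)captureOutput:(AVCaptureOutput *)output
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection {
    CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    size_t width  = CVPixelBufferGetWidth(pixelBuffer);
    size_t height = CVPixelBufferGetHeight(pixelBuffer);
    uint8_t *yuv = [self convertVideoSmapleBufferToYuvData:sampleBuffer];
    // ... hand the NV12 buffer (width * height * 3 / 2 bytes) to an encoder here ...
    free(yuv); // the convert method malloc()s the buffer, so the caller must free it
}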