#pragma mark - Convert buffer to image
- (UIImage *)convertSampleBufferToUIImageSampleBuffer:(CMSampleBufferRef)sampleBuffer {
    // Get the CMSampleBuffer's Core Video image buffer for the media data
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    // Lock the base address of the pixel buffer
    CVPixelBufferLockBaseAddress(imageBuffer, 0);
    // Get the base address of the first plane (the luma plane for biplanar YUV formats)
    void *baseAddress = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);
    // Get the number of bytes per row for that plane
    size_t bytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 0);
    // Get the pixel buffer width and height
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    // Create a device-dependent gray color space
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceGray();
    // Create a bitmap graphics context with the sample buffer data
    CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8,
                                                 bytesPerRow, colorSpace, kCGImageAlphaNone);
    // Create a Quartz image from the pixel data in the bitmap graphics context
    CGImageRef quartzImage = CGBitmapContextCreateImage(context);
    // Unlock the pixel buffer
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    // Free up the context and color space
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);
    // Create an image object from the Quartz image, rotated to match the camera orientation
    // UIImage *image = [UIImage imageWithCGImage:quartzImage];
    UIImage *image = [UIImage imageWithCGImage:quartzImage scale:1.0 orientation:UIImageOrientationRight];
    // Release the Quartz image
    CGImageRelease(quartzImage);
    return image;
}
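For context, a minimal usage sketch (not from the original post): the grayscale conversion is typically called from the AVCaptureVideoDataOutputSampleBufferDelegate callback. This assumes the video output delivers a biplanar YUV format (e.g. kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange), so that plane 0 read above really is the luma plane; previewImageView is a hypothetical UIImageView.

- (void)captureOutput:(AVCaptureOutput *)output
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection {
    // Convert the incoming frame to a grayscale UIImage using the method above
    UIImage *grayImage = [self convertSampleBufferToUIImageSampleBuffer:sampleBuffer];
    // UIKit must be touched on the main thread; previewImageView is a hypothetical UIImageView
    dispatch_async(dispatch_get_main_queue(), ^{
        self.previewImageView.image = grayImage;
    });
}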
- (UIImage *)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer {
    // Get the CMSampleBuffer's Core Video image buffer for the media data
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    // Lock the base address of the pixel buffer
    CVPixelBufferLockBaseAddress(imageBuffer, 0);
    // Get the base address of the pixel buffer
    void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
    // Get the number of bytes per row of the pixel buffer
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    // Get the pixel buffer width and height
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    // Create a device-dependent RGB color space
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    // Create a bitmap graphics context with the sample buffer data (BGRA layout)
    CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8,
                                                 bytesPerRow, colorSpace,
                                                 kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    // Create a Quartz image from the pixel data in the bitmap graphics context
    CGImageRef quartzImage = CGBitmapContextCreateImage(context);
    // Unlock the pixel buffer
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    // Free up the context and color space
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);
    // Create a UIImage from the Quartz image, orienting it to match the camera in use
    // UIImage *image = [UIImage imageWithCGImage:quartzImage];
    UIImage *image = nil;
    if (_input.device.position == AVCaptureDevicePositionFront) {
        image = [UIImage imageWithCGImage:quartzImage scale:1.0 orientation:UIImageOrientationLeftMirrored];
    } else {
        image = [UIImage imageWithCGImage:quartzImage scale:1.0 orientation:UIImageOrientationRight];
    }
    // Release the Quartz image
    CGImageRelease(quartzImage);
    return image;
}
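One point the snippet leaves implicit: the RGB path reads the whole pixel buffer as 32-bit BGRA (hence the kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst flags), so the capture output must be configured to deliver frames in that format. A minimal setup sketch, with the session and output names (_session, videoOutput) assumed rather than taken from the original:

AVCaptureVideoDataOutput *videoOutput = [[AVCaptureVideoDataOutput alloc] init];
// Ask for 32-bit BGRA frames so the bitmap context flags above match the buffer layout
videoOutput.videoSettings = @{ (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA) };
[videoOutput setSampleBufferDelegate:self
                               queue:dispatch_queue_create("video.capture.queue", DISPATCH_QUEUE_SERIAL)];
if ([_session canAddOutput:videoOutput]) {   // _session is an assumed AVCaptureSession ivar
    [_session addOutput:videoOutput];
}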