1. RGBA to UIImage
+ (UIImage *_Nullable)covertBitmapRGBA8ToUIImage:(unsigned char *)buffer
                                            width:(int)width
                                           height:(int)height
{
    size_t bufferLength = width * height * 4;
    // Wrap the caller's RGBA8 bytes in a data provider (no copy, no release callback).
    CGDataProviderRef provider = CGDataProviderCreateWithData(NULL, buffer, bufferLength, NULL);
    size_t bitsPerComponent = 8;
    size_t bitsPerPixel = 32;
    size_t bytesPerRow = 4 * width;
    CGColorSpaceRef colorSpaceRef = CGColorSpaceCreateDeviceRGB();
    if (colorSpaceRef == NULL) {
        DLog(@"error allocating color space...");
        CGDataProviderRelease(provider);
        return nil;
    }
    CGBitmapInfo bitmapInfo = kCGBitmapByteOrderDefault | kCGImageAlphaPremultipliedLast;
    CGColorRenderingIntent renderingIntent = kCGRenderingIntentDefault;
    CGImageRef iref = CGImageCreate(width, height, bitsPerComponent, bitsPerPixel, bytesPerRow,
                                    colorSpaceRef, bitmapInfo, provider, NULL, YES, renderingIntent);
    uint32_t *pixels = (uint32_t *)malloc(bufferLength);
    if (pixels == NULL) {
        NSLog(@"error: memory not allocated for bitmap");
        CGDataProviderRelease(provider);
        CGColorSpaceRelease(colorSpaceRef);
        CGImageRelease(iref);
        return nil;
    }
    CGContextRef context = CGBitmapContextCreate(pixels, width, height, bitsPerComponent,
                                                 bytesPerRow, colorSpaceRef, bitmapInfo);
    UIImage *image = nil;
    if (context == NULL) {
        DLog(@"error context not created...");
    } else {
        // Draw the wrapped RGBA data into the bitmap context, then snapshot it as a CGImage.
        CGContextDrawImage(context, CGRectMake(0.0, 0.0, width, height), iref);
        CGImageRef imageRef = CGBitmapContextCreateImage(context);
        if ([UIImage respondsToSelector:@selector(imageWithCGImage:scale:orientation:)]) {
            CGFloat scale = [[UIScreen mainScreen] scale];
            image = [UIImage imageWithCGImage:imageRef scale:scale orientation:UIImageOrientationUp];
        } else {
            image = [UIImage imageWithCGImage:imageRef];
        }
        CGImageRelease(imageRef);
        CGContextRelease(context);
    }
    CGColorSpaceRelease(colorSpaceRef);
    CGImageRelease(iref);
    CGDataProviderRelease(provider);
    free(pixels); // freed exactly once here, even when context creation failed
    return image;
}
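For reference, a minimal call site might look like the sketch below. The ImageUtils class name and the synthetic solid-color buffer are placeholders for whatever class actually hosts the method and wherever your RGBA data really comes from (a decoder, a GL readback, etc.).

// Hypothetical call site: build a small RGBA8 buffer and convert it.
int width = 64, height = 64;
size_t length = (size_t)width * height * 4;
unsigned char *rgba = (unsigned char *)malloc(length);
memset(rgba, 0xFF, length); // solid white, fully opaque
UIImage *image = [ImageUtils covertBitmapRGBA8ToUIImage:rgba width:width height:height];
free(rgba); // the method draws into its own bitmap, so the input buffer can be released afterwards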
2. YUV to UIImage
a. YUV (NV12) --> CIImage --> UIImage
+ (UIImage *_Nullable)yuvToUIImage:(int)w h:(int)h buffer:(unsigned char *_Nullable)buffer
{
    NSDictionary *pixelAttributes = @{(id)kCVPixelBufferIOSurfacePropertiesKey : @{}};
    CVPixelBufferRef pixelBuffer = NULL;
    CVReturn result = CVPixelBufferCreate(kCFAllocatorDefault,
                                          w,
                                          h,
                                          kCVPixelFormatType_420YpCbCr8BiPlanarFullRange,
                                          (__bridge CFDictionaryRef)(pixelAttributes),
                                          &pixelBuffer);
    if (result != kCVReturnSuccess) {
        NSLog(@"Unable to create cvpixelbuffer %d", result);
        return nil;
    }
    CVPixelBufferLockBaseAddress(pixelBuffer, 0);
    // y_ch0 is the Y plane of the NV12 data; y_ch1 is the interleaved UV plane that follows it.
    unsigned char *y_ch0 = buffer;
    unsigned char *y_ch1 = buffer + w * h;
    // Copy row by row so any per-row padding in the pixel buffer's planes is respected.
    unsigned char *yDestPlane = (unsigned char *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0);
    size_t yDestStride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0);
    for (int row = 0; row < h; row++) {
        memcpy(yDestPlane + row * yDestStride, y_ch0 + row * w, w);
    }
    unsigned char *uvDestPlane = (unsigned char *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1);
    size_t uvDestStride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1);
    for (int row = 0; row < h / 2; row++) {
        memcpy(uvDestPlane + row * uvDestStride, y_ch1 + row * w, w);
    }
    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
    // CIImage conversion
    CIImage *coreImage = [CIImage imageWithCVPixelBuffer:pixelBuffer];
    CIContext *temporaryContext = [CIContext contextWithOptions:nil];
    CGImageRef videoImage = [temporaryContext createCGImage:coreImage fromRect:CGRectMake(0, 0, w, h)];
    // UIImage conversion
    UIImage *image = [[UIImage alloc] initWithCGImage:videoImage scale:1.0 orientation:UIImageOrientationRight];
    CVPixelBufferRelease(pixelBuffer);
    CGImageRelease(videoImage);
    return image;
}
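A minimal sketch of a call site, assuming the method lives in the same hypothetical ImageUtils class, the NV12 frame comes from some decoder running off the main thread, and previewImageView is a placeholder UIImageView:

// Hypothetical call site: convert one NV12 frame and show it.
int w = 1920, h = 1080;
size_t nv12Size = (size_t)w * h * 3 / 2; // Y plane (w*h) + interleaved UV plane (w*h/2)
unsigned char *nv12 = (unsigned char *)malloc(nv12Size);
// ... fill nv12 with one decoded frame ...
UIImage *frameImage = [ImageUtils yuvToUIImage:w h:h buffer:nv12];
free(nv12); // the method copies the planes into its own CVPixelBuffer
dispatch_async(dispatch_get_main_queue(), ^{
    self.previewImageView.image = frameImage; // UIKit updates must happen on the main thread
});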