1. Add Accelerate.framework to the project
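Once the framework is linked, import its umbrella header in the file where the blur method lives; this is the standard header that declares the vImage types and functions used below:

#import <Accelerate/Accelerate.h>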
2. Call the method
self.backima.image = [self blurryImage:self.backima.image withBlurLevel:0.8f];
3. The blur calculation method
- (UIImage *)blurryImage:(UIImage *)image withBlurLevel:(CGFloat)blur {
    // Fall back to a default blur level if the argument is out of range.
    if (blur < 0.f || blur > 1.f) {
        blur = 0.5f;
    }
    // vImageBoxConvolve requires an odd kernel size, so round to the nearest odd value.
    int boxSize = (int)(blur * 100);
    boxSize = boxSize - (boxSize % 2) + 1;

    CGImageRef img = image.CGImage;
    vImage_Buffer inBuffer, outBuffer;
    vImage_Error error;
    void *pixelBuffer;

    // Wrap the source bitmap data in a vImage input buffer.
    CGDataProviderRef inProvider = CGImageGetDataProvider(img);
    CFDataRef inBitmapData = CGDataProviderCopyData(inProvider);
    inBuffer.width = CGImageGetWidth(img);
    inBuffer.height = CGImageGetHeight(img);
    inBuffer.rowBytes = CGImageGetBytesPerRow(img);
    inBuffer.data = (void *)CFDataGetBytePtr(inBitmapData);

    // Allocate a destination buffer with the same dimensions as the source.
    pixelBuffer = malloc(CGImageGetBytesPerRow(img) * CGImageGetHeight(img));
    if (pixelBuffer == NULL) {
        NSLog(@"No pixelbuffer");
    }
    outBuffer.data = pixelBuffer;
    outBuffer.width = CGImageGetWidth(img);
    outBuffer.height = CGImageGetHeight(img);
    outBuffer.rowBytes = CGImageGetBytesPerRow(img);

    // Run the box convolution (a box blur) over the ARGB8888 bitmap.
    error = vImageBoxConvolve_ARGB8888(&inBuffer,
                                       &outBuffer,
                                       NULL,
                                       0,
                                       0,
                                       boxSize,
                                       boxSize,
                                       NULL,
                                       kvImageEdgeExtend);
    if (error) {
        NSLog(@"error from convolution %ld", error);
    }

    // Build a CGImage from the blurred buffer and wrap it in a UIImage.
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef ctx = CGBitmapContextCreate(outBuffer.data,
                                             outBuffer.width,
                                             outBuffer.height,
                                             8,
                                             outBuffer.rowBytes,
                                             colorSpace,
                                             kCGImageAlphaNoneSkipLast);
    CGImageRef imageRef = CGBitmapContextCreateImage(ctx);
    UIImage *returnImage = [UIImage imageWithCGImage:imageRef];

    // Clean up.
    CGContextRelease(ctx);
    CGColorSpaceRelease(colorSpace);
    free(pixelBuffer);
    CFRelease(inBitmapData);
    CGImageRelease(imageRef);

    return returnImage;
}
Source: http://blog.csdn.net/ios_che/article/details/12577131