@interface UIImage (Category)
// Create an image from a solid color
+ (UIImage *) imageWithColor:(UIColor*)color;
// Scale and crop the image to a target size
+ (UIImage *) imageByScalingAndCroppingForSourceImage:(UIImage *)sourceImage targetSize:(CGSize)targetSize;
// Rotate the image by an angle in degrees
- (UIImage *) imageRotatedByDegrees:(CGFloat)degrees;
// Stretchable image from UIEdgeInsets cap insets
- (UIImage *) resizableImage:(UIEdgeInsets)insets;
// Scale the image by a CGFloat factor
- (UIImage *) imageByResizeToScale:(CGFloat)scale;
// Resize the image to fit within a maximum CGSize
- (UIImage *) imageByResizeWithMaxSize:(CGSize)size;
// Thumbnail of the image for a given CGSize
- (UIImage *) imageWithThumbnailForSize:(CGSize)size;
// Crop the image to a CGRect
- (UIImage *) imageByCropToRect:(CGRect)rect;
// Round the image's corners
- (UIImage *) imageByRoundCornerRadius:(CGFloat)radius;
// Round the image's corners and add a border
- (UIImage *) imageByRoundCornerRadius:(CGFloat)radius
borderWidth:(CGFloat)borderWidth
borderColor:(UIColor *)borderColor;
// Rotate the image 90° counterclockwise (left)
- (UIImage *)imageByRotateLeft90;
// Rotate the image 90° clockwise (right)
- (UIImage *)imageByRotateRight90;
// Rotate the image 180°
- (UIImage *)imageByRotate180;
// Flip the image vertically / horizontally
- (UIImage *)imageByFlipVertical;
- (UIImage *)imageByFlipHorizontal;
@end
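// Illustrative usage (a sketch, not from the original post; `photo` is assumed to be
// an existing UIImage):
//
//     UIImage *swatch  = [UIImage imageWithColor:[UIColor redColor]];
//     UIImage *squared = [UIImage imageByScalingAndCroppingForSourceImage:photo
//                                                              targetSize:CGSizeMake(200, 200)];
//     UIImage *rounded = [squared imageByRoundCornerRadius:12];
//     UIImage *tilted  = [photo imageRotatedByDegrees:45];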
@interface UIImage (Blur)
// Frosted-glass blur effects. These match the system blur styles, but each simply returns a single blurred UIImage.
- (UIImage *)imageByBlurSoft;
- (UIImage *)imageByBlurLight;
- (UIImage *)imageByBlurExtraLight;
- (UIImage *)imageByBlurDark;
- (UIImage *)imageByBlurWithTint:(UIColor *)tintColor;
- (UIImage *)imageByBlurRadius:(CGFloat)blurRadius
tintColor:(UIColor *)tintColor
tintMode:(CGBlendMode)tintBlendMode
saturation:(CGFloat)saturation
maskImage:(UIImage *)maskImage;
// Box-blur the image; the area inside exclusionPath is left unblurred
- (UIImage *) boxblurImageWithBlur:(CGFloat)blur exclusionPath:(UIBezierPath *)exclusionPath;
@end
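// Illustrative usage (a sketch; `screenshot` is assumed to be an existing UIImage):
//
//     UIImage *frosted = [screenshot imageByBlurLight];
//     UIImage *custom  = [screenshot imageByBlurRadius:25
//                                            tintColor:[UIColor colorWithWhite:0.1 alpha:0.4]
//                                             tintMode:kCGBlendModeNormal
//                                           saturation:1.4
//                                            maskImage:nil];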
@interface UIImage (ImageEffects)
// Image effects (blur and tint helpers)
- (UIImage *)applyLightEffect;
- (UIImage *)applyExtraLightEffect;
- (UIImage *)applyDarkEffect;
- (UIImage *)applyBlurEffect;
- (UIImage *)applyTintEffectWithColor:(UIColor *)tintColor;
- (UIImage *)applyBlurWithRadius:(CGFloat)blurRadius
tintColor:(UIColor *)tintColor
saturationDeltaFactor:(CGFloat)saturationDeltaFactor
maskImage:(UIImage *)maskImage;
@end
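// Illustrative usage (a sketch; `snapshot` is assumed to be a UIImage snapshot of the
// content behind a panel, in the style of Apple's UIImage+ImageEffects sample):
//
//     UIImage *lightBackground  = [snapshot applyLightEffect];
//     UIImage *tintedBackground = [snapshot applyTintEffectWithColor:[UIColor blueColor]];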
//
// PK-ios
//
// Created by peikua on 15/9/15.
// Copyright (c) 2015 peikua. All rights reserved.
//
#import "UIImage+Category.h"
#import <QuartzCore/QuartzCore.h>
#import <Accelerate/Accelerate.h>
#define ORIGINAL_MAX_WIDTH 640.0f
#define YY_SWAP(_a_, _b_) do { __typeof__(_a_) _tmp_ = (_a_); (_a_) = (_b_); (_b_) = _tmp_; } while (0)
@implementation UIImage (Category)
- (UIImage*)imageRotatedByDegrees:(CGFloat)degrees
{
CGFloat width = CGImageGetWidth(self.CGImage);
CGFloat height = CGImageGetHeight(self.CGImage);
CGSize rotatedSize;
rotatedSize.width = width;
rotatedSize.height = height;
UIGraphicsBeginImageContext(rotatedSize);
CGContextRef bitmap = UIGraphicsGetCurrentContext();
CGContextTranslateCTM(bitmap, rotatedSize.width/2, rotatedSize.height/2);
CGContextRotateCTM(bitmap, degrees * M_PI / 180);
CGContextRotateCTM(bitmap, M_PI);
CGContextScaleCTM(bitmap, -1.0, 1.0);
CGContextDrawImage(bitmap, CGRectMake(-rotatedSize.width/2, -rotatedSize.height/2, rotatedSize.width, rotatedSize.height), self.CGImage);
UIImage* newImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
return newImage;
}
- (UIImage*) resizableImage:(UIEdgeInsets)insets {
if ([[[UIDevice currentDevice] systemVersion] floatValue] >= 6.0f) {
return [self resizableImageWithCapInsets:insets resizingMode:UIImageResizingModeStretch];
}
return [self stretchableImageWithLeftCapWidth:insets.left topCapHeight:insets.top];
}
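// Example (illustrative sketch; @"bubble" and self.bubbleView are hypothetical names):
// stretch a chat-bubble image while keeping its 20 pt edges fixed.
//
//     UIImage *bubble = [[UIImage imageNamed:@"bubble"] resizableImage:UIEdgeInsetsMake(20, 20, 20, 20)];
//     self.bubbleView.image = bubble;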
+ (UIImage *) imageByScalingAndCroppingForSourceImage:(UIImage *)sourceImage targetSize:(CGSize)targetSize {
UIImage *newImage = nil;
CGSize imageSize = sourceImage.size;
CGFloat width = imageSize.width;
CGFloat height = imageSize.height;
CGFloat targetWidth = targetSize.width;
CGFloat targetHeight = targetSize.height;
CGFloat scaleFactor = 0.0;
CGFloat scaledWidth = targetWidth;
CGFloat scaledHeight = targetHeight;
CGPoint thumbnailPoint = CGPointMake(0.0,0.0);
if (CGSizeEqualToSize(imageSize, targetSize) == NO)
{
CGFloat widthFactor = targetWidth / width;
CGFloat heightFactor = targetHeight / height;
if (widthFactor > heightFactor)
scaleFactor = widthFactor; // scale to fit height
else
scaleFactor = heightFactor; // scale to fit width
scaledWidth = width * scaleFactor;
scaledHeight = height * scaleFactor;
// center the image
if (widthFactor > heightFactor)
{
thumbnailPoint.y = (targetHeight - scaledHeight) * 0.5;
}
else
if (widthFactor < heightFactor)
{
thumbnailPoint.x = (targetWidth - scaledWidth) * 0.5;
}
}
UIGraphicsBeginImageContext(targetSize); // this will crop
CGRect thumbnailRect = CGRectZero;
thumbnailRect.origin = thumbnailPoint;
thumbnailRect.size.width = scaledWidth;
thumbnailRect.size.height = scaledHeight;
[sourceImage drawInRect:thumbnailRect];
newImage = UIGraphicsGetImageFromCurrentImageContext();
if(newImage == nil) NSLog(@"could not scale image");
//pop the context to get back to the default
UIGraphicsEndImageContext();
return newImage;
}
+ (UIImage*) imageWithColor:(UIColor *)color {
CGRect rect = CGRectMake(0.0f, 0.0f, 1.0f, 1.0f);
UIGraphicsBeginImageContext(rect.size);
CGContextRef context = UIGraphicsGetCurrentContext();
CGContextSetFillColorWithColor(context, color.CGColor);
CGContextFillRect(context, rect);
UIImage* image = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
NSData* imageData = UIImageJPEGRepresentation(image, 1.0f);
image = [UIImage imageWithData:imageData];
return image;
}
- (UIImage *)imageByResizeToScale:(CGFloat)scale{
CGSize size = CGSizeMake(self.size.width *scale, self.size.height * scale);
if (size.width <= 0 || size.height <= 0) return nil;
UIGraphicsBeginImageContextWithOptions(size, NO, self.scale);
[self drawInRect:CGRectMake(0, 0, size.width, size.height)];
UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
return image;
}
- (UIImage *)imageByResizeWithMaxSize:(CGSize)size{
CGSize resize = self.size;
if(resize.width > size.width){
resize = CGSizeMake(size.width, size.width/resize.width * resize.height);
}
if(resize.height > size.height){
resize = CGSizeMake(size.height/resize.height * resize.width, size.height);
}
if (resize.width <= 0 || resize.height <= 0) return nil;
UIGraphicsBeginImageContextWithOptions(resize, NO, self.scale);
[self drawInRect:CGRectMake(0, 0, resize.width, resize.height)];
UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
return image;
}
- (UIImage *)imageWithThumbnailForSize:(CGSize)size{
CGSize imageSize = self.size;
if(imageSize.width < imageSize.height){
imageSize = CGSizeMake(size.width, size.width/imageSize.width * imageSize.height);
}
else{
imageSize = CGSizeMake(size.height/imageSize.height * imageSize.width, size.height);
}
if (imageSize.width <= 0 || imageSize.height <= 0) return nil;
UIGraphicsBeginImageContextWithOptions(imageSize, NO, self.scale);
[self drawInRect:CGRectMake(0, 0, imageSize.width, imageSize.height)];
UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
return image;
}
- (UIImage *)imageByCropToRect:(CGRect)rect {
rect.origin.x *= self.scale;
rect.origin.y *= self.scale;
rect.size.width *= self.scale;
rect.size.height *= self.scale;
if (rect.size.width <= 0 || rect.size.height <= 0) return nil;
CGImageRef imageRef = CGImageCreateWithImageInRect(self.CGImage, rect);
UIImage *image = [UIImage imageWithCGImage:imageRef scale:self.scale orientation:self.imageOrientation];
CGImageRelease(imageRef);
return image;
}
- (UIImage *)imageByRoundCornerRadius:(CGFloat)radius {
return [self imageByRoundCornerRadius:radius borderWidth:0 borderColor:nil];
}
- (UIImage *)imageByRoundCornerRadius:(CGFloat)radius
borderWidth:(CGFloat)borderWidth
borderColor:(UIColor *)borderColor {
return [self imageByRoundCornerRadius:radius
corners:UIRectCornerAllCorners
borderWidth:borderWidth
borderColor:borderColor
borderLineJoin:kCGLineJoinMiter];
}
- (UIImage *)imageByRoundCornerRadius:(CGFloat)radius
corners:(UIRectCorner)corners
borderWidth:(CGFloat)borderWidth
borderColor:(UIColor *)borderColor
borderLineJoin:(CGLineJoin)borderLineJoin {
UIGraphicsBeginImageContextWithOptions(self.size, NO, self.scale);
CGContextRef context = UIGraphicsGetCurrentContext();
CGRect rect = CGRectMake(0, 0, self.size.width, self.size.height);
CGContextScaleCTM(context, 1, -1);
CGContextTranslateCTM(context, 0, -rect.size.height);
CGFloat minSize = MIN(self.size.width, self.size.height);
if (borderWidth < minSize / 2) {
UIBezierPath *path = [UIBezierPath bezierPathWithRoundedRect:CGRectInset(rect, borderWidth, borderWidth) byRoundingCorners:corners cornerRadii:CGSizeMake(radius, borderWidth)];
[path closePath];
CGContextSaveGState(context);
[path addClip];
CGContextDrawImage(context, rect, self.CGImage);
CGContextRestoreGState(context);
}
if (borderColor && borderWidth < minSize / 2 && borderWidth > 0) {
CGFloat strokeInset = (floor(borderWidth * self.scale) + 0.5) / self.scale;
CGRect strokeRect = CGRectInset(rect, strokeInset, strokeInset);
CGFloat strokeRadius = radius > self.scale / 2 ? radius - self.scale / 2 : 0;
UIBezierPath *path = [UIBezierPath bezierPathWithRoundedRect:strokeRect byRoundingCorners:corners cornerRadii:CGSizeMake(strokeRadius, borderWidth)];
[path closePath];
path.lineWidth = borderWidth;
path.lineJoinStyle = borderLineJoin;
[borderColor setStroke];
[path stroke];
}
UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
return image;
}
- (UIImage *)_yy_flipHorizontal:(BOOL)horizontal vertical:(BOOL)vertical {
if (!self.CGImage) return nil;
size_t width = (size_t)CGImageGetWidth(self.CGImage);
size_t height = (size_t)CGImageGetHeight(self.CGImage);
size_t bytesPerRow = width * 4;
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef context = CGBitmapContextCreate(NULL, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrderDefault | kCGImageAlphaPremultipliedFirst);
CGColorSpaceRelease(colorSpace);
if (!context) return nil;
CGContextDrawImage(context, CGRectMake(0, 0, width, height), self.CGImage);
UInt8 *data = (UInt8 *)CGBitmapContextGetData(context);
if (!data) {
CGContextRelease(context);
return nil;
}
vImage_Buffer src = { data, height, width, bytesPerRow };
vImage_Buffer dest = { data, height, width, bytesPerRow };
if (vertical) {
vImageVerticalReflect_ARGB8888(&src, &dest, kvImageBackgroundColorFill);
}
if (horizontal) {
vImageHorizontalReflect_ARGB8888(&src, &dest, kvImageBackgroundColorFill);
}
CGImageRef imgRef = CGBitmapContextCreateImage(context);
CGContextRelease(context);
UIImage *img = [UIImage imageWithCGImage:imgRef scale:self.scale orientation:self.imageOrientation];
CGImageRelease(imgRef);
return img;
}
- (UIImage *)imageByRotate180 {
return [self _yy_flipHorizontal:YES vertical:YES];
}
- (UIImage *)imageByFlipVertical {
return [self _yy_flipHorizontal:NO vertical:YES];
}
- (UIImage *)imageByFlipHorizontal {
return [self _yy_flipHorizontal:YES vertical:NO];
}
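// The header declares imageByRotateLeft90 / imageByRotateRight90, but the original
// listing never implements them. A minimal sketch, assuming -imageRotatedByDegrees:
// above is acceptable (it keeps the original canvas size, so non-square images are
// clipped; swap the signs if "left"/"right" come out mirrored on screen):
- (UIImage *)imageByRotateLeft90 {
    return [self imageRotatedByDegrees:90];
}
- (UIImage *)imageByRotateRight90 {
    return [self imageRotatedByDegrees:-90];
}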
static inline CGFloat DegreesToRadians(CGFloat degrees) {
return degrees * M_PI / 180;
}
@end
@implementation UIImage (Blur)
- (UIImage *)imageByBlurSoft {
return [self imageByBlurRadius:60 tintColor:[UIColor colorWithWhite:0.84 alpha:0.36] tintMode:kCGBlendModeNormal saturation:1.8 maskImage:nil];
}
- (UIImage *)imageByBlurLight {
return [self imageByBlurRadius:60 tintColor:[UIColor colorWithWhite:1.0 alpha:0.3] tintMode:kCGBlendModeNormal saturation:1.8 maskImage:nil];
}
- (UIImage *)imageByBlurExtraLight {
return [self imageByBlurRadius:40 tintColor:[UIColor colorWithWhite:0.97 alpha:0.82] tintMode:kCGBlendModeNormal saturation:1.8 maskImage:nil];
}
- (UIImage *)imageByBlurDark {
return [self imageByBlurRadius:40 tintColor:[UIColor colorWithWhite:0.11 alpha:0.73] tintMode:kCGBlendModeNormal saturation:1.8 maskImage:nil];
}
- (UIImage *)imageByBlurWithTint:(UIColor *)tintColor {
const CGFloat EffectColorAlpha = 0.6;
UIColor *effectColor = tintColor;
size_t componentCount = CGColorGetNumberOfComponents(tintColor.CGColor);
if (componentCount == 2) {
CGFloat b;
if ([tintColor getWhite:&b alpha:NULL]) {
effectColor = [UIColor colorWithWhite:b alpha:EffectColorAlpha];
}
} else {
CGFloat r, g, b;
if ([tintColor getRed:&r green:&g blue:&b alpha:NULL]) {
effectColor = [UIColor colorWithRed:r green:g blue:b alpha:EffectColorAlpha];
}
}
return [self imageByBlurRadius:20 tintColor:effectColor tintMode:kCGBlendModeNormal saturation:-1.0 maskImage:nil];
}
- (UIImage *)imageByBlurRadius:(CGFloat)blurRadius
tintColor:(UIColor *)tintColor
tintMode:(CGBlendMode)tintBlendMode
saturation:(CGFloat)saturation
maskImage:(UIImage *)maskImage {
if (self.size.width < 1 || self.size.height < 1) {
NSLog(@"UIImage+YYAdd error: invalid size: (%.2f x %.2f). Both dimensions must be >= 1: %@", self.size.width, self.size.height, self);
return nil;
}
if (!self.CGImage) {
NSLog(@"UIImage+YYAdd error: inputImage must be backed by a CGImage: %@", self);
return nil;
}
if (maskImage && !maskImage.CGImage) {
NSLog(@"UIImage+YYAdd error: effectMaskImage must be backed by a CGImage: %@", maskImage);
return nil;
}
// iOS 7 and later provide the newer vImage/CGImage conversion functions.
BOOL hasNewFunc = (long)vImageBuffer_InitWithCGImage != 0 && (long)vImageCreateCGImageFromBuffer != 0;
BOOL hasBlur = blurRadius > __FLT_EPSILON__;
BOOL hasSaturation = fabs(saturation - 1.0) > __FLT_EPSILON__;
CGSize size = self.size;
CGRect rect = { CGPointZero, size };
CGFloat scale = self.scale;
CGImageRef imageRef = self.CGImage;
BOOL opaque = NO;
if (!hasBlur && !hasSaturation) {
return [self _yy_mergeImageRef:imageRef tintColor:tintColor tintBlendMode:tintBlendMode maskImage:maskImage opaque:opaque];
}
vImage_Buffer effect = { 0 }, scratch = { 0 };
vImage_Buffer *input = NULL, *output = NULL;
vImage_CGImageFormat format = {
.bitsPerComponent = 8,
.bitsPerPixel = 32,
.colorSpace = NULL,
.bitmapInfo = kCGImageAlphaPremultipliedFirst | kCGBitmapByteOrder32Little, //requests a BGRA buffer.
.version = 0,
.decode = NULL,
.renderingIntent = kCGRenderingIntentDefault
};
if (hasNewFunc) {
vImage_Error err;
err = vImageBuffer_InitWithCGImage(&effect, &format, NULL, imageRef, kvImagePrintDiagnosticsToConsole);
if (err != kvImageNoError) {
NSLog(@"UIImage+YYAdd error: vImageBuffer_InitWithCGImage returned error code %zi for inputImage: %@", err, self);
return nil;
}
err = vImageBuffer_Init(&scratch, effect.height, effect.width, format.bitsPerPixel, kvImageNoFlags);
if (err != kvImageNoError) {
NSLog(@"UIImage+YYAdd error: vImageBuffer_Init returned error code %zi for inputImage: %@", err, self);
return nil;
}
} else {
UIGraphicsBeginImageContextWithOptions(size, opaque, scale);
CGContextRef effectCtx = UIGraphicsGetCurrentContext();
CGContextScaleCTM(effectCtx, 1.0, -1.0);
CGContextTranslateCTM(effectCtx, 0, -size.height);
CGContextDrawImage(effectCtx, rect, imageRef);
effect.data = CGBitmapContextGetData(effectCtx);
effect.width = CGBitmapContextGetWidth(effectCtx);
effect.height = CGBitmapContextGetHeight(effectCtx);
effect.rowBytes = CGBitmapContextGetBytesPerRow(effectCtx);
UIGraphicsBeginImageContextWithOptions(size, opaque, scale);
CGContextRef scratchCtx = UIGraphicsGetCurrentContext();
scratch.data = CGBitmapContextGetData(scratchCtx);
scratch.width = CGBitmapContextGetWidth(scratchCtx);
scratch.height = CGBitmapContextGetHeight(scratchCtx);
scratch.rowBytes = CGBitmapContextGetBytesPerRow(scratchCtx);
}
input = &effect;
output = &scratch;
if (hasBlur) {
// A description of how to compute the box kernel width from the Gaussian
// radius (aka standard deviation) appears in the SVG spec:
// http://www.w3.org/TR/SVG/filters.html#feGaussianBlurElement
//
// For larger values of 's' (s >= 2.0), an approximation can be used: Three
// successive box-blurs build a piece-wise quadratic convolution kernel, which
// approximates the Gaussian kernel to within roughly 3%.
//
// let d = floor(s * 3*sqrt(2*pi)/4 + 0.5)
//
// ... if d is odd, use three box-blurs of size 'd', centered on the output pixel.
//
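// Worked example (illustrative): blurRadius = 60 on a @2x image gives inputRadius = 120;
// 3 * sqrt(2 * pi) / 4 ≈ 1.88, so radius = floor((120 * 1.88 + 0.5) / 2) = 113, already odd;
// blurRadius * scale = 120 >= 1.5, so three box-blur passes run below.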
CGFloat inputRadius = blurRadius * scale;
if (inputRadius - 2.0 < __FLT_EPSILON__) inputRadius = 2.0;
uint32_t radius = floor((inputRadius * 3.0 * sqrt(2 * M_PI) / 4 + 0.5) / 2);
radius |= 1; // force radius to be odd so that the three box-blur methodology works.
int iterations;
if (blurRadius * scale < 0.5) iterations = 1;
else if (blurRadius * scale < 1.5) iterations = 2;
else iterations = 3;
NSInteger tempSize = vImageBoxConvolve_ARGB8888(input, output, NULL, 0, 0, radius, radius, NULL, kvImageGetTempBufferSize | kvImageEdgeExtend);
void *temp = malloc(tempSize);
for (int i = 0; i < iterations; i++) {
vImageBoxConvolve_ARGB8888(input, output, temp, 0, 0, radius, radius, NULL, kvImageEdgeExtend);
YY_SWAP(input, output);
}
free(temp);
}
if (hasSaturation) {
// These values appear in the W3C Filter Effects spec:
// https://dvcs.w3.org/hg/FXTF/raw-file/default/filters/Publish.html#grayscaleEquivalent
CGFloat s = saturation;
CGFloat matrixFloat[] = {
0.0722 + 0.9278 * s, 0.0722 - 0.0722 * s, 0.0722 - 0.0722 * s, 0,
0.7152 - 0.7152 * s, 0.7152 + 0.2848 * s, 0.7152 - 0.7152 * s, 0,
0.2126 - 0.2126 * s, 0.2126 - 0.2126 * s, 0.2126 + 0.7873 * s, 0,
0, 0, 0, 1,
};
const int32_t divisor = 256;
NSUInteger matrixSize = sizeof(matrixFloat) / sizeof(matrixFloat[0]);
int16_t matrix[matrixSize];
for (NSUInteger i = 0; i < matrixSize; ++i) {
matrix[i] = (int16_t)roundf(matrixFloat[i] * divisor);
}
vImageMatrixMultiply_ARGB8888(input, output, matrix, divisor, NULL, NULL, kvImageNoFlags);
YY_SWAP(input, output);
}
UIImage *outputImage = nil;
if (hasNewFunc) {
CGImageRef effectCGImage = NULL;
effectCGImage = vImageCreateCGImageFromBuffer(input, &format, &_yy_cleanupBuffer, NULL, kvImageNoAllocate, NULL);
if (effectCGImage == NULL) {
effectCGImage = vImageCreateCGImageFromBuffer(input, &format, NULL, NULL, kvImageNoFlags, NULL);
free(input->data);
}
free(output->data);
outputImage = [self _yy_mergeImageRef:effectCGImage tintColor:tintColor tintBlendMode:tintBlendMode maskImage:maskImage opaque:opaque];
CGImageRelease(effectCGImage);
} else {
CGImageRef effectCGImage;
UIImage *effectImage;
if (input != &effect) effectImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
if (input == &effect) effectImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
effectCGImage = effectImage.CGImage;
outputImage = [self _yy_mergeImageRef:effectCGImage tintColor:tintColor tintBlendMode:tintBlendMode maskImage:maskImage opaque:opaque];
}
return outputImage;
}
static void _yy_cleanupBuffer(void *userData, void *buf_data) {
free(buf_data);
}
- (UIImage *)_yy_mergeImageRef:(CGImageRef)effectCGImage
tintColor:(UIColor *)tintColor
tintBlendMode:(CGBlendMode)tintBlendMode
maskImage:(UIImage *)maskImage
opaque:(BOOL)opaque {
BOOL hasTint = tintColor != nil && CGColorGetAlpha(tintColor.CGColor) > __FLT_EPSILON__;
BOOL hasMask = maskImage != nil;
CGSize size = self.size;
CGRect rect = { CGPointZero, size };
CGFloat scale = self.scale;
if (!hasTint && !hasMask) {
return [UIImage imageWithCGImage:effectCGImage];
}
UIGraphicsBeginImageContextWithOptions(size, opaque, scale);
CGContextRef context = UIGraphicsGetCurrentContext();
CGContextScaleCTM(context, 1.0, -1.0);
CGContextTranslateCTM(context, 0, -size.height);
if (hasMask) {
CGContextDrawImage(context, rect, self.CGImage);
CGContextSaveGState(context);
CGContextClipToMask(context, rect, maskImage.CGImage);
}
CGContextDrawImage(context, rect, effectCGImage);
if (hasTint) {
CGContextSaveGState(context);
CGContextSetBlendMode(context, tintBlendMode);
CGContextSetFillColorWithColor(context, tintColor.CGColor);
CGContextFillRect(context, rect);
CGContextRestoreGState(context);
}
if (hasMask) {
CGContextRestoreGState(context);
}
UIImage *outputImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
return outputImage;
}
- (UIImage *) boxblurImageWithBlur:(CGFloat)blur exclusionPath:(UIBezierPath *)exclusionPath {
if (blur < 0.f || blur > 1.f) {
blur = 0.5f;
}
int boxSize = (int)(blur * 40);
boxSize = boxSize - (boxSize % 2) + 1;
CGImageRef img = self.CGImage;
vImage_Buffer inBuffer, outBuffer;
vImage_Error error;
void *pixelBuffer;
// create unchanged copy of the area inside the exclusionPath
UIImage *unblurredImage = nil;
if (exclusionPath != nil) {
CAShapeLayer *maskLayer = [[CAShapeLayer alloc] init];
maskLayer.frame = (CGRect){CGPointZero, self.size};
maskLayer.backgroundColor = [UIColor blackColor].CGColor;
maskLayer.fillColor = [UIColor whiteColor].CGColor;
maskLayer.path = exclusionPath.CGPath;
// create grayscale image to mask context
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceGray();
CGContextRef context = CGBitmapContextCreate(nil, maskLayer.bounds.size.width, maskLayer.bounds.size.height, 8, 0, colorSpace, kCGImageAlphaNone);
CGContextTranslateCTM(context, 0, maskLayer.bounds.size.height);
CGContextScaleCTM(context, 1.f, -1.f);
[maskLayer renderInContext:context];
CGImageRef imageRef = CGBitmapContextCreateImage(context);
UIImage *maskImage = [UIImage imageWithCGImage:imageRef];
CGImageRelease(imageRef);
CGColorSpaceRelease(colorSpace);
CGContextRelease(context);
UIGraphicsBeginImageContext(self.size);
context = UIGraphicsGetCurrentContext();
CGContextTranslateCTM(context, 0, maskLayer.bounds.size.height);
CGContextScaleCTM(context, 1.f, -1.f);
CGContextClipToMask(context, maskLayer.bounds, maskImage.CGImage);
CGContextDrawImage(context, maskLayer.bounds, self.CGImage);
unblurredImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
}
//create vImage_Buffer with data from CGImageRef
CGDataProviderRef inProvider = CGImageGetDataProvider(img);
CFDataRef inBitmapData = CGDataProviderCopyData(inProvider);
inBuffer.width = CGImageGetWidth(img);
inBuffer.height = CGImageGetHeight(img);
inBuffer.rowBytes = CGImageGetBytesPerRow(img);
inBuffer.data = (void*)CFDataGetBytePtr(inBitmapData);
//create vImage_Buffer for output
pixelBuffer = malloc(CGImageGetBytesPerRow(img) * CGImageGetHeight(img));
if(pixelBuffer == NULL)
NSLog(@"No pixelbuffer");
outBuffer.data = pixelBuffer;
outBuffer.width = CGImageGetWidth(img);
outBuffer.height = CGImageGetHeight(img);
outBuffer.rowBytes = CGImageGetBytesPerRow(img);
// Create a third buffer for intermediate processing
void *pixelBuffer2 = malloc(CGImageGetBytesPerRow(img) * CGImageGetHeight(img));
vImage_Buffer outBuffer2;
outBuffer2.data = pixelBuffer2;
outBuffer2.width = CGImageGetWidth(img);
outBuffer2.height = CGImageGetHeight(img);
outBuffer2.rowBytes = CGImageGetBytesPerRow(img);
//perform a three-pass box convolution (approximates a Gaussian blur);
//avoid writing back into inBuffer, whose data points into the immutable CFData copy
error = vImageBoxConvolve_ARGB8888(&inBuffer, &outBuffer, NULL, 0, 0, boxSize, boxSize, NULL, kvImageEdgeExtend);
error = vImageBoxConvolve_ARGB8888(&outBuffer, &outBuffer2, NULL, 0, 0, boxSize, boxSize, NULL, kvImageEdgeExtend);
error = vImageBoxConvolve_ARGB8888(&outBuffer2, &outBuffer, NULL, 0, 0, boxSize, boxSize, NULL, kvImageEdgeExtend);
if (error) {
NSLog(@"error from convolution %ld", error);
}
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef ctx = CGBitmapContextCreate(outBuffer.data,
outBuffer.width,
outBuffer.height,
8,
outBuffer.rowBytes,
colorSpace,
kCGImageAlphaNoneSkipLast);
CGImageRef imageRef = CGBitmapContextCreateImage(ctx);
UIImage *returnImage = [UIImage imageWithCGImage:imageRef];
// overlay the unblurred (excluded) region back on top of the blurred image
if (unblurredImage != nil) {
UIGraphicsBeginImageContext(returnImage.size);
[returnImage drawAtPoint:CGPointZero];
[unblurredImage drawAtPoint:CGPointZero];
returnImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
}
//clean up
CGContextRelease(ctx);
CGColorSpaceRelease(colorSpace);
free(pixelBuffer);
free(pixelBuffer2);
CFRelease(inBitmapData);
CGImageRelease(imageRef);
return returnImage;
}
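// Example (illustrative sketch; `photo` is assumed to be an existing UIImage):
// blur a photo while keeping a circular region sharp.
//
//     UIBezierPath *keepSharp = [UIBezierPath bezierPathWithOvalInRect:CGRectMake(60, 60, 120, 120)];
//     UIImage *blurred = [photo boxblurImageWithBlur:0.6f exclusionPath:keepSharp];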
@end
@implementation UIImage (ImageEffects)
- (UIImage *)applyLightEffect {
UIColor* tintColor = [UIColor colorWithWhite:1.0 alpha:0.3];
return [self applyBlurWithRadius:30 tintColor:tintColor saturationDeltaFactor:1.8 maskImage:nil];
}
- (UIImage *)applyExtraLightEffect {
UIColor* tintColor = [UIColor colorWithWhite:0.97 alpha:0.82];
return [self applyBlurWithRadius:20 tintColor:tintColor saturationDeltaFactor:1.8 maskImage:nil];
}
- (UIImage *)applyDarkEffect {
UIColor* tintColor = [UIColor colorWithWhite:0.11 alpha:0.73];
return [self applyBlurWithRadius:30 tintColor:tintColor saturationDeltaFactor:1.8 maskImage:nil];
}
- (UIImage *)applyBlurEffect {
UIColor* tintColor = [UIColor colorWithWhite:0.4f alpha:0.3f];
return [self applyBlurWithRadius:8.0f tintColor:tintColor saturationDeltaFactor:1.8f maskImage:nil];
}
- (UIImage *)applyTintEffectWithColor:(UIColor *)tintColor {
const CGFloat EffectColorAlpha = 0.6;
UIColor* effectColor = tintColor;
NSUInteger componentCount = CGColorGetNumberOfComponents(tintColor.CGColor);
if (componentCount == 2) {
CGFloat b;
if ([tintColor getWhite:&b alpha:nil]) {
effectColor = [UIColor colorWithWhite:b alpha:EffectColorAlpha];
}
} else {
CGFloat r, g, b;
if ([tintColor getRed:&r green:&g blue:&b alpha:nil]) {
effectColor = [UIColor colorWithRed:r green:g blue:b alpha:EffectColorAlpha];
}
}
return [self applyBlurWithRadius:10 tintColor:effectColor saturationDeltaFactor:-1.0 maskImage:nil];
}
- (UIImage *)applyBlurWithRadius:(CGFloat)blurRadius
tintColor:(UIColor *)tintColor
saturationDeltaFactor:(CGFloat)saturationDeltaFactor
maskImage:(UIImage *)maskImage {
if (self.size.width < 1 || self.size.height < 1) {
NSLog (@"*** error: invalid size: (%.2f x %.2f). Both dimensions must be >= 1: %@", self.size.width, self.size.height, self);
return nil;
}
if (!self.CGImage) {
NSLog (@"*** error: image must be backed by a CGImage: %@", self);
return nil;
}
if (maskImage && !maskImage.CGImage) {
NSLog (@"*** error: maskImage must be backed by a CGImage: %@", maskImage);
return nil;
}
CGRect imageRect = { CGPointZero, self.size };
UIImage *effectImage = self;
BOOL hasBlur = blurRadius > __FLT_EPSILON__;
BOOL hasSaturationChange = fabs(saturationDeltaFactor - 1.) > __FLT_EPSILON__;
if (hasBlur || hasSaturationChange) {
UIGraphicsBeginImageContextWithOptions(self.size, NO, [[UIScreen mainScreen] scale]);
CGContextRef effectInContext = UIGraphicsGetCurrentContext();
CGContextScaleCTM(effectInContext, 1.0, -1.0);
CGContextTranslateCTM(effectInContext, 0, -self.size.height);
CGContextDrawImage(effectInContext, imageRect, self.CGImage);
vImage_Buffer effectInBuffer;
effectInBuffer.data = CGBitmapContextGetData(effectInContext);
effectInBuffer.width = CGBitmapContextGetWidth(effectInContext);
effectInBuffer.height = CGBitmapContextGetHeight(effectInContext);
effectInBuffer.rowBytes = CGBitmapContextGetBytesPerRow(effectInContext);
UIGraphicsBeginImageContextWithOptions(self.size, NO, [[UIScreen mainScreen] scale]);
CGContextRef effectOutContext = UIGraphicsGetCurrentContext();
vImage_Buffer effectOutBuffer;
effectOutBuffer.data = CGBitmapContextGetData(effectOutContext);
effectOutBuffer.width = CGBitmapContextGetWidth(effectOutContext);
effectOutBuffer.height = CGBitmapContextGetHeight(effectOutContext);
effectOutBuffer.rowBytes = CGBitmapContextGetBytesPerRow(effectOutContext);
if (hasBlur) {
// A description of how to compute the box kernel width from the Gaussian
// radius (aka standard deviation) appears in the SVG spec:
// http://www.w3.org/TR/SVG/filters.html#feGaussianBlurElement
//
// For larger values of 's' (s >= 2.0), an approximation can be used: Three
// successive box-blurs build a piece-wise quadratic convolution kernel, which
// approximates the Gaussian kernel to within roughly 3%.
//
// let d = floor(s * 3*sqrt(2*pi)/4 + 0.5)
//
// ... if d is odd, use three box-blurs of size 'd', centered on the output pixel.
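// Worked example (illustrative): blurRadius = 30 at a screen scale of 2 gives
// inputRadius = 60 and radius = floor(60 * 1.88 + 0.5) = 113, which is already odd,
// so it is used directly for the three box-blur passes below.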
//
CGFloat inputRadius = blurRadius * [[UIScreen mainScreen] scale];
NSUInteger radius = floor(inputRadius * 3. * sqrt(2 * M_PI) / 4 + 0.5);
if (radius % 2 != 1) {
radius += 1; // force radius to be odd so that the three box-blur methodology works.
}
vImageBoxConvolve_ARGB8888(&effectInBuffer, &effectOutBuffer, NULL, 0, 0, (uint32_t)radius, (uint32_t)radius, 0, kvImageEdgeExtend);
vImageBoxConvolve_ARGB8888(&effectOutBuffer, &effectInBuffer, NULL, 0, 0, (uint32_t)radius, (uint32_t)radius, 0, kvImageEdgeExtend);
vImageBoxConvolve_ARGB8888(&effectInBuffer, &effectOutBuffer, NULL, 0, 0, (uint32_t)radius, (uint32_t)radius, 0, kvImageEdgeExtend);
}
BOOL effectImageBuffersAreSwapped = NO;
if (hasSaturationChange) {
CGFloat s = saturationDeltaFactor;
CGFloat floatingPointSaturationMatrix[] = {
0.0722 + 0.9278 * s, 0.0722 - 0.0722 * s, 0.0722 - 0.0722 * s, 0,
0.7152 - 0.7152 * s, 0.7152 + 0.2848 * s, 0.7152 - 0.7152 * s, 0,
0.2126 - 0.2126 * s, 0.2126 - 0.2126 * s, 0.2126 + 0.7873 * s, 0,
0, 0, 0, 1,
};
const int32_t divisor = 256;
NSUInteger matrixSize = sizeof(floatingPointSaturationMatrix)/sizeof(floatingPointSaturationMatrix[0]);
int16_t saturationMatrix[matrixSize];
for (NSUInteger i = 0; i < matrixSize; ++i) {
saturationMatrix[i] = (int16_t)roundf(floatingPointSaturationMatrix[i] * divisor);
}
if (hasBlur) {
vImageMatrixMultiply_ARGB8888(&effectOutBuffer, &effectInBuffer, saturationMatrix, divisor, NULL, NULL, kvImageNoFlags);
effectImageBuffersAreSwapped = YES;
}
else {
vImageMatrixMultiply_ARGB8888(&effectInBuffer, &effectOutBuffer, saturationMatrix, divisor, NULL, NULL, kvImageNoFlags);
}
}
if (!effectImageBuffersAreSwapped)
effectImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
if (effectImageBuffersAreSwapped)
effectImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
}
// Set up output context.
UIGraphicsBeginImageContextWithOptions(self.size, NO, [[UIScreen mainScreen] scale]);
CGContextRef outputContext = UIGraphicsGetCurrentContext();
CGContextScaleCTM(outputContext, 1.0, -1.0);
CGContextTranslateCTM(outputContext, 0, -self.size.height);
// Draw base image.
CGContextDrawImage(outputContext, imageRect, self.CGImage);
// Draw effect image.
if (hasBlur) {
CGContextSaveGState(outputContext);
if (maskImage) {
CGContextClipToMask(outputContext, imageRect, maskImage.CGImage);
}
CGContextDrawImage(outputContext, imageRect, effectImage.CGImage);
CGContextRestoreGState(outputContext);
}
// Add in color tint.
if (tintColor) {
CGContextSaveGState(outputContext);
CGContextSetFillColorWithColor(outputContext, tintColor.CGColor);
CGContextFillRect(outputContext, imageRect);
CGContextRestoreGState(outputContext);
}
// Output image is ready.
UIImage *outputImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
return outputImage;
}
@end