Method 1
1. Set up the original image view
UIImageView *imgV = [[UIImageView alloc] initWithFrame:CGRectMake(0, 0, WIDTH, 140 *ZZHeight)];
[imgV sd_setImageWithURL:[NSURL URLWithString:_img_url] placeholderImage:[UIImage imageNamed:@"archImg"]];
2. Create the Gaussian blur effect
UIBlurEffect *effect = [UIBlurEffect effectWithStyle:UIBlurEffectStyleLight];
UIVisualEffectView *effectView = [[UIVisualEffectView alloc] initWithEffect:effect];
effectView.frame = imgV.bounds;  // cover the image view exactly
3. Add the blur effect view on top of the image view
[imgV addSubview:effectView];
[self.view addSubview:imgV];
// This isn't very flexible: the blur style is fixed and quite heavy, so almost no trace of the original image shows through.
// It's fine if your requirements are modest.
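One workaround worth knowing (my own addition, not from the original write-up): drive the UIVisualEffectView with a paused UIViewPropertyAnimator (iOS 10+), so that fractionComplete acts as a blur-intensity dial between 0 and 1. A minimal sketch, reusing the imgV from above:
UIVisualEffectView *effectView = [[UIVisualEffectView alloc] initWithEffect:nil];
effectView.frame = imgV.bounds;
[imgV addSubview:effectView];
// Assigning the effect inside an animator and pausing it part-way yields a partial blur
UIViewPropertyAnimator *animator = [[UIViewPropertyAnimator alloc] initWithDuration:1.0 curve:UIViewAnimationCurveLinear animations:^{
    effectView.effect = [UIBlurEffect effectWithStyle:UIBlurEffectStyleLight];
}];
animator.fractionComplete = 0.3;  // roughly 30% of the full blur
// Keep a strong reference to the animator (e.g. in a property), or the partial blur is discarded.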
Method 2
// 1. Add a helper method
+ (UIImage *)coreBlurImage:(UIImage *)image withBlurNumber:(CGFloat)blur {
    CIContext *context = [CIContext contextWithOptions:nil];
    CIImage *inputImage = [CIImage imageWithCGImage:image.CGImage];
    // Run the image through Core Image's Gaussian blur filter
    CIFilter *filter = [CIFilter filterWithName:@"CIGaussianBlur"];
    [filter setValue:inputImage forKey:kCIInputImageKey];
    [filter setValue:@(blur) forKey:kCIInputRadiusKey];
    CIImage *result = [filter valueForKey:kCIOutputImageKey];
    // Render the result back into a UIImage
    CGImageRef outImage = [context createCGImage:result fromRect:[result extent]];
    UIImage *blurImage = [UIImage imageWithCGImage:outImage];
    CGImageRelease(outImage);
    return blurImage;
}
// 2. Call it. Note that sd_setImageWithURL: loads the image asynchronously, so apply the blur in its
// completion block; blurring imgV.image right after the call would only blur the placeholder.
UIImageView *imgV = [[UIImageView alloc] initWithFrame:CGRectMake(0, 0, WIDTH, 140 * ZZHeight)];
[imgV sd_setImageWithURL:[NSURL URLWithString:_img_url] placeholderImage:[UIImage imageNamed:@"archImg"] completed:^(UIImage *image, NSError *error, SDImageCacheType cacheType, NSURL *imageURL) {
    if (image) imgV.image = [CommonHelper coreBlurImage:image withBlurNumber:9];
}];
[self.view addSubview:imgV];
// This works much better than Method 1: the blur radius is adjustable, from 0 upwards. Beyond roughly 10–20 the image is blurred past recognition, so staying under 10 is usually sensible, but that's up to you.
// One drawback: the blurred output ends up slightly smaller than the original, with a white fringe around the edges.
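A common fix for that fringe (my own addition, not part of the original post): CIGaussianBlur samples past the image edges, which is what grows the output extent and leaves the border. Clamping the edges before blurring and then cropping back to the input's extent avoids it. A sketch of how the body of coreBlurImage:withBlurNumber: could change (context as in the original method):
CIImage *inputImage = [CIImage imageWithCGImage:image.CGImage];
// Extend the edge pixels outward so the blur has something to sample beyond the borders
CIImage *clamped = [inputImage imageByClampingToExtent];
CIFilter *filter = [CIFilter filterWithName:@"CIGaussianBlur"];
[filter setValue:clamped forKey:kCIInputImageKey];
[filter setValue:@(blur) forKey:kCIInputRadiusKey];
// Crop the (now unbounded) result back to the original image's extent
CIImage *result = [[filter valueForKey:kCIOutputImageKey] imageByCroppingToRect:inputImage.extent];
CGImageRef outImage = [context createCGImage:result fromRect:result.extent];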
Method 3
// Another commonly used approach is vImage.
// It belongs to Accelerate.framework, so you need to import the Accelerate header:
#import <Accelerate/Accelerate.h>
+ (UIImage *)boxblurImage:(UIImage *)image withBlurNumber:(CGFloat)blur {
    if (blur < 0.f || blur > 1.f) {
        blur = 0.5f;
    }
    int boxSize = (int)(blur * 40);
    boxSize = boxSize - (boxSize % 2) + 1;  // the convolution kernel size must be odd

    CGImageRef img = image.CGImage;
    vImage_Buffer inBuffer, outBuffer;
    vImage_Error error;
    void *pixelBuffer;

    // Copy the pixel data out of the CGImage
    CGDataProviderRef inProvider = CGImageGetDataProvider(img);
    CFDataRef inBitmapData = CGDataProviderCopyData(inProvider);

    // Describe the source buffer for vImage
    inBuffer.width = CGImageGetWidth(img);
    inBuffer.height = CGImageGetHeight(img);
    inBuffer.rowBytes = CGImageGetBytesPerRow(img);
    inBuffer.data = (void *)CFDataGetBytePtr(inBitmapData);

    // Allocate the destination buffer
    pixelBuffer = malloc(CGImageGetBytesPerRow(img) * CGImageGetHeight(img));
    if (pixelBuffer == NULL) {
        NSLog(@"No pixelbuffer");
    }
    outBuffer.data = pixelBuffer;
    outBuffer.width = CGImageGetWidth(img);
    outBuffer.height = CGImageGetHeight(img);
    outBuffer.rowBytes = CGImageGetBytesPerRow(img);

    error = vImageBoxConvolve_ARGB8888(&inBuffer, &outBuffer, NULL, 0, 0, boxSize, boxSize, NULL, kvImageEdgeExtend);
    if (error) {
        NSLog(@"error from convolution %ld", error);
    }

    // Rebuild a CGImage/UIImage from the blurred buffer
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef ctx = CGBitmapContextCreate(outBuffer.data,
                                             outBuffer.width,
                                             outBuffer.height,
                                             8,  // bits per component
                                             outBuffer.rowBytes,
                                             colorSpace,
                                             kCGImageAlphaNoneSkipLast);
    CGImageRef imageRef = CGBitmapContextCreateImage(ctx);
    UIImage *returnImage = [UIImage imageWithCGImage:imageRef];

    // Clean up
    CGContextRelease(ctx);
    CGColorSpaceRelease(colorSpace);
    free(pixelBuffer);
    CFRelease(inBitmapData);
    CGImageRelease(imageRef);
    return returnImage;
}
// 2. Call it. As with Method 2, apply the blur in the SDWebImage completion block. Note that
// boxblurImage: expects a value between 0 and 1; anything outside that range falls back to 0.5.
UIImageView *imgV = [[UIImageView alloc] initWithFrame:CGRectMake(0, 0, WIDTH, 140 * ZZHeight)];
[imgV sd_setImageWithURL:[NSURL URLWithString:_img_url] placeholderImage:[UIImage imageNamed:@"archImg"] completed:^(UIImage *image, NSError *error, SDImageCacheType cacheType, NSURL *imageURL) {
    if (image) imgV.image = [CommonHelper boxblurImage:image withBlurNumber:0.5];
}];
[self.view addSubview:imgV];
Method 3 is the one I'd recommend.
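One last optional touch (again my own suggestion, not from the original): both coreBlurImage: and boxblurImage: do their work on the CPU, so for large images it can be worth blurring on a background queue and only updating the image view back on the main queue. A minimal sketch, assuming image and imgV are in scope as in the completion blocks above:
dispatch_async(dispatch_get_global_queue(QOS_CLASS_USER_INITIATED, 0), ^{
    UIImage *blurred = [CommonHelper boxblurImage:image withBlurNumber:0.5];
    dispatch_async(dispatch_get_main_queue(), ^{
        imgV.image = blurred;  // UIKit should only be touched on the main queue
    });
});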