This post is mainly about using vImage to implement an image blur effect.
- 1. The blur code (two versions)
- 2. The reddish-tint problem with the blurred result (the tint comes from how the source image was produced)
- 3. A look at the effect first, and why there are two versions of the code (my GIF capture tool is pretty bad)

Before using vImage you need to import the framework with #import <Accelerate/Accelerate.h>. There is a catch, though: in my two projects, Code One and Code Two behaved in exactly opposite ways, one producing a reddish tint and the other not, even though the code was identical. After digging into it, the cause turned out to be the images themselves: one project fed in the original image file, while the other fed in an image obtained from a screenshot (or from an OpenGL snapshot context). The snapshot image carries different internal bitmap information, most likely a different channel order, so the same convolution code ends up with the red and blue channels swapped for one kind of image but not the other.
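If you want to see the difference for yourself, you can dump the CGImage bitmap information of both kinds of image. Below is a minimal sketch (the helper name fan_logBitmapInfo and the tag strings are placeholders of mine, not part of the original code); if the two logs show different alpha or byte-order flags, that is the channel-order mismatch described above.

#import <UIKit/UIKit.h>

// Print the pixel-format details that typically differ between an image decoded
// from a file and one produced by a snapshot context.
static void fan_logBitmapInfo(NSString *tag, UIImage *image) {
    CGImageRef cgImage = image.CGImage;
    NSLog(@"%@: bitmapInfo=%u alphaInfo=%u bitsPerPixel=%zu bytesPerRow=%zu",
          tag,
          (unsigned int)CGImageGetBitmapInfo(cgImage),
          (unsigned int)CGImageGetAlphaInfo(cgImage),
          CGImageGetBitsPerPixel(cgImage),
          CGImageGetBytesPerRow(cgImage));
}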
Code One (for images generated from a view screenshot)
/**
 Gaussian blur (for images captured via a renderInContext or OpenGL screenshot, which would otherwise come out reddish)

 @param image the source image
 @param blur  1-100 (1-25 works best)
 @return      the blurred image
 */
+ (UIImage *)fan_accelerateBlurWithImage:(UIImage *)image blurNumber:(CGFloat)blur
{
    if (image == nil) {
        return nil;
    }
    // Clamp the radius and force it to an odd number (vImage requires an odd kernel size)
    int boxSize = blur;
    if (blur < 1 || blur > 100) {
        boxSize = 25;
    }
    boxSize = boxSize - (boxSize % 2) + 1;

    CGImageRef img = image.CGImage;
    vImage_Buffer inBuffer, outBuffer, rgbOutBuffer;
    vImage_Error error;
    void *pixelBuffer, *convertBuffer;

    // Read the source pixels straight out of the CGImage's data provider
    CGDataProviderRef inProvider = CGImageGetDataProvider(img);
    CFDataRef inBitmapData = CGDataProviderCopyData(inProvider);

    // Destination buffer for the channel permutation
    convertBuffer = malloc(CGImageGetBytesPerRow(img) * CGImageGetHeight(img));
    rgbOutBuffer.width = CGImageGetWidth(img);
    rgbOutBuffer.height = CGImageGetHeight(img);
    rgbOutBuffer.rowBytes = CGImageGetBytesPerRow(img);
    rgbOutBuffer.data = convertBuffer;

    inBuffer.width = CGImageGetWidth(img);
    inBuffer.height = CGImageGetHeight(img);
    inBuffer.rowBytes = CGImageGetBytesPerRow(img);
    inBuffer.data = (void *)CFDataGetBytePtr(inBitmapData);

    // Destination buffer for the convolution
    pixelBuffer = malloc(CGImageGetBytesPerRow(img) * CGImageGetHeight(img));
    if (pixelBuffer == NULL) {
        NSLog(@"No pixelbuffer");
    }
    outBuffer.data = pixelBuffer;
    outBuffer.width = CGImageGetWidth(img);
    outBuffer.height = CGImageGetHeight(img);
    outBuffer.rowBytes = CGImageGetBytesPerRow(img);

    // The box convolution is what approximates the Gaussian blur
    error = vImageBoxConvolve_ARGB8888(&inBuffer, &outBuffer, NULL, 0, 0, boxSize, boxSize, NULL, kvImageEdgeExtend);
    if (error) {
        NSLog(@"error from convolution %ld", error);
    }

    // Snapshot bitmaps come out in BGRA order; swapping channels 0 and 2 (blue and red)
    // is what removes the red tint
    const uint8_t mask[] = {2, 1, 0, 3};
    vImagePermuteChannels_ARGB8888(&outBuffer, &rgbOutBuffer, mask, kvImageNoFlags);

    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef ctx = CGBitmapContextCreate(rgbOutBuffer.data,
                                             rgbOutBuffer.width,
                                             rgbOutBuffer.height,
                                             8,
                                             rgbOutBuffer.rowBytes,
                                             colorSpace,
                                             kCGImageAlphaNoneSkipLast);
    CGImageRef imageRef = CGBitmapContextCreateImage(ctx);
    UIImage *returnImage = [UIImage imageWithCGImage:imageRef];

    // Clean up
    CGContextRelease(ctx);
    free(pixelBuffer);
    free(convertBuffer);
    CFRelease(inBitmapData);
    CGColorSpaceRelease(colorSpace);
    CGImageRelease(imageRef);
    return returnImage;
}
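A quick usage sketch for Code One, assuming the class hosting these class methods is called FanImageTool and the image view is called blurImageView (both names are placeholders of mine). Feed it an image that came from one of the screenshot helpers shown further down:

// snapshotImage is an image produced by one of the screenshot helpers below
UIImage *blurred = [FanImageTool fan_accelerateBlurWithImage:snapshotImage blurNumber:20];
self.blurImageView.image = blurred;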
Code Two (blurs the original image directly)
/**
 Gaussian blur (applied directly to an original image)

 @param image the source image
 @param blur  1-100 (1-25 works best)
 @return      the blurred image
 */
+ (UIImage *)fan_accelerateBlurShortWithImage:(UIImage *)image blurNumber:(CGFloat)blur
{
    if (image == nil) {
        return nil;
    }
    // Clamp the radius and force it to an odd number (vImage requires an odd kernel size)
    int boxSize = blur;
    if (blur < 1 || blur > 100) {
        boxSize = 25;
    }
    boxSize = boxSize - (boxSize % 2) + 1;

    CGImageRef img = image.CGImage;
    vImage_Buffer inBuffer, outBuffer;
    vImage_Error error;
    void *pixelBuffer;

    CGDataProviderRef inProvider = CGImageGetDataProvider(img);
    CFDataRef inBitmapData = CGDataProviderCopyData(inProvider);

    inBuffer.width = CGImageGetWidth(img);
    inBuffer.height = CGImageGetHeight(img);
    inBuffer.rowBytes = CGImageGetBytesPerRow(img);
    inBuffer.data = (void *)CFDataGetBytePtr(inBitmapData);

    pixelBuffer = malloc(CGImageGetBytesPerRow(img) * CGImageGetHeight(img));
    if (pixelBuffer == NULL) {
        NSLog(@"No pixelbuffer");
    }
    outBuffer.data = pixelBuffer;
    outBuffer.width = CGImageGetWidth(img);
    outBuffer.height = CGImageGetHeight(img);
    outBuffer.rowBytes = CGImageGetBytesPerRow(img);

    // Same convolution as Code One, but with no channel permutation afterwards
    error = vImageBoxConvolve_ARGB8888(&inBuffer, &outBuffer, NULL, 0, 0, boxSize, boxSize, NULL, kvImageEdgeExtend);
    if (error) {
        NSLog(@"error from convolution %ld", error);
    }

    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef ctx = CGBitmapContextCreate(outBuffer.data,
                                             outBuffer.width,
                                             outBuffer.height,
                                             8,
                                             outBuffer.rowBytes,
                                             colorSpace,
                                             kCGImageAlphaNoneSkipLast);
    CGImageRef imageRef = CGBitmapContextCreateImage(ctx);
    UIImage *returnImage = [UIImage imageWithCGImage:imageRef];

    // Clean up
    CGContextRelease(ctx);
    free(pixelBuffer);
    CFRelease(inBitmapData);
    CGColorSpaceRelease(colorSpace);
    CGImageRelease(imageRef);
    return returnImage;
}
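And a matching sketch for Code Two, applied to an image loaded straight from the bundle (the image name "photo", FanImageTool, and blurImageView are placeholders). The convolution touches every pixel, so for large images it is worth running it off the main thread:

UIImage *original = [UIImage imageNamed:@"photo"];
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
    // An image decoded from a file keeps its channel order, so Code Two is enough
    UIImage *blurred = [FanImageTool fan_accelerateBlurShortWithImage:original blurNumber:20];
    dispatch_async(dispatch_get_main_queue(), ^{
        self.blurImageView.image = blurred;
    });
});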
Below are my screenshot helpers. Images produced by either of these two methods must be blurred with Code One, otherwise the result turns reddish; a short usage sketch follows each helper.
/** Screenshot - cannot capture a player view or Unity's OpenGL view */
+ (UIImage *)fan_beginImageContext:(CGRect)rect fromView:(UIView *)view
{
    UIGraphicsBeginImageContext(view.frame.size); // view: the view currently being captured
    // Render the view's layer into the current bitmap context
    [view.layer renderInContext:UIGraphicsGetCurrentContext()];
    UIImage *viewImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    // Crop the requested rect out of the full capture
    CGImageRef imageRef = viewImage.CGImage;
    CGImageRef imageRefRect = CGImageCreateWithImageInRect(imageRef, rect);
    UIImage *sendImage = [[UIImage alloc] initWithCGImage:imageRefRect];
    /****** This is where you would save the cropped image; if you want to save it, move the return below your saving code ******/
    // CGImageRelease(imageRef); // Do not release this: imageRef comes from viewImage.CGImage (a "get", not a "create/copy"), so releasing it is an over-release and crashes
    CGImageRelease(imageRefRect);
    return sendImage;
}
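For example (a sketch again, with FanImageTool standing in for wherever these class methods actually live), cropping the top half of a view and blurring that region with Code One would look like this:

CGRect topHalf = CGRectMake(0, 0, self.view.bounds.size.width, self.view.bounds.size.height / 2);
UIImage *cropped = [FanImageTool fan_beginImageContext:topHalf fromView:self.view];
// renderInContext-based captures also need Code One, or the blur turns reddish
UIImage *blurredTop = [FanImageTool fan_accelerateBlurWithImage:cropped blurNumber:15];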
/** Screenshot - can capture a player view or Unity's OpenGL view, and ordinary views as well */
+ (UIImage *)fan_openglSnapshotImage:(UIView *)openGLView
{
    // Size of the destination bitmap
    CGSize size = openGLView.frame.size;
    UIGraphicsBeginImageContextWithOptions(size, NO, [UIScreen mainScreen].scale);
    // Rect inside the bitmap where the view's content is drawn
    CGRect rect = openGLView.bounds;
    // Available since iOS 7: renders a view hierarchy (including its subviews) into the current bitmap context
    [openGLView drawViewHierarchyInRect:rect afterScreenUpdates:YES];
    UIImage *snapshotImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return snapshotImage;
    // For AVCaptureVideoPreviewLayer and AVSampleBufferDisplayLayer you can grab a snapshot view with the line below;
    // that view can be added to the hierarchy, but it cannot be captured into an image again
    // UIView *snapView = [openGLView snapshotViewAfterScreenUpdates:YES];
}
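Putting it together for a player or OpenGL view (playerView, blurOverlay, and FanImageTool are placeholders): capture with the helper above, then hand the snapshot to Code One.

UIImage *snapshot = [FanImageTool fan_openglSnapshotImage:self.playerView];
// drawViewHierarchyInRect: captures also need Code One to avoid the red tint
UIImage *blurred = [FanImageTool fan_accelerateBlurWithImage:snapshot blurNumber:20];
self.blurOverlay.image = blurred;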