Frosted Glass (Blur) Effect

#import <QuartzCore/QuartzCore.h>
#import <Accelerate/Accelerate.h>

// Blur the target view/imageView: snapshot self.view, blur it, and show the result.
- (IBAction)startAction:(id)sender {
    [self blurryImage:[self convertViewToImage:self.view] withBlurLevel:0.6 completed:^(UIImage *image) {
        _myImageView.layer.contents = (__bridge id)image.CGImage;
    }];
}
// Render a view's layer into a UIImage. The low-quality JPEG round trip
// discards detail up front, which makes the subsequent blur cheaper and smoother.
- (UIImage *)convertViewToImage:(UIView *)v {
    UIGraphicsBeginImageContextWithOptions(v.frame.size, NO, [UIScreen mainScreen].scale);
    [v.layer renderInContext:UIGraphicsGetCurrentContext()];
    UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    NSData *imageData = UIImageJPEGRepresentation(image, 0.1);
    image = [UIImage imageWithData:imageData];
    return image;
}
// Blur an image off the main thread with vImage box convolutions,
// then hand the result back on the main queue.
- (void)blurryImage:(UIImage *)image withBlurLevel:(CGFloat)blur completed:(void (^)(UIImage *))completed {
    __block CGFloat _blur = blur;
    dispatch_block_t block = ^{
        // Clamp the blur level to a sane range.
        if (_blur < 0.f || _blur > 2.f) {
            _blur = 0.5f;
        }
        // Derive the kernel size from the (clamped) blur level; it must be odd.
        int boxSize = (int)(_blur * 40);
        boxSize = boxSize - (boxSize % 2) + 1;

        CGImageRef img = image.CGImage;
        vImage_Buffer inBuffer, outBuffer;
        vImage_Error error;
        void *pixelBuffer;

        // Wrap the source bitmap in a vImage buffer.
        CGDataProviderRef inProvider = CGImageGetDataProvider(img);
        CFDataRef inBitmapData = CGDataProviderCopyData(inProvider);
        inBuffer.width = CGImageGetWidth(img);
        inBuffer.height = CGImageGetHeight(img);
        inBuffer.rowBytes = CGImageGetBytesPerRow(img);
        inBuffer.data = (void *)CFDataGetBytePtr(inBitmapData);

        // Destination buffer for the final pass.
        pixelBuffer = malloc(CGImageGetBytesPerRow(img) * CGImageGetHeight(img));
        if (pixelBuffer == NULL) NSLog(@"No pixelbuffer");
        outBuffer.data = pixelBuffer;
        outBuffer.width = CGImageGetWidth(img);
        outBuffer.height = CGImageGetHeight(img);
        outBuffer.rowBytes = CGImageGetBytesPerRow(img);

        // Scratch buffer for the intermediate passes.
        void *pixelBuffer2 = malloc(CGImageGetBytesPerRow(img) * CGImageGetHeight(img));
        vImage_Buffer outBuffer2;
        outBuffer2.data = pixelBuffer2;
        outBuffer2.width = CGImageGetWidth(img);
        outBuffer2.height = CGImageGetHeight(img);
        outBuffer2.rowBytes = CGImageGetBytesPerRow(img);

        // Three box-convolution passes approximate a Gaussian blur.
        error = vImageBoxConvolve_ARGB8888(&inBuffer, &outBuffer2, NULL, 0, 0, boxSize, boxSize, NULL, kvImageEdgeExtend);
        error = vImageBoxConvolve_ARGB8888(&outBuffer2, &inBuffer, NULL, 0, 0, boxSize, boxSize, NULL, kvImageEdgeExtend);
        error = vImageBoxConvolve_ARGB8888(&inBuffer, &outBuffer, NULL, 0, 0, boxSize, boxSize, NULL, kvImageEdgeExtend);
        if (error) {
            NSLog(@"error from convolution %ld", error);
        }

        // Rebuild a UIImage from the blurred pixels.
        CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
        CGContextRef ctx = CGBitmapContextCreate(outBuffer.data,
                                                 outBuffer.width,
                                                 outBuffer.height,
                                                 8,
                                                 outBuffer.rowBytes,
                                                 colorSpace,
                                                 CGImageGetBitmapInfo(image.CGImage));
        CGImageRef imageRef = CGBitmapContextCreateImage(ctx);
        UIImage *returnImage = [UIImage imageWithCGImage:imageRef];

        // Clean up.
        CGContextRelease(ctx);
        CGColorSpaceRelease(colorSpace);
        free(pixelBuffer);
        free(pixelBuffer2);
        CFRelease(inBitmapData);
        CGImageRelease(imageRef);

        // Deliver the result on the main queue.
        dispatch_async(dispatch_get_main_queue(), ^{
            if (completed) {
                completed(returnImage);
            }
        });
    };
    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), block);
}
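
As a usage sketch (assumed names: _overlayImageView is a hypothetical full-screen UIImageView in the same view controller, and the 1.0 blur level is arbitrary), the two methods above could be combined like this to frost the current screen behind an overlay:

// Hypothetical usage: snapshot the whole screen, blur it off the main thread,
// and show the result in an assumed overlay image view.
- (void)showFrostedOverlay {
    UIImage *snapshot = [self convertViewToImage:self.view];
    [self blurryImage:snapshot withBlurLevel:1.0 completed:^(UIImage *blurred) {
        // _overlayImageView is an assumed UIImageView covering the controller's view.
        _overlayImageView.image = blurred;
        _overlayImageView.hidden = NO;
    }];
}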

posted @ 2016-03-13 14:28  Jk_Chan