FXBlurView
- (UIImage *)blurredImageWithRadius:iterations:tintColor: assumes image is in ARGB format
- (UIImage *)blurredImageWithRadius:iterations:tintColor:
assumes the image is in ARGB format, leading to crashes (EXC_BAD_ACCESS) when the image is not in that format.
You can reproduce it with this example, which uses an 8-bit-per-pixel indexed PNG image:
UIImage *img = [UIImage imageWithData:[NSData dataWithContentsOfURL:[NSURL URLWithString:@"http://data3.whicdn.com/images/84761236/large.png"]]];
[img blurredImageWithRadius:10 iterations:10 tintColor:nil];
I have the same problem! Does anyone know of a workaround?
me too
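One possible workaround until the category handles other formats: redraw the image into a plain 32-bit RGBA bitmap with UIGraphics before calling blurredImageWithRadius:iterations:tintColor:, so the blur only ever sees a pixel layout it understands. Untested sketch; NormalizedImage is just an illustrative name:

#import <UIKit/UIKit.h>

// Hypothetical helper: redraws any UIImage (indexed, grayscale, ...) into a
// standard premultiplied 32-bit RGBA bitmap that the vImage blur can handle.
static UIImage *NormalizedImage(UIImage *image) {
    UIGraphicsBeginImageContextWithOptions(image.size, NO, image.scale);
    [image drawInRect:(CGRect){CGPointZero, image.size}];
    UIImage *normalized = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return normalized;
}

// Usage with the repro image from above:
// UIImage *blurred = [NormalizedImage(img) blurredImageWithRadius:10 iterations:10 tintColor:nil];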
Here's my solution, which converts non-ARGB8888 images to ARGB8888 before blurring:
#import "UIImage+Blur.h"
#import <Accelerate/Accelerate.h>
#import <QuartzCore/QuartzCore.h>
#pragma mark UIImage+Blur Implementation
@implementation UIImage (Blur)
- (UIImage *)blurredImageWithRadius:(CGFloat)radius iterations:(NSUInteger)iterations tintColor:(UIColor *)tintColor {
// Image must be nonzero size
if (floorf(self.size.width) * floorf(self.size.height) <= 0.0f) {
return self;
}
// Box size must be an odd integer
int boxSize = radius * self.scale;
if (boxSize % 2 == 0) {
boxSize++;
}
// Create image buffers
CGImageRef imageRef = self.CGImage;
// DEBUG
// CGBitmapInfo bitmapInfo = CGImageGetBitmapInfo(imageRef);
// CGImageAlphaInfo alphaInfo = CGImageGetAlphaInfo(imageRef);
// NSLog(@"\n"
// "CGImageGetHeight: %d\n"
// "CGImageGetWidth: %d\n"
// "CGImageGetColorSpace: %@\n"
// "CGImageGetBitsPerPixel: %d\n"
// "CGImageGetBitsPerComponent: %d\n"
// "CGImageGetBytesPerRow: %d\n"
// "CGImageGetBitmapInfo: 0x%.8X\n"
// " kCGBitmapAlphaInfoMask = %s\n"
// " kCGBitmapFloatComponents = %s\n"
// " kCGBitmapByteOrderMask = 0x%.8X\n"
// " kCGBitmapByteOrderDefault = %s\n"
// " kCGBitmapByteOrder16Little = %s\n"
// " kCGBitmapByteOrder32Little = %s\n"
// " kCGBitmapByteOrder16Big = %s\n"
// " kCGBitmapByteOrder32Big = %s\n"
// "CGImageGetAlphaInfo: 0x%.8X\n"
// " kCGImageAlphaNone = %s\n"
// " kCGImageAlphaPremultipliedLast = %s\n"
// " kCGImageAlphaPremultipliedFirst = %s\n"
// " kCGImageAlphaLast = %s\n"
// " kCGImageAlphaFirst = %s\n"
// " kCGImageAlphaNoneSkipLast = %s\n"
// " kCGImageAlphaNoneSkipFirst = %s\n"
// " kCGImageAlphaOnly = %s\n",
// (int)CGImageGetWidth(imageRef),
// (int)CGImageGetHeight(imageRef),
// CGImageGetColorSpace(imageRef),
// (int)CGImageGetBitsPerPixel(imageRef),
// (int)CGImageGetBitsPerComponent(imageRef),
// (int)CGImageGetBytesPerRow(imageRef),
// (unsigned)bitmapInfo,
// (bitmapInfo & kCGBitmapAlphaInfoMask) ? "YES" : "NO",
// (bitmapInfo & kCGBitmapFloatComponents) ? "YES" : "NO",
// (bitmapInfo & kCGBitmapByteOrderMask),
// ((bitmapInfo & kCGBitmapByteOrderMask) == kCGBitmapByteOrderDefault) ? "YES" : "NO",
// ((bitmapInfo & kCGBitmapByteOrderMask) == kCGBitmapByteOrder16Little) ? "YES" : "NO",
// ((bitmapInfo & kCGBitmapByteOrderMask) == kCGBitmapByteOrder32Little) ? "YES" : "NO",
// ((bitmapInfo & kCGBitmapByteOrderMask) == kCGBitmapByteOrder16Big) ? "YES" : "NO",
// ((bitmapInfo & kCGBitmapByteOrderMask) == kCGBitmapByteOrder32Big) ? "YES" : "NO",
// (unsigned)alphaInfo,
// (alphaInfo == kCGImageAlphaNone) ? "YES" : "NO",
// (alphaInfo == kCGImageAlphaPremultipliedLast) ? "YES" : "NO",
// (alphaInfo == kCGImageAlphaPremultipliedFirst) ? "YES" : "NO",
// (alphaInfo == kCGImageAlphaLast) ? "YES" : "NO",
// (alphaInfo == kCGImageAlphaFirst) ? "YES" : "NO",
// (alphaInfo == kCGImageAlphaNoneSkipLast) ? "YES" : "NO",
// (alphaInfo == kCGImageAlphaNoneSkipFirst) ? "YES" : "NO",
// (alphaInfo == kCGImageAlphaOnly) ? "YES" : "NO"
// );
// END DEBUG
// Convert to ARGB8888 if the source is not already 32-bit; the vImage
// ARGB8888 convolve assumes 4 bytes per pixel and crashes on other layouts
CGImageRef convertedImageRef = NULL;
if (![self isARGB8888:imageRef]) {
NSLog(@"Converting image to ARGB8888");
CGContextRef ctx = [self createARGBBitmapContextFromImage:imageRef];
CGRect rect = {{0, 0}, {CGImageGetWidth(imageRef), CGImageGetHeight(imageRef)}};
// Draw the image into the bitmap context. Once drawn, the memory backing
// the context contains the raw image data in the desired color space and
// byte order
CGContextDrawImage(ctx, rect, imageRef);
convertedImageRef = CGBitmapContextCreateImage(ctx);
imageRef = convertedImageRef;
CGContextRelease(ctx);
}
vImage_Buffer buffer1, buffer2;
buffer1.width = buffer2.width = CGImageGetWidth(imageRef);
buffer1.height = buffer2.height = CGImageGetHeight(imageRef);
buffer1.rowBytes = buffer2.rowBytes = CGImageGetBytesPerRow(imageRef);
CFIndex bytes = buffer1.rowBytes * buffer1.height;
buffer1.data = malloc(bytes);
buffer2.data = malloc(bytes);
// Create temp buffer
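// Passing the kvImageGetTempBufferSize flag makes the convolve do no work
// and instead return the scratch-buffer size needed for these dimensions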
void *tempBuffer = malloc(vImageBoxConvolve_ARGB8888(&buffer1, &buffer2, NULL, 0, 0, boxSize, boxSize,
NULL, kvImageEdgeExtend + kvImageGetTempBufferSize));
// Copy image data
CFDataRef dataSource = CGDataProviderCopyData(CGImageGetDataProvider(imageRef));
memcpy(buffer1.data, CFDataGetBytePtr(dataSource), bytes);
CFRelease(dataSource);
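// Each pass reads from buffer1 and writes into buffer2; repeated box blurs
// approximate a Gaussian blur while staying cheap per pass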
for (int i = 0; i < iterations; i++) {
// Perform blur
vImageBoxConvolve_ARGB8888(&buffer1, &buffer2, tempBuffer, 0, 0, boxSize, boxSize, NULL, kvImageEdgeExtend);
// Swap buffers
void *temp = buffer1.data;
buffer1.data = buffer2.data;
buffer2.data = temp;
}
// Free buffers
free(buffer2.data);
free(tempBuffer);
// Create image context from buffer
CGContextRef ctx = CGBitmapContextCreate(buffer1.data, buffer1.width, buffer1.height,
8, buffer1.rowBytes, CGImageGetColorSpace(imageRef),
CGImageGetBitmapInfo(imageRef));
// Apply tint
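// kCGBlendModePlusLighter adds the tint additively over the blurred pixels;
// the 0.25 alpha keeps the tint subtle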
if (tintColor && ![tintColor isEqual:[UIColor clearColor]]) {
CGContextSetFillColorWithColor(ctx, [tintColor colorWithAlphaComponent:0.25].CGColor);
CGContextSetBlendMode(ctx, kCGBlendModePlusLighter);
CGContextFillRect(ctx, CGRectMake(0, 0, buffer1.width, buffer1.height));
}
// Create image from context
CGImageRef blurredImageRef = CGBitmapContextCreateImage(ctx);
UIImage *image = [UIImage imageWithCGImage:blurredImageRef scale:self.scale orientation:self.imageOrientation];
CGImageRelease(blurredImageRef);
CGContextRelease(ctx);
if (convertedImageRef) {
CGImageRelease(convertedImageRef);
}
free(buffer1.data);
return image;
}
- (BOOL)isARGB8888:(CGImageRef)imageRef {
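// Loose check: 32 bits per pixel, 8 bits per component, and some alpha info
// present. It does not verify byte order or alpha position, but it catches
// the indexed/grayscale images that crash the vImage convolve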
return (CGImageGetBitsPerPixel(imageRef) == 32
&& CGImageGetBitsPerComponent(imageRef) == 8
&& (CGImageGetBitmapInfo(imageRef) & kCGBitmapAlphaInfoMask));
}
- (CGContextRef)createARGBBitmapContextFromImage:(CGImageRef)inImage {
// Get image width, height. We'll use the entire image.
size_t pixelsWide = CGImageGetWidth(inImage);
size_t pixelsHigh = CGImageGetHeight(inImage);
// Declare the number of bytes per row. Each pixel in the bitmap is
// represented by 4 bytes: 8 bits each of alpha, red, green, and blue.
size_t bitmapBytesPerRow = pixelsWide * 4;
// Use the device RGB color space.
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
if (colorSpace == NULL) {
NSLog(@"Error allocating color space");
return NULL;
}
// Create the bitmap context. We want pre-multiplied ARGB, 8 bits per
// component. Regardless of what the source image format is (indexed,
// grayscale, CMYK, and so on), it will be converted to the format
// specified here by CGBitmapContextCreate. Passing NULL for the data
// parameter lets Quartz allocate and manage the backing store, so there
// is no malloc'd buffer to leak.
CGContextRef context = CGBitmapContextCreate(NULL,
pixelsWide,
pixelsHigh,
8, // bits per component
bitmapBytesPerRow,
colorSpace,
(CGBitmapInfo)kCGImageAlphaPremultipliedFirst);
if (context == NULL) {
NSLog(@"Context not created!");
}
// Make sure and release colorspace before returning
CGColorSpaceRelease(colorSpace);
return context;
}
@end
@gresrun Thx! Saved me time.