Set the background image or navigation bar to a frosted-glass effect in iOS 7
1. First add the system framework Accelerate.framework to your project.
2. Add the header file: #import <Accelerate/Accelerate.h>
3. Then add the implementation of the blur method as follows:
// Blur method. Parameters: image is the source image; blur is the blur level (between 0 and 2.0)
- (UIImage *)blurryImage:(UIImage *)image withBlurLevel:(CGFloat)blur
{
    // Clamp out-of-range blur values
    if ((blur < 0.1f) || (blur > 2.0f))
    {
        blur = 0.5f;
    }
    // boxSize must be greater than 0, and the convolution kernel must be odd
    int boxSize = (int)(blur * 100);
    boxSize -= (boxSize % 2) + 1;
    NSLog(@"boxSize: %i", boxSize);
    // The image to process
    CGImageRef img = image.CGImage;
    // Image buffers: input buffer and output buffer
    vImage_Buffer inBuffer, outBuffer;
    vImage_Error error;
    // Pixel buffer
    void *pixelBuffer;
    // Data provider: an opaque type that supplies Quartz with data
    CGDataProviderRef inProvider = CGImageGetDataProvider(img);
    // The provider's data
    CFDataRef inBitmapData = CGDataProviderCopyData(inProvider);
    // Width, height, bytes per row, and data pointer
    inBuffer.width = CGImageGetWidth(img);
    inBuffer.height = CGImageGetHeight(img);
    inBuffer.rowBytes = CGImageGetBytesPerRow(img);
    inBuffer.data = (void *)CFDataGetBytePtr(inBitmapData);
    // Output buffer the size of the image: bytes per row * image height
    pixelBuffer = malloc(CGImageGetBytesPerRow(img) * CGImageGetHeight(img));
    outBuffer.data = pixelBuffer;
    outBuffer.width = CGImageGetWidth(img);
    outBuffer.height = CGImageGetHeight(img);
    outBuffer.rowBytes = CGImageGetBytesPerRow(img);
    // A third, intermediate buffer used to smooth the result
    void *pixelBuffer2 = malloc(CGImageGetBytesPerRow(img) * CGImageGetHeight(img));
    vImage_Buffer outBuffer2;
    outBuffer2.data = pixelBuffer2;
    outBuffer2.width = CGImageGetWidth(img);
    outBuffer2.height = CGImageGetHeight(img);
    outBuffer2.rowBytes = CGImageGetBytesPerRow(img);
    // Convolve the ARGB8888 source image with an implicit M x N kernel that acts as a box filter;
    // running the box filter three times approximates a Gaussian blur
    error = vImageBoxConvolve_ARGB8888(&inBuffer, &outBuffer2, NULL, 0, 0, boxSize, boxSize, NULL, kvImageEdgeExtend);
    error = vImageBoxConvolve_ARGB8888(&outBuffer2, &inBuffer, NULL, 0, 0, boxSize, boxSize, NULL, kvImageEdgeExtend);
    error = vImageBoxConvolve_ARGB8888(&inBuffer, &outBuffer, NULL, 0, 0, boxSize, boxSize, NULL, kvImageEdgeExtend);
    if (error)
    {
        NSLog(@"error from convolution %ld", error);
    }
    // NSLog(@"bits per component: %zu", CGImageGetBitsPerComponent(img));
    // DeviceRGB color space
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    // Create a bitmap context from the blurred output buffer; 8 matches CGImageGetBitsPerComponent(img)
    CGContextRef ctx = CGBitmapContextCreate(
        outBuffer.data,
        outBuffer.width,
        outBuffer.height,
        8,
        outBuffer.rowBytes,
        colorSpace,
        CGImageGetBitmapInfo(image.CGImage));
    // Rebuild the processed image from the context
    CGImageRef imageRef = CGBitmapContextCreateImage(ctx);
    UIImage *returnImage = [UIImage imageWithCGImage:imageRef];
    // Clean up
    CGContextRelease(ctx);
    CGColorSpaceRelease(colorSpace);
    free(pixelBuffer);
    free(pixelBuffer2);
    CFRelease(inBitmapData);
    CGImageRelease(imageRef);
    return returnImage;
}
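The title also mentions the navigation bar; the same blurred image can simply be handed to UINavigationBar. A minimal sketch, assuming the method above lives in a view controller that is embedded in a navigation controller (the asset name here is a placeholder):

// e.g. in viewDidLoad
UIImage *navSource = [UIImage imageNamed:@"navBackImage.jpg"]; // placeholder asset name
UIImage *blurredNav = [self blurryImage:navSource withBlurLevel:0.5];
[self.navigationController.navigationBar setBackgroundImage:blurredNav
                                              forBarMetrics:UIBarMetricsDefault];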
4. To use it for the background image, simply pass in the required parameters and use the returned image directly:

UIImage *maoImage = [UIImage imageNamed:@"aboutBackImage.jpg"];
tabBarImage.image = [self blurryImage:maoImage withBlurLevel:0.5];
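For a true frosted-glass look you usually blur a snapshot of whatever sits behind the view rather than a fixed asset. A minimal sketch of that variation, relying on iOS 7's drawViewHierarchyInRect:afterScreenUpdates: and a hypothetical backdropView that holds the content meant to show through the glass:

// Snapshot the content that should show through the "glass"
UIGraphicsBeginImageContextWithOptions(backdropView.bounds.size, NO, 0);
[backdropView drawViewHierarchyInRect:backdropView.bounds afterScreenUpdates:NO];
UIImage *snapshot = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
// Blur the snapshot and use it as the background
tabBarImage.image = [self blurryImage:snapshot withBlurLevel:0.5];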
Success! (*^__^*)... Give it a try and see the result~
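One last note: three box convolutions over a full-size image are not cheap, so if you blur during scrolling or at launch, consider moving the work off the main thread. A minimal sketch using GCD, assuming the same tabBarImage outlet as in step 4:

dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
    // Do the heavy vImage work on a background queue
    UIImage *blurred = [self blurryImage:[UIImage imageNamed:@"aboutBackImage.jpg"]
                           withBlurLevel:0.5];
    dispatch_async(dispatch_get_main_queue(), ^{
        tabBarImage.image = blurred; // UIKit must only be touched on the main thread
    });
});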