How to efficiently and fast blur an image on the iPhone?

前端 未结 3 933
一个人的身影
一个人的身影 2021-02-04 22:07

If I have a UIImage or CGContextRef or the pure bitmap data (direct access to decompressed ARGB-8 pixels), what's my best option to blur an image with radius 10 pixels as fast as possible?

3条回答
  •  北海茫月
    2021-02-04 22:51

    https://github.com/rnystrom/RNBlurModalView

    /// Approximates a Gaussian blur by running three vImage box-convolution
    /// passes over the image's ARGB8888 pixels.
    ///
    /// @param blur  Blur strength, expected in roughly [0..1]; mapped to an odd
    ///              kernel size of about (blur * 40) pixels. vImage requires an
    ///              odd kernel size.
    /// @param image The source image. Its CGImage backing is assumed to be
    ///              8 bits/channel, 4 channels — TODO confirm against callers.
    /// @return The blurred image, or the original image unchanged if any
    ///         allocation or convolution fails.
    - (UIImage *)boxblurImageWithBlur:(CGFloat)blur bluringImage:(UIImage *)image
    {
        // Kernel size must be odd; force the parity after scaling.
        int boxSize = (int)(blur * 40);
        boxSize = boxSize - (boxSize % 2) + 1;

        CGImageRef img = image.CGImage;

        size_t width = CGImageGetWidth(img);
        size_t height = CGImageGetHeight(img);
        size_t rowBytes = CGImageGetBytesPerRow(img);
        size_t byteCount = rowBytes * height;

        // Copy the source pixels into a buffer we own. CFDataGetBytePtr returns
        // a read-only pointer, and the ping-pong convolution below writes back
        // into the "source" buffer — mutating the CFData's backing store would
        // be undefined behavior.
        CGDataProviderRef inProvider = CGImageGetDataProvider(img);
        CFDataRef inBitmapData = CGDataProviderCopyData(inProvider);

        void *srcPixels = malloc(byteCount);
        void *tmpPixels = malloc(byteCount);
        void *dstPixels = malloc(byteCount);

        if (srcPixels == NULL || tmpPixels == NULL || dstPixels == NULL) {
            NSLog(@"boxblur: pixel buffer allocation failed");
            free(srcPixels);
            free(tmpPixels);
            free(dstPixels);
            if (inBitmapData) CFRelease(inBitmapData);
            return image;
        }

        memcpy(srcPixels, CFDataGetBytePtr(inBitmapData), byteCount);
        CFRelease(inBitmapData);  // no longer needed once copied

        vImage_Buffer inBuffer  = { .data = srcPixels, .height = height, .width = width, .rowBytes = rowBytes };
        vImage_Buffer tmpBuffer = { .data = tmpPixels, .height = height, .width = width, .rowBytes = rowBytes };
        vImage_Buffer outBuffer = { .data = dstPixels, .height = height, .width = width, .rowBytes = rowBytes };

        // Three successive box blurs approximate a Gaussian. Check each pass;
        // a failed pass leaves the buffers in an undefined state.
        vImage_Error error = vImageBoxConvolve_ARGB8888(&inBuffer, &tmpBuffer, NULL, 0, 0,
                                                        boxSize, boxSize, NULL, kvImageEdgeExtend);
        if (error == kvImageNoError) {
            error = vImageBoxConvolve_ARGB8888(&tmpBuffer, &inBuffer, NULL, 0, 0,
                                               boxSize, boxSize, NULL, kvImageEdgeExtend);
        }
        if (error == kvImageNoError) {
            error = vImageBoxConvolve_ARGB8888(&inBuffer, &outBuffer, NULL, 0, 0,
                                               boxSize, boxSize, NULL, kvImageEdgeExtend);
        }

        if (error != kvImageNoError) {
            NSLog(@"boxblur: error from convolution %ld", error);
            free(srcPixels);
            free(tmpPixels);
            free(dstPixels);
            return image;
        }

        // Rewrap the blurred pixels in a CGImage. NOTE(review): alpha info is
        // hard-coded to kCGImageAlphaNoneSkipLast — confirm it matches the
        // source image's pixel format.
        CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
        CGContextRef ctx = CGBitmapContextCreate(outBuffer.data,
                                                 width,
                                                 height,
                                                 8,
                                                 rowBytes,
                                                 colorSpace,
                                                 kCGImageAlphaNoneSkipLast);
        CGImageRef imageRef = CGBitmapContextCreateImage(ctx);
        UIImage *returnImage = [UIImage imageWithCGImage:imageRef];

        // Clean up — every buffer, including the intermediate one, is freed
        // (the original leaked the intermediate buffer on every call).
        CGContextRelease(ctx);
        CGColorSpaceRelease(colorSpace);
        CGImageRelease(imageRef);
        free(srcPixels);
        free(tmpPixels);
        free(dstPixels);

        return returnImage;
    }
    

提交回复
热议问题