With this question I am only asking about the possibilities I have with Xcode and iOS, without external libraries. I am already exploring the possibility of using libtiff.
As you've already cracked 1, 3 and 4, it seems the only hurdle you're missing is saving the data and metadata together. Try this (assuming the unprocessed data is in a CMSampleBufferRef called myImageDataSampleBuffer and you've done the heavy lifting of putting the graphical data into a CGImageRef called myImage):
// grab the EXIF/TIFF/GPS attachments that came with the sample buffer
CFDictionaryRef metadata = CMCopyDictionaryOfAttachments(kCFAllocatorDefault,
                                                         myImageDataSampleBuffer,
                                                         kCMAttachmentMode_ShouldPropagate);

// build the destination URL
NSFileManager* fm = [[NSFileManager alloc] init];
NSURL* pathUrl = [fm URLForDirectory:saveDir
                            inDomain:NSUserDomainMask
                   appropriateForURL:nil
                              create:YES
                               error:nil];
NSURL* saveUrl = [pathUrl URLByAppendingPathComponent:@"myfilename.tif"];

// write the image and its metadata into a TIFF in one step
CGImageDestinationRef destination = CGImageDestinationCreateWithURL((__bridge CFURLRef)saveUrl,
                                                                    (__bridge CFStringRef)@"public.tiff", 1, NULL);
CGImageDestinationAddImage(destination, myImage, metadata);
CGImageDestinationFinalize(destination);
CFRelease(destination);
if (metadata) CFRelease(metadata); // CMCopyDictionaryOfAttachments returns a +1 reference
This thread was very helpful in resolving a very similar problem, so I thought I'd contribute a Swift 2.0 implementation of the solution in case someone comes looking.
// needs: import AVFoundation, CoreMedia, CoreVideo, ImageIO and MobileCoreServices (for kUTTypeTIFF)
stillImageOutput?.captureStillImageAsynchronouslyFromConnection(videoConnection, completionHandler: { (imageDataSampleBuffer, error) -> Void in

    // get the image metadata (EXIF, etc.) attached to the sample buffer
    let metaData: CFDictionaryRef? = CMCopyDictionaryOfAttachments(kCFAllocatorDefault, imageDataSampleBuffer, kCMAttachmentMode_ShouldPropagate)

    // get a reference to the pixel buffer
    guard let imageBuffer = CMSampleBufferGetImageBuffer(imageDataSampleBuffer) else { return }

    // lock the buffer while we read from it
    CVPixelBufferLockBaseAddress(imageBuffer, 0)

    // read the image properties
    let baseAddress = CVPixelBufferGetBaseAddress(imageBuffer)
    let bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer)
    let width = CVPixelBufferGetWidth(imageBuffer)
    let height = CVPixelBufferGetHeight(imageBuffer)

    // color space
    let colorSpace = CGColorSpaceCreateDeviceRGB()

    // bitmap context matching the camera output setting kCVPixelFormatType_32BGRA
    let bitmapInfo = CGBitmapInfo(rawValue: CGImageAlphaInfo.PremultipliedFirst.rawValue).union(.ByteOrder32Little)
    let newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, bitmapInfo.rawValue)

    // unlock the buffer
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0)

    // create a CGImageRef from the CVImageBufferRef
    guard let newImage = CGBitmapContextCreateImage(newContext) else {
        return
    }

    // create a tmp file and write the image together with its metadata
    let fileName = String(format: "%@_%@", NSProcessInfo.processInfo().globallyUniqueString, "cap.tiff")
    let fileURL = NSURL(fileURLWithPath: NSTemporaryDirectory()).URLByAppendingPathComponent(fileName)

    if let destination = CGImageDestinationCreateWithURL(fileURL, kUTTypeTIFF, 1, nil) {
        CGImageDestinationAddImage(destination, newImage, metaData)
        let wrote = CGImageDestinationFinalize(destination)
        if !wrote || !NSFileManager.defaultManager().fileExistsAtPath(fileURL.path!) {
            // the write failed - handle the error here
            return
        }
    }
})
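If you want to check that the EXIF actually made it into the file, reading it back with ImageIO looks roughly like this (a sketch in the same Swift 2 style as above; fileURL is the temporary-file URL created in the handler):

// reopen the TIFF we just wrote and inspect its properties
if let source = CGImageSourceCreateWithURL(fileURL, nil) {
    if let properties = CGImageSourceCopyPropertiesAtIndex(source, 0, nil) {
        // the EXIF tags live under kCGImagePropertyExifDictionary
        let dict = properties as NSDictionary
        print(dict[kCGImagePropertyExifDictionary as String])
    }
}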
P.S. For this to work, you have to configure the still image output to deliver uncompressed BGRA frames, like this:
stillImageOutput = AVCaptureStillImageOutput()
stillImageOutput?.outputSettings = [ kCVPixelBufferPixelFormatTypeKey: Int(kCVPixelFormatType_32BGRA) ]
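The capture call above also assumes a configured AVCaptureSession and a valid videoConnection. In case that part is missing, a minimal Swift 2 sketch of the setup might look like this (the class and property names are just placeholders, not from the original answer):

import AVFoundation

class CameraController {
    let captureSession = AVCaptureSession()
    var stillImageOutput: AVCaptureStillImageOutput?

    func setupSession() {
        captureSession.sessionPreset = AVCaptureSessionPresetPhoto

        // back camera input (error handling omitted for brevity)
        let device = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
        if let input = try? AVCaptureDeviceInput(device: device) where captureSession.canAddInput(input) {
            captureSession.addInput(input)
        }

        // uncompressed BGRA stills so the bitmap context settings in the capture handler match
        stillImageOutput = AVCaptureStillImageOutput()
        stillImageOutput?.outputSettings = [ kCVPixelBufferPixelFormatTypeKey: Int(kCVPixelFormatType_32BGRA) ]
        if let output = stillImageOutput where captureSession.canAddOutput(output) {
            captureSession.addOutput(output)
        }

        captureSession.startRunning()

        // when capturing, the videoConnection used above comes from:
        // stillImageOutput?.connectionWithMediaType(AVMediaTypeVideo)
    }
}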