I am writing an iPhone app that does some sort of real-time image detection with OpenCV. What is the best way to convert a CMSampleBufferRef image from the camera into an IplImage?
"iplimage->imageData = (char*)bufferBaseAddress;" will lead to memory leak.
It should be "memcpy(iplimage->imageData, (char*)bufferBaseAddress, iplimage->imageSize);"
so the complete coded is:
-(IplImage *)createIplImageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer {
    IplImage *iplimage = NULL;
    if (sampleBuffer) {
        CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        CVPixelBufferLockBaseAddress(imageBuffer, 0);

        // Get the layout of the pixel data; this assumes a non-planar
        // buffer such as kCVPixelFormatType_32BGRA.
        uint8_t *bufferBaseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
        size_t bufferWidth = CVPixelBufferGetWidth(imageBuffer);
        size_t bufferHeight = CVPixelBufferGetHeight(imageBuffer);
        size_t bufferBytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);

        if (bufferBaseAddress) {
            // Create a 4-channel, 8-bit image that owns its own pixel storage.
            iplimage = cvCreateImage(cvSize((int)bufferWidth, (int)bufferHeight),
                                     IPL_DEPTH_8U, 4);

            // Copy the pixels row by row: the buffer's rows may be padded,
            // so bufferBytesPerRow can be larger than bufferWidth * 4.
            for (size_t row = 0; row < bufferHeight; row++) {
                memcpy(iplimage->imageData + row * iplimage->widthStep,
                       bufferBaseAddress + row * bufferBytesPerRow,
                       bufferWidth * 4);
            }
        }

        // The pixels have been copied, so the buffer can be unlocked again.
        CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    } else {
        DLog(@"No sampleBuffer!!");
    }
    return iplimage;
}
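
For context, here is a minimal sketch of how this method might be driven from an AVCaptureVideoDataOutput delegate. The session setup shown in the comments and the runDetectionOn: method are hypothetical; the two real requirements are that the output be configured for kCVPixelFormatType_32BGRA so the buffer matches the 4-channel IplImage, and that the caller release the copy with cvReleaseImage:

// Assumed setup (hypothetical, done once when building the capture session):
// AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
// output.videoSettings = [NSDictionary dictionaryWithObject:
//     [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA]
//     forKey:(id)kCVPixelBufferPixelFormatTypeKey];

- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection {
    IplImage *image = [self createIplImageFromSampleBuffer:sampleBuffer];
    if (image) {
        [self runDetectionOn:image];  // hypothetical OpenCV processing step
        cvReleaseImage(&image);       // the method returns an owned copy
    }
}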