I'm trying to do some image manipulation on the iPhone, basing things on the GLImageProcessing example from Apple.
Ultimately what I'd like to do is to load an image into a texture, perform one or more of the operations in the example code (hue, saturation, brightness, etc.), then read the resulting image back out for later processing/saving. For the most part, this would never need to touch the screen, so I thought that FBOs might be the way to go.
To start with, I've cobbled together a little example that creates an offscreen FBO, draws to it, then reads the data back out as an image. I was psyched when this worked perfectly in the simulator, then bummed as I realized I just got a black screen on the actual device.
Disclaimer: my OpenGL is old enough that I've had quite a bit of a learning curve going to OpenGL ES, and I've never been much of a texture wizard. I do know that the device has different characteristics from the simulator in terms of framebuffer access (mandatory offscreen FBO and swap on the device, direct access on the simulator), but I haven't been able to find what I've been doing wrong, even after a fairly extensive search.
Any suggestions?
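(As an aside, the "load an image into a texture" step from the second paragraph isn't part of the test case below, which only clears the buffer and draws a triangle. Getting a UIImage into an OpenGL ES 1.1 texture usually looks roughly like the following sketch; sourceImage and textureID are illustrative names, and real code would also need to handle non-power-of-two image sizes.)
// Rough sketch only: draw a UIImage into a bitmap buffer, then upload it as a texture.
CGImageRef cgImage = sourceImage.CGImage;
size_t width = CGImageGetWidth(cgImage);
size_t height = CGImageGetHeight(cgImage);
void* pixelData = malloc(width * height * 4);
CGColorSpaceRef imageColorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef imageContext = CGBitmapContextCreate(pixelData, width, height, 8,
                                                  width * 4, imageColorSpace,
                                                  kCGImageAlphaPremultipliedLast);
CGContextDrawImage(imageContext, CGRectMake(0, 0, width, height), cgImage);
GLuint textureID = 0;
glGenTextures(1, &textureID);
glBindTexture(GL_TEXTURE_2D, textureID);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, (GLsizei)width, (GLsizei)height, 0,
             GL_RGBA, GL_UNSIGNED_BYTE, pixelData);
CGContextRelease(imageContext);
CGColorSpaceRelease(imageColorSpace);
free(pixelData);
The offscreen test case itself: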
// set up the offscreen FBO sizes
int renderBufferWidth = 1280;
int renderBufferHeight = 720;
// now the FBO
GLuint fbo = 0;
glGenFramebuffersOES(1, &fbo);
glBindFramebufferOES(GL_FRAMEBUFFER_OES, fbo);
GLuint renderBuffer = 0;
glGenRenderbuffersOES(1, &renderBuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, renderBuffer);
glRenderbufferStorageOES(GL_RENDERBUFFER_OES,
                         GL_RGBA8_OES,
                         renderBufferWidth,
                         renderBufferHeight);
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES,
                             GL_COLOR_ATTACHMENT0_OES,
                             GL_RENDERBUFFER_OES,
                             renderBuffer);
GLenum status = glCheckFramebufferStatusOES(GL_FRAMEBUFFER_OES);
if (status != GL_FRAMEBUFFER_COMPLETE_OES) {
    NSLog(@"Problem with OpenGL framebuffer after specifying color render buffer: %x", status);
}
// throw in a test drawing
glClearColor(0.5f, 0.5f, 0.5f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
static const GLfloat triangleVertices[] = {
    -0.5f, -0.33f,
     0.5f, -0.33f,
    -0.5f,  0.33f
};
static const GLfloat triangleColors[] = {
    1.0, 0.0, 0.0, 0.5,
    0.0, 1.0, 0.0, 0.5,
    0.0, 0.0, 1.0, 0.5
};
GLint backingWidth = 320;
GLint backingHeight = 480;
NSLog(@"setting up view/model matrices");
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
glVertexPointer(2, GL_FLOAT, 0, triangleVertices);
glEnableClientState(GL_VERTEX_ARRAY);
glColorPointer(4, GL_FLOAT, 0, triangleColors);
glEnableClientState(GL_COLOR_ARRAY);
// draw the triangle
glDrawArrays(GL_TRIANGLE_STRIP, 0, 3);
// Extract the resulting rendering as an image
int samplesPerPixel = 4; // R, G, B and A
int rowBytes = samplesPerPixel * renderBufferWidth;
char* bufferData = (char*)malloc(rowBytes * renderBufferHeight);
if (bufferData == NULL) {
    NSLog(@"Unable to allocate buffer for image extraction.");
}
// works on simulator with GL_BGRA, but not on device
glReadPixels(0, 0, renderBufferWidth,
             renderBufferHeight,
             GL_BGRA,
             GL_UNSIGNED_BYTE, bufferData);
NSLog(@"reading pixels from framebuffer");
// Flip it vertically - images read from OpenGL buffers are upside-down
char* flippedBuffer = (char*)malloc(rowBytes * renderBufferHeight);
if (flippedBuffer == NULL) {
    NSLog(@"Unable to allocate flipped buffer for corrected image.");
}
for (int i = 0 ; i < renderBufferHeight ; i++) {
    bcopy(bufferData + i * rowBytes,
          flippedBuffer + (renderBufferHeight - i - 1) * rowBytes,
          rowBytes);
}
// unbind my FBO
glBindFramebufferOES(GL_FRAMEBUFFER_OES, 0);
// Output the image to a file
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
int bitsPerComponent = 8;
CGBitmapInfo bitmapInfo = kCGImageAlphaNoneSkipFirst | kCGBitmapByteOrder32Host;
CGContextRef contextRef = CGBitmapContextCreate(flippedBuffer,
                                                renderBufferWidth,
                                                renderBufferHeight,
                                                bitsPerComponent,
                                                rowBytes, colorSpace, bitmapInfo);
if (contextRef == nil) {
    NSLog(@"Unable to create CGContextRef.");
}
CGImageRef imageRef = CGBitmapContextCreateImage(contextRef);
if (imageRef == nil) {
    NSLog(@"Unable to create CGImageRef.");
} else {
    if (savedImage == NO) {
        UIImage *myImage = [UIImage imageWithCGImage:imageRef];
        UIImageWriteToSavedPhotosAlbum(myImage, nil, nil, nil);
        savedImage = YES;
    }
}
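One thing the listing above never does (unrelated to the black output on the device) is clean up after itself: bufferData and flippedBuffer are never freed, and the Core Graphics objects are never released. If this code runs more than once, something along these lines belongs at the end (a sketch reusing the same local names):
// Free the pixel buffers and release the Core Graphics objects created above.
free(bufferData);
free(flippedBuffer);
CGImageRelease(imageRef);        // CGImageRelease/CGContextRelease are NULL-safe,
CGContextRelease(contextRef);    // unlike plain CFRelease
CGColorSpaceRelease(colorSpace);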
Edit:
The answer, of course, was that the format passed to glReadPixels should be GL_RGBA, not GL_BGRA. GL_RGBA with GL_UNSIGNED_BYTE is the only format/type pair that OpenGL ES guarantees glReadPixels will accept; the simulator happens to tolerate GL_BGRA, but the device does not:
// GL_RGBA works on both the simulator and the device
glReadPixels(0, 0, renderBufferWidth,
             renderBufferHeight,
             GL_RGBA,           // <-- was GL_BGRA
             GL_UNSIGNED_BYTE, bufferData);
As Andrew answered himself, the fix was simply to switch the glReadPixels format from GL_BGRA to GL_RGBA, exactly as in the edit above.
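If reading the pixels back in BGRA order is still attractive on devices that support it, OpenGL ES exposes exactly one extra implementation-defined format/type pair for glReadPixels, which can be queried at runtime. A sketch, assuming the OES_read_format constants from <OpenGLES/ES1/glext.h> are available:
// Ask which format/type pair this implementation supports for glReadPixels
// in addition to the always-available GL_RGBA / GL_UNSIGNED_BYTE.
GLint readFormat = 0;
GLint readType = 0;
glGetIntegerv(GL_IMPLEMENTATION_COLOR_READ_FORMAT_OES, &readFormat);
glGetIntegerv(GL_IMPLEMENTATION_COLOR_READ_TYPE_OES, &readType);
if (readFormat == GL_BGRA && readType == GL_UNSIGNED_BYTE) {
    // This device can hand back BGRA directly.
    glReadPixels(0, 0, renderBufferWidth, renderBufferHeight,
                 GL_BGRA, GL_UNSIGNED_BYTE, bufferData);
} else {
    // Fall back to the guaranteed combination.
    glReadPixels(0, 0, renderBufferWidth, renderBufferHeight,
                 GL_RGBA, GL_UNSIGNED_BYTE, bufferData);
}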
Source: https://stackoverflow.com/questions/4110023/writing-to-then-reading-from-an-offscreen-fbo-on-iphone-works-on-simulator-but