I'm rewriting a large part of my texturing code. I would like to be able to specify certain internal formats: GL_RGB8I, GL_RGB8UI, GL_RGB16I, GL_RGB16UI, GL_RGB32I, and GL_RGB32UI. These tokens do not exist in OpenGL 2.
When specifying these internal formats as arguments to glTexImage2D, the texturing fails (the texture appears as white). When checking for errors, I get [EDIT:] error 1282 ("invalid operation"). I take this to mean that the implementation is still applying OpenGL 2 validation rules to glTexImage2D, and so the call is failing. Obviously, it will need to follow a newer version's rules to succeed. Enums like GL_RGB, GL_RGBA, and (oddly) GL_RGB32F and GL_RGBA32F work as expected.
I have configured the project to use GLEW or GLee for extension loading. I can use OpenGL 4 calls with no problem elsewhere (e.g., glPatchParameteri, glBindFramebuffer, etc.), and the enums in question certainly exist. For completeness, glGetString(GL_VERSION) returns "4.2.0". My question: can I force one of these extension libraries to use the OpenGL 4.2 version of glTexImage2D? If so, how?
EDIT: The code is too complicated to post, but here is a simple, self-contained example using GLee that also demonstrates the problem:
/*
 * Self-contained GLee/GLUT reproduction of the integer-internal-format
 * failure described above: glTexImage2D with GL_RGB8I/UI etc. raises
 * GL_INVALID_OPERATION while GL_RGB8, GL_RGB32F, etc. succeed.
 *
 * NOTE(review): the six "#include" directives below lost their header
 * names when this code was pasted (angle-bracket contents were stripped
 * as markup). Presumably they were <GLee.h>, <gl/glut.h>, <stdio.h>,
 * and similar -- restore them from the original project before compiling.
 */
#include
#include
#include
#include
//For Windows
#pragma comment(lib,"GLee.lib")
#pragma comment(lib,"opengl32.lib")
#pragma comment(lib,"glu32.lib")
#pragma comment(lib,"glut32.lib")
#include
#include

// Window dimensions (width, height) in pixels.
const int screen_size[2] = {512,512};
// Edge length, in texels, of the square test texture.
#define TEXTURE_SIZE 64

//Choose a selection. If you see black, then texturing is working. If you see red, then the quad isn't drawing. If you see white, texturing has failed.
#define TYPE 1

/*
 * Print the result of glGetError() as "<code> = <description>".
 * gluErrorString() returns NULL for codes it does not recognize
 * (e.g. tokens introduced by extensions), hence the two-way printf.
 */
void error_check(void) {
    GLenum error_code = glGetError();
    const GLubyte* error_string = gluErrorString(error_code);
    (error_string==NULL) ?
        printf("%d = (unrecognized error--an extension error?)\n",error_code) :
        printf("%d = \"%s\"\n",error_code,error_string);
}

/*
 * get_type(1) yields the internal format under test; get_type(anything
 * else) yields the external (pixel-transfer) format handed to
 * glTexImage2D. Each TYPE value selects one pairing.
 *
 * NOTE(review): per the glTexImage2D reference, integer internal formats
 * (GL_RGB8I, GL_RGBA16UI, ...) require a matching integer external
 * format such as GL_RGB_INTEGER / GL_RGBA_INTEGER; pairing them with
 * plain GL_RGB / GL_RGBA yields GL_INVALID_OPERATION -- which matches
 * exactly the cases marked "doesn't work (invalid op)" below. Confirm
 * against the spec, but this would explain the failures without any
 * extension-library versioning being involved.
 */
#if TYPE==1 //############ 8-BIT TESTS ############
inline GLenum get_type(int which) { return (which==1)? GL_RGB8: GL_RGB; } //works
#elif TYPE==2
inline GLenum get_type(int which) { return (which==1)? GL_RGBA8:GL_RGBA; } //works
#elif TYPE==3
inline GLenum get_type(int which) { return (which==1)? GL_RGB8UI: GL_RGB; } //doesn't work (invalid op)
#elif TYPE==4
inline GLenum get_type(int which) { return (which==1)? GL_RGB8I: GL_RGB; } //doesn't work (invalid op)
#elif TYPE==5
inline GLenum get_type(int which) { return (which==1)? GL_RGBA8UI:GL_RGBA; } //doesn't work (invalid op)
#elif TYPE==6
inline GLenum get_type(int which) { return (which==1)? GL_RGBA8I:GL_RGBA; } //doesn't work (invalid op)
#elif TYPE==7 //############ 16-BIT TESTS ############
inline GLenum get_type(int which) { return (which==1)? GL_RGB16: GL_RGB; } //works
#elif TYPE==8
inline GLenum get_type(int which) { return (which==1)? GL_RGBA16:GL_RGBA; } //works
#elif TYPE==9
inline GLenum get_type(int which) { return (which==1)? GL_RGB16UI: GL_RGB; } //doesn't work (invalid op)
#elif TYPE==10
inline GLenum get_type(int which) { return (which==1)? GL_RGB16I: GL_RGB; } //doesn't work (invalid op)
#elif TYPE==11
inline GLenum get_type(int which) { return (which==1)?GL_RGBA16UI:GL_RGBA; } //doesn't work (invalid op)
#elif TYPE==12
inline GLenum get_type(int which) { return (which==1)? GL_RGBA16I:GL_RGBA; } //doesn't work (invalid op)
#elif TYPE==13 //############ 32-BIT TESTS ############
inline GLenum get_type(int which) { return (which==1)? GL_RGB32: GL_RGB; } //token doesn't exist
#elif TYPE==14
inline GLenum get_type(int which) { return (which==1)? GL_RGBA32:GL_RGBA; } //token doesn't exist
#elif TYPE==15
inline GLenum get_type(int which) { return (which==1)? GL_RGB32UI: GL_RGB; } //doesn't work (invalid op)
#elif TYPE==16
inline GLenum get_type(int which) { return (which==1)? GL_RGB32I: GL_RGB; } //doesn't work (invalid op)
#elif TYPE==17
inline GLenum get_type(int which) { return (which==1)?GL_RGBA32UI:GL_RGBA; } //doesn't work (invalid op)
#elif TYPE==18
inline GLenum get_type(int which) { return (which==1)? GL_RGBA32I:GL_RGBA; } //doesn't work (invalid op)
#elif TYPE==19 //############ 32-BIT FLOAT ############
inline GLenum get_type(int which) { return (which==1)? GL_RGB32F: GL_RGB; } //works
#elif TYPE==20
inline GLenum get_type(int which) { return (which==1)? GL_RGBA32F:GL_RGBA; } //works
#endif

// Name of the single GL texture object created by create_texture().
GLuint texture;

/*
 * Generate and bind the test texture, then build a CPU-side pixel buffer
 * to upload. Prints the GL error state before and after object creation.
 *
 * NOTE(review): this function is TRUNCATED in the paste -- it breaks off
 * mid-for-loop below; the data fill, the glTexImage2D call, and any
 * cleanup are missing from this view. Do not treat what follows the
 * buffer allocation as complete.
 */
void create_texture(void) {
    printf(" Status before texture setup: ");
    error_check();
    glGenTextures(1,&texture);
    glBindTexture(GL_TEXTURE_2D,texture);
    printf(" Status after texture created: ");
    error_check();
    GLenum data_type = GL_UNSIGNED_BYTE;
    int data_length = TEXTURE_SIZE*TEXTURE_SIZE*4; //maximum number of channels, so it will work for everything
    unsigned char* data = new unsigned char[data_length];
    for (int i=0;i