I'm rewriting a large part of my texturing code, and I would like to be able to specify certain internal formats: GL_RGB8I, GL_RGB8UI, GL_RGB16I, GL_RGB16UI, GL_RGB32I, and GL_RGB32UI. These tokens do not exist in OpenGL 2.
When I specify these internal formats as arguments to glTexImage2D, the texturing fails (the texture appears as white). When checking for errors, I get [EDIT:] 1282 ("invalid operation"). I take this to mean that the implementation is still using the OpenGL 2 version of glTexImage2D, so the call fails; obviously, it would need to use a newer version to succeed. Enums like GL_RGB, GL_RGBA, and (oddly) GL_RGB32F and GL_RGBA32F work as expected.
My code can be configured to use either GLEW or GLee for extensions. I can use OpenGL 4 calls with no problem elsewhere (e.g., glPatchParameteri, glBindFramebuffer, etc.), and the enums in question certainly exist. For completeness, glGetString(GL_VERSION) returns "4.2.0". My question: can I force one of these extension libraries to use the OpenGL 4.2 version of glTexImage2D? If so, how?
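A check along these lines could confirm at runtime that the context exposes OpenGL 3.0, where these integer formats were introduced (a minimal sketch; it assumes GLee's GLEE_VERSION_3_0 flag, with GLEW_VERSION_3_0 after glewInit() being the GLEW equivalent):
//Sanity-check sketch (assumes GLee's GLEE_VERSION_3_0 flag; under GLEW the
//equivalent would be GLEW_VERSION_3_0, valid after glewInit()). The integer
//internal formats were introduced in OpenGL 3.0, so the context must report
//at least that version for them to be usable.
void check_gl3(void) {
    printf("GL_VERSION: %s\n", (const char*)glGetString(GL_VERSION));
    if (GLEE_VERSION_3_0) printf("GL 3.0+ reported; integer formats should exist.\n");
    else                  printf("No GL 3.0 support reported!\n");
}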
EDIT: The code is too complicated to post, but here is a simple, self-contained example using GLee that also demonstrates the problem:
#include <GLee5_4/GLee.h>

#include <GL/gl.h>
#include <GL/glu.h>
#include <GL/glut.h>

//For Windows
#pragma comment(lib,"GLee.lib")
#pragma comment(lib,"opengl32.lib")
#pragma comment(lib,"glu32.lib")
#pragma comment(lib,"glut32.lib")

#include <stdlib.h>
#include <stdio.h>

const int screen_size[2] = {512,512};
#define TEXTURE_SIZE 64

//Choose a selection. If you see black, then texturing is working. If you see red, then the quad isn't drawing. If you see white, texturing has failed.
#define TYPE 1

void error_check(void) {
    GLenum error_code = glGetError();
    const GLubyte* error_string = gluErrorString(error_code);
    if (error_string==NULL) printf("%d = (unrecognized error--an extension error?)\n",error_code);
    else                    printf("%d = \"%s\"\n",error_code,error_string);
}
#if TYPE==1 //############ 8-BIT TESTS ############
inline GLenum get_type(int which) { return (which==1)? GL_RGB8    : GL_RGB;  } //works
#elif TYPE==2
inline GLenum get_type(int which) { return (which==1)? GL_RGBA8   : GL_RGBA; } //works
#elif TYPE==3
inline GLenum get_type(int which) { return (which==1)? GL_RGB8UI  : GL_RGB;  } //doesn't work (invalid op)
#elif TYPE==4
inline GLenum get_type(int which) { return (which==1)? GL_RGB8I   : GL_RGB;  } //doesn't work (invalid op)
#elif TYPE==5
inline GLenum get_type(int which) { return (which==1)? GL_RGBA8UI : GL_RGBA; } //doesn't work (invalid op)
#elif TYPE==6
inline GLenum get_type(int which) { return (which==1)? GL_RGBA8I  : GL_RGBA; } //doesn't work (invalid op)
#elif TYPE==7 //############ 16-BIT TESTS ############
inline GLenum get_type(int which) { return (which==1)? GL_RGB16   : GL_RGB;  } //works
#elif TYPE==8
inline GLenum get_type(int which) { return (which==1)? GL_RGBA16  : GL_RGBA; } //works
#elif TYPE==9
inline GLenum get_type(int which) { return (which==1)? GL_RGB16UI : GL_RGB;  } //doesn't work (invalid op)
#elif TYPE==10
inline GLenum get_type(int which) { return (which==1)? GL_RGB16I  : GL_RGB;  } //doesn't work (invalid op)
#elif TYPE==11
inline GLenum get_type(int which) { return (which==1)? GL_RGBA16UI: GL_RGBA; } //doesn't work (invalid op)
#elif TYPE==12
inline GLenum get_type(int which) { return (which==1)? GL_RGBA16I : GL_RGBA; } //doesn't work (invalid op)
#elif TYPE==13 //############ 32-BIT TESTS ############
inline GLenum get_type(int which) { return (which==1)? GL_RGB32   : GL_RGB;  } //token doesn't exist
#elif TYPE==14
inline GLenum get_type(int which) { return (which==1)? GL_RGBA32  : GL_RGBA; } //token doesn't exist
#elif TYPE==15
inline GLenum get_type(int which) { return (which==1)? GL_RGB32UI : GL_RGB;  } //doesn't work (invalid op)
#elif TYPE==16
inline GLenum get_type(int which) { return (which==1)? GL_RGB32I  : GL_RGB;  } //doesn't work (invalid op)
#elif TYPE==17
inline GLenum get_type(int which) { return (which==1)? GL_RGBA32UI: GL_RGBA; } //doesn't work (invalid op)
#elif TYPE==18
inline GLenum get_type(int which) { return (which==1)? GL_RGBA32I : GL_RGBA; } //doesn't work (invalid op)
#elif TYPE==19 //############ 32-BIT FLOAT ############
inline GLenum get_type(int which) { return (which==1)? GL_RGB32F  : GL_RGB;  } //works
#elif TYPE==20
inline GLenum get_type(int which) { return (which==1)? GL_RGBA32F : GL_RGBA; } //works
#endif
GLuint texture;

void create_texture(void) {
    printf("  Status before texture setup: "); error_check();

    glGenTextures(1,&texture);
    glBindTexture(GL_TEXTURE_2D,texture);
    printf("  Status after texture created: "); error_check();

    GLenum data_type = GL_UNSIGNED_BYTE;

    int data_length = TEXTURE_SIZE*TEXTURE_SIZE*4; //maximum number of channels, so it will work for everything
    unsigned char* data = new unsigned char[data_length];
    for (int i=0;i<data_length;++i) {
        data[i] = (unsigned char)(0);
    }
    glTexImage2D(GL_TEXTURE_2D,0,get_type(1), TEXTURE_SIZE,TEXTURE_SIZE, 0,get_type(2),data_type,data);
    printf("  Status after glTexImage2D: "); error_check();
    delete [] data;

    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
    printf("  Status after texture filters defined: "); error_check();
}
void keyboard(unsigned char key, int x, int y) {
    switch (key) {
        case 27: //esc
            exit(0);
            break;
    }
}

void draw(void) {
    glClearColor(1.0,0.0,0.0,1.0); //in case the quad doesn't draw
    glClear(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT);

    glViewport(0,0,screen_size[0],screen_size[1]);

    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    gluOrtho2D(0,screen_size[0],0,screen_size[1]);
    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();

    glBegin(GL_QUADS);
    glTexCoord2f(0,0); glVertex2f(0,0);
    glTexCoord2f(2,0); glVertex2f(screen_size[0],0);
    glTexCoord2f(2,2); glVertex2f(screen_size[0],screen_size[1]);
    glTexCoord2f(0,2); glVertex2f(0,screen_size[1]);
    glEnd();

    glutSwapBuffers();
}

int main(int argc, char* argv[]) {
    glutInit(&argc,argv);
    glutInitWindowSize(screen_size[0],screen_size[1]);
    glutInitDisplayMode(GLUT_RGB|GLUT_DOUBLE|GLUT_DEPTH);
    glutCreateWindow("Texture Types - Ian Mallett");

    glEnable(GL_DEPTH_TEST);
    glEnable(GL_TEXTURE_2D);
    printf("Status after OpenGL setup: "); error_check();

    create_texture();
    printf("Status after texture setup: "); error_check();

    glutDisplayFunc(draw);
    glutIdleFunc(draw);
    glutKeyboardFunc(keyboard);

    glutMainLoop();
    return 0;
}
"When checking for errors, I get [EDIT:] 1282 ('invalid operation'). I take this to mean that the implementation is still using the OpenGL 2 version of glTexImage2D, so the call fails."
OpenGL errors are not that complex to understand. GL_INVALID_ENUM and GL_INVALID_VALUE are thrown when you pass a function an enum or value that is unexpected, unsupported, or out of range. If you pass 17 as the internal format to glTexImage2D, you will get GL_INVALID_ENUM, because 17 is not a valid enum for an internal format. If you pass 103,422 as the width to glTexImage2D, you will get GL_INVALID_VALUE, because 103,422 is almost certainly larger than GL_MAX_TEXTURE_SIZE.
GL_INVALID_OPERATION is always used for combinations of state that go wrong. Either there is some context state previously set that doesn't mesh with the function you're calling, or two or more parameters combined are causing a problem. The latter is the case you have here.
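As an aside, glGetError pops only one error flag per call, so when diagnosing this kind of failure it is worth draining the whole error queue rather than checking once. A small sketch, not part of the original code:
//Sketch: report *every* pending OpenGL error. glGetError clears and returns
//one flag per call, and several errors may be queued, so loop until clear.
void print_all_gl_errors(const char* where) {
    GLenum err;
    while ((err=glGetError()) != GL_NO_ERROR) {
        printf("%s: GL error 0x%04X\n", where, err);
    }
}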
If your implementation didn't support integer textures at all, then you would get GL_INVALID_ENUM (because the internal format would not be a valid format). Getting GL_INVALID_OPERATION means that something else is wrong. Namely, this:
glTexImage2D(GL_TEXTURE_2D,0,get_type(1), TEXTURE_SIZE,TEXTURE_SIZE, 0,get_type(2),data_type,data);
Your get_type(2) call returns GL_RGB or GL_RGBA in all cases. However, when using integral image formats, you must use a pixel transfer format with _INTEGER at the end. So your get_type(2) needs to be this:
inline GLenum get_type(int which) { return (which==1)? GL_RGB16UI: GL_RGB_INTEGER; }
And similarly for other integral image formats.
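Applying the same rule across the other test cases, a helper along these lines would pick the matching transfer format. This is an illustrative sketch, not code from the original post:
//Sketch: choose the pixel transfer format that matches a given internal
//format. Integral internal formats require the *_INTEGER transfer formats;
//the non-integral formats keep the normalized GL_RGB/GL_RGBA.
inline GLenum transfer_format_for(GLenum internal_format) {
    switch (internal_format) {
        case GL_RGB8I:   case GL_RGB8UI:
        case GL_RGB16I:  case GL_RGB16UI:
        case GL_RGB32I:  case GL_RGB32UI:  return GL_RGB_INTEGER;
        case GL_RGBA8I:  case GL_RGBA8UI:
        case GL_RGBA16I: case GL_RGBA16UI:
        case GL_RGBA32I: case GL_RGBA32UI: return GL_RGBA_INTEGER;
        case GL_RGBA8:   case GL_RGBA16:
        case GL_RGBA32F:                   return GL_RGBA;
        default:                           return GL_RGB;
    }
}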