SDL surface to GL texture question

Hi guys,

In working on my new resource manager, I’ve changed from having a
single model and single texture (which was working great, and adapted
from the testgl.c code) to multiple models and multiple textures… in
theory at least. Currently I’m only loading one model, which
references 2 textures. I know the two textures are being loaded, the
SDL surfaces are correct - I can save them back as BMP files right
before I upload the surface->pixels to opengl. I also know the texture
id’s are being generated and bound correctly when rendering, as I’m
spitting out the texid for each chunk as it’s bound (in cout). Problem
is, the model is entirely white. I’m not sure where else to look, as I
adapted the code from SDL_GL_LoadTexture in testgl.c, and it worked
great when it was only trying to do a single global texture.

Here’s the function, maybe someone can spot my error…

GLuint cResourceMgr::SDL_GL_LoadTexture(char *filename)
{
GLuint texid;
int w, h;
SDL_Surface *src = NULL;
SDL_Surface *dest = NULL;
SDL_Rect area;
Uint32 saved_flags;
Uint8 saved_alpha;
GLfloat texcoord[4];

/* Load the image (could use SDL_image library here) */
src = IMG_Load(filename);
if (!src)
{
cerr << “SDL_GL_LOADTEXTURE: Error loading image file “” << filename
<< “”” << endl;
}
else
{
cout << “SDL_GL_LOADTEXTURE: Loaded image file “” << filename << “”
” << src->w << “x” << src->h << endl;
}

if ((!isPowerofTwo(src->w)) || (!isPowerofTwo(src->h))) // if w or h
are not already power of two, expand them
{
cout << “SDL_GL_LOADTEXTURE: Setting Powers of Two” << endl;
w = nextPowerofTwo(src->w);
h = nextPowerofTwo(src->h);
texcoord[0] = 0.0f; /* Min X /
texcoord[1] = 0.0f; /
Min Y /
texcoord[2] = (GLfloat)src->w / w; /
Max X /
texcoord[3] = (GLfloat)src->h / h; /
Max Y */
}
else // if w and h are both already powers of two, set coords to 0 and 1
{
texcoord[0] = texcoord[1] = 0.0f;
texcoord[2] = texcoord[3] = 1.0f;
}
cout << "SDL_GL_LoadTexture: Done converting coords: " << texcoord[0]
<< " " << texcoord[1] << " " << texcoord[2] << " " << texcoord[3] <<
endl;

dest = SDL_CreateRGBSurface(
SDL_SWSURFACE,
w, h,
32,
#if SDL_BYTEORDER == SDL_LIL_ENDIAN /* OpenGL RGBA masks */
0x000000FF,
0x0000FF00,
0x00FF0000,
0xFF000000
#else
0xFF000000,
0x00FF0000,
0x0000FF00,
0x000000FF
#endif
);
if (!dest)
{
cerr << “SDL_GL_LOADTEXTURE: error creating dest for file “” <<
filename << “”” << endl;
return 0;
}
else
{
cout << “SDL_GL_LOADTEXTURE: created dest for “” << filename << “”
” << dest->w << “x” << dest->h << endl;
}

/* Save the alpha blending attributes */
saved_flags = src->flags&(SDL_SRCALPHA|SDL_RLEACCELOK);
saved_alpha = src->format->alpha;
if ( (saved_flags & SDL_SRCALPHA) == SDL_SRCALPHA ) {
cout << “Calling SDL_SetAlpha” << endl;
SDL_SetAlpha(src, 0, 0);
}

/* Copy the surface into the GL texture image */
area.x = 0;
area.y = 0;
area.w = dest->w;
area.h = dest->h;

cout << "Area set to " << area.w << “x” << area.h << endl;
SDL_BlitSurface(src, &area, dest, &area);
SDL_SaveBMP(src, “src.bmp”);

/* Restore the alpha blending attributes */
if ( (saved_flags & SDL_SRCALPHA) == SDL_SRCALPHA ) {
SDL_SetAlpha(dest, saved_flags, saved_alpha);
}
cout << “SDL_GL_LOADTEXTURE: done setalpha” << endl;
SDL_SaveBMP(dest, “dest.bmp”);

/* Create an OpenGL texture for the image /
glGenTextures (1, &texid);
cout << "SDL_GL_LoadTexture: generated texid " << texid << endl;
glBindTexture (GL_TEXTURE_2D, texid); // bind generated texture id
//cout << "SDL_GL_LoadTexture: Bound texture to id: " << texid << endl;
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexImage2D(GL_TEXTURE_2D,
0,
GL_RGBA,
w, h,
0,
GL_RGBA,
GL_UNSIGNED_BYTE,
dest->pixels);
SDL_FreeSurface(dest); /
No longer needed */

return texid;
};

Thanks,

-Justin