OpenGL Texture problems

I’m having trouble getting an SDL_Surface into OpenGL; all I can get to render is a white box. What I’m trying to do is take a sprite sheet, cut it into tiles, and then send the tiles to the video card for later use as OpenGL textures. I don’t have much of a clue what the problem could be, since neither OpenGL nor SDL seems to report an error. Could it be that my formats are wrong?

bool OGLRenderer::InitializeSprite( Sprite *spr)
{
if (spr == NULL)
  return false;
if (spr->loaded)
  return true;
SDL_Surface *surf = 0;
if (!spr->fromData)
{
  surf = IMG_Load( spr->fname.c_str());
  if (surf == NULL)
  {
   printf("Could not load sprite %s.\n%s\n", spr->fname.c_str(), IMG_GetError());
   return false;
  }
}
else
{
  SDL_RWops *ops = SDL_RWFromMem( (void*)spr->data, spr->dataSize);
  if (ops == NULL)
  {
   printf("Could not load sprite located at %p\n%s\n", spr->data, SDL_GetError());
   return false;
  }
  surf = IMG_Load_RW( ops, 1);
  if (surf == NULL)
  {
   printf("Could not load sprite located at %p\n%s\n", spr->data, IMG_GetError());
   return false;
  }
}
// Convert to 32-bit RGBA format
if (surf->format->Rmask != rmask)
{
  SDL_Surface *tmp = SDL_CreateRGBSurface( SDL_SWSURFACE, surf->w, surf->h, surf->format->BitsPerPixel, rmask, gmask, bmask, amask);
  SDL_BlitSurface( surf, NULL, tmp, NULL);
  SDL_FreeSurface( surf);
  surf = tmp;
}
int fw = spr->FrameWidth == -1 ? spr->ImageWidth : spr->FrameWidth;
int fh = spr->FrameHeight == -1 ? spr->ImageHeight : spr->FrameHeight;
for (int i = 0; i < spr->FrameCount; ++i)
{
  SDL_Rect rect;
  rect.x = (i % (spr->ImageWidth/fw)) * fw;
  rect.y = (i / (spr->ImageWidth/fw)) * fh;
  rect.w = fw;  // SDL_Rect w/h are a width and height, not right/bottom edges
  rect.h = fh;
  SDL_Surface *f = SDL_CreateRGBSurface( SDL_SWSURFACE, fw, fh, surf->format->BitsPerPixel, surf->format->Rmask, surf->format->Gmask, surf->format->Bmask, surf->format->Amask);
  //++f->refcount;
  if (SDL_BlitSurface( surf, &rect, f, NULL) == -1)
  {
   SDL_FreeSurface( f);
   SDL_FreeSurface( surf);
   printf( "Could not load sprite.\n%s\n", SDL_GetError());
   return false;
  }
  SDL_SetColorKey( f, SDL_SRCCOLORKEY, spr->ColorKey);
  OGLFrame *frame = new OGLFrame( spr);
  frame->surf = f;
  glGenTextures( 1, &frame->texture);
  glBindTexture( GL_TEXTURE_2D, frame->texture);
  glTexImage2D( GL_TEXTURE_2D, 0, GL_RGBA, f->w, f->h, 0, GL_RGBA, GL_UNSIGNED_BYTE, f->pixels);
  GLenum err = glGetError();
  if (err != GL_NO_ERROR)
  {
   // Read the error once; calling glGetError() again would clear it before printing.
   printf( "There was an error loading the frame.\n%s\n", gluErrorString(err));
  }
  glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
  glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
  //glPixelStorei( GL_UNPACK_ROW_LENGTH, f->pitch);
  create_mask( frame->GetMask(), f, fw, fh, spr->ColorKey);
  spr->Frames.push_back( frame);
}
SDL_FreeSurface( surf);
spr->loaded = true;
return true;
}



void OGLRenderer::draw_sprite( int index, float frame, float x, float y, float xscale, float yscale, float angle, int col)
{
if (index < 0 || index >= (int)spriteList.size())
  return;
Sprite *tmp = spriteList[index];
// Lazy-load before checking the frame index; otherwise Frames is still empty here.
if (!tmp->loaded && !InitializeSprite( tmp))
  return;
if (frame < 0 || frame >= tmp->Frames.size())
  return;
int subImage = clamp_value((int)floor( (float)frame), 0, tmp->FrameCount-1);
unsigned int color = 0xffffffff;
unsigned int red = 255;
unsigned int blue = 255;
unsigned int green = 255;
unsigned int a = (int)floor(255*alpha);
if (col != -1)
{
  red = (col & 0x00FF0000) >> 16;
  green = (col & 0x0000FF00) >> 8;
  blue = (col & 0x000000FF);
  color = MAKE_COLOR( a, red, green, blue);
}
Rect r;
r.left = x - tmp->CenterX;
r.top = y - tmp->CenterY;
r.right = r.left + tmp->FrameWidth;
r.bottom = r.top + tmp->FrameHeight;
OGLFrame *pFrame = reinterpret_cast<OGLFrame*>(tmp->Frames[subImage]);
glBindTexture( GL_TEXTURE_2D, pFrame->texture);
/*glPushMatrix();
glTranslatef( r.left, r.top, 0);
glRotatef( camera_angle, 0, 0, 0);
glScalef( xscale, yscale, 1);*/
glBegin( GL_QUADS);
glTexCoord2f( 0, 0); /*glColor4i( red, green, blue, a);*/ glVertex3f( r.left, r.top, depth);
glTexCoord2f( 0, 1); /*glColor4i( red, green, blue, a);*/ glVertex3f( r.left, r.bottom, depth);
glTexCoord2f( 1, 1); /*glColor4i( red, green, blue, a);*/ glVertex3f( r.right, r.bottom, depth);
glTexCoord2f( 1, 0); /*glColor4i( red, green, blue, a);*/ glVertex3f( r.right, r.top, depth);
glEnd();
//glPopMatrix();
}

Why don’t you use SDL_Texture?
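Something like this is all it would take (just a sketch, and it assumes SDL2 with an SDL_Renderer — I’m calling it renderer here — rather than the SDL 1.2 API your code is using):

// Sketch, SDL2: the renderer uploads the pixels for you, so no mask checks
// or glTexImage2D calls are needed. "renderer" is assumed to be your SDL_Renderer*.
SDL_Surface *surf = IMG_Load( spr->fname.c_str());
if (surf == NULL)
{
  printf( "Could not load sprite %s.\n%s\n", spr->fname.c_str(), IMG_GetError());
  return false;
}
SDL_Texture *tex = SDL_CreateTextureFromSurface( renderer, surf);
SDL_FreeSurface( surf);
if (tex == NULL)
{
  printf( "Could not create texture.\n%s\n", SDL_GetError());
  return false;
}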

There are better ways to convert a surface’s pixel format than creating a new surface and blitting to it.
There’s also no way to tell from your code what the actual pixel format of your surface is, since I can’t see where your masks are defined.

I want to say there’s a better test for pixel format than what you have, too.
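For example, something along these lines is what I have in mind (just a sketch against SDL 1.2 to match the rest of your code, reusing your surf/rmask/gmask/bmask/amask names): SDL_ConvertSurface copies the pixels into the new format for you, including the alpha channel, which a plain blit can mangle, and checking BytesPerPixel plus all four masks is a more complete test than Rmask alone.

// Sketch, SDL 1.2: convert only if the surface isn't already in the wanted 32-bit RGBA layout.
bool needsConvert = surf->format->BytesPerPixel != 4
                 || surf->format->Rmask != rmask || surf->format->Gmask != gmask
                 || surf->format->Bmask != bmask || surf->format->Amask != amask;
if (needsConvert)
{
  // Throwaway 1x1 surface just to get a fully filled-in SDL_PixelFormat to convert to.
  SDL_Surface *proto = SDL_CreateRGBSurface( SDL_SWSURFACE, 1, 1, 32, rmask, gmask, bmask, amask);
  SDL_Surface *converted = SDL_ConvertSurface( surf, proto->format, SDL_SWSURFACE);
  SDL_FreeSurface( proto);
  SDL_FreeSurface( surf);
  surf = converted;
}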

Sorry I can’t be more use. I don’t know OpenGL, since I let SDL handle all my drawing.

I found the problem using a program called gDEBugger. The problem was that I had double buffering enabled. From my observations, SDL created a total of four contexts, with my textures existing in the second one. It then created two more, destroyed the first two along with the third it had just created, and left only the fourth context active. Since my textures only existed in the second context, they were destroyed (or leaked) along with it. To fix the problem, I simply initialized without double buffering, but in the event that I want to enable double buffering, how do I handle this problem effectively?
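For reference, the workaround is roughly a couple of lines in my init code before the window is created (width and height here just stand in for whatever your window size is; the other attribute values are only typical choices). The GL attributes have to be set before SDL_SetVideoMode creates the context:

// Sketch of the workaround: set GL attributes before SDL_SetVideoMode,
// with double buffering turned off (the 0 below).
SDL_GL_SetAttribute( SDL_GL_DOUBLEBUFFER, 0);
SDL_GL_SetAttribute( SDL_GL_RED_SIZE, 8);
SDL_GL_SetAttribute( SDL_GL_GREEN_SIZE, 8);
SDL_GL_SetAttribute( SDL_GL_BLUE_SIZE, 8);
SDL_GL_SetAttribute( SDL_GL_ALPHA_SIZE, 8);
SDL_Surface *screen = SDL_SetVideoMode( width, height, 32, SDL_OPENGL);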