SDL_image and OpenGL in 16 bits mode?

Hi group,

I'm trying to load an image stored in 32 bit color depth, in a 16 bits
desktop (windows and linux), and load it to OpenGL, but I only get a white
image.

I wanted to use the R5G5B5A1 format, to have transparency and keep high
color quality, but I'm quite lost with all the formats and options.

I used the code found in this list to load a SDL_Surface and converting to a
openGL texture, but it only appears to work in 32 bits.

Code:

Init Video:
    SDL_GL_SetAttribute(SDL_GL_RED_SIZE,   5);
    SDL_GL_SetAttribute(SDL_GL_GREEN_SIZE, 5);
    SDL_GL_SetAttribute(SDL_GL_BLUE_SIZE,  5);
    SDL_GL_SetAttribute(SDL_GL_ALPHA_SIZE, 1);
    SDL_GL_SetAttribute(SDL_GL_DEPTH_SIZE, 16);

    SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1);

    // create a new window
    m_pScreen = SDL_SetVideoMode(Config.ResX, Config.ResY, 16, SDL_OPENGL);

Loading texture:

    SDL_Surface* temp = IMG_Load(basefilename.c_str());
    if (temp!=NULL)
    {
        SDL_Surface* temp2;
        temp2 = SDL_DisplayFormatAlpha(temp);

        m_tID = SDL_GL_LoadTexture(temp2, m_aTextureCoords);

        SDL_FreeSurface(temp);
        SDL_FreeSurface(temp2);
    }

Converting SDL_Surface to texture:

GLuint COpenGLUtils::SDL_GL_LoadTexture(SDL_Surface *surface, GLfloat
*texcoord)
{
??? GLuint texture;
??? int w, h;
??? SDL_Surface *image;
??? SDL_Rect area;
??? Uint32 saved_flags;
??? Uint8? saved_alpha;

??? // Use the surface width and height expanded to powers of 2
???
??? w = power_of_two(surface->w);
??? h = power_of_two(surface->h);
??? texcoord[0] = 0.0f;??? // Min X
??? texcoord[1] = 0.0f;??? // Min Y
??? texcoord[2] = (GLfloat)surface->w / w;?? // Max X
??? texcoord[3] = (GLfloat)surface->h / h;?? // Max Y

??? image = SDL_CreateRGBSurface(
??? SDL_SWSURFACE,
??? w, h,
??? 16,
#if SDL_BYTEORDER == SDL_LIL_ENDIAN // OpenGL RGBA masks
??? 0x0000001F,
??? 0x000003E0,
??? 0x00007C00,
??? 0x00008000
#else
??? 0xFF000000,
??? 0x00FF0000,
??? 0x0000FF00,
??? 0x000000FF
#endif
??? ??? );
??? if ( image == NULL ) {
??? return 0;
??? }

??? // Save the alpha blending attributes
??? saved_flags = surface->flags;//&(SDL_SRCALPHA|SDL_RLEACCELOK);
??? saved_alpha = surface->format->alpha;
???
??? if ( (saved_flags & SDL_SRCALPHA) == SDL_SRCALPHA )
??? {
??? SDL_SetAlpha(surface, 0, 0);
??? }
???

??? // Copy the surface into the GL texture image
??? area.x = 0;
??? area.y = 0;
??? area.w = surface->w;
??? area.h = surface->h;
??? SDL_BlitSurface(surface, &area, image, &area);

??? // Restore the alpha blending attributes
??? if ( (saved_flags & SDL_SRCALPHA) == SDL_SRCALPHA )
??? {
??? SDL_SetAlpha(surface, saved_flags, saved_alpha);
??? }

??? //SetKeyColor(surface);
???

??? // Create an OpenGL texture for the image
??? glGenTextures(1, &texture);
??? glBindTexture(GL_TEXTURE_2D, texture);
??? glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
??? glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);

??? glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB5_A1, w, h, 0, GL_RGB5_A1,
GL_UNSIGNED_BYTE, image->pixels);
???
??? SDL_FreeSurface(image); /* No longer needed */

??? return texture;
}

Hi,

Try calling glGetError after each gl call. One of them will probably
return an error code. I alluded to this yesterday, many people totally
overlook OpenGL error checking. Even something simple, like:

assert(glGetError() == GL_NO_ERROR);

is better than nothing.

Your “format” parameter to glTexImage2D looks suspect, check the docs here:
http://www.opengl.org/sdk/docs/man/xhtml/glTexImage2D.xml

Hope this helps,
Peter

2008/10/10 Arnau Font :> Hi group,

I’m trying to load an image stored in 32 bit color depth, in a 16 bits
desktop (windows and linux), and load it to OpenGL, but I only get a white
image.

I wanted to use the R5G5B5A1 format, to have transparency and keep high
color quality, but I’m quite lost with all the formats and options.

I used the code found in this list to load a SDL_Surface and converting to a
openGL texture, but it only appears to work in 32 bits.

Code:

Init Video:
SDL_GL_SetAttribute(SDL_GL_RED_SIZE, 5);
SDL_GL_SetAttribute(SDL_GL_GREEN_SIZE, 5);
SDL_GL_SetAttribute(SDL_GL_BLUE_SIZE, 5);
SDL_GL_SetAttribute(SDL_GL_ALPHA_SIZE, 1);
SDL_GL_SetAttribute(SDL_GL_DEPTH_SIZE, 16);

SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1);

// create a new window
m_pScreen = SDL_SetVideoMode(Config.ResX, Config.ResY, 16, SDL_OPENGL);

Loading texture:

SDL_Surface* temp = IMG_Load(basefilename.c_str());
if (temp!=NULL)
{
    SDL_Surface* temp2;
    temp2 = SDL_DisplayFormatAlpha(temp);

    m_tID = SDL_GL_LoadTexture(temp2, m_aTextureCoords);

    SDL_FreeSurface(temp);
    SDL_FreeSurface(temp2);
  }

Converting SDL_Surface to texture:

GLuint COpenGLUtils::SDL_GL_LoadTexture(SDL_Surface *surface, GLfloat
*texcoord)
{
GLuint texture;
int w, h;
SDL_Surface *image;
SDL_Rect area;
Uint32 saved_flags;
Uint8 saved_alpha;

  // Use the surface width and height expanded to powers of 2

  w = power_of_two(surface->w);
  h = power_of_two(surface->h);
  texcoord[0] = 0.0f;                // Min X
  texcoord[1] = 0.0f;                // Min Y
  texcoord[2] = (GLfloat)surface->w / w;   // Max X
  texcoord[3] = (GLfloat)surface->h / h;   // Max Y

  image = SDL_CreateRGBSurface(
              SDL_SWSURFACE,
              w, h,
              16,

#if SDL_BYTEORDER == SDL_LIL_ENDIAN // OpenGL RGBA masks
0x0000001F,
0x000003E0,
0x00007C00,
0x00008000
#else
0xFF000000,
0x00FF0000,
0x0000FF00,
0x000000FF
#endif
);
if ( image == NULL ) {
return 0;
}

  // Save the alpha blending attributes
  saved_flags = surface->flags;//&(SDL_SRCALPHA|SDL_RLEACCELOK);
  saved_alpha = surface->format->alpha;

  if ( (saved_flags & SDL_SRCALPHA) == SDL_SRCALPHA )
  {
        SDL_SetAlpha(surface, 0, 0);
  }


  // Copy the surface into the GL texture image
  area.x = 0;
  area.y = 0;
  area.w = surface->w;
  area.h = surface->h;
  SDL_BlitSurface(surface, &area, image, &area);

  // Restore the alpha blending attributes
  if ( (saved_flags & SDL_SRCALPHA) == SDL_SRCALPHA )
  {
        SDL_SetAlpha(surface, saved_flags, saved_alpha);
  }

  //SetKeyColor(surface);


  // Create an OpenGL texture for the image
  glGenTextures(1, &texture);
  glBindTexture(GL_TEXTURE_2D, texture);
  glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
  glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);

  glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB5_A1, w, h, 0, GL_RGB5_A1,

GL_UNSIGNED_BYTE, image->pixels);

  SDL_FreeSurface(image); /* No longer needed */

  return texture;

}


SDL mailing list
SDL at lists.libsdl.org
http://lists.libsdl.org/listinfo.cgi/sdl-libsdl.org

Thanks Peter,

I’ve changed the glTexImage2D call to:
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, w, h, 0, GL_RGBA,
GL_UNSIGNED_SHORT_5_5_5_1, image->pixels);
Which I understand is that the image and the texture have both red, green,
blue and alpha, and the pixel format is 5551.

But, if I have my desktop in 16 bits, I get a GL_INVALID_ENUM (Running in 32
bits, I can render the texture but the colors are all incorrect (the alpha
is correct)).

What I also don’t understand (I’m pretty new at this!) is why, when loading
32 bit textures, the pixel format is GL_UNSIGNED_BYTE (8 bits, I guess), and
not a GL_UNSIGNED_INT_8_8_8_8, which is a 32 bit integer.

Thanks a lot!

-----Missatge original-----
nom de Peter Mackay
Enviat: divendres, 10 / octubre / 2008 12:12
Per a: A list for developers using the SDL library. (includes SDL-announce)
Tema: Re: [SDL] SDL_image and OpenGL in 16 bits mode?

Hi,

Try calling glGetError after each gl call. One of them will probably
return an error code. I alluded to this yesterday, many people totally
overlook OpenGL error checking. Even something simple, like:

assert(glGetError() == GL_NO_ERROR);

is better than nothing.

Your “format” parameter to glTexImage2D looks suspect, check the docs here:
http://www.opengl.org/sdk/docs/man/xhtml/glTexImage2D.xml

Hope this helps,
Peter

2008/10/10 Arnau Font <@Arnau_Font>:

Hi group,

I’m trying to load an image stored in 32 bit color depth, in a 16 bits
desktop (windows and linux), and load it to OpenGL, but I only get a white
image.

I wanted to use the R5G5B5A1 format, to have transparency and keep high
color quality, but I’m quite lost with all the formats and options.

I used the code found in this list to load a SDL_Surface and converting to
a
openGL texture, but it only appears to work in 32 bits.

Code:

Init Video:
SDL_GL_SetAttribute(SDL_GL_RED_SIZE, 5);
SDL_GL_SetAttribute(SDL_GL_GREEN_SIZE, 5);
SDL_GL_SetAttribute(SDL_GL_BLUE_SIZE, 5);
SDL_GL_SetAttribute(SDL_GL_ALPHA_SIZE, 1);
SDL_GL_SetAttribute(SDL_GL_DEPTH_SIZE, 16);

SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1);

// create a new window
m_pScreen = SDL_SetVideoMode(Config.ResX, Config.ResY, 16,

SDL_OPENGL);
De: sdl-bounces at lists.libsdl.org [mailto:sdl-bounces at lists.libsdl.org] En

Loading texture:

SDL_Surface* temp = IMG_Load(basefilename.c_str());
if (temp!=NULL)
{
    SDL_Surface* temp2;
    temp2 = SDL_DisplayFormatAlpha(temp);

    m_tID = SDL_GL_LoadTexture(temp2, m_aTextureCoords);

    SDL_FreeSurface(temp);
    SDL_FreeSurface(temp2);
  }

Converting SDL_Surface to texture:

GLuint COpenGLUtils::SDL_GL_LoadTexture(SDL_Surface *surface, GLfloat
*texcoord)
{
GLuint texture;
int w, h;
SDL_Surface *image;
SDL_Rect area;
Uint32 saved_flags;
Uint8 saved_alpha;

  // Use the surface width and height expanded to powers of 2

  w = power_of_two(surface->w);
  h = power_of_two(surface->h);
  texcoord[0] = 0.0f;                // Min X
  texcoord[1] = 0.0f;                // Min Y
  texcoord[2] = (GLfloat)surface->w / w;   // Max X
  texcoord[3] = (GLfloat)surface->h / h;   // Max Y

  image = SDL_CreateRGBSurface(
              SDL_SWSURFACE,
              w, h,
              16,

#if SDL_BYTEORDER == SDL_LIL_ENDIAN // OpenGL RGBA masks
0x0000001F,
0x000003E0,
0x00007C00,
0x00008000
#else
0xFF000000,
0x00FF0000,
0x0000FF00,
0x000000FF
#endif
);
if ( image == NULL ) {
return 0;
}

  // Save the alpha blending attributes
  saved_flags = surface->flags;//&(SDL_SRCALPHA|SDL_RLEACCELOK);
  saved_alpha = surface->format->alpha;

  if ( (saved_flags & SDL_SRCALPHA) == SDL_SRCALPHA )
  {
        SDL_SetAlpha(surface, 0, 0);
  }


  // Copy the surface into the GL texture image
  area.x = 0;
  area.y = 0;
  area.w = surface->w;
  area.h = surface->h;
  SDL_BlitSurface(surface, &area, image, &area);

  // Restore the alpha blending attributes
  if ( (saved_flags & SDL_SRCALPHA) == SDL_SRCALPHA )
  {
        SDL_SetAlpha(surface, saved_flags, saved_alpha);
  }

  //SetKeyColor(surface);


  // Create an OpenGL texture for the image
  glGenTextures(1, &texture);
  glBindTexture(GL_TEXTURE_2D, texture);
  glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
  glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);

  glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB5_A1, w, h, 0, GL_RGB5_A1,

GL_UNSIGNED_BYTE, image->pixels);

  SDL_FreeSurface(image); /* No longer needed */

  return texture;

}


SDL mailing list
SDL at lists.libsdl.org
http://lists.libsdl.org/listinfo.cgi/sdl-libsdl.org


SDL mailing list
SDL at lists.libsdl.org
http://lists.libsdl.org/listinfo.cgi/sdl-libsdl.org

Another thing, if I don’t call the SDL_GL_SetAttribute(SDL_GL_ALPHA_SIZE,
1), setting the colors to RGB565, the glTexImage2D works fine, but when I
activate the alpha it doesn’t work.

Any ideas?!?

-----Missatge original-----
nom de Arnau Font
Enviat: divendres, 10 / octubre / 2008 13:40
Per a: 'A list for developers using the SDL library. (includes
SDL-announce)'
Tema: Re: [SDL] SDL_image and OpenGL in 16 bits mode?

Thanks Peter,

I’ve changed the glTexImage2D call to:
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, w, h, 0, GL_RGBA,
GL_UNSIGNED_SHORT_5_5_5_1, image->pixels);
Which I understand is that the image and the texture have both red, green,
blue and alpha, and the pixel format is 5551.

But, if I have my desktop in 16 bits, I get a GL_INVALID_ENUM (Running in 32
bits, I can render the texture but the colors are all incorrect (the alpha
is correct)).

What I also don’t understand (I’m pretty new at this!) is why, when loading
32 bit textures, the pixel format is GL_UNSIGNED_BYTE (8 bits, I guess), and
not a GL_UNSIGNED_INT_8_8_8_8, which is a 32 bit integer.

Thanks a lot!

-----Missatge original-----
nom de Peter Mackay
Enviat: divendres, 10 / octubre / 2008 12:12
Per a: A list for developers using the SDL library. (includes SDL-announce)
Tema: Re: [SDL] SDL_image and OpenGL in 16 bits mode?

Hi,

Try calling glGetError after each gl call. One of them will probably
return an error code. I alluded to this yesterday, many people totally
overlook OpenGL error checking. Even something simple, like:

assert(glGetError() == GL_NO_ERROR);

is better than nothing.

Your “format” parameter to glTexImage2D looks suspect, check the docs here:
http://www.opengl.org/sdk/docs/man/xhtml/glTexImage2D.xml

Hope this helps,
Peter

2008/10/10 Arnau Font <@Arnau_Font>:

Hi group,

I’m trying to load an image stored in 32 bit color depth, in a 16 bits
desktop (windows and linux), and load it to OpenGL, but I only get a white
image.

I wanted to use the R5G5B5A1 format, to have transparency and keep high
color quality, but I’m quite lost with all the formats and options.

I used the code found in this list to load a SDL_Surface and converting to
a
openGL texture, but it only appears to work in 32 bits.

Code:

Init Video:
SDL_GL_SetAttribute(SDL_GL_RED_SIZE, 5);
SDL_GL_SetAttribute(SDL_GL_GREEN_SIZE, 5);
SDL_GL_SetAttribute(SDL_GL_BLUE_SIZE, 5);
SDL_GL_SetAttribute(SDL_GL_ALPHA_SIZE, 1);
SDL_GL_SetAttribute(SDL_GL_DEPTH_SIZE, 16);

SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1);

// create a new window
m_pScreen = SDL_SetVideoMode(Config.ResX, Config.ResY, 16,

SDL_OPENGL);
De: sdl-bounces at lists.libsdl.org [mailto:sdl-bounces at lists.libsdl.org] En

Loading texture:

SDL_Surface* temp = IMG_Load(basefilename.c_str());
if (temp!=NULL)
{
    SDL_Surface* temp2;
    temp2 = SDL_DisplayFormatAlpha(temp);

    m_tID = SDL_GL_LoadTexture(temp2, m_aTextureCoords);

    SDL_FreeSurface(temp);
    SDL_FreeSurface(temp2);
  }

Converting SDL_Surface to texture:

GLuint COpenGLUtils::SDL_GL_LoadTexture(SDL_Surface *surface, GLfloat
*texcoord)
{
GLuint texture;
int w, h;
SDL_Surface *image;
SDL_Rect area;
Uint32 saved_flags;
Uint8 saved_alpha;

  // Use the surface width and height expanded to powers of 2

  w = power_of_two(surface->w);
  h = power_of_two(surface->h);
  texcoord[0] = 0.0f;                // Min X
  texcoord[1] = 0.0f;                // Min Y
  texcoord[2] = (GLfloat)surface->w / w;   // Max X
  texcoord[3] = (GLfloat)surface->h / h;   // Max Y

  image = SDL_CreateRGBSurface(
              SDL_SWSURFACE,
              w, h,
              16,

#if SDL_BYTEORDER == SDL_LIL_ENDIAN // OpenGL RGBA masks
0x0000001F,
0x000003E0,
0x00007C00,
0x00008000
#else
0xFF000000,
0x00FF0000,
0x0000FF00,
0x000000FF
#endif
);
if ( image == NULL ) {
return 0;
}

  // Save the alpha blending attributes
  saved_flags = surface->flags;//&(SDL_SRCALPHA|SDL_RLEACCELOK);
  saved_alpha = surface->format->alpha;

  if ( (saved_flags & SDL_SRCALPHA) == SDL_SRCALPHA )
  {
        SDL_SetAlpha(surface, 0, 0);
  }


  // Copy the surface into the GL texture image
  area.x = 0;
  area.y = 0;
  area.w = surface->w;
  area.h = surface->h;
  SDL_BlitSurface(surface, &area, image, &area);

  // Restore the alpha blending attributes
  if ( (saved_flags & SDL_SRCALPHA) == SDL_SRCALPHA )
  {
        SDL_SetAlpha(surface, saved_flags, saved_alpha);
  }

  //SetKeyColor(surface);


  // Create an OpenGL texture for the image
  glGenTextures(1, &texture);
  glBindTexture(GL_TEXTURE_2D, texture);
  glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
  glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);

  glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB5_A1, w, h, 0, GL_RGB5_A1,

GL_UNSIGNED_BYTE, image->pixels);

  SDL_FreeSurface(image); /* No longer needed */

  return texture;

}


SDL mailing list
SDL at lists.libsdl.org
http://lists.libsdl.org/listinfo.cgi/sdl-libsdl.org


SDL mailing list
SDL at lists.libsdl.org
http://lists.libsdl.org/listinfo.cgi/sdl-libsdl.org


SDL mailing list
SDL at lists.libsdl.org
http://lists.libsdl.org/listinfo.cgi/sdl-libsdl.org