In the GL texture a red color was replaced by blue

When I tried to convert an SDL Surface to a GL texture, I got a texture where the red colour was replaced by blue. The problem is not in the GL texture, because the colors are correct if I use Native::loadTexture() in the following example. And the problem isn’t in SDL_image, because if I load an SDL Surface with SDL_image and use SDL to render it, the colors are correct.

Where is the problem? Is it a bug in SDL Surface? Or maybe it’s a feature? :slight_smile:

Code:
namespace SDL {
void loadTexture() {
SDL_Surface *texSurf = IMG_Load("texture.png");
if(!texSurf)
error("Unable to load texture");

    GLint textureColors = texSurf->format->BytesPerPixel;
    GLenum textureFormat = textureColors == 4 ? GL_RGBA : GL_RGB;
    glTexImage2D(GL_TEXTURE_2D, 0, textureFormat, texSurf->w, texSurf->h, 0, textureFormat, GL_UNSIGNED_BYTE, texSurf->pixels);
    
    SDL_FreeSurface(texSurf);
}

};

namespace Native {
void loadTexture() {
NSString *path = [[NSBundle mainBundle] pathForResource:@"texture" ofType:@"png"];
NSData *texData = [[NSData alloc] initWithContentsOfFile:path];
UIImage *image = [[UIImage alloc] initWithData:texData];
if (image == nil)
error("Unable to load texture");

    GLuint width = CGImageGetWidth(image.CGImage);
    GLuint height = CGImageGetHeight(image.CGImage);
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    void *imageData = malloc(height * width * 4);
    CGContextRef context = CGBitmapContextCreate(
        imageData, 
        width, 
        height, 
        8, 4 * width, 
        colorSpace, 
        kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big
    );
    CGColorSpaceRelease(colorSpace);
    CGContextClearRect(context, CGRectMake( 0, 0, width, height));
    CGContextTranslateCTM(context, 0, height - height);
    CGContextDrawImage(context, CGRectMake(0, 0, width, height), image.CGImage);

    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, imageData);

    CGContextRelease(context);
    free(imageData);
    [image release];
    [texData release];    
}

};

int main(int argc, char *argv[])
{
//// Init SDL
if(SDL_Init(SDL_INIT_VIDEO) != 0)
error("Unable to initialize SDL");
atexit(SDL_Quit);

//// Enable OpenGL double buffering
SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1);

//// Set the color depth (16-bit 565)
SDL_GL_SetAttribute(SDL_GL_RED_SIZE, 5);
SDL_GL_SetAttribute(SDL_GL_GREEN_SIZE, 6);
SDL_GL_SetAttribute(SDL_GL_BLUE_SIZE, 5);

//// Set video mode
if(!SDL_SetVideoMode(Width, Height, 16, SDL_OPENGL))
    error("Unable to set video mode");       

//// Generate a texture object
GLuint texId;
glGenTextures(1, &texId);

//// Bind the texture object
glBindTexture(GL_TEXTURE_2D, texId);

//// Load Texture
SDL::loadTexture();
// Native::loadTexture();

//// Set the texture's stretching properties
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);    

using namespace OpenGL::ES1::D2;
Vertex vertices[] = {
    Vertex(-1.0f,  1.0f),
    Vertex( 1.0f,  1.0f),
    Vertex( 1.0f, -1.0f),
    Vertex(-1.0f, -1.0f)        
};
Texture::Coords textureCoords(
    Texture::Coord(0.0f, 0.0f),
    Texture::Coord(1.0f, 0.0f),
    Texture::Coord(1.0f, 1.0f),
    Texture::Coord(0.0f, 1.0f)
);

//// Setting up an orthogonal viewport
glViewport(0, 0, Width, Height);

glLoadIdentity();
glOrthof(-2.0f, 2.0f, -2.0/(Width/Height), 2.0f/(Width/Height), -2.0f, 2.0f);

//// Display
glEnable(GL_TEXTURE_2D);
glEnableClientState(GL_VERTEX_ARRAY);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);

glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glBindTexture(GL_TEXTURE_2D, texId);    
glVertexPointer(2, GL_FLOAT, 0, vertices);
glTexCoordPointer(2, GL_FLOAT, 0, &textureCoords);
glDrawArrays(GL_TRIANGLE_FAN, 0, 4);

glDisableClientState(GL_TEXTURE_COORD_ARRAY);
glDisableClientState(GL_VERTEX_ARRAY);
SDL_GL_SwapBuffers();
SDL_Delay(5000);

//// Free OpenGL texture
glDeleteTextures(1, &texId);
return 0;

}

Code:
int main(int argc, char **argv) {
atexit(SDL_Quit);
if(SDL_Init(SDL_INIT_VIDEO) != 0)
error(“Unable to initialize SDL”);

SDL_Surface *screen = SDL_SetVideoMode(Width, Height, 16, 0 /* SDL_FULLSCREEN */);
if(!screen)
    error("Unable to set video mode");

SDL_Surface *image = IMG_Load("bg.png");
if(!image)
	error("Unable to load image");

SDL_Rect src, dest;
dest.x = dest.y = src.x = src.y = 0;
dest.w = src.w = image->w;
dest.h = src.h = image->h;
SDL_BlitSurface(image, &src, screen, &dest);
SDL_UpdateRect(screen, 0, 0, 0, 0);
SDL_Delay(3000);
SDL_FreeSurface(image);

return 0;

}

I’m a bit too busy to give a real answer, but here’s the quick tip:
Your color masks on the surface are wrong, they default to the equivalent of GL_BGRA and GL_BGR, you can either use these GL types to make it work right now, or you can read up on SDL color masks and
understand it fully :)

On 04/05/2011 06:35 AM, manifest wrote:

When I tried to convert SDL Surface to a GL texture, I got texture where red colour was replaced by blue. The problem is not in the GL texture, because the colors are correctly if I use
Native::loadTexture() in the following example. And problem isn’t in SDL_image, because If I load a SDL Surface with a SDL_image and use a SDL for render it, the colors are correct.

Where is the problem? It’s a bug in SDL Surface? Or maybe it’s a feature? Smile

Code:

namespace SDL {
void loadTexture() {
SDL_Surface *texSurf = IMG_Load(“texture.png”);
if(!texSurf)
error(“Unable to load texture”);

GLint textureColors = texSurf->format->BytesPerPixel;
GLenum textureFormat = textureColors == 4 ? GL_RGBA : GL_RGB;
glTexImage2D(GL_TEXTURE_2D, 0, textureFormat, texSurf->w, texSurf->h, 0, textureFormat, GL_UNSIGNED_BYTE, texSurf->pixels);

SDL_FreeSurface(texSurf);
}
};

namespace Native {
void loadTexture() {
NSString *path = [[NSBundle mainBundle] pathForResource:@“texture” ofType:@“png”];
NSData *texData = [[NSData alloc] initWithContentsOfFile:path];
UIImage *image = [[UIImage alloc] initWithData:texData];
if (image == nil)
error(“Unable to load texture”);

GLuint width = CGImageGetWidth(image.CGImage);
GLuint height = CGImageGetHeight(image.CGImage);
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
void *imageData = malloc(height * width * 4);
CGContextRef context = CGBitmapContextCreate(
imageData,
width,
height,
8, 4 * width,
colorSpace,
kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big
);
CGColorSpaceRelease(colorSpace);
CGContextClearRect(context, CGRectMake( 0, 0, width, height));
CGContextTranslateCTM(context, 0, height - height);
CGContextDrawImage(context, CGRectMake(0, 0, width, height), image.CGImage);

glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, imageData);

CGContextRelease(context);
free(imageData);
[image release];
[texData release];
}
};

int main(int argc, char *argv[])
{
//// Init SDL
if(SDL_Init(SDL_INIT_VIDEO) != 0)
error(“Unable to initialize SDL”);
atexit(SDL_Quit);

//// Enable OpenGL double buffering
SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1);

//// Set the color depth (16-bit 565)
SDL_GL_SetAttribute(SDL_GL_RED_SIZE, 5);
SDL_GL_SetAttribute(SDL_GL_GREEN_SIZE, 6);
SDL_GL_SetAttribute(SDL_GL_BLUE_SIZE, 5);

//// Set video mode
if(!SDL_SetVideoMode(Width, Height, 16, SDL_OPENGL))
error(“Unable to set video mode”);

//// Generate a texture object
GLuint texId;
glGenTextures(1, &texId);

//// Bind the texture object
glBindTexture(GL_TEXTURE_2D, texId);

//// Load Texture
SDL::loadTexture();
// Native::loadTexture();

//// Set the texture’s stretching properties
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);

using namespace OpenGL::ES1::D2;
Vertex vertices[] = {
Vertex(-1.0f, 1.0f),
Vertex( 1.0f, 1.0f),
Vertex( 1.0f, -1.0f),
Vertex(-1.0f, -1.0f)
};
Texture::Coords textureCoords(
Texture::Coord(0.0f, 0.0f),
Texture::Coord(1.0f, 0.0f),
Texture::Coord(1.0f, 1.0f),
Texture::Coord(0.0f, 1.0f)
);

//// Setting up an orthogonal viewport
glViewport(0, 0, Width, Height);

glLoadIdentity();
glOrthof(-2.0f, 2.0f, -2.0/(Width/Height), 2.0f/(Width/Height), -2.0f, 2.0f);

//// Display
glEnable(GL_TEXTURE_2D);
glEnableClientState(GL_VERTEX_ARRAY);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);

glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glBindTexture(GL_TEXTURE_2D, texId);
glVertexPointer(2, GL_FLOAT, 0, vertices);
glTexCoordPointer(2, GL_FLOAT, 0, &textureCoords);
glDrawArrays(GL_TRIANGLE_FAN, 0, 4);

glDisableClientState(GL_TEXTURE_COORD_ARRAY);
glDisableClientState(GL_VERTEX_ARRAY);
SDL_GL_SwapBuffers();
SDL_Delay(5000);

//// Free OpenGL texture
glDeleteTextures(1, &texId);
return 0;
}

Code:

int main(int argc, char **argv) {
atexit(SDL_Quit);
if(SDL_Init(SDL_INIT_VIDEO) != 0)
error(“Unable to initialize SDL”);

SDL_Surface *screen = SDL_SetVideoMode(Width, Height, 16, 0 /* SDL_FULLSCREEN */);
if(!screen)
error(“Unable to set video mode”);

SDL_Surface *image = IMG_Load(“bg.png”);
if(!image)
error(“Unable to load image”);

SDL_Rect src, dest;
dest.x = dest.y = src.x = src.y = 0;
dest.w = src.w = image->w;
dest.h = src.h = image->h;
SDL_BlitSurface(image, &src, screen, &dest);
SDL_UpdateRect(screen, 0, 0, 0, 0);
SDL_Delay(3000);
SDL_FreeSurface(image);

return 0;
}


SDL mailing list
SDL at lists.libsdl.org
http://lists.libsdl.org/listinfo.cgi/sdl-libsdl.org


LordHavoc
Author of DarkPlaces Quake1 engine - http://icculus.org/twilight/darkplaces
Co-designer of Nexuiz - http://alientrap.org/nexuiz
"War does not prove who is right, it proves who is left." - Unknown
"Any sufficiently advanced technology is indistinguishable from a rigged demo." - James Klass
"A game is a series of interesting choices." - Sid Meier

Thank you, Forest Hale for a very fast answer :), but GL_BGRA and GL_BGR aren’t available in OpenGL ES or am I wrong?

Forest Hale wrote:> I’m a bit too busy to give a real answer, but here’s the quick tip:

Your color masks on the surface are wrong, they default to the equivalent of GL_BGRA and GL_BGR, you can either use these GL types to make it work right now, or you can read up on SDL color masks and
understand it fully :slight_smile:

On 04/05/2011 06:35 AM, manifest wrote:

When I tried to convert SDL Surface to a GL texture, I got texture where red colour was replaced by blue. The problem is not in the GL texture, because the colors are correctly if I use
Native::loadTexture() in the following example. And problem isn’t in SDL_image, because If I load a SDL Surface with a SDL_image and use a SDL for render it, the colors are correct.

Where is the problem? It’s a bug in SDL Surface? Or maybe it’s a feature? Smile

Code:

namespace SDL {
void loadTexture() {
SDL_Surface *texSurf = IMG_Load(“texture.png”);
if(!texSurf)
error(“Unable to load texture”);

GLint textureColors = texSurf->format->BytesPerPixel;
GLenum textureFormat = textureColors == 4 ? GL_RGBA : GL_RGB;
glTexImage2D(GL_TEXTURE_2D, 0, textureFormat, texSurf->w, texSurf->h, 0, textureFormat, GL_UNSIGNED_BYTE, texSurf->pixels);

SDL_FreeSurface(texSurf);
}
};

namespace Native {
void loadTexture() {
NSString *path = [[NSBundle mainBundle] pathForResource:@“texture” ofType:@“png”];
NSData *texData = [[NSData alloc] initWithContentsOfFile:path];
UIImage *image = [[UIImage alloc] initWithData:texData];
if (image == nil)
error(“Unable to load texture”);

GLuint width = CGImageGetWidth(image.CGImage);
GLuint height = CGImageGetHeight(image.CGImage);
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
void *imageData = malloc(height * width * 4);
CGContextRef context = CGBitmapContextCreate(
imageData,
width,
height,
8, 4 * width,
colorSpace,
kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big
);
CGColorSpaceRelease(colorSpace);
CGContextClearRect(context, CGRectMake( 0, 0, width, height));
CGContextTranslateCTM(context, 0, height - height);
CGContextDrawImage(context, CGRectMake(0, 0, width, height), image.CGImage);

glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, imageData);

CGContextRelease(context);
free(imageData);
[image release];
[texData release];
}
};

int main(int argc, char *argv[])
{
//// Init SDL
if(SDL_Init(SDL_INIT_VIDEO) != 0)
error(“Unable to initialize SDL”);
atexit(SDL_Quit);

//// Enable OpenGL double buffering
SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1);

//// Set the color depth (16-bit 565)
SDL_GL_SetAttribute(SDL_GL_RED_SIZE, 5);
SDL_GL_SetAttribute(SDL_GL_GREEN_SIZE, 6);
SDL_GL_SetAttribute(SDL_GL_BLUE_SIZE, 5);

//// Set video mode
if(!SDL_SetVideoMode(Width, Height, 16, SDL_OPENGL))
error(“Unable to set video mode”);

//// Generate a texture object
GLuint texId;
glGenTextures(1, &texId);

//// Bind the texture object
glBindTexture(GL_TEXTURE_2D, texId);

//// Load Texture
SDL::loadTexture();
// Native::loadTexture();

//// Set the texture’s stretching properties
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);

using namespace OpenGL::ES1::D2;
Vertex vertices[] = {
Vertex(-1.0f, 1.0f),
Vertex( 1.0f, 1.0f),
Vertex( 1.0f, -1.0f),
Vertex(-1.0f, -1.0f)
};
Texture::Coords textureCoords(
Texture::Coord(0.0f, 0.0f),
Texture::Coord(1.0f, 0.0f),
Texture::Coord(1.0f, 1.0f),
Texture::Coord(0.0f, 1.0f)
);

//// Setting up an orthogonal viewport
glViewport(0, 0, Width, Height);

glLoadIdentity();
glOrthof(-2.0f, 2.0f, -2.0/(Width/Height), 2.0f/(Width/Height), -2.0f, 2.0f);

//// Display
glEnable(GL_TEXTURE_2D);
glEnableClientState(GL_VERTEX_ARRAY);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);

glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glBindTexture(GL_TEXTURE_2D, texId);
glVertexPointer(2, GL_FLOAT, 0, vertices);
glTexCoordPointer(2, GL_FLOAT, 0, &textureCoords);
glDrawArrays(GL_TRIANGLE_FAN, 0, 4);

glDisableClientState(GL_TEXTURE_COORD_ARRAY);
glDisableClientState(GL_VERTEX_ARRAY);
SDL_GL_SwapBuffers();
SDL_Delay(5000);

//// Free OpenGL texture
glDeleteTextures(1, &texId);
return 0;
}

Code:

int main(int argc, char **argv) {
atexit(SDL_Quit);
if(SDL_Init(SDL_INIT_VIDEO) != 0)
error(“Unable to initialize SDL”);

SDL_Surface *screen = SDL_SetVideoMode(Width, Height, 16, 0 /* SDL_FULLSCREEN */);
if(!screen)
error(“Unable to set video mode”);

SDL_Surface *image = IMG_Load(“bg.png”);
if(!image)
error(“Unable to load image”);

SDL_Rect src, dest;
dest.x = dest.y = src.x = src.y = 0;
dest.w = src.w = image->w;
dest.h = src.h = image->h;
SDL_BlitSurface(image, &src, screen, &dest);
SDL_UpdateRect(screen, 0, 0, 0, 0);
SDL_Delay(3000);
SDL_FreeSurface(image);

return 0;
}


SDL mailing list
SDL at lists.libsdl.org
http://lists.libsdl.org/listinfo.cgi/sdl-libsdl.org


LordHavoc
Author of DarkPlaces Quake1 engine - http://icculus.org/twilight/darkplaces
Co-designer of Nexuiz - http://alientrap.org/nexuiz
"War does not prove who is right, it proves who is left." - Unknown
"Any sufficiently advanced technology is indistinguishable from a rigged demo." - James Klass
"A game is a series of interesting choices." - Sid Meier


SDL mailing list
SDL at lists.libsdl.org
http://lists.libsdl.org/listinfo.cgi/sdl-libsdl.org

It is better to understand SDL color masks in the long run.

I can’t speak to whether GL_BGRA and GL_BGR are supported on OpenGL ES in general, I know they are not favorable on some platforms but they work fine on ipod touch 4 at least (which has an OpenGL ES
2.0 driver), I think they are frowned upon/poor performing/or maybe not supported (I haven’t checked the spec, sorry) on OpenGL ES 1.x.

SDL lets you blit from a surface to another surface, the tutorials for using SDL_image tend to discuss color masks, please read up on those, you can ensure that the data in the surface is in RGBA
order for OpenGL.

On 04/05/2011 07:02 AM, manifest wrote:

Thank you, Forest Hale for a very fast answer Smile, but GL_BGRA and GL_BGR aren’t available in OpenGL ES or am I wrong?


LordHavoc
Author of DarkPlaces Quake1 engine - http://icculus.org/twilight/darkplaces
Co-designer of Nexuiz - http://alientrap.org/nexuiz
"War does not prove who is right, it proves who is left." - Unknown
"Any sufficiently advanced technology is indistinguishable from a rigged demo." - James Klass
"A game is a series of interesting choices." - Sid Meier

On 04/05/2011, Forest Hale wrote:

It is better to understand SDL color masks in the long run.

I’ve tried to make a new surface with only green color, but I’ve gotten the same result as in the previous example.
Why does surface->format->Rmask return zero? Is that normal?

Code:
void loadTexture() {
SDL_Surface *texSurf = IMG_Load("texture.png");
if(!texSurf)
error("Unable to load texture");

    SDL_Surface *fixedSurf = SDL_CreateRGBSurfaceFrom(
        texSurf->pixels, 
        texSurf->w, 
        texSurf->h, 
        texSurf->format->BytesPerPixel, 
        texSurf->pitch, 
        0x0000FF00,
        0x0000FF00,
        0x0000FF00,
        texSurf->format->Amask
    );
    
    Uint16 r = fixedSurf->format->Rmask; //// r == 0
    
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, fixedSurf->w, fixedSurf->h, 0, GL_RGBA, GL_UNSIGNED_BYTE, fixedSurf->pixels);

    SDL_FreeSurface(fixedSurf);
    SDL_FreeSurface(texSurf);
}