SDL/OpenGL: GL calls only work in init function

Hi,

I’m currently working on a C project that, until recently, had been coded entirely against the AmigaOS 3 API. However, I have decided to try to create a separate strand within the project that’ll output via SDL/OpenGL.

The SDL part of the project is arranged so that the GL initialisation code is in one file, main_sdl.c, and the actual graphics functions are in another, graphics.c. In main_sdl.c I currently have:

Code:
/* SDL_Init() returns 0 on success; errors yield nonzero values */
if (!SDL_Init(SDL_INIT_VIDEO)) {
  SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1);

  if ((coret.gfx->screen = SDL_SetVideoMode(SCR_W, SCR_H, 32,
                                            SDL_OPENGL))) {
    init_gfx_buffer(&coret);

    SDL_WM_SetCaption("SoR", "SoR");

    glClearColor(0, 0, 0, 0);
    glEnable(GL_TEXTURE_2D);

    glViewport(0, 0, SCR_W, SCR_H);

    /* 2D projection with the origin at the top-left corner */
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    glOrtho(0, SCR_W, SCR_H, 0, -1, 1);

    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();

Following this code is some code I pasted over from one of the functions in graphics.c, most of which is copied below:

Code:
SDL_Surface *surface;

if ((surface = IMG_Load((char *)filename))) {
  GLuint texture;

  glGenTextures(1, &texture);
  glBindTexture(GL_TEXTURE_2D, texture);
  SDL_LockSurface(surface);

  glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, surface->w, surface->h, 0,
               GL_RGB, GL_UNSIGNED_BYTE, surface->pixels);

  UWORD w = surface->w;
  UWORD h = surface->h;

  SDL_UnlockSurface(surface);
  SDL_FreeSurface(surface);

  /* Set filters for the texture (minification, magnification) */
  glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
  glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);

  /* Draw the texture as a single quad */
  glBindTexture(GL_TEXTURE_2D, texture);
  glBegin(GL_QUADS);
    glTexCoord2i(0, 0);
    glVertex2f(0, 0);
    glTexCoord2i(1, 0);
    glVertex2f(w - 1, 0);
    glTexCoord2i(1, 1);
    glVertex2f(w - 1, h - 1);
    glTexCoord2i(0, 1);
    glVertex2f(0, h - 1);
  glEnd();

  SDL_GL_SwapBuffers();
  SDL_Delay(3000);

  glDeleteTextures(1, &texture);
} else {
  fprintf(stderr, "Image file oops: %s\n", SDL_GetError());
}

When I run the project, the copy of the code in the initialising function works and displays the image; however, the copy in graphics.c, which runs after the initialisation code and is called from a different function in main_sdl.c, draws a black box in place of the texture instead. Where could I be going wrong?
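
In case it helps to narrow things down, this is the kind of error-checking helper I could sprinkle around the GL calls (a minimal diagnostic sketch; gl_check() is just a name I made up, not part of the project):

Code:
/* Diagnostic sketch: report any pending OpenGL error at a labelled
   call site. gl_check() is a hypothetical helper, not project code. */
#include <GL/gl.h>
#include <stdio.h>

static void gl_check(const char *where)
{
  GLenum err = glGetError();

  if (err != GL_NO_ERROR)
    fprintf(stderr, "GL error 0x%04X at %s\n", (unsigned)err, where);
}

Calling it after glTexImage2D() and again after glEnd() should at least show whether the calls in graphics.c fail outright or merely draw with a texture that never uploaded.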

Thanks, Matt

Hey there,

I have been learning OpenGL for two months and am now trying to add SDL so that I can get access to image files, sound, and so on. Mostly things have been going okay, except that, like you, I have been unable to convert an SDL image into an OpenGL texture and display it. Instead, I just get a white rectangle with no texture. The code I am using, which seems similar to your own, is also similar to many versions I have seen on other websites. Here is mine:

Code:
// sdl.cc (OpenGL + SDL image-to-texture test program)
// Compile: g++ -Wall sdl.cc -o sdl -lGL -lSDL -lSDL_image

#include <SDL/SDL.h>
#include <SDL/SDL_image.h>
#include <GL/gl.h>
#include <iostream>

const int WIDTH1 = 1024, HEIGHT1 = 768;

int main( int argc, char *argv[] )
{
    SDL_Init( SDL_INIT_VIDEO | SDL_INIT_TIMER );
    SDL_GL_SetAttribute( SDL_GL_DOUBLEBUFFER, 1 ); // Set before setting the video mode.
    // You can't use SDL_SWSURFACE or SDL_HWSURFACE when running SDL_OPENGL.
    SDL_SetVideoMode( WIDTH1, HEIGHT1, 0, SDL_OPENGL | SDL_FULLSCREEN );

    glViewport( 0, 0, WIDTH1, HEIGHT1 );
    glMatrixMode( GL_PROJECTION );
    glLoadIdentity();
    glOrtho( 0, 1023, 0, 767, 1.0, -1.0 );
    glMatrixMode( GL_MODELVIEW );
    glLoadIdentity();
    glClearColor( 0.0f, 0.0f, 0.0f, 1.0f );
    glClear( GL_COLOR_BUFFER_BIT );

    SDL_Surface *image1 = IMG_Load( "devil.jpg" );
    if( !image1 ) {
        std::cout << "Image won't load.\n";
        return 0;
    }

    GLenum format1;
    switch( image1->format->BytesPerPixel )
    {
    case 4:
        format1 = ( image1->format->Rmask == 0x000000ff ) ? GL_RGBA : GL_BGRA;
        break;
    case 3:
        format1 = ( image1->format->Rmask == 0x000000ff ) ? GL_RGB : GL_BGR;
        break;
    default:
        std::cout << "Image format not truecolor.\n";
        return 0;
    }

    glFrontFace( GL_CCW );
    glTexEnvi( GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE );
    glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT );
    glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT );
    glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST );
    glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST );

    GLuint texture1;
    glGenTextures( 1, &texture1 );
    glBindTexture( GL_TEXTURE_2D, texture1 );
    SDL_LockSurface( image1 );
    glTexImage2D( GL_TEXTURE_2D, 0, image1->format->BytesPerPixel, image1->w,
                  image1->h, 0, format1, GL_UNSIGNED_BYTE, image1->pixels );
    SDL_UnlockSurface( image1 );
    SDL_FreeSurface( image1 );

    glEnable( GL_TEXTURE_2D );
    glBegin( GL_QUADS );
        glTexCoord2f( 0.0, 0.0 ); // Bottom-left vertex.
        glVertex3f( 100.0, 100.0, 0.0 );
        glTexCoord2f( 1.0, 0.0 ); // Bottom-right vertex.
        glVertex3f( 648.0, 100.0, 0.0 );
        glTexCoord2f( 1.0, 1.0 ); // Top-right vertex.
        glVertex3f( 648.0, 380.0, 0.0 );
        glTexCoord2f( 0.0, 1.0 ); // Top-left vertex.
        glVertex3f( 100.0, 380.0, 0.0 );
    glEnd();
    glDisable( GL_TEXTURE_2D );

    SDL_GL_SwapBuffers();
    SDL_Delay( 2000 );

    glDeleteTextures( 1, &texture1 );
    SDL_Quit();
    return 0;
}
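
One thing I have also been experimenting with, so far with no luck, is the pixel unpack alignment, since an SDL surface's rows are not necessarily packed the way OpenGL assumes by default. Treat this purely as a guess rather than a confirmed fix:

Code:
// Guess, not a confirmed fix: GL_UNPACK_ALIGNMENT defaults to 4, so a
// 3-bytes-per-pixel surface whose width is not a multiple of 4 is read
// with the wrong row stride. Setting it to 1 before glTexImage2D()
// makes OpenGL read the rows tightly packed.
glPixelStorei( GL_UNPACK_ALIGNMENT, 1 );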

I guess the reason I’m posting this is to see if you ever found a fix, or if anyone else has one or knows about this problem.

Thanks!