Hi,
I’m migrating a C++ application built with SDL 1.2 to SDL 2.0.20 (on Linux Mint). I have a Screen class which holds a uint8 *pixels_ buffer that is used to prepare the screen every frame. Then, in a SystemSDL class, I have an updateScreen() method, called every frame, that copies the Screen::pixels_ data to the back buffer and presents the screen.
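For reference, the relevant declarations look roughly like this (a sketch reconstructed from the calls below, not the exact headers):

class Screen {
public:
    bool dirty() const;
    // One palette index per pixel, GAME_SCREEN_WIDTH * GAME_SCREEN_HEIGHT bytes
    const uint8 *pixels() const;
private:
    uint8 *pixels_;
};

class SystemSDL {
    ...
    SDL_Window *pWindow_;
    SDL_Renderer *pRenderer_;
    SDL_Surface *pScreenSurface_;  // 8bpp surface that also holds the palette
    SDL_Texture *pScreenTexture_;  // ARGB8888 streaming texture
};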
Here is an extract of the code:
bool SystemSDL::initialize(bool fullscreen) {
...
// pScreenSurface_ and pScreenTexture_ are members of SystemSDL
pScreenSurface_ = SDL_CreateRGBSurface(0, GAME_SCREEN_WIDTH, GAME_SCREEN_HEIGHT, 8, 0, 0, 0, 0);
pScreenTexture_ = SDL_CreateTexture(pRenderer_,
SDL_PIXELFORMAT_ARGB8888,
SDL_TEXTUREACCESS_STREAMING,
GAME_SCREEN_WIDTH, GAME_SCREEN_HEIGHT);
...
}
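Side note: SDL allocates a 256-entry palette automatically when an 8bpp surface is created; filling it amounts to something like this (illustrative only, the real colors come from the game’s palette data):

SDL_Color colors[256];
// ... fill colors from the game's palette data ...
SDL_SetPaletteColors(pScreenSurface_->format->palette, colors, 0, 256);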
void SystemSDL::updateScreen() {
if (g_Screen.dirty()) {
// Clear screen buffer
SDL_RenderClear(pRenderer_);
SDL_LockSurface(pScreenSurface_);
const uint8 *srcPixels = g_Screen.pixels();
Uint32 *dstPixels = (Uint32 *)pScreenSurface_->pixels;
// Manual blitting to convert from 8bpp palette-indexed values to 32bpp RGB for each pixel
uint8 r, g, b;
for (int i = 0; i < GAME_SCREEN_WIDTH * GAME_SCREEN_HEIGHT; i++) {
uint8 index = srcPixels[i];
r = pScreenSurface_->format->palette->colors[index].r;
g = pScreenSurface_->format->palette->colors[index].g;
b = pScreenSurface_->format->palette->colors[index].b;
Uint32 c = ((r << 16) | (g << 8) | (b << 0)) | (255u << 24); // opaque alpha; unsigned literal avoids shifting into the sign bit
dstPixels[i] = c;
}
SDL_UnlockSurface(pScreenSurface_);
// Copy the pixels to the texture
SDL_UpdateTexture(pScreenTexture_, NULL, pScreenSurface_->pixels, GAME_SCREEN_WIDTH * sizeof(Uint32));
// Copy texture to the screen buffer
SDL_RenderCopy(pRenderer_, pScreenTexture_, NULL, NULL);
// Present the back buffer
SDL_RenderPresent(pRenderer_);
}
}
SystemSDL::~SystemSDL() {
if (pScreenSurface_) {
SDL_FreeSurface(pScreenSurface_);
pScreenSurface_ = nullptr;
}
if (pScreenTexture_) {
SDL_DestroyTexture(pScreenTexture_);
pScreenTexture_ = nullptr;
}
if (pRenderer_) {
SDL_DestroyRenderer(pRenderer_);
pRenderer_ = nullptr;
}
if (pWindow_) {
SDL_DestroyWindow(pWindow_);
pWindow_ = nullptr;
}
SDL_Quit();
}
The problem is that when I exit the application, I get a “double free or corruption (!prev)” error.
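I suspect it comes from the manual blit above: pScreenSurface_ is created with a depth of 8, so SDL allocates only one byte per pixel for it, while the loop writes a 4-byte Uint32 per pixel through dstPixels, running past the end of the buffer. Logging the surface layout should make the mismatch visible:

// Sanity check: an 8bpp surface stores 1 byte per pixel, so writing
// Uint32 values through its pixels pointer overflows the buffer 4x.
SDL_Log("BytesPerPixel=%d pitch=%d",
        pScreenSurface_->format->BytesPerPixel, pScreenSurface_->pitch);
// Expected: BytesPerPixel=1, pitch around GAME_SCREEN_WIDTH bytes, not GAME_SCREEN_WIDTH * 4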
I have found a workaround that doesn’t produce this error: using a separate Uint32 *pixels_ array as the destination for the pixel conversion.
void SystemSDL::updateScreen() {
if (g_Screen.dirty() || (cursor_visible_ && update_cursor_)) {
// Clear screen buffer
SDL_RenderClear(pRenderer_);
SDL_LockSurface(pScreenSurface_);
const uint8 *srcPixels = g_Screen.pixels();
//Uint32 *dstPixels = (Uint32 *)pScreenSurface_->pixels;
// We do manual blitting to convert from 8bpp palette-indexed values to 32bpp RGB for each pixel
uint8 r, g, b;
for (int i = 0; i < GAME_SCREEN_WIDTH * GAME_SCREEN_HEIGHT; i++) {
uint8 index = srcPixels[i];
r = pScreenSurface_->format->palette->colors[index].r;
g = pScreenSurface_->format->palette->colors[index].g;
b = pScreenSurface_->format->palette->colors[index].b;
Uint32 c = ((r << 16) | (g << 8) | (b << 0)) | (255u << 24);
pixels_[i] = c;
}
SDL_UnlockSurface(pScreenSurface_);
// Copy the pixels to the texture
SDL_UpdateTexture(pScreenTexture_, NULL, pixels_, GAME_SCREEN_WIDTH * sizeof(Uint32));
...
In that case, everything works fine. But then pScreenSurface_ is only used to store the palette, and I end up with two memory buffers for the same data.
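One idea I’m considering is to drop the surface entirely and keep the palette in a plain array next to the single Uint32 buffer. A rough, untested sketch (palette_ and pixels_ would become members of SystemSDL):

// SDL_Color palette_[256];
// Uint32 pixels_[GAME_SCREEN_WIDTH * GAME_SCREEN_HEIGHT];
void SystemSDL::updateScreen() {
    if (g_Screen.dirty()) {
        const uint8 *srcPixels = g_Screen.pixels();
        // Convert palette indices to ARGB directly into the texture upload buffer
        for (int i = 0; i < GAME_SCREEN_WIDTH * GAME_SCREEN_HEIGHT; i++) {
            const SDL_Color &col = palette_[srcPixels[i]];
            pixels_[i] = (255u << 24) | (col.r << 16) | (col.g << 8) | col.b;
        }
        SDL_UpdateTexture(pScreenTexture_, NULL, pixels_, GAME_SCREEN_WIDTH * sizeof(Uint32));
        SDL_RenderClear(pRenderer_);
        SDL_RenderCopy(pRenderer_, pScreenTexture_, NULL, NULL);
        SDL_RenderPresent(pRenderer_);
    }
}

But I’m not sure this is the idiomatic way to handle a palette in SDL2.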
If you have any ideas, I’d be glad to hear them!
Thanks!