I have the following code. With DO_SCALE_3DACC = 1 it leaks memory quite massively (about 0.1 MB over 5 seconds), yet the leak is not reported by -fsanitize=address — which I assume means it is a texture/VRAM/driver-side leak. With DO_SCALE_3DACC = 0 it does not leak at all:
/* Fragment of a per-frame draw routine: upscale-blit `srf` into the window,
 * either via the software window surface (DO_SCALE_3DACC == 0) or via an
 * SDL_Renderer streaming texture (DO_SCALE_3DACC == 1). */
if (!DO_SCALE_3DACC) { // software rendered upscale path:
SDL_Surface *tgsrf = SDL_GetWindowSurface(target);
/* NOTE(review): return values of SDL_BlitScaled/SDL_UpdateWindowSurface are
 * not checked — failures here would go unnoticed. */
SDL_BlitScaled(srf, &srcrect, tgsrf, &tgrect);
SDL_UpdateWindowSurface(target);
} else { // 3d accelerated upscale path:
/* Reuse the cached streaming texture `copytex` across frames; destroy and
 * recreate it only when the source rect size changed. This recreate-on-resize
 * logic looks correct and should not leak by itself. */
if (copytex) {
int tx = 0;
int ty = 0;
SDL_QueryTexture(copytex, NULL, NULL, &tx, &ty);
if (tx != srcrect.w || ty != srcrect.h) {
SDL_DestroyTexture(copytex);
copytex = NULL;
}
}
if (!copytex) {
copytex = SDL_CreateTexture(
winrender, SDL_PIXELFORMAT_RGBA8888,
SDL_TEXTUREACCESS_STREAMING,
srcrect.w, srcrect.h
);
if (!copytex) {
LOG(LT_ERROR,
"draw.c: failed to create texture: %s\n",
SDL_GetError());
return;
}
}
/* Upload the surface pixels row by row into the streaming texture.
 * SDL_LockTexture may hand back a pitch different from srf's pitch, hence
 * the per-row memcpy rather than one bulk copy. */
char *pixels = NULL;
int pitch = 0;
if (SDL_LockTexture(
copytex, NULL, (void**)&pixels, &pitch) != 0) {
LOG(LT_ERROR,
"draw.c: failed to lock texture: %s\n",
SDL_GetError());
return;
}
SDL_LockSurface(srf); // NOTE(review): return value unchecked
int i = 0;
/* NOTE(review): this loop mixes two surfaces — it reads srf->pixels and
 * locks srf, but takes h/pitch/w from tempsrf. If tempsrf != srf (or their
 * dimensions/pitch differ), rows are read from the wrong offsets and the
 * copy can run past the end of srf's pixel buffer — confirm they are the
 * same surface, or use srf's own fields throughout. */
while (i < tempsrf->h) {
/* NOTE(review): `4 * tempsrf->w` assumes a 4-bytes-per-pixel surface
 * format and that the row fits within the texture's `pitch`; if
 * 4 * tempsrf->w > pitch this writes past each texture row — clamp to
 * the smaller of the two pitches to be safe. Corrupting driver-owned
 * memory here is a plausible cause of the "invisible" VRAM-side leak. */
memcpy(
pixels + (i * pitch),
srf->pixels + i * tempsrf->pitch, 4 * tempsrf->w
);
i++;
}
SDL_UnlockSurface(srf);
SDL_UnlockTexture(copytex);
SDL_RenderCopy(winrender, copytex, &srcrect, &tgrect);
SDL_RenderPresent(winrender);
/* NOTE(review): per SDL2's docs, SDL_GetWindowSurface "may not be used with
 * the rendering API on the same window". If both branches can run against
 * the same `target`/`winrender` window across the program's lifetime, that
 * mixing is itself a suspect for driver-side resource growth — confirm the
 * mode never switches at runtime for a given window. */
}
Did I do something obviously wrong here that would cause a leak, or is this an SDL2 or driver bug? The leak is quite substantial, and since this runs every frame, it adds up badly.