From 12cee1cf46c36ed67605f61dc21449dc851b3028 Mon Sep 17 00:00:00 2001
From: Cameron Cawley <[EMAIL REDACTED]>
Date: Wed, 8 Jan 2025 18:45:43 +0000
Subject: [PATCH] Use the correct pixel formats for OpenGL on big endian
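
SDL_PIXELFORMAT_ARGB8888 and friends are packed 32-bit formats, so the
order of their channel bytes in memory depends on the host's
endianness. The old GL_UNSIGNED_INT_8_8_8_8_REV upload path matched
that, since packed GL types follow host byte order too, but the current
GL_UNSIGNED_BYTE path reads components strictly in memory byte order,
which leaves the channels swapped on big-endian platforms.

Advertise the byte-order aliases (SDL_PIXELFORMAT_RGBA32 and friends)
instead. On little-endian platforms these resolve to exactly the
formats that were listed before, so nothing changes there. The
alpha-less RGBX32/BGRX32 formats are now only advertised once the
shader context is known to be available.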
---
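Note for reviewers (kept below the "---" so it stays out of the commit
message): the *32 names are aliases that SDL_pixels.h resolves to
whichever packed format matches the host byte order, so they always
describe the same byte layout in memory. As a sanity check, here is a
small standalone sketch (assuming only the public SDL3 headers, not
part of this patch) that prints what each alias used here resolves to
on the build machine:

#include <SDL3/SDL.h>
#include <stdio.h>

int main(void)
{
    /* Aliases share their enum value with the underlying packed
       format, so SDL_GetPixelFormatName() reports that format. */
    printf("RGBA32 -> %s\n", SDL_GetPixelFormatName(SDL_PIXELFORMAT_RGBA32));
    printf("BGRA32 -> %s\n", SDL_GetPixelFormatName(SDL_PIXELFORMAT_BGRA32));
    printf("RGBX32 -> %s\n", SDL_GetPixelFormatName(SDL_PIXELFORMAT_RGBX32));
    printf("BGRX32 -> %s\n", SDL_GetPixelFormatName(SDL_PIXELFORMAT_BGRX32));
    return 0;
}

On little endian this should report the ABGR8888/ARGB8888 family; on
big endian, RGBA8888/BGRA8888.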
src/render/opengl/SDL_render_gl.c | 23 +++++++++++++----------
1 file changed, 13 insertions(+), 10 deletions(-)

diff --git a/src/render/opengl/SDL_render_gl.c b/src/render/opengl/SDL_render_gl.c
index d2fd5dc9652f8..6791dadfb7981 100644
--- a/src/render/opengl/SDL_render_gl.c
+++ b/src/render/opengl/SDL_render_gl.c
@@ -414,14 +414,14 @@ static bool GL_SupportsBlendMode(SDL_Renderer *renderer, SDL_BlendMode blendMode
static bool convert_format(Uint32 pixel_format, GLint *internalFormat, GLenum *format, GLenum *type)
{
switch (pixel_format) {
- case SDL_PIXELFORMAT_ARGB8888:
- case SDL_PIXELFORMAT_XRGB8888:
+ case SDL_PIXELFORMAT_BGRA32:
+ case SDL_PIXELFORMAT_BGRX32:
*internalFormat = GL_RGBA8;
*format = GL_BGRA;
*type = GL_UNSIGNED_BYTE; // previously GL_UNSIGNED_INT_8_8_8_8_REV, seeing if this is better in modern times.
break;
- case SDL_PIXELFORMAT_ABGR8888:
- case SDL_PIXELFORMAT_XBGR8888:
+ case SDL_PIXELFORMAT_RGBA32:
+ case SDL_PIXELFORMAT_RGBX32:
*internalFormat = GL_RGBA8;
*format = GL_RGBA;
*type = GL_UNSIGNED_BYTE; // previously GL_UNSIGNED_INT_8_8_8_8_REV, seeing if this is better in modern times.
@@ -737,7 +737,7 @@ static bool GL_CreateTexture(SDL_Renderer *renderer, SDL_Texture *texture, SDL_P

if (texture->format == SDL_PIXELFORMAT_INDEX8) {
data->shader = SHADER_PALETTE_NEAREST;
- } else if (texture->format == SDL_PIXELFORMAT_ABGR8888 || texture->format == SDL_PIXELFORMAT_ARGB8888) {
+ } else if (texture->format == SDL_PIXELFORMAT_RGBA32 || texture->format == SDL_PIXELFORMAT_BGRA32) {
data->shader = SHADER_RGBA;
} else {
data->shader = SHADER_RGB;
@@ -1607,7 +1607,7 @@ static bool GL_RunCommandQueue(SDL_Renderer *renderer, SDL_RenderCommand *cmd, v
static SDL_Surface *GL_RenderReadPixels(SDL_Renderer *renderer, const SDL_Rect *rect)
{
GL_RenderData *data = (GL_RenderData *)renderer->internal;
- SDL_PixelFormat format = renderer->target ? renderer->target->format : SDL_PIXELFORMAT_ARGB8888;
+ SDL_PixelFormat format = renderer->target ? renderer->target->format : SDL_PIXELFORMAT_RGBA32;
GLint internalFormat;
GLenum targetFormat, type;
SDL_Surface *surface;
@@ -1822,10 +1822,9 @@ static bool GL_CreateRenderer(SDL_Renderer *renderer, SDL_Window *window, SDL_Pr
renderer->window = window;
renderer->name = GL_RenderDriver.name;

- SDL_AddSupportedTextureFormat(renderer, SDL_PIXELFORMAT_ARGB8888);
- SDL_AddSupportedTextureFormat(renderer, SDL_PIXELFORMAT_ABGR8888);
- SDL_AddSupportedTextureFormat(renderer, SDL_PIXELFORMAT_XRGB8888);
- SDL_AddSupportedTextureFormat(renderer, SDL_PIXELFORMAT_XBGR8888);
+ SDL_AddSupportedTextureFormat(renderer, SDL_PIXELFORMAT_RGBA32);
+ /* TODO: Check for required extensions on OpenGL 1.1 systems? */
+ SDL_AddSupportedTextureFormat(renderer, SDL_PIXELFORMAT_BGRA32);

data->context = SDL_GL_CreateContext(window);
if (!data->context) {
@@ -1916,6 +1915,10 @@ static bool GL_CreateRenderer(SDL_Renderer *renderer, SDL_Window *window, SDL_Pr
data->shaders = GL_CreateShaderContext();
SDL_LogInfo(SDL_LOG_CATEGORY_RENDER, "OpenGL shaders: %s",
data->shaders ? "ENABLED" : "DISABLED");
+ if (data->shaders) {
+ SDL_AddSupportedTextureFormat(renderer, SDL_PIXELFORMAT_RGBX32);
+ SDL_AddSupportedTextureFormat(renderer, SDL_PIXELFORMAT_BGRX32);
+ }
// We support INDEX8 textures using 2 textures and a shader
if (data->shaders && data->num_texture_units >= 2) {
SDL_AddSupportedTextureFormat(renderer, SDL_PIXELFORMAT_INDEX8);