White textures with SDL 1.3 and OpenGL ES 1.1
I'm trying to port a game that uses SDL 1.2 and OpenGL 1.3 to Android and eventually iOS.
For that I had to switch to SDL 1.3, the unreleased next version that's going to become SDL 2.0, which has been ported to Android. Initial results were good, everything works fine with just SDL and SDL_image.
But when I add OpenGL ES 1.1 to the mix and try to render a textured rectangle, all I get is a white rectangle.
Any ideas what I'm doing wrong here? Like I said, just SDL/SDL_image displays the image just fine.
Edit: I've updated the code below according to Ryan Maloney's suggestions, namely using float literals where I use GL_FLOAT and trying SDL_GL_BindTexture
instead of creating the texture manually. That doesn't work either, though: I get a black texture and some errors ("No EGL config available", "EGLNativeWindowType 0x2a1b5380 already connected to another API"). glBindTexture
still results in a white texture.
Edit 2 : Just had the glorious idea of actually checking for errors: If I call glGetError()
right after glTexImage2D
, I get GL_INVALID_OPERATION
.
#include <cmath>
#include <cstdlib>
#include <string>

#include <GLES/gl.h>
#include <SDL.h>
#include <SDL_image.h>
// Set to 1 to use SDL_GL_BindTexture instead of glBindTexture
#define USE_SDL_TEXTURE 0
static const int screen_width = 640;
static const int screen_height = 480;
// One-time fixed-function GL setup: texturing, standard alpha blending, and
// a 2D orthographic projection over the whole window.
void init_opengl()
{
glEnable(GL_TEXTURE_2D);
// Classic "source over destination" alpha blending.
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glEnable(GL_BLEND);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glViewport(0, 0, screen_width, screen_height);
// Top/bottom swapped so (0,0) is the top-left corner, matching SDL's
// 2D coordinate convention.
glOrthof(0, screen_width, screen_height, 0, -1, 1);
// Leave the modelview matrix current for subsequent drawing.
glMatrixMode(GL_MODELVIEW);
}
/*
 * Return the smallest power of two >= n (for n <= 1, returns 1).
 *
 * Uses exact integer doubling instead of the previous log()/pow()/ceil()
 * round-trip, which depended on floating-point rounding (hence the old
 * "+ 0.5" fudge) and a <cmath> include the file never had.
 */
int next_power_of_two(int n)
{
    int result = 1;
    while (result < n)
        result <<= 1;
    return result;
}
/*
 * Copy `surface` into the top-left corner of a fresh 32-bit ARGB surface
 * whose dimensions are rounded up to powers of two (required by many
 * GLES 1.1 implementations). Takes ownership of `surface` and frees it;
 * the caller owns the returned surface.
 */
SDL_Surface* convert_to_power_of_two(SDL_Surface* surface)
{
    int width = next_power_of_two(surface->w);
    int height = next_power_of_two(surface->h);
    SDL_Surface* pot_surface = SDL_CreateRGBSurface(0, width, height, 32,
                                                    0x00ff0000, 0x0000ff00,
                                                    0x000000ff, 0xff000000);
    SDL_Rect dstrect;
    dstrect.w = surface->w;
    dstrect.h = surface->h;
    dstrect.x = 0;
    dstrect.y = 0;
    /* FIX: the old SDL_SetSurfaceAlphaMod(surface, 0) multiplied every
     * source alpha by zero, so under the default blend mode the blit drew
     * nothing. Disabling blending copies the RGBA pixels (alpha channel
     * included) verbatim into the new surface. */
    SDL_SetSurfaceBlendMode(surface, SDL_BLENDMODE_NONE);
    SDL_BlitSurface(surface, NULL, pot_surface, &dstrect);
    SDL_FreeSurface(surface);
    return pot_surface;
}
/*
 * Map a surface's bytes-per-pixel to the matching GL pixel format.
 * Only 32-bit RGBA and 24-bit RGB surfaces are supported; anything else
 * throws. (`pixel_format` is currently unused beyond documentation.)
 */
GLenum get_texture_format(SDL_PixelFormat* pixel_format, GLint bpp)
{
    if (bpp == 4)
        return GL_RGBA;
    if (bpp == 3)
        return GL_RGB;
    throw "Unsupported pixel format";
}
#if USE_SDL_TEXTURE
/* Load `path` and return an SDL texture owned by `renderer`. */
SDL_Texture* load_image(const std::string& path, SDL_Renderer* renderer)
#else
/* Load `path` and return a bound GL texture object. */
GLuint load_image(const std::string& path)
#endif
{
    SDL_Surface* surface = IMG_Load(path.c_str());
    /* convert_to_power_of_two frees `surface` and returns a new surface. */
    SDL_Surface* pot_surface = convert_to_power_of_two(surface);
#if USE_SDL_TEXTURE
    /* FIX: the old code passed `surface` here, which was already freed by
     * convert_to_power_of_two (use-after-free), and leaked pot_surface. */
    SDL_Texture* texture = SDL_CreateTextureFromSurface(renderer, pot_surface);
    SDL_FreeSurface(pot_surface);
    return texture;
#else
    SDL_PixelFormat* pixel_format = pot_surface->format;
    GLint bpp = pixel_format->BytesPerPixel;
    GLenum texture_format = get_texture_format(pixel_format, bpp);
    GLuint texture;
    glGenTextures(1, &texture);
    glBindTexture(GL_TEXTURE_2D, texture);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    /* FIX: OpenGL ES requires internalformat (3rd arg) to match `format`;
     * passing `bpp` here raised GL_INVALID_OPERATION (desktop GL accepts
     * the legacy numeric values 1-4, which is why it used to work). */
    glTexImage2D(GL_TEXTURE_2D, 0, texture_format, pot_surface->w, pot_surface->h, 0,
                 texture_format, GL_UNSIGNED_BYTE, pot_surface->pixels);
    SDL_FreeSurface(pot_surface);
    return texture;
#endif
}
#if USE_SDL_TEXTURE
/* Draw `texture` as a width x height quad anchored at the origin. */
void draw_texture(SDL_Texture* texture, float width, float height)
#else
/* Draw GL texture object `texture` as a width x height quad at the origin. */
void draw_texture(GLuint texture, float width, float height)
#endif
{
#if USE_SDL_TEXTURE
    SDL_GL_BindTexture(texture, NULL, NULL);
#else
    glBindTexture(GL_TEXTURE_2D, texture);
#endif
    /* Triangle strip order: bottom-left, top-left, bottom-right, top-right. */
    const GLfloat uvs[] = {
        0.0f,  1.0f,
        0.0f,  0.0f,
        1.0f,  1.0f,
        1.0f,  0.0f
    };
    const GLfloat quad[] = {
        0.0f,  height,
        0.0f,  0.0f,
        width, height,
        width, 0.0f
    };
    glEnableClientState(GL_TEXTURE_COORD_ARRAY);
    glEnableClientState(GL_VERTEX_ARRAY);
    glTexCoordPointer(2, GL_FLOAT, 0, uvs);
    glVertexPointer(2, GL_FLOAT, 0, quad);
    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
    glDisableClientState(GL_VERTEX_ARRAY);
    glDisableClientState(GL_TEXTURE_COORD_ARRAY);
}
int main(int argc, char* argv[])
{
    SDL_Init(SDL_INIT_VIDEO);
    atexit(SDL_Quit);
    IMG_Init(IMG_INIT_PNG);
    atexit(IMG_Quit);
    SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1);
    const Uint32 flags = SDL_WINDOW_OPENGL | SDL_WINDOW_SHOWN;
    SDL_Window* window = SDL_CreateWindow("Foo", 0, 0,
                                          screen_width, screen_height, flags);
#if USE_SDL_TEXTURE
    /* FIX: -1 asks SDL for the first driver supporting the requested flags;
     * the previous hard-coded index 1 requested the *second* driver, which
     * may not exist on the target device.
     * NOTE(review): creating a renderer here AND a GL context below attaches
     * the window to two APIs — this looks like the source of the
     * "EGLNativeWindowType ... already connected to another API" error;
     * confirm whether both are really needed in this mode. */
    SDL_Renderer* renderer = SDL_CreateRenderer(window, -1, SDL_RENDERER_ACCELERATED);
#endif
    SDL_GLContext context = SDL_GL_CreateContext(window);
    SDL_GL_SetSwapInterval(1); /* vsync */
    init_opengl();
#if USE_SDL_TEXTURE
    SDL_Texture* texture = load_image("character_editor_bg.png", renderer);
#else
    GLuint texture = load_image("character_editor_bg.png");
#endif
    SDL_Event event;
    for (;;) {
        SDL_WaitEvent(&event);
        if (event.type == SDL_QUIT)
            break;
        glClear(GL_COLOR_BUFFER_BIT);
        draw_texture(texture, screen_width, screen_height);
        SDL_GL_SwapWindow(window);
        SDL_Delay(1);
    }
    SDL_GL_DeleteContext(context);
    SDL_DestroyWindow(window);
    SDL_Quit();
    return 0;
}
Finally found the problem: I was in fact using glTexImage2D
incorrectly, and it only happened to work in OpenGL 1.3.
Here's what the invocation ought to be:
glTexImage2D(GL_TEXTURE_2D, 0, texture_format, pot_surface->w, pot_surface->h, 0,
texture_format, GL_UNSIGNED_BYTE, pot_surface->pixels);
Previously, I had passed bpp
as the third parameter (the internal format). That only worked by accident: desktop OpenGL accepts the legacy numeric internal formats 1–4, but OpenGL ES requires the internal format to match the format parameter, so passing bpp there raises GL_INVALID_OPERATION.