Using textures, OpenGL switches to software renderer

Nibbie
Posts: 2
Joined: 2008.10
Post: #1
Hi,

I'm developing a game with Xcode, SDL, SDL_image, and C++. I set up the OpenGL context with SDL and load the textures into memory with SDL_image. Whenever I render a texture with OpenGL, the framerate drops (in an 800x600 window I get 10 fps when the textured quad occupies the whole window) and OpenGL uses the software renderer instead of the driver for my video card. I know this because, with OpenGL Profiler's 'break on SW fallback' option set in the breakpoints window, the application breaks on every OpenGL call I use. The quad (actually two triangles) gets rendered using vertex arrays ...

I'm pretty much puzzled about what could be causing this; every other OpenGL application seems to work fine and doesn't break in the OpenGL Profiler. I have put together a small example that triggers the described behavior on my 2 GHz MacBook Pro with an ATI Radeon X1600. It would be a great help if you could compile it and tell me whether you can reproduce the behavior, or give me any clues about where to look next.

Here is the example (you'll need to download the SDL and SDL_image frameworks from http://www.libsdl.org):

Code:
/* Simple program: create an OpenGL window, draw a rotating textured quad,
   and quit on Escape.

   Please see the SDL documentation for details on using the SDL API:
   /Developer/Documentation/SDL/docs.html
*/

#include <SDL/SDL.h>
#include <SDL_image/SDL_image.h>
#include <OpenGL/gl.h>
#include <OpenGL/glu.h>   /* needed for gluPerspective / gluLookAt */
#include <stdio.h>

void initContext();
void initGL();
void initTexture(const char * imagePath);
void renderLoop();
void processEvents();
void animate();

GLuint textureId;
int running = 0;
int fps = 0, fpsCounter = 0;
unsigned int lastSecond = 0;
double anim = 0.0;

void renderLoop() {
    running = 1;
    while(running) {
        processEvents();
        animate();
        
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
        glMatrixMode(GL_MODELVIEW);
        glLoadIdentity();
        gluLookAt(2, 2, 2, 0, 0, 0, 0, 0, 1); //pull the camera back so the quad is inside the view frustum
        
        glBindTexture(GL_TEXTURE_2D, textureId);
        glScalef(4.0f,4.0f,4.0f);
        glRotated(anim, 0.0, 0.0, 1.0);
        //glColor3f(1.0f, 0.5f, 0.25f);
        glBegin(GL_TRIANGLES);
        
        glTexCoord2f(0.0f, 0.0f);
        glVertex3f(-0.5f,-0.5f,0.0f);
        glTexCoord2f(0.0f, 1.0f);
        glVertex3f(0.5f,-0.5f,0.0f);
        glTexCoord2f(1.0f, 0.0f);
        glVertex3f(-0.5f,0.5f,0.0f);
        
        glTexCoord2f(0.0f, 1.0f);
        glVertex3f(0.5f,-0.5f,0.0f);
        glTexCoord2f(1.0f, 1.0f);
        glVertex3f(0.5f,0.5f,0.0f);
        glTexCoord2f(1.0f, 0.0f);
        glVertex3f(-0.5f,0.5f,0.0f);
        
        glEnd();
        
        SDL_GL_SwapBuffers();
    }
}
void initGL() {
    glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
    glEnable(GL_DEPTH_TEST);
    glEnable(GL_TEXTURE_2D);
    glCullFace(GL_FRONT);
    glShadeModel(GL_SMOOTH);
    
    
    glViewport(0, 0, 800, 600);
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    gluPerspective(60.0, (float)800/(float)600, 1, 100);
}

void initContext() {
    //initialize SDL
    if(SDL_Init(SDL_INIT_VIDEO)!=0) {
        fprintf(stderr, "Error initializing SDL video: %s",SDL_GetError());
        return;
    }
    //Initialize OpenGL attributes in SDL
    SDL_GL_SetAttribute(SDL_GL_DEPTH_SIZE, 16);
    SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1);
    SDL_GL_SetAttribute(SDL_GL_ACCELERATED_VISUAL, 1);
    //create drawing surface
    unsigned int flags = SDL_OPENGL;
    SDL_Surface * surface = SDL_SetVideoMode(800, 600, 0, flags);
    if(surface==NULL) {
        fprintf(stderr, "Error setting SDL video mode: %s",SDL_GetError());
        SDL_Quit();
        return;
    }
    
    int hwaccel = -1;
    SDL_GL_GetAttribute(SDL_GL_ACCELERATED_VISUAL, &hwaccel);
    printf("hwaccel: %d\n", hwaccel);
}

void initTexture(const char * imagePath) {
    SDL_Surface * image = IMG_Load(imagePath);
    if(image==NULL) {
        fprintf(stderr, "Error loading texture: %s\n", IMG_GetError());
        return;
    }
    
    glGenTextures(1, &textureId);
    glBindTexture(GL_TEXTURE_2D, textureId);
    
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    
    //pick the upload format from the surface's pixel layout
    unsigned int numberOfColors = image->format->BytesPerPixel;
    GLenum format;
    if(numberOfColors==4)
        format=GL_RGBA;
    else
        format=GL_RGB;
    
    glTexImage2D(GL_TEXTURE_2D, 0, format, image->w, image->h, 0, format, GL_UNSIGNED_BYTE, image->pixels);
    SDL_FreeSurface(image);
}

void processEvents() {
    SDL_Event event;
    while(SDL_PollEvent(&event)) {
        switch(event.type) {
            case SDL_KEYDOWN:
                if(event.key.keysym.sym==SDLK_ESCAPE)
                    running=0;
                break;
            case SDL_QUIT:
                running=0;
                break;
        }
    }
}

void animate() {
    unsigned int currentTime = SDL_GetTicks();
    unsigned int second = currentTime/1000;
    if(second!=lastSecond) {
        fps=fpsCounter;
        fpsCounter=1;
        lastSecond = second;
        char str[200];
        sprintf(str, "FPS: %d",fps);
        SDL_WM_SetCaption(str, NULL);
    } else {
        ++fpsCounter;
    }
    
    double seconds = ((double)currentTime)/1000.0;
    anim = (seconds)*360.0/4;
}

int SDL_main(int argc, char *argv[])
{
    const char * texture;
    if(argc!=2) {
        //printf("usage: ./sdldebug <path to texture>");
        //exit(1);
        texture = "/Users/bruno/Desktop/texture_sdl/skybox.png";
    } else {
        texture = argv[1];
    }
    initContext();
    initGL();
    initTexture(texture);
    renderLoop();
    // Cleanup
    SDL_Quit();
    
    return 0;
}

Any help would be really appreciated, as I am kind of lost ...

B.
Sage
Posts: 1,234
Joined: 2002.10
Post: #2
You're trying to use a non-power-of-two (NPOT) texture with the default REPEAT wrapping, on hardware that can't do that.

The Radeon X1600 really only supports ARB_texture_rectangle. The driver will handle the coordinate normalization for you when you use 2D NPOT textures, but if you use anything that can't be done with rectangle textures (like mipmaps or REPEAT wrapping), you fall back to software.

See ATI's explanation.

Use CLAMP_TO_EDGE wrapping and LINEAR filtering, and it will run in hardware.
Or just use rectangle textures.
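
For example, a minimal sketch of that fix applied to the initTexture() above (the two GL_TEXTURE_WRAP_* calls are the addition; everything else matches the original code):

Code:
    glBindTexture(GL_TEXTURE_2D, textureId);
    //explicitly clamp instead of the default GL_REPEAT, which forces the SW fallback for NPOT textures
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    //keep non-mipmapped LINEAR filtering, since mipmaps also trigger the fallback
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);

The rectangle-texture route works too, but remember that rectangle textures use pixel coordinates rather than normalized 0..1 texture coordinates, so the glTexCoord2f calls would need adjusting.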
Nibbie
Posts: 2
Joined: 2008.10
Post: #3
Resizing the texture to 512x512 indeed did the job!
Thank you very much!!
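
For anyone who hits the same thing, a quick sanity check on the image dimensions right after loading can catch it early. A small sketch (isPowerOfTwo is a hypothetical helper, not part of the code above):

Code:
//hypothetical helper: true if n is a positive power of two (e.g. 256, 512)
int isPowerOfTwo(int n) {
    return n > 0 && (n & (n - 1)) == 0;
}

//in initTexture(), right after IMG_Load:
if(!isPowerOfTwo(image->w) || !isPowerOfTwo(image->h)) {
    fprintf(stderr, "Warning: %dx%d is not power-of-two; this may fall back to software rendering\n",
            image->w, image->h);
}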
