From: Ricardo Cruz
Date: Thu, 10 Jun 2004 15:50:01 +0000 (+0000)
Subject: Ignore the gradient Surface (cache) when in OpenGL (as asked by Ryan).
X-Git-Url: https://git.verplant.org/?a=commitdiff_plain;h=7579564e01bdace2bcb99dffe8df9a2ccda84ab1;p=supertux.git

Ignore the gradient Surface (cache) when in OpenGL (as asked by Ryan).

SVN-Revision: 1463
---

diff --git a/src/background.cpp b/src/background.cpp
index 65d7be70a..830c42296 100644
--- a/src/background.cpp
+++ b/src/background.cpp
@@ -104,8 +104,12 @@ void
 Background::draw(DrawingContext& context)
 {
   if(type == GRADIENT) {
-    context.draw_surface(image, Vector(0, 0), LAYER_BACKGROUND0);
-//  context.draw_gradient(gradient_top, gradient_bottom, LAYER_BACKGROUND0);
+    /* In case we are using OpenGL just draw the gradient, else (software mode)
+       use the cache. */
+    if(use_gl)
+      context.draw_gradient(gradient_top, gradient_bottom, LAYER_BACKGROUND0);
+    else
+      context.draw_surface(image, Vector(0, 0), LAYER_BACKGROUND0);
   } else if(type == IMAGE) {
     int sx = int(-context.get_translation().x * speed) % image->w - image->w;
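
For context, a minimal stand-alone sketch of the pattern this hunk applies: pick the drawing path at draw time depending on whether the OpenGL backend is active. The Renderer, Surface and Color types and the draw_background helper below are hypothetical stand-ins, not SuperTux's DrawingContext API; use_gl is assumed to be a flag meaning "the OpenGL renderer is in use", as it appears in the diff.

// Sketch only: hypothetical stand-ins for the engine types used in the diff.
#include <cstdio>

struct Color { float r, g, b; };

// Stand-in for a pre-rendered gradient kept around for the software renderer.
struct Surface {
    int w, h;
};

struct Renderer {
    bool use_gl;  // true when the OpenGL backend is active

    void draw_gradient(const Color& top, const Color& bottom) {
        std::printf("GL: shade from (%.1f,%.1f,%.1f) to (%.1f,%.1f,%.1f)\n",
                    top.r, top.g, top.b, bottom.r, bottom.g, bottom.b);
    }
    void draw_surface(const Surface& cached) {
        std::printf("software: blit cached %dx%d gradient surface\n",
                    cached.w, cached.h);
    }
};

// Mirrors the branch added in Background::draw(): OpenGL shades the gradient
// directly each frame, while the software path reuses the cached Surface.
void draw_background(Renderer& r, const Surface& cache,
                     const Color& top, const Color& bottom) {
    if (r.use_gl)
        r.draw_gradient(top, bottom);
    else
        r.draw_surface(cache);
}

int main() {
    Surface cache{640, 480};
    Color top{0.0f, 0.0f, 0.3f}, bottom{0.0f, 0.0f, 0.8f};

    Renderer gl{true}, software{false};
    draw_background(gl, cache, top, bottom);        // gradient drawn by OpenGL
    draw_background(software, cache, top, bottom);  // cached surface blitted
    return 0;
}

The point of the branch, going by the commit message, is that the pre-rendered gradient Surface only pays off for the software renderer; with OpenGL the gradient can be shaded cheaply every frame, so the cache is simply ignored there.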