Simple JOGL game running very slowly on a GTX 470 - java

I've been making a game for my computer science class. For simplicity, I've just been making a set of minigames. For fun, I tried to make a version of the classic Snake game in 3D. The physics and collision detection work fine, and on the school computers (medium-quality Macs) the game runs very smoothly. However, on my home computer it runs at 8 fps. My home computer has a GTX 470 with the latest drivers, and a query in the program confirms that the code is running on the GTX 470 with OpenGL 4.2.
Here's the render code (running in a GLCanvas):
GL2 gl = drawable.getGL().getGL2();
/*System.out.println(gl.glGetString(GL.GL_VENDOR)+"\n"+
gl.glGetString(GL.GL_RENDERER)+"\n"+
gl.glGetString(GL.GL_VERSION));*/
gl.glClear(GL.GL_COLOR_BUFFER_BIT | GL.GL_DEPTH_BUFFER_BIT);
//Init camera
gl.glMatrixMode(GL2.GL_PROJECTION);
gl.glLoadIdentity();
// Perspective.
float widthHeightRatio = (float) getWidth() / (float) getHeight();
glu.gluPerspective(75, widthHeightRatio, 1, 2000);
double dX, dY, dZ;
if (player.locs.size()==0)
{
    dX=0.1*player.vel.x;
    dY=0.1*player.vel.y;
    dZ=0.1*player.vel.z;
}
else
{
    dX=player.xHead-player.locs.get(0).x;
    dY=player.yHead-player.locs.get(0).y;
    dZ=player.zHead-player.locs.get(0).z;
}
player.up.normalizeDist();
double xPos=4*dX-0.1*player.up.x;
double yPos=4*dY-0.1*player.up.y;
double zPos=4*dZ-0.1*player.up.z;
double desiredDist=0.2;
double totalDist=Math.sqrt(xPos*xPos+yPos*yPos+zPos*zPos);
xPos=xPos*desiredDist/totalDist;
yPos=yPos*desiredDist/totalDist;
zPos=zPos*desiredDist/totalDist;
double camX=player.xHead-xPos;
double camY=player.yHead-yPos;
double camZ=player.zHead-zPos;
glu.gluLookAt(xWidth*(camX), yWidth*(camY),zWidth*(camZ), xWidth*(player.xHead+2*dX), yWidth*(player.yHead+2*dY), zWidth*(player.zHead+2*dZ), player.up.x, player.up.y, -player.up.z);
// Change back to model view matrix.
gl.glMatrixMode(GL2.GL_MODELVIEW);
gl.glLoadIdentity();
float SHINE_ALL_DIRECTIONS = 1;
float[] lightPos = {xWidth/2, yWidth/2, zWidth/2, SHINE_ALL_DIRECTIONS};
float[] lightColorAmbient = {0.2f, 0.2f, 0.2f, 0.2f};
float[] lightColorSpecular = {0.8f, 0.8f, 0.8f, 0.8f};
// Set light parameters.
gl.glLightfv(GL2.GL_LIGHT1, GL2.GL_POSITION, lightPos, 0);
gl.glLightfv(GL2.GL_LIGHT1, GL2.GL_AMBIENT, lightColorAmbient, 0);
gl.glLightfv(GL2.GL_LIGHT1, GL2.GL_SPECULAR, lightColorSpecular, 0);
// Enable lighting in GL.
gl.glEnable(GL2.GL_LIGHT1);
gl.glEnable(GL2.GL_LIGHTING);
// Set material properties.
float[] rgba = {1f, 1f, 1f};
gl.glMaterialfv(GL2.GL_FRONT, GL2.GL_AMBIENT, rgba, 0);
gl.glMaterialfv(GL2.GL_FRONT, GL2.GL_SPECULAR, rgba, 0);
gl.glMaterialf(GL2.GL_FRONT, GL2.GL_SHININESS, 0.5f);
/*gl.glMaterialfv(GL.GL_BACK, GL.GL_AMBIENT, rgba, 0);
gl.glMaterialfv(GL.GL_BACK, GL.GL_SPECULAR, rgba, 0);
gl.glMaterialf(GL.GL_BACK, GL.GL_SHININESS, 0.5f);*/
// gl.glColor3f(1f,1f,1f);
if (camX>0)
{
    gl.glBegin(GL2.GL_POLYGON);
    gl.glNormal3d(1, 0, 0);
    gl.glVertex3d(0, 0, 0);
    gl.glVertex3d(0, 0, zWidth);
    gl.glVertex3d(0, yWidth, zWidth);
    gl.glVertex3d(0, yWidth, 0);
    gl.glEnd();
}
if (camY>0)
{
    gl.glBegin(GL2.GL_POLYGON);
    gl.glNormal3d(0, 1, 0);
    gl.glVertex3d(0, 0, 0);
    gl.glVertex3d(0, 0, zWidth);
    gl.glVertex3d(xWidth, 0, zWidth);
    gl.glVertex3d(xWidth, 0, 0);
    gl.glEnd();
}
if (camZ>0)
{
    gl.glBegin(GL2.GL_POLYGON);
    gl.glNormal3d(0, 0, 1);
    gl.glVertex3d(0, 0, 0);
    gl.glVertex3d(xWidth, 0, 0);
    gl.glVertex3d(xWidth, yWidth, 0);
    gl.glVertex3d(0, yWidth, 0);
    gl.glEnd();
}
if (camX<1)
{
    gl.glBegin(GL2.GL_POLYGON);
    gl.glNormal3d(-1, 0, 0);
    gl.glVertex3d(xWidth, 0, 0);
    gl.glVertex3d(xWidth, 0, zWidth);
    gl.glVertex3d(xWidth, yWidth, zWidth);
    gl.glVertex3d(xWidth, yWidth, 0);
    gl.glEnd();
}
if (camY<1)
{
    gl.glBegin(GL2.GL_POLYGON);
    gl.glNormal3d(0, -1, 0);
    gl.glVertex3d(0, yWidth, 0);
    gl.glVertex3d(0, yWidth, zWidth);
    gl.glVertex3d(xWidth, yWidth, zWidth);
    gl.glVertex3d(xWidth, yWidth, 0);
    gl.glEnd();
}
if (camZ<1)
{
    gl.glBegin(GL2.GL_POLYGON);
    gl.glNormal3d(0, 0, 1);
    gl.glVertex3d(0, 0, zWidth);
    gl.glVertex3d(xWidth, 0, zWidth);
    gl.glVertex3d(xWidth, yWidth, zWidth);
    gl.glVertex3d(0, yWidth, zWidth);
    gl.glEnd();
}
player.draw(xWidth, yWidth, zWidth, drawable, glu);
for (int i=0; i<bullets.size(); i++)
{
    bullets.get(i).draw(drawable, glu, xWidth, yWidth, zWidth);
}
for (int i=0; i<basicEntities.size(); i++)
{
    basicEntities.get(i).draw(xWidth, yWidth, zWidth, drawable, glu);
}
And then there are many copy-pasted calls to code like this (xHead, yHead, and zHead are coordinates):
GL gl=drawable.getGL();
GL2 gl2=gl.getGL2();
gl2.glPushMatrix();
gl2.glTranslated(xHead*xWidth, yHead*yWidth, zHead*zWidth);
float[] rgba = {0.3f, 0.5f, 1f};
gl2.glMaterialfv(GL.GL_FRONT, GL2.GL_AMBIENT, rgba, 0);
gl2.glMaterialfv(GL.GL_FRONT, GL2.GL_SPECULAR, rgba, 0);
gl2.glMaterialf(GL.GL_FRONT, GL2.GL_SHININESS, 0.5f);
GLUquadric head = glu.gluNewQuadric();
glu.gluQuadricDrawStyle(head, GLU.GLU_FILL);
glu.gluQuadricNormals(head, GLU.GLU_FLAT);
glu.gluQuadricOrientation(head, GLU.GLU_OUTSIDE);
final float radius = (float) (dotSize*xWidth);
final int slices = 32;
final int stacks = 32;
glu.gluSphere(head, radius, slices, stacks);
glu.gluDeleteQuadric(head);
gl2.glPopMatrix();
Edit: I can get the game to run faster by reducing the number of slices and stacks in the quadrics, but this makes the game rather ugly.
Also, I removed the a.add(this) call (from the animator) and the game still runs. Was I animating everything twice? It's still slow, though.

I can't fully explain why it runs so much better on your school computers, but you are using OpenGL in an ancient way that is terrible for performance.
Drawing with glBegin will always be very expensive, because every single vertex is sent to the driver as a separate API call. You should instead look into rendering with Vertex Arrays (good) or Vertex Buffer Objects (better in most cases). Using these will require a slight shift in thinking, but I'm sure you can find many tutorials using those search terms.
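For a sense of what that looks like in JOGL, here is a minimal VBO sketch; the interleaved layout and names like wallVbo are illustrative placeholders (it bakes the x=0 wall of the box as two triangles), not code from the game:
// One-time setup (e.g. in init()): upload interleaved normal + position data.
float[] wallData = {
    // nx, ny, nz,  x,  y,              z
    1, 0, 0,  0, 0,              0,
    1, 0, 0,  0, 0,              (float) zWidth,
    1, 0, 0,  0, (float) yWidth, (float) zWidth,
    1, 0, 0,  0, 0,              0,
    1, 0, 0,  0, (float) yWidth, (float) zWidth,
    1, 0, 0,  0, (float) yWidth, 0,
};
int[] wallVbo = new int[1];
gl.glGenBuffers(1, wallVbo, 0);
gl.glBindBuffer(GL.GL_ARRAY_BUFFER, wallVbo[0]);
// Buffers is com.jogamp.common.nio.Buffers; 4 bytes per float.
gl.glBufferData(GL.GL_ARRAY_BUFFER, wallData.length * 4L,
                Buffers.newDirectFloatBuffer(wallData), GL.GL_STATIC_DRAW);
gl.glBindBuffer(GL.GL_ARRAY_BUFFER, 0);

// Every frame: a couple of state changes and one draw call,
// instead of one API call per vertex.
gl.glBindBuffer(GL.GL_ARRAY_BUFFER, wallVbo[0]);
gl.glEnableClientState(GL2.GL_NORMAL_ARRAY);
gl.glEnableClientState(GL2.GL_VERTEX_ARRAY);
int stride = 6 * 4;                                // 6 floats per vertex
gl.glNormalPointer(GL.GL_FLOAT, stride, 0);        // normal first in each vertex
gl.glVertexPointer(3, GL.GL_FLOAT, stride, 3 * 4); // position after the normal
gl.glDrawArrays(GL.GL_TRIANGLES, 0, 6);
gl.glDisableClientState(GL2.GL_VERTEX_ARRAY);
gl.glDisableClientState(GL2.GL_NORMAL_ARRAY);
gl.glBindBuffer(GL.GL_ARRAY_BUFFER, 0);
All six walls (and any other static geometry) can go into the same buffer, so the per-frame cost collapses to a single glDrawArrays.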
I'm also no expert on what GLU does internally, but your use of gluSphere and the GLU quadrics makes me suspicious too. Most of the work of the GLU functions is probably not executed on the graphics card, so every time you call gluSphere the CPU may have to recompute all the vertices of the sphere before the GPU can do anything. A much better solution would be to generate your own list of sphere vertices once, upload it to the GPU as a VBO, and then just issue that draw call any time you want to draw a sphere. That should save a huge amount of computation time.
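If generating raw sphere vertex data feels like too big a change right away, a smaller step in the same fixed-function style (a display list; my suggestion, not something from your code) is to bake the gluSphere tessellation once and replay it every frame, instead of creating and deleting a quadric per sphere per frame:
// Build once, e.g. in init(): bake the tessellated unit sphere.
int sphereList = gl2.glGenLists(1);
gl2.glNewList(sphereList, GL2.GL_COMPILE);
GLUquadric quad = glu.gluNewQuadric();
glu.gluQuadricNormals(quad, GLU.GLU_SMOOTH);
glu.gluSphere(quad, 1.0, 32, 32); // unit radius; scale at draw time
glu.gluDeleteQuadric(quad);
gl2.glEndList();
gl2.glEnable(GL2.GL_NORMALIZE);   // keep normals unit length under glScaled

// Per sphere, per frame: position and scale the baked geometry.
gl2.glPushMatrix();
gl2.glTranslated(xHead * xWidth, yHead * yWidth, zHead * zWidth);
gl2.glScaled(radius, radius, radius);
gl2.glCallList(sphereList);
gl2.glPopMatrix();
Display lists are as old-fashioned as glBegin, but they move the tessellation cost to startup, which is usually the biggest win in a case like this.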

Related

LWJGL OpenGL won't draw 2D texture

I'm trying to create some code for loading and drawing 2D textures in LWJGL. Here is my code for drawing:
glfwShowWindow(window);
GL.createCapabilities();
loadTextures();
glClearColor(1f, 1f, 1f, 1f);
while (!glfwWindowShouldClose(window))
{
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    //draw
    glEnable(GL_BLEND);
    glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
    glPushMatrix();
    glTranslatef(100, 100, 0);
    glBindTexture(GL_TEXTURE_2D, testTexture);
    glBegin(GL_QUADS);
    {
        glTexCoord2f(0, 0);
        glVertex2f(0, 0);
        glTexCoord2f(1, 0);
        glVertex2f(TEXTURE_WIDTH, 0);
        glTexCoord2f(1, 1);
        glVertex2f(TEXTURE_WIDTH, TEXTURE_HEIGHT);
        glTexCoord2f(0, 1);
        glVertex2f(0, TEXTURE_HEIGHT);
    }
    glEnd();
    glPopMatrix();
    //end draw
    glfwSwapBuffers(window);
    glfwPollEvents();
}
glfwFreeCallbacks(window);
glfwDestroyWindow(window);
glfwTerminate();
glfwSetErrorCallback(null).free();
And this is my texture loading code:
try
{
    BufferedImage image = ImageIO.read(file);
    /*
    if (image.getType() != BufferedImage.TYPE_INT_ARGB)
    {
        throw new TextureException("Invalid image!");
    }
    */
    int[] pixels = new int[image.getWidth() * image.getHeight()];
    image.getRGB(0, 0, image.getWidth(), image.getHeight(), pixels, 0, image.getWidth());
    ByteBuffer byteBuffer = BufferUtils.createByteBuffer(image.getWidth() * image.getHeight() * 4);
    for (int x = 0; x < image.getWidth(); x++)
    {
        for (int y = 0; y < image.getHeight(); y++)
        {
            int pixel = pixels[y * image.getWidth() + x];
            byteBuffer.put((byte)((pixel >> 16) & 0xFF));
            byteBuffer.put((byte)((pixel >> 8) & 0xFF));
            byteBuffer.put((byte)(pixel & 0xFF));
            byteBuffer.put((byte)((pixel >> 24) & 0xFF));
        }
    }
    byteBuffer.flip();
    int textureID = glGenTextures();
    glBindTexture(GL_TEXTURE_2D, textureID);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL12.GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL12.GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, image.getWidth(), image.getHeight(), 0, GL_RGBA, GL_UNSIGNED_BYTE, byteBuffer);
    return textureID;
}
catch (Exception e)
{
    e.printStackTrace();
    throw new TextureException("Failed to load image!");
}
However, when I run this code, all I get is a white screen. I checked the value of testTexture and it was set to 1, so I assume that's the texture's ID, which makes me believe the loading worked; I think something is going wrong when drawing.
Two-dimensional texturing has to be enabled by glEnable and can be disabled by glDisable:
glEnable(GL_TEXTURE_2D);
If texturing is enabled, then the texture which is currently bound is applied when the geometry is drawn by the glBegin/glEnd sequences.
If you want to draw the geometry in window (pixel) coordinates, then you have to set an orthographic projection; this can be done with glOrtho.
If you don't set the orthographic projection, the vertex coordinates have to be in normalized device space, in the range [-1.0, 1.0].
In the following, windowWidth and windowHeight are assumed to be the width and height of the window:
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glOrtho(0.0, windowWidth, windowHeight, 0.0, -1.0, 1.0);
glMatrixMode(GL_MODELVIEW);
while (!glfwWindowShouldClose(window))
{
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    // [...]
}

LWJGL Texture not stretching to Quads

I have been trying for hours to get a Texture in LWJGL to stretch to a quad.
Here is the code I am using for the quad:
private static void renderLoad() {
    glClear(GL11.GL_COLOR_BUFFER_BIT | GL11.GL_DEPTH_BUFFER_BIT);
    texture.bind();
    glPushMatrix();
    {
        glBegin(GL_QUADS);
        {
            glTexCoord2f(0, 1);
            glVertex2f(0, 0); //Upper-left
            glTexCoord2f(1, 1);
            glVertex2f(Display.getWidth(), 0); //Upper-right
            glTexCoord2f(1, 0);
            glVertex2f(Display.getWidth(), Display.getHeight()); //Bottom-right
            glTexCoord2f(0, 0);
            glVertex2f(0, Display.getHeight()); //Bottom-left
        }
        glEnd();
    }
    glPopMatrix();
}
This is what the display looks like when I run it:
http://gyazo.com/376ddb0979c55226d2f63c26215a1e12
I am trying to make the image expand to the size of the window. The quad is the size of the window, but the texture does not seem to stretch.
Here is what it looks like if I do not use a texture and simply make the quad a color:
http://gyazo.com/65f21fe3efa2d3948de69b55d5c85424
If it helps, here is my main loop:
glMatrixMode(GL_PROJECTION);
glViewport(0, 0, displaySizeX, displaySizeY);
glLoadIdentity();
glOrtho(0, displaySizeX, 0, displaySizeY, 1, -1);
glMatrixMode(GL_MODELVIEW);
glEnable(GL_TEXTURE_2D);
texture = loadLoadingImage();
//This is the main loop for the game.
while (!Display.isCloseRequested()) {
    delta = getDelta();
    updateFPS();
    if (Display.wasResized()) {
        displaySizeX = Display.getWidth();
        displaySizeY = Display.getHeight();
        glViewport(0, 0, displaySizeX, displaySizeY);
        glMatrixMode(GL_PROJECTION);
        glLoadIdentity();
        glOrtho(0, displaySizeX, 0, displaySizeY, -1, 1);
    }
    render();
    checkInput();
    Display.update();
    Display.sync(sync);
}
cleanUp();
return true;
How do I make the image stretch to the quad?
public void stretch() {
    Color.white.bind();
    texture.bind();
    GL11.glBegin(GL11.GL_QUADS);
    GL11.glTexCoord2f(0, 0);
    GL11.glVertex2f(100, 100);
    GL11.glTexCoord2f(1, 0);
    GL11.glVertex2f(100 + texture.getTextureWidth(), 100);
    GL11.glTexCoord2f(1, 1);
    GL11.glVertex2f(100 + texture.getTextureWidth(), 100 + texture.getTextureHeight());
    GL11.glTexCoord2f(0, 1);
    GL11.glVertex2f(100, 100 + texture.getTextureHeight());
    GL11.glEnd(); // all the 0's were originally 100 but it was off-centered
}
texture = TextureLoader.getTexture("PNG",ResourceLoader.getResourceAsStream("res/texture.png"));
Try using this. This is usually how I do it.
Perhaps something is modifying the texture matrix. You could try adding a
glMatrixMode(GL_TEXTURE);
glLoadIdentity();
to see if that affects anything.

GL_LINE_STIPPLE doesn't work as it should

So this should draw a stippled triangle, but all I get are solid lines. Any ideas what might be causing this?
private void render(GLAutoDrawable drawable) {
    GL2 gl = drawable.getGL().getGL2();
    gl.glViewport(0, 0, 1000, 1000);
    gl.glClear(GL.GL_COLOR_BUFFER_BIT);
    short stipple = (short) 0xAAAA;
    gl.glLineStipple(1, stipple);
    gl.glEnable(GL_LINE_STIPPLE);
    gl.glBegin(GL.GL_LINE_STRIP);
    gl.glVertex3f(0f, 0.75f, 0);
    gl.glVertex3f(0.5f, -0.25f, 0);
    gl.glVertex3f(-0.5f, -0.25f, 0);
    gl.glVertex3f(0f, 0.75f, 0);
    gl.glEnd();
    gl.glDisable(GL_LINE_STIPPLE);
}

JOGL - Texture shows blue scale

I'm trying to make a game in Java OpenGL (JOGL), but I have a problem with textures.
When I draw a quad with a texture, the only thing I see is the image in blue scale.
The following is my code:
Texture grass;
// public void init() in Base class that implements GLEventListener
try {
    grass = TextureIO.newTexture(new File("src/com/jeroendonners/main/grass.png"), false);
} catch (Exception e) {
    e.printStackTrace();
}
To render a quad with this texture I use the following code:
int x = 0;
int y = 0;
gl.glColor3f(1f, 1f, 1f);
this.grass.bind(gl);
gl.glBegin(gl.GL_QUADS);
gl.glTexCoord2d(0, 0);
gl.glVertex3f(0, 0, 0);
gl.glTexCoord2d(1, 0);
gl.glVertex3f(1, 0, 0);
gl.glTexCoord2d(1, 1);
gl.glVertex3f(1, 1, 0);
gl.glTexCoord2d(0, 1);
gl.glVertex3f(0, 1, 0);
gl.glEnd();
I have read here that I have to use GL_BGR instead of the default GL_RGB, but since that question initializes its textures in a different way, I don't know what to do with that advice.
Maybe a note: I am using an old version of JOGL, 1.0 I think, because a course at school used that version.
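For context, the GL_BGR advice applies when pixel data is uploaded manually with glTexImage2D: the format parameter declares the byte order of the buffer, so labelling blue-green-red data as GL_BGR stops red and blue from being swapped. A minimal sketch, assuming a direct ByteBuffer of B,G,R bytes from your own loader (TextureIO, as used above, manages the format itself; in JOGL 2 the constant lives on GL2GL3 rather than GL):
// width, height and bgrBuffer are assumed to come from a manual image loader,
// not from TextureIO.
gl.glTexImage2D(GL.GL_TEXTURE_2D, 0, GL.GL_RGB, width, height, 0,
                GL.GL_BGR, GL.GL_UNSIGNED_BYTE, bgrBuffer);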

Java LWJGL Slick TTF blur

I'm having a bit of trouble rendering textures and TrueType fonts at the same time.
The following screenshots portray my problem:
This shows the font rendering perfectly. This was before I was using textures.
http://i.imgur.com/sUnoz.png
This shows the font after changing to textures. It goes all blurry.
http://i.imgur.com/gyhrZ.png
I'm not sure how to fix this. I've tried enabling and disabling various things, but I can't figure it out.
Here's my code:
GL initialization:
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glOrtho(0, Display.getWidth(), Display.getHeight(), 0, 1, -1);
glMatrixMode(GL_MODELVIEW);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glDisable(GL_DEPTH_TEST);
glClearColor(0, 0, 0, 0);
glEnable(GL_TEXTURE_2D);
Sprite Rendering:
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glPushMatrix();
glBindTexture(GL_TEXTURE_2D, this.textureID);
glBegin(GL_QUADS);
{
    glTexCoord2f(0, 0);
    glVertex2f(0, 0);
    glTexCoord2f(1, 0);
    glVertex2f(this.width, 0);
    glTexCoord2f(1, 1);
    glVertex2f(this.width, this.height);
    glTexCoord2f(0, 1);
    glVertex2f(0, this.height);
}
glEnd();
glPopMatrix();
Texture Loader:
public static int loadTexture(BufferedImage image) {
    int[] pixels = new int[image.getWidth() * image.getHeight()];
    image.getRGB(0, 0, image.getWidth(), image.getHeight(), pixels, 0, image.getWidth());
    ByteBuffer buffer = BufferUtils.createByteBuffer(image.getWidth() * image.getHeight() * BYTES_PER_PIXEL); // 4 for RGBA, 3 for RGB
    for (int y = 0; y < image.getHeight(); y++) {
        for (int x = 0; x < image.getWidth(); x++) {
            int pixel = pixels[y * image.getWidth() + x];
            buffer.put((byte) ((pixel >> 16) & 0xFF)); // Red component
            buffer.put((byte) ((pixel >> 8) & 0xFF));  // Green component
            buffer.put((byte) (pixel & 0xFF));         // Blue component
            buffer.put((byte) ((pixel >> 24) & 0xFF)); // Alpha component. Only for RGBA
        }
    }
    buffer.flip(); // FOR THE LOVE OF GOD DO NOT FORGET THIS
    // You now have a ByteBuffer filled with the color data of each pixel.
    // Now just create a texture ID and bind it. Then you can load it using
    // whatever OpenGL method you want, for example:
    int textureID = glGenTextures(); // Generate texture ID
    glBindTexture(GL_TEXTURE_2D, textureID); // Bind texture ID
    // Setup wrap mode
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL12.GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL12.GL_CLAMP_TO_EDGE);
    // Setup texture scaling filtering
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    // Send texel data to OpenGL
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, image.getWidth(), image.getHeight(), 0, GL_RGBA, GL_UNSIGNED_BYTE, buffer);
    // Return the texture ID so we can bind it later again
    return textureID;
}
I'm willing to give any more information if necessary.
Any help appreciated, thanks in advance.
I know this question was asked 6 months ago at the time of this answer, but I just want other people to know in case they run into the same problem.
I had the same problem when I was experimenting with rendering images, and the fix for me was to unbind the texture after I finished rendering:
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glPushMatrix();
glBindTexture(GL_TEXTURE_2D, this.textureID);
glBegin(GL_QUADS);
{
    glTexCoord2f(0, 0);
    glVertex2f(0, 0);
    glTexCoord2f(1, 0);
    glVertex2f(this.width, 0);
    glTexCoord2f(1, 1);
    glVertex2f(this.width, this.height);
    glTexCoord2f(0, 1);
    glVertex2f(0, this.height);
}
glEnd();
glPopMatrix();
glBindTexture(GL_TEXTURE_2D, 0);
