I am a beginner in OpenGL.
I created a prism (each face is an equilateral triangle) in Android using the OpenGL library, and I am able to rotate the prism successfully.
My requirement is to put three different images on the faces of the prism, but I am not able to do so. When I apply an image, it is scaled and mapped onto all of the faces.
MyRenderer Class
public class MyRenderer implements Renderer {
/* Rotation values for all axes */
private float xrot; //X Rotation ( NEW )
private float yrot; //Y Rotation ( NEW )
private float zrot; //Z Rotation ( NEW )
/** The Activity Context ( NEW ) */
private Context context;
private Pyramid pyramid;
/**
* Instantiate the Pyramid object and set
* the Activity Context handed over
*/
public MyRenderer(Context context) {
this.context = context;
pyramid = new Pyramid(this.context);
}
/**
* The Surface is created/init()
*/
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
//Textures would ideally be loaded once here, during surface creation
gl.glEnable(GL10.GL_TEXTURE_2D); //Enable Texture Mapping ( NEW )
gl.glShadeModel(GL10.GL_SMOOTH); //Enable Smooth Shading
gl.glClearColor(1.0f, 1.0f, 1.0f, 0.5f); //White background
gl.glClearDepthf(1.0f); //Depth Buffer Setup
gl.glEnable(GL10.GL_DEPTH_TEST); //Enables Depth Testing
gl.glDepthFunc(GL10.GL_LEQUAL); //The Type Of Depth Testing To Do
//Really Nice Perspective Calculations
gl.glHint(GL10.GL_PERSPECTIVE_CORRECTION_HINT, GL10.GL_NICEST);
}
/**
* Here we do our drawing
*/
public void onDrawFrame(GL10 gl) {
//Clear Screen And Depth Buffer
gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
gl.glLoadIdentity(); //Reset The Current Modelview Matrix
//Drawing
gl.glTranslatef(0.0f, -1.0f, -5.0f); //Move down 1 unit and 5 units into the screen
gl.glScalef(1.0f, 1.0f, 1.0f); //No scaling (identity scale)
//Rotate around the axis based on the rotation values (angle, x, y, z)
gl.glRotatef(yrot, 0.0f, 1.65f, 0.0f); //Rotate around the Y axis
pyramid.draw(gl, context);
yrot += 1.0f;
}
/**
* If the surface changes, reset the view
*/
public void onSurfaceChanged(GL10 gl, int width, int height) {
if(height == 0) { //Prevent A Divide By Zero By
height = 1; //Making Height Equal One
}
gl.glViewport(0, 0, width, height); //Reset The Current Viewport
gl.glMatrixMode(GL10.GL_PROJECTION); //Select The Projection Matrix
gl.glLoadIdentity(); //Reset The Projection Matrix
//Calculate The Aspect Ratio Of The Window
GLU.gluPerspective(gl, 45.0f, (float)width / (float)height, 0.1f, 100.0f);
gl.glMatrixMode(GL10.GL_MODELVIEW); //Select The Modelview Matrix
gl.glLoadIdentity(); //Reset The Modelview Matrix
}
}
Pyramid class
public class Pyramid {
/** The buffer holding the vertices */
private FloatBuffer vertexBuffer;
/** The buffer holding the color values */
private FloatBuffer colorBuffer;
private ByteBuffer indexBuffer;
private FloatBuffer textureBuffer;
private int noOfFaces = 3;
private int[] texturesID = new int[3];
private float PyramidVertices [] = {
0.0f, 1.65f, 0.0f,
-1.3f, 0.0f, 1.0f,
1.3f, 0.0f, 1.0f,
0.0f, 0.0f, -1.65f,
};
private float textures[] = {
//Mapping coordinates for the vertices
0.0f, 1.65f,
0.0f, 1.65f,
-1.3f, 0.0f,
1.3f, 0.0f,
};
private float colors[] = {
1.0f, 0.0f, 0.0f, 1.0f, //Red
0.0f, 1.0f, 0.0f, 1.0f, //Green
0.0f, 0.0f, 1.0f, 1.0f, //Blue
1.0f, 0.0f, 0.0f, 1.0f, //Red
};
private byte indices [] = { 0, 2, 1,
0, 2, 3,
0, 1, 3,
};
/**
* The Pyramid constructor.
*
* Initiate the buffers.
*/
public Pyramid( Context context) {
//
ByteBuffer byteBuf = ByteBuffer.allocateDirect(PyramidVertices.length * 4);
byteBuf.order(ByteOrder.nativeOrder());
vertexBuffer = byteBuf.asFloatBuffer();
vertexBuffer.put(PyramidVertices);
vertexBuffer.position(0);
byteBuf = ByteBuffer.allocateDirect(colors.length * 4);
byteBuf.order(ByteOrder.nativeOrder());
colorBuffer = byteBuf.asFloatBuffer();
colorBuffer.put(colors);
colorBuffer.position(0);
indexBuffer = ByteBuffer.allocateDirect(indices.length);
indexBuffer.put(indices);
indexBuffer.position(0);
byteBuf = ByteBuffer.allocateDirect(textures.length * 4);
byteBuf.order(ByteOrder.nativeOrder());
textureBuffer = byteBuf.asFloatBuffer();
textureBuffer.put(textures);
textureBuffer.position(0);
}
/**
* The object's own drawing function.
* Called from the renderer to redraw this instance
* with possible changes in values.
*
* @param gl - The GL Context
*/
public void draw(GL10 gl, Context context) {
//Set the face rotation
// gl.glFrontFace(GL10.GL_CW);
gl.glCullFace(GL10.GL_CCW);
gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
// gl.glColorPointer(4, GL10.GL_FLOAT, 0, colorBuffer);
loadTexture(gl, context);
gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, textureBuffer);
gl.glEnable(GL10.GL_TEXTURE_2D);
// Enable the texture state
gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
// Point to our buffers
gl.glVertexPointer(3, GL10.GL_FLOAT, 0, vertexBuffer);
gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
gl.glDrawElements(GL10.GL_TRIANGLES, indices.length, GL10.GL_UNSIGNED_BYTE, indexBuffer);
//Disable the client state before leaving
gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
gl.glDisableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
}
public void loadTexture(GL10 gl, Context context) {
Bitmap bitmap;
gl.glGenTextures(3, texturesID, 0); // Generate a texture-ID array for 3 IDs
gl.glBindTexture(GL10.GL_TEXTURE_2D, texturesID[2]);
InputStream is = context.getResources().openRawResource(R.drawable.forward);
try {
//BitmapFactory is an Android graphics utility for images
bitmap = BitmapFactory.decodeStream(is);
} finally {
//Always clear and close
try {
is.close();
is = null;
} catch (IOException e) {
}
}
// Generate OpenGL texture images
// Create linear filtered texture
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER,
GL10.GL_LINEAR);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER,
GL10.GL_LINEAR);
// Different possible texture parameters, e.g. GL10.GL_CLAMP_TO_EDGE
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_S,
GL10.GL_CLAMP_TO_EDGE);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_T,
GL10.GL_REPEAT);
// Build texture from the loaded bitmap for the currently bound texture ID
GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, bitmap, 0);
}
}
I took help from:
http://www3.ntu.edu.sg/home/ehchua/programming/android/Android_3D.html
http://nehe.gamedev.net/
How can I put a different image on each face?
There are a few ways of doing this, but here are the two simplest for you to try:
Modify object to your needs
It's quite tricky to use several textures at a time and select between them per face, but a simple solution is to split your pyramid into a few separate objects. You can then assign a different texture to each object as you like; a rough sketch of the idea follows.
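For illustration, in the same GL10 style as your code (faceTextureIds and faceBuffers are placeholder names, not from your classes; it assumes each face has its own vertex buffer and an already-loaded texture):
// Draw each face as its own small vertex array, binding its own texture first
for (int face = 0; face < 3; face++) {
    gl.glBindTexture(GL10.GL_TEXTURE_2D, faceTextureIds[face]); // this face's texture
    gl.glVertexPointer(3, GL10.GL_FLOAT, 0, faceBuffers[face]); // this face's 3 vertices
    gl.glDrawArrays(GL10.GL_TRIANGLES, 0, 3);                   // draw one triangle
}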
Modify texture to your needs
You can use a technique known as a Texture Atlas. With this approach you take a few textures and stitch them together into one bigger bitmap, which you then use as your single texture. You also need to modify your vertices' UVs so that one triangle samples one part of the big texture while another triangle samples a different part. This way different triangles appear to use totally different images, even though only one real texture is bound during rendering.
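For example, if the three face images were stitched side by side into one bitmap (just an assumption; any layout works as long as the UVs match it), the per-face texture coordinates might look roughly like this:
// One third of the atlas width per face: face 0 samples u in [0, 1/3],
// face 1 samples [1/3, 2/3], face 2 samples [2/3, 1]; v runs 0..1 as usual.
private float atlasTexCoords[] = {
    // face 0: apex, base-left, base-right
    1f/6f, 0.0f,   0.0f,  1.0f,   1f/3f, 1.0f,
    // face 1
    0.5f,  0.0f,   1f/3f, 1.0f,   2f/3f, 1.0f,
    // face 2
    5f/6f, 0.0f,   2f/3f, 1.0f,   1.0f,  1.0f,
};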
Let us know if you need more details about it.
Thanks for the response.
I found the solution to my problem. I created a single equilateral triangle and then, by applying the proper rotation angle, I rotate it and draw it once per face, binding a different texture each time.
PyramidNew class
public class PyramidNew {
int []texturesID = new int[3];
Bitmap []bitmap = new Bitmap[3];
private FloatBuffer textureBuffer;
private FloatBuffer vertexBuffer; // Buffer for vertex-array
private float[][] colors = { // Per-face colors (unused in this class)
{1.0f, 0.5f, 0.0f, 1.0f}, // 0. orange
{1.0f, 0.0f, 1.0f, 1.0f}, // 1. violet
{0.0f, 1.0f, 0.0f, 1.0f}, // 2. green
{0.0f, 0.0f, 1.0f, 1.0f}, // 3. blue
{1.0f, 0.0f, 0.0f, 1.0f}, // 4. red
{1.0f, 1.0f, 0.0f, 1.0f} // 5. yellow
};
/* private float[] vertices = { // Vertices for the front face
-1.5f, 0.0f, 0.86f, // 0. left-bottom-front
1.5f, 0.0f, 0.86f, // 1. right-bottom-front
0.0f, 1.86f, 0.0f, // 2. left-top-front
// 3. right-top-front
};*/
private float[] vertices = { // Vertices for the front face
-1.0f, 0.0f, 0.86f, // 0. left-bottom-front
1.0f, 0.0f, 0.86f, // 1. right-bottom-front
0.0f, 1.86f, 0.0f, // 2. left-top-front
// 3. right-top-front
};
private float textures[] = {
//Mapping coordinates for the vertices
0.0f, 0.0f,
0.0f, 1.0f,
1.0f, 1.0f,
};
// Constructor - Set up the buffers
public PyramidNew( Context context) {
// Setup vertex-array buffer. Vertices in float. An float has 4 bytes
System.out.println("calling Pyramid:::::::::");
ByteBuffer vbb = ByteBuffer.allocateDirect(vertices.length * 4);
vbb.order(ByteOrder.nativeOrder()); // Use native byte order
vertexBuffer = vbb.asFloatBuffer(); // Convert from byte to float
vertexBuffer.put(vertices); // Copy data into buffer
vertexBuffer.position(0); // Rewind
ByteBuffer byteBuf = ByteBuffer.allocateDirect(textures.length * 4);
byteBuf.order(ByteOrder.nativeOrder());
textureBuffer = byteBuf.asFloatBuffer();
textureBuffer.put(textures);
textureBuffer.position(0);
InputStream stream1 = context.getResources().openRawResource(R.drawable.splash_screen);
InputStream stream2 = context.getResources().openRawResource(R.drawable.bg);
InputStream stream3 = context.getResources().openRawResource(R.drawable.bg1);
try {
//BitmapFactory is an Android graphics utility for images
bitmap[0] = BitmapFactory.decodeStream(stream1);
bitmap[1] = BitmapFactory.decodeStream(stream2);
bitmap[2] = BitmapFactory.decodeStream(stream3);
} finally {
//Always clear and close
try {
stream1.close();
stream2.close();
stream3.close();
stream1 = stream2 = stream3 = null;
} catch (IOException e) {
}
}
}
// Draw the textured pyramid
public void draw(GL10 gl, Context context) {
gl.glFrontFace(GL10.GL_CCW); // Front face in counter-clockwise orientation
gl.glEnable(GL10.GL_CULL_FACE); // Enable cull face
gl.glCullFace(GL10.GL_BACK); // Cull the back face (don't display)
gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
gl.glVertexPointer(3, GL10.GL_FLOAT, 0, vertexBuffer);
gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
//load and bind the first texture for the first face
loadTexture(gl, context, 0);
gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, 3);
// Rotate 120 degrees about the y-axis and put the second texture on the next face
gl.glRotatef(120.0f, 0.0f, 1.0f, 0.0f);
//gl.glColor4f(colors[1][0], colors[1][1], colors[1][2], colors[1][3]);
loadTexture(gl, context, 1);
gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, 3);
// Back - Rotate another 120 degree about y-axis and then put different texture
gl.glRotatef(120.0f, 0.0f, 1.0f, 0.0f);
//gl.glColor4f(colors[2][0], colors[2][1], colors[2][2], colors[2][3]);
loadTexture(gl, context, 2);
gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, 3);
gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
gl.glDisable(GL10.GL_CULL_FACE);
}
public void loadTexture(GL10 gl, Context context, int currentImage) {
// Bitmap []bitmap = new Bitmap[3];
gl.glGenTextures(3, texturesID, 0); // Generate 3 texture IDs (note: this runs on every draw call; ideally textures would be generated and uploaded only once)
gl.glBindTexture(GL10.GL_TEXTURE_2D, texturesID[currentImage]);
// Generate OpenGL texture images
// Create linear filtered texture
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER,
GL10.GL_LINEAR);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER,
GL10.GL_LINEAR);
// Different possible texture parameters, e.g. GL10.GL_CLAMP_TO_EDGE
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_S,
GL10.GL_CLAMP_TO_EDGE);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_T,
GL10.GL_REPEAT);
// Build texture from the loaded bitmap for the currently bound texture ID
GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, bitmap[currentImage], 0);
gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, textureBuffer);
gl.glEnable(GL10.GL_TEXTURE_2D);
// Enable the texture state
gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
// Point to our buffers
gl.glVertexPointer(3, GL10.GL_FLOAT, 0, vertexBuffer);
}
}
I'm running this LWJGL application:
Display.setDisplayMode(new DisplayMode(500, 500));
Display.create();
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glOrtho(-5, 5, -5, 5, -10, 5);
glMatrixMode(GL_MODELVIEW);
float x = 0;
while (!Display.isCloseRequested()) {
Display.update();
glClear(GL_DEPTH_BUFFER_BIT | GL_COLOR_BUFFER_BIT);
glEnable(GL_DEPTH_TEST);
glColor3f(1, 0, 0);
x += 0.01f;
glRotatef(x, x, 3 * x, 0.5f * x);
glBegin(GL_QUADS);
drawCube();
glEnd();
glLoadIdentity();
}
Display.destroy();
This basically draws a 1x1x1 cube in the window. The drawCube() method is:
public void drawCube() {
glColor3f(0.0f, 1.0f, 0.0f); // Set The Color To Green
glVertex3f(1.0f, 1.0f, 0f); // Top Right Of The Quad (Top)
glVertex3f(0f, 1.0f, 0f); // Top Left Of The Quad (Top)
glVertex3f(0f, 1.0f, 1.0f); // Bottom Left Of The Quad (Top)
glVertex3f(1.0f, 1.0f, 1.0f); // Bottom Right Of The Quad (Top)
glColor3f(1.0f, 0.5f, 0.0f); // Set The Color To Orange
glVertex3f(1.0f, 0f, 1.0f); // Top Right Of The Quad (Bottom)
glVertex3f(0f, 0f, 1.0f); // Top Left Of The Quad (Bottom)
glVertex3f(0f, 0f, 0f); // Bottom Left Of The Quad (Bottom)
glVertex3f(1.0f, 0f, 0f); // Bottom Right Of The Quad (Bottom)
glColor3f(1.0f, 0.0f, 0.0f); // Set The Color To Red
glVertex3f(1.0f, 1.0f, 1.0f); // Top Right Of The Quad (Front)
glVertex3f(0f, 1.0f, 1.0f); // Top Left Of The Quad (Front)
glVertex3f(0f, 0f, 1.0f); // Bottom Left Of The Quad (Front)
glVertex3f(1.0f, 0f, 1.0f); // Bottom Right Of The Quad (Front)
glColor3f(1.0f, 1.0f, 0.0f); // Set The Color To Yellow
glVertex3f(1.0f, 0f, 0f); // Bottom Left Of The Quad (Back)
glVertex3f(0f, 0f, 0f); // Bottom Right Of The Quad (Back)
glVertex3f(0f, 1.0f, 0f); // Top Right Of The Quad (Back)
glVertex3f(1.0f, 1.0f, 0f); // Top Left Of The Quad (Back)
glColor3f(0.0f, 0.0f, 1.0f); // Set The Color To Blue
glVertex3f(0f, 1.0f, 1.0f); // Top Right Of The Quad (Left)
glVertex3f(0f, 1.0f, 0f); // Top Left Of The Quad (Left)
glVertex3f(0f, 0f, 0f); // Bottom Left Of The Quad (Left)
glVertex3f(0f, 0f, 1.0f); // Bottom Right Of The Quad (Left)
glColor3f(1.0f, 0.0f, 1.0f); // Set The Color To Violet
glVertex3f(1.0f, 1.0f, 0f); // Top Right Of The Quad (Right)
glVertex3f(1.0f, 1.0f, 1.0f); // Top Left Of The Quad (Right)
glVertex3f(1.0f, 0f, 1.0f); // Bottom Left Of The Quad (Right)
glVertex3f(1.0f, 0f, 0f); // Bottom Right Of The Quad (Right)
}
It outputs this:
The edges look pretty jagged to me. Does LWJGL have anti-aliasing? If so, how can I enable it?
For anti-aliasing, you can use multisampling. To enable it, pass a PixelFormat parameter to Display.create(). Here's an example:
Display.create(new PixelFormat(/*Alpha bits*/ 8, /*Depth bits*/ 8, /*Stencil bits*/ 0, /*Samples*/ 8));
I'm not sure what the first three parameters do - these are values I found here. I've also seen people use Display.create(new PixelFormat(8, 0, 0, 8)).
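Putting it together with your existing setup, a minimal sketch might look like this (the explicit glEnable call is usually redundant, since multisampling is typically on by default when the context has sample buffers; GL13 is org.lwjgl.opengl.GL13):
Display.setDisplayMode(new DisplayMode(500, 500));
Display.create(new PixelFormat(8, 8, 0, 8)); // alpha, depth, stencil, samples
glEnable(GL13.GL_MULTISAMPLE);               // explicit toggle, normally on already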
NOTE: If you set the AA sample count too high (or pass other invalid values) it will throw an LWJGLException.
Hope this helps.
PS apologies for the late answer.
Comments
Antialiasing can be achieved by creating a multisample framebuffer, either in a framebuffer object or through the default framebuffer when the LWJGL window is created for the first time.
If you are learning LWJGL and OpenGL, then learn how to use VBOs, because glBegin, glVertex, etc. have been removed from the core profile of OpenGL.
VBO Example
Here is a little example of a VBO storing Vertices and Texture Coordinates for two triangles and rendering it!
I assume that you know how to load and bind textures already.
Creating the VBO
This is the code where you create the actual vertex and texture-coordinate buffers and upload them to the GPU.
int vertices = 6;
int vertex_size = 3; // X, Y, Z,
int texture_size = 2; // U, V,
FloatBuffer vertex_data = BufferUtils.createFloatBuffer(vertices * vertex_size);
vertex_data.put(new float[] { -1f, 1f, 0f, }); // Vertex
vertex_data.put(new float[] { 1f, 1f, 0f, }); // Vertex
vertex_data.put(new float[] { -1f, -1f, 0f, }); // Vertex
vertex_data.put(new float[] { 1f, -1f, 0f, }); // Vertex
vertex_data.put(new float[] { -1f, -1f, 0f, }); // Vertex
vertex_data.put(new float[] { 1f, 1f, 0f, }); // Vertex
FloatBuffer texture_data = BufferUtils.createFloatBuffer(vertices * texture_size);
texture_data.put(new float[] { 0f, 1f, }); // Texture Coordinate
texture_data.put(new float[] { 1f, 1f, }); // Texture Coordinate
texture_data.put(new float[] { 0f, 0f, }); // Texture Coordinate
texture_data.put(new float[] { 1f, 0f, }); // Texture Coordinate
texture_data.put(new float[] { 0f, 0f, }); // Texture Coordinate
texture_data.put(new float[] { 1f, 1f, }); // Texture Coordinate
vertex_data.flip();
texture_data.flip();
int vbo_vertex_handle = glGenBuffers();
glBindBuffer(GL_ARRAY_BUFFER, vbo_vertex_handle);
glBufferData(GL_ARRAY_BUFFER, vertex_data, GL_STATIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, 0);
int vbo_texture_handle = glGenBuffers();
glBindBuffer(GL_ARRAY_BUFFER, vbo_texture_handle);
glBufferData(GL_ARRAY_BUFFER, texture_data, GL_STATIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, 0);
Rendering the VBO
Then when you want to render the VBO, you need to do the following.
texture.bind();
glBindBuffer(GL_ARRAY_BUFFER, vbo_vertex_handle);
glVertexPointer(vertex_size, GL_FLOAT, 0, 0l);
glBindBuffer(GL_ARRAY_BUFFER, vbo_texture_handle);
glTexCoordPointer(texture_size, GL_FLOAT, 0, 0l);
glEnableClientState(GL_VERTEX_ARRAY);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
glDrawArrays(GL_TRIANGLES, 0, vertices); // 'vertices' is the total vertex count, 6 in this case
glDisableClientState(GL_TEXTURE_COORD_ARRAY);
glDisableClientState(GL_VERTEX_ARRAY);
glBindBuffer(GL_ARRAY_BUFFER, 0);
texture.unbind();
Deleting the VBO
Then when you're done with the VBO and you don't need it anymore, you can delete it by doing the following.
glDeleteBuffers(vbo_vertex_handle);
glDeleteBuffers(vbo_texture_handle);
LWJGL/OpenGL - Static Imports
import static org.lwjgl.opengl.GL11.*;
import static org.lwjgl.opengl.GL15.*;
Info
You only have to create a VBO once (creating one every frame is really bad). For example, if you use a VBO to store your terrain, update the VBO only when the terrain actually changes; otherwise just keep it as it is. If the terrain is really huge, split it into separate chunks, each with its own VBO. A sketch of such an in-place update follows.
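A minimal sketch of updating part of an existing VBO instead of recreating it (changed_vertex_data is a placeholder FloatBuffer that has already been filled and flipped):
glBindBuffer(GL_ARRAY_BUFFER, vbo_vertex_handle);
glBufferSubData(GL_ARRAY_BUFFER, 0, changed_vertex_data); // overwrite starting at byte offset 0
glBindBuffer(GL_ARRAY_BUFFER, 0);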
Extra
If you have a Wavefront OBJ model and you want to render it multiple times, the best way to do that is to load the whole model into one VBO and then use instancing to render it at multiple positions.
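Just as a taste (the details are in the tutorials below), the draw-call side of instancing looks roughly like this; it requires OpenGL 3.1+ and a shader that positions each instance, e.g. via gl_InstanceID, and modelVertexCount / instanceCount are placeholders:
// From org.lwjgl.opengl.GL31
GL31.glDrawArraysInstanced(GL_TRIANGLES, 0, modelVertexCount, instanceCount);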
Instancing Tutorial 1
Instancing Tutorial 2
Update
A little example of a VBO storing Vertices and Colors for a Triangle and rendering it!
Creating the VBO.
This is the code where you create the actual vertex and color buffers and upload them into VBOs.
int vertices = 3;
int vertex_size = 3; // X, Y, Z,
int color_size = 3; // R, G, B,
FloatBuffer vertex_data = BufferUtils.createFloatBuffer(vertices * vertex_size);
vertex_data.put(new float[] { -1f, -1f, 0f, });
vertex_data.put(new float[] { 1f, -1f, 0f, });
vertex_data.put(new float[] { 1f, 1f, 0f, });
vertex_data.flip();
FloatBuffer color_data = BufferUtils.createFloatBuffer(vertices * color_size);
color_data.put(new float[] { 1f, 0f, 0f, });
color_data.put(new float[] { 0f, 1f, 0f, });
color_data.put(new float[] { 0f, 0f, 1f, });
color_data.flip();
int vbo_vertex_handle = glGenBuffers();
glBindBuffer(GL_ARRAY_BUFFER, vbo_vertex_handle);
glBufferData(GL_ARRAY_BUFFER, vertex_data, GL_STATIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, 0);
int vbo_color_handle = glGenBuffers();
glBindBuffer(GL_ARRAY_BUFFER, vbo_color_handle);
glBufferData(GL_ARRAY_BUFFER, color_data, GL_STATIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, 0);
Rendering the VBO.
This is the code you need to call, to render the VBO.
glBindBuffer(GL_ARRAY_BUFFER, vbo_vertex_handle);
glVertexPointer(vertex_size, GL_FLOAT, 0, 0l);
glBindBuffer(GL_ARRAY_BUFFER, vbo_color_handle);
glColorPointer(color_size, GL_FLOAT, 0, 0l);
glEnableClientState(GL_VERTEX_ARRAY);
glEnableClientState(GL_COLOR_ARRAY);
glDrawArrays(GL_TRIANGLES, 0, vertices);
glDisableClientState(GL_COLOR_ARRAY);
glDisableClientState(GL_VERTEX_ARRAY);
The imports and the code for disposing/deleting the buffers are the same as the previous example!
Update 2 - Complete Example
Okay, so here is a complete example of a VBO containing Vertices and Colors. The example is 100% LWJGL only!
When I run the following code, this is the result I get:
import static org.lwjgl.opengl.GL11.*;
import static org.lwjgl.opengl.GL15.*;
import java.nio.FloatBuffer;
import org.lwjgl.BufferUtils;
import org.lwjgl.input.Keyboard;
import org.lwjgl.opengl.Display;
import org.lwjgl.opengl.DisplayMode;
public class VBOTest
{
public final static void main(String[] args)
{
int width = 1280;
int height = 720;
try
{
Display.setTitle("VBO Test");
Display.setDisplayMode(new DisplayMode(width, height));
Display.create();
}
catch (Exception ex)
{
ex.printStackTrace();
System.exit(0);
}
/*
* Initialize OpenGL States
*/
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glOrtho(0f, width, height, 0f, -1f, 1f);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
glEnable(GL_DEPTH_TEST);
glDepthFunc(GL_LEQUAL);
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glEnable(GL_CULL_FACE);
glCullFace(GL_BACK);
glHint(GL_PERSPECTIVE_CORRECTION_HINT, GL_NICEST);
/*
* Creating the Vertex & Color VBO
*/
final int VERTEX_SIZE = 3; // X, Y, Z,
final int COLOR_SIZE = 4; // R, G, B, A,
int vertices = 6;
int vbo_vertex_handle;
int vbo_color_handle;
FloatBuffer vertex_data = BufferUtils.createFloatBuffer(vertices * VERTEX_SIZE);
float half_width = 200f;
float half_height = 200f;
vertex_data.put(new float[] { -half_width, -half_height, 0f, });
vertex_data.put(new float[] { -half_width, half_height, 0f, });
vertex_data.put(new float[] { half_width, -half_height, 0f, });
vertex_data.put(new float[] { half_width, half_height, 0f, });
vertex_data.put(new float[] { half_width, -half_height, 0f, });
vertex_data.put(new float[] { -half_width, half_height, 0f, });
vertex_data.flip();
FloatBuffer color_data = BufferUtils.createFloatBuffer(vertices * COLOR_SIZE);
color_data.put(new float[] { 1f, 0f, 0f, 1f, });
color_data.put(new float[] { 1f, 0f, 1f, 1f, });
color_data.put(new float[] { 1f, 1f, 0f, 1f, });
color_data.put(new float[] { 0f, 1f, 0f, 1f, });
color_data.put(new float[] { 1f, 1f, 0f, 1f, });
color_data.put(new float[] { 1f, 0f, 1f, 1f, });
color_data.flip();
vbo_vertex_handle = glGenBuffers();
glBindBuffer(GL_ARRAY_BUFFER, vbo_vertex_handle);
glBufferData(GL_ARRAY_BUFFER, vertex_data, GL_STATIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, 0);
vbo_color_handle = glGenBuffers();
glBindBuffer(GL_ARRAY_BUFFER, vbo_color_handle);
glBufferData(GL_ARRAY_BUFFER, color_data, GL_STATIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, 0);
/*
* Main Rendering Loop
*/
boolean running = true;
while (running)
{
running = (!Display.isCloseRequested() && !Keyboard.isKeyDown(Keyboard.KEY_ESCAPE));
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glLoadIdentity();
glPushMatrix();
{
glTranslatef(width / 2f, height / 2f, 0f);
glBindBuffer(GL_ARRAY_BUFFER, vbo_vertex_handle);
glVertexPointer(VERTEX_SIZE, GL_FLOAT, 0, 0l);
glBindBuffer(GL_ARRAY_BUFFER, 0);
glBindBuffer(GL_ARRAY_BUFFER, vbo_color_handle);
glColorPointer(COLOR_SIZE, GL_FLOAT, 0, 0l);
glBindBuffer(GL_ARRAY_BUFFER, 0);
glEnableClientState(GL_VERTEX_ARRAY);
glEnableClientState(GL_COLOR_ARRAY);
glDrawArrays(GL_TRIANGLES, 0, vertices);
glDisableClientState(GL_COLOR_ARRAY);
glDisableClientState(GL_VERTEX_ARRAY);
}
glPopMatrix();
glFlush();
Display.sync(60);
Display.update();
}
/*
* Dispose Elements
*/
glDeleteBuffers(vbo_vertex_handle);
glDeleteBuffers(vbo_color_handle);
Display.destroy();
System.exit(0);
}
}
I started drawing my quad, but when I was playing around with the vertices I noticed that the X coordinates are flipped. Here's a picture to show what I mean:
Here are my vertices, indices, and texture coordinates.
static final int COORDS_PER_VERTEX = 3;
static float positionCoords[] = {
-0.5f, 0.5f, 0.0f, // top left
-0.5f, -0.5f, 0.0f, // bottom left
0.5f, -0.5f, 0.0f, // bottom right
0.5f, 0.5f, 0.0f }; // top right
static final int COORDS_PER_TEXTURE = 2;
static float textureCoords[] = {
0.0f, 0.0f,
0.0f, 1.0f,
1.0f, 1.0f,
1.0f, 0.0f, };
private final short indices[] = { 0, 1, 2 };
And this is where I change the Projection and View Matrices.
public void onSurfaceChanged(GL10 naGl, int width, int height)
{
Log.d(TAG, "GL Surface Changed - Setting Up View");
GLES20.glViewport(0, 0, width, height);
float ratio = (float) width / height;
Matrix.frustumM(ProjectionMatrix, 0, -ratio, ratio, -1, 1, 3, 7);
Matrix.setLookAtM(ViewMatrix, 0, 0, 0, -3, 0f, 0f, 0f, 0f, 1.0f, 0.0f);
}
Why is it being drawn 'backwards'? I also thought that my camera might be behind the object, so that in 3D space left would be positive when viewed from behind.
You are indeed looking at the object from the reverse side.
Your lookAt call is placing the eye at (0, 0, -3) and the look-at point at (0, 0, 0). By default the negative z axis points into the screen, but you are looking at the scene from the reverse direction (towards the positive z axis).
You should put your eye at (0, 0, 3), looking toward (0, 0, 0), to get the view that you expect.
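In terms of the code from the question, that is a one-value change (same ViewMatrix array, only the eye Z flipped):
Matrix.setLookAtM(ViewMatrix, 0, 0f, 0f, 3f, 0f, 0f, 0f, 0f, 1.0f, 0.0f);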
You may find this chapter of the Red Book informative.
This probably has something to do with my transformations, but right now I can't figure it out and it is driving me insane. I have wrapped the draw code so that I can easily define new triangles. However, when I put it into a function, it just shows a grey screen. The function code is as follows:
public void Draw(float[] mViewMatrix, float[] mModelMatrix, float[] mProjectionMatrix, int mPositionHandle, int mColorHandle, int mMVPMatrixHandle)
{
long time = SystemClock.uptimeMillis() % 10000L;
float angleInDegrees = (360.0f / 10000.0f) * ((int) time);
Matrix.setIdentityM(mModelMatrix, 0);
Matrix.rotateM(mModelMatrix, 0, angleInDegrees, 0.0f, 0.0f, 1.0f);
aBuffer = ByteBuffer.allocateDirect(verts.length * mBytesPerFloat)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
//aBuffer.position(mPositionOffset);
GLES20.glVertexAttribPointer(mPositionHandle, mPositionDataSize, GLES20.GL_FLOAT, false,
mStrideBytes, aBuffer);
GLES20.glEnableVertexAttribArray(mPositionHandle);
// Pass in the color information
aBuffer.position(mColorOffset);
GLES20.glVertexAttribPointer(mColorHandle, mColorDataSize, GLES20.GL_FLOAT, false,
mStrideBytes, aBuffer);
GLES20.glEnableVertexAttribArray(mColorHandle);
// This multiplies the view matrix by the model matrix, and stores the result in the MVP matrix
// (which currently contains model * view).
Matrix.multiplyMM(mMVPMatrix, 0, mViewMatrix, 0, mModelMatrix, 0);
// This multiplies the modelview matrix by the projection matrix, and stores the result in the MVP matrix
// (which now contains model * view * projection).
Matrix.multiplyMM(mMVPMatrix, 0, mProjectionMatrix, 0, mMVPMatrix, 0);
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mMVPMatrix, 0);
GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, 3);
}
The code which IS working is:
public void onDrawFrame(GL10 glUnused)
{
GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
// Do a complete rotation every 10 seconds.
long time = SystemClock.uptimeMillis() % 10000L;
float angleInDegrees = (360.0f / 10000.0f) * ((int) time);
// Draw the triangle facing straight on.
Matrix.setIdentityM(mModelMatrix, 0);
Matrix.rotateM(mModelMatrix, 0, angleInDegrees, 0.0f, 0.0f, 1.0f);
drawTriangle(mTriangle1Vertices);
// Draw one translated a bit down and rotated to be flat on the ground.
Matrix.setIdentityM(mModelMatrix, 0);
Matrix.translateM(mModelMatrix, 0, 0.0f, -1.0f, 0.0f);
Matrix.rotateM(mModelMatrix, 0, 90.0f, 1.0f, 0.0f, 0.0f);
Matrix.rotateM(mModelMatrix, 0, angleInDegrees, 0.0f, 0.0f, 1.0f);
drawTriangle(mTriangle2Vertices);
// Draw one translated a bit to the right and rotated to be facing to the left.
Matrix.setIdentityM(mModelMatrix, 0);
Matrix.translateM(mModelMatrix, 0, 1.0f, 0.0f, 0.0f);
Matrix.rotateM(mModelMatrix, 0, 90.0f, 0.0f, 1.0f, 0.0f);
Matrix.rotateM(mModelMatrix, 0, angleInDegrees, 0.0f, 0.0f, 1.0f);
drawTriangle(mTriangle3Vertices);
/*
for (int x = 0; x < staticHolder.objectList.size(); x++)
{
staticHolder.objectList.get(x).Draw(mViewMatrix, mModelMatrix, mProjectionMatrix, mPositionHandle, mColorHandle, mMVPMatrixHandle);
}
*/
}
/**
* Draws a triangle from the given vertex data.
*
* @param aTriangleBuffer The buffer containing the vertex data.
*/
private void drawTriangle(final FloatBuffer aTriangleBuffer)
{
// Pass in the position information
aTriangleBuffer.position(mPositionOffset);
GLES20.glVertexAttribPointer(mPositionHandle, mPositionDataSize, GLES20.GL_FLOAT, false,
mStrideBytes, aTriangleBuffer);
GLES20.glEnableVertexAttribArray(mPositionHandle);
// Pass in the color information
aTriangleBuffer.position(mColorOffset);
GLES20.glVertexAttribPointer(mColorHandle, mColorDataSize, GLES20.GL_FLOAT, false,
mStrideBytes, aTriangleBuffer);
GLES20.glEnableVertexAttribArray(mColorHandle);
// This multiplies the view matrix by the model matrix, and stores the result in the MVP matrix
// (which currently contains model * view).
Matrix.multiplyMM(mMVPMatrix, 0, mViewMatrix, 0, mModelMatrix, 0);
// This multiplies the modelview matrix by the projection matrix, and stores the result in the MVP matrix
// (which now contains model * view * projection).
Matrix.multiplyMM(mMVPMatrix, 0, mProjectionMatrix, 0, mMVPMatrix, 0);
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mMVPMatrix, 0);
GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, 3);
}
I am passing in the same variables, and the final variables used here are initialized the same way. There is some other work that happens in the function for encapsulation. Any idea why it refuses to render when called through this function?
The following code loads the objects in the list:
final float[] triangle1VerticesData = {
// X, Y, Z,
// R, G, B, A
-0.5f, -0.25f, 0.0f,
1.0f, 0.0f, 0.0f, 1.0f,
0.5f, -0.25f, 0.0f,
0.0f, 0.0f, 1.0f, 1.0f,
0.0f, 0.559016994f, 0.0f,
0.0f, 1.0f, 0.0f, 1.0f};
final float[] triangle2VerticesData = {
// X, Y, Z,
// R, G, B, A
-0.5f, -0.25f, 0.0f,
1.0f, 1.0f, 0.0f, 1.0f,
0.5f, -0.25f, 0.0f,
0.0f, 1.0f, 1.0f, 1.0f,
0.0f, 0.559016994f, 0.0f,
1.0f, 0.0f, 1.0f, 1.0f};
// This triangle is white, gray, and black.
final float[] triangle3VerticesData = {
// X, Y, Z,
// R, G, B, A
-0.5f, -0.25f, 0.0f,
1.0f, 1.0f, 1.0f, 1.0f,
0.5f, -0.25f, 0.0f,
0.5f, 0.5f, 0.5f, 1.0f,
0.0f, 0.559016994f, 0.0f,
0.0f, 0.0f, 0.0f, 1.0f};
staticHolder.objectList.add(new Triangle(triangle1VerticesData));
staticHolder.objectList.add(new Triangle(triangle2VerticesData));
staticHolder.objectList.add(new Triangle(triangle3VerticesData));
The receiving class is:
public class Triangle extends shape
{
public Triangle(float[] data)
{
verts = data;
}
}
After the following bit of code:
aBuffer = ByteBuffer.allocateDirect(verts.length * mBytesPerFloat).order(ByteOrder.nativeOrder()).asFloatBuffer();
You must put the vertices into the buffer (otherwise, it's blank!):
aBuffer.put(verts);
The reason this isn't in the code that works is that those three vertex buffers are pre-allocated, and the vertices are put into them at initialization. They are simply passed to the method each time, so they don't have to be put() in again.
On that note, you will want to avoid allocations in your Draw method, as it runs every frame (once per object) and repeated allocation can slow rendering down. Allocate aBuffer once, and put new vertices into it each time.
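A minimal sketch of that, reusing the fields from the question (allocate and fill once, e.g. in the Triangle constructor, then rewind and reuse every frame):
aBuffer = ByteBuffer.allocateDirect(verts.length * mBytesPerFloat)
        .order(ByteOrder.nativeOrder()).asFloatBuffer();
aBuffer.put(verts);   // copy the vertex/color data into the buffer
aBuffer.position(0);  // rewind so the glVertexAttribPointer calls read from the start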
I'm creating an application for Android using OpenGL ES.
I created a rectangle using the following vertices:
private float vertices[] = {
-1.0f, 0.5f, 0.0f, // 0, Top Left
-1.0f, -0.5f, 0.0f, // 1, Bottom Left
1.0f, -0.5f, 0.0f, // 2, Bottom Right
1.0f, 0.5f, 0.0f, // 3, Top Right
};
private short[] indices = { 0, 1, 2, 0, 2, 3 };
How do I find the location in pixels of this rectangle?
It depends on your viewport, projection, and model-view matrices. The clip-space position of a vertex is calculated as projectionMatrix * modelviewMatrix * vertex; after the perspective divide, the viewport transform maps the result to pixel coordinates. A small sketch is shown after the links below.
You can find some useful explanations here:
http://robertokoci.com/world-view-projection-matrix-unveiled/
http://db-in.com/blog/2011/04/cameras-on-opengl-es-2-x/
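As a concrete sketch of the math (assuming you keep your projection and model-view matrices in float[16] arrays, e.g. built with android.opengl.Matrix, and know your viewport size; modelViewMatrix, projectionMatrix, viewportWidth, and viewportHeight are placeholder names):
float[] vertex = { -1.0f, 0.5f, 0.0f, 1.0f };   // "Top Left" vertex, w = 1
float[] eye  = new float[4];
float[] clip = new float[4];
Matrix.multiplyMV(eye,  0, modelViewMatrix,  0, vertex, 0);  // model-view transform
Matrix.multiplyMV(clip, 0, projectionMatrix, 0, eye,    0);  // projection transform
// Perspective divide -> normalized device coordinates in [-1, 1]
float ndcX = clip[0] / clip[3];
float ndcY = clip[1] / clip[3];
// Viewport transform -> pixel coordinates (y flipped so 0 is at the top of the screen)
float pixelX = (ndcX * 0.5f + 0.5f) * viewportWidth;
float pixelY = (1.0f - (ndcY * 0.5f + 0.5f)) * viewportHeight;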