Drawing a sphere in OpenGL ES 2.0 - java

I'm trying to draw a sphere in OpenGL ES 2.0 on Android. I have already looked at the related questions and tried some of their code, but I still can't get it to work.
Based on the Android developer examples and this code found on gamedev.net, I came up with the code below. However, it is not drawing correctly: when using glDrawArrays() rendering works but the results are not correct, and when using glDrawElements() I get a GL_INVALID_OPERATION error. I listed the contents of my buffers below.
Sphere.java
public class Sphere
{
private int stacks;
private int slices;
private float radius;
//Buffers
private FloatBuffer vertexBuffer;
private FloatBuffer colorBuffer;
private ShortBuffer indexBuffer;
//Buffer sizes in number of bytes
private int vertexBufferSize;
private int colorBufferSize;
private int indexBufferSize;
private int vertexCount;
private int program;
static final int FLOATS_PER_VERTEX = 3; // The number of floats in a vertex (x, y, z)
static final int FLOATS_PER_COLOR = 4; // The number of floats in a color (r, g, b, a)
static final int SHORTS_PER_INDEX = 2;
static final int BYTES_PER_FLOAT = 4;
static final int BYTES_PER_SHORT = 2;
static final int BYTES_PER_VERTEX = FLOATS_PER_VERTEX * BYTES_PER_FLOAT;
static final int BYTES_PER_COLOR = FLOATS_PER_COLOR * BYTES_PER_FLOAT;
static final int BYTES_PER_INDEX_ENTRY = SHORTS_PER_INDEX * BYTES_PER_SHORT;
// Set color with red, green, blue and alpha (opacity) values
private float color[] = { 0.63671875f, 0.76953125f, 0.22265625f, 1.0f };
public Sphere(float radius, int stacks, int slices)
{
this.stacks = stacks;
this.slices = slices;
this.radius = radius;
vertexCount = (stacks+1) * (slices+1);
vertexBufferSize = vertexCount * BYTES_PER_VERTEX;
colorBufferSize = vertexCount * BYTES_PER_COLOR;
indexBufferSize = vertexCount * BYTES_PER_INDEX_ENTRY;
program = GLHelpers.createProgram();
if (program == 0) {
return;
}
GLHelpers.checkGlError("program");
// Setup vertex-array buffer. Vertices in float. A float has 4 bytes.
vertexBuffer = ByteBuffer.allocateDirect(vertexBufferSize).order(ByteOrder.nativeOrder()).asFloatBuffer();
colorBuffer = ByteBuffer.allocateDirect(colorBufferSize).order(ByteOrder.nativeOrder()).asFloatBuffer();
indexBuffer = ByteBuffer.allocateDirect(indexBufferSize).order(ByteOrder.nativeOrder()).asShortBuffer();
generateSphereCoords(radius, stacks, slices);
vertexBuffer.position(0);
colorBuffer.position(0);
indexBuffer.position(0);
}
public void draw(float[] modelViewProjectionMatrix)
{
GLES20.glUseProgram(program);
GLHelpers.checkGlError("useprogram");
int positionHandle = GLES20.glGetAttribLocation(program, "a_Position");
GLES20.glEnableVertexAttribArray(positionHandle);
GLES20.glVertexAttribPointer(positionHandle, 3, GLES20.GL_FLOAT, false, BYTES_PER_VERTEX, vertexBuffer);
GLHelpers.checkGlError("pos");
//int colorHandle = GLES20.glGetAttribLocation(program, "a_Color");
//GLES20.glEnableVertexAttribArray(colorHandle);
//GLES20.glVertexAttribPointer(colorHandle, 4, GLES20.GL_FLOAT, false, BYTES_PER_COLOR, colorBuffer);
//GLHelpers.checkGlError("color");
int matrixHandle = GLES20.glGetUniformLocation(program, "u_Matrix");
GLES20.glUniformMatrix4fv(matrixHandle, 1, false, modelViewProjectionMatrix, 0);
/*
* When using glDrawArrays rendering works but the results are not correct; when using glDrawElements I get a GL_INVALID_OPERATION error.
*/
GLES20.glDrawElements(GLES20.GL_TRIANGLE_STRIP, indexBuffer.capacity(), GLES20.GL_SHORT, indexBuffer);
//GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, vertexCount);
GLHelpers.checkGlError("draw");
// Disable vertex array
GLES20.glDisableVertexAttribArray(positionHandle);
//GLES20.glDisableVertexAttribArray(colorHandle);
}
private void generateSphereCoords(float radius, int stacks, int slices)
{
for (int stackNumber = 0; stackNumber <= stacks; ++stackNumber)
{
for (int sliceNumber = 0; sliceNumber < slices; ++sliceNumber)
{
float theta = (float) (stackNumber * Math.PI / stacks);
float phi = (float) (sliceNumber * 2 * Math.PI / slices);
float sinTheta = FloatMath.sin(theta);
float sinPhi = FloatMath.sin(phi);
float cosTheta = FloatMath.cos(theta);
float cosPhi = FloatMath.cos(phi);
vertexBuffer.put(new float[]{radius * cosPhi * sinTheta, radius * sinPhi * sinTheta, radius * cosTheta});
}
}
for (int stackNumber = 0; stackNumber < stacks; ++stackNumber)
{
for (int sliceNumber = 0; sliceNumber <= slices; ++sliceNumber)
{
indexBuffer.put((short) ((stackNumber * slices) + (sliceNumber % slices)));
indexBuffer.put((short) (((stackNumber + 1) * slices) + (sliceNumber % slices)));
}
}
}
}
GLHelpers.java
public class GLHelpers
{
private static final String TAG = "GLHelpers";
private static final String VERTEX_SHADER_CODE =
"uniform mat4 u_Matrix;" +
"attribute vec4 a_Position;" +
"attribute vec4 a_Color;" +
"varying vec4 v_Color;" +
"void main() {" +
" v_Color = a_Color;" +
" gl_Position = a_Position * u_Matrix;" +
"}";
private static final String FRAGMENT_SHADER_CODE =
"precision mediump float;" +
"varying vec4 v_Color;" +
"void main() {" +
" gl_FragColor = v_Color;" +
"}";
private static int loadShader(int shaderType, String source)
{
int shader = GLES20.glCreateShader(shaderType);
if (shader != 0) {
GLES20.glShaderSource(shader, source);
GLES20.glCompileShader(shader);
int[] compiled = new int[1];
GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
if (compiled[0] == 0)
{
Log.e(TAG, "Could not compile shader " + shaderType + ":");
Log.e(TAG, GLES20.glGetShaderInfoLog(shader));
GLES20.glDeleteShader(shader);
shader = 0;
}
}
return shader;
}
public static int createProgram()
{
int vertexShader = GLHelpers.loadShader(GLES20.GL_VERTEX_SHADER, GLHelpers.VERTEX_SHADER_CODE);
if (vertexShader == 0)
return 0;
int pixelShader = GLHelpers.loadShader(GLES20.GL_FRAGMENT_SHADER, GLHelpers.FRAGMENT_SHADER_CODE);
if (pixelShader == 0)
return 0;
int program = GLES20.glCreateProgram();
if (program != 0) {
GLES20.glAttachShader(program, vertexShader);
GLHelpers.checkGlError("glAttachShader");
GLES20.glAttachShader(program, pixelShader);
GLHelpers.checkGlError("glAttachShader");
GLES20.glLinkProgram(program);
int[] linkStatus = new int[1];
GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
if (linkStatus[0] != GLES20.GL_TRUE)
{
Log.e(TAG, "Could not link program: ");
Log.e(TAG, GLES20.glGetProgramInfoLog(program));
GLES20.glDeleteProgram(program);
program = 0;
}
}
return program;
}
public static void checkGlError(String glOperation)
{
int error;
while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR)
{
Log.e(TAG, glOperation + ": glError " + error);
throw new RuntimeException(glOperation + ": glError " + error);
}
}
}
The contents of vertexBuffer
X Y Z
0.0, 0.0, 1.0,
0.0, 0.0, 1.0,
-0.0, 0.0, 1.0,
-0.0, -0.0, 1.0,
0.0, -0.0, 1.0,
0.58778524, 0.0, 0.809017,
0.18163562, 0.559017, 0.809017,
-0.4755283, 0.34549147, 0.809017,
-0.4755282, -0.34549156, 0.809017,
0.18163571, -0.55901694, 0.809017,
0.95105654, 0.0, 0.30901697,
0.29389262, 0.90450853, 0.30901697,
-0.769421, 0.55901694, 0.30901697,
-0.76942086, -0.5590171, 0.30901697,
0.29389274, -0.9045085, 0.30901697,
0.9510565, 0.0, -0.30901703,
0.2938926, 0.9045085, -0.30901703,
-0.7694209, 0.5590169, -0.30901703,
-0.7694208, -0.55901706, -0.30901703,
0.29389274, -0.9045084, -0.30901703,
0.5877852, 0.0, -0.80901706,
0.1816356, 0.55901694, -0.80901706,
-0.47552824, 0.3454914, -0.80901706,
-0.47552818, -0.34549153, -0.80901706,
0.1816357, -0.5590169, -0.80901706,
-8.742278E-8, -0.0, -1.0,
-2.7015123E-8, -8.3144E-8, -1.0,
7.0726514E-8, -5.138581E-8, -1.0,
7.072651E-8, 5.138583E-8, -1.0,
-2.7015135E-8, 8.3143995E-8, -1.0,
0.0, 0.0, 0.0,
0.0, 0.0, 0.0,
0.0, 0.0, 0.0,
0.0, 0.0, 0.0,
0.0, 0.0, 0.0,
0.0, 0.0, 0.0
The contents of indexBuffer
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
The result when using glDrawArrays():

As promised, here is the working code for creating the sphere:
public static Model3D createSphere(float radius, int stacks, int slices)
{
int vertexCount = (stacks + 1) * (slices + 1);
FloatBuffer vertexBuffer = ByteBuffer.allocateDirect(vertexCount * GLHelpers.BYTES_PER_VERTEX).order(ByteOrder.nativeOrder()).asFloatBuffer();
FloatBuffer normalBuffer = ByteBuffer.allocateDirect(vertexCount * GLHelpers.BYTES_PER_NORMAL).order(ByteOrder.nativeOrder()).asFloatBuffer();
FloatBuffer textureCoordBuffer = ByteBuffer.allocateDirect(vertexCount * GLHelpers.BYTES_PER_TEXTURE_COORD).order(ByteOrder.nativeOrder()).asFloatBuffer();
ShortBuffer indexBuffer = ByteBuffer.allocateDirect(vertexCount * GLHelpers.BYTES_PER_TRIANGLE_INDEX).order(ByteOrder.nativeOrder()).asShortBuffer();
for (int stackNumber = 0; stackNumber <= stacks; ++stackNumber)
{
for (int sliceNumber = 0; sliceNumber <= slices; ++sliceNumber)
{
float theta = (float) (stackNumber * Math.PI / stacks);
float phi = (float) (sliceNumber * 2 * Math.PI / slices);
float sinTheta = FloatMath.sin(theta);
float sinPhi = FloatMath.sin(phi);
float cosTheta = FloatMath.cos(theta);
float cosPhi = FloatMath.cos(phi);
float nx = cosPhi * sinTheta;
float ny = cosTheta;
float nz = sinPhi * sinTheta;
float x = radius * nx;
float y = radius * ny;
float z = radius * nz;
float u = 1.f - ((float)sliceNumber / (float)slices);
float v = (float)stackNumber / (float)stacks;
normalBuffer.put(nx);
normalBuffer.put(ny);
normalBuffer.put(nz);
vertexBuffer.put(x);
vertexBuffer.put(y);
vertexBuffer.put(z);
textureCoordBuffer.put(u);
textureCoordBuffer.put(v);
}
}
for (int stackNumber = 0; stackNumber < stacks; ++stackNumber)
{
for (int sliceNumber = 0; sliceNumber < slices; ++sliceNumber)
{
int second = (sliceNumber * (stacks + 1)) + stackNumber;
int first = second + stacks + 1;
//int first = (stackNumber * slices) + (sliceNumber % slices);
//int second = ((stackNumber + 1) * slices) + (sliceNumber % slices);
indexBuffer.put((short) first);
indexBuffer.put((short) second);
indexBuffer.put((short) (first + 1));
indexBuffer.put((short) second);
indexBuffer.put((short) (second + 1));
indexBuffer.put((short) (first + 1));
}
}
vertexBuffer.rewind();
normalBuffer.rewind();
indexBuffer.rewind();
textureCoordBuffer.rewind();
Model3D sphere = new Model3D().setVertexBuffer(vertexBuffer)
.setNormalBuffer(normalBuffer)
.setIndexBuffer(indexBuffer)
.setTexture(R.drawable.earth)
.setTextureCoordBuffer(textureCoordBuffer)
.setDiffuseLighting(-3f, 2.3f, 2f);
return sphere;
}

You're setting vertexCount as lat * lon * bytes-per-float, which looks very odd to me.
I think you have misnamed this variable, since the number of vertices has nothing to do with the number of bytes per float.
You're then passing the same variable to glDrawArrays, so it will not reflect the correct number of vertices.
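As an illustration of the distinction (a sketch reusing the field names from the question, not the asker's final code): the byte sizes are derived from the vertex count and used only for buffer allocation, while the plain count is what glDrawArrays expects.
vertexCount = (stacks + 1) * (slices + 1);          // number of vertices, no byte sizes involved
vertexBufferSize = vertexCount * BYTES_PER_VERTEX;  // bytes, only used for allocateDirect()
colorBufferSize = vertexCount * BYTES_PER_COLOR;    // bytes, only used for allocateDirect()
// later, when drawing without indices:
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, vertexCount);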

Related

Android OpenGL ES2 Skeletal animation child bone matrix

If I have a geometry with a single bone I can render it with animation, but if it has 2 bones the vertices are not animated correctly. I load a JSON format exported from Blender. As I understand it, the child bone's relative matrix is multiplied by its parent's absolute matrix and then by its own relative matrix inverse; each bone matrix is then multiplied by the key frame matrix to get the final matrix, and these matrices are sent to the vertex shader.
I set the weight and bone index as attributes with the VBO.
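For reference, the conventional skinning chain is usually described as: build each bone's global bind matrix down the parent chain, store its inverse at load time, and per frame upload animatedGlobal * inverseBind to the shader. A rough sketch with android.opengl.Matrix, where parentBindGlobal, localBindMatrix, parentAnimGlobal and keyFrameMatrix are placeholder names (an assumption about the usual convention, not necessarily what this exporter produces):
// Load time: global bind = parent global bind * local bind, then keep the inverse
float[] bindGlobal = new float[16];
Matrix.multiplyMM(bindGlobal, 0, parentBindGlobal, 0, localBindMatrix, 0);
float[] invBind = new float[16];
Matrix.invertM(invBind, 0, bindGlobal, 0);
// Per frame: animated global = parent animated global * local key-frame matrix
float[] animGlobal = new float[16];
Matrix.multiplyMM(animGlobal, 0, parentAnimGlobal, 0, keyFrameMatrix, 0);
// Matrix uploaded to the vertex shader for this bone
float[] skinMatrix = new float[16];
Matrix.multiplyMM(skinMatrix, 0, animGlobal, 0, invBind, 0);
This is my bone-loading loop: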
for (int y = 0; y < g.getBones().size(); y++) {
com.thatulborooj.opengl.objects.Bone bone = new com.thatulborooj.opengl.objects.Bone();
Bone gBone = g.getBones().get(y);
float[] t = new float[] { gBone.getPosition().get(0), gBone.getPosition().get(0), gBone.getPosition().get(0) };
float[] r = new float[] { gBone.getRotation().get(0), gBone.getRotation().get(1), gBone.getRotation().get(2), gBone.getRotation().get(3) };
float[] s = gBone.getScale() == null ? new float[] { 1, 1, 1 } : new float[] { gBone.getScale().get(0), gBone.getScale().get(1), gBone.getScale().get(2) };
float[] nMatrix = createMat4(t, r, s);
float[] inverseMatrix = new float[16];
if (gBone.getParent() == -1) {
bone.setBindMatrix(nMatrix);
invertM(inverseMatrix, 0, nMatrix, 0);
float[] fMatrix = new float[16];
multiplyMM(fMatrix, 0, nMatrix, 0, inverseMatrix, 0);
bone.setFinalMatrix(fMatrix);
} else {
float[] pMatrix = mesh.getBones().get(gBone.getParent()).getBindMatrix();
float[] bMatrix = new float[16];
multiplyMM(bMatrix, 0, pMatrix, 0, nMatrix, 0);
bone.setBindMatrix(bMatrix);
invertM(inverseMatrix, 0, bMatrix, 0);
float[] fMatrix = new float[16];
multiplyMM(fMatrix, 0, bMatrix, 0, inverseMatrix, 0);
bone.setFinalMatrix(fMatrix);
}
bone.setName(gBone.getName());
bone.setParent(gBone.getParent());
mesh.getBones().add(bone);
}
}
This is the render code:
if (animated) {
if (playing) {
if (curFrame < frames) {
for (int i = 0; i < bones.size(); i++) {
Key key = animations.get(0).getHierarchy().get(i).getKeys().get(curFrame);
setIdentityM(aMatrix, 0);
multiplyMM(aMatrix, 0, bones.get(i).getFinalMatrix(), 0, key.getMatrix(), 0);
for (int q = 0; q < 16; q++) {
bs[i][q] = aMatrix[q];
}
}
curFrame++;
} else
curFrame = 0;
shaderProgram.setBones(bs);
}
}
This is the function which makes the matrix from position, quaternion and scale:
public static float[] createMat4(float[] t, float[] r, float[] s) {
float[] mat4 = new float[16];
float[] T = new float[16];
float[] R = quaternionToMatrix(r);
float[] S = new float[16];
setIdentityM(T, 0);
setIdentityM(S, 0);
translateM(T, 0, t[0], t[1], t[2]);
scaleM(S, 0, s[0], s[1], s[2]);
float[] temp = new float[16];
multiplyMM(temp, 0, T, 0, R, 0);
multiplyMM(mat4, 0, temp, 0, S, 0);
return mat4;
}
private static float[] quaternionToMatrix(float[] q) {
float[] m = new float[16];
final float xx = q[0] * q[0];
final float xy = q[0] * q[1];
final float xz = q[0] * q[2];
final float xw = q[0] * q[3];
final float yy = q[1] * q[1];
final float yz = q[1] * q[2];
final float yw = q[1] * q[3];
final float zz = q[2] * q[2];
final float zw = q[2] * q[3];
// Set matrix from quaternion
m[0] = 1 - 2 * (yy + zz);
m[1] = 2 * (xy - zw);
m[2] = 2 * (xz + yw);
m[3] = 0;
m[4] = 2 * (xy + zw);
m[5] = 1 - 2 * (xx + zz);
m[6] = 2 * (yz - xw);
m[7] = 0;
m[8] = 2 * (xz - yw);
m[9] = 2 * (yz + xw);
m[10] = 1 - 2 * (xx + yy);
m[11] = 0;
m[12] = 0;
m[13] = 0;
m[14] = 0;
m[15] = 1;
return m;
}
I set the bones array uniform with this method:
private void setBonesUniforms() {
for (int i = 0; i < bones.length; i++) {
int uBonesLocation = glGetUniformLocation(program, "bones[" + i + "]");
glUniformMatrix4fv(uBonesLocation, 1, false, bones[i], 0);
}
}
Finally, this is the vertex shader:
vec4 newVertex=vertexPosition;
vec4 newNormal=vertexNormal;
if(animated==1){
int index;
index=int(skinindex.x);
newVertex = (bones[index] * skinweight.x) * vertexPosition;
newNormal = (bones[index] * skinweight.x) * vertexNormal;
index=int(skinindex.y);
newVertex += (bones[index] * skinweight.y) * vertexPosition;
newNormal += (bones[index] * skinweight.y) * vertexNormal;
}
newVertex=vec4(newVertex.xyz, 1.0);
I noticed that changing the child bone matrix to any other values does not change the result.
You can use AssimpLib. It is easy to use and supports most formats, including COLLADA with skeletal animations.

How to change specific part of vbo?

I have recently created a 2D height map grid which generates a 3D terrain mesh for my world, with the ability to add hills/bumps via mouse click events during runtime. My problem is that every time I add to the height of the vertices I update the whole terrain's normal and position VBOs (very inefficient). What is the way to change a specific part of a VBO?
I have heard that glBufferSubData is the way, but how can I change only the Y value (the VBO is x,y,z,x,y,z...) and get the changed vertices in order for glBufferSubData?
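For the interleaved x,y,z layout, the byte offset of the Y component of vertex i works out to (i * 3 + 1) * 4 with 4-byte floats; a quick worked example (hypothetical index, for illustration only):
int i = 5; // hypothetical vertex index
long byteOffset = (i * 3 + 1) * 4L; // 3 floats per vertex, +1 skips X, 4 bytes per float -> 64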
Terrain class:
public class Terrain {
public static final int SIZE = 500;
//VAO, vertexCount, VBOS
private RawModel model;
//textures for the terrain
private terrainTexturePack texturePack;
Loader loader;
private static int VERTEX_COUNT =128;
float[] Vertices;
float[] Normals;
float[] TextureCoords;
int[] Indices;
private float[][] heights;
public Terrain(Loader loader, terrainTexturePack texturePack) {
this.texturePack = texturePack;
this.loader = loader;
this.model = generateTerrain(loader);
}
public RawModel getModel() {
return model;
}
public terrainTexturePack getTexturePack() {
return texturePack;
}
//player collision detection with the terrain
public Vector3f getXYZOfTerrain(float worldX, float worldZ) {
float gridSquareSize = SIZE / ((float) heights.length - 1);
int gridX = (int) Math.floor(worldX / gridSquareSize);
int gridZ = (int) Math.floor(worldZ / gridSquareSize);
if(gridX >= heights.length - 1 || gridZ >= heights.length - 1 || gridX < 0 || gridZ < 0) {
return null;
}
float xCoord = (worldX % gridSquareSize)/gridSquareSize;
float zCoord = (worldZ % gridSquareSize)/gridSquareSize;
float yCoord;
if (xCoord <= (1-zCoord)) {
yCoord = Maths.barryCentric(new Vector3f(0, heights[gridX][gridZ], 0), new Vector3f(1,
heights[gridX + 1][gridZ], 0), new Vector3f(0,
heights[gridX][gridZ + 1], 1), new Vector2f(xCoord, zCoord));
} else {
yCoord = Maths.barryCentric(new Vector3f(1, heights[gridX + 1][gridZ], 0), new Vector3f(1,
heights[gridX + 1][gridZ + 1], 1), new Vector3f(0,
heights[gridX][gridZ + 1], 1), new Vector2f(xCoord, zCoord));
}
return new Vector3f(gridX, yCoord, gridZ);
}
//GENERATE THE TERRAIN
private RawModel generateTerrain(Loader loader) {
int pointer = 0;
int count = VERTEX_COUNT * VERTEX_COUNT;
heights = new float[VERTEX_COUNT][VERTEX_COUNT];
float[] vertices = new float[count * 3];
float[] normals = new float[count * 3];
float[] textureCoords = new float[count * 2];
int[] indices = new int[6 * (VERTEX_COUNT - 1) * (VERTEX_COUNT - 1)];
int vertexPointer = 0;
for (int i = 0; i < VERTEX_COUNT; i++) {
for (int j = 0; j < VERTEX_COUNT; j++) {
vertices[vertexPointer * 3] = (float) j / ((float) VERTEX_COUNT - 1) * SIZE;
float height = 0f;
vertices[vertexPointer * 3 + 1] = height;
heights[j][i] = height;
vertices[vertexPointer * 3 + 2] = (float) i / ((float) VERTEX_COUNT - 1) * SIZE;
Vector3f normal =new Vector3f(0, 1, 0);// calculateNormal(j, i, noise);
normals[vertexPointer * 3] = normal.x;
normals[vertexPointer * 3 + 1] = normal.y;
normals[vertexPointer * 3 + 2] = normal.z;
textureCoords[vertexPointer * 2] = (float) j / ((float) VERTEX_COUNT - 1);
textureCoords[vertexPointer * 2 + 1] = (float) i / ((float) VERTEX_COUNT - 1);
vertexPointer++;
if(i < VERTEX_COUNT - 1 && j < VERTEX_COUNT - 1){
int topLeft = (i * VERTEX_COUNT) + j;
int topRight = topLeft + 1;
int bottomLeft = ((i + 1) * VERTEX_COUNT) + j;
int bottomRight = bottomLeft + 1;
indices[pointer++] = topLeft;
indices[pointer++] = bottomLeft;
indices[pointer++] = topRight;
indices[pointer++] = topRight;
indices[pointer++] = bottomLeft;
indices[pointer++] = bottomRight;
}
}
}
Vertices = vertices;
TextureCoords = textureCoords;
Normals = normals;
Indices = indices;
return loader.loadToVAO(vertices, textureCoords, normals, indices);
}
//Calculate normal
private Vector3f calculateNormal(int x, int z) {
float heightL = Vertices[((( (z) *VERTEX_COUNT)+ (x-1) )*3)+1];
float heightR = Vertices[((( (z) *VERTEX_COUNT)+ (x+1) )*3)+1];
float heightD = Vertices[((( (z-1) *VERTEX_COUNT)+ (x) )*3)+1];
float heightU = Vertices[((( (z+1) *VERTEX_COUNT)+ (x) )*3)+1];
Vector3f normal = new Vector3f(heightL - heightR, 2f, heightD - heightU);
normal.normalise();
return normal;
}
//create mountain where the mouse clicked
//Vertices[(((y*VERTEX_COUNT)+x)*3)+1] = one Vertex in 2d grid
public void createHill(int x0, int y0){
float h = 0.06f;
int xs=VERTEX_COUNT;
int ys=VERTEX_COUNT;
float maxHeight =Vertices[(((y0*xs)+x0)*3)+1]+h;
float r = (9*maxHeight)/30;
//Loop the vertices
for(int y=(int) (y0-r);y<=y0+r;y++)
for(int x=(int) (x0-r);x<=x0+r;x++){
double circule = Math.sqrt((x-x0)*(x-x0)+(y0-y)*(y0-y));
if (circule <= r)
if ((x>=1)&&(x<xs-1))
if ((y>=1)&&(y<ys-1)){
Vertices[(((y*xs)+x)*3)+1] = Maths.hillsHeight(x0, x, y0, y,(maxHeight), r);
Vector3f normal = calculateNormal(x,y);
Normals[((((y*xs)+x))) * 3] = normal.x;
Normals[((((y*xs)+x))) * 3 + 1] = normal.y;
Normals[((((y*xs)+x))) * 3 + 2] = normal.z;
}
}
//changing the whole VBOs - not efficient
//Note: I know that I don't need to update the textures and indices
this.model=loader.loadToVAO(Vertices, TextureCoords, Normals, Indices);
}
}
Raw model class(vbo and vao holder):
//Store the VAOS and VBOS
public class RawModel {
private int vaoID;
private int vertexCount;
private int positionVbo;
private int normalVbo;
private int textureVbo;
public RawModel(int vaoID, int vertexCount, int positionVbo, int normalVbo, int textureVbo) {
this.vaoID = vaoID;
this.vertexCount = vertexCount;
this.positionVbo = positionVbo;
this.normalVbo = normalVbo;
this.textureVbo = textureVbo;
}
public RawModel(int vaoID, int vertexCount) {
this.vaoID = vaoID;
this.vertexCount = vertexCount;
}
public int getVaoID() {
return vaoID;
}
public int getVertexCount() {
return vertexCount;
}
public int getPositionVbo() {
return positionVbo;
}
public int getTextureVbo() {
return textureVbo;
}
public int getNormalVbo() {
return normalVbo;
}
}
loader class:
public class Loader {
//For clean up
private List<Integer> vaos = new ArrayList<Integer>();
private List<Integer> vbos = new ArrayList<Integer>();
private List<Integer> textures = new ArrayList<Integer>();
//Load mesh into VAO
public RawModel loadToVAO(float[] positions,float[] textureCoords,float[] normals,int[] indices){
int vaoID = createVAO();
bindIndicesBuffer(indices);
int positionvbo = storeDataInAttributeList(0,3,positions);
int textureVbo = storeDataInAttributeList(1,2,textureCoords);
int normalsnvbo = storeDataInAttributeList(2,3,normals);
unbindVAO();
return new RawModel(vaoID,indices.length, positionvbo, textureVbo, normalsnvbo);
}
//Load texture
public int loadTexture(String fileName) {
Texture texture = null;
try {
texture = TextureLoader.getTexture("PNG",
new FileInputStream("res/textures/" + fileName + ".png"));
GL30.glGenerateMipmap(GL11.GL_TEXTURE_2D);
GL11.glTexParameteri(GL11.GL_TEXTURE_2D, GL11.GL_TEXTURE_MIN_FILTER,
GL11.GL_LINEAR_MIPMAP_LINEAR);
GL11.glTexParameterf(GL11.GL_TEXTURE_2D, GL14.GL_TEXTURE_LOD_BIAS, -2);
if(GLContext.getCapabilities().GL_EXT_texture_filter_anisotropic){
float amount = Math.min(4f,
GL11.glGetFloat(EXTTextureFilterAnisotropic.GL_TEXTURE_MAX_ANISOTROPY_EXT));
GL11.glTexParameterf(GL11.GL_TEXTURE_2D,
EXTTextureFilterAnisotropic.GL_TEXTURE_MAX_ANISOTROPY_EXT, amount);
}
} catch (Exception e) {
e.printStackTrace();
System.err.println("Tried to load texture " + fileName + ".png , didn't work");
System.exit(-1);
}
textures.add(texture.getTextureID());
return texture.getTextureID();
}
//Clean up
public void cleanUp(){
for(int vao:vaos){
GL30.glDeleteVertexArrays(vao);
}
for(int vbo:vbos){
GL15.glDeleteBuffers(vbo);
}
for(int texture:textures){
GL11.glDeleteTextures(texture);
}
}
//Creates vao
private int createVAO(){
int vaoID = GL30.glGenVertexArrays();
vaos.add(vaoID);
GL30.glBindVertexArray(vaoID);
return vaoID;
}
//Store data in vbo
private int storeDataInAttributeList(int attributeNumber, int coordinateSize,float[] data){
int vboID = GL15.glGenBuffers();
vbos.add(vboID);
GL15.glBindBuffer(GL15.GL_ARRAY_BUFFER, vboID);
FloatBuffer buffer = storeDataInFloatBuffer(data);
GL15.glBufferData(GL15.GL_ARRAY_BUFFER, buffer, GL15.GL_STATIC_DRAW);
GL20.glVertexAttribPointer(attributeNumber,coordinateSize,GL11.GL_FLOAT,false,0,0);
GL15.glBindBuffer(GL15.GL_ARRAY_BUFFER, 0);
return vboID;
}
private void unbindVAO(){
GL30.glBindVertexArray(0);
}
//Bind indices buffer
private void bindIndicesBuffer(int[] indices){
int vboID = GL15.glGenBuffers();
vbos.add(vboID);
GL15.glBindBuffer(GL15.GL_ELEMENT_ARRAY_BUFFER, vboID);
IntBuffer buffer = storeDataInIntBuffer(indices);
GL15.glBufferData(GL15.GL_ELEMENT_ARRAY_BUFFER, buffer, GL15.GL_STATIC_DRAW);
}
//Store in int buffer
private IntBuffer storeDataInIntBuffer(int[] data){
IntBuffer buffer = BufferUtils.createIntBuffer(data.length);
buffer.put(data);
buffer.flip();
return buffer;
}
//Store in float buffer
private FloatBuffer storeDataInFloatBuffer(float[] data){
FloatBuffer buffer = BufferUtils.createFloatBuffer(data.length);
buffer.put(data);
buffer.flip();
return buffer;
}
//Load skyBox textures
public int loadCubeMap(String[] textureFiles){
int texID = GL11.glGenTextures();
GL13.glActiveTexture(GL13.GL_TEXTURE0);
GL11.glBindTexture(GL13.GL_TEXTURE_CUBE_MAP, texID);
for(int i = 0; i < textureFiles.length; i++){
TextureData data = decodeTextureFile("res/textures/"+ textureFiles[i] + ".png");
GL11.glTexImage2D(GL13.GL_TEXTURE_CUBE_MAP_POSITIVE_X+i, 0, GL11.GL_RGBA, data.getWidth(), data.getHeight(), 0,
GL11.GL_RGBA, GL11.GL_UNSIGNED_BYTE, data.getBuffer());
}
GL11.glTexParameteri(GL13.GL_TEXTURE_CUBE_MAP, GL11.GL_TEXTURE_MAG_FILTER, GL11.GL_LINEAR);
GL11.glTexParameteri(GL13.GL_TEXTURE_CUBE_MAP, GL11.GL_TEXTURE_MIN_FILTER, GL11.GL_LINEAR);
textures.add(texID);
GL11.glTexParameteri(GL13.GL_TEXTURE_CUBE_MAP, GL11.GL_TEXTURE_WRAP_S, GL12.GL_CLAMP_TO_EDGE);
GL11.glTexParameteri(GL13.GL_TEXTURE_CUBE_MAP, GL11.GL_TEXTURE_WRAP_T, GL12.GL_CLAMP_TO_EDGE);
return texID;
}
private TextureData decodeTextureFile(String fileName) {
int width = 0;
int height = 0;
ByteBuffer buffer = null;
try {
FileInputStream in = new FileInputStream(fileName);
PNGDecoder decoder = new PNGDecoder(in);
width = decoder.getWidth();
height = decoder.getHeight();
buffer = ByteBuffer.allocateDirect(4 * width * height);
decoder.decode(buffer, width * 4, Format.RGBA);
buffer.flip();
in.close();
} catch (Exception e) {
e.printStackTrace();
System.err.println("Tried to load texture " + fileName + ", didn't work");
System.exit(-1);
}
return new TextureData(buffer, width, height);
}
//Load textures for GUI
public RawModel loadToVAO(float[] positions, int dimensions) {
int vaoID = createVAO();
this.storeDataInAttributeList(0, dimensions, positions);
unbindVAO();
return new RawModel(vaoID, positions.length / dimensions);
}
}
Solved, thanks to Reto Koradi:
public void changeVbo(int position, float[] data, int VboId){
GL15.glBindBuffer(GL15.GL_ARRAY_BUFFER, VboId);
FloatBuffer ArrayData = storeDataInFloatBuffer(data);
GL15.glBufferSubData(GL15.GL_ARRAY_BUFFER, position * 4, ArrayData); // byte offset = float index * 4 bytes per float
GL15.glBindBuffer(GL15.GL_ARRAY_BUFFER, 0);
}
The easiest and likely most efficient way is to store the heights (y-values) in a separate VBO, and specify them as a separate vertex attribute.
Then, in your vertex shader code, you can simply reassemble the position from the separate attributes. You might have something like this in your shader code now:
in vec3 pos;
This changes to:
in vec3 posXZ;
in float posY;
...
vec3 pos = vec3(posXZ.x, posY, posXZ.y);
Using a separate VBO for data that changes frequently also allows you to specify the allocation flags accordingly. You can use GL_DYNAMIC_DRAW for the data that changes frequently, GL_STATIC_DRAW for the rest.
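A rough LWJGL sketch of that suggestion (the buffer ID, attribute index and variable names here are made up for illustration): allocate the heights-only VBO with GL_DYNAMIC_DRAW, then overwrite just the heights that changed with glBufferSubData.
// Setup: one float per vertex (heights only), allocated with dynamic usage
int heightVboId = GL15.glGenBuffers();
GL15.glBindBuffer(GL15.GL_ARRAY_BUFFER, heightVboId);
GL15.glBufferData(GL15.GL_ARRAY_BUFFER, vertexCount * 4, GL15.GL_DYNAMIC_DRAW); // allocate only, no data yet
GL20.glVertexAttribPointer(3, 1, GL11.GL_FLOAT, false, 0, 0); // e.g. attribute 3 = posY
GL15.glBindBuffer(GL15.GL_ARRAY_BUFFER, 0);
// Update: overwrite a contiguous run of heights starting at firstVertex
FloatBuffer changed = BufferUtils.createFloatBuffer(newHeights.length);
changed.put(newHeights).flip();
GL15.glBindBuffer(GL15.GL_ARRAY_BUFFER, heightVboId);
GL15.glBufferSubData(GL15.GL_ARRAY_BUFFER, firstVertex * 4L, changed);
GL15.glBindBuffer(GL15.GL_ARRAY_BUFFER, 0);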
Another option would be to use glMapBuffer(). This gives you a CPU pointer to the buffer content, allowing you to modify only the data that you actually want to change. However, you'll have to be careful that you don't introduce undesirable synchronization between CPU and GPU. The glMapBuffer() call might block until the GPU finished all rendering calls using the previous content of the buffer. One common technique is to use multiple copies of the data in a set of buffers, and cycle through them, to minimize synchronization. But if the amount of data is large, that will obviously cause memory usage to increase dramatically.
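A sketch of the glMapBuffer() route in LWJGL, writing a single Y value in place (the offset assumes the tightly packed x,y,z float layout from the question; names like positionVboId and newHeight are placeholders):
GL15.glBindBuffer(GL15.GL_ARRAY_BUFFER, positionVboId);
ByteBuffer mapped = GL15.glMapBuffer(GL15.GL_ARRAY_BUFFER, GL15.GL_WRITE_ONLY, null);
if (mapped != null) {
    int vertexIndex = (y * VERTEX_COUNT) + x;    // vertex position in the 2D grid
    int byteOffset = (vertexIndex * 3 + 1) * 4;  // +1 selects Y, 4 bytes per float
    mapped.putFloat(byteOffset, newHeight);
    GL15.glUnmapBuffer(GL15.GL_ARRAY_BUFFER);
}
GL15.glBindBuffer(GL15.GL_ARRAY_BUFFER, 0);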
In your use case, I suspect that you'll have to update the normals as well, since they depend on the height values.

How to apply color to an irregular polygon with unknown indices?

I have a set of vertices (X,Y) of an irregular polygon, some polygons having 4 vertices and some more than 4. I'm creating an array of vertices for drawing from the set of vertices I have using the getTransformedVertices() method. The indices and UVs for the texture are unknown, so I had to calculate them using the Triangulate() method below. I've set up the color and texture in the setColor() and setImage() methods.
However, the polygons having more than 4 vertices are not rendered properly. I've been trying for weeks now and have searched almost half of the internet; this is what I could come up with. The polygons with 4 vertices and the outlines are working properly, but I couldn't get the textures displayed properly on the polygons. Please help.
public class BoothRectangle
{
float angle;
float scale;
RectF base;
PointF translation;
int textureId;
int positionBufferId;
int textureBufferId;
PointF[] corners;
float[] verts;
// Geometric variables
public float vertices[];
public float colors[];
public short indices[];
public float uvs[];
public FloatBuffer vertexBuffer;
public ShortBuffer drawListBuffer;
public FloatBuffer colorBuffer;
public FloatBuffer uvBuffer;
TextPaint textPaint = new TextPaint();
String title;
public BoothRectangle(PointF[] corners, int textureId, float[] colors, String title)
{
// Initialise our initial size around the 0,0 point
base = new RectF(corners[1].x, corners[3].y, corners[0].x, corners[1].y);
this.corners = corners;
this.title = title;
// Offset translation
translation = new PointF(0f,0f);
// Initial Size
scale = 1f;
// We start at our initial angle
angle = 0f;
this.textureId = textureId;
this.colors = colors;
}
public void setColor(float[] topColor){
List<Float> colorsList = new ArrayList<Float>();
for(PointF point : corners){
colorsList.add(topColor[0] / 255);
colorsList.add(topColor[1] / 255);
colorsList.add(topColor[2] / 255);
colorsList.add(1f);
}
int i = 0;
float[] colors = new float[colorsList.size()];
for (Float f : colorsList) {
colors[i++] = (f != null ? f : Float.NaN);
}
ByteBuffer cbb = ByteBuffer.allocateDirect(colors.length * 4);
cbb.order(ByteOrder.nativeOrder());
colorBuffer = cbb.asFloatBuffer();
colorBuffer.put(colors);
colorBuffer.position(0);
int[] buffers = new int[1];
GLES11.glGenBuffers(1, buffers, 0);
GLES11.glBindBuffer(GLES11.GL_ARRAY_BUFFER, buffers[0]);
GLES11.glBufferData(GLES11.GL_ARRAY_BUFFER, 4 * colors.length, colorBuffer, GLES11.GL_STATIC_DRAW);
textureBufferId = buffers[0];
GLES11.glBindBuffer(GLES11.GL_ARRAY_BUFFER, 0);
}
public float[] getTransformedVertices()
{
float z;
List<Float> finalVertices = new ArrayList<Float>();
if(textureId == 0)
z = 0.5f;
else
z = 2.0f;
finalVertices.clear();
for(PointF point : corners){
finalVertices.add(point.x);
finalVertices.add(point.y);
finalVertices.add(0.0f);
}
int i = 0;
float[] verticesArray = new float[finalVertices.size()];
for (Float f : finalVertices) {
verticesArray[i++] = (f != null ? f : Float.NaN);
}
return verticesArray;
}
public void setImage()
{
uvs = new float[] {
0.0f, 0.0f,
1.0f, 0.0f,
1.0f, 1.0f,
0.0f, 1.0f
};
// The texture buffer
ByteBuffer bb = ByteBuffer.allocateDirect(uvs.length * 4);
bb.order(ByteOrder.nativeOrder());
uvBuffer = bb.asFloatBuffer();
uvBuffer.put(uvs);
uvBuffer.position(0);
int[] buffers = new int[1];
GLES11.glGenBuffers(1, buffers, 0);
GLES11.glBindBuffer(GLES11.GL_ARRAY_BUFFER, buffers[0]);
GLES11.glBufferData(GLES11.GL_ARRAY_BUFFER, 4 * uvs.length, uvBuffer, GLES11.GL_STATIC_DRAW);
textureBufferId = buffers[0];
GLES11.glBindBuffer(GLES11.GL_ARRAY_BUFFER, 0);
}
public void initBooth()
{
vertices = this.getTransformedVertices();
indices = PolygonTriangulation.Process(vertices);
if(indices==null){
Log.d("PolygonTriangulation",title + " - failed");
if(this.corners.length == 4){
indices = new short[] {2, 1, 0, 2, 0, 3};
}else{
indices = new short[corners.length];
for(int i=0;i<corners.length;i++){
indices[i] = (short) i;
}
}
}
// The vertex buffer.
ByteBuffer bb = ByteBuffer.allocateDirect(vertices.length * 4);
bb.order(ByteOrder.nativeOrder());
vertexBuffer = bb.asFloatBuffer();
vertexBuffer.put(vertices);
vertexBuffer.position(0);
ByteBuffer dlb = ByteBuffer.allocateDirect(indices.length * 2);
dlb.order(ByteOrder.nativeOrder());
drawListBuffer = dlb.asShortBuffer();
drawListBuffer.put(indices);
drawListBuffer.position(0);
int[] buffers = new int[1];
GLES11.glGenBuffers(1, buffers, 0);
GLES11.glBindBuffer(GLES11.GL_ARRAY_BUFFER, buffers[0]);
GLES11.glBufferData(GLES11.GL_ARRAY_BUFFER, 4 * vertices.length, vertexBuffer, GLES11.GL_STATIC_DRAW);
positionBufferId = buffers[0];
GLES11.glBindBuffer(GLES11.GL_ARRAY_BUFFER, 0);
if(this.textureId !=0){
setImage();
} else {
setColor(colors);
}
}
public void Render(GL10 gl){
if(textureId == 0){
GLES11.glPushMatrix();
GLES11.glBindBuffer(GLES11.GL_ARRAY_BUFFER, positionBufferId);
GLES11.glEnableClientState(GL10.GL_VERTEX_ARRAY);
GLES11.glVertexPointer(3, GL10.GL_FLOAT, 0, 0);
GLES11.glBindBuffer(GLES11.GL_ARRAY_BUFFER, 0);
GLES11.glFrontFace(GL10.GL_CW);
GLES11.glColor4f(0.8f, 0.8f, 0.8f, 0.8f);
GLES11.glLineWidth(3.0f);
GLES11.glDrawArrays(GL10.GL_LINE_LOOP, 0, corners.length);
GLES11.glBindBuffer(GLES11.GL_ARRAY_BUFFER, textureBufferId);
gl.glEnableClientState(GL10.GL_COLOR_ARRAY);
GLES11.glColorPointer(4, GL10.GL_FLOAT, 0, 0);
GLES11.glBindBuffer(GLES11.GL_ARRAY_BUFFER, 0);
GLES11.glDrawElements(GL10.GL_TRIANGLES, indices.length,
GL10.GL_UNSIGNED_SHORT, drawListBuffer);
GLES11.glDisableClientState(GL10.GL_VERTEX_ARRAY);
GLES11.glDisableClientState(GL10.GL_COLOR_ARRAY);
GLES11.glPopMatrix();
} else {
GLES11.glPushMatrix();
GLES11.glBindBuffer(GLES11.GL_ARRAY_BUFFER, positionBufferId);
GLES11.glEnableClientState(GL10.GL_VERTEX_ARRAY);
GLES11.glVertexPointer(3, GL10.GL_FLOAT, 0, 0);
GLES11.glBindBuffer(GLES11.GL_ARRAY_BUFFER, 0);
GLES11.glBindBuffer(GLES11.GL_ARRAY_BUFFER, textureBufferId);
GLES11.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
GLES11.glTexCoordPointer(2, GL10.GL_FLOAT, 0, 0);
GLES11.glBindBuffer(GLES11.GL_ARRAY_BUFFER, 0);
GLES11.glEnable(GL10.GL_TEXTURE_2D);
GLES11.glBlendFunc(GL10.GL_SRC_ALPHA, GL10.GL_ONE_MINUS_SRC_ALPHA);
GLES11.glEnable(GL10.GL_BLEND);
GLES11.glFrontFace(GL10.GL_CW);
GLES11.glColor4f(1.0f, 1.0f, 1.0f, 1.0f);
GLES11.glBindTexture(GL10.GL_TEXTURE_2D, textureId);
int error = gl.glGetError();
if (error != GL10.GL_NO_ERROR)
{
Log.e("OPENGL", "GL Texture Load Error: " + GLU.gluErrorString(error));
}
GLES11.glDrawElements(GL10.GL_TRIANGLES, indices.length,
GL10.GL_UNSIGNED_SHORT, drawListBuffer);
GLES11.glDisable(GL10.GL_BLEND);
GLES11.glDisableClientState(GL10.GL_VERTEX_ARRAY);
GLES11.glDisableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
GLES11.glDisable(GL10.GL_TEXTURE_2D);
GLES11.glPopMatrix();
}
}
}
PolygonTriangulation.java - obtained from AssetFX - http://www.experts-exchange.com/Programming/Languages/Java/Q_27882746.html
public class PolygonTriangulation {
static final float EPSILON=0.0000000001f;
static public float Area(float[] contour) {
int n = contour.length;
float A = 0.0f;
for(int p = n - 3, q = 0; q < n; p=q, q+=3)
{
A += contour[p] * contour[q+1] - contour[q] * contour[p+1];
}
return A * 0.5f;
}
static public boolean InsideTriangle(float Ax,float Ay,float Bx,float By,float Cx,float Cy,float Px,float Py){
float ax, ay, bx, by, cx, cy, apx, apy, bpx, bpy, cpx, cpy;
float cCROSSap, bCROSScp, aCROSSbp;
ax = Cx - Bx; ay = Cy - By;
bx = Ax - Cx; by = Ay - Cy;
cx = Bx - Ax; cy = By - Ay;
apx= Px - Ax; apy= Py - Ay;
bpx= Px - Bx; bpy= Py - By;
cpx= Px - Cx; cpy= Py - Cy;
aCROSSbp = ax * bpy - ay * bpx;
cCROSSap = cx * apy - cy * apx;
bCROSScp = bx * cpy - by * cpx;
return ((aCROSSbp >= 0.0f) && (bCROSScp >= 0.0f) && (cCROSSap >= 0.0f));
}
static public boolean Snip(float[] contour, int u, int v, int w, int n, int[] V) {
int p;
float Ax, Ay, Bx, By, Cx, Cy, Px, Py;
Ax = contour[V[u]];
Ay = contour[V[u+1]];
Bx = contour[V[v]];
By = contour[V[v+1]];
Cx = contour[V[w]];
Cy = contour[V[w+1]];
if ( EPSILON > (((Bx-Ax)*(Cy-Ay)) - ((By-Ay)*(Cx-Ax))) ){
return false;
}
for (p = 0; p < n; p++)
{
if( (p == u/2) || (p == v/2) || (p == w/2) ){
continue;
}
Px = contour[V[p*2]];
Py = contour[V[(p*2)+1]];
if (InsideTriangle(Ax,Ay,Bx,By,Cx,Cy,Px,Py)){
return false;
}
}
return true;
}
//Brings in 3D vertex float array but processes it as 2D only.
static public short[] Process(float[] contour) {
//Pre-return list
ArrayList<Integer> vertexArray = new ArrayList<Integer>();
/* allocate and initialize list of Vertices in polygon */
int n = contour.length;
if ( n/3 < 3 )
return null;
//The (n/3)*2 occurs as we are removing the z coordinate from the mix
int[] V = new int[(n/3)*2];
/* we want a counter-clockwise polygon in V */
if (0.0f < Area(contour)){
for (int s = 0, v = 0; v < n-1; s+=2, v += 3){
V[s] = v;
V[s + 1] = v + 1;
}
}
else{
for(int s = 0, v = 0; v < n-1; s += 2, v += 3){
V[s] = (n - 1) - (v + 2);
V[s + 1] = (n - 1) - (v + 1);
}
}
int nv = n/3;
/* remove nv-2 Vertices, creating 1 triangle every time */
int count = 2 * nv; /* error detection */
for(int v = nv - 1; nv > 2;)
{
/* if we loop, it is probably a non-simple polygon */
if (0 >= (count--))
{
//** Triangulate: ERROR - probable bad polygon!
Log.e("PolygonTriangulation","Invalid Polygon");
return null;
}
/* three consecutive vertices in current polygon, <u,v,w> */
int u = v;
if (nv <= u)
u = 0; /* previous */
v = u + 1;
if (nv <= v)
v = 0; /* new v */
int w = v + 1;
if (nv <= w)
w = 0; /* next */
if (Snip(contour, u*2, v*2, w*2, nv, V))
{
vertexArray.add(V[u*2]/3);
vertexArray.add(V[v*2]/3);
vertexArray.add(V[w*2]/3);
// remove v from remaining polygon
for(int s = v * 2, t = (v * 2) + 2; t < (nv * 2); s += 2, t += 2){
V[s] = V[t];
V[s+1] = V[t+1];
}
nv--;
// reset error detection counter
count = 2 * nv;
}
}
//Convert ArrayList into short array
short[] index = new short[vertexArray.size()];
for(int i = 0; i < vertexArray.size(); i++){
index[i] = vertexArray.get(i).shortValue();
}
return index;
}
}

OpenGL 2.0 Matrices not working

I am using Java and OpenGL (LWJGL) to set up some matrices. I didn't want to use the built-in methods, as I also want this to work on Android, so using LWJGL's Matrix classes wouldn't be appropriate. Currently I am setting up a perspective view, using an fov of 70, znear 0.1, zfar 1000. Rotating with the current setup only gives strange results: objects do not rotate in the correct way, are scaled strangely, and often disappear.
Here is the Matrix4D class:
public class Matrix4D {
/* The values within this matrix */
public float[] values;
/* The default constructor */
public Matrix4D() {
//Create the values
this.values = new float[16];
}
/* The constructor with the values given */
public Matrix4D(float[] values) {
//Create the values
this.values = values;
}
/* The constructor with the values given */
public Matrix4D(float[][] values) {
//Load the values
load(values);
}
/* The method used to set the values given a 2 dimensional array */
public void load(float[][] values) {
this.values = new float[] {
values[0][0], values[0][1], values[0][2], values[0][3],
values[1][0], values[1][1], values[1][2], values[1][3],
values[2][0], values[2][1], values[2][2], values[2][3],
values[3][0], values[3][1], values[3][2], values[3][3]
};
}
/* The method used to get a value using the coordinate within this matrix */
public float get(int x, int y) {
//Get the position
int position = x + (y * 4);
//Return the value
return this.values[position];
}
/* The method used to return a string representation of this matrix */
public String toString() {
//Return the string
return "[ " + this.values[0] + " " + this.values[1] + " " + + this.values[2] + " " + + this.values[3] + " ]" + "\n" +
"[ " + this.values[4] + " " + this.values[5] + " " + + this.values[6] + " " + + this.values[7] + " ]" + "\n" +
"[ " + this.values[8] + " " + this.values[9] + " " + + this.values[10] + " " + + this.values[11] + " ]" + "\n" +
"[ " + this.values[12] + " " + this.values[13] + " " + + this.values[14] + " " + + this.values[15] + " ]";
}
/* The method used to get the values */
public float[] getValues() { return this.values; }
/* The method used to get the values in a 2D array */
public float[][] getValues2DArray() {
//The array
float[][] array = new float[4][4];
//Go through each value
int column = 0;
int row = 0;
while (column * row < array.length) {
row ++;
if (row >= 4) {
column++;
row = 0;
}
array[column][row] = this.values[column * row];
}
//Return the array
return array;
}
}
Here is the Matrix class (Used to setup and perform calculations on a matrix):
public class Matrix {
/* The different matrices */
public static Matrix4D modelMatrix = new Matrix4D();
public static Matrix4D viewMatrix = new Matrix4D();
public static Matrix4D projectionMatrix = new Matrix4D();
public static Matrix4D modelViewProjectionMatrix = new Matrix4D();
/* The static method used to load an identity matrix */
public static void loadIdentity(Matrix4D matrix) {
//Load the identity matrix
matrix.load(new float[][] {
new float[] { 1, 0, 0, 0 },
new float[] { 0, 1, 0, 0 },
new float[] { 0, 0, 1, 0 },
new float[] { 0, 0, 0, 1 },
});
}
/* The static method used to add two matrices together */
public static Matrix4D add(Matrix4D matrixA, Matrix4D matrixB) {
//Create a new matrix
Matrix4D matrix = new Matrix4D();
//Go through each value
for (int a = 0; a < matrix.values.length; a++)
//Assign the current value
matrix.values[a] = matrixA.values[a] + matrixB.values[a];
//Return the matrix
return matrix;
}
/* The static method used to subtract a matrix (B) from another (A) */
public static Matrix4D subtract(Matrix4D matrixA, Matrix4D matrixB) {
//Create a new matrix
Matrix4D matrix = new Matrix4D();
//Go through each value
for (int a = 0; a < matrix.values.length; a++)
//Assign the current value
matrix.values[a] = matrixB.values[a] - matrixA.values[a];
//Return the matrix
return matrix;
}
/* The static method used to multiply two matrices together */
public static Matrix4D multiply(Matrix4D matrixA, Matrix4D matrixB) {
//Create a new matrix
Matrix4D matrix = new Matrix4D(new float[][] {
new float[] {
(matrixA.values[0] * matrixB.values[0]) + (matrixA.values[1] * matrixB.values[4]) + (matrixA.values[2] * matrixB.values[8]) + (matrixA.values[3] * matrixB.values[12]),
(matrixA.values[0] * matrixB.values[1]) + (matrixA.values[1] * matrixB.values[5]) + (matrixA.values[2] * matrixB.values[9]) + (matrixA.values[3] * matrixB.values[13]),
(matrixA.values[0] * matrixB.values[2]) + (matrixA.values[1] * matrixB.values[6]) + (matrixA.values[2] * matrixB.values[10]) + (matrixA.values[3] * matrixB.values[14]),
(matrixA.values[0] * matrixB.values[3]) + (matrixA.values[1] * matrixB.values[7]) + (matrixA.values[2] * matrixB.values[11]) + (matrixA.values[3] * matrixB.values[15])
},
new float[] {
(matrixA.values[4] * matrixB.values[0]) + (matrixA.values[5] * matrixB.values[4]) + (matrixA.values[6] * matrixB.values[8]) + (matrixA.values[7] * matrixB.values[12]),
(matrixA.values[4] * matrixB.values[1]) + (matrixA.values[5] * matrixB.values[5]) + (matrixA.values[6] * matrixB.values[9]) + (matrixA.values[7] * matrixB.values[13]),
(matrixA.values[4] * matrixB.values[2]) + (matrixA.values[5] * matrixB.values[6]) + (matrixA.values[6] * matrixB.values[10]) + (matrixA.values[7] * matrixB.values[14]),
(matrixA.values[4] * matrixB.values[3]) + (matrixA.values[5] * matrixB.values[7]) + (matrixA.values[6] * matrixB.values[11]) + (matrixA.values[7] * matrixB.values[15])
},
new float[] {
(matrixA.values[8] * matrixB.values[0]) + (matrixA.values[9] * matrixB.values[4]) + (matrixA.values[10] * matrixB.values[8]) + (matrixA.values[11] * matrixB.values[12]),
(matrixA.values[8] * matrixB.values[1]) + (matrixA.values[9] * matrixB.values[5]) + (matrixA.values[10] * matrixB.values[9]) + (matrixA.values[11] * matrixB.values[13]),
(matrixA.values[8] * matrixB.values[2]) + (matrixA.values[9] * matrixB.values[6]) + (matrixA.values[10] * matrixB.values[10]) + (matrixA.values[11] * matrixB.values[14]),
(matrixA.values[8] * matrixB.values[3]) + (matrixA.values[9] * matrixB.values[7]) + (matrixA.values[10] * matrixB.values[11]) + (matrixA.values[11] * matrixB.values[15])
},
new float[] {
(matrixA.values[12] * matrixB.values[0]) + (matrixA.values[13] * matrixB.values[4]) + (matrixA.values[14] * matrixB.values[8]) + (matrixA.values[15] * matrixB.values[12]),
(matrixA.values[12] * matrixB.values[1]) + (matrixA.values[13] * matrixB.values[5]) + (matrixA.values[14] * matrixB.values[9]) + (matrixA.values[15] * matrixB.values[13]),
(matrixA.values[12] * matrixB.values[2]) + (matrixA.values[13] * matrixB.values[6]) + (matrixA.values[14] * matrixB.values[10]) + (matrixA.values[15] * matrixB.values[14]),
(matrixA.values[12] * matrixB.values[3]) + (matrixA.values[13] * matrixB.values[7]) + (matrixA.values[14] * matrixB.values[11]) + (matrixA.values[15] * matrixB.values[15])
}
});
//Return the matrix
return matrix;
}
/* The static method used to transpose a matrix */
public static Matrix4D transpose(Matrix4D matrix) {
//Get the values from the matrix
float[][] values = matrix.getValues2DArray();
//The new values
float[][] newValues = new float[4][4];
//Go through the array
for (int y = 0; y < values.length; y++) {
for (int x = 0; x < values[y].length; x++) {
//Assign the new value
newValues[x][y] = values[y][x];
}
}
//Return the matrix
return new Matrix4D(newValues);
}
/* The static method used to translate a matrix */
public static Matrix4D translate(Matrix4D matrix, Vector3D vector) {
//The transform matrix
Matrix4D transform = new Matrix4D(new float[][] {
new float[] { 0, 0, 0, vector.x },
new float[] { 0, 0, 0, vector.y },
new float[] { 0, 0, 0, vector.z },
new float[] { 0, 0, 0, 0 },
});
//Add onto the matrix and return the result
return add(matrix, transform);
}
/* The static method used to rotate a matrix */
public static Matrix4D rotate(Matrix4D matrix, float angle, int x, int y, int z) {
//The transform matrix
Matrix4D transform = new Matrix4D();
//Calculate the values needed
float cos = (float) Math.cos(angle);
float sin = (float) Math.sin(angle);
//Check the x y and z values
if (x == 1) {
transform.load(new float[][] {
new float[] { 0, 0, 0, 0 },
new float[] { 0, cos, -sin, 0 },
new float[] { 0, sin, cos, 0 },
new float[] { 0, 0, 0, 0 },
});
} else if (y == 1) {
transform.load(new float[][] {
new float[] { cos, 0, sin, 0 },
new float[] { 0, 0, 0, 0 },
new float[] { -sin, 0, cos, 0 },
new float[] { 0, 0, 0, 0 },
});
} else if (z == 1) {
transform.load(new float[][] {
new float[] { cos, -sin, 0, 0 },
new float[] { sin, cos, 0, 0 },
new float[] { 0, 0, 0, 0 },
new float[] { 0, 0, 0, 0 },
});
}
//Add onto the matrix and return the result
return add(matrix, transform);
}
/* The static method used to scale a matrix */
public static Matrix4D scale(Matrix4D matrix, Vector3D vector) {
//The transform matrix
Matrix4D transform = new Matrix4D(new float[][] {
new float[] { vector.x, 0, 0, 0 },
new float[] { 0, vector.y, 0, 0 },
new float[] { 0, 0, vector.z, 0 },
new float[] { 0, 0, 0, 0 },
});
//Add onto the matrix and return the result
return add(matrix, transform);
}
/* The static method used to return an orthographic projection matrix */
public static Matrix4D ortho(float left, float right, float top, float bottom, float zfar, float znear) {
return new Matrix4D(new float[][] {
new float[] { 2 / (right - left), 0, 0, -((right + left) / (right - left)) },
new float[] { 0, 2 / (top - bottom), 0, -((top + bottom) / (top - bottom)) },
new float[] { 0, 0, -2 / (zfar - znear), -((zfar + znear) / (zfar - znear)) },
new float[] { 0, 0, 0, 1 },
});
}
/* The static method used to return a perspective projection matrix */
public static Matrix4D perspective(float fov, float aspect, float zNear, float zFar) {
float f = (float) (1f / Math.tan(fov / 2f));
return new Matrix4D(new float[][] {
new float[] { f / aspect, 0, 0, 0 },
new float[] { 0, f, 0, 0 },
new float[] { 0, 0, (zFar + zNear) / (zFar - zNear), (2 * zFar * zNear) / (zNear - zFar) },
new float[] { 0, 0, -1, 0 },
});
}
}
Finally, the methods used to set up the perspective/orthographic projection are:
/* The static method to setup an orthographic view given the width, height
* znear and zfar values */
public static void setupOrtho(float width, float height, float znear , float zfar) {
Matrix.loadIdentity(Matrix.modelMatrix);
Matrix.loadIdentity(Matrix.viewMatrix);
Matrix.projectionMatrix = Matrix.ortho(0, width, 0, height, znear, zfar);
}
/* The static method used to setup a perspective view given the
* fov, z near and z far value */
public static void setupPerspective(float fov, float zNear, float zFar) {
setupPerspective(fov, (float) (Settings.Window.Width / Settings.Window.Height), zNear, zFar);
}
/* The static method used to setup a perspective view given the
* fov, aspect ratio, z near and z far values */
public static void setupPerspective(float fov, float aspect, float zNear, float zFar) {
Matrix.loadIdentity(Matrix.modelMatrix);
Matrix.loadIdentity(Matrix.viewMatrix);
Matrix.projectionMatrix = Matrix.perspective(fov, aspect, zNear, zFar);
}
To render all of this and pass the matrices to the shader I am using:
//Multiply the matrices together
Matrix4D modelViewMatrix = Matrix.multiply(Matrix.modelMatrix, Matrix.viewMatrix);
Matrix.modelViewProjectionMatrix = (Matrix.multiply(modelViewMatrix, Matrix.projectionMatrix));
System.out.println(Matrix.modelViewProjectionMatrix.toString() + "\n");
And in the shader I multiply the current vertex position by the model view projection matrix.
Here is a picture of what it currently looks like.
You seem to be multiplying your matrices in the wrong order. When combining matrix transformations, the one on the right of the equation will be the first transformation performed.
You calculate your matrix as Model × View × Projection. When multiplying this by a vector, the projection would be performed first, followed by the view transformation, and lastly the model transformation. Obviously this is not what you want.
Your final matrix should be calculated like Projection × View × Model to do the transformations in the right order.
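In terms of the code from the question, and assuming Matrix.multiply(a, b) computes a × b, the combined matrix would be built roughly like this:
// Sketch: combine as Projection * View * Model
Matrix4D modelViewMatrix = Matrix.multiply(Matrix.viewMatrix, Matrix.modelMatrix);
Matrix.modelViewProjectionMatrix = Matrix.multiply(Matrix.projectionMatrix, modelViewMatrix);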

Android OpenGLES 2 ray picking from touch coordinates, unprojecting calculation slightly off

I am trying to implement object picking based on touch coordinates via an intersecting ray test.
I am having trouble finding information on converting the touch coordinates to the coordinate system used in the world in order to construct this ray.
My understanding so far is that the matrix that is applied to each vertex in the scene is:
projectionMatrix * viewMatrix * modelMatrix
Here is my process for reversing that transformation in an attempt to find the ray's endpoint in the scene, as well as my drawing loop in case I'm simply applying the different matrices incorrectly:
public float[] getMouseRayProjection(float touchX, float touchY, float windowWidth, float windowHeight, float[] modelView, float[] projection)
{
float[] rayDirection = new float[4];
float normalizedX = 2 * touchX/windowWidth - 1;
float normalizedY = 1 - 2*touchY/windowHeight;
float[] unviewMatrix = new float[16];
float[] viewMatrix = new float[16];
Matrix.multiplyMM(viewMatrix, 0, projection, 0, modelView, 0);
Matrix.invertM(unviewMatrix, 0, viewMatrix, 0);
float[] nearPoint = multiplyMat4ByVec4(projection, new float[]{normalizedX, normalizedY, 0, 1});
float[] modelviewInverse = new float[16];
Matrix.invertM(modelviewInverse, 0, modelView, 0);
float[] cameraPos = new float[4];
cameraPos[0] = modelviewInverse[12];
cameraPos[1] = modelviewInverse[13];
cameraPos[2] = modelviewInverse[14];
cameraPos[3] = modelviewInverse[15];
rayDirection[0] = nearPoint[0] - cameraPos[0];
rayDirection[1] = nearPoint[1] - cameraPos[1];
rayDirection[2] = nearPoint[2] - cameraPos[2];
rayDirection[3] = nearPoint[3] - cameraPos[3];
return rayDirection;
}
public float[] multiplyMat4ByVec4(float[] matrix4, float[] vector4)
{
float[] returnMatrix = new float[4];
returnMatrix[0] = (matrix4[0] * vector4[0]) + (matrix4[1] * vector4[1]) + (matrix4[2] * vector4[2]) + (matrix4[3] * vector4[3]);
returnMatrix[1] = (matrix4[4] * vector4[0]) + (matrix4[5] * vector4[1]) + (matrix4[6] * vector4[2]) + (matrix4[7] * vector4[3]);
returnMatrix[2] = (matrix4[8] * vector4[0]) + (matrix4[9] * vector4[1]) + (matrix4[10] * vector4[2]) + (matrix4[11] * vector4[3]);
returnMatrix[3] = (matrix4[12] * vector4[0]) + (matrix4[13] * vector4[1]) + (matrix4[14] * vector4[2]) + (matrix4[15] * vector4[3]);
return returnMatrix;
}
@Override
public void onDrawFrame(GL10 gl10) {
GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
long time = SystemClock.uptimeMillis() % 10000L;
float angleInDegrees = (360.0f / 10000.0f) * ((int) time);
GLES20.glViewport(0, 0, (int)(width/2), (int)(height/2));
Matrix.setIdentityM(mModelMatrix, 0);
Matrix.setLookAtM(viewMatrix, 0, 0f, 0f, 1.5f, 0f, 0f, -5f, 0f, 1f, 0f);
//Matrix.rotateM(mModelMatrix, 0, angleInDegrees, 0.0f, 0.0f, 1.0f);
drawTriangle(triangleVertices);
//Matrix.translateM(mModelMatrix, 0, 1.5f, 0, -1f);
//Matrix.frustumM(mProjectionMatrix, 0, left, right, -1.0f, 1.0f, 1.0f, 10.0f);
Matrix.setIdentityM(mModelMatrix, 0);
Matrix.setLookAtM(viewMatrix, 0, 1.5f, 0.8f, 0.5f, 0f, 0f, 0f, 0f, 1f, 0f);
GLES20.glViewport((int)(width/2), (int)(height/2), (int)(width/2), (int)(height/2));
drawTriangle(triangleVertices);
drawIntersectionLine();
/*
Matrix.setLookAtM(viewMatrix, 0, 0, 1.5f, 0.5f, 0, 0, 0, 0, 0, -1f);
GLES20.glViewport((int)(width/2), (int)height, (int)(width/2), (int)(height/2));
drawTriangle(triangleVertices);
drawIntersectionLine();
*/
}
private void drawTriangle(final FloatBuffer triangleBuffer)
{
triangleBuffer.position(positionOffset);
GLES20.glVertexAttribPointer(mPositionHandle, positionDataSize, GLES20.GL_FLOAT, false, strideBytes, triangleBuffer);
GLES20.glEnableVertexAttribArray(mPositionHandle);
triangleBuffer.position(colorOffset);
GLES20.glVertexAttribPointer(mColorHandle, colorDataSize, GLES20.GL_FLOAT, false, strideBytes, triangleBuffer);
GLES20.glEnableVertexAttribArray(mColorHandle);
Matrix.multiplyMM(mMVPMatrix, 0, viewMatrix, 0, mModelMatrix, 0);
mMVMatrix = mMVPMatrix;
Matrix.multiplyMM(mMVPMatrix, 0, mProjectionMatrix, 0, mMVPMatrix, 0);
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mMVPMatrix, 0);
GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, 3);
//Log.d("OpenGLES2Test", "The intersection ray is: " + floatArrayAsString(getCameraPos(mMVMatrix)) + " + " + floatArrayAsString(getMouseRayProjection((int)(width / 2), (int)(height / 2), 1.0f, (int)width, (int)height, mMVMatrix, mProjectionMatrix)));
}
private void drawIntersectionLine()
{
lineVertices.position(0);
GLES20.glVertexAttribPointer(mPositionHandle, positionDataSize, GLES20.GL_FLOAT, false, lineStrideBytes, lineVertices);
GLES20.glEnableVertexAttribArray(mPositionHandle);
GLES20.glDrawArrays(GLES20.GL_LINES, 0, 2);
}
private void moveIntersectionLineEndPoint(float[] lineEndPoint)
{
this.lineEndPoint = lineEndPoint;
float[] lineVerticesData = {
lineStartPoint[0], lineStartPoint[1], lineStartPoint[2],
lineEndPoint[0], lineEndPoint[1], lineEndPoint[2]
};
lineVertices = ByteBuffer.allocateDirect(lineVerticesData.length * bytesPerFloat).order(ByteOrder.nativeOrder()).asFloatBuffer();
lineVertices.put(lineVerticesData).position(0);
}
Although I'm pretty sure my 4x4 matrix by 4d vector multiplication method is correct, here is that method as well just in case:
public float[] multiplyMat4ByVec4(float[] matrix4, float[] vector4)
{
float[] returnMatrix = new float[4];
returnMatrix[0] = (matrix4[0] * vector4[0]) + (matrix4[1] * vector4[1]) + (matrix4[2] * vector4[2]) + (matrix4[3] * vector4[3]);
returnMatrix[1] = (matrix4[4] * vector4[0]) + (matrix4[5] * vector4[1]) + (matrix4[6] * vector4[2]) + (matrix4[7] * vector4[3]);
returnMatrix[2] = (matrix4[8] * vector4[0]) + (matrix4[9] * vector4[1]) + (matrix4[10] * vector4[2]) + (matrix4[11] * vector4[3]);
returnMatrix[3] = (matrix4[12] * vector4[0]) + (matrix4[13] * vector4[1]) + (matrix4[14] * vector4[2]) + (matrix4[15] * vector4[3]);
return returnMatrix;
}
The goal of this test app is to show the scene from a few separate angles so that I can see how the intersection line looks based on my code. I wanted to draw the line starting at the camera's origin and ending at the intersection point, but it's acting oddly. The endpoint seems to be pushed farther along the x axis in the positive direction than it should be, and in some spots it seems to sort of skip, as if there were a hole at that location. Although I still remember a bit of linear algebra from calculus, I don't remember enough to know exactly what I'm doing here, and I've scoured many of the resources online with no luck. I'm hoping someone who reads this will have more experience dealing with this than I do and will be kind enough to help me, or give me any tips if there's something else that I may be doing wrong or in an inefficient way.
Variable Reference:
Matrices are all float arrays of length 16
mProjectionMatrix = projection matrix
mModelMatrix = model matrix
mMVPMatrix = projection * modelview matrix
mMVMatrix = modelview matrix
private final FloatBuffer triangleVertices;
private FloatBuffer lineVertices;
private final int bytesPerFloat = 4;
private float[] viewMatrix = new float[16];
private static Context context;
private int mMVPMatrixHandle;
private int mPositionHandle;
private int mColorHandle;
private float[] mProjectionMatrix = new float[16];
private float[] mModelMatrix = new float[16];
private float[] mMVPMatrix = new float[16];
private float[] mMVMatrix = new float[16];
private final int strideBytes = 7 * bytesPerFloat;
private final int lineStrideBytes = 3 * bytesPerFloat;
private final int positionOffset = 0;
private final int positionDataSize = 3;
private final int colorOffset = 3;
private final int colorDataSize = 4;
private float width, height;
private float[] lineStartPoint = new float[]{0, 0, 1.5f};
private float[] lineEndPoint = new float[]{0, 0, 0};
After some searching, I found a page that details this process in a different manner. Now I no longer have the issue with the end of the ray jumping to an unexpected position at random times, and the endpoint points to the exact location it should!
Here is the page I used to fix my process:
http://www.antongerdelan.net/opengl/raycasting.html
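In short, that page walks the touch position through a chain of coordinate spaces. A rough sketch of the steps, using the same touchX/touchY/windowWidth/windowHeight parameters as the method below (the full version is in getMouseRayProjection):
// 1. Screen -> normalized device coordinates; y is flipped because screen y grows downward.
float ndcX = 2f * touchX / windowWidth - 1f;
float ndcY = 1f - 2f * touchY / windowHeight;
// 2. NDC -> clip space: aim the ray into the scene along -z.
float[] rayClip = new float[]{ndcX, ndcY, -1f, 1f};
// 3. Clip -> eye space: multiply by the inverse projection matrix, then keep only x/y and
//    mark the result as a direction (z = -1, w = 0).
// 4. Eye -> world space: multiply by the inverse view matrix, take xyz, and normalize.
//    The camera position is the ray origin; this vector is the ray direction.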
And here is my final source code for the entire intersection testing app. Most of the relevant code is within the OpenGLRenderer class under the getMouseRayProjection method.
MainActivity.java:
import android.opengl.GLSurfaceView;
import android.os.Bundle;
import android.app.Activity;
import android.content.Context;
import android.view.Menu;
import android.view.MotionEvent;
public class MainActivity extends Activity {
private MyGLSurfaceView mGLSurfaceView;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
mGLSurfaceView = new MyGLSurfaceView(this);
mGLSurfaceView.setEGLContextClientVersion(2);
mGLSurfaceView.setEGLConfigChooser(8, 8, 8, 8, 16, 0);
OpenGLRenderer renderer = new OpenGLRenderer(this);
mGLSurfaceView.setRenderer(renderer);
mGLSurfaceView.renderer = renderer;
setContentView(mGLSurfaceView);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.main, menu);
return true;
}
@Override
protected void onResume() {
super.onResume();
mGLSurfaceView.onResume();
}
@Override
protected void onPause() {
super.onPause();
mGLSurfaceView.onPause();
}
}
class MyGLSurfaceView extends GLSurfaceView {
public OpenGLRenderer renderer;
public float previousX, previousY;
public MyGLSurfaceView(Context context)
{
super(context);
}
@Override
public boolean onTouchEvent(MotionEvent e)
{
float x = e.getX();
float y = e.getY();
switch(e.getAction()) {
case MotionEvent.ACTION_MOVE:
float dx = x - previousX;
float dy = y - previousY;
renderer.onTouch(x, y);
}
previousX = x;
previousY = y;
return true;
}
}
OpenGLRenderer.java:
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
import android.content.Context;
import android.opengl.GLES20;
import android.opengl.GLU;
import android.opengl.Matrix;
import android.opengl.GLSurfaceView;
import android.os.SystemClock;
import android.util.Log;
public class OpenGLRenderer implements GLSurfaceView.Renderer {
private final FloatBuffer triangleVertices;
private FloatBuffer lineVertices;
private final int bytesPerFloat = 4;
private float[] viewMatrix = new float[16];
private static Context context;
private int mMVPMatrixHandle;
private int mPositionHandle;
private int mColorHandle;
private float[] mProjectionMatrix = new float[16];
private float[] mModelMatrix = new float[16];
private float[] mMVPMatrix = new float[16];
private float[] mMVMatrix = new float[16];
private int[] viewport = new int[4];
private final int strideBytes = 7 * bytesPerFloat;
private final int lineStrideBytes = 3 * bytesPerFloat;
private final int positionOffset = 0;
private final int positionDataSize = 3;
private final int colorOffset = 3;
private final int colorDataSize = 4;
private float width, height;
private float[] lineStartPoint = new float[]{0, 0, 1f};
private float[] lineEndPoint = new float[]{0, 0, 0};
private float[] cameraPos = new float[]{0f, 0f, 2.5f};
private float[] cameraLook = new float[]{0f, 0f, -5f};
private float[] cameraUp = new float[]{0f, 1f, 0f};
public OpenGLRenderer(Context context) {
this.context = context;
final float[] triangleVerticesData = {
-0.5f, -0.25f, 0.0f,
1.0f, 0.0f, 0.0f, 1.0f,
0.5f, -0.25f, 0.0f,
0.0f, 0.0f, 1.0f, 1.0f,
0.0f, 0.559016994f, 0.0f,
0.0f, 1.0f, 0.0f, 1.0f
};
triangleVertices = ByteBuffer.allocateDirect(triangleVerticesData.length * bytesPerFloat).order(ByteOrder.nativeOrder()).asFloatBuffer();
triangleVertices.put(triangleVerticesData).position(0);
float[] lineVerticesData = {
lineStartPoint[0], lineStartPoint[1], lineStartPoint[2],
lineEndPoint[0], lineEndPoint[1], lineEndPoint[2]
};
lineVertices = ByteBuffer.allocateDirect(lineVerticesData.length * bytesPerFloat).order(ByteOrder.nativeOrder()).asFloatBuffer();
lineVertices.put(lineVerticesData).position(0);
}
@Override
public void onSurfaceCreated(GL10 gl10, EGLConfig eglConfig) {
GLES20.glClearColor(0.5f, 0.5f, 0.5f, 0.5f);
Matrix.setLookAtM(viewMatrix, 0, cameraPos[0], cameraPos[1], cameraPos[2], cameraLook[0], cameraLook[1], cameraLook[2], cameraUp[0], cameraUp[1], cameraUp[2]);
try {
int vertexShaderHandle = GLES20.glCreateShader(GLES20.GL_VERTEX_SHADER);
if (vertexShaderHandle != 0)
{
GLES20.glShaderSource(vertexShaderHandle, readShader("vertexShader"));
GLES20.glCompileShader(vertexShaderHandle);
final int[] compileStatus = new int[1];
GLES20.glGetShaderiv(vertexShaderHandle, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
if (compileStatus[0] == 0)
{
GLES20.glDeleteShader(vertexShaderHandle);
vertexShaderHandle = 0;
}
}
if (vertexShaderHandle == 0)
{
throw new RuntimeException("Error creating vertex shader");
}
int fragmentShaderHandle = GLES20.glCreateShader(GLES20.GL_FRAGMENT_SHADER);
if (fragmentShaderHandle != 0)
{
GLES20.glShaderSource(fragmentShaderHandle, readShader("fragmentShader"));
GLES20.glCompileShader(fragmentShaderHandle);
final int[] compileStatus = new int[1];
GLES20.glGetShaderiv(fragmentShaderHandle, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
if (compileStatus[0] == 0)
{
GLES20.glDeleteShader(fragmentShaderHandle);
fragmentShaderHandle = 0;
}
}
if (fragmentShaderHandle == 0)
{
throw new RuntimeException("Error creating fragment shader.");
}
int programHandle = GLES20.glCreateProgram();
if (programHandle != 0)
{
GLES20.glAttachShader(programHandle, vertexShaderHandle);
GLES20.glAttachShader(programHandle, fragmentShaderHandle);
GLES20.glBindAttribLocation(programHandle, 0, "a_Position");
GLES20.glBindAttribLocation(programHandle, 1, "a_Color");
GLES20.glLinkProgram(programHandle);
final int[] linkStatus = new int[1];
GLES20.glGetProgramiv(programHandle, GLES20.GL_LINK_STATUS, linkStatus, 0);
if (linkStatus[0] == 0)
{
GLES20.glDeleteProgram(programHandle);
programHandle = 0;
}
}
if (programHandle == 0)
{
throw new RuntimeException("Error creating program.");
}
mMVPMatrixHandle = GLES20.glGetUniformLocation(programHandle, "u_MVPMatrix");
mPositionHandle = GLES20.glGetAttribLocation(programHandle, "a_Position");
mColorHandle = GLES20.glGetAttribLocation(programHandle, "a_Color");
GLES20.glUseProgram(programHandle);
} catch (IOException e)
{
Log.d("OpenGLES2Test", "The shader could not be read: " + e.getMessage());
} catch (RuntimeException e)
{
Log.d("OpenGLES2Test", e.getMessage());
}
GLES20.glEnable(GLES20.GL_DEPTH_TEST);
GLES20.glDepthFunc(GLES20.GL_LEQUAL);
GLES20.glDepthMask(true);
}
@Override
public void onSurfaceChanged(GL10 gl10, int width, int height) {
GLES20.glViewport(0, 0, width/2, height/2);
this.width = width;
this.height = height;
final float ratio = (float) width / height;
final float left = -ratio;
final float right = ratio;
final float bottom = -1.0f;
final float top = 1.0f;
final float near = 1.0f;
final float far = 10.0f;
GLES20.glGetIntegerv(GLES20.GL_VIEWPORT, viewport, 0);
Matrix.frustumM(mProjectionMatrix, 0, left, right, bottom, top, near, far);
}
@Override
public void onDrawFrame(GL10 gl10) {
GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
long time = SystemClock.uptimeMillis() % 10000L;
GLES20.glViewport(0, 0, (int)(width), (int)(height));
Matrix.setIdentityM(mModelMatrix, 0);
Matrix.setLookAtM(viewMatrix, 0, cameraPos[0], cameraPos[1], cameraPos[2], cameraLook[0], cameraLook[1], cameraLook[2], cameraUp[0], cameraUp[1], cameraUp[2]);
Matrix.multiplyMM(mMVMatrix, 0, viewMatrix, 0, mModelMatrix, 0);
Matrix.multiplyMM(mMVPMatrix, 0, mProjectionMatrix, 0, mMVMatrix, 0);
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mMVPMatrix, 0);
drawTriangle(triangleVertices);
drawIntersectionLine();
}
private void drawTriangle(final FloatBuffer triangleBuffer)
{
triangleBuffer.position(positionOffset);
GLES20.glVertexAttribPointer(mPositionHandle, positionDataSize, GLES20.GL_FLOAT, false, strideBytes, triangleBuffer);
GLES20.glEnableVertexAttribArray(mPositionHandle);
triangleBuffer.position(colorOffset);
GLES20.glVertexAttribPointer(mColorHandle, colorDataSize, GLES20.GL_FLOAT, false, strideBytes, triangleBuffer);
GLES20.glEnableVertexAttribArray(mColorHandle);
GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, 3);
}
private void drawIntersectionLine()
{
lineVertices.position(0);
GLES20.glVertexAttribPointer(mPositionHandle, positionDataSize, GLES20.GL_FLOAT, false, lineStrideBytes, lineVertices);
GLES20.glEnableVertexAttribArray(mPositionHandle);
GLES20.glDrawArrays(GLES20.GL_LINES, 0, 2);
}
private void moveIntersectionLineEndPoint(float[] lineEndPoint)
{
this.lineEndPoint = lineEndPoint;
float[] lineVerticesData = {
lineStartPoint[0], lineStartPoint[1], lineStartPoint[2],
lineEndPoint[0], lineEndPoint[1], lineEndPoint[2]
};
lineVertices = ByteBuffer.allocateDirect(lineVerticesData.length * bytesPerFloat).order(ByteOrder.nativeOrder()).asFloatBuffer();
lineVertices.put(lineVerticesData).position(0);
}
public static String readShader(String filePath) throws IOException {
BufferedReader reader = new BufferedReader(new InputStreamReader(context.getAssets().open(filePath)));
StringBuilder sb = new StringBuilder();
String line;
while( ( line = reader.readLine() ) != null)
{
sb.append(line + "\n");
}
reader.close();
return sb.toString();
}
public float[] getMouseRayProjection(float touchX, float touchY, float windowWidth, float windowHeight, float[] view, float[] projection)
{
float[] rayDirection = new float[4];
float normalizedX = 2f * touchX/windowWidth - 1f;
float normalizedY = 1f - 2f*touchY/windowHeight;
float normalizedZ = 1.0f;
float[] rayNDC = new float[]{normalizedX, normalizedY, normalizedZ};
float[] rayClip = new float[]{rayNDC[0], rayNDC[1], -1f, 1f};
float[] inverseProjection = new float[16];
Matrix.invertM(inverseProjection, 0, projection, 0);
float[] rayEye = multiplyMat4ByVec4(inverseProjection, rayClip);
// Per the ray casting tutorial linked above, keep only x/y of the eye-space result and
// turn it into a direction: z = -1 (forward), w = 0 so the view translation drops out.
rayEye = new float[]{rayEye[0], rayEye[1], -1f, 0f};
float[] inverseView = new float[16];
Matrix.invertM(inverseView, 0, view, 0);
float[] rayWorld4D = multiplyMat4ByVec4(inverseView, rayEye);
float[] rayWorld = new float[]{rayWorld4D[0], rayWorld4D[1], rayWorld4D[2]};
rayDirection = normalizeVector3(rayWorld);
return rayDirection;
}
public float[] normalizeVector3(float[] vector3)
{
float[] normalizedVector = new float[3];
float magnitude = (float) Math.sqrt((vector3[0] * vector3[0]) + (vector3[1] * vector3[1]) + (vector3[2] * vector3[2]));
normalizedVector[0] = vector3[0] / magnitude;
normalizedVector[1] = vector3[1] / magnitude;
normalizedVector[2] = vector3[2] / magnitude;
return normalizedVector;
}
/*
public float[] getMouseRayProjection(float touchX, float touchY, float windowWidth, float windowHeight, float[] modelView, float[] projection)
{
float[] rayDirection = new float[4];
float normalizedX = 2 * touchX/windowWidth - 1;
float normalizedY = 1 - 2*touchY/windowHeight;
float[] unviewMatrix = new float[16];
float[] viewMatrix = new float[16];
Matrix.multiplyMM(viewMatrix, 0, projection, 0, modelView, 0);
Matrix.invertM(unviewMatrix, 0, viewMatrix, 0);
float[] nearPoint = multiplyMat4ByVec4(unviewMatrix, new float[]{normalizedX, normalizedY, 0, 1});
float[] modelviewInverse = new float[16];
Matrix.invertM(modelviewInverse, 0, modelView, 0);
float[] cameraPos = new float[4];
cameraPos[0] = modelviewInverse[12];
cameraPos[1] = modelviewInverse[13];
cameraPos[2] = modelviewInverse[14];
cameraPos[3] = modelviewInverse[15];
rayDirection[0] = (nearPoint[0] - cameraPos[0]);
rayDirection[1] = (nearPoint[1] - cameraPos[1]);
rayDirection[2] = (nearPoint[2] - cameraPos[2]);
rayDirection[3] = (nearPoint[3] - cameraPos[3]);
return rayDirection;
}
*/
/*
public float[] getOGLPosition(int x, int y)
{
GLU.gluUnProject(x, y, 0, , modelOffset, project, projectOffset, view, viewOffset, obj, objOffset)
}
*/
public float[] getCameraPos(float[] modelView)
{
float[] modelviewInverse = new float[16];
Matrix.invertM(modelviewInverse, 0, modelView, 0);
float[] cameraPos = new float[4];
cameraPos[0] = modelviewInverse[12];
cameraPos[1] = modelviewInverse[13];
cameraPos[2] = modelviewInverse[14];
cameraPos[3] = modelviewInverse[15];
return cameraPos;
}
public String floatArrayAsString(float[] array)
{
StringBuilder sb = new StringBuilder();
sb.append("[");
for (Float f : array)
{
sb.append(f + ", ");
}
sb.deleteCharAt(sb.length() - 1);
sb.deleteCharAt(sb.length() - 1);
sb.append("]");
return sb.toString();
}
public float[] getInverseMatrix(float[] originalMatrix)
{
float[] inverseMatrix = new float[16];
Matrix.invertM(inverseMatrix, 0, originalMatrix, 0);
return inverseMatrix;
}
public float[] multiplyMat4ByVec4(float[] matrix4, float[] vector4)
{
float[] returnMatrix = new float[4];
returnMatrix[0] = (matrix4[0] * vector4[0]) + (matrix4[1] * vector4[1]) + (matrix4[2] * vector4[2]) + (matrix4[3] * vector4[3]);
returnMatrix[1] = (matrix4[4] * vector4[0]) + (matrix4[5] * vector4[1]) + (matrix4[6] * vector4[2]) + (matrix4[7] * vector4[3]);
returnMatrix[2] = (matrix4[8] * vector4[0]) + (matrix4[9] * vector4[1]) + (matrix4[10] * vector4[2]) + (matrix4[11] * vector4[3]);
returnMatrix[3] = (matrix4[12] * vector4[0]) + (matrix4[13] * vector4[1]) + (matrix4[14] * vector4[2]) + (matrix4[15] * vector4[3]);
return returnMatrix;
}
public void onTouch(float touchX, float touchY)
{
float[] mouseRayProjection = getMouseRayProjection(touchX, touchY, width, height, mMVMatrix, mProjectionMatrix);
Log.d("OpenGLES2Test", "Mouse Ray: " + floatArrayAsString(mouseRayProjection));
//Log.d("OpenGLES2Test", "ModelView: " + floatArrayAsString(mMVMatrix));
//Log.d("OpenGLES2Test", "ModelViewInverse: " + floatArrayAsString(getInverseMatrix(mMVMatrix)));
//Log.d("OpenGLES2Test", "Mouse Coordinates: " + touchX + ", " + touchY);
//Log.d("OpenGLES2Test", "Ray Coordinates: " + mouseRayProjection[0] + ", " + mouseRayProjection[1] + ", " + mouseRayProjection[2] + ", " + mouseRayProjection[3]);
moveIntersectionLineEndPoint(mouseRayProjection);
}
}
fragmentShader:
precision mediump float;
varying vec4 v_Color;
void main()
{
gl_FragColor = v_Color;
}
vertexShader:
uniform mat4 u_MVPMatrix;
attribute vec4 a_Position;
attribute vec4 a_Color;
varying vec4 v_Color;
void main()
{
v_Color = a_Color;
gl_Position = u_MVPMatrix * a_Position;
}
Your code is good, but it did not work for me. After studying the code from Rajawali ( https://github.com/MasDennis/Rajawali ), I would suggest the following changes to your final code:
public float[] getMouseRayProjection(float touchX, float touchY, float windowWidth, float windowHeight, float[] view, float[] projection)
{
float[] rayDirection = new float[4];
float normalizedX = 2f * touchX/windowWidth - 1f;
float normalizedY = 1f - 2f*touchY/windowHeight;
float normalizedZ = 1.0f;
float[] rayClip1 = new float[]{normalizedX, normalizedY, -1, 1};
float[] rayClip2 = new float[]{normalizedX, normalizedY, 1, 1};
float[] mVPMatrix = new float[16];
float[] invertVPMatrix = new float[16];
Matrix.multiplyMM(mVPMatrix, 0, mProjectionMatrix, 0, mViewMatrix, 0);
Matrix.invertM(invertVPMatrix, 0, mVPMatrix, 0);
float[] rayWorld1 = new float[4];
Matrix.multiplyMV(rayWorld1, 0, invertVPMatrix, 0, rayClip1, 0);
float[] rayWorld2 = new float[4];
Matrix.multiplyMV(rayWorld2, 0, invertVPMatrix, 0, rayClip2, 0);
if (rayWorld1[3]!=0 && rayWorld2[3]!=0)
{
rayWorld1[0] = rayWorld1[0] / rayWorld1[3];
rayWorld1[1] = rayWorld1[1] / rayWorld1[3];
rayWorld1[2] = rayWorld1[2] / rayWorld1[3];
rayWorld1[3] = 1;
rayWorld2[0] = rayWorld2[0] / rayWorld2[3];
rayWorld2[1] = rayWorld2[1] / rayWorld2[3];
rayWorld2[2] = rayWorld2[2] / rayWorld2[3];
rayWorld2[3] = 1;
}
.... continue to determine which object the ray intersects.
I think the most important part is that you should divide the vector by the w component.
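As a small illustration of that last step, here is a minimal ray-sphere test; rayOrigin, rayDirection (normalized), sphereCenter and sphereRadius are assumed inputs rather than variables defined in the code above:
// Minimal sketch of a ray-sphere intersection test. rayOrigin and rayDirection are the
// world-space ray computed above; sphereCenter and sphereRadius describe the target sphere.
public static boolean intersectsSphere(float[] rayOrigin, float[] rayDirection,
                                       float[] sphereCenter, float sphereRadius)
{
    // Vector from the sphere center to the ray origin.
    float ocX = rayOrigin[0] - sphereCenter[0];
    float ocY = rayOrigin[1] - sphereCenter[1];
    float ocZ = rayOrigin[2] - sphereCenter[2];
    // Coefficients of the quadratic |origin + t * direction - center|^2 = radius^2.
    // a == 1 because the direction is normalized.
    float b = 2f * (ocX * rayDirection[0] + ocY * rayDirection[1] + ocZ * rayDirection[2]);
    float c = ocX * ocX + ocY * ocY + ocZ * ocZ - sphereRadius * sphereRadius;
    float discriminant = b * b - 4f * c;
    if (discriminant < 0f)
    {
        return false; // The ray misses the sphere entirely.
    }
    // Nearest hit distance along the ray; negative means the sphere is behind the origin.
    float t = (-b - (float) Math.sqrt(discriminant)) / 2f;
    return t >= 0f;
}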
