libgdx - GLSL - Create a vertex shader that makes objects wave - java

I have some Mesh objects, like a flat rectangle:
private void generateMesh(float xshift_mesh, float yshift_mesh, float zshift_mesh, float size_mesh) {
    float size = size_mesh / 2;
    mesh = new Mesh(true, 4, 6, VertexAttribute.Position(), VertexAttribute.ColorUnpacked(), VertexAttribute.TexCoords(0), VertexAttribute.Normal());
    mesh.setVertices(new float[]
        // Position XYZ                                        Color RGBA    Texture Coordinates UV    Normal XYZ
        {
            -size+xshift_mesh, -size+yshift_mesh, zshift_mesh,   1, 1, 1, 1,   0, 1,   0, 0, -1,
             size+xshift_mesh, -size+yshift_mesh, zshift_mesh,   1, 1, 1, 1,   1, 1,   0, 0,  1,
             size+xshift_mesh,  size+yshift_mesh, zshift_mesh,   1, 1, 1, 1,   1, 0,   0, 0,  1,
            -size+xshift_mesh,  size+yshift_mesh, zshift_mesh,   1, 1, 1, 1,   0, 0,   0, 0, -1
        });
    mesh.setIndices(new short[] {0, 1, 2, 2, 3, 0});
}
My task is to create a vertex shader that makes such an object wave in some way.
My idea:
First I pass the vertex shader a uniform variable that toggles between 0 and 1 every second:
Date endDate = new Date();
float numSeconds = (float)((endDate.getTime() - startDate.getTime()) / 1000);
float even = (numSeconds % 2);
...
shader.setUniformMatrix("u_worldView", cam.combined);//WVP-Matrix
shader.setUniformi("u_texture", 0);
shader.setUniformf("u_even", even);
Each vertex of the rectangle has a normal vector, as you can see above in the generateMesh method. I can access and use this vector inside the vertex shader:
uniform float u_even;
uniform mat4 u_worldView;
attribute vec4 a_color;
attribute vec4 a_position;
attribute vec2 a_texCoord0;
attribute vec4 a_normal;
varying vec4 v_color;
varying vec2 v_texCoords;
void main() {
    v_color = a_color;
    v_texCoords = a_texCoord0;
    if (u_even > 0.5) gl_Position = u_worldView * a_position + a_normal;
    else gl_Position = u_worldView * a_position - a_normal;
}
I expected that the object would change every second, which it does in some way, but the camera position also seems to be manipulated each time. The view even moves back and forth when the values of the normal vector are all 0.
I have also tried:
if(u_even > 0.5) gl_Position = u_worldView * (a_position + a_normal);
else gl_Position = u_worldView * (a_position - a_normal);
But it's even worse...
So how can I change the position of a vertex without changing the whole point of view?
I appreciate your help!

It's been a while. I just want to close the question as resolved.
The problem lay in the generateMesh function, specifically in the normal XYZ values:
private void generateMesh(float xshift_mesh, float yshift_mesh, float zshift_mesh, float size_mesh) {
    if (size_mesh < 0) {
        return;
    }
    float size = size_mesh / 2;
    mesh = new Mesh(true, 4, 6, VertexAttribute.Position(), VertexAttribute.ColorUnpacked(), VertexAttribute.TexCoords(0), VertexAttribute.Normal());
    mesh.setVertices(new float[]
        // Position XYZ                                        Color RGBA    Texture Coordinates UV    Normal XYZ
        {
            -size+xshift_mesh, -size+yshift_mesh, zshift_mesh,   1, 1, 1, 1,   0, 1,   0, 0,  1,
             size+xshift_mesh, -size+yshift_mesh, zshift_mesh,   1, 1, 1, 1,   1, 1,   0, 0,  1,
             size+xshift_mesh,  size+yshift_mesh, zshift_mesh,   1, 1, 1, 1,   1, 0,   0, 0, -1,
            -size+xshift_mesh,  size+yshift_mesh, zshift_mesh,   1, 1, 1, 1,   0, 0,   0, 0, -1
        });
    mesh.setIndices(new short[] {0, 1, 2, 2, 3, 0});
}
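For reference, here is a sketch (not from the original post) of a smoother wave once the normals are consistent: instead of the 0/1 toggle, pass the elapsed seconds as a float uniform (u_time is an assumed name, as are the amplitude and frequency values) and displace each vertex along its normal with a sine. Note that w is reset to 1.0 after the displacement; since a_normal is declared as vec4 its w component defaults to 1, and adding it to the position would otherwise change w and distort the projection, which is likely why the whole view appeared to move.
uniform float u_time;
uniform mat4 u_worldView;
attribute vec4 a_position;
attribute vec4 a_normal;
attribute vec4 a_color;
attribute vec2 a_texCoord0;
varying vec4 v_color;
varying vec2 v_texCoords;
void main() {
    v_color = a_color;
    v_texCoords = a_texCoord0;
    float amplitude = 0.2;                                          // assumed wave height
    vec4 displaced = a_position + a_normal * sin(u_time * 3.0) * amplitude;
    displaced.w = 1.0;                                              // keep w at 1 so the projection is unaffected
    gl_Position = u_worldView * displaced;
}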

Related

How to write text or put image on top of 3D cube in OpenGL java in android (Object is used in Augmenting Purpose)

I'm developing an Augmented Reality based application using the EasyAR SDK on Android. It renders a cube on top of the image target by default. I want to print something on top of that cube. What should I do? I'm new to OpenGL, please help.
If I can put an image on top of that cube, that's also fine! I just want to display "Loading" on that cube; whether it is an image or text doesn't really matter!
This is the current situation:
And I need something like this:
Here is my code that renders the box on top of the image target.
import android.opengl.GLES20;
import java.nio.ByteBuffer;
import java.nio.FloatBuffer;
import java.nio.ShortBuffer;
import cn.easyar.Vec2F;
import cn.easyar.Matrix44F;
public class BoxRenderer {
private int program_box;
private int pos_coord_box;
private int pos_color_box;
private int pos_trans_box;
private int pos_proj_box;
private int vbo_coord_box;
private int vbo_color_box;
private int vbo_color_box_2;
private int vbo_faces_box;
private String box_vert = "uniform mat4 trans;\n"
+ "uniform mat4 proj;\n"
+ "attribute vec4 coord;\n"
+ "attribute vec4 color;\n"
+ "varying vec4 vcolor;\n"
+ "\n"
+ "void main(void)\n"
+ "{\n"
+ " vcolor = color;\n"
+ " gl_Position = proj*trans*coord;\n"
+ "}\n"
+ "\n";
private String box_frag = "#ifdef GL_ES\n"
+ "precision highp float;\n"
+ "#endif\n"
+ "varying vec4 vcolor;\n"
+ "\n"
+ "void main(void)\n"
+ "{\n"
+ " gl_FragColor = vcolor;\n"
+ "}\n"
+ "\n";
private float[] flatten(float[][] a) {
int size = 0;
for (int k = 0; k < a.length; k += 1) {
size += a[k].length;
}
float[] l = new float[size];
int offset = 0;
for (int k = 0; k < a.length; k += 1) {
System.arraycopy(a[k], 0, l, offset, a[k].length);
offset += a[k].length;
}
return l;
}
private int[] flatten(int[][] a) {
int size = 0;
for (int k = 0; k < a.length; k += 1) {
size += a[k].length;
}
int[] l = new int[size];
int offset = 0;
for (int k = 0; k < a.length; k += 1) {
System.arraycopy(a[k], 0, l, offset, a[k].length);
offset += a[k].length;
}
return l;
}
private short[] flatten(short[][] a) {
int size = 0;
for (int k = 0; k < a.length; k += 1) {
size += a[k].length;
}
short[] l = new short[size];
int offset = 0;
for (int k = 0; k < a.length; k += 1) {
System.arraycopy(a[k], 0, l, offset, a[k].length);
offset += a[k].length;
}
return l;
}
private byte[] flatten(byte[][] a) {
int size = 0;
for (int k = 0; k < a.length; k += 1) {
size += a[k].length;
}
byte[] l = new byte[size];
int offset = 0;
for (int k = 0; k < a.length; k += 1) {
System.arraycopy(a[k], 0, l, offset, a[k].length);
offset += a[k].length;
}
return l;
}
private byte[] byteArrayFromIntArray(int[] a) {
byte[] l = new byte[a.length];
for (int k = 0; k < a.length; k += 1) {
l[k] = (byte) (a[k] & 0xFF);
}
return l;
}
private int generateOneBuffer() {
int[] buffer = {0};
GLES20.glGenBuffers(1, buffer, 0);
return buffer[0];
}
public void init() {
program_box = GLES20.glCreateProgram();
int vertShader = GLES20.glCreateShader(GLES20.GL_VERTEX_SHADER);
GLES20.glShaderSource(vertShader, box_vert);
GLES20.glCompileShader(vertShader);
int fragShader = GLES20.glCreateShader(GLES20.GL_FRAGMENT_SHADER);
GLES20.glShaderSource(fragShader, box_frag);
GLES20.glCompileShader(fragShader);
GLES20.glAttachShader(program_box, vertShader);
GLES20.glAttachShader(program_box, fragShader);
GLES20.glLinkProgram(program_box);
GLES20.glUseProgram(program_box);
pos_coord_box = GLES20.glGetAttribLocation(program_box, "coord");
pos_color_box = GLES20.glGetAttribLocation(program_box, "color");
pos_trans_box = GLES20.glGetUniformLocation(program_box, "trans");
pos_proj_box = GLES20.glGetUniformLocation(program_box, "proj");
vbo_coord_box = generateOneBuffer();
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vbo_coord_box);
float cube_vertices[][] = {
/* +z */{1.0f / 2, 1.0f / 2, 0.01f / 2}, {1.0f / 2, -1.0f / 2, 0.01f / 2}, {-1.0f / 2, -1.0f / 2, 0.01f / 2}, {-1.0f / 2, 1.0f / 2, 0.01f / 2},
/* -z */{1.0f / 2, 1.0f / 2, -0.01f / 2}, {1.0f / 2, -1.0f / 2, -0.01f / 2}, {-1.0f / 2, -1.0f / 2, -0.01f / 2}, {-1.0f / 2, 1.0f / 2, -0.01f / 2}
};
FloatBuffer cube_vertices_buffer = FloatBuffer.wrap(flatten(cube_vertices));
GLES20.glBufferData(GLES20.GL_ARRAY_BUFFER, cube_vertices_buffer.limit() * 4, cube_vertices_buffer, GLES20.GL_DYNAMIC_DRAW);
vbo_color_box = generateOneBuffer();
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vbo_color_box);
int cube_vertex_colors[][] = {
{255, 0, 0, 128}, {0, 255, 0, 128}, {0, 0, 255, 128}, {0, 0, 0, 128},
{0, 255, 255, 128}, {255, 0, 255, 128}, {255, 255, 0, 128}, {255, 255, 255, 128}};
ByteBuffer cube_vertex_colors_buffer = ByteBuffer.wrap(byteArrayFromIntArray(flatten(cube_vertex_colors)));
GLES20.glBufferData(GLES20.GL_ARRAY_BUFFER, cube_vertex_colors_buffer.limit(), cube_vertex_colors_buffer, GLES20.GL_STATIC_DRAW);
vbo_color_box_2 = generateOneBuffer();
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vbo_color_box_2);
int cube_vertex_colors_2[][] = {
{255, 0, 0, 255}, {255, 255, 0, 255}, {0, 255, 0, 255}, {255, 0, 255, 255},
{255, 0, 255, 255}, {255, 255, 255, 255}, {0, 255, 255, 255}, {255, 0, 255, 255}};
ByteBuffer cube_vertex_colors_2_buffer = ByteBuffer.wrap(byteArrayFromIntArray(flatten(cube_vertex_colors_2)));
GLES20.glBufferData(GLES20.GL_ARRAY_BUFFER, cube_vertex_colors_2_buffer.limit(), cube_vertex_colors_2_buffer, GLES20.GL_STATIC_DRAW);
vbo_faces_box = generateOneBuffer();
GLES20.glBindBuffer(GLES20.GL_ELEMENT_ARRAY_BUFFER, vbo_faces_box);
short cube_faces[][] = {
/* +z */{3, 2, 1, 0}, /* -y */{2, 3, 7, 6}, /* +y */{0, 1, 5, 4},
/* -x */{3, 0, 4, 7}, /* +x */{1, 2, 6, 5}, /* -z */{4, 5, 6, 7}};
ShortBuffer cube_faces_buffer = ShortBuffer.wrap(flatten(cube_faces));
GLES20.glBufferData(GLES20.GL_ELEMENT_ARRAY_BUFFER, cube_faces_buffer.limit() * 2, cube_faces_buffer, GLES20.GL_STATIC_DRAW);
}
public void render(Matrix44F projectionMatrix, Matrix44F cameraview, Vec2F size) {
float size0 = size.data[0];
float size1 = size.data[1];
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vbo_coord_box);
float height = size0 / 1000;
float cube_vertices[][] = {
/* +z */{size0 / 2, size1 / 2, height / 2}, {size0 / 2, -size1 / 2, height / 2}, {-size0 / 2, -size1 / 2, height / 2}, {-size0 / 2, size1 / 2, height / 2},
/* -z */{size0 / 2, size1 / 2, 0}, {size0 / 2, -size1 / 2, 0}, {-size0 / 2, -size1 / 2, 0}, {-size0 / 2, size1 / 2, 0}};
FloatBuffer cube_vertices_buffer = FloatBuffer.wrap(flatten(cube_vertices));
GLES20.glBufferData(GLES20.GL_ARRAY_BUFFER, cube_vertices_buffer.limit() * 4, cube_vertices_buffer, GLES20.GL_DYNAMIC_DRAW);
GLES20.glEnable(GLES20.GL_BLEND);
GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA);
GLES20.glEnable(GLES20.GL_DEPTH_TEST);
GLES20.glUseProgram(program_box);
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vbo_coord_box);
GLES20.glEnableVertexAttribArray(pos_coord_box);
GLES20.glVertexAttribPointer(pos_coord_box, 3, GLES20.GL_FLOAT, false, 0, 0);
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vbo_color_box);
GLES20.glEnableVertexAttribArray(pos_color_box);
GLES20.glVertexAttribPointer(pos_color_box, 4, GLES20.GL_UNSIGNED_BYTE, true, 0, 0);
GLES20.glUniformMatrix4fv(pos_trans_box, 1, false, cameraview.data, 0);
GLES20.glUniformMatrix4fv(pos_proj_box, 1, false, projectionMatrix.data, 0);
GLES20.glBindBuffer(GLES20.GL_ELEMENT_ARRAY_BUFFER, vbo_faces_box);
for (int i = 0; i < 6; i++) {
GLES20.glDrawElements(GLES20.GL_TRIANGLE_FAN, 4, GLES20.GL_UNSIGNED_SHORT, i * 4 * 2);
}
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vbo_coord_box);
float cube_vertices_2[][] = {
/* +z */{size0 / 4, size1 / 4, size0 / 4}, {size0 / 4, -size1 / 4, size0 / 4}, {-size0 / 4, -size1 / 4, size0 / 4}, {-size0 / 4, size1 / 4, size0 / 4},
/* -z */{size0 / 4, size1 / 4, 0}, {size0 / 4, -size1 / 4, 0}, {-size0 / 4, -size1 / 4, 0}, {-size0 / 4, size1 / 4, 0}};
FloatBuffer cube_vertices_2_buffer = FloatBuffer.wrap(flatten(cube_vertices_2));
GLES20.glBufferData(GLES20.GL_ARRAY_BUFFER, cube_vertices_2_buffer.limit() * 4, cube_vertices_2_buffer, GLES20.GL_DYNAMIC_DRAW);
GLES20.glEnableVertexAttribArray(pos_coord_box);
GLES20.glVertexAttribPointer(pos_coord_box, 3, GLES20.GL_FLOAT, false, 0, 0);
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vbo_color_box_2);
GLES20.glEnableVertexAttribArray(pos_color_box);
GLES20.glVertexAttribPointer(pos_color_box, 4, GLES20.GL_UNSIGNED_BYTE, true, 0, 0);
for (int i = 0; i < 6; i++) {
GLES20.glDrawElements(GLES20.GL_TRIANGLE_FAN, 4, GLES20.GL_UNSIGNED_SHORT, i * 4 * 2);
}
}
}
This one might be a bit much, but it is rather simple and flexible because you can use pretty much any kind of text, font, or even background.
Basically, we draw the text onto a bitmap and render this bitmap on a 2D plane. The background of the bitmap won't be rendered (using discard in the fragment shader) as long as it is a predefined color.
So first we need to set up one additional vertex attribute for texture coordinates. Here is the complete setup, including vertices and texture coordinates, for a simple 2D plane:
//the geometry with texture coordinates
public int vbs[] = new int[2];
public void initSprite(){
float vertices[] = {
1.0f, -1.0f, 0.0f, //triangle 1
-1.0f, -1.0f, 0.0f,
-1.0f, 1.0f, 0.0f,
-1.0f, 1.0f, 0.0f, //triangle 2
1.0f, 1.0f, 0.0f,
1.0f, -1.0f, 0.0f
};
float texcoords[] = {
1.0f, 1.0f, 0.0f, //triangle 1
0.0f, 1.0f, 0.0f,
0.0f, 0.0f, 0.0f,
0.0f, 0.0f, 0.0f, //triangle 2
1.0f, 0.0f, 0.0f,
1.0f, 1.0f, 0.0f
};
int triangle_count = 2;
FloatBuffer vertex_pos_buffer;
FloatBuffer tex_coord_buffer;
int bytes_per_float = 4;
//generate buffers on gpu
GLES20.glGenBuffers(2, vbs,0);
// Allocate a direct block of memory on the native heap,
// size in bytes is equal to vertices.length * BYTES_PER_FLOAT.
// BYTES_PER_FLOAT is equal to 4, since a float is 32-bits, or 4 bytes.
vertex_pos_buffer = ByteBuffer.allocateDirect(vertices.length * bytes_per_float)
// Floats can be in big-endian or little-endian order.
// We want the same as the native platform.
.order(ByteOrder.nativeOrder())
// Give us a floating-point view on this byte buffer.
.asFloatBuffer();
//Transferring data from the Java heap to the native heap is then a matter of a couple calls:
// Copy data from the Java heap to the native heap.
vertex_pos_buffer.put(vertices)
// Reset the buffer position to the beginning of the buffer.
.position(0);
//Bind the vertices buffer and give OpenGL the data
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vbs[0]);
GLES20.glBufferData(GLES20.GL_ARRAY_BUFFER, triangle_count * 3* 3 * bytes_per_float, vertex_pos_buffer, GLES20.GL_STATIC_DRAW);
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0);
tex_coord_buffer = ByteBuffer.allocateDirect(texcoords.length * bytes_per_float)
.order(ByteOrder.nativeOrder())
.asFloatBuffer();
tex_coord_buffer.put(texcoords).position(0);
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vbs[1]);
GLES20.glBufferData(GLES20.GL_ARRAY_BUFFER, triangle_count * 3* 3 * bytes_per_float, tex_coord_buffer, GLES20.GL_STATIC_DRAW);
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0);
}
Next we need our texture. We load a background image and draw our desired text on top of it. size is the font size we want to use and should be a bit smaller than the background bitmap height; r, g, b are the color values of the font:
//the texture we gonna use during rendering
int tex = 0;
public void initTextTexture(String backgroundBitmapPath, String text, float size, int r, int g, int b){
//load the bitmap
Bitmap background = loadBitmapRGBA(backgroundBitmapPath);
//check if image could load
if(background == null){
return;
}
android.graphics.Bitmap.Config bitmapConfig = background.getConfig();
// set default bitmap config if none
if(bitmapConfig == null) {
bitmapConfig = android.graphics.Bitmap.Config.ARGB_8888;
}
// resource bitmaps are immutable,
// so we need to convert it to mutable one
background = background.copy(bitmapConfig, true);
Canvas canvas = new Canvas(background);
// new antialised Paint
Paint paint = new Paint();
paint.setColor(Color.rgb(r, g, b));
// text size in pixels
paint.setTextSize(size);
// draw text to the Canvas center
Rect bounds = new Rect();
paint.getTextBounds(text, 0, text.length(), bounds);
//left
int x = 1;
//center
int y = (background.getHeight() + bounds.height())/2;
canvas.drawText(text, x, y, paint);
//create a texture with the bitmap we just created
//try to allocate texture on GPU
int gl_map[] = new int[1];
GLES20.glGenTextures(1, gl_map, 0);
tex = gl_map[0];
//bind texture
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, tex);
//move the bitmap to the openGL texture
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, background, 0);
//set nearest filter
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
}
private Bitmap loadBitmapRGBA(String path){
if(path == null){
return null;
}
//replace this with your application/activity context
AssetManager assetManager = GlobalContext.getAppContext().getAssets();
InputStream istr = null;
try {
istr = assetManager.open(path);
} catch (IOException e) {
e.printStackTrace();
}
Rect outPadding = new Rect();
final BitmapFactory.Options options = new BitmapFactory.Options();
options.inScaled = false;
Bitmap image = BitmapFactory.decodeStream(istr, outPadding, options);
return image;
}
Next we need to draw our geometry with the texture we created; notice the glBindTexture call:
public void drawTextSprite(){
//program is the shader program we are going to use to draw the 2D plane
GLES20.glUseProgram(program);
int locPosition = GLES20.glGetAttribLocation(program, "a_Position");
int locTexcoord = GLES20.glGetAttribLocation(program, "a_TexCoord");
int locTexture = GLES20.glGetUniformLocation(program, "tex_sampler");
int locMVPMatrix = GLES20.glGetUniformLocation(program, "u_MVPMatrix");
//bind the vertex data
GLES20.glEnableVertexAttribArray(locPosition);
GLES20.glEnableVertexAttribArray(locTexcoord);
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vbs[0]);
GLES20.glVertexAttribPointer(locPosition, 3, GLES20.GL_FLOAT, false, 0, 0);
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vbs[1]);
GLES20.glVertexAttribPointer(locTexcoord, 3, GLES20.GL_FLOAT, false, 0, 0);
//bind texture
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, tex);
// Tell the texture uniform sampler to use this texture in the shader by binding to texture unit 0.
GLES20.glUniform1i(locTexture, 0);
//set up the mvp matrix
float mvp[] = {
1.0f, 0.0f, 0.0f, 0.0f,
0.0f, 1.0f, 0.0f, 0.0f,
0.0f, 0.0f, 1.0f, 0.0f,
0.0f, 0.0f, 0.0f, 1.0f
};
GLES20.glUniformMatrix4fv(locMVPMatrix, 1, false, mvp, 0);
//draw 2 triangles
GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, 2*3);
}
Now we just need our shader:
//vertex shader
uniform lowp mat4 u_MVPMatrix;
attribute lowp vec4 a_Position;
attribute lowp vec3 a_TexCoord;
varying lowp vec3 texc;
void main()
{
texc = a_TexCoord;
gl_Position = u_MVPMatrix * a_Position;
}
//fragment shader
uniform lowp sampler2D tex_sampler;
varying lowp vec3 texc;
void main()
{
lowp vec3 color = texture2D(tex_sampler, texc.st).rgb;
//test for the background color
if(color.r == 1.0 && color.g == 0.0 && color.b == 1.0){
discard; //get rid of the background
}
gl_FragColor = vec4(color.r, color.g, color.b, 1.0);
}
And to set everything up we call the following two lines:
initSprite();
initTextTexture("img/FF00FF_TEXT_BG.png","Loading...", 20.0f, 255, 255, 255);
FF00FF_TEXT_BG is stored under assets/img/ and looks like this.
If we call drawTextSprite(); during the render loop, we should get something like this:
Of course the output is a bit stretched; this is because I used the identity matrix to draw it. You just need to make sure you draw this over your box by providing the proper matrix.
Also make sure not to draw the plane directly at the same position as the box's side but slightly further away, otherwise you won't see the text, or you will get artifacts if you use depth testing.
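As an illustration, here is a minimal sketch (assuming android.opengl.Matrix and the projectionMatrix, cameraview, size0, size1 and height values from BoxRenderer.render() above; the 0.001f epsilon is an arbitrary choice) that places the text quad just above the box's top face before the draw call in drawTextSprite():
float[] model = new float[16];
float[] mv = new float[16];
float[] mvp = new float[16];
Matrix.setIdentityM(model, 0);
// lift the unit quad slightly above the box top to avoid z-fighting with the depth test
Matrix.translateM(model, 0, 0f, 0f, height / 2 + 0.001f);
// scale the unit quad to roughly the size of the image target
Matrix.scaleM(model, 0, size0 / 2, size1 / 2, 1f);
Matrix.multiplyMM(mv, 0, cameraview.data, 0, model, 0);
Matrix.multiplyMM(mvp, 0, projectionMatrix.data, 0, mv, 0);
GLES20.glUniformMatrix4fv(locMVPMatrix, 1, false, mvp, 0);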
If you don't need to generate strings at runtime, you can of course just load bitmaps with pre-rendered text.
Hope that helps.

TriangleMesh - Backside faces are visible

Good day! I have the following issue: the graphic model is not displayed correctly; some backside faces of the model that should be hidden by the front side remain visible. Here are some examples to clarify: (isometry)
(issue)
This issue becomes especially noticeable when applying light and material. So the question is: how can this be solved in JavaFX?
UPD:
public class VertexTest extends Application {
PerspectiveCamera camera;
Cam cam = new Cam();
double mouseOldX, mouseOldY, mousePosX, mousePosY, mouseDeltaX, mouseDeltaY;
public static void main(String[] args) {
launch(args);
}
@Override
public void start(Stage primaryStage) throws Exception {
TriangleMesh mesh = new Shape3DRectangle(100, 100, 100);
MeshView view = new MeshView(mesh);
view.setDrawMode(DrawMode.LINE);
view.setMaterial(new PhongMaterial(Color.RED));
cam.getChildren().add(view);
Scene scene = new Scene(cam, 1000, 1000, true);
addEvents(view, scene);
camera = new PerspectiveCamera();
camera.setTranslateX(-500);
camera.setTranslateY(-500);
camera.setTranslateZ(1000);
scene.setCamera(camera);
primaryStage.setScene(scene);
primaryStage.show();
}
private void addEvents(MeshView view, Scene s) {
s.setOnMouseDragged(new EventHandler<MouseEvent>() {
public void handle(MouseEvent me) {
mouseOldX = mousePosX;
mouseOldY = mousePosY;
mousePosX = me.getX();
mousePosY = me.getY();
mouseDeltaX = mousePosX - mouseOldX;
mouseDeltaY = mousePosY - mouseOldY;
cam.ry.setAngle(cam.ry.getAngle() - mouseDeltaX);
cam.rx.setAngle(cam.rx.getAngle() + mouseDeltaY);
}
});
}
class Cam extends Group {
Translate t = new Translate();
Translate p = new Translate();
Translate ip = new Translate();
Rotate rx = new Rotate();
{
rx.setAxis(Rotate.X_AXIS);
}
Rotate ry = new Rotate();
{
ry.setAxis(Rotate.Y_AXIS);
}
Rotate rz = new Rotate();
{
rz.setAxis(Rotate.Z_AXIS);
}
Scale s = new Scale();
public Cam() {
super();
getTransforms().addAll(t, p, rx, rz, ry, s, ip);
}
}
public class Shape3DRectangle extends TriangleMesh {
public Shape3DRectangle(float Width, float Height, float deep) {
this.getPoints().setAll(-Width / 2, Height / 2, deep / 2, // idx p0
Width / 2, Height / 2, deep / 2, // idx p1
-Width / 2, -Height / 2, deep / 2, // idx p2
Width / 2, -Height / 2, deep / 2, // idx p3
-Width / 2, Height / 2, -deep / 2, // idx p4
Width / 2, Height / 2, -deep / 2, // idx p5
-Width / 2, -Height / 2, -deep / 2, // idx p6
Width, -Height / 2, -deep / 2 // idx p7
);
this.getTexCoords().addAll(0.0f, 0.0f);
this.getFaces().addAll(5, 0, 4, 0, 0, 0 // P5,T1 ,P4,T0 ,P0,T3
, 5, 0, 0, 0, 1, 0 // P5,T1 ,P0,T3 ,P1,T4
, 0, 0, 4, 0, 6, 0 // P0,T3 ,P4,T2 ,P6,T7
, 0, 0, 6, 0, 2, 0 // P0,T3 ,P6,T7 ,P2,T8
, 1, 0, 0, 0, 2, 0 // P1,T4 ,P0,T3 ,P2,T8
, 1, 0, 2, 0, 3, 0 // P1,T4 ,P2,T8 ,P3,T9
, 5, 0, 1, 0, 3, 0 // P5,T5 ,P1,T4 ,P3,T9
, 5, 0, 3, 0, 7, 0 // P5,T5 ,P3,T9 ,P7,T10
, 4, 0, 5, 0, 7, 0 // P4,T6 ,P5,T5 ,P7,T10
, 4, 0, 7, 0, 6, 0 // P4,T6 ,P7,T10 ,P6,T11
, 3, 0, 2, 0, 6, 0 // P3,T9 ,P2,T8 ,P6,T12
, 3, 0, 6, 0, 7, 0 // P3,T9 ,P6,T12 ,P7,T13
);
}
}
}
I've been playing around with your sample, and I think I've found the reason for your issues.
First, I checked the winding of the faces. All of them are counter-clockwise, so all their normals point outwards, as they should.
Then I modified other vertices instead of the last one. In some cases there were no issues; in others, the issue was still there.
Basically, the issue happens when there are "concave" surfaces, meaning that two faces have normals that will cross. It doesn't happen when all the surfaces are "convex", meaning that their normals point outwards and won't cross.
This is a clear image of both types of meshes, taken from here:
Back to your sample, you are defining a concave mesh:
But if instead of modifying vertex #7 we make #5 larger, we have a convex mesh, with no rendering issues:
Obviously, while this fixes the rendering problem, it changes your initial shape.
If you want to keep your initial geometry, the other possible solution is changing the faces so you don't have any concave areas.
Let's have a look at the faces 5-1-3 and 5-3-7, and let's say we want to move vertex #1 now.
If we keep your triangles, faces 5-1-3 and 5-3-7 will define a concave surface to be rendered (their normals will cross), while if we change those triangles to 5-1-7 and 1-3-7, the surface will be convex (their normals won't cross):
Back to your initial shape, this change in those two faces will solve the rendering issues, as sketched below.
While the vertices are the same, the geometry is a little bit different, so it requires some refinement (more elements). Adding those elements should be done keeping this convexity concept in mind. The problem is not trivial, though, as you can see here.
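As a sketch of the face change described above (texture indices kept at 0, matching the original getFaces() call), the two triangles for that side would become:
this.getFaces().addAll(
      5, 0, 1, 0, 7, 0   // P5, P1, P7
    , 1, 0, 3, 0, 7, 0   // P1, P3, P7
);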
Nice analysis by Jose, but it looks to me as if the OP has just forgotten to divide the Width by 2 in this line of his code:
Width, -Height / 2, -deep / 2 // idx p7
should be
Width / 2, -Height / 2, -deep / 2 // idx p7
The class is called Shape3DRectangle, but with this mistake the geometry is not rectangular anymore.
You can set the cullFaceProperty for every Shape3D. I guess that is what you need but I am not sure whether I understood your question precisely.
Shape3D#cullFaceProperty
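As a minimal sketch of that suggestion applied to the sample above:
MeshView view = new MeshView(mesh);
view.setCullFace(CullFace.BACK);  // BACK is already the default; CullFace.NONE would show both sides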

Implementing Kalman filter in OpenCV Java

I'm trying to implement a Kalman filter in my OpenCV program in Java. I'm new to both OpenCV and the Kalman filter. I've found some examples in C++ (not many in Java), and this is what I have so far:
Initialization:
//create kalman filter
KalmanFilter kalman = new KalmanFilter(4,2,0,CvType.CV_32F);
//set transition matrix
float[] tM = { 1, 0, 1, 0,
0, 1, 0, 1,
0, 0, 1, 0,
0, 0, 0, 1 } ;
Mat transitionMatrix=new Mat(4,4,CvType.CV_32F,new Scalar(0));
transitionMatrix.put(0,0,tM);
kalman.set_transitionMatrix(transitionMatrix);
//set init measurement
Mat measurementMatrix = new Mat (2,1, CvType.CV_32F);
measurementMatrix.setTo(new Scalar(0));
kalman.set_measurementMatrix(measurementMatrix);
//Set state matrix
Mat statePre = new Mat(4,1, CvType.CV_32F);
statePre.put(1, 1, 300);
statePre.put(2, 1, 200);
statePre.put(3, 1, 0);
statePre.put(4, 1, 0);
kalman.set_statePre(statePre);
//Process noise Covariance matrix
Mat processNoiseCov=Mat.eye(4,4,CvType.CV_32F);
processNoiseCov=processNoiseCov.mul(processNoiseCov,1e-1);
kalman.set_processNoiseCov(processNoiseCov);
//Measurement noise Covariance matrix: reliability on our first measurement
Mat measurementNoiseCov=Mat.eye(4,4,CvType.CV_32F);
measurementNoiseCov=measurementNoiseCov.mul(measurementNoiseCov,1e-1);
kalman.set_measurementNoiseCov(measurementNoiseCov);
Mat id2=Mat.eye(4,4,CvType.CV_32F);
id2=id2.mul(id2,0.1);
kalman.set_errorCovPost(id2);
For each video frame:
prediction= kalman.predict();
predictPt.x = prediction.get(1,1)[0];
predictPt.y = prediction.get(2,1)[0];
...new measurement..
measurementMatrix.put(1, 1, center.x);
measurementMatrix.put(2, 1, center.y);
measPt.x=center.x;
measPt.y=center.y;
Mat estimated = kalman.correct(measurementMatrix);
statePt.x=estimated.get(1, 1)[1];
statePt.y= estimated.get(2, 1)[1];
The problem is that I get a null prediction, and I don't see the reason for it. Does somebody know what's wrong with my code? I really appreciate any help!
Thank you!
You get a null prediction because you are not inserting or accessing the correct elements.
First:
statePre.put(0, 1, 300); //statePre.put(1, 1, 300);
statePre.put(1, 1, 200); //statePre.put(2, 1, 200);
statePre.put(2, 1, 0); //statePre.put(3, 1, 0);
statePre.put(3, 1, 0); // statePre.put(4, 1, 0);
Predict:
prediction= kalman.predict();
predictPt.x = prediction.get(0,0)[0]; //predictPt.x = prediction.get(1,1)[0];
predictPt.y = prediction.get(1,0)[0]; //predictPt.y = prediction.get(2,1)[0];
And finally:
measurementMatrix.put(0, 0, center.x); // measurementMatrix.put(1, 1, center.x);
measurementMatrix.put(1, 0, center.y); //measurementMatrix.put(2, 1, center.y);
measPt.x=center.x;
measPt.y=center.y;
Mat estimated = kalman.correct(measurementMatrix);
statePt.x=estimated.get(0,0)[0];
statePt.y= estimated.get(1,0)[0];
Also, your measurementMatrix should be like this:
Mat measurementMatrix = Mat.eye(2,4, CvType.CV_32F);
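Putting the corrections together, here is a minimal sketch of the intended flow (names other than kalman and center are assumptions): the measurement matrix H maps the 4-element state (x, y, vx, vy) to the 2-element measurement (x, y), while the per-frame measurement itself is a separate 2x1 Mat.
// one-time setup
Mat H = Mat.eye(2, 4, CvType.CV_32F);
kalman.set_measurementMatrix(H);
Mat measurement = new Mat(2, 1, CvType.CV_32F);
// per frame: predict, then correct with the new measurement
Mat prediction = kalman.predict();
predictPt.x = prediction.get(0, 0)[0];
predictPt.y = prediction.get(1, 0)[0];
measurement.put(0, 0, center.x);
measurement.put(1, 0, center.y);
Mat estimated = kalman.correct(measurement);
statePt.x = estimated.get(0, 0)[0];
statePt.y = estimated.get(1, 0)[0];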

GLSL Matrix Translation Leaves Blank Screen?

I have a Matrix4f that I'm passing from my ShaderProgram class into my vertex shader using a uniform variable. This matrix is supposed to act as a translation for the vertices. The following is what the matrix looks like:
1, 0, 0, 0,
0, 1, 0, 0,
0, 0, 1, 0,
0, 0, 0, 1
When I multiply that variable (called "test") by the vertex points (gl_Vertex), nothing is visible; it just leaves a blank screen. This only happens when I multiply by the uniform variable "test"; if I multiply by a new mat4 constructed with the same values, it works normally. If I use vector uniform variables instead of matrices, it works as expected.
Am I passing the variable into the GLSL vertex shader correctly? And if so, why is my quad not showing up on the screen?
Here is my vertex shader:
#version 400 core
uniform vec4 translation;
uniform vec4 size;
uniform vec4 rotation;
uniform mat4 test;
in vec2 textureCoords;
in vec3 position;
out vec2 pass_textureCoords;
void main(void){
    //pass texture coords
    pass_textureCoords = textureCoords;
    //This works by multiplying by the identity matrix
    //gl_Position = mat4(1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1) * gl_Vertex;
    //This works by passing vec4's, not a mat4
    /*gl_Position = vec4(((gl_Vertex.x + translation.x)*size.x),
                       ((gl_Vertex.y + translation.y)*size.y),
                       ((gl_Vertex.z + translation.z)*size.z),
                       ((gl_Vertex.w + translation.w)*size.w)
                       );*/
    //this leaves a blank window
    gl_Position = test * gl_Vertex;
}
This is how I declare the uniform variable locations:
translationLocation = GL20.glGetUniformLocation(programID, "translation");
sizeLocation = GL20.glGetUniformLocation(programID, "size");
rotationLocation = GL20.glGetUniformLocation(programID, "rotation");
textureLocation = GL20.glGetUniformLocation(programID, "textureSampler");
testMat = GL20.glGetUniformLocation(programID, "test");
This is how I set the uniform variables when rendering:
public void start(){
GL20.glUseProgram(programID);
Vector4f translation = offset.getTranslation();
Vector4f size = offset.getSize();
Vector4f rotation = offset.getRotation();
GL20.glUniform4f(translationLocation, translation.x, translation.y, translation.z, translation.w);
GL20.glUniform4f(sizeLocation, size.x, size.y, size.z, size.w);
GL20.glUniform4f(rotationLocation, rotation.x, rotation.y, rotation.z, rotation.w);
FloatBuffer buff = BufferUtils.createFloatBuffer(16);
offset.getTestTranslation().storeTranspose(buff);
GL20.glUniformMatrix4(testMat, false, buff);
GL20.glUniform1i(textureLocation, 0);
}
And this is how I declare my variables before passing them into GLSL:
Vector4f translation;
Vector4f size;
Vector4f rotation;
Matrix4f testTranslation;
public Offset(){
translation = new Vector4f(0, 0, 0, 0);
size = new Vector4f(1, 1, 1, 1);
rotation = new Vector4f(0, 0 , 0, 0);
testTranslation = new Matrix4f();
testTranslation.translate(new Vector3f(0,0,0));
}
Well, it turns out that I was using the following method to convert the Matrix4f into a FloatBuffer:
matrix4f.storeTranspose(buff)
Apparently that doesn't properly prepare the matrix in the float buffer for uploading (most likely because the buffer was never flipped, so its position was left at the end rather than at 0). I'm now using this method to send the matrix to the vertex shader when rendering the shader program:
public void setMatrixArray(boolean transposed, Matrix4f[] matrices){
FloatBuffer matrixBuffer = BufferUtils.createFloatBuffer(16*matrices.length);
for(int i = 0; i<matrices.length; i++) {
matrices[i].store(matrixBuffer);
}
matrixBuffer.flip();
GL20.glUniformMatrix4(testMat,transposed,matrixBuffer);
}
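A hypothetical usage of that helper, uploading the single translation matrix without transposing it (store(), unlike storeTranspose(), writes the column-major layout glUniformMatrix4 expects with transpose = false):
setMatrixArray(false, new Matrix4f[]{ offset.getTestTranslation() });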

Android openGL renderer returning null pointer exception

I am creating an app that makes use of GL10. It's made up of three classes:
Class A extends Activity.
Class B implements Renderer.
Class C extends Activity.
Class C contains the data for a 3D cube, Class B is the renderer, and Class A displays it.
To display it I am using the following method in class A:
GLCubeRenderer ourSurface = new GLCubeRenderer();
public void onCreate(Bundle savedInstanceState){
super.onCreate(savedInstanceState);
GLSurfaceView glSurfaceView =
(GLSurfaceView) findViewById(R.id.ourCube);
glSurfaceView.setRenderer(ourSurface);
setContentView(R.layout.cubelayout);
}
And the XML is:
<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:orientation="vertical" >
<com.mastermind.GLCubeRenderer
android:id="#+id/ourCube"
android:layout_width="match_parent"
android:layout_height="match_parent"
/>
</RelativeLayout>
The problem is that in the Java code for class A, the line glSurfaceView.setRenderer(ourSurface); throws a NullPointerException.
Class B (Renderer) code:
private GLCube cube = new GLCube();
static Context context;
public static float xAngle;
public static float yAngle;
final float[] ambient = { 0.1f, 1, 1, 1 };
final float[] position = { 45, 20, 0, 1 };
final float[] direction = { 0, -1, 0 };
public GLCubeRenderer() {
cube = new GLCube();
}
@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
// TODO Auto-generated method stub
gl.glDisable(GL10.GL_DITHER);
gl.glHint(GL10.GL_PERSPECTIVE_CORRECTION_HINT, GL10.GL_FASTEST);
gl.glEnable(GL10.GL_LIGHT1);
gl.glLightfv(GL10.GL_LIGHT1, GL10.GL_DIFFUSE, ambient, 0);
gl.glLightfv(GL10.GL_LIGHT1, GL10.GL_POSITION, position, 0);
gl.glLightfv(GL10.GL_LIGHT1, GL10.GL_SPOT_DIRECTION, direction, 0);
gl.glLightf(GL10.GL_LIGHT1, GL10.GL_SPOT_CUTOFF, 30.0f);
gl.glClearColor(.0f, 0, .0f, 0);
gl.glClearDepthf(1f);
}
@Override
public void onDrawFrame(GL10 gl) {
// TODO Auto-generated method stub
gl.glDisable(GL10.GL_DITHER);
gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
gl.glMatrixMode(GL10.GL_MODELVIEW);
gl.glLoadIdentity();
GLU.gluLookAt(gl, 0, 0, -5, 0, 0, 0, 0, 2, 0);
gl.glRotatef(xAngle, 0, xAngle, 0);
// gl.glRotatef(yAngle, yAngle, 0, yAngle);
gl.glActiveTexture(GL10.GL_TEXTURE0);
gl.glTexEnvx(GL10.GL_TEXTURE_ENV, GL10.GL_TEXTURE_ENV_MODE, GL10.GL_MODULATE);
gl.glTexParameterx(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_S, GL10.GL_REPEAT);
gl.glTexParameterx(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_T, GL10.GL_REPEAT);
cube.draw(gl);
}
@Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
// TODO Auto-generated method stub
gl.glViewport(0, 0, width, height);
float ratio = (float) width / height;
gl.glMatrixMode(GL10.GL_PROJECTION);
gl.glLoadIdentity();
gl.glFrustumf(-ratio, ratio, -1, 1f, 1, 25);
}
And the class C code:
private float vertices[] = {
1, 1, -1, // p0-top front right
1, -1, -1, // p1-bottom front right
-1, -1, -1, // p2-bottom front left
-1, 1, -1, // p3-front top left
1, 1, 1, // p4-top back right
1, -1, 1, // p5-bottom back right
-1, -1, 1, // p6-bottom back left
-1, 1, 1 // p7-front back left
};
// Buffer for our vertices
private FloatBuffer vertBuff;
// Index for our points e.g. p0 = 0f, 1f, in vert Index
private short[] pIndex = {
3, 4, 0,
0, 4, 1,
3, 0, 1,
3, 7, 4,
7, 6, 4,
7, 3, 6,
3, 1, 2,
1, 6, 2,
6, 3, 2,
1, 4, 5,
5, 6, 1,
6, 5, 4
};
// Buffer for points index
private ShortBuffer pBuff;
// Triangle constructor
public GLCube() {
// Construction of vertices
// byte buffer for vertices
ByteBuffer bBuff = ByteBuffer.allocateDirect(vertices.length * 4);
bBuff.order(ByteOrder.nativeOrder());
vertBuff = bBuff.asFloatBuffer();
vertBuff.put(vertices);
vertBuff.position(0);
// Construction of points
// point byte buffer
ByteBuffer pointByteBuff = ByteBuffer.allocateDirect(pIndex.length * 2);
pointByteBuff.order(ByteOrder.nativeOrder());
pBuff = pointByteBuff.asShortBuffer();
pBuff.put(pIndex);
pBuff.position(0);
}
public void draw(GL10 gl) {
gl.glFrontFace(GL10.GL_CW);
gl.glEnable(GL10.GL_CULL_FACE);
gl.glCullFace(GL10.GL_BACK);
gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
gl.glVertexPointer(3, GL10.GL_FLOAT, 0, vertBuff);
gl.glDrawElements(GL10.GL_TRIANGLES, pIndex.length, GL10.GL_UNSIGNED_SHORT, pBuff);
gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
gl.glDisable(GL10.GL_CULL_FACE);
}
Pardon the length of the question, but does anyone have any solutions?
I don't understand: you define ourCube in your XML as a GLCubeRenderer, which is the same class as ourSurface.
GLSurfaceView glSurfaceView =
(GLSurfaceView) findViewById(R.id.ourCube);
and
<com.mastermind.GLCubeRenderer
android:id="#+id/ourCube"
android:layout_width="match_parent"
android:layout_height="match_parent"
/>
Then again, when getting hold of the reference to ourCube, you cast it to GLSurfaceView.
Does GLCubeRenderer extend GLSurfaceView? In that case, are you trying to set the renderer of this surface view to an instance of the same class?
I think you might be mixing up classes here.
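For what it's worth, here is a minimal sketch of one workable arrangement, under the assumption that GLCubeRenderer implements GLSurfaceView.Renderer (rather than extending GLSurfaceView) and that the XML declares a plain android.opengl.GLSurfaceView with the id ourCube. Note that setContentView must run before findViewById, otherwise the lookup returns null:
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.cubelayout);            // inflate the layout first
    GLSurfaceView glSurfaceView = (GLSurfaceView) findViewById(R.id.ourCube);
    glSurfaceView.setRenderer(new GLCubeRenderer()); // attach the Renderer to the surface view
}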
