I'm developing an Augmented Reality application for Android using the EasyAR SDK. By default it renders a cube on top of the image target. I want to print something on top of that cube. What should I do? I'm new to OpenGL, please help.
If I can put an image on top of that cube then that's also fine! I just want to display "Loading" on that cube; whether it is an image or text doesn't really matter!
This is the current situation:
And I need something like this:
Here is my code that renders the box on top of the image target.
import android.opengl.GLES20;
import java.nio.ByteBuffer;
import java.nio.FloatBuffer;
import java.nio.ShortBuffer;
import cn.easyar.Vec2F;
import cn.easyar.Matrix44F;
public class BoxRenderer {
private int program_box;
private int pos_coord_box;
private int pos_color_box;
private int pos_trans_box;
private int pos_proj_box;
private int vbo_coord_box;
private int vbo_color_box;
private int vbo_color_box_2;
private int vbo_faces_box;
private String box_vert = "uniform mat4 trans;\n"
+ "uniform mat4 proj;\n"
+ "attribute vec4 coord;\n"
+ "attribute vec4 color;\n"
+ "varying vec4 vcolor;\n"
+ "\n"
+ "void main(void)\n"
+ "{\n"
+ " vcolor = color;\n"
+ " gl_Position = proj*trans*coord;\n"
+ "}\n"
+ "\n";
private String box_frag = "#ifdef GL_ES\n"
+ "precision highp float;\n"
+ "#endif\n"
+ "varying vec4 vcolor;\n"
+ "\n"
+ "void main(void)\n"
+ "{\n"
+ " gl_FragColor = vcolor;\n"
+ "}\n"
+ "\n";
private float[] flatten(float[][] a) {
int size = 0;
for (int k = 0; k < a.length; k += 1) {
size += a[k].length;
}
float[] l = new float[size];
int offset = 0;
for (int k = 0; k < a.length; k += 1) {
System.arraycopy(a[k], 0, l, offset, a[k].length);
offset += a[k].length;
}
return l;
}
private int[] flatten(int[][] a) {
int size = 0;
for (int k = 0; k < a.length; k += 1) {
size += a[k].length;
}
int[] l = new int[size];
int offset = 0;
for (int k = 0; k < a.length; k += 1) {
System.arraycopy(a[k], 0, l, offset, a[k].length);
offset += a[k].length;
}
return l;
}
private short[] flatten(short[][] a) {
int size = 0;
for (int k = 0; k < a.length; k += 1) {
size += a[k].length;
}
short[] l = new short[size];
int offset = 0;
for (int k = 0; k < a.length; k += 1) {
System.arraycopy(a[k], 0, l, offset, a[k].length);
offset += a[k].length;
}
return l;
}
private byte[] flatten(byte[][] a) {
int size = 0;
for (int k = 0; k < a.length; k += 1) {
size += a[k].length;
}
byte[] l = new byte[size];
int offset = 0;
for (int k = 0; k < a.length; k += 1) {
System.arraycopy(a[k], 0, l, offset, a[k].length);
offset += a[k].length;
}
return l;
}
private byte[] byteArrayFromIntArray(int[] a) {
byte[] l = new byte[a.length];
for (int k = 0; k < a.length; k += 1) {
l[k] = (byte) (a[k] & 0xFF);
}
return l;
}
private int generateOneBuffer() {
int[] buffer = {0};
GLES20.glGenBuffers(1, buffer, 0);
return buffer[0];
}
public void init() {
program_box = GLES20.glCreateProgram();
int vertShader = GLES20.glCreateShader(GLES20.GL_VERTEX_SHADER);
GLES20.glShaderSource(vertShader, box_vert);
GLES20.glCompileShader(vertShader);
int fragShader = GLES20.glCreateShader(GLES20.GL_FRAGMENT_SHADER);
GLES20.glShaderSource(fragShader, box_frag);
GLES20.glCompileShader(fragShader);
GLES20.glAttachShader(program_box, vertShader);
GLES20.glAttachShader(program_box, fragShader);
GLES20.glLinkProgram(program_box);
GLES20.glUseProgram(program_box);
pos_coord_box = GLES20.glGetAttribLocation(program_box, "coord");
pos_color_box = GLES20.glGetAttribLocation(program_box, "color");
pos_trans_box = GLES20.glGetUniformLocation(program_box, "trans");
pos_proj_box = GLES20.glGetUniformLocation(program_box, "proj");
vbo_coord_box = generateOneBuffer();
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vbo_coord_box);
float cube_vertices[][] = {
/* +z */{1.0f / 2, 1.0f / 2, 0.01f / 2}, {1.0f / 2, -1.0f / 2, 0.01f / 2}, {-1.0f / 2, -1.0f / 2, 0.01f / 2}, {-1.0f / 2, 1.0f / 2, 0.01f / 2},
/* -z */{1.0f / 2, 1.0f / 2, -0.01f / 2}, {1.0f / 2, -1.0f / 2, -0.01f / 2}, {-1.0f / 2, -1.0f / 2, -0.01f / 2}, {-1.0f / 2, 1.0f / 2, -0.01f / 2}
};
FloatBuffer cube_vertices_buffer = FloatBuffer.wrap(flatten(cube_vertices));
GLES20.glBufferData(GLES20.GL_ARRAY_BUFFER, cube_vertices_buffer.limit() * 4, cube_vertices_buffer, GLES20.GL_DYNAMIC_DRAW);
vbo_color_box = generateOneBuffer();
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vbo_color_box);
int cube_vertex_colors[][] = {
{255, 0, 0, 128}, {0, 255, 0, 128}, {0, 0, 255, 128}, {0, 0, 0, 128},
{0, 255, 255, 128}, {255, 0, 255, 128}, {255, 255, 0, 128}, {255, 255, 255, 128}};
ByteBuffer cube_vertex_colors_buffer = ByteBuffer.wrap(byteArrayFromIntArray(flatten(cube_vertex_colors)));
GLES20.glBufferData(GLES20.GL_ARRAY_BUFFER, cube_vertex_colors_buffer.limit(), cube_vertex_colors_buffer, GLES20.GL_STATIC_DRAW);
vbo_color_box_2 = generateOneBuffer();
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vbo_color_box_2);
int cube_vertex_colors_2[][] = {
{255, 0, 0, 255}, {255, 255, 0, 255}, {0, 255, 0, 255}, {255, 0, 255, 255},
{255, 0, 255, 255}, {255, 255, 255, 255}, {0, 255, 255, 255}, {255, 0, 255, 255}};
ByteBuffer cube_vertex_colors_2_buffer = ByteBuffer.wrap(byteArrayFromIntArray(flatten(cube_vertex_colors_2)));
GLES20.glBufferData(GLES20.GL_ARRAY_BUFFER, cube_vertex_colors_2_buffer.limit(), cube_vertex_colors_2_buffer, GLES20.GL_STATIC_DRAW);
vbo_faces_box = generateOneBuffer();
GLES20.glBindBuffer(GLES20.GL_ELEMENT_ARRAY_BUFFER, vbo_faces_box);
short cube_faces[][] = {
/* +z */{3, 2, 1, 0}, /* -y */{2, 3, 7, 6}, /* +y */{0, 1, 5, 4},
/* -x */{3, 0, 4, 7}, /* +x */{1, 2, 6, 5}, /* -z */{4, 5, 6, 7}};
ShortBuffer cube_faces_buffer = ShortBuffer.wrap(flatten(cube_faces));
GLES20.glBufferData(GLES20.GL_ELEMENT_ARRAY_BUFFER, cube_faces_buffer.limit() * 2, cube_faces_buffer, GLES20.GL_STATIC_DRAW);
}
public void render(Matrix44F projectionMatrix, Matrix44F cameraview, Vec2F size) {
float size0 = size.data[0];
float size1 = size.data[1];
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vbo_coord_box);
float height = size0 / 1000;
float cube_vertices[][] = {
/* +z */{size0 / 2, size1 / 2, height / 2}, {size0 / 2, -size1 / 2, height / 2}, {-size0 / 2, -size1 / 2, height / 2}, {-size0 / 2, size1 / 2, height / 2},
/* -z */{size0 / 2, size1 / 2, 0}, {size0 / 2, -size1 / 2, 0}, {-size0 / 2, -size1 / 2, 0}, {-size0 / 2, size1 / 2, 0}};
FloatBuffer cube_vertices_buffer = FloatBuffer.wrap(flatten(cube_vertices));
GLES20.glBufferData(GLES20.GL_ARRAY_BUFFER, cube_vertices_buffer.limit() * 4, cube_vertices_buffer, GLES20.GL_DYNAMIC_DRAW);
GLES20.glEnable(GLES20.GL_BLEND);
GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA);
GLES20.glEnable(GLES20.GL_DEPTH_TEST);
GLES20.glUseProgram(program_box);
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vbo_coord_box);
GLES20.glEnableVertexAttribArray(pos_coord_box);
GLES20.glVertexAttribPointer(pos_coord_box, 3, GLES20.GL_FLOAT, false, 0, 0);
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vbo_color_box);
GLES20.glEnableVertexAttribArray(pos_color_box);
GLES20.glVertexAttribPointer(pos_color_box, 4, GLES20.GL_UNSIGNED_BYTE, true, 0, 0);
GLES20.glUniformMatrix4fv(pos_trans_box, 1, false, cameraview.data, 0);
GLES20.glUniformMatrix4fv(pos_proj_box, 1, false, projectionMatrix.data, 0);
GLES20.glBindBuffer(GLES20.GL_ELEMENT_ARRAY_BUFFER, vbo_faces_box);
for (int i = 0; i < 6; i++) {
GLES20.glDrawElements(GLES20.GL_TRIANGLE_FAN, 4, GLES20.GL_UNSIGNED_SHORT, i * 4 * 2);
}
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vbo_coord_box);
float cube_vertices_2[][] = {
/* +z */{size0 / 4, size1 / 4, size0 / 4}, {size0 / 4, -size1 / 4, size0 / 4}, {-size0 / 4, -size1 / 4, size0 / 4}, {-size0 / 4, size1 / 4, size0 / 4},
/* -z */{size0 / 4, size1 / 4, 0}, {size0 / 4, -size1 / 4, 0}, {-size0 / 4, -size1 / 4, 0}, {-size0 / 4, size1 / 4, 0}};
FloatBuffer cube_vertices_2_buffer = FloatBuffer.wrap(flatten(cube_vertices_2));
GLES20.glBufferData(GLES20.GL_ARRAY_BUFFER, cube_vertices_2_buffer.limit() * 4, cube_vertices_2_buffer, GLES20.GL_DYNAMIC_DRAW);
GLES20.glEnableVertexAttribArray(pos_coord_box);
GLES20.glVertexAttribPointer(pos_coord_box, 3, GLES20.GL_FLOAT, false, 0, 0);
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vbo_color_box_2);
GLES20.glEnableVertexAttribArray(pos_color_box);
GLES20.glVertexAttribPointer(pos_color_box, 4, GLES20.GL_UNSIGNED_BYTE, true, 0, 0);
for (int i = 0; i < 6; i++) {
GLES20.glDrawElements(GLES20.GL_TRIANGLE_FAN, 4, GLES20.GL_UNSIGNED_SHORT, i * 4 * 2);
}
}
}
This one might be a bit much, but it is rather simple and flexible because you can use pretty much any kind of text, font or even background.
Basically, we draw text on a bitmap and render this bitmap on a 2D plane. The background of the bitmap won't be rendered (using discard in the fragment shader) as long as it is a predefined color.
So first we need to set up one additional vertex attribute for texture coordinates. Here is the complete setup, including vertices and texture coordinates for a simple 2D plane:
//the geometry with texture coordinates
public int vbs[] = new int[2];
public void initSprite(){
float vertices[] = {
1.0f, -1.0f, 0.0f, //triangle 1
-1.0f, -1.0f, 0.0f,
-1.0f, 1.0f, 0.0f,
-1.0f, 1.0f, 0.0f, //triangle 2
1.0f, 1.0f, 0.0f,
1.0f, -1.0f, 0.0f
};
float texcoords[] = {
1.0f, 1.0f, 0.0f, //triangle 1
0.0f, 1.0f, 0.0f,
0.0f, 0.0f, 0.0f,
0.0f, 0.0f, 0.0f, //triangle 2
1.0f, 0.0f, 0.0f,
1.0f, 1.0f, 0.0f
};
int triangle_count = 2;
FloatBuffer vertex_pos_buffer;
FloatBuffer tex_coord_buffer;
int bytes_per_float = 4;
//generate buffers on gpu
GLES20.glGenBuffers(2, vbs,0);
// Allocate a direct block of memory on the native heap,
// size in bytes is equal to vertices.length * BYTES_PER_FLOAT.
// BYTES_PER_FLOAT is equal to 4, since a float is 32-bits, or 4 bytes.
vertex_pos_buffer = ByteBuffer.allocateDirect(vertices.length * bytes_per_float)
// Floats can be in big-endian or little-endian order.
// We want the same as the native platform.
.order(ByteOrder.nativeOrder())
// Give us a floating-point view on this byte buffer.
.asFloatBuffer();
//Transferring data from the Java heap to the native heap is then a matter of a couple calls:
// Copy data from the Java heap to the native heap.
vertex_pos_buffer.put(vertices)
// Reset the buffer position to the beginning of the buffer.
.position(0);
//Bind the vertices buffer and give OpenGL the data
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vbs[0]);
GLES20.glBufferData(GLES20.GL_ARRAY_BUFFER, triangle_count * 3* 3 * bytes_per_float, vertex_pos_buffer, GLES20.GL_STATIC_DRAW);
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0);
tex_coord_buffer = ByteBuffer.allocateDirect(texcoords.length * bytes_per_float)
.order(ByteOrder.nativeOrder())
.asFloatBuffer();
tex_coord_buffer.put(texcoords).position(0);
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vbs[1]);
GLES20.glBufferData(GLES20.GL_ARRAY_BUFFER, triangle_count * 3* 3 * bytes_per_float, tex_coord_buffer, GLES20.GL_STATIC_DRAW);
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0);
}
Next we need our texture. We load a background image and draw our desired text on top of it. size is the font size we want to use and should be a bit smaller than the background bitmap height; r, g and b are the color values of the font:
//the texture we are going to use during rendering
int tex = 0;
public void initTextTexture(String backgroundBitmapPath, String text, float size, int r, int g, int b){
//load the bitmap
Bitmap background = loadBitmapRGBA(backgroundBitmapPath);
//check if image could load
if(background == null){
return;
}
android.graphics.Bitmap.Config bitmapConfig = background.getConfig();
// set default bitmap config if none
if(bitmapConfig == null) {
bitmapConfig = android.graphics.Bitmap.Config.ARGB_8888;
}
// resource bitmaps are immutable,
// so we need to convert it to a mutable one
background = background.copy(bitmapConfig, true);
Canvas canvas = new Canvas(background);
// new antialiased Paint
Paint paint = new Paint(Paint.ANTI_ALIAS_FLAG);
paint.setColor(Color.rgb(r, g, b));
// text size in pixels
paint.setTextSize(size);
// draw text to the Canvas center
Rect bounds = new Rect();
paint.getTextBounds(text, 0, text.length(), bounds);
//left
int x = 1;
//center
int y = (background.getHeight() + bounds.height())/2;
canvas.drawText(text, x, y, paint);
//create a texture with the bitmap we just created
//try to allocate texture on GPU
int gl_map[] = new int[1];
GLES20.glGenTextures(1, gl_map, 0);
tex = gl_map[0];
//bind texture
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, tex);
//move the bitmap to the openGL texture
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, background, 0);
//set nearest filter
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
}
private Bitmap loadBitmapRGBA(String path){
if(path == null){
return null;
}
//replace this with your application/activity context
AssetManager assetManager = GlobalContext.getAppContext().getAssets();
InputStream istr = null;
try {
istr = assetManager.open(path);
} catch (IOException e) {
e.printStackTrace();
}
Rect outPadding = new Rect();
final BitmapFactory.Options options = new BitmapFactory.Options();
options.inScaled = false;
Bitmap image = BitmapFactory.decodeStream(istr, outPadding, options);
return image;
}
Next we need to draw our geometry with the texture we created; note the glBindTexture call:
public void drawTextSprite(){
//program is the shader program we are going to use to draw the 2D plane
GLES20.glUseProgram(program);
int locPosition = GLES20.glGetAttribLocation(program, "a_Position");
int locTexcoord = GLES20.glGetAttribLocation(program, "a_TexCoord");
int locTexture = GLES20.glGetUniformLocation(program, "tex_sampler");
int locMVPMatrix = GLES20.glGetUniformLocation(program, "u_MVPMatrix");
//bind the vertex data
GLES20.glEnableVertexAttribArray(locPosition);
GLES20.glEnableVertexAttribArray(locTexcoord);
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vbs[0]);
GLES20.glVertexAttribPointer(locPosition, 3, GLES20.GL_FLOAT, false, 0, 0);
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vbs[1]);
GLES20.glVertexAttribPointer(locTexcoord, 3, GLES20.GL_FLOAT, false, 0, 0);
//bind texture
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, tex);
// Tell the texture uniform sampler to use this texture in the shader by binding to texture unit 0.
GLES20.glUniform1i(locTexture, 0);
//set up the mvp matrix
float mvp[] = {
1.0f, 0.0f, 0.0f, 0.0f,
0.0f, 1.0f, 0.0f, 0.0f,
0.0f, 0.0f, 1.0f, 0.0f,
0.0f, 0.0f, 0.0f, 1.0f
};
GLES20.glUniformMatrix4fv(locMVPMatrix, 1, false, mvp, 0);
//draw 2 triangles
GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, 2*3);
}
Now we just need our shaders:
//vertex shader
uniform lowp mat4 u_MVPMatrix;
attribute lowp vec4 a_Position;
attribute lowp vec3 a_TexCoord;
varying lowp vec3 texc;
void main()
{
texc = a_TexCoord;
gl_Position = u_MVPMatrix * a_Position;
}
//fragment shader
uniform lowp sampler2D tex_sampler;
varying lowp vec3 texc;
void main()
{
lowp vec3 color = texture2D(tex_sampler, texc.st).rgb;
//test for the background color
if(color.r == 1.0 && color.g == 0.0 && color.b == 1.0){
discard; //get rid of the background
}
gl_FragColor = vec4(color.r, color.g, color.b, 1.0);
}
And to set everything up we call the following two lines:
initSprite();
initTextTexture("img/FF00FF_TEXT_BG.png","Loading...", 20.0f, 255, 255, 255);
FF00FF_TEXT_BG is stored under assets/img/ and looks like this.
If we call drawTextSprite(); during the render loop we should get something like this:
Of course the output is a bit stretched; this is because I used the identity matrix to draw it. You just need to make sure you draw this over your box by providing the proper matrix.
Also make sure not to draw the plane at exactly the same position as the box's side but slightly further away, otherwise you won't see the text, or you will get artifacts if you use depth testing.
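For example, here is a hedged sketch (using android.opengl.Matrix; cameraview, projectionMatrix, size0 and size1 are the values from BoxRenderer.render() above, boxHeight stands for the height computed there, and the 0.001 offset is an arbitrary choice of mine) of how such a matrix, including the small offset, could be built:
// Build a model matrix that places the text plane just above the top face of the box.
float[] model = new float[16];
Matrix.setIdentityM(model, 0);
// lift the plane slightly above the box top to avoid z-fighting
Matrix.translateM(model, 0, 0f, 0f, boxHeight + 0.001f);
// the sprite spans -1..1 in x and y, so scale it to the target's half extents
Matrix.scaleM(model, 0, size0 / 2f, size1 / 2f, 1f);
float[] modelView = new float[16];
float[] mvp = new float[16];
Matrix.multiplyMM(modelView, 0, cameraview.data, 0, model, 0);
Matrix.multiplyMM(mvp, 0, projectionMatrix.data, 0, modelView, 0);
// pass mvp as u_MVPMatrix (e.g. by giving drawTextSprite() a matrix parameter) instead of the identity matrix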
If you don't need to generate strings at runtime, you can of course just load bitmaps with prerendered text.
Hope that helps.
I used the solution to this question to draw a 3D skybox view using 6 images. The question is found here: LibGDX 0.9.9 - Apply cubemap in environment.
The program works, but it seems to use a lot of processing power and my CPU fan starts to run. Is there a problem with the way I am rendering the 3D skybox, or is there a better way to do it? Is the way I am implementing the skybox correct? Here is my code.
The class that creates the environment is below:
protected final Pixmap[] data = new Pixmap[6];
protected ShaderProgram shader;
protected int u_worldTrans;
protected Mesh quad;
private Matrix4 worldTrans;
private Quaternion q;
protected String vertexShader = " attribute vec3 a_position; \n"+
" attribute vec3 a_normal; \n"+
" attribute vec2 a_texCoord0; \n"+
" uniform mat4 u_worldTrans; \n"+
" varying vec2 v_texCoord0; \n"+
" varying vec3 v_cubeMapUV; \n"+
" void main() { \n"+
" v_texCoord0 = a_texCoord0; \n"+
" vec4 g_position = u_worldTrans * vec4(a_position, 1.0); \n"+
" v_cubeMapUV = normalize(g_position.xyz); \n"+
" gl_Position = vec4(a_position, 1.0); \n"+
" } \n";
protected String fragmentShader = "#ifdef GL_ES \n"+
" precision mediump float; \n"+
" #endif \n"+
" uniform samplerCube u_environmentCubemap; \n"+
" varying vec2 v_texCoord0; \n"+
" varying vec3 v_cubeMapUV; \n"+
" void main() { \n"+
" gl_FragColor = vec4(textureCube(u_environmentCubemap, v_cubeMapUV).rgb, 1.0); \n"+
" } \n";
public String getDefaultVertexShader(){
return vertexShader;
}
public String getDefaultFragmentShader(){
return fragmentShader;
}
public EnvironmentCubemap (Pixmap positiveX, Pixmap negativeX, Pixmap positiveY, Pixmap negativeY, Pixmap positiveZ, Pixmap negativeZ) {
data[0]=positiveX;
data[1]=negativeX;
data[2]=positiveY;
data[3]=negativeY;
data[4]=positiveZ;
data[5]=negativeZ;
init();
}
public EnvironmentCubemap (FileHandle positiveX, FileHandle negativeX, FileHandle positiveY, FileHandle negativeY, FileHandle positiveZ, FileHandle negativeZ) {
this(new Pixmap(positiveX), new Pixmap(negativeX), new Pixmap(positiveY), new Pixmap(negativeY), new Pixmap(positiveZ), new Pixmap(negativeZ));
}
//IF ALL SIX SIDES ARE REPRESENTED IN ONE IMAGE
public EnvironmentCubemap (Pixmap cubemap) {
int w = cubemap.getWidth();
int h = cubemap.getHeight();
for(int i=0; i<6; i++) data[i] = new Pixmap(w/4, h/3, Format.RGB888);
for(int x=0; x<w; x++)
for(int y=0; y<h; y++){
//-X
if(x>=0 && x<=w/4 && y>=h/3 && y<=h*2/3) data[1].drawPixel(x, y-h/3, cubemap.getPixel(x, y));
//+Y
if(x>=w/4 && x<=w/2 && y>=0 && y<=h/3) data[2].drawPixel(x-w/4, y, cubemap.getPixel(x, y));
//+Z
if(x>=w/4 && x<=w/2 && y>=h/3 && y<=h*2/3) data[4].drawPixel(x-w/4, y-h/3, cubemap.getPixel(x, y));
//-Y
if(x>=w/4 && x<=w/2 && y>=h*2/3 && y<=h) data[3].drawPixel(x-w/4, y-h*2/3, cubemap.getPixel(x, y));
//+X
if(x>=w/2 && x<=w*3/4 && y>=h/3 && y<=h*2/3) data[0].drawPixel(x-w/2, y-h/3, cubemap.getPixel(x, y));
//-Z
if(x>=w*3/4 && x<=w && y>=h/3 && y<=h*2/3) data[5].drawPixel(x-w*3/4, y-h/3, cubemap.getPixel(x, y));
}
cubemap.dispose();
cubemap=null;
init();
}
private void init(){
shader = new ShaderProgram(vertexShader, fragmentShader);
if (!shader.isCompiled())
throw new GdxRuntimeException(shader.getLog());
u_worldTrans = shader.getUniformLocation("u_worldTrans");
quad = createQuad();
worldTrans = new Matrix4();
q = new Quaternion();
initCubemap();
}
private void initCubemap(){
//bind cubemap
Gdx.gl.glBindTexture(GL.GL_TEXTURE_CUBE_MAP, 0);
Gdx.gl.glTexImage2D(GL.GL_TEXTURE_CUBE_MAP_POSITIVE_X, 0, GL30.GL_RGB, data[0].getWidth(), data[0].getHeight(), 0, GL30.GL_RGB, GL30.GL_UNSIGNED_BYTE, data[0].getPixels());
Gdx.gl.glTexImage2D(GL.GL_TEXTURE_CUBE_MAP_NEGATIVE_X, 0, GL30.GL_RGB, data[1].getWidth(), data[1].getHeight(), 0, GL30.GL_RGB, GL30.GL_UNSIGNED_BYTE, data[1].getPixels());
Gdx.gl.glTexImage2D(GL.GL_TEXTURE_CUBE_MAP_POSITIVE_Y, 0, GL30.GL_RGB, data[2].getWidth(), data[2].getHeight(), 0, GL30.GL_RGB, GL30.GL_UNSIGNED_BYTE, data[2].getPixels());
Gdx.gl.glTexImage2D(GL.GL_TEXTURE_CUBE_MAP_NEGATIVE_Y, 0, GL30.GL_RGB, data[3].getWidth(), data[3].getHeight(), 0, GL30.GL_RGB, GL30.GL_UNSIGNED_BYTE, data[3].getPixels());
Gdx.gl.glTexImage2D(GL.GL_TEXTURE_CUBE_MAP_POSITIVE_Z, 0, GL30.GL_RGB, data[4].getWidth(), data[4].getHeight(), 0, GL30.GL_RGB, GL30.GL_UNSIGNED_BYTE, data[4].getPixels());
Gdx.gl.glTexImage2D(GL.GL_TEXTURE_CUBE_MAP_NEGATIVE_Z, 0, GL30.GL_RGB, data[5].getWidth(), data[5].getHeight(), 0, GL30.GL_RGB, GL30.GL_UNSIGNED_BYTE, data[5].getPixels());
Gdx.gl.glGenerateMipmap(GL.GL_TEXTURE_CUBE_MAP);
Gdx.gl.glTexParameteri(GL.GL_TEXTURE_CUBE_MAP, GL30.GL_TEXTURE_MIN_FILTER, GL30.GL_LINEAR);
Gdx.gl.glTexParameteri(GL.GL_TEXTURE_CUBE_MAP, GL30.GL_TEXTURE_MIN_FILTER, GL30.GL_LINEAR_MIPMAP_LINEAR);
Gdx.gl.glTexParameteri(GL.GL_TEXTURE_CUBE_MAP, GL30.GL_TEXTURE_MAG_FILTER, GL30.GL_LINEAR);
Gdx.gl.glTexParameteri(GL.GL_TEXTURE_CUBE_MAP, GL30.GL_TEXTURE_WRAP_S, GL30.GL_CLAMP_TO_EDGE);
Gdx.gl.glTexParameteri(GL.GL_TEXTURE_CUBE_MAP, GL30.GL_TEXTURE_WRAP_T, GL30.GL_CLAMP_TO_EDGE);
Gdx.gl.glGenerateMipmap(GL30.GL_TEXTURE_CUBE_MAP);
}
public void render(Camera camera){
//SPECIAL THANKS TO Jos van Egmond
camera.view.getRotation( q, true );
q.conjugate();
///////////////////////////////////
worldTrans.idt();
worldTrans.rotate(q);
shader.begin();
shader.setUniformMatrix(u_worldTrans, worldTrans.translate(0, 0, -1));
quad.render(shader, GL30.GL_TRIANGLES);
shader.end();
}
public Mesh createQuad(){
Mesh mesh = new Mesh(true, 4, 6, VertexAttribute.Position(), VertexAttribute.ColorUnpacked(), VertexAttribute.TexCoords(0));
mesh.setVertices(new float[]
{-1f, -1f, 0, 1, 1, 1, 1, 0, 1,
1f, -1f, 0, 1, 1, 1, 1, 1, 1,
1f, 1f, 0, 1, 1, 1, 1, 1, 0,
-1f, 1f, 0, 1, 1, 1, 1, 0, 0});
mesh.setIndices(new short[] {0, 1, 2, 2, 3, 0});
return mesh;
}
@Override
public void dispose() {
shader.dispose();
quad.dispose();
for(int i=0; i<6; i++)
data[i].dispose();
}
In my main Java class I have:
env = new EnvironmentCubemap(Gdx.files.internal("assets/skybox/back.jpg"), Gdx.files.internal("assets/skybox/front.jpg"),
Gdx.files.internal("assets/skybox/top.jpg"), Gdx.files.internal("assets/skybox/bottom.jpg"),
Gdx.files.internal("assets/skybox/left.jpg"), Gdx.files.internal("assets/skybox/right.jpg"));
and in my render method:
modelBatch.begin(cam);
modelBatch.flush();
modelBatch.render(instance);
env.render(modelBatch.getCamera());
modelBatch.end();
// Stage
stage.act();
stage.draw();
Don't use a cubemap for something like a skybox; that's only overcomplicating things and won't gain you anything. Just create a box in your modelling application (or use ModelBuilder by creating 6 rectangles if you prefer, which you shouldn't) and map the texture onto it, just like you would for any other model you use. Don't forget that you want to look at it from the inside, so depending on your modelling application you might need to flip the normals or the vertex winding. Make sure that it is big enough (you could use a different camera for it if needed). If you are moving the camera a lot, then you probably want the box to follow the camera so you can't reach the end of the sky. Finally, make sure to render it without specifying an environment, so it won't be affected by any lighting and such.
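For illustration, here is a minimal sketch of that idea; the texture path "skybox.png", the 50-unit box size and the use of ModelBuilder.createBox are my assumptions, and disabling face culling is one way to see the box from the inside without flipping normals in a modelling tool:
// A textured box drawn as the sky, centered on the camera and rendered
// without an Environment so lighting does not affect it.
Texture skyTexture = new Texture(Gdx.files.internal("skybox.png")); // assumed asset
ModelBuilder builder = new ModelBuilder();
Model skyModel = builder.createBox(50f, 50f, 50f,
        new Material(TextureAttribute.createDiffuse(skyTexture),
                     IntAttribute.createCullFace(GL20.GL_NONE)), // visible from inside
        VertexAttributes.Usage.Position | VertexAttributes.Usage.TextureCoordinates);
ModelInstance sky = new ModelInstance(skyModel);
// in render():
sky.transform.setToTranslation(cam.position); // the sky follows the camera
modelBatch.begin(cam);
modelBatch.render(sky);        // no Environment passed for the sky
modelBatch.render(instance);   // the rest of the scene as before
modelBatch.end();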
Of course, whether you use a cubemap or not, using a box for the sky does have its limitations. Therefore, personally I'd recommend a skysphere or skydome instead. This tutorial might help as well: https://xoppa.github.io/blog/loading-a-scene-with-libgdx/.
I'm making an app for Android, and I have drawn a triangle. I can rotate it, but not move it!
My question is: why?
Say if you need more info!
Source code:
package com.uraniumdevs.projectx;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.ShortBuffer;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.opengl.Matrix;
import android.util.Log;
public class MyGL20Renderer implements GLSurfaceView.Renderer {
private static final String TAG = "MyGLRenderer";
private Triangle mTriangle;
private Square mSquare;
private final float[] mMVPMatrix = new float[16];
private final float[] mProjMatrix = new float[16];
private final float[] mVMatrix = new float[16];
private final float[] mRotationMatrix = new float[16];
private final float[] mTranslationMatrix = new float[16];
// Declare as volatile because we are updating it from another thread
public volatile float Angle;
public volatile float Tempx;
public volatile float Tempy;
@Override
public void onSurfaceCreated(GL10 unused, EGLConfig config) {
// Set the background frame color
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
mTriangle = new Triangle();
mSquare = new Square();
}
@Override
public void onDrawFrame(GL10 unused) {
// Draw background color
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
// Set the camera position (View matrix)
Matrix.setLookAtM(mVMatrix, 0, 0, 0, -3, 0f, 0f, 0f, 0f, 1.0f, 0.0f);
// Calculate the projection and view transformation
Matrix.multiplyMM(mMVPMatrix, 0, mProjMatrix, 0, mVMatrix, 0);
// Create a rotation for the triangle
Matrix.setRotateM(mRotationMatrix, 0, Angle, 0, 0, -1.0f);
Matrix.translateM(mTranslationMatrix, 0, Tempx, Tempy, 0);
// Combine the rotation matrix with the projection and camera view
Matrix.multiplyMM(mMVPMatrix, 0, mRotationMatrix, 0, mMVPMatrix, 0);
Matrix.multiplyMM(mMVPMatrix, 0, mTranslationMatrix, 0, mMVPMatrix, 0);
mTriangle.draw(mMVPMatrix);
}
@Override
public void onSurfaceChanged(GL10 unused, int width, int height) {
// Adjust the viewport based on geometry changes,
// such as screen rotation
GLES20.glViewport(0, 0, width, height);
float ratio = (float) width / height;
// this projection matrix is applied to object coordinates
// in the onDrawFrame() method
Matrix.frustumM(mProjMatrix, 0, -ratio, ratio, -1, 1, 3, 7);
}
public static int loadShader(int type, String shaderCode) {
// create a vertex shader type (GLES20.GL_VERTEX_SHADER)
// or a fragment shader type (GLES20.GL_FRAGMENT_SHADER)
int shader = GLES20.glCreateShader(type);
// add the source code to the shader and compile it
GLES20.glShaderSource(shader, shaderCode);
GLES20.glCompileShader(shader);
return shader;
}
/**
* Utility method for debugging OpenGL calls. Provide the name of the call
* just after making it:
*
* <pre>
* mColorHandle = GLES20.glGetUniformLocation(mProgram, "vColor");
* MyGLRenderer.checkGlError("glGetUniformLocation");
* </pre>
*
* If the operation is not successful, the check throws an error.
*
* @param glOperation
* - Name of the OpenGL call to check.
*/
public static void checkGlError(String glOperation) {
int error;
while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
Log.e(TAG, glOperation + ": glError " + error);
throw new RuntimeException(glOperation + ": glError " + error);
}
}
}
class Triangle {
private final String vertexShaderCode =
// This matrix member variable provides a hook to manipulate
// the coordinates of the objects that use this vertex shader
"uniform mat4 uMVPMatrix;" +
"attribute vec4 vPosition;" + "void main() {" +
// the matrix must be included as a modifier of gl_Position
" gl_Position = vPosition * uMVPMatrix;" + "}";
private final String fragmentShaderCode = "precision mediump float;"
+ "uniform vec4 vColor;" + "void main() {"
+ " gl_FragColor = vColor;" + "}";
private final FloatBuffer vertexBuffer;
private final int mProgram;
private int mPositionHandle;
private int mColorHandle;
private int mMVPMatrixHandle;
// number of coordinates per vertex in this array
static final int COORDS_PER_VERTEX = 3;
public static float triangleCoords[] = { // in counterclockwise order:
0.0f, 0.1f, 0.0f, // top
-0.1f, -0.1f, 0.0f, // bottom left
0.1f, -0.1f, 0.0f // bottom right
};
private final int vertexCount;
private final int vertexStride = COORDS_PER_VERTEX * 4; // 4 bytes per
// vertex
// Set color with red, green, blue and alpha (opacity) values
float color[] = { 0.63671875f, 0.76953125f, 0.22265625f, 1.0f };
public Triangle() {
vertexCount = triangleCoords.length / COORDS_PER_VERTEX;
// initialize vertex byte buffer for shape coordinates
ByteBuffer bb = ByteBuffer.allocateDirect(
// (number of coordinate values * 4 bytes per float)
triangleCoords.length * 4);
// use the device hardware's native byte order
bb.order(ByteOrder.nativeOrder());
// create a floating point buffer from the ByteBuffer
vertexBuffer = bb.asFloatBuffer();
// add the coordinates to the FloatBuffer
vertexBuffer.put(triangleCoords);
// set the buffer to read the first coordinate
vertexBuffer.position(0);
// prepare shaders and OpenGL program
int vertexShader = MyGL20Renderer.loadShader(GLES20.GL_VERTEX_SHADER,
vertexShaderCode);
int fragmentShader = MyGL20Renderer.loadShader(
GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode);
mProgram = GLES20.glCreateProgram(); // create empty OpenGL Program
GLES20.glAttachShader(mProgram, vertexShader); // add the vertex shader
// to program
GLES20.glAttachShader(mProgram, fragmentShader); // add the fragment
// shader to program
GLES20.glLinkProgram(mProgram); // create OpenGL program executables
}
public void draw(float[] mvpMatrix) {
// Add program to OpenGL environment
GLES20.glUseProgram(mProgram);
// get handle to vertex shader's vPosition member
mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
// Enable a handle to the triangle vertices
GLES20.glEnableVertexAttribArray(mPositionHandle);
// Prepare the triangle coordinate data
GLES20.glVertexAttribPointer(mPositionHandle, COORDS_PER_VERTEX,
GLES20.GL_FLOAT, false, vertexStride, vertexBuffer);
// get handle to fragment shader's vColor member
mColorHandle = GLES20.glGetUniformLocation(mProgram, "vColor");
// Set color for drawing the triangle
GLES20.glUniform4fv(mColorHandle, 1, color, 0);
// get handle to shape's transformation matrix
mMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
MyGL20Renderer.checkGlError("glGetUniformLocation");
// Apply the projection and view transformation
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mvpMatrix, 0);
MyGL20Renderer.checkGlError("glUniformMatrix4fv");
// Draw the triangle
GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, vertexCount);
// Disable vertex array
GLES20.glDisableVertexAttribArray(mPositionHandle);
}
}
class Square {
private final String vertexShaderCode =
// This matrix member variable provides a hook to manipulate
// the coordinates of the objects that use this vertex shader
"uniform mat4 uMVPMatrix;" +
"attribute vec4 vPosition;" + "void main() {" +
// the matrix must be included as a modifier of gl_Position
" gl_Position = vPosition * uMVPMatrix;" + "}";
private final String fragmentShaderCode = "precision mediump float;"
+ "uniform vec4 vColor;" + "void main() {"
+ " gl_FragColor = vColor;" + "}";
private final FloatBuffer vertexBuffer;
private final ShortBuffer drawListBuffer;
private final int mProgram;
private int mPositionHandle;
private int mColorHandle;
private int mMVPMatrixHandle;
// number of coordinates per vertex in this array
static final int COORDS_PER_VERTEX = 3;
static float squareCoords[] = { -0.5f, 0.5f, 0.0f, // top left
-0.5f, -0.5f, 0.0f, // bottom left
0.5f, -0.5f, 0.0f, // bottom right
0.5f, 0.5f, 0.0f }; // top right
private final short drawOrder[] = { 0, 1, 2, 0, 2, 3 }; // order to draw
// vertices
private final int vertexStride = COORDS_PER_VERTEX * 4; // 4 bytes per
// vertex
// Set color with red, green, blue and alpha (opacity) values
float color[] = { 0.2f, 0.709803922f, 0.898039216f, 1.0f };
public Square() {
// initialize vertex byte buffer for shape coordinates
ByteBuffer bb = ByteBuffer.allocateDirect(
// (# of coordinate values * 4 bytes per float)
squareCoords.length * 4);
bb.order(ByteOrder.nativeOrder());
vertexBuffer = bb.asFloatBuffer();
vertexBuffer.put(squareCoords);
vertexBuffer.position(0);
// initialize byte buffer for the draw list
ByteBuffer dlb = ByteBuffer.allocateDirect(
// (# of coordinate values * 2 bytes per short)
drawOrder.length * 2);
dlb.order(ByteOrder.nativeOrder());
drawListBuffer = dlb.asShortBuffer();
drawListBuffer.put(drawOrder);
drawListBuffer.position(0);
// prepare shaders and OpenGL program
int vertexShader = MyGL20Renderer.loadShader(GLES20.GL_VERTEX_SHADER,
vertexShaderCode);
int fragmentShader = MyGL20Renderer.loadShader(
GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode);
mProgram = GLES20.glCreateProgram(); // create empty OpenGL Program
GLES20.glAttachShader(mProgram, vertexShader); // add the vertex shader
// to program
GLES20.glAttachShader(mProgram, fragmentShader); // add the fragment
// shader to program
GLES20.glLinkProgram(mProgram); // create OpenGL program executables
}
public void draw(float[] mvpMatrix) {
// Add program to OpenGL environment
GLES20.glUseProgram(mProgram);
// get handle to vertex shader's vPosition member
mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
// Enable a handle to the triangle vertices
GLES20.glEnableVertexAttribArray(mPositionHandle);
// Prepare the triangle coordinate data
GLES20.glVertexAttribPointer(mPositionHandle, COORDS_PER_VERTEX,
GLES20.GL_FLOAT, false, vertexStride, vertexBuffer);
// get handle to fragment shader's vColor member
mColorHandle = GLES20.glGetUniformLocation(mProgram, "vColor");
// Set color for drawing the triangle
GLES20.glUniform4fv(mColorHandle, 1, color, 0);
// get handle to shape's transformation matrix
mMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
MyGL20Renderer.checkGlError("glGetUniformLocation");
// Apply the projection and view transformation
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mvpMatrix, 0);
MyGL20Renderer.checkGlError("glUniformMatrix4fv");
// Draw the square
GLES20.glDrawElements(GLES20.GL_TRIANGLES, drawOrder.length,
GLES20.GL_UNSIGNED_SHORT, drawListBuffer);
// Disable vertex array
GLES20.glDisableVertexAttribArray(mPositionHandle);
}
}
I think we are modifying the same OpenGLES20Activity from the Android developer training guide
http://developer.android.com/training/graphics/opengl/environment.html
In order to move and rotate the triangle, you may do this:
(In order to make the variables accessible from another class's object, I made them public.)
Add a mModelMatrix for the Triangle object. This is used to set the location and direction of the triangle. Initialize it in the constructor.
public class Triangle {
...
public float[] mModelMatrix = new float[16];
...
public Triangle() {
...
Matrix.setIdentityM(mModelMatrix, 0);
...
}
Use Matrix.translateM and Matrix.rotateM (not Matrix.setRotateM) to move and rotate the triangle. You can do these as many times as you like.
Matrix.translateM(mTriangle.mModelMatrix, 0, 0.5f, 0f, 0f);
Matrix.rotateM(mTriangle.mModelMatrix, 0, -45f, 0, 0, -1.0f);
Multiply the mViewMatrix and the mProjectionMatrix to create the mMVPMatrix, same as in the example. Then multiply the mMVPMatrix with the triangle's mModelMatrix.
public void onSurfaceChanged(GL10 unused, int width, int height) {
GLES20.glViewport(0, 0, width, height);
float ratio = (float) width / height;
// this projection matrix is applied to object coordinates
// in the onDrawFrame() method
Matrix.frustumM(mProjectionMatrix, 0, -ratio, ratio, -1, 1, 1, 10);
}
public void onDrawFrame(GL10 unused) {
float[] scratch = new float[16];
// Draw background color
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
// Set the camera position (View matrix)
Matrix.setLookAtM(mViewMatrix, 0, 0, 0, 5f, 0f, 0f, 0f, 0f, 1.0f, 0.0f);
// Calculate the projection and view transformation
Matrix.multiplyMM(mMVPMatrix, 0, mProjectionMatrix, 0, mViewMatrix, 0);
// Combine the model's translation & rotation matrix
// with the projection and camera view
// Note that the mMVPMatrix factor *must be first* in order
// for the matrix multiplication product to be correct.
Matrix.multiplyMM(scratch, 0, mMVPMatrix, 0, mTriangle.mModelMatrix, 0);
// Draw triangle
mTriangle.draw(scratch);
}
I am surprised this works at all. In OpenGL, if you use proper column-major matrices, you should multiply ModelViewProjectionMatrix * VertexPosition to transform your vertices.
This is very likely the cause of your problem.
In other words, in both of your shaders
gl_Position = vPosition * uMVPMatrix;
should probably be:
gl_Position = uMVPMatrix * vPosition;
Your original matrix multiplication would work in Direct3D, because it uses row-major matrices. This is something to keep in mind if you are porting code from HLSL.
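As a quick illustration (this check is my own example, not part of the original code): android.opengl.Matrix stores matrices column-major like OpenGL, and a translation applied as matrix * vector behaves as expected:
float[] m = new float[16];
Matrix.setIdentityM(m, 0);
Matrix.translateM(m, 0, 1f, 0f, 0f);       // translate +1 along x
float[] v = {0f, 0f, 0f, 1f};              // a point at the origin
float[] result = new float[4];
Matrix.multiplyMV(result, 0, m, 0, v, 0);  // result == {1, 0, 0, 1}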
I've just started learning OpenGL for Android and I'm having a weird problem when drawing a circle: some of its vertices stick to the left and top walls, making lines go out from the circle a bit randomly. Every time I restart the app they have a different position.
My DrawScreen class where the circle is drawn:
public class DrawScreen implements GLSurfaceView.Renderer {
Ball ball;
public float mAngle;
private int mProgram;
private int maPositionHandle;
private final String vertexShaderCode =
// This matrix member variable provides a hook to manipulate
// the coordinates of the objects that use this vertex shader
"uniform mat4 uMVPMatrix; \n" +
"attribute vec4 vPosition; \n" +
"void main(){ \n" +
// the matrix must be included as a modifier of gl_Position
" gl_Position = uMVPMatrix * vPosition; \n" +
"} \n";
private final String fragmentShaderCode =
"precision mediump float; \n" +
"void main(){ \n" +
" gl_FragColor = vec4 (0.63671875, 0.76953125, 0.22265625, 1.0); \n" +
"} \n";
private int muMVPMatrixHandle;
private float[] mMVPMatrix = new float[16];
private float[] mVMatrix = new float[16];
private float[] mProjMatrix = new float[16];
public void onSurfaceCreated(GL10 unused, EGLConfig config) {
ball = new Ball();
// Set the background frame color
GLES20.glClearColor(0.5f, 0.5f, 0.5f, 1.0f);
muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
Matrix.setLookAtM(mVMatrix, 0, 0, 0, -3, 0f, 0f, 0f, 0f, 1.0f, 0.0f);
ball.initShapes(240, 360, 50);
int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode);
int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode);
mProgram = GLES20.glCreateProgram(); // create empty OpenGL Program
GLES20.glAttachShader(mProgram, vertexShader); // add the vertex shader to program
GLES20.glAttachShader(mProgram, fragmentShader); // add the fragment shader to program
GLES20.glLinkProgram(mProgram); // creates OpenGL program executables
// get handle to the vertex shader's vPosition member
maPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
}
public void onDrawFrame(GL10 unused) {
// Redraw background color
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
// Add program to OpenGL environment
GLES20.glUseProgram(mProgram);
// Prepare the circle data
GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false, 0, ball.ballVB);
GLES20.glEnableVertexAttribArray(maPositionHandle);
// Apply a ModelView Projection transformation
Matrix.multiplyMM(mMVPMatrix, 0, mProjMatrix, 0, mVMatrix, 0);
GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
// Draw the circle
GLES20.glDrawArrays(GLES20.GL_LINE_LOOP, 0, (int) (ball.getNumSeg() * 3));
}
public void onSurfaceChanged(GL10 unused, int width, int height) {
GLES20.glViewport(0, 0, width, height);
float ratio = (float) width / height;
// this projection matrix is applied to object coodinates
// in the onDrawFrame() method
Matrix.frustumM(mProjMatrix, 0, -ratio, ratio, -1, 1, 3, 7);
muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
Matrix.setLookAtM(mVMatrix, 0, 0, 0, -3, 0f, 0f, 0f, 0f, 1.0f, 0.0f);
}
private int loadShader(int type, String shaderCode){
// create a vertex shader type (GLES20.GL_VERTEX_SHADER)
// or a fragment shader type (GLES20.GL_FRAGMENT_SHADER)
int shader = GLES20.glCreateShader(type);
// add the source code to the shader and compile it
GLES20.glShaderSource(shader, shaderCode);
GLES20.glCompileShader(shader);
return shader;
}
}
And my Ball class where the circle is created:
public class Ball {
public FloatBuffer ballVB;
private float cx, cy, r;
float numSegments = 360;
public void initShapes(float tx, float ty, float tr){
cx = (tx / 240.f) - 1.f;
cy = (ty / 360.f) - 1.f;
r = (tr / 240.f);
float ballCoords[] = new float[(int) (numSegments * 3)];
double theta = (2 * 3.1415926 / numSegments);
float c = (float) Math.cos(theta);//precalculate the sine and cosine
float s = (float) Math.sin(theta);
float t;
float x = r;//we start at angle = 0
float y = 0;
for(int i = 0; i < (numSegments * 3); i = i + 3 ) {
ballCoords[i] = (x + cx);
ballCoords[i + 1] = (y + cy);
ballCoords[i + 2] = (0);
//apply the rotation matrix
t = x;
x = c * x - s * y;
y = s * t + c * y;
}
// initialize vertex Buffer for triangle
ByteBuffer vbb = ByteBuffer.allocateDirect(
// (# of coordinate values * 4 bytes per float)
ballCoords.length * 4);
vbb.order(ByteOrder.nativeOrder());// use the device hardware's native byte order
ballVB = vbb.asFloatBuffer(); // create a floating point buffer from the ByteBuffer
ballVB.put(ballCoords); // add the coordinates to the FloatBuffer
ballVB.position(0); // set the buffer to read the first coordinate
}
public float getNumSeg(){
return numSegments;
}
}
I've been scouring the internet for hours but haven't found anything. Hope you guys can help me.
GLES20.glDrawArrays(GLES20.GL_LINE_LOOP, 0, (int) (ball.getNumSeg() * 3));
I'm suspicious of this: are "segments" referring to individual vertices?
The final argument to glDrawArrays is the number of vertices to draw, not the number of floats. You should probably remove the * 3 multiplier from the glDrawArrays call.
Your extra lines are probably the result of drawing garbage data, because you're drawing 3 times as many vertices as you've actually allocated.
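In other words, a suggested fix (assuming each segment contributes one vertex packed as 3 floats) would be:
// Count vertices, not floats: the buffer holds numSegments vertices of 3 floats each.
GLES20.glDrawArrays(GLES20.GL_LINE_LOOP, 0, (int) ball.getNumSeg());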
I've been following the tutorial at http://developer.android.com/resources/tutorials/opengl/opengl-es20.html for OpenGL ES on Android. I've gotten to the "Apply Projection and Camera View" section, but I always seem to get a blank screen with no triangle; the previous section worked perfectly fine. I also tried copy-pasting the entire tutorial into my code but got the same result. Changing the line:
gl_Position = uMVPMatrix * vPosition;
to:
gl_Position = vPosition;
puts the application back to the first section (triangle stretches depending on screen orientation). Any idea what the problem is? Here's the code I have so far just in case I missed something:
public class GLTest20Renderer implements Renderer {
private final String vertexShaderCode =
// This matrix member variable provides a hook to manipulate
// the coordinates of the objects that use this vertex shader
"uniform mat4 uMVPMatrix; \n" +
"attribute vec4 vPosition; \n" +
"void main(){ \n" +
// the matrix must be included as a modifier of gl_Position
" gl_Position = uMVPMatrix * vPosition; \n" +
"} \n";
private final String fragmentShaderCode =
"precision mediump float; \n" +
"void main(){ \n" +
" gl_FragColor = vec4 (0.63671875, 0.76953125, 0.22265625, 1.0); \n" +
"} \n";
private FloatBuffer triangleVB;
private int mProgram;
private int maPositionHandle;
private int muMVPMatrixHandle;
private float[] mMVPMatrix = new float[16];
private float[] mMMatrix = new float[16];
private float[] mVMatrix = new float[16];
private float[] mProjMatrix = new float[16];
public void onSurfaceCreated(GL10 unused, EGLConfig config) {
GLES20.glClearColor(0.5f, 0.5f, 0.5f, 1.0f);
initShapes();
int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode);
int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode);
mProgram = GLES20.glCreateProgram(); // create empty OpenGL Program
GLES20.glAttachShader(mProgram, vertexShader); // add the vertex shader to program
GLES20.glAttachShader(mProgram, fragmentShader); // add the fragment shader to program
GLES20.glLinkProgram(mProgram); // creates OpenGL program executables
// get handle to the vertex shader's vPosition member
maPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
}
public void onDrawFrame(GL10 unused) {
GLES20.glClear( GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT );
// Add program to OpenGL environment
GLES20.glUseProgram(mProgram);
// Prepare the triangle data
GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false, 12, triangleVB);
GLES20.glEnableVertexAttribArray(maPositionHandle);
// Apply a ModelView Projection transformation
Matrix.multiplyMM(mMVPMatrix, 0, mProjMatrix, 0, mVMatrix, 0);
GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
// Draw the triangle
GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, 3);
}
public void onSurfaceChanged(GL10 unused, int width, int height) {
GLES20.glViewport(0, 0, width, height);
float ratio = (float) width / height;
Matrix.frustumM(mProjMatrix, 0, -ratio, ratio, -1, 1, 3, 7);
muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
Matrix.setLookAtM(mVMatrix, 0, 0, 0, -3, 0f, 0f, 0f, 0f, 1.0f, 0.0f);
}
private void initShapes() {
float triangleCoords[] = {
// X, Y, Z
-0.5f, -0.25f, 0,
0.5f, -0.25f, 0,
0.0f, 0.559016994f, 0
};
// initialize vertex Buffer for triangle
ByteBuffer vbb = ByteBuffer.allocateDirect(
// (# of coordinate values * 4 bytes per float)
triangleCoords.length * 4);
vbb.order(ByteOrder.nativeOrder());// use the device hardware's native byte order
triangleVB = vbb.asFloatBuffer(); // create a floating point buffer from the ByteBuffer
triangleVB.put(triangleCoords); // add the coordinates to the FloatBuffer
triangleVB.position(0); // set the buffer to read the first coordinate
}
private int loadShader(int type, String shaderCode) {
// create a vertex shader type (GLES20.GL_VERTEX_SHADER)
// or a fragment shader type (GLES20.GL_FRAGMENT_SHADER)
int shader = GLES20.glCreateShader(type);
// add the source code to the shader and compile it
GLES20.glShaderSource(shader, shaderCode);
GLES20.glCompileShader(shader);
return shader;
}
}
I'm running all this on a Samsung Galaxy S2.
Fixed: I just changed the near plane of the frustum to be under 3. The camera sits 3 units from the triangle (the eye is at z = -3 looking at the origin), so with a near plane of 3 the triangle was sitting right on the near plane and getting clipped:
Matrix.frustumM(mProjMatrix, 0, -ratio, ratio, -1, 1, 2, 7);
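An alternative fix (my suggestion, not part of the original answer) is to keep the near plane at 3 and move the camera further back so the triangle at z = 0 ends up well inside the frustum:
// Pull the eye back to z = -5 so the triangle (at z = 0) lies between near (3) and far (7).
Matrix.setLookAtM(mVMatrix, 0, 0, 0, -5, 0f, 0f, 0f, 0f, 1.0f, 0.0f);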