I'm having trouble figuring out what's wrong with my OBJ file parser for Android; it just draws some skewed triangles instead of a cube shape.
Code for the parser:
public ObjLoader(Context context){
loaderContext = context;
am = context.getAssets();
InputStream file = getFile("3dsmax.obj");
BufferedReader br = new BufferedReader(new InputStreamReader(file));
String str;
ArrayList<Float> tempModelVertices = new ArrayList<Float>();
ArrayList<Float> tempTextureVertices = new ArrayList<Float>();
ArrayList<Float> tempNormalVertices = new ArrayList<Float>();
ArrayList<Integer> facesM = new ArrayList<Integer>();
ArrayList<Integer> facesT = new ArrayList<Integer>();
ArrayList<Integer> facesN = new ArrayList<Integer>();
try {
while((str = br.readLine())!=null){
if(str.startsWith("f")){
String[] strAr = str.replaceAll("f", "").trim().split(" ");
for(String s : strAr){
String[] cornerAr = s.split("/");
facesM.add(Integer.parseInt(cornerAr[0].trim())-1);
facesT.add(Integer.parseInt(cornerAr[1].trim())-1);
facesN.add(Integer.parseInt(cornerAr[2].trim())-1);
}
}
else if(str.startsWith("vt")){
String[] strAr = str.replaceAll("vt", "").trim().split(" ");
tempTextureVertices.add(Float.valueOf(strAr[0].trim()));
tempTextureVertices.add(-1*Float.valueOf(strAr[1].trim()));
}
else if(str.startsWith("vn")){
String[] strAr = str.replaceAll("vn", "").trim().split(" ");
tempNormalVertices.add(Float.valueOf(strAr[0].trim()));
tempNormalVertices.add(Float.valueOf(strAr[1].trim()));
tempNormalVertices.add(Float.valueOf(strAr[2].trim()));
}
else if(str.startsWith("v")){
String[] strAr = str.replaceAll("v", "").trim().split(" ");
tempModelVertices.add(Float.valueOf(strAr[0].trim()));
tempModelVertices.add(Float.valueOf(strAr[1].trim()));
tempModelVertices.add(Float.valueOf(strAr[2].trim()));
}
}
//Log.v(LOG_TAG, "v :"+ String.valueOf(v) + "vt :"+ String.valueOf(vt) + "vn :"+ String.valueOf(vn) + "f :"+ String.valueOf(f));
} catch (IOException e) {
// TODO Auto-generated catch block
Log.v(LOG_TAG, "error");
}
Log.v(LOG_TAG, "vt " + String.valueOf(tempTextureVertices.size()) + " vn " + String.valueOf(tempNormalVertices.size()) + " v " + String.valueOf(tempModelVertices.size()));
modelVertices = new float[facesM.size()];
textureVertices = new float[facesT.size()];
normalVertices = new float[facesN.size()];
for(int i=0; i<facesM.size(); i++){
modelVertices[i] = tempModelVertices.get(facesM.get(i));
}
for(int i=0; i<facesT.size(); i++){
textureVertices[i] = tempTextureVertices.get(facesT.get(i));
}
for(int i=0; i<facesN.size(); i++){
normalVertices[i] = tempNormalVertices.get(facesN.get(i));
}
for(float f: modelVertices){
}
Code for drawing it using OpenGL:
Initialization
ObjLoader obj = new ObjLoader(mActivityContext);
totalEle = obj.modelVertices.length;
// Initialize the buffers.
mSquareCoords = ByteBuffer.allocateDirect(obj.modelVertices.length * mBytesPerFloat)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
mSquareCoords.put(obj.modelVertices).position(0);
mSqaureTextureCoords = ByteBuffer.allocateDirect(obj.textureVertices.length * mBytesPerFloat)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
mSqaureTextureCoords.put(obj.textureVertices).position(0);
mSquareNormalCoords = ByteBuffer.allocateDirect(obj.normalVertices.length * mBytesPerFloat)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
mSquareNormalCoords.put(obj.normalVertices).position(0);
And the onDrawFrame:
mMVPMatrixHandle = GLES20.glGetUniformLocation(mProgramHandle, "u_MVPMatrix");
mMVMatrixHandle = GLES20.glGetUniformLocation(mProgramHandle, "u_MVMatrix");
mTextureUniformHandle = GLES20.glGetUniformLocation(mProgramHandle, "u_Texture");
mPositionHandle = GLES20.glGetAttribLocation(mProgramHandle, "a_Position");
mTextureCoordinateHandle = GLES20.glGetAttribLocation(mProgramHandle, "a_TexCoordinate");
mNormalHandle = GLES20.glGetAttribLocation(mProgramHandle, "a_Normal");
if(updateTexture)
{
if(mTextureDataHandle != null){
GLES20.glDeleteTextures(1, mTextureDataHandle, 0);
}
mTextureDataHandle = TextureHelper.loadTexture(texture);
//mTextureDataHandle = TextureHelper.loadTexture(getFile("coinAnimation/1230025.png"));
//mTextureDataHandle = TextureHelper.loadTexture(mActivityContext, textureId);
textureWidth = mTextureDataHandle[1];
textureHeight = mTextureDataHandle[2];
updateTexture = false;
}
//GLES20.glBlendFunc(GLES20.GL_SRC_COLOR, GLES20.GL_DST_ALPHA);
// Set the active texture unit to texture unit 0.
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
// Bind the texture to this unit.
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureDataHandle[0]);
// Tell the texture uniform sampler to use this texture in the shader by binding to texture unit 0.
GLES20.glUniform1i(mTextureUniformHandle, 0);
long time = SystemClock.uptimeMillis() % 10000L;
float angleInDegrees = (360.0f / 10000.0f) * ((int) time);
//Identity matrix of the object
Matrix.setIdentityM(mModelMatrix, 0);
//Matrix.scaleM(mModelMatrix, 0, 0.2f, 0.2f, 1.0f);
Matrix.rotateM(mModelMatrix, 0, angleInDegrees, 0.0f, 0.0f, 1.0f);
//Scaling
//Matrix.scaleM(mModelMatrix, 0, 1.0f, 1.0f, 0.0f);
//Moving
Matrix.translateM(mModelMatrix, 0, 0.0f, 0.0f, -7.0f);
//Rotating
// Pass in the position information
mSquareCoords.position(0);
GLES20.glVertexAttribPointer(mPositionHandle, mCoordsSize, GLES20.GL_FLOAT, false,
0, mSquareCoords);
GLES20.glEnableVertexAttribArray(mPositionHandle);
// Pass in the normal information
mSquareNormalCoords.position(0);
GLES20.glVertexAttribPointer(mNormalHandle, mNormalDataSize, GLES20.GL_FLOAT, false,
0, mSquareNormalCoords);
GLES20.glEnableVertexAttribArray(mNormalHandle);
mSqaureTextureCoords.position(0);
GLES20.glVertexAttribPointer(mTextureCoordinateHandle, mTextureCoordinateDataSize, GLES20.GL_FLOAT, false,
0, mSqaureTextureCoords);
GLES20.glEnableVertexAttribArray(mTextureCoordinateHandle);
// This multiplies the view matrix by the model matrix, and stores the result in the MVP matrix
// (which currently contains model * view).
Matrix.multiplyMM(mMVPMatrix, 0, mViewMatrix, 0, mModelMatrix, 0);
// Pass in the modelview matrix.
GLES20.glUniformMatrix4fv(mMVMatrixHandle, 1, false, mMVPMatrix, 0);
// This multiplies the modelview matrix by the projection matrix, and stores the result in the MVP matrix
// (which now contains model * view * projection).
Matrix.multiplyMM(mMVPMatrix, 0, mProjectionMatrix, 0, mMVPMatrix, 0);
// Pass in the combined matrix.
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mMVPMatrix, 0);
// Draw the cube.
GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, totalEle);
No need to code what others have done already. Check this
What are the sizes of mCoordsSize, mNormalDataSize, mTextureCoordinateDataSize?
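If those are the usual tightly packed values, the flattened arrays have to contain that many floats per face corner. A sketch of what I mean (the exact values are an assumption, since they are not shown in the question):
// Typical per-vertex component counts for a stride-0, tightly packed layout:
mCoordsSize = 3;                  // x, y, z per position
mNormalDataSize = 3;              // nx, ny, nz per normal
mTextureCoordinateDataSize = 2;   // u, v per texture coordinate
// With these sizes, modelVertices needs 3 floats per face index
// (facesM.size() * 3 entries in total), not one float per index.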
Also move GLES20.glEnableVertexAttribArray(mPositionHandle); before passing the pointer:
GLES20.glEnableVertexAttribArray(mPositionHandle);
GLES20.glVertexAttribPointer(mPositionHandle, mCoordsSize, GLES20.GL_FLOAT, false,
0, mSquareCoords);
Try to isolate if the problem is in your obj parsing code or in your drawing code. For example generate the vertex data yourself.
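For example, something along these lines (a minimal sketch, not your code) replaces the loader output with a known-good triangle so the draw path can be checked on its own:
// One hardcoded triangle: 3 vertices, 3 floats each (matches a position size of 3).
float[] testVertices = {
    -0.5f, -0.5f, 0.0f,
     0.5f, -0.5f, 0.0f,
     0.0f,  0.5f, 0.0f
};
mSquareCoords = ByteBuffer.allocateDirect(testVertices.length * mBytesPerFloat)
        .order(ByteOrder.nativeOrder()).asFloatBuffer();
mSquareCoords.put(testVertices).position(0);
// glDrawArrays expects a vertex count, not a float count.
totalEle = testVertices.length / 3;
If this triangle shows up correctly, the problem is in the parser; if not, it is in the drawing code.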
Related
I'm learning to use LibGDX and my goal is to create a cube with which you can control the resolution (the number of vertices along each face). I already did that, and managed to use MeshBuilder to make it out of 6 different meshes and then render the resulting Mesh successfully using basic shaders:
Cube Mesh
//creates a square face with a normal vector and resolution number of vertices along any edge of the face
public Mesh createFace(Vector3 normal, int resolution) {
//creates 2 vectors perpendicular to each other and to the vector normal
Vector3 axisA = new Vector3(normal.y,normal.z,normal.x);
Vector3 axis = u.crs(normal, axisA);
Vector3 axisB = new Vector3(u.sqrt(axis.x),u.sqrt(axis.y),u.sqrt(axis.z));
//creates the arrays to hold the vertices and triangles
Vector3[] vertices = new Vector3[resolution * resolution];
//code for triangles
short[] triangles = new short[(resolution - 1) * (resolution - 1) * 6];
int triIndex = 0;
//looping over each vertex in the face
for (int y = 0; y < resolution; y++) {
for (int x = 0; x < resolution; x++) {
int vertexIndex = x + y * resolution;
//vector representing how close to the end of the x or y axis the loop is
Vector2 t = new Vector2(x / (resolution - 1f),y / (resolution - 1f));
//calculates the position of the vertex to place on the face
Vector3 mulA = u.mul(axisA, (2*t.x - 1));
Vector3 mulB = u.mul(axisB, (2*t.y-1));
Vector3 point = u.add3(normal, mulA, mulB);
//point = u.normalize(point);
vertices[vertexIndex] = point;
//puts the vertices into triangles
if (x != resolution - 1 && y != resolution - 1) {
triangles[triIndex + 0] = (short) vertexIndex;
triangles[triIndex + 1] = (short) (vertexIndex + resolution + 1);
triangles[triIndex + 2] = (short) (vertexIndex + resolution);
triangles[triIndex + 3] = (short) vertexIndex;
triangles[triIndex + 4] = (short) (vertexIndex + 1);
triangles[triIndex + 5] = (short) (vertexIndex + resolution + 1);
triIndex += 6;
}
}
}
float[] verticeList = u.vectorToList(vertices);
Mesh m = new Mesh(true, resolution * resolution, triangles.length, new VertexAttribute(Usage.Position,3,"a_Position"));
m.setIndices(triangles);
m.setVertices(verticeList);
return m;
}
//generates a cube Mesh with resolution vertices along each face
public Mesh generateFaces(int resolution, float scale) {
MeshBuilder meshBuilder = new MeshBuilder();
meshBuilder.begin(new VertexAttributes(new VertexAttribute (Usage.Position, 3 ,"a_Position")));
Vector3[] faceNormals = {
new Vector3(0,1*scale,0), //up
new Vector3(0,-1*scale,0), //down
new Vector3(-1*scale,0,0), //left
new Vector3(1*scale,0,0), //right
new Vector3(0,0,1*scale), //forward
new Vector3(0,0,-1*scale) //back
};
for (int i = 0; i < faceNormals.length; i++) {
meshBuilder.part("part"+ Integer.toString(i), GL20.GL_TRIANGLES);
meshBuilder.addMesh(createFace(faceNormals[i], resolution));
}
Mesh mesh = meshBuilder.end();
return mesh;
}
u is just a utilities class I created to store some math functions.
I then render it like so:
@Override
public void render () {
camController.update();
Gdx.gl.glViewport(0, 0, Gdx.graphics.getWidth(), Gdx.graphics.getHeight());
Gdx.gl.glClearColor(0.5f, 0.5f, 0.5f, 0.5f);
Gdx.gl.glClear(GL20.GL_COLOR_BUFFER_BIT | GL20.GL_DEPTH_BUFFER_BIT);
shader.bind();
shader.setUniformMatrix("matViewProj", cam.combined);
//rendering mesh
mesh1.render(shader, GL20.GL_LINE_STRIP);
[...]
}
I now want to make a model out of that mesh where each of the 6 faces will have a different color.
I thus tried to do it using a ModelBuilder following the LibGDX wiki, like so:
public Model generateModel(int resolution, float scale, Color[] colors) {
Vector3[] faceNormals = {
new Vector3(0,1*scale,0), //up
new Vector3(0,-1*scale,0), //down
new Vector3(-1*scale,0,0), //left
new Vector3(1*scale,0,0), //right
new Vector3(0,0,1*scale), //forward
new Vector3(0,0,-1*scale) //back
};
ModelBuilder modelBuilder = new ModelBuilder();
modelBuilder.begin();
for (int i = 0; i < faceNormals.length; i++) {
Mesh mesh = createFace(faceNormals[i], resolution);
MeshPart part = new MeshPart("part"+Integer.toString(i),mesh, 0, mesh.getNumVertices() ,GL20.GL_TRIANGLES);
modelBuilder.node().parts.add(new NodePart(part, new Material(ColorAttribute.createDiffuse(colors[i]))));
}
Model m = modelBuilder.end();
return m;
}
And then I rendered it using a ModelBatch and ModelInstance:
@Override
public void create () {
//creates an environment to handle lighting and such
environment = new Environment();
environment.set(new ColorAttribute(ColorAttribute.AmbientLight,0.4f,0.4f,0.4f,1f));
environment.add(new DirectionalLight().set(0.8f,0.8f,0.8f,-1f,-0.8f,-0.2f));
modelBatch = new ModelBatch();
//handling the inputProcessors of the camera and stage(UI)
multiplexer = new InputMultiplexer();
stage = new Stage();
multiplexer.addProcessor(stage);
scroll = new ScrolledInputProcessor();
multiplexer.addProcessor(scroll);
//camera (3D inputProcessor)
cam = new PerspectiveCamera(67,Gdx.graphics.getWidth(),Gdx.graphics.getHeight());
cam.position.set(10f,10f,10f);
cam.lookAt(0,0,0);
cam.near = 1f;
cam.far = 300f;
cam.update();
camController = new CameraInputController(cam);
multiplexer.addProcessor(camController);
//shaders for every vertex and every pixel(fragment)
shader = new ShaderProgram(Gdx.files.internal("shader/vertexshader.glsl").readString() ,Gdx.files.internal("shader/fragmentshader.glsl").readString());
shader2 = new ShaderProgram(Gdx.files.internal("shader/vertexshader.glsl").readString() ,Gdx.files.internal("shader/fragmentshader2.glsl").readString());
//The 2D box encompassing the screen (UI)
table = new Table();
table.setFillParent(true);
stage.addActor(table);
//skins for UI
skin = new Skin(Gdx.files.internal("uiskin.json"));
//making a slider and dressing it in the skin
Drawable knobDown = skin.newDrawable("default-slider-knob", Color.GRAY);
SliderStyle sliderStyle = skin.get("default-horizontal", SliderStyle.class);
sliderStyle.knobDown = knobDown;
slider = new Slider(3.0f, 70.0f, 1.0f, false, sliderStyle);
table.right().top();
table.add(slider).row();
//creates the unit cube and unit sphere
model = generateModel(res, 1, colors);
instance = new ModelInstance(model);
font = new BitmapFont(Gdx.files.internal("uiskin.fnt"));
batch = new SpriteBatch();
Gdx.input.setInputProcessor(multiplexer);
}
@Override
public void render () {
camController.update();
Gdx.gl.glViewport(0, 0, Gdx.graphics.getWidth(), Gdx.graphics.getHeight());
Gdx.gl.glClearColor(0.5f, 0.5f, 0.5f, 0.5f);
Gdx.gl.glClear(GL20.GL_COLOR_BUFFER_BIT | GL20.GL_DEPTH_BUFFER_BIT);
shader.bind();
shader.setUniformMatrix("matViewProj", cam.combined);
modelBatch.begin(cam);
modelBatch.render(instance, environment);
modelBatch.end();
batch.begin();
font.draw(batch, "Zoom Level : " + zoomLevel, 1000f, 100f);
batch.end();
stage.act(Gdx.graphics.getDeltaTime());
stage.draw();
}
However, when I run the program, nothing is rendered, just the gray void.
Gray void of nothingness
My question is: How do I get my model to render?
I am trying to create a transparent texture, but I don't know what the internal format parameter is in newTextureData(). I tried using GL4.GL_RGBA32F, but the texture still isn't transparent.
Code looks something like this:
//clear background (in a GameLoop class)
gl.glClearColor(0.0f, 0.0f, 0.2f, 1.0f);
gl.glClear(gl.GL_COLOR_BUFFER_BIT);
//update sprite vao (method in Sprite class)
gl.glBindVertexArray(vao[0]);
gl.glBindBuffer(GL4.GL_ELEMENT_ARRAY_BUFFER, ebo[0]);
gl.glBindBuffer(GL.GL_ARRAY_BUFFER, vbo[0]);
gl.glBufferData(GL4.GL_ELEMENT_ARRAY_BUFFER, indexData.capacity() * 4L, indexData, GL4.GL_STATIC_DRAW);
gl.glBufferData(GL4.GL_ARRAY_BUFFER, vertexData.capacity() * 4L, vertexData, GL.GL_STATIC_DRAW);
int stride = (3 + vertexColorLength + 2) * 4;
int textureCoordsOffset = stride - 2 * 4;
gl.glVertexAttribPointer(0, 3, GL.GL_FLOAT, false, stride, 0);
gl.glEnableVertexAttribArray(0);
gl.glVertexAttribPointer(1, vertexColorLength, GL.GL_FLOAT, false, stride, 12);
gl.glEnableVertexAttribArray(1);
gl.glVertexAttribPointer(2, 2, GL.GL_FLOAT, false, stride, textureCoordsOffset);
gl.glEnableVertexAttribArray(2);
gl.glBindVertexArray(0);
gl.glBindBuffer(GL4.GL_ELEMENT_ARRAY_BUFFER, 0);
gl.glBindBuffer(GL4.GL_ARRAY_BUFFER, 0);
//load textures (in Sprite constructor)
for (String path : texturePaths) {
File textureFile = new File(path);
TextureData textureData;
try {
textureData = TextureIO.newTextureData(gl.getGLProfile(), textureFile, GL4.GL_RGBA16, GL4.GL_RGBA, false, TextureIO.PNG);
textures.add(TextureIO.newTexture(textureData));
} catch (IOException e) {
System.err.println("Failed to load Sprite texture");
e.printStackTrace();
}
}
//display Sprite (method in Sprite class)
program.setUniforms();
gl.glBindVertexArray(vao[0]);
program.use();
gl.glActiveTexture(GL.GL_TEXTURE0);
gl.glUniform1i(gl.glGetUniformLocation(program.ID, "u_texture0"), 0);
textures.get(0).enable(gl);
textures.get(0).bind(gl);
gl.glUniform1i(gl.glGetUniformLocation(program.ID, "u_textureAmnt"), textures.size());
gl.glDrawElements(GL.GL_TRIANGLES, 6, GL.GL_UNSIGNED_INT, 0);
for(Texture texture : textures){
texture.disable(gl);
}
I forgot to enable GL_BLEND...
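The missing piece was roughly this (a minimal sketch of the blending setup; the exact blend function is an assumption, GL_SRC_ALPHA / GL_ONE_MINUS_SRC_ALPHA being the usual choice for straight-alpha PNGs):
//enable alpha blending once during GL setup (e.g. next to glClearColor)
gl.glEnable(GL.GL_BLEND);
gl.glBlendFunc(GL.GL_SRC_ALPHA, GL.GL_ONE_MINUS_SRC_ALPHA);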
So I've been working for two nights with this code that I got from my teacher. I have been looking for some good Javadoc on JOGL without much success, so I've been using trial and error, changing the variables here and there. I've learned how to control rotation, distance and size, so I made myself a little "Solar System". But here comes my problem: how can I implement multiple textures for the different planets I've made? Here's my code:
public class RelativeTransformation implements GLEventListener, KeyListener {
// OpenGL window reference
private static GLWindow window;
// The animator is responsible for continuous operation
private static Animator animator;
// The program entry point
public static void main(String[] args) {
new RelativeTransformation().setup();
}
// Vertex data
private float[] vertexData;
// Triangle data
private short[] elementData;
// Light properties (4 valued vectors due to std140 see OpenGL 4.5 reference)
private float[] lightProperties = {
// Position
2f, 0f, 3f, 0f,
// Ambient Color
0.2f, 0.2f, 0.2f, 0f,
// Diffuse Color
0.5f, 0.5f, 0.5f, 0f,
// Specular Color
1f, 1f, 1f, 0f
};
private float[] materialProperties = {
// Shininess
8f
};
// Camera properties
private float[] cameraProperties = {
0f, 0f, 2f
};
// The OpenGL profile
GLProfile glProfile;
// The texture filename
private final String textureFilename = "src/relative_transformation/sun.jpg";
private final String textureFilename2 = "src/relative_transformation/earth.jpg";
// Create buffers for the names
private IntBuffer bufferNames = GLBuffers.newDirectIntBuffer(Buffer.MAX);
private IntBuffer vertexArrayName = GLBuffers.newDirectIntBuffer(1);
private IntBuffer textureNames = GLBuffers.newDirectIntBuffer(1);
// Create buffers for clear values
private FloatBuffer clearColor = GLBuffers.newDirectFloatBuffer(new float[] {0, 0, 0, 0});
private FloatBuffer clearDepth = GLBuffers.newDirectFloatBuffer(new float[] {1});
// Create references to buffers for holding the matrices
private ByteBuffer globalMatricesPointer, modelMatrixPointer1, modelMatrixPointer2, modelMatrixPointer3;
// Program instance reference
private Program program;
// Variable for storing the start time of the application
private long start;
// Application setup function
private void setup() {
// Get an OpenGL 4.x profile (x >= 0)
glProfile = GLProfile.get(GLProfile.GL4);
// Get a structure for defining the OpenGL capabilities with default values
GLCapabilities glCapabilities = new GLCapabilities(glProfile);
// Create the window with default capabilities
window = GLWindow.create(glCapabilities);
// Set the title of the window
window.setTitle("Relative Transformation");
// Set the size of the window
window.setSize(1024, 768);
// Set debug context (must be set before the window is set to visible)
window.setContextCreationFlags(GLContext.CTX_OPTION_DEBUG);
// Make the window visible
window.setVisible(true);
// Add OpenGL and keyboard event listeners
window.addGLEventListener(this);
window.addKeyListener(this);
// Create and start the animator
animator = new Animator(window);
animator.start();
// Add window event listener
window.addWindowListener(new WindowAdapter() {
// Window has been destroyed
@Override
public void windowDestroyed(WindowEvent e) {
// Stop animator and exit
animator.stop();
System.exit(1);
}
});
}
// GLEventListener.init implementation
@Override
public void init(GLAutoDrawable drawable) {
// Get OpenGL 4 reference
GL4 gl = drawable.getGL().getGL4();
// Initialize debugging
initDebug(gl);
// Initialize buffers
initBuffers(gl);
// Initialize vertex array
initVertexArray(gl);
// Initialize texture
initTexture(gl);
// Set up the program
program = new Program(gl, "relative_transformation", "shader", "shader");
// Enable Opengl depth buffer testing
gl.glEnable(GL_DEPTH_TEST);
// Store the starting time of the application
start = System.currentTimeMillis();
}
// GLEventListener.display implementation
@Override
public void display(GLAutoDrawable drawable) {
// Get OpenGL 4 reference
GL4 gl = drawable.getGL().getGL4();
// Copy the view matrix to the server
{
// Create identity matrix
float[] view = FloatUtil.makeTranslation(new float[16], 0, false, -cameraProperties[0], -cameraProperties[1], -cameraProperties[2]);
// Copy each of the values to the second of the two global matrices
for (int i = 0; i < 16; i++)
globalMatricesPointer.putFloat(16 * 4 + i * 4, view[i]);
}
// Clear the color and depth buffers
gl.glClearBufferfv(GL_COLOR, 0, clearColor);
gl.glClearBufferfv(GL_DEPTH, 0, clearDepth);
// Copy the model matrices to the server
{
// Find a time delta for the time passed since the start of execution
long now = System.currentTimeMillis();
float diff = (float) (now - start) / 2000;
// Create a rotation matrix around the z axis based on the time delta
// Make the 2 rotations nest inside each other, relate the 2nd to the 1st and turn the speed up! See Universe.java (model and modelPos?)
float[] rotate1 = FloatUtil.makeRotationAxis(new float[16], 0, 00.5f*diff, 0f, 1f, 0f, new float[3]);
float[] rotate2 = FloatUtil.makeRotationAxis(new float[16], 0, 01.0f*diff, 0f, 1f, 0f, new float[3]);
float[] rotate3 = FloatUtil.makeRotationAxis(new float[16], 0, 15.0f*diff, 0f, 1f, 0f, new float[3]);
float[] translate2 = FloatUtil.makeTranslation(new float[16], false, 1.4f, 0f, 0f);
float[] translate3 = FloatUtil.makeTranslation(new float[16], false, 0.0f, 0f, 0f);
float[] modelPos2 = FloatUtil.multMatrix(rotate1, FloatUtil.multMatrix(rotate2, translate2, new float[16]), new float[16]);
float[] model2 = FloatUtil.multMatrix(modelPos2, FloatUtil.makeScale(new float[16], false, 0.1f, 0.1f, 0.1f), new float[16]);
float[] modelPos3 = FloatUtil.multMatrix(modelPos2, FloatUtil.multMatrix(rotate3, translate3, new float[16]), new float[16]);
float[] model3 = FloatUtil.multMatrix(modelPos3, FloatUtil.makeScale(new float[16], false, 0.5f, 0.5f, 0.5f), new float[16]);
// Copy the entire matrix to the server
modelMatrixPointer1.asFloatBuffer().put(rotate1);
modelMatrixPointer2.asFloatBuffer().put(model2);
modelMatrixPointer3.asFloatBuffer().put(model3);
}
// Activate the vertex program and vertex array
gl.glUseProgram(program.name);
gl.glBindVertexArray(vertexArrayName.get(0));
gl.glBindTexture(gl.GL_TEXTURE_2D, textureNames.get(0));
// Bind the global matrices buffer to a specified index within the uniform buffers
gl.glBindBufferBase(
GL_UNIFORM_BUFFER,
Semantic.Uniform.TRANSFORM0,
bufferNames.get(Buffer.GLOBAL_MATRICES));
// Bind the light properties buffer to a specified uniform index
gl.glBindBufferBase(
GL_UNIFORM_BUFFER,
Semantic.Uniform.LIGHT0,
bufferNames.get(Buffer.LIGHT_PROPERTIES));
// Bind the light properties buffer to a specified uniform index
gl.glBindBufferBase(
GL_UNIFORM_BUFFER,
Semantic.Uniform.MATERIAL,
bufferNames.get(Buffer.MATERIAL_PROPERTIES));
// Bind the light properties buffer to a specified uniform index
gl.glBindBufferBase(
GL_UNIFORM_BUFFER,
Semantic.Uniform.CAMERA,
bufferNames.get(Buffer.CAMERA_PROPERTIES));
// Bind the model matrix buffer to a specified index within the uniform buffers
gl.glBindBufferBase(
GL_UNIFORM_BUFFER,
Semantic.Uniform.TRANSFORM1,
bufferNames.get(Buffer.MODEL_MATRIX1));
// Draw the triangle
gl.glDrawElements(
GL_TRIANGLES,
elementData.length,
GL_UNSIGNED_SHORT,
0);
// Bind the model matrix buffer to a specified index within the uniform buffers
gl.glBindBufferBase(
GL_UNIFORM_BUFFER,
Semantic.Uniform.TRANSFORM1,
bufferNames.get(Buffer.MODEL_MATRIX2));
// Draw the triangle
gl.glDrawElements(
GL_TRIANGLES,
elementData.length,
GL_UNSIGNED_SHORT,
0);
// Bind the model matrix buffer to a specified index within the uniform buffers
gl.glBindBufferBase(
GL_UNIFORM_BUFFER,
Semantic.Uniform.TRANSFORM1,
bufferNames.get(Buffer.MODEL_MATRIX3));
// Draw the triangle
gl.glDrawElements(
GL_TRIANGLES,
elementData.length,
GL_UNSIGNED_SHORT,
0);
// Deactivate the program and vertex array
gl.glUseProgram(0);
gl.glBindVertexArray(0);
gl.glBindTexture(gl.GL_TEXTURE_2D, 0);
}
// GLEventListener.reshape implementation
@Override
public void reshape(GLAutoDrawable drawable, int x, int y, int width, int height) {
// Get OpenGL 4 reference
GL4 gl = drawable.getGL().getGL4();
// Create a perspective projection matrix
float[] ortho = FloatUtil.makePerspective(new float[16], 0, false, (float)Math.PI/2f, (float)width/height, 0.1f, 100f);
// Copy the projection matrix to the server
globalMatricesPointer.asFloatBuffer().put(ortho);
// Set the OpenGL viewport
gl.glViewport(x, y, width, height);
}
// GLEventListener.dispose implementation
@Override
public void dispose(GLAutoDrawable drawable) {
// Get OpenGL 4 reference
GL4 gl = drawable.getGL().getGL4();
// Delete the program
gl.glDeleteProgram(program.name);
// Delete the vertex array
gl.glDeleteVertexArrays(1, vertexArrayName);
// Delete the buffers
gl.glDeleteBuffers(Buffer.MAX, bufferNames);
gl.glDeleteTextures(1, textureNames);
}
// KeyListener.keyPressed implementation
@Override
public void keyPressed(KeyEvent e) {
// Destroy the window if the escape key is pressed
if (e.getKeyCode() == KeyEvent.VK_ESCAPE) {
new Thread(() -> {
window.destroy();
}).start();
}
}
// KeyListener.keyPressed implementation
@Override
public void keyReleased(KeyEvent e) {
}
// Function for initializing OpenGL debugging
private void initDebug(GL4 gl) {
// Register a new debug listener
window.getContext().addGLDebugListener(new GLDebugListener() {
// Output any messages to standard out
@Override
public void messageSent(GLDebugMessage event) {
System.out.println(event);
}
});
// Ignore all messages
gl.glDebugMessageControl(
GL_DONT_CARE,
GL_DONT_CARE,
GL_DONT_CARE,
0,
null,
false);
// Enable messages of high severity
gl.glDebugMessageControl(
GL_DONT_CARE,
GL_DONT_CARE,
GL_DEBUG_SEVERITY_HIGH,
0,
null,
true);
// Enable messages of medium severity
gl.glDebugMessageControl(
GL_DONT_CARE,
GL_DONT_CARE,
GL_DEBUG_SEVERITY_MEDIUM,
0,
null,
true);
}
// Function for initializing OpenGL buffers
private void initBuffers(GL4 gl) {
// Create a new float direct buffer for the vertex data
vertexData = createSphereVertices(0.5f, 16, 16);
FloatBuffer vertexBuffer = GLBuffers.newDirectFloatBuffer(vertexData);
// Create a new short direct buffer for the triangle indices
elementData = createSphereElements(16, 16);
ShortBuffer elementBuffer = GLBuffers.newDirectShortBuffer(elementData);
// Create a direct buffer for the light properties
FloatBuffer lightBuffer = GLBuffers.newDirectFloatBuffer(lightProperties);
// Create a direct buffer for the material properties
FloatBuffer materialBuffer = GLBuffers.newDirectFloatBuffer(materialProperties);
// Create a direct buffer for the camera properties
FloatBuffer cameraBuffer = GLBuffers.newDirectFloatBuffer(cameraProperties);
// Create the OpenGL buffers names
gl.glCreateBuffers(Buffer.MAX, bufferNames);
// Create and initialize a buffer storage for the vertex data
gl.glBindBuffer(GL_ARRAY_BUFFER, bufferNames.get(Buffer.VERTEX));
gl.glBufferStorage(GL_ARRAY_BUFFER, vertexBuffer.capacity() * Float.BYTES, vertexBuffer, 0);
gl.glBindBuffer(GL_ARRAY_BUFFER, 0);
// Create and initialize a buffer storage for the triangle indices
gl.glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, bufferNames.get(Buffer.ELEMENT));
gl.glBufferStorage(GL_ELEMENT_ARRAY_BUFFER, elementBuffer.capacity() * Short.BYTES, elementBuffer, 0);
gl.glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
// Retrieve the uniform buffer offset alignment minimum
IntBuffer uniformBufferOffset = GLBuffers.newDirectIntBuffer(1);
gl.glGetIntegerv(GL_UNIFORM_BUFFER_OFFSET_ALIGNMENT, uniformBufferOffset);
// Set the required bytes for the matrices in accordance to the uniform buffer offset alignment minimum
int globalBlockSize = Math.max(16 * 4 * 2, uniformBufferOffset.get(0));
int modelBlockSize = Math.max(16 * 4, uniformBufferOffset.get(0));
int lightBlockSize = Math.max(12 * Float.BYTES, uniformBufferOffset.get(0));
int materialBlockSize = Math.max(3 * Float.BYTES, uniformBufferOffset.get(0));
int cameraBlockSize = Math.max(3 * Float.BYTES, uniformBufferOffset.get(0));
// Create and initialize a named storage for the global matrices
gl.glBindBuffer(GL_UNIFORM_BUFFER, bufferNames.get(Buffer.GLOBAL_MATRICES));
gl.glBufferStorage(GL_UNIFORM_BUFFER, globalBlockSize, null, GL_MAP_WRITE_BIT | GL_MAP_PERSISTENT_BIT | GL_MAP_COHERENT_BIT);
gl.glBindBuffer(GL_UNIFORM_BUFFER, 0);
// Create and initialize a named storage for the model matrix
// NUMERO 1
gl.glBindBuffer(GL_UNIFORM_BUFFER, bufferNames.get(Buffer.MODEL_MATRIX1));
gl.glBufferStorage(GL_UNIFORM_BUFFER, modelBlockSize, null, GL_MAP_WRITE_BIT | GL_MAP_PERSISTENT_BIT | GL_MAP_COHERENT_BIT);
gl.glBindBuffer(GL_UNIFORM_BUFFER, 0);
// NUMERO 2
gl.glBindBuffer(GL_UNIFORM_BUFFER, bufferNames.get(Buffer.MODEL_MATRIX2));
gl.glBufferStorage(GL_UNIFORM_BUFFER, modelBlockSize, null, GL_MAP_WRITE_BIT | GL_MAP_PERSISTENT_BIT | GL_MAP_COHERENT_BIT);
gl.glBindBuffer(GL_UNIFORM_BUFFER, 0);
// NUMERO 3
gl.glBindBuffer(GL_UNIFORM_BUFFER, bufferNames.get(Buffer.MODEL_MATRIX3));
gl.glBufferStorage(GL_UNIFORM_BUFFER, modelBlockSize, null, GL_MAP_WRITE_BIT | GL_MAP_PERSISTENT_BIT | GL_MAP_COHERENT_BIT);
gl.glBindBuffer(GL_UNIFORM_BUFFER, 0);
// Create and initialize a named buffer storage for the light properties
gl.glBindBuffer(GL_UNIFORM_BUFFER, bufferNames.get(Buffer.LIGHT_PROPERTIES));
gl.glBufferStorage(GL_UNIFORM_BUFFER, lightBlockSize, lightBuffer, 0);
gl.glBindBuffer(GL_UNIFORM_BUFFER, 0);
// Create and initialize a named buffer storage for the material properties
gl.glBindBuffer(GL_UNIFORM_BUFFER, bufferNames.get(Buffer.MATERIAL_PROPERTIES));
gl.glBufferStorage(GL_UNIFORM_BUFFER, materialBlockSize, materialBuffer, 0);
gl.glBindBuffer(GL_UNIFORM_BUFFER, 0);
// Create and initialize a named buffer storage for the camera properties
gl.glBindBuffer(GL_UNIFORM_BUFFER, bufferNames.get(Buffer.CAMERA_PROPERTIES));
gl.glBufferStorage(GL_UNIFORM_BUFFER, cameraBlockSize, cameraBuffer, 0);
gl.glBindBuffer(GL_UNIFORM_BUFFER, 0);
// map the global matrices buffer into the client space
// NUMERO 1
globalMatricesPointer = gl.glMapNamedBufferRange(
bufferNames.get(Buffer.GLOBAL_MATRICES),
0,
16 * 4 * 2,
GL_MAP_WRITE_BIT | GL_MAP_PERSISTENT_BIT | GL_MAP_COHERENT_BIT | GL_MAP_INVALIDATE_BUFFER_BIT);
// NUMERO 2
modelMatrixPointer1 = gl.glMapNamedBufferRange(
bufferNames.get(Buffer.MODEL_MATRIX1),
0,
16 * 4,
GL_MAP_WRITE_BIT | GL_MAP_PERSISTENT_BIT | GL_MAP_COHERENT_BIT | GL_MAP_INVALIDATE_BUFFER_BIT);
// NUMERO 3
modelMatrixPointer2 = gl.glMapNamedBufferRange(
bufferNames.get(Buffer.MODEL_MATRIX2),
0,
16 * 4,
GL_MAP_WRITE_BIT | GL_MAP_PERSISTENT_BIT | GL_MAP_COHERENT_BIT | GL_MAP_INVALIDATE_BUFFER_BIT);
// NUMERO 4
modelMatrixPointer3 = gl.glMapNamedBufferRange(
bufferNames.get(Buffer.MODEL_MATRIX3),
0,
16 * 4,
GL_MAP_WRITE_BIT | GL_MAP_PERSISTENT_BIT | GL_MAP_COHERENT_BIT | GL_MAP_INVALIDATE_BUFFER_BIT);
}
// Function for initializing the vertex array
private void initVertexArray(GL4 gl) {
// Create a single vertex array object
gl.glCreateVertexArrays(1, vertexArrayName);
// Associate the vertex attributes in the vertex array object with the vertex buffer
gl.glVertexArrayAttribBinding(vertexArrayName.get(0), Semantic.Attr.POSITION, Semantic.Stream.A);
gl.glVertexArrayAttribBinding(vertexArrayName.get(0), Semantic.Attr.NORMAL, Semantic.Stream.A);
gl.glVertexArrayAttribBinding(vertexArrayName.get(0), Semantic.Attr.TEXCOORD, Semantic.Stream.A);
// Set the format of the vertex attributes in the vertex array object
gl.glVertexArrayAttribFormat(vertexArrayName.get(0), Semantic.Attr.POSITION, 3, GL_FLOAT, false, 0);
gl.glVertexArrayAttribFormat(vertexArrayName.get(0), Semantic.Attr.NORMAL, 3, GL_FLOAT, false, 3 * 4);
gl.glVertexArrayAttribFormat(vertexArrayName.get(0), Semantic.Attr.TEXCOORD, 2, GL_FLOAT, false, 6 * 4);
// Enable the vertex attributes in the vertex object
gl.glEnableVertexArrayAttrib(vertexArrayName.get(0), Semantic.Attr.POSITION);
gl.glEnableVertexArrayAttrib(vertexArrayName.get(0), Semantic.Attr.NORMAL);
gl.glEnableVertexArrayAttrib(vertexArrayName.get(0), Semantic.Attr.TEXCOORD);
// Bind the triangle indices in the vertex array object the triangle indices buffer
gl.glVertexArrayElementBuffer(vertexArrayName.get(0), bufferNames.get(Buffer.ELEMENT));
// Bind the vertex array object to the vertex buffer
gl.glVertexArrayVertexBuffer(vertexArrayName.get(0), Semantic.Stream.A, bufferNames.get(Buffer.VERTEX), 0, (3+3+2) * 4);
}
private void initTexture(GL4 gl) {
try {
// Load texture
TextureData textureData = TextureIO.newTextureData(glProfile, new File(textureFilename), false, TextureIO.JPG);
// Generate texture name
gl.glGenTextures(1, textureNames);
// Bind the texture
gl.glBindTexture(gl.GL_TEXTURE_2D, textureNames.get(0));
// Specify the format of the texture
gl.glTexImage2D(gl.GL_TEXTURE_2D,
0,
textureData.getInternalFormat(),
textureData.getWidth(),
textureData.getHeight(),
textureData.getBorder(),
textureData.getPixelFormat(),
textureData.getPixelType(),
textureData.getBuffer());
// Set the sampler parameters
gl.glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
gl.glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
gl.glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
gl.glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
// Generate mip maps
gl.glGenerateMipmap(GL_TEXTURE_2D);
// Deactivate texture
gl.glBindTexture(GL_TEXTURE_2D, 0);
}
catch (IOException io) {
io.printStackTrace();
}
}
private float[] createSphereVertices(float radius, int numH, int numV) {
// Variables needed for the calculations
float t1, t2;
float pi = (float)Math.PI;
float pi2 = (float)Math.PI*2f;
float d1 = pi2/numH;
float d2 = pi/numV;
// Allocate the data needed to store the necessary positions, normals and texture coordinates
int numVertices = (numH*(numV-1)+2);
int numFloats = (3+3+2);
float[] data = new float[numVertices*numFloats];
data[0] = 0f; data[1] = radius; data[2] = 0f;
data[3] = 0f; data[4] = 1f; data[5] = 0f;
data[6] = 0.5f; data[7] = 1f;
for (int j=0; j<numV-1; j++) {
for (int i=0; i<numH; i++) {
// Position
data[(j*numH+i+1)*numFloats] = radius*(float)(Math.sin(i*d1)*Math.sin((j+1)*d2));
data[(j*numH+i+1)*numFloats+1] = radius*(float)Math.cos((j+1)*d2);
data[(j*numH+i+1)*numFloats+2] = radius*(float)(Math.cos(i*d1)*Math.sin((j+1)*d2));
// Normal
data[(j*numH+i+1)*numFloats+3] = (float)(Math.sin(i*d1)*Math.sin((j+1)*d2));
data[(j*numH+i+1)*numFloats+4] = (float)Math.cos((j+1)*d2);
data[(j*numH+i+1)*numFloats+5] = (float)(Math.cos(i*d1)*Math.sin((j+1)*d2));
// UV
data[(j*numH+i+1)*numFloats+6] = (float)(Math.asin(data[(j*numH+i+1)*numFloats+3])/Math.PI) + 0.5f;
data[(j*numH+i+1)*numFloats+7] = (float)(Math.asin(data[(j*numH+i+1)*numFloats+4])/Math.PI) + 0.5f;
}
}
data[(numVertices-1)*numFloats] = 0f; data[(numVertices-1)*numFloats+1] = -radius; data[(numVertices-1)*numFloats+2] = 0f;
data[(numVertices-1)*numFloats+3] = 0f; data[(numVertices-1)*numFloats+4] = -1f; data[(numVertices-1)*numFloats+5] = 0f;
data[(numVertices-1)*numFloats+6] = 0.5f; data[(numVertices-1)*numFloats+7] = 0f;
return data;
}
private short[] createSphereElements(int numH, int numV) {
// Allocate the data needed to store the necessary elements
int numTriangles = (numH*(numV-1)*2);
short[] data = new short[numTriangles*3];
for (int i=0; i<numH; i++) {
data[i*3] = 0; data[i*3+1] = (short)(i+1); data[i*3+2] = (short)((i+1)%numH+1);
}
for (int j=0; j<numV-2; j++) {
for (int i=0; i<numH; i++) {
data[((j*numH+i)*2+numH)*3] = (short)(j*numH+i+1);
data[((j*numH+i)*2+numH)*3+1] = (short)((j+1)*numH+i+1);
data[((j*numH+i)*2+numH)*3+2] = (short)((j+1)*numH+(i+1)%numH+1);
data[((j*numH+i)*2+numH)*3+3] = (short)((j+1)*numH+(i+1)%numH+1);
data[((j*numH+i)*2+numH)*3+4] = (short)(j*numH+(i+1)%numH+1);
data[((j*numH+i)*2+numH)*3+5] = (short)(j*numH+i+1);
}
}
int trianglIndex = (numTriangles-numH);
int vertIndex = (numV-2)*numH+1;
for (short i=0; i<numH; i++) {
data[(trianglIndex+i)*3] = (short)(vertIndex+i);
data[(trianglIndex+i)*3+1] = (short)((numH*(numV-1)+1));
data[(trianglIndex+i)*3+2] = (short)(vertIndex+(i+1)%numH);
}
return data;
}
// Private class representing a vertex program
private class Program {
// The name of the program
public int name = 0;
// Constructor
public Program(GL4 gl, String root, String vertex, String fragment) {
// Instantiate a complete vertex shader
ShaderCode vertShader = ShaderCode.create(gl, GL_VERTEX_SHADER, this.getClass(), root, null, vertex,
"vert", null, true);
// Instantiate a complete fragment shader
ShaderCode fragShader = ShaderCode.create(gl, GL_FRAGMENT_SHADER, this.getClass(), root, null, fragment,
"frag", null, true);
// Create the shader program
ShaderProgram shaderProgram = new ShaderProgram();
// Add the vertex and fragment shader
shaderProgram.add(vertShader);
shaderProgram.add(fragShader);
// Initialize the program
shaderProgram.init(gl);
// Store the program name (nonzero if valid)
name = shaderProgram.program();
// Compile and link the program
shaderProgram.link(gl, System.out);
}
}
// Interface for creating final static variables for defining the buffers
private interface Buffer {
int VERTEX = 0;
int ELEMENT = 1;
int GLOBAL_MATRICES = 2;
int MODEL_MATRIX1 = 3;
int MODEL_MATRIX2 = 4;
int MODEL_MATRIX3 = 5;
int LIGHT_PROPERTIES = 6;
int MATERIAL_PROPERTIES = 7;
int CAMERA_PROPERTIES = 8;
int MAX = 9;
}
// Private class to provide a semantic interface between Java and GLSL
private static class Semantic {
public interface Attr {
int POSITION = 0;
int NORMAL = 1;
int TEXCOORD = 2;
}
public interface Uniform {
int TRANSFORM0 = 1;
int TRANSFORM1 = 2;
int LIGHT0 = 3;
int MATERIAL = 4;
int CAMERA = 5;
}
public interface Stream {
int A = 0;
}
}
}
You need a texture object for each texture. For this you have to create a container with the proper size.
private IntBuffer textureNames = GLBuffers.newDirectIntBuffer( noOfTextures );
and you have to create the texture objects and you have to load the textures:
gl.glGenTextures( noOfTextures , textureNames);
for (int i=0; i<noOfTextures; i++) {
TextureData textureData = TextureIO.newTextureData(glProfile,
new File( textureFilename[i] ), false, TextureIO.JPG);
gl.glBindTexture(gl.GL_TEXTURE_2D, textureNames.get(i));
gl.glTexImage2D( ..... );
.....
}
Finally you have to bind the proper texture right before you draw the mesh:
gl.glBindTexture(gl.GL_TEXTURE_2D, textureNames.get( texture_index1 ));
gl.glDrawElements( ..... );
.....
gl.glBindTexture(gl.GL_TEXTURE_2D, textureNames.get( texture_index2 ));
gl.glDrawElements( ..... );
Take the number of generated textures into account when you delete them:
gl.glDeleteTextures( noOfTextures , textureNames);
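Applied to the two files already in the question (sun.jpg and earth.jpg), initTexture could be reshaped roughly like this; this is only a sketch, and the filter, wrap and mipmap calls stay exactly as in the original:
String[] textureFilenames = { textureFilename, textureFilename2 };
textureNames = GLBuffers.newDirectIntBuffer(textureFilenames.length);
gl.glGenTextures(textureFilenames.length, textureNames);
for (int i = 0; i < textureFilenames.length; i++) {
    try {
        TextureData textureData = TextureIO.newTextureData(glProfile,
                new File(textureFilenames[i]), false, TextureIO.JPG);
        gl.glBindTexture(GL_TEXTURE_2D, textureNames.get(i));
        gl.glTexImage2D(GL_TEXTURE_2D, 0, textureData.getInternalFormat(),
                textureData.getWidth(), textureData.getHeight(), textureData.getBorder(),
                textureData.getPixelFormat(), textureData.getPixelType(), textureData.getBuffer());
        // same glTexParameteri and glGenerateMipmap calls as in the original initTexture
        gl.glBindTexture(GL_TEXTURE_2D, 0);
    } catch (IOException io) {
        io.printStackTrace();
    }
}
In display(), bind textureNames.get(0) right before the first glDrawElements and textureNames.get(1) before the second, and pass textureFilenames.length to glDeleteTextures in dispose().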
I am a little bit desperate here.
I am trying to update/refactor existing code written in legacy OpenGL to make use of the "modern way" of OpenGL 3.2+.
It is written in Java with LWJGL. I already stripped away most of the functionality to test the basic setup. For me, at the moment, it is really just about setting up the VBO with vertices loaded from an OBJ file and rendering it. My problem is that the display window stays empty. If it would just display something, I would be really happy.
Maybe you guys can help me figure out what I am missing here.
public class Mobile {
private final String texturePath = "../CGSS15Ex3MobileDS/dataEx3/Textures";
private int
width = 1200,
height = 800,
fps = 0,
cameraDist = 2000,
fillMode = GL_LINE,
ticksPerSecond = 60,
frameCounter = 0,
vaoId,
vboId,
vboiID,
pId,
vsId,
fsId;
private long
time,
lastTime,
lastFPS,
lastKeySpace,
frameCounterTime,
avgTime = 0;
private float
dx = 0f, // mouse x distance
dy = 0f, // mouse y distance
diffTime = 0f, // frame length
mouseSensitivity = 0.5f,
movementSpeed = 800.0f; // move 10 units per second.
private Fork fork;
private CameraController camera;
FloatBuffer kugelBuff, indexBuff;
int kugelVertCount;
static LinkedList<Integer> textureIDs = new LinkedList<>();
public Mobile() {
run();
}
private void run() {
init();
while (!exit()) {
update();
draw();
updateFPS();
}
fini();
}
private void init() {
// OpenGL Setup
// create display
try {
PixelFormat pixelFormat = new PixelFormat();
ContextAttribs contextAtrributes = new ContextAttribs(3, 2)
.withProfileCore(true)
.withForwardCompatible(true);
Display.setDisplayMode(new DisplayMode(width, height));
Display.setTitle("Mobile by Aaron Scheu");
Display.create(pixelFormat, contextAtrributes);
GL11.glClearColor(0.3f, 0.3f, 0.3f, 0f);
GL11.glViewport(0, 0, width, height);
} catch (LWJGLException e) {
e.printStackTrace();
System.exit(-1);
}
// setup scene //
setupSphere();
setupShaders();
setupTex();
// set Timer
frameCounterTime = lastFPS = getTime();
System.out.println("Start timer ...");
}
private void setupTex() {
for (String file : getTextureFiles(texturePath)) {
try {
TextureReader.Texture texture = TextureReader.readTexture(file);
textureIDs.add(glGenTextures());
GL13.glActiveTexture(GL13.GL_TEXTURE0);
GL11.glBindTexture(GL11.GL_TEXTURE_2D, textureIDs.getLast());
// Upload tex and generate mipmap for scaling
glTexImage2D(
GL_TEXTURE_2D, 0, GL_RGB, texture.getWidth(), texture.getHeight(), 0,
GL_RGB, GL_UNSIGNED_BYTE, texture.getPixels()
);
GL30.glGenerateMipmap(GL11.GL_TEXTURE_2D);
// Setup the ST coordinate system
GL11.glTexParameteri(GL11.GL_TEXTURE_2D, GL11.GL_TEXTURE_WRAP_S, GL11.GL_REPEAT);
GL11.glTexParameteri(GL11.GL_TEXTURE_2D, GL11.GL_TEXTURE_WRAP_T, GL11.GL_REPEAT);
// Setup what to do when the texture has to be scaled
GL11.glTexParameteri(GL11.GL_TEXTURE_2D, GL11.GL_TEXTURE_MAG_FILTER,
GL11.GL_NEAREST);
GL11.glTexParameteri(GL11.GL_TEXTURE_2D, GL11.GL_TEXTURE_MIN_FILTER,
GL11.GL_LINEAR_MIPMAP_LINEAR);
} catch(IOException e) {
System.out.println(e);
}
}
}
private void setupShaders() {
// Load the vertex shader
// vsId = GLDrawHelper.compileShader("../CGSS15Ex3MobileDS/dataEx3/Shader/phong_vertex.glsl", GL20.GL_VERTEX_SHADER);
vsId = GLDrawHelper.compileShader("shader/vert_shader.glsl", GL20.GL_VERTEX_SHADER);
// Load the fragment shader
// fsId = GLDrawHelper.compileShader("../CGSS15Ex3MobileDS/dataEx3/Shader/phong_fragment.glsl", GL20.GL_FRAGMENT_SHADER);
fsId = GLDrawHelper.compileShader("shader/frac_shader.glsl", GL20.GL_FRAGMENT_SHADER);
// Create a new shader program that links both shaders
pId = GL20.glCreateProgram();
GL20.glAttachShader(pId, vsId);
GL20.glAttachShader(pId, fsId);
// Bind shader data to vbo attribute list
// GL20.glBindAttribLocation(pId, 0, "vert_in");
// GL20.glBindAttribLocation(pId, 1, "col_in");
// GL20.glBindAttribLocation(pId, 2, "tex0_in");
// GL20.glBindAttribLocation(pId, 3, "norm_in");
// Test Shader
GL20.glBindAttribLocation(pId, 0, "in_Position");
GL20.glBindAttribLocation(pId, 1, "in_Color");
GL20.glBindAttribLocation(pId, 2, "in_TextureCoord");
GL20.glLinkProgram(pId);
GL20.glValidateProgram(pId);
}
private void setupSphere() {
Model sphere = null;
try {
sphere = OBJLoader.loadModel(new File("sphere.obj"));
} catch (IOException e) {
e.printStackTrace();
Display.destroy();
System.exit(1);
}
kugelBuff = GLDrawHelper.directFloatBuffer(sphere.getVVVNNNTT());
indexBuff = GLDrawHelper.directFloatBuffer(sphere.getVertIndices());
kugelVertCount = sphere.getVertCount();
// Create a new Vertex Array Object in memory and select it (bind)
vaoId = GL30.glGenVertexArrays();
GL30.glBindVertexArray(vaoId);
// Create a new Vertex Buffer Object in memory and select it (bind)
vboId = GL15.glGenBuffers();
GL15.glBindBuffer(GL15.GL_ARRAY_BUFFER, vboId);
GL15.glBufferData(GL15.GL_ARRAY_BUFFER, kugelBuff, GL15.GL_STATIC_DRAW);
// Attribute Pointer - list id, size, type, normalize, sprite, offset
GL20.glVertexAttribPointer(0, 3, GL11.GL_FLOAT, false, 8*4, 0); // Vertex
// GL20.glVertexAttribPointer(1, 3, GL11.GL_FLOAT, false, 3, 0); // Color
GL20.glVertexAttribPointer(2, 2, GL11.GL_FLOAT, false, 8*4, 6*4); // UV Tex
// GL20.glVertexAttribPointer(3, 3, GL11.GL_FLOAT, false, 8*4, 3*4); // Normals
GL15.glBindBuffer(GL15.GL_ARRAY_BUFFER, 0);
// Deselect (bind to 0) the VAO
GL30.glBindVertexArray(0);
// Create a new VBO for the indices and select it (bind) - INDICES
vboiID = GL15.glGenBuffers();
GL15.glBindBuffer(GL15.GL_ELEMENT_ARRAY_BUFFER, vboiID);
GL15.glBufferData(GL15.GL_ELEMENT_ARRAY_BUFFER, indexBuff, GL15.GL_STATIC_DRAW);
GL15.glBindBuffer(GL15.GL_ELEMENT_ARRAY_BUFFER, 0);
}
private void update() {
// limit framerate
// Display.sync(ticksPerSecond);
// get time
time = getTime();
diffTime = (time - lastTime)/1000.0f;
lastTime = time;
// Distance mouse has been moved
dx = Mouse.getDX();
dy = Mouse.getDY();
// toggle wireframe
if(Keyboard.isKeyDown(Keyboard.KEY_SPACE)) {
if (time - lastKeySpace > 100) {
fillMode = fillMode == GL_FILL ? GL_LINE : GL_FILL;
glPolygonMode(GL_FRONT_AND_BACK, fillMode);
}
lastKeySpace = time;
}
// mouse control
camera.yaw(dx * mouseSensitivity);
camera.pitch(dy * mouseSensitivity);
// WASD control
if (Keyboard.isKeyDown(Keyboard.KEY_W)) {
camera.walkForward(movementSpeed * diffTime);
}
if (Keyboard.isKeyDown(Keyboard.KEY_S)) {
camera.walkBackwards(movementSpeed * diffTime);
}
if (Keyboard.isKeyDown(Keyboard.KEY_A)) {
camera.strafeLeft(movementSpeed * diffTime);
}
if (Keyboard.isKeyDown(Keyboard.KEY_D)) {
camera.strafeRight(movementSpeed * diffTime);
}
}
private boolean exit() {
return Display.isCloseRequested() || Keyboard.isKeyDown(Keyboard.KEY_ESCAPE);
}
// runner is finished, clean up
private void fini() {
// glDisable(GL_DEPTH_BITS);
// Delete all textures
textureIDs.stream().forEach(GL11::glDeleteTextures);
// Delete the shaders
GL20.glUseProgram(0);
GL20.glDetachShader(pId, vsId);
GL20.glDetachShader(pId, fsId);
GL20.glDeleteShader(vsId);
GL20.glDeleteShader(fsId);
GL20.glDeleteProgram(pId);
// Select the VAO
GL30.glBindVertexArray(vaoId);
// Disable the VBO index from the VAO attributes list
GL20.glDisableVertexAttribArray(0);
GL20.glDisableVertexAttribArray(1);
// Delete the vertex VBO
GL15.glBindBuffer(GL15.GL_ARRAY_BUFFER, 0);
GL15.glDeleteBuffers(vboId);
// Delete the index VBO
// GL15.glBindBuffer(GL15.GL_ELEMENT_ARRAY_BUFFER, 0);
// GL15.glDeleteBuffers(vboiId);
// Delete the VAO
GL30.glBindVertexArray(0);
GL30.glDeleteVertexArrays(vaoId);
Display.destroy();
}
private void updateFPS() {
long time = getTime();
String title;
if (time - lastFPS > 1000) {
// Display.setTitle("FPS: " + fps);
title = "FPS: " + fps + " || avg time per frame: " + (avgTime != 0 ? avgTime/1000f : "-/-") + " ms";
Display.setTitle(title);
fps = 0;
lastFPS += 1000;
}
fps++;
// Frame Count over 1000
if (frameCounter == 1000) {
avgTime = time - frameCounterTime;
// System.out.println("Time for 1000 frames: " + avgTime + " ms.");
frameCounter = 0;
frameCounterTime = time;
}
frameCounter++;
}
private long getTime() {
return (Sys.getTime() * 1000 / Sys.getTimerResolution());
}
private void draw() {
GL11.glClear(GL11.GL_COLOR_BUFFER_BIT);
GL20.glUseProgram(pId);
// Bind the texture
GL13.glActiveTexture(GL13.GL_TEXTURE0);
GL11.glBindTexture(GL11.GL_TEXTURE_2D, textureIDs.get(0));
// Bind to the VAO that has all the information about the vertices
GL30.glBindVertexArray(vaoId);
GL20.glEnableVertexAttribArray(0);
// GL20.glEnableVertexAttribArray(1);
GL20.glEnableVertexAttribArray(2);
GL20.glEnableVertexAttribArray(3);
// Bind to the index VBO that has all the information about the order of the vertices
GL15.glBindBuffer(GL15.GL_ELEMENT_ARRAY_BUFFER, vboiID);
// Draw the vertices
GL11.glDrawElements(GL11.GL_TRIANGLES, kugelVertCount, GL11.GL_UNSIGNED_BYTE, 0);
// Put everything back to default (deselect)
GL15.glBindBuffer(GL15.GL_ELEMENT_ARRAY_BUFFER, 0);
GL20.glDisableVertexAttribArray(0);
// GL20.glDisableVertexAttribArray(1);
GL20.glDisableVertexAttribArray(2);
GL20.glDisableVertexAttribArray(3);
GL30.glBindVertexArray(0);
GL20.glUseProgram(0);
Display.update();
}
private static String[] getTextureFiles(String directory) {
File pathfile = new File(directory);
File[] files = pathfile.listFiles( (File dir, String name) ->
name.endsWith(".jpg") || name.endsWith(".png")
);
return Arrays.stream(files).map(File::toString).toArray(String[]::new);
}
public static void main(String[] args) {
new Mobile();
}
}
Sorry for the code mess. Maybe this is more readable:
https://codeshare.io/1SEQK
Don't be desperate, amaridev.
When you can't get anything rendered, you have in general two options:
start from something basic and working (like this hello triangle of mine; it's JOGL but you can port it to LWJGL very easily) and build on top of that
debug your application step by step
In case you decide on the second one, you may want to first disable lighting, any matrix multiplication and any texturing:
check your render target setup by testing whether you see the clear color you set
check if glViewport and the fragment shader work by running a hardcoded vertex shader with:
gl_Position = vec4(4.0 * float(gl_VertexID % 2) - 1.0, 4.0 * float(gl_VertexID / 2) - 1.0, 0.0, 1.0);
like here, no matrices and a simple
glDrawArrays(GL_TRIANGLES, 0, 3);
you may also want to hardcode the color output (a minimal version of this sanity-check draw is sketched at the end of this list)
check if you are reading valid vertex attributes by outputting each of them in turn as the color in the fragment shader
out Block
{
vec4 color;
} outBlock;
...
outBlock.color = position;
in Block
{
vec4 color;
} inBlock;
outputColor = inBlock.color;
enable matrix multiplication and pass a simple hardcoded triangle to check if any matrix (first proj, then view and finally also model) works as expected
start fetching from your real sphere geometry
start fetching color
enable texturing again and start fetching texture coordinates again
output light and materials values to output color and then enable them back as well
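As a concrete starting point for those checks, a stripped-down draw() along these lines is enough to tell whether the basic pipeline works (a sketch only; it assumes the hardcoded gl_VertexID vertex shader above has been linked into pId):
GL11.glClear(GL11.GL_COLOR_BUFFER_BIT | GL11.GL_DEPTH_BUFFER_BIT);
GL20.glUseProgram(pId);
// a VAO must still be bound in a core profile, even if no attributes are read
GL30.glBindVertexArray(vaoId);
// no buffers, no attributes: positions come from gl_VertexID in the vertex shader
GL11.glDrawArrays(GL11.GL_TRIANGLES, 0, 3);
GL30.glBindVertexArray(0);
GL20.glUseProgram(0);
Display.update();
If that fills the window with the hardcoded color, move on to the attribute and matrix checks one by one.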
I wanted to translate the camera in world space. It doesn't move as I expected. My algorithm is:
First I translate the camera in view space.
The camera is always located at (0,0,0). Then I calculate the new camera location points in view space, and multiply those points by the inverse view matrix, so I thought I would get the camera location in world space.
Second I calculate the new view matrix.
Then I calculate the view matrix by using the setLookAtM function of OpenGL and set the new view matrix.
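For reference, the view-matrix update described in this second step would normally end up in something like the following (a minimal sketch of what eyeSettings() could do; the viewMatrix field and the up vector are assumptions, since they are not shown in the code):
// Rebuild the view matrix from the new eye and look-at points in world space.
Matrix.setLookAtM(viewMatrix, 0,
        eyeLocation[0], eyeLocation[1], eyeLocation[2],      // eye
        lookPosition[0], lookPosition[1], lookPosition[2],   // center
        0f, 1f, 0f);                                         // up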
But my problem is that the new locations can't be calculated; their values are NaN.
Here is my code:
private void pivotRotation(double angle,int[] pivot, double distance){
float[] temp = new float[4];
float[] temp2 = new float[4];
float[] extObj = new float[16];
float[] startPointsOnViewSpace = new float[4];
float[] newEyeOrigin = new float[4];
float[] newLookOrigin = new float[4];
int[] viewport = new int[16];
viewport[0] = 0;
viewport[1] = 0;
viewport[2] = (int)myRenderer.screenWidth;
viewport[3] = (int)myRenderer.screenHeight;
int[] newEye = new int[2];
newEye[0] = (int)((pivot[0] + ((viewport[2]/2) - pivot[0]) * Math.cos(angle) - ((viewport[3]/2) - pivot[1]) * Math.sin(angle)));
newEye[1] = (int)((pivot[1] + ((viewport[2]/2) - pivot[0]) * Math.sin(angle) + ((viewport[3]/2) - pivot[1]) * Math.cos(angle)));
GLU.gluUnProject((float)newEye[0], (float)(viewport[3] - newEye[1]), 0.0f, myRenderer.modelViewMatrix, 0, myRenderer.projectionMatrix, 0, viewport, 0, extObj, 0);
Matrix.multiplyMV(startPointsOnViewSpace, 0, myRenderer.modelViewMatrix, 0, extObj, 0);
// new camera location in viewspace
startPointsOnViewSpace[0] /= startPointsOnViewSpace[3];
startPointsOnViewSpace[1] /= startPointsOnViewSpace[3];
startPointsOnViewSpace[2] /= startPointsOnViewSpace[3];
newEyeOrigin[0] = startPointsOnViewSpace[0];
newEyeOrigin[1] = startPointsOnViewSpace[1];
newEyeOrigin[2] += distance / pinchParameter;
newEyeOrigin[3] = 1.0f;
newLookOrigin[0] = newEyeOrigin[0];
newLookOrigin[1] = newEyeOrigin[1];
newLookOrigin[2] = newEyeOrigin[2] - 1.0f;
newLookOrigin[3] = 1.0f;
// temp[0], temp[1], temp[2] are new camera location in the world space. i think
Matrix.multiplyMV(temp, 0, myRenderer.inverseViewMatrix, 0, newEyeOrigin, 0);
Matrix.multiplyMV(temp2, 0, myRenderer.inverseViewMatrix, 0, newLookOrigin, 0);
for(int i = 0; i < 3; i++){
myRenderer.eyeLocation[i] = temp[i];
myRenderer.lookPosition[i] = temp2[i];
}
myRenderer.eyeSettings();