In this code I try to render a simple triangle on the screen, but it only shows the red background. I have tried everything I know to debug it, but I just can't understand why it doesn't work.
I hope somebody can help me, thanks in advance.
public float vertices[] = new float[]
{
    -0.5f, -0.5f, 0.0f, 1.0f,  // vertex 1 position
     0.0f,  1.0f, 0.0f, 1.0f,  // vertex 1 color
     0.0f,  0.5f, 0.0f, 1.0f,  // vertex 2 position
     0.0f,  1.0f, 0.0f, 1.0f,  // vertex 2 color
     0.5f, -0.5f, 0.0f, 1.0f,  // vertex 3 position
     0.0f,  1.0f, 0.0f, 1.0f   // vertex 3 color
};
public TEST()
{
}
public int start() throws IOException
{
glfwInit();
long window = glfwCreateWindow(1000, 1000, "HAHA", NULL, NULL);
glfwMakeContextCurrent(window);
glfwShowWindow(window);
GLFWKeyCallback keycallback = new GLFWKeyCallback()
{
@Override
public void invoke(long window, int key, int scancode, int action, int mods)
{
if(key==GLFW_KEY_ESCAPE&&action==GLFW_PRESS)
{
glfwSetWindowShouldClose(window, GLFW_TRUE);
}
}
};
glfwSetKeyCallback(window,keycallback );
GLFWErrorCallback errorcallback = new GLFWErrorCallback()
{
@Override
public void invoke(int error, long description)
{
System.out.println("ERROR CODE: " + error + " ERROR DESCRITPION: "+description);
}
};
glfwSetErrorCallback(errorcallback);
GL.createCapabilities();
glViewport(0, 0, 1000, 1000);
//////////////////////////
int VAO = glGenVertexArrays();
int VBO = glGenBuffers();
glBindVertexArray(VAO);
glBindBuffer(GL_ARRAY_BUFFER, VBO);
FloatBuffer buffer = FloatBuffer.allocate(vertices.length);
buffer.clear();
buffer.put(vertices);
buffer.flip();
glBufferData(GL_ARRAY_BUFFER, buffer, GL_STATIC_DRAW);
glVertexAttribPointer(0, 4, GL_FLOAT, false, 8, 0);
glEnableVertexAttribArray(0);
glVertexAttribPointer(1, 4, GL_FLOAT, false, 8, 4);
glEnableVertexAttribArray(1);
buffer.clear();
buffer=null;
System.gc();
glBindVertexArray(0);
///////////////////////
///////////////////////
LoadFile loader = new LoadFile("res/shader.vs", "res/shader.frag");
int vertexshader = glCreateShader(GL_VERTEX_SHADER);
int fragmentshader = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(vertexshader, loader.readVertex());
glShaderSource(fragmentshader, loader.readFragment());
glCompileShader(vertexshader);
glCompileShader(fragmentshader);
System.out.println(glGetShaderInfoLog(vertexshader));
System.out.println(glGetShaderInfoLog(fragmentshader));
int program = glCreateProgram();
glAttachShader(program, vertexshader);
glAttachShader(program, fragmentshader);
glLinkProgram(program);
glDeleteShader(vertexshader);
glDeleteShader(fragmentshader);
System.out.println(glGetProgramInfoLog(program));
///////////////////////////
boolean running=true;
while(running&&glfwWindowShouldClose(window)==GLFW_FALSE)
{
glfwPollEvents();
glClear(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT);
glClearColor(1.0f, 0.0f, 0.0f, 1.0f);
glUseProgram(program);
glBindVertexArray(VAO);
glDrawArrays(GL_TRIANGLES, 0, 3);
glBindVertexArray(0);
glUseProgram(0);
glfwSwapInterval(1);
glfwSwapBuffers(window);
}
System.out.println(glGetError());
glfwTerminate();
return 0;
}
My vertex shader
#version 330 core
layout (location = 0) in vec4 position;
layout (location = 1) in vec4 colorin;
out vec4 colorout;
void main()
{
gl_Position = position;
colorout=colorin;
}
My fragment shader
#version 330 core
out vec4 color;
in vec4 colorout;
void main()
{
color = colorout;
}
EDIT: I just saw that this was a one-day-old question.. but oh well..
I have my suspicion here:
FloatBuffer buffer = FloatBuffer.allocate(vertices.length);
buffer.clear();
buffer.put(vertices);
buffer.flip();
glBufferData(GL_ARRAY_BUFFER, buffer, GL_STATIC_DRAW);
Have you tried replacing this with BufferUtils from the LWJGL library itself? It would turn out to be something like this:
FloatBuffer buff = BufferUtils.createFloatBuffer(vertices.length);
buff.put(vertices);
buff.flip();
Or, if you want the manual one..
ByteBuffer byteBuffer = ByteBuffer.allocateDirect(vertices.length * Float.BYTES);
byteBuffer.order(ByteOrder.nativeOrder());
FloatBuffer floatBuffer = byteBuffer.asFloatBuffer();
floatBuffer.put(vertices);
floatBuffer.flip();
Stride and offsets are specified in bytes, so the stride for 8 floats representing 2 vec4s should be 32, and the offset for the color should be 16.
glVertexAttribPointer(0, 4, GL_FLOAT, false, 32, 0);
glVertexAttribPointer(1, 4, GL_FLOAT, false, 32, 16);
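Putting the two fixes together, your upload and attribute setup would look roughly like this (a sketch; VBO and vertices are the names from your code):
FloatBuffer buffer = BufferUtils.createFloatBuffer(vertices.length);
buffer.put(vertices);
buffer.flip();
glBindBuffer(GL_ARRAY_BUFFER, VBO);
glBufferData(GL_ARRAY_BUFFER, buffer, GL_STATIC_DRAW);
// 8 floats (2 vec4s) per vertex, so the stride is 8 * Float.BYTES = 32 bytes
glVertexAttribPointer(0, 4, GL_FLOAT, false, 8 * Float.BYTES, 0);               // position
glEnableVertexAttribArray(0);
glVertexAttribPointer(1, 4, GL_FLOAT, false, 8 * Float.BYTES, 4 * Float.BYTES); // color
glEnableVertexAttribArray(1);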
I have been learning OpenGL recently and have got a bit confused about shaders and VBOs.
Basically, I am confused about how my vertex shader knows about the colour and position data in my VBOs.
To my knowledge, glBindAttribLocation(program, attribute, variable); is what sets the attribute in the shader, but when I comment out that code, the triangle still renders fine with its colours.
Here is my code:
public static void main(String[] args) {
String vertexSource = "#version 400 core\n"
+ "in vec3 position;\n"
+ "in vec3 colour;\n"
+ "out vec3 vertexColour;\n"
+ "uniform mat4 model;\n"
+ "uniform mat4 view;\n"
+ "uniform mat4 projection;\n"
+ "void main() {\n"
+ "vertexColour = colour;\n"
+ "mat4 mvp = projection * view * model;\n"
+ "gl_Position = vec4(position, 1.0);\n"
+ "}\n";
String fragmentSource = "#version 400 core\n"
+ "in vec3 vertexColour;\n"
+ "out vec4 colours;\n"
+ "void main()\n"
+ "{\n"
+ " colours = vec4(vertexColour, 1);\n"
+ "}";
glfwSetErrorCallback(errorCallBack);
if (!glfwInit()) {
throw new IllegalStateException("Unable to initialize GLFW");
}
glfwDefaultWindowHints();
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GLFW_TRUE);
glfwWindowHint(GLFW_VISIBLE, GL_TRUE);
glfwWindowHint(GLFW_RESIZABLE, GL_FALSE);
long window = glfwCreateWindow(640, 480, "my window!", 0, 0);
glfwSetKeyCallback(window, keyCallback);
if (window == 0) {
glfwTerminate();
throw new RuntimeException("Failed to create the GLFW window");
}
glfwMakeContextCurrent(window);
GL.createCapabilities();
float[] vertexPoints = {
0f,0.5f,0f,
-0.5f,-0.5f,0f,
0.5f,-0.5f,0f
};
float[] colours = {
0.1f,0.5f,0.5f,
0.3f,0.7f,0.9f,
0.4f,0.9f,0.1f
};
ArrayList<Integer> vaos = new ArrayList<Integer>();
ArrayList<Integer> vbos = new ArrayList<Integer>();
int vaoID = glGenVertexArrays();
vaos.add(vaoID);
glBindVertexArray(vaoID);
FloatBuffer vertices = BufferUtils.createFloatBuffer(vertexPoints.length);
vertices.put(vertexPoints);
vertices.flip();
int vboID = GL33.glGenBuffers();
vbos.add(vboID);
glBindBuffer(GL_ARRAY_BUFFER, vboID);
glBufferData(GL_ARRAY_BUFFER, vertices, GL_STATIC_DRAW);
//add it to the vao at position 0, size 3, with data types of float, normalised = false,
glVertexAttribPointer(0, 3, GL_FLOAT, false, 0, 0);
//unbind currently bound vbo
glBindBuffer(GL_ARRAY_BUFFER, 0);
//and one for the colour data
vboID = glGenBuffers();
glBindBuffer(GL_ARRAY_BUFFER, vboID);
glBufferData(GL_ARRAY_BUFFER, colours, GL_STATIC_DRAW);
glVertexAttribPointer(1, 3, GL_FLOAT, false, 0, 0);
glBindBuffer(GL_ARRAY_BUFFER, 0);
//finished with vao now unbind it
glBindVertexArray(0);
//vao and vbo stuff is now finished with
//now for the shader stuff
int vertexShaderID = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vertexShaderID, vertexSource);
glCompileShader(vertexShaderID);
if (!checkForSuccess(vertexShaderID)) {
glfwTerminate();
throw new IllegalStateException("vertex shader failed: " + glGetShaderInfoLog(vertexShaderID));
}
int fragmentShaderID = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fragmentShaderID, fragmentSource);
glCompileShader(fragmentShaderID);
if (!checkForSuccess(fragmentShaderID)) {
glfwTerminate();
throw new IllegalStateException("fragment shader failed: " + glGetShaderInfoLog(fragmentShaderID));
}
//shader program
int shaderProgramID = glCreateProgram();
glAttachShader(shaderProgramID, vertexShaderID);
glAttachShader(shaderProgramID, fragmentShaderID);
glLinkProgram(shaderProgramID);
//error test
int status = glGetProgrami(shaderProgramID, GL_LINK_STATUS);
if (status != GL_TRUE) {
glfwTerminate();
throw new RuntimeException(glGetProgramInfoLog(shaderProgramID));
}
glUseProgram(shaderProgramID);
//setting position attribute in the shader:
int positionAttribute = glGetAttribLocation(shaderProgramID, "position");
glEnableVertexAttribArray(positionAttribute);
glBindAttribLocation(shaderProgramID, positionAttribute, "position");
//setting colour attribute in the shader:
int colourAttribute = glGetAttribLocation(shaderProgramID, "colour");
glEnableVertexAttribArray(colourAttribute);
glBindAttribLocation(shaderProgramID, colourAttribute, "colour");
glClearColor(0.5f,0.5f,0.5f,1f);
//this will create a wire frame view
//glPolygonMode(GL_FRONT_AND_BACK, GL_LINE);
glfwShowWindow(window); //optional
while (!glfwWindowShouldClose(window)) {
double time = glfwGetTime();
glfwPollEvents();
glClear(GL_COLOR_BUFFER_BIT);
glBindVertexArray(vaoID);
//enables the position attribute in the vertex shader
glEnableVertexAttribArray(0);
//enables the colour attribute in the vertex shader
glEnableVertexAttribArray(1);
glDrawArrays(GL_TRIANGLES, 0, vertexPoints.length / 3); // 3 floats per vertex
//disables the position and colour attribute in the vertex shader
glDisableVertexAttribArray(0);
glDisableVertexAttribArray(1);
glfwSwapBuffers(window);
}
//clear vaos and vbos
for (int vao : vaos) {
glDeleteVertexArrays(vao);
}
for (int vbo : vbos) {
glDeleteBuffers(vbo);
}
//delete shaders
glDetachShader(shaderProgramID, vertexShaderID);
glDetachShader(shaderProgramID, fragmentShaderID);
glDeleteShader(vertexShaderID);
glDeleteShader(fragmentShaderID);
glDeleteProgram(shaderProgramID);
Callbacks.glfwFreeCallbacks(window);
glfwDestroyWindow(window);
glfwTerminate();
}
I have provided my full code file on github (250 lines) here: https://github.com/OneEgg42/opengl
Would someone be so kind as to explain to me how this works?
Thanks in advance!
The relation between a VBO and a shader is not established by glBindAttribLocation.
The relevant lines are actually
glBindBuffer(GL_ARRAY_BUFFER, vboID);
glVertexAttribPointer(0, 3, GL_FLOAT, false, 0, 0);
where you tell OpenGL that the attribute with location 0 (the first parameter in the first glVertexAttribPointer call) should read the first three floats from vboID.
glBindAttribLocation is responsible for binding a certain attribute to a specific location. Since this has to be done before linking the program, your calls aren't doing anything in your code. Even more so because this code tells the attribute to use the location that it already has:
int positionAttribute = glGetAttribLocation(shaderProgramID, "position");
glEnableVertexAttribArray(positionAttribute);
glBindAttribLocation(shaderProgramID, positionAttribute, "position");
You basically query the location, and then assign the location again to the attribute.
Two side notes:
At the moment your code works by pure luck. You assume in the glVertexAttribPointer(0, 3, GL_FLOAT, false, 0, 0); line that the "position" attribute is at location 0, which it probably is as long as it is the first attribute written in the shader. Same for colors. It is very likely that your code breaks when you exchange the order of the two attribute definitions in your shader.
Either query the attribute location with glGetAttribLocation and use the result instead of 0 and 1, or use glBindAttribLocation before linking to make sure that the attributes are at the locations you expect.
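For example, a minimal sketch of the second option, reusing the names from your code (the binding calls must come before glLinkProgram to have any effect):
int shaderProgramID = glCreateProgram();
glAttachShader(shaderProgramID, vertexShaderID);
glAttachShader(shaderProgramID, fragmentShaderID);
// bind the locations before linking, so they are guaranteed to match the
// glVertexAttribPointer(0, ...) and glVertexAttribPointer(1, ...) calls in the VAO setup
glBindAttribLocation(shaderProgramID, 0, "position");
glBindAttribLocation(shaderProgramID, 1, "colour");
glLinkProgram(shaderProgramID);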
The calls to glEnableVertexAttribArray should be in the VAO setup code and not in the render loop.
Setup:
glBindBuffer(GL_ARRAY_BUFFER, vboID);
glBufferData(GL_ARRAY_BUFFER, vertices, GL_STATIC_DRAW);
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 3, GL_FLOAT, false, 0, 0);
Render:
glBindVertexArray(vaoID);
glDrawArrays(GL_TRIANGLES, 0, vertexPoints.length / 3);
I am relatively new to OpenGL, so this question might seem a bit trivial. I have this code in my Main.java, which is supposed to output a rectangle with the texture of the image on it:
float vertices[] = {
// positions // colors // texture coords
0.5f, 0.5f, 0.0f, 1.0f, 0.0f, 0.0f, 1.0f, 1.0f, // top right
0.5f, -0.5f, 0.0f, 0.0f, 1.0f, 0.0f, 1.0f, 0.0f, // bottom right
-0.5f, -0.5f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, // bottom left
-0.5f, 0.5f, 0.0f, 1.0f, 1.0f, 0.0f, 0.0f, 1.0f // top left
};
int indices[] = {
0, 1, 3, // first triangle
1, 2, 3 // second triangle
};
int VBO = glGenBuffers(), VAO = glGenVertexArrays(), EBO = glGenBuffers();
glBindVertexArray(VAO);
glBindBuffer(GL_ARRAY_BUFFER, VBO);
glBufferData(GL_ARRAY_BUFFER, vertices, GL_STATIC_DRAW);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, EBO);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, indices, GL_STATIC_DRAW);
// position attribute
glVertexAttribPointer(0, 3, GL_FLOAT, false, 8, 0);
glEnableVertexAttribArray(0);
// color attribute
glVertexAttribPointer(1, 3, GL_FLOAT, false, 8, 2);
glEnableVertexAttribArray(1);
// texture coord attribute
glVertexAttribPointer(2, 2, GL_FLOAT, false, 8, 5);
glEnableVertexAttribArray(2);
Texture texture = null;
try {
log.info("Loading texture");
texture = Texture.loadTexture("texture.png");
} catch (IOException e) {
log.severe("Texture loading failed due to IOException: " + e.getMessage());
e.printStackTrace();
}
while (!glfwWindowShouldClose(window))
{
glClear(GL_COLOR_BUFFER_BIT);
if(mainProgram != null) {
mainProgram.use();
}
glBindTexture(GL_TEXTURE_2D, texture.getId());
glBindVertexArray(VAO);
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, 0);
//...
}
My shaders are the following:
Vertex Shader:
#version 330 core
layout (location = 0) in vec3 aPos;
layout (location = 1) in vec3 aColor;
layout(location = 2) in vec2 aTexCoord;
out vec3 vertexColor;
out vec2 texCoord;
void main()
{
gl_Position = vec4(aPos , 1.0);
vertexColor = aColor;
texCoord = aTexCoord;
}
Fragment Shader:
#version 330 core
out vec4 FragColor;
in vec3 ourColor;
in vec2 TexCoord;
// texture sampler
uniform sampler2D texture1;
void main()
{
FragColor = texture(texture1, TexCoord);
}
The Texture class referenced above is the following:
public class Texture {
public static Texture loadTexture(String fileName) throws IOException{
//load png file
PNGDecoder decoder = new PNGDecoder(new java.io.FileInputStream(new File("C:/Users/Using/Absolute/Paths/For/Demonstration/texture.png")));
//create a byte buffer big enough to store RGBA values
ByteBuffer buffer = BufferUtils.createByteBuffer(4 * decoder.getWidth() * decoder.getHeight());
//decode
decoder.decode(buffer, decoder.getWidth() * 4, PNGDecoder.Format.RGBA);
//flip the buffer so its ready to read
buffer.flip();
//create a texture
int id = glGenTextures();
//bind the texture
glBindTexture(GL_TEXTURE_2D, id);
//tell opengl how to unpack bytes
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
//set the texture parameters, can be GL_LINEAR or GL_NEAREST
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
//upload texture
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, decoder.getWidth(), decoder.getHeight(), 0, GL_RGBA, GL_UNSIGNED_BYTE, buffer);
// Generate Mip Map
glGenerateMipmap(GL_TEXTURE_2D);
return new Texture(id);
}
private int id;
public Texture(int id){
this.id = id;
}
public int getId(){
return id;
}
}
Note that the path of the texture image is not the problem, as the code runs without any exceptions or compiler errors. The PNGDecoder I used can be found here.
The stride and offset arguments must be specified in bytes when defining the array of generic vertex attribute data with glVertexAttribPointer.
Furthermore, the uv coordinates are the 7th and 8th elements in the attribute tuple:
glVertexAttribPointer(0, 3, GL_FLOAT, false, 8, 0);      // before
glVertexAttribPointer(0, 3, GL_FLOAT, false, 8*4, 0);    // after
glVertexAttribPointer(1, 3, GL_FLOAT, false, 8, 2);      // before
glVertexAttribPointer(1, 3, GL_FLOAT, false, 8*4, 3*4);  // after
glVertexAttribPointer(2, 2, GL_FLOAT, false, 8, 5);      // before
glVertexAttribPointer(2, 2, GL_FLOAT, false, 8*4, 6*4);  // after
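Written with Float.BYTES instead of hard-coded byte counts, the corrected setup for this 8-float-per-vertex layout would be (a sketch, same layout as above):
int stride = 8 * Float.BYTES;                                           // 8 floats per vertex
glVertexAttribPointer(0, 3, GL_FLOAT, false, stride, 0);                // position
glVertexAttribPointer(1, 3, GL_FLOAT, false, stride, 3 * Float.BYTES);  // color
glVertexAttribPointer(2, 2, GL_FLOAT, false, stride, 6 * Float.BYTES);  // texture coords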
Even if this question was asked multiple times (I read all of that and no solution worked for me), I am trying to model a rectangle with LWJGL and OpenGL, but it crashes every time. Here are my PC stats:
AMD-Ryzen 1600x |
MSI Nvidia GTX 1060 (6GB) |
MSI x370 Carbon Pro Motherboard
I also tried this on an Intel setup with an i7 processor and an Nvidia Quadro K1000M, but I get the same error, which you can see in the following:
https://hastebin.com/ayiqiritov.makefile
My Drawing Method:
public void render(RawModel model){
GL30.glBindVertexArray(model.getVaoID());
GL20.glEnableVertexAttribArray(0);
GL11.glDrawArrays(GL11.GL_TRIANGLES, 0, model.getVertexCount());
GL20.glDisableVertexAttribArray(0);
GL30.glBindVertexArray(0);
}
In this class I create VAOs and the VBOs and store data into those:
private List<Integer> vaos = new ArrayList<Integer>();
private List<Integer> vbos = new ArrayList<Integer>();
public RawModel loadToVAO(float[] positions) {
int vaoID = createVAO();
storeDataInAttributeList(0, positions);
unbindVAO();
return new RawModel(vaoID, positions.length / 3);
}
public void cleanUp() {
for (int vao : vaos) {
GL30.glDeleteVertexArrays(vao);
}
for (int vbo : vbos) {
GL15.glDeleteBuffers(vbo);
}
}
private int createVAO() {
int vaoID = GL30.glGenVertexArrays();
vaos.add(vaoID);
GL30.glBindVertexArray(vaoID);
return vaoID;
}
private void storeDataInAttributeList(int attributeNumber, float[] data) {
int vboID = GL15.glGenBuffers();
vbos.add(vboID);
GL15.glBindBuffer(GL15.GL_ARRAY_BUFFER, vboID);
FloatBuffer buffer = storeDataInFloatBuffer(data);
GL15.glBufferData(GL15.GL_ARRAY_BUFFER, buffer, GL15.GL_STATIC_DRAW);
GL20.glVertexAttribPointer(attributeNumber, 3, GL11.GL_FLOAT, false, 0, 0);
GL15.glBindBuffer(GL15.GL_ARRAY_BUFFER, 0);
}
private void unbindVAO() {
GL30.glBindVertexArray(0);
}
private FloatBuffer storeDataInFloatBuffer(float[] data) {
FloatBuffer buffer = BufferUtils.createFloatBuffer(data.length);
buffer.put(data).position(0);
buffer.flip();
return buffer;
}
And my main Method:
public static void main(String[] args){
if(!glfwInit()){
throw new IllegalStateException("Failed");
}
System.out.println(GL11.glGetString(GL11.GL_VERSION));
glfwWindowHint(GLFW_VISIBLE, GLFW_FALSE);
GLFW.glfwWindowHint(GLFW.GLFW_CONTEXT_VERSION_MINOR, 3);
GLFW.glfwWindowHint(GLFW.GLFW_CONTEXT_VERSION_MAJOR, 4);
long window = GLFW.glfwCreateWindow(640, 480, "Hello World", 0, 0);
if(window == 0){
throw new IllegalStateException("Failed to create Window");
}
GLFWVidMode vidmode = glfwGetVideoMode(glfwGetPrimaryMonitor());
glfwSetWindowPos(window, (vidmode.width() - 640) / 2, (vidmode.height() - 480) / 2);
glfwShowWindow(window);
Loader loader = new Loader();
Renderer renderer = new Renderer();
float[] vertices = {
-0.5f, 0.5f, 0f,
-0.5f, -0.5f, 0f,
0.5f, -0.5f, 0f,
0.5f, -0.5f, 0f,
0.5f, 0.5f, 0f,
-0.5f, 0.5f, 0f
};
RawModel model = loader.loadToVAO(vertices);
while(!glfwWindowShouldClose(window)){
renderer.prepare();
renderer.render(model);
glfwPollEvents();
}
loader.cleanUp();
GLFW.glfwTerminate();
}
So I have already tried:
Updating the graphics card drivers, updating Java, updating Windows, setting up a new Eclipse, reinstalling Java and deleting .metadata in Eclipse.
Can anyone please help me?
According to the comment
I dont have implemented a shader yet
The state-of-the-art way of rendering in OpenGL would be to use a shader.
If you don't use a shader, then you have to define the array of vertex data with glVertexPointer. glVertexPointer specifies an array for the fixed-function vertex coordinate attribute. If you don't have a shader program, then you have to use the fixed-function pipeline.
private void storeDataInAttributeList(int attributeNumber, float[] data) {
int vboID = GL15.glGenBuffers();
vbos.add(vboID);
GL15.glBindBuffer(GL15.GL_ARRAY_BUFFER, vboID);
FloatBuffer buffer = storeDataInFloatBuffer(data);
GL15.glBufferData(GL15.GL_ARRAY_BUFFER, buffer, GL15.GL_STATIC_DRAW);
GL11.glVertexPointer( 3, GL11.GL_FLOAT, 0, 0 ); // <---------
GL15.glBindBuffer(GL15.GL_ARRAY_BUFFER, 0);
}
Furthermore, you have to enable the client-side capability for vertex coordinates with glEnableClientState(GL_VERTEX_ARRAY):
public void render(RawModel model){
GL30.glBindVertexArray(model.getVaoID());
GL11.glEnableClientState( GL11.GL_VERTEX_ARRAY ); // <---------
GL11.glDrawArrays(GL11.GL_TRIANGLES, 0, model.getVertexCount());
GL11.glDisableClientState( GL11.GL_VERTEX_ARRAY ); // <---------
GL30.glBindVertexArray(0);
}
Further note that you have to create the "GLCapabilities" instance and make the OpenGL bindings available for use before you call any OpenGL instruction like GL30.glGenVertexArrays(), and you have to ensure that the OpenGL context is current.
Call glfwMakeContextCurrent(window) and GL.createCapabilities() after creating the window and before any OpenGL instruction:
long window = GLFW.glfwCreateWindow(640, 480, "Hello World", 0, 0);
if(window == 0){
throw new IllegalStateException("Failed to create Window");
}
GLFWVidMode vidmode = glfwGetVideoMode(glfwGetPrimaryMonitor());
glfwSetWindowPos(window, (vidmode.width() - 640) / 2, (vidmode.height() - 480) / 2);
glfwMakeContextCurrent(window); // <-----
glfwShowWindow(window);
GL.createCapabilities(); // <-----
.....
Finally, you are missing glfwSwapBuffers in the render loop. glfwSwapBuffers swaps the front and back buffers of the specified window. In other, simpler words, it brings the buffer you rendered to onto the screen:
while(!glfwWindowShouldClose(window)){
renderer.prepare();
renderer.render(model);
glfwSwapBuffers(window); // <-----
glfwPollEvents();
}
See also LWJGL 3 Guide - Getting Started
I am writing a modern openGL model loader, and am running into some issues using shader attribute locations other than 0. I can get the position attribute into the shader, but not the values needed for Blinn-Phong shading (bound in the program to locations: Ks-3, Ka-4, Kd-5). Here is the code I am using to load and render the OBJ models:
public class Model{
private List<Vector3f> vertices=new ArrayList<>();
private List<Vector3f> normals=new ArrayList<>();
private ArrayList<Integer> vaos=new ArrayList<>();
private float[] faces=new float[fvaoSize];
private float[] ks=new float[kvaoSize];
private float[] ka=new float[kvaoSize];
private float[] kd=new float[kvaoSize];
private int currentPlace=0,kcurrentPlace=0;
private static final int fvaoSize=glGetInteger(GL12.GL_MAX_ELEMENTS_VERTICES)-(glGetInteger(GL12.GL_MAX_ELEMENTS_VERTICES)%12);
private static final int kvaoSize=(fvaoSize/4)*3;
private int facesd=0;
private HashMap<String,Material> materials=new HashMap<>();
#SuppressWarnings("unused")
private int plainFaceVAO=0,texFaceVAO=0,normalsFaceVAO=0,normalsTexVAO=0;
#SuppressWarnings("unused")
private int faceOffset=0,
texOffset=0,texTexture=0,texCoords=0,
normalOffset=0,normalCoords=0,
normalTexOffset=0,normalTexNormalCoords,normalTexTexCoords,normalTexTexture=0;
#SuppressWarnings("unused")
private Shader faceShader=null,texShader=null,normalsShader=null,normalTexShader=null;
private Material currMtl=null;
public Model(File f,Shader faceShader,Shader texShader,Shader normalsShader,Shader normalTexShader){
loadModelData(f);
this.faceShader=faceShader;
this.texShader=texShader;
this.normalsShader=normalsShader;
this.normalTexShader=normalTexShader;
faceOffset=glGetUniformLocation(faceShader.getID(),"trans");
texOffset=glGetUniformLocation(texShader.getID(),"trans");
texTexture=glGetAttribLocation(texShader.getID(),"tex");
texCoords=glGetAttribLocation(texShader.getID(),"texCoords");
normalOffset=glGetUniformLocation(normalsShader.getID(),"trans");
normalCoords=glGetAttribLocation(normalsShader.getID(),"normalsCoords");
}
#SuppressWarnings("null")
private void loadModelData(File f){
try(BufferedReader br=new BufferedReader(new FileReader(f))){
String line="";
while((line=br.readLine())!=null){
String[] words=line.split(" ");
//System.out.println(line);
if(line.startsWith("#"))
continue;
switch(words[0]){
case OBJ_VERTEX:
vertices.add(new Vector3f(parseFloat(words[1]),parseFloat(words[2]),-parseFloat(words[3])));
break;
case OBJ_VERTEX_NORMAL:
normals.add(new Vector3f(parseFloat(words[1]),parseFloat(words[2]),-parseFloat(words[3])));
break;
case OBJ_FACE:
facesd++;
FaceType cft=null;
int slashCount=0;
for(char c:words[1].toCharArray()){
if(c=='/')
slashCount++;
}
if(slashCount==0){
cft=FaceType.COORDSONLY;
}else if(slashCount==1){
cft=FaceType.TEXTURE;
}else if(slashCount==2){
if(words[0].contains("//")){
cft=FaceType.NORMALS;
}else{
cft=FaceType.NORMALS_AND_TEXTURE;
}
}
switch(cft){
case COORDSONLY:
Vector3f pos1=vertices.get(Integer.parseInt(words[1])-1);
Vector3f pos2=vertices.get(Integer.parseInt(words[2])-1);
Vector3f pos3=vertices.get(Integer.parseInt(words[3])-1);
Vec3 Ks=(currMtl.getKs()!=null?currMtl.getKs():new Vec3(1));
Vec3 Ka=(currMtl.getKa()!=null?currMtl.getKa():new Vec3(1));
Vec3 Kd=(currMtl.getKd()!=null?currMtl.getKd():new Vec3(1));
float[] temp=new float[]
{
pos1.x,pos1.y,pos1.z,1.0f,
pos2.x,pos2.y,pos2.z,1.0f,
pos3.x,pos3.y,pos3.z,1.0f
};
for(int i=0;i<12;i++){
faces[currentPlace+i]=temp[i];
}
float[] ktemp=new float[]
{
Ks.x,Ks.y,Ks.z,
Ks.x,Ks.y,Ks.z,
Ks.x,Ks.y,Ks.z
};
for(int i=0;i<9;i++){
ks[kcurrentPlace+i]=ktemp[i];
}
ktemp=new float[]
{
Ka.x,Ka.y,Ka.z,
Ka.x,Ka.y,Ka.z,
Ka.x,Ka.y,Ka.z
};
for(int i=0;i<9;i++){
ka[kcurrentPlace+i]=ktemp[i];
}
ktemp=new float[]
{
Kd.x,Kd.y,Kd.z,
Kd.x,Kd.y,Kd.z,
Kd.x,Kd.y,Kd.z
};
for(int i=0;i<9;i++){
kd[kcurrentPlace+i]=ktemp[i];
}
kcurrentPlace+=9;
currentPlace+=12;
if(currentPlace==fvaoSize){
int fvbo=glGenBuffers();
FloatBuffer vertexPositionsBuffer=BufferUtils.createFloatBuffer(fvaoSize);
vertexPositionsBuffer.put(faces);
vertexPositionsBuffer.flip();
glBindBuffer(GL_ARRAY_BUFFER, fvbo);
glBufferData(GL_ARRAY_BUFFER, vertexPositionsBuffer, GL_STATIC_DRAW);
int ksvbo=glGenBuffers();
FloatBuffer ksBuffer=BufferUtils.createFloatBuffer(kvaoSize);
ksBuffer.put(ks);
ksBuffer.flip();
glBindBuffer(GL_ARRAY_BUFFER, ksvbo);
glBufferData(GL_ARRAY_BUFFER, ksBuffer, GL_STATIC_DRAW);
int kavbo=glGenBuffers();
FloatBuffer kaBuffer=BufferUtils.createFloatBuffer(kvaoSize);
kaBuffer.put(ka);
kaBuffer.flip();
glBindBuffer(GL_ARRAY_BUFFER, kavbo);
glBufferData(GL_ARRAY_BUFFER, kaBuffer, GL_STATIC_DRAW);
int kdvbo=glGenBuffers();
FloatBuffer kdBuffer=BufferUtils.createFloatBuffer(kvaoSize);
kdBuffer.put(kd);
kdBuffer.flip();
glBindBuffer(GL_ARRAY_BUFFER, kdvbo);
glBufferData(GL_ARRAY_BUFFER, kdBuffer, GL_STATIC_DRAW);
int vao = glGenVertexArrays();
glBindVertexArray(vao);
glBindBuffer(GL_ARRAY_BUFFER, fvbo);
glVertexAttribPointer(0, 4, GL_FLOAT, false, 0, 0);
glBindBuffer(GL_ARRAY_BUFFER, ksvbo);
glVertexAttribPointer(3, 3, GL_FLOAT, false, 0, 0);
glBindBuffer(GL_ARRAY_BUFFER, kavbo);
glVertexAttribPointer(4, 3, GL_FLOAT, false, 0, 0);
glBindBuffer(GL_ARRAY_BUFFER, kdvbo);
glVertexAttribPointer(5, 3, GL_FLOAT, false, 0, 0);
glEnableVertexAttribArray(0);
glEnableVertexAttribArray(3);
glEnableVertexAttribArray(4);
glEnableVertexAttribArray(5);
glBindVertexArray(0);
glDeleteBuffers(fvbo);
glDeleteBuffers(ksvbo);
glDeleteBuffers(kavbo);
glDeleteBuffers(kdvbo);
ksBuffer=null;
kaBuffer=null;
kdBuffer=null;
vertexPositionsBuffer=null;
vaos.add(vao);
glBindBuffer(GL_ARRAY_BUFFER, 0);
currentPlace=0;
kcurrentPlace=0;
faces=new float[fvaoSize];
ks=new float[kvaoSize];
ka=new float[kvaoSize];
kd=new float[kvaoSize];
}
break;
case NORMALS:
throw new RuntimeException("File is unsupported.");
case NORMALS_AND_TEXTURE:
throw new RuntimeException("File is unsupported.");
case TEXTURE:
throw new RuntimeException("File is unsupported.");
default:
throw new RuntimeException("File is unsupported.");
}
break;
case OBJ_MTLLIB:
materials=MTLLibLoader.loadMTLLib(f.toPath().getParent()+"/"+words[1]);
break;
case OBJ_USEMTL:
System.out.println("Using Material "+words[1]+": Exists in hmap: "+materials.containsKey(words[1]));
currMtl=materials.get(words[1]);
break;
default:
break;
}
}
int fvbo=glGenBuffers();
FloatBuffer vertexPositionsBuffer=BufferUtils.createFloatBuffer(fvaoSize);
vertexPositionsBuffer.put(faces);
vertexPositionsBuffer.flip();
glBindBuffer(GL_ARRAY_BUFFER, fvbo);
glBufferData(GL_ARRAY_BUFFER, vertexPositionsBuffer, GL_STATIC_DRAW);
int ksvbo=glGenBuffers();
FloatBuffer ksBuffer=BufferUtils.createFloatBuffer(kvaoSize);
ksBuffer.put(ks);
ksBuffer.flip();
glBindBuffer(GL_ARRAY_BUFFER, ksvbo);
glBufferData(GL_ARRAY_BUFFER, ksBuffer, GL_STATIC_DRAW);
int kavbo=glGenBuffers();
FloatBuffer kaBuffer=BufferUtils.createFloatBuffer(kvaoSize);
kaBuffer.put(ka);
kaBuffer.flip();
glBindBuffer(GL_ARRAY_BUFFER, kavbo);
glBufferData(GL_ARRAY_BUFFER, kaBuffer, GL_STATIC_DRAW);
int kdvbo=glGenBuffers();
FloatBuffer kdBuffer=BufferUtils.createFloatBuffer(kvaoSize);
kdBuffer.put(kd);
kdBuffer.flip();
glBindBuffer(GL_ARRAY_BUFFER, kdvbo);
glBufferData(GL_ARRAY_BUFFER, kdBuffer, GL_STATIC_DRAW);
int vao = glGenVertexArrays();
glBindVertexArray(vao);
glBindBuffer(GL_ARRAY_BUFFER, fvbo);
glVertexAttribPointer(0, 4, GL_FLOAT, false, 0, 0);
glBindBuffer(GL_ARRAY_BUFFER, ksvbo);
glVertexAttribPointer(3, 3, GL_FLOAT, false, 0, 0);
glBindBuffer(GL_ARRAY_BUFFER, kavbo);
glVertexAttribPointer(4, 3, GL_FLOAT, false, 0, 0);
glBindBuffer(GL_ARRAY_BUFFER, kdvbo);
glVertexAttribPointer(5, 3, GL_FLOAT, false, 0, 0);
glEnableVertexAttribArray(0);
glEnableVertexAttribArray(3);
glEnableVertexAttribArray(4);
glEnableVertexAttribArray(5);
glBindVertexArray(0);
glDeleteBuffers(fvbo);
glDeleteBuffers(ksvbo);
glDeleteBuffers(kavbo);
glDeleteBuffers(kdvbo);
ksBuffer=null;
kaBuffer=null;
kdBuffer=null;
vertexPositionsBuffer=null;
vaos.add(vao);
glBindBuffer(GL_ARRAY_BUFFER, 0);
}catch(FileNotFoundException e){
// TODO Auto-generated catch block
e.printStackTrace();
}catch(IOException e){
// TODO Auto-generated catch block
e.printStackTrace();
}
System.out.println("Object \""+f.getName().substring(0,f.getName().lastIndexOf("."))+"\" loaded, has "+facesd+" faces");
}
public void drawModel(Camera c,Vec3 offset){
//System.err.format("rendering model, %d vaos\n",vaos.size());
//Matrix4f modelMat=RenderMatrixHelper.getModelMatrix(offset,new Vec3(0));
faceShader.useShader();
c.useCameraView();
glUniform4f(faceOffset,offset.x,offset.y,offset.z,0f);
for(Integer i:vaos){
glBindVertexArray(i);
glDrawArrays(GL_TRIANGLES, 0, fvaoSize/4);
}
Shader.stopShader();
glBindVertexArray(0);
}
public int getNumFaces(){
return facesd;
}
private enum FaceType{
COORDSONLY,
TEXTURE,
NORMALS,
NORMALS_AND_TEXTURE,
}
}
I think I have bound the K VBOs to the correct locations, but the shader is rendering them as black. Here is the vertex shader code:
#version 330
in vec4 position;
in vec3 normals;
in vec2 texCoords;
uniform vec4 trans;
uniform mat4 view;
uniform mat4 projection;
uniform mat4 model;
void main()
{
gl_Position = projection*view* (position+trans);//-camera_position;
}
The fragment shader outputs black if the Kd has a value of (0,0,0), white otherwise. In my code, I have checked and know that Kd is (.64,.64,.64)
And the fragment shader:
#version 330
out vec4 outputColor;
in vec3 Ks;
in vec3 Ka;
in vec3 Kd;
uniform mat4 view;
uniform mat4 projection;
void main()
{
if(Kd==vec3(0f)){
outputColor=vec4(0f);
}else{
outputColor=vec4(1f);
}
}
I can't find the issue, but if anyone could, it would be a great help. Thanks!
I didn't look at your code in full detail, but there seems to be a basic misunderstanding about how vertex attributes work. In the vertex shader, you have these attributes, which look fine:
in vec4 position;
in vec3 normals;
in vec2 texCoords;
But then in the fragment shader, you have these definitions:
in vec3 Ks;
in vec3 Ka;
in vec3 Kd;
Based on the code, it looks like you're planning to use these as vertex attributes. This will not work: you can't feed vertex attributes directly into the fragment shader; they need to be in variables in the vertex shader. If you need the (interpolated) values in the fragment shader, you have to pass them from the vertex shader to the fragment shader, using out variables in the vertex shader and matching in variables in the fragment shader.
If you do error checking, I'm surprised that your shader program even links: in variables in the fragment shader (at least as long as they are used in the code) need to have matching out variables in the vertex shader, which you do not have, e.g. for Kd.
Of course if your material properties are constant at least for a single object, you could also make them uniforms instead of attributes.
One other aspect to watch out for, once you have the more basic problems addressed, is the location of the attributes. You have the values 3, 4 and 5 hardwired for the material attributes. You will need to use calls like glGetAttribLocation(), glBindAttribLocation(), or location qualifiers in the shader code, to ensure that the locations you use in calls like glVertexAttribPointer() match up with the attribute locations in the shader program.
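Putting these points together, here is a sketch (not your actual shader) of how the vertex shader could receive and forward one of the material attributes, shown only for Kd; Ks and Ka work the same way, and Kd_in is a hypothetical name:
#version 330
layout(location = 0) in vec4 position;
layout(location = 5) in vec3 Kd_in;   // matches glVertexAttribPointer(5, 3, GL_FLOAT, ...)
uniform vec4 trans;
uniform mat4 view;
uniform mat4 projection;
out vec3 Kd;                          // picked up by "in vec3 Kd" in the fragment shader
void main()
{
    Kd = Kd_in;
    gl_Position = projection * view * (position + trans);
}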
This is the content of my Texture class:
public int id;
public Texture(InputStream inputStream) {
ByteBuffer buf = null;
int tWidth = 0;
int tHeight = 0;
try {
PNGDecoder decoder = new PNGDecoder(inputStream);
buf = ByteBuffer.allocateDirect(4*decoder.getWidth()*decoder.getHeight());
decoder.decode(buf, decoder.getWidth()*4, PNGDecoder.TextureFormat.RGBA);
buf.rewind();
inputStream.close();
} catch (IOException exception) {
ErrorHandler.handleError("Failed to load image", exception);
}
id = glGenTextures();
glActiveTexture(id);
glBindTexture(GL_TEXTURE_2D, id);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, tWidth, tHeight, 0, GL_RGBA, GL_UNSIGNED_BYTE, buf);
glBindTexture(GL_TEXTURE_2D, 0);
}
This is how I render:
glActiveTexture(background.id);
glBindTexture(GL_TEXTURE_2D, background.id);
glBindBuffer(GL_ARRAY_BUFFER, vboVertexHandle);
glEnableVertexAttribArray(0);
glEnableVertexAttribArray(1);
glVertexAttribPointer(0, 3, GL_FLOAT, false, 0, 0);
glVertexAttribPointer(1, 2, GL_FLOAT, false, 0, 4*18);
glDrawArrays(GL_TRIANGLES, 0, 6);
glDisableVertexAttribArray(0);
glDisableVertexAttribArray(1);
glBindTexture(GL_TEXTURE_2D, 0);
and this is the fragment shader:
#version 330
in vec2 textureCoordinate;
out vec4 outputColor;
uniform sampler2D texture_diffuse;
void main() {
outputColor.rgb = vec3(1.0f, 1.0f, 1.0f);
outputColor += texture2D(texture_diffuse, textureCoordinate);
}
What am I doing wrong? The texture coordinates passed to the shader program are 100% correct (I checked). But I still get a white quad.
Note: I use this PNG decoder.
EDIT:
I printed out a float for every 4 bytes to the console, and I got 0.0 0.0 0.0 0.0.... Does that mean that the texture is loaded incorrectly, or is the information stored in the buffer in a different format?
Your fragment shader looks wrong: you set a white colour and then add the value from the texture, so it will clamp to white. Just do something more like this:
void main() {
outputColor.a = 1.0f;
outputColor.rgb = texture(texture_diffuse, textureCoordinate).rgb;
}