Texture Loading using PNGDecoder and LWJGL - java

I am relatively new to OpenGL, so this question might seem a bit trivial. I have this code in my Main.java, which is supposed to output a rectangle with the texture of the image on it:
float vertices[] = {
// positions // colors // texture coords
0.5f, 0.5f, 0.0f, 1.0f, 0.0f, 0.0f, 1.0f, 1.0f, // top right
0.5f, -0.5f, 0.0f, 0.0f, 1.0f, 0.0f, 1.0f, 0.0f, // bottom right
-0.5f, -0.5f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, // bottom left
-0.5f, 0.5f, 0.0f, 1.0f, 1.0f, 0.0f, 0.0f, 1.0f // top left
};
int indices[] = {
0, 1, 3, // first triangle
1, 2, 3 // second triangle
};
int VBO = glGenBuffers(), VAO = glGenVertexArrays(), EBO = glGenBuffers();
glBindVertexArray(VAO);
glBindBuffer(GL_ARRAY_BUFFER, VBO);
glBufferData(GL_ARRAY_BUFFER, vertices, GL_STATIC_DRAW);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, EBO);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, indices, GL_STATIC_DRAW);
// position attribute
glVertexAttribPointer(0, 3, GL_FLOAT, false, 8, 0);
glEnableVertexAttribArray(0);
// color attribute
glVertexAttribPointer(1, 3, GL_FLOAT, false, 8, 2);
glEnableVertexAttribArray(1);
// texture coord attribute
glVertexAttribPointer(2, 2, GL_FLOAT, false, 8, 5);
glEnableVertexAttribArray(2);
Texture texture = null;
try {
log.info("Loading texture");
texture = Texture.loadTexture("texture.png");
} catch (IOException e) {
log.severe("Texture loading failed due to IOException: " + e.getMessage());
e.printStackTrace();
}
while (!glfwWindowShouldClose(window))
{
glClear(GL_COLOR_BUFFER_BIT);
if(mainProgram != null) {
mainProgram.use();
}
glBindTexture(GL_TEXTURE_2D, texture.getId());
glBindVertexArray(VAO);
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, 0);
//...
}
My shaders are the following:
Vertex Shader:
#version 330 core
layout (location = 0) in vec3 aPos;
layout (location = 1) in vec3 aColor;
layout(location = 2) in vec2 aTexCoord;
out vec3 vertexColor;
out vec2 texCoord;
void main()
{
gl_Position = vec4(aPos , 1.0);
vertexColor = aColor;
texCoord = aTexCoord;
}
Fragment Shader:
#version 330 core
out vec4 FragColor;
in vec3 ourColor;
in vec2 TexCoord;
// texture sampler
uniform sampler2D texture1;
void main()
{
FragColor = texture(texture1, TexCoord);
}
The Texture class referenced above is the following:
public class Texture {
public static Texture loadTexture(String fileName) throws IOException{
//load png file
PNGDecoder decoder = new PNGDecoder(new java.io.FileInputStream(new File("C:/Users/Using/Absolute/Paths/For/Demonstration/texture.png")));
//create a byte buffer big enough to store RGBA values
ByteBuffer buffer = BufferUtils.createByteBuffer(4 * decoder.getWidth() * decoder.getHeight());
//decode
decoder.decode(buffer, decoder.getWidth() * 4, PNGDecoder.Format.RGBA);
//flip the buffer so its ready to read
buffer.flip();
//create a texture
int id = glGenTextures();
//bind the texture
glBindTexture(GL_TEXTURE_2D, id);
//tell opengl how to unpack bytes
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
//set the texture parameters, can be GL_LINEAR or GL_NEAREST
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
//upload texture
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, decoder.getWidth(), decoder.getHeight(), 0, GL_RGBA, GL_UNSIGNED_BYTE, buffer);
// Generate Mip Map
glGenerateMipmap(GL_TEXTURE_2D);
return new Texture(id);
}
private int id;
public Texture(int id){
this.id = id;
}
public int getId(){
return id;
}
}
Note that the path of the texture image is not the problem, as the code runs without any exceptions or compiler errors. The PNGDecoder I used can be found here.

The stride and offset arguments of glVertexAttribPointer must be given in bytes, not as a count of floats.
Furthermore, the texture coordinates are the 7th and 8th elements of each vertex (and the colors the 4th to 6th), so the offsets have to account for that:
// position attribute: stride is 8 floats = 8*4 bytes
glVertexAttribPointer(0, 3, GL_FLOAT, false, 8, 0);      // wrong
glVertexAttribPointer(0, 3, GL_FLOAT, false, 8*4, 0);    // correct
// color attribute: starts after 3 position floats = 3*4 bytes
glVertexAttribPointer(1, 3, GL_FLOAT, false, 8, 2);      // wrong
glVertexAttribPointer(1, 3, GL_FLOAT, false, 8*4, 3*4);  // correct
// texture coord attribute: starts after 6 floats = 6*4 bytes
glVertexAttribPointer(2, 2, GL_FLOAT, false, 8, 5);      // wrong
glVertexAttribPointer(2, 2, GL_FLOAT, false, 8*4, 6*4);  // correct
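As a minimal sketch of the full corrected setup (same 8-float vertex layout as above), written with Float.BYTES so the byte arithmetic stays readable:
int stride = 8 * Float.BYTES; // 3 position + 3 color + 2 texture floats per vertex
glVertexAttribPointer(0, 3, GL_FLOAT, false, stride, 0);               // position
glEnableVertexAttribArray(0);
glVertexAttribPointer(1, 3, GL_FLOAT, false, stride, 3 * Float.BYTES); // color
glEnableVertexAttribArray(1);
glVertexAttribPointer(2, 2, GL_FLOAT, false, stride, 6 * Float.BYTES); // texture coordinates
glEnableVertexAttribArray(2);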

Related

Depth not being rendered but instead is displaying overlay

I have an issue in my project where I am trying to draw a pyramid on screen but the depth is not being displayed.
public void create(float[] vertices, int[] indices, int numberOfVertices, int numberOfIndices) {
indexCount = numberOfIndices;
vao = gl.genVertexArrays();
gl.bindVertexArray(vao);
IntBuffer indicesBuffer = factory.create(indices);
ibo = gl.genBuffers();
gl.bindBuffer(GL_ELEMENT_ARRAY_BUFFER, ibo);
gl.bufferData(GL_ELEMENT_ARRAY_BUFFER, indicesBuffer, GL_STATIC_DRAW);
FloatBuffer verticesBuffer = factory.create(vertices);
vbo = gl.genBuffers();
gl.bindBuffer(GL_ARRAY_BUFFER, vbo);
gl.bufferData(GL_ARRAY_BUFFER, verticesBuffer, GL_STATIC_DRAW);
gl.vertexAttribPointer(0, 3, GL_FLOAT, false, 0, 0);
gl.enableVertexAttribArray(0);
gl.bindBuffer(GL_ARRAY_BUFFER, 0);
gl.bindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
gl.bindVertexArray(0);
}
I create my mesh object with the following code, where the vertices and indices are defined as:
private void createTriangles() {
float[] vertices = new float[] {
//x y z
-1.0f, -1.0f, 0.0f, //0
0.0f, 0.0f, 1.0f, //1
1.0f, -1.0f, 0.0f, //2
0.0f, 1.0f, 0.0f //3
};
int[] indices = new int[] {
0, 3, 1,
1, 3, 2,
2, 3, 0,
0, 1, 2
};
mesh.create(vertices, indices, 12, 12);
}
In order to display them on screen I call my render function from my game loop, which is defined as:
public void render() {
gl.bindVertexArray(vao);
gl.bindBuffer(GL_ELEMENT_ARRAY_BUFFER, ibo);
gl.drawElements(GL_TRIANGLES, indexCount, GL_UNSIGNED_INT, 0);
gl.bindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
gl.bindVertexArray(0);
}
To create my FloatBuffer and IntBuffer I have a factory class defined as:
public class BufferFactory {
public IntBuffer create(int[] indices) {
IntBuffer buffer = BufferUtils.createIntBuffer(indices.length);
buffer.put(indices);
buffer.flip();
return buffer;
}
public FloatBuffer create(float[] vertices) {
FloatBuffer buffer = BufferUtils.createFloatBuffer(vertices.length);
buffer.put(vertices);
buffer.flip();
return buffer;
}
}
So the issue I'm getting is that it should have drawn four triangles to screen to form a pyramid; however, my output looks like this.
I have rotated the image to try and see the depth but it is a flat object.
I have tried to identify where the issue may be coming from by drawing each triangle individually, changing my render method to gl.drawElements(GL_TRIANGLES, 3, GL_UNSIGNED_INT, 0); so that it only draws one triangle. I have tried to draw all four faces individually and all are drawn to screen.
I found the issue. I was originally scaling the model like this:
model.scale(new Vector3f(0.2f, 0.2f, 0.0f));
As a result the z axis was being multiplied by 0, flattening the pyramid. Silly mistake; hope this helps someone in the future.
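For reference, the fix is just a non-zero z factor in the same scale call (assuming a JOML-style Vector3f as above):
model.scale(new Vector3f(0.2f, 0.2f, 0.2f)); // non-zero z keeps the pyramid's depth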

OpenGL (LWJGL): Render to Texture not working

I've got some code that's supposed to render text to a texture so that I don't have to render each character each draw step, and can instead use the rendered texture. However, my code does not work as it's supposed to, and the texture is left blank. After a few hours of trying different things, I cannot figure it out, and so I bring the question to you.
I'm fairly certain the problem is somewhere in this code chunk below, but if you think it's not, I'll gladly post whatever other samples of code you would like. I just really want to get this done already. The exact problem is that the created texture is blank, and never is rendered to (it seems like). I've tried just drawing one massive quad on it, and that didn't seem to work either.
Edit: After flipping the buffer, I can get some color to be rendered to the texture, but it's all just one color (which makes me think it's only sampling one pixel), and I can't figure out how to get the actual image I want to render to show on it.
public Text(String text, int x, int y, Font font, float size, GUIComponent parent, Binding binding) {
super(null, x, y, font.getStringWidth(size, text), font.getStringHeight(size), parent, binding, false);
this.text = text;
this.font = font;
this.width = font.getStringWidth(size, text);
this.height = font.getStringHeight(size);
int fbo = glGenFramebuffers();
glBindFramebuffer(GL_FRAMEBUFFER, fbo);
int tex = glGenTextures();
glBindTexture(GL_TEXTURE_2D, tex);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glFramebufferTexture(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, tex, 0);
IntBuffer intBuffer = BufferUtils.createIntBuffer(1);
intBuffer.put(GL_COLOR_ATTACHMENT0);
intBuffer.flip();
glDrawBuffers(intBuffer);
if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) {
throw new RuntimeException("Something really bad happened");
}
//RENDER
RenderUtil.recalibrate(width, height, 1.0f); //Does glViewport(width, height), and some matrix stuff
Camera.updateShader("textshader", "projection", false); //Update projection matrix
glEnableVertexAttribArray(0);
glEnableVertexAttribArray(1);
int width = 0;
float f = this.width / 1.0f;
int charWidth = 0;
for (char c : text.toCharArray()) {
font.bind(0, c % 256); // calls glBindTexture, this works, have tested
//ResourceManager.getTexture("grassroadedger1").bind(0, 0);
charWidth = font.getCharWidth(size, c);
//float[] verts = new float[] { -1f, 1f, 1f, 1f, 1f, -1f, -1f, -1f };
float[] verts = new float[] { -1.0f + (width / f), 1.0f, 1.0f + ((width + charWidth) / f), 1.0f, 1.0f + ((width + charWidth) / f), -1.0f, -1.0f + (width / f), -1.0f };
width += charWidth;
glBindBuffer(GL_ARRAY_BUFFER, vertexPointer);
glBufferSubData(GL_ARRAY_BUFFER, 0, RenderUtil.createBuffer(verts));
glVertexAttribPointer(0, 2, GL_FLOAT, false, 0, 0);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, RenderUtil.getIndicesPointer());
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, 0);
glBindBuffer(GL_ARRAY_BUFFER, 0);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
}
glDisableVertexAttribArray(0);
glDisableVertexAttribArray(1);
//END
glBindFramebuffer(GL_FRAMEBUFFER, 0);
glBindTexture(GL_TEXTURE_2D, 0);
RenderUtil.recalibrate(Window.getWidth(), Window.getHeight(), LuminaEngine.getGlobalImageScale());
this.setTexture(new Texture(tex, "text_"+size+"_"+text));
}
FragmentShader
#version 330 core
in vec2 uv;
layout(location = 0) out vec4 color;
uniform sampler2D sampler;
void main(){
color = texture2D( sampler, uv );
}
Vertex Shader
#version 330 core
layout(location = 0) in vec3 vertices;
layout(location = 1) in vec2 textures;
out vec2 uv;
uniform mat4 projection;
void main(){
gl_Position = projection * vec4(vertices,1);
uv = textures;
}
Edit: After flipping the intBuffer for drawBuffers, I can get some things to appear, mostly just a big blue square. Progress nonetheless.
You never defined an array of generic vertex attribute data for the texture coordinates (in vec2 textures;).
Add something like this to your code:
int texCoordBuffer = glGenBuffers();
float[] texCoord = new float[] { 0.0f, 1.0f, 1.0f, 1.0f, 1.0f, 0.0f, 0.0f, 0.0f };
glBindBuffer(GL_ARRAY_BUFFER, texCoordBuffer);
glBufferData(GL_ARRAY_BUFFER, RenderUtil.createBuffer(texCoord), GL_STATIC_DRAW);
int tex_attr_i = 1; // layout(location = 1) in vec2 textures;
glVertexAttribPointer(tex_attr_i, 2, GL_FLOAT, false, 0, 0);
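The pointer is captured while the texcoord VBO is bound, and it has to be in place before the draw call. A hedged sketch of where it could sit in the posted character loop (texCoordBuffer and tex_attr_i are from the snippet above; RenderUtil.getIndicesPointer() is the question's own helper):
glBindBuffer(GL_ARRAY_BUFFER, texCoordBuffer);
glVertexAttribPointer(tex_attr_i, 2, GL_FLOAT, false, 0, 0); // one uv pair per corner of the quad
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, RenderUtil.getIndicesPointer());
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, 0);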

Texture being rendered as black quad

I have been trying to follow the code as specified in this tutorial on OpenGL3+ textures, but my result ends up black instead of the texture.
I am using stb_image to load the texture's image into a direct ByteBuffer, and I can guarantee the RGB data in the buffer is at least not uniform, so it can't be that.
I usually do not like to dump code, but I don't see much else I can do at this point. Here's my java code and shaders:
GL is an interface pointing to all the GL## functionality in LWJGL3 [1].
ShaderProgram wraps all the shader-specific stuff into a black box that builds a shader program from the attached shaders on the first call of use(GL) and subsequently reuses that program. This works just fine for rendering a coloured triangle, so I rule out any errors in there.
Util.checkError(GL, boolean) checks for any OpenGL errors that have accumulated since its last execution and throws a runtime exception if the boolean is not set (if set, it silently writes to the log instead).
The rendering code, update(GL, long), runs once every frame.
private static final ResourceAPI res = API.get(ResourceAPI.class);
Image lwjgl32;
ShaderProgram prog = new ShaderProgram();
int vbo, vao, ebo;
int texture;
@Override
public void init(GL gl) {
try {
prog.attach(res.get("shaders/texDemo.vert", ShaderSource.class));
prog.attach(res.get("shaders/texDemo.frag", ShaderSource.class));
lwjgl32 = res.get("textures/lwjgl32.png", Image.class);
} catch(ResourceException e) {
throw new RuntimeException(e);
}
float[] vertices = {
// positions // colors // texture coords
0.5f, 0.5f, 0.0f, 1.0f, 0.0f, 0.0f, 1.0f, 1.0f, // top right
0.5f, -0.5f, 0.0f, 0.0f, 1.0f, 0.0f, 1.0f, 0.0f, // bottom right
-0.5f, -0.5f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, // bottom left
-0.5f, 0.5f, 0.0f, 1.0f, 1.0f, 0.0f, 0.0f, 1.0f // top left
};
int[] indices = {
0, 1, 3, // first triangle
1, 2, 3 // second triangle
};
vao = gl.glGenVertexArrays();
vbo = gl.glGenBuffers();
ebo = gl.glGenBuffers();
gl.glBindVertexArray(vao);
gl.glBindBuffer(GL.GL_ARRAY_BUFFER, vbo);
gl.glBufferData(GL.GL_ARRAY_BUFFER, vertices, GL.GL_STATIC_DRAW);
gl.glBindBuffer(GL.GL_ELEMENT_ARRAY_BUFFER, ebo);
gl.glBufferData(GL.GL_ELEMENT_ARRAY_BUFFER, indices, GL.GL_STATIC_DRAW);
gl.glVertexAttribPointer(0, 3, GL.GL_FLOAT, false, 8 * Float.BYTES, 0);
gl.glEnableVertexAttribArray(0);
gl.glVertexAttribPointer(1, 3, GL.GL_FLOAT, false, 8 * Float.BYTES, 3 * Float.BYTES);
gl.glEnableVertexAttribArray(0);
gl.glVertexAttribPointer(2, 2, GL.GL_FLOAT, false, 8 * Float.BYTES, 6 * Float.BYTES);
gl.glEnableVertexAttribArray(0);
texture = gl.glGenTextures();
gl.glBindTexture(GL.GL_TEXTURE_2D, texture);
gl.glTexParameteri(GL.GL_TEXTURE_2D, GL.GL_TEXTURE_MIN_FILTER, GL.GL_LINEAR);
gl.glTexParameteri(GL.GL_TEXTURE_2D, GL.GL_TEXTURE_MAG_FILTER, GL.GL_LINEAR);
gl.glTexParameteri(GL.GL_TEXTURE_2D, GL.GL_TEXTURE_WRAP_S, GL.GL_REPEAT);
gl.glTexParameteri(GL.GL_TEXTURE_2D, GL.GL_TEXTURE_WRAP_T, GL.GL_REPEAT);
gl.glTexImage2D(GL.GL_TEXTURE_2D, 0, GL.GL_RGB8, lwjgl32.getWidth(), lwjgl32.getHeight(), 0, GL.GL_RGB, GL.GL_UNSIGNED_BYTE, lwjgl32.getImageData());
gl.glGenerateMipmap(GL.GL_TEXTURE_2D);
prog.use(gl);
gl.glUniform1i(gl.glGetUniformLocation(prog.getId(gl), "texture"), 0);
Util.checkError(gl, false);
}
@Override
protected void update(GL gl, long deltaFrame) {
gl.glClearColor(0.2f, 0.3f, 0.3f, 1.0f);
gl.glClear(GL.GL_COLOR_BUFFER_BIT);
gl.glActiveTexture(GL.GL_TEXTURE0);
gl.glBindTexture(GL.GL_TEXTURE_2D, texture);
prog.use(gl);
gl.glBindVertexArray(vao);
gl.glDrawElements(GL.GL_TRIANGLES, 6, GL.GL_UNSIGNED_INT, 0);
}
@Override
public void clean(GL gl) {
gl.glDeleteVertexArrays(vao);
gl.glDeleteBuffers(vbo);
gl.glDeleteBuffers(ebo);
ShaderProgram.clearUse(gl);
prog.dispose(gl);
}
Vertex shader
#version 330 core
layout (location = 0) in vec3 in_position;
layout (location = 1) in vec3 in_color;
layout (location = 2) in vec2 in_texCoord;
out vec3 color;
out vec2 texCoord;
void main() {
gl_Position = vec4(in_position, 1.0);
color = in_color;
texCoord = vec2(in_texCoord.x, in_texCoord.y);
}
Fragment shader
#version 330 core
out vec4 frag_colour;
in vec3 color;
in vec2 texCoord;
uniform sampler2D texture;
void main() {
frag_colour = texture(texture, texCoord) * vec4(color, 1.0);
}
[1] I wrapped LWJGL3's GL## static classes into a single interface and implementation so I can have a bunch of stateful methods that do things such as identifying the context that is being rendered to, etc. I also did my best to remove non-core functionality from the interface so I don't even get tempted to use deprecated stuff.
You only enable the vertex attribute with index 0, but you do so three times.
Adapt your code like this:
gl.glVertexAttribPointer(0, 3, GL.GL_FLOAT, false, 8 * Float.BYTES, 0);
gl.glEnableVertexAttribArray(0);
gl.glVertexAttribPointer(1, 3, GL.GL_FLOAT, false, 8 * Float.BYTES, 3 * Float.BYTES);
gl.glEnableVertexAttribArray(1); // <-------
gl.glVertexAttribPointer(2, 2, GL.GL_FLOAT, false, 8 * Float.BYTES, 6 * Float.BYTES);
gl.glEnableVertexAttribArray(2); // <------
It's hard to tell from just looking at the code, but a black quad means that this line in your fragment shader:
frag_colour = texture(texture, texCoord) * vec4(color, 1.0);
evaluates to 0. That means either the texture isn't read/bound properly, your texture coordinates are off, or color is a zero vector. Make sure your texture image is properly loaded (the file exists, has a width and height, etc.) and has the correct format. What I usually do to debug a shader is output each parameter as a color, to hint whether it has the correct value:
frag_colour = vec4(color, 1.0); //Makes sure the color is right
or
frag_colour = texture(texture, texCoord); //Makes sure the texture is loaded and bound
And if that doesn't give enough information, even more detail:
frag_colour = vec4(color.x, color.x, color.x, 1.0);
or
frag_colour = vec4(texCoord.x, texCoord.x, texCoord.x, 1.0);

LWJGL - Getting error : "Cannot use offsets when Element Array Buffer Object is disabled"

I am trying to render a basic cube in LWJGL (Java), but the program keeps crashing, telling me that it "Cannot use offsets when Element Array Buffer Object is disabled". I am guessing that the real problem is not exactly what this error says and that there might be something less obvious going on.
(I actually wrote the code that I'm about to show you in C++ first, and it was working fine.)
Init function (called once)
FloatBuffer vertices = BufferUtils.createFloatBuffer(4 * 5);
vertices.put(new float[]{
// pos // Color
0.5f, 0.5f, 1.0f, 0.0f, 0.5f,
0.5f, -0.5f, 0.5f, 0.0f, 0.75f,
-0.5f, -0.5f, 0.0f, 1.0f, 0.0f,
-0.5f, 0.5f, 0.5f, 0.5f, 1.0f
});
vertices.flip();
indices = BufferUtils.createByteBuffer(2 * 3);
indices.put(new byte[]{
0, 1, 3,
1, 2, 3
});
indices.flip();
// VAO
VAO = GL30.glGenVertexArrays();
GL30.glBindVertexArray(VAO);
// VBO
VBO = glGenBuffers();
glBindBuffer(GL_ARRAY_BUFFER, VBO);
glBufferData(GL_ARRAY_BUFFER, vertices, GL_STATIC_DRAW);
// IBO
IBO = glGenBuffers();
glBindBuffer(GL_ARRAY_BUFFER, IBO);
glBufferData(GL_ARRAY_BUFFER, indices, GL_STATIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, VBO);
glEnableVertexAttribArray(0);
// ,-- position in layout (see shader)
// | ,-- Nb of component per vertex (2 for 2D (x, y))
// | | ,-- Normalized ? (between 0 - 1)
// | | | ,-- Offset between things (size of a line)
// | | | | ,- Where to start ?
glVertexAttribPointer(0, 2, GL11.GL_FLOAT, false, 5 * Float.SIZE , 0);
glDisableVertexAttribArray(0);
glEnableVertexAttribArray(1);
glVertexAttribPointer(1, 3, GL11.GL_FLOAT, false, 5 * Float.SIZE , 2 * Float.SIZE);
glDisableVertexAttribArray(1);
glBindBuffer(GL_ARRAY_BUFFER, IBO);
// Unbinds the VAO
GL30.glBindVertexArray(0);
And here is the render function (static for now, until I figure this out)
glUseProgram(shaderProgram.getID());
GL30.glBindVertexArray(VAO);
// Error in the line bellow
GL11.glDrawElements(GL11.GL_TRIANGLES, 4, GL11.GL_UNSIGNED_BYTE, 0);
GL30.glBindVertexArray(0);
A little help would be much appreciated. Also, I might be doing something very wrong that I'm not aware of, so if you spot something, that would be awesome.
Thank you
Vertex Shader:
#version 330 core
layout(location = 0) in vec2 position;
layout(location = 1) in vec3 color;
out vec4 Color;
void main()
{
gl_Position = vec4(position, 0.0, 1.0);
Color = vec4(color, 1.0);
}
Fragment Shader:
#version 330 core
in vec4 Color;
out vec4 color;
void main()
{
color = Color;
}
The following lines
IBO = glGenBuffers();
glBindBuffer(GL_ARRAY_BUFFER, IBO);
glBufferData(GL_ARRAY_BUFFER, indices, GL_STATIC_DRAW);
are creating a new array buffer. From the name you chose and the data you copy into it, I guess this is your index buffer, and thus it should be of type GL_ELEMENT_ARRAY_BUFFER.
What the error message is telling you is that your draw command cannot use 0 as an offset into your index buffer if there is none.
Why is there nothing drawn after fixing the index buffer?
Problem 1: You are only drawing 4 indices. If you want to draw 2 triangles, you will need 6 indices.
Problem 2: OpenGL is a state machine, and a VAO only stores the last state that was set before it is unbound. This means your calls to glDisableVertexAttribArray disable the attributes recorded in your VAO. You should move these calls after GL30.glBindVertexArray(0);
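A rough sketch of just the corrected parts, reusing the question's own names (IBO, indices) and leaving everything else as posted:
// index data has to live in an ELEMENT_ARRAY_BUFFER so glDrawElements can use an offset
IBO = glGenBuffers();
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, IBO);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, indices, GL_STATIC_DRAW);
// ... attribute setup as before, but keep the attributes enabled while the VAO is bound ...
GL30.glBindVertexArray(0); // unbind the VAO before (or instead of) disabling the attributes
// render: two triangles need all 6 byte indices
GL11.glDrawElements(GL11.GL_TRIANGLES, 6, GL11.GL_UNSIGNED_BYTE, 0);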

Opengl texture on quad

This is the content of my Texture class:
public int id;
public Texture(InputStream inputStream) {
ByteBuffer buf = null;
int tWidth = 0;
int tHeight = 0;
try {
PNGDecoder decoder = new PNGDecoder(inputStream);
buf = ByteBuffer.allocateDirect(4*decoder.getWidth()*decoder.getHeight());
decoder.decode(buf, decoder.getWidth()*4, PNGDecoder.TextureFormat.RGBA);
buf.rewind();
inputStream.close();
} catch (IOException exception) {
ErrorHandler.handleError("Failed to load image", exception);
}
id = glGenTextures();
glActiveTexture(id);
glBindTexture(GL_TEXTURE_2D, id);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, tWidth, tHeight, 0, GL_RGBA, GL_UNSIGNED_BYTE, buf);
glBindTexture(GL_TEXTURE_2D, 0);
}
This is how I render:
glActiveTexture(background.id);
glBindTexture(GL_TEXTURE_2D, background.id);
glBindBuffer(GL_ARRAY_BUFFER, vboVertexHandle);
glEnableVertexAttribArray(0);
glEnableVertexAttribArray(1);
glVertexAttribPointer(0, 3, GL_FLOAT, false, 0, 0);
glVertexAttribPointer(1, 2, GL_FLOAT, false, 0, 4*18);
glDrawArrays(GL_TRIANGLES, 0, 6);
glDisableVertexAttribArray(0);
glDisableVertexAttribArray(1);
glBindTexture(GL_TEXTURE_2D, 0);
and this is the fragment shader:
#version 330
in vec2 textureCoordinate;
out vec4 outputColor;
uniform sampler2D texture_diffuse;
void main() {
outputColor.rgb = vec3(1.0f, 1.0f, 1.0f);
outputColor += texture2D(texture_diffuse, textureCoordinate);
}
What am I doing wrong? The texture coordinates passed to the shader program are 100% correct (I checked). But I still get a white quad.
Note: I use this PNG decoder.
EDIT:
I printed out a float for every 4 bytes to the console, and I got 0.00.00.00.0.... Does that mean that the texture is loaded incorrectly, or is the information stored in the buffer in a different format?
Your fragment shader looks wrong: you set a white colour and then add the value from the texture, so it will clamp to white. Just do something more like this:
void main() {
outputColor.a = 1.0;
outputColor.rgb = texture(texture_diffuse, textureCoordinate).rgb;
}
