I am new to OpenGL. I am trying to apply a texture to a 3D figure, but I am getting a glGetUniformLocation: glError 1282 error. I searched online but couldn't fix it. I would also be happy if someone could explain what causes the issue.
My shaders:
private final String vertexShaderCode =
"uniform mat4 uMVPMatrix;" +
"attribute vec4 vPosition;" +
"attribute vec2 texCoord;" +
"varying vec2 texCoordOut;" +
"void main() {" +
" gl_Position = uMVPMatrix * vPosition;" +
" texCoordOut = texCoord;" +
"}";
private final String fragmentShaderCode =
"precision mediump float;" +
"uniform vec4 texColor"+
"varying vec2 texCoordOut;" +
"uniform sampler2D Texture;" +
"void main() {" +
" texColor = texture2D(Texture, texCoordOut);" +
" gl_FragColor = texColor;" +
"}";
Draw method:
public void draw1(float[] mvpMatrix) {
// Add program to OpenGL environment
GLES20.glUseProgram(mProgram);
// get handle to vertex shader's vPosition member
mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
mMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
MyGLRenderer.checkGlError("glGetUniformLocation");
// Apply the projection and view transformation
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mvpMatrix, 0);
MyGLRenderer.checkGlError("glUniformMatrix4fv");
vsTextureCoord = GLES20.glGetAttribLocation(mProgram, "texCoord");
fsTexture = GLES20.glGetUniformLocation(mProgram, "Texture");
// Enable a handle to the triangle vertices
GLES20.glEnableVertexAttribArray(mPositionHandle);
// Prepare the triangle coordinate data
GLES20.glVertexAttribPointer(
mPositionHandle, COORDS_PER_VERTEX,
GLES20.GL_FLOAT, false,
vertexStride, vertexBuffer);
GLES20.glEnableVertexAttribArray(vsTextureCoord);
GLES20.glVertexAttribPointer(vsTextureCoord, COORDS_PER_TEXTURE,
GLES20.GL_FLOAT, false,
TextureStride, texBuffer);
// get handle to shape's transformation matrix
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]);
GLES20.glUniform1i(fsTexture, 0);
// Draw the triangle
GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, tablelamp21NumVerts);
// Disable vertex array
GLES20.glDisableVertexAttribArray(mPositionHandle);
GLES20.glDisableVertexAttribArray(vsTextureCoord);
}
LoadTexture:
public void loadGLTexture(Context context) {
GLES20.glGenTextures(4, textures, 0);
Bitmap bitmap = null;
for (int i=0;i<4;i++)
{
// Create a bitmap
bitmap = BitmapFactory.decodeResource(context.getResources(), resourceIds[i]);
//...and bind it to our array
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[i]);
//Create Nearest Filtered Texture
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
//Use the Android GLUtils to specify a two-dimensional texture image from our bitmap
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0);
//Clean up
bitmap = null;
}
}
You shouldn't even be able to compile that fragment shader in the first place: the uniform vec4 texColor declaration is missing its terminating semicolon, and a uniform cannot be assigned to inside main(). Because the shader fails to compile, the program never links, and glGetUniformLocation on an unlinked program generates GL_INVALID_OPERATION, which is error 1282.
Also use one consistent name for the sampler: whatever you declare in the fragment shader (u_texture below) must be the same string you pass to glGetUniformLocation. For example:
private final String vertexShaderCode =
"uniform mat4 uMVPMatrix;" +
"attribute vec4 vPosition;" +
"attribute vec2 texCoord;" +
"varying vec2 texCoordOut;" +
"void main() {" +
" gl_Position = uMVPMatrix * vPosition;" +
" texCoordOut = texCoord;" +
"}";
private final String fragmentShaderCode =
"precision mediump float;" +
"uniform vec4 texcColor"+
"varying vec2 texCoordOut;" +
"uniform sampler2D u_texture;" +
"void main() {" +
" texColor = texture2D(u_texture, texCoordOut);" +
" gl_FragColor = texColor;" +
"}";
And in the GLES code,
fsTexture = GLES20.glGetUniformLocation(mProgram, "u_texture");
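To catch problems like this early, check the compile and link status when you build the shaders; the driver's info log points at the exact line that failed. A minimal sketch of such a check (the loadShader helper name follows the usual Android sample code and is an assumption here):
// Requires android.opengl.GLES20 and android.util.Log.
public static int loadShader(int type, String shaderCode) {
    int shader = GLES20.glCreateShader(type);
    GLES20.glShaderSource(shader, shaderCode);
    GLES20.glCompileShader(shader);
    // Ask the driver whether compilation succeeded and log why it failed if not.
    int[] compiled = new int[1];
    GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
    if (compiled[0] == 0) {
        Log.e("Shader", "Compile error: " + GLES20.glGetShaderInfoLog(shader));
        GLES20.glDeleteShader(shader);
        return 0;
    }
    return shader;
}
A matching GLES20.glGetProgramiv(mProgram, GLES20.GL_LINK_STATUS, ...) and GLES20.glGetProgramInfoLog(mProgram) check after linking is just as useful.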
I'm trying to add two textures to a 3D cube. I achieved my goal, but along the way I lost the background color.
I want to show the original color of the images and also the background color. I use mix, but the background displays completely dark.
This is how my fragmentShaderCode looks:
private final String fragmentShaderCode =
"precision mediump float;" +
"uniform sampler2D u_Texture0;" +
"uniform sampler2D u_Texture1;" +
"uniform vec4 aColor;" +
"varying vec2 v_TexCoordinate0;" +
"varying vec2 v_TexCoordinate1;" +
"void main() {" +
" vec4 base = texture2D(u_Texture0, v_TexCoordinate0);" +
" vec4 overlay = texture2D(u_Texture1, v_TexCoordinate1);" +
" mediump float ra = (overlay.a) * overlay.r + (1.0 - overlay.a) * base.r;" +
" mediump float ga = (overlay.a) * overlay.g + (1.0 - overlay.a) * base.g;" +
" mediump float ba = (overlay.a) * overlay.b + (1.0 - overlay.a) * base.b;" +
" gl_FragColor = vec4(mix(aColor.rgb, vec4(ra, ga, ba, 1.0).rgb , vec4(ra, ga, ba, 1.0).a), 1.0);" +
"}";
The alpha channel of vec4(ra, ga, ba, 1.0) is 1.0. Therefore the result of vec4(ra, ga, ba, 1.0).a is always 1.0.
You need to use the texture's alpha channels. e.g.: max(base.a, overlay.a):
vec3 textureColor = vec3(ra, ga, ba);
float textureAlpha = max(base.a, overlay.a);
gl_FragColor = vec4(mix(aColor.rgb, textureColor, textureAlpha), 1.0);
Simplify the code by mixing the texture colors with the mix function:
void main() {
vec4 base = texture2D(u_Texture0, v_TexCoordinate0);
vec4 overlay = texture2D(u_Texture1, v_TexCoordinate1);
vec3 textureColor = mix(base.rgb, overlay.rgb, overlay.a);
float textureAlpha = max(base.a, overlay.a);
gl_FragColor = vec4(mix(aColor.rgb, textureColor, textureAlpha), 1.0);
}
I have a ByteBuffer with an RGB frame from a video, so it has size 1280x720x3.
I want to render it using OpenGL. Here's my fragment shader:
public static final String videoFragment =
"#version 320 es\n"+
"precision mediump float;" +
"uniform float alpha;\n" +
"uniform sampler2D sampler;\n"+
"\n" +
"in vec2 TexCoord;\n" +
"out vec4 FragColor;\n" +
"void main()\n" +
"{\n" +
" vec4 rgba;\n" +
" rgba.rgb = texture(sampler, TexCoord).rgb;\n" +
" rgba.a = 1.0f;\n" +
" //FragColor = vec4(1.0f,0,0,1.0f);\n" +
" FragColor = rgba;\n" +
"}\n";
How should I fill it using glTexImage2D? I want to put my RGB data into it. I tried many combinations of GLES20.GL_RGB and the other parameters, but they either don't work or I simply get a black-and-white image:
GLES20.glTexImage2D(
GLES20.GL_TEXTURE_2D,
0,
GLES20.?,
1280,
720,
0,
GLES20.?,
GLES20.?,
buffer);
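For reference, with a tightly packed 8-bit RGB buffer the usual combination is GL_RGB for both the internal format and the data format, with GL_UNSIGNED_BYTE as the type. A sketch under the assumption that the ByteBuffer really holds 3 bytes per pixel and that its position has been rewound:
// Rows of a 1280-wide RGB image are 3840 bytes, already a multiple of 4,
// but an unpack alignment of 1 is a safe default for arbitrary widths.
GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1);
buffer.position(0);
GLES20.glTexImage2D(
        GLES20.GL_TEXTURE_2D,
        0,                        // mipmap level
        GLES20.GL_RGB,            // internal format
        1280,
        720,
        0,                        // border, must be 0
        GLES20.GL_RGB,            // format of the data in the buffer
        GLES20.GL_UNSIGNED_BYTE,  // one byte per channel
        buffer);
Since the fragment shader targets ES 3.2, the sized internal format GLES30.GL_RGB8 would also be valid here.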
I need a shader that creates a diffuse light that is affected only by the angle between the normal and the light source. My current shader, which is affected by both distance and angle, looks like this:
private final String vertexShaderCode2 =
"uniform mat4 uMVPMatrix;" +
"uniform mat4 uMVMatrix;" +
"uniform vec3 uLightPos;" +
"attribute vec4 vPosition;" +
"attribute vec4 aColor;" +
"attribute vec3 aNormal;" +
"varying vec4 vColor;" +
"void main() {" +
" vec3 modelViewVertex = vec3(uMVMatrix * vPosition); " +
" vec3 modelViewNormal = vec3(uMVMatrix * vec4(aNormal, 0.0));" +
" float distance = length(uLightPos - modelViewVertex);" +
" vec3 lightVector = normalize(uLightPos - modelViewVertex);" +
" float diffuse = max(dot(modelViewNormal, lightVector), 0.1);" +
" diffuse = diffuse * (1.0 / (1.0 + (0.25 * distance * distance)));" +
" vColor = aColor * diffuse;" +
" gl_Position = uMVPMatrix * vPosition;" +
"}";
private final String fragmentShaderCode =
"precision mediump float;" +
"varying vec4 vColor;" +
"void main() {" +
" gl_FragColor = vColor;" +
"}";
Here is code for populating the shaders:
// Calculate position of the light.
Matrix.setIdentityM(mLightModelMatrix, 0);
Matrix.rotateM(mLightModelMatrix, 0, LightAngleInDegrees, 0.0f, 1.0f, 1.0f);
Matrix.translateM(mLightModelMatrix, 0, 0.0f, 100.0f, -10.0f);
Matrix.multiplyMV(mLightPosInWorldSpace, 0, mLightModelMatrix, 0, mLightPosInModelSpace, 0);
Matrix.multiplyMV(mLightPosInEyeSpace, 0, mViewMatrix, 0, mLightPosInWorldSpace, 0);
// Player
Matrix.setIdentityM(mModelMatrix, 0);
Matrix.translateM(mModelMatrix, 0, py, px, pz);
Matrix.rotateM(mModelMatrix, 0, mAngle, 0, 0, 1.0f);
Matrix.scaleM(mModelMatrix, 0, scalef, scalef, scalef * 2);
Matrix.multiplyMM(mMVMatrix, 0, mViewMatrix, 0, mModelMatrix, 0);
Matrix.multiplyMM(mMVPMatrix, 0, mProjectionMatrix, 0, mMVMatrix, 0);
cube.draw(mMVPMatrix, mMVMatrix, mLightPosInWorldSpace);
And finally, the cube.draw method:
public void draw(float[] mvpMatrix, float[] mvMatrix, float[] mLightPosInWorldSpace) {
...
// Pass in the light position in eye space.
GLES20.glUniform3f(LightPosHandle, mLightPosInWorldSpace[0], mLightPosInWorldSpace[1], mLightPosInWorldSpace[2]);
...
}
If I leave the following line out of the Shader:
diffuse = diffuse * (1.0 / (1.0 + (0.25 * distance * distance)));
Then distance is no longer accounted for, but a side effect is that rotating the cube no longer affects the color of the cube sides, as if the light source were following the cube around. LightAngleInDegrees (light) and mAngle (cube) are two separate variables.
Your lighting calculation is done in eye space: the shader transforms both the vertex and the normal by uMVMatrix before computing the light vector. The light position you upload therefore has to be in eye space as well, which means you need to pass in mLightPosInEyeSpace, not mLightPosInWorldSpace.
public void draw(float[] mvpMatrix, float[] mvMatrix, float[] mLightPosInEyeSpace) {
...
// Pass in the light position in eye space.
GLES20.glUniform3f(LightPosHandle, mLightPosInEyeSpace[0], mLightPosInEyeSpace[1], mLightPosInEyeSpace[2]);
...
}
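The call site then passes along the eye-space position that the renderer already computes:
cube.draw(mMVPMatrix, mMVMatrix, mLightPosInEyeSpace);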
Good thing you named your variables so clearly, otherwise I would not have spotted that.
Hello, I would like to know if there is a way to color a triangle of a mesh without a color VertexAttribute, keeping the colors in a separate array instead.
EDIT:
What I want is for the vertices to carry only POSITION and no COLOR. The color of each triangle should come from a separate array that holds the colors.
I know how to send a uniform to the shader, but the render method renders the whole mesh at once, not each triangle.
public class TestBench implements ApplicationListener {
public static final String VERT_SHADER =
"attribute vec2 a_position;\n" +
"attribute vec4 a_color;\n" +
"uniform mat4 u_projTrans;\n" +
"varying vec4 vColor;\n" +
"void main() {\n" +
" vColor = a_color;\n" +
" gl_Position = u_projTrans * vec4(a_position.xy, 0.0, 1.0);\n" +
"}";
public static final String FRAG_SHADER =
"#ifdef GL_ES\n" +
"precision mediump float;\n" +
"#endif\n" +
"uniform vec4 aTest;\n" +
"varying vec4 vColor;\n" +
"void main() {\n" +
" gl_FragColor = vColor;\n" +
"}";
public void create() {
mesh = new Mesh(true, MAX_VERTS, MAX_INDICES,
new VertexAttribute(VertexAttributes.Usage.Position, POSITION_COMPONENTS, "a_position"),
new VertexAttribute(VertexAttributes.Usage.ColorUnpacked, COLOR_COMPONENTS, "a_color"));
}
public void render() {
Gdx.gl.glClear(GL20.GL_COLOR_BUFFER_BIT);
Gdx.gl.glClearColor(0, 0, 0, 1);
flush();
}
void flush() {
mesh.setVertices(vertices);
mesh.setIndices(indices);
Gdx.gl.glDepthMask(false);
Gdx.gl.glEnable(GL20.GL_BLEND);
Gdx.gl.glBlendFunc(GL20.GL_SRC_ALPHA, GL20.GL_ONE_MINUS_SRC_ALPHA);
shader.begin();
shader.setUniformMatrix("u_projTrans", camera.combined);
mesh.render(shader, GL20.GL_TRIANGLES, 0, vertices.length);
shader.end();
Gdx.gl.glDepthMask(true);
}
}
If you give a more complete example of your code I'll try to show you how to do it using a uniform in the shaders.
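In outline, the idea would be to replace the per-vertex color with a color uniform in the fragment shader and draw the mesh three indices at a time, updating the uniform before each triangle. A rough sketch under those assumptions (the u_triColor uniform and the triangleColors array are made-up names, and it presumes the indices are grouped three per triangle):
// Fragment shader side, instead of varying vColor:
//   uniform vec4 u_triColor;
//   void main() { gl_FragColor = u_triColor; }
// The Mesh would then be built with only the a_position attribute.
void flushPerTriangle(float[][] triangleColors) {
    mesh.setVertices(vertices);
    mesh.setIndices(indices);
    shader.begin();
    shader.setUniformMatrix("u_projTrans", camera.combined);
    // One small draw call per triangle so each can get its own color.
    for (int t = 0; t < indices.length / 3; t++) {
        float[] c = triangleColors[t];
        shader.setUniformf("u_triColor", c[0], c[1], c[2], c[3]);
        mesh.render(shader, GL20.GL_TRIANGLES, t * 3, 3);
    }
    shader.end();
}
This is fine for a small mesh, but one draw call per triangle gets slow quickly; for larger meshes, per-vertex colors or a texture lookup scale much better.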
John
I am using a shader I found in another Stack Overflow question to render my screen in grayscale:
import com.badlogic.gdx.graphics.glutils.ShaderProgram;
public class GrayscaleShader {
static String vertexShader = "attribute vec4 a_position;\n" +
"attribute vec4 a_color;\n" +
"attribute vec2 a_texCoord0;\n" +
"\n" +
"uniform mat4 u_projTrans;\n" +
"\n" +
"varying vec4 v_color;\n" +
"varying vec2 v_texCoords;\n" +
"\n" +
"void main() {\n" +
" v_color = a_color;\n" +
" v_texCoords = a_texCoord0;\n" +
" gl_Position = u_projTrans * a_position;\n" +
"}";
static String fragmentShader = "#ifdef GL_ES\n" +
" precision mediump float;\n" +
"#endif\n" +
"\n" +
"varying vec4 v_color;\n" +
"varying vec2 v_texCoords;\n" +
"uniform sampler2D u_texture;\n" +
"\n" +
"void main() {\n" +
" vec4 c = v_color * texture2D(u_texture, v_texCoords);\n" +
" float grey = (c.r + c.g + c.b) / 3.0;\n" +
" gl_FragColor = vec4(grey, grey, grey, c.a);\n" +
"}";
public static ShaderProgram grayscaleShader = new ShaderProgram(vertexShader,
fragmentShader);
}
I would like to be able to fade from full color to grayscale. I imagine I should be able to do this by tweening the r, g, and b values that end up in gl_FragColor (perhaps by creating a new ShaderProgram in each setter method used in the tweening process and plugging in the new color value each time). I understand why (c.r + c.g + c.b) / 3.0 is gray, but what can I use to interpolate the values from the original color to that point? I hope this makes sense!
You can put a uniform float in your fragment shader that blends between the original color and gray like this:
static final String FRAG = "#ifdef GL_ES\n" +
" precision mediump float;\n" +
"#endif\n" +
"\n" +
"varying vec4 v_color;\n" +
"varying vec2 v_texCoords;\n" +
"uniform sampler2D u_texture;\n" +
"uniform float u_grayness;\n" +
"\n" +
"void main() {\n" +
" vec4 c = v_color * texture2D(u_texture, v_texCoords);\n" +
" float grey = (c.r + c.g + c.b) / 3.0;\n" +
" vec3 blendedColor = mix(c.rgb, vec3(grey), u_grayness);\n" +
" gl_FragColor = vec4(blendedColor.rgb, c.a);\n" +
"}";
You can keep a float that tracks the "grayness" from 0 to 1 and apply it like this:
batch.setShader(grayscaleShader);
batch.begin();
grayscaleShader.setUniformf("u_grayness", grayness);
//draw stuff
batch.end();
batch.setShader(null);
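If the fade should happen over time rather than instantly, the grayness value can simply be advanced a little each frame before drawing. A small sketch (fadingToGray and fadeDuration are assumed fields, not part of the original code):
// In render(), before the batch code above:
float delta = Gdx.graphics.getDeltaTime();
if (fadingToGray) {
    grayness = Math.min(1f, grayness + delta / fadeDuration);  // toward gray
} else {
    grayness = Math.max(0f, grayness - delta / fadeDuration);  // back to color
}
// ...then pass grayness to u_grayness exactly as shown above.
This keeps all the tweening on the Java side; the shader itself never has to be rebuilt.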
By the way, you might have better results by using a proper luminance scale for calculating gray, instead of just summing and dividing by 3. This is because red, green, and blue have different relative luminance as perceived by the eye. Example:
float grey = dot( c.rgb, vec3(0.22, 0.707, 0.071) );