Java: OpenGL ES 2.0 Shader Light

I need a shader that creates diffuse light affected only by the angle between the normal and the light source. My current shader, which is affected by both distance and angle, looks like this:
private final String vertexShaderCode2 =
"uniform mat4 uMVPMatrix;" +
"uniform mat4 uMVMatrix;" +
"uniform vec3 uLightPos;" +
"attribute vec4 vPosition;" +
"attribute vec4 aColor;" +
"attribute vec3 aNormal;" +
"varying vec4 vColor;" +
"void main() {" +
" vec3 modelViewVertex = vec3(uMVMatrix * vPosition); " +
" vec3 modelViewNormal = vec3(uMVMatrix * vec4(aNormal, 0.0));" +
" float distance = length(uLightPos - modelViewVertex);" +
" vec3 lightVector = normalize(uLightPos - modelViewVertex);" +
" float diffuse = max(dot(modelViewNormal, lightVector), 0.1);" +
" diffuse = diffuse * (1.0 / (1.0 + (0.25 * distance * distance)));" +
" vColor = aColor * diffuse;" +
" gl_Position = uMVPMatrix * vPosition;" +
"}";
private final String fragmentShaderCode =
"precision mediump float;" +
"varying vec4 vColor;" +
"void main() {" +
" gl_FragColor = vColor;" +
"}";
Here is code for populating the shaders:
// Calculate position of the light.
Matrix.setIdentityM(mLightModelMatrix, 0);
Matrix.rotateM(mLightModelMatrix, 0, LightAngleInDegrees, 0.0f, 1.0f, 1.0f);
Matrix.translateM(mLightModelMatrix, 0, 0.0f, 100.0f, -10.0f);
Matrix.multiplyMV(mLightPosInWorldSpace, 0, mLightModelMatrix, 0, mLightPosInModelSpace, 0);
Matrix.multiplyMV(mLightPosInEyeSpace, 0, mViewMatrix, 0, mLightPosInWorldSpace, 0);
// Player
Matrix.setIdentityM(mModelMatrix, 0);
Matrix.translateM(mModelMatrix, 0, py, px, pz);
Matrix.rotateM(mModelMatrix, 0, mAngle, 0, 0, 1.0f);
Matrix.scaleM(mModelMatrix, 0, scalef, scalef, scalef * 2);
Matrix.multiplyMM(mMVMatrix, 0, mViewMatrix, 0, mModelMatrix, 0);
Matrix.multiplyMM(mMVPMatrix, 0, mProjectionMatrix, 0, mMVMatrix, 0);
cube.draw(mMVPMatrix, mMVMatrix, mLightPosInWorldSpace);
And finally, the cube.draw method:
public void draw(float[] mvpMatrix, float[] mvMatrix, float[] mLightPosInWorldSpace) {
...
// Pass in the light position in eye space.
GLES20.glUniform3f(LightPosHandle, mLightPosInWorldSpace[0], mLightPosInWorldSpace[1], mLightPosInWorldSpace[2]);
...
}
If I leave the following line out of the Shader:
diffuse = diffuse * (1.0 / (1.0 + (0.25 * distance * distance)));
Then distance is no longer accounted for, but a side effect is that rotating the cube no longer affects the color of the cube sides, as if the light source were following the cube around. LightAngleInDegrees (light) and mAngle (cube) are two separate variables.

The diffuse calculation in your vertex shader happens in eye space: both the vertex and the normal are transformed by uMVMatrix, so uLightPos has to be an eye-space position as well. You already compute mLightPosInEyeSpace in your renderer, so pass that in instead of mLightPosInWorldSpace:
public void draw(float[] mvpMatrix, float[] mvMatrix, float[] mLightPosInEyeSpace) {
...
// Pass in the light position in eye space.
GLES20.glUniform3f(LightPosHandle, mLightPosInEyeSpace[0], mLightPosInEyeSpace[1], mLightPosInEyeSpace[2]);
...
}
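At the call site this only changes which vector the renderer hands to the cube; a minimal sketch reusing the lines already present in the question:
// The eye-space light position is already computed in the renderer:
Matrix.multiplyMV(mLightPosInEyeSpace, 0, mViewMatrix, 0, mLightPosInWorldSpace, 0);
// ...so pass it on to the cube instead of the world-space vector:
cube.draw(mMVPMatrix, mMVMatrix, mLightPosInEyeSpace);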
Good thing you named your variables so clearly, otherwise I would not have spotted that.

Related

How to display the background color of a cube and also the colors of two textures in OpenGL

I'm trying to add two textures to a 3D cube. I achieved my goal, but along the way I lost the background color.
I want to show the original colors of the images and also the background color. I use a mix, but the background comes out completely dark.
This is what my fragmentShaderCode looks like:
private final String fragmentShaderCode =
"precision mediump float;" +
"uniform sampler2D u_Texture0;" +
"uniform sampler2D u_Texture1;" +
"uniform vec4 aColor;" +
"varying vec2 v_TexCoordinate0;" +
"varying vec2 v_TexCoordinate1;" +
"void main() {" +
" vec4 base = texture2D(u_Texture0, v_TexCoordinate0);" +
" vec4 overlay = texture2D(u_Texture1, v_TexCoordinate1);" +
" mediump float ra = (overlay.a) * overlay.r + (1.0 - overlay.a) * base.r;" +
" mediump float ga = (overlay.a) * overlay.g + (1.0 - overlay.a) * base.g;" +
" mediump float ba = (overlay.a) * overlay.b + (1.0 - overlay.a) * base.b;" +
" gl_FragColor = vec4(mix(aColor.rgb, vec4(ra, ga, ba, 1.0).rgb , vec4(ra, ga, ba, 1.0).a), 1.0);" +
"}";
The alpha channel of vec4(ra, ga, ba, 1.0) is 1.0. Therefore the result of vec4(ra, ga, ba, 1.0).a is always 1.0.
You need to use the texture's alpha channels. e.g.: max(base.a, overlay.a):
vec3 textureColor = vec3(ra, ga, ba);
float textureAlpha = max(base.a, overlay.a);
gl_FragColor = vec4(mix(aColor.rgb, textureColor, textureAlpha), 1.0);
Simplify the code by mixing the texture colors with the mix function:
void main() {
vec4 base = texture2D(u_Texture0, v_TexCoordinate0);
vec4 overlay = texture2D(u_Texture1, v_TexCoordinate1);
vec3 textureColor = mix(base.rgb, overlay.rgb, overlay.a);
float textureAlpha = max(base.a, overlay.a);
gl_FragColor = vec4(mix(aColor.rgb, textureColor, textureAlpha), 1.0);
}
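For the background color to reach the shader at all, the aColor uniform also has to be set from the Java side; a rough sketch (the handle and program variable names here are assumptions, not taken from the question):
// Look up the uniform declared as "uniform vec4 aColor" in the fragment shader.
int colorHandle = GLES20.glGetUniformLocation(mProgram, "aColor");
// Feed it the desired background color, e.g. an opaque light grey.
GLES20.glUniform4f(colorHandle, 0.8f, 0.8f, 0.8f, 1.0f);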

Color Grading using LUT and PShader

I am trying to implement a color grading shader using a LUT in Processing, as described here by Matt DesLauriers and here by Lev Zelensky, but I get a strange result when I apply the shader with a standard LUT:
[image: LUT shader output (left) vs. expected result (right)]
On the left you can see the result I get when applying the LUT shader, vs. the desired result on the right.
Here is my implementation in Processing:
PImage source;
PShader PP_LUT;
PGraphics buffer;
PGraphics lut;
PImage lutsrc;
void setup() {
size(512, 512, P2D);
source = loadImage("test.png");
lutsrc = loadImage("_LUT/lookup.png");
lut = createGraphics(lutsrc.width, lutsrc.height, P2D);
((PGraphicsOpenGL)lut).textureSampling(2);
lut.beginDraw();
lut.image(lutsrc, 0, 0);
lut.endDraw();
buffer = createGraphics(source.width, source.height, P3D);
PP_LUT = loadShader("PP_LUT.glsl");
PP_LUT.set("resolution", (float) buffer.width, (float) buffer.height);
PP_LUT.set("lut", lut);
}
void draw() {
buffer.beginDraw();
buffer.background(0);
buffer.shader(PP_LUT);
buffer.image(source, 0, 0);
buffer.endDraw();
image(buffer, 0, 0, width, height);
image(lut, 0, 0, width * 0.25, height * 0.25);
}
and the shader part :
#version 150
#ifdef GL_ES
#endif
uniform sampler2D texture;
uniform sampler2D lut;
in vec4 vertTexCoord;
out vec4 fragColor;
//https://github.com/mattdesl/glsl-lut
vec4 lookup(vec4 color_, sampler2D lut_){
color_ = clamp(color_, vec4(0), vec4(1));
//define blue
mediump float blue = color_.b * 63.0;
//define quad 1
mediump vec2 quaduv1;
quaduv1.y = floor(floor(blue) / 8.0); //divide blue by the number of columns in the LUT
quaduv1.x = floor(blue) - (quaduv1.y * 8.0);
//define quad 2
mediump vec2 quaduv2;
quaduv2.y = floor(ceil(blue) / 8.0); //divide blue by the number of columns in the LUT
quaduv2.x = ceil(blue) - (quaduv2.y * 8.0);
//define colorUV 1
highp vec2 coloruv1;
coloruv1.x = (quaduv1.x * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * color_.r);
coloruv1.y = (quaduv1.y * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * color_.g);
//define colorUV 2
highp vec2 coloruv2;
coloruv2.x = (quaduv2.x * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * color_.r);
coloruv2.y = (quaduv2.y * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * color_.g);
//PROCESSING NEED TO FLIP y uv
//coloruv1.y = 1.0 - coloruv1.y;
//coloruv2.y = 1.0 - coloruv2.y;
//define new color 1 & 2
lowp vec4 ncolor1 = texture2D(lut_, coloruv1);
lowp vec4 ncolor2 = texture2D(lut_, coloruv2);
//return looked up color
lowp vec4 lookedcolor = mix(ncolor1, ncolor2, fract(blue));
return vec4(lookedcolor.rgb, color_.w);
}
void main()
{
vec2 uv = vertTexCoord.xy;
vec4 color = texture2D(texture, uv);
vec4 lutColor = lookup(color, lut);
fragColor = lutColor;
}
As I understand it, the problem seems to be with texture filtering, so I tried writing my LUT into an offscreen buffer and setting its texture filtering mode to nearest, as described on the Processing wiki, but the result is much the same.
I don't know what I am missing here. Does anyone have an idea?
Thanks

OpenGL ES 2.0 Texture issue

I am new to OpenGL. I am trying to apply a texture to a 3D figure, but I am getting a glGetUniformLocation: glError 1282 error. I searched online but couldn't fix it; I would appreciate help, and an explanation of what is going wrong.
My shaders:
private final String vertexShaderCode =
"uniform mat4 uMVPMatrix;" +
"attribute vec4 vPosition;" +
"attribute vec2 texCoord;" +
"varying vec2 texCoordOut;" +
"void main() {" +
" gl_Position = uMVPMatrix * vPosition;" +
" texCoordOut = texCoord;" +
"}";
private final String fragmentShaderCode =
"precision mediump float;" +
"uniform vec4 texColor"+
"varying vec2 texCoordOut;" +
"uniform sampler2D Texture;" +
"void main() {" +
" texColor = texture2D(Texture, texCoordOut);" +
" gl_FragColor = texColor;" +
"}";
Draw method:
public void draw1(float[] mvpMatrix) {
// Add program to OpenGL environment
GLES20.glUseProgram(mProgram);
// get handle to vertex shader's vPosition member
mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
mMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
MyGLRenderer.checkGlError("glGetUniformLocation");
// Apply the projection and view transformation
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mvpMatrix, 0);
MyGLRenderer.checkGlError("glUniformMatrix4fv");
vsTextureCoord = GLES20.glGetAttribLocation(mProgram, "texCoord");
fsTexture = GLES20.glGetUniformLocation(mProgram, "Texture");
// Enable a handle to the triangle vertices
GLES20.glEnableVertexAttribArray(mPositionHandle);
// Prepare the triangle coordinate data
GLES20.glVertexAttribPointer(
mPositionHandle, COORDS_PER_VERTEX,
GLES20.GL_FLOAT, false,
vertexStride, vertexBuffer);
GLES20.glEnableVertexAttribArray(vsTextureCoord);
GLES20.glVertexAttribPointer(vsTextureCoord, COORDS_PER_TEXTURE,
GLES20.GL_FLOAT, false,
TextureStride, texBuffer);
// get handle to shape's transformation matrix
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]);
GLES20.glUniform1i(fsTexture, 0);
// Draw the triangle
GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, tablelamp21NumVerts);
// Disable vertex array
GLES20.glDisableVertexAttribArray(mPositionHandle);
GLES20.glDisableVertexAttribArray(vsTextureCoord);
}
LoadTexture:
public void loadGLTexture(Context context) {
GLES20.glGenTextures(4, textures, 0);
Bitmap bitmap = null;
for (int i=0;i<4;i++)
{
// Create a bitmap
bitmap = BitmapFactory.decodeResource(context.getResources(), resourceIds[i]);
//...and bind it to our array
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[i]);
//Create Nearest Filtered Texture
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
//Use the Android GLUtils to specify a two-dimensional texture image from our bitmap
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0);
//Clean up
bitmap = null;
}
}
You shouldn't even be able to compile the shaders in the first place.
In the fragment shader, the declaration of texColor is missing its terminating semicolon, and texColor is declared as a uniform but then assigned to inside main(), which is not allowed. Because the fragment shader fails to compile, the program never links, and the later glGetUniformLocation call fails with GL_INVALID_OPERATION (glError 1282).
The vertex shader is fine as it is. In the fragment shader, make texColor a local variable (or drop it entirely):
private final String fragmentShaderCode =
"precision mediump float;" +
"varying vec2 texCoordOut;" +
"uniform sampler2D Texture;" +
"void main() {" +
" vec4 texColor = texture2D(Texture, texCoordOut);" +
" gl_FragColor = texColor;" +
"}";
Since the sampler is still called Texture, the existing glGetUniformLocation(mProgram, "Texture") lookup in draw1 already matches. It is also worth checking the compile and link status when you build the program, so errors like this are reported immediately instead of surfacing later as glError 1282.
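A minimal sketch of such a check, assuming a loadShader helper in the style of the standard Android OpenGL ES examples:
public static int loadShader(int type, String shaderCode) {
    int shader = GLES20.glCreateShader(type);
    GLES20.glShaderSource(shader, shaderCode);
    GLES20.glCompileShader(shader);
    // Query the compile status and log the shader info log on failure.
    int[] compiled = new int[1];
    GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
    if (compiled[0] == 0) {
        Log.e("Shader", GLES20.glGetShaderInfoLog(shader));
        GLES20.glDeleteShader(shader);
        return 0;
    }
    return shader;
}
The same pattern applies after glLinkProgram, using glGetProgramiv with GL_LINK_STATUS and glGetProgramInfoLog.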

Shaders in libgdx have no effect [Desktop]

This is mostly a general question, since I can't get any shader to work at all. The usual sprites and textures render just fine; the shaders simply have no visible effect. I'm not getting any error messages from the shader log either. As far as I understand, for a filter of the type below, one only needs to set the shader on the batch, like batch.setShader(shader), and set any uniforms, and the batch will take care of the rest. If I am wrong, please tell me where.
Fragment shader, supposed to blur
//"in" attributes from our vertex shader
varying vec2 v_texCoord0;
//declare uniforms
uniform sampler2D uImage0;
uniform vec2 uResolution;
uniform float radius;
uniform float dirx;
uniform float diry;
void main()
{
//this will be our RGBA sum
vec4 sum = vec4(0.0);
//our original texcoord for this fragment
vec2 tc = v_texCoord0;
//the amount to blur, i.e. how far off center to sample from
//1.0 -> blur by one pixel
//2.0 -> blur by two pixels, etc.
float blur = radius / uResolution.x;
//the direction of our blur
//(1.0, 0.0) -> x-axis blur
//(0.0, 1.0) -> y-axis blur
float hstep = dirx;
float vstep = diry;
//apply blurring, using a 9-tap filter with predefined gaussian weights
sum += texture2D(uImage0, vec2(tc.x - 4.0*blur*hstep, tc.y - 4.0*blur*vstep)) * 0.0162162162;
sum += texture2D(uImage0, vec2(tc.x - 3.0*blur*hstep, tc.y - 3.0*blur*vstep)) * 0.0540540541;
sum += texture2D(uImage0, vec2(tc.x - 2.0*blur*hstep, tc.y - 2.0*blur*vstep)) * 0.1216216216;
sum += texture2D(uImage0, vec2(tc.x - 1.0*blur*hstep, tc.y - 1.0*blur*vstep)) * 0.1945945946;
sum += texture2D(uImage0, vec2(tc.x, tc.y)) * 0.2270270270;
sum += texture2D(uImage0, vec2(tc.x + 1.0*blur*hstep, tc.y + 1.0*blur*vstep)) * 0.1945945946;
sum += texture2D(uImage0, vec2(tc.x + 2.0*blur*hstep, tc.y + 2.0*blur*vstep)) * 0.1216216216;
sum += texture2D(uImage0, vec2(tc.x + 3.0*blur*hstep, tc.y + 3.0*blur*vstep)) * 0.0540540541;
sum += texture2D(uImage0, vec2(tc.x + 4.0*blur*hstep, tc.y + 4.0*blur*vstep)) * 0.0162162162;
//discard alpha for our simple demo, multiply by vertex color and return
gl_FragColor = vec4(sum.rgb, 1.0);
}
Vertex shader
attribute vec4 a_color;
attribute vec2 a_texCoord0;
attribute vec3 a_position;
uniform mat4 u_projTrans;
varying vec4 v_color;
varying vec2 v_texCoord0;
void main(){
v_color = a_color;
v_texCoord0 = a_texCoord0;
gl_Position = u_projTrans * vec4(a_position,1.0) ;
}
Setting up the shader. Tried different values here
public void setupShader(){
ShaderProgram.pedantic=true;
shader = new ShaderProgram(Gdx.files.internal("shaders/pass.vert"),Gdx.files.internal("shaders/scanlines.frag"));
shader.begin();
shader.setUniformf("radius", 5f);
shader.setUniformf("dirx", 5f);
shader.setUniformf("diry", 5f);
shader.end();
if(shader.isCompiled())
batch.setShader(shader);
else
Settings.log(shader.getLog());
}
The render method. I've not put anything concerning shaders here.
#Override
public void render(float delta) {
Settings.clearScreen(); //usual clear screen calls from here
batch.setProjectionMatrix(cam.combined);
cam.update();
detectClicks();
checkBallScreenEdges();
batch.begin();
draw(delta);
batch.end();
}
Thanks to Tenfour04 I got a nice scanlines shader to work (tested with a different shader than the one above):
#Override
public void render(float delta) {
Settings.clearScreen();
batch.setProjectionMatrix(cam.combined);
cam.update();
batch.setShader(SpriteBatch.createDefaultShader());
main.buffer.begin();
batch.begin();
draw(delta);
batch.end();
main.buffer.end();
//POST PROCESSING
Texture bufferedTexture = main.buffer.getColorBufferTexture();
batch.setShader(main.shader);
batch.begin();
batch.draw(bufferedTexture, 0, 0, Settings.WIDTH, Settings.HEIGHT, 0, 0, Settings.WIDTH, Settings.HEIGHT, false, true); //need to flip texture
batch.end();
}
and setting up the shader:
buffer = new FrameBuffer(Pixmap.Format.RGBA8888,Settings.WIDTH,Settings.HEIGHT,false);
ShaderProgram.pedantic=false;
shader = new ShaderProgram(Gdx.files.internal("shaders/pass.vert"),Gdx.files.internal("shaders/scanlines.frag"));
shader.begin();
shader.setUniformf("uResolution",(float)Settings.WIDTH,(float)Settings.HEIGHT);
shader.end();
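For the blur fragment shader shown at the top, the same setup pattern applies, except that all of its uniforms need values; in particular the original setupShader never set uResolution, which the shader divides by. A sketch along those lines (the file name blur.frag is an assumption):
shader = new ShaderProgram(Gdx.files.internal("shaders/pass.vert"), Gdx.files.internal("shaders/blur.frag"));
shader.begin();
shader.setUniformf("uResolution", (float) Settings.WIDTH, (float) Settings.HEIGHT);
shader.setUniformf("radius", 5f);
shader.setUniformf("dirx", 1f); // horizontal pass
shader.setUniformf("diry", 0f);
shader.end();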
I wish the libgdx wiki had more examples.

libGdx: Coloring a mesh

Hello, I would like to know if there is a way to color a triangle from a mesh without a color VertexAttribute, keeping the color in a separate array instead.
EDIT:
What I want is for the vertices to have only POSITION and no COLOR.
The color of each triangle should come from a separate array that holds the colors.
I know how to send a uniform to the shader, but the render method renders the whole mesh at once, not each triangle.
public class TestBench implements ApplicationListener {
public static final String VERT_SHADER =
"attribute vec2 a_position;\n" +
"attribute vec4 a_color;\n" +
"uniform mat4 u_projTrans;\n" +
"varying vec4 vColor;\n" +
"void main() {\n" +
" vColor = a_color;\n" +
" gl_Position = u_projTrans * vec4(a_position.xy, 0.0, 1.0);\n" +
"}";
public static final String FRAG_SHADER =
"#ifdef GL_ES\n" +
"precision mediump float;\n" +
"#endif\n" +
"uniform vec4 aTest;\n" +
"varying vec4 vColor;\n" +
"void main() {\n" +
" gl_FragColor = vColor;\n" +
"}";
public void create() {
mesh = new Mesh(true, MAX_VERTS, MAX_INDICES,
new VertexAttribute(VertexAttributes.Usage.Position, POSITION_COMPONENTS, "a_position"),
new VertexAttribute(VertexAttributes.Usage.ColorUnpacked, COLOR_COMPONENTS, "a_color"));
}
public void render() {
Gdx.gl.glClear(GL20.GL_COLOR_BUFFER_BIT);
Gdx.gl.glClearColor(0, 0, 0, 1);
flush();
}
void flush() {
mesh.setVertices(vertices);
mesh.setIndices(indices);
Gdx.gl.glDepthMask(false);
Gdx.gl.glEnable(GL20.GL_BLEND);
Gdx.gl.glBlendFunc(GL20.GL_SRC_ALPHA, GL20.GL_ONE_MINUS_SRC_ALPHA);
shader.begin();
shader.setUniformMatrix("u_projTrans", camera.combined);
mesh.render(shader, GL20.GL_TRIANGLES, 0, vertices.length);
shader.end();
Gdx.gl.glDepthMask(true);
}
}
If you give a more complete example of your code I'll try to show you how to do it using a uniform in the shaders.
John
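For reference, a rough, untested sketch of the per-triangle-uniform idea: it assumes the fragment shader is changed to write the aTest uniform to gl_FragColor instead of vColor, and that a float[] holds one RGBA color per triangle. The mesh is then rendered three indices at a time so the uniform can change between triangles:
void flushPerTriangle(float[] triangleColors) {
    mesh.setVertices(vertices);
    mesh.setIndices(indices);
    shader.begin();
    shader.setUniformMatrix("u_projTrans", camera.combined);
    // One draw call per triangle, each with its own color uniform.
    for (int t = 0; t < indices.length / 3; t++) {
        shader.setUniformf("aTest",
                triangleColors[t * 4], triangleColors[t * 4 + 1],
                triangleColors[t * 4 + 2], triangleColors[t * 4 + 3]);
        mesh.render(shader, GL20.GL_TRIANGLES, t * 3, 3);
    }
    shader.end();
}
Note that this issues one draw call per triangle, which gets slow for large meshes; for anything non-trivial the per-vertex color attribute from the original shader is usually the better option.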
