OpenGL ES 2.0: multiple light sources: shading problem

Problem description:

Update 3 (thank you very much for your help so far)

I removed what was suggested. Also, u_IT_MVMatrix seemed to be wrong (whatever it is for). Things look a bit better now, but the floor should glow, and the textured bricks should pick up light from the coloured light bricks (blue, red, etc.).

(screenshot of the current rendering)

Vertex shader for textured objects (fragment shader unchanged)

uniform mat4 u_MVPMatrix;  // A constant representing the combined model/view/projection matrix. 
uniform mat4 u_MVMatrix;  // A constant representing the combined model/view matrix. 

attribute vec4 a_Position;  // Per-vertex position information we will pass in. 
attribute vec3 a_Normal;  // Per-vertex normal information we will pass in. 
attribute vec2 a_TexCoordinate; // Per-vertex texture coordinate information we will pass in. 

varying vec3 v_Position;  // This will be passed into the fragment shader. 
varying vec3 v_Normal;   // This will be passed into the fragment shader. 
varying vec2 v_TexCoordinate; // This will be passed into the fragment shader. 

uniform vec4 u_PointLightPositions[3]; // In eye space 
uniform vec3 u_PointLightColors[3]; 
vec4 eyeSpacePosition; 
vec3 eyeSpaceNormal; 

uniform vec4 v_Color; 
varying vec3 lighting; 
vec3 materialColor; 

vec3 getAmbientLighting(); 
vec3 getDirectionalLighting(); 
vec3 getPointLighting(); 

// The entry point for our vertex shader. 
void main() 
{ 
    //materialColor = vec3(v_Color.xyz); // Will be modified by the texture later. 
    materialColor = vec3(1.0, 1.0, 1.0); 

    // Transform the vertex into eye space. 
    v_Position = vec3(u_MVMatrix * a_Position); 

    // Pass through the texture coordinate. 
    v_TexCoordinate = a_TexCoordinate; 

    // Transform the normal's orientation into eye space. 
    v_Normal = vec3(u_MVMatrix * vec4(a_Normal, 0.0)); 

    // gl_Position is a special variable used to store the final position. 
    // Multiply the vertex by the matrix to get the final point in normalized screen coordinates. 
    eyeSpacePosition = u_MVMatrix * a_Position; 

    // The model normals need to be adjusted as per the transpose of the inverse of the modelview matrix. 
    eyeSpaceNormal = normalize(vec3(u_MVMatrix * vec4(a_Normal, 0.0))); 
    gl_Position = u_MVPMatrix * a_Position; 

    lighting = getAmbientLighting(); 
    lighting += getPointLighting(); 
} 

vec3 getAmbientLighting() 
{ 
    return materialColor * 0.2; 
} 

vec3 getPointLighting() 
{ 
    vec3 lightingSum = vec3(0.0); 

    for (int i = 0; i < 3; i++) { 
     vec3 toPointLight = vec3(u_PointLightPositions[i]) - vec3(eyeSpacePosition); 
     float distance = length(toPointLight); 
     //distance = distance/5.0; 
     toPointLight = normalize(toPointLight); 

     float cosine = max(dot(eyeSpaceNormal, toPointLight), 0.0); 
     lightingSum += (materialColor * u_PointLightColors[i] * 20.0 * cosine) 
        /distance; 
    } 

    return lightingSum; 
} 

**Vertex for light bricks (no texture)**

uniform mat4 u_MVPMatrix;  // A constant representing the combined model/view/projection matrix. 
uniform mat4 u_MVMatrix;  // A constant representing the combined model/view matrix. 

attribute vec4 a_Position;  // Per-vertex position information we will pass in. 
attribute vec3 a_Normal;  // Per-vertex normal information we will pass in. 

varying vec3 v_Position;  // This will be passed into the fragment shader. 
varying vec3 v_Normal;   // This will be passed into the fragment shader. 

uniform vec4 u_PointLightPositions[3]; // In eye space 
uniform vec3 u_PointLightColors[3]; 
vec4 eyeSpacePosition; 
vec3 eyeSpaceNormal; 

uniform vec4 v_Color; 
varying vec3 lighting; 

vec3 getAmbientLighting(); 
vec3 getDirectionalLighting(); 
vec3 getPointLighting(); 

// The entry point for our vertex shader. 
void main() 
{ 
    // Transform the vertex into eye space. 
    v_Position = vec3(u_MVMatrix * a_Position); 

    // Transform the normal's orientation into eye space. 
    v_Normal = vec3(u_MVMatrix * vec4(a_Normal, 0.0)); 

    // gl_Position is a special variable used to store the final position. 
    // Multiply the vertex by the matrix to get the final point in normalized screen coordinates. 
    gl_Position = u_MVPMatrix * a_Position; 
    eyeSpacePosition = u_MVMatrix * a_Position; 

    // The model normals need to be adjusted as per the transpose of the inverse of the modelview matrix. 
    eyeSpaceNormal = normalize(vec3(u_MVMatrix * vec4(a_Normal, 0.0))); 

    lighting = getAmbientLighting(); 
    lighting += getPointLighting(); 
} 

vec3 getAmbientLighting() 
{ 
    return v_Color.xyz * 0.2; 
} 

vec3 getPointLighting() 
{ 
    vec3 lightingSum = vec3(0.0); 

    for (int i = 0; i < 3; i++) { 
     vec3 toPointLight = vec3(u_PointLightPositions[i]) - vec3(eyeSpacePosition); 
     float distance = length(toPointLight); 
     toPointLight = normalize(toPointLight); 

     float cosine = max(dot(eyeSpaceNormal, toPointLight), 0.0); 
     lightingSum += (v_Color.xyz * u_PointLightColors[i] * 20.0 * cosine) 
        /distance; 
    } 

    return lightingSum; 
} 

I have always struggled with using multiple light sources in shaders, but I found an example in my OpenGL ES 2.0 for Android quick-start book.

I thought I would give it a go. Sadly, no matter what I do, I seem to be the light source, so when I get close to an object it gets brighter. What I want is to make 3 different places (like street lights) the light sources.

I define my light positions and colors in the renderer:

// new lighting 
    public final float[] pointLightPositions = new float[] 
      {0f, 1f, 0f, 1f, 
       100f, 1f, 0f, 1f, 
       50f, 1f, 0f, 1f}; 

    public final float[] pointLightColors = new float[] 
      {1.00f, 0.20f, 0.20f, 
        0.02f, 0.25f, 0.02f, 
        0.02f, 0.20f, 1.00f}; 

Render:

uPointLightPositionsLocation = 
         glGetUniformLocation(mProgramHandle, "u_PointLightPositions"); 
    uPointLightColorsLocation = 
         glGetUniformLocation(mProgramHandle, "u_PointLightColors"); 

    glUniform4fv(uPointLightPositionsLocation, 3, mRenderer.pointLightPositions, 0); 
    glUniform3fv(uPointLightColorsLocation, 3, mRenderer.pointLightColors, 0); 

    // not sure why I need this 
    // lighting 
    final float[] pointPositionsInEyeSpace = new float[12]; 
    multiplyMV(pointPositionsInEyeSpace, 0, mVMatrix, 0, mRenderer.pointLightPositions, 0); 
    multiplyMV(pointPositionsInEyeSpace, 4, mVMatrix, 0, mRenderer.pointLightPositions, 4); 
    multiplyMV(pointPositionsInEyeSpace, 8, mVMatrix, 0, mRenderer.pointLightPositions, 8); 

Matrix.multiplyMM(mRenderer.mMVPMatrix, 0, mVMatrix, 0, mRenderer.mModelMatrix, 0); 
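
The comment on u_PointLightPositions says the positions are "In eye space", but the glUniform4fv call above uploads the raw world-space array, and the eye-space copy computed with multiplyMV is never used. Below is a minimal sketch (reusing the field and location names from the snippets above, which is an assumption about the surrounding renderer code) of transforming the positions with the view matrix and uploading the transformed values instead:

    // Sketch only: transform each world-space light position into eye space
    // with the view matrix, then upload the eye-space values, because the
    // shader uniform is documented as being in eye space.
    final float[] pointPositionsInEyeSpace = new float[12];
    multiplyMV(pointPositionsInEyeSpace, 0, mVMatrix, 0, mRenderer.pointLightPositions, 0);
    multiplyMV(pointPositionsInEyeSpace, 4, mVMatrix, 0, mRenderer.pointLightPositions, 4);
    multiplyMV(pointPositionsInEyeSpace, 8, mVMatrix, 0, mRenderer.pointLightPositions, 8);

    // Upload the eye-space positions (not the raw world-space array).
    glUniform4fv(uPointLightPositionsLocation, 3, pointPositionsInEyeSpace, 0);
    glUniform3fv(uPointLightColorsLocation, 3, mRenderer.pointLightColors, 0);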

Shader (vertex)

uniform mat4 u_MVPMatrix;  // A constant representing the combined model/view/projection matrix.     
uniform mat4 u_MVMatrix;  // A constant representing the combined model/view matrix.    

attribute vec4 a_Position;  // Per-vertex position information we will pass in.        
attribute vec3 a_Normal;  // Per-vertex normal information we will pass in.  
attribute vec2 a_TexCoordinate; // Per-vertex texture coordinate information we will pass in.  

varying vec3 v_Position;  // This will be passed into the fragment shader.        
varying vec3 v_Normal;   // This will be passed into the fragment shader. 
varying vec2 v_TexCoordinate; // This will be passed into the fragment shader. 

uniform vec4 u_PointLightPositions[3]; // In eye space 
uniform vec3 u_PointLightColors[3]; 

// The entry point for our vertex shader. 
void main()              
{   

    // Transform the vertex into eye space.  
    v_Position = vec3(u_MVMatrix * a_Position);     

    // Pass through the texture coordinate. 
    v_TexCoordinate = a_TexCoordinate;          

    // Transform the normal's orientation into eye space. 
    v_Normal = vec3(u_MVMatrix * vec4(a_Normal, 0.0)); 

    // gl_Position is a special variable used to store the final position. 
    // Multiply the vertex by the matrix to get the final point in normalized screen coordinates. 
    gl_Position = u_MVPMatrix * a_Position;        
} 

Fragment

precision mediump float;  // Set the default precision to medium. We don't need as high of a 
           // precision in the fragment shader. 
uniform vec3 u_LightPos;  // The position of the light in eye space. 
uniform sampler2D u_Texture; // The input texture. 

varying vec3 v_Position;  // Interpolated position for this fragment. 
varying vec3 v_Normal;   // Interpolated normal for this fragment. 
varying vec2 v_TexCoordinate; // Interpolated texture coordinate per fragment. 

uniform vec4 v_Color; 

uniform vec4 u_PointLightPositions[3]; // In eye space 
uniform vec3 u_PointLightColors[3]; 

vec3 getPointLighting(); 

// The entry point for our fragment shader. 
void main()       
{        
    // Will be used for attenuation. 
    float distance = length(u_LightPos - v_Position); 

    // Get a lighting direction vector from the light to the vertex. 
    vec3 lightVector = normalize(u_LightPos - v_Position); 

    // Calculate the dot product of the light vector and vertex normal. If the normal and light vector are 
    // pointing in the same direction then it will get max illumination. 
    float diffuse = max(dot(v_Normal, lightVector), 0.0);                     

    // Add attenuation. 
    diffuse = diffuse * (1.0/(1.0 + (0.25 * distance))); 

    // Add ambient lighting 
    diffuse = diffuse + 0.7; 

    // Multiply the color by the diffuse illumination level and texture value to get final output color. 
    //gl_FragColor = (diffuse * texture2D(u_Texture, v_TexCoordinate)); 
    gl_FragColor = diffuse * texture2D(u_Texture, v_TexCoordinate) ; 
    gl_FragColor *= (v_Color * vec4(getPointLighting(),v_Color.w)); 
    }                   

vec3 getPointLighting() 
    { 
     vec3 lightingSum = vec3(0.0); 

     for (int i = 0; i < 3; i++) { 
      vec3 toPointLight = vec3(u_PointLightPositions[i]) 
          - vec3(v_Position); 
      float distance = length(toPointLight); 
      toPointLight = normalize(toPointLight); 

      float cosine = max(dot(v_Normal, toPointLight), 0.0); 

      //lightingSum += vec3(0.0, 0.0, 1.0); 
      lightingSum += (vec3(v_Color.xyz) * u_PointLightColors[i] * 5.0 * cosine)/distance; 
     } 

     return lightingSum; 
    } 

I would be really happy if someone could help :)

Update 2

I have the lights in different colours, but they only glow when I get really close to them. I believe it is something to do with the u_IT_MVMatrix matrix.

Fragment

uniform vec3 u_LightPos;  // The position of the light in eye space. 
uniform sampler2D u_Texture; // The input texture. 

varying vec3 v_Position;  // Interpolated position for this fragment. 
varying vec3 v_Normal;   // Interpolated normal for this fragment. 
varying vec2 v_TexCoordinate; // Interpolated texture coordinate per fragment. 

uniform vec4 v_Color; 
varying vec3 lighting; 
// The entry point for our fragment shader. 
void main()       
{ 

    gl_FragColor = texture2D(u_Texture, v_TexCoordinate) ; 
    gl_FragColor *= vec4(lighting,1.0); 
} 

Vertex

uniform mat4 u_MVPMatrix;  // A constant representing the combined model/view/projection matrix. 
uniform mat4 u_MVMatrix;  // A constant representing the combined model/view matrix. 

attribute vec4 a_Position;  // Per-vertex position information we will pass in. 
attribute vec3 a_Normal;  // Per-vertex normal information we will pass in. 
attribute vec2 a_TexCoordinate; // Per-vertex texture coordinate information we will pass in. 

varying vec3 v_Position;  // This will be passed into the fragment shader. 
varying vec3 v_Normal;   // This will be passed into the fragment shader. 
varying vec2 v_TexCoordinate; // This will be passed into the fragment shader. 

uniform vec4 u_PointLightPositions[3]; // In eye space 
uniform vec3 u_PointLightColors[3]; 

uniform vec3 u_VectorToLight;    // In eye space 
uniform mat4 u_IT_MVMatrix; 
vec4 eyeSpacePosition; 
vec3 eyeSpaceNormal; 

uniform vec4 v_Color; 
varying vec3 lighting; 
vec3 materialColor; 


vec3 getAmbientLighting(); 
vec3 getDirectionalLighting(); 
vec3 getPointLighting(); 

// The entry point for our vertex shader. 
void main() 
{ 
    materialColor = vec3(1.0, 1.0, 1.0); // Will be modified by the texture later. 


    // Transform the vertex into eye space. 
    v_Position = vec3(u_MVMatrix * a_Position); 

    // Pass through the texture coordinate. 
    v_TexCoordinate = a_TexCoordinate; 

    // Transform the normal's orientation into eye space. 
    v_Normal = vec3(u_MVMatrix * vec4(a_Normal, 0.0)); 

    // gl_Position is a special variable used to store the final position. 
    // Multiply the vertex by the matrix to get the final point in normalized screen coordinates. 

    eyeSpacePosition = u_MVMatrix * a_Position; 

     // The model normals need to be adjusted as per the transpose 
     // of the inverse of the modelview matrix. 
    eyeSpaceNormal = normalize(vec3(u_IT_MVMatrix * vec4(a_Normal, 0.0))); 

    gl_Position = u_MVPMatrix * a_Position; 

    lighting = getAmbientLighting(); 
    lighting += getDirectionalLighting(); 
    lighting += getPointLighting(); 

} 

vec3 getAmbientLighting() 
{ 
    return materialColor * 0.2; 
} 

vec3 getDirectionalLighting() 
{ 
    return materialColor * max(dot(eyeSpaceNormal, u_VectorToLight), 0.0); 
} 

vec3 getPointLighting() 
{ 
    vec3 lightingSum = vec3(0.0); 

    for (int i = 0; i < 3; i++) { 
     vec3 toPointLight = vec3(u_PointLightPositions[i]) - vec3(eyeSpacePosition); 
     float distance = length(toPointLight); 
     toPointLight = normalize(toPointLight); 

     float cosine = max(dot(eyeSpaceNormal, toPointLight), 0.0); 
     lightingSum += (materialColor * u_PointLightColors[i] * 5.0 * cosine) 
        /distance; 
    } 

    return lightingSum; 
} 

So I believe it is something to do with how I set up my matrices:

//multiplyMM(mModelMatrix, 0, VMatrix, 0, mModelMatrix, 0); 
//invertM(tempMatrix, 0, mModelMatrix, 0); 
transposeM(it_modelViewMatrix, 0, VMatrix, 0); 
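
The vertex shader comment says the normals need the transpose of the inverse of the model-view matrix, but the line above only transposes the view matrix, and the commented-out lines never combine the two steps. A minimal sketch, assuming android.opengl.Matrix and the matrix names already used in this snippet (VMatrix, mModelMatrix, tempMatrix, it_modelViewMatrix):

    // Sketch only: build the model-view matrix first, then invert and
    // transpose it to get the normal matrix expected by u_IT_MVMatrix.
    float[] modelViewMatrix = new float[16];
    multiplyMM(modelViewMatrix, 0, VMatrix, 0, mModelMatrix, 0);
    invertM(tempMatrix, 0, modelViewMatrix, 0);
    transposeM(it_modelViewMatrix, 0, tempMatrix, 0);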
+1

Disable getDirectionalLighting() (your flashlight) for a while, and then increase the 5.0 on the lightingSum += line until you get it right. Go big! – mikkokoo 2014-10-09 19:01:05

+0

I think the lighting does not work on the floor because I scaled it? Any ideas? – Burf2000 2014-10-09 22:10:56

+1

It is not the scaling. If you have large triangles, the vertices may miss the light completely, and the interpolation will then show no light for any fragment in that triangle. For those (triangles) you need to do the lighting in the fragment shader. – mikkokoo 2014-10-10 06:18:13

In your code you have four lights; the fourth light is in u_LightPos.

I suggest you remove the diffuse variable (the fourth light) completely, and also all references to v_Color (since you also have a texture). Then you should start to see the lighting from only your three street lights.

PS: for performance, I would also move the lighting calculation to the vertex shader.

+0

Tried what I think you suggested, please help :) – Burf2000 2014-10-06 16:35:41

+1

Now you need to calculate the amount of colour (light) per vertex, just like you did in the fragment shader before. Then pass it to the fragment shader through a varying vec3 and multiply it with (for example) the texture. At the moment it only shows the texture. – mikkokoo 2014-10-07 06:54:10

+0

I am really happy, thanks again for your help! If you get bored, could you paste the shaders with your suggestions applied? – Burf2000 2014-10-07 19:01:43