WebGL: loop index cannot compare with non-constant expression

I have a webgl blur shader:

precision mediump float;
precision mediump int;

uniform sampler2D u_image;
uniform float blur;       
uniform int u_horizontalpass; // 0 or 1 to indicate vertical or horizontal pass
uniform float sigma;        // The sigma value for the gaussian function: higher value means more blur
                            // A good value for 9x9 is around 3 to 5
                            // A good value for 7x7 is around 2.5 to 4
                            // A good value for 5x5 is around 2 to 3.5
                            // ... play around with this based on what you need :)

varying vec4 v_texCoord;

const vec2 texOffset = vec2(1.0, 1.0);
// uniform vec2 texOffset;
const float PI = 3.14159265;

// Incremental Gaussian blur (GPU Gems 3, ch. 40): sample pairs are taken on
// each side of the center texel and weighted by coefficients that are
// updated incrementally instead of calling exp() once per sample.
void main() {  
  vec2 p = v_texCoord.st;
  // Number of sample pairs on each side of the center texel.
  // NOTE: derived from the `blur` uniform, so it is NOT a constant
  // expression — this is what the loop below trips over.
  float numBlurPixelsPerSide = blur / 2.0; 

  // Incremental Gaussian Coefficent Calculation (See GPU Gems 3 pp. 877 - 889)
  vec3 incrementalGaussian;
  incrementalGaussian.x = 1.0 / (sqrt(2.0 * PI) * sigma);
  incrementalGaussian.y = exp(-0.5 / (sigma * sigma));
  incrementalGaussian.z = incrementalGaussian.y * incrementalGaussian.y;

  vec4 avgValue = vec4(0.0, 0.0, 0.0, 0.0);
  float coefficientSum = 0.0;

  // Take the central sample first...
  avgValue += texture2D(u_image, p) * incrementalGaussian.x;
  coefficientSum += incrementalGaussian.x;
  incrementalGaussian.xy *= incrementalGaussian.yz;

  // Go through the remaining 8 vertical samples (4 on each side of the center)
  // ERROR is here: GLSL ES 1.0 (spec Appendix A) requires a loop index to be
  // compared against a constant expression; numBlurPixelsPerSide depends on
  // a uniform, hence "loop index cannot compare with non-constant expression".
  for (float i = 1.0; i <= numBlurPixelsPerSide; i += 1.0) { 
    avgValue += texture2D(u_image, p - i * texOffset) * incrementalGaussian.x;         
    avgValue += texture2D(u_image, p + i * texOffset) * incrementalGaussian.x;         
    coefficientSum += 2.0 * incrementalGaussian.x;
    incrementalGaussian.xy *= incrementalGaussian.yz;
  }

  // Normalize so the kernel weights sum to 1 regardless of radius.
  gl_FragColor = avgValue / coefficientSum;
}

When I compile the shader, I get the following error message:

webgl-renderer.js?2eb3:137 could not compile shader: ERROR: 0:38: 'i': loop index cannot compare with non-constant expression

I also tried comparing directly against the `uniform float blur` instead. Is there any way to fix this?

The problem is described in more detail here: https://www.khronos.org/webgl/public-mailing-list/archives/1012/msg00063.php

The workaround I found elsewhere is to compare the loop variable against a constant expression. That is not an option for me, since the number of iterations depends on the blur radius.

?

+4
4

Answer:

const float MAX_ITERATIONS = 100.0;

// Go through the remaining 8 vertical samples (4 on each side of the center).
// The loop condition now compares the index against a compile-time constant,
// which GLSL ES 1.0 requires; the dynamic radius is enforced by breaking out
// of the loop early.
for (float i = 1.0; i <= MAX_ITERATIONS; i += 1.0) { 
    // Break only AFTER the last valid sample: the original loop ran while
    // i <= numBlurPixelsPerSide, so we stop once i exceeds it. (Breaking on
    // `i >= numBlurPixelsPerSide` would drop the outermost sample pair —
    // an off-by-one.)
    if (i > numBlurPixelsPerSide) { break; }
    avgValue += texture2D(u_image, p - i * texOffset) * incrementalGaussian.x;         
    avgValue += texture2D(u_image, p + i * texOffset) * incrementalGaussian.x;         
    coefficientSum += 2.0 * incrementalGaussian.x;
    incrementalGaussian.xy *= incrementalGaussian.yz;
}
+3

This is a restriction of GLSL as used in WebGL: the index of a `for` loop may only be compared against a constant expression. Since `numBlurPixelsPerSide` depends on a uniform, it cannot be made a `const float` or a `#define`. The trick is to loop up to a constant upper bound and break out early once the dynamic limit is reached.

Note that `break` and conditional `return` do work inside a constant-bounded `for` loop. Here is a Mandelbrot shader from GLSL Sandbox that uses exactly this technique:

precision mediump float;
uniform float time;
uniform vec2 mouse;
uniform vec2 resolution;
varying vec2 surfacePosition;

const float max_its = 100.;

// Classic escape-time test: returns the iteration count at which z escapes
// (|z|^2 > 4), or max_its if the point stays bounded.
float mandelbrot(vec2 z){
    vec2 c = z;
    // `max_its` is a const float, so this loop condition satisfies the
    // GLSL ES 1.0 constant-expression rule.
    for(float i=0.;i<max_its;i++){     // for loop is here.
        if(dot(z,z)>4.) return i;      // conditional early return here.
        z = vec2(z.x*z.x-z.y*z.y,2.*z.x*z.y)+c;
    }
    return max_its;
}


// Maps the escape-time count to a grayscale value in [0, 1].
void main( void ) {
    vec2 p = surfacePosition;
    gl_FragColor = vec4(mandelbrot(p)/max_its);
}

Because `max_its` is a `const`, the loop bound is a constant expression and the shader compiles. The conditional `return` exits the loop as soon as the escape condition is met, so the effective iteration count is still dynamic.

This way the loop bound stays constant while the number of iterations actually performed remains data-dependent.

+5

I ran into the same problem. My loop looked like this:

// SCALE_FACTOR must be a compile-time constant here: GLSL ES 1.0 only
// accepts loop bounds built from constant expressions. It is supplied via
// a #define injected into the shader source before compilation.
for (int dx = -2 * SCALE_FACTOR; dx < 2 * SCALE_FACTOR; dx += 2) {
    for (int dy = -2 * SCALE_FACTOR; dy < 2 * SCALE_FACTOR; dy += 2) {
        /* accumulate fragment color */
    }
}

The catch is that SCALE_FACTOR is only known at runtime (say, 4), so it cannot be written into the shader source as a literal constant. Instead, I inject it as a `#define` before compiling the shader:

// Injects "#define NAME value" lines into GLSL shader source.
// The defines are placed immediately after the #version directive when one
// is present (the GLSL spec requires #version to be the first line of the
// shader), otherwise at the very top of the source.
//
// @param {string} shaderCode - the GLSL source to modify
// @param {Object} defines    - map of define name -> define value
// @returns {string} the shader source with the defines inserted
function insertDefines (shaderCode, defines) {
    var defineString = '';

    for (var define in defines) {
        if (defines.hasOwnProperty(define)) {
            defineString +=
                '#define ' + define + ' ' + defines[define] + '\n';
        }
    }

    var versionIdx = shaderCode.indexOf('#version');

    // No #version directive: the defines can simply go first.
    if (versionIdx === -1) {
        return defineString + shaderCode;
    }

    var newlineIdx = shaderCode.indexOf('\n', versionIdx);

    // Bug fix: if #version is the last line (no trailing newline),
    // indexOf returns -1, and the old `+ 1` arithmetic made nextLineIdx 0,
    // inserting the defines BEFORE #version — invalid GLSL. Append them
    // after the directive instead.
    if (newlineIdx === -1) {
        return shaderCode + '\n' + defineString;
    }

    var nextLineIdx = newlineIdx + 1;

    return shaderCode.slice(0, nextLineIdx) +
        defineString +
        shaderCode.slice(nextLineIdx);
}

This inserts the defines after the `#version` directive if one is present, since `#version` must be the first line of the shader.

To make the shader fail loudly if SCALE_FACTOR was never injected, I added:

// Compile-time guard: abort shader compilation with a clear message if the
// host code forgot to inject SCALE_FACTOR via insertDefines.
#ifndef SCALE_FACTOR
#   error SCALE_FACTOR is undefined
#endif

And in my javascript code, I did something like this:

// One shader program is compiled per scale factor up front, so the right
// one can be picked at render time without recompiling. (Sketch only:
// shaderCode is presumably loaded elsewhere, and the map callback is a
// placeholder for the compile/link/return steps.)
var SCALE_FACTORS = [4, 8, 16, 32],
    shaderCode, // the code of my shader
    shaderPrograms = SCALE_FACTORS.map(function (factor) {
        var codeWithDefines = insertDefines(shaderCode, { SCALE_FACTOR: factor });
        /* compile shaders, link program, return */
    });
0
source

I am using OpenGL ES 3 on Android, and I solved this problem by enabling the following extension at the top of the shader:

#extension GL_EXT_gpu_shader5 : require

I don't know if it works on webGL, but you can try. Hope this helps.

0
source

Source: https://habr.com/ru/post/1651460/


All Articles