slowly interpolated z depth at center of the screen in a 2d filter

I made this little DOF filter and I need a way to determine the “focused” depth. Currently I use the depth at the center of the screen at each moment for this, but it’s a huge eyesore when there are discontinuities in the scene and the depth changes drastically from one frame to the next. I would like to know how to get the depth at the center of the screen using Python so that I can interpolate it and pass it into the filter using an object property.

Code and a picture in case anyone wanted to see it:




// ## Sebastian Mestre 2017 ##

// Scene color/depth buffers and their dimensions, supplied by the
// BGE 2D-filter system as built-in uniforms.
uniform sampler2D bgl_RenderedTexture;
uniform sampler2D bgl_DepthTexture;
uniform float bgl_RenderedTextureWidth;
uniform float bgl_RenderedTextureHeight;


// UV coordinate of the current fragment.
vec2 viewCoord = gl_TexCoord[0].xy;


// Render-target size in pixels (shorthand for the uniforms above).
float width = bgl_RenderedTextureWidth;
float height = bgl_RenderedTextureHeight;


//#######################################
//######## USER INPUT CONSTANTS #########
//#######################################


// Camera clip planes used to linearize the depth buffer.
// NOTE(review): these must match the camera's actual near/far
// settings in the scene — confirm.
const float znear = 0.1;
const float zfar  = 100.0;


// Kernel taps per side (total taps per axis = 2*KERNEL_SIZE + 1),
// blur radius in pixels (scaled to render height), and the depth
// difference at which blur saturates.
int   KERNEL_SIZE = 4;
float FILTER_SIZE = height * 0.025;
float   FULL_DIST = 50.0;


//#######################################


// Pixel spacing between adjacent kernel taps.
float  RATIO_SIZE = FILTER_SIZE / float(KERNEL_SIZE);


// Sample the rendered scene at `coord` and decode from gamma (2.2)
// to linear light so the blur averages in linear space.
vec3 colorAt(vec2 coord){
    vec3 encoded = texture2D(bgl_RenderedTexture, coord).xyz;
    return pow(encoded, vec3(2.2));
}


// Re-encode a linear-light color to gamma space (1/2.2) and write
// it as the fragment output with full alpha.
void setColor(vec3 color){
    vec3 gammaEncoded = pow(color, vec3(1.0/2.2));
    gl_FragColor = vec4(gammaEncoded, 1.0);
}


// Read the hardware depth buffer at `coord` and linearize it into
// an eye-space distance using the znear/zfar clip planes.
float depthAt(vec2 coord){
    float bufferDepth = texture2D(bgl_DepthTexture, coord).x;
    float denom = bufferDepth * (zfar - znear) - zfar;
    return -zfar * znear / denom;
}


// Offset a UV coordinate by (dx, dy) measured in pixels.
vec2 addPix(vec2 coord, float dx, float dy){
    vec2 pixelOffset = vec2(dx / width, dy / height);
    return coord + pixelOffset;
}


// Radial falloff weight for the kernel tap at integer offset (x, y).
// Offsets are normalized against KERNEL_SIZE + 1 so every tap lands
// inside the unit disc; the weight is 1 - d^4, zero outside the disc.
float weightAt(int x, int y){
    float nx = float(x) / float(KERNEL_SIZE + 1);
    float ny = float(y) / float(KERNEL_SIZE + 1);

    float distSq = nx * nx + ny * ny;
    if (distSq >= 1.0) {
        return 0.0;
    }
    return 1.0 - distSq * distSq;
}


// Weighted blur: accumulate colors over a (2*KERNEL_SIZE + 1)^2 tap
// grid centered on `coord`, spaced RATIO_SIZE pixels apart, weighted
// by weightAt, and normalized by the total weight.
vec3 blurredAt(vec2 coord){
    vec3 accum = vec3(0.0);
    float totalWeight = 0.0;
    for(int row = -KERNEL_SIZE; row <= KERNEL_SIZE; row++){
        for(int col = -KERNEL_SIZE; col <= KERNEL_SIZE; col++){
            float w = weightAt(col, row);
            vec2 tap = addPix(coord, float(col) * RATIO_SIZE, float(row) * RATIO_SIZE);
            accum += colorAt(tap) * w;
            totalWeight += w;
        }
    }
    return accum / totalWeight;
}


// Per-fragment DOF: blend the sharp render with a blurred version,
// driven by how far this fragment's depth is from the "focus" depth.
void main(){
    vec3 sharp = colorAt(viewCoord);
    vec3 blurry = blurredAt(viewCoord);

    // Focus depth is re-sampled at the screen center every frame;
    // this is the source of the popping the author describes when
    // the center depth changes abruptly between frames.
    float focusDepth = depthAt(vec2(0.5));

    float deltaDepth = focusDepth - depthAt(viewCoord);
    float blurAmount = clamp(abs(1.0 - deltaDepth * deltaDepth) / FULL_DIST / focusDepth , 0.0, 1.0);

    setColor( mix(sharp, blurry, blurAmount) );
}


focus = ((oldfocus * (v - 1)) + newfocus) / v

like

focus = ((old*15)+new)/16

Alternatively, also check

vector.lerp(newVector, rate)

Sorry, maybe I didn’t make myself clear.

I would like to know how to find values for the new variable (e.g. using some form of ray casting into the scene?). Interpolation is the bit I already know how to do.

Thank you very much.


import bge
from mathutils import Vector


# Cast a ray from the active camera straight down its view axis
# (-Z in camera space, out to maxDist) and take the distance to
# whatever it hits as the focus depth; fall back to maxDist when
# nothing is hit.
# NOTE(review): assumes `own` (the controller's owner) and
# `maxDist` are defined by the surrounding script — confirm.
cam = own.scene.active_camera
start = cam.worldPosition
end = cam.worldTransform * Vector([0, 0, -maxDist])
ray = cam.rayCast(end, start, 0, '', 0, 0, 0)
dist = cam.getDistanceTo(ray[1]) if ray[0] else maxDist

Oh yes, that must be it — thank you!