Nvidia DirectX10 SSAO: A Brief Translation

    Technology | 2022-05-19

    References:

    http://developer.download.nvidia.com/SDK/10.5/direct3d/Source/ScreenSpaceAO/doc/ScreenSpaceAO.pdf
    http://developer.download.nvidia.com/presentations/2008/SIGGRAPH/HBAO_SIG08b.pdf

     

    Main computation:

    March rays in screen space, compare the sine of the angle each sample makes with the XY plane, and finally integrate these contributions to obtain the AO value over the whole hemisphere.

    Illustration: [figure omitted — AO computation formula]
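
    The figure itself did not survive; reconstructing it from the linked HBAO_SIG08b.pdf presentation, the per-pixel occlusion is, with h(θ) the horizon angle found by marching in screen-space direction θ, t(θ) the tangent angle of the surface in that direction, and W the distance falloff:

    AO = \frac{1}{2\pi} \int_{\theta=-\pi}^{\pi} \left( \sin h(\theta) - \sin t(\theta) \right) W(\theta) \, d\theta

    The shader below discretizes this integral as an average over m_NumDir directions and writes out 1 - AO * m_Contrast.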

    A few additional notes:

    The normal must be per-pixel and must not come from interpolated vertex normals; it can be reconstructed from ddx/ddy screen-space derivatives (see the sketch after this list).

    Add a small bias to the angle (m_TanAngleBias in the code) so that sin(h) at a sample point does not become too small, which helps avoid false occlusion.

    AO falloff weight: W(r) = 1 - (|S - P| / R)^2; in the code this is falloff(r) = 1 - m_Attenuation * r * r with r = |S - P| / R.

    The per-direction contributions are integrated over the hemisphere to give the final AO.
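
    The shader below builds the tangent-plane basis from neighbouring depth samples (min_diff). As a sketch of the derivative-based alternative mentioned in the first note (ddx/ddy are the HLSL names; the GLSL equivalents are dFdx/dFdy), and assuming the same texCoord varying, fetch_eye_pos helper and resolution uniforms as in ssao.frag, the per-pixel basis and normal could be obtained like this inside main():

        vec3 P    = fetch_eye_pos(texCoord);
        // Screen-space derivatives of the reconstructed eye-space position
        vec3 dPdu = dFdx(P);                                        // change of P along screen X
        vec3 dPdv = dFdy(P) * (m_Resolution.y * m_InvResolution.x); // aspect-corrected change along screen Y
        // Per-pixel face normal of the tangent plane (sign depends on handedness conventions)
        vec3 N    = normalize(cross(dPdu, dPdv));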

     

     

    Pipeline (flow-chart figure omitted): compute the raw AO, then apply a Gaussian blur separately along the X and Y directions; a sketch of the blur pass follows the ssao.frag listing below.

    The shader source is as follows (ssao.frag):

    // Parameter notes: m_R, the AO radius in camera (eye) space; m_NumSteps, the number of steps per direction;
    // m_NumDir, the number of random sampling directions; m_inv_R, the reciprocal of the radius;
    // m_sqr_R, the radius squared; m_FocalLen, Vector2f(1f / (tanFovY * aspectRatio), 1f / tanFovY);
    // m_Resolution, the width/height of the depth texture; m_Contrast, 3.

    float M_PI = 3.14159265;

    varying vec2 texCoord;

    uniform sampler2D m_DepthTexture;

    // Uniforms used below (types inferred from how they are used in this shader):
    uniform float m_R;             // AO radius in eye space
    uniform float m_inv_R;         // 1.0 / m_R
    uniform float m_sqr_R;         // m_R * m_R
    uniform float m_NumSteps;      // steps per direction
    uniform float m_NumDir;        // number of sampling directions
    uniform float m_Contrast;      // AO contrast (e.g. 3)
    uniform float m_Attenuation;   // falloff attenuation factor
    uniform float m_TanAngleBias;  // tangent angle bias
    uniform float m_invFrustumFar; // used by the approximate depth linearization
    uniform vec2  m_FocalLen;      // (1 / (tanFovY * aspectRatio), 1 / tanFovY)
    uniform vec2  m_InvFocalLen;   // component-wise reciprocal of m_FocalLen
    uniform vec2  m_Resolution;    // depth texture width/height
    uniform vec2  m_InvResolution; // 1.0 / m_Resolution

    vec3 uv_to_eye(vec2 uv, float z) {
        // texture origin is at bottom left corner in OpenGL
        uv = (uv * vec2(2.0, 2.0) - vec2(1.0, 1.0));
        return vec3(uv * m_InvFocalLen * z, z);
    }

    vec3 fetch_eye_pos(vec2 uv) {
        float z = texture2D(m_DepthTexture, uv).x;
        z = m_invFrustumFar / (1.0 - z); // (-2.0 * 10000 / (z * 9999 - 10001)) / 9999; to be precise, but only approximation is required
        return uv_to_eye(uv, z);
    }

    float length2(vec3 v) {
        return dot(v, v);
    }

    vec3 min_diff(vec3 P, vec3 Pr, vec3 Pl) {
        vec3 V1 = Pr - P;
        vec3 V2 = P - Pl;
        return (length2(V1) < length2(V2)) ? V1 : V2;
    }

    vec2 rotate_direction(vec2 Dir, vec2 CosSin) {
        return vec2(Dir.x * CosSin.x - Dir.y * CosSin.y,
                    Dir.x * CosSin.y + Dir.y * CosSin.x);
    }

    vec2 snap_uv_offset(vec2 uv) {
        return round(uv * m_Resolution) * m_InvResolution;
    }

    vec2 snap_uv_coord(vec2 uv) {
        return uv - (fract(uv * m_Resolution) - 0.5) * m_InvResolution;
    }

    vec3 tangent_vector(vec2 deltaUV, vec3 dPdu, vec3 dPdv) {
        return deltaUV.x * dPdu + deltaUV.y * dPdv;
    }

    // Tangent of the angle between a tangent-plane vector T and the image plane
    float tangent(vec3 T) {
        return -T.z / length(T.xy);
    }

    // Tangent of the elevation angle of sample S as seen from P
    float tangent(vec3 P, vec3 S) {
        return (P.z - S.z) / length(S.xy - P.xy);
    }

    float tan_to_sin(float x) {
        return x / sqrt(1.0 + x * x);
    }

    float falloff(float r) {
        return 1.0 - m_Attenuation * r * r;
    }

    float AccumulatedHorizonOcclusion_Quality(vec2 deltaUV,
                                              vec2 uv0,
                                              vec3 P,
                                              float numSteps,
                                              float randstep,
                                              vec3 dPdu,
                                              vec3 dPdv) {
        // Jitter starting point within the first sample distance
        vec2 uv = (uv0 + deltaUV) + randstep * deltaUV;

        // Snap first sample uv and initialize horizon tangent
        vec2 snapped_duv = snap_uv_offset(uv - uv0);
        vec3 T = tangent_vector(snapped_duv, dPdu, dPdv);
        float tanH = tangent(T) + m_TanAngleBias;

        float ao = 0.0;
        float h0 = 0.0;
        for (float j = 0.0; j < numSteps; ++j) {
            vec2 snapped_uv = snap_uv_coord(uv);
            vec3 S = fetch_eye_pos(snapped_uv);
            uv += deltaUV;

            // Ignore any samples outside the radius of influence
            float d2 = length2(S - P);
            if (d2 < m_sqr_R) {
                float tanS = tangent(P, S);
                if (tanS > tanH) {
                    // Compute tangent vector associated with snapped_uv
                    vec2 snapped_duv = snapped_uv - uv0;
                    vec3 T = tangent_vector(snapped_duv, dPdu, dPdv);
                    float tanT = tangent(T) + m_TanAngleBias;

                    // Compute AO between tangent T and sample S
                    float sinS = tan_to_sin(tanS);
                    float sinT = tan_to_sin(tanT);
                    float r = sqrt(d2) * m_inv_R;
                    float h = sinS - sinT;
                    ao += falloff(r) * (h - h0);
                    h0 = h;

                    // Update the current horizon angle
                    tanH = tanS;
                }
            }
        }
        return ao;
    }

    void main() {
        vec3 P = fetch_eye_pos(texCoord);

        // Project the radius of influence m_R from eye space to texture space.
        // The scaling by 0.5 is to go from [-1,1] to [0,1].
        vec2 step_size = 0.5 * m_R * m_FocalLen / P.z;

        // Early out if the projected radius is smaller than 1 pixel.
        float numSteps = min(m_NumSteps, min(step_size.x * m_Resolution.x, step_size.y * m_Resolution.y));
        if (numSteps < 1.0) {
            gl_FragColor = vec4(1.0);
            return;
        }
        step_size = step_size / (numSteps + 1.0);

        // Nearest neighbor pixels on the tangent plane
        vec3 Pr, Pl, Pt, Pb;
        Pr = fetch_eye_pos(texCoord + vec2(m_InvResolution.x, 0));
        Pl = fetch_eye_pos(texCoord + vec2(-m_InvResolution.x, 0));
        Pt = fetch_eye_pos(texCoord + vec2(0, m_InvResolution.y));
        Pb = fetch_eye_pos(texCoord + vec2(0, -m_InvResolution.y));

        // Screen-aligned basis for the tangent plane
        vec3 dPdu = min_diff(P, Pr, Pl);
        vec3 dPdv = min_diff(P, Pt, Pb) * (m_Resolution.y * m_InvResolution.x);

        // Cheap per-pixel pseudo-random rotation (cos, sin, jitter)
        vec2 jitter = texCoord * 1000.0;
        vec3 rand = normalize(vec3(sin(jitter.x) + sin(jitter.y), cos(jitter.x) + cos(jitter.y), 0));

        float ao = 0.0;
        float alpha = 2.0 * M_PI / m_NumDir;
        for (float d = 0.0; d < m_NumDir; d++) {
            float angle = alpha * d;
            vec2 dir = vec2(cos(angle), sin(angle));
            vec2 deltaUV = rotate_direction(dir, rand.xy) * step_size.xy;
            ao += AccumulatedHorizonOcclusion_Quality(deltaUV, texCoord, P, numSteps, rand.z, dPdu, dPdv);
        }

        gl_FragColor = vec4(vec3(1.0 - ao / m_NumDir * m_Contrast), 1);
    }
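
    For the blur pass mentioned above, the following is a minimal sketch of one direction of the separable Gaussian blur; it is run twice, once with m_BlurDir = (1/width, 0) and once with (0, 1/height). The uniform names, the m_AOTexture input and the 5-tap kernel are assumptions for illustration, not part of the NVIDIA sample:

        varying vec2 texCoord;
        uniform sampler2D m_AOTexture; // raw AO output of ssao.frag (assumed name)
        uniform vec2 m_BlurDir;        // (1/width, 0) for the X pass, (0, 1/height) for the Y pass

        void main() {
            // Normalized, approximately Gaussian weights for offsets of 0, 1 and 2 texels
            float w0 = 0.227027;
            float w1 = 0.316216;
            float w2 = 0.070270;
            vec4 sum = texture2D(m_AOTexture, texCoord) * w0;
            sum += texture2D(m_AOTexture, texCoord + 1.0 * m_BlurDir) * w1;
            sum += texture2D(m_AOTexture, texCoord - 1.0 * m_BlurDir) * w1;
            sum += texture2D(m_AOTexture, texCoord + 2.0 * m_BlurDir) * w2;
            sum += texture2D(m_AOTexture, texCoord - 2.0 * m_BlurDir) * w2;
            gl_FragColor = sum;
        }

    In practice a depth-aware (bilateral) weight is often added so the blur does not bleed AO across depth edges; the plain Gaussian above is just the simplest form of what the flow chart describes.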

