Advertisement
Guest User

HBAO

a guest
Apr 9th, 2022
226
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
text 5.26 KB | None | 0 0
#version 450

// HBAO (Horizon-Based Ambient Occlusion) fragment shader.
// Reconstructs view-space position and normal from the depth buffer,
// then marches rays in screen space to estimate horizon angles.

// Tuning parameters shared by several AO techniques (classic SSAO,
// HBAO, Alchemy AO); only `radius` and the hbao* fields are read here.
layout(binding = 0) uniform SSAOubo {
vec4 samples[64];
int sample_amount;
float radius;

int hbaoSampleDirection;
float hbaoSteps;
int hbaoNumberOfSteps;
float hbaoAmbientLightLevel;

int alchemySampleTurns;
float alchemySigma;
float alchemyKappa;
}ssao;

// Camera matrices; only `proj` is used in this shader (to unproject depth).
layout(binding = 4) uniform CameraProjection {
mat4 model;
mat4 view;
mat4 proj;
}camera;

layout(binding = 1) uniform sampler2D texNoise;  // small tiling random-jitter texture (presumably 4x4 — see noiseScale)
layout(binding = 2) uniform sampler2D gPosition; // NOTE(review): unused in this shader
layout(binding = 3) uniform sampler2D depthMap;  // scene depth in [0,1]

layout(location = 0) in vec3 fragColor; // NOTE(review): unused in this shader
layout(location = 1) in vec2 uvCoords;  // full-screen quad UV

layout(location = 0) out vec4 outColor; // grayscale AO factor

#define PI 3.1415926535897932384626433832795

// Convenience aliases for the uniform parameters; global initializers
// run before main() is executed.
float RADIUS = ssao.radius;
float NUMBER_OF_SAMPLING_DIRECTIONS = ssao.hbaoSampleDirection;
float STEP = ssao.hbaoSteps; //0.04
float NUMBER_OF_STEPS = ssao.hbaoNumberOfSteps;
float TANGENT_BIAS = 0.3; // biases the tangent angle to suppress false occlusion on flat surfaces

// Accumulators written by main().
float sum = 0.0;
float occlusion = 0.0;

// Tiles a 4-texel-wide noise texture across a 1920x1080 target —
// TODO confirm this matches the actual framebuffer resolution.
const vec2 noiseScale = vec2(1920.0/4, 1080.0/4);
  46. vec4 depthToPosition(vec2 uv) {
  47.  
  48. float depth = texture(depthMap, uv).x;
  49. vec4 clipSpace = vec4(uv * 2.0 - 1.0, depth, 1.0);
  50. vec4 viewSpace = inverse(camera.proj) * clipSpace;
  51. viewSpace.xyz /= viewSpace.w;
  52.  
  53. return vec4(vec3(viewSpace), 1.0);
  54. }
  55.  
  56. vec4 depthToNormal(vec2 tc)
  57. {
  58. float depth = texture(depthMap, tc).x;
  59.  
  60. vec4 clipSpace = vec4(tc * 2.0 - 1.0, depth, 1.0);
  61. vec4 viewSpace = inverse(camera.proj) * clipSpace;
  62. viewSpace.xyz /= viewSpace.w;
  63.  
  64. vec3 pos = viewSpace.xyz;
  65. vec3 n = normalize(cross(dFdx(pos), dFdy(pos)));
  66. n *= - 1;
  67.  
  68. return vec4(n, 1.0);
  69. }
  70.  
  71. vec2 RotateDirection(vec2 Dir, vec2 CosSin)
  72. {
  73. return vec2(Dir.x*CosSin.x - Dir.y*CosSin.y, Dir.x*CosSin.y + Dir.y*CosSin.x);
  74. }
  75.  
  76. vec4 GetJitter()
  77. {
  78. return textureLod(texNoise, (gl_FragCoord.xy / 4), 0);
  79. }
  80.  
  81. void main()
  82. {
  83.  
  84. // position of current fragment
  85. vec3 pos = vec3(uvCoords, texture(depthMap, uvCoords).r);
  86.  
  87. vec4 normal = depthToNormal(uvCoords);
  88. normal.y = -normal.y;
  89. //normal.x = -normal.x;
  90.  
  91. vec3 NDC_POS = (2.0 * pos) - 1.0; // normalized device coordinates
  92. vec4 unprojectPosition = inverse(camera.proj) * vec4(NDC_POS, 1.0);
  93. vec3 viewPosition = unprojectPosition.xyz / unprojectPosition.w;
  94.  
  95. // paper suggests to jitter samples by random offset
  96. vec3 sampleNoise = texture(texNoise, uvCoords * noiseScale).xyz;
  97. sampleNoise.xy = sampleNoise.xy * 2.0 - vec2(1.0);
  98.  
  99. // A single direction
  100. float samplingDiskDirection = 2 * PI / NUMBER_OF_SAMPLING_DIRECTIONS;
  101. vec4 Rand = GetJitter();
  102.  
  103. for(int i = 0; i < NUMBER_OF_SAMPLING_DIRECTIONS; i++) {
  104.  
  105. // use i to get a new direction by * given direction
  106. float samplingDirectionAngle = i * samplingDiskDirection;
  107. //jitter direction
  108. vec2 samplingDirection = RotateDirection(vec2(cos(samplingDirectionAngle), sin(samplingDirectionAngle)), Rand.xy);
  109.  
  110. //tangent angle : inverse cosine
  111. float tangentAngle = acos(dot(vec3(samplingDirection, 0.0), normal.xyz)) - (0.5 * PI) + TANGENT_BIAS;
  112. float horizonAngle = tangentAngle; //set the horizon angle to the tangent angle to begin with
  113.  
  114. vec3 LastDifference = vec3(0);
  115.  
  116. // for each direction we step in the direction of that sampling direction to sample
  117. for(int j = 0; j < NUMBER_OF_STEPS; j++){
  118.  
  119. // step forward in the sampling direction
  120. vec2 stepForward = (Rand.z + float(j+1)) * STEP * samplingDirection;
  121. // use the stepforward position as an offset from the current fragment position in order to move to that location
  122. vec2 stepPosition = uvCoords + stepForward;
  123. // sample at the stepped location to get the depth value
  124. float steppedLocationZ = texture(depthMap, stepPosition.st).x;
  125. // complete sample position
  126. vec3 steppedLocationPosition = vec3(stepPosition, steppedLocationZ);
  127. //convert to NDC
  128. vec3 steppedPositionNDC = (2.0 * steppedLocationPosition) - 1.0;
  129. vec4 SteppedPositionUnProj = inverse(camera.proj) * vec4(steppedPositionNDC, 1.0);
  130. vec3 viewSpaceSteppedPosition = SteppedPositionUnProj.xyz / SteppedPositionUnProj.w;
  131.  
  132. // Now that we have the view-space position of the offset sample point
  133. // We can check the distance from our current fragment to the offset point
  134.  
  135. vec3 diff = viewSpaceSteppedPosition.xyz - viewPosition;
  136. // If the distance is less than the set radius
  137. if(length(diff) < RADIUS){
  138.  
  139. LastDifference = diff;
  140. float FoundElevationAngle = atan(diff.z / length(diff.xy));
  141. // update horizon angle if new found elevation angle is larger
  142. horizonAngle = max(horizonAngle, FoundElevationAngle);
  143. }
  144. }
  145.  
  146. float norm = length(LastDifference) / RADIUS;
  147. float attenuation = 1 - norm * norm;
  148.  
  149. occlusion = clamp(attenuation * (sin(horizonAngle) - sin(tangentAngle)), 0.0, 1.0);
  150. sum += 1.0 - occlusion * ssao.hbaoAmbientLightLevel; //control AO darkness
  151. }
  152.  
  153. sum /= NUMBER_OF_SAMPLING_DIRECTIONS;
  154.  
  155. outColor = vec4(sum, sum, sum, 1.0);
  156.  
  157. }
  158.  
  159.  
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement