// SSAO.hlsl — Unity URP screen-space ambient occlusion include (Alchemy AO variant).
  1. #ifndef UNIVERSAL_SSAO_INCLUDED
  2. #define UNIVERSAL_SSAO_INCLUDED
  3. // Includes
  4. #include "Packages/com.unity.render-pipelines.core/ShaderLibrary/Common.hlsl"
  5. #include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/ShaderVariablesFunctions.hlsl"
  6. #include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/DeclareDepthTexture.hlsl"
  7. #include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/DeclareNormalsTexture.hlsl"
  8. // Textures & Samplers
  9. TEXTURE2D_X(_BaseMap);
  10. TEXTURE2D_X(_ScreenSpaceOcclusionTexture);
  11. SAMPLER(sampler_BaseMap);
  12. SAMPLER(sampler_ScreenSpaceOcclusionTexture);
  13. // Params
  14. half4 _SSAOParams;
  15. half4 _CameraViewTopLeftCorner[2];
  16. half4x4 _CameraViewProjections[2]; // This is different from UNITY_MATRIX_VP (platform-agnostic projection matrix is used). Handle both non-XR and XR modes.
  17. float4 _SourceSize;
  18. float4 _ProjectionParams2;
  19. float4 _CameraViewXExtent[2];
  20. float4 _CameraViewYExtent[2];
  21. float4 _CameraViewZExtent[2];
  22. // Hardcoded random UV values that improves performance.
  23. // The values were taken from this function:
  24. // r = frac(43758.5453 * sin( dot(float2(12.9898, 78.233), uv)) ));
  25. // Indices 0 to 19 are for u = 0.0
  26. // Indices 20 to 39 are for u = 1.0
  27. static half SSAORandomUV[40] =
  28. {
  29. 0.00000000, // 00
  30. 0.33984375, // 01
  31. 0.75390625, // 02
  32. 0.56640625, // 03
  33. 0.98437500, // 04
  34. 0.07421875, // 05
  35. 0.23828125, // 06
  36. 0.64062500, // 07
  37. 0.35937500, // 08
  38. 0.50781250, // 09
  39. 0.38281250, // 10
  40. 0.98437500, // 11
  41. 0.17578125, // 12
  42. 0.53906250, // 13
  43. 0.28515625, // 14
  44. 0.23137260, // 15
  45. 0.45882360, // 16
  46. 0.54117650, // 17
  47. 0.12941180, // 18
  48. 0.64313730, // 19
  49. 0.92968750, // 20
  50. 0.76171875, // 21
  51. 0.13333330, // 22
  52. 0.01562500, // 23
  53. 0.00000000, // 24
  54. 0.10546875, // 25
  55. 0.64062500, // 26
  56. 0.74609375, // 27
  57. 0.67968750, // 28
  58. 0.35156250, // 29
  59. 0.49218750, // 30
  60. 0.12500000, // 31
  61. 0.26562500, // 32
  62. 0.62500000, // 33
  63. 0.44531250, // 34
  64. 0.17647060, // 35
  65. 0.44705890, // 36
  66. 0.93333340, // 37
  67. 0.87058830, // 38
  68. 0.56862750, // 39
  69. };
  70. // SSAO Settings
  71. #define INTENSITY _SSAOParams.x
  72. #define RADIUS _SSAOParams.y
  73. #define DOWNSAMPLE _SSAOParams.z
  74. // GLES2: In many cases, dynamic looping is not supported.
  75. #if defined(SHADER_API_GLES) && !defined(SHADER_API_GLES3)
  76. #define SAMPLE_COUNT 3
  77. #else
  78. #define SAMPLE_COUNT int(_SSAOParams.w)
  79. #endif
  80. // Function defines
  81. #define SCREEN_PARAMS GetScaledScreenParams()
  82. #define SAMPLE_BASEMAP(uv) SAMPLE_TEXTURE2D_X(_BaseMap, sampler_BaseMap, UnityStereoTransformScreenSpaceTex(uv));
  83. // Constants
  84. // kContrast determines the contrast of occlusion. This allows users to control over/under
  85. // occlusion. At the moment, this is not exposed to the editor because it's rarely useful.
  86. // The range is between 0 and 1.
  87. static const half kContrast = half(0.5);
  88. // The constant below controls the geometry-awareness of the bilateral
  89. // filter. The higher value, the more sensitive it is.
  90. static const half kGeometryCoeff = half(0.8);
  91. // The constants below are used in the AO estimator. Beta is mainly used for suppressing
  92. // self-shadowing noise, and Epsilon is used to prevent calculation underflow. See the paper
  93. // (Morgan 2011 https://casual-effects.com/research/McGuire2011AlchemyAO/index.html)
  94. // for further details of these constants.
  95. static const half kBeta = half(0.002);
  96. static const half kEpsilon = half(0.0001);
  97. #if defined(USING_STEREO_MATRICES)
  98. #define unity_eyeIndex unity_StereoEyeIndex
  99. #else
  100. #define unity_eyeIndex 0
  101. #endif
  102. half4 PackAONormal(half ao, half3 n)
  103. {
  104. return half4(ao, n * half(0.5) + half(0.5));
  105. }
  106. half3 GetPackedNormal(half4 p)
  107. {
  108. return p.gba * half(2.0) - half(1.0);
  109. }
  110. half GetPackedAO(half4 p)
  111. {
  112. return p.r;
  113. }
  114. half EncodeAO(half x)
  115. {
  116. #if UNITY_COLORSPACE_GAMMA
  117. return half(1.0 - max(LinearToSRGB(1.0 - saturate(x)), 0.0));
  118. #else
  119. return x;
  120. #endif
  121. }
  122. half CompareNormal(half3 d1, half3 d2)
  123. {
  124. return smoothstep(kGeometryCoeff, half(1.0), dot(d1, d2));
  125. }
  126. // Trigonometric function utility
  127. half2 CosSin(half theta)
  128. {
  129. half sn, cs;
  130. sincos(theta, sn, cs);
  131. return half2(cs, sn);
  132. }
  133. // Pseudo random number generator with 2D coordinates
  134. half GetRandomUVForSSAO(float u, int sampleIndex)
  135. {
  136. return SSAORandomUV[u * 20 + sampleIndex];
  137. }
  138. float2 GetScreenSpacePosition(float2 uv)
  139. {
  140. return float2(uv * SCREEN_PARAMS.xy * DOWNSAMPLE);
  141. }
  142. // Sample point picker
  143. half3 PickSamplePoint(float2 uv, int sampleIndex)
  144. {
  145. const float2 positionSS = GetScreenSpacePosition(uv);
  146. const half gn = half(InterleavedGradientNoise(positionSS, sampleIndex));
  147. const half u = frac(GetRandomUVForSSAO(half(0.0), sampleIndex) + gn) * half(2.0) - half(1.0);
  148. const half theta = (GetRandomUVForSSAO(half(1.0), sampleIndex) + gn) * half(TWO_PI);
  149. return half3(CosSin(theta) * sqrt(half(1.0) - u * u), u);
  150. }
  151. float SampleAndGetLinearEyeDepth(float2 uv)
  152. {
  153. float rawDepth = SampleSceneDepth(uv.xy);
  154. #if defined(_ORTHOGRAPHIC)
  155. return LinearDepthToEyeDepth(rawDepth);
  156. #else
  157. return LinearEyeDepth(rawDepth, _ZBufferParams);
  158. #endif
  159. }
  160. // This returns a vector in world unit (not a position), from camera to the given point described by uv screen coordinate and depth (in absolute world unit).
  161. half3 ReconstructViewPos(float2 uv, float depth)
  162. {
  163. // Screen is y-inverted.
  164. uv.y = 1.0 - uv.y;
  165. // view pos in world space
  166. #if defined(_ORTHOGRAPHIC)
  167. float zScale = depth * _ProjectionParams.w; // divide by far plane
  168. float3 viewPos = _CameraViewTopLeftCorner[unity_eyeIndex].xyz
  169. + _CameraViewXExtent[unity_eyeIndex].xyz * uv.x
  170. + _CameraViewYExtent[unity_eyeIndex].xyz * uv.y
  171. + _CameraViewZExtent[unity_eyeIndex].xyz * zScale;
  172. #else
  173. float zScale = depth * _ProjectionParams2.x; // divide by near plane
  174. float3 viewPos = _CameraViewTopLeftCorner[unity_eyeIndex].xyz
  175. + _CameraViewXExtent[unity_eyeIndex].xyz * uv.x
  176. + _CameraViewYExtent[unity_eyeIndex].xyz * uv.y;
  177. viewPos *= zScale;
  178. #endif
  179. return half3(viewPos);
  180. }
  181. // Try reconstructing normal accurately from depth buffer.
  182. // Low: DDX/DDY on the current pixel
  183. // Medium: 3 taps on each direction | x | * | y |
  184. // High: 5 taps on each direction: | z | x | * | y | w |
  185. // https://atyuwen.github.io/posts/normal-reconstruction/
  186. // https://wickedengine.net/2019/09/22/improved-normal-reconstruction-from-depth/
  187. half3 ReconstructNormal(float2 uv, float depth, float3 vpos)
  188. {
  189. #if defined(_RECONSTRUCT_NORMAL_LOW)
  190. return half3(normalize(cross(ddy(vpos), ddx(vpos))));
  191. #else
  192. float2 delta = float2(_SourceSize.zw * 2.0);
  193. // Sample the neighbour fragments
  194. float2 lUV = float2(-delta.x, 0.0);
  195. float2 rUV = float2( delta.x, 0.0);
  196. float2 uUV = float2(0.0, delta.y);
  197. float2 dUV = float2(0.0, -delta.y);
  198. float3 l1 = float3(uv + lUV, 0.0); l1.z = SampleAndGetLinearEyeDepth(l1.xy); // Left1
  199. float3 r1 = float3(uv + rUV, 0.0); r1.z = SampleAndGetLinearEyeDepth(r1.xy); // Right1
  200. float3 u1 = float3(uv + uUV, 0.0); u1.z = SampleAndGetLinearEyeDepth(u1.xy); // Up1
  201. float3 d1 = float3(uv + dUV, 0.0); d1.z = SampleAndGetLinearEyeDepth(d1.xy); // Down1
  202. // Determine the closest horizontal and vertical pixels...
  203. // horizontal: left = 0.0 right = 1.0
  204. // vertical : down = 0.0 up = 1.0
  205. #if defined(_RECONSTRUCT_NORMAL_MEDIUM)
  206. uint closest_horizontal = l1.z > r1.z ? 0 : 1;
  207. uint closest_vertical = d1.z > u1.z ? 0 : 1;
  208. #else
  209. float3 l2 = float3(uv + lUV * 2.0, 0.0); l2.z = SampleAndGetLinearEyeDepth(l2.xy); // Left2
  210. float3 r2 = float3(uv + rUV * 2.0, 0.0); r2.z = SampleAndGetLinearEyeDepth(r2.xy); // Right2
  211. float3 u2 = float3(uv + uUV * 2.0, 0.0); u2.z = SampleAndGetLinearEyeDepth(u2.xy); // Up2
  212. float3 d2 = float3(uv + dUV * 2.0, 0.0); d2.z = SampleAndGetLinearEyeDepth(d2.xy); // Down2
  213. const uint closest_horizontal = abs( (2.0 * l1.z - l2.z) - depth) < abs( (2.0 * r1.z - r2.z) - depth) ? 0 : 1;
  214. const uint closest_vertical = abs( (2.0 * d1.z - d2.z) - depth) < abs( (2.0 * u1.z - u2.z) - depth) ? 0 : 1;
  215. #endif
  216. // Calculate the triangle, in a counter-clockwize order, to
  217. // use based on the closest horizontal and vertical depths.
  218. // h == 0.0 && v == 0.0: p1 = left, p2 = down
  219. // h == 1.0 && v == 0.0: p1 = down, p2 = right
  220. // h == 1.0 && v == 1.0: p1 = right, p2 = up
  221. // h == 0.0 && v == 1.0: p1 = up, p2 = left
  222. // Calculate the view space positions for the three points...
  223. float3 P1;
  224. float3 P2;
  225. if (closest_vertical == 0)
  226. {
  227. P1 = closest_horizontal == 0 ? l1 : d1;
  228. P2 = closest_horizontal == 0 ? d1 : r1;
  229. }
  230. else
  231. {
  232. P1 = closest_horizontal == 0 ? u1 : r1;
  233. P2 = closest_horizontal == 0 ? l1 : u1;
  234. }
  235. // Use the cross product to calculate the normal...
  236. return half3(normalize(cross(ReconstructViewPos(P2.xy, P2.z) - vpos, ReconstructViewPos(P1.xy, P1.z) - vpos)));
  237. #endif
  238. }
  239. // For when we don't need to output the depth or view position
  240. // Used in the blur passes
  241. half3 SampleNormal(float2 uv)
  242. {
  243. #if defined(_SOURCE_DEPTH_NORMALS)
  244. return half3(SampleSceneNormals(uv));
  245. #else
  246. float depth = SampleAndGetLinearEyeDepth(uv);
  247. half3 vpos = ReconstructViewPos(uv, depth);
  248. return ReconstructNormal(uv, depth, vpos);
  249. #endif
  250. }
  251. void SampleDepthNormalView(float2 uv, out float depth, out half3 normal, out half3 vpos)
  252. {
  253. depth = SampleAndGetLinearEyeDepth(uv);
  254. vpos = ReconstructViewPos(uv, depth);
  255. #if defined(_SOURCE_DEPTH_NORMALS)
  256. normal = half3(SampleSceneNormals(uv));
  257. #else
  258. normal = ReconstructNormal(uv, depth, vpos);
  259. #endif
  260. }
  261. // Distance-based AO estimator based on Morgan 2011
  262. // "Alchemy screen-space ambient obscurance algorithm"
  263. // http://graphics.cs.williams.edu/papers/AlchemyHPG11/
  264. half4 SSAO(Varyings input) : SV_Target
  265. {
  266. UNITY_SETUP_STEREO_EYE_INDEX_POST_VERTEX(input);
  267. float2 uv = input.uv;
  268. // Parameters used in coordinate conversion
  269. half3x3 camTransform = (half3x3)_CameraViewProjections[unity_eyeIndex]; // camera viewProjection matrix
  270. // Get the depth, normal and view position for this fragment
  271. float depth_o;
  272. half3 norm_o;
  273. half3 vpos_o;
  274. SampleDepthNormalView(uv, depth_o, norm_o, vpos_o);
  275. // This was added to avoid a NVIDIA driver issue.
  276. const half rcpSampleCount = half(rcp(SAMPLE_COUNT));
  277. half ao = 0.0;
  278. for (int s = 0; s < SAMPLE_COUNT; s++)
  279. {
  280. // Sample point
  281. half3 v_s1 = PickSamplePoint(uv, s);
  282. // Make it distributed between [0, _Radius]
  283. v_s1 *= sqrt((half(s) + half(1.0)) * rcpSampleCount) * RADIUS;
  284. v_s1 = faceforward(v_s1, -norm_o, v_s1);
  285. half3 vpos_s1 = vpos_o + v_s1;
  286. // Reproject the sample point
  287. half3 spos_s1 = mul(camTransform, vpos_s1);
  288. #if defined(_ORTHOGRAPHIC)
  289. float2 uv_s1_01 = clamp((spos_s1.xy + float(1.0)) * float(0.5), float(0.0), float(1.0));
  290. #else
  291. float zdist = -dot(UNITY_MATRIX_V[2].xyz, vpos_s1);
  292. float2 uv_s1_01 = clamp((spos_s1.xy * rcp(zdist) + float(1.0)) * float(0.5), float(0.0), float(1.0));
  293. #endif
  294. // Depth at the sample point
  295. float depth_s1 = SampleAndGetLinearEyeDepth(uv_s1_01);
  296. // Relative position of the sample point
  297. half3 vpos_s2 = ReconstructViewPos(uv_s1_01, depth_s1);
  298. half3 v_s2 = vpos_s2 - vpos_o;
  299. // Estimate the obscurance value
  300. half dotVal = dot(v_s2, norm_o);
  301. #if defined(_ORTHOGRAPHIC)
  302. dotVal -= half(2.0 * kBeta * depth_o);
  303. #else
  304. dotVal -= half(kBeta * depth_o);
  305. #endif
  306. half a1 = max(dotVal, half(0.0));
  307. half a2 = dot(v_s2, v_s2) + kEpsilon;
  308. ao += a1 * rcp(a2);
  309. }
  310. // Intensity normalization
  311. ao *= RADIUS;
  312. // Apply contrast
  313. ao = PositivePow(ao * INTENSITY * rcpSampleCount, kContrast);
  314. return PackAONormal(ao, norm_o);
  315. }
  316. // Geometry-aware separable bilateral filter
  317. half4 Blur(float2 uv, float2 delta) : SV_Target
  318. {
  319. half4 p0 = (half4) SAMPLE_BASEMAP(uv );
  320. half4 p1a = (half4) SAMPLE_BASEMAP(uv - delta * 1.3846153846);
  321. half4 p1b = (half4) SAMPLE_BASEMAP(uv + delta * 1.3846153846);
  322. half4 p2a = (half4) SAMPLE_BASEMAP(uv - delta * 3.2307692308);
  323. half4 p2b = (half4) SAMPLE_BASEMAP(uv + delta * 3.2307692308);
  324. #if defined(BLUR_SAMPLE_CENTER_NORMAL)
  325. #if defined(_SOURCE_DEPTH_NORMALS)
  326. half3 n0 = half3(SampleSceneNormals(uv));
  327. #else
  328. half3 n0 = SampleNormal(uv);
  329. #endif
  330. #else
  331. half3 n0 = GetPackedNormal(p0);
  332. #endif
  333. half w0 = half(0.2270270270);
  334. half w1a = CompareNormal(n0, GetPackedNormal(p1a)) * half(0.3162162162);
  335. half w1b = CompareNormal(n0, GetPackedNormal(p1b)) * half(0.3162162162);
  336. half w2a = CompareNormal(n0, GetPackedNormal(p2a)) * half(0.0702702703);
  337. half w2b = CompareNormal(n0, GetPackedNormal(p2b)) * half(0.0702702703);
  338. half s = half(0.0);
  339. s += GetPackedAO(p0) * w0;
  340. s += GetPackedAO(p1a) * w1a;
  341. s += GetPackedAO(p1b) * w1b;
  342. s += GetPackedAO(p2a) * w2a;
  343. s += GetPackedAO(p2b) * w2b;
  344. s *= rcp(w0 + w1a + w1b + w2a + w2b);
  345. return PackAONormal(s, n0);
  346. }
  347. // Geometry-aware bilateral filter (single pass/small kernel)
  348. half BlurSmall(float2 uv, float2 delta)
  349. {
  350. half4 p0 = (half4) SAMPLE_BASEMAP(uv );
  351. half4 p1 = (half4) SAMPLE_BASEMAP(uv + float2(-delta.x, -delta.y));
  352. half4 p2 = (half4) SAMPLE_BASEMAP(uv + float2( delta.x, -delta.y));
  353. half4 p3 = (half4) SAMPLE_BASEMAP(uv + float2(-delta.x, delta.y));
  354. half4 p4 = (half4) SAMPLE_BASEMAP(uv + float2( delta.x, delta.y));
  355. half3 n0 = GetPackedNormal(p0);
  356. half w0 = half(1.0);
  357. half w1 = CompareNormal(n0, GetPackedNormal(p1));
  358. half w2 = CompareNormal(n0, GetPackedNormal(p2));
  359. half w3 = CompareNormal(n0, GetPackedNormal(p3));
  360. half w4 = CompareNormal(n0, GetPackedNormal(p4));
  361. half s = half(0.0);
  362. s += GetPackedAO(p0) * w0;
  363. s += GetPackedAO(p1) * w1;
  364. s += GetPackedAO(p2) * w2;
  365. s += GetPackedAO(p3) * w3;
  366. s += GetPackedAO(p4) * w4;
  367. return s *= rcp(w0 + w1 + w2 + w3 + w4);
  368. }
  369. half4 HorizontalBlur(Varyings input) : SV_Target
  370. {
  371. UNITY_SETUP_STEREO_EYE_INDEX_POST_VERTEX(input);
  372. const float2 uv = input.uv;
  373. const float2 delta = float2(_SourceSize.z, 0.0);
  374. return Blur(uv, delta);
  375. }
  376. half4 VerticalBlur(Varyings input) : SV_Target
  377. {
  378. UNITY_SETUP_STEREO_EYE_INDEX_POST_VERTEX(input);
  379. const float2 uv = input.uv;
  380. const float2 delta = float2(0.0, _SourceSize.w * rcp(DOWNSAMPLE));
  381. return Blur(uv, delta);
  382. }
  383. half4 FinalBlur(Varyings input) : SV_Target
  384. {
  385. UNITY_SETUP_STEREO_EYE_INDEX_POST_VERTEX(input);
  386. const float2 uv = input.uv;
  387. const float2 delta = _SourceSize.zw;
  388. return half(1.0) - BlurSmall(uv, delta );
  389. }
  390. #endif //UNIVERSAL_SSAO_INCLUDED