UniversalRenderer.cs

  1. using System.Collections.Generic;
  2. using UnityEngine.Rendering.Universal.Internal;
  3. namespace UnityEngine.Rendering.Universal
  4. {
  5. /// <summary>
  6. /// Rendering modes for Universal renderer.
  7. /// </summary>
  8. public enum RenderingMode
  9. {
  10. /// <summary>Render all objects and lighting in one pass, with a hard limit on the number of lights that can be applied on an object.</summary>
  11. Forward,
  12. /// <summary>Render all objects first in a g-buffer pass, then apply all lighting in a separate pass using deferred shading.</summary>
  13. Deferred
  14. };
  15. /// <summary>
  16. /// When the Universal Renderer should use Depth Priming in Forward mode.
  17. /// </summary>
  18. public enum DepthPrimingMode
  19. {
  20. /// <summary>Depth Priming will never be used.</summary>
  21. Disabled,
  22. /// <summary>Depth Priming will only be used if there is a depth prepass needed by any of the render passes.</summary>
  23. Auto,
  24. /// <summary>A depth prepass will be explicitly requested so Depth Priming can be used.</summary>
  25. Forced,
  26. }
  27. /// <summary>
  28. /// Default renderer for Universal RP.
  29. /// This renderer is supported on all Universal RP supported platforms.
  30. /// It uses a classic forward rendering strategy with per-object light culling.
  31. /// </summary>
  32. public sealed class UniversalRenderer : ScriptableRenderer
  33. {
  34. const int k_DepthStencilBufferBits = 32;
  35. static readonly List<ShaderTagId> k_DepthNormalsOnly = new List<ShaderTagId> { new ShaderTagId("DepthNormalsOnly") };
  36. private static class Profiling
  37. {
  38. private const string k_Name = nameof(UniversalRenderer);
  39. public static readonly ProfilingSampler createCameraRenderTarget = new ProfilingSampler($"{k_Name}.{nameof(CreateCameraRenderTarget)}");
  40. }
  41. // Rendering mode setup from UI.
  42. internal RenderingMode renderingMode => m_RenderingMode;
43. // Actual rendering mode, which may be different (e.g. wireframe rendering, or hardware not capable of deferred rendering).
  44. internal RenderingMode actualRenderingMode => (GL.wireframe || (DebugHandler != null && DebugHandler.IsActiveModeUnsupportedForDeferred) || m_DeferredLights == null || !m_DeferredLights.IsRuntimeSupportedThisFrame() || m_DeferredLights.IsOverlay)
  45. ? RenderingMode.Forward
  46. : this.renderingMode;
  47. internal bool accurateGbufferNormals => m_DeferredLights != null ? m_DeferredLights.AccurateGbufferNormals : false;
  48. #if ADAPTIVE_PERFORMANCE_2_1_0_OR_NEWER
49. internal bool needTransparencyPass { get { return !UniversalRenderPipeline.asset.useAdaptivePerformance || !AdaptivePerformance.AdaptivePerformanceRenderSettings.SkipTransparentObjects; } }
  50. #endif
  51. /// <summary>Property to control the depth priming behavior of the forward rendering path.</summary>
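/// <remarks>
/// A minimal usage sketch (assumes the default renderer of the active URP asset is a UniversalRenderer):
/// <code>
/// var renderer = UniversalRenderPipeline.asset.scriptableRenderer as UniversalRenderer;
/// if (renderer != null)
///     renderer.depthPrimingMode = DepthPrimingMode.Forced; // explicitly request a depth prepass for priming
/// </code>
/// </remarks>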
  52. public DepthPrimingMode depthPrimingMode { get { return m_DepthPrimingMode; } set { m_DepthPrimingMode = value; } }
  53. DepthOnlyPass m_DepthPrepass;
  54. DepthNormalOnlyPass m_DepthNormalPrepass;
  55. CopyDepthPass m_PrimedDepthCopyPass;
  56. MotionVectorRenderPass m_MotionVectorPass;
  57. MainLightShadowCasterPass m_MainLightShadowCasterPass;
  58. AdditionalLightsShadowCasterPass m_AdditionalLightsShadowCasterPass;
  59. GBufferPass m_GBufferPass;
  60. CopyDepthPass m_GBufferCopyDepthPass;
  61. TileDepthRangePass m_TileDepthRangePass;
  62. TileDepthRangePass m_TileDepthRangeExtraPass; // TODO use subpass API to hide this pass
  63. DeferredPass m_DeferredPass;
  64. OculusMotionVectorPass m_OculusMotionVecPass;
  65. DrawObjectsPass m_RenderOpaqueForwardOnlyPass;
  66. DrawObjectsPass m_RenderOpaqueForwardPass;
  67. DrawSkyboxPass m_DrawSkyboxPass;
  68. CopyDepthPass m_CopyDepthPass;
  69. CopyColorPass m_CopyColorPass;
  70. TransparentSettingsPass m_TransparentSettingsPass;
  71. DrawObjectsPass m_RenderTransparentForwardPass;
  72. InvokeOnRenderObjectCallbackPass m_OnRenderObjectCallbackPass;
  73. FinalBlitPass m_FinalBlitPass;
  74. CapturePass m_CapturePass;
  75. #if ENABLE_VR && ENABLE_XR_MODULE
  76. XROcclusionMeshPass m_XROcclusionMeshPass;
  77. CopyDepthPass m_XRCopyDepthPass;
  78. #endif
  79. #if UNITY_EDITOR
  80. CopyDepthPass m_FinalDepthCopyPass;
  81. #endif
  82. internal RenderTargetBufferSystem m_ColorBufferSystem;
  83. RenderTargetHandle m_ActiveCameraColorAttachment;
  84. RenderTargetHandle m_ColorFrontBuffer;
  85. RenderTargetHandle m_ActiveCameraDepthAttachment;
  86. RenderTargetHandle m_CameraDepthAttachment;
  87. RenderTargetHandle m_DepthTexture;
  88. RenderTargetHandle m_NormalsTexture;
  89. RenderTargetHandle m_OpaqueColor;
  90. // For tiled-deferred shading.
  91. RenderTargetHandle m_DepthInfoTexture;
  92. RenderTargetHandle m_TileDepthInfoTexture;
  93. ForwardLights m_ForwardLights;
  94. DeferredLights m_DeferredLights;
  95. RenderingMode m_RenderingMode;
  96. DepthPrimingMode m_DepthPrimingMode;
  97. bool m_DepthPrimingRecommended;
  98. StencilState m_DefaultStencilState;
  99. LightCookieManager m_LightCookieManager;
  100. IntermediateTextureMode m_IntermediateTextureMode;
  101. // Materials used in URP Scriptable Render Passes
  102. Material m_BlitMaterial = null;
  103. Material m_CopyDepthMaterial = null;
  104. Material m_SamplingMaterial = null;
  105. Material m_TileDepthInfoMaterial = null;
  106. Material m_TileDeferredMaterial = null;
  107. Material m_StencilDeferredMaterial = null;
  108. Material m_CameraMotionVecMaterial = null;
  109. Material m_ObjectMotionVecMaterial = null;
  110. PostProcessPasses m_PostProcessPasses;
  111. internal ColorGradingLutPass colorGradingLutPass { get => m_PostProcessPasses.colorGradingLutPass; }
  112. internal PostProcessPass postProcessPass { get => m_PostProcessPasses.postProcessPass; }
  113. internal PostProcessPass finalPostProcessPass { get => m_PostProcessPasses.finalPostProcessPass; }
  114. internal RenderTargetHandle colorGradingLut { get => m_PostProcessPasses.colorGradingLut; }
  115. internal DeferredLights deferredLights { get => m_DeferredLights; }
  116. public UniversalRenderer(UniversalRendererData data) : base(data)
  117. {
  118. #if ENABLE_VR && ENABLE_XR_MODULE
  119. UniversalRenderPipeline.m_XRSystem.InitializeXRSystemData(data.xrSystemData);
  120. #endif
  121. // TODO: should merge shaders with HDRP into core, XR dependency for now.
  122. // TODO: replace/merge URP blit into core blitter.
  123. Blitter.Initialize(data.shaders.coreBlitPS, data.shaders.coreBlitColorAndDepthPS);
  124. m_BlitMaterial = CoreUtils.CreateEngineMaterial(data.shaders.blitPS);
  125. m_CopyDepthMaterial = CoreUtils.CreateEngineMaterial(data.shaders.copyDepthPS);
  126. m_SamplingMaterial = CoreUtils.CreateEngineMaterial(data.shaders.samplingPS);
  127. //m_TileDepthInfoMaterial = CoreUtils.CreateEngineMaterial(data.shaders.tileDepthInfoPS);
  128. //m_TileDeferredMaterial = CoreUtils.CreateEngineMaterial(data.shaders.tileDeferredPS);
  129. m_StencilDeferredMaterial = CoreUtils.CreateEngineMaterial(data.shaders.stencilDeferredPS);
  130. m_CameraMotionVecMaterial = CoreUtils.CreateEngineMaterial(data.shaders.cameraMotionVector);
  131. m_ObjectMotionVecMaterial = CoreUtils.CreateEngineMaterial(data.shaders.objectMotionVector);
  132. StencilStateData stencilData = data.defaultStencilState;
  133. m_DefaultStencilState = StencilState.defaultValue;
  134. m_DefaultStencilState.enabled = stencilData.overrideStencilState;
  135. m_DefaultStencilState.SetCompareFunction(stencilData.stencilCompareFunction);
  136. m_DefaultStencilState.SetPassOperation(stencilData.passOperation);
  137. m_DefaultStencilState.SetFailOperation(stencilData.failOperation);
  138. m_DefaultStencilState.SetZFailOperation(stencilData.zFailOperation);
  139. m_IntermediateTextureMode = data.intermediateTextureMode;
  140. {
  141. var settings = LightCookieManager.Settings.GetDefault();
  142. var asset = UniversalRenderPipeline.asset;
  143. if (asset)
  144. {
  145. settings.atlas.format = asset.additionalLightsCookieFormat;
  146. settings.atlas.resolution = asset.additionalLightsCookieResolution;
  147. }
  148. m_LightCookieManager = new LightCookieManager(ref settings);
  149. }
  150. this.stripShadowsOffVariants = true;
  151. this.stripAdditionalLightOffVariants = true;
  152. ForwardLights.InitParams forwardInitParams;
  153. forwardInitParams.lightCookieManager = m_LightCookieManager;
  154. forwardInitParams.clusteredRendering = data.clusteredRendering;
  155. forwardInitParams.tileSize = (int)data.tileSize;
  156. m_ForwardLights = new ForwardLights(forwardInitParams);
  157. //m_DeferredLights.LightCulling = data.lightCulling;
  158. this.m_RenderingMode = data.renderingMode;
  159. this.m_DepthPrimingMode = data.depthPrimingMode;
  160. useRenderPassEnabled = data.useNativeRenderPass && SystemInfo.graphicsDeviceType != GraphicsDeviceType.OpenGLES2;
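// Native render passes (framebuffer fetch / subpass merging) are never enabled on OpenGL ES 2.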
  161. #if UNITY_ANDROID || UNITY_IOS || UNITY_TVOS
  162. this.m_DepthPrimingRecommended = false;
  163. #else
  164. this.m_DepthPrimingRecommended = true;
  165. #endif
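// Depth priming is only recommended by default on non-mobile platforms; on tile-based mobile GPUs the extra prepass bandwidth usually outweighs the overdraw savings.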
  166. // Note: Since all custom render passes inject first and we have stable sort,
  167. // we inject the builtin passes in the before events.
  168. m_MainLightShadowCasterPass = new MainLightShadowCasterPass(RenderPassEvent.BeforeRenderingShadows);
  169. m_AdditionalLightsShadowCasterPass = new AdditionalLightsShadowCasterPass(RenderPassEvent.BeforeRenderingShadows);
  170. #if ENABLE_VR && ENABLE_XR_MODULE
  171. m_XROcclusionMeshPass = new XROcclusionMeshPass(RenderPassEvent.BeforeRenderingOpaques);
172. // Schedule the XR copy-depth pass right after m_FinalBlitPass (AfterRendering + 1).
  173. m_XRCopyDepthPass = new CopyDepthPass(RenderPassEvent.AfterRendering + 2, m_CopyDepthMaterial);
  174. #endif
  175. m_DepthPrepass = new DepthOnlyPass(RenderPassEvent.BeforeRenderingPrePasses, RenderQueueRange.opaque, data.opaqueLayerMask);
  176. m_DepthNormalPrepass = new DepthNormalOnlyPass(RenderPassEvent.BeforeRenderingPrePasses, RenderQueueRange.opaque, data.opaqueLayerMask);
  177. m_MotionVectorPass = new MotionVectorRenderPass(m_CameraMotionVecMaterial, m_ObjectMotionVecMaterial);
  178. if (this.renderingMode == RenderingMode.Forward)
  179. {
  180. m_PrimedDepthCopyPass = new CopyDepthPass(RenderPassEvent.AfterRenderingPrePasses, m_CopyDepthMaterial);
  181. }
  182. if (this.renderingMode == RenderingMode.Deferred)
  183. {
  184. var deferredInitParams = new DeferredLights.InitParams();
  185. deferredInitParams.tileDepthInfoMaterial = m_TileDepthInfoMaterial;
  186. deferredInitParams.tileDeferredMaterial = m_TileDeferredMaterial;
  187. deferredInitParams.stencilDeferredMaterial = m_StencilDeferredMaterial;
  188. deferredInitParams.lightCookieManager = m_LightCookieManager;
  189. m_DeferredLights = new DeferredLights(deferredInitParams, useRenderPassEnabled);
  190. m_DeferredLights.AccurateGbufferNormals = data.accurateGbufferNormals;
  191. //m_DeferredLights.TiledDeferredShading = data.tiledDeferredShading;
  192. m_DeferredLights.TiledDeferredShading = false;
  193. m_GBufferPass = new GBufferPass(RenderPassEvent.BeforeRenderingGbuffer, RenderQueueRange.opaque, data.opaqueLayerMask, m_DefaultStencilState, stencilData.stencilReference, m_DeferredLights);
  194. // Forward-only pass only runs if deferred renderer is enabled.
  195. // It allows specific materials to be rendered in a forward-like pass.
196. // We render both the gbuffer pass and the forward-only pass before the deferred lighting pass so we can minimize depth buffer copies and
197. // benefit from some depth rejection.
198. // - If a material can be rendered either forward or deferred, then it should declare both a UniversalForward and a UniversalGBuffer pass.
199. // - If a material cannot be lit in deferred (unlit, bakedLit, or special materials such as hair or skin shaders), then it should declare a UniversalForwardOnly pass.
200. // - Legacy materials have an unnamed pass, which is implicitly renamed to SRPDefaultUnlit. In that case, they are considered forward-only too.
201. // Declaring a material with both an unnamed pass and a UniversalForward/UniversalForwardOnly pass is an ERROR, as the material will be rendered twice.
  202. StencilState forwardOnlyStencilState = DeferredLights.OverwriteStencil(m_DefaultStencilState, (int)StencilUsage.MaterialMask);
  203. ShaderTagId[] forwardOnlyShaderTagIds = new ShaderTagId[]
  204. {
  205. new ShaderTagId("UniversalForwardOnly"),
  206. new ShaderTagId("SRPDefaultUnlit"), // Legacy shaders (do not have a gbuffer pass) are considered forward-only for backward compatibility
  207. new ShaderTagId("LightweightForward") // Legacy shaders (do not have a gbuffer pass) are considered forward-only for backward compatibility
  208. };
  209. int forwardOnlyStencilRef = stencilData.stencilReference | (int)StencilUsage.MaterialUnlit;
  210. m_GBufferCopyDepthPass = new CopyDepthPass(RenderPassEvent.BeforeRenderingGbuffer + 1, m_CopyDepthMaterial);
  211. m_TileDepthRangePass = new TileDepthRangePass(RenderPassEvent.BeforeRenderingGbuffer + 2, m_DeferredLights, 0);
  212. m_TileDepthRangeExtraPass = new TileDepthRangePass(RenderPassEvent.BeforeRenderingGbuffer + 3, m_DeferredLights, 1);
  213. m_DeferredPass = new DeferredPass(RenderPassEvent.BeforeRenderingDeferredLights, m_DeferredLights);
  214. m_RenderOpaqueForwardOnlyPass = new DrawObjectsPass("Render Opaques Forward Only", forwardOnlyShaderTagIds, true, RenderPassEvent.BeforeRenderingOpaques, RenderQueueRange.opaque, data.opaqueLayerMask, forwardOnlyStencilState, forwardOnlyStencilRef);
  215. }
  216. m_OculusMotionVecPass = new OculusMotionVectorPass(URPProfileId.DrawMVOpaqueObjects, true, RenderPassEvent.BeforeRenderingOpaques, RenderQueueRange.opaque, data.opaqueLayerMask, m_DefaultStencilState, stencilData.stencilReference);
  217. // Always create this pass even in deferred because we use it for wireframe rendering in the Editor or offscreen depth texture rendering.
  218. m_RenderOpaqueForwardPass = new DrawObjectsPass(URPProfileId.DrawOpaqueObjects, true, RenderPassEvent.BeforeRenderingOpaques, RenderQueueRange.opaque, data.opaqueLayerMask, m_DefaultStencilState, stencilData.stencilReference);
  219. m_CopyDepthPass = new CopyDepthPass(RenderPassEvent.AfterRenderingSkybox, m_CopyDepthMaterial);
  220. m_DrawSkyboxPass = new DrawSkyboxPass(RenderPassEvent.BeforeRenderingSkybox);
  221. m_CopyColorPass = new CopyColorPass(RenderPassEvent.AfterRenderingSkybox, m_SamplingMaterial, m_BlitMaterial);
  222. #if ADAPTIVE_PERFORMANCE_2_1_0_OR_NEWER
  223. if (needTransparencyPass)
  224. #endif
  225. {
  226. m_TransparentSettingsPass = new TransparentSettingsPass(RenderPassEvent.BeforeRenderingTransparents, data.shadowTransparentReceive);
  227. m_RenderTransparentForwardPass = new DrawObjectsPass(URPProfileId.DrawTransparentObjects, false, RenderPassEvent.BeforeRenderingTransparents, RenderQueueRange.transparent, data.transparentLayerMask, m_DefaultStencilState, stencilData.stencilReference);
  228. }
  229. m_OnRenderObjectCallbackPass = new InvokeOnRenderObjectCallbackPass(RenderPassEvent.BeforeRenderingPostProcessing);
  230. m_PostProcessPasses = new PostProcessPasses(data.postProcessData, m_BlitMaterial);
  231. m_CapturePass = new CapturePass(RenderPassEvent.AfterRendering);
  232. m_FinalBlitPass = new FinalBlitPass(RenderPassEvent.AfterRendering + 1, m_BlitMaterial);
  233. #if UNITY_EDITOR
  234. m_FinalDepthCopyPass = new CopyDepthPass(RenderPassEvent.AfterRendering + 9, m_CopyDepthMaterial);
  235. #endif
  236. // RenderTexture format depends on camera and pipeline (HDR, non HDR, etc)
  237. // Samples (MSAA) depend on camera and pipeline
  238. m_ColorBufferSystem = new RenderTargetBufferSystem("_CameraColorAttachment");
  239. m_CameraDepthAttachment.Init("_CameraDepthAttachment");
  240. m_DepthTexture.Init("_CameraDepthTexture");
  241. m_NormalsTexture.Init("_CameraNormalsTexture");
  242. m_OpaqueColor.Init("_CameraOpaqueTexture");
  243. m_DepthInfoTexture.Init("_DepthInfoTexture");
  244. m_TileDepthInfoTexture.Init("_TileDepthInfoTexture");
  245. supportedRenderingFeatures = new RenderingFeatures()
  246. {
  247. cameraStacking = true,
  248. };
  249. if (this.renderingMode == RenderingMode.Deferred)
  250. {
  251. // Deferred rendering does not support MSAA.
  252. this.supportedRenderingFeatures.msaa = false;
  253. // Avoid legacy platforms: use vulkan instead.
  254. unsupportedGraphicsDeviceTypes = new GraphicsDeviceType[]
  255. {
  256. GraphicsDeviceType.OpenGLCore,
  257. GraphicsDeviceType.OpenGLES2,
  258. GraphicsDeviceType.OpenGLES3
  259. };
  260. }
  261. LensFlareCommonSRP.mergeNeeded = 0;
  262. LensFlareCommonSRP.maxLensFlareWithOcclusionTemporalSample = 1;
  263. LensFlareCommonSRP.Initialize();
  264. }
  265. /// <inheritdoc />
  266. protected override void Dispose(bool disposing)
  267. {
  268. m_ForwardLights.Cleanup();
  269. m_PostProcessPasses.Dispose();
  270. CoreUtils.Destroy(m_BlitMaterial);
  271. CoreUtils.Destroy(m_CopyDepthMaterial);
  272. CoreUtils.Destroy(m_SamplingMaterial);
  273. CoreUtils.Destroy(m_TileDepthInfoMaterial);
  274. CoreUtils.Destroy(m_TileDeferredMaterial);
  275. CoreUtils.Destroy(m_StencilDeferredMaterial);
  276. CoreUtils.Destroy(m_CameraMotionVecMaterial);
  277. CoreUtils.Destroy(m_ObjectMotionVecMaterial);
  278. Blitter.Cleanup();
  279. LensFlareCommonSRP.Dispose();
  280. }
  281. private void SetupFinalPassDebug(ref CameraData cameraData)
  282. {
  283. if ((DebugHandler != null) && DebugHandler.IsActiveForCamera(ref cameraData))
  284. {
  285. if (DebugHandler.TryGetFullscreenDebugMode(out DebugFullScreenMode fullScreenDebugMode, out int textureHeightPercent))
  286. {
  287. Camera camera = cameraData.camera;
  288. float screenWidth = camera.pixelWidth;
  289. float screenHeight = camera.pixelHeight;
  290. float height = Mathf.Clamp01(textureHeightPercent / 100f) * screenHeight;
  291. float width = height * (screenWidth / screenHeight);
  292. float normalizedSizeX = width / screenWidth;
  293. float normalizedSizeY = height / screenHeight;
  294. Rect normalizedRect = new Rect(1 - normalizedSizeX, 1 - normalizedSizeY, normalizedSizeX, normalizedSizeY);
  295. switch (fullScreenDebugMode)
  296. {
  297. case DebugFullScreenMode.Depth:
  298. {
  299. DebugHandler.SetDebugRenderTarget(m_DepthTexture.Identifier(), normalizedRect, true);
  300. break;
  301. }
  302. case DebugFullScreenMode.AdditionalLightsShadowMap:
  303. {
  304. DebugHandler.SetDebugRenderTarget(m_AdditionalLightsShadowCasterPass.m_AdditionalLightsShadowmapTexture, normalizedRect, false);
  305. break;
  306. }
  307. case DebugFullScreenMode.MainLightShadowMap:
  308. {
  309. DebugHandler.SetDebugRenderTarget(m_MainLightShadowCasterPass.m_MainLightShadowmapTexture, normalizedRect, false);
  310. break;
  311. }
  312. default:
  313. {
  314. break;
  315. }
  316. }
  317. }
  318. else
  319. {
  320. DebugHandler.ResetDebugRenderTarget();
  321. }
  322. }
  323. }
  324. /// <inheritdoc />
  325. public override void Setup(ScriptableRenderContext context, ref RenderingData renderingData)
  326. {
  327. m_ForwardLights.ProcessLights(ref renderingData);
  328. ref CameraData cameraData = ref renderingData.cameraData;
  329. Camera camera = cameraData.camera;
  330. RenderTextureDescriptor cameraTargetDescriptor = cameraData.cameraTargetDescriptor;
  331. DebugHandler?.Setup(context, ref cameraData);
  332. if (cameraData.cameraType != CameraType.Game)
  333. useRenderPassEnabled = false;
  334. // Special path for depth only offscreen cameras. Only write opaques + transparents.
  335. bool isOffscreenDepthTexture = cameraData.targetTexture != null && cameraData.targetTexture.format == RenderTextureFormat.Depth;
  336. if (isOffscreenDepthTexture)
  337. {
  338. ConfigureCameraTarget(BuiltinRenderTextureType.CameraTarget, BuiltinRenderTextureType.CameraTarget);
  339. AddRenderPasses(ref renderingData);
  340. EnqueuePass(m_RenderOpaqueForwardPass);
  341. // TODO: Do we need to inject transparents and skybox when rendering depth only camera? They don't write to depth.
  342. EnqueuePass(m_DrawSkyboxPass);
  343. #if ADAPTIVE_PERFORMANCE_2_1_0_OR_NEWER
  344. if (!needTransparencyPass)
  345. return;
  346. #endif
  347. EnqueuePass(m_RenderTransparentForwardPass);
  348. return;
  349. }
  350. if (m_DeferredLights != null)
  351. {
  352. m_DeferredLights.ResolveMixedLightingMode(ref renderingData);
  353. m_DeferredLights.IsOverlay = cameraData.renderType == CameraRenderType.Overlay;
  354. }
  355. // Assign the camera color target early in case it is needed during AddRenderPasses.
  356. bool isPreviewCamera = cameraData.isPreviewCamera;
  357. var createColorTexture = m_IntermediateTextureMode == IntermediateTextureMode.Always && !isPreviewCamera;
  358. if (createColorTexture)
  359. {
  360. m_ActiveCameraColorAttachment = m_ColorBufferSystem.GetBackBuffer();
  361. var activeColorRenderTargetId = m_ActiveCameraColorAttachment.Identifier();
  362. #if ENABLE_VR && ENABLE_XR_MODULE
  363. if (cameraData.xr.enabled) activeColorRenderTargetId = new RenderTargetIdentifier(activeColorRenderTargetId, 0, CubemapFace.Unknown, -1);
  364. #endif
  365. ConfigureCameraColorTarget(activeColorRenderTargetId);
  366. }
  367. // Add render passes and gather the input requirements
  368. isCameraColorTargetValid = true;
  369. AddRenderPasses(ref renderingData);
  370. isCameraColorTargetValid = false;
  371. RenderPassInputSummary renderPassInputs = GetRenderPassInputs(ref renderingData);
  372. // Should apply post-processing after rendering this camera?
  373. bool applyPostProcessing = cameraData.postProcessEnabled && m_PostProcessPasses.isCreated;
  374. // There's at least a camera in the camera stack that applies post-processing
  375. bool anyPostProcessing = renderingData.postProcessingEnabled && m_PostProcessPasses.isCreated;
376. // True if this camera's post-processing is enabled and any enabled post-processing effect requires the depth texture as a shader read resource (Motion Blur/DoF).
  377. bool cameraHasPostProcessingWithDepth = applyPostProcessing && cameraData.postProcessingRequiresDepthTexture;
  378. // TODO: We could cache and generate the LUT before rendering the stack
  379. bool generateColorGradingLUT = cameraData.postProcessEnabled && m_PostProcessPasses.isCreated;
  380. bool isSceneViewCamera = cameraData.isSceneViewCamera;
  381. // This indicates whether the renderer will output a depth texture.
  382. bool requiresDepthTexture = cameraData.requiresDepthTexture || renderPassInputs.requiresDepthTexture || m_DepthPrimingMode == DepthPrimingMode.Forced;
  383. #if UNITY_EDITOR
  384. bool isGizmosEnabled = UnityEditor.Handles.ShouldRenderGizmos();
  385. #else
  386. bool isGizmosEnabled = false;
  387. #endif
  388. bool mainLightShadows = m_MainLightShadowCasterPass.Setup(ref renderingData);
  389. bool additionalLightShadows = m_AdditionalLightsShadowCasterPass.Setup(ref renderingData);
  390. bool transparentsNeedSettingsPass = m_TransparentSettingsPass.Setup(ref renderingData);
  391. // Depth prepass is generated in the following cases:
392. // - If the game or offscreen camera requires it, we check whether we can copy the depth from the opaque rendering pass and use that instead.
393. // - Scene or preview cameras always require a depth texture. We do a depth prepass to simplify it, and it shouldn't matter much for the editor.
  394. // - Render passes require it
  395. bool requiresDepthPrepass = (requiresDepthTexture || cameraHasPostProcessingWithDepth) && !CanCopyDepth(ref renderingData.cameraData);
  396. requiresDepthPrepass |= isSceneViewCamera;
  397. requiresDepthPrepass |= isGizmosEnabled;
  398. requiresDepthPrepass |= isPreviewCamera;
  399. requiresDepthPrepass |= renderPassInputs.requiresDepthPrepass;
  400. requiresDepthPrepass |= renderPassInputs.requiresNormalsTexture;
401. // The current aim of the depth prepass is to generate a copy of the depth buffer; it is NOT to prime the depth buffer and reduce overdraw on non-mobile platforms.
  402. // When deferred renderer is enabled, depth buffer is already accessible so depth prepass is not needed.
  403. // The only exception is for generating depth-normal textures: SSAO pass needs it and it must run before forward-only geometry.
  404. // DepthNormal prepass will render:
  405. // - forward-only geometry when deferred renderer is enabled
  406. // - all geometry when forward renderer is enabled
  407. if (requiresDepthPrepass && this.actualRenderingMode == RenderingMode.Deferred && !renderPassInputs.requiresNormalsTexture)
  408. requiresDepthPrepass = false;
  409. requiresDepthPrepass |= m_DepthPrimingMode == DepthPrimingMode.Forced;
  410. // If possible try to merge the opaque and skybox passes instead of splitting them when "Depth Texture" is required.
  411. // The copying of depth should normally happen after rendering opaques.
412. // But if we only require it for post-processing or the scene camera, then we do it after rendering transparent objects.
413. // Aim for the most optimized render pass event for the depth copy (the goal is to minimize the number of render passes).
  414. if (requiresDepthTexture)
  415. {
  416. RenderPassEvent copyDepthPassEvent = RenderPassEvent.AfterRenderingOpaques;
  417. // RenderPassInputs's requiresDepthTexture is configured through ScriptableRenderPass's ConfigureInput function
  418. if (renderPassInputs.requiresDepthTexture)
  419. {
  420. // Do depth copy before the render pass that requires depth texture as shader read resource
  421. copyDepthPassEvent = (RenderPassEvent)Mathf.Min((int)RenderPassEvent.AfterRenderingTransparents, ((int)renderPassInputs.requiresDepthTextureEarliestEvent) - 1);
  422. }
  423. m_CopyDepthPass.renderPassEvent = copyDepthPassEvent;
  424. }
  425. else if (cameraHasPostProcessingWithDepth || isSceneViewCamera || isGizmosEnabled)
  426. {
427. // If only post-processing requires the depth texture, we could re-use the depth buffer from the main geometry pass instead of enqueuing a depth copy pass, but there is no proper API for that yet, so we resort to a depth copy pass for now.
  428. m_CopyDepthPass.renderPassEvent = RenderPassEvent.AfterRenderingTransparents;
  429. }
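// Decide whether rendering must go through an intermediate color texture (e.g. camera stacking, post-processing, or passes that read the camera color); preview cameras never get one.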
  430. createColorTexture |= RequiresIntermediateColorTexture(ref cameraData);
  431. createColorTexture |= renderPassInputs.requiresColorTexture;
  432. createColorTexture &= !isPreviewCamera;
  433. // If camera requires depth and there's no depth pre-pass we create a depth texture that can be read later by effect requiring it.
  434. // When deferred renderer is enabled, we must always create a depth texture and CANNOT use BuiltinRenderTextureType.CameraTarget. This is to get
  435. // around a bug where during gbuffer pass (MRT pass), the camera depth attachment is correctly bound, but during
  436. // deferred pass ("camera color" + "camera depth"), the implicit depth surface of "camera color" is used instead of "camera depth",
  437. // because BuiltinRenderTextureType.CameraTarget for depth means there is no explicit depth attachment...
  438. bool createDepthTexture = (requiresDepthTexture || cameraHasPostProcessingWithDepth) && !requiresDepthPrepass;
  439. createDepthTexture |= (cameraData.renderType == CameraRenderType.Base && !cameraData.resolveFinalTarget);
  440. // Deferred renderer always need to access depth buffer.
  441. createDepthTexture |= (this.actualRenderingMode == RenderingMode.Deferred && !useRenderPassEnabled);
  442. // Some render cases (e.g. Material previews) have shown we need to create a depth texture when we're forcing a prepass.
  443. createDepthTexture |= m_DepthPrimingMode == DepthPrimingMode.Forced;
  444. #if ENABLE_VR && ENABLE_XR_MODULE
  445. if (cameraData.xr.enabled)
  446. {
  447. // URP can't handle msaa/size mismatch between depth RT and color RT(for now we create intermediate textures to ensure they match)
  448. createDepthTexture |= createColorTexture;
  449. createColorTexture = createDepthTexture;
  450. }
  451. #endif
  452. #if UNITY_ANDROID || UNITY_WEBGL
  453. if (SystemInfo.graphicsDeviceType != GraphicsDeviceType.Vulkan)
  454. {
455. // GLES cannot use a render texture's depth buffer together with the backbuffer's color buffer;
456. // in that case we create a color texture for it too.
  457. createColorTexture |= createDepthTexture;
  458. }
  459. #endif
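// Depth priming is used only when it is forced, or recommended and set to Auto; additionally it requires a depth prepass,
// an intermediate color or depth target, Forward rendering mode, and a Base camera (or a camera that clears depth).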
  460. bool useDepthPriming = (m_DepthPrimingRecommended && m_DepthPrimingMode == DepthPrimingMode.Auto) || (m_DepthPrimingMode == DepthPrimingMode.Forced);
  461. useDepthPriming &= requiresDepthPrepass && (createDepthTexture || createColorTexture) && m_RenderingMode == RenderingMode.Forward && (cameraData.renderType == CameraRenderType.Base || cameraData.clearDepth);
  462. // Temporarily disable depth priming on certain platforms such as Vulkan because we lack proper depth resolve support.
  463. useDepthPriming &= SystemInfo.graphicsDeviceType != GraphicsDeviceType.Vulkan || cameraTargetDescriptor.msaaSamples == 1;
  464. if (useRenderPassEnabled || useDepthPriming)
  465. {
  466. createDepthTexture |= createColorTexture;
  467. createColorTexture = createDepthTexture;
  468. }
469. // Configure all settings required to start a new camera stack (base camera only).
  470. if (cameraData.renderType == CameraRenderType.Base)
  471. {
  472. RenderTargetHandle cameraTargetHandle = RenderTargetHandle.GetCameraTarget(cameraData.xr);
  473. bool sceneViewFilterEnabled = camera.sceneViewFilterMode == Camera.SceneViewFilterMode.ShowFiltered;
  474. //Scene filtering redraws the objects on top of the resulting frame. It has to draw directly to the sceneview buffer.
  475. m_ActiveCameraColorAttachment = (createColorTexture && !sceneViewFilterEnabled) ? m_ColorBufferSystem.GetBackBuffer() : cameraTargetHandle;
  476. m_ActiveCameraDepthAttachment = (createDepthTexture && !sceneViewFilterEnabled) ? m_CameraDepthAttachment : cameraTargetHandle;
  477. bool intermediateRenderTexture = createColorTexture || createDepthTexture;
478. // Don't create textures for Overlay cameras, as they already render on top of the textures created for the Base camera.
  479. if (intermediateRenderTexture)
  480. CreateCameraRenderTarget(context, ref cameraTargetDescriptor, useDepthPriming);
  481. }
  482. else
  483. {
  484. m_ActiveCameraColorAttachment = m_ColorBufferSystem.GetBackBuffer();
  485. m_ActiveCameraDepthAttachment = m_CameraDepthAttachment;
  486. }
  487. cameraData.renderer.useDepthPriming = useDepthPriming;
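// A separate depth copy pass is only needed when depth is required, no depth prepass already produces _CameraDepthTexture, and an intermediate depth attachment exists to copy from.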
  488. bool requiresDepthCopyPass = !requiresDepthPrepass
  489. && (requiresDepthTexture || cameraHasPostProcessingWithDepth)
  490. && createDepthTexture;
  491. bool copyColorPass = renderingData.cameraData.requiresOpaqueTexture || renderPassInputs.requiresColorTexture;
  492. if ((DebugHandler != null) && DebugHandler.IsActiveForCamera(ref cameraData))
  493. {
  494. DebugHandler.TryGetFullscreenDebugMode(out var fullScreenMode);
  495. if (fullScreenMode == DebugFullScreenMode.Depth)
  496. {
  497. requiresDepthPrepass = true;
  498. }
  499. if (!DebugHandler.IsLightingActive)
  500. {
  501. mainLightShadows = false;
  502. additionalLightShadows = false;
  503. if (!isSceneViewCamera)
  504. {
  505. requiresDepthPrepass = false;
  506. generateColorGradingLUT = false;
  507. copyColorPass = false;
  508. requiresDepthCopyPass = false;
  509. }
  510. }
  511. if (useRenderPassEnabled)
  512. useRenderPassEnabled = DebugHandler.IsRenderPassSupported;
  513. }
  514. // Assign camera targets (color and depth)
  515. {
  516. var activeColorRenderTargetId = m_ActiveCameraColorAttachment.Identifier();
  517. var activeDepthRenderTargetId = m_ActiveCameraDepthAttachment.Identifier();
  518. #if ENABLE_VR && ENABLE_XR_MODULE
  519. if (cameraData.xr.enabled)
  520. {
  521. activeColorRenderTargetId = new RenderTargetIdentifier(activeColorRenderTargetId, 0, CubemapFace.Unknown, -1);
  522. activeDepthRenderTargetId = new RenderTargetIdentifier(activeDepthRenderTargetId, 0, CubemapFace.Unknown, -1);
  523. }
  524. #endif
  525. ConfigureCameraTarget(activeColorRenderTargetId, activeDepthRenderTargetId);
  526. }
  527. bool hasPassesAfterPostProcessing = activeRenderPassQueue.Find(x => x.renderPassEvent == RenderPassEvent.AfterRenderingPostProcessing) != null;
  528. if (mainLightShadows)
  529. EnqueuePass(m_MainLightShadowCasterPass);
  530. if (additionalLightShadows)
  531. EnqueuePass(m_AdditionalLightsShadowCasterPass);
  532. if (requiresDepthPrepass)
  533. {
  534. if (renderPassInputs.requiresNormalsTexture)
  535. {
  536. if (this.actualRenderingMode == RenderingMode.Deferred)
  537. {
538. // In deferred mode, the depth-normal prepass really does prime the depth and normal buffers instead of creating a copy.
539. // It is necessary because we need to render depth & normals for forward-only geometry, and it is the only way
540. // to get them before the SSAO pass.
  541. int gbufferNormalIndex = m_DeferredLights.GBufferNormalSmoothnessIndex;
  542. m_DepthNormalPrepass.Setup(cameraTargetDescriptor, m_ActiveCameraDepthAttachment, m_DeferredLights.GbufferAttachments[gbufferNormalIndex]);
  543. // Change the normal format to the one used by the gbuffer.
  544. RenderTextureDescriptor normalDescriptor = m_DepthNormalPrepass.normalDescriptor;
  545. normalDescriptor.graphicsFormat = m_DeferredLights.GetGBufferFormat(gbufferNormalIndex);
  546. m_DepthNormalPrepass.normalDescriptor = normalDescriptor;
  547. // Depth is allocated by this renderer.
  548. m_DepthNormalPrepass.allocateDepth = false;
  549. // Only render forward-only geometry, as standard geometry will be rendered as normal into the gbuffer.
  550. if (RenderPassEvent.AfterRenderingGbuffer <= renderPassInputs.requiresDepthNormalAtEvent &&
  551. renderPassInputs.requiresDepthNormalAtEvent <= RenderPassEvent.BeforeRenderingOpaques)
  552. m_DepthNormalPrepass.shaderTagIds = k_DepthNormalsOnly;
  553. }
  554. else
  555. {
  556. m_DepthNormalPrepass.Setup(cameraTargetDescriptor, m_DepthTexture, m_NormalsTexture);
  557. }
  558. EnqueuePass(m_DepthNormalPrepass);
  559. }
  560. else
  561. {
562. // The deferred renderer does not require a depth prepass to generate a samplable depth texture.
  563. if (this.actualRenderingMode != RenderingMode.Deferred)
  564. {
  565. m_DepthPrepass.Setup(cameraTargetDescriptor, m_DepthTexture);
  566. EnqueuePass(m_DepthPrepass);
  567. }
  568. }
  569. }
  570. // Depth priming requires a manual resolve of MSAA depth right after the depth prepass. If autoresolve is supported but MSAA is 1x then a copy is still required.
  571. if (useDepthPriming && (SystemInfo.graphicsDeviceType != GraphicsDeviceType.Vulkan || cameraTargetDescriptor.msaaSamples == 1))
  572. {
  573. m_PrimedDepthCopyPass.Setup(m_ActiveCameraDepthAttachment, m_DepthTexture);
  574. m_PrimedDepthCopyPass.AllocateRT = false;
  575. EnqueuePass(m_PrimedDepthCopyPass);
  576. }
  577. if (generateColorGradingLUT)
  578. {
  579. colorGradingLutPass.Setup(colorGradingLut);
  580. EnqueuePass(colorGradingLutPass);
  581. }
  582. #if ENABLE_VR && ENABLE_XR_MODULE
  583. if (cameraData.xr.hasValidOcclusionMesh)
  584. EnqueuePass(m_XROcclusionMeshPass);
  585. #endif
  586. #if !UNITY_EDITOR
  587. if (cameraData.xr.motionVectorRenderTargetValid)
  588. {
  589. RenderTargetHandle motionVecHandle = new RenderTargetHandle(cameraData.xr.motionVectorRenderTarget);
  590. var rtMotionId = motionVecHandle.Identifier();
  591. rtMotionId = new RenderTargetIdentifier(rtMotionId, 0, CubemapFace.Unknown, -1);
  592. // ID is the same since a RenderTexture encapsulates all the attachments, including both color+depth.
  593. m_OculusMotionVecPass.Setup(rtMotionId, rtMotionId);
  594. EnqueuePass(m_OculusMotionVecPass);
  595. }
  596. #endif
  597. if (this.actualRenderingMode == RenderingMode.Deferred)
  598. {
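// Framebuffer fetch cannot be used if a pass needs the depth-normal texture right after the gbuffer, or if native render passes are disabled for this camera.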
  599. if (m_DeferredLights.UseRenderPass && (RenderPassEvent.AfterRenderingGbuffer == renderPassInputs.requiresDepthNormalAtEvent || !useRenderPassEnabled))
  600. m_DeferredLights.DisableFramebufferFetchInput();
  601. EnqueueDeferred(ref renderingData, requiresDepthPrepass, renderPassInputs.requiresNormalsTexture, mainLightShadows, additionalLightShadows);
  602. }
  603. else
  604. {
  605. // Optimized store actions are very important on tile based GPUs and have a great impact on performance.
606. // If MSAA is enabled and any of the following passes need a copy of the color or depth target, make sure the MSAA'd surface is stored.
607. // If the following passes won't use it, then just resolve (the Resolve action still stores the resolved surface, but discards the MSAA'd surface, which is very expensive to store).
  608. RenderBufferStoreAction opaquePassColorStoreAction = RenderBufferStoreAction.Store;
  609. if (cameraTargetDescriptor.msaaSamples > 1)
  610. opaquePassColorStoreAction = copyColorPass ? RenderBufferStoreAction.StoreAndResolve : RenderBufferStoreAction.Store;
  611. // make sure we store the depth only if following passes need it.
  612. RenderBufferStoreAction opaquePassDepthStoreAction = (copyColorPass || requiresDepthCopyPass) ? RenderBufferStoreAction.Store : RenderBufferStoreAction.DontCare;
  613. #if ENABLE_VR && ENABLE_XR_MODULE
  614. if (cameraData.xr.enabled && cameraData.xr.copyDepth)
  615. {
  616. opaquePassDepthStoreAction = RenderBufferStoreAction.Store;
  617. }
  618. #endif
  619. m_RenderOpaqueForwardPass.ConfigureColorStoreAction(opaquePassColorStoreAction);
  620. m_RenderOpaqueForwardPass.ConfigureDepthStoreAction(opaquePassDepthStoreAction);
  621. EnqueuePass(m_RenderOpaqueForwardPass);
  622. }
  623. if (camera.clearFlags == CameraClearFlags.Skybox && cameraData.renderType != CameraRenderType.Overlay)
  624. {
  625. if (RenderSettings.skybox != null || (camera.TryGetComponent(out Skybox cameraSkybox) && cameraSkybox.material != null))
  626. EnqueuePass(m_DrawSkyboxPass);
  627. }
628. // If a depth texture was created we necessarily need to copy it; otherwise we could have rendered it to a renderbuffer.
  629. if (requiresDepthCopyPass)
  630. {
  631. m_CopyDepthPass.Setup(m_ActiveCameraDepthAttachment, m_DepthTexture);
  632. if (this.actualRenderingMode == RenderingMode.Deferred && !useRenderPassEnabled)
  633. m_CopyDepthPass.AllocateRT = false; // m_DepthTexture is already allocated by m_GBufferCopyDepthPass but it's not called when using RenderPass API.
  634. EnqueuePass(m_CopyDepthPass);
  635. }
  636. // For Base Cameras: Set the depth texture to the far Z if we do not have a depth prepass or copy depth
  637. if (cameraData.renderType == CameraRenderType.Base && !requiresDepthPrepass && !requiresDepthCopyPass)
  638. {
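// With a reversed Z buffer the far plane is at 0 (black texture); otherwise it is at 1 (white texture).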
  639. Shader.SetGlobalTexture(m_DepthTexture.id, SystemInfo.usesReversedZBuffer ? Texture2D.blackTexture : Texture2D.whiteTexture);
  640. }
  641. if (copyColorPass)
  642. {
643. // TODO: The downsampling method should be stored in the renderer instead of in the asset.
644. // We need to migrate this data to the renderer. For now, we query the method from the active asset.
  645. Downsampling downsamplingMethod = UniversalRenderPipeline.asset.opaqueDownsampling;
  646. m_CopyColorPass.Setup(m_ActiveCameraColorAttachment.Identifier(), m_OpaqueColor, downsamplingMethod);
  647. EnqueuePass(m_CopyColorPass);
  648. }
  649. if (renderPassInputs.requiresMotionVectors && !cameraData.xr.enabled)
  650. {
  651. SupportedRenderingFeatures.active.motionVectors = true; // hack for enabling UI
  652. var data = MotionVectorRendering.instance.GetMotionDataForCamera(camera, cameraData);
  653. m_MotionVectorPass.Setup(data);
  654. EnqueuePass(m_MotionVectorPass);
  655. }
  656. bool lastCameraInTheStack = cameraData.resolveFinalTarget;
  657. #if ADAPTIVE_PERFORMANCE_2_1_0_OR_NEWER
  658. if (needTransparencyPass)
  659. #endif
  660. {
  661. if (transparentsNeedSettingsPass)
  662. {
  663. EnqueuePass(m_TransparentSettingsPass);
  664. }
  665. // if this is not lastCameraInTheStack we still need to Store, since the MSAA buffer might be needed by the Overlay cameras
  666. RenderBufferStoreAction transparentPassColorStoreAction = cameraTargetDescriptor.msaaSamples > 1 && lastCameraInTheStack ? RenderBufferStoreAction.Resolve : RenderBufferStoreAction.Store;
  667. RenderBufferStoreAction transparentPassDepthStoreAction = RenderBufferStoreAction.DontCare;
668. // If the CopyDepthPass event is scheduled on or after AfterRenderingTransparents, we need to store the depth buffer, or resolve it in the MSAA case (store for now, until the latest trunk has depth resolve support).
  669. if (requiresDepthCopyPass && m_CopyDepthPass.renderPassEvent >= RenderPassEvent.AfterRenderingTransparents)
  670. transparentPassDepthStoreAction = RenderBufferStoreAction.Store;
  671. m_RenderTransparentForwardPass.ConfigureColorStoreAction(transparentPassColorStoreAction);
  672. m_RenderTransparentForwardPass.ConfigureDepthStoreAction(transparentPassDepthStoreAction);
  673. EnqueuePass(m_RenderTransparentForwardPass);
  674. }
  675. EnqueuePass(m_OnRenderObjectCallbackPass);
  676. bool hasCaptureActions = renderingData.cameraData.captureActions != null && lastCameraInTheStack;
  677. // When FXAA or scaling is active, we must perform an additional pass at the end of the frame for the following reasons:
  678. // 1. FXAA expects to be the last shader running on the image before it's presented to the screen. Since users are allowed
  679. // to add additional render passes after post processing occurs, we can't run FXAA until all of those passes complete as well.
  680. // The FinalPost pass is guaranteed to execute after user authored passes so FXAA is always run inside of it.
  681. // 2. UberPost can only handle upscaling with linear filtering. All other filtering methods require the FinalPost pass.
  682. bool applyFinalPostProcessing = anyPostProcessing && lastCameraInTheStack &&
  683. ((renderingData.cameraData.antialiasing == AntialiasingMode.FastApproximateAntialiasing) ||
  684. ((renderingData.cameraData.imageScalingMode == ImageScalingMode.Upscaling) && (renderingData.cameraData.upscalingFilter != ImageUpscalingFilter.Linear)));
  685. // When post-processing is enabled we can use the stack to resolve rendering to camera target (screen or RT).
686. // However, when there are render passes executing after post-processing we avoid resolving to screen so rendering can continue (before sRGBConversion etc.).
  687. bool resolvePostProcessingToCameraTarget = !hasCaptureActions && !hasPassesAfterPostProcessing && !applyFinalPostProcessing;
  688. if (lastCameraInTheStack)
  689. {
  690. SetupFinalPassDebug(ref cameraData);
  691. // Post-processing will resolve to final target. No need for final blit pass.
  692. if (applyPostProcessing)
  693. {
  694. // if resolving to screen we need to be able to perform sRGBConversion in post-processing if necessary
  695. bool doSRGBConversion = resolvePostProcessingToCameraTarget;
  696. postProcessPass.Setup(cameraTargetDescriptor, m_ActiveCameraColorAttachment, resolvePostProcessingToCameraTarget, m_ActiveCameraDepthAttachment, colorGradingLut, applyFinalPostProcessing, doSRGBConversion, hasPassesAfterPostProcessing);
  697. EnqueuePass(postProcessPass);
  698. }
  699. var sourceForFinalPass = m_ActiveCameraColorAttachment;
  700. // Do FXAA or any other final post-processing effect that might need to run after AA.
  701. if (applyFinalPostProcessing)
  702. {
  703. finalPostProcessPass.SetupFinalPass(sourceForFinalPass, true, hasPassesAfterPostProcessing);
  704. EnqueuePass(finalPostProcessPass);
  705. }
  706. if (renderingData.cameraData.captureActions != null)
  707. {
  708. m_CapturePass.Setup(sourceForFinalPass);
  709. EnqueuePass(m_CapturePass);
  710. }
  711. // if post-processing then we already resolved to camera target while doing post.
  712. // Also only do final blit if camera is not rendering to RT.
  713. bool cameraTargetResolved =
  714. // final PP always blit to camera target
  715. applyFinalPostProcessing ||
716. // no final PP, but we have a PP stack. In that case the stack blits to the camera target unless there are render passes after PP
  717. (applyPostProcessing && !hasPassesAfterPostProcessing && !hasCaptureActions) ||
  718. // offscreen camera rendering to a texture, we don't need a blit pass to resolve to screen
  719. m_ActiveCameraColorAttachment == RenderTargetHandle.GetCameraTarget(cameraData.xr);
  720. // We need final blit to resolve to screen
  721. if (!cameraTargetResolved)
  722. {
  723. m_FinalBlitPass.Setup(cameraTargetDescriptor, sourceForFinalPass);
  724. EnqueuePass(m_FinalBlitPass);
  725. }
  726. #if ENABLE_VR && ENABLE_XR_MODULE
  727. if (cameraData.xr.enabled)
  728. {
  729. bool depthTargetResolved =
  730. // active depth is depth target, we don't need a blit pass to resolve
  731. m_ActiveCameraDepthAttachment == RenderTargetHandle.GetCameraTarget(cameraData.xr);
  732. if (!depthTargetResolved && cameraData.xr.copyDepth)
  733. {
  734. m_XRCopyDepthPass.Setup(m_ActiveCameraDepthAttachment, RenderTargetHandle.GetCameraTarget(cameraData.xr));
  735. EnqueuePass(m_XRCopyDepthPass);
  736. }
  737. }
  738. #endif
  739. }
  740. // stay in RT so we resume rendering on stack after post-processing
  741. else if (applyPostProcessing)
  742. {
  743. postProcessPass.Setup(cameraTargetDescriptor, m_ActiveCameraColorAttachment, false, m_ActiveCameraDepthAttachment, colorGradingLut, false, false, true);
  744. EnqueuePass(postProcessPass);
  745. }
  746. #if UNITY_EDITOR
  747. if (isSceneViewCamera || (isGizmosEnabled && lastCameraInTheStack))
  748. {
  749. // Scene view camera should always resolve target (not stacked)
  750. m_FinalDepthCopyPass.Setup(m_DepthTexture, RenderTargetHandle.CameraTarget);
  751. m_FinalDepthCopyPass.MssaSamples = 0;
  752. EnqueuePass(m_FinalDepthCopyPass);
  753. }
  754. #endif
  755. }
  756. /// <inheritdoc />
  757. public override void SetupLights(ScriptableRenderContext context, ref RenderingData renderingData)
  758. {
  759. m_ForwardLights.Setup(context, ref renderingData);
  760. // Perform per-tile light culling on CPU
  761. if (this.actualRenderingMode == RenderingMode.Deferred)
  762. m_DeferredLights.SetupLights(context, ref renderingData);
  763. }
  764. /// <inheritdoc />
  765. public override void SetupCullingParameters(ref ScriptableCullingParameters cullingParameters,
  766. ref CameraData cameraData)
  767. {
768. // TODO: PerObjectCulling also affects reflection probes. Enabling it for now.
  769. // if (asset.additionalLightsRenderingMode == LightRenderingMode.Disabled ||
  770. // asset.maxAdditionalLightsCount == 0)
  771. // {
  772. // cullingParameters.cullingOptions |= CullingOptions.DisablePerObjectCulling;
  773. // }
  774. // We disable shadow casters if both shadow casting modes are turned off
  775. // or the shadow distance has been turned down to zero
  776. bool isShadowCastingDisabled = !UniversalRenderPipeline.asset.supportsMainLightShadows && !UniversalRenderPipeline.asset.supportsAdditionalLightShadows;
  777. bool isShadowDistanceZero = Mathf.Approximately(cameraData.maxShadowDistance, 0.0f);
  778. if (isShadowCastingDisabled || isShadowDistanceZero)
  779. {
  780. cullingParameters.cullingOptions &= ~CullingOptions.ShadowCasters;
  781. }
  782. if (this.actualRenderingMode == RenderingMode.Deferred)
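// Deferred shading is not limited by per-object light lists, so let culling return as many visible lights as it finds.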
  783. cullingParameters.maximumVisibleLights = 0xFFFF;
  784. else
  785. {
786. // We set the maximum number of visible lights allowed, adding one for the main light...
787. //
788. // Note: However, ScriptableRenderContext.Cull() does not differentiate between light types.
789. // If there is no active main light in the scene, ScriptableRenderContext.Cull() might return (cullingParameters.maximumVisibleLights) visible additional lights.
790. // i.e. ScriptableRenderContext.Cull() might return (UniversalRenderPipeline.maxVisibleAdditionalLights + 1) visible additional lights!
  791. cullingParameters.maximumVisibleLights = UniversalRenderPipeline.maxVisibleAdditionalLights + 1;
  792. }
  793. cullingParameters.shadowDistance = cameraData.maxShadowDistance;
  794. cullingParameters.conservativeEnclosingSphere = UniversalRenderPipeline.asset.conservativeEnclosingSphere;
  795. cullingParameters.numIterationsEnclosingSphere = UniversalRenderPipeline.asset.numIterationsEnclosingSphere;
  796. }
  797. /// <inheritdoc />
  798. public override void FinishRendering(CommandBuffer cmd)
  799. {
  800. m_ColorBufferSystem.Clear(cmd);
  801. if (m_ActiveCameraColorAttachment != RenderTargetHandle.CameraTarget)
  802. {
  803. m_ActiveCameraColorAttachment = RenderTargetHandle.CameraTarget;
  804. }
  805. if (m_ActiveCameraDepthAttachment != RenderTargetHandle.CameraTarget)
  806. {
  807. cmd.ReleaseTemporaryRT(m_ActiveCameraDepthAttachment.id);
  808. m_ActiveCameraDepthAttachment = RenderTargetHandle.CameraTarget;
  809. }
  810. }
  811. void EnqueueDeferred(ref RenderingData renderingData, bool hasDepthPrepass, bool hasNormalPrepass, bool applyMainShadow, bool applyAdditionalShadow)
  812. {
  813. m_DeferredLights.Setup(
  814. ref renderingData,
  815. applyAdditionalShadow ? m_AdditionalLightsShadowCasterPass : null,
  816. hasDepthPrepass,
  817. hasNormalPrepass,
  818. m_DepthTexture,
  819. m_DepthInfoTexture,
  820. m_TileDepthInfoTexture,
  821. m_ActiveCameraDepthAttachment,
  822. m_ActiveCameraColorAttachment
  823. );
824. // Need to call Configure for both of these passes to set up the input attachments on the first frame, otherwise errors will be raised.
  825. if (useRenderPassEnabled && m_DeferredLights.UseRenderPass)
  826. {
  827. m_GBufferPass.Configure(null, renderingData.cameraData.cameraTargetDescriptor);
  828. m_DeferredPass.Configure(null, renderingData.cameraData.cameraTargetDescriptor);
  829. }
  830. EnqueuePass(m_GBufferPass);
831. // Must copy depth for deferred shading. TODO: wait for an API fix to bind the depth texture as a read-only resource.
  832. if (!useRenderPassEnabled || !m_DeferredLights.UseRenderPass)
  833. {
  834. m_GBufferCopyDepthPass.Setup(m_CameraDepthAttachment, m_DepthTexture);
  835. EnqueuePass(m_GBufferCopyDepthPass);
  836. }
  837. // Note: DeferredRender.Setup is called by UniversalRenderPipeline.RenderSingleCamera (overrides ScriptableRenderer.Setup).
  838. // At this point, we do not know if m_DeferredLights.m_Tilers[x].m_Tiles actually contain any indices of lights intersecting tiles (If there are no lights intersecting tiles, we could skip several following passes) : this information is computed in DeferredRender.SetupLights, which is called later by UniversalRenderPipeline.RenderSingleCamera (via ScriptableRenderer.Execute).
  839. // However HasTileLights uses m_HasTileVisLights which is calculated by CheckHasTileLights from all visibleLights. visibleLights is the list of lights that have passed camera culling, so we know they are in front of the camera. So we can assume m_DeferredLights.m_Tilers[x].m_Tiles will not be empty in that case.
840. // m_DeferredLights.m_Tilers[x].m_Tiles could be empty if we implemented an algorithm accessing scene depth information on the CPU side, but this (accessing depth from the CPU) will probably not happen.
  841. if (m_DeferredLights.HasTileLights())
  842. {
843. // Compute for each tile a 32-bit bitmask in which a raised bit means "this 1/32nd depth slice contains geometry that could intersect with lights".
844. // Per-tile bitmasks are obtained by merging together the per-pixel bitmasks computed for each individual pixel of the tile.
  845. EnqueuePass(m_TileDepthRangePass);
846. // On some platforms, splitting the bitmask computation into two passes:
847. // 1/ compute bitmasks for individual pixels or small blocks of pixels
848. // 2/ merge those individual bitmasks into per-tile bitmasks
849. // provides better performance than doing it in the single pass above.
  850. if (m_DeferredLights.HasTileDepthRangeExtraPass())
  851. EnqueuePass(m_TileDepthRangeExtraPass);
  852. }
  853. EnqueuePass(m_DeferredPass);
  854. EnqueuePass(m_RenderOpaqueForwardOnlyPass);
  855. }
  856. private struct RenderPassInputSummary
  857. {
  858. internal bool requiresDepthTexture;
  859. internal bool requiresDepthPrepass;
  860. internal bool requiresNormalsTexture;
  861. internal bool requiresColorTexture;
  862. internal bool requiresMotionVectors;
  863. internal RenderPassEvent requiresDepthNormalAtEvent;
  864. internal RenderPassEvent requiresDepthTextureEarliestEvent;
  865. }
        private RenderPassInputSummary GetRenderPassInputs(ref RenderingData renderingData)
        {
            RenderPassEvent beforeMainRenderingEvent = m_RenderingMode == RenderingMode.Deferred ? RenderPassEvent.BeforeRenderingGbuffer : RenderPassEvent.BeforeRenderingOpaques;

            RenderPassInputSummary inputSummary = new RenderPassInputSummary();
            inputSummary.requiresDepthNormalAtEvent = RenderPassEvent.BeforeRenderingOpaques;
            inputSummary.requiresDepthTextureEarliestEvent = RenderPassEvent.BeforeRenderingPostProcessing;
            for (int i = 0; i < activeRenderPassQueue.Count; ++i)
            {
                ScriptableRenderPass pass = activeRenderPassQueue[i];
                bool needsDepth = (pass.input & ScriptableRenderPassInput.Depth) != ScriptableRenderPassInput.None;
                bool needsNormals = (pass.input & ScriptableRenderPassInput.Normal) != ScriptableRenderPassInput.None;
                bool needsColor = (pass.input & ScriptableRenderPassInput.Color) != ScriptableRenderPassInput.None;
                bool needsMotion = (pass.input & ScriptableRenderPassInput.Motion) != ScriptableRenderPassInput.None;
                bool eventBeforeMainRendering = pass.renderPassEvent <= beforeMainRenderingEvent;

                inputSummary.requiresDepthTexture |= needsDepth;
                inputSummary.requiresDepthPrepass |= needsNormals || (needsDepth && eventBeforeMainRendering);
                inputSummary.requiresNormalsTexture |= needsNormals;
                inputSummary.requiresColorTexture |= needsColor;
                inputSummary.requiresMotionVectors |= needsMotion;
                if (needsDepth)
                    inputSummary.requiresDepthTextureEarliestEvent = (RenderPassEvent)Mathf.Min((int)pass.renderPassEvent, (int)inputSummary.requiresDepthTextureEarliestEvent);
                if (needsNormals || needsDepth)
                    inputSummary.requiresDepthNormalAtEvent = (RenderPassEvent)Mathf.Min((int)pass.renderPassEvent, (int)inputSummary.requiresDepthNormalAtEvent);
            }

            return inputSummary;
        }
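
        // Allocates and configures the intermediate camera color and depth attachments described by 'descriptor'
        // and binds them as the active camera targets. The color target comes from m_ColorBufferSystem
        // (the swappable color buffers); the depth target is a temporary RT requested here when a separate
        // depth attachment is in use.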
        void CreateCameraRenderTarget(ScriptableRenderContext context, ref RenderTextureDescriptor descriptor, bool primedDepth)
        {
            CommandBuffer cmd = CommandBufferPool.Get();
            using (new ProfilingScope(null, Profiling.createCameraRenderTarget))
            {
                if (m_ActiveCameraColorAttachment != RenderTargetHandle.CameraTarget)
                {
                    bool useDepthRenderBuffer = m_ActiveCameraDepthAttachment == RenderTargetHandle.CameraTarget;
                    var colorDescriptor = descriptor;
                    colorDescriptor.useMipMap = false;
                    colorDescriptor.autoGenerateMips = false;
                    colorDescriptor.depthBufferBits = (useDepthRenderBuffer) ? k_DepthStencilBufferBits : 0;
                    m_ColorBufferSystem.SetCameraSettings(cmd, colorDescriptor, FilterMode.Bilinear);

                    if (useDepthRenderBuffer)
                        ConfigureCameraTarget(m_ColorBufferSystem.GetBackBuffer(cmd).id, m_ColorBufferSystem.GetBufferA().id);
                    else
                        ConfigureCameraColorTarget(m_ColorBufferSystem.GetBackBuffer(cmd).id);

                    m_ActiveCameraColorAttachment = m_ColorBufferSystem.GetBackBuffer(cmd);
                    cmd.SetGlobalTexture("_CameraColorTexture", m_ActiveCameraColorAttachment.id);
                    // Also set _AfterPostProcessTexture; users might still rely on it, even though it is now always the camera target due to the swap-buffer system.
                    cmd.SetGlobalTexture("_AfterPostProcessTexture", m_ActiveCameraColorAttachment.id);
                }

                if (m_ActiveCameraDepthAttachment != RenderTargetHandle.CameraTarget)
                {
                    var depthDescriptor = descriptor;
                    depthDescriptor.useMipMap = false;
                    depthDescriptor.autoGenerateMips = false;
                    depthDescriptor.bindMS = depthDescriptor.msaaSamples > 1 && !SystemInfo.supportsMultisampleAutoResolve && (SystemInfo.supportsMultisampledTextures != 0);
                    depthDescriptor.colorFormat = RenderTextureFormat.Depth;
                    depthDescriptor.depthBufferBits = k_DepthStencilBufferBits;
                    cmd.GetTemporaryRT(m_ActiveCameraDepthAttachment.id, depthDescriptor, FilterMode.Point);
                }
            }

            context.ExecuteCommandBuffer(cmd);
            CommandBufferPool.Release(cmd);
        }
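
        // Returns true when an MSAA camera target must be resolved with an explicit blit because the
        // platform or backbuffer cannot be relied upon to auto-resolve it (see the per-branch notes below).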
        bool PlatformRequiresExplicitMsaaResolve()
        {
#if UNITY_EDITOR
            // In editor play mode we use a Game View render texture whose sample count is forced to 1,
            // so we always need to do an explicit MSAA resolve.
            return true;
#else
            // On Metal/iOS the MSAA resolve is done implicitly as part of the render pass, so we do not need an extra intermediate pass for an explicit resolve.
            // Note: on Vulkan standalone, despite SystemInfo.supportsMultisampleAutoResolve being true, the backbuffer has only 1 sample, so we still require
            // the explicit resolve on non-mobile platforms with supportsMultisampleAutoResolve.
            return !(SystemInfo.supportsMultisampleAutoResolve && Application.isMobilePlatform)
                && SystemInfo.graphicsDeviceType != GraphicsDeviceType.Metal;
#endif
        }
        /// <summary>
        /// Checks if the pipeline needs to create an intermediate render texture.
        /// </summary>
        /// <param name="cameraData">CameraData contains all relevant render target information for the camera.</param>
        /// <seealso cref="CameraData"/>
        /// <returns>Returns true if the pipeline needs to render to an intermediate render texture.</returns>
        bool RequiresIntermediateColorTexture(ref CameraData cameraData)
        {
            // When rendering a camera stack we always create an intermediate render texture to composite camera results.
            // We create it upon rendering the Base camera.
            if (cameraData.renderType == CameraRenderType.Base && !cameraData.resolveFinalTarget)
                return true;

            // Always force rendering into an intermediate color texture if deferred rendering mode is selected.
            // Reason: without an intermediate color texture, the target camera texture is y-flipped.
            // However, the target camera texture is bound during both the gbuffer pass and the deferred pass.
            // The gbuffer pass will not be y-flipped because it is MRT (see the ScriptableRenderContext implementation),
            // while the deferred pass will be y-flipped, which breaks rendering.
            // This incurs an extra blit at the end of rendering.
            if (this.actualRenderingMode == RenderingMode.Deferred)
                return true;

            bool isSceneViewCamera = cameraData.isSceneViewCamera;
            var cameraTargetDescriptor = cameraData.cameraTargetDescriptor;
            int msaaSamples = cameraTargetDescriptor.msaaSamples;
            bool isScaledRender = cameraData.imageScalingMode != ImageScalingMode.None;
            bool isCompatibleBackbufferTextureDimension = cameraTargetDescriptor.dimension == TextureDimension.Tex2D;
            bool requiresExplicitMsaaResolve = msaaSamples > 1 && PlatformRequiresExplicitMsaaResolve();
            bool isOffscreenRender = cameraData.targetTexture != null && !isSceneViewCamera;
            bool isCapturing = cameraData.captureActions != null;

#if ENABLE_VR && ENABLE_XR_MODULE
            if (cameraData.xr.enabled)
            {
                isScaledRender = false;
                isCompatibleBackbufferTextureDimension = cameraData.xr.renderTargetDesc.dimension == cameraTargetDescriptor.dimension;
            }
#endif

            bool requiresBlitForOffscreenCamera = cameraData.postProcessEnabled || cameraData.requiresOpaqueTexture || requiresExplicitMsaaResolve || !cameraData.isDefaultViewport;
            if (isOffscreenRender)
                return requiresBlitForOffscreenCamera;

            return requiresBlitForOffscreenCamera || isSceneViewCamera || isScaledRender || cameraData.isHdrEnabled ||
                !isCompatibleBackbufferTextureDimension || isCapturing || cameraData.requireSrgbConversion;
        }
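
        // Returns true when the camera depth buffer can be copied (or MSAA-resolved) into a separate
        // depth texture on the current platform and camera configuration.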
        bool CanCopyDepth(ref CameraData cameraData)
        {
            bool msaaEnabledForCamera = cameraData.cameraTargetDescriptor.msaaSamples > 1;
            bool supportsTextureCopy = SystemInfo.copyTextureSupport != CopyTextureSupport.None;
            bool supportsDepthTarget = RenderingUtils.SupportsRenderTextureFormat(RenderTextureFormat.Depth);
            bool supportsDepthCopy = !msaaEnabledForCamera && (supportsDepthTarget || supportsTextureCopy);

            bool msaaDepthResolve = msaaEnabledForCamera && SystemInfo.supportsMultisampledTextures != 0;

            // Copying depth on GLES3 gives invalid results. Needs investigation (Fogbugz issue 1339401).
            if (SystemInfo.graphicsDeviceType == GraphicsDeviceType.OpenGLES3)
                return false;

            return supportsDepthCopy || msaaDepthResolve;
        }
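
        // Swaps the ping-pong color buffers managed by m_ColorBufferSystem and rebinds the new back buffer
        // as the active camera color target, keeping _CameraColorTexture and _AfterPostProcessTexture
        // pointing at it.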
        internal override void SwapColorBuffer(CommandBuffer cmd)
        {
            m_ColorBufferSystem.Swap();

            // Check whether we are using the depth buffer that is attached to the color buffer.
            if (m_ActiveCameraDepthAttachment == RenderTargetHandle.CameraTarget)
                ConfigureCameraTarget(m_ColorBufferSystem.GetBackBuffer(cmd).id, m_ColorBufferSystem.GetBufferA().id);
            else
                ConfigureCameraColorTarget(m_ColorBufferSystem.GetBackBuffer(cmd).id);

            m_ActiveCameraColorAttachment = m_ColorBufferSystem.GetBackBuffer();
            cmd.SetGlobalTexture("_CameraColorTexture", m_ActiveCameraColorAttachment.id);
            // Also set _AfterPostProcessTexture; users might still rely on it, even though it is now always the camera target due to the swap-buffer system.
            cmd.SetGlobalTexture("_AfterPostProcessTexture", m_ActiveCameraColorAttachment.id);
        }
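
        // Returns the "front" color buffer of the swap-buffer system, i.e. the buffer that is not the
        // current render target (typically used as the source of a blit into the back buffer).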
        internal override RenderTargetIdentifier GetCameraColorFrontBuffer(CommandBuffer cmd)
        {
            return m_ColorBufferSystem.GetFrontBuffer(cmd).id;
        }
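
        // Enables or disables MSAA on the swap-buffer color targets owned by m_ColorBufferSystem.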
        internal override void EnableSwapBufferMSAA(bool enable)
        {
            m_ColorBufferSystem.EnableMSAA(enable);
        }
    }
}