UniversalRenderPipeline.cs

using System;
using Unity.Collections;
using System.Collections.Generic;
#if UNITY_EDITOR
using UnityEditor;
using UnityEditor.Rendering.Universal;
#endif
using UnityEngine.Scripting.APIUpdating;
using Lightmapping = UnityEngine.Experimental.GlobalIllumination.Lightmapping;
using UnityEngine.Experimental.Rendering;
using UnityEngine.Profiling;

namespace UnityEngine.Rendering.Universal
{
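    /// <summary>
    /// The Universal Render Pipeline. Implements the scriptable render pipeline entry points
    /// and drives culling, setup and execution of the renderer for every camera.
    /// </summary>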
    public sealed partial class UniversalRenderPipeline : RenderPipeline
    {
        public const string k_ShaderTagName = "UniversalPipeline";

        private static class Profiling
        {
            private static Dictionary<int, ProfilingSampler> s_HashSamplerCache = new Dictionary<int, ProfilingSampler>();
            public static readonly ProfilingSampler unknownSampler = new ProfilingSampler("Unknown");

            // Specialization for camera loop to avoid allocations.
            public static ProfilingSampler TryGetOrAddCameraSampler(Camera camera)
            {
#if UNIVERSAL_PROFILING_NO_ALLOC
                return unknownSampler;
#else
                ProfilingSampler ps = null;
                int cameraId = camera.GetHashCode();
                bool exists = s_HashSamplerCache.TryGetValue(cameraId, out ps);
                if (!exists)
                {
                    // NOTE: camera.name allocates!
                    ps = new ProfilingSampler($"{nameof(UniversalRenderPipeline)}.{nameof(RenderSingleCamera)}: {camera.name}");
                    s_HashSamplerCache.Add(cameraId, ps);
                }
                return ps;
#endif
            }

            public static class Pipeline
            {
                // TODO: Would be better to add Profiling name hooks into RenderPipeline.cs; requires changes outside of Universal.
#if UNITY_2021_1_OR_NEWER
                public static readonly ProfilingSampler beginContextRendering = new ProfilingSampler($"{nameof(RenderPipeline)}.{nameof(BeginContextRendering)}");
                public static readonly ProfilingSampler endContextRendering = new ProfilingSampler($"{nameof(RenderPipeline)}.{nameof(EndContextRendering)}");
#else
                public static readonly ProfilingSampler beginFrameRendering = new ProfilingSampler($"{nameof(RenderPipeline)}.{nameof(BeginFrameRendering)}");
                public static readonly ProfilingSampler endFrameRendering = new ProfilingSampler($"{nameof(RenderPipeline)}.{nameof(EndFrameRendering)}");
#endif
                public static readonly ProfilingSampler beginCameraRendering = new ProfilingSampler($"{nameof(RenderPipeline)}.{nameof(BeginCameraRendering)}");
                public static readonly ProfilingSampler endCameraRendering = new ProfilingSampler($"{nameof(RenderPipeline)}.{nameof(EndCameraRendering)}");

                const string k_Name = nameof(UniversalRenderPipeline);
                public static readonly ProfilingSampler initializeCameraData = new ProfilingSampler($"{k_Name}.{nameof(InitializeCameraData)}");
                public static readonly ProfilingSampler initializeStackedCameraData = new ProfilingSampler($"{k_Name}.{nameof(InitializeStackedCameraData)}");
                public static readonly ProfilingSampler initializeAdditionalCameraData = new ProfilingSampler($"{k_Name}.{nameof(InitializeAdditionalCameraData)}");
                public static readonly ProfilingSampler initializeRenderingData = new ProfilingSampler($"{k_Name}.{nameof(InitializeRenderingData)}");
                public static readonly ProfilingSampler initializeShadowData = new ProfilingSampler($"{k_Name}.{nameof(InitializeShadowData)}");
                public static readonly ProfilingSampler initializeLightData = new ProfilingSampler($"{k_Name}.{nameof(InitializeLightData)}");
                public static readonly ProfilingSampler getPerObjectLightFlags = new ProfilingSampler($"{k_Name}.{nameof(GetPerObjectLightFlags)}");
                public static readonly ProfilingSampler getMainLightIndex = new ProfilingSampler($"{k_Name}.{nameof(GetMainLightIndex)}");
                public static readonly ProfilingSampler setupPerFrameShaderConstants = new ProfilingSampler($"{k_Name}.{nameof(SetupPerFrameShaderConstants)}");

                public static class Renderer
                {
                    const string k_Name = nameof(ScriptableRenderer);
                    public static readonly ProfilingSampler setupCullingParameters = new ProfilingSampler($"{k_Name}.{nameof(ScriptableRenderer.SetupCullingParameters)}");
                    public static readonly ProfilingSampler setup = new ProfilingSampler($"{k_Name}.{nameof(ScriptableRenderer.Setup)}");
                };

                public static class Context
                {
                    const string k_Name = nameof(ScriptableRenderContext);
                    public static readonly ProfilingSampler submit = new ProfilingSampler($"{k_Name}.{nameof(ScriptableRenderContext.Submit)}");
                };

                public static class XR
                {
                    public static readonly ProfilingSampler mirrorView = new ProfilingSampler("XR Mirror View");
                };
            };
        }

#if ENABLE_VR && ENABLE_XR_MODULE
        internal static XRSystem m_XRSystem = new XRSystem();
#endif

        public static float maxShadowBias
        {
            get => 10.0f;
        }

        public static float minRenderScale
        {
            get => 0.1f;
        }

        public static float maxRenderScale
        {
            get => 2.0f;
        }

        // Amount of lights that can be shaded per object (in the for loop in the shader).
        public static int maxPerObjectLights
        {
            // There is no support for bitfield masks or int[] in GLES2, so we can't quickly index more than 4 lights.
            // Check Lighting.hlsl for more details.
            get => (SystemInfo.graphicsDeviceType == GraphicsDeviceType.OpenGLES2) ? 4 : 8;
        }

        // These limits have to match the same limits in Input.hlsl.
        internal const int k_MaxVisibleAdditionalLightsMobileShaderLevelLessThan45 = 16;
        internal const int k_MaxVisibleAdditionalLightsMobile = 32;
        internal const int k_MaxVisibleAdditionalLightsNonMobile = 256;
        public static int maxVisibleAdditionalLights
        {
            get
            {
                // Must match: Input.hlsl, MAX_VISIBLE_LIGHTS
                bool isMobile = GraphicsSettings.HasShaderDefine(BuiltinShaderDefine.SHADER_API_MOBILE);
                if (isMobile && (SystemInfo.graphicsDeviceType == GraphicsDeviceType.OpenGLES2 || (SystemInfo.graphicsDeviceType == GraphicsDeviceType.OpenGLES3 && Graphics.minOpenGLESVersion <= OpenGLESVersion.OpenGLES30)))
                    return k_MaxVisibleAdditionalLightsMobileShaderLevelLessThan45;

                // GLES can be selected as the graphics API on Windows (not a mobile platform), but its uniform buffer size is still limited, so we must use a low light count there as well.
                return (isMobile || SystemInfo.graphicsDeviceType == GraphicsDeviceType.OpenGLCore || SystemInfo.graphicsDeviceType == GraphicsDeviceType.OpenGLES2 || SystemInfo.graphicsDeviceType == GraphicsDeviceType.OpenGLES3)
                    ? k_MaxVisibleAdditionalLightsMobile : k_MaxVisibleAdditionalLightsNonMobile;
            }
        }

        // Match with values in Input.hlsl.
        internal static int lightsPerTile => ((maxVisibleAdditionalLights + 31) / 32) * 32;
        internal static int maxZBins => 1024 * 4;
        internal static int maxTileVec4s => 4096;

        internal const int k_DefaultRenderingLayerMask = 0x00000001;
        private readonly DebugDisplaySettingsUI m_DebugDisplaySettingsUI = new DebugDisplaySettingsUI();
        private UniversalRenderPipelineGlobalSettings m_GlobalSettings;

        public override RenderPipelineGlobalSettings defaultSettings => m_GlobalSettings;

        public UniversalRenderPipeline(UniversalRenderPipelineAsset asset)
        {
#if UNITY_EDITOR
            m_GlobalSettings = UniversalRenderPipelineGlobalSettings.Ensure();
#else
            m_GlobalSettings = UniversalRenderPipelineGlobalSettings.instance;
#endif
            SetSupportedRenderingFeatures();

            // QualitySettings.antiAliasing uses the value 0 when disabled, whereas URP uses 1.
            int qualitySettingsMsaaSampleCount = QualitySettings.antiAliasing > 0 ? QualitySettings.antiAliasing : 1;
            bool msaaSampleCountNeedsUpdate = qualitySettingsMsaaSampleCount != asset.msaaSampleCount;

            // Let the engine know we have MSAA on for cases where we support an MSAA backbuffer.
            if (msaaSampleCountNeedsUpdate)
            {
                QualitySettings.antiAliasing = asset.msaaSampleCount;
#if ENABLE_VR && ENABLE_XR_MODULE
                XRSystem.UpdateMSAALevel(asset.msaaSampleCount);
#endif
            }

#if ENABLE_VR && ENABLE_XR_MODULE
            XRSystem.UpdateRenderScale(asset.renderScale);
#endif
            Shader.globalRenderPipeline = "UniversalPipeline";
            Lightmapping.SetDelegate(lightsDelegate);
            CameraCaptureBridge.enabled = true;
            RenderingUtils.ClearSystemInfoCache();
            DecalProjector.defaultMaterial = asset.decalMaterial;
            DebugManager.instance.RefreshEditor();
            m_DebugDisplaySettingsUI.RegisterDebug(DebugDisplaySettings.Instance);
        }

        protected override void Dispose(bool disposing)
        {
            m_DebugDisplaySettingsUI.UnregisterDebug();
            base.Dispose(disposing);
            Shader.globalRenderPipeline = "";
            SupportedRenderingFeatures.active = new SupportedRenderingFeatures();
            ShaderData.instance.Dispose();
            DeferredShaderData.instance.Dispose();
#if ENABLE_VR && ENABLE_XR_MODULE
            m_XRSystem?.Dispose();
#endif
#if UNITY_EDITOR
            SceneViewDrawMode.ResetDrawMode();
#endif
            Lightmapping.ResetDelegate();
            CameraCaptureBridge.enabled = false;
        }
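
        // RenderPipeline entry point. Unity calls this once per frame with every camera that needs rendering;
        // game cameras go through RenderCameraStack, all other cameras are rendered standalone.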
#if UNITY_2021_1_OR_NEWER
        protected override void Render(ScriptableRenderContext renderContext, Camera[] cameras)
        {
            Render(renderContext, new List<Camera>(cameras));
        }

#endif

#if UNITY_2021_1_OR_NEWER
        protected override void Render(ScriptableRenderContext renderContext, List<Camera> cameras)
#else
        protected override void Render(ScriptableRenderContext renderContext, Camera[] cameras)
#endif
        {
            // TODO: Would be better to add Profiling name hooks into RenderPipelineManager.
            // C# 8 feature, only available in >= 2020.2.
            using var profScope = new ProfilingScope(null, ProfilingSampler.Get(URPProfileId.UniversalRenderTotal));

#if UNITY_2021_1_OR_NEWER
            using (new ProfilingScope(null, Profiling.Pipeline.beginContextRendering))
            {
                BeginContextRendering(renderContext, cameras);
            }
#else
            using (new ProfilingScope(null, Profiling.Pipeline.beginFrameRendering))
            {
                BeginFrameRendering(renderContext, cameras);
            }
#endif

            GraphicsSettings.lightsUseLinearIntensity = (QualitySettings.activeColorSpace == ColorSpace.Linear);
            GraphicsSettings.lightsUseColorTemperature = true;
            GraphicsSettings.useScriptableRenderPipelineBatching = asset.useSRPBatcher;
            GraphicsSettings.defaultRenderingLayerMask = k_DefaultRenderingLayerMask;
            SetupPerFrameShaderConstants();

#if ENABLE_VR && ENABLE_XR_MODULE
            // Update XR MSAA level per frame.
            XRSystem.UpdateMSAALevel(asset.msaaSampleCount);
#endif

#if UNITY_EDITOR
            // We do not want to start rendering if the URP global settings are not ready (m_GlobalSettings is null)
            // or have been deleted/moved (in which case m_GlobalSettings is not necessarily null).
            if (m_GlobalSettings == null || UniversalRenderPipelineGlobalSettings.instance == null)
            {
                m_GlobalSettings = UniversalRenderPipelineGlobalSettings.Ensure();
                if (m_GlobalSettings == null) return;
            }
#endif

            SortCameras(cameras);
#if UNITY_2021_1_OR_NEWER
            for (int i = 0; i < cameras.Count; ++i)
#else
            for (int i = 0; i < cameras.Length; ++i)
#endif
            {
                var camera = cameras[i];
                if (IsGameCamera(camera))
                {
                    RenderCameraStack(renderContext, camera);
                }
                else
                {
                    using (new ProfilingScope(null, Profiling.Pipeline.beginCameraRendering))
                    {
                        BeginCameraRendering(renderContext, camera);
                    }

#if VISUAL_EFFECT_GRAPH_0_0_1_OR_NEWER
                    // This must be called before culling to prepare materials. When there isn't any VisualEffect component, this method has no effect.
                    VFX.VFXManager.PrepareCamera(camera);
#endif
                    UpdateVolumeFramework(camera, null);

                    RenderSingleCamera(renderContext, camera);

                    using (new ProfilingScope(null, Profiling.Pipeline.endCameraRendering))
                    {
                        EndCameraRendering(renderContext, camera);
                    }
                }
            }

#if UNITY_2021_1_OR_NEWER
            using (new ProfilingScope(null, Profiling.Pipeline.endContextRendering))
            {
                EndContextRendering(renderContext, cameras);
            }
#else
            using (new ProfilingScope(null, Profiling.Pipeline.endFrameRendering))
            {
                EndFrameRendering(renderContext, cameras);
            }
#endif
        }

        /// <summary>
        /// Standalone camera rendering. Use this to render procedural cameras.
        /// This method doesn't call <c>BeginCameraRendering</c> and <c>EndCameraRendering</c> callbacks.
        /// </summary>
        /// <param name="context">Render context used to record commands during execution.</param>
        /// <param name="camera">Camera to render.</param>
        /// <seealso cref="ScriptableRenderContext"/>
        public static void RenderSingleCamera(ScriptableRenderContext context, Camera camera)
        {
            UniversalAdditionalCameraData additionalCameraData = null;
            if (IsGameCamera(camera))
                camera.gameObject.TryGetComponent(out additionalCameraData);

            if (additionalCameraData != null && additionalCameraData.renderType != CameraRenderType.Base)
            {
                Debug.LogWarning("Only Base cameras can be rendered with standalone RenderSingleCamera. Camera will be skipped.");
                return;
            }

            InitializeCameraData(camera, additionalCameraData, true, out var cameraData);
#if ADAPTIVE_PERFORMANCE_2_0_0_OR_NEWER
            if (asset.useAdaptivePerformance)
                ApplyAdaptivePerformance(ref cameraData);
#endif
            RenderSingleCamera(context, cameraData, cameraData.postProcessEnabled);
        }
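
        // Fetches culling parameters for the camera. When an XR pass is active, the pass supplies
        // the culling parameters and the camera FOV is synced to the XR projection.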
        static bool TryGetCullingParameters(CameraData cameraData, out ScriptableCullingParameters cullingParams)
        {
#if ENABLE_VR && ENABLE_XR_MODULE
            if (cameraData.xr.enabled)
            {
                cullingParams = cameraData.xr.cullingParams;

                // Sync the FOV on the camera to match the projection from the XR device.
                if (!cameraData.camera.usePhysicalProperties && !XRGraphicsAutomatedTests.enabled)
                    cameraData.camera.fieldOfView = Mathf.Rad2Deg * Mathf.Atan(1.0f / cullingParams.stereoProjectionMatrix.m11) * 2.0f;

                return true;
            }
#endif
            return cameraData.camera.TryGetCullingParameters(false, out cullingParams);
        }

        /// <summary>
        /// Renders a single camera. This method will do culling, setup and execution of the renderer.
        /// </summary>
        /// <param name="context">Render context used to record commands during execution.</param>
        /// <param name="cameraData">Camera rendering data. This might contain data inherited from a base camera.</param>
        /// <param name="anyPostProcessingEnabled">True if at least one camera has post-processing enabled in the stack, false otherwise.</param>
        static void RenderSingleCamera(ScriptableRenderContext context, CameraData cameraData, bool anyPostProcessingEnabled)
        {
            Camera camera = cameraData.camera;
            var renderer = cameraData.renderer;
            if (renderer == null)
            {
                Debug.LogWarning(string.Format("Trying to render {0} with an invalid renderer. Camera rendering will be skipped.", camera.name));
                return;
            }

            if (!TryGetCullingParameters(cameraData, out var cullingParameters))
                return;

            ScriptableRenderer.current = renderer;
            bool isSceneViewCamera = cameraData.isSceneViewCamera;

            // NOTE: Do NOT mix ProfilingScope with named CommandBuffers, i.e. CommandBufferPool.Get("name").
            // Currently there's an issue which results in mismatched markers.
            // The named CommandBuffer will close its "profiling scope" on execution.
            // That will orphan ProfilingScope markers, as the named CommandBuffer markers are their parents.
            // This results in the following pattern:
            // exec(cmd.start, scope.start, cmd.end) and exec(cmd.start, scope.end, cmd.end)
            CommandBuffer cmd = CommandBufferPool.Get();

            // TODO: move skybox code from C++ to URP in order to remove the call to context.Submit() inside DrawSkyboxPass.
            // Until then, we can't use nested profiling scopes with XR multipass.
            CommandBuffer cmdScope = cameraData.xr.enabled ? null : cmd;

            ProfilingSampler sampler = Profiling.TryGetOrAddCameraSampler(camera);
            using (new ProfilingScope(cmdScope, sampler)) // Enqueues a "BeginSample" command into the CommandBuffer cmd
            {
                renderer.Clear(cameraData.renderType);

                using (new ProfilingScope(null, Profiling.Pipeline.Renderer.setupCullingParameters))
                {
                    renderer.OnPreCullRenderPasses(in cameraData);
                    renderer.SetupCullingParameters(ref cullingParameters, ref cameraData);
                }

                context.ExecuteCommandBuffer(cmd); // Send all the commands enqueued so far in the CommandBuffer cmd to the ScriptableRenderContext context
                cmd.Clear();

#if UNITY_EDITOR
                // Emit scene view UI
                if (isSceneViewCamera)
                {
                    ScriptableRenderContext.EmitWorldGeometryForSceneView(camera);
                }
#endif

                var cullResults = context.Cull(ref cullingParameters);
                InitializeRenderingData(asset, ref cameraData, ref cullResults, anyPostProcessingEnabled, out var renderingData);

#if ADAPTIVE_PERFORMANCE_2_0_0_OR_NEWER
                if (asset.useAdaptivePerformance)
                    ApplyAdaptivePerformance(ref renderingData);
#endif

                using (new ProfilingScope(null, Profiling.Pipeline.Renderer.setup))
                {
                    renderer.Setup(context, ref renderingData);
                }

                // Timing scope inside
                renderer.Execute(context, ref renderingData);
                CleanupLightData(ref renderingData.lightData);
            } // When ProfilingScope goes out of scope, an "EndSample" command is enqueued into CommandBuffer cmd

            cameraData.xr.EndCamera(cmd, cameraData);
            context.ExecuteCommandBuffer(cmd); // Sends to ScriptableRenderContext all the commands enqueued since cmd.Clear, i.e. the "EndSample" command
            CommandBufferPool.Release(cmd);

            using (new ProfilingScope(null, Profiling.Pipeline.Context.submit))
            {
                if (renderer.useRenderPassEnabled && !context.SubmitForRenderPassValidation())
                {
                    renderer.useRenderPassEnabled = false;
                    CoreUtils.SetKeyword(cmd, ShaderKeywordStrings.RenderPassEnabled, false);
                    Debug.LogWarning("A rendering command that is not supported inside a native RenderPass was found. Falling back to the non-RenderPass rendering path.");
                }
                context.Submit(); // Actually execute the commands that we previously sent to the ScriptableRenderContext context
            }

            ScriptableRenderer.current = null;
        }

        /// <summary>
        /// Renders a camera stack. This method calls RenderSingleCamera for each valid camera in the stack.
        /// The last camera resolves the final target to screen.
        /// </summary>
        /// <param name="context">Render context used to record commands during execution.</param>
        /// <param name="baseCamera">Base camera to render.</param>
        static void RenderCameraStack(ScriptableRenderContext context, Camera baseCamera)
        {
            using var profScope = new ProfilingScope(null, ProfilingSampler.Get(URPProfileId.RenderCameraStack));

            baseCamera.TryGetComponent<UniversalAdditionalCameraData>(out var baseCameraAdditionalData);

            // Overlay cameras will be rendered stacked while rendering base cameras.
            if (baseCameraAdditionalData != null && baseCameraAdditionalData.renderType == CameraRenderType.Overlay)
                return;

            // A renderer contains a stack if it has additional data and the renderer supports stacking.
            var renderer = baseCameraAdditionalData?.scriptableRenderer;
            bool supportsCameraStacking = renderer != null && renderer.supportedRenderingFeatures.cameraStacking;
            List<Camera> cameraStack = (supportsCameraStacking) ? baseCameraAdditionalData?.cameraStack : null;

            bool anyPostProcessingEnabled = baseCameraAdditionalData != null && baseCameraAdditionalData.renderPostProcessing;

            // We need to know the last active camera in the stack to be able to resolve
            // rendering to screen when rendering it. The last camera in the stack is not
            // necessarily the last active one, as users might disable it.
            int lastActiveOverlayCameraIndex = -1;
            if (cameraStack != null)
            {
                var baseCameraRendererType = baseCameraAdditionalData?.scriptableRenderer.GetType();
                bool shouldUpdateCameraStack = false;

                for (int i = 0; i < cameraStack.Count; ++i)
                {
                    Camera currCamera = cameraStack[i];
                    if (currCamera == null)
                    {
                        shouldUpdateCameraStack = true;
                        continue;
                    }

                    if (currCamera.isActiveAndEnabled)
                    {
                        currCamera.TryGetComponent<UniversalAdditionalCameraData>(out var data);

                        if (data == null || data.renderType != CameraRenderType.Overlay)
                        {
                            Debug.LogWarning(string.Format("Stack can only contain Overlay cameras. {0} will skip rendering.", currCamera.name));
                            continue;
                        }

                        var currCameraRendererType = data?.scriptableRenderer.GetType();
                        if (currCameraRendererType != baseCameraRendererType)
                        {
                            var renderer2DType = typeof(Renderer2D);
                            if (currCameraRendererType != renderer2DType && baseCameraRendererType != renderer2DType)
                            {
                                Debug.LogWarning(string.Format("Only cameras with compatible renderer types can be stacked. {0} will skip rendering.", currCamera.name));
                                continue;
                            }
                        }

                        anyPostProcessingEnabled |= data.renderPostProcessing;
                        lastActiveOverlayCameraIndex = i;
                    }
                }
                if (shouldUpdateCameraStack)
                {
                    baseCameraAdditionalData.UpdateCameraStack();
                }
            }

            // Post-processing is not supported in GLES2.
            anyPostProcessingEnabled &= SystemInfo.graphicsDeviceType != GraphicsDeviceType.OpenGLES2;

            bool isStackedRendering = lastActiveOverlayCameraIndex != -1;

#if ENABLE_VR && ENABLE_XR_MODULE
            var xrActive = false;
            var xrRendering = true;
            if (baseCameraAdditionalData != null)
                xrRendering = baseCameraAdditionalData.allowXRRendering;
            var xrPasses = m_XRSystem.SetupFrame(baseCamera, xrRendering);
            foreach (XRPass xrPass in xrPasses)
            {
                if (xrPass.enabled)
                {
                    xrActive = true;
                    UpdateCameraStereoMatrices(baseCamera, xrPass);
                }
#endif
                using (new ProfilingScope(null, Profiling.Pipeline.beginCameraRendering))
                {
                    BeginCameraRendering(context, baseCamera);
                }

                // Update the volume framework before initializing additional camera data.
                UpdateVolumeFramework(baseCamera, baseCameraAdditionalData);
                InitializeCameraData(baseCamera, baseCameraAdditionalData, !isStackedRendering, out var baseCameraData);
                RenderTextureDescriptor originalTargetDesc = baseCameraData.cameraTargetDescriptor;

#if ENABLE_VR && ENABLE_XR_MODULE
                if (xrPass.enabled)
                {
                    baseCameraData.xr = xrPass;

                    // XRTODO: remove isStereoEnabled in 2021.x
#pragma warning disable 0618
                    baseCameraData.isStereoEnabled = xrPass.enabled;
#pragma warning restore 0618

                    // Helper function for updating cameraData with xrPass data.
                    m_XRSystem.UpdateCameraData(ref baseCameraData, baseCameraData.xr);
                    // Need to update XRSystem using baseCameraData to handle the case where the camera position is modified in BeginCameraRendering.
                    m_XRSystem.UpdateFromCamera(ref baseCameraData.xr, baseCameraData);
                    m_XRSystem.BeginLateLatching(baseCamera, xrPass);
                }
#endif

#if VISUAL_EFFECT_GRAPH_0_0_1_OR_NEWER
                // This must be called before culling to prepare materials. When there isn't any VisualEffect component, this method has no effect.
                VFX.VFXManager.PrepareCamera(baseCamera);
#endif
#if ADAPTIVE_PERFORMANCE_2_0_0_OR_NEWER
                if (asset.useAdaptivePerformance)
                    ApplyAdaptivePerformance(ref baseCameraData);
#endif
                RenderSingleCamera(context, baseCameraData, anyPostProcessingEnabled);
                using (new ProfilingScope(null, Profiling.Pipeline.endCameraRendering))
                {
                    EndCameraRendering(context, baseCamera);
                }

#if ENABLE_VR && ENABLE_XR_MODULE
                m_XRSystem.EndLateLatching(baseCamera, xrPass);
#endif

                if (isStackedRendering)
                {
                    for (int i = 0; i < cameraStack.Count; ++i)
                    {
                        var currCamera = cameraStack[i];
                        if (!currCamera.isActiveAndEnabled)
                            continue;

                        currCamera.TryGetComponent<UniversalAdditionalCameraData>(out var currCameraData);
                        // Camera is overlay and enabled.
                        if (currCameraData != null)
                        {
                            // Copy base settings from the base camera data and initialize the remaining settings specific to this camera type.
                            CameraData overlayCameraData = baseCameraData;
                            bool lastCamera = i == lastActiveOverlayCameraIndex;

#if ENABLE_VR && ENABLE_XR_MODULE
                            UpdateCameraStereoMatrices(currCameraData.camera, xrPass);
#endif

                            using (new ProfilingScope(null, Profiling.Pipeline.beginCameraRendering))
                            {
                                BeginCameraRendering(context, currCamera);
                            }
#if VISUAL_EFFECT_GRAPH_0_0_1_OR_NEWER
                            // This must be called before culling to prepare materials. When there isn't any VisualEffect component, this method has no effect.
                            VFX.VFXManager.PrepareCamera(currCamera);
#endif
                            UpdateVolumeFramework(currCamera, currCameraData);
                            InitializeAdditionalCameraData(currCamera, currCameraData, lastCamera, ref overlayCameraData);
#if ENABLE_VR && ENABLE_XR_MODULE
                            if (baseCameraData.xr.enabled)
                                m_XRSystem.UpdateFromCamera(ref overlayCameraData.xr, overlayCameraData);
#endif
                            RenderSingleCamera(context, overlayCameraData, anyPostProcessingEnabled);

                            using (new ProfilingScope(null, Profiling.Pipeline.endCameraRendering))
                            {
                                EndCameraRendering(context, currCamera);
                            }
                        }
                    }
                }

#if ENABLE_VR && ENABLE_XR_MODULE
                if (baseCameraData.xr.enabled)
                    baseCameraData.cameraTargetDescriptor = originalTargetDesc;
            }

            if (xrActive)
            {
                CommandBuffer cmd = CommandBufferPool.Get();
                using (new ProfilingScope(cmd, Profiling.Pipeline.XR.mirrorView))
                {
                    m_XRSystem.RenderMirrorView(cmd, baseCamera);
                }
                context.ExecuteCommandBuffer(cmd);
                context.Submit();
                CommandBufferPool.Release(cmd);
            }

            m_XRSystem.ReleaseFrame();
#endif
        }
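
        // Decides whether the volume framework needs a full update for this camera; if not, it binds the
        // camera's cached volume stack instead of re-evaluating volumes every frame.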
        static void UpdateVolumeFramework(Camera camera, UniversalAdditionalCameraData additionalCameraData)
        {
            using var profScope = new ProfilingScope(null, ProfilingSampler.Get(URPProfileId.UpdateVolumeFramework));

            // We update the volume framework for:
            // * All cameras in the editor when not in playmode
            // * Scene cameras
            // * Cameras with update mode set to EveryFrame
            // * Cameras with update mode set to UsePipelineSettings and the URP Asset set to EveryFrame
            bool shouldUpdate = camera.cameraType == CameraType.SceneView;
            shouldUpdate |= additionalCameraData != null && additionalCameraData.requiresVolumeFrameworkUpdate;

#if UNITY_EDITOR
            shouldUpdate |= Application.isPlaying == false;
#endif

            // When we have volume updates per-frame disabled...
            if (!shouldUpdate && additionalCameraData)
            {
                // Create a local volume stack and cache the state if it's null.
                if (additionalCameraData.volumeStack == null)
                {
                    camera.UpdateVolumeStack(additionalCameraData);
                }

                VolumeManager.instance.stack = additionalCameraData.volumeStack;
                return;
            }

            // When we want to update the volumes every frame...

            // We destroy the volumeStack in the additional camera data, if present, to make sure
            // it gets recreated and initialized if the update mode gets later changed to ViaScripting...
            if (additionalCameraData && additionalCameraData.volumeStack != null)
            {
                camera.DestroyVolumeStack(additionalCameraData);
            }

            // Get the mask + trigger and update the stack.
            camera.GetVolumeLayerMaskAndTrigger(additionalCameraData, out LayerMask layerMask, out Transform trigger);
            VolumeManager.instance.ResetMainStack();
            VolumeManager.instance.Update(trigger, layerMask);
        }
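
        // Returns true when an active post-processing effect (SMAA, Depth of Field, Motion Blur) needs a depth texture.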
        static bool CheckPostProcessForDepth(in CameraData cameraData)
        {
            if (!cameraData.postProcessEnabled)
                return false;

            if (cameraData.antialiasing == AntialiasingMode.SubpixelMorphologicalAntiAliasing)
                return true;

            var stack = VolumeManager.instance.stack;

            if (stack.GetComponent<DepthOfField>().IsActive())
                return true;

            if (stack.GetComponent<MotionBlur>().IsActive())
                return true;

            return false;
        }
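
        // Editor-only: advertises the set of rendering features URP supports so editor UI and lightmap baking can adapt.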
        static void SetSupportedRenderingFeatures()
        {
#if UNITY_EDITOR
            SupportedRenderingFeatures.active = new SupportedRenderingFeatures()
            {
                reflectionProbeModes = SupportedRenderingFeatures.ReflectionProbeModes.None,
                defaultMixedLightingModes = SupportedRenderingFeatures.LightmapMixedBakeModes.Subtractive,
                mixedLightingModes = SupportedRenderingFeatures.LightmapMixedBakeModes.Subtractive | SupportedRenderingFeatures.LightmapMixedBakeModes.IndirectOnly | SupportedRenderingFeatures.LightmapMixedBakeModes.Shadowmask,
                lightmapBakeTypes = LightmapBakeType.Baked | LightmapBakeType.Mixed | LightmapBakeType.Realtime,
                lightmapsModes = LightmapsMode.CombinedDirectional | LightmapsMode.NonDirectional,
                lightProbeProxyVolumes = false,
                motionVectors = false,
                receiveShadows = false,
                reflectionProbes = false,
                reflectionProbesBlendDistance = true,
                particleSystemInstancing = true
            };
            SceneViewDrawMode.SetupDrawMode();
#endif
        }
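
        // Builds the CameraData for a base camera: stack-wide settings, per-camera settings,
        // and finally the camera target descriptor (resolution, HDR, MSAA).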
        static void InitializeCameraData(Camera camera, UniversalAdditionalCameraData additionalCameraData, bool resolveFinalTarget, out CameraData cameraData)
        {
            using var profScope = new ProfilingScope(null, Profiling.Pipeline.initializeCameraData);

            cameraData = new CameraData();
            InitializeStackedCameraData(camera, additionalCameraData, ref cameraData);
            InitializeAdditionalCameraData(camera, additionalCameraData, resolveFinalTarget, ref cameraData);

            ///////////////////////////////////////////////////////////////////
            // Descriptor settings                                            /
            ///////////////////////////////////////////////////////////////////

            var renderer = additionalCameraData?.scriptableRenderer;
            bool rendererSupportsMSAA = renderer != null && renderer.supportedRenderingFeatures.msaa;

            int msaaSamples = 1;
            if (camera.allowMSAA && asset.msaaSampleCount > 1 && rendererSupportsMSAA)
                msaaSamples = (camera.targetTexture != null) ? camera.targetTexture.antiAliasing : asset.msaaSampleCount;

#if ENABLE_VR && ENABLE_XR_MODULE
            // Use XR's MSAA if the camera is an XR camera. XR MSAA needs special handling here because it is not per camera.
            // Multiple cameras could render into the same XR display, and they should share the same MSAA level.
            if (cameraData.xrRendering && rendererSupportsMSAA)
                msaaSamples = XRSystem.GetMSAALevel();
#endif

            bool needsAlphaChannel = Graphics.preserveFramebufferAlpha;
            cameraData.cameraTargetDescriptor = CreateRenderTextureDescriptor(camera, cameraData.renderScale,
                cameraData.isHdrEnabled, msaaSamples, needsAlphaChannel, cameraData.requiresOpaqueTexture);
        }

        /// <summary>
        /// Initialize camera data settings common for all cameras in the stack. Overlay cameras will inherit
        /// settings from the base camera.
        /// </summary>
        /// <param name="baseCamera">Base camera to inherit settings from.</param>
        /// <param name="baseAdditionalCameraData">Component that contains additional base camera data.</param>
        /// <param name="cameraData">Camera data to initialize settings.</param>
        static void InitializeStackedCameraData(Camera baseCamera, UniversalAdditionalCameraData baseAdditionalCameraData, ref CameraData cameraData)
        {
            using var profScope = new ProfilingScope(null, Profiling.Pipeline.initializeStackedCameraData);

            var settings = asset;
            cameraData.targetTexture = baseCamera.targetTexture;
            cameraData.cameraType = baseCamera.cameraType;
            bool isSceneViewCamera = cameraData.isSceneViewCamera;

            ///////////////////////////////////////////////////////////////////
            // Environment and Post-processing settings                       /
            ///////////////////////////////////////////////////////////////////
            if (isSceneViewCamera)
            {
                cameraData.volumeLayerMask = 1; // "Default"
                cameraData.volumeTrigger = null;
                cameraData.isStopNaNEnabled = false;
                cameraData.isDitheringEnabled = false;
                cameraData.antialiasing = AntialiasingMode.None;
                cameraData.antialiasingQuality = AntialiasingQuality.High;
#if ENABLE_VR && ENABLE_XR_MODULE
                cameraData.xrRendering = false;
#endif
            }
            else if (baseAdditionalCameraData != null)
            {
                cameraData.volumeLayerMask = baseAdditionalCameraData.volumeLayerMask;
                cameraData.volumeTrigger = baseAdditionalCameraData.volumeTrigger == null ? baseCamera.transform : baseAdditionalCameraData.volumeTrigger;
                cameraData.isStopNaNEnabled = baseAdditionalCameraData.stopNaN && SystemInfo.graphicsShaderLevel >= 35;
                cameraData.isDitheringEnabled = baseAdditionalCameraData.dithering;
                cameraData.antialiasing = baseAdditionalCameraData.antialiasing;
                cameraData.antialiasingQuality = baseAdditionalCameraData.antialiasingQuality;
#if ENABLE_VR && ENABLE_XR_MODULE
                cameraData.xrRendering = baseAdditionalCameraData.allowXRRendering && m_XRSystem.RefreshXrSdk();
#endif
            }
            else
            {
                cameraData.volumeLayerMask = 1; // "Default"
                cameraData.volumeTrigger = null;
                cameraData.isStopNaNEnabled = false;
                cameraData.isDitheringEnabled = false;
                cameraData.antialiasing = AntialiasingMode.None;
                cameraData.antialiasingQuality = AntialiasingQuality.High;
#if ENABLE_VR && ENABLE_XR_MODULE
                cameraData.xrRendering = m_XRSystem.RefreshXrSdk();
#endif
            }

            ///////////////////////////////////////////////////////////////////
            // Settings that control output of the camera                     /
            ///////////////////////////////////////////////////////////////////

            cameraData.isHdrEnabled = baseCamera.allowHDR && settings.supportsHDR;

            Rect cameraRect = baseCamera.rect;
            cameraData.pixelRect = baseCamera.pixelRect;
            cameraData.pixelWidth = baseCamera.pixelWidth;
            cameraData.pixelHeight = baseCamera.pixelHeight;
            cameraData.aspectRatio = (float)cameraData.pixelWidth / (float)cameraData.pixelHeight;
            cameraData.isDefaultViewport = (!(Math.Abs(cameraRect.x) > 0.0f || Math.Abs(cameraRect.y) > 0.0f ||
                Math.Abs(cameraRect.width) < 1.0f || Math.Abs(cameraRect.height) < 1.0f));

            // Discard render scale variations smaller than kRenderScaleThreshold.
            // Scale is only enabled for gameview.
            const float kRenderScaleThreshold = 0.05f;
            cameraData.renderScale = (Mathf.Abs(1.0f - settings.renderScale) < kRenderScaleThreshold) ? 1.0f : settings.renderScale;

            // Convert the upscaling filter selection from the pipeline asset into an image upscaling filter.
            cameraData.upscalingFilter = ResolveUpscalingFilterSelection(new Vector2(cameraData.pixelWidth, cameraData.pixelHeight), cameraData.renderScale, settings.upscalingFilter);

            if (cameraData.renderScale > 1.0f)
            {
                cameraData.imageScalingMode = ImageScalingMode.Downscaling;
            }
            else if ((cameraData.renderScale < 1.0f) || (cameraData.upscalingFilter == ImageUpscalingFilter.FSR))
            {
                // When FSR is enabled, we still consider 100% render scale an upscaling operation.
                // This allows us to run the FSR shader passes all the time, since they improve visual quality even at 100% scale.
                cameraData.imageScalingMode = ImageScalingMode.Upscaling;
            }
            else
            {
                cameraData.imageScalingMode = ImageScalingMode.None;
            }

            cameraData.fsrOverrideSharpness = settings.fsrOverrideSharpness;
            cameraData.fsrSharpness = settings.fsrSharpness;

#if ENABLE_VR && ENABLE_XR_MODULE
            cameraData.xr = m_XRSystem.emptyPass;
            XRSystem.UpdateRenderScale(cameraData.renderScale);
#else
            cameraData.xr = XRPass.emptyPass;
#endif

            var commonOpaqueFlags = SortingCriteria.CommonOpaque;
            var noFrontToBackOpaqueFlags = SortingCriteria.SortingLayer | SortingCriteria.RenderQueue | SortingCriteria.OptimizeStateChanges | SortingCriteria.CanvasOrder;
            bool hasHSRGPU = SystemInfo.hasHiddenSurfaceRemovalOnGPU;
            bool canSkipFrontToBackSorting = (baseCamera.opaqueSortMode == OpaqueSortMode.Default && hasHSRGPU) || baseCamera.opaqueSortMode == OpaqueSortMode.NoDistanceSort;

            cameraData.defaultOpaqueSortFlags = canSkipFrontToBackSorting ? noFrontToBackOpaqueFlags : commonOpaqueFlags;
            cameraData.captureActions = CameraCaptureBridge.GetCaptureActions(baseCamera);
        }

        /// <summary>
        /// Initialize settings that can be different for each camera in the stack.
        /// </summary>
        /// <param name="camera">Camera to initialize settings from.</param>
        /// <param name="additionalCameraData">Additional camera data component to initialize settings from.</param>
        /// <param name="resolveFinalTarget">True if this is the last camera in the stack and rendering should resolve to camera target.</param>
        /// <param name="cameraData">Settings to be initialized.</param>
        static void InitializeAdditionalCameraData(Camera camera, UniversalAdditionalCameraData additionalCameraData, bool resolveFinalTarget, ref CameraData cameraData)
        {
            using var profScope = new ProfilingScope(null, Profiling.Pipeline.initializeAdditionalCameraData);

            var settings = asset;
            cameraData.camera = camera;

            bool anyShadowsEnabled = settings.supportsMainLightShadows || settings.supportsAdditionalLightShadows;
            cameraData.maxShadowDistance = Mathf.Min(settings.shadowDistance, camera.farClipPlane);
            cameraData.maxShadowDistance = (anyShadowsEnabled && cameraData.maxShadowDistance >= camera.nearClipPlane) ? cameraData.maxShadowDistance : 0.0f;

            // Get the background color from preferences to apply to the preview camera.
#if UNITY_EDITOR
            if (cameraData.camera.cameraType == CameraType.Preview)
            {
                camera.backgroundColor = CoreRenderPipelinePreferences.previewBackgroundColor;
            }
#endif

            bool isSceneViewCamera = cameraData.isSceneViewCamera;
            if (isSceneViewCamera)
            {
                cameraData.renderType = CameraRenderType.Base;
                cameraData.clearDepth = true;
                cameraData.postProcessEnabled = CoreUtils.ArePostProcessesEnabled(camera);
                cameraData.requiresDepthTexture = settings.supportsCameraDepthTexture;
                cameraData.requiresOpaqueTexture = settings.supportsCameraOpaqueTexture;
                cameraData.renderer = asset.scriptableRenderer;
            }
            else if (additionalCameraData != null)
            {
                cameraData.renderType = additionalCameraData.renderType;
                cameraData.clearDepth = (additionalCameraData.renderType != CameraRenderType.Base) ? additionalCameraData.clearDepth : true;
                cameraData.postProcessEnabled = additionalCameraData.renderPostProcessing;
                cameraData.maxShadowDistance = (additionalCameraData.renderShadows) ? cameraData.maxShadowDistance : 0.0f;
                cameraData.requiresDepthTexture = additionalCameraData.requiresDepthTexture;
                cameraData.requiresOpaqueTexture = additionalCameraData.requiresColorTexture;
                cameraData.renderer = additionalCameraData.scriptableRenderer;
            }
            else
            {
                cameraData.renderType = CameraRenderType.Base;
                cameraData.clearDepth = true;
                cameraData.postProcessEnabled = false;
                cameraData.requiresDepthTexture = settings.supportsCameraDepthTexture;
                cameraData.requiresOpaqueTexture = settings.supportsCameraOpaqueTexture;
                cameraData.renderer = asset.scriptableRenderer;
            }

            // Disable post-processing on GLES2.
            cameraData.postProcessEnabled &= SystemInfo.graphicsDeviceType != GraphicsDeviceType.OpenGLES2;

            cameraData.requiresDepthTexture |= isSceneViewCamera;
            cameraData.postProcessingRequiresDepthTexture |= CheckPostProcessForDepth(cameraData);
            cameraData.resolveFinalTarget = resolveFinalTarget;

            // Disable depth and color copy for overlay cameras. We should add it in the renderer instead to avoid performance pitfalls
            // of camera stacking breaking render pass execution implicitly.
            bool isOverlayCamera = (cameraData.renderType == CameraRenderType.Overlay);
            if (isOverlayCamera)
            {
                cameraData.requiresDepthTexture = false;
                cameraData.requiresOpaqueTexture = false;
                cameraData.postProcessingRequiresDepthTexture = false;
            }

            Matrix4x4 projectionMatrix = camera.projectionMatrix;

            // Overlay cameras inherit the viewport from the base camera. If the viewports differ, we may need
            // to patch the projection matrix to adjust the aspect ratio and prevent squishing when rendering
            // objects in overlay cameras.
            if (isOverlayCamera && !camera.orthographic && cameraData.pixelRect != camera.pixelRect)
            {
                // m00 = (cotangent / aspect), therefore m00 * aspect gives us the cotangent.
                float cotangent = camera.projectionMatrix.m00 * camera.aspect;

                // Get the new m00 by dividing by the base camera aspect ratio.
                float newCotangent = cotangent / cameraData.aspectRatio;
                projectionMatrix.m00 = newCotangent;
            }

            cameraData.SetViewAndProjectionMatrix(camera.worldToCameraMatrix, projectionMatrix);
            cameraData.worldSpaceCameraPos = camera.transform.position;
        }
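
        // Builds the frame's RenderingData from the culling results: light data, shadow data,
        // post-processing data, and per-object data flags.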
        static void InitializeRenderingData(UniversalRenderPipelineAsset settings, ref CameraData cameraData, ref CullingResults cullResults,
            bool anyPostProcessingEnabled, out RenderingData renderingData)
        {
            using var profScope = new ProfilingScope(null, Profiling.Pipeline.initializeRenderingData);

            var visibleLights = cullResults.visibleLights;

            int mainLightIndex = GetMainLightIndex(settings, visibleLights);
            bool mainLightCastShadows = false;
            bool additionalLightsCastShadows = false;

            if (cameraData.maxShadowDistance > 0.0f)
            {
                mainLightCastShadows = (mainLightIndex != -1 && visibleLights[mainLightIndex].light != null &&
                    visibleLights[mainLightIndex].light.shadows != LightShadows.None);

                // If additional lights are shaded per-vertex they cannot cast shadows.
                if (settings.additionalLightsRenderingMode == LightRenderingMode.PerPixel)
                {
                    for (int i = 0; i < visibleLights.Length; ++i)
                    {
                        if (i == mainLightIndex)
                            continue;

                        Light light = visibleLights[i].light;

                        // UniversalRP doesn't support additional directional light shadows yet.
                        if ((visibleLights[i].lightType == LightType.Spot || visibleLights[i].lightType == LightType.Point) && light != null && light.shadows != LightShadows.None)
                        {
                            additionalLightsCastShadows = true;
                            break;
                        }
                    }
                }
            }

            renderingData.cullResults = cullResults;
            renderingData.cameraData = cameraData;
            InitializeLightData(settings, visibleLights, mainLightIndex, out renderingData.lightData);
            InitializeShadowData(settings, visibleLights, mainLightCastShadows, additionalLightsCastShadows && !renderingData.lightData.shadeAdditionalLightsPerVertex, out renderingData.shadowData);
            InitializePostProcessingData(settings, out renderingData.postProcessingData);
            renderingData.supportsDynamicBatching = settings.supportsDynamicBatching;
            renderingData.perObjectData = GetPerObjectLightFlags(renderingData.lightData.additionalLightsCount);
            renderingData.postProcessingEnabled = anyPostProcessingEnabled;

            CheckAndApplyDebugSettings(ref renderingData);
        }
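
        // Fills ShadowData: per-light bias and resolution lists, cascade configuration and shadowmap sizes.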
        static void InitializeShadowData(UniversalRenderPipelineAsset settings, NativeArray<VisibleLight> visibleLights, bool mainLightCastShadows, bool additionalLightsCastShadows, out ShadowData shadowData)
        {
            using var profScope = new ProfilingScope(null, Profiling.Pipeline.initializeShadowData);

            m_ShadowBiasData.Clear();
            m_ShadowResolutionData.Clear();

            for (int i = 0; i < visibleLights.Length; ++i)
            {
                Light light = visibleLights[i].light;
                UniversalAdditionalLightData data = null;
                if (light != null)
                {
                    light.gameObject.TryGetComponent(out data);
                }

                if (data && !data.usePipelineSettings)
                    m_ShadowBiasData.Add(new Vector4(light.shadowBias, light.shadowNormalBias, 0.0f, 0.0f));
                else
                    m_ShadowBiasData.Add(new Vector4(settings.shadowDepthBias, settings.shadowNormalBias, 0.0f, 0.0f));

                if (data && (data.additionalLightsShadowResolutionTier == UniversalAdditionalLightData.AdditionalLightsShadowResolutionTierCustom))
                {
                    m_ShadowResolutionData.Add((int)light.shadowResolution); // native code does not clamp light.shadowResolution between -1 and 3
                }
                else if (data && (data.additionalLightsShadowResolutionTier != UniversalAdditionalLightData.AdditionalLightsShadowResolutionTierCustom))
                {
                    int resolutionTier = Mathf.Clamp(data.additionalLightsShadowResolutionTier, UniversalAdditionalLightData.AdditionalLightsShadowResolutionTierLow, UniversalAdditionalLightData.AdditionalLightsShadowResolutionTierHigh);
                    m_ShadowResolutionData.Add(settings.GetAdditionalLightsShadowResolution(resolutionTier));
                }
                else
                {
                    m_ShadowResolutionData.Add(settings.GetAdditionalLightsShadowResolution(UniversalAdditionalLightData.AdditionalLightsShadowDefaultResolutionTier));
                }
            }

            shadowData.bias = m_ShadowBiasData;
            shadowData.resolution = m_ShadowResolutionData;
            shadowData.supportsMainLightShadows = SystemInfo.supportsShadows && settings.supportsMainLightShadows && mainLightCastShadows;

            // We no longer use screen space shadows in URP.
            // This change allows us to have particles & transparent objects receive shadows.
#pragma warning disable 0618
            shadowData.requiresScreenSpaceShadowResolve = false;
#pragma warning restore 0618

            shadowData.mainLightShadowCascadesCount = settings.shadowCascadeCount;
            shadowData.mainLightShadowmapWidth = settings.mainLightShadowmapResolution;
            shadowData.mainLightShadowmapHeight = settings.mainLightShadowmapResolution;

            switch (shadowData.mainLightShadowCascadesCount)
            {
                case 1:
                    shadowData.mainLightShadowCascadesSplit = new Vector3(1.0f, 0.0f, 0.0f);
                    break;

                case 2:
                    shadowData.mainLightShadowCascadesSplit = new Vector3(settings.cascade2Split, 1.0f, 0.0f);
                    break;

                case 3:
                    shadowData.mainLightShadowCascadesSplit = new Vector3(settings.cascade3Split.x, settings.cascade3Split.y, 0.0f);
                    break;

                default:
                    shadowData.mainLightShadowCascadesSplit = settings.cascade4Split;
                    break;
            }

            shadowData.mainLightShadowCascadeBorder = settings.cascadeBorder;
            shadowData.supportsAdditionalLightShadows = SystemInfo.supportsShadows && settings.supportsAdditionalLightShadows && additionalLightsCastShadows;
            shadowData.additionalLightsShadowmapWidth = shadowData.additionalLightsShadowmapHeight = settings.additionalLightsShadowmapResolution;
            shadowData.supportsSoftShadows = settings.supportsSoftShadows && (shadowData.supportsMainLightShadows || shadowData.supportsAdditionalLightShadows);
            shadowData.shadowmapDepthBufferBits = 16;

            // These will be set up in AdditionalLightsShadowCasterPass.
            shadowData.isKeywordAdditionalLightShadowsEnabled = false;
            shadowData.isKeywordSoftShadowsEnabled = false;
        }
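
        // Copies color grading configuration from the pipeline asset; HDR grading falls back to LDR when HDR is off.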
        static void InitializePostProcessingData(UniversalRenderPipelineAsset settings, out PostProcessingData postProcessingData)
        {
            postProcessingData.gradingMode = settings.supportsHDR
                ? settings.colorGradingMode
                : ColorGradingMode.LowDynamicRange;

            postProcessingData.lutSize = settings.colorGradingLutSize;
            postProcessingData.useFastSRGBLinearConversion = settings.useFastSRGBLinearConversion;
        }
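
        // Fills LightData: main light index, clamped additional light counts, and the per-frame light index mapping.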
        static void InitializeLightData(UniversalRenderPipelineAsset settings, NativeArray<VisibleLight> visibleLights, int mainLightIndex, out LightData lightData)
        {
            using var profScope = new ProfilingScope(null, Profiling.Pipeline.initializeLightData);

            int maxPerObjectAdditionalLights = UniversalRenderPipeline.maxPerObjectLights;
            int maxVisibleAdditionalLights = UniversalRenderPipeline.maxVisibleAdditionalLights;

            lightData.mainLightIndex = mainLightIndex;

            if (settings.additionalLightsRenderingMode != LightRenderingMode.Disabled)
            {
                lightData.additionalLightsCount =
                    Math.Min((mainLightIndex != -1) ? visibleLights.Length - 1 : visibleLights.Length,
                        maxVisibleAdditionalLights);
                lightData.maxPerObjectAdditionalLightsCount = Math.Min(settings.maxAdditionalLightsCount, maxPerObjectAdditionalLights);
            }
            else
            {
                lightData.additionalLightsCount = 0;
                lightData.maxPerObjectAdditionalLightsCount = 0;
            }

            lightData.supportsAdditionalLights = settings.additionalLightsRenderingMode != LightRenderingMode.Disabled;
            lightData.shadeAdditionalLightsPerVertex = settings.additionalLightsRenderingMode == LightRenderingMode.PerVertex;
            lightData.visibleLights = visibleLights;
            lightData.supportsMixedLighting = settings.supportsMixedLighting;
            lightData.reflectionProbeBlending = settings.reflectionProbeBlending;
            lightData.reflectionProbeBoxProjection = settings.reflectionProbeBoxProjection;
            lightData.supportsLightLayers = RenderingUtils.SupportsLightLayers(SystemInfo.graphicsDeviceType) && settings.supportsLightLayers;

            lightData.originalIndices = new NativeArray<int>(visibleLights.Length, Allocator.Temp);
            for (var i = 0; i < lightData.originalIndices.Length; i++)
            {
                lightData.originalIndices[i] = i;
            }
        }
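
        // Releases the temporary light index array allocated in InitializeLightData.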
        static void CleanupLightData(ref LightData lightData)
        {
            lightData.originalIndices.Dispose();
        }
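
        // Pushes the XR pass view/projection matrices to the camera's stereo matrices:
        // both eyes for single-pass, one eye per pass for multi-pass.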
        static void UpdateCameraStereoMatrices(Camera camera, XRPass xr)
        {
#if ENABLE_VR && ENABLE_XR_MODULE
            if (xr.enabled)
            {
                if (xr.singlePassEnabled)
                {
                    for (int i = 0; i < Mathf.Min(2, xr.viewCount); i++)
                    {
                        camera.SetStereoProjectionMatrix((Camera.StereoscopicEye)i, xr.GetProjMatrix(i));
                        camera.SetStereoViewMatrix((Camera.StereoscopicEye)i, xr.GetViewMatrix(i));
                    }
                }
                else
                {
                    camera.SetStereoProjectionMatrix((Camera.StereoscopicEye)xr.multipassId, xr.GetProjMatrix(0));
                    camera.SetStereoViewMatrix((Camera.StereoscopicEye)xr.multipassId, xr.GetViewMatrix(0));
                }
            }
#endif
        }
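
        // Selects which per-object data the culling results must provide. Per-object light indices
        // are only required when additional lights are present and structured buffers are not used.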
        static PerObjectData GetPerObjectLightFlags(int additionalLightsCount)
        {
            using var profScope = new ProfilingScope(null, Profiling.Pipeline.getPerObjectLightFlags);

            var configuration = PerObjectData.ReflectionProbes | PerObjectData.Lightmaps | PerObjectData.LightProbe | PerObjectData.LightData | PerObjectData.OcclusionProbe | PerObjectData.ShadowMask;

            if (additionalLightsCount > 0)
            {
                configuration |= PerObjectData.LightData;

                // In this case we also need per-object indices (unity_LightIndices).
                if (!RenderingUtils.useStructuredBuffer)
                    configuration |= PerObjectData.LightIndices;
            }

            return configuration;
        }

        // Main Light is always a directional light
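        // Selection priority: the light assigned as RenderSettings.sun wins if visible;
        // otherwise the brightest visible directional light is used. Returns -1 when no
        // candidate exists or the main light is not rendered per-pixel.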
        static int GetMainLightIndex(UniversalRenderPipelineAsset settings, NativeArray<VisibleLight> visibleLights)
        {
            using var profScope = new ProfilingScope(null, Profiling.Pipeline.getMainLightIndex);

            int totalVisibleLights = visibleLights.Length;

            if (totalVisibleLights == 0 || settings.mainLightRenderingMode != LightRenderingMode.PerPixel)
                return -1;

            Light sunLight = RenderSettings.sun;
            int brightestDirectionalLightIndex = -1;
            float brightestLightIntensity = 0.0f;
            for (int i = 0; i < totalVisibleLights; ++i)
            {
                VisibleLight currVisibleLight = visibleLights[i];
                Light currLight = currVisibleLight.light;

                // Particle system lights have a null light property. Lights are sorted so
                // that all particle lights come last; therefore, if the first light is a
                // particle light, all remaining lights are too. In that case we either
                // have no main light or have already found it.
                if (currLight == null)
                    break;

                if (currVisibleLight.lightType == LightType.Directional)
                {
                    // The sun source needs to be a directional light
                    if (currLight == sunLight)
                        return i;

                    // In case no sun light is present we return the brightest directional light
                    if (currLight.intensity > brightestLightIntensity)
                    {
                        brightestLightIntensity = currLight.intensity;
                        brightestDirectionalLightIndex = i;
                    }
                }
            }

            return brightestDirectionalLightIndex;
        }

        static void SetupPerFrameShaderConstants()
        {
            using var profScope = new ProfilingScope(null, Profiling.Pipeline.setupPerFrameShaderConstants);

            // When glossy reflections are OFF in the shader we set a constant color to use as indirect specular
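            // ambientSH[channel, 0] is the constant (L0) coefficient of the ambient probe
            // per color channel, i.e. the average ambient color.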
            SphericalHarmonicsL2 ambientSH = RenderSettings.ambientProbe;
            Color linearGlossyEnvColor = new Color(ambientSH[0, 0], ambientSH[1, 0], ambientSH[2, 0]) * RenderSettings.reflectionIntensity;
            Color glossyEnvColor = CoreUtils.ConvertLinearToActiveColorSpace(linearGlossyEnvColor);
            Shader.SetGlobalVector(ShaderPropertyId.glossyEnvironmentColor, glossyEnvColor);

            // Used as fallback cubemap for reflections
            Shader.SetGlobalVector(ShaderPropertyId.glossyEnvironmentCubeMapHDR, ReflectionProbe.defaultTextureHDRDecodeValues);
            Shader.SetGlobalTexture(ShaderPropertyId.glossyEnvironmentCubeMap, ReflectionProbe.defaultTexture);

            // Ambient
            Shader.SetGlobalVector(ShaderPropertyId.ambientSkyColor, CoreUtils.ConvertSRGBToActiveColorSpace(RenderSettings.ambientSkyColor));
            Shader.SetGlobalVector(ShaderPropertyId.ambientEquatorColor, CoreUtils.ConvertSRGBToActiveColorSpace(RenderSettings.ambientEquatorColor));
            Shader.SetGlobalVector(ShaderPropertyId.ambientGroundColor, CoreUtils.ConvertSRGBToActiveColorSpace(RenderSettings.ambientGroundColor));

            // Used when subtractive mode is selected
            Shader.SetGlobalVector(ShaderPropertyId.subtractiveShadowColor, CoreUtils.ConvertSRGBToActiveColorSpace(RenderSettings.subtractiveShadowColor));

            // Required for 2D Unlit Shadergraph master node as it doesn't currently support hidden properties.
            Shader.SetGlobalColor(ShaderPropertyId.rendererColor, Color.white);
        }
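
        // Rendering Debugger overrides: when any debug view is active (and this is not a
        // preview camera), MSAA, HDR and post-processing can be forced off before the
        // camera target descriptor is finalized.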
        static void CheckAndApplyDebugSettings(ref RenderingData renderingData)
        {
            DebugDisplaySettings debugDisplaySettings = DebugDisplaySettings.Instance;
            ref CameraData cameraData = ref renderingData.cameraData;

            if (debugDisplaySettings.AreAnySettingsActive && !cameraData.isPreviewCamera)
            {
                DebugDisplaySettingsRendering renderingSettings = debugDisplaySettings.RenderingSettings;
                int msaaSamples = cameraData.cameraTargetDescriptor.msaaSamples;

                if (!renderingSettings.enableMsaa)
                    msaaSamples = 1;

                if (!renderingSettings.enableHDR)
                    cameraData.isHdrEnabled = false;

                if (!debugDisplaySettings.IsPostProcessingAllowed)
                    cameraData.postProcessEnabled = false;

                cameraData.cameraTargetDescriptor.graphicsFormat = MakeRenderTextureGraphicsFormat(cameraData.isHdrEnabled, true);
                cameraData.cameraTargetDescriptor.msaaSamples = msaaSamples;
            }
        }

        /// <summary>
        /// Returns the best supported image upscaling filter based on the provided upscaling filter selection
        /// </summary>
        /// <param name="imageSize">Size of the final image</param>
        /// <param name="renderScale">Scale being applied to the final image size</param>
        /// <param name="selection">Upscaling filter selected by the user</param>
        /// <returns>Either the original filter provided, or the best replacement available</returns>
        static ImageUpscalingFilter ResolveUpscalingFilterSelection(Vector2 imageSize, float renderScale, UpscalingFilterSelection selection)
        {
            // By default we just use linear filtering since it's the most compatible choice
            ImageUpscalingFilter filter = ImageUpscalingFilter.Linear;

            // Fall back to the automatic filter if FSR was selected, but isn't supported on the current platform
            if ((selection == UpscalingFilterSelection.FSR) && !FSRUtils.IsSupported())
            {
                selection = UpscalingFilterSelection.Auto;
            }

            switch (selection)
            {
                case UpscalingFilterSelection.Auto:
                {
                    // The user selected "auto" for their upscaling filter so we should attempt to choose the best filter
                    // for the current situation. When the current resolution and render scale are compatible with integer
                    // scaling we use the point sampling filter. Otherwise we just use the default filter (linear).
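                    // E.g. renderScale = 0.5 gives pixelScale = 2.0; a 1920x1080 target
                    // divides evenly by 2 (960x540 source), so point sampling produces an
                    // exact, artifact-free 2x upscale.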
                    float pixelScale = (1.0f / renderScale);
                    bool isIntegerScale = Mathf.Approximately((pixelScale - Mathf.Floor(pixelScale)), 0.0f);

                    if (isIntegerScale)
                    {
                        float widthScale = (imageSize.x / pixelScale);
                        float heightScale = (imageSize.y / pixelScale);

                        bool isImageCompatible = (Mathf.Approximately((widthScale - Mathf.Floor(widthScale)), 0.0f) &&
                                                  Mathf.Approximately((heightScale - Mathf.Floor(heightScale)), 0.0f));

                        if (isImageCompatible)
                        {
                            filter = ImageUpscalingFilter.Point;
                        }
                    }

                    break;
                }

                case UpscalingFilterSelection.Linear:
                {
                    // Do nothing since linear is already the default
                    break;
                }

                case UpscalingFilterSelection.Point:
                {
                    filter = ImageUpscalingFilter.Point;
                    break;
                }

                case UpscalingFilterSelection.FSR:
                {
                    filter = ImageUpscalingFilter.FSR;
                    break;
                }
            }

            return filter;
        }
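
        // Adaptive Performance hooks, compiled in when a sufficiently recent Adaptive
        // Performance package is installed (presumably com.unity.adaptiveperformance,
        // which defines this symbol): these scale shadow, LUT, render-scale and AA
        // settings down in response to device performance feedback.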
#if ADAPTIVE_PERFORMANCE_2_0_0_OR_NEWER
        static void ApplyAdaptivePerformance(ref CameraData cameraData)
        {
            var noFrontToBackOpaqueFlags = SortingCriteria.SortingLayer | SortingCriteria.RenderQueue | SortingCriteria.OptimizeStateChanges | SortingCriteria.CanvasOrder;
            if (AdaptivePerformance.AdaptivePerformanceRenderSettings.SkipFrontToBackSorting)
                cameraData.defaultOpaqueSortFlags = noFrontToBackOpaqueFlags;

            var MaxShadowDistanceMultiplier = AdaptivePerformance.AdaptivePerformanceRenderSettings.MaxShadowDistanceMultiplier;
            cameraData.maxShadowDistance *= MaxShadowDistanceMultiplier;

            var RenderScaleMultiplier = AdaptivePerformance.AdaptivePerformanceRenderSettings.RenderScaleMultiplier;
            cameraData.renderScale *= RenderScaleMultiplier;

            // TODO
            if (!cameraData.xr.enabled)
            {
                cameraData.cameraTargetDescriptor.width = (int)(cameraData.camera.pixelWidth * cameraData.renderScale);
                cameraData.cameraTargetDescriptor.height = (int)(cameraData.camera.pixelHeight * cameraData.renderScale);
            }
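
            // If the bias drops the index below the lowest quality level, AA is disabled
            // entirely; otherwise the quality is clamped to the valid range.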
            var antialiasingQualityIndex = (int)cameraData.antialiasingQuality - AdaptivePerformance.AdaptivePerformanceRenderSettings.AntiAliasingQualityBias;
            if (antialiasingQualityIndex < 0)
                cameraData.antialiasing = AntialiasingMode.None;
            cameraData.antialiasingQuality = (AntialiasingQuality)Mathf.Clamp(antialiasingQualityIndex, (int)AntialiasingQuality.Low, (int)AntialiasingQuality.High);
        }

        static void ApplyAdaptivePerformance(ref RenderingData renderingData)
        {
            if (AdaptivePerformance.AdaptivePerformanceRenderSettings.SkipDynamicBatching)
                renderingData.supportsDynamicBatching = false;

            var MainLightShadowmapResolutionMultiplier = AdaptivePerformance.AdaptivePerformanceRenderSettings.MainLightShadowmapResolutionMultiplier;
            renderingData.shadowData.mainLightShadowmapWidth = (int)(renderingData.shadowData.mainLightShadowmapWidth * MainLightShadowmapResolutionMultiplier);
            renderingData.shadowData.mainLightShadowmapHeight = (int)(renderingData.shadowData.mainLightShadowmapHeight * MainLightShadowmapResolutionMultiplier);

            var MainLightShadowCascadesCountBias = AdaptivePerformance.AdaptivePerformanceRenderSettings.MainLightShadowCascadesCountBias;
            renderingData.shadowData.mainLightShadowCascadesCount = Mathf.Clamp(renderingData.shadowData.mainLightShadowCascadesCount - MainLightShadowCascadesCountBias, 0, 4);

            var shadowQualityIndex = AdaptivePerformance.AdaptivePerformanceRenderSettings.ShadowQualityBias;
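
            // Each bias step strips one shadow feature, cheapest savings first:
            // soft shadows, then additional light shadows, then main light shadows.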
            for (int i = 0; i < shadowQualityIndex; i++)
            {
                if (renderingData.shadowData.supportsSoftShadows)
                {
                    renderingData.shadowData.supportsSoftShadows = false;
                    continue;
                }

                if (renderingData.shadowData.supportsAdditionalLightShadows)
                {
                    renderingData.shadowData.supportsAdditionalLightShadows = false;
                    continue;
                }

                if (renderingData.shadowData.supportsMainLightShadows)
                {
                    renderingData.shadowData.supportsMainLightShadows = false;
                    continue;
                }

                break;
            }
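
            // Under LUT bias, halve the color grading LUT from the default 32 to 16 to
            // reduce LUT memory and bandwidth.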
            if (AdaptivePerformance.AdaptivePerformanceRenderSettings.LutBias >= 1 && renderingData.postProcessingData.lutSize == 32)
                renderingData.postProcessingData.lutSize = 16;
        }
#endif
    }
}