// UniversalRenderPipelineCore.cs

using System;
using System.Collections.Generic;
using Unity.Collections;
using UnityEngine.Experimental.GlobalIllumination;
using UnityEngine.Experimental.Rendering;
using Lightmapping = UnityEngine.Experimental.GlobalIllumination.Lightmapping;

namespace UnityEngine.Rendering.Universal
{
    public enum MixedLightingSetup
    {
        None,
        ShadowMask,
        Subtractive,
    };

    /// <summary>
    /// Enumeration that indicates what kind of image scaling is occurring, if any.
    /// </summary>
    internal enum ImageScalingMode
    {
        /// No scaling
        None,

        /// Upscaling to a larger image
        Upscaling,

        /// Downscaling to a smaller image
        Downscaling
    }

    /// <summary>
    /// Enumeration that indicates what kind of upscaling filter is being used.
    /// </summary>
    internal enum ImageUpscalingFilter
    {
        /// Bilinear filtering
        Linear,

        /// Nearest-neighbor filtering
        Point,

        /// FidelityFX Super Resolution
        FSR
    }

    public struct RenderingData
    {
        public CullingResults cullResults;
        public CameraData cameraData;
        public LightData lightData;
        public ShadowData shadowData;
        public PostProcessingData postProcessingData;
        public bool supportsDynamicBatching;
        public PerObjectData perObjectData;

        /// <summary>
        /// True if post-processing effects are enabled while rendering the camera stack.
        /// </summary>
        public bool postProcessingEnabled;
    }

    public struct LightData
    {
        public int mainLightIndex;
        public int additionalLightsCount;
        public int maxPerObjectAdditionalLightsCount;
        public NativeArray<VisibleLight> visibleLights;
        internal NativeArray<int> originalIndices;
        public bool shadeAdditionalLightsPerVertex;
        public bool supportsMixedLighting;
        public bool reflectionProbeBoxProjection;
        public bool reflectionProbeBlending;
        public bool supportsLightLayers;

        /// <summary>
        /// True if additional lights are enabled.
        /// </summary>
        public bool supportsAdditionalLights;
    }

    public struct CameraData
    {
        // Internal camera data as we are not yet sure how to expose View in stereo context.
        // We might change this API soon.
        Matrix4x4 m_ViewMatrix;
        Matrix4x4 m_ProjectionMatrix;

        internal void SetViewAndProjectionMatrix(Matrix4x4 viewMatrix, Matrix4x4 projectionMatrix)
        {
            m_ViewMatrix = viewMatrix;
            m_ProjectionMatrix = projectionMatrix;
        }

        /// <summary>
        /// Returns the camera view matrix.
        /// </summary>
        /// <returns>The camera view matrix.</returns>
        public Matrix4x4 GetViewMatrix(int viewIndex = 0)
        {
#if ENABLE_VR && ENABLE_XR_MODULE
            if (xr.enabled)
                return xr.GetViewMatrix(viewIndex);
#endif
            return m_ViewMatrix;
        }

        /// <summary>
        /// Returns the camera projection matrix.
        /// </summary>
        /// <returns>The camera projection matrix.</returns>
        public Matrix4x4 GetProjectionMatrix(int viewIndex = 0)
        {
#if ENABLE_VR && ENABLE_XR_MODULE
            if (xr.enabled)
                return xr.GetProjMatrix(viewIndex);
#endif
            return m_ProjectionMatrix;
        }

        /// <summary>
        /// Returns the camera GPU projection matrix. This contains platform specific changes to handle y-flip and reverse z.
        /// Similar to <c>GL.GetGPUProjectionMatrix</c> but queries URP internal state to know if the pipeline is rendering to a render texture.
        /// For more info on platform differences regarding camera projection check: https://docs.unity3d.com/Manual/SL-PlatformDifferences.html
        /// </summary>
        /// <seealso cref="GL.GetGPUProjectionMatrix(Matrix4x4, bool)"/>
        /// <returns>The camera GPU projection matrix.</returns>
        public Matrix4x4 GetGPUProjectionMatrix(int viewIndex = 0)
        {
            return GL.GetGPUProjectionMatrix(GetProjectionMatrix(viewIndex), IsCameraProjectionMatrixFlipped());
        }
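
        // Illustrative usage sketch (not part of the original file): a custom pass that issues
        // cmd.Draw* calls typically binds the matrices exposed above, e.g.
        //
        //     Matrix4x4 view = renderingData.cameraData.GetViewMatrix();
        //     Matrix4x4 proj = renderingData.cameraData.GetGPUProjectionMatrix();
        //     cmd.SetViewProjectionMatrices(view, proj);
        //
        // where 'cmd' is the pass's CommandBuffer and 'renderingData' is the RenderingData
        // passed to ScriptableRenderPass.Execute.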
        public Camera camera;
        public CameraRenderType renderType;
        public RenderTexture targetTexture;
        public RenderTextureDescriptor cameraTargetDescriptor;
        internal Rect pixelRect;
        internal int pixelWidth;
        internal int pixelHeight;
        internal float aspectRatio;
        public float renderScale;
        internal ImageScalingMode imageScalingMode;
        internal ImageUpscalingFilter upscalingFilter;
        internal bool fsrOverrideSharpness;
        internal float fsrSharpness;
        public bool clearDepth;
        public CameraType cameraType;
        public bool isDefaultViewport;
        public bool isHdrEnabled;
        public bool requiresDepthTexture;
        public bool requiresOpaqueTexture;

        /// <summary>
        /// True if post-processing passes require a depth texture.
        /// </summary>
        public bool postProcessingRequiresDepthTexture;

#if ENABLE_VR && ENABLE_XR_MODULE
        public bool xrRendering;
#endif

        internal bool requireSrgbConversion
        {
            get
            {
#if ENABLE_VR && ENABLE_XR_MODULE
                if (xr.enabled)
                    return !xr.renderTargetDesc.sRGB && (QualitySettings.activeColorSpace == ColorSpace.Linear);
#endif
                return targetTexture == null && Display.main.requiresSrgbBlitToBackbuffer;
            }
        }

        /// <summary>
        /// True if the camera rendering is for the scene window in the editor.
        /// </summary>
        public bool isSceneViewCamera => cameraType == CameraType.SceneView;

        /// <summary>
        /// True if the camera rendering is for the preview window in the editor.
        /// </summary>
        public bool isPreviewCamera => cameraType == CameraType.Preview;

        internal bool isRenderPassSupportedCamera => (cameraType == CameraType.Game || cameraType == CameraType.Reflection);

        /// <summary>
        /// True if the camera device projection matrix is flipped. This happens when the pipeline is rendering
        /// to a render texture on non-OpenGL platforms. If you are doing a custom Blit pass to copy camera textures
        /// (_CameraColorTexture, _CameraDepthAttachment) you need to check this flag to know if you should flip the
        /// matrix when rendering with cmd.Draw* and reading from camera textures.
        /// </summary>
        public bool IsCameraProjectionMatrixFlipped()
        {
            // Users only have access to CameraData on URP rendering scope. The current renderer should never be null.
            var renderer = ScriptableRenderer.current;
            Debug.Assert(renderer != null, "IsCameraProjectionMatrixFlipped is being called outside camera rendering scope.");

            if (renderer != null)
            {
                bool renderingToBackBufferTarget = renderer.cameraColorTarget == BuiltinRenderTextureType.CameraTarget;
#if ENABLE_VR && ENABLE_XR_MODULE
                if (xr.enabled)
                    renderingToBackBufferTarget |= renderer.cameraColorTarget == xr.renderTarget && !xr.renderTargetIsRenderTexture;
#endif
                bool renderingToTexture = !renderingToBackBufferTarget || targetTexture != null;
                return SystemInfo.graphicsUVStartsAtTop && renderingToTexture;
            }

            return true;
        }
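
        // Illustrative sketch (not part of the original file): a custom blit pass that reads a
        // camera texture can use this flag to decide whether to flip the output vertically, e.g.
        //
        //     bool yFlip = renderingData.cameraData.IsCameraProjectionMatrixFlipped();
        //     Vector4 scaleBias = yFlip ? new Vector4(1.0f, -1.0f, 0.0f, 1.0f) : new Vector4(1.0f, 1.0f, 0.0f, 0.0f);
        //     cmd.SetGlobalVector(ShaderPropertyId.scaleBias, scaleBias);
        //
        // The exact scale/bias layout is shader-dependent; the values above assume a shader that
        // samples uv * scaleBias.xy + scaleBias.zw.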
        public SortingCriteria defaultOpaqueSortFlags;

        internal XRPass xr;

        [Obsolete("Please use xr.enabled instead.")]
        public bool isStereoEnabled;

        public float maxShadowDistance;
        public bool postProcessEnabled;
        public IEnumerator<Action<RenderTargetIdentifier, CommandBuffer>> captureActions;
        public LayerMask volumeLayerMask;
        public Transform volumeTrigger;
        public bool isStopNaNEnabled;
        public bool isDitheringEnabled;
        public AntialiasingMode antialiasing;
        public AntialiasingQuality antialiasingQuality;

        /// <summary>
        /// Returns the current renderer used by this camera.
        /// <see cref="ScriptableRenderer"/>
        /// </summary>
        public ScriptableRenderer renderer;

        /// <summary>
        /// True if this camera is resolving rendering to the final camera render target.
        /// When rendering a stack of cameras only the last camera in the stack will resolve to the camera target.
        /// </summary>
        public bool resolveFinalTarget;

        /// <summary>
        /// Camera position in world space.
        /// </summary>
        public Vector3 worldSpaceCameraPos;
    }

    public struct ShadowData
    {
        public bool supportsMainLightShadows;

        [Obsolete("Obsolete, this feature was replaced by the 'ScreenSpaceShadows' renderer feature.")]
        public bool requiresScreenSpaceShadowResolve;

        public int mainLightShadowmapWidth;
        public int mainLightShadowmapHeight;
        public int mainLightShadowCascadesCount;
        public Vector3 mainLightShadowCascadesSplit;

        /// <summary>
        /// Main light last cascade shadow fade border.
        /// The value represents the width of the shadow fade and ranges from 0 to 1,
        /// where 0 means no shadow fade.
        /// </summary>
        public float mainLightShadowCascadeBorder;

        public bool supportsAdditionalLightShadows;
        public int additionalLightsShadowmapWidth;
        public int additionalLightsShadowmapHeight;
        public bool supportsSoftShadows;
        public int shadowmapDepthBufferBits;
        public List<Vector4> bias;
        public List<int> resolution;

        internal bool isKeywordAdditionalLightShadowsEnabled;
        internal bool isKeywordSoftShadowsEnabled;
    }

    // Precomputed tile data.
    public struct PreTile
    {
        // Tile left, right, bottom and top plane equations in view space.
        // Normals are pointing out.
        public Unity.Mathematics.float4 planeLeft;
        public Unity.Mathematics.float4 planeRight;
        public Unity.Mathematics.float4 planeBottom;
        public Unity.Mathematics.float4 planeTop;
    }

    // Actual tile data passed to the deferred shaders.
    public struct TileData
    {
        public uint tileID;         // 2x 16 bits
        public uint listBitMask;    // 32 bits
        public uint relLightOffset; // 16 bits is enough
        public uint unused;
    }
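
    // Illustrative sketch (not part of the original file): tileID above packs two 16-bit tile
    // coordinates into a single uint. A hypothetical helper showing one such packing scheme;
    // the deferred renderer's actual packing code may differ.
    internal static class TileIDPackingExample
    {
        // Pack 16-bit x and y tile coordinates into one uint (x in the low bits, y in the high bits).
        internal static uint Pack(uint x, uint y) => (x & 0xFFFFu) | (y << 16);

        // Unpack the two coordinates stored by Pack.
        internal static void Unpack(uint tileID, out uint x, out uint y)
        {
            x = tileID & 0xFFFFu;
            y = tileID >> 16;
        }
    }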
    // Actual point/spot light data passed to the deferred shaders.
    public struct PunctualLightData
    {
        public Vector3 wsPos;
        public float radius; // TODO remove? included in attenuation
        public Vector4 color;
        public Vector4 attenuation; // .xy are used by DistanceAttenuation - .zw are used by AngleAttenuation (for SpotLights)
        public Vector3 spotDirection; // for spotLights
        public int flags;
        public Vector4 occlusionProbeInfo;
        public uint layerMask;
    }

    internal static class ShaderPropertyId
    {
        public static readonly int glossyEnvironmentColor = Shader.PropertyToID("_GlossyEnvironmentColor");
        public static readonly int subtractiveShadowColor = Shader.PropertyToID("_SubtractiveShadowColor");

        public static readonly int glossyEnvironmentCubeMap = Shader.PropertyToID("_GlossyEnvironmentCubeMap");
        public static readonly int glossyEnvironmentCubeMapHDR = Shader.PropertyToID("_GlossyEnvironmentCubeMap_HDR");

        public static readonly int ambientSkyColor = Shader.PropertyToID("unity_AmbientSky");
        public static readonly int ambientEquatorColor = Shader.PropertyToID("unity_AmbientEquator");
        public static readonly int ambientGroundColor = Shader.PropertyToID("unity_AmbientGround");

        public static readonly int time = Shader.PropertyToID("_Time");
        public static readonly int sinTime = Shader.PropertyToID("_SinTime");
        public static readonly int cosTime = Shader.PropertyToID("_CosTime");
        public static readonly int deltaTime = Shader.PropertyToID("unity_DeltaTime");
        public static readonly int timeParameters = Shader.PropertyToID("_TimeParameters");

        public static readonly int scaledScreenParams = Shader.PropertyToID("_ScaledScreenParams");
        public static readonly int worldSpaceCameraPos = Shader.PropertyToID("_WorldSpaceCameraPos");
        public static readonly int screenParams = Shader.PropertyToID("_ScreenParams");
        public static readonly int projectionParams = Shader.PropertyToID("_ProjectionParams");
        public static readonly int zBufferParams = Shader.PropertyToID("_ZBufferParams");
        public static readonly int orthoParams = Shader.PropertyToID("unity_OrthoParams");
        public static readonly int globalMipBias = Shader.PropertyToID("_GlobalMipBias");

        public static readonly int screenSize = Shader.PropertyToID("_ScreenSize");

        public static readonly int viewMatrix = Shader.PropertyToID("unity_MatrixV");
        public static readonly int projectionMatrix = Shader.PropertyToID("glstate_matrix_projection");
        public static readonly int viewAndProjectionMatrix = Shader.PropertyToID("unity_MatrixVP");

        public static readonly int inverseViewMatrix = Shader.PropertyToID("unity_MatrixInvV");
        public static readonly int inverseProjectionMatrix = Shader.PropertyToID("unity_MatrixInvP");
        public static readonly int inverseViewAndProjectionMatrix = Shader.PropertyToID("unity_MatrixInvVP");

        public static readonly int cameraProjectionMatrix = Shader.PropertyToID("unity_CameraProjection");
        public static readonly int inverseCameraProjectionMatrix = Shader.PropertyToID("unity_CameraInvProjection");
        public static readonly int worldToCameraMatrix = Shader.PropertyToID("unity_WorldToCamera");
        public static readonly int cameraToWorldMatrix = Shader.PropertyToID("unity_CameraToWorld");

        public static readonly int cameraWorldClipPlanes = Shader.PropertyToID("unity_CameraWorldClipPlanes");

        public static readonly int billboardNormal = Shader.PropertyToID("unity_BillboardNormal");
        public static readonly int billboardTangent = Shader.PropertyToID("unity_BillboardTangent");
        public static readonly int billboardCameraParams = Shader.PropertyToID("unity_BillboardCameraParams");

        public static readonly int sourceTex = Shader.PropertyToID("_SourceTex");
        public static readonly int scaleBias = Shader.PropertyToID("_ScaleBias");
        public static readonly int scaleBiasRt = Shader.PropertyToID("_ScaleBiasRt");

        // Required for 2D Unlit Shadergraph master node as it doesn't currently support hidden properties.
        public static readonly int rendererColor = Shader.PropertyToID("_RendererColor");
    }

    public struct PostProcessingData
    {
        public ColorGradingMode gradingMode;
        public int lutSize;

        /// <summary>
        /// True if fast approximation functions are used when converting between the sRGB and Linear color spaces, false otherwise.
        /// </summary>
        public bool useFastSRGBLinearConversion;
    }

    public static class ShaderKeywordStrings
    {
        public static readonly string MainLightShadows = "_MAIN_LIGHT_SHADOWS";
        public static readonly string MainLightShadowCascades = "_MAIN_LIGHT_SHADOWS_CASCADE";
        public static readonly string MainLightShadowScreen = "_MAIN_LIGHT_SHADOWS_SCREEN";
        public static readonly string CastingPunctualLightShadow = "_CASTING_PUNCTUAL_LIGHT_SHADOW"; // This is used during shadow map generation to differentiate between directional and punctual light shadows, as they use different formulas to apply Normal Bias
        public static readonly string AdditionalLightsVertex = "_ADDITIONAL_LIGHTS_VERTEX";
        public static readonly string AdditionalLightsPixel = "_ADDITIONAL_LIGHTS";
        internal static readonly string ClusteredRendering = "_CLUSTERED_RENDERING";
        public static readonly string AdditionalLightShadows = "_ADDITIONAL_LIGHT_SHADOWS";
        public static readonly string ReflectionProbeBoxProjection = "_REFLECTION_PROBE_BOX_PROJECTION";
        public static readonly string ReflectionProbeBlending = "_REFLECTION_PROBE_BLENDING";
        public static readonly string SoftShadows = "_SHADOWS_SOFT";
        public static readonly string MixedLightingSubtractive = "_MIXED_LIGHTING_SUBTRACTIVE"; // Backward compatibility
        public static readonly string LightmapShadowMixing = "LIGHTMAP_SHADOW_MIXING";
        public static readonly string ShadowsShadowMask = "SHADOWS_SHADOWMASK";
        public static readonly string LightLayers = "_LIGHT_LAYERS";
        public static readonly string RenderPassEnabled = "_RENDER_PASS_ENABLED";
        public static readonly string BillboardFaceCameraPos = "BILLBOARD_FACE_CAMERA_POS";
        public static readonly string LightCookies = "_LIGHT_COOKIES";

        public static readonly string DepthNoMsaa = "_DEPTH_NO_MSAA";
        public static readonly string DepthMsaa2 = "_DEPTH_MSAA_2";
        public static readonly string DepthMsaa4 = "_DEPTH_MSAA_4";
        public static readonly string DepthMsaa8 = "_DEPTH_MSAA_8";

        public static readonly string LinearToSRGBConversion = "_LINEAR_TO_SRGB_CONVERSION";
        internal static readonly string UseFastSRGBLinearConversion = "_USE_FAST_SRGB_LINEAR_CONVERSION";

        public static readonly string DBufferMRT1 = "_DBUFFER_MRT1";
        public static readonly string DBufferMRT2 = "_DBUFFER_MRT2";
        public static readonly string DBufferMRT3 = "_DBUFFER_MRT3";
        public static readonly string DecalNormalBlendLow = "_DECAL_NORMAL_BLEND_LOW";
        public static readonly string DecalNormalBlendMedium = "_DECAL_NORMAL_BLEND_MEDIUM";
        public static readonly string DecalNormalBlendHigh = "_DECAL_NORMAL_BLEND_HIGH";

        public static readonly string SmaaLow = "_SMAA_PRESET_LOW";
        public static readonly string SmaaMedium = "_SMAA_PRESET_MEDIUM";
        public static readonly string SmaaHigh = "_SMAA_PRESET_HIGH";
        public static readonly string PaniniGeneric = "_GENERIC";
        public static readonly string PaniniUnitDistance = "_UNIT_DISTANCE";
        public static readonly string BloomLQ = "_BLOOM_LQ";
        public static readonly string BloomHQ = "_BLOOM_HQ";
        public static readonly string BloomLQDirt = "_BLOOM_LQ_DIRT";
        public static readonly string BloomHQDirt = "_BLOOM_HQ_DIRT";
        public static readonly string UseRGBM = "_USE_RGBM";
        public static readonly string Distortion = "_DISTORTION";
        public static readonly string ChromaticAberration = "_CHROMATIC_ABERRATION";
        public static readonly string HDRGrading = "_HDR_GRADING";
        public static readonly string TonemapACES = "_TONEMAP_ACES";
        public static readonly string TonemapNeutral = "_TONEMAP_NEUTRAL";
        public static readonly string FilmGrain = "_FILM_GRAIN";
        public static readonly string Fxaa = "_FXAA";
        public static readonly string Dithering = "_DITHERING";
        public static readonly string ScreenSpaceOcclusion = "_SCREEN_SPACE_OCCLUSION";
        public static readonly string PointSampling = "_POINT_SAMPLING";
        public static readonly string Rcas = "_RCAS";
        public static readonly string Gamma20 = "_GAMMA_20";

        public static readonly string HighQualitySampling = "_HIGH_QUALITY_SAMPLING";

        public static readonly string DOWNSAMPLING_SIZE_2 = "DOWNSAMPLING_SIZE_2";
        public static readonly string DOWNSAMPLING_SIZE_4 = "DOWNSAMPLING_SIZE_4";
        public static readonly string DOWNSAMPLING_SIZE_8 = "DOWNSAMPLING_SIZE_8";
        public static readonly string DOWNSAMPLING_SIZE_16 = "DOWNSAMPLING_SIZE_16";
        public static readonly string _SPOT = "_SPOT";
        public static readonly string _DIRECTIONAL = "_DIRECTIONAL";
        public static readonly string _POINT = "_POINT";
        public static readonly string _DEFERRED_STENCIL = "_DEFERRED_STENCIL";
        public static readonly string _DEFERRED_FIRST_LIGHT = "_DEFERRED_FIRST_LIGHT";
        public static readonly string _DEFERRED_MAIN_LIGHT = "_DEFERRED_MAIN_LIGHT";
        public static readonly string _GBUFFER_NORMALS_OCT = "_GBUFFER_NORMALS_OCT";
        public static readonly string _DEFERRED_MIXED_LIGHTING = "_DEFERRED_MIXED_LIGHTING";
        public static readonly string LIGHTMAP_ON = "LIGHTMAP_ON";
        public static readonly string DYNAMICLIGHTMAP_ON = "DYNAMICLIGHTMAP_ON";
        public static readonly string _ALPHATEST_ON = "_ALPHATEST_ON";
        public static readonly string DIRLIGHTMAP_COMBINED = "DIRLIGHTMAP_COMBINED";
        public static readonly string _DETAIL_MULX2 = "_DETAIL_MULX2";
        public static readonly string _DETAIL_SCALED = "_DETAIL_SCALED";
        public static readonly string _CLEARCOAT = "_CLEARCOAT";
        public static readonly string _CLEARCOATMAP = "_CLEARCOATMAP";
        public static readonly string DEBUG_DISPLAY = "DEBUG_DISPLAY";

        public static readonly string _EMISSION = "_EMISSION";
        public static readonly string _RECEIVE_SHADOWS_OFF = "_RECEIVE_SHADOWS_OFF";
        public static readonly string _SURFACE_TYPE_TRANSPARENT = "_SURFACE_TYPE_TRANSPARENT";
        public static readonly string _ALPHAPREMULTIPLY_ON = "_ALPHAPREMULTIPLY_ON";
        public static readonly string _ALPHAMODULATE_ON = "_ALPHAMODULATE_ON";
        public static readonly string _NORMALMAP = "_NORMALMAP";
        public static readonly string EDITOR_VISUALIZATION = "EDITOR_VISUALIZATION";

        // XR
        public static readonly string UseDrawProcedural = "_USE_DRAW_PROCEDURAL";
    }
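
    // Illustrative sketch (not part of the original file): render passes toggle the keywords above
    // on a command buffer before drawing. A hypothetical helper showing the pattern:
    internal static class ShaderKeywordExample
    {
        // Enable or disable soft shadows for draws recorded in 'cmd' after this call.
        internal static void SetSoftShadows(CommandBuffer cmd, bool enabled)
        {
            if (enabled)
                cmd.EnableShaderKeyword(ShaderKeywordStrings.SoftShadows);
            else
                cmd.DisableShaderKeyword(ShaderKeywordStrings.SoftShadows);
        }
    }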
    public sealed partial class UniversalRenderPipeline
    {
        // Holds light direction for directional lights or position for punctual lights.
        // When w is set to 1.0, it means it's a punctual light.
        static Vector4 k_DefaultLightPosition = new Vector4(0.0f, 0.0f, 1.0f, 0.0f);
        static Vector4 k_DefaultLightColor = Color.black;

        // Default light attenuation is set up so that directional lights return 1.0 for both
        // distance and angle attenuation.
        static Vector4 k_DefaultLightAttenuation = new Vector4(0.0f, 1.0f, 0.0f, 1.0f);
        static Vector4 k_DefaultLightSpotDirection = new Vector4(0.0f, 0.0f, 1.0f, 0.0f);
        static Vector4 k_DefaultLightsProbeChannel = new Vector4(0.0f, 0.0f, 0.0f, 0.0f);

        static List<Vector4> m_ShadowBiasData = new List<Vector4>();
        static List<int> m_ShadowResolutionData = new List<int>();

        /// <summary>
        /// Checks if a camera is a game camera.
        /// </summary>
        /// <param name="camera">Camera to check state from.</param>
        /// <returns>true if given camera is a game camera, false otherwise.</returns>
        public static bool IsGameCamera(Camera camera)
        {
            if (camera == null)
                throw new ArgumentNullException("camera");

            return camera.cameraType == CameraType.Game || camera.cameraType == CameraType.VR;
        }

        /// <summary>
        /// Checks if a camera is rendering in stereo mode.
        /// </summary>
        /// <param name="camera">Camera to check state from.</param>
        /// <returns>Returns true if the given camera is rendering in stereo mode, false otherwise.</returns>
        [Obsolete("Please use CameraData.xr.enabled instead.")]
        public static bool IsStereoEnabled(Camera camera)
        {
            if (camera == null)
                throw new ArgumentNullException("camera");

            return IsGameCamera(camera) && (camera.stereoTargetEye == StereoTargetEyeMask.Both);
        }

        /// <summary>
        /// Returns the current render pipeline asset for the current quality setting.
        /// If no render pipeline asset is assigned in QualitySettings, then returns the one assigned in GraphicsSettings.
        /// </summary>
        public static UniversalRenderPipelineAsset asset
        {
            get => GraphicsSettings.currentRenderPipeline as UniversalRenderPipelineAsset;
        }
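
        // Illustrative sketch (not part of the original file): callers typically null-check the
        // active asset before reading settings from it, e.g.
        //
        //     var urpAsset = UniversalRenderPipeline.asset;
        //     bool hdrSupported = urpAsset != null && urpAsset.supportsHDR;
        //
        // The property returns null when the active render pipeline asset is not a URP asset.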
        /// <summary>
        /// Checks if a camera is rendering in multi-pass stereo mode.
        /// </summary>
        /// <param name="camera">Camera to check state from.</param>
        /// <returns>Returns true if the given camera is rendering in multi-pass stereo mode, false otherwise.</returns>
        [Obsolete("Please use CameraData.xr.singlePassEnabled instead.")]
        static bool IsMultiPassStereoEnabled(Camera camera)
        {
            if (camera == null)
                throw new ArgumentNullException("camera");

            return false;
        }

        Comparison<Camera> cameraComparison = (camera1, camera2) => { return (int)camera1.depth - (int)camera2.depth; };

#if UNITY_2021_1_OR_NEWER
        void SortCameras(List<Camera> cameras)
        {
            if (cameras.Count > 1)
                cameras.Sort(cameraComparison);
        }
#else
        void SortCameras(Camera[] cameras)
        {
            if (cameras.Length > 1)
                Array.Sort(cameras, cameraComparison);
        }
#endif

        static GraphicsFormat MakeRenderTextureGraphicsFormat(bool isHdrEnabled, bool needsAlpha)
        {
            if (isHdrEnabled)
            {
                if (!needsAlpha && RenderingUtils.SupportsGraphicsFormat(GraphicsFormat.B10G11R11_UFloatPack32, FormatUsage.Linear | FormatUsage.Render))
                    return GraphicsFormat.B10G11R11_UFloatPack32;
                if (RenderingUtils.SupportsGraphicsFormat(GraphicsFormat.R16G16B16A16_SFloat, FormatUsage.Linear | FormatUsage.Render))
                    return GraphicsFormat.R16G16B16A16_SFloat;
                return SystemInfo.GetGraphicsFormat(DefaultFormat.HDR); // This might actually be an LDR format on old devices.
            }

            return SystemInfo.GetGraphicsFormat(DefaultFormat.LDR);
        }

        static RenderTextureDescriptor CreateRenderTextureDescriptor(Camera camera, float renderScale,
            bool isHdrEnabled, int msaaSamples, bool needsAlpha, bool requiresOpaqueTexture)
        {
            RenderTextureDescriptor desc;

            if (camera.targetTexture == null)
            {
                desc = new RenderTextureDescriptor(camera.pixelWidth, camera.pixelHeight);
                desc.width = (int)((float)desc.width * renderScale);
                desc.height = (int)((float)desc.height * renderScale);
                desc.graphicsFormat = MakeRenderTextureGraphicsFormat(isHdrEnabled, needsAlpha);
                desc.depthBufferBits = 32;
                desc.msaaSamples = msaaSamples;
                desc.sRGB = (QualitySettings.activeColorSpace == ColorSpace.Linear);
            }
            else
            {
                desc = camera.targetTexture.descriptor;
                desc.width = camera.pixelWidth;
                desc.height = camera.pixelHeight;
                if (camera.cameraType == CameraType.SceneView && !isHdrEnabled)
                {
                    desc.graphicsFormat = SystemInfo.GetGraphicsFormat(DefaultFormat.LDR);
                }
                // SystemInfo.SupportsRenderTextureFormat(camera.targetTexture.descriptor.colorFormat)
                // will assert on R8_SINT since it isn't a valid value of RenderTextureFormat.
                // If this is fixed, we can add a debug message explaining to the user why some
                // available RenderTextureFormats resolve to a black render texture even though
                // no warning or error is given.
            }

            // Make sure the dimensions are non-zero.
            desc.width = Mathf.Max(1, desc.width);
            desc.height = Mathf.Max(1, desc.height);
            desc.enableRandomWrite = false;
            desc.bindMS = false;
            desc.useDynamicScale = camera.allowDynamicResolution;

            // The way RenderTextures handle MSAA fallback when an unsupported sample count of 2 is requested (falling back to numSamples = 1), differs from the way
            // the fallback is handled when setting up the Vulkan swapchain (rounding up numSamples to 4, if supported). This caused an issue on Mali GPUs which don't support
            // 2x MSAA.
            // The following code makes sure that on Vulkan the MSAA unsupported fallback behaviour is consistent between RenderTextures and Swapchain.
            // TODO: we should review how all backends handle MSAA fallbacks and move these implementation details into engine code.
            if (SystemInfo.graphicsDeviceType == GraphicsDeviceType.Vulkan)
            {
                // If the requested number of samples is 2, and the supported value is 1x, it means that 2x is unsupported on this GPU.
                // Then we bump up the requested value to 4.
                if (desc.msaaSamples == 2 && SystemInfo.GetRenderTextureSupportedMSAASampleCount(desc) == 1)
                    desc.msaaSamples = 4;
            }

            // Check that the requested MSAA sample count is supported by the current platform. If it's not supported,
            // replace the requested desc.msaaSamples value with the actual value the engine falls back to.
            desc.msaaSamples = SystemInfo.GetRenderTextureSupportedMSAASampleCount(desc);

            // If the target platform doesn't support storing multisampled RTs and we are doing a separate opaque pass, using a Load load action on the subsequent passes
            // will result in loading Resolved data, which on some platforms is discarded, resulting in losing the results of the previous passes.
            // As a workaround we disable MSAA to make sure that the results of previous passes are stored. (Fix for case 1247423.)
            if (!SystemInfo.supportsStoreAndResolveAction && requiresOpaqueTexture)
                desc.msaaSamples = 1;

            return desc;
        }
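
        // Illustrative sketch (not part of the original file): a descriptor produced as above is
        // the kind of descriptor used to allocate intermediate camera targets, e.g.
        //
        //     RenderTextureDescriptor desc = CreateRenderTextureDescriptor(camera, 1.0f, true, 4, false, false);
        //     cmd.GetTemporaryRT(Shader.PropertyToID("_ExampleTarget"), desc, FilterMode.Bilinear);
        //
        // "_ExampleTarget" is a hypothetical texture name used only for this sketch.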
        private static Lightmapping.RequestLightsDelegate lightsDelegate = (Light[] requests, NativeArray<LightDataGI> lightsOutput) =>
        {
            LightDataGI lightData = new LightDataGI();
#if UNITY_EDITOR
            // Always extract lights in the Editor.
            for (int i = 0; i < requests.Length; i++)
            {
                Light light = requests[i];
                var additionalLightData = light.GetUniversalAdditionalLightData();

                LightmapperUtils.Extract(light, out Cookie cookie);

                switch (light.type)
                {
                    case LightType.Directional:
                        DirectionalLight directionalLight = new DirectionalLight();
                        LightmapperUtils.Extract(light, ref directionalLight);

                        if (light.cookie != null)
                        {
                            // Size == 1 / Scale
                            cookie.sizes = additionalLightData.lightCookieSize;
                            // Offset: map the cookie UV offset to a light position offset along the local axes.
                            if (additionalLightData.lightCookieOffset != Vector2.zero)
                            {
                                var r = light.transform.right * additionalLightData.lightCookieOffset.x;
                                var u = light.transform.up * additionalLightData.lightCookieOffset.y;
                                var offset = r + u;

                                directionalLight.position += offset;
                            }
                        }

                        lightData.Init(ref directionalLight, ref cookie);
                        break;
                    case LightType.Point:
                        PointLight pointLight = new PointLight();
                        LightmapperUtils.Extract(light, ref pointLight);
                        lightData.Init(ref pointLight, ref cookie);
                        break;
                    case LightType.Spot:
                        SpotLight spotLight = new SpotLight();
                        LightmapperUtils.Extract(light, ref spotLight);
                        spotLight.innerConeAngle = light.innerSpotAngle * Mathf.Deg2Rad;
                        spotLight.angularFalloff = AngularFalloffType.AnalyticAndInnerAngle;
                        lightData.Init(ref spotLight, ref cookie);
                        break;
                    case LightType.Area:
                        RectangleLight rectangleLight = new RectangleLight();
                        LightmapperUtils.Extract(light, ref rectangleLight);
                        rectangleLight.mode = LightMode.Baked;
                        lightData.Init(ref rectangleLight);
                        break;
                    case LightType.Disc:
                        DiscLight discLight = new DiscLight();
                        LightmapperUtils.Extract(light, ref discLight);
                        discLight.mode = LightMode.Baked;
                        lightData.Init(ref discLight);
                        break;
                    default:
                        lightData.InitNoBake(light.GetInstanceID());
                        break;
                }

                lightData.falloff = FalloffType.InverseSquared;
                lightsOutput[i] = lightData;
            }
#else
            // If Enlighten realtime GI isn't active, we don't extract lights.
            if (SupportedRenderingFeatures.active.enlighten == false || ((int)SupportedRenderingFeatures.active.lightmapBakeTypes & (int)LightmapBakeType.Realtime) == 0)
            {
                for (int i = 0; i < requests.Length; i++)
                {
                    Light light = requests[i];
                    lightData.InitNoBake(light.GetInstanceID());
                    lightsOutput[i] = lightData;
                }
            }
            else
            {
                for (int i = 0; i < requests.Length; i++)
                {
                    Light light = requests[i];
                    switch (light.type)
                    {
                        case LightType.Directional:
                            DirectionalLight directionalLight = new DirectionalLight();
                            LightmapperUtils.Extract(light, ref directionalLight);
                            lightData.Init(ref directionalLight);
                            break;
                        case LightType.Point:
                            PointLight pointLight = new PointLight();
                            LightmapperUtils.Extract(light, ref pointLight);
                            lightData.Init(ref pointLight);
                            break;
                        case LightType.Spot:
                            SpotLight spotLight = new SpotLight();
                            LightmapperUtils.Extract(light, ref spotLight);
                            spotLight.innerConeAngle = light.innerSpotAngle * Mathf.Deg2Rad;
                            spotLight.angularFalloff = AngularFalloffType.AnalyticAndInnerAngle;
                            lightData.Init(ref spotLight);
                            break;
                        case LightType.Area:
                            // Rect area lights are baked only in URP.
                            lightData.InitNoBake(light.GetInstanceID());
                            break;
                        case LightType.Disc:
                            // Disc lights are baked only.
                            lightData.InitNoBake(light.GetInstanceID());
                            break;
                        default:
                            lightData.InitNoBake(light.GetInstanceID());
                            break;
                    }

                    lightData.falloff = FalloffType.InverseSquared;
                    lightsOutput[i] = lightData;
                }
            }
#endif
        };
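
        // Note (illustrative, not part of the original file): this delegate is what lets the
        // lightmapper see URP's interpretation of each light. The pipeline typically registers it
        // with the GI system via something like
        //
        //     Lightmapping.SetDelegate(lightsDelegate);
        //
        // and restores the default with Lightmapping.ResetDelegate() when the pipeline is disposed.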
        // Called from DeferredLights.cs too.
        public static void GetLightAttenuationAndSpotDirection(
            LightType lightType, float lightRange, Matrix4x4 lightLocalToWorldMatrix,
            float spotAngle, float? innerSpotAngle,
            out Vector4 lightAttenuation, out Vector4 lightSpotDir)
        {
            lightAttenuation = k_DefaultLightAttenuation;
            lightSpotDir = k_DefaultLightSpotDirection;

            // Directional light attenuation is initialized so that distance attenuation is always 1.0.
            if (lightType != LightType.Directional)
            {
                // Light attenuation in URP matches the vanilla Unity one:
                // attenuation = 1.0 / distanceToLightSqr
                // We offer two different smoothing factors.
                // The smoothing factors make sure that the light intensity is zero at the light range limit.
                //
                // The first smoothing factor is a linear fade starting at 80% of the light range:
                // smoothFactor = (lightRangeSqr - distanceToLightSqr) / (lightRangeSqr - fadeStartDistanceSqr)
                // We rewrite smoothFactor so the constant terms below can be precomputed and the smooth factor
                // applied with one MAD instruction:
                // smoothFactor = distanceSqr * (1.0 / (fadeDistanceSqr - lightRangeSqr)) + (-lightRangeSqr / (fadeDistanceSqr - lightRangeSqr))
                //              = distanceSqr * oneOverFadeRangeSqr + lightRangeSqrOverFadeRangeSqr
                //
                // The other smoothing factor matches the one used in the Unity lightmapper but is slower than the linear one:
                // smoothFactor = (1.0 - saturate((distanceSqr * 1.0 / lightRangeSqr)^2))^2
                float lightRangeSqr = lightRange * lightRange;
                float fadeStartDistanceSqr = 0.8f * 0.8f * lightRangeSqr;
                float fadeRangeSqr = (fadeStartDistanceSqr - lightRangeSqr);
                float oneOverFadeRangeSqr = 1.0f / fadeRangeSqr;
                float lightRangeSqrOverFadeRangeSqr = -lightRangeSqr / fadeRangeSqr;
                float oneOverLightRangeSqr = 1.0f / Mathf.Max(0.0001f, lightRange * lightRange);

                // On mobile and Nintendo Switch: use the faster linear smoothing factor (SHADER_HINT_NICE_QUALITY).
                // On other devices: use the smoothing factor that matches the GI.
                lightAttenuation.x = GraphicsSettings.HasShaderDefine(Graphics.activeTier, BuiltinShaderDefine.SHADER_API_MOBILE) || SystemInfo.graphicsDeviceType == GraphicsDeviceType.Switch
                    ? oneOverFadeRangeSqr
                    : oneOverLightRangeSqr;
                lightAttenuation.y = lightRangeSqrOverFadeRangeSqr;
            }

            if (lightType == LightType.Spot)
            {
                Vector4 dir = lightLocalToWorldMatrix.GetColumn(2);
                lightSpotDir = new Vector4(-dir.x, -dir.y, -dir.z, 0.0f);

                // Spot attenuation with a linear falloff can be defined as
                // (SdotL - cosOuterAngle) / (cosInnerAngle - cosOuterAngle)
                // This can be rewritten as
                // invAngleRange = 1.0 / (cosInnerAngle - cosOuterAngle)
                // SdotL * invAngleRange + (-cosOuterAngle * invAngleRange)
                // so the two terms can be precomputed and applied with a single MAD instruction.
                float cosOuterAngle = Mathf.Cos(Mathf.Deg2Rad * spotAngle * 0.5f);
                // We need to do a null check for particle lights.
                // This should be changed in the future.
                // Particle lights will use an inline function.
                float cosInnerAngle;
                if (innerSpotAngle.HasValue)
                    cosInnerAngle = Mathf.Cos(innerSpotAngle.Value * Mathf.Deg2Rad * 0.5f);
                else
                    cosInnerAngle = Mathf.Cos((2.0f * Mathf.Atan(Mathf.Tan(spotAngle * 0.5f * Mathf.Deg2Rad) * (64.0f - 18.0f) / 64.0f)) * 0.5f);

                float smoothAngleRange = Mathf.Max(0.001f, cosInnerAngle - cosOuterAngle);
                float invAngleRange = 1.0f / smoothAngleRange;
                float add = -cosOuterAngle * invAngleRange;

                lightAttenuation.z = invAngleRange;
                lightAttenuation.w = add;
            }
        }
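
        // Worked example (illustrative, not part of the original file) of the linear fade above,
        // for lightRange = 10:
        //     lightRangeSqr                 = 100
        //     fadeStartDistanceSqr          = 0.8^2 * 100 = 64
        //     fadeRangeSqr                  = 64 - 100 = -36
        //     oneOverFadeRangeSqr           = -1/36 ~ -0.0278
        //     lightRangeSqrOverFadeRangeSqr = 100/36 ~ 2.778
        // so smoothFactor = distanceSqr * -0.0278 + 2.778 evaluates to 1.0 at the fade start
        // (distanceSqr = 64) and to 0.0 at the range limit (distanceSqr = 100), matching the
        // smoothFactor formula described in the comments above.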
        public static void InitializeLightConstants_Common(NativeArray<VisibleLight> lights, int lightIndex, out Vector4 lightPos, out Vector4 lightColor, out Vector4 lightAttenuation, out Vector4 lightSpotDir, out Vector4 lightOcclusionProbeChannel)
        {
            lightPos = k_DefaultLightPosition;
            lightColor = k_DefaultLightColor;
            lightOcclusionProbeChannel = k_DefaultLightsProbeChannel;
            lightAttenuation = k_DefaultLightAttenuation;
            lightSpotDir = k_DefaultLightSpotDirection;

            // When no lights are visible, the main light index will be set to -1.
            // In this case we leave the outputs at their default values and return.
            if (lightIndex < 0)
                return;

            VisibleLight lightData = lights[lightIndex];
            if (lightData.lightType == LightType.Directional)
            {
                Vector4 dir = -lightData.localToWorldMatrix.GetColumn(2);
                lightPos = new Vector4(dir.x, dir.y, dir.z, 0.0f);
            }
            else
            {
                Vector4 pos = lightData.localToWorldMatrix.GetColumn(3);
                lightPos = new Vector4(pos.x, pos.y, pos.z, 1.0f);
            }

            // VisibleLight.finalColor already returns the color in the active color space.
            lightColor = lightData.finalColor;

            GetLightAttenuationAndSpotDirection(
                lightData.lightType, lightData.range, lightData.localToWorldMatrix,
                lightData.spotAngle, lightData.light?.innerSpotAngle,
                out lightAttenuation, out lightSpotDir);

            Light light = lightData.light;
            if (light != null && light.bakingOutput.lightmapBakeType == LightmapBakeType.Mixed &&
                0 <= light.bakingOutput.occlusionMaskChannel &&
                light.bakingOutput.occlusionMaskChannel < 4)
            {
                lightOcclusionProbeChannel[light.bakingOutput.occlusionMaskChannel] = 1.0f;
            }
        }
    }

    internal enum URPProfileId
    {
        // CPU
        UniversalRenderTotal,
        UpdateVolumeFramework,
        RenderCameraStack,

        // GPU
        AdditionalLightsShadow,
        ColorGradingLUT,
        CopyColor,
        CopyDepth,
        DepthNormalPrepass,
        DepthPrepass,

        // DrawObjectsPass
        DrawOpaqueObjects,
        DrawTransparentObjects,
        DrawMVOpaqueObjects,

        // RenderObjectsPass
        //RenderObjects,

        LightCookies,

        MainLightShadow,
        ResolveShadows,
        SSAO,

        // PostProcessPass
        StopNaNs,
        SMAA,
        GaussianDepthOfField,
        BokehDepthOfField,
        MotionBlur,
        PaniniProjection,
        UberPostProcess,
        Bloom,
        LensFlareDataDriven,
        MotionVectors,

        FinalBlit
    }
  791. }