// UniversalRenderPipeline.cs
  1. using System;
  2. using Unity.Collections;
  3. using System.Collections.Generic;
  4. #if UNITY_EDITOR
  5. using UnityEditor;
  6. using UnityEditor.Rendering.Universal;
  7. #endif
  8. using UnityEngine.Scripting.APIUpdating;
  9. using Lightmapping = UnityEngine.Experimental.GlobalIllumination.Lightmapping;
  10. #if ENABLE_VR && ENABLE_XR_MODULE
  11. using UnityEngine.XR;
  12. #endif
namespace UnityEngine.Rendering.LWRP
{
    // Upgrade shim: the "(UnityUpgradable)" marker in the Obsolete message lets the
    // script updater rewrite old LWRP references to
    // UnityEngine.Rendering.Universal.UniversalRenderPipeline. error: true means any
    // remaining use is a compile error.
    [Obsolete("LWRP -> Universal (UnityUpgradable) -> UnityEngine.Rendering.Universal.UniversalRenderPipeline", true)]
    public class LightweightRenderPipeline
    {
        public LightweightRenderPipeline(LightweightRenderPipelineAsset asset)
        {
        }
    }
}
  23. namespace UnityEngine.Rendering.Universal
  24. {
  25. public sealed partial class UniversalRenderPipeline : RenderPipeline
  26. {
        // Caches Shader.PropertyToID handles for global shader constants that are
        // updated once per frame. IDs are assigned in the pipeline constructor.
        internal static class PerFrameBuffer
        {
            public static int _GlossyEnvironmentColor;
            public static int _SubtractiveShadowColor;
            public static int _Time;
            public static int _SinTime;
            public static int _CosTime;
            public static int unity_DeltaTime;
            public static int _TimeParameters;
        }
        // Caches Shader.PropertyToID handles for shader constants that are updated
        // once per camera. IDs are assigned in the pipeline constructor.
        static internal class PerCameraBuffer
        {
            // TODO: This needs to account for stereo rendering
            public static int _InvCameraViewProj;
            public static int _ScaledScreenParams;
            public static int _ScreenParams;
            public static int _WorldSpaceCameraPos;
        }
        // Name identifying this pipeline; kept public for external comparisons.
        public const string k_ShaderTagName = "UniversalPipeline";

        // Label for the shared camera profiling sampler used when per-camera
        // profiling (PipelineDebugLevel.Profiling) is not enabled.
        const string k_RenderCameraTag = "Render Camera";
        static ProfilingSampler _CameraProfilingSampler = new ProfilingSampler(k_RenderCameraTag);
  48. public static float maxShadowBias
  49. {
  50. get => 10.0f;
  51. }
  52. public static float minRenderScale
  53. {
  54. get => 0.1f;
  55. }
  56. public static float maxRenderScale
  57. {
  58. get => 2.0f;
  59. }
  60. // Amount of Lights that can be shaded per object (in the for loop in the shader)
  61. public static int maxPerObjectLights
  62. {
  63. // No support to bitfield mask and int[] in gles2. Can't index fast more than 4 lights.
  64. // Check Lighting.hlsl for more details.
  65. get => (SystemInfo.graphicsDeviceType == GraphicsDeviceType.OpenGLES2) ? 4 : 8;
  66. }
        // These limits have to match same limits in Input.hlsl
        const int k_MaxVisibleAdditionalLightsSSBO = 256; // limit when the structured-buffer path is used
        const int k_MaxVisibleAdditionalLightsUBO = 32;   // limit when the uniform/constant-buffer path is used
  70. public static int maxVisibleAdditionalLights
  71. {
  72. get
  73. {
  74. // There are some performance issues by using SSBO in mobile.
  75. // Also some GPUs don't supports SSBO in vertex shader.
  76. if (RenderingUtils.useStructuredBuffer)
  77. return k_MaxVisibleAdditionalLightsSSBO;
  78. // We don't use SSBO in D3D because we can't figure out without adding shader variants if platforms is D3D10.
  79. // We don't use SSBO on Nintendo Switch as UBO path is faster.
  80. // However here we use same limits as SSBO path.
  81. var deviceType = SystemInfo.graphicsDeviceType;
  82. if (deviceType == GraphicsDeviceType.Direct3D11 || deviceType == GraphicsDeviceType.Direct3D12 ||
  83. deviceType == GraphicsDeviceType.Switch)
  84. return k_MaxVisibleAdditionalLightsSSBO;
  85. // We use less limits for mobile as some mobile GPUs have small SP cache for constants
  86. // Using more than 32 might cause spilling to main memory.
  87. return k_MaxVisibleAdditionalLightsUBO;
  88. }
  89. }
  90. // Internal max count for how many ScriptableRendererData can be added to a single Universal RP asset
  91. internal static int maxScriptableRenderers
  92. {
  93. get => 8;
  94. }
        /// <summary>
        /// Creates the pipeline instance: caches shader property IDs, syncs MSAA and
        /// XR settings from the asset, and registers pipeline-global state.
        /// </summary>
        /// <param name="asset">Asset holding the pipeline configuration (MSAA count, render scale, ...).</param>
        public UniversalRenderPipeline(UniversalRenderPipelineAsset asset)
        {
            SetSupportedRenderingFeatures();

            // Cache shader property IDs for per-frame constants.
            PerFrameBuffer._GlossyEnvironmentColor = Shader.PropertyToID("_GlossyEnvironmentColor");
            PerFrameBuffer._SubtractiveShadowColor = Shader.PropertyToID("_SubtractiveShadowColor");
            PerFrameBuffer._Time = Shader.PropertyToID("_Time");
            PerFrameBuffer._SinTime = Shader.PropertyToID("_SinTime");
            PerFrameBuffer._CosTime = Shader.PropertyToID("_CosTime");
            PerFrameBuffer.unity_DeltaTime = Shader.PropertyToID("unity_DeltaTime");
            PerFrameBuffer._TimeParameters = Shader.PropertyToID("_TimeParameters");

            // Cache shader property IDs for per-camera constants.
            PerCameraBuffer._InvCameraViewProj = Shader.PropertyToID("_InvCameraViewProj");
            PerCameraBuffer._ScreenParams = Shader.PropertyToID("_ScreenParams");
            PerCameraBuffer._ScaledScreenParams = Shader.PropertyToID("_ScaledScreenParams");
            PerCameraBuffer._WorldSpaceCameraPos = Shader.PropertyToID("_WorldSpaceCameraPos");

            // Let engine know we have MSAA on for cases where we support MSAA backbuffer
            if (QualitySettings.antiAliasing != asset.msaaSampleCount)
            {
                QualitySettings.antiAliasing = asset.msaaSampleCount;
#if ENABLE_VR && ENABLE_VR_MODULE
                // Keep XR eye textures' MSAA in sync with the quality setting just applied.
                XR.XRDevice.UpdateEyeTextureMSAASetting();
#endif
            }

#if ENABLE_VR && ENABLE_VR_MODULE
            XRGraphics.eyeTextureResolutionScale = asset.renderScale;
#endif

            // For compatibility reasons we also match old LightweightPipeline tag.
            Shader.globalRenderPipeline = "UniversalPipeline,LightweightPipeline";

            Lightmapping.SetDelegate(lightsDelegate);
            CameraCaptureBridge.enabled = true;
            RenderingUtils.ClearSystemInfoCache();
        }
        /// <summary>
        /// Tears down the pipeline-global state registered in the constructor:
        /// clears the global pipeline tag, restores default rendering features,
        /// releases shader data and unregisters delegates/bridges.
        /// </summary>
        /// <param name="disposing">Forwarded to the base RenderPipeline dispose.</param>
        protected override void Dispose(bool disposing)
        {
            base.Dispose(disposing);

            // Undo the "UniversalPipeline,LightweightPipeline" tag set in the constructor.
            Shader.globalRenderPipeline = "";
            SupportedRenderingFeatures.active = new SupportedRenderingFeatures();
            ShaderData.instance.Dispose();

#if UNITY_EDITOR
            SceneViewDrawMode.ResetDrawMode();
#endif
            Lightmapping.ResetDelegate();
            CameraCaptureBridge.enabled = false;
        }
  138. #if ENABLE_VR && ENABLE_XR_MODULE
  139. static List<XRDisplaySubsystem> xrDisplayList = new List<XRDisplaySubsystem>();
  140. static bool xrSkipRender = false;
  141. internal void SetupXRStates()
  142. {
  143. SubsystemManager.GetInstances(xrDisplayList);
  144. if (xrDisplayList.Count > 0)
  145. {
  146. if (xrDisplayList.Count > 1)
  147. throw new NotImplementedException("Only 1 XR display is supported.");
  148. XRDisplaySubsystem display = xrDisplayList[0];
  149. if(display.GetRenderPassCount() == 0)
  150. {
  151. // Disable XR rendering if display contains 0 renderpass
  152. if(!xrSkipRender)
  153. {
  154. xrSkipRender = true;
  155. Debug.Log("XR display is not ready. Skip XR rendering.");
  156. }
  157. }
  158. else
  159. {
  160. // Enable XR rendering if display contains >0 renderpass
  161. if (xrSkipRender)
  162. {
  163. xrSkipRender = false;
  164. Debug.Log("XR display is ready. Start XR rendering.");
  165. }
  166. }
  167. }
  168. }
  169. #endif
  170. protected override void Render(ScriptableRenderContext renderContext, Camera[] cameras)
  171. {
  172. BeginFrameRendering(renderContext, cameras);
  173. GraphicsSettings.lightsUseLinearIntensity = (QualitySettings.activeColorSpace == ColorSpace.Linear);
  174. GraphicsSettings.useScriptableRenderPipelineBatching = asset.useSRPBatcher;
  175. SetupPerFrameShaderConstants();
  176. #if ENABLE_VR && ENABLE_XR_MODULE
  177. SetupXRStates();
  178. if(xrSkipRender)
  179. return;
  180. #endif
  181. SortCameras(cameras);
  182. for (int i = 0; i < cameras.Length; ++i)
  183. {
  184. var camera = cameras[i];
  185. if (IsGameCamera(camera))
  186. {
  187. RenderCameraStack(renderContext, camera);
  188. }
  189. else
  190. {
  191. BeginCameraRendering(renderContext, camera);
  192. #if VISUAL_EFFECT_GRAPH_0_0_1_OR_NEWER
  193. //It should be called before culling to prepare material. When there isn't any VisualEffect component, this method has no effect.
  194. VFX.VFXManager.PrepareCamera(camera);
  195. #endif
  196. UpdateVolumeFramework(camera, null);
  197. RenderSingleCamera(renderContext, camera);
  198. EndCameraRendering(renderContext, camera);
  199. }
  200. }
  201. EndFrameRendering(renderContext, cameras);
  202. }
  203. /// <summary>
  204. /// Standalone camera rendering. Use this to render procedural cameras.
  205. /// This method doesn't call <c>BeginCameraRendering</c> and <c>EndCameraRendering</c> callbacks.
  206. /// </summary>
  207. /// <param name="context">Render context used to record commands during execution.</param>
  208. /// <param name="camera">Camera to render.</param>
  209. /// <seealso cref="ScriptableRenderContext"/>
  210. public static void RenderSingleCamera(ScriptableRenderContext context, Camera camera)
  211. {
  212. UniversalAdditionalCameraData additionalCameraData = null;
  213. if (IsGameCamera(camera))
  214. camera.gameObject.TryGetComponent(out additionalCameraData);
  215. if (additionalCameraData != null && additionalCameraData.renderType != CameraRenderType.Base)
  216. {
  217. Debug.LogWarning("Only Base cameras can be rendered with standalone RenderSingleCamera. Camera will be skipped.");
  218. return;
  219. }
  220. InitializeCameraData(camera, additionalCameraData, out var cameraData);
  221. RenderSingleCamera(context, cameraData, true, cameraData.postProcessEnabled);
  222. }
        /// <summary>
        /// Renders a single camera. This method will do culling, setup and execution of the renderer.
        /// </summary>
        /// <param name="context">Render context used to record commands during execution.</param>
        /// <param name="cameraData">Camera rendering data. This might contain data inherited from a base camera.</param>
        /// <param name="requiresBlitToBackbuffer">True if this is the last camera in the stack rendering, false otherwise.</param>
        /// <param name="anyPostProcessingEnabled">True if at least one camera has post-processing enabled in the stack, false otherwise.</param>
        static void RenderSingleCamera(ScriptableRenderContext context, CameraData cameraData, bool requiresBlitToBackbuffer, bool anyPostProcessingEnabled)
        {
            Camera camera = cameraData.camera;
            var renderer = cameraData.renderer;
            if (renderer == null)
            {
                Debug.LogWarning(string.Format("Trying to render {0} with an invalid renderer. Camera rendering will be skipped.", camera.name));
                return;
            }

            // Bail out silently if culling parameters cannot be obtained.
            if (!camera.TryGetCullingParameters(IsStereoEnabled(camera), out var cullingParameters))
                return;

            SetupPerCameraShaderConstants(cameraData);

            // Per-camera named sampler only at the Profiling debug level; otherwise reuse the shared static one.
            ProfilingSampler sampler = (asset.debugLevel >= PipelineDebugLevel.Profiling) ? new ProfilingSampler(camera.name): _CameraProfilingSampler;
            CommandBuffer cmd = CommandBufferPool.Get(sampler.name);
            using (new ProfilingScope(cmd, sampler))
            {
                renderer.Clear(cameraData.renderType);
                renderer.SetupCullingParameters(ref cullingParameters, ref cameraData);

                // Flush the profiling-begin commands recorded by ProfilingScope before culling.
                context.ExecuteCommandBuffer(cmd);
                cmd.Clear();

#if UNITY_EDITOR
                // Emit scene view UI
                if (cameraData.isSceneViewCamera)
                {
                    ScriptableRenderContext.EmitWorldGeometryForSceneView(camera);
                }
#endif

                var cullResults = context.Cull(ref cullingParameters);
                InitializeRenderingData(asset, ref cameraData, ref cullResults, requiresBlitToBackbuffer, anyPostProcessingEnabled, out var renderingData);

                renderer.Setup(context, ref renderingData);
                renderer.Execute(context, ref renderingData);
            }

            // Flush the profiling-end commands, then submit all recorded work to the context.
            context.ExecuteCommandBuffer(cmd);
            CommandBufferPool.Release(cmd);
            context.Submit();
        }
  266. /// <summary>
  267. // Renders a camera stack. This method calls RenderSingleCamera for each valid camera in the stack.
  268. // The last camera resolves the final target to screen.
  269. /// </summary>
  270. /// <param name="context">Render context used to record commands during execution.</param>
  271. /// <param name="camera">Camera to render.</param>
  272. static void RenderCameraStack(ScriptableRenderContext context, Camera baseCamera)
  273. {
  274. baseCamera.TryGetComponent<UniversalAdditionalCameraData>(out var baseCameraAdditionalData);
  275. // Overlay cameras will be rendered stacked while rendering base cameras
  276. if (baseCameraAdditionalData != null && baseCameraAdditionalData.renderType == CameraRenderType.Overlay)
  277. return;
  278. // renderer contains a stack if it has additional data and the renderer supports stacking
  279. var renderer = baseCameraAdditionalData?.scriptableRenderer;
  280. bool supportsCameraStacking = renderer != null && renderer.supportedRenderingFeatures.cameraStacking;
  281. List<Camera> cameraStack = (supportsCameraStacking) ? baseCameraAdditionalData?.cameraStack : null;
  282. bool anyPostProcessingEnabled = baseCameraAdditionalData != null && baseCameraAdditionalData.renderPostProcessing;
  283. anyPostProcessingEnabled &= SystemInfo.graphicsDeviceType != GraphicsDeviceType.OpenGLES2;
  284. // We need to know the last active camera in the stack to be able to resolve
  285. // rendering to screen when rendering it. The last camera in the stack is not
  286. // necessarily the last active one as it users might disable it.
  287. int lastActiveOverlayCameraIndex = -1;
  288. if (cameraStack != null && cameraStack.Count > 0)
  289. {
  290. #if POST_PROCESSING_STACK_2_0_0_OR_NEWER
  291. if (asset.postProcessingFeatureSet != PostProcessingFeatureSet.PostProcessingV2)
  292. {
  293. #endif
  294. // TODO: Add support to camera stack in VR multi pass mode
  295. if (!IsMultiPassStereoEnabled(baseCamera))
  296. {
  297. var baseCameraRendererType = baseCameraAdditionalData?.scriptableRenderer.GetType();
  298. for (int i = 0; i < cameraStack.Count; ++i)
  299. {
  300. Camera currCamera = cameraStack[i];
  301. if (currCamera != null && currCamera.isActiveAndEnabled)
  302. {
  303. currCamera.TryGetComponent<UniversalAdditionalCameraData>(out var data);
  304. if (data == null || data.renderType != CameraRenderType.Overlay)
  305. {
  306. Debug.LogWarning(string.Format("Stack can only contain Overlay cameras. {0} will skip rendering.", currCamera.name));
  307. }
  308. else if (data?.scriptableRenderer.GetType() != baseCameraRendererType)
  309. {
  310. Debug.LogWarning(string.Format("Only cameras with the same renderer type as the base camera can be stacked. {0} will skip rendering", currCamera.name));
  311. }
  312. else
  313. {
  314. anyPostProcessingEnabled |= data.renderPostProcessing;
  315. lastActiveOverlayCameraIndex = i;
  316. }
  317. }
  318. }
  319. }
  320. else
  321. {
  322. Debug.LogWarning("Multi pass stereo mode doesn't support Camera Stacking. Overlay cameras will skip rendering.");
  323. }
  324. #if POST_PROCESSING_STACK_2_0_0_OR_NEWER
  325. }
  326. else
  327. {
  328. Debug.LogWarning("Post-processing V2 doesn't support Camera Stacking. Overlay cameras will skip rendering.");
  329. }
  330. #endif
  331. }
  332. bool isStackedRendering = lastActiveOverlayCameraIndex != -1;
  333. BeginCameraRendering(context, baseCamera);
  334. #if VISUAL_EFFECT_GRAPH_0_0_1_OR_NEWER
  335. //It should be called before culling to prepare material. When there isn't any VisualEffect component, this method has no effect.
  336. VFX.VFXManager.PrepareCamera(baseCamera);
  337. #endif
  338. UpdateVolumeFramework(baseCamera, baseCameraAdditionalData);
  339. InitializeCameraData(baseCamera, baseCameraAdditionalData, out var baseCameraData);
  340. RenderSingleCamera(context, baseCameraData, !isStackedRendering, anyPostProcessingEnabled);
  341. EndCameraRendering(context, baseCamera);
  342. if (!isStackedRendering)
  343. return;
  344. for (int i = 0; i < cameraStack.Count; ++i)
  345. {
  346. var currCamera = cameraStack[i];
  347. if (!currCamera.isActiveAndEnabled)
  348. continue;
  349. currCamera.TryGetComponent<UniversalAdditionalCameraData>(out var currCameraData);
  350. // Camera is overlay and enabled
  351. if (currCameraData != null)
  352. {
  353. // Copy base settings from base camera data and initialize initialize remaining specific settings for this camera type.
  354. CameraData overlayCameraData = baseCameraData;
  355. bool lastCamera = i == lastActiveOverlayCameraIndex;
  356. BeginCameraRendering(context, currCamera);
  357. #if VISUAL_EFFECT_GRAPH_0_0_1_OR_NEWER
  358. //It should be called before culling to prepare material. When there isn't any VisualEffect component, this method has no effect.
  359. VFX.VFXManager.PrepareCamera(currCamera);
  360. #endif
  361. UpdateVolumeFramework(currCamera, currCameraData);
  362. InitializeAdditionalCameraData(currCamera, currCameraData, ref overlayCameraData);
  363. RenderSingleCamera(context, overlayCameraData, lastCamera, anyPostProcessingEnabled);
  364. EndCameraRendering(context, currCamera);
  365. }
  366. }
  367. }
  368. static void UpdateVolumeFramework(Camera camera, UniversalAdditionalCameraData additionalCameraData)
  369. {
  370. // Default values when there's no additional camera data available
  371. LayerMask layerMask = 1; // "Default"
  372. Transform trigger = camera.transform;
  373. if (additionalCameraData != null)
  374. {
  375. layerMask = additionalCameraData.volumeLayerMask;
  376. trigger = additionalCameraData.volumeTrigger != null
  377. ? additionalCameraData.volumeTrigger
  378. : trigger;
  379. }
  380. else if (camera.cameraType == CameraType.SceneView)
  381. {
  382. // Try to mirror the MainCamera volume layer mask for the scene view - do not mirror the target
  383. var mainCamera = Camera.main;
  384. UniversalAdditionalCameraData mainAdditionalCameraData = null;
  385. if (mainCamera != null && mainCamera.TryGetComponent(out mainAdditionalCameraData))
  386. layerMask = mainAdditionalCameraData.volumeLayerMask;
  387. trigger = mainAdditionalCameraData != null && mainAdditionalCameraData.volumeTrigger != null ? mainAdditionalCameraData.volumeTrigger : trigger;
  388. }
  389. VolumeManager.instance.Update(trigger, layerMask);
  390. }
  391. static bool CheckPostProcessForDepth(in CameraData cameraData)
  392. {
  393. if (!cameraData.postProcessEnabled)
  394. return false;
  395. if (cameraData.antialiasing == AntialiasingMode.SubpixelMorphologicalAntiAliasing)
  396. return true;
  397. var stack = VolumeManager.instance.stack;
  398. if (stack.GetComponent<DepthOfField>().IsActive())
  399. return true;
  400. if (stack.GetComponent<MotionBlur>().IsActive())
  401. return true;
  402. return false;
  403. }
        // Declares which lighting/baking features this pipeline supports by
        // assigning SupportedRenderingFeatures.active, and sets up the scene view
        // draw modes. Editor-only: compiles to an empty method in players.
        static void SetSupportedRenderingFeatures()
        {
#if UNITY_EDITOR
            SupportedRenderingFeatures.active = new SupportedRenderingFeatures()
            {
                reflectionProbeModes = SupportedRenderingFeatures.ReflectionProbeModes.None,
                defaultMixedLightingModes = SupportedRenderingFeatures.LightmapMixedBakeModes.Subtractive,
                mixedLightingModes = SupportedRenderingFeatures.LightmapMixedBakeModes.Subtractive | SupportedRenderingFeatures.LightmapMixedBakeModes.IndirectOnly,
                lightmapBakeTypes = LightmapBakeType.Baked | LightmapBakeType.Mixed,
                lightmapsModes = LightmapsMode.CombinedDirectional | LightmapsMode.NonDirectional,
                lightProbeProxyVolumes = false,
                motionVectors = false,
                receiveShadows = false,
                reflectionProbes = true
            };
            SceneViewDrawMode.SetupDrawMode();
#endif
        }
  422. static void InitializeCameraData(Camera camera, UniversalAdditionalCameraData additionalCameraData, out CameraData cameraData)
  423. {
  424. cameraData = new CameraData();
  425. InitializeStackedCameraData(camera, additionalCameraData, ref cameraData);
  426. InitializeAdditionalCameraData(camera, additionalCameraData, ref cameraData);
  427. }
  428. #if ENABLE_VR && ENABLE_XR_MODULE
  429. static List<XR.XRDisplaySubsystem> displaySubsystemList = new List<XR.XRDisplaySubsystem>();
  430. static bool CanXRSDKUseSinglePass(Camera camera)
  431. {
  432. XR.XRDisplaySubsystem display = null;
  433. SubsystemManager.GetInstances(displaySubsystemList);
  434. if (displaySubsystemList.Count > 0)
  435. {
  436. XR.XRDisplaySubsystem.XRRenderPass renderPass;
  437. display = displaySubsystemList[0];
  438. if (display.GetRenderPassCount() > 0)
  439. {
  440. display.GetRenderPass(0, out renderPass);
  441. if (renderPass.renderTargetDesc.dimension != TextureDimension.Tex2DArray)
  442. return false;
  443. if (renderPass.GetRenderParameterCount() != 2 || renderPass.renderTargetDesc.volumeDepth != 2)
  444. return false;
  445. renderPass.GetRenderParameter(camera, 0, out var renderParam0);
  446. renderPass.GetRenderParameter(camera, 1, out var renderParam1);
  447. if (renderParam0.textureArraySlice != 0 || renderParam1.textureArraySlice != 1)
  448. return false;
  449. if (renderParam0.viewport != renderParam1.viewport)
  450. return false;
  451. return true;
  452. }
  453. }
  454. return false;
  455. }
  456. #endif
        /// <summary>
        /// Initialize camera data settings common for all cameras in the stack. Overlay cameras will inherit
        /// settings from base camera.
        /// </summary>
        /// <param name="baseCamera">Base camera to inherit settings from.</param>
        /// <param name="baseAdditionalCameraData">Component that contains additional base camera data.</param>
        /// <param name="cameraData">Camera data to initialize settings.</param>
        static void InitializeStackedCameraData(Camera baseCamera, UniversalAdditionalCameraData baseAdditionalCameraData, ref CameraData cameraData)
        {
            var settings = asset;
            cameraData.targetTexture = baseCamera.targetTexture;
            cameraData.isStereoEnabled = IsStereoEnabled(baseCamera);
            cameraData.isSceneViewCamera = baseCamera.cameraType == CameraType.SceneView;

            // Default to single pass; switched to 2-pass multipass below when the XR SDK can't do single pass.
            cameraData.numberOfXRPasses = 1;
            cameraData.isXRMultipass = false;

#if ENABLE_VR && ENABLE_VR_MODULE
            // NOTE(review): CanXRSDKUseSinglePass is declared under ENABLE_XR_MODULE but called here
            // under ENABLE_VR_MODULE — confirm both defines are always set together in supported configs.
            if (cameraData.isStereoEnabled && !cameraData.isSceneViewCamera &&
                !CanXRSDKUseSinglePass(baseCamera) && XR.XRSettings.stereoRenderingMode == XR.XRSettings.StereoRenderingMode.MultiPass)
            {
                cameraData.numberOfXRPasses = 2;
                cameraData.isXRMultipass = true;
            }
#endif

            ///////////////////////////////////////////////////////////////////
            // Environment and Post-processing settings                       /
            ///////////////////////////////////////////////////////////////////
            if (cameraData.isSceneViewCamera)
            {
                cameraData.volumeLayerMask = 1; // "Default"
                cameraData.volumeTrigger = null;
                cameraData.isStopNaNEnabled = false;
                cameraData.isDitheringEnabled = false;
                cameraData.antialiasing = AntialiasingMode.None;
                cameraData.antialiasingQuality = AntialiasingQuality.High;
            }
            else if (baseAdditionalCameraData != null)
            {
                cameraData.volumeLayerMask = baseAdditionalCameraData.volumeLayerMask;
                cameraData.volumeTrigger = baseAdditionalCameraData.volumeTrigger == null ? baseCamera.transform : baseAdditionalCameraData.volumeTrigger;
                // StopNaN requires shader level 3.5+.
                cameraData.isStopNaNEnabled = baseAdditionalCameraData.stopNaN && SystemInfo.graphicsShaderLevel >= 35;
                cameraData.isDitheringEnabled = baseAdditionalCameraData.dithering;
                cameraData.antialiasing = baseAdditionalCameraData.antialiasing;
                cameraData.antialiasingQuality = baseAdditionalCameraData.antialiasingQuality;
            }
            else
            {
                // No additional data component: same defaults as the scene view branch.
                cameraData.volumeLayerMask = 1; // "Default"
                cameraData.volumeTrigger = null;
                cameraData.isStopNaNEnabled = false;
                cameraData.isDitheringEnabled = false;
                cameraData.antialiasing = AntialiasingMode.None;
                cameraData.antialiasingQuality = AntialiasingQuality.High;
            }

            ///////////////////////////////////////////////////////////////////
            // Settings that control output of the camera                     /
            ///////////////////////////////////////////////////////////////////
            // Render-texture targets use their own MSAA count; otherwise the asset's.
            int msaaSamples = 1;
            if (baseCamera.allowMSAA && settings.msaaSampleCount > 1)
                msaaSamples = (baseCamera.targetTexture != null) ? baseCamera.targetTexture.antiAliasing : settings.msaaSampleCount;

            cameraData.isHdrEnabled = baseCamera.allowHDR && settings.supportsHDR;

            Rect cameraRect = baseCamera.rect;
            cameraData.pixelRect = baseCamera.pixelRect;
            cameraData.pixelWidth = baseCamera.pixelWidth;
            cameraData.pixelHeight = baseCamera.pixelHeight;
            cameraData.aspectRatio = (float)cameraData.pixelWidth / (float)cameraData.pixelHeight;
            // Default viewport == full-screen rect: origin at (0,0), full width and height.
            cameraData.isDefaultViewport = (!(Math.Abs(cameraRect.x) > 0.0f || Math.Abs(cameraRect.y) > 0.0f ||
                Math.Abs(cameraRect.width) < 1.0f || Math.Abs(cameraRect.height) < 1.0f));

            // If XR is enabled, use XR renderScale.
            // Discard variations lesser than kRenderScaleThreshold.
            // Scale is only enabled for gameview.
            const float kRenderScaleThreshold = 0.05f;
            float usedRenderScale = XRGraphics.enabled ? XRGraphics.eyeTextureResolutionScale : settings.renderScale;
            cameraData.renderScale = (Mathf.Abs(1.0f - usedRenderScale) < kRenderScaleThreshold) ? 1.0f : usedRenderScale;

            var commonOpaqueFlags = SortingCriteria.CommonOpaque;
            var noFrontToBackOpaqueFlags = SortingCriteria.SortingLayer | SortingCriteria.RenderQueue | SortingCriteria.OptimizeStateChanges | SortingCriteria.CanvasOrder;
            bool hasHSRGPU = SystemInfo.hasHiddenSurfaceRemovalOnGPU;
            // Front-to-back sorting can be skipped when the GPU removes hidden surfaces itself,
            // or when the camera explicitly opts out of distance sorting.
            bool canSkipFrontToBackSorting = (baseCamera.opaqueSortMode == OpaqueSortMode.Default && hasHSRGPU) || baseCamera.opaqueSortMode == OpaqueSortMode.NoDistanceSort;
            cameraData.defaultOpaqueSortFlags = canSkipFrontToBackSorting ? noFrontToBackOpaqueFlags : commonOpaqueFlags;

            cameraData.captureActions = CameraCaptureBridge.GetCaptureActions(baseCamera);

            bool needsAlphaChannel = Graphics.preserveFramebufferAlpha;
            cameraData.cameraTargetDescriptor = CreateRenderTextureDescriptor(baseCamera, cameraData.renderScale,
                cameraData.isStereoEnabled, cameraData.isHdrEnabled, msaaSamples, needsAlphaChannel);
        }
        /// <summary>
        /// Initialize settings that can be different for each camera in the stack.
        /// </summary>
        /// <param name="camera">Camera to initialize settings from.</param>
        /// <param name="additionalCameraData">Additional camera data component to initialize settings from.</param>
        /// <param name="cameraData">Settings to be initialized.</param>
        static void InitializeAdditionalCameraData(Camera camera, UniversalAdditionalCameraData additionalCameraData, ref CameraData cameraData)
        {
            var settings = asset;
            cameraData.camera = camera;

            // Clamp shadow distance to the far plane; a value of 0 disables shadows.
            bool anyShadowsEnabled = settings.supportsMainLightShadows || settings.supportsAdditionalLightShadows;
            cameraData.maxShadowDistance = Mathf.Min(settings.shadowDistance, camera.farClipPlane);
            cameraData.maxShadowDistance = (anyShadowsEnabled && cameraData.maxShadowDistance >= camera.nearClipPlane) ?
                cameraData.maxShadowDistance : 0.0f;

            cameraData.viewMatrix = camera.worldToCameraMatrix;
            // Overlay cameras inherit viewport from base.
            // If the viewport is different between them we might need to patch the projection
            // matrix to prevent squishing when rendering objects in overlay cameras.
            cameraData.projectionMatrix = (!camera.orthographic && !cameraData.isStereoEnabled && cameraData.pixelRect != camera.pixelRect) ?
                Matrix4x4.Perspective(camera.fieldOfView, cameraData.aspectRatio, camera.nearClipPlane, camera.farClipPlane) :
                camera.projectionMatrix;

            if (cameraData.isSceneViewCamera)
            {
                cameraData.renderType = CameraRenderType.Base;
                cameraData.clearDepth = true;
                cameraData.postProcessEnabled = CoreUtils.ArePostProcessesEnabled(camera);
                cameraData.requiresDepthTexture = settings.supportsCameraDepthTexture;
                cameraData.requiresOpaqueTexture = settings.supportsCameraOpaqueTexture;
                cameraData.renderer = asset.scriptableRenderer;
            }
            else if (additionalCameraData != null)
            {
                cameraData.renderType = additionalCameraData.renderType;
                // Base cameras always clear depth; overlays honor their own clearDepth setting.
                cameraData.clearDepth = (additionalCameraData.renderType != CameraRenderType.Base) ? additionalCameraData.clearDepth : true;
                cameraData.postProcessEnabled = additionalCameraData.renderPostProcessing;
                cameraData.maxShadowDistance = (additionalCameraData.renderShadows) ? cameraData.maxShadowDistance : 0.0f;
                cameraData.requiresDepthTexture = additionalCameraData.requiresDepthTexture;
                cameraData.requiresOpaqueTexture = additionalCameraData.requiresColorTexture;
                cameraData.renderer = additionalCameraData.scriptableRenderer;
            }
            else
            {
                // No additional data component: behave like a default Base camera.
                cameraData.renderType = CameraRenderType.Base;
                cameraData.clearDepth = true;
                cameraData.postProcessEnabled = false;
                cameraData.requiresDepthTexture = settings.supportsCameraDepthTexture;
                cameraData.requiresOpaqueTexture = settings.supportsCameraOpaqueTexture;
                cameraData.renderer = asset.scriptableRenderer;
            }

            // Disable depth and color copy. We should add it in the renderer instead to avoid performance pitfalls
            // of camera stacking breaking render pass execution implicitly.
            if (cameraData.renderType == CameraRenderType.Overlay)
            {
                cameraData.requiresDepthTexture = false;
                cameraData.requiresOpaqueTexture = false;
            }

            // Disables post if GLes2
            cameraData.postProcessEnabled &= SystemInfo.graphicsDeviceType != GraphicsDeviceType.OpenGLES2;

#if POST_PROCESSING_STACK_2_0_0_OR_NEWER
#pragma warning disable 0618 // Obsolete
            if (settings.postProcessingFeatureSet == PostProcessingFeatureSet.PostProcessingV2)
            {
                camera.TryGetComponent(out cameraData.postProcessLayer);
                cameraData.postProcessEnabled &= cameraData.postProcessLayer != null && cameraData.postProcessLayer.isActiveAndEnabled;
            }

            // PPv2 needs depth whenever post is enabled; the integrated stack only for specific effects.
            bool depthRequiredForPostFX = settings.postProcessingFeatureSet == PostProcessingFeatureSet.PostProcessingV2
                ? cameraData.postProcessEnabled
                : CheckPostProcessForDepth(cameraData);
#pragma warning restore 0618
#else
            bool depthRequiredForPostFX = CheckPostProcessForDepth(cameraData);
#endif

            cameraData.requiresDepthTexture |= cameraData.isSceneViewCamera || depthRequiredForPostFX;
        }
  614. static void InitializeRenderingData(UniversalRenderPipelineAsset settings, ref CameraData cameraData, ref CullingResults cullResults,
  615. bool requiresBlitToBackbuffer, bool anyPostProcessingEnabled, out RenderingData renderingData)
  616. {
  617. var visibleLights = cullResults.visibleLights;
  618. int mainLightIndex = GetMainLightIndex(settings, visibleLights);
  619. bool mainLightCastShadows = false;
  620. bool additionalLightsCastShadows = false;
  621. if (cameraData.maxShadowDistance > 0.0f)
  622. {
  623. mainLightCastShadows = (mainLightIndex != -1 && visibleLights[mainLightIndex].light != null &&
  624. visibleLights[mainLightIndex].light.shadows != LightShadows.None);
  625. // If additional lights are shaded per-pixel they cannot cast shadows
  626. if (settings.additionalLightsRenderingMode == LightRenderingMode.PerPixel)
  627. {
  628. for (int i = 0; i < visibleLights.Length; ++i)
  629. {
  630. if (i == mainLightIndex)
  631. continue;
  632. Light light = visibleLights[i].light;
  633. // UniversalRP doesn't support additional directional lights or point light shadows yet
  634. if (visibleLights[i].lightType == LightType.Spot && light != null && light.shadows != LightShadows.None)
  635. {
  636. additionalLightsCastShadows = true;
  637. break;
  638. }
  639. }
  640. }
  641. }
  642. renderingData.cullResults = cullResults;
  643. renderingData.cameraData = cameraData;
  644. InitializeLightData(settings, visibleLights, mainLightIndex, out renderingData.lightData);
  645. InitializeShadowData(settings, visibleLights, mainLightCastShadows, additionalLightsCastShadows && !renderingData.lightData.shadeAdditionalLightsPerVertex, out renderingData.shadowData);
  646. InitializePostProcessingData(settings, out renderingData.postProcessingData);
  647. renderingData.supportsDynamicBatching = settings.supportsDynamicBatching;
  648. renderingData.perObjectData = GetPerObjectLightFlags(renderingData.lightData.additionalLightsCount);
  649. bool isOffscreenCamera = cameraData.targetTexture != null && !cameraData.isSceneViewCamera;
  650. renderingData.resolveFinalTarget = requiresBlitToBackbuffer;
  651. renderingData.postProcessingEnabled = anyPostProcessingEnabled;
  652. #pragma warning disable // avoid warning because killAlphaInFinalBlit has attribute Obsolete
  653. renderingData.killAlphaInFinalBlit = false;
  654. #pragma warning restore
  655. }
  656. static void InitializeShadowData(UniversalRenderPipelineAsset settings, NativeArray<VisibleLight> visibleLights, bool mainLightCastShadows, bool additionalLightsCastShadows, out ShadowData shadowData)
  657. {
  658. m_ShadowBiasData.Clear();
  659. for (int i = 0; i < visibleLights.Length; ++i)
  660. {
  661. Light light = visibleLights[i].light;
  662. UniversalAdditionalLightData data = null;
  663. if (light != null)
  664. {
  665. #if UNITY_2019_3_OR_NEWER
  666. light.gameObject.TryGetComponent(out data);
  667. #else
  668. data = light.gameObject.GetComponent<UniversalAdditionalLightData>();
  669. #endif
  670. }
  671. if (data && !data.usePipelineSettings)
  672. m_ShadowBiasData.Add(new Vector4(light.shadowBias, light.shadowNormalBias, 0.0f, 0.0f));
  673. else
  674. m_ShadowBiasData.Add(new Vector4(settings.shadowDepthBias, settings.shadowNormalBias, 0.0f, 0.0f));
  675. }
  676. shadowData.bias = m_ShadowBiasData;
  677. shadowData.supportsMainLightShadows = SystemInfo.supportsShadows && settings.supportsMainLightShadows && mainLightCastShadows;
  678. // We no longer use screen space shadows in URP.
  679. // This change allows us to have particles & transparent objects receive shadows.
  680. shadowData.requiresScreenSpaceShadowResolve = false;// shadowData.supportsMainLightShadows && supportsScreenSpaceShadows && settings.shadowCascadeOption != ShadowCascadesOption.NoCascades;
  681. int shadowCascadesCount;
  682. switch (settings.shadowCascadeOption)
  683. {
  684. case ShadowCascadesOption.FourCascades:
  685. shadowCascadesCount = 4;
  686. break;
  687. case ShadowCascadesOption.TwoCascades:
  688. shadowCascadesCount = 2;
  689. break;
  690. default:
  691. shadowCascadesCount = 1;
  692. break;
  693. }
  694. shadowData.mainLightShadowCascadesCount = shadowCascadesCount;//(shadowData.requiresScreenSpaceShadowResolve) ? shadowCascadesCount : 1;
  695. shadowData.mainLightShadowmapWidth = settings.mainLightShadowmapResolution;
  696. shadowData.mainLightShadowmapHeight = settings.mainLightShadowmapResolution;
  697. switch (shadowData.mainLightShadowCascadesCount)
  698. {
  699. case 1:
  700. shadowData.mainLightShadowCascadesSplit = new Vector3(1.0f, 0.0f, 0.0f);
  701. break;
  702. case 2:
  703. shadowData.mainLightShadowCascadesSplit = new Vector3(settings.cascade2Split, 1.0f, 0.0f);
  704. break;
  705. default:
  706. shadowData.mainLightShadowCascadesSplit = settings.cascade4Split;
  707. break;
  708. }
  709. shadowData.supportsAdditionalLightShadows = SystemInfo.supportsShadows && settings.supportsAdditionalLightShadows && additionalLightsCastShadows;
  710. shadowData.additionalLightsShadowmapWidth = shadowData.additionalLightsShadowmapHeight = settings.additionalLightsShadowmapResolution;
  711. shadowData.supportsSoftShadows = settings.supportsSoftShadows && (shadowData.supportsMainLightShadows || shadowData.supportsAdditionalLightShadows);
  712. shadowData.shadowmapDepthBufferBits = 16;
  713. }
  714. static void InitializePostProcessingData(UniversalRenderPipelineAsset settings, out PostProcessingData postProcessingData)
  715. {
  716. postProcessingData.gradingMode = settings.supportsHDR
  717. ? settings.colorGradingMode
  718. : ColorGradingMode.LowDynamicRange;
  719. postProcessingData.lutSize = settings.colorGradingLutSize;
  720. }
  721. static void InitializeLightData(UniversalRenderPipelineAsset settings, NativeArray<VisibleLight> visibleLights, int mainLightIndex, out LightData lightData)
  722. {
  723. int maxPerObjectAdditionalLights = UniversalRenderPipeline.maxPerObjectLights;
  724. int maxVisibleAdditionalLights = UniversalRenderPipeline.maxVisibleAdditionalLights;
  725. lightData.mainLightIndex = mainLightIndex;
  726. if (settings.additionalLightsRenderingMode != LightRenderingMode.Disabled)
  727. {
  728. lightData.additionalLightsCount =
  729. Math.Min((mainLightIndex != -1) ? visibleLights.Length - 1 : visibleLights.Length,
  730. maxVisibleAdditionalLights);
  731. lightData.maxPerObjectAdditionalLightsCount = Math.Min(settings.maxAdditionalLightsCount, maxPerObjectAdditionalLights);
  732. }
  733. else
  734. {
  735. lightData.additionalLightsCount = 0;
  736. lightData.maxPerObjectAdditionalLightsCount = 0;
  737. }
  738. lightData.shadeAdditionalLightsPerVertex = settings.additionalLightsRenderingMode == LightRenderingMode.PerVertex;
  739. lightData.visibleLights = visibleLights;
  740. lightData.supportsMixedLighting = settings.supportsMixedLighting;
  741. }
  742. static PerObjectData GetPerObjectLightFlags(int additionalLightsCount)
  743. {
  744. var configuration = PerObjectData.ReflectionProbes | PerObjectData.Lightmaps | PerObjectData.LightProbe | PerObjectData.LightData | PerObjectData.OcclusionProbe;
  745. if (additionalLightsCount > 0)
  746. {
  747. configuration |= PerObjectData.LightData;
  748. // In this case we also need per-object indices (unity_LightIndices)
  749. if (!RenderingUtils.useStructuredBuffer)
  750. configuration |= PerObjectData.LightIndices;
  751. }
  752. return configuration;
  753. }
  754. // Main Light is always a directional light
  755. static int GetMainLightIndex(UniversalRenderPipelineAsset settings, NativeArray<VisibleLight> visibleLights)
  756. {
  757. int totalVisibleLights = visibleLights.Length;
  758. if (totalVisibleLights == 0 || settings.mainLightRenderingMode != LightRenderingMode.PerPixel)
  759. return -1;
  760. Light sunLight = RenderSettings.sun;
  761. int brightestDirectionalLightIndex = -1;
  762. float brightestLightIntensity = 0.0f;
  763. for (int i = 0; i < totalVisibleLights; ++i)
  764. {
  765. VisibleLight currVisibleLight = visibleLights[i];
  766. Light currLight = currVisibleLight.light;
  767. // Particle system lights have the light property as null. We sort lights so all particles lights
  768. // come last. Therefore, if first light is particle light then all lights are particle lights.
  769. // In this case we either have no main light or already found it.
  770. if (currLight == null)
  771. break;
  772. if (currLight == sunLight)
  773. return i;
  774. // In case no shadow light is present we will return the brightest directional light
  775. if (currVisibleLight.lightType == LightType.Directional && currLight.intensity > brightestLightIntensity)
  776. {
  777. brightestLightIntensity = currLight.intensity;
  778. brightestDirectionalLightIndex = i;
  779. }
  780. }
  781. return brightestDirectionalLightIndex;
  782. }
  783. static void SetupPerFrameShaderConstants()
  784. {
  785. // When glossy reflections are OFF in the shader we set a constant color to use as indirect specular
  786. SphericalHarmonicsL2 ambientSH = RenderSettings.ambientProbe;
  787. Color linearGlossyEnvColor = new Color(ambientSH[0, 0], ambientSH[1, 0], ambientSH[2, 0]) * RenderSettings.reflectionIntensity;
  788. Color glossyEnvColor = CoreUtils.ConvertLinearToActiveColorSpace(linearGlossyEnvColor);
  789. Shader.SetGlobalVector(PerFrameBuffer._GlossyEnvironmentColor, glossyEnvColor);
  790. // Used when subtractive mode is selected
  791. Shader.SetGlobalVector(PerFrameBuffer._SubtractiveShadowColor, CoreUtils.ConvertSRGBToActiveColorSpace(RenderSettings.subtractiveShadowColor));
  792. }
  793. static void SetupPerCameraShaderConstants(in CameraData cameraData)
  794. {
  795. Camera camera = cameraData.camera;
  796. Rect pixelRect = cameraData.pixelRect;
  797. float scaledCameraWidth = (float)pixelRect.width * cameraData.renderScale;
  798. float scaledCameraHeight = (float)pixelRect.height * cameraData.renderScale;
  799. Shader.SetGlobalVector(PerCameraBuffer._ScaledScreenParams, new Vector4(scaledCameraWidth, scaledCameraHeight, 1.0f + 1.0f / scaledCameraWidth, 1.0f + 1.0f / scaledCameraHeight));
  800. Shader.SetGlobalVector(PerCameraBuffer._WorldSpaceCameraPos, camera.transform.position);
  801. float cameraWidth = (float)pixelRect.width;
  802. float cameraHeight = (float)pixelRect.height;
  803. Shader.SetGlobalVector(PerCameraBuffer._ScreenParams, new Vector4(cameraWidth, cameraHeight, 1.0f + 1.0f / cameraWidth, 1.0f + 1.0f / cameraHeight));
  804. Matrix4x4 projMatrix = GL.GetGPUProjectionMatrix(camera.projectionMatrix, false);
  805. Matrix4x4 viewMatrix = camera.worldToCameraMatrix;
  806. Matrix4x4 viewProjMatrix = projMatrix * viewMatrix;
  807. Matrix4x4 invViewProjMatrix = Matrix4x4.Inverse(viewProjMatrix);
  808. Shader.SetGlobalMatrix(PerCameraBuffer._InvCameraViewProj, invViewProjMatrix);
  809. }
  810. }
  811. }