1
0

ForwardRenderer.cs 30 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560561562563564565566567
  1. using UnityEngine.Rendering.Universal.Internal;
  2. namespace UnityEngine.Rendering.Universal
  3. {
    /// <summary>
    /// Default renderer for Universal RP.
    /// This renderer is supported on all Universal RP supported platforms.
    /// It uses a classic forward rendering strategy with per-object light culling.
    /// </summary>
    public sealed class ForwardRenderer : ScriptableRenderer
    {
        // Bit depth requested for camera depth/stencil buffers created by this renderer.
        const int k_DepthStencilBufferBits = 32;
        // Command buffer name used when allocating the intermediate camera textures.
        const string k_CreateCameraTextures = "Create Camera Texture";

        // Built-in render passes. Created once in the constructor and (conditionally)
        // enqueued every frame from Setup().
        ColorGradingLutPass m_ColorGradingLutPass;
        DepthOnlyPass m_DepthPrepass;
        MainLightShadowCasterPass m_MainLightShadowCasterPass;
        AdditionalLightsShadowCasterPass m_AdditionalLightsShadowCasterPass;
        ScreenSpaceShadowResolvePass m_ScreenSpaceShadowResolvePass;
        DrawObjectsPass m_RenderOpaqueForwardPass;
        DrawSkyboxPass m_DrawSkyboxPass;
        CopyDepthPass m_CopyDepthPass;
        CopyColorPass m_CopyColorPass;
        TransparentSettingsPass m_TransparentSettingsPass;
        DrawObjectsPass m_RenderTransparentForwardPass;
        InvokeOnRenderObjectCallbackPass m_OnRenderObjectCallbackPass;
        PostProcessPass m_PostProcessPass;
        // Second post-process pass instance used for FXAA / final effects that must
        // run after the main post-processing resolve.
        PostProcessPass m_FinalPostProcessPass;
        FinalBlitPass m_FinalBlitPass;
        CapturePass m_CapturePass;
#if POST_PROCESSING_STACK_2_0_0_OR_NEWER
        // Compatibility passes for the legacy Post Processing Stack v2 package.
        PostProcessPassCompat m_OpaquePostProcessPassCompat;
        PostProcessPassCompat m_PostProcessPassCompat;
#endif
#if UNITY_EDITOR
        SceneViewDepthCopyPass m_SceneViewDepthCopyPass;
#endif

        // Attachments in use for the current camera; either the named intermediate
        // textures below or RenderTargetHandle.CameraTarget (the backbuffer).
        RenderTargetHandle m_ActiveCameraColorAttachment;
        RenderTargetHandle m_ActiveCameraDepthAttachment;
        // Named intermediate render targets (shader property handles).
        RenderTargetHandle m_CameraColorAttachment;
        RenderTargetHandle m_CameraDepthAttachment;
        RenderTargetHandle m_DepthTexture;
        RenderTargetHandle m_OpaqueColor;
        RenderTargetHandle m_AfterPostProcessColor;
        RenderTargetHandle m_ColorGradingLut;

        // Per-object forward light setup helper.
        ForwardLights m_ForwardLights;
        // Stencil state applied to the opaque/transparent draw passes.
        StencilState m_DefaultStencilState;

        // Engine materials owned by this renderer; destroyed in Dispose().
        Material m_BlitMaterial;
        Material m_CopyDepthMaterial;
        Material m_SamplingMaterial;
        Material m_ScreenspaceShadowsMaterial;
        /// <summary>
        /// Creates the renderer: builds its engine materials, the default stencil state,
        /// the fixed set of built-in render passes, and the named render target handles.
        /// </summary>
        /// <param name="data">Serialized renderer asset holding shaders, layer masks and settings.</param>
        public ForwardRenderer(ForwardRendererData data) : base(data)
        {
            m_BlitMaterial = CoreUtils.CreateEngineMaterial(data.shaders.blitPS);
            m_CopyDepthMaterial = CoreUtils.CreateEngineMaterial(data.shaders.copyDepthPS);
            m_SamplingMaterial = CoreUtils.CreateEngineMaterial(data.shaders.samplingPS);
            m_ScreenspaceShadowsMaterial = CoreUtils.CreateEngineMaterial(data.shaders.screenSpaceShadowPS);

            // Build the stencil state applied to the opaque/transparent object passes
            // from the serialized override settings.
            StencilStateData stencilData = data.defaultStencilState;
            m_DefaultStencilState = StencilState.defaultValue;
            m_DefaultStencilState.enabled = stencilData.overrideStencilState;
            m_DefaultStencilState.SetCompareFunction(stencilData.stencilCompareFunction);
            m_DefaultStencilState.SetPassOperation(stencilData.passOperation);
            m_DefaultStencilState.SetFailOperation(stencilData.failOperation);
            m_DefaultStencilState.SetZFailOperation(stencilData.zFailOperation);

            // Note: Since all custom render passes inject first and we have stable sort,
            // we inject the builtin passes in the before events.
            m_MainLightShadowCasterPass = new MainLightShadowCasterPass(RenderPassEvent.BeforeRenderingShadows);
            m_AdditionalLightsShadowCasterPass = new AdditionalLightsShadowCasterPass(RenderPassEvent.BeforeRenderingShadows);
            m_DepthPrepass = new DepthOnlyPass(RenderPassEvent.BeforeRenderingPrepasses, RenderQueueRange.opaque, data.opaqueLayerMask);
            m_ScreenSpaceShadowResolvePass = new ScreenSpaceShadowResolvePass(RenderPassEvent.BeforeRenderingPrepasses, m_ScreenspaceShadowsMaterial);
            m_ColorGradingLutPass = new ColorGradingLutPass(RenderPassEvent.BeforeRenderingPrepasses, data.postProcessData);
            m_RenderOpaqueForwardPass = new DrawObjectsPass("Render Opaques", true, RenderPassEvent.BeforeRenderingOpaques, RenderQueueRange.opaque, data.opaqueLayerMask, m_DefaultStencilState, stencilData.stencilReference);
            m_CopyDepthPass = new CopyDepthPass(RenderPassEvent.AfterRenderingSkybox, m_CopyDepthMaterial);
            m_DrawSkyboxPass = new DrawSkyboxPass(RenderPassEvent.BeforeRenderingSkybox);
            m_CopyColorPass = new CopyColorPass(RenderPassEvent.BeforeRenderingTransparents, m_SamplingMaterial);
            m_TransparentSettingsPass = new TransparentSettingsPass(RenderPassEvent.BeforeRenderingTransparents, data.shadowTransparentReceive);
            m_RenderTransparentForwardPass = new DrawObjectsPass("Render Transparents", false, RenderPassEvent.BeforeRenderingTransparents, RenderQueueRange.transparent, data.transparentLayerMask, m_DefaultStencilState, stencilData.stencilReference);
            m_OnRenderObjectCallbackPass = new InvokeOnRenderObjectCallbackPass(RenderPassEvent.BeforeRenderingPostProcessing);
            m_PostProcessPass = new PostProcessPass(RenderPassEvent.BeforeRenderingPostProcessing, data.postProcessData, m_BlitMaterial);
            // AfterRendering + 1: the final PP pass and final blit must run after any
            // user passes injected at AfterRendering.
            m_FinalPostProcessPass = new PostProcessPass(RenderPassEvent.AfterRendering + 1, data.postProcessData, m_BlitMaterial);
            m_CapturePass = new CapturePass(RenderPassEvent.AfterRendering);
            m_FinalBlitPass = new FinalBlitPass(RenderPassEvent.AfterRendering + 1, m_BlitMaterial);

#if POST_PROCESSING_STACK_2_0_0_OR_NEWER
            m_OpaquePostProcessPassCompat = new PostProcessPassCompat(RenderPassEvent.BeforeRenderingOpaques, true);
            m_PostProcessPassCompat = new PostProcessPassCompat(RenderPassEvent.BeforeRenderingPostProcessing);
#endif

#if UNITY_EDITOR
            m_SceneViewDepthCopyPass = new SceneViewDepthCopyPass(RenderPassEvent.AfterRendering + 9, m_CopyDepthMaterial);
#endif

            // RenderTexture format depends on camera and pipeline (HDR, non HDR, etc)
            // Samples (MSAA) depend on camera and pipeline
            m_CameraColorAttachment.Init("_CameraColorTexture");
            m_CameraDepthAttachment.Init("_CameraDepthAttachment");
            m_DepthTexture.Init("_CameraDepthTexture");
            m_OpaqueColor.Init("_CameraOpaqueTexture");
            m_AfterPostProcessColor.Init("_AfterPostProcessTexture");
            m_ColorGradingLut.Init("_InternalGradingLut");
            m_ForwardLights = new ForwardLights();

            supportedRenderingFeatures = new RenderingFeatures()
            {
                cameraStacking = true,
            };
        }
  102. /// <inheritdoc />
  103. protected override void Dispose(bool disposing)
  104. {
  105. // always dispose unmanaged resources
  106. m_PostProcessPass.Cleanup();
  107. CoreUtils.Destroy(m_BlitMaterial);
  108. CoreUtils.Destroy(m_CopyDepthMaterial);
  109. CoreUtils.Destroy(m_SamplingMaterial);
  110. CoreUtils.Destroy(m_ScreenspaceShadowsMaterial);
  111. }
        /// <inheritdoc />
        /// <summary>
        /// Schedules all render passes for the current camera: shadows, optional depth
        /// prepass, opaques, skybox, depth/color copies, transparents, post-processing
        /// and the final resolve to the camera target. Also decides whether intermediate
        /// color/depth textures are required and configures the active attachments.
        /// </summary>
        public override void Setup(ScriptableRenderContext context, ref RenderingData renderingData)
        {
            Camera camera = renderingData.cameraData.camera;
            ref CameraData cameraData = ref renderingData.cameraData;
            RenderTextureDescriptor cameraTargetDescriptor = renderingData.cameraData.cameraTargetDescriptor;

            // Special path for depth only offscreen cameras. Only write opaques + transparents.
            bool isOffscreenDepthTexture = cameraData.targetTexture != null && cameraData.targetTexture.format == RenderTextureFormat.Depth;
            if (isOffscreenDepthTexture)
            {
                ConfigureCameraTarget(BuiltinRenderTextureType.CameraTarget, BuiltinRenderTextureType.CameraTarget);

                // Custom renderer features still get a chance to inject passes.
                for (int i = 0; i < rendererFeatures.Count; ++i)
                {
                    if (rendererFeatures[i].isActive)
                        rendererFeatures[i].AddRenderPasses(this, ref renderingData);
                }

                EnqueuePass(m_RenderOpaqueForwardPass);
                EnqueuePass(m_DrawSkyboxPass);
                EnqueuePass(m_RenderTransparentForwardPass);
                return;
            }

            // Should apply post-processing after rendering this camera?
            bool applyPostProcessing = cameraData.postProcessEnabled;

            // There's at least a camera in the camera stack that applies post-processing
            bool anyPostProcessing = renderingData.postProcessingEnabled;

            var postProcessFeatureSet = UniversalRenderPipeline.asset.postProcessingFeatureSet;

            // We generate color LUT in the base camera only. This allows us to not break render pass execution for overlay cameras.
            bool generateColorGradingLUT = anyPostProcessing && cameraData.renderType == CameraRenderType.Base;

#if POST_PROCESSING_STACK_2_0_0_OR_NEWER
            // PPv2 doesn't need to generate color grading LUT.
            if (postProcessFeatureSet == PostProcessingFeatureSet.PostProcessingV2)
                generateColorGradingLUT = false;
#endif

            bool isSceneViewCamera = cameraData.isSceneViewCamera;
            bool requiresDepthTexture = cameraData.requiresDepthTexture;
            bool isStereoEnabled = cameraData.isStereoEnabled;

            // Setup() on the shadow passes returns whether they should be enqueued this frame.
            bool mainLightShadows = m_MainLightShadowCasterPass.Setup(ref renderingData);
            bool additionalLightShadows = m_AdditionalLightsShadowCasterPass.Setup(ref renderingData);
            bool transparentsNeedSettingsPass = m_TransparentSettingsPass.Setup(ref renderingData);

            // Depth prepass is generated in the following cases:
            // - Scene view camera always requires a depth texture. We do a depth pre-pass to simplify it and it shouldn't matter much for editor.
            // - If game or offscreen camera requires it we check if we can copy the depth from the rendering opaques pass and use that instead.
            bool requiresDepthPrepass = isSceneViewCamera;
            requiresDepthPrepass |= (requiresDepthTexture && !CanCopyDepth(ref renderingData.cameraData));

            // The copying of depth should normally happen after rendering opaques.
            // But if we only require it for post processing or the scene camera then we do it after rendering transparent objects
            m_CopyDepthPass.renderPassEvent = (!requiresDepthTexture && (applyPostProcessing || isSceneViewCamera)) ? RenderPassEvent.AfterRenderingTransparents : RenderPassEvent.AfterRenderingOpaques;

            // TODO: There's an issue in multiview and depth copy pass. Atm forcing a depth prepass on XR until we have a proper fix.
            if (isStereoEnabled && requiresDepthTexture)
                requiresDepthPrepass = true;

            bool isRunningHololens = false;
#if ENABLE_VR && ENABLE_VR_MODULE
            isRunningHololens = UniversalRenderPipeline.IsRunningHololens(camera);
#endif
            // Renderer features force an intermediate color texture (except on HoloLens,
            // where the extra blit is avoided for performance).
            bool createColorTexture = RequiresIntermediateColorTexture(ref renderingData, cameraTargetDescriptor) ||
                (rendererFeatures.Count != 0 && !isRunningHololens);

            // If camera requires depth and there's no depth pre-pass we create a depth texture that can be read later by effect requiring it.
            bool createDepthTexture = cameraData.requiresDepthTexture && !requiresDepthPrepass;
            // A base camera that doesn't resolve to the final target needs a depth
            // attachment that overlay cameras in the stack can keep rendering into.
            createDepthTexture |= (renderingData.cameraData.renderType == CameraRenderType.Base && !renderingData.resolveFinalTarget);

            // Configure all settings require to start a new camera stack (base camera only)
            if (cameraData.renderType == CameraRenderType.Base)
            {
                m_ActiveCameraColorAttachment = (createColorTexture) ? m_CameraColorAttachment : RenderTargetHandle.CameraTarget;
                m_ActiveCameraDepthAttachment = (createDepthTexture) ? m_CameraDepthAttachment : RenderTargetHandle.CameraTarget;

                bool intermediateRenderTexture = createColorTexture || createDepthTexture;

                // Doesn't create texture for Overlay cameras as they are already overlaying on top of created textures.
                bool createTextures = intermediateRenderTexture;
                if (createTextures)
                    CreateCameraRenderTarget(context, ref renderingData.cameraData);

                // if rendering to intermediate render texture we don't have to create msaa backbuffer
                int backbufferMsaaSamples = (intermediateRenderTexture) ? 1 : cameraTargetDescriptor.msaaSamples;

                if (Camera.main == camera && camera.cameraType == CameraType.Game && cameraData.targetTexture == null)
                    SetupBackbufferFormat(backbufferMsaaSamples, isStereoEnabled);
            }
            else
            {
                // Overlay cameras always render into the intermediate targets created
                // by their base camera.
                m_ActiveCameraColorAttachment = m_CameraColorAttachment;
                m_ActiveCameraDepthAttachment = m_CameraDepthAttachment;
            }

            ConfigureCameraTarget(m_ActiveCameraColorAttachment.Identifier(), m_ActiveCameraDepthAttachment.Identifier());

            // Let renderer features inject their passes before the builtin ones.
            for (int i = 0; i < rendererFeatures.Count; ++i)
            {
                if (rendererFeatures[i].isActive)
                    rendererFeatures[i].AddRenderPasses(this, ref renderingData);
            }

            // Prune null entries a misbehaving feature may have enqueued
            // (iterate backwards so RemoveAt doesn't shift unvisited indices).
            int count = activeRenderPassQueue.Count;
            for (int i = count - 1; i >= 0; i--)
            {
                if (activeRenderPassQueue[i] == null)
                    activeRenderPassQueue.RemoveAt(i);
            }
            bool hasPassesAfterPostProcessing = activeRenderPassQueue.Find(x => x.renderPassEvent == RenderPassEvent.AfterRendering) != null;

            if (mainLightShadows)
                EnqueuePass(m_MainLightShadowCasterPass);

            if (additionalLightShadows)
                EnqueuePass(m_AdditionalLightsShadowCasterPass);

            if (requiresDepthPrepass)
            {
                m_DepthPrepass.Setup(cameraTargetDescriptor, m_DepthTexture);
                EnqueuePass(m_DepthPrepass);
            }

            if (generateColorGradingLUT)
            {
                m_ColorGradingLutPass.Setup(m_ColorGradingLut);
                EnqueuePass(m_ColorGradingLutPass);
            }

            EnqueuePass(m_RenderOpaqueForwardPass);

#if POST_PROCESSING_STACK_2_0_0_OR_NEWER
#pragma warning disable 0618 // Obsolete
            // PPv2 opaque-only effects run right after opaques, in place.
            bool hasOpaquePostProcessCompat = applyPostProcessing &&
                postProcessFeatureSet == PostProcessingFeatureSet.PostProcessingV2 &&
                renderingData.cameraData.postProcessLayer.HasOpaqueOnlyEffects(RenderingUtils.postProcessRenderContext);

            if (hasOpaquePostProcessCompat)
            {
                m_OpaquePostProcessPassCompat.Setup(cameraTargetDescriptor, m_ActiveCameraColorAttachment, m_ActiveCameraColorAttachment);
                EnqueuePass(m_OpaquePostProcessPassCompat);
            }
#pragma warning restore 0618
#endif

            bool isOverlayCamera = cameraData.renderType == CameraRenderType.Overlay;
            if (camera.clearFlags == CameraClearFlags.Skybox && RenderSettings.skybox != null && !isOverlayCamera)
                EnqueuePass(m_DrawSkyboxPass);

            // If a depth texture was created we necessarily need to copy it, otherwise we could have render it to a renderbuffer
            if (!requiresDepthPrepass && renderingData.cameraData.requiresDepthTexture && createDepthTexture)
            {
                m_CopyDepthPass.Setup(m_ActiveCameraDepthAttachment, m_DepthTexture);
                EnqueuePass(m_CopyDepthPass);
            }

            if (renderingData.cameraData.requiresOpaqueTexture)
            {
                // TODO: Downsampling method should be store in the renderer instead of in the asset.
                // We need to migrate this data to renderer. For now, we query the method in the active asset.
                Downsampling downsamplingMethod = UniversalRenderPipeline.asset.opaqueDownsampling;
                m_CopyColorPass.Setup(m_ActiveCameraColorAttachment.Identifier(), m_OpaqueColor, downsamplingMethod);
                EnqueuePass(m_CopyColorPass);
            }

            if (transparentsNeedSettingsPass)
            {
                EnqueuePass(m_TransparentSettingsPass);
            }

            EnqueuePass(m_RenderTransparentForwardPass);
            EnqueuePass(m_OnRenderObjectCallbackPass);

            bool lastCameraInTheStack = renderingData.resolveFinalTarget;
            bool hasCaptureActions = renderingData.cameraData.captureActions != null && lastCameraInTheStack;
            // FXAA runs as a separate "final" post pass on the last camera of the stack.
            bool applyFinalPostProcessing = anyPostProcessing && lastCameraInTheStack &&
                renderingData.cameraData.antialiasing == AntialiasingMode.FastApproximateAntialiasing;

            // When post-processing is enabled we can use the stack to resolve rendering to camera target (screen or RT).
            // However when there are render passes executing after post we avoid resolving to screen so rendering continues (before sRGBConvertion etc)
            bool dontResolvePostProcessingToCameraTarget = hasCaptureActions || hasPassesAfterPostProcessing || applyFinalPostProcessing;

            #region Post-processing v2 support
#if POST_PROCESSING_STACK_2_0_0_OR_NEWER
            // To keep things clean we'll separate the logic from builtin PP and PPv2 - expect some copy/pasting
            if (postProcessFeatureSet == PostProcessingFeatureSet.PostProcessingV2)
            {
                // if we have additional filters
                // we need to stay in a RT
                if (hasPassesAfterPostProcessing)
                {
                    // perform post with src / dest the same
                    if (applyPostProcessing)
                    {
                        m_PostProcessPassCompat.Setup(cameraTargetDescriptor, m_ActiveCameraColorAttachment, m_ActiveCameraColorAttachment);
                        EnqueuePass(m_PostProcessPassCompat);
                    }

                    //now blit into the final target
                    if (m_ActiveCameraColorAttachment != RenderTargetHandle.CameraTarget)
                    {
                        if (renderingData.cameraData.captureActions != null)
                        {
                            m_CapturePass.Setup(m_ActiveCameraColorAttachment);
                            EnqueuePass(m_CapturePass);
                        }

                        m_FinalBlitPass.Setup(cameraTargetDescriptor, m_ActiveCameraColorAttachment);
                        EnqueuePass(m_FinalBlitPass);
                    }
                }
                else
                {
                    if (applyPostProcessing)
                    {
                        // Post resolves straight to the camera target.
                        m_PostProcessPassCompat.Setup(cameraTargetDescriptor, m_ActiveCameraColorAttachment, RenderTargetHandle.CameraTarget);
                        EnqueuePass(m_PostProcessPassCompat);
                    }
                    else if (m_ActiveCameraColorAttachment != RenderTargetHandle.CameraTarget)
                    {
                        m_FinalBlitPass.Setup(cameraTargetDescriptor, m_ActiveCameraColorAttachment);
                        EnqueuePass(m_FinalBlitPass);
                    }
                }
            }
            else
#endif
            #endregion
            {
                if (lastCameraInTheStack)
                {
                    // Post-processing will resolve to final target. No need for final blit pass.
                    if (applyPostProcessing)
                    {
                        var destination = dontResolvePostProcessingToCameraTarget ? m_AfterPostProcessColor : RenderTargetHandle.CameraTarget;

                        // if resolving to screen we need to be able to perform sRGBConvertion in post-processing if necessary
                        bool doSRGBConvertion = !(dontResolvePostProcessingToCameraTarget || (m_ActiveCameraColorAttachment != RenderTargetHandle.CameraTarget));
                        m_PostProcessPass.Setup(cameraTargetDescriptor, m_ActiveCameraColorAttachment, destination, m_ActiveCameraDepthAttachment, m_ColorGradingLut, applyFinalPostProcessing, doSRGBConvertion);
                        // NOTE(review): this assert sits inside `if (applyPostProcessing)`, so
                        // `applyPostProcessing || ...` is always true and it can never fire —
                        // presumably it was meant to guard doSRGBConvertion on its own; confirm intent.
                        Debug.Assert(applyPostProcessing || doSRGBConvertion, "This will do unnecessary blit!");
                        EnqueuePass(m_PostProcessPass);
                    }

                    if (renderingData.cameraData.captureActions != null)
                    {
                        m_CapturePass.Setup(m_ActiveCameraColorAttachment);
                        EnqueuePass(m_CapturePass);
                    }

                    // if we applied post-processing for this camera it means current active texture is m_AfterPostProcessColor
                    var sourceForFinalPass = (applyPostProcessing) ? m_AfterPostProcessColor : m_ActiveCameraColorAttachment;

                    // Do FXAA or any other final post-processing effect that might need to run after AA.
                    if (applyFinalPostProcessing)
                    {
                        m_FinalPostProcessPass.SetupFinalPass(sourceForFinalPass);
                        EnqueuePass(m_FinalPostProcessPass);
                    }

                    // if post-processing then we already resolved to camera target while doing post.
                    // Also only do final blit if camera is not rendering to RT.
                    bool cameraTargetResolved =
                        // final PP always blit to camera target
                        applyFinalPostProcessing ||
                        // no final PP but we have PP stack. In that case it blit unless there are render pass after PP
                        (applyPostProcessing && !hasPassesAfterPostProcessing) ||
                        // offscreen camera rendering to a texture, we don't need a blit pass to resolve to screen
                        m_ActiveCameraColorAttachment == RenderTargetHandle.CameraTarget;

                    // We need final blit to resolve to screen
                    if (!cameraTargetResolved)
                    {
                        m_FinalBlitPass.Setup(cameraTargetDescriptor, sourceForFinalPass);
                        EnqueuePass(m_FinalBlitPass);
                    }
                }
                // stay in RT so we resume rendering on stack after post-processing
                else if (applyPostProcessing)
                {
                    m_PostProcessPass.Setup(cameraTargetDescriptor, m_ActiveCameraColorAttachment, m_AfterPostProcessColor, m_ActiveCameraDepthAttachment, m_ColorGradingLut, false, false);
                    EnqueuePass(m_PostProcessPass);
                }
            }

#if UNITY_EDITOR
            if (renderingData.cameraData.isSceneViewCamera)
            {
                // Scene view camera should always resolve target (not stacked)
                Assertions.Assert.IsTrue(lastCameraInTheStack, "Editor camera must resolve target upon finish rendering.");
                m_SceneViewDepthCopyPass.Setup(m_DepthTexture);
                EnqueuePass(m_SceneViewDepthCopyPass);
            }
#endif
        }
  364. /// <inheritdoc />
  365. public override void SetupLights(ScriptableRenderContext context, ref RenderingData renderingData)
  366. {
  367. m_ForwardLights.Setup(context, ref renderingData);
  368. }
  369. /// <inheritdoc />
  370. public override void SetupCullingParameters(ref ScriptableCullingParameters cullingParameters,
  371. ref CameraData cameraData)
  372. {
  373. // TODO: PerObjectCulling also affect reflection probes. Enabling it for now.
  374. // if (asset.additionalLightsRenderingMode == LightRenderingMode.Disabled ||
  375. // asset.maxAdditionalLightsCount == 0)
  376. // {
  377. // cullingParameters.cullingOptions |= CullingOptions.DisablePerObjectCulling;
  378. // }
  379. // We disable shadow casters if both shadow casting modes are turned off
  380. // or the shadow distance has been turned down to zero
  381. bool isShadowCastingDisabled = !UniversalRenderPipeline.asset.supportsMainLightShadows && !UniversalRenderPipeline.asset.supportsAdditionalLightShadows;
  382. bool isShadowDistanceZero = Mathf.Approximately(cameraData.maxShadowDistance, 0.0f);
  383. if (isShadowCastingDisabled || isShadowDistanceZero)
  384. {
  385. cullingParameters.cullingOptions &= ~CullingOptions.ShadowCasters;
  386. }
  387. cullingParameters.shadowDistance = cameraData.maxShadowDistance;
  388. }
  389. /// <inheritdoc />
  390. public override void FinishRendering(CommandBuffer cmd)
  391. {
  392. if (m_ActiveCameraColorAttachment != RenderTargetHandle.CameraTarget)
  393. {
  394. cmd.ReleaseTemporaryRT(m_ActiveCameraColorAttachment.id);
  395. m_ActiveCameraColorAttachment = RenderTargetHandle.CameraTarget;
  396. }
  397. if (m_ActiveCameraDepthAttachment != RenderTargetHandle.CameraTarget)
  398. {
  399. cmd.ReleaseTemporaryRT(m_ActiveCameraDepthAttachment.id);
  400. m_ActiveCameraDepthAttachment = RenderTargetHandle.CameraTarget;
  401. }
  402. }
  403. void CreateCameraRenderTarget(ScriptableRenderContext context, ref CameraData cameraData)
  404. {
  405. CommandBuffer cmd = CommandBufferPool.Get(k_CreateCameraTextures);
  406. var descriptor = cameraData.cameraTargetDescriptor;
  407. int msaaSamples = descriptor.msaaSamples;
  408. if (m_ActiveCameraColorAttachment != RenderTargetHandle.CameraTarget)
  409. {
  410. bool useDepthRenderBuffer = m_ActiveCameraDepthAttachment == RenderTargetHandle.CameraTarget;
  411. var colorDescriptor = descriptor;
  412. colorDescriptor.depthBufferBits = (useDepthRenderBuffer) ? k_DepthStencilBufferBits : 0;
  413. cmd.GetTemporaryRT(m_ActiveCameraColorAttachment.id, colorDescriptor, FilterMode.Bilinear);
  414. }
  415. if (m_ActiveCameraDepthAttachment != RenderTargetHandle.CameraTarget)
  416. {
  417. var depthDescriptor = descriptor;
  418. depthDescriptor.colorFormat = RenderTextureFormat.Depth;
  419. depthDescriptor.depthBufferBits = k_DepthStencilBufferBits;
  420. depthDescriptor.bindMS = msaaSamples > 1 && !SystemInfo.supportsMultisampleAutoResolve && (SystemInfo.supportsMultisampledTextures != 0);
  421. cmd.GetTemporaryRT(m_ActiveCameraDepthAttachment.id, depthDescriptor, FilterMode.Point);
  422. }
  423. context.ExecuteCommandBuffer(cmd);
  424. CommandBufferPool.Release(cmd);
  425. }
        /// <summary>
        /// Matches the backbuffer MSAA sample count to the camera's, and on VR also
        /// refreshes the eye texture MSAA setting when the count actually changed.
        /// </summary>
        /// <param name="msaaSamples">MSAA sample count to request for the backbuffer.</param>
        /// <param name="stereo">Whether stereo (XR) rendering is active.</param>
        void SetupBackbufferFormat(int msaaSamples, bool stereo)
        {
#if ENABLE_VR && ENABLE_VR_MODULE
            bool msaaSampleCountHasChanged = false;
            int currentQualitySettingsSampleCount = QualitySettings.antiAliasing;
            // 0 and 1 both mean "no MSAA", so that transition doesn't count as a change.
            if (currentQualitySettingsSampleCount != msaaSamples &&
                !(currentQualitySettingsSampleCount == 0 && msaaSamples == 1))
            {
                msaaSampleCountHasChanged = true;
            }

            // There's no exposed API to control how a backbuffer is created with MSAA
            // By settings antiAliasing we match what the amount of samples in camera data with backbuffer
            // We only do this for the main camera and this only takes effect in the beginning of next frame.
            // This settings should not be changed on a frame basis so that's fine.
            QualitySettings.antiAliasing = msaaSamples;

            if (stereo && msaaSampleCountHasChanged)
                XR.XRDevice.UpdateEyeTextureMSAASetting();
#else
            QualitySettings.antiAliasing = msaaSamples;
#endif
        }
  447. bool RequiresIntermediateColorTexture(ref RenderingData renderingData, RenderTextureDescriptor baseDescriptor)
  448. {
  449. // When rendering a camera stack we always create an intermediate render texture to composite camera results.
  450. // We create it upon rendering the Base camera.
  451. if (renderingData.cameraData.renderType == CameraRenderType.Base && !renderingData.resolveFinalTarget)
  452. return true;
  453. ref CameraData cameraData = ref renderingData.cameraData;
  454. int msaaSamples = cameraData.cameraTargetDescriptor.msaaSamples;
  455. bool isStereoEnabled = renderingData.cameraData.isStereoEnabled;
  456. bool isScaledRender = !Mathf.Approximately(cameraData.renderScale, 1.0f) && !cameraData.isStereoEnabled;
  457. bool isCompatibleBackbufferTextureDimension = baseDescriptor.dimension == TextureDimension.Tex2D;
  458. bool requiresExplicitMsaaResolve = msaaSamples > 1 && !SystemInfo.supportsMultisampleAutoResolve;
  459. bool isOffscreenRender = cameraData.targetTexture != null && !cameraData.isSceneViewCamera;
  460. bool isCapturing = cameraData.captureActions != null;
  461. #if ENABLE_VR && ENABLE_VR_MODULE
  462. if (isStereoEnabled)
  463. isCompatibleBackbufferTextureDimension = UnityEngine.XR.XRSettings.deviceEyeTextureDimension == baseDescriptor.dimension;
  464. #endif
  465. bool requiresBlitForOffscreenCamera = cameraData.postProcessEnabled || cameraData.requiresOpaqueTexture || requiresExplicitMsaaResolve;
  466. if (isOffscreenRender)
  467. return requiresBlitForOffscreenCamera;
  468. return requiresBlitForOffscreenCamera || cameraData.isSceneViewCamera || isScaledRender || cameraData.isHdrEnabled ||
  469. !isCompatibleBackbufferTextureDimension || !cameraData.isDefaultViewport || isCapturing ||
  470. (Display.main.requiresBlitToBackbuffer && !isStereoEnabled);
  471. }
  472. bool CanCopyDepth(ref CameraData cameraData)
  473. {
  474. bool msaaEnabledForCamera = cameraData.cameraTargetDescriptor.msaaSamples > 1;
  475. bool supportsTextureCopy = SystemInfo.copyTextureSupport != CopyTextureSupport.None;
  476. bool supportsDepthTarget = RenderingUtils.SupportsRenderTextureFormat(RenderTextureFormat.Depth);
  477. bool supportsDepthCopy = !msaaEnabledForCamera && (supportsDepthTarget || supportsTextureCopy);
  478. // TODO: We don't have support to highp Texture2DMS currently and this breaks depth precision.
  479. // currently disabling it until shader changes kick in.
  480. //bool msaaDepthResolve = msaaEnabledForCamera && SystemInfo.supportsMultisampledTextures != 0;
  481. bool msaaDepthResolve = false;
  482. return supportsDepthCopy || msaaDepthResolve;
  483. }
  484. }
  485. }