using System;
using Unity.Collections;
using System.Collections.Generic;
#if UNITY_EDITOR
using UnityEditor;
using UnityEditor.Rendering.Universal;
#endif
using UnityEngine.Scripting.APIUpdating;
using Lightmapping = UnityEngine.Experimental.GlobalIllumination.Lightmapping;
#if ENABLE_VR && ENABLE_XR_MODULE
using UnityEngine.XR;
#endif

namespace UnityEngine.Rendering.LWRP
{
    // Kept only so the API updater can migrate old LWRP user code to URP.
    [Obsolete("LWRP -> Universal (UnityUpgradable) -> UnityEngine.Rendering.Universal.UniversalRenderPipeline", true)]
    public class LightweightRenderPipeline
    {
        public LightweightRenderPipeline(LightweightRenderPipelineAsset asset)
        {
        }
    }
}

namespace UnityEngine.Rendering.Universal
{
    /// <summary>
    /// Render pipeline entry point for the Universal Render Pipeline (URP).
    /// Drives per-frame setup, camera sorting, camera-stack resolution and
    /// per-camera culling/setup/execution through the active ScriptableRenderer.
    /// </summary>
    public sealed partial class UniversalRenderPipeline : RenderPipeline
    {
        // Shader property IDs updated once per frame.
        internal static class PerFrameBuffer
        {
            public static int _GlossyEnvironmentColor;
            public static int _SubtractiveShadowColor;

            public static int _Time;
            public static int _SinTime;
            public static int _CosTime;
            public static int unity_DeltaTime;
            public static int _TimeParameters;
        }

        // Shader property IDs updated once per camera.
        static internal class PerCameraBuffer
        {
            // TODO: This needs to account for stereo rendering
            public static int _InvCameraViewProj;
            public static int _ScaledScreenParams;
            public static int _ScreenParams;
            public static int _WorldSpaceCameraPos;
        }

        public const string k_ShaderTagName = "UniversalPipeline";

        const string k_RenderCameraTag = "Render Camera";
        static ProfilingSampler _CameraProfilingSampler = new ProfilingSampler(k_RenderCameraTag);

        public static float maxShadowBias
        {
            get => 10.0f;
        }

        public static float minRenderScale
        {
            get => 0.1f;
        }

        public static float maxRenderScale
        {
            get => 2.0f;
        }

        // Amount of Lights that can be shaded per object (in the for loop in the shader)
        public static int maxPerObjectLights
        {
            // No support to bitfield mask and int[] in gles2. Can't index fast more than 4 lights.
            // Check Lighting.hlsl for more details.
            get => (SystemInfo.graphicsDeviceType == GraphicsDeviceType.OpenGLES2) ? 4 : 8;
        }

        // These limits have to match same limits in Input.hlsl
        const int k_MaxVisibleAdditionalLightsSSBO = 256;
        const int k_MaxVisibleAdditionalLightsUBO = 32;

        public static int maxVisibleAdditionalLights
        {
            get
            {
                // There are some performance issues by using SSBO in mobile.
                // Also some GPUs don't supports SSBO in vertex shader.
                if (RenderingUtils.useStructuredBuffer)
                    return k_MaxVisibleAdditionalLightsSSBO;

                // We don't use SSBO in D3D because we can't figure out without adding shader variants if platforms is D3D10.
                // We don't use SSBO on Nintendo Switch as UBO path is faster.
                // However here we use same limits as SSBO path.
                var deviceType = SystemInfo.graphicsDeviceType;
                if (deviceType == GraphicsDeviceType.Direct3D11 || deviceType == GraphicsDeviceType.Direct3D12 ||
                    deviceType == GraphicsDeviceType.Switch)
                    return k_MaxVisibleAdditionalLightsSSBO;

                // We use less limits for mobile as some mobile GPUs have small SP cache for constants.
                // Using more than 32 might cause spilling to main memory.
                return k_MaxVisibleAdditionalLightsUBO;
            }
        }

        // Internal max count for how many ScriptableRendererData can be added to a single Universal RP asset
        internal static int maxScriptableRenderers
        {
            get => 8;
        }

        /// <summary>
        /// Creates the pipeline instance from its asset: caches shader property IDs,
        /// syncs MSAA/XR settings with the asset and registers pipeline-wide delegates.
        /// </summary>
        /// <param name="asset">Pipeline asset holding the user-facing settings.</param>
        public UniversalRenderPipeline(UniversalRenderPipelineAsset asset)
        {
            SetSupportedRenderingFeatures();

            PerFrameBuffer._GlossyEnvironmentColor = Shader.PropertyToID("_GlossyEnvironmentColor");
            PerFrameBuffer._SubtractiveShadowColor = Shader.PropertyToID("_SubtractiveShadowColor");

            PerFrameBuffer._Time = Shader.PropertyToID("_Time");
            PerFrameBuffer._SinTime = Shader.PropertyToID("_SinTime");
            PerFrameBuffer._CosTime = Shader.PropertyToID("_CosTime");
            PerFrameBuffer.unity_DeltaTime = Shader.PropertyToID("unity_DeltaTime");
            PerFrameBuffer._TimeParameters = Shader.PropertyToID("_TimeParameters");

            PerCameraBuffer._InvCameraViewProj = Shader.PropertyToID("_InvCameraViewProj");
            PerCameraBuffer._ScreenParams = Shader.PropertyToID("_ScreenParams");
            PerCameraBuffer._ScaledScreenParams = Shader.PropertyToID("_ScaledScreenParams");
            PerCameraBuffer._WorldSpaceCameraPos = Shader.PropertyToID("_WorldSpaceCameraPos");

            // Let engine know we have MSAA on for cases where we support MSAA backbuffer
            if (QualitySettings.antiAliasing != asset.msaaSampleCount)
            {
                QualitySettings.antiAliasing = asset.msaaSampleCount;
#if ENABLE_VR && ENABLE_VR_MODULE
                XR.XRDevice.UpdateEyeTextureMSAASetting();
#endif
            }

#if ENABLE_VR && ENABLE_VR_MODULE
            XRGraphics.eyeTextureResolutionScale = asset.renderScale;
#endif

            // For compatibility reasons we also match old LightweightPipeline tag.
            Shader.globalRenderPipeline = "UniversalPipeline,LightweightPipeline";

            Lightmapping.SetDelegate(lightsDelegate);

            CameraCaptureBridge.enabled = true;

            RenderingUtils.ClearSystemInfoCache();
        }

        /// <summary>
        /// Tears down everything the constructor registered so that switching
        /// pipelines (or domain reload) leaves no global state behind.
        /// </summary>
        protected override void Dispose(bool disposing)
        {
            base.Dispose(disposing);

            Shader.globalRenderPipeline = "";
            SupportedRenderingFeatures.active = new SupportedRenderingFeatures();
            ShaderData.instance.Dispose();

#if UNITY_EDITOR
            SceneViewDrawMode.ResetDrawMode();
#endif
            Lightmapping.ResetDelegate();
            CameraCaptureBridge.enabled = false;
        }

#if ENABLE_VR && ENABLE_XR_MODULE
        static List<XRDisplaySubsystem> xrDisplayList = new List<XRDisplaySubsystem>();
        static bool xrSkipRender = false;

        /// <summary>
        /// Polls the XR display subsystem and toggles <see cref="xrSkipRender"/> so the
        /// pipeline skips rendering while the XR display reports zero render passes.
        /// </summary>
        internal void SetupXRStates()
        {
            SubsystemManager.GetInstances(xrDisplayList);
            if (xrDisplayList.Count > 0)
            {
                if (xrDisplayList.Count > 1)
                    throw new NotImplementedException("Only 1 XR display is supported.");

                XRDisplaySubsystem display = xrDisplayList[0];
                if (display.GetRenderPassCount() == 0)
                {
                    // Disable XR rendering if display contains 0 renderpass
                    if (!xrSkipRender)
                    {
                        xrSkipRender = true;
                        Debug.Log("XR display is not ready. Skip XR rendering.");
                    }
                }
                else
                {
                    // Enable XR rendering if display contains >0 renderpass
                    if (xrSkipRender)
                    {
                        xrSkipRender = false;
                        Debug.Log("XR display is ready. Start XR rendering.");
                    }
                }
            }
        }
#endif

        /// <summary>
        /// Pipeline entry point called by the engine every frame: sets per-frame state,
        /// sorts cameras and renders each one (stacked for game cameras, standalone otherwise).
        /// </summary>
        protected override void Render(ScriptableRenderContext renderContext, Camera[] cameras)
        {
            BeginFrameRendering(renderContext, cameras);

            GraphicsSettings.lightsUseLinearIntensity = (QualitySettings.activeColorSpace == ColorSpace.Linear);
            GraphicsSettings.useScriptableRenderPipelineBatching = asset.useSRPBatcher;
            SetupPerFrameShaderConstants();

#if ENABLE_VR && ENABLE_XR_MODULE
            SetupXRStates();
            if (xrSkipRender)
                return;
#endif

            SortCameras(cameras);
            for (int i = 0; i < cameras.Length; ++i)
            {
                var camera = cameras[i];

                if (IsGameCamera(camera))
                {
                    RenderCameraStack(renderContext, camera);
                }
                else
                {
                    BeginCameraRendering(renderContext, camera);
#if VISUAL_EFFECT_GRAPH_0_0_1_OR_NEWER
                    // It should be called before culling to prepare material. When there isn't any VisualEffect component, this method has no effect.
                    VFX.VFXManager.PrepareCamera(camera);
#endif
                    UpdateVolumeFramework(camera, null);
                    RenderSingleCamera(renderContext, camera);
                    EndCameraRendering(renderContext, camera);
                }
            }

            EndFrameRendering(renderContext, cameras);
        }

        /// <summary>
        /// Standalone camera rendering. Use this to render procedural cameras.
        /// This method doesn't call <c>BeginCameraRendering</c> and <c>EndCameraRendering</c> callbacks.
        /// </summary>
        /// <param name="context">Render context used to record commands during execution.</param>
        /// <param name="camera">Camera to render. Must be a Base camera; Overlay cameras are skipped with a warning.</param>
        public static void RenderSingleCamera(ScriptableRenderContext context, Camera camera)
        {
            UniversalAdditionalCameraData additionalCameraData = null;
            if (IsGameCamera(camera))
                camera.gameObject.TryGetComponent(out additionalCameraData);

            if (additionalCameraData != null && additionalCameraData.renderType != CameraRenderType.Base)
            {
                Debug.LogWarning("Only Base cameras can be rendered with standalone RenderSingleCamera. Camera will be skipped.");
                return;
            }

            InitializeCameraData(camera, additionalCameraData, out var cameraData);
            RenderSingleCamera(context, cameraData, true, cameraData.postProcessEnabled);
        }

        /// <summary>
        /// Renders a single camera. This method will do culling, setup and execution of the renderer.
        /// </summary>
        /// <param name="context">Render context used to record commands during execution.</param>
        /// <param name="cameraData">Camera rendering data. This might contain data inherited from a base camera.</param>
        /// <param name="requiresBlitToBackbuffer">True if this is the last camera in the stack rendering, false otherwise.</param>
        /// <param name="anyPostProcessingEnabled">True if at least one camera has post-processing enabled in the stack, false otherwise.</param>
        static void RenderSingleCamera(ScriptableRenderContext context, CameraData cameraData, bool requiresBlitToBackbuffer, bool anyPostProcessingEnabled)
        {
            Camera camera = cameraData.camera;
            var renderer = cameraData.renderer;
            if (renderer == null)
            {
                Debug.LogWarning(string.Format("Trying to render {0} with an invalid renderer. Camera rendering will be skipped.", camera.name));
                return;
            }

            if (!camera.TryGetCullingParameters(IsStereoEnabled(camera), out var cullingParameters))
                return;

            SetupPerCameraShaderConstants(cameraData);

            // Per-camera profiling sampler is only allocated when the asset asks for profiling detail.
            ProfilingSampler sampler = (asset.debugLevel >= PipelineDebugLevel.Profiling)
                ? new ProfilingSampler(camera.name)
                : _CameraProfilingSampler;
            CommandBuffer cmd = CommandBufferPool.Get(sampler.name);
            using (new ProfilingScope(cmd, sampler))
            {
                renderer.Clear(cameraData.renderType);
                renderer.SetupCullingParameters(ref cullingParameters, ref cameraData);

                context.ExecuteCommandBuffer(cmd);
                cmd.Clear();

#if UNITY_EDITOR
                // Emit scene view UI
                if (cameraData.isSceneViewCamera)
                {
                    ScriptableRenderContext.EmitWorldGeometryForSceneView(camera);
                }
#endif

                var cullResults = context.Cull(ref cullingParameters);
                InitializeRenderingData(asset, ref cameraData, ref cullResults, requiresBlitToBackbuffer, anyPostProcessingEnabled, out var renderingData);

                renderer.Setup(context, ref renderingData);
                renderer.Execute(context, ref renderingData);
            }

            context.ExecuteCommandBuffer(cmd);
            CommandBufferPool.Release(cmd);
            context.Submit();
        }

        /// <summary>
        /// Renders a camera stack. This method calls RenderSingleCamera for each valid camera in the stack.
        /// The last camera resolves the final target to screen.
        /// </summary>
        /// <param name="context">Render context used to record commands during execution.</param>
        /// <param name="baseCamera">Camera to render.</param>
        static void RenderCameraStack(ScriptableRenderContext context, Camera baseCamera)
        {
            baseCamera.TryGetComponent<UniversalAdditionalCameraData>(out var baseCameraAdditionalData);

            // Overlay cameras will be rendered stacked while rendering base cameras
            if (baseCameraAdditionalData != null && baseCameraAdditionalData.renderType == CameraRenderType.Overlay)
                return;

            // renderer contains a stack if it has additional data and the renderer supports stacking
            var renderer = baseCameraAdditionalData?.scriptableRenderer;
            bool supportsCameraStacking = renderer != null && renderer.supportedRenderingFeatures.cameraStacking;
            List<Camera> cameraStack = (supportsCameraStacking) ? baseCameraAdditionalData?.cameraStack : null;

            bool anyPostProcessingEnabled = baseCameraAdditionalData != null && baseCameraAdditionalData.renderPostProcessing;
            anyPostProcessingEnabled &= SystemInfo.graphicsDeviceType != GraphicsDeviceType.OpenGLES2;

            // We need to know the last active camera in the stack to be able to resolve
            // rendering to screen when rendering it. The last camera in the stack is not
            // necessarily the last active one as users might disable it.
            int lastActiveOverlayCameraIndex = -1;
            if (cameraStack != null && cameraStack.Count > 0)
            {
#if POST_PROCESSING_STACK_2_0_0_OR_NEWER
                if (asset.postProcessingFeatureSet != PostProcessingFeatureSet.PostProcessingV2)
                {
#endif
                    // TODO: Add support to camera stack in VR multi pass mode
                    if (!IsMultiPassStereoEnabled(baseCamera))
                    {
                        var baseCameraRendererType = baseCameraAdditionalData?.scriptableRenderer.GetType();
                        for (int i = 0; i < cameraStack.Count; ++i)
                        {
                            Camera currCamera = cameraStack[i];

                            if (currCamera != null && currCamera.isActiveAndEnabled)
                            {
                                currCamera.TryGetComponent<UniversalAdditionalCameraData>(out var data);
                                if (data == null || data.renderType != CameraRenderType.Overlay)
                                {
                                    Debug.LogWarning(string.Format("Stack can only contain Overlay cameras. {0} will skip rendering.", currCamera.name));
                                }
                                else if (data?.scriptableRenderer.GetType() != baseCameraRendererType)
                                {
                                    Debug.LogWarning(string.Format("Only cameras with the same renderer type as the base camera can be stacked. {0} will skip rendering", currCamera.name));
                                }
                                else
                                {
                                    anyPostProcessingEnabled |= data.renderPostProcessing;
                                    lastActiveOverlayCameraIndex = i;
                                }
                            }
                        }
                    }
                    else
                    {
                        Debug.LogWarning("Multi pass stereo mode doesn't support Camera Stacking. Overlay cameras will skip rendering.");
                    }
#if POST_PROCESSING_STACK_2_0_0_OR_NEWER
                }
                else
                {
                    Debug.LogWarning("Post-processing V2 doesn't support Camera Stacking. Overlay cameras will skip rendering.");
                }
#endif
            }

            bool isStackedRendering = lastActiveOverlayCameraIndex != -1;

            BeginCameraRendering(context, baseCamera);
#if VISUAL_EFFECT_GRAPH_0_0_1_OR_NEWER
            // It should be called before culling to prepare material. When there isn't any VisualEffect component, this method has no effect.
            VFX.VFXManager.PrepareCamera(baseCamera);
#endif
            UpdateVolumeFramework(baseCamera, baseCameraAdditionalData);
            InitializeCameraData(baseCamera, baseCameraAdditionalData, out var baseCameraData);
            RenderSingleCamera(context, baseCameraData, !isStackedRendering, anyPostProcessingEnabled);
            EndCameraRendering(context, baseCamera);

            if (!isStackedRendering)
                return;

            for (int i = 0; i < cameraStack.Count; ++i)
            {
                var currCamera = cameraStack[i];

                if (!currCamera.isActiveAndEnabled)
                    continue;

                currCamera.TryGetComponent<UniversalAdditionalCameraData>(out var currCameraData);
                // Camera is overlay and enabled
                if (currCameraData != null)
                {
                    // Copy base settings from base camera data and initialize remaining specific settings for this camera type.
                    CameraData overlayCameraData = baseCameraData;
                    bool lastCamera = i == lastActiveOverlayCameraIndex;

                    BeginCameraRendering(context, currCamera);
#if VISUAL_EFFECT_GRAPH_0_0_1_OR_NEWER
                    // It should be called before culling to prepare material. When there isn't any VisualEffect component, this method has no effect.
                    VFX.VFXManager.PrepareCamera(currCamera);
#endif
                    UpdateVolumeFramework(currCamera, currCameraData);
                    InitializeAdditionalCameraData(currCamera, currCameraData, ref overlayCameraData);
                    RenderSingleCamera(context, overlayCameraData, lastCamera, anyPostProcessingEnabled);
                    EndCameraRendering(context, currCamera);
                }
            }
        }

        /// <summary>
        /// Updates the volume framework (layer mask + trigger transform) for the given camera.
        /// Scene view cameras mirror the main camera's volume layer mask when available.
        /// </summary>
        static void UpdateVolumeFramework(Camera camera, UniversalAdditionalCameraData additionalCameraData)
        {
            // Default values when there's no additional camera data available
            LayerMask layerMask = 1; // "Default"
            Transform trigger = camera.transform;

            if (additionalCameraData != null)
            {
                layerMask = additionalCameraData.volumeLayerMask;
                trigger = additionalCameraData.volumeTrigger != null
                    ? additionalCameraData.volumeTrigger
                    : trigger;
            }
            else if (camera.cameraType == CameraType.SceneView)
            {
                // Try to mirror the MainCamera volume layer mask for the scene view - do not mirror the target
                var mainCamera = Camera.main;
                UniversalAdditionalCameraData mainAdditionalCameraData = null;

                if (mainCamera != null && mainCamera.TryGetComponent(out mainAdditionalCameraData))
                    layerMask = mainAdditionalCameraData.volumeLayerMask;

                trigger = mainAdditionalCameraData != null && mainAdditionalCameraData.volumeTrigger != null
                    ? mainAdditionalCameraData.volumeTrigger
                    : trigger;
            }

            VolumeManager.instance.Update(trigger, layerMask);
        }

        /// <summary>
        /// Returns true if any active post-processing effect on this camera requires a depth texture
        /// (SMAA, or depth-consuming volume effects).
        /// </summary>
        static bool CheckPostProcessForDepth(in CameraData cameraData)
        {
            if (!cameraData.postProcessEnabled)
                return false;

            if (cameraData.antialiasing == AntialiasingMode.SubpixelMorphologicalAntiAliasing)
                return true;

            // NOTE(review): generic arguments were missing in the mangled source; restored as the
            // two depth-consuming URP volume effects (Depth of Field, Motion Blur) — confirm against upstream.
            var stack = VolumeManager.instance.stack;

            if (stack.GetComponent<DepthOfField>().IsActive())
                return true;

            if (stack.GetComponent<MotionBlur>().IsActive())
                return true;

            return false;
        }

        /// <summary>
        /// Declares to the editor which lighting/baking features URP supports.
        /// </summary>
        static void SetSupportedRenderingFeatures()
        {
#if UNITY_EDITOR
            SupportedRenderingFeatures.active = new SupportedRenderingFeatures()
            {
                reflectionProbeModes = SupportedRenderingFeatures.ReflectionProbeModes.None,
                defaultMixedLightingModes = SupportedRenderingFeatures.LightmapMixedBakeModes.Subtractive,
                mixedLightingModes = SupportedRenderingFeatures.LightmapMixedBakeModes.Subtractive | SupportedRenderingFeatures.LightmapMixedBakeModes.IndirectOnly,
                lightmapBakeTypes = LightmapBakeType.Baked | LightmapBakeType.Mixed,
                lightmapsModes = LightmapsMode.CombinedDirectional | LightmapsMode.NonDirectional,
                lightProbeProxyVolumes = false,
                motionVectors = false,
                receiveShadows = false,
                reflectionProbes = true
            };
            SceneViewDrawMode.SetupDrawMode();
#endif
        }

        /// <summary>
        /// Builds a fully initialized <see cref="CameraData"/> for a Base camera:
        /// first the stack-shared settings, then the per-camera ones.
        /// </summary>
        static void InitializeCameraData(Camera camera, UniversalAdditionalCameraData additionalCameraData, out CameraData cameraData)
        {
            cameraData = new CameraData();
            InitializeStackedCameraData(camera, additionalCameraData, ref cameraData);
            InitializeAdditionalCameraData(camera, additionalCameraData, ref cameraData);
        }

#if ENABLE_VR && ENABLE_XR_MODULE
        static List<XR.XRDisplaySubsystem> displaySubsystemList = new List<XR.XRDisplaySubsystem>();

        /// <summary>
        /// Returns true when the XR SDK display is configured so that single-pass instanced
        /// rendering is possible: one render pass targeting a 2-slice Tex2DArray with both
        /// eyes on matching viewports and slices 0/1.
        /// </summary>
        static bool CanXRSDKUseSinglePass(Camera camera)
        {
            XR.XRDisplaySubsystem display = null;
            SubsystemManager.GetInstances(displaySubsystemList);

            if (displaySubsystemList.Count > 0)
            {
                XR.XRDisplaySubsystem.XRRenderPass renderPass;
                display = displaySubsystemList[0];
                if (display.GetRenderPassCount() > 0)
                {
                    display.GetRenderPass(0, out renderPass);

                    if (renderPass.renderTargetDesc.dimension != TextureDimension.Tex2DArray)
                        return false;

                    if (renderPass.GetRenderParameterCount() != 2 || renderPass.renderTargetDesc.volumeDepth != 2)
                        return false;

                    renderPass.GetRenderParameter(camera, 0, out var renderParam0);
                    renderPass.GetRenderParameter(camera, 1, out var renderParam1);

                    if (renderParam0.textureArraySlice != 0 || renderParam1.textureArraySlice != 1)
                        return false;

                    if (renderParam0.viewport != renderParam1.viewport)
                        return false;

                    return true;
                }
            }

            return false;
        }
#endif

        /// <summary>
        /// Initialize camera data settings common for all cameras in the stack. Overlay cameras will inherit
        /// settings from base camera.
        /// </summary>
        /// <param name="baseCamera">Base camera to inherit settings from.</param>
        /// <param name="baseAdditionalCameraData">Component that contains additional base camera data.</param>
        /// <param name="cameraData">Camera data to initialize settings.</param>
        static void InitializeStackedCameraData(Camera baseCamera, UniversalAdditionalCameraData baseAdditionalCameraData, ref CameraData cameraData)
        {
            var settings = asset;
            cameraData.targetTexture = baseCamera.targetTexture;
            cameraData.isStereoEnabled = IsStereoEnabled(baseCamera);
            cameraData.isSceneViewCamera = baseCamera.cameraType == CameraType.SceneView;

            cameraData.numberOfXRPasses = 1;
            cameraData.isXRMultipass = false;

            // NOTE(review): this guard uses ENABLE_VR_MODULE but CanXRSDKUseSinglePass is compiled
            // under ENABLE_XR_MODULE — confirm both defines are always set together.
#if ENABLE_VR && ENABLE_VR_MODULE
            if (cameraData.isStereoEnabled && !cameraData.isSceneViewCamera && !CanXRSDKUseSinglePass(baseCamera) &&
                XR.XRSettings.stereoRenderingMode == XR.XRSettings.StereoRenderingMode.MultiPass)
            {
                cameraData.numberOfXRPasses = 2;
                cameraData.isXRMultipass = true;
            }
#endif

            ///////////////////////////////////////////////////////////////////
            // Environment and Post-processing settings                       /
            ///////////////////////////////////////////////////////////////////
            if (cameraData.isSceneViewCamera)
            {
                cameraData.volumeLayerMask = 1; // "Default"
                cameraData.volumeTrigger = null;
                cameraData.isStopNaNEnabled = false;
                cameraData.isDitheringEnabled = false;
                cameraData.antialiasing = AntialiasingMode.None;
                cameraData.antialiasingQuality = AntialiasingQuality.High;
            }
            else if (baseAdditionalCameraData != null)
            {
                cameraData.volumeLayerMask = baseAdditionalCameraData.volumeLayerMask;
                cameraData.volumeTrigger = baseAdditionalCameraData.volumeTrigger == null
                    ? baseCamera.transform
                    : baseAdditionalCameraData.volumeTrigger;
                // Stop-NaN requires shader model 3.5 or newer.
                cameraData.isStopNaNEnabled = baseAdditionalCameraData.stopNaN && SystemInfo.graphicsShaderLevel >= 35;
                cameraData.isDitheringEnabled = baseAdditionalCameraData.dithering;
                cameraData.antialiasing = baseAdditionalCameraData.antialiasing;
                cameraData.antialiasingQuality = baseAdditionalCameraData.antialiasingQuality;
            }
            else
            {
                cameraData.volumeLayerMask = 1; // "Default"
                cameraData.volumeTrigger = null;
                cameraData.isStopNaNEnabled = false;
                cameraData.isDitheringEnabled = false;
                cameraData.antialiasing = AntialiasingMode.None;
                cameraData.antialiasingQuality = AntialiasingQuality.High;
            }

            ///////////////////////////////////////////////////////////////////
            // Settings that control output of the camera                     /
            ///////////////////////////////////////////////////////////////////
            int msaaSamples = 1;
            if (baseCamera.allowMSAA && settings.msaaSampleCount > 1)
                msaaSamples = (baseCamera.targetTexture != null) ? baseCamera.targetTexture.antiAliasing : settings.msaaSampleCount;

            cameraData.isHdrEnabled = baseCamera.allowHDR && settings.supportsHDR;

            Rect cameraRect = baseCamera.rect;
            cameraData.pixelRect = baseCamera.pixelRect;
            cameraData.pixelWidth = baseCamera.pixelWidth;
            cameraData.pixelHeight = baseCamera.pixelHeight;
            cameraData.aspectRatio = (float)cameraData.pixelWidth / (float)cameraData.pixelHeight;
            cameraData.isDefaultViewport = (!(Math.Abs(cameraRect.x) > 0.0f || Math.Abs(cameraRect.y) > 0.0f ||
                Math.Abs(cameraRect.width) < 1.0f || Math.Abs(cameraRect.height) < 1.0f));

            // If XR is enabled, use XR renderScale.
            // Discard variations lesser than kRenderScaleThreshold.
            // Scale is only enabled for gameview.
            const float kRenderScaleThreshold = 0.05f;
            float usedRenderScale = XRGraphics.enabled ? XRGraphics.eyeTextureResolutionScale : settings.renderScale;
            cameraData.renderScale = (Mathf.Abs(1.0f - usedRenderScale) < kRenderScaleThreshold) ? 1.0f : usedRenderScale;

            var commonOpaqueFlags = SortingCriteria.CommonOpaque;
            var noFrontToBackOpaqueFlags = SortingCriteria.SortingLayer | SortingCriteria.RenderQueue | SortingCriteria.OptimizeStateChanges | SortingCriteria.CanvasOrder;
            bool hasHSRGPU = SystemInfo.hasHiddenSurfaceRemovalOnGPU;
            bool canSkipFrontToBackSorting = (baseCamera.opaqueSortMode == OpaqueSortMode.Default && hasHSRGPU) || baseCamera.opaqueSortMode == OpaqueSortMode.NoDistanceSort;

            cameraData.defaultOpaqueSortFlags = canSkipFrontToBackSorting ? noFrontToBackOpaqueFlags : commonOpaqueFlags;
            cameraData.captureActions = CameraCaptureBridge.GetCaptureActions(baseCamera);

            bool needsAlphaChannel = Graphics.preserveFramebufferAlpha;
            cameraData.cameraTargetDescriptor = CreateRenderTextureDescriptor(baseCamera, cameraData.renderScale,
                cameraData.isStereoEnabled, cameraData.isHdrEnabled, msaaSamples, needsAlphaChannel);
        }

        /// <summary>
        /// Initialize settings that can be different for each camera in the stack.
        /// </summary>
        /// <param name="camera">Camera to initialize settings from.</param>
        /// <param name="additionalCameraData">Additional camera data component to initialize settings from.</param>
        /// <param name="cameraData">Settings to be initialized.</param>
        static void InitializeAdditionalCameraData(Camera camera, UniversalAdditionalCameraData additionalCameraData, ref CameraData cameraData)
        {
            var settings = asset;
            cameraData.camera = camera;

            bool anyShadowsEnabled = settings.supportsMainLightShadows || settings.supportsAdditionalLightShadows;
            cameraData.maxShadowDistance = Mathf.Min(settings.shadowDistance, camera.farClipPlane);
            cameraData.maxShadowDistance = (anyShadowsEnabled && cameraData.maxShadowDistance >= camera.nearClipPlane)
                ? cameraData.maxShadowDistance
                : 0.0f;

            cameraData.viewMatrix = camera.worldToCameraMatrix;

            // Overlay cameras inherit viewport from base.
            // If the viewport is different between them we might need to patch the projection
            // matrix to prevent squishing when rendering objects in overlay cameras.
            cameraData.projectionMatrix = (!camera.orthographic && !cameraData.isStereoEnabled && cameraData.pixelRect != camera.pixelRect)
                ? Matrix4x4.Perspective(camera.fieldOfView, cameraData.aspectRatio, camera.nearClipPlane, camera.farClipPlane)
                : camera.projectionMatrix;

            if (cameraData.isSceneViewCamera)
            {
                cameraData.renderType = CameraRenderType.Base;
                cameraData.clearDepth = true;
                cameraData.postProcessEnabled = CoreUtils.ArePostProcessesEnabled(camera);
                cameraData.requiresDepthTexture = settings.supportsCameraDepthTexture;
                cameraData.requiresOpaqueTexture = settings.supportsCameraOpaqueTexture;
                cameraData.renderer = asset.scriptableRenderer;
            }
            else if (additionalCameraData != null)
            {
                cameraData.renderType = additionalCameraData.renderType;
                cameraData.clearDepth = (additionalCameraData.renderType != CameraRenderType.Base) ? additionalCameraData.clearDepth : true;
                cameraData.postProcessEnabled = additionalCameraData.renderPostProcessing;
                cameraData.maxShadowDistance = (additionalCameraData.renderShadows) ? cameraData.maxShadowDistance : 0.0f;
                cameraData.requiresDepthTexture = additionalCameraData.requiresDepthTexture;
                cameraData.requiresOpaqueTexture = additionalCameraData.requiresColorTexture;
                cameraData.renderer = additionalCameraData.scriptableRenderer;
            }
            else
            {
                cameraData.renderType = CameraRenderType.Base;
                cameraData.clearDepth = true;
                cameraData.postProcessEnabled = false;
                cameraData.requiresDepthTexture = settings.supportsCameraDepthTexture;
                cameraData.requiresOpaqueTexture = settings.supportsCameraOpaqueTexture;
                cameraData.renderer = asset.scriptableRenderer;
            }

            // Disable depth and color copy. We should add it in the renderer instead to avoid performance pitfalls
            // of camera stacking breaking render pass execution implicitly.
            if (cameraData.renderType == CameraRenderType.Overlay)
            {
                cameraData.requiresDepthTexture = false;
                cameraData.requiresOpaqueTexture = false;
            }

            // Disables post if GLes2
            cameraData.postProcessEnabled &= SystemInfo.graphicsDeviceType != GraphicsDeviceType.OpenGLES2;

#if POST_PROCESSING_STACK_2_0_0_OR_NEWER
#pragma warning disable 0618 // Obsolete
            if (settings.postProcessingFeatureSet == PostProcessingFeatureSet.PostProcessingV2)
            {
                camera.TryGetComponent(out cameraData.postProcessLayer);
                cameraData.postProcessEnabled &= cameraData.postProcessLayer != null && cameraData.postProcessLayer.isActiveAndEnabled;
            }

            bool depthRequiredForPostFX = settings.postProcessingFeatureSet == PostProcessingFeatureSet.PostProcessingV2
                ? cameraData.postProcessEnabled
                : CheckPostProcessForDepth(cameraData);
#pragma warning restore 0618
#else
            bool depthRequiredForPostFX = CheckPostProcessForDepth(cameraData);
#endif

            cameraData.requiresDepthTexture |= cameraData.isSceneViewCamera || depthRequiredForPostFX;
        }

        /// <summary>
        /// Gathers culling results, shadow capabilities and light configuration into
        /// the <see cref="RenderingData"/> consumed by the renderer.
        /// </summary>
        static void InitializeRenderingData(UniversalRenderPipelineAsset settings, ref CameraData cameraData, ref CullingResults cullResults,
            bool requiresBlitToBackbuffer, bool anyPostProcessingEnabled, out RenderingData renderingData)
        {
            var visibleLights = cullResults.visibleLights;

            int mainLightIndex = GetMainLightIndex(settings, visibleLights);
            bool mainLightCastShadows = false;
            bool additionalLightsCastShadows = false;

            if (cameraData.maxShadowDistance > 0.0f)
            {
                mainLightCastShadows = (mainLightIndex != -1 && visibleLights[mainLightIndex].light != null &&
                    visibleLights[mainLightIndex].light.shadows != LightShadows.None);

                // If additional lights are shaded per-pixel they cannot cast shadows
                if (settings.additionalLightsRenderingMode == LightRenderingMode.PerPixel)
                {
                    for (int i = 0; i < visibleLights.Length; ++i)
                    {
                        if (i == mainLightIndex)
                            continue;

                        Light light = visibleLights[i].light;

                        // UniversalRP doesn't support additional directional lights or point light shadows yet
                        if (visibleLights[i].lightType == LightType.Spot && light != null && light.shadows != LightShadows.None)
                        {
                            additionalLightsCastShadows = true;
                            break;
                        }
                    }
                }
            }

            renderingData.cullResults = cullResults;
            renderingData.cameraData = cameraData;
            InitializeLightData(settings, visibleLights, mainLightIndex, out renderingData.lightData);
            InitializeShadowData(settings, visibleLights, mainLightCastShadows, additionalLightsCastShadows && !renderingData.lightData.shadeAdditionalLightsPerVertex, out renderingData.shadowData);
            InitializePostProcessingData(settings, out renderingData.postProcessingData);
            renderingData.supportsDynamicBatching = settings.supportsDynamicBatching;
            renderingData.perObjectData = GetPerObjectLightFlags(renderingData.lightData.additionalLightsCount);

            renderingData.resolveFinalTarget = requiresBlitToBackbuffer;
            renderingData.postProcessingEnabled = anyPostProcessingEnabled;

#pragma warning disable // avoid warning because killAlphaInFinalBlit has attribute Obsolete
            renderingData.killAlphaInFinalBlit = false;
#pragma warning restore
        }

        /// <summary>
        /// Fills shadow settings (cascades, resolutions, per-light biases) from the asset
        /// and each light's optional <see cref="UniversalAdditionalLightData"/> override.
        /// </summary>
        static void InitializeShadowData(UniversalRenderPipelineAsset settings, NativeArray<VisibleLight> visibleLights, bool mainLightCastShadows, bool additionalLightsCastShadows, out ShadowData shadowData)
        {
            m_ShadowBiasData.Clear();

            for (int i = 0; i < visibleLights.Length; ++i)
            {
                Light light = visibleLights[i].light;
                UniversalAdditionalLightData data = null;
                if (light != null)
                {
#if UNITY_2019_3_OR_NEWER
                    light.gameObject.TryGetComponent(out data);
#else
                    data = light.gameObject.GetComponent<UniversalAdditionalLightData>();
#endif
                }

                if (data && !data.usePipelineSettings)
                    m_ShadowBiasData.Add(new Vector4(light.shadowBias, light.shadowNormalBias, 0.0f, 0.0f));
                else
                    m_ShadowBiasData.Add(new Vector4(settings.shadowDepthBias, settings.shadowNormalBias, 0.0f, 0.0f));
            }

            shadowData.bias = m_ShadowBiasData;

            shadowData.supportsMainLightShadows = SystemInfo.supportsShadows && settings.supportsMainLightShadows && mainLightCastShadows;

            // We no longer use screen space shadows in URP.
            // This change allows us to have particles & transparent objects receive shadows.
            shadowData.requiresScreenSpaceShadowResolve = false;

            int shadowCascadesCount;
            switch (settings.shadowCascadeOption)
            {
                case ShadowCascadesOption.FourCascades:
                    shadowCascadesCount = 4;
                    break;

                case ShadowCascadesOption.TwoCascades:
                    shadowCascadesCount = 2;
                    break;

                default:
                    shadowCascadesCount = 1;
                    break;
            }

            shadowData.mainLightShadowCascadesCount = shadowCascadesCount;
            shadowData.mainLightShadowmapWidth = settings.mainLightShadowmapResolution;
            shadowData.mainLightShadowmapHeight = settings.mainLightShadowmapResolution;

            switch (shadowData.mainLightShadowCascadesCount)
            {
                case 1:
                    shadowData.mainLightShadowCascadesSplit = new Vector3(1.0f, 0.0f, 0.0f);
                    break;

                case 2:
                    shadowData.mainLightShadowCascadesSplit = new Vector3(settings.cascade2Split, 1.0f, 0.0f);
                    break;

                default:
                    shadowData.mainLightShadowCascadesSplit = settings.cascade4Split;
                    break;
            }

            shadowData.supportsAdditionalLightShadows = SystemInfo.supportsShadows && settings.supportsAdditionalLightShadows && additionalLightsCastShadows;
            shadowData.additionalLightsShadowmapWidth = shadowData.additionalLightsShadowmapHeight = settings.additionalLightsShadowmapResolution;
            shadowData.supportsSoftShadows = settings.supportsSoftShadows &&
                (shadowData.supportsMainLightShadows || shadowData.supportsAdditionalLightShadows);
            shadowData.shadowmapDepthBufferBits = 16;
        }

        /// <summary>
        /// Color grading is only performed in HDR; otherwise fall back to LDR grading.
        /// </summary>
        static void InitializePostProcessingData(UniversalRenderPipelineAsset settings, out PostProcessingData postProcessingData)
        {
            postProcessingData.gradingMode = settings.supportsHDR
                ? settings.colorGradingMode
                : ColorGradingMode.LowDynamicRange;

            postProcessingData.lutSize = settings.colorGradingLutSize;
        }

        /// <summary>
        /// Computes per-frame light counts and the additional-light shading mode.
        /// </summary>
        static void InitializeLightData(UniversalRenderPipelineAsset settings, NativeArray<VisibleLight> visibleLights, int mainLightIndex, out LightData lightData)
        {
            int maxPerObjectAdditionalLights = UniversalRenderPipeline.maxPerObjectLights;
            int maxVisibleAdditionalLights = UniversalRenderPipeline.maxVisibleAdditionalLights;

            lightData.mainLightIndex = mainLightIndex;

            if (settings.additionalLightsRenderingMode != LightRenderingMode.Disabled)
            {
                // The main light is excluded from the additional-light count.
                lightData.additionalLightsCount = Math.Min((mainLightIndex != -1) ? visibleLights.Length - 1 : visibleLights.Length, maxVisibleAdditionalLights);
                lightData.maxPerObjectAdditionalLightsCount = Math.Min(settings.maxAdditionalLightsCount, maxPerObjectAdditionalLights);
            }
            else
            {
                lightData.additionalLightsCount = 0;
                lightData.maxPerObjectAdditionalLightsCount = 0;
            }

            lightData.shadeAdditionalLightsPerVertex = settings.additionalLightsRenderingMode == LightRenderingMode.PerVertex;
            lightData.visibleLights = visibleLights;
            lightData.supportsMixedLighting = settings.supportsMixedLighting;
        }

        /// <summary>
        /// Chooses which per-object data the culling system must provide to shaders.
        /// </summary>
        static PerObjectData GetPerObjectLightFlags(int additionalLightsCount)
        {
            var configuration = PerObjectData.ReflectionProbes | PerObjectData.Lightmaps | PerObjectData.LightProbe | PerObjectData.LightData | PerObjectData.OcclusionProbe;

            if (additionalLightsCount > 0)
            {
                configuration |= PerObjectData.LightData;

                // In this case we also need per-object indices (unity_LightIndices)
                if (!RenderingUtils.useStructuredBuffer)
                    configuration |= PerObjectData.LightIndices;
            }

            return configuration;
        }

        // Main Light is always a directional light
        static int GetMainLightIndex(UniversalRenderPipelineAsset settings, NativeArray<VisibleLight> visibleLights)
        {
            int totalVisibleLights = visibleLights.Length;

            if (totalVisibleLights == 0 || settings.mainLightRenderingMode != LightRenderingMode.PerPixel)
                return -1;

            Light sunLight = RenderSettings.sun;
            int brightestDirectionalLightIndex = -1;
            float brightestLightIntensity = 0.0f;
            for (int i = 0; i < totalVisibleLights; ++i)
            {
                VisibleLight currVisibleLight = visibleLights[i];
                Light currLight = currVisibleLight.light;

                // Particle system lights have the light property as null. We sort lights so all particles lights
                // come last. Therefore, if first light is particle light then all lights are particle lights.
                // In this case we either have no main light or already found it.
                if (currLight == null)
                    break;

                if (currLight == sunLight)
                    return i;

                // In case no shadow light is present we will return the brightest directional light
                if (currVisibleLight.lightType == LightType.Directional && currLight.intensity > brightestLightIntensity)
                {
                    brightestLightIntensity = currLight.intensity;
                    brightestDirectionalLightIndex = i;
                }
            }

            return brightestDirectionalLightIndex;
        }

        static void SetupPerFrameShaderConstants()
        {
            // When glossy reflections are OFF in the shader we set a constant color to use as indirect specular
            SphericalHarmonicsL2 ambientSH = RenderSettings.ambientProbe;
            Color linearGlossyEnvColor = new Color(ambientSH[0, 0], ambientSH[1, 0], ambientSH[2, 0]) * RenderSettings.reflectionIntensity;
            Color glossyEnvColor = CoreUtils.ConvertLinearToActiveColorSpace(linearGlossyEnvColor);
            Shader.SetGlobalVector(PerFrameBuffer._GlossyEnvironmentColor, glossyEnvColor);

            // Used when subtractive mode is selected
            Shader.SetGlobalVector(PerFrameBuffer._SubtractiveShadowColor, CoreUtils.ConvertSRGBToActiveColorSpace(RenderSettings.subtractiveShadowColor));
        }

        static void SetupPerCameraShaderConstants(in CameraData cameraData)
        {
            Camera camera = cameraData.camera;

            Rect pixelRect = cameraData.pixelRect;
            float scaledCameraWidth = (float)pixelRect.width * cameraData.renderScale;
            float scaledCameraHeight = (float)pixelRect.height * cameraData.renderScale;
            Shader.SetGlobalVector(PerCameraBuffer._ScaledScreenParams, new Vector4(scaledCameraWidth, scaledCameraHeight, 1.0f + 1.0f / scaledCameraWidth, 1.0f + 1.0f / scaledCameraHeight));
            Shader.SetGlobalVector(PerCameraBuffer._WorldSpaceCameraPos, camera.transform.position);

            float cameraWidth = (float)pixelRect.width;
            float cameraHeight = (float)pixelRect.height;
            Shader.SetGlobalVector(PerCameraBuffer._ScreenParams, new Vector4(cameraWidth, cameraHeight, 1.0f + 1.0f / cameraWidth, 1.0f + 1.0f / cameraHeight));

            Matrix4x4 projMatrix = GL.GetGPUProjectionMatrix(camera.projectionMatrix, false);
            Matrix4x4 viewMatrix = camera.worldToCameraMatrix;
            Matrix4x4 viewProjMatrix = projMatrix * viewMatrix;
            Matrix4x4 invViewProjMatrix = Matrix4x4.Inverse(viewProjMatrix);
            Shader.SetGlobalMatrix(PerCameraBuffer._InvCameraViewProj, invViewProjMatrix);
        }
    }
}