//======= Copyright (c) Stereolabs Corporation, All rights reserved. ===============
using UnityEngine;
using UnityEngine.Rendering;
///
/// Responsible for actually mixing the real and virtual images, and displaying them in a
/// Frame object within the camera rig.
/// First, it displays the image from the ZED on a quad.
/// Then, it inserts the depth and normals into the rendering pipeline.
/// Third, it computes the data for the lights and sends it to the shaders.
/// Finally, it post-processes the image, if post-processing is enabled.
///
[RequireComponent(typeof(Camera))]
public class ZEDRenderingPlane : MonoBehaviour
{
///
/// The rendering mode accepted.
///
enum ZED_RENDERING_MODE
{
FORWARD,
DEFERRED,
LAST
};
///
/// Which side of the camera (left/right) and whether or not this can be overridden by the camera's stereoTargetEye.
///
public enum ZED_CAMERA_SIDE
{
///
/// Feed from the ZED's left camera. Can be overridden by the camera's stereoTargetEye value.
///
LEFT,
///
/// Feed from the ZED's right camera. Can be overridden by the camera's stereoTargetEye value.
///
RIGHT,
///
/// Feed from the ZED's left camera. Won't be overridden.
///
LEFT_FORCE,
///
/// Feed from the ZED's right camera. Won't be overridden.
///
RIGHT_FORCE
}
///
/// The GameObject that displays the final textures.
/// In the ZED_Rig_Mono and ZED_Rig_Stereo prefabs, this is the Frame object that's a child of each camera.
///
[Tooltip("The GameGbject that displays the final textures. " +
"In the ZED_Rig_Mono and ZED_Rig_Stereo prefabs, this is the Frame object that's a child of each camera.")]
public GameObject canvas;
///
/// Which camera on the ZED the image/depth/etc. comes from.
/// If set to LEFT or RIGHT, this may be overridden by the camera's stereoTargetEye.
/// If set to LEFT_FORCE or RIGHT_FORCE, it will not be changed.
///
[Tooltip("Which camera on the ZED the image/depth/etc. comes from.\r\n" +
"If set to LEFT or RIGHT, this may be overridden by the camera's stereoTargetEye.\r\n" +
"If set to LEFT_FORCE or RIGHT_FORCE, it will not be changed.")]
public ZED_CAMERA_SIDE viewSide = ZED_CAMERA_SIDE.LEFT;
///
/// The main material, set to the one on the canvas's MeshRenderer. Used to set the color and depth.
///
public Material matRGB { get; private set; }
///
/// Aspect ratio of the textures. All the textures displayed should be in 16:9.
///
private float aspect = 16.0f / 9.0f;
///
/// The Camera component representing an 'eye' of the ZED. Must be on the same GameObject as this component.
///
private Camera cam;
///
/// The Camera component representing an 'eye' of the ZED. Must be on the same GameObject as this component.
///
public Camera renderingCam
{
get
{
return cam;
}
}
///
/// The ZEDCamera controlled by the ZEDManager (one manager controls one camera).
///
private sl.ZEDCamera zedCamera = null;
///
/// The ZEDManager that controls the ZED camera.
///
private ZEDManager zedManager = null;
///
/// Texture of the real world generated from the ZED. It may be from the left or right 'eye.'
/// Once created, the ZED SDK automatically updates it with each frame/image the ZED captures.
///
private Texture2D textureEye;
public Texture2D TextureEye { get { return textureEye; } }
///
/// Depth generated by the ZEDCamera.
/// Once created, the ZED SDK automatically updates it whenever the ZED captures new frames/images.
///
Texture2D depth;
///
/// Normals generated by the ZEDCamera.
/// Once created, the ZED SDK automatically updates it whenever the ZED captures new frames/images.
///
Texture2D normals;
#if !ZED_HDRP && !ZED_URP
///
/// CommandBuffer to integrate the depth into Unity's forward or deferred pipeline.
///
CommandBuffer[] buffer = new CommandBuffer[(int)ZED_RENDERING_MODE.LAST];
///
/// CommandBuffer to create a mask for the virtual objects in forward and deferred rendering.
///
CommandBuffer[] postProcessBuffer = new CommandBuffer[(int)ZED_RENDERING_MODE.LAST];
#endif
///
/// The material used to integrate the depth in forward mode after the depth texture is created. Mainly used to get the shadows. Not needed for lighting otherwise.
///
public Material forwardMat { get; private set; }
///
/// The material used to integrate the depth in deferred mode. Always used in deferred regardless of lighting/shadows.
///
public Material deferredMat { get; private set; }
///
/// The actual rendering path used.
/// To change this, change the settings in Project Settings -> Graphics within the Unity editor.
///
private RenderingPath actualRenderingPath = RenderingPath.VertexLit;
public RenderingPath ActualRenderingPath
{
get { return actualRenderingPath; }
}
///
/// The MeshFilter component of the canvas object. Used to draw the depth buffer.
///
MeshFilter meshCanvas;
///
/// The resolution at which the depth and normals textures are retrieved from the ZED (lower than the full image resolution).
///
private sl.Resolution resolution = new sl.Resolution(1280, 720);
/***LIGHTS definitions***/
///
/// Point light structure for virtual lights on the real world.
/// Gets sent to the shader via a compute buffer.
///
[System.Serializable]
public struct PointLight
{
///
/// The color, times the intensity.
///
public Vector4 color;
///
/// The range of the light.
///
public float range;
///
/// The position of the light.
///
public Vector3 position;
}
///
/// Maximum number of point lights accepted.
///
const int NUMBER_POINT_LIGHT_MAX = 8;
///
/// Holds a slot for all point lights that should be cast on the real world.
///
[SerializeField]
public PointLight[] pointLights = new PointLight[NUMBER_POINT_LIGHT_MAX];
///
/// The size, or 'stride', of each PointLight in bytes. Needed to construct computeBufferPointLight.
///
const int SIZE_POINT_LIGHT_BYTES = 32;
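// Stride breakdown: Vector4 color (16 bytes) + float range (4 bytes) + Vector3 position (12 bytes) = 32 bytes.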
///
/// Used to pass the pointLights array into matRGB's shader as a buffer.
///
ComputeBuffer computeBufferPointLight;
///
/// Structure of a spotlight sent to the shader.
///
[System.Serializable]
public struct SpotLight
{
///
/// The color, times the intensity.
///
public Vector4 color;
///
/// The position of the light.
///
public Vector3 position;
///
/// The light's normalized direction and angle.
///
public Vector4 direction;
///
/// The parameters for the light's falloff.
///
public Vector4 parameters;
}
///
/// Maximum number of spotlights accepted.
///
const int NUMBER_SPOT_LIGHT_MAX = 8;
///
/// Holds a slot for all spotlights that should be cast on the real world.
///
[SerializeField]
public SpotLight[] spotLights = new SpotLight[NUMBER_SPOT_LIGHT_MAX];
///
/// The size, or 'stride', of each SpotLight in bytes. Needed to construct computeBufferSpotLight.
///
const int SIZE_SPOT_LIGHT_BYTES = 60;
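// Stride breakdown: Vector4 color (16 bytes) + Vector3 position (12 bytes) + Vector4 direction (16 bytes) + Vector4 parameters (16 bytes) = 60 bytes.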
///
/// Maximum number of total lights rendered (point and spot combined).
///
const int NUMBER_LIGHTS_MAX = NUMBER_POINT_LIGHT_MAX / 2 + NUMBER_SPOT_LIGHT_MAX / 2;
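// (8 / 2 + 8 / 2 = 8 point and spot lights combined.)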
///
/// Data from a directional light. [0] is its direction and [1] is its color.
/// Only one directional light is allowed at once.
///
private Vector4[] directionalLightData = new Vector4[2];
///
/// Used to pass the spotLights array into matRGB's shader as a buffer.
///
ComputeBuffer computeBufferSpotLight;
//Forward ID shader caches.
///
/// Property ID of the number of point lights in the ZED_Lighting shader include file.
///
private int numberPointLightsID;
///
/// Property ID of the number of spotlights in the ZED_Lighting shader include file.
///
private int numberSpotLightsID;
///
/// Cached property id for _IsTextured. Use the isTexturedID property instead.
///
private int? _istexturedid;
///
/// Property id for _IsTextured, which is whether the rendered image has a texture overlay.
///
private int isTexturedID
{
get
{
if (_istexturedid == null) _istexturedid = Shader.PropertyToID("_IsTextured");
return (int)_istexturedid;
}
}
///
/// Cached property id for _Mask. Use the maskPropID property instead.
///
private int? _maskpropid;
///
/// Property id for _Mask, which is the RenderTexture property for an overlay texture.
///
private int maskPropID
{
get
{
if (_maskpropid == null) _maskpropid = Shader.PropertyToID("_Mask");
return (int)_maskpropid;
}
}
/*** Post-process definitions***/
///
/// The mask used for post-processing. Filled at runtime and updated each frame.
///
private RenderTexture mask;
///
/// The post process material, used to add noise and change the color.
///
private Material postprocessMaterial;
///
/// Activate/deactivate post-processing. If false, the mask will not be generated.
/// Set by ZEDManager.setRenderingSettings() based on a checkbox in ZEDManager's custom editor.
///
private bool ARpostProcessing = true;
///
/// Used to blur the mask.
///
private Material blurMaterial;
///
/// Used to load a source image's alpha channel into all channels of a destination image.
/// Used during post-processing.
///
private Material blitMaterial;
///
/// Used to convert the stencil buffer of a rendertexture into a regular texture.
///
private Material matStencilToMask;
///
/// Used to compose the virtual mask from different textures.
///
private Material matComposeMask;
///
/// Used to blend the textures from ZEDMeshRenderer, when present.
/// This adds the wireframe effect seen from 3D scanning or plane detection.
///
private Material blender;
///
/// What kind of image the final result will display. Usually set to VIEW_IMAGE.
/// Set this to VIEW_DEPTH or VIEW_NORMALS to see the live depth or normal maps.
///
public sl.VIEW_MODE viewMode = sl.VIEW_MODE.VIEW_IMAGE;
///
/// Which side of the camera we render. Left = 0, Right = 1.
///
private int side
{
get
{
if (viewSide == ZED_CAMERA_SIDE.LEFT || viewSide == ZED_CAMERA_SIDE.LEFT_FORCE) return 0;
else return 1;
}
}
///
/// Default near plane value. Overrides camera's settings on start but will update if camera values change at runtime.
///
private float nearplane = 0.1f;
///
/// Default far plane value. Overrides camera's settings on start but will update if camera values change at runtime.
///
private float farplane = 500.0f;
///
/// The target RenderTexture we'll render to if in AR mode.
///
[HideInInspector]
private RenderTexture renderTextureTarget;
public RenderTexture target
{
get { return renderTextureTarget; }
}
void Awake()
{
//Get the current camera and set the aspect ratio.
zedManager = gameObject.transform.parent.GetComponent<ZEDManager>();
cam = GetComponent<Camera>();
cam.aspect = aspect;
cam.renderingPath = RenderingPath.UsePlayerSettings; //Assigns the camera's rendering path to be consistent with the project's settings.
//Make the canvas allow rendering this camera.
HideFromWrongCameras.RegisterZEDCam(cam); //Makes all objects with a HideFromWrongCameras component hide from this camera, unless set to this specific one.
HideFromWrongCameras hider = canvas.GetComponent<HideFromWrongCameras>();
if (!hider)
{
hider = canvas.AddComponent<HideFromWrongCameras>();
}
hider.SetRenderCamera(cam); //This canvas will allow this camera to render it.
}
///
/// Sets whether post-processing effects are applied.
/// Usually set by ZEDManager based on the selection in its Inspector.
///
///
public void SetPostProcess(bool c)
{
ARpostProcessing = c;
}
///
/// The object that forces the ZED image to be shown at 16:9 aspect ratio, regardless of the target window's resolution.
///
private WindowAspectRatio aspectRatio;
///
/// Changes the scene's global lighting settings to prevent global illumination from causing
/// lighting that doesn't match the real world.
///
private void SetUpGI()
{
//Only do this if "Hide Skybox" is enabled in ZEDManager, which it is by default.
if (zedManager)
{
if (zedManager.greySkybox)
{
RenderSettings.skybox = null;
Color c;
ColorUtility.TryParseHtmlString("#999999", out c);
RenderSettings.ambientLight = c;
DynamicGI.UpdateEnvironment();
}
}
}
///
/// Configures materials/values/settings needed for post-processing and displaying in proper aspect ratio.
///
private void Start()
{
//No environmental lighting by default.
Shader.SetGlobalFloat("_ZEDExposure", -1);
//Load the materials.
matStencilToMask = new Material(Resources.Load("Materials/PostProcessing/Mat_ZED_Stencil2Mask") as Material);
matComposeMask = new Material(Resources.Load("Materials/PostProcessing/Mat_ZED_MaskCompositor") as Material);
//Load and configure the post-process material.
postprocessMaterial = new Material(Resources.Load("Materials/PostProcessing/Mat_ZED_PostProcessing") as Material);
postprocessMaterial.SetFloat("_gamma", 1.0f / (0.87f * 0.9f));
postprocessMaterial.SetFloat("_MinBlack", 15.0f / 255.0f);
postprocessMaterial.SetInt("_NoiseSize", 2);
//Configure the weights for the blur effect.
float[] weights;
float[] offsets;
ZEDPostProcessingTools.ComputeWeights(0.3f, out weights, out offsets);
//Set the blur config in the shader; it should remain constant.
blurMaterial = new Material(Resources.Load("Materials/PostProcessing/Mat_ZED_Blur") as Material);
blurMaterial.SetFloatArray("weights", weights);
blurMaterial.SetFloatArray("offset", offsets);
//blurMaterial.SetTexture("_Mask", mask);
blurMaterial.SetTexture(maskPropID, mask);
//Force Unity into 16:9 mode to match the ZED's output.
#if UNITY_EDITOR
UnityEditor.PlayerSettings.SetAspectRatio(UnityEditor.AspectRatio.Aspect16by9, true);
UnityEditor.PlayerSettings.SetAspectRatio(UnityEditor.AspectRatio.Aspect16by10, false);
UnityEditor.PlayerSettings.SetAspectRatio(UnityEditor.AspectRatio.Aspect4by3, false);
UnityEditor.PlayerSettings.SetAspectRatio(UnityEditor.AspectRatio.Aspect5by4, false);
#endif
CreateRenderTexture();
//Load the blending material used to composite the ZEDMeshRenderer/ZEDPlaneRenderer wireframe.
blender = new Material(Resources.Load("Materials/SpatialMapping/Mat_ZED_PostProcess_Blend") as Material);
SetMeshRenderAvailable(false);
//Set the bounds around the camera, used to detect if a point/spotlight is close enough to be applied.
bounds = new Bounds(transform.position, new Vector3(20, 20, 20));
//IF AR REMOVE
//aspectRatio = new WindowAspectRatio(cam);
#if ZED_LWRP || ZED_HDRP || ZED_URP
RenderPipelineManager.beginFrameRendering += SRPStartFrame;
#endif
}
///
/// Configures numerous settings that can't be set until the ZED is fully initialized.
/// Subscribed to ZEDManager.OnZEDReady in OnEnable().
///
void ZEDReady()
{
//This cannot happen, but just in case...
if (zedManager == null)
return;
//Add the fade-in effect for when the camera first becomes visible.
if (zedManager.fadeInOnStart)
gameObject.AddComponent<LoadingFade>();
zedCamera = zedManager.zedCamera;
SetTextures(zedCamera, viewMode);
canvas.SetActive(true);
canvas.transform.SetParent(cam.transform);
ConfigureLightAndShadow(cam.actualRenderingPath);
//Move the plane with the optical centers.
float plane_distance = 0.15f;
Vector4 opticalCenters = zedCamera.ComputeOpticalCenterOffsets(plane_distance);
if (side == 0)
canvas.transform.localPosition = new Vector3(opticalCenters.x, -1.0f * opticalCenters.y, plane_distance);
else if (side == 1)
canvas.transform.localPosition = new Vector3(opticalCenters.z, -1.0f * opticalCenters.w, plane_distance);
//Set the camera's parameters based on the ZED's, and scale the screen based on its distance.
if (zedCamera.IsCameraReady)
{
//cam.projectionMatrix = zedCamera.Projection;
SetProjection(nearplane, farplane);
cam.nearClipPlane = nearplane;
cam.farClipPlane = farplane;
//mainCamera.nearClipPlane = 0.1f;
//mainCamera.farClipPlane = 500.0f;
scale(canvas.gameObject, GetFOVYFromProjectionMatrix(cam.projectionMatrix));
cam.fieldOfView = zedCamera.VerticalFieldOfView * Mathf.Rad2Deg;
}
else //Just scale the screen using the camera's field of view, converted to radians as scale() expects.
{
scale(canvas.gameObject, cam.fieldOfView * Mathf.Deg2Rad);
}
}
///
/// Hides the canvas. Called when the ZED is disconnected via the ZEDManager.OnZEDDisconnected event.
///
void ZEDDisconnected()
{
canvas.SetActive(false);
}
///
/// Fixes GI, enables the canvas and subscribes to events from the ZED.
///
private void OnEnable()
{
SetUpGI();
meshCanvas = gameObject.transform.GetChild(0).GetComponent<MeshFilter>();
canvas.SetActive(false);
//Iterate up the hierarchy (up to five levels) until we find the ZEDManager parent...
Transform ObjParent = gameObject.transform;
int tries = 0;
while (zedManager == null && tries < 5)
{
if (ObjParent != null)
zedManager = ObjParent.GetComponent<ZEDManager>();
if (zedManager == null && ObjParent != null)
ObjParent = ObjParent.parent;
tries++;
}
if (zedManager != null)
{
zedManager.OnZEDReady += ZEDReady;
zedManager.OnZEDDisconnected += ZEDDisconnected;
}
#if ZED_HDRP
//Check which material the ZEDManager's SRP Lighting Type wants us to use, then apply it.
Renderer rend = canvas.GetComponent<Renderer>();
Material basemattoapply;
bool changemat = zedManager.GetChosenSRPMaterial(out basemattoapply);
if (changemat) //A specific material has been specified. Apply that one.
{
matRGB = new Material(basemattoapply); //We make a new instance so we can apply different textures to the left/right eye, and different cameras.
}
else //We'll leave whatever material is already on the canvas.
{
matRGB = new Material(rend.material);
}
rend.material = matRGB;
#endif
}
///
/// Disables the canvas and unsubscribes from events from the ZED.
///
private void OnDisable()
{
if (zedManager != null)
{
zedManager.OnZEDReady -= ZEDReady;
zedManager.OnZEDDisconnected -= ZEDDisconnected;
}
canvas.SetActive(false);
}
///
/// Invisible object used to force Unity to render a shadow map.
///
GameObject forceShadowObject = null;
///
/// Configures the canvas to receive light and shadows.
///
/// The current rendering path used
private void ConfigureLightAndShadow(RenderingPath renderingPath)
{
RenderingPath current = actualRenderingPath;
actualRenderingPath = renderingPath;
if (renderingPath == RenderingPath.Forward)
{
canvas.SetActive(true);
SetForward();
}
else if (renderingPath == RenderingPath.DeferredShading)
{
SetDeferred();
}
else //We're using an unknown rendering path. Log an error.
{
actualRenderingPath = current;
Debug.LogError(" [ ZED Plugin ] : The rendering path " + cam.actualRenderingPath.ToString() + " is not compatible with the ZED");
}
}
#if !ZED_HDRP && !ZED_URP
///
/// Clear the depth buffer used.
/// Called when configuring this script for the given rendering path (forward or deferred).
///
private void ClearDepthBuffers()
{
if (buffer[(int)ZED_RENDERING_MODE.FORWARD] != null)
{
buffer[(int)ZED_RENDERING_MODE.FORWARD].Dispose();
buffer[(int)ZED_RENDERING_MODE.FORWARD] = null;
}
if (buffer[(int)ZED_RENDERING_MODE.DEFERRED] != null)
{
buffer[(int)ZED_RENDERING_MODE.DEFERRED].Dispose();
buffer[(int)ZED_RENDERING_MODE.DEFERRED] = null;
}
}
#endif
///
/// Configure the materials and buffer for the forward rendering path.
///
private void SetForward()
{
ghasShadows = false;
blitMaterial = new Material(Resources.Load("Materials/PostProcessing/Mat_ZED_Blit") as Material);
Shader.SetGlobalInt("_HasShadows", 0);
gameObject.transform.GetChild(0).GetComponent<MeshRenderer>().enabled = true;
//Set the canvas's material to the material for forward rendering.
matRGB = canvas.GetComponent<Renderer>().material;
matRGB.SetInt("_isLinear", System.Convert.ToInt32(QualitySettings.activeColorSpace));
forwardMat = new Material(Resources.Load("Materials/Lighting/Mat_ZED_Forward") as Material);
// Configure the invisible object that forces Unity to calculate shadows.
if (forceShadowObject == null)
{
ConfigureForceShadowObject();
}
//Set the textures in the materials to the proper ones.
matRGB.SetTexture("_MainTex", textureEye);
matRGB.SetTexture("_CameraTex", textureEye);
matRGB.SetTexture("_DepthXYZTex", depth);
matRGB.SetTexture("_NormalsTex", normals);
#if ZED_HDRP && !ZED_URP//Need FoV to calculate world space positions accurately.
matRGB.SetFloat("_ZEDHFoVRad", zedCamera.GetCalibrationParameters().leftCam.hFOV * Mathf.Deg2Rad);
matRGB.SetFloat("_ZEDVFoVRad", zedCamera.GetCalibrationParameters().leftCam.vFOV * Mathf.Deg2Rad);
#endif
forwardMat.SetTexture("_MainTex", textureEye);
forwardMat.SetTexture("_DepthXYZTex", depth);
#if !ZED_HDRP && !ZED_URP
//Clear the buffers.
if (buffer[(int)ZED_RENDERING_MODE.FORWARD] != null)
cam.RemoveCommandBuffer(CameraEvent.BeforeDepthTexture, buffer[(int)ZED_RENDERING_MODE.FORWARD]);
if (buffer[(int)ZED_RENDERING_MODE.DEFERRED] != null)
cam.RemoveCommandBuffer(CameraEvent.AfterGBuffer, buffer[(int)ZED_RENDERING_MODE.DEFERRED]);
ClearDepthBuffers();
if (postProcessBuffer[(int)ZED_RENDERING_MODE.DEFERRED] != null)
cam.RemoveCommandBuffer(CameraEvent.AfterFinalPass, postProcessBuffer[(int)ZED_RENDERING_MODE.DEFERRED]);
if (postProcessBuffer[(int)ZED_RENDERING_MODE.DEFERRED] != null)
{
postProcessBuffer[(int)ZED_RENDERING_MODE.DEFERRED].Dispose();
postProcessBuffer[(int)ZED_RENDERING_MODE.DEFERRED] = null;
}
//Set the depth buffer.
buffer[(int)ZED_RENDERING_MODE.FORWARD] = new CommandBuffer();
buffer[(int)ZED_RENDERING_MODE.FORWARD].name = "ZED_DEPTH";
buffer[(int)ZED_RENDERING_MODE.FORWARD].SetRenderTarget(BuiltinRenderTextureType.CurrentActive);
buffer[(int)ZED_RENDERING_MODE.FORWARD].DrawMesh(meshCanvas.mesh, gameObject.transform.GetChild(0).transform.localToWorldMatrix, forwardMat);
#endif
if (mask == null || !mask.IsCreated())
{
mask = new RenderTexture(Screen.width, Screen.height, 0, RenderTextureFormat.R8);
}
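//The mask is a single-channel (R8), screen-sized RenderTexture marking where virtual objects were drawn; post-processing uses it to blend and blur the edges between real and virtual.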
//Set up the post-processing material.
postprocessMaterial.SetTexture("ZEDMaskPostProcess", mask);
postprocessMaterial.SetTexture("ZEDTex", textureEye);
#if !ZED_HDRP && !ZED_URP
postProcessBuffer[(int)ZED_RENDERING_MODE.FORWARD] = new CommandBuffer();
postProcessBuffer[(int)ZED_RENDERING_MODE.FORWARD].name = "ZED_FORWARD_POSTPROCESS";
postProcessBuffer[(int)ZED_RENDERING_MODE.FORWARD].Blit(BuiltinRenderTextureType.CameraTarget, mask, blitMaterial, 0);
postProcessBuffer[(int)ZED_RENDERING_MODE.FORWARD].SetGlobalTexture("_ZEDMaskVirtual", mask);
cam.RemoveCommandBuffer(CameraEvent.AfterForwardAlpha, postProcessBuffer[(int)ZED_RENDERING_MODE.FORWARD]);
cam.AddCommandBuffer(CameraEvent.AfterForwardAlpha, postProcessBuffer[(int)ZED_RENDERING_MODE.FORWARD]);
#endif
//Configure the light containers.
if (computeBufferPointLight == null)
{
computeBufferPointLight = new ComputeBuffer(NUMBER_POINT_LIGHT_MAX, SIZE_POINT_LIGHT_BYTES);
computeBufferPointLight.SetData(pointLights);
matRGB.SetBuffer("pointLights", computeBufferPointLight);
}
if (computeBufferSpotLight == null)
{
computeBufferSpotLight = new ComputeBuffer(NUMBER_SPOT_LIGHT_MAX, SIZE_SPOT_LIGHT_BYTES);
computeBufferSpotLight.SetData(spotLights);
matRGB.SetBuffer("spotLights", computeBufferSpotLight);
}
//Register the property IDs to improve performance. (Setting properties by string is slower)
numberPointLightsID = Shader.PropertyToID("numberPointLights");
numberSpotLightsID = Shader.PropertyToID("numberSpotLights");
}
///
/// Configure the materials and buffer for the deferred rendering path.
///
private void SetDeferred()
{
//Disable MSAA as it's not supported with deferred.
#if UNITY_5_6_OR_NEWER
cam.allowMSAA = false;
#endif
ghasShadows = false;
deferredMat = new Material(Resources.Load("Materials/Lighting/Mat_ZED_Deferred") as Material);
blitMaterial = new Material(Resources.Load("Materials/PostProcessing/Mat_ZED_Blit") as Material);
//Sets the custom shader for the deferred pipeline.
GraphicsSettings.SetCustomShader(BuiltinShaderType.DeferredShading, (Resources.Load("Materials/Lighting/Mat_ZED_Deferred_Lighting") as Material).shader);
deferredMat.SetMatrix("_Model", canvas.transform.localToWorldMatrix.transpose);
deferredMat.SetMatrix("_Projection", cam.projectionMatrix);
deferredMat.SetTexture("_MainTex", textureEye);
deferredMat.SetTexture("_DepthXYZTex", depth);
deferredMat.SetTexture("_NormalsTex", normals);
//Clear the buffers.
#if !ZED_HDRP && !ZED_URP
if (buffer[(int)ZED_RENDERING_MODE.FORWARD] != null)
cam.RemoveCommandBuffer(CameraEvent.BeforeDepthTexture, buffer[(int)ZED_RENDERING_MODE.FORWARD]);
if (buffer[(int)ZED_RENDERING_MODE.DEFERRED] != null)
cam.RemoveCommandBuffer(CameraEvent.AfterGBuffer, buffer[(int)ZED_RENDERING_MODE.DEFERRED]);
if (postProcessBuffer[(int)ZED_RENDERING_MODE.FORWARD] != null)
cam.RemoveCommandBuffer(CameraEvent.AfterForwardAlpha, postProcessBuffer[(int)ZED_RENDERING_MODE.FORWARD]);
if (postProcessBuffer[(int)ZED_RENDERING_MODE.FORWARD] != null)
{
postProcessBuffer[(int)ZED_RENDERING_MODE.FORWARD].Dispose();
postProcessBuffer[(int)ZED_RENDERING_MODE.FORWARD] = null;
}
ClearDepthBuffers();
//Set the depth buffer. The render targets differ depending on whether the camera allows HDR: with HDR enabled, the fourth target is the camera target rather than GBuffer3.
buffer[(int)ZED_RENDERING_MODE.DEFERRED] = new CommandBuffer();
buffer[(int)ZED_RENDERING_MODE.DEFERRED].name = "ZED_DEPTH";
if (cam.allowHDR)
{
RenderTargetIdentifier[] mrt = { BuiltinRenderTextureType.GBuffer0, BuiltinRenderTextureType.GBuffer1, BuiltinRenderTextureType.GBuffer2, BuiltinRenderTextureType.CameraTarget };
buffer[(int)ZED_RENDERING_MODE.DEFERRED].SetRenderTarget(mrt, BuiltinRenderTextureType.CameraTarget);
}
else
{
RenderTargetIdentifier[] mrt = { BuiltinRenderTextureType.GBuffer0, BuiltinRenderTextureType.GBuffer1, BuiltinRenderTextureType.GBuffer2, BuiltinRenderTextureType.GBuffer3 };
buffer[(int)ZED_RENDERING_MODE.DEFERRED].SetRenderTarget(mrt, BuiltinRenderTextureType.CameraTarget);
}
buffer[(int)ZED_RENDERING_MODE.DEFERRED].DrawMesh(meshCanvas.mesh, gameObject.transform.GetChild(0).transform.localToWorldMatrix, deferredMat);
if (mask == null || !mask.IsCreated())
{
mask = new RenderTexture(Screen.width, Screen.height, 0, RenderTextureFormat.R8);
}
//Set the post process buffer
postProcessBuffer[(int)ZED_RENDERING_MODE.DEFERRED] = new CommandBuffer();
postProcessBuffer[(int)ZED_RENDERING_MODE.DEFERRED].name = "ZED_FORWARD_POSTPROCESS";
postProcessBuffer[(int)ZED_RENDERING_MODE.DEFERRED].Blit(BuiltinRenderTextureType.GBuffer0, mask, blitMaterial, 0);
postProcessBuffer[(int)ZED_RENDERING_MODE.DEFERRED].SetGlobalTexture("_ZEDMaskVirtual", mask);
postprocessMaterial.SetTexture("ZEDMaskPostProcess", mask);
postprocessMaterial.SetTexture("ZEDTex", textureEye);
cam.AddCommandBuffer(CameraEvent.AfterGBuffer, buffer[(int)ZED_RENDERING_MODE.DEFERRED]);
cam.AddCommandBuffer(CameraEvent.AfterFinalPass, postProcessBuffer[(int)ZED_RENDERING_MODE.DEFERRED]);
#endif
//Configure the invisible object that forces Unity to calculate shadows.
if (forceShadowObject == null)
{
ConfigureForceShadowObject();
}
transform.GetChild(0).GetComponent<MeshRenderer>().enabled = false;
}
///
/// Sets up the invisible shadow GameObject that forces Unity to draw shadows.
///
private void ConfigureForceShadowObject()
{
forceShadowObject = GameObject.CreatePrimitive(PrimitiveType.Quad);
forceShadowObject.name = "ZED_FORCE_SHADOW";
forceShadowObject.transform.parent = transform;
forceShadowObject.transform.localPosition = new Vector3(0, 0, cam.nearClipPlane);
forceShadowObject.GetComponent<MeshRenderer>().sharedMaterial = Resources.Load("Materials/Lighting/Mat_ZED_Hide") as Material;
Destroy(forceShadowObject.GetComponent<MeshCollider>());
forceShadowObject.hideFlags = HideFlags.HideInHierarchy;
}
///
/// The bounds around the camera that filter out point/spotlights that are too far away to be rendered.
///
private Bounds bounds;
///
/// Sets the camera's local pos/rot to origin/identity and sets up the RenderTexture if in stereo mode.
/// This RenderTexture is then displayed in hidden planes handled by ZEDMixedRealityPlugin where the final
/// output to the HMD is rendered.
///
private void CreateRenderTexture()
{
transform.localRotation = Quaternion.identity;
transform.localPosition = new Vector3(0, 0, 0);
if (cam.stereoTargetEye != StereoTargetEyeMask.None && zedManager.IsStereoRig == true)
{
if (zedCamera != null && zedCamera.IsCameraReady)
{
renderTextureTarget = new RenderTexture(zedCamera.ImageWidth, zedCamera.ImageHeight, 24, RenderTextureFormat.ARGB32);
cam.targetTexture = renderTextureTarget;
}
else if (sl.ZEDCamera.CheckPlugin())
{
renderTextureTarget = new RenderTexture(Screen.width, Screen.height, 24, RenderTextureFormat.ARGB32);
cam.targetTexture = renderTextureTarget;
}
}
}
///
/// Creates and sets the textures from the ZED, including image, depth and normals as needed.
/// Once created, the ZED SDK updates the textures automatically when the ZED sends new frames, so they don't need to be updated here.
///
///
private void SetTextures(sl.ZEDCamera zedCamera, sl.VIEW_MODE view_mode)
{
float baseline = zedCamera.Baseline;
canvas.transform.localRotation = Quaternion.identity;
canvas.transform.localPosition = new Vector3(0, 0, 0);
if (zedManager.IsStereoRig == true && cam.stereoTargetEye != StereoTargetEyeMask.None)
{
if (zedCamera != null && zedCamera.IsCameraReady)
{
renderTextureTarget = new RenderTexture(zedCamera.ImageWidth, zedCamera.ImageHeight, 24, RenderTextureFormat.ARGB32);
cam.targetTexture = renderTextureTarget;
}
//Set the camera to match the target stereo eye, unless forced otherwise.
switch (cam.stereoTargetEye)
{
case StereoTargetEyeMask.Left:
if (viewSide == ZED_CAMERA_SIDE.RIGHT) viewSide = ZED_CAMERA_SIDE.LEFT;
break;
case StereoTargetEyeMask.Right:
if (viewSide == ZED_CAMERA_SIDE.LEFT) viewSide = ZED_CAMERA_SIDE.RIGHT;
break;
default:
break;
}
}
switch (viewSide) //Set up textures from the left camera or right camera, depending.
{
case ZED_CAMERA_SIDE.LEFT:
case ZED_CAMERA_SIDE.LEFT_FORCE:
default:
switch (view_mode) //Which kind of texture we view.
{
case sl.VIEW_MODE.VIEW_IMAGE:
textureEye = zedCamera.CreateTextureImageType(sl.VIEW.LEFT);
break;
case sl.VIEW_MODE.VIEW_DEPTH:
textureEye = zedCamera.CreateTextureImageType(sl.VIEW.DEPTH);
break;
case sl.VIEW_MODE.VIEW_NORMALS:
textureEye = zedCamera.CreateTextureImageType(sl.VIEW.NORMALS);
break;
}
normals = zedCamera.CreateTextureMeasureType(sl.MEASURE.NORMALS, resolution);
depth = zedCamera.CreateTextureMeasureType(sl.MEASURE.DEPTH, resolution);
break;
case ZED_CAMERA_SIDE.RIGHT:
case ZED_CAMERA_SIDE.RIGHT_FORCE:
switch (view_mode)//Which kind of texture we view.
{
case sl.VIEW_MODE.VIEW_IMAGE:
textureEye = zedCamera.CreateTextureImageType(sl.VIEW.RIGHT);
break;
case sl.VIEW_MODE.VIEW_DEPTH:
textureEye = zedCamera.CreateTextureImageType(sl.VIEW.DEPTH_RIGHT);
break;
case sl.VIEW_MODE.VIEW_NORMALS:
textureEye = zedCamera.CreateTextureImageType(sl.VIEW.NORMALS_RIGHT);
break;
}
normals = zedCamera.CreateTextureMeasureType(sl.MEASURE.NORMALS_RIGHT, resolution);
depth = zedCamera.CreateTextureMeasureType(sl.MEASURE.DEPTH_RIGHT, resolution);
break;
}
}
///
/// Enables/disables keywords for the material used in the first pass, when in forward rendering.
///
/// New state of the keyword.
/// Keyword's name.
///
public bool ManageKeywordForwardMat(bool enable, string name)
{
if (forwardMat)
{
if (enable)
{
forwardMat.EnableKeyword(name);
}
else
{
forwardMat.DisableKeyword(name);
}
return true;
}
return false;
}
///
/// Enables/disables keywords for the material used in the first pass, when in deferred rendering.
///
/// New state of the keyword.
/// Keyword's name.
///
public bool ManageKeywordDeferredMat(bool enable, string name)
{
if (deferredMat)
{
if (enable)
{
deferredMat.EnableKeyword(name);
}
else
{
deferredMat.DisableKeyword(name);
}
return true;
}
return false;
}
///
/// Enables/disables keywords for the final display material.
///
/// New state of the keyword.
/// Keyword's name.
///
public bool ManageKeywordPipe(bool enable, string name)
{
if (matRGB)
{
if (enable)
{
matRGB.EnableKeyword(name);
}
else
{
matRGB.DisableKeyword(name);
}
return true;
}
return false;
}
//Variables to get information about the lights.
///
/// How many point lights are currently being rendered on the real world by this camera. Excludes ones filtered out by distance.
///
[HideInInspector]
public int numberPointLights;
///
/// How many spotlights are currently being rendered on the real world by this camera. Excludes ones filtered out by distance.
///
[HideInInspector]
public int numberSpotLights;
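///
/// Whether a shadow-casting directional light was active last frame. Cached so the depth command buffer
/// is only added or removed when this state changes (see UpdateLights()).
///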
bool ghasShadows = false;
///
/// Updates lighting information, packages it into ComputeBuffers and sends it to the shaders.
///
void UpdateLights()
{
bool hasShadows = false;
int pointLightIndex = 0;
int spotLightIndex = 0;
bounds.center = transform.position;
foreach (ZEDLight zed_light in ZEDLight.s_lights)
{
Light light = zed_light.cachedLight;
if (light.type == LightType.Directional || Vector3.Distance(bounds.center, light.transform.position) < (light.range + bounds.extents.x))
{
//Deactivate all shadows from point lights and spotlights as they are not currently supported.
if (light.type != LightType.Directional)
{
light.shadows = LightShadows.None;
}
if (zed_light.IsEnabled() && ((pointLightIndex + spotLightIndex) < NUMBER_LIGHTS_MAX || light.type == LightType.Directional))
{
if (light.type == LightType.Point)
{
if (pointLightIndex < NUMBER_POINT_LIGHT_MAX)
{
pointLights[pointLightIndex].color = light.color * light.intensity;
pointLights[pointLightIndex].position = light.gameObject.transform.position;
pointLights[pointLightIndex].range = light.range;
pointLightIndex++;
}
}
else if (light.type == LightType.Spot)
{
if (spotLightIndex < NUMBER_SPOT_LIGHT_MAX)
{
spotLights[spotLightIndex].color = light.color * light.intensity;
spotLights[spotLightIndex].position = light.gameObject.transform.position;
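//Packing: direction.w stores cos(spotAngle / 2); parameters = (spot angle in degrees, intensity, 1 / range, interior cone angle).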
spotLights[spotLightIndex].direction = new Vector4(light.gameObject.transform.forward.normalized.x, light.gameObject.transform.forward.normalized.y, light.gameObject.transform.forward.normalized.z, Mathf.Cos((light.spotAngle / 2.0f) * Mathf.Deg2Rad));
spotLights[spotLightIndex].parameters = new Vector4(light.spotAngle, light.intensity, 1.0f / light.range, zed_light.interiorCone);
spotLightIndex++;
}
}
else if (light.type == LightType.Directional)
{
hasShadows = light.shadows != LightShadows.None && QualitySettings.shadows != ShadowQuality.Disable;
directionalLightData[0] = new Vector4(light.gameObject.transform.forward.normalized.x, light.gameObject.transform.forward.normalized.y, light.gameObject.transform.forward.normalized.z, 0);
directionalLightData[1] = light.color * light.intensity;
// Copy the shadows from the directional light. Otherwise, there are no shadows in transparent mode.
if (light.commandBufferCount == 0)
{
forwardMat.SetInt("_HasShadows", System.Convert.ToInt32(light.shadows != LightShadows.None));
CommandBuffer lightBuffer = new CommandBuffer();
lightBuffer.name = "ZED_Copy_ShadowMap";
lightBuffer.SetGlobalTexture("_DirectionalShadowMap", BuiltinRenderTextureType.CurrentActive);
light.AddCommandBuffer(LightEvent.AfterScreenspaceMask, lightBuffer);
}
}
}
}
}
//Send the new light data to the final display material.
if (computeBufferPointLight != null)
{
computeBufferPointLight.SetData(pointLights);
}
if (computeBufferSpotLight != null)
{
computeBufferSpotLight.SetData(spotLights);
}
numberPointLights = pointLightIndex;
numberSpotLights = spotLightIndex;
if (matRGB != null)
{
//Send the number of point lights/spotlights to the shader.
matRGB.SetInt(numberPointLightsID, pointLightIndex);
matRGB.SetInt(numberSpotLightsID, spotLightIndex);
#if !ZED_HDRP && !ZED_URP
//Add the command buffer to get shadows only if a directional light creates shadows.
if (hasShadows != ghasShadows)
{
ghasShadows = hasShadows;
Shader.SetGlobalInt("_HasShadows", System.Convert.ToInt32(ghasShadows));
cam.RemoveCommandBuffer(CameraEvent.BeforeDepthTexture, buffer[(int)ZED_RENDERING_MODE.FORWARD]);
if (hasShadows)
{
cam.AddCommandBuffer(CameraEvent.BeforeDepthTexture, buffer[(int)ZED_RENDERING_MODE.FORWARD]);
}
}
matRGB.SetVectorArray("ZED_directionalLight", directionalLightData);
#endif
}
}
///
/// Gets the vertical field of view (in radians) from the given projection matrix, to avoid the rounding applied to Camera.fieldOfView.
///
/// Projection matrix from a camera.
///
float GetFOVYFromProjectionMatrix(Matrix4x4 projection)
{
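//For a perspective projection, m[1,1] = 1 / tan(fovY / 2), so fovY = 2 * atan(1 / m[1,1]). The result is in radians.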
return Mathf.Atan(1 / projection[1, 1]) * 2.0f;
}
///
/// Gets the horizontal field of view (in radians) from the given projection matrix.
///
/// Projection matrix from a camera.
///
float GetFOVXFromProjectionMatrix(Matrix4x4 projection)
{
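//Likewise, m[0,0] = 1 / tan(fovX / 2), so fovX = 2 * atan(1 / m[0,0]), in radians.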
return Mathf.Atan(1 / projection[0, 0]) * 2.0f;
}
///
/// Scales the canvas in front of the camera so that it fills the whole screen exactly.
///
/// Canvas object.
/// Camera's vertical field of view, in radians.
private void scale(GameObject screen, float fov)
{
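//The canvas sits 'distance' units in front of the camera; a quad of height 2 * distance * tan(fov / 2) exactly fills the vertical FOV at that distance, and width = height * aspect fills it horizontally (fov in radians).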
float height = Mathf.Tan(0.5f * fov) * Vector3.Distance(screen.transform.localPosition, Vector3.zero) * 2;
screen.transform.localScale = new Vector3((height * aspect), height, 1);
}
private void scaleXY(GameObject screen, float fovH, float fovV)
{
float height = Mathf.Tan(0.5f * fovV) * Vector3.Distance(screen.transform.localPosition, Vector3.zero) * 2;
float width = Mathf.Tan(0.5f * fovH) * Vector3.Distance(screen.transform.localPosition, Vector3.zero) * 2;
screen.transform.localScale = new Vector3(width, height, 1);
}
#if !ZED_HDRP && !ZED_URP
///
/// Clears all command buffers on the camera.
///
public void Clear()
{
if (buffer[(int)ZED_RENDERING_MODE.FORWARD] != null)
cam.RemoveCommandBuffer(CameraEvent.BeforeDepthTexture, buffer[(int)ZED_RENDERING_MODE.FORWARD]);
if (buffer[(int)ZED_RENDERING_MODE.DEFERRED] != null)
cam.RemoveCommandBuffer(CameraEvent.AfterGBuffer, buffer[(int)ZED_RENDERING_MODE.DEFERRED]);
if (postProcessBuffer[(int)ZED_RENDERING_MODE.FORWARD] != null)
cam.RemoveCommandBuffer(CameraEvent.AfterForwardAlpha, postProcessBuffer[(int)ZED_RENDERING_MODE.FORWARD]);
if (postProcessBuffer[(int)ZED_RENDERING_MODE.DEFERRED] != null)
cam.RemoveCommandBuffer(CameraEvent.AfterFinalPass, postProcessBuffer[(int)ZED_RENDERING_MODE.DEFERRED]);
ClearDepthBuffers();
}
#endif
void OnApplicationQuit()
{
if (computeBufferPointLight != null)
{
computeBufferPointLight.Release();
}
if (computeBufferSpotLight != null)
{
computeBufferSpotLight.Release();
}
if (mask != null)
{
mask.Release();
}
#if ZED_HDRP || ZED_URP
RenderPipelineManager.beginFrameRendering -= SRPStartFrame;
#endif
}
///
/// Updates the output size to fit the window at 16:9 and the bounds for light filtering, and calculates the lighting.
///
void Update()
{
if (zedManager == null)
return;
if (aspectRatio != null)
{
aspectRatio.Update();
}
if (actualRenderingPath == RenderingPath.Forward)
{
bounds.center = transform.position;
UpdateLights();
}
if (zedManager.IsZEDReady && (cam.nearClipPlane != nearplane || cam.farClipPlane != farplane))
{
SetProjection(nearplane, farplane); //If the camera's near/far planes changed, update the matrix.
}
#if UNITY_EDITOR
if (actualRenderingPath != RenderingPath.VertexLit && cam.actualRenderingPath != actualRenderingPath)
{
ConfigureLightAndShadow(cam.actualRenderingPath);
}
#endif
}
///
/// Used by the ZEDMeshRenderer/ZEDPlaneRenderer to draw chunks/planes onto the final images.
///
private RenderTexture textureMapping;
///
/// Sets the RenderTexture that gets blended into the final result, if using Plane Detection or Spatial Mapping.
/// ZEDPlaneRenderer and ZEDMeshRenderer call this with the RenderTextures to which they render each frame.
///
///
public void SetTextureOverlayMapping(RenderTexture texture)
{
textureMapping = texture;
blender.SetTexture("_ZEDMeshTex", textureMapping);
}
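///
/// Tells the blending material whether a wireframe overlay texture (from ZEDMeshRenderer/ZEDPlaneRenderer) is available.
/// Sets _IsTextured to 0 when an overlay is present and -1 when it is not.
///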
public void SetMeshRenderAvailable(bool r)
{
int d = -1;
if (r) d = 0;
blender.SetInt("_IsTextured", d);
blender.SetInt(isTexturedID, d);
}
#if ZED_HDRP || ZED_URP
///
/// Blends the wireframe into the image. Used in SRP because OnRenderImage is not called automatically there.
///
private void SRPStartFrame(ScriptableRenderContext context, Camera[] rendcam)
{
foreach(Camera camera in rendcam)
{
if (camera == renderingCam && zedManager.GetSpatialMapping.display)
{
DrawSpatialMappingMeshes(camera);
}
}
}
///
/// Draws every chunk of the spatial mapping wireframe mesh.
///
///
private void DrawSpatialMappingMeshes(Camera drawcam)
{
ZEDSpatialMapping spatialMapping = zedManager.GetSpatialMapping;
if (spatialMapping == null) return;
if(spatialMapping.IsRunning()) // Draw all chunks/submeshes used while spatial mapping is running, before merging.
{
foreach (ZEDSpatialMapping.Chunk chunk in spatialMapping.Chunks.Values)
{
Matrix4x4 canvastrs = Matrix4x4.TRS(chunk.o.transform.position, chunk.o.transform.rotation, chunk.o.transform.localScale);
Graphics.DrawMesh(chunk.mesh, canvastrs, chunk.o.GetComponent<MeshRenderer>().material, gameObject.layer, drawcam, 0, null, false, false);
}
}
else if (!spatialMapping.IsRunning()) // Draw final chunks, after merging.
foreach (ZEDSpatialMapping.Chunk chunk in spatialMapping.ChunkList)
{
Matrix4x4 canvastrs = Matrix4x4.TRS(chunk.o.transform.position, chunk.o.transform.rotation, chunk.o.transform.localScale);
Graphics.DrawMesh(chunk.mesh, canvastrs, chunk.o.GetComponent<MeshRenderer>().material, gameObject.layer, drawcam, 0, null, false, false);
}
}
#else
///
/// Where the post-processing occurs.
/// Called by Unity whenever the attached Camera renders an image.
///
///
///
private void OnRenderImage(RenderTexture source, RenderTexture destination)
{
if (zedManager.GetSpatialMapping.display) //If displaying a mesh from spatial mapping, blend the wireframe into the image.
{
RenderTexture tmpSource = RenderTexture.GetTemporary(source.width, source.height, source.depth, source.format, RenderTextureReadWrite.sRGB);
Graphics.Blit(source, tmpSource);
Graphics.Blit(tmpSource, destination, blender);
RenderTexture.ReleaseTemporary(tmpSource);
}
else
{
if (ARpostProcessing && mask != null && zedCamera.IsCameraReady) //Apply post-processing, if enabled.
{
if (actualRenderingPath == RenderingPath.DeferredShading)
{
RenderTexture bluredMask = RenderTexture.GetTemporary(mask.width, mask.height, mask.depth, mask.format);
RenderTexture buffer = RenderTexture.GetTemporary(source.width, source.height, 24);
Graphics.SetRenderTarget(buffer);
GL.Clear(false, true, new Color(0, 0, 0, 0)); // clear the full RT
//To keep the stencil in post-processing, since Graphics.Blit normally clears it.
Graphics.SetRenderTarget(buffer.colorBuffer, source.depthBuffer);
Graphics.Blit(source, matStencilToMask);
//Compose the second mask retrieved in the forward pass. The shader should set the stencil to 148.
Graphics.Blit(mask, bluredMask);
//matComposeMask.SetTexture("_Mask", bluredMask);
matComposeMask.SetTexture(maskPropID, bluredMask);
Graphics.Blit(buffer, mask, matComposeMask);
ApplyPostProcess(source, destination, bluredMask);
RenderTexture.ReleaseTemporary(buffer);
RenderTexture.ReleaseTemporary(bluredMask);
}
else //Forward path.
{
RenderTexture bluredMask = RenderTexture.GetTemporary(mask.width, mask.height, mask.depth, mask.format);
ApplyPostProcess(source, destination, bluredMask);
RenderTexture.ReleaseTemporary(bluredMask);
}
}
else //Not using post-processing.
{
Graphics.Blit(source, destination);
}
}
}
#endif
///
/// Apply post-processing effects to the given RenderTexture.
///
/// Source RenderTexture.
/// Destination RenderTexture.
/// Mask used to apply blurring effects.
private void ApplyPostProcess(RenderTexture source, RenderTexture destination, RenderTexture bluredMask)
{
RenderTexture tempDestination = RenderTexture.GetTemporary(source.width, source.height, source.depth, RenderTextureFormat.ARGB32, RenderTextureReadWrite.Default);
Graphics.Blit(source, tempDestination, postprocessMaterial);
ZEDPostProcessingTools.Blur(mask, bluredMask, blurMaterial, 3, 1, 1);
//blurMaterial.SetTexture("_Mask", bluredMask);
blurMaterial.SetTexture(maskPropID, bluredMask);
ZEDPostProcessingTools.Blur(tempDestination, destination, blurMaterial, 2, 1, 1);
mask.SetGlobalShaderProperty("_ZEDMaskVirtual");
RenderTexture.ReleaseTemporary(tempDestination);
}
///
/// Assigns the projection matrix from the ZED to this camera with the specified near/far planes.
///
/// Adjusts the matrix values from a copy rather than reassigning them in ZEDCamera to avoid getting applied
/// to all copies of the camera.
///
///Desired near plane distance.
///Desired far plane distance.
private void SetProjection(float near = 0.1f, float far = 500f)
{
//float near = mainCamera.nearClipPlane;
//float far = mainCamera.farClipPlane;
Matrix4x4 newmat = zedCamera.Projection;
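//Overwrite only the clip-space depth terms for the requested planes: m[2,2] = -(far + near) / (far - near) and m[2,3] = -(2 * far * near) / (far - near). The focal-length terms from the ZED calibration are left unchanged.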
newmat[2, 2] = -(far + near) / (far - near);
newmat[2, 3] = -(2.0f * far * near) / (far - near);
cam.projectionMatrix = newmat;
nearplane = near;
farplane = far;
}
///
/// Forces the ZED's image to be displayed at a 16:9 ratio, regardless of the window's aspect ratio.
/// This is why the image doesn't stretch when the Game window in the editor is set to Free Aspect.
///
public class WindowAspectRatio
{
///
/// Current screen width.
///
private int ScreenSizeX = 0;
///
/// Current screen height.
///
private int ScreenSizeY = 0;
///
/// Camera to set to 16:9.
///
private Camera cam;
///
/// Aspect ratio targeted.
///
private const float TARGET_ASPECT = 16.0f / 9.0f;
public WindowAspectRatio(Camera camera)
{
cam = camera;
RescaleCamera();
CreateCamera();
}
///
/// Create a custom hidden camera to render black bars in the background.
///
///
private GameObject CreateCamera()
{
GameObject o = new GameObject("CameraBlackBackground");
Camera cam = o.AddComponent<Camera>();
cam.backgroundColor = Color.black;
cam.cullingMask = 0;
cam.clearFlags = CameraClearFlags.SolidColor;
cam.depth = -int.MaxValue;
cam.useOcclusionCulling = false;
#if UNITY_5_6_OR_NEWER
cam.allowHDR = false;
cam.allowMSAA = false;
#endif
cam.stereoTargetEye = StereoTargetEyeMask.None;
cam.renderingPath = RenderingPath.Forward;
o.hideFlags = HideFlags.HideInHierarchy;
return o;
}
///
/// Rescales the viewport of the current camera to keep the 16:9 aspect ratio.
/// This is called on start and updated each frame.
///
private void RescaleCamera()
{
//If no change, then return
if (Screen.width == ScreenSizeX && Screen.height == ScreenSizeY) return;
float windowaspect = (float)Screen.width / (float)Screen.height;
float scaleheight = windowaspect / TARGET_ASPECT;
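//scaleheight < 1 means the window is taller (narrower) than 16:9; scaleheight > 1 means it is wider.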
if (scaleheight < 1.0f) //Height is too large. Shrink it, adding letterboxes to the top and bottom.
{
Rect rect = cam.rect;
rect.width = 1.0f;
rect.height = scaleheight;
rect.x = 0;
rect.y = (1.0f - scaleheight) / 2.0f;
//cam.rect = rect;
}
else //Window is wider than 16:9. Reduce the viewport width, adding pillarbox bars to the sides.
{
float scalewidth = 1.0f / scaleheight;
Rect rect = cam.rect;
rect.width = scalewidth;
rect.height = 1.0f;
rect.x = (1.0f - scalewidth) / 2.0f;
rect.y = 0;
cam.rect = rect;
}
ScreenSizeX = Screen.width;
ScreenSizeY = Screen.height;
}
///
/// Calls RescaleCamera(). Called in ZEDRenderingPlane's Update() function.
///
public void Update()
{
RescaleCamera();
}
}
}