using UnityEngine;
using System;
using System.Threading;
using UnityEngine.XR;
using System.Collections;
using System.Collections.Generic;
///
/// The central script of the ZED Unity plugin, and the primary way a developer can interact with the camera.
/// It sets up and closes connection to the ZED, adjusts parameters based on user settings, enables/disables/handles
/// features like tracking, and holds numerous useful properties, methods, and callbacks.
///
///
/// ZEDManager is attached to the root objects in the ZED_Rig_Mono and ZED_Rig_Stereo prefabs.
/// If using ZED_Rig_Stereo, it will set isStereoRig to true, which triggers several behaviors unique to stereo pass-through AR.
///
public class ZEDManager : MonoBehaviour, IZEDManager
{
///
/// Static function to get instance of the ZEDManager with a given camera_ID. See sl.ZED_CAMERA_ID for the available choices.
///
public static object grabLock;
static ZEDManager[] ZEDManagerInstance = null;
/// <summary>
/// Static function to get the instance of ZEDManager registered for a given camera ID.
/// Returns null if no instances have been registered yet, or if the ID falls outside the
/// registered array (previously this threw IndexOutOfRangeException instead of returning null).
/// </summary>
/// <param name="_id">Camera ID to look up. See sl.ZED_CAMERA_ID for the available choices.</param>
/// <returns>The matching ZEDManager, or null if none is registered.</returns>
public static ZEDManager GetInstance(sl.ZED_CAMERA_ID _id)
{
    if (ZEDManagerInstance == null)
        return null;
    int index = (int)_id;
    // Guard against an out-of-range ID rather than letting the array indexer throw.
    if (index < 0 || index >= ZEDManagerInstance.Length)
        return null;
    return ZEDManagerInstance[index];
}
///
/// Static function to get all ZEDManagers that have been properly instantiated.
/// Cameras may not necessarily be connected, if they haven't finished connecting, have disconnected,
/// or if no camera is available.
///
///
/// <summary>
/// Static function to get all ZEDManagers that have been properly instantiated.
/// Cameras may not necessarily be connected, if they haven't finished connecting,
/// have disconnected, or if no camera is available.
/// </summary>
/// <returns>List of all registered ZEDManager instances (possibly empty, never null).</returns>
public static List<ZEDManager> GetInstances()
{
    // Restore the generic type argument: the non-generic `List` does not exist in the BCL,
    // so `List` alone fails to compile. The element type is ZEDManager by construction.
    List<ZEDManager> instances = new List<ZEDManager>();
    for (int i = 0; i < (int)sl.Constant.MAX_CAMERA_PLUGIN; i++)
    {
        ZEDManager instance = GetInstance((sl.ZED_CAMERA_ID)i);
        if (instance != null)
            instances.Add(instance);
    }
    return instances;
}
///
/// For advanced debugging. Set true for the Unity wrapper to log all SDK calls to a new file
/// at C:/ProgramData/stereolabs/SL_Unity_wrapper.txt. This helps find issues that may occur within
/// the protected .dll, but can decrease performance.
///
private bool wrapperVerbose = true;
///
/// Current instance of the ZED Camera, which handles calls to the Unity wrapper .dll.
///
public sl.ZEDCamera zedCamera = null;
/////////////////////////////////////////////////////////////////////////
///////////////////////// Camera Settings ///////////////////////////////
/////////////////////////////////////////////////////////////////////////
///
/// Resolution setting for all images retrieved from the camera. Higher resolution means lower framerate.
/// HD720 is strongly recommended for pass-through AR.
///
///
/// Camera ID
///
[HideInInspector]
public sl.ZED_CAMERA_ID cameraID = sl.ZED_CAMERA_ID.CAMERA_ID_01;
///
/// The accuracy of depth calculations. Higher settings mean more accurate occlusion and lighting but costs performance.
/// Note there's a significant jump in performance cost between QUALITY and ULTRA modes.
///
/*[Tooltip("The accuracy of depth calculations. Higher settings mean more accurate occlusion and lighting but costs performance.")]*/
[HideInInspector]
public sl.DEPTH_MODE depthMode = sl.DEPTH_MODE.PERFORMANCE;
///
/// Input Type in SDK (USB, SVO or Stream)
///
[HideInInspector]
public sl.INPUT_TYPE inputType = sl.INPUT_TYPE.INPUT_TYPE_USB;
///
/// Camera Resolution
///
[HideInInspector]
public sl.RESOLUTION resolution = sl.RESOLUTION.HD720;
///
/// Targeted FPS, based on the resolution. VGA = 100, HD720 = 60, HD1080 = 30, HD2K = 15.
///
[HideInInspector]
public int FPS = 60;
///
/// SVO Input FileName
///
[HideInInspector]
public string svoInputFileName = "";
///
/// Optional opencv calib file
///
public string opencvCalibFile = "";
///
/// SVO loop back option
///
[HideInInspector]
public bool svoLoopBack = true;
///
/// SVO real-time playback mode option
///
[HideInInspector]
public bool svoRealTimeMode = false;
///
/// Current frame being read from the SVO. Doesn't apply when recording.
///
[HideInInspector]
[SerializeField]
private int currentFrame = 0;
///
/// Current frame being read from the SVO. Doesn't apply when recording.
///
/// <summary>
/// Frame index currently being read from the SVO. Doesn't apply when recording.
/// Backed by the serialized <c>currentFrame</c> field.
/// </summary>
public int CurrentFrame
{
    get { return currentFrame; }
    set { currentFrame = value; }
}
///
/// Total number of frames in a loaded SVO.
///
[HideInInspector]
[SerializeField]
private int numberFrameMax = 0;
///
/// Total number of frames in a loaded SVO.
///
/// <summary>
/// Total number of frames in a loaded SVO. Backed by the serialized <c>numberFrameMax</c> field.
/// </summary>
public int NumberFrameMax
{
    get { return numberFrameMax; }
    set { numberFrameMax = value; }
}
[HideInInspector]
[SerializeField]
public bool pauseSVOReading = false;
[HideInInspector]
public bool pauseLiveReading = false;
///
/// Flag raised to request that a new frame be grabbed while playback is paused (SVO only)
///
[HideInInspector]
public bool NeedNewFrameGrab = false;
///
/// Streaming Input IP (v2.8)
///
[HideInInspector]
public string streamInputIP = "127.0.0.1";
///
/// Streaming Input Port (v2.8)
///
[HideInInspector]
public int streamInputPort = 30000;
#if ZED_HDRP
/////////////////////////////////////////////////////////////////////////
///////////////////////// SRP Lighting //////////////////////////////////
/////////////////////////////////////////////////////////////////////////
public enum shaderType
{
Lit,
Unlit,
Greenscreen_Lit,
Greenscreen_Unlit,
DontChange
}
///
///
///
[HideInInspector]
public shaderType srpShaderType = shaderType.Lit;
///
/// How much the ZED image should light itself via emission.
/// Setting to zero is most realistic, but requires you to emulate the real-world lighting conditions within Unity. Higher settings cause the image\
/// to be uniformly lit, but light and shadow effects are less visible.
///
[HideInInspector]
public float selfIllumination = 0.5f;
///
///
///
[HideInInspector]
public bool applyZEDNormals = false;
#endif
/////////////////////////////////////////////////////////////////////////
///////////////////////// Motion Tracking ///////////////////////////////
/////////////////////////////////////////////////////////////////////////
///
/// If enabled, the ZED will move/rotate itself using its own inside-out tracking.
/// If false, the camera tracking will move with the VR HMD if connected and available.
/// Normally, ZEDManager's GameObject will move according to the tracking. But if in AR pass-through mode,
/// then the Camera_eyes object in ZED_Rig_Stereo will move while this object stays still.
///
[HideInInspector]
public bool enableTracking = true;
///
/// Enables the spatial memory. Will detect and correct tracking drift by remembering features and anchors in the environment,
/// but may cause visible jumps when it happens.
///
[HideInInspector]
public bool enableSpatialMemory = true;
///
/// If using Spatial Memory, you can specify a path to an existing .area file to start with some memory already loaded.
/// .area files are created by scanning a scene with ZEDSpatialMappingManager and saving the scan.
///
[HideInInspector]
public string pathSpatialMemory;
///
/// Estimate initial position by detecting the floor.
///
[HideInInspector]
public bool estimateInitialPosition = true;
public bool EstimateInitialPosition => estimateInitialPosition;
///
/// If true, tracking is enabled but doesn't move after initializing.
///
[HideInInspector]
public bool trackingIsStatic = false;
/////////////////////////////////////////////////////////////////////////
///////////////////////// Spatial Mapping ///////////////////////////////
/////////////////////////////////////////////////////////////////////////
///
/// Resolution setting for the scan. A higher resolution creates more submeshes and uses more memory, but is more accurate.
///
[HideInInspector]
public ZEDSpatialMapping.RESOLUTION mappingResolutionPreset = ZEDSpatialMapping.RESOLUTION.MEDIUM;
///
/// Maximum distance geometry can be from the camera to be scanned. Geometry scanned from farther away will be less accurate.
///
[HideInInspector]
public ZEDSpatialMapping.RANGE mappingRangePreset = ZEDSpatialMapping.RANGE.MEDIUM;
///
/// Whether mesh filtering is needed.
///
[HideInInspector]
public bool isMappingFilteringEnable = false;
///
/// Whether surface textures will be scanned and applied. Note that texturing will add further delay to the post-scan finalizing period.
///
[HideInInspector]
public bool isMappingTextured = false;
///
/// Whether to save the mesh .obj and .area files once the scan is finished.
///
[HideInInspector]
public bool saveMeshWhenOver = false;
///
/// Path to save the .obj and .area files.
///
[HideInInspector]
public string meshPath = "Assets/ZEDMesh.obj";
///
/// Filtering setting. More filtering results in fewer faces in the mesh, reducing both file size and accuracy.
///
[HideInInspector]
public sl.FILTER meshFilterParameters;
///
/// Instance of the ZEDSpatialMapping class that handles the actual spatial mapping implementation within Unity.
///
[HideInInspector]
private ZEDSpatialMapping spatialMapping = null;
public ZEDSpatialMapping GetSpatialMapping { get { return spatialMapping; } }
///
/// Whether the spatial mapping is currently scanning.
///
public bool IsMappingRunning { get { return spatialMapping != null ? spatialMapping.IsRunning() : false; } }
///
/// List of the processed submeshes. This list isn't filled until StopSpatialMapping() is called.
///
/// List of the processed submeshes. This list isn't filled until StopSpatialMapping() is called.
/// Restored the generic type argument (bare `List` does not compile); element type matches ZEDSpatialMapping.ChunkList.
public List<ZEDSpatialMapping.Chunk> MappingChunkList { get { return spatialMapping != null ? spatialMapping.ChunkList : null; } }
///
/// Whether the mesh update thread is running.
///
public bool IsMappingUpdateThreadRunning { get { return spatialMapping != null ? spatialMapping.IsUpdateThreadRunning : false; } }
///
/// Whether the spatial mapping was running but has been paused (not stopped) by the user.
///
public bool IsMappingPaused { get { return spatialMapping != null ? spatialMapping.IsPaused : false; } }
///
/// Whether the mesh is in the texturing stage of finalization.
///
public bool IsMappingTexturingRunning { get { return spatialMapping != null ? spatialMapping.IsTexturingRunning : false; } }
///
/// Gets a boolean value indicating whether the spatial mapping display is enabled.
///
public bool IsSpatialMappingDisplay { get { return spatialMapping != null ? spatialMapping.display : false; } }
///
/// Gets a boolean value indicating whether the spatial mapping has chunks
///
public bool SpatialMappingHasChunks { get { return spatialMapping != null ? spatialMapping.Chunks.Count > 0 : false; } }
/////////////////////////////////////////////////////////////////////////
//////////////////////// Object Detection //////////////////////////////
/////////////////////////////////////////////////////////////////////////
///
/// Sync the Object on the image.
///
[HideInInspector]
public bool objectDetectionImageSyncMode = false;
///
/// Whether to track objects across multiple frames using the ZED's position relative to the floor.
/// Requires tracking to be on. It's also recommended to enable Estimate Initial Position to find the floor.
///
[HideInInspector]
public bool objectDetectionTracking = true;
///
/// Whether to calculate 2D masks for each object, showing exactly which pixels within the 2D bounding box are the object.
/// Requires more performance, so do not enable unless needed.
///
[HideInInspector]
public bool objectDetection2DMask = false;
///
/// Choose what detection model to use in the Object detection module
///
[HideInInspector]
public sl.DETECTION_MODEL objectDetectionModel = sl.DETECTION_MODEL.MULTI_CLASS_BOX;
public sl.DETECTION_MODEL ObjectDetectionModel => objectDetectionModel;
///
/// Defines if the body fitting will be applied
///
[HideInInspector]
public bool objectDetectionBodyFitting = true;
///
/// Defines an upper depth range for detections.
///
[HideInInspector]
public float objectDetectionMaxRange = 40.0f;
///
/// Defines the filtering mode applied to raw object detections.
///
[HideInInspector]
public sl.OBJECT_FILTERING_MODE objectDetectionFilteringMode = sl.OBJECT_FILTERING_MODE.NMS3D;
[HideInInspector]
public sl.BODY_FORMAT objectDetectionBodyFormat = sl.BODY_FORMAT.POSE_34;
[HideInInspector]
public sl.BODY_FORMAT bodyFormat = sl.BODY_FORMAT.POSE_34;
///
/// Detection sensitivity. Represents how sure the SDK must be that an object exists to report it. Ex: If the threshold is 80, then only objects
/// where the SDK is 80% sure or greater will appear in the list of detected objects.
///
[HideInInspector]
public int SK_personDetectionConfidenceThreshold = 50;
///
/// Detection sensitivity. Represents how sure the SDK must be that an object exists to report it. Ex: If the threshold is 80, then only objects
/// where the SDK is 80% sure or greater will appear in the list of detected objects.
///
[HideInInspector]
public int OD_personDetectionConfidenceThreshold = 60;
///
/// Detection sensitivity. Represents how sure the SDK must be that an object exists to report it. Ex: If the threshold is 80, then only objects
/// where the SDK is 80% sure or greater will appear in the list of detected objects.
///
[HideInInspector]
public int vehicleDetectionConfidenceThreshold = 60;
///
/// Detection sensitivity. Represents how sure the SDK must be that an object exists to report it. Ex: If the threshold is 80, then only objects
/// where the SDK is 80% sure or greater will appear in the list of detected objects.
///
[HideInInspector]
public int bagDetectionConfidenceThreshold = 60;
///
/// Detection sensitivity. Represents how sure the SDK must be that an object exists to report it. Ex: If the threshold is 80, then only objects
/// where the SDK is 80% sure or greater will appear in the list of detected objects.
///
[HideInInspector]
public int animalDetectionConfidenceThreshold = 60;
///
/// Detection sensitivity. Represents how sure the SDK must be that an object exists to report it. Ex: If the threshold is 80, then only objects
/// where the SDK is 80% sure or greater will appear in the list of detected objects.
///
[HideInInspector]
public int electronicsDetectionConfidenceThreshold = 60;
///
/// Detection sensitivity. Represents how sure the SDK must be that an object exists to report it. Ex: If the threshold is 80, then only objects
/// where the SDK is 80% sure or greater will appear in the list of detected objects.
///
[HideInInspector]
public int fruitVegetableDetectionConfidenceThreshold = 60;
///
/// Detection sensitivity. Represents how sure the SDK must be that an object exists to report it. Ex: If the threshold is 80, then only objects
/// where the SDK is 80% sure or greater will appear in the list of detected objects.
///
[HideInInspector]
public int sportDetectionConfidenceThreshold = 60;
///
/// Whether to detect people during object detection.
///
[HideInInspector]
public bool objectClassPersonFilter = true;
///
/// Whether to detect vehicles during object detection.
///
[HideInInspector]
public bool objectClassVehicleFilter = true;
///
/// Whether to detect bags during object detection.
///
[HideInInspector]
public bool objectClassBagFilter = true;
///
/// Whether to detect animals during object detection.
///
[HideInInspector]
public bool objectClassAnimalFilter = true;
///
/// Whether to detect electronics during object detection.
///
[HideInInspector]
public bool objectClassElectronicsFilter = true;
///
/// Whether to detect fruits and vegetables during object detection.
///
[HideInInspector]
public bool objectClassFruitVegetableFilter = true;
///
/// Whether to detect sport related objects during object detection.
///
[HideInInspector]
public bool objectClassSportFilter = true;
///
/// Whether the object detection module has been activated successfully.
///
private bool objectDetectionRunning = false;
///
/// Whether the object detection module has been activated successfully.
///
public bool IsObjectDetectionRunning { get { return objectDetectionRunning; } }
///
/// Set to true when there is not a fresh frame of detected objects waiting for processing, meaning we can retrieve the next one.
///
private bool requestobjectsframe = true;
///
/// Set to true when a new frame of detected objects has been retrieved in the image acquisition thread, ready for the main thread to process.
///
private bool newobjectsframeready = false;
///
/// Last object detection frame detected by the SDK. This data comes straight from the C++ SDK; see detectionFrame for an abstracted version
/// with many helper functions for use inside Unity.
///
private sl.ObjectsFrameSDK objectsFrameSDK = new sl.ObjectsFrameSDK();
///
/// Last object detection frame detected by the SDK. This data comes straight from the C++ SDK; see GetDetectionFrame for an abstracted version
/// with many helper functions for use inside Unity.
///
public sl.ObjectsFrameSDK GetSDKObjectsFrame { get { return objectsFrameSDK; } }
///
/// Timestamp of the most recent object frame fully processed. This is used to calculate the FPS of the object detection module.
///
private ulong lastObjectFrameTimeStamp = 0;
///
/// Frame rate at which the object detection module is running. Only reports performance; changing this value has no effect on detection.
///
private float objDetectionModuleFPS = 15.0f;
///
/// Last object detection frame detected by the SDK, in the form of a DetectionFrame instance which has many helper functions for use in Unity.
///
private DetectionFrame detectionFrame;
///
/// Last object detection frame detected by the SDK, in the form of a DetectionFrame instance which has many helper functions for use in Unity.
///
public DetectionFrame GetDetectionFrame { get { return detectionFrame; } }
///
/// Delegate for events that take an object detection frame straight from the SDK (not abstracted).
///
public delegate void onNewDetectionTriggerSDKDelegate(sl.ObjectsFrameSDK objFrame);
///
/// Event that's called whenever the Object Detection module detects a new frame.
/// Includes data straight from the C++ SDK. See OnObjectDetection/DetectionFrame for an abstracted version that has many helper functions
/// that makes it easier to use in Unity.
///
public event onNewDetectionTriggerSDKDelegate OnObjectDetection_SDKData;
///
/// Delegate for events that take an object detection frame, in the form of a DetectionFrame object which has helper functions.
///
public delegate void onNewDetectionTriggerDelegate(DetectionFrame objFrame);
///
/// Event that's called whenever the Object Detection module detects a new frame.
/// Supplies data in the form of a DetectionFrame instance, which has many helper functions for use in Unity.
///
public event onNewDetectionTriggerDelegate OnObjectDetection;
private sl.dll_ObjectDetectionRuntimeParameters od_runtime_params = new sl.dll_ObjectDetectionRuntimeParameters();
/////////////////////////////////////////////////////////////////////////
///////////////////////////// Rendering ///////////////////////////////////
/////////////////////////////////////////////////////////////////////////
///
/// Rendering paths available to the ZED with the corresponding Unity rendering path.
///
public enum ZEDRenderingMode
{
FORWARD = RenderingPath.Forward,
DEFERRED = RenderingPath.DeferredShading
};
///
/// When enabled, the real world can occlude (cover up) virtual objects that are behind it.
/// Otherwise, virtual objects will appear in front.
///
[HideInInspector]
public bool depthOcclusion = true;
///
/// Enables post-processing effects on virtual objects that blends them in with the real world.
///
[HideInInspector]
public bool postProcessing = true;
///
/// Field version of CameraBrightness property.
///
[SerializeField]
[HideInInspector]
private int m_cameraBrightness = 100;
/// Brightness of the final real-world image. Default is 100. Lower to darken the environment in a realistic-looking way.
/// This is a rendering setting that doesn't affect the raw input from the camera.
///
/// <summary>
/// Brightness of the final real-world image. Default is 100. Lower to darken the environment
/// in a realistic-looking way. This is a rendering setting that doesn't affect the raw input
/// from the camera. Raises OnCamBrightnessChange when the value actually changes.
/// </summary>
public int CameraBrightness
{
    get { return m_cameraBrightness; }
    set
    {
        // No-op (and no event) when the value is unchanged.
        if (value == m_cameraBrightness)
            return;
        m_cameraBrightness = value;
        OnCamBrightnessChange?.Invoke(m_cameraBrightness);
    }
}
///
/// Whether to enable the new color/gamma curve added to the ZED SDK in v3.0. Exposes more detail in darker regions
/// and removes a slight red bias.
///
[HideInInspector]
[SerializeField]
public bool enableImageEnhancement = true;
/// Field version of MaxDepthRange property.
///
[SerializeField]
private float m_maxDepthRange = 40f;
///
/// Maximum depth at which the camera will display the real world, in meters. Pixels further than this value will be invisible.
///
[HideInInspector]
/// <summary>
/// Maximum depth at which the camera will display the real world, in meters. Pixels further
/// than this value will be invisible. Raises OnMaxDepthChange when the value actually changes.
/// </summary>
[HideInInspector]
public float MaxDepthRange
{
    get { return m_maxDepthRange; }
    set
    {
        // Exact float comparison is intentional here: it only suppresses the event
        // when the identical value is re-assigned.
        if (value == m_maxDepthRange)
            return;
        m_maxDepthRange = value;
        OnMaxDepthChange?.Invoke(m_maxDepthRange);
    }
}
/////////////////////////////////////////////////////////////////////////
///////////////////////// Recording Module //////////////////////////////
/////////////////////////////////////////////////////////////////////////
///
/// SVO Output file name
///
[HideInInspector]
public string svoOutputFileName = "Assets/Recording.svo";
///
/// SVO Compression mode used for recording
///
[HideInInspector]
public sl.SVO_COMPRESSION_MODE svoOutputCompressionMode = sl.SVO_COMPRESSION_MODE.H264_BASED;
///
/// SVO specific bitrate in KBits/s
/// Default : 0 = internal bitrate
///
[HideInInspector]
public int svoOutputBitrate = 0;
///
/// SVO specific FPS
/// Default : 0 = Camera FPS
///
[HideInInspector]
public int svoOutputTargetFPS = 0;
///
/// If input is streaming, then set to direct-dump into SVO file (false) or decoding/re-encoding (true).
/// Recommended to leave at false to save an encoding session.
///
public bool svoOutputTranscodeStreaming = false;
///
/// Indicates if frame must be recorded
///
[HideInInspector]
public bool needRecordFrame = false;
/////////////////////////////////////////////////////////////////////////
///////////////////////// Streaming Module //////////////////////////////
/////////////////////////////////////////////////////////////////////////
///
/// Enable/Disable Streaming module
///
[HideInInspector]
public bool enableStreaming = false;
///
/// Status of streaming request
///
private bool isStreamingEnable = false;
///
/// Codec used for Streaming
///
[HideInInspector]
public sl.STREAMING_CODEC streamingCodec = sl.STREAMING_CODEC.AVCHD_BASED;
///
/// port used for Streaming
///
[HideInInspector]
public int streamingPort = 30000;
///
/// bitrate used for Streaming
///
[HideInInspector]
public int bitrate = 8000;
///
/// gop size used for Streaming
///
[HideInInspector]
public int gopSize = -1;
///
/// Enable/Disable adaptative bitrate
///
[HideInInspector]
public bool adaptativeBitrate = false;
///
/// Chunk size used for Streaming
///
[HideInInspector]
public int chunkSize = 8096;
///
/// Set a specific target for the streaming framerate
///
[HideInInspector]
public int streamingTargetFramerate = 0;
/////////////////////////////////////////////////////////////////////////
///////////////////////// Advanced control /////////////////////////////
/////////////////////////////////////////////////////////////////////////
///
///
/// True to make the ZED image fade from black when the application starts.
///
[HideInInspector]
public bool fadeInOnStart = true;
///
/// True to apply DontDestroyOnLoad() on the ZED rig in Awake(), preserving it between scenes.
///
[HideInInspector]
public bool dontDestroyOnLoad = false;
///
/// "Grey Out Skybox on Start": true to set the background to a neutral gray when the scene starts.
/// Recommended for AR so that lighting on virtual objects better matches the real world.
///
[HideInInspector]
public bool greySkybox = true;
///
/// Field version of confidenceThreshold property.
///
[SerializeField]
[HideInInspector]
private int m_confidenceThreshold = 100;
///
/// How tolerant the ZED SDK is to low confidence values. Lower values filter more pixels.
///
/// <summary>
/// How tolerant the ZED SDK is to low depth-confidence values. Lower values filter more pixels.
/// Stored clamped to [0, 100]; pushed into the SDK runtime parameters while playing, once the
/// camera is ready.
/// </summary>
public int confidenceThreshold
{
    get { return m_confidenceThreshold; }
    set
    {
        if (m_confidenceThreshold == value)
            return;
        m_confidenceThreshold = Mathf.RoundToInt(Mathf.Clamp(value, 0, 100));
        // Only forward to the SDK when a camera session is actually live.
        if (Application.isPlaying && zedReady)
        {
            runtimeParameters.confidenceThreshold = m_confidenceThreshold;
        }
    }
}
[SerializeField]
[HideInInspector]
private int m_textureConfidenceThreshold = 100;
///
/// How tolerant the ZED SDK is to low confidence values. Lower values filter more pixels.
///
/// <summary>
/// How tolerant the ZED SDK is to low texture-confidence values. Lower values filter more pixels.
/// Stored clamped to [0, 100]; pushed into the SDK runtime parameters while playing, once the
/// camera is ready.
/// </summary>
public int textureConfidenceThreshold
{
    get { return m_textureConfidenceThreshold; }
    set
    {
        if (m_textureConfidenceThreshold == value)
            return;
        m_textureConfidenceThreshold = Mathf.RoundToInt(Mathf.Clamp(value, 0, 100));
        // Only forward to the SDK when a camera session is actually live.
        if (Application.isPlaying && zedReady)
        {
            runtimeParameters.textureConfidenceThreshold = m_textureConfidenceThreshold;
        }
    }
}
///
/// Options for enabling the depth measurement map for the right camera. Costs performance if on, even if not used.
///
public enum RightDepthEnabledMode
{
///
/// Right depth measure will be enabled if a ZEDRenderingPlane component set to the right eye is detected as a child of
/// ZEDManager's GameObject, as in the ZED rig prefabs.
///
AUTO,
///
/// Right depth measure is disabled.
///
OFF,
///
/// Right depth measure is enabled.
///
ON
}
///
/// Whether to enable depth measurements from the right camera. Required for depth effects in AR pass-through, but requires performance even if not used.
/// Auto enables it only if a ZEDRenderingPlane component set to the right eye is detected as a child of ZEDManager's GameObject (as in the ZED rig prefabs.)
///
[HideInInspector]
public RightDepthEnabledMode enableRightDepthMeasure = RightDepthEnabledMode.AUTO;
///
/// Delegate for OnCamBrightnessChange, which is used to update shader properties when the brightness setting changes.
///
public delegate void onCamBrightnessChangeDelegate(int newVal);
///
/// Event fired when the camera brightness setting is changed. Used to update shader properties.
///
public event onCamBrightnessChangeDelegate OnCamBrightnessChange;
///
/// Delegate for OnCamBrightnessChange, which is used to update shader properties when the max depth setting changes.
///
public delegate void onMaxDepthChangeDelegate(float newVal);
///
/// Event fired when the max depth setting is changed. Used to update shader properties.
///
public event onMaxDepthChangeDelegate OnMaxDepthChange;
///
/// Whether to show the hidden camera rig used in stereo AR mode to prepare images for HMD output.
///
[SerializeField]
[HideInInspector]
private bool showarrig = false;
///
/// Whether to show the hidden camera rig used in stereo AR mode to prepare images for HMD output.
/// This is rarely needed, but can be useful for understanding how the ZED output works.
///
/// <summary>
/// Whether to show the hidden camera rig used in stereo AR mode to prepare images for HMD output.
/// This is rarely needed, but can be useful for understanding how the ZED output works.
/// Toggling at runtime updates the rig's HideFlags in the hierarchy.
/// </summary>
public bool showARRig
{
    get { return showarrig; }
    set
    {
        bool toggled = showarrig != value;
        // Only touch the hierarchy at runtime, when the value changed and the rig exists.
        if (Application.isPlaying && toggled && zedRigDisplayer != null)
        {
            zedRigDisplayer.hideFlags = value ? HideFlags.None : HideFlags.HideInHierarchy;
        }
        showarrig = value;
    }
}
private float maxdepthrange = 40f;
/// <summary>
/// Maximum depth range in meters, clamped to [0, 40]. Setting it at runtime re-applies the
/// rendering settings.
/// NOTE(review): this overlaps with the MaxDepthRange property above, which fires
/// OnMaxDepthChange instead — confirm which one callers are meant to use.
/// </summary>
public float maxDepthRange
{
    get { return maxdepthrange; }
    set
    {
        maxdepthrange = Mathf.Clamp(value, 0, 40);
        if (Application.isPlaying)
        {
            setRenderingSettings();
        }
    }
}
///
/// If true, and you are using a ZED2 or ZED Mini, IMU fusion uses data from the camera's IMU to improve tracking results.
///
[HideInInspector]
public bool enableIMUFusion = true;
///
/// If true, the ZED SDK will subtly adjust the ZED's calibration during runtime to account for heat and other factors.
/// Reasons to disable this are rare.
///
[HideInInspector]
public bool enableSelfCalibration = true;
/////////////////////////////////////////////////////////////////////////
///////////////////////// Video Settings ////////////////////////////////
/////////////////////////////////////////////////////////////////////////
//Controls for the ZED's video settings (brightness, saturation, exposure, etc.)
///
/// Behavior options for how the ZED's video settings (brightness, saturation, etc.) are applied when the ZED first connects.
///
public enum VideoSettingsInitMode
{
///
/// Camera will be assigned video settings set in ZEDManager's Inspector before running the scene.
///
Custom,
///
/// Camera will load settings last applied to the ZED. May have been from a source outside Unity.
/// This is the default behavior in the ZED SDK and most ZED apps.
///
LoadFromSDK,
///
/// Camera will load default video settings.
///
Default
}
///
/// How the ZED's video settings (brightness, saturation, etc.) are applied when the ZED first connects.
///
public VideoSettingsInitMode videoSettingsInitMode = VideoSettingsInitMode.Custom;
///
/// Brightness setting for the ZED camera itself.
/// Serialized value is applied to the camera on start when videoSettingsInitMode is set to Custom.
///
[SerializeField]
private int videoBrightness = 4;
///
/// Contrast setting for the ZED camera itself.
/// Serialized value is applied to the camera on start when videoSettingsInitMode is set to Custom.
///
[SerializeField]
private int videoContrast = 4;
///
/// Hue setting for the ZED camera itself.
/// Serialized value is applied to the camera on start when videoSettingsInitMode is set to Custom.
///
[SerializeField]
private int videoHue = 0;
///
/// Saturation setting for the ZED camera itself.
/// Serialized value is applied to the camera on start when videoSettingsInitMode is set to Custom.
///
[SerializeField]
private int videoSaturation = 4;
///
/// Auto gain/exposure setting for the ZED camera itself.
/// Serialized value is applied to the camera on start when videoSettingsInitMode is set to Custom.
///
[SerializeField]
private bool videoAutoGainExposure = true;
///
/// Gain setting for the ZED camera itself.
/// Serialized value is applied to the camera on start when videoSettingsInitMode is set to Custom and videoAutoGainExposure is false.
///
[SerializeField]
private int videoGain = 10;
///
/// Exposure setting for the ZED camera itself.
/// Serialized value is applied to the camera on start when videoSettingsInitMode is set to Custom and videoAutoGainExposure is false.
///
[SerializeField]
public int videoExposure = 100;
///
/// Auto White Balance setting for the ZED camera itself.
/// Serialized value is applied to the camera on start when videoSettingsInitMode is set to Custom.
///
[SerializeField]
private bool videoAutoWhiteBalance = true;
///
/// White Balance temperature setting for the ZED camera itself.
/// Serialized value is applied to the camera on start when videoSettingsInitMode is set to Custom and videoAutoWhiteBalance is false.
///
[SerializeField]
private int videoWhiteBalance = 3200;
///
/// Sharpness setting for the ZED camera itself.
/// Serialized value is applied to the camera on start when videoSettingsInitMode is set to Custom.
///
[SerializeField]
private int videoSharpness = 3;
///
/// Gamma setting for the ZED camera itself.
/// Serialized value is applied to the camera on start when videoSettingsInitMode is set to Custom.
///
[SerializeField]
private int videoGamma = 5;
///
/// Whether the LED on the ZED camera is on.
/// Serialized value is applied to the camera on start when videoSettingsInitMode is set to Custom.
///
[SerializeField]
private bool videoLEDStatus = true;
/////////////////////////////////////////////////////////////////////////
///////////////////////// Status Report /////////////////////////////////
/////////////////////////////////////////////////////////////////////////
//Strings used for the Status display in the Inspector.
[Header("Status")]
///
/// The camera model (ZED or ZED-M).
///
/// Inspector label fixed: this field holds the camera model, not the serial number.
[ReadOnly("Camera Model")] [HideInInspector] public string cameraModel = "-";
///
/// The camera serial number.
///
[ReadOnly("Camera S/N")] [HideInInspector] public string cameraSerialNumber = "-";
///
/// The camera firmware version
///
[ReadOnly("Camera Firmware")] [HideInInspector] public string cameraFirmware = "-";
///
/// Version of the installed ZED SDK, for display in the Inspector.
///
[ReadOnly("Version")] [HideInInspector] public string versionZED = "-";
///
/// How many frames per second the engine is rendering, for display in the Inspector.
///
[ReadOnly("Engine FPS")] [HideInInspector] public string engineFPS = "-";
///
/// How many images per second are received from the ZED, for display in the Inspector.
///
[ReadOnly("Camera FPS")] [HideInInspector] public string cameraFPS = "-";
///
/// The connected VR headset, if any, for display in the Inspector.
///
[ReadOnly("HMD Device")] [HideInInspector] public string HMDDevice = "-";
///
/// Whether the ZED's tracking is on, off, or searching (lost position, trying to recover) for display in the Inspector.
///
[ReadOnly("Tracking State")] [HideInInspector] public string trackingState = "-";
///
/// Object detection framerate
///
[ReadOnly("Object Detection FPS")] [HideInInspector] public string objectDetectionFPS = "-";
////////////////////////////
//////// Private ///////////
////////////////////////////
///
/// Initialization parameters used to start the ZED. Holds settings that can't be changed at runtime
/// (resolution, depth mode, .SVO path, etc.).
///
private sl.InitParameters initParameters;
///
/// Runtime parameters used to grab a new image. Settings can change each frame, but are lower level
/// (sensing mode, point cloud, if depth is enabled, etc.).
///
private sl.RuntimeParameters runtimeParameters;
///
/// Enables the ZED SDK's depth stabilizer, which improves depth accuracy and stability. There's rarely a reason to disable this.
///
private bool depthStabilizer = true;
///
/// Indicates if sensors (IMU, ...) are needed/required. For most applications, it is required.
/// Sensors are transmitted through USB2.0 lines. If USB2 is not available (USB3.0 only extension for example), set it to false.
///
private bool sensorsRequired = false;
///
/// Sets the camera in Flip mode (AUTO lets the SDK decide based on orientation).
///
private sl.FLIP_MODE cameraFlipMode = sl.FLIP_MODE.AUTO;
///
/// Whether the camera is currently being tracked using the ZED's inside-out tracking.
///
private bool isZEDTracked = false;
///
/// Whether the ZED's inside-out tracking has been activated.
///
private bool isTrackingEnable = false;
///
/// Whether the camera is tracked in any way (ZED's tracking or a VR headset's tracking).
///
private bool isCameraTracked = false;
///
/// Public accessor for whether the camera is tracked in any way (ZED's tracking or a VR headset's tracking).
///
public bool IsCameraTracked
{
get { return isCameraTracked; }
}
///
/// Whether the camera has a new frame available.
///
private bool isNewFrameGrabbed = false;
///
/// Public accessor for whether the camera has a new frame available.
///
public bool IsNewFrameGrabbed
{
get { return isNewFrameGrabbed; }
}
///
/// Orientation last returned by the ZED's tracking.
///
private Quaternion zedOrientation = Quaternion.identity;
///
/// Position last returned by the ZED's tracking.
///
private Vector3 zedPosition = Vector3.zero;
///
/// If Estimate Initial Position is true and we're in SVO mode with Loop enabled, we'll want to cache our first pose to initialPosition and initialRotation.
/// This flag lets us know if we've done that yet so we can only assign them on the first tracked frame.
///
private bool initialPoseCached = false;
///
/// Position of the camera (zedRigRoot) when the scene starts. Not used in Stereo AR.
///
private Vector3 initialPosition = new Vector3();
///
/// Orientation of the camera (zedRigRoot) when the scene starts. Not used in Stereo AR.
///
private Quaternion initialRotation = Quaternion.identity;
///
/// Sensing mode: STANDARD or FILL. FILL corrects for missing depth values.
/// Almost always better to use FILL, since we need depth without holes for proper occlusion.
///
[SerializeField]
[HideInInspector]
public sl.SENSING_MODE sensingMode = sl.SENSING_MODE.FILL;
///
/// Rotation offset used to retrieve the tracking with a rotational offset.
///
private Quaternion rotationOffset;
///
/// Position offset used to retrieve the tracking with a positional offset.
///
private Vector3 positionOffset;
///
/// Enables pose smoothing during drift correction. Leave it to true.
///
private bool enablePoseSmoothing = true;
//Result of the last grab call, readable by other scripts (e.g. for SVO end-of-file detection).
[HideInInspector]
public sl.ERROR_CODE ZEDGrabError = sl.ERROR_CODE.FAILURE;
#if UNITY_EDITOR
///
/// The engine FPS, updated every frame. Editor-only; used for the Inspector status display.
///
private float fps_engine = 90.0f;
#endif
///
/// Recording state (true while an SVO recording is in progress).
///
private bool isRecording = false;
///////////////////////////////////////
/////////// Static States /////////////
///////////////////////////////////////
///
/// Whether AR mode is activated.
///
private bool isStereoRig = false;
///
/// Whether AR mode is activated. Assigned by ZEDManager.CheckStereoMode() in Awake().
/// Will be true if the ZED_Rig_Stereo prefab (or a similarly-structured prefab) is used.
///
public bool IsStereoRig
{
get { return isStereoRig; }
}
///
/// Checks if the ZED has finished initializing.
///
private bool zedReady = false;
///
/// Checks if the ZED has finished initializing.
///
public bool IsZEDReady
{
get { return zedReady; }
}
///
/// Flag set to true if the camera was connected and then wasn't anymore.
/// Causes ZEDDisconnected() to be called each frame, which attempts to restart it.
///
private bool isDisconnected = false;
///
/// Current state of tracking: On, Off, or Searching (lost tracking, trying to recover). Used by anti-drift.
///
private sl.TRACKING_STATE zedtrackingState = sl.TRACKING_STATE.TRACKING_OFF;
///
/// Current state of tracking: On, Off, or Searching (lost tracking, trying to recover). Used by anti-drift.
///
public sl.TRACKING_STATE ZEDTrackingState
{
get { return zedtrackingState; }
}
///
/// First position registered after the tracking has started (whether via ZED or a VR HMD).
///
public Vector3 OriginPosition { get; private set; }
///
/// First rotation/orientation registered after the tracking has started (whether via ZED or a VR HMD).
///
public Quaternion OriginRotation { get; private set; }
///
/// In AR pass-through mode, whether to compare the ZED's IMU data against the reported position of
/// the VR headset. This helps compensate for drift and should usually be left on.
/// However, in some setups, like when using a custom mount, this can cause tracking errors.
///
/// Read more about the potential errors here: https://support.stereolabs.com/hc/en-us/articles/360026482413
///
public bool setIMUPriorInAR = true;
///
/// If true, the ZED rig will enter 'pass-through' mode if it detects a stereo rig - at least two cameras as children with ZEDRenderingPlane
/// components, each with a different eye) - and a VR headset is connected. If false, it will never enter pass-through mode.
///
public bool allowARPassThrough = true;
///////////////////////////////////////////////////
//Pose-synchronization values shared with the AR rig (ZEDMixedRealityPlugin).
[HideInInspector] public Quaternion gravityRotation = Quaternion.identity;
[HideInInspector] public Vector3 ZEDSyncPosition;
[HideInInspector] public Vector3 HMDSyncPosition;
[HideInInspector] public Quaternion ZEDSyncRotation;
[HideInInspector] public Quaternion HMDSyncRotation;
///
/// Image acquisition thread. Runs ThreadedZEDGrab().
///
private Thread threadGrab = null;
///
/// State of the image acquisition thread. Setting this false causes the grab loop to exit.
///
private bool running = false;
///
/// Initialization thread. Runs OpenZEDInBackground().
///
private Thread threadOpening = null;
///
/// Result of the latest attempt to initialize the ZED. Written by the init thread, read by InitZED().
///
private sl.ERROR_CODE lastInitStatus = sl.ERROR_CODE.ERROR_CODE_LAST;
public sl.ERROR_CODE LastInitStatus { get { return lastInitStatus; } }
///
/// State of the ZED initialization thread. True once OpenZEDInBackground() has started.
///
private bool openingLaunched;
///
/// Wait Handle used to safely tell the init thread to shut down.
///
EventWaitHandle initQuittingHandle;
///
/// When true, the init thread will close early instead of completing all its connection attempts.
/// Set to true when the application is closed before a camera finishes its initialization.
///
private bool forceCloseInit = false;
///
/// Tracking initialization thread. Used as the tracking takes some time to start.
///
private Thread trackerThread = null;
///////////////////////////////////////////
////// Camera and Player Transforms //////
///////////////////////////////////////////
///
/// Transform of the left camera in the ZED rig.
///
private Transform camLeftTransform = null;
///
/// Transform of the right camera in the ZED rig. Only exists in a stereo rig (like ZED_Rig_Stereo).
///
private Transform camRightTransform = null;
///
/// Contains the position of the player's head, which is different from the ZED's position in AR mode.
/// But its position relative to the ZED does not change during use (it's a rigid transform).
/// In ZED_Rig_Mono, this will be the root ZED_Rig_Mono object. In ZED_Rig_Stereo, this is Camera_eyes.
///
private Transform zedRigRoot = null;
///
/// Left camera in the ZED rig. Also the "main" camera if in ZED_Rig_Mono.
///
private Camera cameraLeft;
///
/// Right camera of the ZED rig. Only exists in a stereo rig (like ZED_Rig_Stereo).
///
private Camera cameraRight;
///
/// Gets the center transform, which is the transform moved by the tracker in AR mode.
/// This is the root object in ZED_Rig_Mono, and Camera_eyes in ZED_Rig_Stereo.
///
public Transform GetZedRootTansform() => zedRigRoot;
///
/// Returns the left ZED camera transform. If there is no left camera but there is a right camera,
/// returns the right camera transform instead.
///
///
public Transform GetMainCameraTransform()
{
    // Prefer the left eye; fall back to the right. Uses Unity's overloaded bool
    // conversion so destroyed-but-not-null objects are treated as missing.
    if (camLeftTransform) return camLeftTransform;
    if (camRightTransform) return camRightTransform;
    return null;
}
///
/// Gets the left camera transform in the ZED rig. It's best to use this one as it's available in all configurations.
///
public Transform GetLeftCameraTransform() => camLeftTransform;
///
/// Get the right camera transform in the ZED rig. Only available in the stereo rig (ZED_Rig_Stereo).
///
public Transform GetRightCameraTransform() => camRightTransform;
///
/// Returns the left ZED camera. If there is no left camera but there is a right camera,
/// returns the right camera instead.
///
///
public Camera GetMainCamera()
{
    // Prefer the left eye camera; fall back to the right. Unity's bool operator
    // handles destroyed objects correctly here.
    if (cameraLeft) return cameraLeft;
    if (cameraRight) return cameraRight;
    return null;
}
///
/// Gets the left camera in the ZED rig. Both ZED_Rig_Mono and ZED_Rig_Stereo have a left camera by default.
///
public Camera GetLeftCamera()
{
    // Lazily resolve and cache the Camera component from the left transform.
    // Restored the <Camera> type argument: the parameterless GetComponent() call does not compile.
    if (cameraLeft == null && camLeftTransform != null)
        cameraLeft = camLeftTransform.GetComponent<Camera>();
    return cameraLeft;
}
///
/// Get the right camera in the ZED rig. Only available in the stereo rig (ZED_Rig_Stereo) unless configured otherwise.
///
public Camera GetRightCamera()
{
    // Lazily resolve and cache the Camera component from the right transform.
    // Restored the <Camera> type argument: the parameterless GetComponent() call does not compile.
    if (cameraRight == null && camRightTransform != null)
        cameraRight = camRightTransform.GetComponent<Camera>();
    return cameraRight;
}
#pragma warning disable 414
///
/// Save the foldout options as they were used last time. Serialized so the Inspector
/// remembers which sections were expanded; never read at runtime (hence the 414 suppression).
///
[SerializeField]
[HideInInspector]
private bool advancedPanelOpen = false;
[SerializeField]
[HideInInspector]
private bool spatialMappingFoldoutOpen = false;
[SerializeField]
[HideInInspector]
private bool objectDetectionFoldoutOpen = false;
[SerializeField]
[HideInInspector]
private bool recordingFoldoutOpen = false;
[SerializeField]
[HideInInspector]
private bool streamingOutFoldoutOpen = false;
[SerializeField]
[HideInInspector]
private bool camControlFoldoutOpen = false;
#pragma warning restore 414
/////////////////////////////////////
////// Timestamps //////
/////////////////////////////////////
///
/// Timestamp of the last ZED image grabbed. Textures from this grab may not have updated yet.
///
private ulong cameraTimeStamp = 0;
///
/// Timestamp of the last ZED image grabbed. Textures from this grab may not have updated yet.
///
public ulong CameraTimeStamp
{
get { return cameraTimeStamp; }
}
///
/// Timestamp of the images used to create the current textures.
///
private ulong imageTimeStamp = 0;
///
/// Timestamp of the images used to create the current textures.
///
public ulong ImageTimeStamp
{
get { return imageTimeStamp; }
}
///
/// Whether the grabbing thread should grab a new frame from the ZED SDK.
/// True unless the last grabbed frame hasn't been applied yet, or the ZED isn't initialized.
///
private bool requestNewFrame = false;
///
/// Whether a new frame has been grabbed from the ZED SDK that needs to be updated.
///
private bool newFrameAvailable = false;
/////////////////////////////////////
////// Layers for ZED //////
/////////////////////////////////////
///
/// Layer assigned to the cameras and objects of a (normally hidden) AR camera rig created to handle
/// pass-through AR. This allows the cameras to see nothing but two canvas objects with the final MR images.
/// Delegates to the shared ZEDLayers value so all rigs agree on the layer index.
///
[HideInInspector]
public int arLayer
{
get
{
return ZEDLayers.arlayer;
}
}
//Removed orphaned [SerializeField]/[HideInInspector] attributes: with the field below
//commented out they would attach to the next declaration (a delegate), which is not a
//valid target for field attributes and fails to compile (CS0592).
//private int arlayer = 30;
/////////////////////////////////////
////// ZED specific events //////
/////////////////////////////////////
///
/// Delegate for OnZEDReady.
///
public delegate void OnZEDManagerReady();
///
/// Called when the ZED has finished initializing successfully.
/// Used by many scripts to run startup logic that requires that the ZED is active.
///
public event OnZEDManagerReady OnZEDReady;
///
/// Delegate for OnZEDDisconnected.
///
public delegate void OnZEDManagerDisconnected();
///
/// Event called when ZED was running but became disconnected.
///
public event OnZEDManagerDisconnected OnZEDDisconnected;
///
/// Delegate for new frame grabbed, for external module update.
///
public delegate void OnGrabAction();
///
/// Event called when ZED has grabbed a new frame.
///
public event OnGrabAction OnGrab;
#region CHECK_AR
///
/// Returns true if an XR (VR/AR) display device is present and running.
/// On 2020.1+, XRDevice.isPresent is obsolete, so the active display subsystems are queried instead.
///
private bool hasXRDevice()
{
#if UNITY_2020_1_OR_NEWER
    // Restored the <XRDisplaySubsystem> type argument: 'new List()' without it does not compile.
    var xrDisplaySubsystems = new List<XRDisplaySubsystem>();
    SubsystemManager.GetInstances(xrDisplaySubsystems);
    foreach (var xrDisplay in xrDisplaySubsystems)
    {
        if (xrDisplay.running)
        {
            return true;
        }
    }
    return false;
#else
    return XRDevice.isPresent;
#endif
}
///
/// Checks if this GameObject is a stereo rig. Requires a child object called 'Camera_eyes' and
/// two cameras as children of that object, one with stereoTargetEye set to Left, the other two Right.
/// Regardless, sets references to leftCamera and (if relevant) rightCamera.
///
private void CheckStereoMode()
{
zedRigRoot = gameObject.transform; //The object moved by tracking. By default it's this Transform. May get changed.
bool devicePresent = hasXRDevice(); //May not need.
//Set first left eye
//NOTE(review): generic type arguments appear stripped on the next two lines (presumably
//GetComponentsInChildren<Camera>() and List<Camera>) — verify against the upstream source.
Component[] cams = gameObject.GetComponentsInChildren();
//Camera firstmonocam = null;
List monocams = new List();
foreach (Camera cam in cams)
{
switch (cam.stereoTargetEye)
{
case StereoTargetEyeMask.Left:
if (!cameraLeft) //Keep only the first left-eye camera found.
{
cameraLeft = cam;
camLeftTransform = cam.transform;
}
break;
case StereoTargetEyeMask.Right:
if (!cameraRight) //Keep only the first right-eye camera found.
{
cameraRight = cam;
camRightTransform = cam.transform;
}
break;
case StereoTargetEyeMask.None:
monocams.Add(cam); //Candidate for ZEDRenderingPlane-based assignment below.
break;
case StereoTargetEyeMask.Both:
default:
break;
}
}
//If the left camera or right camera haven't been assigned via stereo target eyes, search the monocams
//based on their ZEDRenderingPlane assignments.
//This won't affect whether the rig is in stereo mode, but allows the cameras to be accessed via GetLeftCamera() and GetRightCamera().
if (cameraLeft == null || cameraRight == null)
{
foreach (Camera cam in monocams)
{
ZEDRenderingPlane rendplane = cam.gameObject.GetComponent();
if (!rendplane) continue;
if (!cameraLeft && (rendplane.viewSide == ZEDRenderingPlane.ZED_CAMERA_SIDE.LEFT || rendplane.viewSide == ZEDRenderingPlane.ZED_CAMERA_SIDE.LEFT_FORCE))
{
cameraLeft = cam;
camLeftTransform = cam.transform;
}
else if (!cameraRight && (rendplane.viewSide == ZEDRenderingPlane.ZED_CAMERA_SIDE.RIGHT || rendplane.viewSide == ZEDRenderingPlane.ZED_CAMERA_SIDE.RIGHT_FORCE))
{
cameraRight = cam;
camRightTransform = cam.transform;
}
}
}
if (camLeftTransform && camRightTransform && cameraLeft.stereoTargetEye == StereoTargetEyeMask.Left) //We found both a left- and right-eye camera.
{
if (camLeftTransform.transform.parent != null)
{
zedRigRoot = camLeftTransform.parent; //Make the camera's parent object (Camera_eyes in the ZED_Rig_Stereo prefab) the new zedRigRoot to be tracked.
}
if (hasXRDevice() && allowARPassThrough)
{
isStereoRig = true;
}
else
{
isStereoRig = false;
//If there's no VR headset, then cameras set to Left and Right won't display in Unity. Set them both to None.
if (cameraLeft) cameraLeft.stereoTargetEye = StereoTargetEyeMask.None;
if (cameraRight) cameraRight.stereoTargetEye = StereoTargetEyeMask.None;
}
}
else //Not all conditions for a stereo rig were met.
{
isStereoRig = false;
if (camLeftTransform)
{
Camera caml = camLeftTransform.gameObject.GetComponent();
cameraLeft = caml;
if (camLeftTransform.transform.parent != null)
zedRigRoot = camLeftTransform.parent; //Track the left camera's parent if one exists.
}
else
{
zedRigRoot = transform; //Mono rig with no left camera: track this object directly.
}
}
}
#endregion
///
/// Sets the target GameObject and all its children to the specified layer.
///
/// Target GameObject.
/// Layer that the GameObject and all children will be set to.
public static void SetLayerRecursively(GameObject go, int layerNumber)
{
    if (go == null) return;
    // Restored the <Transform> type argument: there is no non-generic bool-only overload,
    // so the original call did not compile. Includes 'go' itself and inactive children.
    foreach (Transform trans in go.GetComponentsInChildren<Transform>(true))
    {
        trans.gameObject.layer = layerNumber;
    }
}
///
/// Stops the initialization and grabbing threads.
///
public void Destroy()
{
running = false; //Signals ThreadedZEDGrab() to exit its loop on the next iteration.
//In case the opening thread is still running.
if (threadOpening != null)
{
initQuittingHandle.Reset(); //Park the init thread at its next WaitOne()...
forceCloseInit = true; //...flag it to bail out of its retry loop...
initQuittingHandle.Set(); //...then release it so it can observe the flag and exit.
threadOpening.Join();
threadOpening = null;
}
//Shut down the image grabbing thread.
if (threadGrab != null)
{
threadGrab.Join();
threadGrab = null;
}
if (IsMappingRunning)
StopSpatialMapping();
Thread.Sleep(10); //Brief pause to let the SDK settle before the camera is destroyed by the caller.
}
///
/// Called by Unity when the application is closed.
/// Also called by Reset() to properly start from a 'clean slate.'
///
///
/// Called by Unity when the application is closed.
/// Also called by Reset() to properly start from a 'clean slate.'
/// Unloads the native plugin once the last active camera has closed.
///
private void OnApplicationQuit()
{
    CloseManager();
    //sl.ZEDCamera.UnloadPlugin();
    // Guard against the static instance array never having been created
    // (e.g. if Awake() never ran on any ZEDManager); iterating null would throw.
    if (ZEDManagerInstance == null) return;
    //If this was the last camera to close, make sure all instances are closed.
    bool notlast = false;
    foreach (ZEDManager manager in ZEDManagerInstance)
    {
        if (manager != null && manager.IsZEDReady == true)
        {
            notlast = true;
            break;
        }
    }
    if (notlast == false)
    {
        sl.ZEDCamera.UnloadPlugin();
    }
}
///
/// Shuts down all modules (spatial mapping, object detection, rendering, recording),
/// stops the worker threads, destroys the camera handle and releases this camera's plugin instance.
///
private void CloseManager()
{
if (spatialMapping != null)
spatialMapping.Dispose();
if (IsObjectDetectionRunning)
{
StopObjectDetection();
}
#if !ZED_HDRP && !ZED_URP
ClearRendering(); //Built-in render pipeline only; SRPs clean up elsewhere.
#endif
zedReady = false;
OnCamBrightnessChange -= SetCameraBrightness; //Unsubscribe to avoid leaked handlers on restart.
OnMaxDepthChange -= SetMaxDepthRange;
Destroy(); //Close the grab and initialization threads.
if (zedCamera != null)
{
if (isRecording)
{
zedCamera.DisableRecording(); //Finalize the SVO file before destroying the camera.
}
zedCamera.Destroy();
zedCamera = null;
}
#if UNITY_EDITOR //Prevents building the app otherwise.
//Restore the AR layers that were hidden, if necessary.
if (!showarrig)
{
LayerMask layerNumberBinary = (1 << arLayer); //Convert layer index into binary number.
UnityEditor.Tools.visibleLayers |= (layerNumberBinary);
}
#endif
sl.ZEDCamera.UnloadInstance((int)cameraID);
}
#if !ZED_HDRP && !ZED_URP
///
/// Clears the left (and, in a stereo rig, right) ZEDRenderingPlane so no stale
/// textures remain after the camera closes. Built-in render pipeline only.
///
private void ClearRendering()
{
    // Restored the <ZEDRenderingPlane> type arguments: parameterless GetComponent() does not compile.
    if (camLeftTransform != null)
    {
        ZEDRenderingPlane leftRenderingPlane = camLeftTransform.GetComponent<ZEDRenderingPlane>();
        if (leftRenderingPlane)
        {
            leftRenderingPlane.Clear();
        }
    }
    if (IsStereoRig)
    {
        ZEDRenderingPlane rightRenderingPlane = GetRightCameraTransform().GetComponent<ZEDRenderingPlane>();
        if (rightRenderingPlane) //Null-guard added to mirror the left-eye path above.
        {
            rightRenderingPlane.Clear();
        }
    }
}
#endif
///
/// Sets up starting properties and starts the ZED initialization co-routine.
///
void Awake()
{
// If never initialized, init the array of instances linked to each ZEDManager that could be created.
if (ZEDManagerInstance == null)
{
ZEDManagerInstance = new ZEDManager[(int)sl.Constant.MAX_CAMERA_PLUGIN];
for (int i = 0; i < (int)sl.Constant.MAX_CAMERA_PLUGIN; i++)
ZEDManagerInstance[i] = null;
}
initialPosition = transform.localPosition;
initialRotation = transform.localRotation;
zedReady = false;
ZEDManagerInstance[(int)cameraID] = this; //Register this manager under its camera ID.
zedCamera = new sl.ZEDCamera();
if (dontDestroyOnLoad) DontDestroyOnLoad(transform.root); //If you want the ZED rig not to be destroyed when loading a scene.
//Set first few parameters for initialization. This will get passed to the ZED SDK when initialized.
initParameters = new sl.InitParameters();
initParameters.resolution = resolution;
initParameters.cameraFPS = FPS;
initParameters.cameraDeviceID = (int)cameraID;
initParameters.depthMode = depthMode;
initParameters.depthStabilization = depthStabilizer;
initParameters.sensorsRequired = sensorsRequired;
initParameters.depthMaximumDistance = 40.0f; // 40 meters should be enough for all applications
initParameters.cameraImageFlip = (int)cameraFlipMode;
initParameters.enableImageEnhancement = enableImageEnhancement;
initParameters.cameraDisableSelfCalib = !enableSelfCalibration;
initParameters.optionalOpencvCalibrationFile = opencvCalibFile;
//Check if this rig is a stereo rig. Will set isStereoRig accordingly.
CheckStereoMode();
//Set initialization parameters that may change depending on what was done in CheckStereoMode().
isZEDTracked = enableTracking;
zedPosition = initialPosition;
zedOrientation = initialRotation;
lastInitStatus = sl.ERROR_CODE.ERROR_CODE_LAST;
bool res = zedCamera.CreateCamera((int)cameraID, wrapperVerbose);
if (!res)
{
Debug.LogError("ZEDManager on " + gameObject.name + " couldn't connect to camera: " + cameraID +
". Check if another ZEDManager is already connected.");
this.gameObject.SetActive(false); //Disable the rig so Update() etc. never run without a camera.
return;
}
initParameters.inputType = inputType;
if (inputType == sl.INPUT_TYPE.INPUT_TYPE_USB)
{
//USB input needs no extra parameters.
}
else if (inputType == sl.INPUT_TYPE.INPUT_TYPE_SVO)
{
initParameters.pathSVO = svoInputFileName;
initParameters.svoRealTimeMode = svoRealTimeMode;
}
else if (inputType == sl.INPUT_TYPE.INPUT_TYPE_STREAM)
{
initParameters.ipStream = streamInputIP;
initParameters.portStream = (ushort)streamInputPort;
}
versionZED = "[SDK]: " + sl.ZEDCamera.GetSDKVersion().ToString() + " [Plugin]: " + sl.ZEDCamera.PluginVersion.ToString();
//Behavior specific to AR pass-through mode.
if (isStereoRig)
{
//Creates a hidden camera rig that handles final output to the headset.
GameObject o = CreateZEDRigDisplayer();
if (!showarrig) o.hideFlags = HideFlags.HideInHierarchy;
o.transform.parent = transform;
initParameters.depthMinimumDistance = 0.1f; //Allow depth calculation to very close objects.
//For the Game/output window, mirror the headset view using a custom script that avoids stretching.
CreateMirror();
}
//Determine if we should enable the right depth measurement, which costs performance but is needed for pass-through AR.
switch (enableRightDepthMeasure)
{
case RightDepthEnabledMode.AUTO:
default:
if (isStereoRig) //If so, we've already determined we have both a left and right ZEDRenderingPlane, so skip the lookups.
{
initParameters.enableRightSideMeasure = true;
}
else
{
//NOTE(review): type argument appears stripped below (presumably GetComponentsInChildren<ZEDRenderingPlane>()) — verify against upstream source.
foreach (ZEDRenderingPlane renderplane in GetComponentsInChildren())
{
//If we have any ZEDRenderingPlanes that are looking through the right side, enable the measurements.
if (renderplane.viewSide == ZEDRenderingPlane.ZED_CAMERA_SIDE.RIGHT ||
renderplane.viewSide == ZEDRenderingPlane.ZED_CAMERA_SIDE.RIGHT_FORCE)
{
initParameters.enableRightSideMeasure = true;
break;
}
}
}
break;
case RightDepthEnabledMode.OFF:
initParameters.enableRightSideMeasure = false;
break;
case RightDepthEnabledMode.ON:
initParameters.enableRightSideMeasure = true;
break;
}
//Starts a coroutine that initializes the ZED without freezing the game.
lastInitStatus = sl.ERROR_CODE.ERROR_CODE_LAST;
openingLaunched = false;
StartCoroutine(InitZED());
OnCamBrightnessChange += SetCameraBrightness; //Subscribe event for adjusting brightness setting.
OnMaxDepthChange += SetMaxDepthRange;
//Create Module Object
//Create the spatial mapping module object (even if not used necessarily)
spatialMapping = new ZEDSpatialMapping(transform, this);
}
///
/// Unity Start(). Currently empty; layer adjustment for multi-camera setups is left commented out.
///
void Start()
{
//adjust layers for multiple camera
//setLayersForMultiCamera ();
}
#region INITIALIZATION
//const int MAX_OPENING_TRIES = 10;
private uint numberTriesOpening = 0;/// Counter of tries to open the ZED
///
/// ZED opening function. Should be called in the initialization thread (threadOpening).
///
private void OpenZEDInBackground()
{
    openingLaunched = true;
    do
    {
        // Returns immediately when the handle is signaled; Destroy() resets it and raises
        // forceCloseInit so this thread bails out instead of retrying forever on shutdown.
        initQuittingHandle.WaitOne(0);
        if (forceCloseInit) break;
        lastInitStatus = zedCamera.Init(ref initParameters); //Blocking call into the SDK; retried until success.
        numberTriesOpening++;
    } while (lastInitStatus != sl.ERROR_CODE.SUCCESS);
    //Removed the unused local 'timeout' counter: it was incremented but never read,
    //suggesting a max-retry check that was never wired up.
}
///
/// Initialization coroutine.
///
///
/// Initialization coroutine. Spawns the opening thread, waits for it to succeed, then
/// starts tracking, streaming and the grab thread, and finally raises OnZEDReady.
///
private System.Collections.IEnumerator InitZED()
{
zedReady = false;
if (!openingLaunched)
{
initQuittingHandle = new EventWaitHandle(true, EventResetMode.ManualReset);
threadOpening = new Thread(new ThreadStart(OpenZEDInBackground)); //Assign thread.
threadOpening.Start();
}
//NOTE(review): if the camera never initializes, this waits forever; the opening thread
//retries indefinitely, so there is no failure path out of this coroutine.
while (lastInitStatus != sl.ERROR_CODE.SUCCESS)
{
yield return new WaitForSeconds(0.3f);
}
//ZED has initialized successfully.
if (lastInitStatus == sl.ERROR_CODE.SUCCESS)
{
threadOpening.Join();
//Initialize the tracking thread, AR initial transforms and SVO read/write as needed.
ZEDReady();
//If using tracking, wait until the tracking thread has been initialized.
while (enableTracking && !isTrackingEnable)
{
yield return new WaitForSeconds(0.5f);
}
//Tells all the listeners that the ZED is ready! :)
if (OnZEDReady != null)
{
OnZEDReady();
}
//Make sure the screen is at 16:9 aspect ratio or close. Warn the user otherwise.
float ratio = (float)Screen.width / (float)Screen.height;
float target = 16.0f / 9.0f;
if (Mathf.Abs(ratio - target) > 0.01)
{
Debug.LogWarning(ZEDLogMessage.Error2Str(ZEDLogMessage.ERROR.SCREEN_RESOLUTION));
}
//get informations from camera (S/N, firmware, model...)
cameraModel = zedCamera.GetCameraModel().ToString();
cameraFirmware = zedCamera.GetCameraFirmwareVersion().ToString() + "-" + zedCamera.GetSensorsFirmwareVersion().ToString();
cameraSerialNumber = zedCamera.GetZEDSerialNumber().ToString();
if (inputType == sl.INPUT_TYPE.INPUT_TYPE_SVO)
{
numberFrameMax = zedCamera.GetSVONumberOfFrames();
}
// If streaming has been switched on before play
if (enableStreaming && !isStreamingEnable)
{
lock (zedCamera.grabLock) //Streaming setup must not race the grab thread.
{
sl.ERROR_CODE err = zedCamera.EnableStreaming(streamingCodec, (uint)bitrate, (ushort)streamingPort, gopSize, adaptativeBitrate, chunkSize, streamingTargetFramerate);
if (err == sl.ERROR_CODE.SUCCESS)
{
isStreamingEnable = true;
}
else
{
enableStreaming = false;
isStreamingEnable = false;
}
}
}
//If not already launched, launch the image grabbing thread.
if (!running)
{
running = true;
requestNewFrame = true;
threadGrab = new Thread(new ThreadStart(ThreadedZEDGrab));
threadGrab.Start();
}
zedReady = true;
isDisconnected = false; //In case we just regained connection.
setRenderingSettings(); //Find the ZEDRenderingPlanes in the rig and configure them.
AdjustZEDRigCameraPosition(); //If in AR mode, move cameras to proper offset relative to zedRigRoot.
}
}
///
/// Adjust camera(s) relative to zedRigRoot transform, which is what is moved each frame. Called at start of tracking.
/// In AR mode, offset is each camera's position relative to center of the user's head. Otherwise, cameras are just spaced
/// by the camera's baseline/IPD, or no offset is applied if there's just one camera.
///
///
/// Adjusts camera(s) relative to zedRigRoot, which is what is moved each frame. Called at start of tracking.
/// In AR mode, offset is each camera's position relative to center of the user's head. Otherwise, cameras are just spaced
/// by the camera's baseline/IPD, or no offset is applied if there's just one camera.
///
void AdjustZEDRigCameraPosition()
{
//Vector3 rightCameraOffset = new Vector3(zedCamera.Baseline, 0.0f, 0.0f);
if (isStereoRig && hasXRDevice()) //Using AR pass-through mode.
{
//zedRigRoot transform (origin of the global camera) is placed on the HMD headset. Therefore, we move the
//camera in front of it by offsetHmdZEDPosition to compensate for the ZED's position on the headset.
//If values are wrong, tweak calibration file created in ZEDMixedRealityPlugin.
camLeftTransform.localPosition = arRig.HmdToZEDCalibration.translation;
camLeftTransform.localRotation = arRig.HmdToZEDCalibration.rotation;
if (camRightTransform) camRightTransform.localPosition = camLeftTransform.localPosition + new Vector3(zedCamera.Baseline, 0.0f, 0.0f); //Space the eyes apart.
if (camRightTransform) camRightTransform.localRotation = camLeftTransform.localRotation;
}
else if (camLeftTransform && camRightTransform) //Using stereo rig, but no VR headset.
{
//When no VR HMD is available, simply put the origin at the left camera.
camLeftTransform.localPosition = Vector3.zero;
camLeftTransform.localRotation = Quaternion.identity;
camRightTransform.localPosition = new Vector3(zedCamera.Baseline, 0.0f, 0.0f); //Space the eyes apart.
camRightTransform.localRotation = Quaternion.identity;
}
else //Using mono rig (ZED_Rig_Mono). No offset needed.
{
if (GetMainCameraTransform())
{
GetMainCameraTransform().localPosition = Vector3.zero;
GetMainCameraTransform().localRotation = Quaternion.identity;
}
}
}
///
/// Find the ZEDRenderingPlane components in the ZED rig and set their rendering settings
/// (rendering path, shader values, etc.) for left and right cameras. Also activate/deactivate depth occlusions.
///
///
/// Finds the ZEDRenderingPlane components in the ZED rig and sets their rendering settings
/// (rendering path, shader values, etc.) for left and right cameras. Also activates/deactivates depth occlusions.
///
void setRenderingSettings()
{
//NOTE(review): generic type arguments appear stripped on the GetComponent() calls below
//(presumably GetComponent<ZEDRenderingPlane>() / GetComponent<Camera>()) — verify against upstream source.
ZEDRenderingPlane leftRenderingPlane = null;
if (GetLeftCameraTransform() != null)
{
leftRenderingPlane = GetLeftCameraTransform().GetComponent();
if (leftRenderingPlane)
{
leftRenderingPlane.SetPostProcess(postProcessing);
GetLeftCameraTransform().GetComponent().renderingPath = RenderingPath.UsePlayerSettings;
SetCameraBrightness(m_cameraBrightness); //NOTE(review): also called unconditionally below — this one looks redundant.
cameraLeft.cullingMask &= ~(1 << zedCamera.TagInvisibleToZED); //Hide objects tagged invisible-to-ZED from this camera.
}
}
ZEDRenderingPlane rightRenderingPlane = null;
if (GetRightCameraTransform() != null)
{
rightRenderingPlane = GetRightCameraTransform().GetComponent();
if (rightRenderingPlane)
{
rightRenderingPlane.SetPostProcess(postProcessing);
cameraRight.renderingPath = RenderingPath.UsePlayerSettings;
cameraRight.cullingMask &= ~(1 << zedCamera.TagInvisibleToZED);
}
}
SetCameraBrightness(m_cameraBrightness);
SetMaxDepthRange(m_maxDepthRange);
#if ZED_HDRP
SetSelfIllumination(selfIllumination);
SetBoolValueOnPlaneMaterials("_ApplyZEDNormals", applyZEDNormals);
#endif
Camera maincam = GetMainCamera();
if (maincam != null)
{
ZEDRenderingMode renderingPath = (ZEDRenderingMode)maincam.actualRenderingPath;
//Make sure we're in either forward or deferred rendering. Default to forward otherwise.
if (renderingPath != ZEDRenderingMode.FORWARD && renderingPath != ZEDRenderingMode.DEFERRED)
{
Debug.LogError("[ZED Plugin] Only Forward and Deferred Shading rendering path are supported");
if (cameraLeft) cameraLeft.renderingPath = RenderingPath.Forward;
if (cameraRight) cameraRight.renderingPath = RenderingPath.Forward;
}
//Set depth occlusion.
if (renderingPath == ZEDRenderingMode.FORWARD)
{
if (leftRenderingPlane)
leftRenderingPlane.ManageKeywordPipe(!depthOcclusion, "NO_DEPTH");
if (rightRenderingPlane)
rightRenderingPlane.ManageKeywordPipe(!depthOcclusion, "NO_DEPTH");
}
else if (renderingPath == ZEDRenderingMode.DEFERRED)
{
if (leftRenderingPlane)
leftRenderingPlane.ManageKeywordDeferredMat(!depthOcclusion, "NO_DEPTH");
if (rightRenderingPlane)
rightRenderingPlane.ManageKeywordDeferredMat(!depthOcclusion, "NO_DEPTH");
}
}
}
#endregion
#region IMAGE_ACQUIZ
///
/// Continuously grabs images from the ZED. Runs on its own thread.
///
///
/// Continuously grabs images from the ZED. Runs on its own thread.
///
private void ThreadedZEDGrab()
{
    // Build the per-grab parameters once; only sensingMode may be resynced each iteration.
    runtimeParameters = new sl.RuntimeParameters
    {
        sensingMode = sensingMode,
        enableDepth = true,
        confidenceThreshold = confidenceThreshold,
        textureConfidenceThreshold = textureConfidenceThreshold,
        removeSaturatedAreas = true,
        //Don't change this reference frame. If we need normals in the world frame, better to do the conversion ourselves.
        measure3DReferenceFrame = sl.REFERENCE_FRAME.CAMERA
    };
    while (running)
    {
        if (zedCamera == null)
        {
            return;
        }
        // Pick up any sensing-mode change made from the main thread.
        if (sensingMode != runtimeParameters.sensingMode)
        {
            runtimeParameters.sensingMode = sensingMode;
        }
        AcquireImages();
    }
}
///
/// Grabs images from the ZED SDK and updates tracking, FPS and timestamp values.
/// Called from ThreadedZEDGrab() in a separate thread.
///
///
/// Grabs images from the ZED SDK and updates tracking, FPS and timestamp values.
/// Called from ThreadedZEDGrab() in a separate thread.
///
private void AcquireImages()
{
if (requestNewFrame && zedReady)
{
if (inputType == sl.INPUT_TYPE.INPUT_TYPE_SVO)
{
//handle pause
if (NeedNewFrameGrab && pauseSVOReading)
{
//Paused but a single-frame step was requested (e.g. scrubbing): grab exactly one frame.
ZEDGrabError = zedCamera.Grab(ref runtimeParameters);
NeedNewFrameGrab = false;
}
else if (!pauseSVOReading)
ZEDGrabError = zedCamera.Grab(ref runtimeParameters);
currentFrame = zedCamera.GetSVOPosition();
}
else if (!pauseLiveReading)
{
ZEDGrabError = zedCamera.Grab(ref runtimeParameters);
}
lock (zedCamera.grabLock) //Protects shared SDK state against the main thread and streaming setup.
{
if (ZEDGrabError == sl.ERROR_CODE.CAMERA_NOT_DETECTED)
{
Debug.Log("Camera not detected or disconnected.");
isDisconnected = true; //Update() will call ZEDDisconnected() each frame to attempt a restart.
Thread.Sleep(10);
requestNewFrame = false;
}
else if (ZEDGrabError == sl.ERROR_CODE.SUCCESS)
{
#if UNITY_EDITOR
float camera_fps = zedCamera.GetCameraFPS();
cameraFPS = camera_fps.ToString() + " FPS";
#endif
//Update object detection here if using object sync.
if (objectDetectionRunning && objectDetectionImageSyncMode == true && requestobjectsframe)
{
RetrieveObjectDetectionFrame();
}
//Get position of camera
if (isTrackingEnable)
{
zedtrackingState = zedCamera.GetPosition(ref zedOrientation, ref zedPosition, sl.TRACKING_FRAME.LEFT_EYE);
//zedtrackingState = sl.TRACKING_STATE.TRACKING_OK;
if (inputType == sl.INPUT_TYPE.INPUT_TYPE_SVO && svoLoopBack == true && initialPoseCached == false)
{
//Cache the first tracked pose so SVO looping can restart from it.
initialPosition = zedPosition;
initialRotation = zedOrientation;
initialPoseCached = true;
}
}
else
{
zedtrackingState = sl.TRACKING_STATE.TRACKING_OFF;
}
// Indicate that a new frame is available and pause the thread until a new request is called
newFrameAvailable = true;
requestNewFrame = false;
}
else
Thread.Sleep(1); //Grab failed non-fatally; retry shortly.
}
}
else
{
//To avoid "overheating."
Thread.Sleep(1);
}
}
#endregion
/// <summary>
/// Initializes video settings and tracking, then places the rig at its starting pose.
/// Called once the ZED is initialized successfully.
/// </summary>
private void ZEDReady()
{
    //Apply camera settings based on user preference.
    InitVideoSettings(videoSettingsInitMode);
    FPS = (int)zedCamera.GetRequestedCameraFPS();
    if (enableTracking)
    {
        //Tracking init can block for a while, so it runs on its own thread; joined below.
        trackerThread = new Thread(EnableTrackingThreaded);
        trackerThread.Start();
    }
    else if (estimateInitialPosition)
    {
        //Tracking disabled, but still estimate the camera's starting pose once.
        sl.ERROR_CODE err = zedCamera.EstimateInitialPosition(ref initialRotation, ref initialPosition);
        if (zedCamera.GetCameraModel() != sl.MODEL.ZED)
            zedCamera.GetInternalIMUOrientation(ref initialRotation, sl.TIME_REFERENCE.IMAGE); //Models with an IMU refine the rotation.
        if (err != sl.ERROR_CODE.SUCCESS)
            Debug.LogWarning("Failed to estimate initial camera position");
    }
    if (enableTracking)
        trackerThread.Join(); //Wait for tracking init to finish before positioning the rig.
    if (isStereoRig && hasXRDevice())
    {
        //AR pass-through: the rig origin comes from the headset's tracking space.
        ZEDMixedRealityPlugin.Pose pose = arRig.InitTrackingAR();
        OriginPosition = pose.translation;
        OriginRotation = pose.rotation;
        if (!zedCamera.IsHmdCompatible && zedCamera.IsCameraReady)
            Debug.LogWarning("WARNING: AR Passtrough with a ZED is not recommended. Consider using ZED Mini, designed for this purpose.");
    }
    else
    {
        OriginPosition = initialPosition;
        OriginRotation = initialRotation;
    }
    //Set the original transform for the Rig
    zedRigRoot.localPosition = OriginPosition;
    zedRigRoot.localRotation = OriginRotation;
}
/// <summary>
/// Initializes the ZED's inside-out tracking. Started as a separate thread in OnZEDReady.
/// Throws if the SDK fails to enable tracking.
/// </summary>
void EnableTrackingThreaded()
{
    //grabLock serializes this SDK call against the grab thread.
    lock (zedCamera.grabLock)
    {
        //If using spatial memory and given a path to a .area file, make sure that path is valid.
        if (enableSpatialMemory && pathSpatialMemory != "" && !System.IO.File.Exists(pathSpatialMemory))
        {
            Debug.Log("Specified path to .area file '" + pathSpatialMemory + "' does not exist. Ignoring.");
            pathSpatialMemory = "";
        }
        sl.ERROR_CODE err = (zedCamera.EnableTracking(ref zedOrientation, ref zedPosition, enableSpatialMemory,
            enablePoseSmoothing, estimateInitialPosition, trackingIsStatic, enableIMUFusion, pathSpatialMemory));
        //Now enable the tracking with the proper parameters.
        //On failure, enableTracking is cleared before throwing so the rest of the manager knows tracking is off.
        if (!(enableTracking = (err == sl.ERROR_CODE.SUCCESS)))
        {
            throw new Exception(ZEDLogMessage.Error2Str(ZEDLogMessage.ERROR.TRACKING_NOT_INITIALIZED));
        }
        else
        {
            isTrackingEnable = true;
        }
    }
}
#if ZED_HDRP
/// <summary>
/// Loads the material matching the user-chosen SRP shader type from Resources.
/// </summary>
/// <param name="srpMat">The loaded material, or null if not found or srpShaderType is DontChange.</param>
/// <returns>True if a material was successfully loaded.</returns>
public bool GetChosenSRPMaterial(out Material srpMat)
{
    //Map the chosen shader type to its Resources path; load/error handling is shared below.
    string path;
    switch (srpShaderType)
    {
        case shaderType.Lit:
            path = "Materials/Lighting/Mat_ZED_HDRP_Lit";
            break;
        case shaderType.Unlit:
            path = "Materials/Unlit/Mat_ZED_Unlit_RawInput";
            break;
        case shaderType.Greenscreen_Lit:
            path = "Materials/Lighting/Mat_ZED_Greenscreen_HDRP_Lit";
            break;
        case shaderType.Greenscreen_Unlit:
            path = "Materials/Unlit/Mat_ZED_Greenscreen_Unlit";
            break;
        case shaderType.DontChange:
        default:
            srpMat = null;
            return false;
    }
    //Typed load restored: the untyped Resources.Load returns UnityEngine.Object, which
    //cannot be assigned to a Material.
    srpMat = Resources.Load<Material>(path);
    if (srpMat == null)
    {
        Debug.LogError("Couldn't find material in Resources. Path: " + path);
        return false;
    }
    return true;
}
#endif
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////// ENGINE UPDATE REGION /////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
#region ENGINE_UPDATE
/// <summary>
/// If a new frame is available, this function retrieves the images and updates the Unity textures. Called in Update().
/// </summary>
public void UpdateImages()
{
    if (zedCamera == null)
        return;
    if (newFrameAvailable) //ThreadedZEDGrab()/AcquireImages() grabbed images we haven't updated yet.
    {
        //grabLock keeps the grab thread from starting another frame while textures are copied.
        lock (zedCamera.grabLock)
        {
            zedCamera.RetrieveTextures(); //Tell the wrapper to compute the textures.
            zedCamera.UpdateTextures(); //Tell the wrapper to update the textures.
            imageTimeStamp = zedCamera.GetImagesTimeStamp();
        }
        //For external module ... Trigger the capture done event.
        if (OnGrab != null)
            OnGrab();
        //SVO and loop back ? --> reset position if needed
        if (zedCamera.GetInputType() == sl.INPUT_TYPE.INPUT_TYPE_SVO && svoLoopBack)
        {
            int maxSVOFrame = zedCamera.GetSVONumberOfFrames();
            //Real-time mode may skip the very last frame, so loop a frame early in that case.
            if (zedCamera.GetSVOPosition() >= maxSVOFrame - (svoRealTimeMode ? 2 : 1))
            {
                zedCamera.SetSVOPosition(0);
                if (enableTracking)
                {
                    //Reset tracking to the cached initial pose; disable it if the reset fails.
                    if (!(enableTracking = (zedCamera.ResetTracking(initialRotation, initialPosition) == sl.ERROR_CODE.SUCCESS)))
                    {
                        Debug.LogError("ZED Tracking disabled: Not available during SVO playback when Loop is enabled.");
                    }
                }
                zedRigRoot.localPosition = initialPosition;
                zedRigRoot.localRotation = initialRotation;
            }
        }
        requestNewFrame = true; //Lets ThreadedZEDGrab/AcquireImages() start grabbing again.
        newFrameAvailable = false;
    }
}
/// <summary>
/// Gets the tracking position from the ZED and updates zedRigRoot's position. Also updates the AR tracking if enabled.
/// Only called in Live (not SVO playback) mode. Called in Update().
/// </summary>
private void UpdateTracking()
{
    if (!zedReady)
        return;
    if (isZEDTracked) //ZED inside-out tracking is enabled and initialized.
    {
        Quaternion r;
        Vector3 v;
        isCameraTracked = true;
        if (hasXRDevice() && isStereoRig) //AR pass-through mode.
        {
            if (calibrationHasChanged) //If the HMD offset calibration file changed during runtime.
            {
                AdjustZEDRigCameraPosition(); //Re-apply the ZED's offset from the VR headset.
                calibrationHasChanged = false;
            }
            arRig.ExtractLatencyPose(imageTimeStamp); //Find what HMD's pose was at ZED image's timestamp for latency compensation.
            arRig.AdjustTrackingAR(zedPosition, zedOrientation, out r, out v, setIMUPriorInAR);
            zedRigRoot.localRotation = r;
            zedRigRoot.localPosition = v;
            //Debug.DrawLine(new Vector3(0, 0.05f, 0), (r * Vector3.one * 5) + new Vector3(0, 0.05f, 0), Color.red);
            //Debug.DrawLine(Vector3.zero, zedOrientation * Vector3.one * 5, Color.green);
            //Expose the synchronized poses for external consumers.
            ZEDSyncPosition = v;
            ZEDSyncRotation = r;
            HMDSyncPosition = arRig.LatencyPose().translation;
            HMDSyncRotation = arRig.LatencyPose().rotation;
        }
        else //Not AR pass-through mode.
        {
            zedRigRoot.localRotation = zedOrientation;
            //Guard against NaN positions the SDK can report when tracking is lost.
            if (!ZEDSupportFunctions.IsVector3NaN(zedPosition))
                zedRigRoot.localPosition = zedPosition;
        }
    }
    else if (hasXRDevice() && isStereoRig) //ZED tracking is off but HMD tracking is on. Fall back to that.
    {
        isCameraTracked = true;
        arRig.ExtractLatencyPose(imageTimeStamp); //Find what HMD's pose was at ZED image's timestamp for latency compensation.
        zedRigRoot.localRotation = arRig.LatencyPose().rotation;
        zedRigRoot.localPosition = arRig.LatencyPose().translation;
    }
    else //The ZED is not tracked by itself or an HMD.
        isCameraTracked = false;
}
/// <summary>
/// Stores the HMD's current pose, timestamped, for use in AR mode latency compensation.
/// The saved pose is applied to the final canvases when a later image's timestamp matches it.
/// </summary>
void UpdateHmdPose()
{
    //Nothing to collect unless we're in stereo AR mode with a headset present.
    if (!IsStereoRig || !hasXRDevice())
        return;
    arRig.CollectPose();
}
/// <summary>
/// Updates images, collects HMD poses for latency correction, and applies tracking.
/// Called by Unity each frame.
/// </summary>
void Update()
{
    //Check if ZED is disconnected; invoke event and call function if so.
    if (isDisconnected)
    {
        if (OnZEDDisconnected != null)
            OnZEDDisconnected(); //Invoke event. Used for GUI message and pausing ZEDRenderingPlanes.
        ZEDDisconnected(); //Tries to reset the camera.
        return;
    }
    // Then update all modules
    UpdateImages(); //Image is updated first so we have its timestamp for latency compensation.
    UpdateHmdPose(); //Store the HMD's pose at the current timestamp.
    UpdateTracking(); //Apply position/rotation changes to zedRigRoot.
    UpdateObjectsDetection(); //Update od if activated
    UpdateMapping(); //Update mapping if activated
    /// If in Unity Editor, update the ZEDManager status list
#if UNITY_EDITOR
    //Update strings used for displaying stats in the Inspector.
    if (zedCamera != null)
    {
        float frame_drop_count = zedCamera.GetFrameDroppedPercent();
        float CurrentTickFPS = 1.0f / Time.deltaTime;
        fps_engine = (fps_engine + CurrentTickFPS) / 2.0f;
        engineFPS = fps_engine.ToString("F0") + " FPS";
        if (frame_drop_count > 30 && fps_engine < 45)
            engineFPS += " WARNING: Low engine framerate detected"; //Leading space added; it was fused onto the FPS string ("60 FPSWARNING...").
        if (isZEDTracked)
            trackingState = ZEDTrackingState.ToString();
        else if (hasXRDevice() && isStereoRig)
            trackingState = "HMD Tracking";
        else
            trackingState = "Camera Not Tracked";
    }
#endif
}
/// <summary>
/// Pushes the latest ZED textures to the AR rig for headset output. Runs after all Update() calls.
/// </summary>
public void LateUpdate()
{
    //Only the stereo (AR pass-through) rig has an output rig to refresh.
    if (!IsStereoRig)
        return;
    arRig.LateUpdateHmdRendering(); //Update textures on final AR rig for output to the headset.
}
#endregion
/// <summary>
/// Handles a camera disconnection: flags the state and, if the camera had been running, attempts a reset.
/// </summary>
void ZEDDisconnected()
{
    cameraFPS = "Disconnected";
    isDisconnected = true;
    //Only try to recover if the camera was fully up before the disconnect.
    if (!zedReady)
        return;
    Reset(); //Cache tracking, turn it off and turn it back on again.
}
/// <summary>
/// Unity lifecycle hook: shuts down the camera and all modules when this component is destroyed.
/// </summary>
private void OnDestroy()
{
    CloseManager();
}
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////// SPATIAL MAPPING REGION /////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
#region MAPPING_MODULE
/// <summary>
/// Tells ZEDSpatialMapping to begin a new scan. This clears the previous scan from the scene if there is one.
/// </summary>
public void StartSpatialMapping()
{
    //Zero out this object's transform so the scanned mesh is built in a clean local space.
    transform.position = Vector3.zero;
    transform.rotation = Quaternion.identity;
    spatialMapping.StartStatialMapping(sl.SPATIAL_MAP_TYPE.MESH, mappingResolutionPreset,
        mappingRangePreset, isMappingTextured);
}
/// <summary>
/// Ends the current spatial mapping. Once called, the current mesh will be filtered, textured (if enabled)
/// and saved (if enabled), and a mesh collider will be added.
/// </summary>
public void StopSpatialMapping()
{
    if (spatialMapping == null)
        return;
    //Save first (if requested) so the mesh on disk reflects the finished scan.
    if (saveMeshWhenOver)
        SaveMesh(meshPath);
    spatialMapping.StopStatialMapping();
}
/// <summary>
/// Refreshes the filtering parameters and ticks the ZEDSpatialMapping instance while a scan is running.
/// </summary>
private void UpdateMapping()
{
    //Nothing to do unless the module exists and a scan is in progress.
    if (spatialMapping == null || !spatialMapping.IsRunning())
        return;
    spatialMapping.filterParameters = meshFilterParameters; //Pick up runtime changes from the Inspector.
    spatialMapping.Update();
}
/// <summary>
/// Toggles whether to display the scanned mesh.
/// </summary>
/// <param name="state">True to make the mesh visible, false to make it invisible.</param>
public void SwitchDisplayMeshState(bool state)
{
    if (spatialMapping == null)
        return;
    spatialMapping.SwitchDisplayMeshState(state);
}
/// <summary>
/// Removes all previously-scanned meshes from the scene.
/// </summary>
public void ClearAllMeshes()
{
    if (spatialMapping == null)
        return;
    spatialMapping.ClearAllMeshes();
}
/// <summary>
/// Pauses or resumes the current scan.
/// </summary>
/// <param name="state">True to pause the scanning, false to unpause it.</param>
public void SwitchPauseState(bool state)
{
    if (spatialMapping == null)
        return;
    spatialMapping.SwitchPauseState(state);
}
/// <summary>
/// Saves the mesh into a 3D model (.obj, .ply or .bin) file. Also saves an .area file for spatial memory for better tracking.
/// Calling this will end the spatial mapping if it's running. Note it can take a significant amount of time to finish.
/// </summary>
/// <param name="meshPath">Path where the mesh and .area files will be saved.</param>
public void SaveMesh(string meshPath = "ZEDMeshObj.obj")
{
    //Null guard added for consistency with the other mapping methods; previously this
    //would throw a NullReferenceException if mapping was never initialized.
    if (spatialMapping != null)
        spatialMapping.RequestSaveMesh(meshPath);
}
/// <summary>
/// Loads a mesh and spatial memory data from a file.
/// If scanning is running, it will be stopped. Existing scans in the scene will be cleared.
/// </summary>
/// <param name="meshPath">Path to the 3D mesh file (.obj, .ply or .bin) to load.</param>
/// <returns>True if successfully loaded, false otherwise.</returns>
public bool LoadMesh(string meshPath = "ZEDMeshObj.obj")
{
    //Null guard added for consistency with the other mapping methods.
    if (spatialMapping == null)
        return false;
    //Cache the save setting and set to false, to avoid overwriting the mesh file during the load.
    bool oldSaveWhenOver = saveMeshWhenOver;
    saveMeshWhenOver = false;
    gravityRotation = Quaternion.identity;
    spatialMapping.SetMeshRenderer();
    bool loadresult = spatialMapping.LoadMesh(meshPath);
    saveMeshWhenOver = oldSaveWhenOver; //Restoring old setting.
    return loadresult;
}
#endregion
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////// OBJECT DETECTION REGION //////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
#region OBJECT_DETECTION
/// <summary>
/// True while the object detection coroutine is in the process of starting.
/// Used to prevent object detection from being launched multiple times at once, which causes instability.
/// </summary>
private bool odIsStarting = false;
/// <summary>
/// Starts the ZED object detection.
/// Note: This will lock the main thread for a moment, which may appear to be a freeze.
/// </summary>
public void StartObjectDetection()
{
    //Warn if the chosen AI model hasn't been downloaded/optimized on this machine.
    sl.AI_Model_status status = sl.ZEDCamera.CheckAIModelStatus(sl.ZEDCamera.cvtDetection(objectDetectionModel));
    if (!status.optimized)
    {
        Debug.LogError("The Model * " + objectDetectionModel.ToString() + " * has not been downloaded/optimized. Use the ZED Diagnostic tool to download/optimze all the AI model you plan to use.");
        // return;
    }
    //We start a coroutine so we can delay actually starting the detection.
    //This is because the main thread is locked for awhile when you call this, appearing like a freeze.
    //This time lets us deliver a log message to the user indicating that this is expected.
    StartCoroutine(startObjectDetection());
}
/// <summary>
/// Starts the object detection module after a two-frame delay, allowing us to deliver a log message
/// to the user indicating that what appears to be a freeze is actually expected and will pass.
/// </summary>
private IEnumerator startObjectDetection()
{
    //Reject concurrent start attempts; parallel EnableObjectsDetection calls cause instability.
    if (odIsStarting == true)
    {
        Debug.LogError("Tried to start Object Detection while it was already starting. Do you have two scripts trying to start it?");
        yield break;
    }
    if (objectDetectionRunning)
    {
        Debug.LogWarning("Tried to start Object Detection while it was already running.");
    }
    if (zedCamera != null)
    {
        odIsStarting = true;
        Debug.LogWarning("Starting Object Detection. This may take a moment.");
        bool oldpausestate = pauseSVOReading; //The two frame delay will cause you to miss some SVO frames if playing back from an SVO, unless we pause.
        pauseSVOReading = true;
        yield return null; //Give Unity a frame to render the log message before the blocking call below.
        pauseSVOReading = oldpausestate;
        //Build the one-time detection parameters from this component's Inspector fields.
        sl.dll_ObjectDetectionParameters od_param = new sl.dll_ObjectDetectionParameters();
        od_param.imageSync = objectDetectionImageSyncMode;
        od_param.enableObjectTracking = objectDetectionTracking;
        od_param.enable2DMask = objectDetection2DMask;
        od_param.detectionModel = objectDetectionModel;
        od_param.maxRange = objectDetectionMaxRange;
        od_param.filteringMode = objectDetectionFilteringMode;
        //POSE_34 skeletons require body fitting; force it on if the user left it off.
        if (objectDetectionBodyFormat == sl.BODY_FORMAT.POSE_34 && objectDetectionBodyFitting == false && (objectDetectionModel == sl.DETECTION_MODEL.HUMAN_BODY_ACCURATE || objectDetectionModel == sl.DETECTION_MODEL.HUMAN_BODY_MEDIUM
            || objectDetectionModel == sl.DETECTION_MODEL.HUMAN_BODY_FAST))
        {
            Debug.LogWarning("sl.BODY_FORMAT.POSE_34 is chosen, Skeleton Tracking will automatically enable body fitting");
            objectDetectionBodyFitting = true;
        }
        od_param.bodyFormat = objectDetectionBodyFormat;
        od_param.enableBodyFitting = objectDetectionBodyFitting;
        //Per-class confidence thresholds; skeleton models use the dedicated SK person threshold.
        od_runtime_params.object_confidence_threshold = new int[(int)sl.OBJECT_CLASS.LAST];
        od_runtime_params.object_confidence_threshold[(int)sl.OBJECT_CLASS.PERSON] = (objectDetectionModel == sl.DETECTION_MODEL.HUMAN_BODY_ACCURATE || objectDetectionModel == sl.DETECTION_MODEL.HUMAN_BODY_FAST || objectDetectionModel == sl.DETECTION_MODEL.HUMAN_BODY_MEDIUM) ? SK_personDetectionConfidenceThreshold : OD_personDetectionConfidenceThreshold;
        od_runtime_params.object_confidence_threshold[(int)sl.OBJECT_CLASS.VEHICLE] = vehicleDetectionConfidenceThreshold;
        od_runtime_params.object_confidence_threshold[(int)sl.OBJECT_CLASS.BAG] = bagDetectionConfidenceThreshold;
        od_runtime_params.object_confidence_threshold[(int)sl.OBJECT_CLASS.ANIMAL] = animalDetectionConfidenceThreshold;
        od_runtime_params.object_confidence_threshold[(int)sl.OBJECT_CLASS.ELECTRONICS] = electronicsDetectionConfidenceThreshold;
        od_runtime_params.object_confidence_threshold[(int)sl.OBJECT_CLASS.FRUIT_VEGETABLE] = fruitVegetableDetectionConfidenceThreshold;
        od_runtime_params.object_confidence_threshold[(int)sl.OBJECT_CLASS.SPORT] = sportDetectionConfidenceThreshold;
        //Per-class enable/disable filters from the Inspector checkboxes.
        od_runtime_params.objectClassFilter = new int[(int)sl.OBJECT_CLASS.LAST];
        od_runtime_params.objectClassFilter[(int)sl.OBJECT_CLASS.PERSON] = Convert.ToInt32(objectClassPersonFilter);
        od_runtime_params.objectClassFilter[(int)sl.OBJECT_CLASS.VEHICLE] = Convert.ToInt32(objectClassVehicleFilter);
        od_runtime_params.objectClassFilter[(int)sl.OBJECT_CLASS.BAG] = Convert.ToInt32(objectClassBagFilter);
        od_runtime_params.objectClassFilter[(int)sl.OBJECT_CLASS.ANIMAL] = Convert.ToInt32(objectClassAnimalFilter);
        od_runtime_params.objectClassFilter[(int)sl.OBJECT_CLASS.ELECTRONICS] = Convert.ToInt32(objectClassElectronicsFilter);
        od_runtime_params.objectClassFilter[(int)sl.OBJECT_CLASS.FRUIT_VEGETABLE] = Convert.ToInt32(objectClassFruitVegetableFilter);
        od_runtime_params.objectClassFilter[(int)sl.OBJECT_CLASS.SPORT] = Convert.ToInt32(objectClassSportFilter);
        System.Diagnostics.Stopwatch watch = new System.Diagnostics.Stopwatch(); //Time how long the loading takes so we can tell the user.
        watch.Start();
        sl.ERROR_CODE err = zedCamera.EnableObjectsDetection(ref od_param);
        if (err == sl.ERROR_CODE.SUCCESS)
        {
            Debug.Log("Object Detection module started in " + watch.Elapsed.Seconds + " seconds.");
            objectDetectionRunning = true;
        }
        else
        {
            Debug.Log("Object Detection failed to start. (Error: " + err + " )");
            objectDetectionRunning = false;
        }
        watch.Stop();
        odIsStarting = false;
    }
}
/// <summary>
/// Stops the object detection module.
/// </summary>
public void StopObjectDetection()
{
    //Only disable if the camera exists and the manager is still running.
    if (zedCamera == null || !running)
        return;
    zedCamera.DisableObjectsDetection();
    objectDetectionRunning = false;
}
/// <summary>
/// Updates the object detection by refreshing the runtime parameters, retrieving the latest
/// detection frame (when not in image-sync mode), and firing the detection events.
/// </summary>
public void UpdateObjectsDetection()
{
    if (!objectDetectionRunning) return;
    //Update the runtime parameters in case the user made changes.
    od_runtime_params.object_confidence_threshold = new int[(int)sl.OBJECT_CLASS.LAST];
    //HUMAN_BODY_MEDIUM added to the skeleton-model check, and the SPORT class refreshed below,
    //to match the parameter setup in startObjectDetection().
    od_runtime_params.object_confidence_threshold[(int)sl.OBJECT_CLASS.PERSON] = (objectDetectionModel == sl.DETECTION_MODEL.HUMAN_BODY_ACCURATE || objectDetectionModel == sl.DETECTION_MODEL.HUMAN_BODY_FAST || objectDetectionModel == sl.DETECTION_MODEL.HUMAN_BODY_MEDIUM) ? SK_personDetectionConfidenceThreshold : OD_personDetectionConfidenceThreshold;
    od_runtime_params.object_confidence_threshold[(int)sl.OBJECT_CLASS.VEHICLE] = vehicleDetectionConfidenceThreshold;
    od_runtime_params.object_confidence_threshold[(int)sl.OBJECT_CLASS.BAG] = bagDetectionConfidenceThreshold;
    od_runtime_params.object_confidence_threshold[(int)sl.OBJECT_CLASS.ANIMAL] = animalDetectionConfidenceThreshold;
    od_runtime_params.object_confidence_threshold[(int)sl.OBJECT_CLASS.ELECTRONICS] = electronicsDetectionConfidenceThreshold;
    od_runtime_params.object_confidence_threshold[(int)sl.OBJECT_CLASS.FRUIT_VEGETABLE] = fruitVegetableDetectionConfidenceThreshold;
    od_runtime_params.object_confidence_threshold[(int)sl.OBJECT_CLASS.SPORT] = sportDetectionConfidenceThreshold;
    od_runtime_params.objectClassFilter = new int[(int)sl.OBJECT_CLASS.LAST];
    od_runtime_params.objectClassFilter[(int)sl.OBJECT_CLASS.PERSON] = Convert.ToInt32(objectClassPersonFilter);
    od_runtime_params.objectClassFilter[(int)sl.OBJECT_CLASS.VEHICLE] = Convert.ToInt32(objectClassVehicleFilter);
    od_runtime_params.objectClassFilter[(int)sl.OBJECT_CLASS.BAG] = Convert.ToInt32(objectClassBagFilter);
    od_runtime_params.objectClassFilter[(int)sl.OBJECT_CLASS.ANIMAL] = Convert.ToInt32(objectClassAnimalFilter);
    od_runtime_params.objectClassFilter[(int)sl.OBJECT_CLASS.ELECTRONICS] = Convert.ToInt32(objectClassElectronicsFilter);
    od_runtime_params.objectClassFilter[(int)sl.OBJECT_CLASS.FRUIT_VEGETABLE] = Convert.ToInt32(objectClassFruitVegetableFilter);
    od_runtime_params.objectClassFilter[(int)sl.OBJECT_CLASS.SPORT] = Convert.ToInt32(objectClassSportFilter);
    if (objectDetectionImageSyncMode == false) RetrieveObjectDetectionFrame(); //If true, this is called in the AcquireImages function in the image acquisition thread.
    if (newobjectsframeready)
    {
        lock (zedCamera.grabLock)
        {
            //Running average of the detection module's framerate, shown in the Inspector.
            float objdetect_fps = 1000000000.0f / (objectsFrameSDK.timestamp - lastObjectFrameTimeStamp);
            objDetectionModuleFPS = (objDetectionModuleFPS + objdetect_fps) / 2.0f;
            objectDetectionFPS = objDetectionModuleFPS.ToString("F1") + " FPS";
            lastObjectFrameTimeStamp = objectsFrameSDK.timestamp;
            ///Trigger the event that holds the raw data, and pass the whole objects frame.
            if (OnObjectDetection_SDKData != null)
            {
                OnObjectDetection_SDKData(objectsFrameSDK);
            }
            //If there are any subscribers to the non-raw data, create that data and publish the event.
            if (OnObjectDetection != null)
            {
                DetectionFrame oldoframe = detectionFrame; //Cache so we can clean it up once we're done setting up the new one.
                detectionFrame = new DetectionFrame(objectsFrameSDK, this);
                OnObjectDetection(detectionFrame);
                if (oldoframe != null) oldoframe.CleanUpAllObjects();
            }
            //Now that all events have been sent out, it's safe to let the image acquisition thread detect more objects.
            requestobjectsframe = true;
            newobjectsframeready = false;
        }
    }
}
/// <summary>
/// Requests the latest object detection frame information. If it's new, it'll fill the objectsFrame object
/// with the new frame info, set requestobjectsframe to false, and set newobjectsframeready to true.
/// </summary>
private void RetrieveObjectDetectionFrame()
{
    sl.ObjectsFrameSDK oframebuffer = new sl.ObjectsFrameSDK();
    sl.ERROR_CODE res = zedCamera.RetrieveObjectsDetectionData(ref od_runtime_params, ref oframebuffer);
    //Only swap in the buffer if the SDK actually produced a new frame (isNew != 0).
    if (res == sl.ERROR_CODE.SUCCESS && oframebuffer.isNew != 0)
    {
        if (objectDetection2DMask)
        {
            //Release memory from masks.
            for (int i = 0; i < objectsFrameSDK.numObject; i++)
            {
                sl.ZEDMat oldmat = new sl.ZEDMat(objectsFrameSDK.objectData[i].mask);
                oldmat.Free();
            }
        }
        objectsFrameSDK = oframebuffer;
        requestobjectsframe = false;
        newobjectsframeready = true; //Consumed by UpdateObjectsDetection() on the main thread.
    }
}
/// <summary>
/// Pauses or resumes the object detection module.
/// </summary>
/// <param name="state">True to pause the object detection, false to resume it.</param>
public void SwitchObjectDetectionPauseState(bool state)
{
    //Only meaningful when the camera exists and detection is running.
    if (zedCamera == null || !objectDetectionRunning)
        return;
    zedCamera.PauseObjectsDetection(state);
}
#endregion
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////// AR REGION //////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
#region AR_CAMERAS
/// <summary>
/// Stereo rig that adjusts images from ZED_Rig_Stereo to look correct in the HMD.
/// Hidden by default as it rarely needs to be changed.
/// </summary>
[HideInInspector]
public GameObject zedRigDisplayer;
//Component created on zedRigDisplayer in CreateZEDRigDisplayer(); used for latency-pose
//collection and final HMD rendering throughout this class.
private ZEDMixedRealityPlugin arRig;
/// <summary>
/// Creates the GameObject used to display the ZED output in a headset (ZED-M only):
/// a quad screen plus a dedicated camera, wired into the ZEDMixedRealityPlugin.
/// </summary>
/// <returns>The root of the newly-created display rig.</returns>
//NOTE(review): several calls below (AddComponent(), GetComponent(), Resources.Load(...))
//appear to have lost their generic type arguments in this copy — confirm against the
//original source (e.g. AddComponent<ZEDMixedRealityPlugin>()).
private GameObject CreateZEDRigDisplayer()
{
    //Make sure we don't already have one, such as if the camera disconnected and reconnected.
    if (zedRigDisplayer != null) Destroy(zedRigDisplayer);
    zedRigDisplayer = new GameObject("ZEDRigDisplayer");
    arRig = zedRigDisplayer.AddComponent();
    /*Screens left and right */
    GameObject centerScreen = GameObject.CreatePrimitive(PrimitiveType.Quad);
    centerScreen.name = "Quad";
    //Strip the quad's renderer of every feature that could alter the raw camera image.
    MeshRenderer meshCenterScreen = centerScreen.GetComponent();
    meshCenterScreen.lightProbeUsage = UnityEngine.Rendering.LightProbeUsage.Off;
    meshCenterScreen.reflectionProbeUsage = UnityEngine.Rendering.ReflectionProbeUsage.Off;
    meshCenterScreen.receiveShadows = false;
    meshCenterScreen.motionVectorGenerationMode = MotionVectorGenerationMode.ForceNoMotion;
    meshCenterScreen.shadowCastingMode = UnityEngine.Rendering.ShadowCastingMode.Off;
    meshCenterScreen.sharedMaterial = Resources.Load("Materials/Unlit/Mat_ZED_Unlit") as Material;
    centerScreen.layer = arLayer;
    GameObject.Destroy(centerScreen.GetComponent()); //Remove the primitive's collider; the quad is display-only.
    /*Camera left and right*/
    GameObject camCenter = new GameObject("camera");
    camCenter.transform.SetParent(zedRigDisplayer.transform);
    Camera cam = camCenter.AddComponent();
    cam.renderingPath = RenderingPath.Forward;//Minimal overhead
    cam.clearFlags = CameraClearFlags.Color;
    cam.backgroundColor = Color.black;
    cam.stereoTargetEye = StereoTargetEyeMask.Both; //Temporary setting to fix loading screen issue.
    cam.cullingMask = 1 << arLayer; //Only render the AR layer holding the quad.
    cam.allowHDR = false;
    cam.allowMSAA = false;
    cam.depth = camRightTransform.GetComponent().depth;
    HideFromWrongCameras.RegisterZEDCam(cam);
    HideFromWrongCameras hider = centerScreen.AddComponent();
    hider.SetRenderCamera(cam);
    hider.showInNonZEDCameras = false;
    SetLayerRecursively(camCenter, arLayer);
    //Hide camera in editor.
#if UNITY_EDITOR
    if (!showarrig)
    {
        LayerMask layerNumberBinary = (1 << arLayer); //Convert layer index into binary number.
        LayerMask flippedVisibleLayers = ~UnityEditor.Tools.visibleLayers;
        UnityEditor.Tools.visibleLayers = ~(flippedVisibleLayers | layerNumberBinary);
    }
#endif
    centerScreen.transform.SetParent(zedRigDisplayer.transform);
    //Wire up the AR rig with the eyes, the output quad, and the final camera.
    arRig.finalCameraCenter = camCenter;
    arRig.ZEDEyeLeft = camLeftTransform.gameObject;
    arRig.ZEDEyeRight = camRightTransform.gameObject;
    arRig.quadCenter = centerScreen.transform;
    ZEDMixedRealityPlugin.OnHmdCalibChanged += CalibrationHasChanged;
    if (hasXRDevice())
    {
#if UNITY_2019_1_OR_NEWER
        HMDDevice = XRSettings.loadedDeviceName;
#else
        HMDDevice = XRDevice.model;
#endif
    }
    return zedRigDisplayer;
}
#endregion
#region MIRROR
//Component that blits the latest left-eye image to the monitor while an HMD is in use.
private ZEDMirror mirror = null;
//Hidden container object that parents the mirror camera.
private GameObject mirrorContainer = null;
/// <summary>
/// Creates (or re-configures on reconnect) the hidden camera used to mirror the ZED
/// output on the desktop monitor.
/// </summary>
//NOTE(review): the AddComponent()/GetComponent() calls below appear to have lost their
//generic type arguments in this copy (e.g. AddComponent<ZEDMirror>()) — confirm against
//the original source.
void CreateMirror()
{
    GameObject camLeft;
    Camera camL;
    if (mirrorContainer == null)
    {
        //First-time setup: build the hidden container and mirror camera.
        mirrorContainer = new GameObject("Mirror");
        mirrorContainer.hideFlags = HideFlags.HideInHierarchy;
        camLeft = new GameObject("MirrorCamera");
        camLeft.hideFlags = HideFlags.HideInHierarchy;
        mirror = camLeft.AddComponent();
        mirror.manager = this;
        camL = camLeft.AddComponent();
    }
    else
    {
        //Already created (e.g. camera reconnected): reuse the existing objects.
        camLeft = mirror.gameObject;
        camL = camLeft.GetComponent();
    }
    camLeft.transform.parent = mirrorContainer.transform;
    camL.stereoTargetEye = StereoTargetEyeMask.None;
    camL.renderingPath = RenderingPath.Forward;//Minimal overhead
    camL.clearFlags = CameraClearFlags.Color;
    camL.backgroundColor = Color.black;
    camL.cullingMask = 0; //It should see nothing. It gets its final image entirely from a Graphics.Blit call in ZEDMirror.
    camL.allowHDR = false;
    camL.allowMSAA = false;
    camL.useOcclusionCulling = false;
    camL.depth = cameraLeft.GetComponent().depth; //Make sure it renders after the left cam so we can copy texture from latest frame.
}
#endregion
/// <summary>
/// Closes out the current stream, then starts it up again while maintaining tracking data.
/// Used when the zed becomes unplugged, or you want to change a setting at runtime that
/// requires re-initializing the camera.
/// </summary>
public void Reset()
{
    //Save tracking so the reopened camera resumes from the last known pose.
    if (enableTracking && isTrackingEnable)
    {
        zedCamera.GetPosition(ref zedOrientation, ref zedPosition);
    }
    CloseManager();
    //Clear the init-state flags so Awake() performs a full fresh startup.
    openingLaunched = false;
    running = false;
    numberTriesOpening = 0;
    forceCloseInit = false;
    Awake(); //Re-run the normal startup sequence.
}
/// <summary>
/// Performs a hardware reboot of the camera: closes the manager, issues the SDK reboot,
/// waits for the device to re-enumerate (up to ~15s), then restarts via Awake().
/// Stops the application if the camera never comes back.
/// </summary>
public void Reboot()
{
    //Save tracking so the rebooted camera resumes from the last known pose.
    if (enableTracking && isTrackingEnable)
    {
        zedCamera.GetPosition(ref zedOrientation, ref zedPosition);
    }
    int sn = zedCamera.GetZEDSerialNumber(); //Remember which physical camera to wait for.
    CloseManager();
    //Clear the init-state flags so Awake() performs a full fresh startup.
    openingLaunched = false;
    running = false;
    numberTriesOpening = 0;
    forceCloseInit = false;
    bool isCameraAvailable = false;
    Thread.Sleep(1000); //Give the camera time to fully close before the reboot command.
    sl.ERROR_CODE err = sl.ZEDCamera.Reboot(sn);
    if (err == sl.ERROR_CODE.SUCCESS)
    {
        int count = 0;
        // Check if the camera is available before trying to re open it
        //Poll the device list every 500ms, up to 30 times (~15 seconds).
        while (!isCameraAvailable && count < 30)
        {
            count++;
            sl.DeviceProperties[] devices = sl.ZEDCamera.GetDeviceList(out int nbDevices);
            for (int i = 0; i < nbDevices; i++)
            {
                if (sn == devices[i].sn)
                {
                    isCameraAvailable = true;
                    break;
                }
            }
            Thread.Sleep(500);
        }
    }
    if (isCameraAvailable)
    {
        Debug.LogWarning("Reboot successful.");
        Awake(); //Re-run the normal startup sequence.
    }
    else
    {
        //Camera never re-enumerated; stop playback/application entirely.
        Debug.LogWarning("Unable to reboot correctly.");
#if UNITY_EDITOR
        UnityEditor.EditorApplication.isPlaying = false;
#else
        Application.Quit();
#endif
    }
}
/// <summary>
/// Applies the initial camera video settings according to the chosen initialization mode.
/// </summary>
/// <param name="mode">Where the starting values come from: the Inspector (Custom), the SDK's
/// current values (LoadFromSDK), or the factory defaults (Default).</param>
public void InitVideoSettings(VideoSettingsInitMode mode)
{
    if (!zedCamera.IsCameraReady)
    {
        Debug.LogError("Tried to apply camera settings before ZED camera was ready.");
        return;
    }
    if (mode == VideoSettingsInitMode.Custom)
    {
        //Push the values configured on this component onto the camera.
        ApplyLocalVideoSettingsToZED();
    }
    else if (mode == VideoSettingsInitMode.Default)
    {
        //Restore factory settings on the camera, then mirror them into the local fields.
        zedCamera.ResetCameraSettings();
        GetCurrentVideoSettings();
    }
    else
    {
        //LoadFromSDK (and any unrecognized mode): keep the SDK's current values
        //and simply copy them into the local fields.
        GetCurrentVideoSettings();
    }
}
/// <summary>
/// Copies every video setting currently applied on the ZED into this component's local fields.
/// </summary>
private void GetCurrentVideoSettings()
{
    videoBrightness = zedCamera.GetCameraSettings(sl.CAMERA_SETTINGS.BRIGHTNESS);
    videoContrast = zedCamera.GetCameraSettings(sl.CAMERA_SETTINGS.CONTRAST);
    videoHue = zedCamera.GetCameraSettings(sl.CAMERA_SETTINGS.HUE);
    videoSaturation = zedCamera.GetCameraSettings(sl.CAMERA_SETTINGS.SATURATION);
    videoSharpness = zedCamera.GetCameraSettings(sl.CAMERA_SETTINGS.SHARPNESS);
    videoGamma = zedCamera.GetCameraSettings(sl.CAMERA_SETTINGS.GAMMA);
    videoAutoGainExposure = zedCamera.GetCameraSettings(sl.CAMERA_SETTINGS.AEC_AGC) == 1;
    //Manual gain/exposure values are only meaningful when auto-exposure is off.
    if (!videoAutoGainExposure)
    {
        videoGain = zedCamera.GetCameraSettings(sl.CAMERA_SETTINGS.GAIN);
        videoExposure = zedCamera.GetCameraSettings(sl.CAMERA_SETTINGS.EXPOSURE);
    }
    videoAutoWhiteBalance = zedCamera.GetCameraSettings(sl.CAMERA_SETTINGS.AUTO_WHITEBALANCE) == 1;
    //Likewise, a manual white balance value only applies when auto white balance is off.
    if (!videoAutoWhiteBalance)
    {
        videoWhiteBalance = zedCamera.GetCameraSettings(sl.CAMERA_SETTINGS.WHITEBALANCE);
    }
    videoLEDStatus = zedCamera.GetCameraSettings(sl.CAMERA_SETTINGS.LED_STATUS) == 1;
}
/// <summary>
/// Pushes this component's local video setting fields (as set in the Inspector)
/// onto the ZED camera.
/// </summary>
private void ApplyLocalVideoSettingsToZED()
{
    zedCamera.SetCameraSettings(sl.CAMERA_SETTINGS.BRIGHTNESS, videoBrightness);
    zedCamera.SetCameraSettings(sl.CAMERA_SETTINGS.CONTRAST, videoContrast);
    zedCamera.SetCameraSettings(sl.CAMERA_SETTINGS.HUE, videoHue);
    zedCamera.SetCameraSettings(sl.CAMERA_SETTINGS.SATURATION, videoSaturation);
    zedCamera.SetCameraSettings(sl.CAMERA_SETTINGS.SHARPNESS, videoSharpness);
    zedCamera.SetCameraSettings(sl.CAMERA_SETTINGS.GAMMA, videoGamma);

    zedCamera.SetCameraSettings(sl.CAMERA_SETTINGS.AEC_AGC, videoAutoGainExposure ? 1 : 0);
    //Manual gain/exposure only apply when auto gain/exposure is disabled.
    if (!videoAutoGainExposure)
    {
        zedCamera.SetCameraSettings(sl.CAMERA_SETTINGS.GAIN, videoGain);
        zedCamera.SetCameraSettings(sl.CAMERA_SETTINGS.EXPOSURE, videoExposure);
    }

    zedCamera.SetCameraSettings(sl.CAMERA_SETTINGS.AUTO_WHITEBALANCE, videoAutoWhiteBalance ? 1 : 0);
    //Manual white balance only applies when auto white balance is disabled.
    if (!videoAutoWhiteBalance)
    {
        zedCamera.SetCameraSettings(sl.CAMERA_SETTINGS.WHITEBALANCE, videoWhiteBalance);
    }

    //BUGFIX: was hard-coded to 1, ignoring the local videoLEDStatus setting that
    //GetCurrentVideoSettings() reads back. Apply the local value like every other setting.
    zedCamera.SetCameraSettings(sl.CAMERA_SETTINGS.LED_STATUS, videoLEDStatus ? 1 : 0);
}
#region EventHandler
/// <summary>
/// Changes the real-world brightness by setting the brightness value in the shaders.
/// </summary>
/// <param name="newVal">New brightness value to be applied. Should be between 0 and 100.</param>
public void SetCameraBrightness(int newVal)
{
    //Shaders expect a normalized 0-1 factor rather than the 0-100 Inspector range.
    float normalizedBrightness = newVal / 100f;
    SetFloatValueOnPlaneMaterials("_ZEDFactorAffectReal", normalizedBrightness);
}
#if ZED_HDRP
/// <summary>
/// Sets the self-illumination property on the materials rendering the ZED image (HDRP only).
/// </summary>
/// <param name="newVal">New self-illumination value to push to the shaders.</param>
public void SetSelfIllumination(float newVal) => SetFloatValueOnPlaneMaterials("_SelfIllumination", newVal);
#endif
/// <summary>
/// Sets the maximum depth range of real-world objects. Pixels further than this range are discarded.
/// </summary>
/// <param name="newVal">Furthest distance, in meters, that the camera will display pixels for.
/// Should be between 0 and 40; values outside that range are clamped.</param>
public void SetMaxDepthRange(float newVal)
{
    //Doc previously claimed a 0-20m range, but the code has always clamped to 0-40m; doc corrected to match.
    if (newVal < 0 || newVal > 40)
    {
        Debug.LogWarning("Tried to set max depth range to " + newVal + "m. Must be within 0m and 40m.");
        newVal = Mathf.Clamp(newVal, 0, 40);
    }
    SetFloatValueOnPlaneMaterials("_MaxDepth", newVal);
}
/// <summary>
/// Sets a value of a float property on the material(s) rendering the ZED image.
/// Used to set things like brightness and maximum depth.
/// </summary>
/// <param name="propertyname">Name of value/property within Shader.</param>
/// <param name="newvalue">New value for the specified property.</param>
private void SetFloatValueOnPlaneMaterials(string propertyname, float newvalue)
{
    //BUGFIX: generic type arguments were stripped from this block (likely by a text/HTML
    //extraction step), which does not compile. Restored <ZEDRenderingPlane> and <Renderer>.
    foreach (ZEDRenderingPlane renderPlane in GetComponentsInChildren<ZEDRenderingPlane>())
    {
        Material rendmat;
        if (renderPlane.ActualRenderingPath == RenderingPath.Forward)
            rendmat = renderPlane.canvas.GetComponent<Renderer>().material;
        else if (renderPlane.ActualRenderingPath == RenderingPath.DeferredShading)
            rendmat = renderPlane.deferredMat;
        else
        {
            Debug.LogError("Can't set " + propertyname + " value for Rendering Path " + renderPlane.ActualRenderingPath +
                ": only Forward and DeferredShading are supported.");
            return;
        }
        rendmat.SetFloat(propertyname, newvalue);
    }
}
/// <summary>
/// Sets a boolean property (stored as a 0/1 int in the shader) on the material(s)
/// rendering the ZED image. Planes without a MeshRenderer on their canvas are skipped.
/// </summary>
/// <param name="propertyname">Name of value/property within Shader.</param>
/// <param name="newvalue">New value for the specified property; written as 1 (true) or 0 (false).</param>
private void SetBoolValueOnPlaneMaterials(string propertyname, bool newvalue)
{
    //BUGFIX: generic type arguments were stripped from this block (likely by a text/HTML
    //extraction step), which does not compile. Restored <ZEDRenderingPlane> and <MeshRenderer>.
    foreach (ZEDRenderingPlane renderPlane in GetComponentsInChildren<ZEDRenderingPlane>())
    {
        MeshRenderer rend = renderPlane.canvas.GetComponent<MeshRenderer>();
        if (!rend) continue;
        Material rendmat = rend.material;
        rendmat.SetInt(propertyname, newvalue ? 1 : 0);
    }
}
/// <summary>
/// Flag set to true when the HMD-to-ZED calibration file has changed during runtime.
/// Causes values from the new file to be applied during Update().
/// </summary>
private bool calibrationHasChanged = false;

/// <summary>
/// Sets the calibrationHasChanged flag to true, which causes the next Update() to
/// re-apply the HMD-to-ZED offsets.
/// </summary>
private void CalibrationHasChanged() => calibrationHasChanged = true;
#endregion
#if UNITY_EDITOR
/// <summary>
/// Handles changes to tracking or streaming settings made from the Inspector at runtime,
/// enabling/disabling the corresponding camera features to match the toggles, then
/// re-applies rendering settings. Editor-only (Unity calls OnValidate on Inspector edits).
/// </summary>
void OnValidate()
{
    //Only react once the camera object exists; Inspector edits before that are picked up at init.
    if (zedCamera != null)
    {
        // If tracking has been switched on
        if (zedCamera.IsCameraReady && !isTrackingEnable && enableTracking)
        {
            //Enables tracking and initializes the first position of the camera.
            //NOTE: the assignment inside the condition deliberately writes the result back into
            //enableTracking, so the Inspector toggle rolls back to false if EnableTracking() fails.
            if (!(enableTracking = (zedCamera.EnableTracking(ref zedOrientation, ref zedPosition, enableSpatialMemory, enablePoseSmoothing, estimateInitialPosition, trackingIsStatic,
                enableIMUFusion, pathSpatialMemory) == sl.ERROR_CODE.SUCCESS)))
            {
                isZEDTracked = false;
                throw new Exception(ZEDLogMessage.Error2Str(ZEDLogMessage.ERROR.TRACKING_NOT_INITIALIZED));
            }
            else
            {
                isZEDTracked = true;
                isTrackingEnable = true;
            }
        }
        // If tracking has been switched off
        if (isTrackingEnable && !enableTracking)
        {
            isZEDTracked = false;
            //Lock on grabLock so tracking isn't disabled while another thread is mid-grab.
            lock (zedCamera.grabLock)
            {
                zedCamera.DisableTracking();
            }
            isTrackingEnable = false;
        }
        // If streaming has been switched on
        if (enableStreaming && !isStreamingEnable)
        {
            lock (zedCamera.grabLock)
            {
                sl.ERROR_CODE err = zedCamera.EnableStreaming(streamingCodec, (uint)bitrate, (ushort)streamingPort, gopSize, adaptativeBitrate, chunkSize, streamingTargetFramerate);
                if (err == sl.ERROR_CODE.SUCCESS)
                {
                    isStreamingEnable = true;
                }
                else
                {
                    //Revert the Inspector toggle so the UI reflects that streaming failed to start.
                    enableStreaming = false;
                    isStreamingEnable = false;
                }
            }
        }
        // If streaming has been switched off
        if (!enableStreaming && isStreamingEnable)
        {
            lock (zedCamera.grabLock)
            {
                zedCamera.DisableStreaming();
                isStreamingEnable = false;
            }
        }
        //Reapplies graphics settings based on current values.
        setRenderingSettings();
    }
}
#endif
}