ZEDCommon.cs
  1. //======= Copyright (c) Stereolabs Corporation, All rights reserved. ===============
  2. using System.Runtime.InteropServices;
  3. using System;
  4. using System.Collections.Generic;
  5. using UnityEngine;
  6. /// <summary>
  7. /// This file holds classes built to be exchanged between the ZED wrapper DLL (sl_unitywrapper.dll)
  8. /// and C# scripts within Unity. Most have parity with a structure within the ZED C++ SDK.
  9. /// Find more info at https://www.stereolabs.com/developers/documentation/API/latest/.
  10. /// </summary>
  11. namespace sl
  12. {
  13. public class ZEDCommon
  14. {
  15. public const string NameDLL = "sl_unitywrapper";
  16. }
  17. public enum ZED_CAMERA_ID
  18. {
  19. CAMERA_ID_01,
  20. CAMERA_ID_02,
  21. CAMERA_ID_03,
  22. CAMERA_ID_04
  23. };
  24. public enum INPUT_TYPE
  25. {
  26. INPUT_TYPE_USB,
  27. INPUT_TYPE_SVO,
  28. INPUT_TYPE_STREAM
  29. };
  30. /// <summary>
  31. /// Constants for the plugin. Should not be changed.
  32. /// </summary>
  33. public enum Constant
  34. {
  35. MAX_CAMERA_PLUGIN = 4,
  36. PLANE_DISTANCE = 10,
  37. MAX_OBJECTS = 75,
  38. MAX_BATCH_SIZE = 200
  39. };
  40. /// <summary>
  41. /// Holds a 3x3 matrix that can be marshaled between the ZED
  42. /// Unity wrapper and C# scripts.
  43. /// </summary>
  44. public struct Matrix3x3
  45. {
  46. [MarshalAs(UnmanagedType.ByValArray, SizeConst = 9)]
  47. public float[] m; //3x3 matrix.
  48. };
  49. /// <summary>
  50. /// Holds a camera resolution as two pointers (for height and width) for easy
  51. /// passing back and forth to the ZED Unity wrapper.
  52. /// </summary>
  53. public struct Resolution
  54. {
  55. /// <summary>
  56. /// Creates a Resolution with the given width and height, in pixels.
  57. /// </summary>
  58. /// <param name="width">Image width in pixels.</param>
  59. /// <param name="height">Image height in pixels.</param>
  60. public Resolution(uint width, uint height)
  61. {
  62. this.width = (System.UIntPtr)width;
  63. this.height = (System.UIntPtr)height;
  64. }
  65. public System.UIntPtr width;
  66. public System.UIntPtr height;
  67. };
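// Editor's note: a minimal usage sketch, not part of the original file. The width/height are stored
// as UIntPtr so they marshal like size_t on the C++ side; this hypothetical helper converts them
// back to plain ints, e.g. before allocating a Unity Texture2D.
public static class ResolutionExample
{
    public static void ToInts(Resolution res, out int width, out int height)
    {
        // UIntPtr -> uint -> int; ZED resolutions are far below int.MaxValue.
        width = (int)(uint)res.width;
        height = (int)(uint)res.height;
    }
}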
  68. /// <summary>
  69. /// Pose structure with data on timing and validity in addition to
  70. /// position and rotation.
  71. /// </summary>
  72. [StructLayout(LayoutKind.Sequential)]
  73. public struct Pose
  74. {
  75. public bool valid;
  76. public ulong timestamp;
  77. public Quaternion rotation;
  78. public Vector3 translation;
  79. public int pose_confidence;
  80. };
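// Editor's note: a minimal usage sketch, not part of the original file. It shows a typical way to
// apply a Pose returned by the wrapper to a Unity Transform; "camTransform" is a hypothetical
// Transform supplied by the caller, and the validity check simply respects the 'valid' flag above.
public static class PoseExample
{
    public static void Apply(Pose pose, Transform camTransform)
    {
        if (!pose.valid) return; // Ignore poses flagged as invalid by the SDK.
        camTransform.localRotation = pose.rotation;
        camTransform.localPosition = pose.translation;
    }
}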
  81. /// <summary>
  82. /// Rect structure defining a rectangle or ROI in pixels.
  83. /// Used to set the ROI target for AEC/AGC.
  84. /// </summary>
  85. [StructLayout(LayoutKind.Sequential)]
  86. public struct iRect
  87. {
  88. public int x;
  89. public int y;
  90. public int width;
  91. public int height;
  92. };
  93. public enum CAMERA_STATE
  94. {
  95. /// <summary>
  96. /// Defines if the camera can be opened by the SDK.
  97. /// </summary>
  98. AVAILABLE,
  99. /// <summary>
  100. /// Defines if the camera is already opened and unavailable
  101. /// </summary>
  102. NOT_AVAILABLE
  103. }
  104. [StructLayout(LayoutKind.Sequential)]
  105. public struct DeviceProperties
  106. {
  107. /// <summary>
  108. /// The camera state
  109. /// </summary>
  110. public sl.CAMERA_STATE cameraState;
  111. /// <summary>
  112. /// The camera id (Notice that only the camera with id '0' can be used on Windows)
  113. /// </summary>
  114. public int id;
  115. /// <summary>
  116. /// The camera model
  117. /// </summary>
  118. public sl.MODEL cameraModel;
  119. /// <summary>
  120. /// The camera serial number
  121. /// </summary>
  122. public int sn;
  123. };
  124. /// <summary>
  125. /// Full IMU data structure.
  126. /// </summary>
  127. [StructLayout(LayoutKind.Sequential)]
  128. public struct ImuData
  129. {
  130. /// <summary>
  131. /// Indicates if IMU data is available.
  132. /// </summary>
  133. public bool available;
  134. /// <summary>
  135. /// IMU Data timestamp in ns
  136. /// </summary>
  137. public ulong timestamp;
  138. /// <summary>
  139. /// Gyroscope calibrated data in degrees/second.
  140. /// </summary>
  141. public Vector3 angularVelocity;
  142. /// <summary>
  143. /// Accelerometer calibrated data in m/s².
  144. /// </summary>
  145. public Vector3 linearAcceleration;
  146. /// <summary>
  147. /// Gyroscope raw/uncalibrated data in degrees/second.
  148. /// </summary>
  149. public Vector3 angularVelocityUncalibrated;
  150. /// <summary>
  151. /// Accelerometer raw/uncalibrated data in m/s².
  152. /// </summary>
  153. public Vector3 linearAccelerationUncalibrated;
  154. /// <summary>
  155. /// Orientation from gyro/accelerator fusion.
  156. /// </summary>
  157. public Quaternion fusedOrientation;
  158. /// <summary>
  159. /// Covariance matrix of the quaternion.
  160. /// </summary>
  161. public Matrix3x3 orientationCovariance;
  162. /// <summary>
  163. /// Gyroscope raw data covariance matrix.
  164. /// </summary>
  165. public Matrix3x3 angularVelocityCovariance;
  166. /// <summary>
  167. /// Accelerometer raw data covariance matrix.
  168. /// </summary>
  169. public Matrix3x3 linearAccelerationCovariance;
  170. };
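// Editor's note: a minimal usage sketch, not part of the original file. It shows one way to consume
// ImuData: integrating the calibrated angular velocity (degrees/second) over a frame to get a small
// rotation delta. "imu", "current" and "deltaTimeSec" are assumed to be supplied by the caller.
public static class ImuDataExample
{
    public static Quaternion IntegrateOneFrame(ImuData imu, Quaternion current, float deltaTimeSec)
    {
        if (!imu.available) return current;
        // Convert the angular velocity (degrees/second) into a small rotation for this frame.
        Vector3 deltaDegrees = imu.angularVelocity * deltaTimeSec;
        return current * Quaternion.Euler(deltaDegrees);
    }
}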
  171. [StructLayout(LayoutKind.Sequential)]
  172. public struct BarometerData
  173. {
  174. /// <summary>
  175. /// Indicates if barometer data is available.
  176. /// </summary>
  177. public bool available;
  178. /// <summary>
  179. /// Barometer data timestamp in ns.
  180. /// </summary>
  181. public ulong timestamp;
  182. /// <summary>
  183. /// Barometer ambient air pressure in hPa
  184. /// </summary>
  185. public float pressure;
  186. /// <summary>
  187. /// Relative altitude from first camera position
  188. /// </summary>
  189. public float relativeAltitude;
  190. };
  191. public enum HEADING_STATE
  192. {
  193. /// <summary>
  194. /// The heading is reliable and not affected by iron interferences.
  195. /// </summary>
  196. GOOD,
  197. /// <summary>
  198. /// The heading is reliable, but affected by slight iron interferences.
  199. /// </summary>
  200. OK,
  201. /// <summary>
  202. /// The heading is not reliable because affected by strong iron interferences.
  203. /// </summary>
  204. NOT_GOOD,
  205. /// <summary>
  206. /// The magnetometer has not been calibrated.
  207. /// </summary>
  208. NOT_CALIBRATED,
  209. /// <summary>
  210. /// The magnetometer sensor is not available.
  211. /// </summary>
  212. MAG_NOT_AVAILABLE
  213. };
  214. [StructLayout(LayoutKind.Sequential)]
  215. public struct MagnetometerData
  216. {
  217. /// <summary>
  218. /// Indicates if magnetometer data is available.
  219. /// </summary>
  220. public bool available;
  221. /// <summary>
  222. /// Magnetometer data timestamp in ns.
  223. /// </summary>
  224. public ulong timestamp;
  225. /// <summary>
  226. /// Magnetic field calibrated values in uT
  227. /// </summary>
  229. public Vector3 magneticField;
  230. /// <summary>
  231. /// Magnetic field raw values in uT
  232. /// </summary>
  233. public Vector3 magneticFieldUncalibrated;
  234. /// <summary>
  235. /// The camera heading in degrees relative to the magnetic North Pole.
  236. /// note: The magnetic North Pole has an offset with respect to the geographic North Pole, depending on the
  237. /// geographic position of the camera.
  238. /// To get a correct magnetic heading the magnetometer sensor must be calibrated using the ZED Sensor Viewer tool
  239. /// </summary>
  240. public float magneticHeading;
  241. /// <summary>
  242. /// The state of the \ref magneticHeading value.
  243. /// </summary>
  244. public HEADING_STATE magnetic_heading_state;
  245. /// <summary>
  246. /// The accuracy of the magnetic heading measure in the range [0.0,1.0].
  247. /// A negative value means that the magnetometer must be calibrated using the ZED Sensor Viewer tool
  248. /// </summary>
  249. public float magnetic_heading_accuracy;
  250. };
  251. [StructLayout(LayoutKind.Sequential)]
  252. public struct TemperatureSensorData
  253. {
  254. /// <summary>
  255. /// Temperature from IMU device ( -100 if not available)
  256. /// </summary>
  257. public float imu_temp;
  258. /// <summary>
  259. /// Temperature from Barometer device ( -100 if not available)
  260. /// </summary>
  261. public float barometer_temp;
  262. /// <summary>
  263. /// Temperature from Onboard left analog temperature sensor ( -100 if not available)
  264. /// </summary>
  265. public float onboard_left_temp;
  266. /// <summary>
  267. /// Temperature from Onboard right analog temperature sensor ( -100 if not available)
  268. /// </summary>
  269. public float onboard_right_temp;
  270. };
  271. [StructLayout(LayoutKind.Sequential)]
  272. public struct SensorsData
  273. {
  274. /// <summary>
  275. /// Contains Imu Data
  276. /// </summary>
  277. public ImuData imu;
  278. /// <summary>
  279. /// Contains Barometer Data
  280. /// </summary>
  281. public BarometerData barometer;
  282. /// <summary>
  283. /// Contains Mag Data
  284. /// </summary>
  285. public MagnetometerData magnetometer;
  286. /// <summary>
  287. /// Contains Temperature Data
  288. /// </summary>
  289. public TemperatureSensorData temperatureSensor;
  290. /// <summary>
  291. /// Indicates the camera's motion state:
  292. /// -> Static : 0
  293. /// -> Moving : 1
  294. /// -> Falling : 2
  295. /// </summary>
  296. public int camera_moving_state;
  297. /// <summary>
  298. /// Indicates if the current sensor data is synchronized to the current image (>=1). Otherwise, the value will be 0.
  299. /// </summary>
  300. public int image_sync_val;
  301. };
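// Editor's note: a minimal usage sketch, not part of the original file. It shows how the
// camera_moving_state and image_sync_val fields documented above can be read out of a SensorsData
// instance retrieved from the wrapper.
public static class SensorsDataExample
{
    public static string Describe(SensorsData data)
    {
        string motion;
        switch (data.camera_moving_state)
        {
            case 0: motion = "Static"; break;
            case 1: motion = "Moving"; break;
            case 2: motion = "Falling"; break;
            default: motion = "Unknown"; break;
        }
        bool syncedToImage = data.image_sync_val >= 1;
        return "Camera is " + motion + (syncedToImage ? " (synced to current image)" : " (not synced)");
    }
}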
  302. /*******************************************************************************************************************************
  303. *******************************************************************************************************************************/
  304. /// <summary>
  305. /// Calibration information for an individual sensor on the ZED (left or right). </summary>
  306. /// <remarks>For more information, see:
  307. /// https://www.stereolabs.com/developers/documentation/API/v2.5.1/structsl_1_1CameraParameters.html </remarks>
  308. [StructLayout(LayoutKind.Sequential)]
  309. public struct CameraParameters
  310. {
  311. /// <summary>
  312. /// Focal X.
  313. /// </summary>
  314. public float fx;
  315. /// <summary>
  316. /// Focal Y.
  317. /// </summary>
  318. public float fy;
  319. /// <summary>
  320. /// Optical center X.
  321. /// </summary>
  322. public float cx;
  323. /// <summary>
  324. /// Optical center Y.
  325. /// </summary>
  326. public float cy;
  327. /// <summary>
  328. /// Distortion coefficients.
  329. /// </summary>
  330. [MarshalAs(UnmanagedType.ByValArray, ArraySubType = UnmanagedType.U8, SizeConst = 5)]
  331. public double[] disto;
  332. /// <summary>
  333. /// Vertical field of view after stereo rectification.
  334. /// </summary>
  335. public float vFOV;
  336. /// <summary>
  337. /// Horizontal field of view after stereo rectification.
  338. /// </summary>
  339. public float hFOV;
  340. /// <summary>
  341. /// Diagonal field of view after stereo rectification.
  342. /// </summary>
  343. public float dFOV;
  344. /// <summary>
  345. /// Camera's current resolution.
  346. /// </summary>
  347. public Resolution resolution;
  348. };
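// Editor's note: a minimal sketch, not part of the original file. The SDK already fills the hFOV,
// vFOV and dFOV fields; this hypothetical helper only illustrates how those angles relate to the
// focal length and resolution through the standard pinhole model (angles in degrees).
public static class CameraParametersExample
{
    public static float HorizontalFOV(CameraParameters cam)
    {
        float width = (float)(uint)cam.resolution.width;
        // hFOV = 2 * atan(width / (2 * fx)), converted from radians to degrees.
        return 2.0f * Mathf.Atan(width / (2.0f * cam.fx)) * Mathf.Rad2Deg;
    }
}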
  349. /// <summary>
  350. /// List of the available onboard sensors
  351. /// </summary>
  352. public enum SENSOR_TYPE
  353. {
  354. /// <summary>
  355. /// Three axis Accelerometer sensor to measure the inertial accelerations.
  356. /// </summary>
  357. ACCELEROMETER,
  358. /// <summary>
  359. /// Three axis Gyroscope sensor to measure the angular velocities.
  360. /// </summary>
  361. GYROSCOPE,
  362. /// <summary>
  363. /// Three axis Magnetometer sensor to measure the orientation of the device with respect to the Earth's magnetic field.
  364. /// </summary>
  365. MAGNETOMETER,
  366. /// <summary>
  367. /// Barometer sensor to measure the atmospheric pressure.
  368. /// </summary>
  369. BAROMETER,
  370. LAST
  371. };
  372. /// <summary>
  373. /// List of the available onboard sensors measurement units
  374. /// </summary>
  375. public enum SENSORS_UNIT
  376. {
  377. /// <summary>
  378. /// Acceleration [m/s²].
  379. /// </summary>
  380. M_SEC_2,
  381. /// <summary>
  382. /// Angular velocity [deg/s].
  383. /// </summary>
  384. DEG_SEC,
  385. /// <summary>
  386. /// Magnetic field [uT].
  387. /// </summary>
  388. U_T,
  389. /// <summary>
  390. /// Atmospheric pressure [hPa].
  391. /// </summary>
  392. HPA,
  393. /// <summary>
  394. /// Temperature [°C].
  395. /// </summary>
  396. CELSIUS,
  397. /// <summary>
  398. /// Frequency [Hz].
  399. /// </summary>
  400. HERTZ,
  401. /// <summary>
  402. ///
  403. /// </summary>
  404. LAST
  405. };
  406. /// <summary>
  407. /// Structure containing information about a single sensor available in the current device
  408. /// </summary>
  409. [StructLayout(LayoutKind.Sequential)]
  410. public struct SensorParameters
  411. {
  412. /// <summary>
  413. /// The type of the sensor as \ref SENSOR_TYPE.
  414. /// </summary>
  415. public SENSOR_TYPE type;
  416. /// <summary>
  417. /// The resolution of the sensor.
  418. /// </summary>
  419. public float resolution;
  420. /// <summary>
  421. /// The sampling rate (or ODR) of the sensor.
  422. /// </summary>
  423. public float sampling_rate;
  424. /// <summary>
  425. /// The range values of the sensor. MIN: `range.x`, MAX: `range.y`
  426. /// </summary>
  427. public float2 range;
  428. /// <summary>
  429. /// White noise density, given as continuous (frequency-independent). Units are expressed in sensor_unit/√(Hz). `NAN` if the information is not available.
  430. /// </summary>
  431. public float noise_density;
  432. /// <summary>
  433. /// Random walk, derived from the Allan variance, given as continuous (frequency-independent). Units are expressed in sensor_unit/s/√(Hz). `NAN` if the information is not available.
  434. /// </summary>
  435. public float random_walk;
  436. /// <summary>
  437. /// The measurement unit of the sensor.
  438. /// </summary>
  439. public SENSORS_UNIT sensor_unit;
  440. /// <summary>
  441. /// Whether the sensor is available in your camera.
  442. /// </summary>
  443. public bool isAvailable;
  444. };
  445. /// <summary>
  446. /// Structure containing information about all the sensors available in the current device
  447. /// </summary>
  448. [StructLayout(LayoutKind.Sequential)]
  449. public struct SensorsConfiguration
  450. {
  451. /// <summary>
  452. /// The firmware version of the sensor module, 0 if no sensors are available (ZED camera model).
  453. /// </summary>
  454. public uint firmware_version;
  455. /// <summary>
  456. /// contains rotation between IMU frame and camera frame.
  457. /// </summary>
  458. public float4 camera_imu_rotation;
  459. /// <summary>
  460. /// contains translation between IMU frame and camera frame.
  461. /// </summary>
  462. public float3 camera_imu_translation;
  463. /// <summary>
  464. /// Magnetometer to IMU rotation. contains rotation between IMU frame and magnetometer frame.
  465. /// </summary>
  466. public float4 imu_magnometer_rotation;
  467. /// <summary>
  468. /// Magnetometer to IMU translation. contains translation between IMU frame and magnetometer frame.
  469. /// </summary>
  470. public float3 imu_magnometer_translation;
  471. /// <summary>
  472. /// Configuration of the accelerometer device.
  473. /// </summary>
  474. public SensorParameters accelerometer_parameters;
  475. /// <summary>
  476. /// Configuration of the gyroscope device.
  477. /// </summary>
  478. public SensorParameters gyroscope_parameters;
  479. /// <summary>
  480. /// Configuration of the magnetometer device.
  481. /// </summary>
  482. public SensorParameters magnetometer_parameters;
  483. /// <summary>
  484. /// Configuration of the barometer device
  485. /// </summary>
  486. public SensorParameters barometer_parameters;
  487. /// <summary>
  488. /// Checks if a sensor type is available on the device.
  489. /// </summary>
  490. /// <param name="sensor_type">Sensor type to check.</param>
  491. /// <returns>True if the sensor is available; false otherwise.</returns>
  492. public bool isSensorAvailable(SENSOR_TYPE sensor_type)
  493. {
  494. switch (sensor_type)
  495. {
  496. case SENSOR_TYPE.ACCELEROMETER:
  497. return accelerometer_parameters.isAvailable;
  498. case SENSOR_TYPE.GYROSCOPE:
  499. return gyroscope_parameters.isAvailable;
  500. case SENSOR_TYPE.MAGNETOMETER:
  501. return magnetometer_parameters.isAvailable;
  502. case SENSOR_TYPE.BAROMETER:
  503. return barometer_parameters.isAvailable;
  504. default:
  505. break;
  506. }
  507. return false;
  508. }
  509. };
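// Editor's note: a minimal usage sketch, not part of the original file. "config" is assumed to have
// been filled by the wrapper (e.g. via ZEDCamera); the example itself only relies on the
// isSensorAvailable() helper defined just above.
public static class SensorsConfigurationExample
{
    public static void LogBarometer(SensorsConfiguration config)
    {
        if (config.isSensorAvailable(SENSOR_TYPE.BAROMETER))
            Debug.Log("Barometer available, sensor module firmware " + config.firmware_version);
        else
            Debug.Log("No barometer on this camera model.");
    }
}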
  510. /// <summary>
  511. /// Holds calibration information about the current ZED's hardware, including per-sensor
  512. /// calibration and offsets between the two sensors.
  513. /// </summary> <remarks>For more info, see:
  514. /// https://www.stereolabs.com/developers/documentation/API/v2.5.1/structsl_1_1CalibrationParameters.html </remarks>
  515. [StructLayout(LayoutKind.Sequential)]
  516. public struct CalibrationParameters
  517. {
  518. /// <summary>
  519. /// Parameters of the left sensor.
  520. /// </summary>
  521. public CameraParameters leftCam;
  522. /// <summary>
  523. /// Parameters of the right sensor.
  524. /// </summary>
  525. public CameraParameters rightCam;
  526. /// <summary>
  527. /// Rotation (using Rodrigues' transformation) between the two sensors. Defined as 'tilt', 'convergence' and 'roll'.
  528. /// </summary>
  529. public Quaternion Rot;
  530. /// <summary>
  531. /// Translation between the two sensors. T[0] is the distance between the two cameras in meters.
  532. /// </summary>
  533. public Vector3 Trans;
  534. };
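// Editor's note: a minimal sketch, not part of the original file. The stereo baseline is the X
// component of the translation between the two sensors (see Trans above), and together with the
// focal length it links disparity (in pixels) to depth: depth = fx * baseline / disparity.
public static class CalibrationParametersExample
{
    public static float DepthFromDisparity(CalibrationParameters calib, float disparityPixels)
    {
        float baseline = Mathf.Abs(calib.Trans.x); // Distance between the two sensors (meters by default).
        return calib.leftCam.fx * baseline / disparityPixels;
    }
}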
  535. /// <summary>
  536. /// Container for information about the current SVO recording process.
  537. /// </summary><remarks>
  538. /// Mirrors sl.RecordingState in the ZED C++ SDK. For more info, visit:
  539. /// https://www.stereolabs.com/developers/documentation/API/v2.5.1/structsl_1_1RecordingState.html
  540. /// </remarks>
  541. [StructLayout(LayoutKind.Sequential)]
  542. public struct Recording_state
  543. {
  544. /// <summary>
  545. /// Status of the current frame. True if recording was successful, false if frame could not be written.
  546. /// </summary>
  547. public bool status;
  548. /// <summary>
  549. /// Compression time for the current frame in milliseconds.
  550. /// </summary>
  551. public double current_compression_time;
  552. /// <summary>
  553. /// Compression ratio (% of raw size) for the current frame.
  554. /// </summary>
  555. public double current_compression_ratio;
  556. /// <summary>
  558. /// Average compression time in milliseconds since the beginning of recording.
  558. /// </summary>
  559. public double average_compression_time;
  560. /// <summary>
  561. /// Compression ratio (% of raw size) since recording was started.
  562. /// </summary>
  563. public double average_compression_ratio;
  564. }
  565. /// <summary>
  566. /// Status of the ZED's self-calibration. Since v0.9.3, self-calibration is done in the background and
  567. /// starts in the sl.ZEDCamera.Init or Reset functions.
  568. /// </summary><remarks>
  569. /// Mirrors SELF_CALIBRATION_STATE in the ZED C++ SDK. For more info, see:
  570. /// https://www.stereolabs.com/developers/documentation/API/v2.5.1/group__Video__group.html#gacce19db438a07075b7e5e22ee5845c95
  571. /// </remarks>
  572. public enum ZED_SELF_CALIBRATION_STATE
  573. {
  574. /// <summary>
  575. /// Self-calibration has not yet been called (no Init() called).
  576. /// </summary>
  577. SELF_CALIBRATION_NOT_CALLED,
  578. /// <summary>
  579. /// Self-calibration is currently running.
  580. /// </summary>
  581. SELF_CALIBRATION_RUNNING,
  582. /// <summary>
  583. /// Self-calibration has finished running but did not manage to get coherent values. Old Parameters are used instead.
  584. /// </summary>
  585. SELF_CALIBRATION_FAILED,
  586. /// <summary>
  587. /// Self-calibration has finished running and successfully produced coherent values.
  588. /// </summary>
  589. SELF_CALIBRATION_SUCCESS
  590. };
  591. /// <summary>
  592. /// Lists available depth computation modes. Each mode offers better accuracy than the
  593. /// mode before it, but at a performance cost.
  594. /// </summary><remarks>
  595. /// Mirrors DEPTH_MODE in the ZED C++ SDK. For more info, see:
  596. /// https://www.stereolabs.com/developers/documentation/API/v2.5.1/group__Depth__group.html#ga8d542017c9b012a19a15d46be9b7fa43
  597. /// </remarks>
  598. public enum DEPTH_MODE
  599. {
  600. /// <summary>
  601. /// Does not compute any depth map. Only rectified stereo images will be available.
  602. /// </summary>
  603. NONE,
  604. /// <summary>
  605. /// Fastest mode for depth computation.
  606. /// </summary>
  607. PERFORMANCE,
  608. /// <summary>
  609. /// Balanced quality mode. Depth map is robust in most environments and requires medium compute power.
  610. /// </summary>
  611. QUALITY,
  612. /// <summary>
  613. /// Native depth. Very accurate, but at a large performance cost.
  614. /// </summary>
  615. ULTRA,
  616. /// <summary>
  617. /// End to End Neural disparity estimation, requires AI module
  618. /// </summary>
  619. NEURAL
  620. };
  621. /// <summary>
  622. /// Types of Image view modes, for creating human-viewable textures.
  623. /// Used only in ZEDRenderingPlane as a simplified version of sl.VIEW, which has more detailed options.
  624. /// </summary>
  625. public enum VIEW_MODE
  626. {
  627. /// <summary>
  628. /// Displays regular color images.
  629. /// </summary>
  630. VIEW_IMAGE,
  631. /// <summary>
  632. /// Displays a greyscale depth map.
  633. /// </summary>
  634. VIEW_DEPTH,
  635. /// <summary>
  636. /// Displays a normal map.
  637. /// </summary>
  638. VIEW_NORMALS,
  639. };
  640. /// <summary>
  641. /// List of error codes in the ZED SDK.
  642. /// </summary><remarks>
  643. /// Mirrors ERROR_CODE in the ZED C++ SDK. For more info, read:
  644. /// https://www.stereolabs.com/developers/documentation/API/v2.5.1/group__Camera__group.html#ga4db9ee29f2ff83c71567c12f6bfbf28c
  645. /// </remarks>
  646. public enum ERROR_CODE
  647. {
  648. /// <summary>
  649. /// Operation was successful.
  650. /// </summary>
  651. SUCCESS,
  652. /// <summary>
  653. /// Standard, generic code for unsuccessful behavior when no other code is more appropriate.
  654. /// </summary>
  655. FAILURE,
  656. /// <summary>
  657. /// No GPU found, or CUDA capability of the device is not supported.
  658. /// </summary>
  659. NO_GPU_COMPATIBLE,
  660. /// <summary>
  661. /// Not enough GPU memory for this depth mode. Try a different mode (such as PERFORMANCE).
  662. /// </summary>
  663. NOT_ENOUGH_GPUMEM,
  664. /// <summary>
  665. /// The ZED camera is not plugged in or detected.
  666. /// </summary>
  667. CAMERA_NOT_DETECTED,
  668. /// <summary>
  669. /// The MCU that controls the sensors module has an invalid serial number. You can try to recover it by launching the 'ZED Diagnostic' tool from the command line with the option '-r'.
  670. /// </summary>
  671. SENSORS_NOT_INITIALIZED,
  672. /// <summary>
  673. /// A ZED Mini is detected but the inertial sensor cannot be opened. (Never called for the original ZED.)
  674. /// </summary>
  675. SENSOR_NOT_DETECTED,
  676. /// <summary>
  677. /// For Nvidia Jetson X1 only - resolution not yet supported (USB3.0 bandwidth).
  678. /// </summary>
  679. INVALID_RESOLUTION,
  680. /// <summary>
  681. /// USB communication issues. Occurs when the camera FPS cannot be reached, due to a lot of corrupted frames.
  682. /// Try changing the USB port.
  683. /// </summary>
  684. LOW_USB_BANDWIDTH,
  685. /// <summary>
  686. /// ZED calibration file is not found on the host machine. Use ZED Explorer or ZED Calibration to get one.
  687. /// </summary>
  688. CALIBRATION_FILE_NOT_AVAILABLE,
  689. /// <summary>
  690. /// ZED calibration file is not valid. Try downloading the factory one or recalibrating using the ZED Calibration tool.
  691. /// </summary>
  692. INVALID_CALIBRATION_FILE,
  693. /// <summary>
  694. /// The provided SVO file is not valid.
  695. /// </summary>
  696. INVALID_SVO_FILE,
  697. /// <summary>
  698. /// An SVO recorder-related error occurred (such as not enough free storage or an invalid file path).
  699. /// </summary>
  700. SVO_RECORDING_ERROR,
  701. /// <summary>
  702. /// An SVO related error when NVIDIA based compression cannot be loaded
  703. /// </summary>
  704. SVO_UNSUPPORTED_COMPRESSION,
  705. /// <summary>
  706. /// The requested coordinate system is not available.
  707. /// </summary>
  708. INVALID_COORDINATE_SYSTEM,
  709. /// <summary>
  710. /// The firmware of the ZED is out of date. Update to the latest version.
  711. /// </summary>
  712. INVALID_FIRMWARE,
  713. /// <summary>
  714. /// An invalid parameter has been set for the function.
  715. /// </summary>
  716. INVALID_FUNCTION_PARAMETERS,
  717. /// <summary>
  718. /// In grab() only, the current call returns the same frame as the last call. Not a new frame.
  719. /// </summary>
  720. NOT_A_NEW_FRAME,
  721. /// <summary>
  722. /// In grab() only, a CUDA error has been detected in the process. Activate wrapperVerbose in ZEDManager.cs for more info.
  723. /// </summary>
  724. CUDA_ERROR,
  725. /// <summary>
  726. /// In grab() only, ZED SDK is not initialized. Probably a missing call to sl::Camera::open.
  727. /// </summary>
  728. CAMERA_NOT_INITIALIZED,
  729. /// <summary>
  730. /// Your NVIDIA driver is too old and not compatible with your current CUDA version.
  731. /// </summary>
  732. NVIDIA_DRIVER_OUT_OF_DATE,
  733. /// <summary>
  734. /// The function call is not valid in the current context. Could be due to a missing call to sl::Camera::open.
  735. /// </summary>
  736. INVALID_FUNCTION_CALL,
  737. /// <summary>
  738. /// The SDK wasn't able to load its dependencies, the installer should be launched.
  739. /// </summary>
  740. CORRUPTED_SDK_INSTALLATION,
  741. /// <summary>
  742. /// The installed SDK is not the SDK used to compile the program.
  743. /// </summary>
  744. INCOMPATIBLE_SDK_VERSION,
  745. /// <summary>
  746. /// The given area file does not exist. Check the file path.
  747. /// </summary>
  748. INVALID_AREA_FILE,
  749. /// <summary>
  750. /// The area file does not contain enough data to be used, or the sl::DEPTH_MODE used during the creation of the
  751. /// area file is different from the one currently set.
  752. /// </summary>
  753. INCOMPATIBLE_AREA_FILE,
  754. /// <summary>
  755. /// Camera failed to set up.
  756. /// </summary>
  757. CAMERA_FAILED_TO_SETUP,
  758. /// <summary>
  759. /// Your ZED cannot be opened. Try replugging it to another USB port or flipping the USB-C connector (if using ZED Mini).
  760. /// </summary>
  761. CAMERA_DETECTION_ISSUE,
  762. /// <summary>
  763. /// The Camera is already in use by another process.
  764. /// </summary>
  765. CAMERA_ALREADY_IN_USE,
  766. /// <summary>
  767. /// No GPU found or CUDA is unable to list it. Can be a driver/reboot issue.
  768. /// </summary>
  769. NO_GPU_DETECTED,
  770. /// <summary>
  771. /// Plane not found. Either no plane is detected in the scene, at the location or corresponding to the floor,
  772. /// or the floor plane doesn't match the prior given.
  773. /// </summary>
  774. PLANE_NOT_FOUND,
  775. /// <summary>
  776. /// The Object detection module is only compatible with the ZED 2
  777. /// </summary>
  778. MODULE_NOT_COMPATIBLE_WITH_CAMERA,
  779. /// <summary>
  780. /// The module needs the sensors to be enabled (see InitParameters::sensors_required)
  781. /// </summary>
  782. MOTION_SENSORS_REQUIRED,
  783. /// <summary>
  784. /// The module needs a newer version of CUDA
  785. /// </summary>
  786. MODULE_NOT_COMPATIBLE_WITH_CUDA_VERSION,
  787. /// <summary>
  788. /// End of ERROR_CODE
  789. /// </summary>
  790. ERROR_CODE_LAST
  791. };
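// Editor's note: a minimal usage sketch, not part of the original file. It shows one reasonable way
// to react to an ERROR_CODE returned by a wrapper call: treat SUCCESS as OK, tolerate
// NOT_A_NEW_FRAME during normal polling, and log everything else.
public static class ErrorCodeExample
{
    public static bool Check(ERROR_CODE err, string context)
    {
        if (err == ERROR_CODE.SUCCESS) return true;
        if (err == ERROR_CODE.NOT_A_NEW_FRAME) return false; // Expected while polling grab().
        Debug.LogWarning(context + " failed with " + err);
        return false;
    }
}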
  792. /// <summary>
  793. /// Represents the available resolution options.
  794. /// </summary>
  795. public enum RESOLUTION
  796. {
  797. /// <summary>
  798. /// 2208*1242. Supported frame rate: 15 FPS.
  799. /// </summary>
  800. HD2K,
  801. /// <summary>
  802. /// 1920*1080. Supported frame rates: 15, 30 FPS.
  803. /// </summary>
  804. HD1080,
  805. /// <summary>
  806. /// 1280*720. Supported frame rates: 15, 30, 60 FPS.
  807. /// </summary>
  808. HD720,
  809. /// <summary>
  810. /// 672*376. Supported frame rates: 15, 30, 60, 100 FPS.
  811. /// </summary>
  812. VGA
  813. };
  814. /// <summary>
  815. /// Types of compatible ZED cameras.
  816. /// </summary>
  817. public enum MODEL
  818. {
  819. /// <summary>
  820. /// ZED(1)
  821. /// </summary>
  822. ZED,
  823. /// <summary>
  824. /// ZED Mini.
  825. /// </summary>
  826. ZED_M,
  827. /// <summary>
  828. /// ZED2.
  829. /// </summary>
  830. ZED2,
  831. /// <summary>
  832. /// ZED2i
  833. /// </summary>
  834. ZED2i
  835. };
  836. /// <summary>
  837. /// Lists available sensing modes - whether to produce the original depth map (STANDARD) or one with
  838. /// smoothing and other effects added to fill gaps and roughness (FILL).
  839. /// </summary>
  840. public enum SENSING_MODE
  841. {
  842. /// <summary>
  843. /// This mode outputs the standard ZED depth map that preserves edges and depth accuracy.
  844. /// However, there will be missing data where a depth measurement couldn't be taken, such as from
  845. /// a surface being occluded from one sensor but not the other.
  846. /// Better for: Obstacle detection, autonomous navigation, people detection, 3D reconstruction.
  847. /// </summary>
  848. STANDARD,
  849. /// <summary>
  850. /// This mode outputs a smooth and fully dense depth map. It doesn't have gaps in the data
  851. /// like STANDARD where depth can't be calculated directly, but the values it fills the gaps with
  852. /// are less accurate than a real measurement.
  853. /// Better for: AR/VR, mixed-reality capture, image post-processing.
  854. /// </summary>
  855. FILL
  856. };
  857. /// <summary>
  858. /// Lists available view types retrieved from the camera, used for creating human-viewable (Image-type) textures.
  859. /// </summary><remarks>
  860. /// Based on the VIEW enum in the ZED C++ SDK. For more info, see:
  861. /// https://www.stereolabs.com/developers/documentation/API/v2.5.1/group__Video__group.html#ga77fc7bfc159040a1e2ffb074a8ad248c
  862. /// </remarks>
  863. public enum VIEW
  864. {
  865. /// <summary>
  866. /// Left RGBA image. As a ZEDMat, MAT_TYPE is set to MAT_TYPE_8U_C4.
  867. /// </summary>
  868. LEFT,
  869. /// <summary>
  870. /// Right RGBA image. As a ZEDMat, MAT_TYPE is set to sl::MAT_TYPE_8U_C4.
  871. /// </summary>
  872. RIGHT,
  873. /// <summary>
  874. /// Left GRAY image. As a ZEDMat, MAT_TYPE is set to sl::MAT_TYPE_8U_C1.
  875. /// </summary>
  876. LEFT_GREY,
  877. /// <summary>
  878. /// Right GRAY image. As a ZEDMat, MAT_TYPE is set to sl::MAT_TYPE_8U_C1.
  879. /// </summary>
  880. RIGHT_GREY,
  881. /// <summary>
  882. /// Left RGBA unrectified image. As a ZEDMat, MAT_TYPE is set to sl::MAT_TYPE_8U_C4.
  883. /// </summary>
  884. LEFT_UNRECTIFIED,
  885. /// <summary>
  886. /// Right RGBA unrectified image. As a ZEDMat, MAT_TYPE is set to sl::MAT_TYPE_8U_C4.
  887. /// </summary>
  888. RIGHT_UNRECTIFIED,
  889. /// <summary>
  890. /// Left GRAY unrectified image. As a ZEDMat, MAT_TYPE is set to sl::MAT_TYPE_8U_C1.
  891. /// </summary>
  892. LEFT_UNRECTIFIED_GREY,
  893. /// <summary>
  894. /// Right GRAY unrectified image. As a ZEDMat, MAT_TYPE is set to sl::MAT_TYPE_8U_C1.
  895. /// </summary>
  896. RIGHT_UNRECTIFIED_GREY,
  897. /// <summary>
  898. /// Left and right image. Will be double the width to hold both. As a ZEDMat, MAT_TYPE is set to MAT_8U_C4.
  899. /// </summary>
  900. SIDE_BY_SIDE,
  901. /// <summary>
  902. /// Normalized depth image. As a ZEDMat, MAT_TYPE is set to sl::MAT_TYPE_8U_C4.
  903. /// <para>Use an Image texture for viewing only. For measurements, use a Measure type instead
  904. /// (ZEDCamera.RetrieveMeasure()) to preserve accuracy. </para>
  905. /// </summary>
  906. DEPTH,
  907. /// <summary>
  908. /// Normalized confidence image. As a ZEDMat, MAT_TYPE is set to MAT_8U_C4.
  909. /// <para>Use an Image texture for viewing only. For measurements, use a Measure type instead
  910. /// (ZEDCamera.RetrieveMeasure()) to preserve accuracy. </para>
  911. /// </summary>
  912. CONFIDENCE,
  913. /// <summary>
  914. /// Color rendering of the normals. As a ZEDMat, MAT_TYPE is set to MAT_8U_C4.
  915. /// <para>Use an Image texture for viewing only. For measurements, use a Measure type instead
  916. /// (ZEDCamera.RetrieveMeasure()) to preserve accuracy. </para>
  917. /// </summary>
  918. NORMALS,
  919. /// <summary>
  920. /// Color rendering of the right depth mapped on right sensor. As a ZEDMat, MAT_TYPE is set to MAT_8U_C4.
  921. /// <para>Use an Image texture for viewing only. For measurements, use a Measure type instead
  922. /// (ZEDCamera.RetrieveMeasure()) to preserve accuracy. </para>
  923. /// </summary>
  924. DEPTH_RIGHT,
  925. /// <summary>
  926. /// Color rendering of the normals mapped on right sensor. As a ZEDMat, MAT_TYPE is set to MAT_8U_C4.
  927. /// <para>Use an Image texture for viewing only. For measurements, use a Measure type instead
  928. /// (ZEDCamera.RetrieveMeasure()) to preserve accuracy. </para>
  929. /// </summary>
  930. NORMALS_RIGHT
  931. };
  932. /// <summary>
  933. /// Lists available camera settings for the ZED camera (contrast, hue, saturation, gain, etc.)
  934. /// </summary>
  935. public enum CAMERA_SETTINGS
  936. {
  937. /// <summary>
  938. /// Brightness control. Value should be between 0 and 8.
  939. /// </summary>
  940. BRIGHTNESS,
  941. /// <summary>
  942. /// Contrast control. Value should be between 0 and 8.
  943. /// </summary>
  944. CONTRAST,
  945. /// <summary>
  946. /// Hue control. Value should be between 0 and 11.
  947. /// </summary>
  948. HUE,
  949. /// <summary>
  950. /// Saturation control. Value should be between 0 and 8.
  951. /// </summary>
  952. SATURATION,
  953. /// <summary>
  954. /// Sharpness control. Value should be between 0 and 8.
  955. /// </summary>
  956. SHARPNESS,
  957. /// <summary>
  958. /// Gamma control. Value should be between 1 and 9
  959. /// </summary>
  960. GAMMA,
  961. /// <summary>
  962. /// Gain control. Value should be between 0 and 100 for manual control.
  963. /// If ZED_EXPOSURE is set to -1 (automatic mode), then gain will be automatic as well.
  964. /// </summary>
  965. GAIN,
  966. /// <summary>
  967. /// Exposure control. Value can be between 0 and 100.
  968. /// Setting to -1 enables auto exposure and auto gain.
  969. /// Setting to 0 disables auto exposure but doesn't change the last applied automatic values.
  970. /// Setting to 1-100 disables auto mode and sets exposure to the chosen value.
  971. /// </summary>
  972. EXPOSURE,
  973. /// <summary>
  974. /// Auto-exposure and auto gain. Setting this to true switches on both. Assigning a specific value to GAIN or EXPOSURE will set this to 0.
  975. /// </summary>
  976. AEC_AGC,
  977. /// <summary>
  978. /// ROI for auto exposure/gain. The ROI defines the region where the AEC/AGC is computed.
  979. /// Use the overloaded function for this enum.
  980. /// </summary>
  981. AEC_AGC_ROI,
  982. /// <summary>
  983. /// Color temperature control. Value should be between 2800 and 6500 with a step of 100.
  984. /// </summary>
  985. WHITEBALANCE,
  986. /// <summary>
  987. /// Defines if the white balance is in automatic mode or not.
  988. /// </summary>
  989. AUTO_WHITEBALANCE,
  990. /// <summary>
  991. /// Front LED status (1 = enabled, 0 = disabled).
  992. /// </summary>
  993. LED_STATUS
  994. };
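// Editor's note: a minimal sketch, not part of the original file. AEC_AGC_ROI takes a pixel
// rectangle rather than a single integer value, which is what the iRect struct above is for. This
// hypothetical helper builds a centered ROI; passing it to the camera goes through the wrapper's
// overloaded camera-settings call (not shown here).
public static class CameraSettingsExample
{
    public static iRect CenteredROI(int imageWidth, int imageHeight)
    {
        iRect roi;
        roi.width = imageWidth / 2;
        roi.height = imageHeight / 2;
        roi.x = (imageWidth - roi.width) / 2;  // Center the half-size rectangle horizontally.
        roi.y = (imageHeight - roi.height) / 2; // ...and vertically.
        return roi;
    }
}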
  995. /// <summary>
  996. /// Lists available measure types retrieved from the camera, used for creating precise measurement maps
  997. /// (Measure-type textures).
  998. /// Based on the MEASURE enum in the ZED C++ SDK. For more info, see:
  999. /// https://www.stereolabs.com/developers/documentation/API/v2.5.1/group__Depth__group.html#ga798a8eed10c573d759ef7e5a5bcd545d
  1000. /// </summary>
  1001. public enum MEASURE
  1002. {
  1003. /// <summary>
  1004. /// Disparity map. As a ZEDMat, MAT_TYPE is set to MAT_32F_C1.
  1005. /// </summary>
  1006. DISPARITY,
  1007. /// <summary>
  1008. /// Depth map. As a ZEDMat, MAT_TYPE is set to MAT_32F_C1.
  1009. /// </summary>
  1010. DEPTH,
  1011. /// <summary>
  1012. /// Certainty/confidence of the disparity map. As a ZEDMat, MAT_TYPE is set to MAT_32F_C1.
  1013. /// </summary>
  1014. CONFIDENCE,
  1015. /// <summary>
  1016. /// 3D coordinates of the image points. Used for point clouds in ZEDPointCloudManager.
  1017. /// As a ZEDMat, MAT_TYPE is set to MAT_32F_C4. The 4th channel may contain the colors.
  1018. /// </summary>
  1019. XYZ,
  1020. /// <summary>
  1021. /// 3D coordinates and color of the image. As a ZEDMat, MAT_TYPE is set to MAT_32F_C4.
  1022. /// The 4th channel encodes 4 UCHARs for colors in R-G-B-A order.
  1023. /// </summary>
  1024. XYZRGBA,
  1025. /// <summary>
  1026. /// 3D coordinates and color of the image. As a ZEDMat, MAT_TYPE is set to MAT_32F_C4.
  1027. /// The 4th channel encode 4 UCHARs for colors in B-G-R-A order.
  1028. /// </summary>
  1029. XYZBGRA,
  1030. /// <summary>
  1031. /// 3D coordinates and color of the image. As a ZEDMat, MAT_TYPE is set to MAT_32F_C4.
  1032. /// The 4th channel encodes 4 UCHARs for color in A-R-G-B order.
  1033. /// </summary>
  1034. XYZARGB,
  1035. /// <summary>
  1036. /// 3D coordinates and color of the image. As a ZEDMat, MAT_TYPE is set to MAT_32F_C4.
  1037. /// Channel 4 contains color in A-B-G-R order.
  1038. /// </summary>
  1039. XYZABGR,
  1040. /// <summary>
  1041. /// Normals vector. As a ZEDMat, MAT_TYPE is set to MAT_32F_C4.
  1042. /// Channel 4 is empty (set to 0).
  1043. /// </summary>
  1044. NORMALS,
  1045. /// <summary>
  1046. /// Disparity map for the right sensor. As a ZEDMat, MAT_TYPE is set to MAT_32F_C1.
  1047. /// </summary>
  1048. DISPARITY_RIGHT,
  1049. /// <summary>
  1050. /// Depth map for right sensor. As a ZEDMat, MAT_TYPE is set to MAT_32F_C1.
  1051. /// </summary>
  1052. DEPTH_RIGHT,
  1053. /// <summary>
  1054. /// Point cloud for right sensor. As a ZEDMat, MAT_TYPE is set to MAT_32F_C4. Channel 4 is empty.
  1055. /// </summary>
  1056. XYZ_RIGHT,
  1057. /// <summary>
  1058. /// Colored point cloud for right sensor. As a ZEDMat, MAT_TYPE is set to MAT_32F_C4.
  1059. /// Channel 4 contains colors in R-G-B-A order.
  1060. /// </summary>
  1061. XYZRGBA_RIGHT,
  1062. /// <summary>
  1063. /// Colored point cloud for right sensor. As a ZEDMat, MAT_TYPE is set to MAT_32F_C4.
  1064. /// Channel 4 contains colors in B-G-R-A order.
  1065. /// </summary>
  1066. XYZBGRA_RIGHT,
  1067. /// <summary>
  1068. /// Colored point cloud for right sensor. As a ZEDMat, MAT_TYPE is set to MAT_32F_C4.
  1069. /// Channel 4 contains colors in A-R-G-B order.
  1070. /// </summary>
  1071. XYZARGB_RIGHT,
  1072. /// <summary>
  1073. /// Colored point cloud for right sensor. As a ZEDMat, MAT_TYPE is set to MAT_32F_C4.
  1074. /// Channel 4 contains colors in A-B-G-R order.
  1075. /// </summary>
  1076. XYZABGR_RIGHT,
  1077. /// <summary>
  1078. /// Normals vector for right view. As a ZEDMat, MAT_TYPE is set to MAT_32F_C4.
  1079. /// Channel 4 is empty (set to 0).
  1080. /// </summary>
  1081. NORMALS_RIGHT,
  1082. /// <summary>
  1083. /// Depth map in millimeter. Each pixel contains 1 unsigned short. As a ZEDMat, MAT_TYPE is set to MAT_U16_C1.
  1084. /// </summary>
  1085. DEPTH_U16_MM,
  1086. /// <summary>
  1087. /// Depth map in millimeter for right sensor. Each pixel contains 1 unsigned short. As a ZEDMat, MAT_TYPE is set to MAT_U16_C1.
  1088. /// </summary>
  1089. DEPTH_U16_MM_RIGHT
  1090. };
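// Editor's note: a minimal sketch, not part of the original file. For the colored point-cloud
// measures (XYZRGBA etc.), the 4th float of each pixel packs four color bytes; reinterpreting the
// float's raw bytes recovers them. The byte order follows the R-G-B-A description above and should
// be treated as an assumption to verify for the other orderings.
public static class MeasureExample
{
    public static Color32 DecodePackedColor(float packed)
    {
        byte[] bytes = System.BitConverter.GetBytes(packed);
        return new Color32(bytes[0], bytes[1], bytes[2], bytes[3]);
    }
}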
  1091. /// <summary>
  1092. /// Categories indicating when a timestamp is captured.
  1093. /// </summary>
  1094. public enum TIME_REFERENCE
  1095. {
  1096. /// <summary>
  1097. /// Timestamp from when the image was received over USB from the camera, defined
  1098. /// by when the entire image was available in memory.
  1099. /// </summary>
  1100. IMAGE,
  1101. /// <summary>
  1102. /// Timestamp from when the relevant function was called.
  1103. /// </summary>
  1104. CURRENT
  1105. };
  1106. /// <summary>
  1107. /// Reference frame (world or camera) for tracking and depth sensing.
  1108. /// </summary>
  1109. public enum REFERENCE_FRAME
  1110. {
  1111. /// <summary>
  1112. /// Matrix contains the total displacement from the world origin/the first tracked point.
  1113. /// </summary>
  1114. WORLD,
  1115. /// <summary>
  1116. /// Matrix contains the displacement from the previous camera position to the current one.
  1117. /// </summary>
  1118. CAMERA
  1119. };
  1120. /// <summary>
  1121. /// Possible states of the ZED's Tracking system.
  1122. /// </summary>
  1123. public enum TRACKING_STATE
  1124. {
  1125. /// <summary>
  1126. /// Tracking is searching for a match from the database to relocate to a previously known position.
  1127. /// </summary>
  1128. TRACKING_SEARCH,
  1129. /// <summary>
  1130. /// Tracking is operating normally; tracking data should be correct.
  1131. /// </summary>
  1132. TRACKING_OK,
  1133. /// <summary>
  1134. /// Tracking is not enabled.
  1135. /// </summary>
  1136. TRACKING_OFF,
  1137. /// <summary>
  1138. /// This is the last searching state of the track; the track will be deleted in the next retrieveObjects call.
  1139. /// </summary>
  1140. TRACKING_TERMINATE
  1141. }
  1142. /// <summary>
  1143. /// SVO compression modes.
  1144. /// </summary>
  1145. public enum SVO_COMPRESSION_MODE
  1146. {
  1147. /// <summary>
  1148. /// Lossless compression based on png/zstd. Average size = 42% of RAW.
  1149. /// </summary>
  1150. LOSSLESS_BASED,
  1151. /// <summary>
  1152. /// H264 (AVCHD) GPU-based compression: avg size = 1% (of RAW). Requires an NVIDIA GPU.
  1153. /// </summary>
  1154. H264_BASED,
  1155. /// <summary>
  1156. /// H265 (HEVC) GPU-based compression: avg size = 1% (of RAW). Requires an NVIDIA GPU with Pascal architecture or newer.
  1157. /// </summary>
  1158. H265_BASED,
  1159. /// <summary>
  1160. /// H264 lossless GPU/hardware-based compression: avg size = 25% (of RAW). Provides an SSIM/PSNR result (vs RAW) >= 99.9%. Requires an NVIDIA GPU.
  1161. /// </summary>
  1162. H264_LOSSLESS_BASED,
  1163. /// <summary>
  1164. /// H265 lossless GPU/hardware-based compression: avg size = 25% (of RAW). Provides an SSIM/PSNR result (vs RAW) >= 99.9%. Requires an NVIDIA GPU.
  1165. /// </summary>
  1166. H265_LOSSLESS_BASED,
  1167. }
  1168. /// <summary>
  1169. /// Streaming codecs
  1170. /// </summary>
  1171. public enum STREAMING_CODEC
  1172. {
  1173. /// <summary>
  1174. /// AVCHD Based compression (H264)
  1175. /// </summary>
  1176. AVCHD_BASED,
  1177. /// <summary>
  1178. /// HEVC Based compression (H265)
  1179. /// </summary>
  1180. HEVC_BASED
  1181. }
  1182. /// <summary>
  1183. /// Spatial Mapping type (default is mesh)
  1184. /// </summary>
  1185. public enum SPATIAL_MAP_TYPE
  1186. {
  1187. /// <summary>
  1188. /// Represent a surface with faces, 3D points are linked by edges, no color information
  1189. /// </summary>
  1190. MESH,
  1191. /// <summary>
  1192. /// Geometry is represented by a set of 3D colored points.
  1193. /// </summary>
  1194. FUSED_POINT_CLOUD
  1195. };
  1196. /// <summary>
  1197. /// Mesh formats that can be saved/loaded with spatial mapping.
  1198. /// </summary>
  1199. public enum MESH_FILE_FORMAT
  1200. {
  1201. /// <summary>
  1202. /// Contains only vertices and faces.
  1203. /// </summary>
  1204. PLY,
  1205. /// <summary>
  1206. /// Contains only vertices and faces, encoded in binary.
  1207. /// </summary>
  1208. BIN,
  1209. /// <summary>
  1210. /// Contains vertices, normals, faces, and texture information (if possible).
  1211. /// </summary>
  1212. OBJ
  1213. }
  1214. /// <summary>
  1215. /// Presets for filtering meshes scanned with spatial mapping. Higher values reduce the total face count by more.
  1216. /// </summary>
  1217. public enum FILTER
  1218. {
  1219. /// <summary>
  1220. /// Soft decimation and smoothing.
  1221. /// </summary>
  1222. LOW,
  1223. /// <summary>
  1224. /// Decimate the number of faces and apply a soft smooth.
  1225. /// </summary>
  1226. MEDIUM,
  1227. /// <summary>
  1228. /// Drastically reduce the number of faces.
  1229. /// </summary>
  1230. HIGH,
  1231. }
  1232. /// <summary>
  1233. /// Possible states of the ZED's Spatial Mapping system.
  1234. /// </summary>
  1235. public enum SPATIAL_MAPPING_STATE
  1236. {
  1237. /// <summary>
  1238. /// Spatial mapping is initializing.
  1239. /// </summary>
  1240. SPATIAL_MAPPING_STATE_INITIALIZING,
  1241. /// <summary>
  1242. /// Depth and tracking data were correctly integrated into the fusion algorithm.
  1243. /// </summary>
  1244. SPATIAL_MAPPING_STATE_OK,
  1245. /// <summary>
  1246. /// Maximum memory dedicated to scanning has been reached; the mesh will no longer be updated.
  1247. /// </summary>
  1248. SPATIAL_MAPPING_STATE_NOT_ENOUGH_MEMORY,
  1249. /// <summary>
  1250. /// EnableSpatialMapping() wasn't called (or the scanning was stopped and not relaunched).
  1251. /// </summary>
  1252. SPATIAL_MAPPING_STATE_NOT_ENABLED,
  1253. /// <summary>
  1254. /// Effective FPS is too low to give proper results for spatial mapping.
  1255. /// Consider using performance-friendly parameters (DEPTH_MODE_PERFORMANCE, VGA or HD720 camera resolution,
  1256. /// and LOW spatial mapping resolution).
  1257. /// </summary>
  1258. SPATIAL_MAPPING_STATE_FPS_TOO_LOW
  1259. }
  1260. /// <summary>
  1261. /// Units used by the SDK for measurements and tracking. METER is best to stay consistent with Unity.
  1262. /// </summary>
  1263. public enum UNIT
  1264. {
  1265. /// <summary>
  1266. /// International System, 1/1000 meters.
  1267. /// </summary>
  1268. MILLIMETER,
  1269. /// <summary>
  1270. /// International System, 1/100 meters.
  1271. /// </summary>
  1272. CENTIMETER,
  1273. /// <summary>
  1274. /// International System, 1/1 meters.
  1275. /// </summary>
  1276. METER,
  1277. /// <summary>
  1278. /// Imperial Unit, 1/12 feet.
  1279. /// </summary>
  1280. INCH,
  1281. /// <summary>
  1282. /// Imperial Unit, 1/1 feet.
  1283. /// </summary>
  1284. FOOT
  1285. }
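// Editor's note: a minimal sketch, not part of the original file. Unity world units are meters, so a
// conversion factor from the UNIT the SDK was initialized with can be handy when mixing measurements.
public static class UnitExample
{
    public static float ToMeters(UNIT unit)
    {
        switch (unit)
        {
            case UNIT.MILLIMETER: return 0.001f;
            case UNIT.CENTIMETER: return 0.01f;
            case UNIT.METER: return 1f;
            case UNIT.INCH: return 0.0254f;
            case UNIT.FOOT: return 0.3048f;
            default: return 1f;
        }
    }
}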
  1286. /// <summary>
  1287. /// Struct containing all parameters passed to the SDK when initializing the ZED.
  1288. /// These parameters will be fixed for the whole execution life time of the camera.
  1289. /// </summary><remarks>For more details, see the InitParameters class in the SDK API documentation:
  1290. /// https://www.stereolabs.com/developers/documentation/API/v2.5.1/structsl_1_1InitParameters.html </remarks>
  1292. public class InitParameters
  1293. {
  1294. public sl.INPUT_TYPE inputType;
  1295. /// <summary>
  1296. /// Resolution the ZED will be set to.
  1297. /// </summary>
  1298. public sl.RESOLUTION resolution;
  1299. /// <summary>
  1300. /// Requested FPS for this resolution. Setting it to 0 will choose the default FPS for this resolution.
  1301. /// </summary>
  1302. public int cameraFPS;
  1303. /// <summary>
  1304. /// ID for identifying which of multiple connected ZEDs to use.
  1305. /// </summary>
  1306. public int cameraDeviceID;
  1307. /// <summary>
  1308. /// Path to a recorded SVO file to play, including filename.
  1309. /// </summary>
  1310. public string pathSVO = "";
  1311. /// <summary>
  1312. /// In SVO playback, this mode simulates a live camera and consequently skips frames if the computation framerate is too slow.
  1313. /// </summary>
  1314. public bool svoRealTimeMode;
  1315. /// <summary>
  1316. /// Define a unit for all metric values (depth, point clouds, tracking, meshes, etc.) Meters are recommended for Unity.
  1317. /// </summary>
  1318. public UNIT coordinateUnit;
  1319. /// <summary>
  1320. /// This defines the order and the direction of the axis of the coordinate system.
  1321. /// LEFT_HANDED_Y_UP is recommended to match Unity's coordinates.
  1322. /// </summary>
  1323. public COORDINATE_SYSTEM coordinateSystem;
  1324. /// <summary>
  1325. /// Quality level of depth calculations. Higher settings improve accuracy but cost performance.
  1326. /// </summary>
  1327. public sl.DEPTH_MODE depthMode;
  1328. /// <summary>
  1329. /// Minimum distance from the camera from which depth will be computed, in the defined coordinateUnit.
  1330. /// </summary>
  1331. public float depthMinimumDistance;
  1332. /// <summary>
1333. /// When estimating depth, the SDK uses this upper limit to turn higher values into \ref TOO_FAR ones.
1334. /// This is the current maximum distance that can be computed in the defined \ref UNIT.
1335. /// Changing this value has no impact on performance and affects neither positional tracking nor spatial mapping (only the depth, point cloud and normals).
  1336. /// </summary>
  1337. public float depthMaximumDistance;
  1338. /// <summary>
  1339. /// Defines if images are horizontally flipped.
  1340. /// </summary>
  1341. public int cameraImageFlip;
  1342. /// <summary>
  1343. /// Defines if measures relative to the right sensor should be computed (needed for MEASURE_<XXX>_RIGHT).
  1344. /// </summary>
  1345. public bool enableRightSideMeasure;
  1346. /// <summary>
  1347. /// True to disable self-calibration and use the optional calibration parameters without optimizing them.
  1348. /// False is recommended, so that calibration parameters can be optimized.
  1349. /// </summary>
  1350. public bool cameraDisableSelfCalib;
  1351. /// <summary>
  1352. /// True for the SDK to provide text feedback.
  1353. /// </summary>
  1354. public int sdkVerbose;
  1355. /// <summary>
  1356. /// ID of the graphics card on which the ZED's computations will be performed.
  1357. /// </summary>
  1358. public int sdkGPUId;
  1359. /// <summary>
1360. /// If sdkVerbose is enabled, the filename of the log file into which the SDK will store its text output.
  1361. /// </summary>
  1362. public string sdkVerboseLogFile = "";
  1363. /// <summary>
  1364. /// True to stabilize the depth map. Recommended.
  1365. /// </summary>
  1366. public bool depthStabilization;
  1367. /// <summary>
  1368. /// Optional path for searching configuration (calibration) file SNxxxx.conf. (introduced in ZED SDK 2.6)
  1369. /// </summary>
  1370. public string optionalSettingsPath = "";
  1371. /// <summary>
1372. /// True to require the camera's motion sensors (IMU); opening will fail if they can't be accessed. False opens the camera even if the sensors are unavailable.
  1373. /// </summary>
  1374. public bool sensorsRequired;
  1375. /// <summary>
1376. /// IP address of the streaming sender to connect to, when using a network stream as input.
  1377. /// </summary>
  1378. public string ipStream = "";
  1379. /// <summary>
1380. /// Port of the streaming sender to connect to, when using a network stream as input.
  1381. /// </summary>
  1382. public ushort portStream = 30000;
  1383. /// <summary>
  1384. /// Whether to enable improved color/gamma curves added in ZED SDK 3.0.
  1385. /// </summary>
  1386. public bool enableImageEnhancement = true;
  1387. /// <summary>
  1388. /// Set an optional file path where the SDK can find a file containing the calibration information of the camera computed by OpenCV.
  1389. /// <remarks> Using this will disable the factory calibration of the camera. </remarks>
1390. /// <warning> Erroneous calibration values can lead to poor accuracy in SDK modules. </warning>
  1391. /// </summary>
  1392. public string optionalOpencvCalibrationFile = "";
  1393. /// <summary>
  1394. /// Define a timeout in seconds after which an error is reported if the \ref open() command fails.
1395. /// Set to '-1' to try to open the camera endlessly without returning an error in case of failure.
1396. /// Set to '0' to return an error if the first attempt fails.
  1397. /// This parameter only impacts the LIVE mode.
  1398. /// </summary>
  1399. public float openTimeoutSec;
  1400. /// <summary>
  1401. /// Constructor. Sets default initialization parameters recommended for Unity.
  1402. /// </summary>
  1403. public InitParameters()
  1404. {
  1405. this.inputType = sl.INPUT_TYPE.INPUT_TYPE_USB;
  1406. this.resolution = RESOLUTION.HD720;
  1407. this.cameraFPS = 60;
  1408. this.cameraDeviceID = 0;
  1409. this.pathSVO = "";
  1410. this.svoRealTimeMode = false;
  1411. this.coordinateUnit = UNIT.METER;
  1412. this.coordinateSystem = COORDINATE_SYSTEM.LEFT_HANDED_Y_UP;
  1413. this.depthMode = DEPTH_MODE.PERFORMANCE;
  1414. this.depthMinimumDistance = -1;
  1415. this.depthMaximumDistance = -1;
  1416. this.cameraImageFlip = 2;
  1417. this.cameraDisableSelfCalib = false;
  1418. this.sdkVerbose = 0;
  1419. this.sdkGPUId = -1;
  1420. this.sdkVerboseLogFile = "";
  1421. this.enableRightSideMeasure = false;
  1422. this.depthStabilization = true;
  1423. this.optionalSettingsPath = "";
  1424. this.sensorsRequired = false;
  1425. this.ipStream = "";
  1426. this.portStream = 30000;
  1427. this.enableImageEnhancement = true;
  1428. this.optionalOpencvCalibrationFile = "";
  1429. this.openTimeoutSec = 5.0f;
  1430. }
  1431. }
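// Illustrative sketch (not part of the original file): one way these InitParameters might be
// filled in a Unity script before opening the camera. The zedCamera.Init(...) call below is a
// hypothetical placeholder for the plugin's actual open/init entry point.
//
//     sl.InitParameters initParameters = new sl.InitParameters();
//     initParameters.resolution = sl.RESOLUTION.HD720;                         // 16:9, supports 60 FPS
//     initParameters.depthMode = sl.DEPTH_MODE.PERFORMANCE;                    // fastest depth mode
//     initParameters.coordinateUnit = sl.UNIT.METER;                           // 1 unit = 1 meter, like Unity
//     initParameters.coordinateSystem = sl.COORDINATE_SYSTEM.LEFT_HANDED_Y_UP; // matches Unity's axes
//     initParameters.depthStabilization = true;                                // recommended
//     // zedCamera.Init(ref initParameters);                                   // hypothetical open call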
  1432. /// <summary>
1433. /// List of available coordinate systems. Left-Handed, Y Up is recommended to stay consistent with Unity.
  1435. /// </summary>
  1436. public enum COORDINATE_SYSTEM
  1437. {
  1438. /// <summary>
1439. /// Standard coordinate system used in computer vision.
  1440. /// Used in OpenCV. See: http://docs.opencv.org/2.4/modules/calib3d/doc/camera_calibration_and_3d_reconstruction.html
  1441. /// </summary>
  1442. IMAGE,
  1443. /// <summary>
  1444. /// Left-Handed with Y up and Z forward. Recommended. Used in Unity with DirectX.
  1445. /// </summary>
  1446. LEFT_HANDED_Y_UP,
  1447. /// <summary>
  1448. /// Right-Handed with Y pointing up and Z backward. Used in OpenGL.
  1449. /// </summary>
  1450. RIGHT_HANDED_Y_UP,
  1451. /// <summary>
  1452. /// Right-Handed with Z pointing up and Y forward. Used in 3DSMax.
  1453. /// </summary>
  1454. RIGHT_HANDED_Z_UP,
  1455. /// <summary>
  1456. /// Left-Handed with Z axis pointing up and X forward. Used in Unreal Engine.
  1457. /// </summary>
  1458. LEFT_HANDED_Z_UP
  1459. }
  1460. /// <summary>
  1461. /// Possible states of the ZED's spatial memory area export, for saving 3D features used
  1462. /// by the tracking system to relocalize the camera. This is used when saving a mesh generated
  1463. /// by spatial mapping when Save Mesh is enabled - a .area file is saved as well.
  1464. /// </summary>
  1465. public enum AREA_EXPORT_STATE
  1466. {
  1467. /// <summary>
  1468. /// Spatial memory file has been successfully created.
  1469. /// </summary>
  1470. AREA_EXPORT_STATE_SUCCESS,
  1471. /// <summary>
  1472. /// Spatial memory file is currently being written to.
  1473. /// </summary>
  1474. AREA_EXPORT_STATE_RUNNING,
  1475. /// <summary>
  1476. /// Spatial memory file export has not been called.
  1477. /// </summary>
  1478. AREA_EXPORT_STATE_NOT_STARTED,
  1479. /// <summary>
  1480. /// Spatial memory contains no data; the file is empty.
  1481. /// </summary>
  1482. AREA_EXPORT_STATE_FILE_EMPTY,
  1483. /// <summary>
  1484. /// Spatial memory file has not been written to because of a bad file name.
  1485. /// </summary>
  1486. AREA_EXPORT_STATE_FILE_ERROR,
  1487. /// <summary>
  1488. /// Spatial memory has been disabled, so no file can be created.
  1489. /// </summary>
  1490. AREA_EXPORT_STATE_SPATIAL_MEMORY_DISABLED
  1491. };
  1492. /// <summary>
  1493. /// Runtime parameters used by the ZEDCamera.Grab() function, and its Camera::grab() counterpart in the SDK.
  1494. /// </summary>
  1495. [StructLayout(LayoutKind.Sequential)]
  1496. public struct RuntimeParameters {
  1497. /// <summary>
1498. /// Defines the algorithm used for depth map computation. For more info, see the \ref SENSING_MODE definition.
  1499. /// </summary>
  1500. public sl.SENSING_MODE sensingMode;
  1501. /// <summary>
  1502. /// Provides 3D measures (point cloud and normals) in the desired reference frame (default is REFERENCE_FRAME_CAMERA).
  1503. /// </summary>
  1504. public sl.REFERENCE_FRAME measure3DReferenceFrame;
  1505. /// <summary>
  1506. /// Defines whether the depth map should be computed.
  1507. /// </summary>
  1508. [MarshalAs(UnmanagedType.U1)]
  1509. public bool enableDepth;
  1510. /// <summary>
  1511. /// Defines the confidence threshold for the depth. Based on stereo matching score.
  1512. /// </summary>
  1513. public int confidenceThreshold;
  1514. /// <summary>
  1515. /// Defines texture confidence threshold for the depth. Based on textureness confidence.
  1516. /// </summary>
  1517. public int textureConfidenceThreshold;
  1518. /// <summary>
1519. /// Defines if saturated areas (luminance >= 255) must be removed from depth map estimation.
  1520. /// </summary>
  1521. public bool removeSaturatedAreas;
  1522. }
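// Illustrative sketch (not part of the original file): filling RuntimeParameters before a grab.
// The zedCamera.Grab(ref runtimeParameters) call is assumed from the summary above and may not
// match the plugin's exact signature; enum member names are assumptions from the SDK naming.
//
//     sl.RuntimeParameters runtimeParameters = new sl.RuntimeParameters();
//     runtimeParameters.sensingMode = sl.SENSING_MODE.STANDARD;               // assumed member name
//     runtimeParameters.measure3DReferenceFrame = sl.REFERENCE_FRAME.CAMERA;  // assumed member name
//     runtimeParameters.enableDepth = true;                                   // compute the depth map this frame
//     runtimeParameters.confidenceThreshold = 100;                            // example threshold values
//     runtimeParameters.textureConfidenceThreshold = 100;
//     runtimeParameters.removeSaturatedAreas = true;
//     // if (zedCamera.Grab(ref runtimeParameters) == sl.ERROR_CODE.SUCCESS) { ... }  // hypothetical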
  1523. /// <summary>
1524. /// Lists possible flip modes of the camera.
  1525. /// </summary>
  1526. public enum FLIP_MODE
  1527. {
1528. OFF = 0, /// No flip applied; default behavior.
1529. ON = 1, /// Images and camera sensor data are flipped; useful when your camera is mounted upside down.
1530. AUTO = 2, /// In live mode, uses the camera orientation (if an IMU is available) to set the flip mode. In SVO mode, reads the state of this enum as recorded.
  1531. };
  1532. /// <summary>
  1533. /// Part of the ZED (left/right sensor, center) that's considered its center for tracking purposes.
  1534. /// </summary>
  1535. public enum TRACKING_FRAME
  1536. {
  1537. /// <summary>
  1538. /// Camera's center is at the left sensor.
  1539. /// </summary>
  1540. LEFT_EYE,
  1541. /// <summary>
  1542. /// Camera's center is in the camera's physical center, between the sensors.
  1543. /// </summary>
  1544. CENTER_EYE,
  1545. /// <summary>
  1546. /// Camera's center is at the right sensor.
  1547. /// </summary>
  1548. RIGHT_EYE
  1549. };
  1550. /// <summary>
  1551. /// Types of USB device brands.
  1552. /// </summary>
  1553. public enum USB_DEVICE
  1554. {
  1555. /// <summary>
  1556. /// Oculus device, eg. Oculus Rift VR Headset.
  1557. /// </summary>
  1558. USB_DEVICE_OCULUS,
  1559. /// <summary>
  1560. /// HTC device, eg. HTC Vive.
  1561. /// </summary>
  1562. USB_DEVICE_HTC,
  1563. /// <summary>
  1564. /// Stereolabs device, eg. ZED/ZED Mini.
  1565. /// </summary>
  1566. USB_DEVICE_STEREOLABS
  1567. };
  1568. ////////////////////////////////////////////////////////////////////////////////////////////////////////
  1569. //////////////////////////////////////// Object Detection /////////////////////////////////////////////
  1570. ////////////////////////////////////////////////////////////////////////////////////////////////////////
  1571. /// <summary>
1572. /// Sets batch trajectory parameters.
1573. /// The default constructor sets all parameters to their default settings.
1574. /// Parameters can be adjusted by the user.
  1575. /// </summary>
  1576. [StructLayout(LayoutKind.Sequential)]
  1577. public struct BatchParameters
  1578. {
  1579. /// <summary>
  1580. /// Defines if the Batch option in the object detection module is enabled. Batch queueing system provides:
  1581. /// - Deep-Learning based re-identification
  1582. /// - Trajectory smoothing and filtering
  1583. /// </summary>
  1584. /// <remarks>
  1585. /// To activate this option, enable must be set to true.
  1586. /// </remarks>
  1587. [MarshalAs(UnmanagedType.U1)]
  1588. public bool enable;
  1589. /// <summary>
  1590. /// Max retention time in seconds of a detected object. After this time, the same object will mostly have a different ID.
  1591. /// </summary>
  1592. public float idRetentionTime;
  1593. /// <summary>
  1594. /// Trajectories will be output in batch with the desired latency in seconds.
  1595. /// During this waiting time, re-identification of objects is done in the background.
1596. /// Specifying a short latency will limit the search (risking timeouts) for previously seen object IDs, but the output will be closer to real time.
1597. /// Specifying a long latency will reduce the chance of timeouts in re-identification, but increase the difference from live output.
  1598. /// </summary>
  1599. public float latency;
  1600. }
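// Illustrative sketch (not part of the original file): enabling the batching system described above.
// The numeric values are examples only.
//
//     sl.BatchParameters batchParameters = new sl.BatchParameters();
//     batchParameters.enable = true;            // turn on the batch queueing system
//     batchParameters.idRetentionTime = 240f;   // example retention time, in seconds
//     batchParameters.latency = 2f;             // example latency, in seconds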
  1601. /// <summary>
  1602. /// Contains AI model status.
  1603. /// </summary>
  1604. [StructLayout(LayoutKind.Sequential)]
  1605. public struct AI_Model_status
  1606. {
  1607. /// <summary>
1608. /// The model file is currently present on the host.
  1609. /// </summary>
  1610. [MarshalAs(UnmanagedType.U1)]
  1611. public bool downloaded;
  1612. /// <summary>
1613. /// An engine file with the expected architecture is found.
  1614. /// </summary>
  1615. [MarshalAs(UnmanagedType.U1)]
  1616. public bool optimized;
  1617. };
  1618. /// <summary>
  1619. /// Sets the object detection parameters.
  1620. /// </summary>
  1621. [StructLayout(LayoutKind.Sequential)]
  1622. public struct dll_ObjectDetectionParameters
  1623. {
  1624. /// <summary>
  1625. /// Defines if the object detection is synchronized to the image or runs in a separate thread.
  1626. /// </summary>
  1627. [MarshalAs(UnmanagedType.U1)]
  1628. public bool imageSync;
  1629. /// <summary>
  1630. /// Defines if the object detection will track objects across multiple images, instead of an image-by-image basis.
  1631. /// </summary>
  1632. [MarshalAs(UnmanagedType.U1)]
  1633. public bool enableObjectTracking;
  1634. /// <summary>
  1635. /// Defines if the SDK will calculate 2D masks for each object. Requires more performance, so don't enable if you don't need these masks.
  1636. /// </summary>
  1637. [MarshalAs(UnmanagedType.U1)]
  1638. public bool enable2DMask;
  1639. /// <summary>
  1640. /// Defines the AI model used for detection
  1641. /// </summary>
  1642. public sl.DETECTION_MODEL detectionModel;
  1643. /// <summary>
  1644. /// Defines if the body fitting will be applied
  1645. /// </summary>
  1646. [MarshalAs(UnmanagedType.U1)]
  1647. public bool enableBodyFitting;
  1648. /// <summary>
  1649. /// Body Format. BODY_FORMAT.POSE_34 automatically enables body fitting.
  1650. /// </summary>
  1651. public sl.BODY_FORMAT bodyFormat;
  1652. /// <summary>
1653. /// Defines an upper depth range for detections.
1654. /// Defined in the UNIT set at sl.Camera.Open.
1655. /// The default value is sl.InitParameters.depthMaximumDistance (it cannot be higher).
  1656. /// </summary>
  1657. public float maxRange;
  1658. /// <summary>
  1659. /// Batching system parameters.
1660. /// The batching system (introduced in 3.5) performs short-term re-identification with deep learning and trajectory filtering.
1661. /// BatchParameters.enable needs to be true to use this feature (disabled by default).
  1662. /// </summary>
  1663. public BatchParameters batchParameters;
1664. /// <summary>
1665. /// Defines the filtering mode that should be applied to raw detections.
1666. /// </summary>
  1667. public OBJECT_FILTERING_MODE filteringMode;
  1668. };
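// Illustrative sketch (not part of the original file): configuring object detection parameters.
// The zedCamera.EnableObjectDetection(...) call is a hypothetical placeholder for however the
// plugin passes this struct to the SDK.
//
//     sl.dll_ObjectDetectionParameters odParams = new sl.dll_ObjectDetectionParameters();
//     odParams.imageSync = true;                                // run detection synchronized with the image
//     odParams.enableObjectTracking = true;                     // keep IDs stable across frames
//     odParams.enable2DMask = false;                            // masks cost performance; leave off if unused
//     odParams.detectionModel = sl.DETECTION_MODEL.MULTI_CLASS_BOX;
//     odParams.filteringMode = sl.OBJECT_FILTERING_MODE.NMS3D;  // default preprocessing
//     odParams.batchParameters = new sl.BatchParameters();      // leave disabled, or fill as in the sketch above
//     // zedCamera.EnableObjectDetection(ref odParams);         // hypothetical enable call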
  1669. [StructLayout(LayoutKind.Sequential)]
  1670. public struct dll_ObjectDetectionRuntimeParameters
  1671. {
  1672. /// <summary>
  1673. /// The detection confidence threshold between 1 and 99.
  1674. /// A confidence of 1 means a low threshold, more uncertain objects and 99 very few but very precise objects.
  1675. /// Ex: If set to 80, then the SDK must be at least 80% sure that a given object exists before reporting it in the list of detected objects.
  1676. /// If the scene contains a lot of objects, increasing the confidence can slightly speed up the process, since every object instance is tracked.
  1677. /// </summary>
  1678. public float detectionConfidenceThreshold;
  1679. /// <summary>
1680. /// Per-class filter: selects which object classes (indexed by sl.OBJECT_CLASS) are detected and tracked.
  1681. /// </summary>
  1682. [MarshalAs(UnmanagedType.ByValArray, SizeConst = (int)sl.OBJECT_CLASS.LAST)]
  1683. public int[] objectClassFilter;
  1684. /// <summary>
1685. /// Per-class detection confidence threshold (indexed by sl.OBJECT_CLASS); overrides detectionConfidenceThreshold for that class.
  1686. /// </summary>
  1687. [MarshalAs(UnmanagedType.ByValArray, SizeConst = (int)sl.OBJECT_CLASS.LAST)]
  1688. public int[] object_confidence_threshold;
  1689. };
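// Illustrative sketch (not part of the original file): per-frame detection runtime parameters,
// filtering to persons only. Treating objectClassFilter entries as 0/1 flags indexed by
// OBJECT_CLASS is an assumption based on the array being sized by OBJECT_CLASS.LAST.
//
//     sl.dll_ObjectDetectionRuntimeParameters odRuntime = new sl.dll_ObjectDetectionRuntimeParameters();
//     odRuntime.detectionConfidenceThreshold = 60f;                           // report objects with >= 60% confidence
//     odRuntime.objectClassFilter = new int[(int)sl.OBJECT_CLASS.LAST];
//     odRuntime.objectClassFilter[(int)sl.OBJECT_CLASS.PERSON] = 1;           // detect persons only
//     odRuntime.object_confidence_threshold = new int[(int)sl.OBJECT_CLASS.LAST];
//     odRuntime.object_confidence_threshold[(int)sl.OBJECT_CLASS.PERSON] = 60;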
  1690. /// <summary>
1691. /// List of supported skeleton body models.
  1692. /// </summary>
  1693. public enum BODY_FORMAT
  1694. {
1695. POSE_18, /// 18-keypoint model.
1696. POSE_34, /// 34-keypoint model; automatically enables body fitting.
  1697. };
  1698. /// <summary>
  1699. /// Object data structure directly from the SDK. Represents a single object detection.
  1700. /// See DetectedObject for an abstracted version with helper functions that make this data easier to use in Unity.
  1701. /// </summary>
  1702. [StructLayout(LayoutKind.Sequential)]
  1703. public struct ObjectDataSDK
  1704. {
  1705. /// <summary>
  1706. /// Object identification number, used as a reference when tracking the object through the frames.
  1707. /// </summary>
  1708. public int id;
  1709. /// <summary>
  1710. ///Unique ID to help identify and track AI detections. Can be either generated externally, or using \ref ZEDCamera.generateUniqueId() or left empty
  1711. /// </summary>
  1712. [MarshalAs(UnmanagedType.ByValTStr, SizeConst = 37)]
  1713. public string uniqueObjectId;
  1714. /// <summary>
  1715. /// Object label, forwarded from \ref CustomBoxObjects when using DETECTION_MODEL.CUSTOM_BOX_OBJECTS
  1716. /// </summary>
  1717. public int rawLabel;
  1718. /// <summary>
  1719. /// Object category. Identify the object type.
  1720. /// </summary>
  1721. public sl.OBJECT_CLASS objectClass;
1722. public sl.OBJECT_SUBCLASS objectSubClass; // Object subcategory (finer-grained type).
1723. public sl.OBJECT_TRACK_STATE objectTrackingState; // Current tracking state of this object.
1724. public sl.OBJECT_ACTION_STATE actionState; // Current action state (idle or moving).
1725. public float confidence; // Detection confidence for this object.
1726. public System.IntPtr mask; // Pointer to the object's 2D mask (only filled if enable2DMask is true).
  1727. /// <summary>
  1728. /// Image data.
  1729. /// Note that Y in these values is relative from the top of the image, whereas the opposite is true
  1730. /// in most related Unity functions. If using this raw value, subtract Y from the
  1731. /// image height to get the height relative to the bottom.
  1732. /// </summary>
  1733. /// 0 ------- 1
  1734. /// | obj |
  1735. /// 3-------- 2
  1736. [MarshalAs(UnmanagedType.ByValArray, SizeConst = 4)]
  1737. public Vector2[] imageBoundingBox;
  1738. /// <summary>
1739. /// 3D space data, given in the camera reference frame (since this is what's used in Unity).
  1740. /// </summary>
  1741. public Vector3 rootWorldPosition; //object root position
  1742. public Vector3 headWorldPosition; //object head position (only for HUMAN detectionModel)
  1743. public Vector3 rootWorldVelocity; //object root velocity
  1744. /// <summary>
1745. /// The 3D space bounding box, given as an array of vertices.
  1746. /// </summary>
  1747. /// 1 ---------2
  1748. /// /| /|
  1749. /// 0 |--------3 |
  1750. /// | | | |
  1751. /// | 5--------|-6
  1752. /// |/ |/
  1753. /// 4 ---------7
  1754. ///
  1755. [MarshalAs(UnmanagedType.ByValArray, SizeConst = 8)]
  1756. public Vector3[] worldBoundingBox; // 3D Bounding Box of object
  1757. [MarshalAs(UnmanagedType.ByValArray, SizeConst = 8)]
  1758. public Vector3[] headBoundingBox;// 3D Bounding Box of head (only for HUMAN detectionModel)
  1759. /// <summary>
  1760. /// The 2D position of skeleton joints
  1761. /// </summary>
  1762. [MarshalAs(UnmanagedType.ByValArray, SizeConst = 34)]
  1763. public Vector2[] skeletonJointPosition2D;// 2D position of the joints of the skeleton
  1764. /// <summary>
  1765. /// The 3D position of skeleton joints
  1766. /// </summary>
  1767. [MarshalAs(UnmanagedType.ByValArray, SizeConst = 34)]
  1768. public Vector3[] skeletonJointPosition;// 3D position of the joints of the skeleton
  1769. // Full covariance matrix for position (3x3). Only 6 values are necessary
  1770. // [p0, p1, p2]
  1771. // [p1, p3, p4]
  1772. // [p2, p4, p5]
  1773. [MarshalAs(UnmanagedType.ByValArray, SizeConst = 6)]
  1774. public float[] positionCovariance;// covariance matrix of the 3d position, represented by its upper triangular matrix value
  1775. /// <summary>
1776. /// Per-keypoint detection confidence; cannot be lower than ObjectDetectionRuntimeParameters.detectionConfidenceThreshold.
1777. /// Not available with DETECTION_MODEL.MULTI_CLASS_BOX.
1778. /// In some cases (e.g. body partially out of the image or missing depth data), some keypoints cannot be detected; they will have non-finite values.
  1779. /// </summary>
  1780. [MarshalAs(UnmanagedType.ByValArray, SizeConst = 34)]
  1781. public float[] keypointConfidence;
  1782. /// <summary>
1783. /// Local position per joint in the coordinate frame of the requested skeleton format.
  1784. /// </summary>
  1785. [MarshalAs(UnmanagedType.ByValArray, SizeConst = 34)]
  1786. public Vector3[] localPositionPerJoint;
  1787. /// <summary>
  1788. /// Local orientation per joint in the coordinate frame of the requested skeleton format.
  1789. /// The orientation is represented by a quaternion.
  1790. /// </summary>
  1791. [MarshalAs(UnmanagedType.ByValArray, SizeConst = 34)]
  1792. public Quaternion[] localOrientationPerJoint;
  1793. /// <summary>
  1794. /// Global root rotation.
  1795. /// </summary>
  1796. public Quaternion globalRootOrientation;
  1797. };
  1798. /// <summary>
  1799. /// Container to store the externally detected objects. The objects can be ingested using IngestCustomBoxObjects() function to extract 3D information and tracking over time.
  1800. /// </summary>
  1801. [StructLayout(LayoutKind.Sequential)]
  1802. public struct CustomBoxObjectData
  1803. {
  1804. /// <summary>
  1805. ///Unique ID to help identify and track AI detections. Can be either generated externally, or using \ref ZEDCamera.generateUniqueId() or left empty
  1806. /// </summary>
  1807. [MarshalAs(UnmanagedType.ByValTStr, SizeConst = 37)]
  1808. public string uniqueObjectID;
  1809. /// <summary>
1810. /// 2D bounding box represented as four 2D points, starting at the top-left corner and rotating clockwise.
  1811. /// </summary>
  1812. /// 0 ------- 1
  1813. /// | obj |
  1814. /// 3-------- 2
  1815. [MarshalAs(UnmanagedType.ByValArray, SizeConst = 4)]
  1816. public Vector2[] boundingBox2D;
  1817. /// <summary>
1818. /// Object label; this information is passed through and can be used to improve object tracking.
  1819. /// </summary>
  1820. public int label;
  1821. /// <summary>
1822. /// Detection confidence. Should be in [0, 1]. It can be used to improve object tracking.
  1823. /// </summary>
  1824. public float probability;
  1825. /// <summary>
1826. /// Provides a hypothesis about the object's movements (degrees of freedom) to improve object tracking.
1827. /// true: 2 DoF, projected along the floor plane; the default for objects standing on the ground such as persons or vehicles.
1828. /// false: full 3D movement (6 DoF) is allowed.
  1829. /// </summary>
  1830. [MarshalAs(UnmanagedType.U1)]
  1831. public bool isGrounded;
  1832. }
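// Illustrative sketch (not part of the original file): wrapping an externally detected 2D box so
// it can be ingested with IngestCustomBoxObjects() (mentioned in the summary above) when using
// DETECTION_MODEL.CUSTOM_BOX_OBJECTS. The pixel coordinates are arbitrary example values.
//
//     sl.CustomBoxObjectData box = new sl.CustomBoxObjectData();
//     box.uniqueObjectID = "";                  // supply your own ID, or leave empty
//     box.boundingBox2D = new Vector2[4]
//     {
//         new Vector2(100, 50),   // 0: top-left
//         new Vector2(300, 50),   // 1: top-right
//         new Vector2(300, 400),  // 2: bottom-right
//         new Vector2(100, 400)   // 3: bottom-left
//     };
//     box.label = 0;              // your detector's class ID, passed through
//     box.probability = 0.85f;    // detection confidence in [0, 1]
//     box.isGrounded = true;      // object assumed to stand on the floor (2 DoF tracking)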
  1833. /// <summary>
  1834. /// Object Scene data directly from the ZED SDK. Represents all detections given during a single image frame.
  1835. /// See DetectionFrame for an abstracted version with helper functions that make this data easier to use in Unity.
  1836. /// Contains the number of object in the scene and the objectData structure for each object.
1837. /// Since the data is transmitted from C++ to C#, the size of the structure must be constant. Therefore, there is a limit of 200 objects (the MAX_OBJECTS constant) in the image.
1838. /// <c>This number cannot be changed.</c>
  1839. /// </summary>
  1840. [StructLayout(LayoutKind.Sequential)]
  1841. public struct ObjectsFrameSDK
  1842. {
  1843. /// <summary>
  1844. /// How many objects were detected this frame. Use this to iterate through the top of objectData; objects with indexes greater than numObject are empty.
  1845. /// </summary>
  1846. public int numObject;
  1847. /// <summary>
  1848. /// Timestamp of the image where these objects were detected.
  1849. /// </summary>
  1850. public ulong timestamp;
  1851. /// <summary>
  1852. /// Defines if the object frame is new (new timestamp)
  1853. /// </summary>
  1854. public int isNew;
  1855. /// <summary>
  1856. /// Defines if the object is tracked
  1857. /// </summary>
  1858. public int isTracked;
  1859. /// <summary>
  1860. /// Current detection model used.
  1861. /// </summary>
  1862. public sl.DETECTION_MODEL detectionModel;
  1863. /// <summary>
  1864. /// Array of objects
  1865. /// </summary>
  1866. [MarshalAs(UnmanagedType.ByValArray, SizeConst = (int)(Constant.MAX_OBJECTS))]
  1867. public ObjectDataSDK[] objectData;
  1868. };
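// Illustrative sketch (not part of the original file): iterating the raw detections of one frame.
// The objectsFrame variable is assumed to have been filled by the plugin's retrieve call.
//
//     for (int i = 0; i < objectsFrame.numObject; i++)   // entries past numObject are empty
//     {
//         sl.ObjectDataSDK obj = objectsFrame.objectData[i];
//         if (obj.objectTrackingState == sl.OBJECT_TRACK_STATE.OK)
//         {
//             UnityEngine.Debug.Log("Object " + obj.id + " (" + obj.objectClass + ") at " + obj.rootWorldPosition);
//         }
//     }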
  1869. /// <summary>
1870. /// Lists available object classes.
  1871. /// </summary>
  1872. public enum OBJECT_CLASS
  1873. {
  1874. PERSON = 0,
  1875. VEHICLE = 1,
  1876. BAG = 2,
  1877. ANIMAL = 3,
  1878. ELECTRONICS = 4,
  1879. FRUIT_VEGETABLE = 5,
  1880. SPORT = 6,
  1881. LAST = 7
  1882. };
  1883. /// <summary>
1884. /// Lists available object subclasses.
  1885. /// </summary>
  1886. public enum OBJECT_SUBCLASS
  1887. {
  1888. PERSON = 0,
  1889. // VEHICLES
  1890. BICYCLE = 1,
  1891. CAR = 2,
  1892. MOTORBIKE = 3,
  1893. BUS = 4,
  1894. TRUCK = 5,
  1895. BOAT = 6,
  1896. // BAGS
  1897. BACKPACK = 7,
  1898. HANDBAG = 8,
  1899. SUITCASE = 9,
  1900. // ANIMALS
  1901. BIRD = 10,
  1902. CAT = 11,
  1903. DOG = 12,
  1904. HORSE = 13,
  1905. SHEEP = 14,
  1906. COW = 15,
  1907. // ELECTRONICS
  1908. CELLPHONE = 16,
  1909. LAPTOP = 17,
  1910. // FRUITS/VEGETABLES
  1911. BANANA = 18,
  1912. APPLE = 19,
  1913. ORANGE = 20,
  1914. CARROT = 21,
  1915. PERSON_HEAD = 22,
  1916. SPORTSBALL = 23,
  1917. LAST = 24
  1918. };
  1919. /// <summary>
  1920. /// Tracking state of an individual object.
  1921. /// </summary>
  1922. public enum OBJECT_TRACK_STATE
  1923. {
  1924. OFF, /**< The tracking is not yet initialized, the object ID is not usable */
  1925. OK, /**< The object is tracked */
  1926. SEARCHING,/**< The object couldn't be detected in the image and is potentially occluded, the trajectory is estimated */
1927. TERMINATE/**< This is the last searching state of the track; the track will be deleted in the next retrieveObjects call. */
  1928. };
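/// <summary>
/// Action state of an individual detected object (static or moving).
/// </summary>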
  1929. public enum OBJECT_ACTION_STATE
  1930. {
  1931. IDLE = 0, /**< The object is staying static. */
  1932. MOVING = 1, /**< The object is moving. */
  1933. LAST = 2
  1934. };
  1935. /// <summary>
1936. /// Lists available detection models.
  1937. /// </summary>
  1938. public enum DETECTION_MODEL {
  1939. /// <summary>
  1940. /// Any objects, bounding box based.
  1941. /// </summary>
  1942. MULTI_CLASS_BOX,
  1943. /// <summary>
  1944. /// Any objects, bounding box based.
  1945. /// </summary>
  1946. MULTI_CLASS_BOX_ACCURATE,
  1947. /// <summary>
  1948. /// Keypoints based, specific to human skeleton, real time performance even on Jetson or low end GPU cards.
  1949. /// </summary>
  1950. HUMAN_BODY_FAST,
  1951. /// <summary>
  1952. /// Keypoints based, specific to human skeleton, state of the art accuracy, requires powerful GPU.
  1953. /// </summary>
  1954. HUMAN_BODY_ACCURATE,
  1955. /// <summary>
  1956. /// Any objects, bounding box based.
  1957. /// </summary>
  1958. MULTI_CLASS_BOX_MEDIUM,
  1959. /// <summary>
  1960. /// Keypoints based, specific to human skeleton, real time performance even on Jetson or low end GPU cards.
  1961. /// </summary>
  1962. HUMAN_BODY_MEDIUM,
  1963. /// <summary>
1964. /// Bounding box detector specialized in person heads; particularly well suited for crowded environments. Person localization is also improved.
  1965. /// </summary>
  1966. PERSON_HEAD_BOX,
  1967. /// <summary>
1968. /// For external inference, using your own custom model and/or frameworks. This mode disables the internal inference engine; the 2D bounding box detections must be provided.
  1969. /// </summary>
  1970. CUSTOM_BOX_OBJECTS,
  1971. LAST
  1972. };
  1973. /// <summary>
1974. /// Lists supported bounding box preprocessing modes.
  1975. /// </summary>
  1976. public enum OBJECT_FILTERING_MODE
  1977. {
  1978. /// <summary>
  1979. /// SDK will not apply any preprocessing to the detected objects
  1980. /// </summary>
  1981. NONE,
  1982. /// <summary>
1983. /// SDK will remove objects that are in the same 3D position as an already tracked object (independent of class ID). Default value.
  1984. /// </summary>
  1985. NMS3D,
  1986. /// <summary>
  1987. /// SDK will remove objects that are in the same 3D position as an already tracked object of the same class ID
  1988. /// </summary>
  1989. NMS3D_PER_CLASS
  1990. };
  1991. public enum AI_MODELS
  1992. {
  1993. /// <summary>
  1994. /// related to sl.DETECTION_MODEL.MULTI_CLASS_BOX
  1995. /// </summary>
  1996. MULTI_CLASS_DETECTION,
  1997. /// <summary>
  1998. /// related to sl.DETECTION_MODEL.MULTI_CLASS_BOX_MEDIUM
  1999. /// </summary>
  2000. MULTI_CLASS_MEDIUM_DETECTION,
  2001. /// <summary>
  2002. /// related to sl.DETECTION_MODEL.MULTI_CLASS_BOX_ACCURATE
  2003. /// </summary>
  2004. MULTI_CLASS_ACCURATE_DETECTION,
  2005. /// <summary>
  2006. /// related to sl.DETECTION_MODEL.HUMAN_BODY_FAST
  2007. /// </summary>
  2008. HUMAN_BODY_FAST_DETECTION,
  2009. /// <summary>
  2010. /// related to sl.DETECTION_MODEL.HUMAN_BODY_MEDIUM
  2011. /// </summary>
  2012. HUMAN_BODY_MEDIUM_DETECTION,
  2013. /// <summary>
  2014. /// related to sl.DETECTION_MODEL.HUMAN_BODY_ACCURATE
  2015. /// </summary>
2016. HUMAN_BODY_ACCURATE_DETECTION,
  2017. /// <summary>
2018. /// related to sl.DETECTION_MODEL.PERSON_HEAD_BOX
  2019. /// </summary>
  2020. PERSON_HEAD_DETECTION,
  2021. /// <summary>
  2022. /// related to sl.BatchParameters.enable
  2023. /// </summary>
2024. REID_ASSOCIATION,
  2025. /// <summary>
2026. /// related to sl.DEPTH_MODE.NEURAL
  2027. /// </summary>
  2028. NEURAL_DEPTH,
  2029. LAST
  2030. };
  2031. /// <summary>
2032. /// Semantics and order of human body keypoints.
  2033. /// </summary>
  2034. public enum BODY_PARTS {
  2035. NOSE = 0,
  2036. NECK = 1,
  2037. RIGHT_SHOULDER = 2,
  2038. RIGHT_ELBOW= 3,
  2039. RIGHT_WRIST = 4,
  2040. LEFT_SHOULDER = 5,
  2041. LEFT_ELBOW = 6,
  2042. LEFT_WRIST = 7,
  2043. RIGHT_HIP = 8,
  2044. RIGHT_KNEE = 9,
  2045. RIGHT_ANKLE = 10,
  2046. LEFT_HIP = 11,
  2047. LEFT_KNEE = 12,
  2048. LEFT_ANKLE = 13,
  2049. RIGHT_EYE = 14,
  2050. LEFT_EYE = 15,
  2051. RIGHT_EAR = 16,
  2052. LEFT_EAR = 17,
  2053. LAST = 18
  2054. };
  2055. /// <summary>
  2056. /// Contains batched data of a detected object
  2057. /// </summary>
  2061. public class ObjectsBatch
  2062. {
  2063. /// <summary>
2064. /// How many samples were stored. Use this to iterate through the top of positions/velocities/boundingBoxes/...; entries with indices at or above numData are empty.
  2065. /// </summary>
  2066. public int numData = 0;
  2067. /// <summary>
  2068. /// The trajectory id
  2069. /// </summary>
  2070. public int id = 0;
  2071. /// <summary>
2072. /// Object category. Identifies the object type.
  2073. /// </summary>
  2074. public OBJECT_CLASS label = OBJECT_CLASS.LAST;
  2075. /// <summary>
  2076. /// Object subclass
  2077. /// </summary>
  2078. public OBJECT_SUBCLASS sublabel = OBJECT_SUBCLASS.LAST;
  2079. /// <summary>
  2080. /// Defines the object tracking state
  2081. /// </summary>
  2082. public TRACKING_STATE trackingState = TRACKING_STATE.TRACKING_TERMINATE;
  2083. /// <summary>
  2084. /// A sample of 3d position
  2085. /// </summary>
  2086. public Vector3[] positions = new Vector3[(int)Constant.MAX_BATCH_SIZE];
  2087. /// <summary>
  2088. /// a sample of the associated position covariance
  2089. /// </summary>
  2090. public float[,] positionCovariances = new float[(int)Constant.MAX_BATCH_SIZE, 6];
  2091. /// <summary>
  2092. /// A sample of 3d velocity
  2093. /// </summary>
  2094. public Vector3[] velocities = new Vector3[(int)Constant.MAX_BATCH_SIZE];
  2095. /// <summary>
  2096. /// The associated position timestamp
  2097. /// </summary>
  2098. public ulong[] timestamps = new ulong[(int)Constant.MAX_BATCH_SIZE];
  2099. /// <summary>
  2100. /// A sample of 3d bounding boxes
  2101. /// </summary>
  2102. public Vector3[,] boundingBoxes = new Vector3[(int)Constant.MAX_BATCH_SIZE, 8];
  2103. /// <summary>
2104. /// 2D bounding box of the person, represented as four 2D points starting at the top-left corner and rotating clockwise.
  2105. /// Expressed in pixels on the original image resolution, [0, 0] is the top left corner.
  2106. /// A ------ B
  2107. /// | Object |
  2108. /// D ------ C
  2109. /// </summary>
  2110. public Vector2[,] boundingBoxes2D = new Vector2[(int)Constant.MAX_BATCH_SIZE, 4];
  2111. /// <summary>
  2112. /// a sample of object detection confidence
  2113. /// </summary>
  2114. public float[] confidences = new float[(int)Constant.MAX_BATCH_SIZE];
  2115. /// <summary>
  2116. /// a sample of the object action state
  2117. /// </summary>
  2118. public OBJECT_ACTION_STATE[] actionStates = new OBJECT_ACTION_STATE[(int)Constant.MAX_BATCH_SIZE];
  2119. /// <summary>
2120. /// A sample of 2D person keypoints.
2121. /// Not available with DETECTION_MODEL.MULTI_CLASS_BOX.
2122. /// In some cases (e.g. body partially out of the image or missing depth data), some keypoints cannot be detected; they will have non-finite values.
  2123. /// </summary>
  2124. public Vector2[,] keypoints2D = new Vector2[(int)Constant.MAX_BATCH_SIZE, 18];
  2125. /// <summary>
2126. /// A sample of 3D person keypoints.
2127. /// Not available with DETECTION_MODEL.MULTI_CLASS_BOX.
2128. /// In some cases (e.g. body partially out of the image or missing depth data), some keypoints cannot be detected; they will have non-finite values.
  2129. /// </summary>
  2130. public Vector3[,] keypoints = new Vector3[(int)Constant.MAX_BATCH_SIZE, 18];
  2131. /// <summary>
  2132. /// bounds the head with four 2D points.
  2133. /// Expressed in pixels on the original image resolution.
  2134. /// Not available with DETECTION_MODEL.MULTI_CLASS_BOX.
  2135. /// </summary>
  2136. public Vector2[,] headBoundingBoxes2D = new Vector2[(int)Constant.MAX_BATCH_SIZE, 8];
  2137. /// <summary>
  2138. /// bounds the head with eight 3D points.
  2139. /// Defined in sl.InitParameters.UNIT, expressed in RuntimeParameters.measure3DReferenceFrame.
  2140. /// Not available with DETECTION_MODEL.MULTI_CLASS_BOX.
  2141. /// </summary>
  2142. public Vector3[,] headBoundingBoxes = new Vector3[(int)Constant.MAX_BATCH_SIZE, 8];
  2143. /// <summary>
  2144. /// 3D head centroid.
  2145. /// Defined in sl.InitParameters.UNIT, expressed in RuntimeParameters.measure3DReferenceFrame.
  2146. /// Not available with DETECTION_MODEL.MULTI_CLASS_BOX.
  2147. /// </summary>
  2148. public Vector3[] headPositions = new Vector3[(int)Constant.MAX_BATCH_SIZE];
  2149. /// <summary>
2150. /// Per-keypoint detection confidence; cannot be lower than ObjectDetectionRuntimeParameters.detectionConfidenceThreshold.
2151. /// Not available with DETECTION_MODEL.MULTI_CLASS_BOX.
2152. /// In some cases (e.g. body partially out of the image or missing depth data), some keypoints cannot be detected; they will have non-finite values.
  2153. /// </summary>
  2154. public float[,] keypointConfidences = new float[(int)Constant.MAX_BATCH_SIZE, 18];
  2155. }
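// Illustrative sketch (not part of the original file): reading back one ObjectsBatch, assumed to
// have been filled by the plugin's batch retrieval call.
//
//     for (int i = 0; i < batch.numData; i++)   // entries past numData are empty
//     {
//         UnityEngine.Debug.Log("Trajectory " + batch.id + " (" + batch.label + ") sample " + i
//                               + " at " + batch.positions[i] + ", t=" + batch.timestamps[i]);
//     }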
  2156. }// end namespace sl