SteamVR_TrackedCamera.cs

//======= Copyright (c) Valve Corporation, All rights reserved. ===============
//
// Purpose: Provides access to video feed and poses of tracked cameras.
//
// Usage:
//		var source = SteamVR_TrackedCamera.Distorted();
//		var source = SteamVR_TrackedCamera.Undistorted();
// or
//		var undistorted = true; // or false
//		var source = SteamVR_TrackedCamera.Source(undistorted);
//
// - Distorted feeds are the decoded images from the camera.
// - Undistorted feeds correct for the camera lens distortion (a.k.a. fisheye)
//   to make straight lines straight.
//
// VideoStreamTexture objects must be symmetrically Acquired and Released to
// ensure the video stream is activated, and shut down properly once there are
// no more consumers. You only need to Acquire once when starting to use a
// stream, and Release when you are done using it (as opposed to every frame).
//
// (See the example component at the bottom of this file for a usage sketch.)
//
//=============================================================================
using UnityEngine;
using Valve.VR;

namespace Valve.VR
{
    public class SteamVR_TrackedCamera
    {
        public class VideoStreamTexture
        {
            public VideoStreamTexture(uint deviceIndex, bool undistorted)
            {
                this.undistorted = undistorted;
                videostream = Stream(deviceIndex);
            }

            public bool undistorted { get; private set; }
            public uint deviceIndex { get { return videostream.deviceIndex; } }
            public bool hasCamera { get { return videostream.hasCamera; } }
            public bool hasTracking { get { Update(); return header.trackedDevicePose.bPoseIsValid; } }
            public uint frameId { get { Update(); return header.nFrameSequence; } }
            public VRTextureBounds_t frameBounds { get; private set; }
            public EVRTrackedCameraFrameType frameType { get { return undistorted ? EVRTrackedCameraFrameType.Undistorted : EVRTrackedCameraFrameType.Distorted; } }

            Texture2D _texture;
            public Texture2D texture { get { Update(); return _texture; } }

            public SteamVR_Utils.RigidTransform transform { get { Update(); return new SteamVR_Utils.RigidTransform(header.trackedDevicePose.mDeviceToAbsoluteTracking); } }
            public Vector3 velocity { get { Update(); var pose = header.trackedDevicePose; return new Vector3(pose.vVelocity.v0, pose.vVelocity.v1, -pose.vVelocity.v2); } }
            public Vector3 angularVelocity { get { Update(); var pose = header.trackedDevicePose; return new Vector3(-pose.vAngularVelocity.v0, -pose.vAngularVelocity.v1, pose.vAngularVelocity.v2); } }

            public TrackedDevicePose_t GetPose() { Update(); return header.trackedDevicePose; }

            public ulong Acquire()
            {
                return videostream.Acquire();
            }

            public ulong Release()
            {
                var result = videostream.Release();

                if (videostream.handle == 0)
                {
                    Object.Destroy(_texture);
                    _texture = null;
                }

                return result;
            }
            int prevFrameCount = -1;
            void Update()
            {
                if (Time.frameCount == prevFrameCount)
                    return;

                prevFrameCount = Time.frameCount;

                if (videostream.handle == 0)
                    return;

                var vr = SteamVR.instance;
                if (vr == null)
                    return;

                var trackedCamera = OpenVR.TrackedCamera;
                if (trackedCamera == null)
                    return;

                var nativeTex = System.IntPtr.Zero;
                var deviceTexture = (_texture != null) ? _texture : new Texture2D(2, 2);
                var headerSize = (uint)System.Runtime.InteropServices.Marshal.SizeOf(header.GetType());

                if (vr.textureType == ETextureType.OpenGL)
                {
                    if (glTextureId != 0)
                        trackedCamera.ReleaseVideoStreamTextureGL(videostream.handle, glTextureId);

                    if (trackedCamera.GetVideoStreamTextureGL(videostream.handle, frameType, ref glTextureId, ref header, headerSize) != EVRTrackedCameraError.None)
                        return;

                    nativeTex = (System.IntPtr)glTextureId;
                }
                else if (vr.textureType == ETextureType.DirectX)
                {
                    if (trackedCamera.GetVideoStreamTextureD3D11(videostream.handle, frameType, deviceTexture.GetNativeTexturePtr(), ref nativeTex, ref header, headerSize) != EVRTrackedCameraError.None)
                        return;
                }

                if (_texture == null)
                {
                    _texture = Texture2D.CreateExternalTexture((int)header.nWidth, (int)header.nHeight, TextureFormat.RGBA32, false, false, nativeTex);

                    uint width = 0, height = 0;
                    var frameBounds = new VRTextureBounds_t();
                    if (trackedCamera.GetVideoStreamTextureSize(deviceIndex, frameType, ref frameBounds, ref width, ref height) == EVRTrackedCameraError.None)
                    {
                        // Account for textures being upside-down in Unity.
                        frameBounds.vMin = 1.0f - frameBounds.vMin;
                        frameBounds.vMax = 1.0f - frameBounds.vMax;
                        this.frameBounds = frameBounds;
                    }
                }
                else
                {
                    _texture.UpdateExternalTexture(nativeTex);
                }
            }

            uint glTextureId;
            VideoStream videostream;
            CameraVideoStreamFrameHeader_t header;
        }
        #region Top level accessors.

        public static VideoStreamTexture Distorted(int deviceIndex = (int)OpenVR.k_unTrackedDeviceIndex_Hmd)
        {
            if (distorted == null)
                distorted = new VideoStreamTexture[OpenVR.k_unMaxTrackedDeviceCount];
            if (distorted[deviceIndex] == null)
                distorted[deviceIndex] = new VideoStreamTexture((uint)deviceIndex, false);
            return distorted[deviceIndex];
        }

        public static VideoStreamTexture Undistorted(int deviceIndex = (int)OpenVR.k_unTrackedDeviceIndex_Hmd)
        {
            if (undistorted == null)
                undistorted = new VideoStreamTexture[OpenVR.k_unMaxTrackedDeviceCount];
            if (undistorted[deviceIndex] == null)
                undistorted[deviceIndex] = new VideoStreamTexture((uint)deviceIndex, true);
            return undistorted[deviceIndex];
        }

        public static VideoStreamTexture Source(bool undistorted, int deviceIndex = (int)OpenVR.k_unTrackedDeviceIndex_Hmd)
        {
            return undistorted ? Undistorted(deviceIndex) : Distorted(deviceIndex);
        }

        private static VideoStreamTexture[] distorted, undistorted;

        #endregion
        #region Internal class to manage lifetime of video streams (per device).

        class VideoStream
        {
            public VideoStream(uint deviceIndex)
            {
                this.deviceIndex = deviceIndex;
                var trackedCamera = OpenVR.TrackedCamera;
                if (trackedCamera != null)
                    trackedCamera.HasCamera(deviceIndex, ref _hasCamera);
            }
            public uint deviceIndex { get; private set; }

            ulong _handle;
            public ulong handle { get { return _handle; } }

            bool _hasCamera;
            public bool hasCamera { get { return _hasCamera; } }

            ulong refCount;
            public ulong Acquire()
            {
                if (_handle == 0 && hasCamera)
                {
                    var trackedCamera = OpenVR.TrackedCamera;
                    if (trackedCamera != null)
                        trackedCamera.AcquireVideoStreamingService(deviceIndex, ref _handle);
                }
                return ++refCount;
            }
            public ulong Release()
            {
                if (refCount > 0 && --refCount == 0 && _handle != 0)
                {
                    var trackedCamera = OpenVR.TrackedCamera;
                    if (trackedCamera != null)
                        trackedCamera.ReleaseVideoStreamingService(_handle);
                    _handle = 0;
                }
                return refCount;
            }
        }

        static VideoStream Stream(uint deviceIndex)
        {
            if (videostreams == null)
                videostreams = new VideoStream[OpenVR.k_unMaxTrackedDeviceCount];
            if (videostreams[deviceIndex] == null)
                videostreams[deviceIndex] = new VideoStream(deviceIndex);
            return videostreams[deviceIndex];
        }
        static VideoStream[] videostreams;

        #endregion
    }
}
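
//=============================================================================
// Example usage (illustrative sketch, not part of the original plugin file):
// a minimal MonoBehaviour following the Acquire/Release pattern described in
// the header comment. It acquires one feed when enabled, copies the frame
// texture and valid UV bounds onto this object's material each frame, and
// releases the stream when disabled. The class name TrackedCameraFeedExample
// and the specific material/pose handling are assumptions for this example.
//=============================================================================
namespace Valve.VR
{
    public class TrackedCameraFeedExample : MonoBehaviour
    {
        public bool undistorted = true;

        SteamVR_TrackedCamera.VideoStreamTexture source;

        void OnEnable()
        {
            // Acquire once; the underlying stream stays active until the matching Release.
            source = SteamVR_TrackedCamera.Source(undistorted);
            source.Acquire();
        }

        void OnDisable()
        {
            // Release so the stream can shut down once there are no more consumers.
            GetComponent<Renderer>().material.mainTexture = null;
            source.Release();
            source = null;
        }

        void Update()
        {
            // texture is refreshed at most once per Unity frame by VideoStreamTexture.
            var texture = source.texture;
            if (texture == null)
                return;

            // Map only the valid region of the shared camera texture onto the material.
            var material = GetComponent<Renderer>().material;
            material.mainTexture = texture;

            var bounds = source.frameBounds;
            material.mainTextureOffset = new Vector2(bounds.uMin, bounds.vMin);
            material.mainTextureScale = new Vector2(bounds.uMax - bounds.uMin, bounds.vMax - bounds.vMin);

            // Optionally follow the camera's tracked pose while tracking is valid.
            if (source.hasTracking)
            {
                var pose = source.transform;
                transform.localPosition = pose.pos;
                transform.localRotation = pose.rot;
            }
        }
    }
}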