OVRExternalComposition.cs
  1. /************************************************************************************
  2. Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
  3. Licensed under the Oculus Utilities SDK License Version 1.31 (the "License"); you may not use
  4. the Utilities SDK except in compliance with the License, which is provided at the time of installation
  5. or download, or which otherwise accompanies this software in either electronic or hard copy form.
  6. You may obtain a copy of the License at
  7. https://developer.oculus.com/licenses/utilities-1.31
  8. Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
  9. under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
  10. ANY KIND, either express or implied. See the License for the specific language governing
  11. permissions and limitations under the License.
  12. ************************************************************************************/
  13. #if UNITY_ANDROID && !UNITY_EDITOR
  14. #define OVR_ANDROID_MRC
  15. #endif
  16. using UnityEngine;
  17. using System.Collections.Generic;
  18. using System.Threading;
  19. #if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN || UNITY_ANDROID
public class OVRExternalComposition : OVRComposition
{
    // Main-camera GameObject the MRC cameras were last cloned from; lets
    // RefreshCameraObjects detect a camera switch and rebuild the rig.
    private GameObject previousMainCameraObject = null;
    public GameObject foregroundCameraGameObject = null;
    public Camera foregroundCamera = null;
    public GameObject backgroundCameraGameObject = null;
    public Camera backgroundCamera = null;
    // Quad placed at the player's head, facing the external camera; the foreground
    // camera uses it (via OVRMRForegroundCameraManager) as a clip plane.
    public GameObject cameraProxyPlane = null;
#if OVR_ANDROID_MRC
    public AudioListener audioListener;
    public OVRMRAudioFilter audioFilter;
    // Double-buffered encode targets: one texture is being drawn while the other
    // is handed to the MRC encoder (see Update / CastMrcFrame).
    public RenderTexture[] mrcRenderTextureArray = new RenderTexture[2];
    // Monotonic frame counter driving the draw/cast texture alternation.
    public int frameIndex;
    // Sync id of the last frame submitted to the encoder; -1 when none is in flight.
    public int lastMrcEncodeFrameSyncId;
#endif

    /// <summary>Identifies this composition as the External (compositor-side, e.g. OBS) method.</summary>
    public override OVRManager.CompositionMethod CompositionMethod() { return OVRManager.CompositionMethod.External; }
  36. public OVRExternalComposition(GameObject parentObject, Camera mainCamera)
  37. : base(parentObject, mainCamera)
  38. {
  39. #if OVR_ANDROID_MRC
  40. int frameWidth;
  41. int frameHeight;
  42. OVRPlugin.Media.GetMrcFrameSize(out frameWidth, out frameHeight);
  43. Debug.LogFormat("[OVRExternalComposition] Create render texture {0}, {1}", frameWidth, frameHeight);
  44. for (int i=0; i<2; ++i)
  45. {
  46. mrcRenderTextureArray[i] = new RenderTexture(frameWidth, frameHeight, 24, RenderTextureFormat.ARGB32);
  47. mrcRenderTextureArray[i].Create();
  48. }
  49. frameIndex = 0;
  50. lastMrcEncodeFrameSyncId = -1;
  51. #endif
  52. RefreshCameraObjects(parentObject, mainCamera);
  53. }
  54. private void RefreshCameraObjects(GameObject parentObject, Camera mainCamera)
  55. {
  56. if (mainCamera.gameObject != previousMainCameraObject)
  57. {
  58. Debug.LogFormat("[OVRExternalComposition] Camera refreshed. Rebind camera to {0}", mainCamera.gameObject.name);
  59. OVRCompositionUtil.SafeDestroy(ref backgroundCameraGameObject);
  60. backgroundCamera = null;
  61. OVRCompositionUtil.SafeDestroy(ref foregroundCameraGameObject);
  62. foregroundCamera = null;
  63. OVRCompositionUtil.SafeDestroy(ref cameraProxyPlane);
  64. RefreshCameraRig(parentObject, mainCamera);
  65. Debug.Assert(backgroundCameraGameObject == null);
  66. backgroundCameraGameObject = Object.Instantiate(mainCamera.gameObject);
  67. backgroundCameraGameObject.name = "OculusMRC_BackgroundCamera";
  68. backgroundCameraGameObject.transform.parent = cameraInTrackingSpace ? cameraRig.trackingSpace : parentObject.transform;
  69. if (backgroundCameraGameObject.GetComponent<AudioListener>())
  70. {
  71. Object.Destroy(backgroundCameraGameObject.GetComponent<AudioListener>());
  72. }
  73. if (backgroundCameraGameObject.GetComponent<OVRManager>())
  74. {
  75. Object.Destroy(backgroundCameraGameObject.GetComponent<OVRManager>());
  76. }
  77. backgroundCamera = backgroundCameraGameObject.GetComponent<Camera>();
  78. backgroundCamera.tag = "Untagged";
  79. backgroundCamera.stereoTargetEye = StereoTargetEyeMask.None;
  80. backgroundCamera.depth = 99990.0f;
  81. backgroundCamera.rect = new Rect(0.0f, 0.0f, 0.5f, 1.0f);
  82. backgroundCamera.cullingMask = mainCamera.cullingMask & (~OVRManager.instance.extraHiddenLayers);
  83. #if OVR_ANDROID_MRC
  84. backgroundCamera.targetTexture = mrcRenderTextureArray[0];
  85. #endif
  86. Debug.Assert(foregroundCameraGameObject == null);
  87. foregroundCameraGameObject = Object.Instantiate(mainCamera.gameObject);
  88. foregroundCameraGameObject.name = "OculusMRC_ForgroundCamera";
  89. foregroundCameraGameObject.transform.parent = cameraInTrackingSpace ? cameraRig.trackingSpace : parentObject.transform;
  90. if (foregroundCameraGameObject.GetComponent<AudioListener>())
  91. {
  92. Object.Destroy(foregroundCameraGameObject.GetComponent<AudioListener>());
  93. }
  94. if (foregroundCameraGameObject.GetComponent<OVRManager>())
  95. {
  96. Object.Destroy(foregroundCameraGameObject.GetComponent<OVRManager>());
  97. }
  98. foregroundCamera = foregroundCameraGameObject.GetComponent<Camera>();
  99. foregroundCamera.tag = "Untagged";
  100. foregroundCamera.stereoTargetEye = StereoTargetEyeMask.None;
  101. foregroundCamera.depth = backgroundCamera.depth + 1.0f; // enforce the forground be rendered after the background
  102. foregroundCamera.rect = new Rect(0.5f, 0.0f, 0.5f, 1.0f);
  103. foregroundCamera.clearFlags = CameraClearFlags.Color;
  104. #if OVR_ANDROID_MRC
  105. foregroundCamera.backgroundColor = OVRManager.instance.externalCompositionBackdropColorQuest;
  106. #else
  107. foregroundCamera.backgroundColor = OVRManager.instance.externalCompositionBackdropColorRift;
  108. #endif
  109. foregroundCamera.cullingMask = mainCamera.cullingMask & (~OVRManager.instance.extraHiddenLayers);
  110. #if OVR_ANDROID_MRC
  111. foregroundCamera.targetTexture = mrcRenderTextureArray[0];
  112. #endif
  113. // Create cameraProxyPlane for clipping
  114. Debug.Assert(cameraProxyPlane == null);
  115. cameraProxyPlane = GameObject.CreatePrimitive(PrimitiveType.Quad);
  116. cameraProxyPlane.name = "OculusMRC_ProxyClipPlane";
  117. cameraProxyPlane.transform.parent = cameraInTrackingSpace ? cameraRig.trackingSpace : parentObject.transform;
  118. cameraProxyPlane.GetComponent<Collider>().enabled = false;
  119. cameraProxyPlane.GetComponent<MeshRenderer>().shadowCastingMode = UnityEngine.Rendering.ShadowCastingMode.Off;
  120. Material clipMaterial = new Material(Shader.Find("Oculus/OVRMRClipPlane"));
  121. cameraProxyPlane.GetComponent<MeshRenderer>().material = clipMaterial;
  122. #if OVR_ANDROID_MRC
  123. clipMaterial.SetColor("_Color", OVRManager.instance.externalCompositionBackdropColorQuest);
  124. #else
  125. clipMaterial.SetColor("_Color", OVRManager.instance.externalCompositionBackdropColorRift);
  126. #endif
  127. clipMaterial.SetFloat("_Visible", 0.0f);
  128. cameraProxyPlane.transform.localScale = new Vector3(1000, 1000, 1000);
  129. cameraProxyPlane.SetActive(true);
  130. OVRMRForegroundCameraManager foregroundCameraManager = foregroundCameraGameObject.AddComponent<OVRMRForegroundCameraManager>();
  131. foregroundCameraManager.composition = this;
  132. foregroundCameraManager.clipPlaneGameObj = cameraProxyPlane;
  133. previousMainCameraObject = mainCamera.gameObject;
  134. }
  135. }
  136. #if OVR_ANDROID_MRC
  137. private void RefreshAudioFilter()
  138. {
  139. if (cameraRig != null && (audioListener == null || !audioListener.enabled || !audioListener.gameObject.activeInHierarchy))
  140. {
  141. CleanupAudioFilter();
  142. AudioListener tmpAudioListener = cameraRig.centerEyeAnchor.gameObject.activeInHierarchy ? cameraRig.centerEyeAnchor.GetComponent<AudioListener>() : null;
  143. if (tmpAudioListener != null && !tmpAudioListener.enabled) tmpAudioListener = null;
  144. if (tmpAudioListener == null)
  145. {
  146. if (Camera.main != null && Camera.main.gameObject.activeInHierarchy)
  147. {
  148. tmpAudioListener = Camera.main.GetComponent<AudioListener>();
  149. if (tmpAudioListener != null && !tmpAudioListener.enabled) tmpAudioListener = null;
  150. }
  151. }
  152. if (tmpAudioListener == null)
  153. {
  154. Object[] allListeners = Object.FindObjectsOfType<AudioListener>();
  155. foreach (var l in allListeners)
  156. {
  157. AudioListener al = l as AudioListener;
  158. if (al != null && al.enabled && al.gameObject.activeInHierarchy)
  159. {
  160. tmpAudioListener = al;
  161. break;
  162. }
  163. }
  164. }
  165. if (tmpAudioListener == null)
  166. {
  167. Debug.LogWarning("[OVRExternalComposition] No AudioListener in scene");
  168. }
  169. else
  170. {
  171. Debug.LogFormat("[OVRExternalComposition] AudioListener found, obj {0}", tmpAudioListener.gameObject.name);
  172. }
  173. audioListener = tmpAudioListener;
  174. audioFilter = audioListener.gameObject.AddComponent<OVRMRAudioFilter>();
  175. audioFilter.composition = this;
  176. Debug.LogFormat("OVRMRAudioFilter added");
  177. }
  178. }
  179. private float[] cachedAudioDataArray = null;
  180. private int CastMrcFrame(int castTextureIndex)
  181. {
  182. int audioFrames;
  183. int audioChannels;
  184. GetAndResetAudioData(ref cachedAudioDataArray, out audioFrames, out audioChannels);
  185. int syncId = -1;
  186. //Debug.Log("EncodeFrameThreadObject EncodeMrcFrame");
  187. bool ret = false;
  188. if (OVRPlugin.Media.GetMrcInputVideoBufferType() == OVRPlugin.Media.InputVideoBufferType.TextureHandle)
  189. {
  190. ret = OVRPlugin.Media.EncodeMrcFrame(mrcRenderTextureArray[castTextureIndex].GetNativeTexturePtr(), cachedAudioDataArray, audioFrames, audioChannels, AudioSettings.dspTime, ref syncId);
  191. }
  192. else
  193. {
  194. ret = OVRPlugin.Media.EncodeMrcFrame(mrcRenderTextureArray[castTextureIndex], cachedAudioDataArray, audioFrames, audioChannels, AudioSettings.dspTime, ref syncId);
  195. }
  196. if (!ret)
  197. {
  198. Debug.LogWarning("EncodeMrcFrame failed. Likely caused by OBS plugin disconnection");
  199. return -1;
  200. }
  201. return syncId;
  202. }
  203. private void SetCameraTargetTexture(int drawTextureIndex)
  204. {
  205. RenderTexture texture = mrcRenderTextureArray[drawTextureIndex];
  206. if (backgroundCamera.targetTexture != texture)
  207. {
  208. backgroundCamera.targetTexture = texture;
  209. }
  210. if (foregroundCamera.targetTexture != texture)
  211. {
  212. foregroundCamera.targetTexture = texture;
  213. }
  214. }
  215. #endif
  216. public override void Update(GameObject gameObject, Camera mainCamera)
  217. {
  218. RefreshCameraObjects(gameObject, mainCamera);
  219. OVRPlugin.SetHandNodePoseStateLatency(0.0); // the HandNodePoseStateLatency doesn't apply to the external composition. Always enforce it to 0.0
  220. #if OVR_ANDROID_MRC
  221. RefreshAudioFilter();
  222. int drawTextureIndex = (frameIndex / 2) % 2;
  223. int castTextureIndex = 1 - drawTextureIndex;
  224. backgroundCamera.enabled = (frameIndex % 2) == 0;
  225. foregroundCamera.enabled = (frameIndex % 2) == 1;
  226. if (frameIndex % 2 == 0)
  227. {
  228. if (lastMrcEncodeFrameSyncId != -1)
  229. {
  230. OVRPlugin.Media.SyncMrcFrame(lastMrcEncodeFrameSyncId);
  231. lastMrcEncodeFrameSyncId = -1;
  232. }
  233. lastMrcEncodeFrameSyncId = CastMrcFrame(castTextureIndex);
  234. SetCameraTargetTexture(drawTextureIndex);
  235. }
  236. ++ frameIndex;
  237. #endif
  238. backgroundCamera.clearFlags = mainCamera.clearFlags;
  239. backgroundCamera.backgroundColor = mainCamera.backgroundColor;
  240. backgroundCamera.cullingMask = mainCamera.cullingMask & (~OVRManager.instance.extraHiddenLayers);
  241. backgroundCamera.nearClipPlane = mainCamera.nearClipPlane;
  242. backgroundCamera.farClipPlane = mainCamera.farClipPlane;
  243. foregroundCamera.cullingMask = mainCamera.cullingMask & (~OVRManager.instance.extraHiddenLayers);
  244. foregroundCamera.nearClipPlane = mainCamera.nearClipPlane;
  245. foregroundCamera.farClipPlane = mainCamera.farClipPlane;
  246. if (OVRMixedReality.useFakeExternalCamera || OVRPlugin.GetExternalCameraCount() == 0)
  247. {
  248. OVRPose worldSpacePose = new OVRPose();
  249. OVRPose trackingSpacePose = new OVRPose();
  250. trackingSpacePose.position = OVRManager.instance.trackingOriginType == OVRManager.TrackingOrigin.EyeLevel ?
  251. OVRMixedReality.fakeCameraEyeLevelPosition :
  252. OVRMixedReality.fakeCameraFloorLevelPosition;
  253. trackingSpacePose.orientation = OVRMixedReality.fakeCameraRotation;
  254. worldSpacePose = OVRExtensions.ToWorldSpacePose(trackingSpacePose);
  255. backgroundCamera.fieldOfView = OVRMixedReality.fakeCameraFov;
  256. backgroundCamera.aspect = OVRMixedReality.fakeCameraAspect;
  257. foregroundCamera.fieldOfView = OVRMixedReality.fakeCameraFov;
  258. foregroundCamera.aspect = OVRMixedReality.fakeCameraAspect;
  259. if (cameraInTrackingSpace)
  260. {
  261. backgroundCamera.transform.FromOVRPose(trackingSpacePose, true);
  262. foregroundCamera.transform.FromOVRPose(trackingSpacePose, true);
  263. }
  264. else
  265. {
  266. backgroundCamera.transform.FromOVRPose(worldSpacePose);
  267. foregroundCamera.transform.FromOVRPose(worldSpacePose);
  268. }
  269. }
  270. else
  271. {
  272. OVRPlugin.CameraExtrinsics extrinsics;
  273. OVRPlugin.CameraIntrinsics intrinsics;
  274. OVRPlugin.Posef calibrationRawPose;
  275. // So far, only support 1 camera for MR and always use camera index 0
  276. if (OVRPlugin.GetMixedRealityCameraInfo(0, out extrinsics, out intrinsics, out calibrationRawPose))
  277. {
  278. float fovY = Mathf.Atan(intrinsics.FOVPort.UpTan) * Mathf.Rad2Deg * 2;
  279. float aspect = intrinsics.FOVPort.LeftTan / intrinsics.FOVPort.UpTan;
  280. backgroundCamera.fieldOfView = fovY;
  281. backgroundCamera.aspect = aspect;
  282. foregroundCamera.fieldOfView = fovY;
  283. foregroundCamera.aspect = intrinsics.FOVPort.LeftTan / intrinsics.FOVPort.UpTan;
  284. if (cameraInTrackingSpace)
  285. {
  286. OVRPose trackingSpacePose = ComputeCameraTrackingSpacePose(extrinsics, calibrationRawPose);
  287. backgroundCamera.transform.FromOVRPose(trackingSpacePose, true);
  288. foregroundCamera.transform.FromOVRPose(trackingSpacePose, true);
  289. }
  290. else
  291. {
  292. OVRPose worldSpacePose = ComputeCameraWorldSpacePose(extrinsics, calibrationRawPose);
  293. backgroundCamera.transform.FromOVRPose(worldSpacePose);
  294. foregroundCamera.transform.FromOVRPose(worldSpacePose);
  295. }
  296. }
  297. else
  298. {
  299. Debug.LogError("Failed to get external camera information");
  300. return;
  301. }
  302. }
  303. // Assume player always standing straightly
  304. Vector3 externalCameraToHeadXZ = mainCamera.transform.position - foregroundCamera.transform.position;
  305. externalCameraToHeadXZ.y = 0;
  306. cameraProxyPlane.transform.position = mainCamera.transform.position;
  307. cameraProxyPlane.transform.LookAt(cameraProxyPlane.transform.position + externalCameraToHeadXZ);
  308. }
  309. #if OVR_ANDROID_MRC
  310. private void CleanupAudioFilter()
  311. {
  312. if (audioFilter)
  313. {
  314. audioFilter.composition = null;
  315. Object.Destroy(audioFilter);
  316. Debug.LogFormat("OVRMRAudioFilter destroyed");
  317. audioFilter = null;
  318. }
  319. }
  320. #endif
  321. public override void Cleanup()
  322. {
  323. OVRCompositionUtil.SafeDestroy(ref backgroundCameraGameObject);
  324. backgroundCamera = null;
  325. OVRCompositionUtil.SafeDestroy(ref foregroundCameraGameObject);
  326. foregroundCamera = null;
  327. OVRCompositionUtil.SafeDestroy(ref cameraProxyPlane);
  328. Debug.Log("ExternalComposition deactivated");
  329. #if OVR_ANDROID_MRC
  330. if (lastMrcEncodeFrameSyncId != -1)
  331. {
  332. OVRPlugin.Media.SyncMrcFrame(lastMrcEncodeFrameSyncId);
  333. lastMrcEncodeFrameSyncId = -1;
  334. }
  335. CleanupAudioFilter();
  336. for (int i=0; i<2; ++i)
  337. {
  338. mrcRenderTextureArray[i].Release();
  339. mrcRenderTextureArray[i] = null;
  340. }
  341. frameIndex = 0;
  342. #endif
  343. }
  344. private readonly object audioDataLock = new object();
  345. private List<float> cachedAudioData = new List<float>(16384);
  346. private int cachedChannels = 0;
  347. public void CacheAudioData(float[] data, int channels)
  348. {
  349. lock(audioDataLock)
  350. {
  351. if (channels != cachedChannels)
  352. {
  353. cachedAudioData.Clear();
  354. }
  355. cachedChannels = channels;
  356. cachedAudioData.AddRange(data);
  357. //Debug.LogFormat("[CacheAudioData] dspTime {0} indata {1} channels {2} accu_len {3}", AudioSettings.dspTime, data.Length, channels, cachedAudioData.Count);
  358. }
  359. }
  360. public void GetAndResetAudioData(ref float[] audioData, out int audioFrames, out int channels)
  361. {
  362. lock(audioDataLock)
  363. {
  364. //Debug.LogFormat("[GetAndResetAudioData] dspTime {0} accu_len {1}", AudioSettings.dspTime, cachedAudioData.Count);
  365. if (audioData == null || audioData.Length < cachedAudioData.Count)
  366. {
  367. audioData = new float[cachedAudioData.Capacity];
  368. }
  369. cachedAudioData.CopyTo(audioData);
  370. audioFrames = cachedAudioData.Count;
  371. channels = cachedChannels;
  372. cachedAudioData.Clear();
  373. }
  374. }
  375. }
  376. /// <summary>
  377. /// Helper internal class for foregroundCamera, don't call it outside
  378. /// </summary>
  379. internal class OVRMRForegroundCameraManager : MonoBehaviour
  380. {
  381. public OVRExternalComposition composition;
  382. public GameObject clipPlaneGameObj;
  383. private Material clipPlaneMaterial;
  384. void OnPreRender()
  385. {
  386. // the clipPlaneGameObj should be only visible to foreground camera
  387. if (clipPlaneGameObj)
  388. {
  389. if (clipPlaneMaterial == null)
  390. clipPlaneMaterial = clipPlaneGameObj.GetComponent<MeshRenderer>().material;
  391. clipPlaneGameObj.GetComponent<MeshRenderer>().material.SetFloat("_Visible", 1.0f);
  392. }
  393. }
  394. void OnPostRender()
  395. {
  396. if (clipPlaneGameObj)
  397. {
  398. Debug.Assert(clipPlaneMaterial);
  399. clipPlaneGameObj.GetComponent<MeshRenderer>().material.SetFloat("_Visible", 0.0f);
  400. }
  401. }
  402. }
  403. #if OVR_ANDROID_MRC
  404. public class OVRMRAudioFilter : MonoBehaviour
  405. {
  406. private bool running = false;
  407. public OVRExternalComposition composition;
  408. void Start()
  409. {
  410. running = true;
  411. }
  412. void OnAudioFilterRead(float[] data, int channels)
  413. {
  414. if (!running)
  415. return;
  416. if (composition != null)
  417. {
  418. composition.CacheAudioData(data, channels);
  419. }
  420. }
  421. }
  422. #endif
  423. #endif