1 /*
2 OpenVR for PyMOL Copyright Notice
3 =====================================
4 
5 The OpenVR for PyMOL source code is copyrighted, but you can freely use and
6 copy it as long as you don't change or remove any of the Copyright notices.
7 OpenVR for PyMOL is made available under the following open-source license
8 terms:
9 
10 ------------------------------------------------------------------------------
11 Copyright (c) 2018 EPAM Systems, Inc.
12 
13 Permission is hereby granted, free of charge, to any person obtaining a copy
14 of this software and associated documentation files (the "Software"), to deal
15 in the Software without restriction, including without limitation the rights
16 to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
17 copies of the Software, and to permit persons to whom the Software is
18 furnished to do so, subject to the following conditions:
19 
20 The above copyright notice and this permission notice shall be included in all
21 copies or substantial portions of the Software.
22 
23 THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
24 IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
25 FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
26 AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
27 LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
28 OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
29 SOFTWARE.
30 ------------------------------------------------------------------------------
31 
32 */
33 
34 // this header
35 #include "OpenVRMode.h"
36 
37 // system headers
38 #include "os_std.h"
39 #include "os_gl.h"
40 #include "os_python.h"
41 #include <string>
42 #include <vector>
43 #include "openvr.h"
44 
45 // pymol headers
46 #include "PyMOLOptions.h"
47 #include "Setting.h"
48 #include "Feedback.h"
49 #include "Matrix.h"
50 #include "Ortho.h"
51 
52 // local headers
53 #include "OpenVRUtils.h"
54 #include "OpenVRStub.h"
55 #include "OpenVRController.h"
56 #include "OpenVRMenu.h"
57 #include "OpenVRActionList.h"
58 #include "OpenVRScenePicker.h"
59 #include "OpenVRLaserTarget.h"
60 
// Per-eye rendering state: projection parameters plus the GL offscreen
// targets (a multisampled render FBO and a single-sample resolve FBO whose
// texture is handed to the OpenVR compositor).
struct CEye {
  vr::EVREye Eye; // vr::Eye_Left or vr::Eye_Right

  GLfloat HeadToEyeMatrix[16];  // refreshed by OpenVRGetHeadToEye
  GLfloat ProjectionMatrix[16]; // built by OpenVRGetEyeProjection (column-major)

  GLuint FrameBufferID; // multisampled FBO the scene renders into
  GLuint DepthBufferID; // depth renderbuffer attached to FrameBufferID
  GLuint ColorBufferID; // multisample color texture attached to FrameBufferID

  GLuint ResolveBufferID;  // single-sample FBO used as the MSAA blit target
  GLuint ResolveTextureID; // color texture submitted to the compositor

  vr::Texture_t Texture; // wraps ResolveTextureID for IVRCompositor::Submit

  float Left, Right, Top, Bottom; // projection params
};
78 
// Controller hand indices; also used to index COpenVR::Hands.
enum EHand
{
  HLeft = 0,
  HRight = 1,

  Hand_Count // number of hands — keep last
};
86 
// Per-hand action sets the user can cycle through; each set gives the
// controller buttons a different meaning (see s_userActionMapping).
enum EUserActionSet
{
  UserActionSet_Mouse, // controller buttons emulate mouse clicks
  UserActionSet_Scene, // scene prev/next navigation
  UserActionSet_Movie, // movie playback control

  UserActionSet_Count // number of action sets — keep last
};
95 
// Concrete actions a controller button can trigger, grouped by the
// action set they belong to (EUserActionSet).
enum EUserAction
{
  UserAction_None,

  UserAction_Mouse_LClick,
  UserAction_Mouse_MClick,
  UserAction_Mouse_RClick,

  UserAction_Scene_Prev,
  UserAction_Scene_Next,

  UserAction_Movie_Prev,
  UserAction_Movie_Toggle,
  UserAction_Movie_Next,
};
111 
// Maps [action set][button slot 0..2] to a user action. The middle slot of
// the mouse set is currently disabled (original value kept for reference).
static EUserAction s_userActionMapping[UserActionSet_Count][3] = {
  {UserAction_Mouse_LClick, UserAction_None/*UserAction_Mouse_MClick*/, UserAction_Mouse_RClick}, // UserActionSet_Mouse
  {UserAction_Scene_Prev, UserAction_None, UserAction_Scene_Next}, // UserActionSet_Scene
  {UserAction_Movie_Prev, UserAction_Movie_Toggle, UserAction_Movie_Next}, // UserActionSet_Movie
};
117 
// All global VR state for one PyMOL instance, reachable via G->OpenVR.
struct COpenVR {
  // Such structures used to be calloc-ed, this replicates that
  // (zero-fill the storage before construction so every member starts
  // out as 0/NULL, matching the old C-style allocation).
  void *operator new(size_t size) {
    void *mem = ::operator new(size);
    memset(mem, 0, size);
    return mem;
  }

  vr::EVRInitError InitError;    // result of VR_Init; VRInitError_None on success
  vr::IVRSystem* System;         // core runtime interface; NULL when init failed
  vr::IVRCompositor* Compositor; // frame submission interface
  vr::IVRInput* Input;           // action-based input API; may be NULL
  vr::TrackedDevicePose_t Poses[vr::k_unMaxTrackedDeviceCount]; // todo remove from globals?

  GLfloat HeadPose[16];          // head pose matrix (used e.g. to place the menu)
  GLfloat WorldToHeadMatrix[16]; // world-to-head view matrix (see OpenVRGetWorldToHead)

  unsigned Width;  // per-eye render target width; 0 until lazy init
  unsigned Height; // per-eye render target height; 0 until lazy init

  CEye* Eye;  // eye currently being rendered; NULL between eyes
  CEye Left;
  CEye Right;

  OpenVRController Hands[Hand_Count]; // indexed by EHand
  GLuint ControllerHintsTexture;      // button-hints overlay texture

  bool ForcedFront; // force near clip plane from settings (see CheckNearFarPlaneSettings)
  bool ForcedBack;  // force far clip plane; set while the menu is visible

  OpenVRMenu Menu;
  GLuint MenuSplashTexture;

  OpenVRScenePicker Picker;

  OpenVRInputHandlers* Handlers; // owned; replaced via OpenVRSetInputHandlers
  OpenVRActionList* Actions;     // created from the action manifest in OpenVRInit
  EUserActionSet UserActionSet[Hand_Count]; // active action set per hand

  // mouse cursor imitation information
  int startX, startY; // anchor position recorded when a drag begins
  int deltaX, deltaY; // displacement from the anchor during the drag

  float moleculeToWorldMatrix[16];
  float moleculeToCapturingController[16]; // molecule transform relative to the grabbing controller/pivot
  float controllersCenterToWorld[16];
  int capturingHandIdx;      // EHand of the grabbing controller; -1 when none
  float controllersDistance; // baseline distance for two-handed scaling
};
167 
// Printable names indexed by vr::ETrackedDeviceClass values
// (used by OpenVRFeedback when listing tracked devices).
static char const* deviceClassNames[] = {
  "Invalid",
  "Head-Mounted Display",
  "Controller",
  "Generic Tracker",
  "Reference Point",
  "Accessory",
};
// entry count of the table above, for bounds checking
static const int deviceClassNamesCount = sizeof(deviceClassNames) / sizeof(*deviceClassNames);
177 
// Small value type describing a synthesized mouse event originating from a
// tracked VR controller (deviceIndex) rather than a real mouse.
struct CMouseEvent {
  unsigned deviceIndex; // OpenVR tracked-device index that produced the event
  int button;           // P_GLUT_* button code
  int state;            // P_GLUT_DOWN or P_GLUT_UP
  CMouseEvent() : deviceIndex(0), button(0), state(0) {}
  CMouseEvent(unsigned i, int b, int s) : deviceIndex(i), button(b), state(s) {}
  // convenience overload: boolean "pressed" translated to P_GLUT_DOWN/P_GLUT_UP
  CMouseEvent(unsigned i, int b, bool s) : deviceIndex(i), button(b), state(s ? P_GLUT_DOWN : P_GLUT_UP) {}
};
186 
// Default input-handler set: routes VR controller events into PyMOL's Ortho
// (command/GUI) layer so controllers act like a keyboard and mouse.
class OpenVROrthoInputHandlers : public OpenVRInputHandlers {
  PyMOLGlobals *G; // non-owning back-pointer to the PyMOL context

public:
  explicit OpenVROrthoInputHandlers(PyMOLGlobals *G) : G(G) {}

  // forward plain key presses to the Ortho keyboard handler
  void KeyboardFunc(unsigned char k, int x, int y, int mod) override {
    OrthoKey(G, k, x, y, mod);
  }
  // forward special (non-character) keys
  void SpecialFunc(int k, int x, int y, int mod) override {
    OrthoSpecial(G, k, x, y, mod);
  }
  // forward button press/release events; returns Ortho's result
  int MouseFunc(int button, int state, int x, int y, int mod) override {
    return OrthoButtonDefer(G, button, state, x, y, mod);
  }
  // forward pointer motion while a button is held
  int MotionFunc(int x, int y, int mod) override {
    return OrthoDrag(G, x, y, mod);
  }
  // translate a high-level VR action into the corresponding PyMOL command
  void ActionFunc(int a) override {
    switch (a) {
    case cAction_scene_next:
      OrthoCommandIn(G, "cmd.scene('','next')");
      break;

    case cAction_scene_prev:
      OrthoCommandIn(G, "cmd.scene('','previous')");
      break;

    case cAction_movie_toggle:
      OrthoCommandIn(G, "mtoggle");
      break;

    case cAction_movie_next:
      OrthoCommandIn(G, "forward");
      break;

    case cAction_movie_prev:
      OrthoCommandIn(G, "backward");
      break;
    }

    // request a redraw after any action
    OrthoInvalidateDoDraw(G);
  }
};
231 
232 void UpdateDevicePoses(PyMOLGlobals * G);
233 
OpenVRAvailable(PyMOLGlobals *)234 bool OpenVRAvailable(PyMOLGlobals *)
235 {
236   return vr::stub::VR_IsHmdPresent();
237 }
238 
OpenVRReady(PyMOLGlobals * G)239 bool OpenVRReady(PyMOLGlobals * G)
240 {
241   COpenVR *I = G->OpenVR;
242   return I && I->InitError == vr::VRInitError_None && I->System != NULL;
243 }
244 
EyeInit(CEye * I,vr::EVREye eye,int scene_width,int scene_height)245 static bool EyeInit(CEye * I, vr::EVREye eye, int scene_width, int scene_height)
246 {
247   I->Eye = eye;
248 
249   // framebuffer
250   glGenFramebuffersEXT(1, &I->FrameBufferID);
251   glBindFramebufferEXT(GL_FRAMEBUFFER, I->FrameBufferID);
252 
253   // - depth
254   glGenRenderbuffersEXT(1, &I->DepthBufferID);
255   glBindRenderbufferEXT(GL_RENDERBUFFER, I->DepthBufferID);
256   glRenderbufferStorageMultisampleEXT(GL_RENDERBUFFER, 4, GL_DEPTH_COMPONENT, scene_width, scene_height);
257   glFramebufferRenderbufferEXT(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, I->DepthBufferID);
258 
259   // - color
260   glGenTextures(1, &I->ColorBufferID);
261   glBindTexture(GL_TEXTURE_2D_MULTISAMPLE, I->ColorBufferID);
262   glTexImage2DMultisample(GL_TEXTURE_2D_MULTISAMPLE, 4, GL_RGBA8, scene_width, scene_height, GL_TRUE);
263   glFramebufferTexture2DEXT(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D_MULTISAMPLE, I->ColorBufferID, 0);
264   glBindTexture(GL_TEXTURE_2D_MULTISAMPLE, 0);
265 
266   // resolve buffer
267   glGenFramebuffersEXT(1, &I->ResolveBufferID);
268   glBindFramebufferEXT(GL_FRAMEBUFFER, I->ResolveBufferID);
269 
270   // - color
271   glGenTextures(1, &I->ResolveTextureID);
272   glBindTexture(GL_TEXTURE_2D, I->ResolveTextureID);
273   glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
274   glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAX_LEVEL, 0);
275   glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, scene_width, scene_height, 0, GL_RGBA, GL_UNSIGNED_BYTE, NULL);
276   glFramebufferTexture2DEXT(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, I->ResolveTextureID, 0);
277   glBindTexture(GL_TEXTURE_2D, 0);
278 
279   // VR texture
280   I->Texture.handle = (void*)(size_t)I->ResolveTextureID;
281   I->Texture.eType = vr::TextureType_OpenGL;
282   I->Texture.eColorSpace = vr::ColorSpace_Gamma;
283 
284   // check FBO status
285   GLenum status = glCheckFramebufferStatusEXT(GL_FRAMEBUFFER);
286   glBindFramebufferEXT(GL_FRAMEBUFFER, 0);
287   return (status == GL_FRAMEBUFFER_COMPLETE);
288 }
289 
EyeFree(CEye * I)290 static void EyeFree(CEye * I)
291 {
292   glDeleteTextures(1, &I->ResolveTextureID);
293   glDeleteFramebuffers(1, &I->ResolveBufferID);
294   glDeleteTextures(1, &I->ColorBufferID);
295   glDeleteRenderbuffers(1, &I->DepthBufferID);
296   glDeleteFramebuffers(1, &I->FrameBufferID);
297 }
298 
OpenVRInit(PyMOLGlobals * G)299 int OpenVRInit(PyMOLGlobals * G)
300 {
301   if(G->OpenVR)
302     return 1; // already initialized
303 
304   vr::stub::VR_StubEnable(G->Option->openvr_stub);
305   if (!OpenVRAvailable(G))
306     return 0; // don't bother initializing the whole system
307 
308   COpenVR *I = G->OpenVR = new COpenVR();
309   if(!I)
310     return 0;
311 
312   I->InitError = vr::VRInitError_None;
313   I->System = vr::stub::VR_Init(&I->InitError, vr::VRApplication_Scene);
314   if (I->InitError != vr::VRInitError_None) {
315     I->System = NULL;
316     return 0;
317   }
318 
319   I->Compositor = vr::stub::VRCompositor();
320   I->ForcedFront = true;
321 
322   OpenVRSetInputHandlers(G, new OpenVROrthoInputHandlers(G));
323 
324   I->Input = vr::stub::VRInput();
325   if (I->Input) {
326     // init manifest
327     auto manifestPath = std::string(getenv("PYMOL_DATA"))
328                             .append(PATH_SEP "openvr" PATH_SEP "actions.json");
329     I->Input->SetActionManifestPath(manifestPath.c_str());
330 
331     I->Actions = new OpenVRActionList(I->Input);
332   }
333 
334   I->capturingHandIdx = -1;
335 
336   return 1;
337 }
338 
OpenVRFree(PyMOLGlobals * G)339 void OpenVRFree(PyMOLGlobals * G)
340 {
341   ShutdownRenderModels();
342 
343   if(!G->OpenVR)
344     return;
345 
346   COpenVR *I = G->OpenVR;
347   if(I->System) {
348     vr::stub::VR_Shutdown();
349 
350     I->Picker.Free();
351     I->Menu.Free();
352     delete I->Handlers;
353 
354     I->Hands[HLeft].Free();
355     I->Hands[HRight].Free();
356 
357     EyeFree(&I->Right);
358     EyeFree(&I->Left);
359 
360     I->System = NULL;
361   }
362 
363   delete G->OpenVR;
364   G->OpenVR = NULL;
365 }
366 
OpenVRInitPostponed(PyMOLGlobals * G)367 static void OpenVRInitPostponed(PyMOLGlobals * G)
368 {
369   COpenVR *I = G->OpenVR;
370   if(!OpenVRReady(G))
371     return;
372 
373   if (!I->Width || !I->Height) {
374     I->System->GetRecommendedRenderTargetSize(&I->Width, &I->Height);
375     EyeInit(&I->Left, vr::Eye_Left, I->Width, I->Height);
376     EyeInit(&I->Right, vr::Eye_Right, I->Width, I->Height);
377 
378     I->ControllerHintsTexture = OpenVRUtils::LoadTexture("hints_vive_controller.png");
379     I->MenuSplashTexture = OpenVRUtils::LoadTexture("menu_splash.png");
380 
381     I->Menu.Init(I->Handlers);
382     I->Picker.Init(I->Handlers);
383 
384     OpenVRMenuSettingsChanged(G);
385   }
386 
387   float width = SettingGetGlobal_f(G, cSetting_openvr_laser_width);
388   for (int i = HLeft; i <= HRight; ++i) {
389     OpenVRController &hand = I->Hands[i];
390     if (!hand.IsInitialized()) {
391       hand.Init();
392       hand.SetHintsTexture(I->ControllerHintsTexture, UserActionSet_Count);
393       hand.SetLaserWidth(width);
394     }
395   }
396 }
397 
OpenVRSetInputHandlers(PyMOLGlobals * G,OpenVRInputHandlers * handlers)398 void OpenVRSetInputHandlers(PyMOLGlobals * G, OpenVRInputHandlers* handlers)
399 {
400   COpenVR *I = G->OpenVR;
401   if (I) {
402     if (I->Handlers)
403       delete I->Handlers;
404     I->Handlers = handlers;
405   }
406 }
407 
GetStringTrackedDeviceProperty(vr::IVRSystem * System,vr::TrackedDeviceIndex_t index,vr::TrackedDeviceProperty prop)408 static std::string GetStringTrackedDeviceProperty(vr::IVRSystem *System, vr::TrackedDeviceIndex_t index, vr::TrackedDeviceProperty prop)
409 {
410   uint32_t length = System->GetStringTrackedDeviceProperty(index, prop, NULL, 0);
411   if(length != 0) {
412     std::string buffer(length, 0);
413     if (System->GetStringTrackedDeviceProperty(index, prop, &buffer[0], length) != 0) {
414       return buffer;
415     }
416   }
417 
418   return std::string("<ERROR>");
419 }
420 
OpenVRFeedback(PyMOLGlobals * G)421 void OpenVRFeedback(PyMOLGlobals * G)
422 {
423   COpenVR *I = G->OpenVR;
424   if(vr::stub::VR_IsStubEnabled()) {
425     G->Feedback->add(" OpenVR stub is enabled.\n");
426   }
427   if(!OpenVRAvailable(G)) {
428     G->Feedback->add(" OpenVR system is not available.\n");
429   } else if(!OpenVRReady(G)) {
430     PRINTF
431       " OpenVR system is not ready: %s.\n",
432       I ? vr::stub::VR_GetVRInitErrorAsEnglishDescription(I->InitError) : "Failed to initialize properly"
433     ENDF(G);
434   } else {
435     G->Feedback->add(" Detected OpenVR system. Devices being currently tracked:\n");
436 
437     bool found = false;
438     for(uint32_t i = 0; i < vr::k_unMaxTrackedDeviceCount; ++i) {
439       vr::ETrackedDeviceClass deviceClass = I->System->GetTrackedDeviceClass(i);
440       if(deviceClass != vr::TrackedDeviceClass_Invalid) {
441         found = true;
442 
443         char const* className = (0 <= deviceClass && deviceClass < deviceClassNamesCount) ? deviceClassNames[deviceClass] : "<ERROR>";
444         std::string model = GetStringTrackedDeviceProperty(I->System, i, vr::Prop_ModelNumber_String);
445         std::string serial = GetStringTrackedDeviceProperty(I->System, i, vr::Prop_SerialNumber_String);
446 
447         PRINTF "  %02u: %s (%s %s)\n", i, className, model.c_str(), serial.c_str() ENDF(G);
448       }
449     }
450     if(!found) {
451       G->Feedback->add("  No valid devices found.\n");
452     }
453   }
454   G->Feedback->add("\n");
455 }
456 
OpenVRFrameStart(PyMOLGlobals * G)457 void OpenVRFrameStart(PyMOLGlobals * G)
458 {
459   COpenVR *I = G->OpenVR;
460   if(!OpenVRReady(G))
461     return;
462 
463   // create OpenGL assets on the first use
464   OpenVRInitPostponed(G);
465 
466   // get matrices from tracked devices
467   if (I->Compositor->WaitGetPoses(I->Poses, vr::k_unMaxTrackedDeviceCount, NULL, 0) != vr::VRCompositorError_None) {
468     G->Feedback->add("  Cannot update device poses\n");
469   }
470   UpdateDevicePoses(G);
471 }
472 
OpenVREyeStart(PyMOLGlobals * G,int eye)473 void OpenVREyeStart(PyMOLGlobals * G, int eye)
474 {
475   COpenVR *I = G->OpenVR;
476   if(!OpenVRReady(G))
477     return;
478 
479   GL_DEBUG_FUN();
480 
481   CEye *E = I->Eye = eye ? &I->Right : &I->Left;
482 
483   glBindFramebufferEXT(GL_FRAMEBUFFER, E->FrameBufferID);
484   glViewport(0, 0, I->Width, I->Height);
485   glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
486 }
487 
OpenVREyeFinish(PyMOLGlobals * G)488 void OpenVREyeFinish(PyMOLGlobals * G)
489 {
490   COpenVR *I = G->OpenVR;
491   if(!OpenVRReady(G))
492     return;
493 
494   GL_DEBUG_FUN();
495 
496   CEye *E = I->Eye;
497   if(!E)
498     return;
499 
500   if(G->Option->multisample)
501     glDisable(0x809D);       /* GL_MULTISAMPLE_ARB */
502 
503   glBindFramebufferEXT(GL_READ_FRAMEBUFFER, E->FrameBufferID);
504   glBindFramebufferEXT(GL_DRAW_FRAMEBUFFER, E->ResolveBufferID);
505   glBlitFramebufferEXT(0, 0, I->Width, I->Height, 0, 0, I->Width, I->Height, GL_COLOR_BUFFER_BIT, GL_LINEAR);
506   glBindFramebufferEXT(GL_READ_FRAMEBUFFER, 0);
507   glBindFramebufferEXT(GL_DRAW_FRAMEBUFFER, 0);
508   glBindFramebufferEXT(GL_FRAMEBUFFER, 0);
509 
510   if(G->Option->multisample)
511     glEnable(0x809D);       /* GL_MULTISAMPLE_ARB */
512 
513   I->Eye = NULL;
514 }
515 
OpenVRSceneFinish(PyMOLGlobals * G,unsigned sceneX,unsigned sceneY,unsigned sceneWidth,unsigned sceneHeight)516 void OpenVRSceneFinish(PyMOLGlobals * G, unsigned sceneX, unsigned sceneY, unsigned sceneWidth, unsigned sceneHeight)
517 {
518   COpenVR *I = G->OpenVR;
519   if(!OpenVRReady(G))
520     return;
521 
522   GL_DEBUG_FUN();
523 
524   // find a proper rectangle with the scene aspect ratio
525   unsigned width = I->Height * sceneWidth / sceneHeight;
526   unsigned height = I->Width * sceneHeight / sceneWidth;
527   unsigned dx = 0, dy = 0;
528   if (width < I->Width) {
529     dx = (I->Width - width) / 2;
530     height = I->Height;
531   } else {
532     dy = (I->Height - height) / 2;
533     width = I->Width;
534   }
535 
536   // display a copy of the VR framebuffer in the main PyMOL window
537   glDrawBuffer(GL_BACK);
538   glBindFramebufferEXT(GL_READ_FRAMEBUFFER, I->Left.ResolveBufferID);
539   glBlitFramebufferEXT(dx, dy, dx + width, dy + height, sceneX, sceneY, sceneX + sceneWidth, sceneY + sceneHeight, GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT, GL_NEAREST);
540   glBindFramebufferEXT(GL_READ_FRAMEBUFFER, 0);
541 }
542 
OpenVRFrameFinish(PyMOLGlobals * G)543 void OpenVRFrameFinish(PyMOLGlobals * G)
544 {
545   COpenVR *I = G->OpenVR;
546   if(!OpenVRReady(G))
547     return;
548 
549   GL_DEBUG_FUN();
550 
551   // send rendered pictures into the headset
552   I->Compositor->Submit(vr::Eye_Left, &I->Left.Texture);
553   I->Compositor->Submit(vr::Eye_Right, &I->Right.Texture);
554 }
555 
OpenVRGetWidthHeight(PyMOLGlobals * G,int * width,int * height)556 void OpenVRGetWidthHeight(PyMOLGlobals * G, int* width, int* height)
557 {
558   COpenVR *I = G->OpenVR;
559   if (I) {
560     *width = I->Width;
561     *height = I->Height;
562   }
563 }
564 
OpenVRMenuBufferStart(PyMOLGlobals * G,unsigned width,unsigned height,bool clear)565 void OpenVRMenuBufferStart(PyMOLGlobals * G, unsigned width, unsigned height, bool clear /* = true */)
566 {
567   COpenVR *I = G->OpenVR;
568   if(!OpenVRReady(G))
569     return;
570 
571   I->Menu.Start(width, height, clear);
572 }
573 
OpenVRMenuBufferFinish(PyMOLGlobals * G)574 void OpenVRMenuBufferFinish(PyMOLGlobals * G)
575 {
576   COpenVR *I = G->OpenVR;
577   if(!OpenVRReady(G))
578     return;
579 
580   I->Menu.Finish();
581 }
582 
OpenVRMenuToggle(PyMOLGlobals * G,unsigned deviceIndex)583 void OpenVRMenuToggle(PyMOLGlobals * G, unsigned deviceIndex /* = ~0U */)
584 {
585   COpenVR *I = G->OpenVR;
586   if(!OpenVRReady(G))
587     return;
588 
589   if (!I->Menu.IsVisible()) {
590     I->Menu.Show(I->HeadPose, deviceIndex);
591     I->ForcedBack = true;
592   } else {
593     unsigned ownerIndex = I->Menu.GetOwnerID();
594     if (deviceIndex == ownerIndex || deviceIndex == ~0U || ownerIndex == ~0U) {
595       I->Menu.Hide();
596       I->ForcedBack = false;
597     }
598   }
599 }
600 
OpenVRMenuCrop(PyMOLGlobals * G,unsigned x,unsigned y,unsigned width,unsigned height)601 void OpenVRMenuCrop(PyMOLGlobals * G, unsigned x, unsigned y, unsigned width, unsigned height)
602 {
603   COpenVR *I = G->OpenVR;
604   if(!OpenVRReady(G))
605     return;
606 
607   I->Menu.Crop(x, y, width, height);
608 }
609 
OpenVRMenuSettingsChanged(PyMOLGlobals * G)610 void OpenVRMenuSettingsChanged(PyMOLGlobals * G)
611 {
612   COpenVR *I = G->OpenVR;
613   if(!OpenVRReady(G))
614     return;
615 
616   float distance = SettingGetGlobal_f(G, cSetting_openvr_gui_distance);
617   float fov = SettingGetGlobal_f(G, cSetting_openvr_gui_fov);
618   I->Menu.SetSize(distance, tanf(fov * PI / 180.0f));
619 
620   float sceneColor = SettingGetGlobal_f(G, cSetting_openvr_gui_scene_color);
621   float sceneAlpha = SettingGetGlobal_f(G, cSetting_openvr_gui_scene_alpha);
622   I->Menu.SetSceneColor(sceneColor, sceneAlpha);
623 
624   float backColor = SettingGetGlobal_f(G, cSetting_openvr_gui_back_color);
625   float backAlpha = SettingGetGlobal_f(G, cSetting_openvr_gui_back_alpha);
626   I->Menu.SetBackdropColor(backColor, backAlpha);
627 }
628 
OpenVRGetHeadToEye(PyMOLGlobals * G)629 float* OpenVRGetHeadToEye(PyMOLGlobals * G)
630 {
631   COpenVR *I = G->OpenVR;
632   if(!OpenVRReady(G) || !I->Eye)
633     return NULL;
634 
635   CEye *E = I->Eye;
636   vr::HmdMatrix34_t EyeToHeadTransform = I->System->GetEyeToHeadTransform(E->Eye);
637   OpenVRUtils::MatrixFastInverseVRGL((const float *)EyeToHeadTransform.m, E->HeadToEyeMatrix);
638 
639   return E->HeadToEyeMatrix;
640 }
641 
OpenVRGetWorldToHead(PyMOLGlobals * G)642 float* OpenVRGetWorldToHead(PyMOLGlobals * G) {
643   COpenVR *I = G->OpenVR;
644   if(!OpenVRReady(G))
645     return NULL;
646 
647   return I->WorldToHeadMatrix;
648 }
649 
OpenVRGetControllerPose(PyMOLGlobals * G,EHand handIdx)650 float* OpenVRGetControllerPose(PyMOLGlobals * G, EHand handIdx) {
651   COpenVR *I = G->OpenVR;
652   if(!OpenVRReady(G))
653     return NULL;
654 
655   return I->Hands[handIdx].GetPose();
656 }
657 
OpenVRGetProjection(float left,float right,float top,float bottom,float near_plane,float far_plane,float * matrix)658 void OpenVRGetProjection(float left, float right, float top, float bottom, float near_plane, float far_plane, float *matrix)
659 {
660   if (!matrix)
661     return;
662 
663   // fast affine inverse matrix, row major to column major, whew...
664   {
665     float (*dst)[4] = (float(*)[4])matrix;
666     float dx = (right - left);
667     float dy = (bottom - top);
668     float dz = far_plane - near_plane;
669 
670     // transpose rotation
671     dst[0][0] = 2.0f / dx;
672     dst[0][1] = 0.0f;
673     dst[0][2] = 0.0f;
674     dst[0][3] = 0.0f;
675 
676     dst[1][0] = 0.0f;
677     dst[1][1] = 2.0f / dy;
678     dst[1][2] = 0.0f;
679     dst[1][3] = 0.0f;
680 
681     dst[2][0] = (right + left) / dx;
682     dst[2][1] = (top + bottom) / dy;
683     dst[2][2] = -(far_plane + near_plane) / dz;
684     dst[2][3] = -1.0f;
685 
686     dst[3][0] = 0.0f;
687     dst[3][1] = 0.0f;
688     dst[3][2] = -2.0f * far_plane * near_plane / dz;
689     dst[3][3] = 0.0f;
690   }
691 
692   return;
693 }
694 
CheckNearFarPlaneSettings(PyMOLGlobals * G,float & near_plane,float & far_plane)695 void CheckNearFarPlaneSettings(PyMOLGlobals * G, float &near_plane, float &far_plane) {
696   COpenVR *I = G->OpenVR;
697 
698   if (I->ForcedFront || SettingGetGlobal_b(G, cSetting_openvr_disable_clipping)) {
699     near_plane = SettingGetGlobal_f(G, cSetting_openvr_near_plane);
700   }
701   if (I->ForcedBack || SettingGetGlobal_b(G, cSetting_openvr_disable_clipping)) {
702     far_plane = SettingGetGlobal_f(G, cSetting_openvr_far_plane);
703   }
704 }
705 
OpenVRGetEyeProjection(PyMOLGlobals * G,float near_plane,float far_plane)706 float* OpenVRGetEyeProjection(PyMOLGlobals * G, float near_plane, float far_plane)
707 {
708   COpenVR *I = G->OpenVR;
709   if(!OpenVRReady(G) || !I->Eye)
710     return NULL;
711 
712   CheckNearFarPlaneSettings(G, near_plane, far_plane);
713 
714   CEye *E = I->Eye;
715   I->System->GetProjectionRaw(E->Eye, &(E->Left), &(E->Right), &(E->Top), &(E->Bottom));
716   OpenVRGetProjection(E->Left, E->Right, E->Top, E->Bottom, near_plane, far_plane, E->ProjectionMatrix);
717   return E->ProjectionMatrix;
718 }
719 
OpenVRGetPickingProjection(PyMOLGlobals * G,float near_plane,float far_plane,float * matrix)720 void  OpenVRGetPickingProjection(PyMOLGlobals * G, float near_plane, float far_plane, float *matrix)
721 {
722   COpenVR *I = G->OpenVR;
723   if(!OpenVRReady(G))
724     return;
725 
726   CheckNearFarPlaneSettings(G, near_plane, far_plane);
727 
728   // take avarage projection params from eyes
729   float left, right, top, bottom;
730   CEye &LEye = I->Left, &REye = I->Right;
731   left = (LEye.Left + REye.Left) * 0.5f;
732   right = (LEye.Right + REye.Right) * 0.5f;
733   top = (LEye.Top + REye.Top) * 0.5f;
734   bottom = (LEye.Bottom + REye.Bottom) * 0.5f;
735   OpenVRGetProjection(left, right, top, bottom, near_plane, far_plane, matrix);
736   return;
737 }
738 
OpenVRGetPickingMatrix(PyMOLGlobals * G)739 float const* OpenVRGetPickingMatrix(PyMOLGlobals * G)
740 {
741   COpenVR *I = G->OpenVR;
742   if(!OpenVRReady(G))
743     return NULL;
744 
745   return I->Picker.GetMatrix();
746 }
747 
OpenVRLoadProjectionMatrix(PyMOLGlobals * G,float near_plane,float far_plane)748 void OpenVRLoadProjectionMatrix(PyMOLGlobals * G, float near_plane, float far_plane)
749 {
750   glLoadMatrixf(OpenVRGetEyeProjection(G, near_plane, far_plane));
751 }
752 
OpenVRLoadPickingProjectionMatrix(PyMOLGlobals * G,float near_plane,float far_plane)753 void OpenVRLoadPickingProjectionMatrix(PyMOLGlobals * G, float near_plane, float far_plane)
754 {
755   float matrix[16];
756   OpenVRGetPickingProjection(G, near_plane, far_plane, matrix);
757   glLoadMatrixf(matrix);
758 }
759 
OpenVRLoadWorld2EyeMatrix(PyMOLGlobals * G)760 void OpenVRLoadWorld2EyeMatrix(PyMOLGlobals * G)
761 {
762   glLoadMatrixf(OpenVRGetHeadToEye(G));
763   glMultMatrixf(OpenVRGetWorldToHead(G));
764 }
765 
GetTrackedDeviceString(PyMOLGlobals * G,vr::TrackedDeviceIndex_t unDevice,vr::TrackedDeviceProperty prop,vr::TrackedPropertyError * peError=NULL)766 std::string GetTrackedDeviceString(PyMOLGlobals * G, vr::TrackedDeviceIndex_t unDevice, vr::TrackedDeviceProperty prop, vr::TrackedPropertyError *peError = NULL )
767 {
768   COpenVR *I = G->OpenVR;
769   if (!I || !I->System)
770     return "";
771 
772   uint32_t unRequiredBufferLen = I->System->GetStringTrackedDeviceProperty( unDevice, prop, NULL, 0, peError );
773   if( unRequiredBufferLen == 0 )
774     return "";
775 
776   char *pchBuffer = new char[ unRequiredBufferLen ];
777   unRequiredBufferLen = I->System->GetStringTrackedDeviceProperty( unDevice, prop, pchBuffer, unRequiredBufferLen, peError );
778   std::string sResult = pchBuffer;
779   delete [] pchBuffer;
780   return sResult;
781 }
782 
ProcessButtonDragAsMouse(PyMOLGlobals * G,OpenVRAction * action,int glutButton,int screenCenterX,int screenCenterY)783 void ProcessButtonDragAsMouse(PyMOLGlobals * G, OpenVRAction *action, int glutButton, int screenCenterX, int screenCenterY) {
784   COpenVR *I = G->OpenVR;
785   if (!action || !I) return;
786 
787   OpenVRInputHandlers* Handlers = I->Handlers;
788   if (!Handlers)
789    return;
790 
791   // imitate mouse cursor position from controller camera position
792   float (*mat)[4] = (float (*)[4])I->Hands[HRight].GetPose();
793   int x = (int)(mat[3][0]* 500.0f), y = (int)(mat[3][1] * 500.0f); // magic factors
794 
795   bool nowPressed = action->IsPressed();
796   if (action->WasPressedOrReleased()) {
797     if (nowPressed) {
798       I->startX = x;
799       I->startY = y;
800       Handlers->MouseFunc(glutButton, P_GLUT_DOWN, screenCenterX, screenCenterY, 0);
801     } else {
802       Handlers->MouseFunc(glutButton, P_GLUT_UP, I->deltaX + screenCenterX, I->deltaY + screenCenterY, 0);
803     }
804   }
805   if (nowPressed) {
806     I->deltaX = x - I->startX;
807     I->deltaY = y - I->startY;
808     Handlers->MotionFunc(I->deltaX + screenCenterX, I->deltaY + screenCenterY, 0);
809   }
810 }
811 
OpenVRIsMoleculeCaptured(PyMOLGlobals * G)812 bool OpenVRIsMoleculeCaptured(PyMOLGlobals * G) {
813   COpenVR *I = G->OpenVR;
814   return I->Hands[HLeft].isGripPressed() || I->Hands[HRight].isGripPressed();
815 }
816 
CalculateScalingPivotToWorldMatrix(PyMOLGlobals * G,float * pivotToWorldMatrix)817 void CalculateScalingPivotToWorldMatrix(PyMOLGlobals * G, float *pivotToWorldMatrix) {
818   COpenVR *I = G->OpenVR;
819   if (!I || !pivotToWorldMatrix)
820     return;
821 
822   identity44f(pivotToWorldMatrix);
823   float *lPose = I->Hands[HLeft].GetPose();
824   float *rPose = I->Hands[HRight].GetPose();
825   // get center traslation matrix
826   average3f(&lPose[12], &rPose[12], &pivotToWorldMatrix[12]);
827 }
828 
OpenVRGetMolecule2WorldMatrix(PyMOLGlobals * G,float * scaler)829 float const *OpenVRGetMolecule2WorldMatrix(PyMOLGlobals * G, float *scaler) {
830   COpenVR *I = G->OpenVR;
831   if (!I)
832     return NULL;
833 
834   float temp[16];
835   OpenVRActionList* Actions = I->Actions;
836   if (Actions->LGrip->IsPressed() && Actions->RGrip->IsPressed()) {
837     // translate after the scaling pivot
838     float pivotToWorldMatrix[16];
839     CalculateScalingPivotToWorldMatrix(G, pivotToWorldMatrix);
840     memcpy(temp, pivotToWorldMatrix, sizeof(temp));
841     // scale due to changing distance between controllers
842     if (scaler) {
843       float newDistance = diff3f(&(I->Hands[HLeft].GetPose()[12]), &(I->Hands[HRight].GetPose()[12]));
844       *scaler = newDistance / I->controllersDistance;
845       I->controllersDistance = newDistance;
846     }
847   } else {
848     memcpy(temp, I->Hands[I->capturingHandIdx].GetPose(), sizeof(temp));
849     *scaler = 1.0f;
850   }
851   MatrixMultiplyC44f(I->moleculeToCapturingController, temp);
852   memcpy(I->moleculeToWorldMatrix, temp, sizeof(I->moleculeToWorldMatrix));
853   return I->moleculeToWorldMatrix;
854 }
855 
AttachMoleculeToController(PyMOLGlobals * G,int handIdx)856 void AttachMoleculeToController(PyMOLGlobals * G, int handIdx) {
857   COpenVR *I = G->OpenVR;
858   I->capturingHandIdx = handIdx;
859   // catch up
860   memcpy(I->moleculeToCapturingController, I->Hands[handIdx].GetWorldToControllerMatrix(), sizeof(I->moleculeToCapturingController));
861   MatrixMultiplyC44f(I->moleculeToWorldMatrix, I->moleculeToCapturingController);
862 }
863 
AttachMoleculeToCenter(PyMOLGlobals * G)864 void AttachMoleculeToCenter(PyMOLGlobals * G) {
865   COpenVR *I = G->OpenVR;
866 
867   // get scaling pivot center = controllers mass center
868   float worldToPivotMatrix[16];
869   CalculateScalingPivotToWorldMatrix(G, worldToPivotMatrix);
870   // inverse transform to be exactly WorldToPivot
871   worldToPivotMatrix[12] *= -1.0;
872   worldToPivotMatrix[13] *= -1.0;
873   worldToPivotMatrix[14] *= -1.0;
874   // attach molecule to pivot
875   memcpy(I->moleculeToCapturingController, worldToPivotMatrix, sizeof(I->moleculeToCapturingController));
876   MatrixMultiplyC44f(I->moleculeToWorldMatrix, I->moleculeToCapturingController);
877   // save distance between controllers
878   I->controllersDistance = diff3f(&(I->Hands[HLeft].GetPose()[12]), &(I->Hands[HRight].GetPose()[12]));
879 }
880 
881 void HandleLaser(PyMOLGlobals * G, int centerX, int centerY, CMouseEvent const& mouseEvent);
882 
OpenVRHandleInput(PyMOLGlobals * G,int SceneX,int SceneY,int SceneWidth,int SceneHeight,float * model2World)883 void OpenVRHandleInput(PyMOLGlobals * G, int SceneX, int SceneY, int SceneWidth, int SceneHeight, float *model2World)
884 {
885   COpenVR *I = G->OpenVR;
886   if(!OpenVRReady(G))
887     return;
888 
889   vr::VREvent_t event;
890   while (I->System->PollNextEvent(&event, sizeof(event)))
891     /* pass */;
892 
893   if (!I->Input)
894     return;
895 
896   OpenVRActionList* Actions = I->Actions;
897   Actions->Update(I->Input);
898 
899   OpenVRController& LeftHand = I->Hands[HLeft];
900   OpenVRController& RightHand = I->Hands[HRight];
901   int centerX = SceneX + SceneWidth / 2;
902   int centerY = SceneY + SceneHeight / 2;
903 
904   // update VR GUI state
905   if (Actions->ToggleMenu->WasPressed()) {
906     OpenVRMenuToggle(G);
907   }
908 
909   // update controllers visibility
910   LeftHand.Show(Actions->LeftHand->PoseValid());
911   RightHand.Show(Actions->RightHand->PoseValid());
912 
913   // process grips
914   {
915     I->Hands[HLeft].pressGrip(Actions->LGrip->IsPressed());
916     I->Hands[HRight].pressGrip(Actions->RGrip->IsPressed());
917 
918     if (OpenVRIsMoleculeCaptured(G)) {
919       memcpy(I->moleculeToWorldMatrix, model2World, sizeof(I->moleculeToWorldMatrix));
920     }
921     if (Actions->LGrip->WasPressed() && !Actions->RGrip->IsPressed()) {
922       AttachMoleculeToController(G, HLeft);
923     }
924     if (Actions->RGrip->WasPressed() && !Actions->LGrip->IsPressed()) {
925       AttachMoleculeToController(G, HRight);
926     }
927     // TODO make it being less ugly
928     if ((Actions->RGrip->WasPressed() && Actions->LGrip->IsPressed()) ||
929        (Actions->LGrip->WasPressed() && Actions->RGrip->IsPressed())) {
930       AttachMoleculeToCenter(G);
931     }
932     if (Actions->LGrip->WasReleased() && Actions->RGrip->IsPressed()) {
933       AttachMoleculeToController(G, HRight);
934     }
935     if (Actions->RGrip->WasReleased() && Actions->LGrip->IsPressed()) {
936       AttachMoleculeToController(G, HLeft);
937     }
938     if ((Actions->LGrip->WasReleased() && !Actions->RGrip->IsPressed()) ||
939         (Actions->RGrip->WasReleased() && !Actions->LGrip->IsPressed())) {
940       I->capturingHandIdx = -1;
941     }
942   }
943 
944   // switch user action sets
945   {
946     int increment = 0;
947     unsigned deviceIndex = 0;
948 
949     if (Actions->ActionSetNext->WasPressed()) {
950       deviceIndex = Actions->ActionSetNext->DeviceIndex();
951       increment = +1;
952     } else if (Actions->ActionSetPrev->WasPressed()) {
953       deviceIndex = Actions->ActionSetPrev->DeviceIndex();
954       increment = -1;
955     }
956 
957     if (increment) {
958       EHand handIndex = EHand(deviceIndex == RightHand.m_deviceIndex);
959       I->UserActionSet[handIndex] = EUserActionSet((I->UserActionSet[handIndex] + UserActionSet_Count + increment) % UserActionSet_Count);
960       I->Hands[handIndex].SetHintsIndex(I->UserActionSet[handIndex]);
961     }
962   }
963 
964   // process user actions
965   CMouseEvent mouseEvent;
966   OpenVRAction* userActions[] = {Actions->Action1, Actions->Action2, Actions->Action3};
967   for (int i = 0, n = sizeof(userActions) / sizeof(*userActions); i < n; ++i) {
968     OpenVRAction* action = userActions[i];
969     if (action->WasPressedOrReleased()) {
970       EHand handIndex = EHand(action->DeviceIndex() == RightHand.m_deviceIndex);
971       EUserActionSet userActionSet = I->UserActionSet[handIndex];
972       EUserAction userAction = s_userActionMapping[userActionSet][i];
973 
974       switch (userAction) {
975       case UserAction_Mouse_LClick:
976         mouseEvent = CMouseEvent(action->DeviceIndex(), P_GLUT_LEFT_BUTTON, action->IsPressed());
977         break;
978       case UserAction_Mouse_MClick:
979         mouseEvent = CMouseEvent(action->DeviceIndex(), P_GLUT_MIDDLE_BUTTON, action->IsPressed());
980         break;
981       case UserAction_Mouse_RClick:
982         mouseEvent = CMouseEvent(action->DeviceIndex(), P_GLUT_RIGHT_BUTTON, action->IsPressed());
983         break;
984       case UserAction_Scene_Prev:
985         if (action->IsPressed())
986           I->Handlers->ActionFunc(cAction_scene_prev);
987         break;
988       case UserAction_Scene_Next:
989         if (action->IsPressed())
990           I->Handlers->ActionFunc(cAction_scene_next);
991         break;
992       case UserAction_Movie_Prev:
993         if (action->IsPressed())
994           I->Handlers->ActionFunc(cAction_movie_prev);
995         break;
996       case UserAction_Movie_Toggle:
997         if (action->IsPressed())
998           I->Handlers->ActionFunc(cAction_movie_toggle);
999         break;
1000       case UserAction_Movie_Next:
1001         if (action->IsPressed())
1002           I->Handlers->ActionFunc(cAction_movie_next);
1003         break;
1004       }
1005     }
1006   }
1007 
1008   HandleLaser(G, centerX, centerY, mouseEvent);
1009 }
1010 
HandleLaser(PyMOLGlobals * G,int centerX,int centerY,CMouseEvent const & mouseEvent)1011 void HandleLaser(PyMOLGlobals * G, int centerX, int centerY, CMouseEvent const& mouseEvent)
1012 {
1013   COpenVR* I = G->OpenVR;
1014   OpenVRActionList* Actions = I->Actions;
1015 
1016   // hide all lasers
1017   I->Menu.HideHotspot();
1018   for (size_t laserIndex = 0; laserIndex < sizeof(I->Hands) / sizeof(I->Hands[0]); ++laserIndex) {
1019     I->Hands[laserIndex].LaserShow(false);
1020   }
1021 
1022   // detect a laser source
1023   OpenVRLaserSource* laserSource = 0;
1024   if (Actions->Laser->IsPressed()) {
1025     for (size_t laserIndex = 0; laserIndex < sizeof(I->Hands) / sizeof(I->Hands[0]); ++laserIndex) {
1026       if (Actions->Laser->DeviceIndex() == I->Hands[laserIndex].GetLaserDeviceIndex() && I->UserActionSet[laserIndex] == UserActionSet_Mouse) {
1027         laserSource = &I->Hands[laserIndex];
1028         break;
1029       }
1030     }
1031   }
1032 
1033   bool menuHit = false;
1034 
1035   if (laserSource) {
1036     I->Picker.Activate(laserSource->GetLaserDeviceIndex(), centerX, centerY);
1037 
1038     float origin[3], dir[3];
1039     laserSource->LaserShow(true);
1040     laserSource->GetLaserRay(origin, dir);
1041 
1042     // shoot the laser
1043     OpenVRLaserTarget* laserTarget = 0;
1044     OpenVRLaserTarget* targets[] = {&I->Menu, &I->Picker};
1045     for (int i = 0, n = sizeof(targets) / sizeof(targets[0]); i < n && !laserTarget; ++i) {
1046       float distance = 0.0f;
1047       if (targets[i]->IsLaserAllowed(laserSource->GetLaserDeviceIndex()) &&
1048           targets[i]->LaserShoot(origin, dir, targets[i]->GetLaserColor(), &distance)) {
1049         laserTarget = targets[i];
1050         laserSource->SetLaserLength(distance);
1051         laserSource->SetLaserColor(laserTarget->GetLaserColor());
1052         if (laserTarget == &I->Menu)
1053           menuHit = true;
1054       }
1055     }
1056 
1057     // laser missed
1058     float missedColor[4] = {1.0f, 1.0f, 0.0f, 0.5f};
1059     if (!laserTarget) {
1060       laserTarget = &I->Picker;
1061       if (!SettingGetGlobal_b(G, cSetting_openvr_cut_laser)) {
1062         laserSource->SetLaserLength(0.0f);
1063       }
1064       laserSource->SetLaserColor(missedColor);
1065     }
1066 
1067     if (mouseEvent.deviceIndex == laserSource->GetLaserDeviceIndex()) {
1068       laserTarget->LaserClick(mouseEvent.button, mouseEvent.state);
1069     }
1070 
1071   } else {
1072     I->Picker.Deactivate();
1073   }
1074 
1075   float alpha = SettingGetGlobal_f(G, cSetting_openvr_gui_alpha);
1076   int useAlpha = SettingGetGlobal_i(G, cSetting_openvr_gui_use_alpha);
1077   I->Menu.SetAlpha(useAlpha == 0 || useAlpha == 2 && menuHit ? 1.0f : alpha);
1078 
1079   int useBackdrop = SettingGetGlobal_i(G, cSetting_openvr_gui_use_backdrop);
1080   I->Menu.SetBackdrop(useBackdrop == 1 || useBackdrop == 2 && menuHit ? true : false);
1081 
1082   int overlay = SettingGetGlobal_i(G, cSetting_openvr_gui_overlay);
1083   I->Menu.SetOverlay(overlay == 1 || overlay == 2 && menuHit ? true : false);
1084 }
1085 
UpdateDevicePoses(PyMOLGlobals * G)1086 void UpdateDevicePoses(PyMOLGlobals * G) {
1087   COpenVR *I = G->OpenVR;
1088 
1089   for (uint32_t nDevice = 0; nDevice < vr::k_unMaxTrackedDeviceCount; nDevice++) {
1090     vr::TrackedDevicePose_t &pose = I->Poses[nDevice];
1091     if (pose.bPoseIsValid) {
1092       vr::ETrackedDeviceClass device = I->System->GetTrackedDeviceClass(nDevice);
1093       switch (device) {
1094         case vr::TrackedDeviceClass_HMD:
1095           OpenVRUtils::MatrixCopyVRGL((const float *)pose.mDeviceToAbsoluteTracking.m, I->HeadPose);
1096           OpenVRUtils::MatrixFastInverseVRGL((const float *)pose.mDeviceToAbsoluteTracking.m, I->WorldToHeadMatrix);
1097           break;
1098         case vr::TrackedDeviceClass_Controller:
1099           {
1100             vr::ETrackedControllerRole role = I->System->GetControllerRoleForTrackedDeviceIndex(nDevice);
1101 
1102             OpenVRController* hand = 0;
1103             if (role == vr::TrackedControllerRole_LeftHand) {
1104               hand = &I->Hands[HLeft];
1105             } else if (role == vr::TrackedControllerRole_RightHand) {
1106               hand = &I->Hands[HRight];
1107             }
1108 
1109             if (hand) {
1110               OpenVRUtils::MatrixCopyVRGL((const float *)pose.mDeviceToAbsoluteTracking.m, (float *)hand->GetPose());
1111               OpenVRUtils::MatrixFastInverseVRGL((const float *)pose.mDeviceToAbsoluteTracking.m, (float *)hand->GetWorldToControllerMatrix());
1112               hand->m_deviceIndex = nDevice;
1113               std::string sRenderModelName = GetTrackedDeviceString(G, nDevice, vr::Prop_RenderModelName_String);
1114               if (sRenderModelName != hand->m_sRenderModelName) {
1115                 hand->m_pRenderModel = FindOrLoadRenderModel(G, sRenderModelName.c_str());
1116                 hand->m_sRenderModelName = sRenderModelName;
1117               }
1118             }
1119           }
1120           break;
1121         default:
1122           break;
1123       }
1124     }
1125   }
1126 }
1127 
OpenVRDraw(PyMOLGlobals * G)1128 void OpenVRDraw(PyMOLGlobals * G)
1129 {
1130   COpenVR *I = G->OpenVR;
1131   if(!OpenVRReady(G))
1132     return;
1133 
1134   GL_DEBUG_FUN();
1135 
1136   glPushMatrix();
1137   OpenVRLoadWorld2EyeMatrix(G);
1138 
1139   // render menu if present
1140   I->Menu.Draw(I->MenuSplashTexture);
1141 
1142   // render controllers
1143   for (int i = HLeft; i <= HRight; ++i) {
1144     I->Hands[i].Draw();
1145   }
1146 
1147   glPopMatrix();
1148 }
1149 
OpenVRClippingChanged(PyMOLGlobals * G)1150 void OpenVRClippingChanged(PyMOLGlobals * G) {
1151   static bool s_oldDepthCue = true;
1152   bool clipping = SettingGetGlobal_b(G, cSetting_openvr_disable_clipping);
1153   if (clipping) {
1154     s_oldDepthCue = SettingGetGlobal_b(G, cSetting_depth_cue);
1155     SettingSetGlobal_b(G, cSetting_depth_cue, false);
1156   } else {
1157     SettingSetGlobal_b(G, cSetting_depth_cue, s_oldDepthCue);
1158   }
1159 }
1160 
OpenVRLaserWidthChanged(PyMOLGlobals * G)1161 void OpenVRLaserWidthChanged(PyMOLGlobals * G) {
1162   COpenVR *I = G->OpenVR;
1163   float width = SettingGetGlobal_f(G, cSetting_openvr_laser_width);
1164   for (int i = HLeft; i <= HRight; ++i) {
1165     OpenVRController &hand = I->Hands[i];
1166     hand.SetLaserWidth(width);
1167   }
1168 }
1169 
OpenVRUpdateScenePickerLength(PyMOLGlobals * G,float * PickWorldPoint)1170 void OpenVRUpdateScenePickerLength(PyMOLGlobals * G, float *PickWorldPoint)
1171 {
1172   COpenVR *I = G->OpenVR;
1173   if(!OpenVRReady(G))
1174     return;
1175 
1176   // get active ray
1177   for (int i = HLeft; i <= HRight; ++i) {
1178     OpenVRController &hand = I->Hands[i];
1179     if (hand.IsLaserVisible()) {
1180       // get ray start point in world
1181       float laserStartPointWorld[3];
1182       if (hand.GetLaserRay(laserStartPointWorld, NULL)) {
1183         // calc distance to pointed atom in world CS
1184         float dist = diff3f(laserStartPointWorld, PickWorldPoint);
1185         // set new ray length
1186         hand.SetLaserLength(dist);
1187       }
1188     }
1189   }
1190 }
1191 
OpenVRIsScenePickerActive(PyMOLGlobals * G)1192 bool OpenVRIsScenePickerActive(PyMOLGlobals * G) {
1193   COpenVR *I = G->OpenVR;
1194   return (I && I->Picker.IsActive());
1195 }
1196