1 // Copyright (c) 2017 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #include "device/vr/openvr/openvr_render_loop.h"
6 
7 #include "base/trace_event/trace_event.h"
8 #include "device/vr/openvr/openvr_api_wrapper.h"
9 #include "device/vr/openvr/openvr_gamepad_helper.h"
10 #include "device/vr/openvr/openvr_type_converters.h"
11 #include "ui/gfx/geometry/angle_conversions.h"
12 #include "ui/gfx/transform.h"
13 
14 #if defined(OS_WIN)
15 #include "device/vr/windows/d3d11_texture_helper.h"
16 #endif
17 
18 namespace device {
19 
20 namespace {
21 
// OpenVR reports the controllers pose of the controller's tip, while WebXR
// needs to report the pose of the controller's grip (centered on the user's
// palm.) This experimentally determined value is how far back along the Z axis
// in meters OpenVR's pose needs to be translated to align with WebXR's
// coordinate system.
constexpr float kGripOffsetZMeters = 0.08f;

// WebXR reports a pointer pose separate from the grip pose, which represents a
// pointer ray emerging from the tip of the controller. OpenVR does not report
// anything like that, and most pointers are assumed to come straight from the
// controller's tip. For consistency with other WebXR backends we'll synthesize
// a pointer ray that's angled down slightly from the controller's handle,
// defined by this angle. Experimentally determined, should roughly point in the
// same direction as a user's outstretched index finger while holding a
// controller.
constexpr float kPointerErgoAngleDegrees = -40.0f;
38 
HmdMatrix34ToTransform(const vr::HmdMatrix34_t & mat)39 gfx::Transform HmdMatrix34ToTransform(const vr::HmdMatrix34_t& mat) {
40   return gfx::Transform(mat.m[0][0], mat.m[0][1], mat.m[0][2], mat.m[0][3],
41                         mat.m[1][0], mat.m[1][1], mat.m[1][2], mat.m[1][3],
42                         mat.m[2][0], mat.m[2][1], mat.m[2][2], mat.m[2][3], 0,
43                         0, 0, 1);
44 }
45 
ConvertToMojoHandedness(vr::ETrackedControllerRole controller_role)46 device::mojom::XRHandedness ConvertToMojoHandedness(
47     vr::ETrackedControllerRole controller_role) {
48   switch (controller_role) {
49     case vr::TrackedControllerRole_LeftHand:
50       return device::mojom::XRHandedness::LEFT;
51     case vr::TrackedControllerRole_RightHand:
52       return device::mojom::XRHandedness::RIGHT;
53     case vr::TrackedControllerRole_Invalid:
54     case vr::TrackedControllerRole_OptOut:
55     case vr::TrackedControllerRole_Treadmill:
56     case vr::TrackedControllerRole_Max:
57       return device::mojom::XRHandedness::NONE;
58   }
59 
60   NOTREACHED();
61 }
62 
63 }  // namespace
64 
// Resets this slot to its disconnected state so that a later reconnect of the
// same tracked-device index is treated as a newly active controller. Note
// that |profiles| is intentionally left untouched; it is refreshed when the
// device becomes active again.
void OpenVRRenderLoop::InputActiveState::MarkAsInactive() {
  active = false;
  primary_input_pressed = false;
  device_class = vr::TrackedDeviceClass_Invalid;
  controller_role = vr::TrackedControllerRole_Invalid;
}
71 
OpenVRRenderLoop()72 OpenVRRenderLoop::OpenVRRenderLoop() : XRCompositorCommon() {}
73 
// Calls Stop() so the compositor is shut down before the members it relies on
// are destroyed.
OpenVRRenderLoop::~OpenVRRenderLoop() {
  Stop();
}
77 
// Runs before each frame is composited: ensures a back-buffer texture is
// available to render into. Always reports success.
bool OpenVRRenderLoop::PreComposite() {
  texture_helper_.AllocateBackBuffer();
  return true;
}
82 
SubmitCompositedFrame()83 bool OpenVRRenderLoop::SubmitCompositedFrame() {
84   DCHECK(openvr_);
85   vr::IVRCompositor* vr_compositor = openvr_->GetCompositor();
86   DCHECK(vr_compositor);
87   if (!vr_compositor)
88     return false;
89 
90   vr::Texture_t texture;
91   texture.handle = texture_helper_.GetBackbuffer().Get();
92   texture.eType = vr::TextureType_DirectX;
93   texture.eColorSpace = vr::ColorSpace_Auto;
94 
95   gfx::RectF left_bounds = texture_helper_.BackBufferLeft();
96   gfx::RectF right_bounds = texture_helper_.BackBufferRight();
97 
98   vr::VRTextureBounds_t bounds[2];
99   bounds[0] = {left_bounds.x(), left_bounds.y(),
100                left_bounds.width() + left_bounds.x(),
101                left_bounds.height() + left_bounds.y()};
102   bounds[1] = {right_bounds.x(), right_bounds.y(),
103                right_bounds.width() + right_bounds.x(),
104                right_bounds.height() + right_bounds.y()};
105 
106   vr::EVRCompositorError error =
107       vr_compositor->Submit(vr::EVREye::Eye_Left, &texture, &bounds[0]);
108   if (error != vr::VRCompositorError_None) {
109     return false;
110   }
111   error = vr_compositor->Submit(vr::EVREye::Eye_Right, &texture, &bounds[1]);
112   if (error != vr::VRCompositorError_None) {
113     return false;
114   }
115   vr_compositor->PostPresentHandoff();
116   return true;
117 }
118 
StartRuntime()119 bool OpenVRRenderLoop::StartRuntime() {
120   if (!openvr_) {
121     openvr_ = std::make_unique<OpenVRWrapper>(true);
122     if (!openvr_->IsInitialized()) {
123       openvr_ = nullptr;
124       return false;
125     }
126 
127     openvr_->GetCompositor()->SuspendRendering(true);
128     openvr_->GetCompositor()->SetTrackingSpace(
129         vr::ETrackingUniverseOrigin::TrackingUniverseSeated);
130   }
131 
132 #if defined(OS_WIN)
133   int32_t adapter_index;
134   openvr_->GetSystem()->GetDXGIOutputInfo(&adapter_index);
135   if (!texture_helper_.SetAdapterIndex(adapter_index) ||
136       !texture_helper_.EnsureInitialized()) {
137     openvr_ = nullptr;
138     return false;
139   }
140 #endif
141 
142   uint32_t width, height;
143   openvr_->GetSystem()->GetRecommendedRenderTargetSize(&width, &height);
144   texture_helper_.SetDefaultSize(gfx::Size(width, height));
145 
146   return true;
147 }
148 
// Tears down the OpenVR runtime. Rendering is suspended first so the
// compositor stops presenting before the wrapper is destroyed.
void OpenVRRenderLoop::StopRuntime() {
  if (openvr_)
    openvr_->GetCompositor()->SuspendRendering(true);
  openvr_ = nullptr;
}
154 
OnSessionStart()155 void OpenVRRenderLoop::OnSessionStart() {
156   // Reset the active states for all the controllers.
157   for (uint32_t i = 0; i < vr::k_unMaxTrackedDeviceCount; ++i) {
158     InputActiveState& input_active_state = input_active_states_[i];
159     input_active_state.active = false;
160     input_active_state.primary_input_pressed = false;
161     input_active_state.device_class = vr::TrackedDeviceClass_Invalid;
162     input_active_state.controller_role = vr::TrackedControllerRole_Invalid;
163   }
164 
165   openvr_->GetCompositor()->SuspendRendering(false);
166 
167   // Measure the VrViewerType we are presenting with.
168   std::string model =
169       GetOpenVRString(openvr_->GetSystem(), vr::Prop_ModelNumber_String);
170   VrViewerType type = VrViewerType::OPENVR_UNKNOWN;
171   if (model == "Oculus Rift CV1")
172     type = VrViewerType::OPENVR_RIFT_CV1;
173   else if (model == "Vive MV")
174     type = VrViewerType::OPENVR_VIVE;
175 
176   LogViewerType(type);
177 }
178 
GetNextFrameData()179 mojom::XRFrameDataPtr OpenVRRenderLoop::GetNextFrameData() {
180   mojom::XRFrameDataPtr frame_data = mojom::XRFrameData::New();
181   frame_data->frame_id = next_frame_id_;
182 
183   if (openvr_) {
184     vr::TrackedDevicePose_t rendering_poses[vr::k_unMaxTrackedDeviceCount];
185 
186     TRACE_EVENT0("gpu", "WaitGetPoses");
187     openvr_->GetCompositor()->WaitGetPoses(
188         rendering_poses, vr::k_unMaxTrackedDeviceCount, nullptr, 0);
189 
190     frame_data->pose = mojo::ConvertTo<mojom::VRPosePtr>(
191         rendering_poses[vr::k_unTrackedDeviceIndex_Hmd]);
192 
193     // Update WebXR input sources.
194     frame_data->input_state =
195         GetInputState(rendering_poses, vr::k_unMaxTrackedDeviceCount);
196 
197     vr::Compositor_FrameTiming timing;
198     timing.m_nSize = sizeof(vr::Compositor_FrameTiming);
199     bool valid_time = openvr_->GetCompositor()->GetFrameTiming(&timing);
200     if (valid_time) {
201       frame_data->time_delta =
202           base::TimeDelta::FromSecondsD(timing.m_flSystemTimeInSeconds);
203     }
204   }
205 
206   return frame_data;
207 }
208 
// Builds the per-frame list of WebXR input source states from OpenVR's
// tracked-device poses. |poses| must hold |count| entries indexed by
// tracked-device index. Also updates |input_active_states_| so connects,
// disconnects, hand swaps, and profile changes are detected across frames.
std::vector<mojom::XRInputSourceStatePtr> OpenVRRenderLoop::GetInputState(
    vr::TrackedDevicePose_t* poses,
    uint32_t count) {
  std::vector<mojom::XRInputSourceStatePtr> input_states;

  if (!openvr_)
    return input_states;

  // Loop through every device pose and determine which are controllers.
  // Index 0 is the HMD itself, so start just past it.
  for (uint32_t i = vr::k_unTrackedDeviceIndex_Hmd + 1; i < count; ++i) {
    const vr::TrackedDevicePose_t& pose = poses[i];
    InputActiveState& input_active_state = input_active_states_[i];

    if (!pose.bDeviceIsConnected) {
      // If this was an active controller on the last frame report it as
      // disconnected.
      if (input_active_state.active)
        input_active_state.MarkAsInactive();
      continue;
    }

    // Is this a newly connected controller?
    bool newly_active = false;
    if (!input_active_state.active) {
      input_active_state.active = true;
      // The device class is queried once per connection; it is not polled
      // again while the device remains connected.
      input_active_state.device_class =
          openvr_->GetSystem()->GetTrackedDeviceClass(i);
      newly_active = true;
    }

    // Skip over any tracked devices that aren't controllers.
    if (input_active_state.device_class != vr::TrackedDeviceClass_Controller) {
      continue;
    }

    device::mojom::XRInputSourceStatePtr state =
        device::mojom::XRInputSourceState::New();

    vr::VRControllerState_t controller_state;
    bool have_state = openvr_->GetSystem()->GetControllerState(
        i, &controller_state, sizeof(vr::VRControllerState_t));
    if (!have_state) {
      // Treat a failed state query like a disconnect; the device will be
      // treated as newly active once state becomes available again.
      input_active_state.MarkAsInactive();
      continue;
    }

    // The SteamVR trigger serves as the primary input.
    bool pressed = controller_state.ulButtonPressed &
                   vr::ButtonMaskFromId(vr::k_EButton_SteamVR_Trigger);

    state->source_id = i;
    state->primary_input_pressed = pressed;
    // A "click" is reported on the frame the trigger transitions from
    // pressed to released.
    state->primary_input_clicked =
        (!pressed && input_active_state.primary_input_pressed);

    input_active_state.primary_input_pressed = pressed;

    if (pose.bPoseIsValid) {
      state->mojo_from_input =
          HmdMatrix34ToTransform(pose.mDeviceToAbsoluteTracking);
      // Scoot the grip matrix back a bit so that it actually lines up with the
      // user's palm.
      state->mojo_from_input->Translate3d(0, 0, kGripOffsetZMeters);
    }

    // Poll controller roll per-frame, since OpenVR controllers can swap hands.
    vr::ETrackedControllerRole controller_role =
        openvr_->GetSystem()->GetControllerRoleForTrackedDeviceIndex(i);

    device::mojom::XRHandedness handedness =
        ConvertToMojoHandedness(controller_role);

    OpenVRInputSourceData input_source_data =
        OpenVRGamepadHelper::GetXRInputSourceData(openvr_->GetSystem(), i,
                                                  controller_state, handedness);
    state->gamepad = input_source_data.gamepad;

    // OpenVR controller are fully 6DoF.
    state->emulated_position = false;

    // Re-send the controller's description if it's newly active or if the
    // handedness or profile strings have changed.
    if (newly_active ||
        (controller_role != input_active_state.controller_role) ||
        (input_source_data.profiles != input_active_state.profiles)) {
      device::mojom::XRInputSourceDescriptionPtr desc =
          device::mojom::XRInputSourceDescription::New();

      // It's a handheld pointing device.
      desc->target_ray_mode = device::mojom::XRTargetRayMode::POINTING;

      desc->handedness = handedness;
      input_active_state.controller_role = controller_role;

      // Tweak the pointer transform so that it's angled down from the
      // grip. This should be a bit more ergonomic.
      desc->input_from_pointer = gfx::Transform();
      desc->input_from_pointer->RotateAboutXAxis(kPointerErgoAngleDegrees);

      desc->profiles = input_source_data.profiles;

      state->description = std::move(desc);

      // Keep track of the current profiles so we know if it changes next frame.
      input_active_state.profiles = input_source_data.profiles;
    }

    input_states.push_back(std::move(state));
  }

  return input_states;
}
320 
// No custom construction or destruction logic is required for
// InputActiveState.
OpenVRRenderLoop::InputActiveState::InputActiveState() = default;
OpenVRRenderLoop::InputActiveState::~InputActiveState() = default;
323 
324 }  // namespace device
325