1 // Copyright (c) 2019 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "device/vr/windows_mixed_reality/mixed_reality_renderloop.h"
6
7 #include <Windows.Graphics.DirectX.Direct3D11.interop.h>
8 #include <windows.perception.spatial.h>
9
10 #include <algorithm>
11 #include <limits>
12 #include <utility>
13
14 #include "base/strings/string_util.h"
15 #include "base/strings/utf_string_conversions.h"
16 #include "base/time/time.h"
17 #include "base/trace_event/trace_event.h"
18 #include "base/win/com_init_util.h"
19 #include "base/win/core_winrt_util.h"
20 #include "base/win/scoped_co_mem.h"
21 #include "base/win/scoped_hstring.h"
22 #include "device/vr/test/test_hook.h"
23 #include "device/vr/util/transform_utils.h"
24 #include "device/vr/windows/d3d11_texture_helper.h"
25 #include "device/vr/windows_mixed_reality/mixed_reality_input_helper.h"
26 #include "device/vr/windows_mixed_reality/mixed_reality_statics.h"
27 #include "device/vr/windows_mixed_reality/wrappers/wmr_holographic_frame.h"
28 #include "device/vr/windows_mixed_reality/wrappers/wmr_holographic_space.h"
29 #include "device/vr/windows_mixed_reality/wrappers/wmr_logging.h"
30 #include "device/vr/windows_mixed_reality/wrappers/wmr_origins.h"
31 #include "device/vr/windows_mixed_reality/wrappers/wmr_rendering.h"
32 #include "device/vr/windows_mixed_reality/wrappers/wmr_timestamp.h"
33 #include "device/vr/windows_mixed_reality/wrappers/wmr_wrapper_factories.h"
34 #include "ui/gfx/geometry/angle_conversions.h"
35 #include "ui/gfx/geometry/vector3d_f.h"
36 #include "ui/gfx/transform.h"
37 #include "ui/gfx/transform_util.h"
38
39 namespace device {
40
41 namespace WFN = ABI::Windows::Foundation::Numerics;
42 using SpatialMovementRange =
43 ABI::Windows::Perception::Spatial::SpatialMovementRange;
44 using ABI::Windows::Foundation::DateTime;
45 using ABI::Windows::Foundation::TimeSpan;
46 using ABI::Windows::Foundation::Numerics::Matrix4x4;
47 using HolographicSpaceUserPresence =
48 ABI::Windows::Graphics::Holographic::HolographicSpaceUserPresence;
49 using ABI::Windows::Graphics::Holographic::HolographicStereoTransform;
50 using Microsoft::WRL::ComPtr;
51
// A minimal window whose only purpose is to provide an HWND from which an
// IHolographicSpace can be created (see InitializeSpace below).
class MixedRealityWindow : public gfx::WindowImpl {
 public:
  // |on_destroyed| is run once, when the window receives WM_DESTROY.
  explicit MixedRealityWindow(base::OnceCallback<void()> on_destroyed)
      : gfx::WindowImpl(), on_destroyed_(std::move(on_destroyed)) {
    set_window_style(WS_OVERLAPPED);
  }

  BOOL ProcessWindowMessage(HWND window,
                            UINT message,
                            WPARAM w_param,
                            LPARAM l_param,
                            LRESULT& result,
                            DWORD msg_map_id) override;

 private:
  // Fired on WM_DESTROY; see ProcessWindowMessage.
  base::OnceCallback<void()> on_destroyed_;
};
69
BOOL MixedRealityWindow::ProcessWindowMessage(HWND window,
                                              UINT message,
                                              WPARAM w_param,
                                              LPARAM l_param,
                                              LRESULT& result,
                                              DWORD msg_map_id) {
  if (message == WM_DESTROY) {
    // Notify the owner that the window is going away. Despite handling
    // WM_DESTROY, we still return false so the base class can also process
    // this message.
    std::move(on_destroyed_).Run();
  }
  return false;  // Base class should handle all messages.
}
83
84 namespace {
// Converts a WinRT WFN Matrix4x4 into a gfx::Transform. Note the arguments
// are passed in transposed order (M11, M21, M31, ...) relative to the WFN
// member layout, to match the ordering gfx::Transform's 16-value constructor
// expects.
gfx::Transform ConvertToGfxTransform(const Matrix4x4& matrix) {
  // clang-format off
  return gfx::Transform(
      matrix.M11, matrix.M21, matrix.M31, matrix.M41,
      matrix.M12, matrix.M22, matrix.M32, matrix.M42,
      matrix.M13, matrix.M23, matrix.M33, matrix.M43,
      matrix.M14, matrix.M24, matrix.M34, matrix.M44);
  // clang-format on
}
94
ParseProjection(const Matrix4x4 & projection)95 mojom::VRFieldOfViewPtr ParseProjection(const Matrix4x4& projection) {
96 gfx::Transform proj = ConvertToGfxTransform(projection);
97
98 gfx::Transform projInv;
99 bool invertable = proj.GetInverse(&projInv);
100 DCHECK(invertable);
101
102 // We will convert several points from projection space into view space to
103 // calculate the view frustum angles. We are assuming some common form for
104 // the projection matrix.
105 gfx::Point3F left_top_far(-1, 1, 1);
106 gfx::Point3F left_top_near(-1, 1, 0);
107 gfx::Point3F right_bottom_far(1, -1, 1);
108 gfx::Point3F right_bottom_near(1, -1, 0);
109
110 projInv.TransformPoint(&left_top_far);
111 projInv.TransformPoint(&left_top_near);
112 projInv.TransformPoint(&right_bottom_far);
113 projInv.TransformPoint(&right_bottom_near);
114
115 float left_on_far_plane = left_top_far.x();
116 float top_on_far_plane = left_top_far.y();
117 float right_on_far_plane = right_bottom_far.x();
118 float bottom_on_far_plane = right_bottom_far.y();
119 float far_plane = left_top_far.z();
120
121 mojom::VRFieldOfViewPtr field_of_view = mojom::VRFieldOfView::New();
122 field_of_view->up_degrees =
123 gfx::RadToDeg(atanf(-top_on_far_plane / far_plane));
124 field_of_view->down_degrees =
125 gfx::RadToDeg(atanf(bottom_on_far_plane / far_plane));
126 field_of_view->left_degrees =
127 gfx::RadToDeg(atanf(left_on_far_plane / far_plane));
128 field_of_view->right_degrees =
129 gfx::RadToDeg(atanf(-right_on_far_plane / far_plane));
130
131 // TODO(billorr): Expand the mojo interface to support just sending the
132 // projection matrix directly, instead of decomposing it.
133 return field_of_view;
134 }
135 } // namespace
136
// |on_display_info_changed| is invoked (via the main thread task runner)
// whenever a new VRDisplayInfo has been computed.
MixedRealityRenderLoop::MixedRealityRenderLoop(
    base::RepeatingCallback<void(mojom::VRDisplayInfoPtr)>
        on_display_info_changed)
    : XRCompositorCommon(),
      on_display_info_changed_(std::move(on_display_info_changed)) {}
142
MixedRealityRenderLoop::~MixedRealityRenderLoop() {
  // Shut the loop down before members are torn down.
  Stop();
}
146
// Returns the current world-space origin (the spatial anchor created in
// InitializeOrigin), or nullptr if it has not been established yet.
const WMRCoordinateSystem* MixedRealityRenderLoop::GetOrigin() {
  return anchor_origin_.get();
}
150
// Forwards a controller button event to the registered listener, if any.
void MixedRealityRenderLoop::OnInputSourceEvent(
    mojom::XRInputSourceStatePtr input_state) {
  if (input_event_listener_)
    input_event_listener_->OnButtonEvent(std::move(input_state));
}
156
// Binds the current frame's WMR backbuffer to the texture helper and
// overrides the viewports with the camera pose's viewport, normalized to the
// backbuffer dimensions. Returns false if the backbuffer is unavailable.
bool MixedRealityRenderLoop::PreComposite() {
  if (rendering_params_) {
    ComPtr<ID3D11Texture2D> texture =
        rendering_params_->TryGetBackbufferAsTexture2D();
    if (!texture)
      return false;

    D3D11_TEXTURE2D_DESC desc;
    texture->GetDesc(&desc);

    texture_helper_.SetBackbuffer(texture);
    // Normalize the camera viewport to [0, 1] texture coordinates.
    ABI::Windows::Foundation::Rect viewport = pose_->Viewport();
    gfx::RectF override_viewport =
        gfx::RectF(viewport.X / desc.Width, viewport.Y / desc.Height,
                   viewport.Width / desc.Width, viewport.Height / desc.Height);

    texture_helper_.OverrideViewports(override_viewport, override_viewport);
    texture_helper_.SetDefaultSize(gfx::Size(desc.Width, desc.Height));

    TRACE_EVENT_INSTANT0("xr", "PreCompositorWMR", TRACE_EVENT_SCOPE_THREAD);
  }
  return true;
}
180
// Presents the current holographic frame. Returns false if WMR rejected the
// present.
bool MixedRealityRenderLoop::SubmitCompositedFrame() {
  return holographic_frame_->TryPresentUsingCurrentPrediction();
}
184
185 namespace {
186
// Looks up |function_name| in d3d11.dll. The module handle is loaded once
// and intentionally kept for the life of the process (function-local static).
FARPROC LoadD3D11Function(const char* function_name) {
  static HMODULE const handle = ::LoadLibrary(L"d3d11.dll");
  return handle ? ::GetProcAddress(handle, function_name) : nullptr;
}
191
// Resolves CreateDirect3D11DeviceFromDXGIDevice dynamically (cached after the
// first call); returns nullptr if d3d11.dll or the export is unavailable.
decltype(&::CreateDirect3D11DeviceFromDXGIDevice)
GetCreateDirect3D11DeviceFromDXGIDeviceFunction() {
  static decltype(&::CreateDirect3D11DeviceFromDXGIDevice) const function =
      reinterpret_cast<decltype(&::CreateDirect3D11DeviceFromDXGIDevice)>(
          LoadD3D11Function("CreateDirect3D11DeviceFromDXGIDevice"));
  return function;
}
199
WrapperCreateDirect3D11DeviceFromDXGIDevice(IDXGIDevice * in,IInspectable ** out)200 HRESULT WrapperCreateDirect3D11DeviceFromDXGIDevice(IDXGIDevice* in,
201 IInspectable** out) {
202 *out = nullptr;
203 auto func = GetCreateDirect3D11DeviceFromDXGIDeviceFunction();
204 if (!func)
205 return E_FAIL;
206 return func(in, out);
207 }
208
209 } // namespace
210
// Brings up the WMR runtime on this thread: WinRT init, window + holographic
// space creation, D3D device association, and one dummy frame to seed display
// info. The call order here matters; returns false on any failure.
bool MixedRealityRenderLoop::StartRuntime() {
  // This thread talks to WinRT APIs, so keep WinRT initialized for the
  // runtime's lifetime (released in StopRuntime).
  initializer_ = std::make_unique<base::win::ScopedWinrtInitializer>();

  {
    auto hook = MixedRealityDeviceStatics::GetLockedTestHook();
    if (hook.GetHook()) {
      hook.GetHook()->AttachCurrentThread();
    }
  }

  // Creates |window_| and, from its HWND, |holographic_space_|.
  InitializeSpace();
  if (!holographic_space_)
    return false;

  // Since we explicitly null out both the holographic_space and the
  // subscription during StopRuntime (which happens before destruction),
  // base::Unretained is safe.
  user_presence_changed_subscription_ =
      holographic_space_->AddUserPresenceChangedCallback(
          base::BindRepeating(&MixedRealityRenderLoop::OnUserPresenceChanged,
                              base::Unretained(this)));
  UpdateVisibilityState();

  input_helper_ = std::make_unique<MixedRealityInputHelper>(
      window_->hwnd(), weak_ptr_factory_.GetWeakPtr());

  // Render on the same adapter the holographic space is using.
  ABI::Windows::Graphics::Holographic::HolographicAdapterId id =
      holographic_space_->PrimaryAdapterId();

  LUID adapter_luid;
  adapter_luid.HighPart = id.HighPart;
  adapter_luid.LowPart = id.LowPart;
  texture_helper_.SetUseBGRA(true);
  if (!texture_helper_.SetAdapterLUID(adapter_luid) ||
      !texture_helper_.EnsureInitialized()) {
    return false;
  }

  // Associate our holographic space with our directx device.
  ComPtr<IDXGIDevice> dxgi_device;
  HRESULT hr = texture_helper_.GetDevice().As(&dxgi_device);
  if (FAILED(hr))
    return false;

  // Wrap the DXGI device as a WinRT IDirect3DDevice (IInspectable first,
  // then QI to the interop interface).
  ComPtr<IInspectable> spInsp;
  hr = WrapperCreateDirect3D11DeviceFromDXGIDevice(dxgi_device.Get(), &spInsp);
  if (FAILED(hr))
    return false;

  ComPtr<ABI::Windows::Graphics::DirectX::Direct3D11::IDirect3DDevice> device;
  hr = spInsp.As(&device);
  if (FAILED(hr))
    return false;

  if (!holographic_space_->TrySetDirect3D11Device(device))
    return false;

  // Go through one initial dummy frame to update the display info and notify
  // the device of the correct values before it sends the initial info to the
  // renderer. The frame must be submitted because WMR requires frames to be
  // submitted in the order they're created.
  UpdateWMRDataForNextFrame();
  UpdateDisplayInfo();
  main_thread_task_runner_->PostTask(
      FROM_HERE,
      base::BindOnce(on_display_info_changed_, current_display_info_.Clone()));
  return SubmitCompositedFrame();
}
279
// Tears down everything StartRuntime created, roughly in reverse order.
// Several callbacks rely on these members being nulled here before
// destruction (see base::Unretained comments elsewhere in this file).
void MixedRealityRenderLoop::StopRuntime() {
  if (window_)
    ShowWindow(window_->hwnd(), SW_HIDE);
  holographic_space_ = nullptr;
  anchor_origin_ = nullptr;
  stationary_origin_ = nullptr;
  last_origin_from_attached_ = base::nullopt;
  attached_ = nullptr;
  ClearStageStatics();
  ClearStageOrigin();

  // Drop all per-frame WMR state.
  holographic_frame_ = nullptr;
  timestamp_ = nullptr;
  pose_ = nullptr;
  rendering_params_ = nullptr;
  camera_ = nullptr;

  user_presence_changed_subscription_ = nullptr;

  // Dispose before release so the helper can unhook cleanly.
  if (input_helper_)
    input_helper_->Dispose();
  input_helper_ = nullptr;

  if (window_)
    DestroyWindow(window_->hwnd());
  window_ = nullptr;

  // Uninitialize WinRT for this thread last.
  if (initializer_)
    initializer_ = nullptr;

  {
    auto hook = MixedRealityDeviceStatics::GetLockedTestHook();
    if (hook.GetHook()) {
      hook.GetHook()->DetachCurrentThread();
    }
  }
}
317
// WMR delivers controller button events through the input-eventing path.
bool MixedRealityRenderLoop::UsesInputEventing() {
  return true;
}
321
// Establishes the world origin: an attached (head-relative) frame as a
// fallback, then a stationary frame, and finally a spatial anchor created at
// the stationary frame's location, which becomes |anchor_origin_|.
void MixedRealityRenderLoop::InitializeOrigin() {
  TRACE_EVENT0("xr", "InitializeOrigin");

  stage_transform_needs_updating_ = true;

  // Try to get a stationary frame. We'll hand out all of our poses in this
  // space.
  if (!attached_) {
    attached_ = WMRAttachedOriginFactory::CreateAtCurrentLocation();
    if (!attached_)
      return;
  }

  std::unique_ptr<WMRStationaryOrigin> stationary_frame =
      WMRStationaryOriginFactory::CreateAtCurrentLocation();
  if (!stationary_frame)
    return;

  stationary_origin_ = stationary_frame->CoordinateSystem();

  // Instead of using the stationary_frame, use an anchor.
  anchor_origin_ =
      WMRSpatialAnchorFactory::TryCreateRelativeTo(stationary_origin_.get());
}
346
// Drops all stage-related state and flags the stage transform/bounds for
// recomputation on the next update.
void MixedRealityRenderLoop::ClearStageOrigin() {
  stage_origin_ = nullptr;
  spatial_stage_ = nullptr;
  bounds_.clear();
  bounds_updated_ = true;
  stage_transform_needs_updating_ = true;
}
354
// Attempts to acquire the current spatial stage and its coordinate system,
// then refreshes the stage bounds. No-op if stage statics are unavailable.
void MixedRealityRenderLoop::InitializeStageOrigin() {
  TRACE_EVENT0("xr", "InitializeStageOrigin");
  if (!EnsureStageStatics())
    return;
  stage_transform_needs_updating_ = true;

  // Try to get a SpatialStageFrameOfReference. We'll use this to calculate
  // the transform between the poses we're handing out and where the floor is.
  spatial_stage_ = stage_statics_->CurrentStage();
  if (!spatial_stage_)
    return;

  stage_origin_ = spatial_stage_->CoordinateSystem();
  EnsureStageBounds();
}
370
// Lazily creates the stage statics and subscribes to stage-change
// notifications. Returns true if statics are available.
bool MixedRealityRenderLoop::EnsureStageStatics() {
  if (stage_statics_)
    return true;

  stage_statics_ = WMRStageStaticsFactory::Create();
  if (!stage_statics_)
    return false;

  // Since we explicitly null out both the statics and the subscription during
  // StopRuntime (which happens before destruction), base::Unretained is safe.
  stage_changed_subscription_ = stage_statics_->AddStageChangedCallback(
      base::BindRepeating(&MixedRealityRenderLoop::OnCurrentStageChanged,
                          base::Unretained(this)));

  return true;
}
387
// Unsubscribes from stage-change notifications before releasing the statics.
void MixedRealityRenderLoop::ClearStageStatics() {
  stage_changed_subscription_ = nullptr;
  stage_statics_ = nullptr;
}
392
// Stage-changed callback (may arrive on a WinRT thread); hops to the render
// loop's task runner to invalidate and re-initialize the stage origin.
void MixedRealityRenderLoop::OnCurrentStageChanged() {
  // Unretained is safe here because the task_runner() gets invalidated
  // during Stop() which happens before our destruction
  task_runner()->PostTask(FROM_HERE,
                          base::BindOnce(
                              [](MixedRealityRenderLoop* render_loop) {
                                render_loop->stage_origin_ = nullptr;
                                render_loop->InitializeStageOrigin();
                              },
                              base::Unretained(this)));
}
404
// User-presence callback (may arrive on a WinRT thread); hops to the render
// loop's task runner before recomputing the visibility state.
void MixedRealityRenderLoop::OnUserPresenceChanged() {
  // Unretained is safe here because the task_runner() gets invalidated
  // during Stop() which happens before our destruction
  task_runner()->PostTask(FROM_HERE,
                          base::BindOnce(
                              [](MixedRealityRenderLoop* render_loop) {
                                render_loop->UpdateVisibilityState();
                              },
                              base::Unretained(this)));
}
415
// Maps the holographic space's user-presence state onto the XR visibility
// state reported to the renderer.
void MixedRealityRenderLoop::UpdateVisibilityState() {
  // We could've had a task get queued up during or before a StopRuntime call.
  // Which would lead to the holographic space being null. In that case, don't
  // update the visibility state. We'll get the fresh state when and if the
  // runtime starts back up again.
  if (!holographic_space_) {
    return;
  }

  switch (holographic_space_->UserPresence()) {
    // Indicates that the browsers immersive content is visible in the headset
    // receiving input, and the headset is being worn.
    case HolographicSpaceUserPresence::
        HolographicSpaceUserPresence_PresentActive:
      SetVisibilityState(device::mojom::XRVisibilityState::VISIBLE);
      return;
    // Indicates that the browsers immersive content is visible in the headset
    // and the headset is being worn, but a modal dialog is capturing input.
    case HolographicSpaceUserPresence::
        HolographicSpaceUserPresence_PresentPassive:
      // TODO(1016907): Should report VISIBLE_BLURRED, but changed to VISIBLE to
      // work around an issue in some versions of Windows Mixed Reality which
      // only report PresentPassive and never PresentActive. Should be reverted
      // after the Windows fix has been widely released.
      SetVisibilityState(device::mojom::XRVisibilityState::VISIBLE);
      return;
    // Indicates that the browsers immersive content is not visible in the
    // headset or the user is not wearing the headset.
    case HolographicSpaceUserPresence::HolographicSpaceUserPresence_Absent:
      SetVisibilityState(device::mojom::XRVisibilityState::HIDDEN);
      return;
  }
}
449
EnsureStageBounds()450 void MixedRealityRenderLoop::EnsureStageBounds() {
451 if (!spatial_stage_)
452 return;
453
454 if (bounds_.size() != 0)
455 return;
456
457 if (!stage_origin_)
458 return;
459
460 SpatialMovementRange movement_range = spatial_stage_->MovementRange();
461 if (movement_range != SpatialMovementRange::SpatialMovementRange_Bounded)
462 return;
463
464 // GetMovementBounds gives us the points in clockwise order, so we don't
465 // need to reverse their order here.
466 std::vector<WFN::Vector3> bounds =
467 spatial_stage_->GetMovementBounds(stage_origin_.get());
468 for (const auto& bound : bounds) {
469 bounds_.emplace_back(bound.X, bound.Y, bound.Z);
470 }
471 bounds_updated_ = (bounds_.size() != 0);
472 }
473
// Resets all origins and stage state so each session starts fresh, then
// begins presenting to the headset.
void MixedRealityRenderLoop::OnSessionStart() {
  LogViewerType(VrViewerType::WINDOWS_MIXED_REALITY_UNKNOWN);

  // Each session should start with new origins.
  stationary_origin_ = nullptr;
  anchor_origin_ = nullptr;
  attached_ = nullptr;
  last_origin_from_attached_ = base::nullopt;

  InitializeOrigin();

  ClearStageOrigin();
  InitializeStageOrigin();

  StartPresenting();
}
490
// Invoked by MixedRealityWindow on WM_DESTROY: the HWND is gone, so end the
// session and shut down the runtime.
void MixedRealityRenderLoop::OnWindowDestroyed() {
  window_ = nullptr;
  ExitPresent();
  StopRuntime();
}
496
// Creates the helper window and a holographic space bound to its HWND.
// |holographic_space_| may be null on failure; callers must check.
void MixedRealityRenderLoop::InitializeSpace() {
  // Create a Window, which is required to get an IHolographicSpace.
  // base::Unretained is safe because 'this' outlives our window.
  window_ = std::make_unique<MixedRealityWindow>(base::BindOnce(
      &MixedRealityRenderLoop::OnWindowDestroyed, base::Unretained(this)));

  // A small arbitrary size that keeps the window from being distracting.
  window_->Init(NULL, gfx::Rect(25, 10));
  holographic_space_ =
      WMRHolographicSpaceFactory::CreateForWindow(window_->hwnd());
}
508
// Showing the window is what makes WMR start presenting to the headset.
void MixedRealityRenderLoop::StartPresenting() {
  ShowWindow(window_->hwnd(), SW_SHOW);
}
512
// Decomposition of a single eye's view matrix: the world-to-eye rotation
// (inverted for hand-out) and the eye's position in world space.
struct EyeToWorldDecomposed {
  gfx::Quaternion world_to_eye_rotation;
  gfx::Point3F eye_in_world_space;
};
517
// Splits a view matrix into an eye rotation and an eye position. The
// position is read from the translation column of the inverted (view-to-
// world) matrix; the rotation comes from decomposing the world-to-view
// matrix and inverting its quaternion.
EyeToWorldDecomposed DecomposeViewMatrix(
    const ABI::Windows::Foundation::Numerics::Matrix4x4& view) {
  gfx::Transform world_to_view = ConvertToGfxTransform(view);

  gfx::Transform view_to_world;
  bool invertable = world_to_view.GetInverse(&view_to_world);
  DCHECK(invertable);

  // Translation column of view_to_world is the eye position in world space.
  gfx::Point3F eye_in_world_space(view_to_world.matrix().get(0, 3),
                                  view_to_world.matrix().get(1, 3),
                                  view_to_world.matrix().get(2, 3));

  gfx::DecomposedTransform world_to_view_decomposed;
  bool decomposable =
      gfx::DecomposeTransform(&world_to_view_decomposed, world_to_view);
  DCHECK(decomposable);

  gfx::Quaternion world_to_eye_rotation = world_to_view_decomposed.quaternion;
  return {world_to_eye_rotation.inverse(), eye_in_world_space};
}
538
GetMonoViewData(const HolographicStereoTransform & view)539 mojom::VRPosePtr GetMonoViewData(const HolographicStereoTransform& view) {
540 auto eye = DecomposeViewMatrix(view.Left);
541
542 auto pose = mojom::VRPose::New();
543
544 // World to device orientation.
545 pose->orientation = eye.world_to_eye_rotation;
546
547 // Position in world space.
548 pose->position =
549 gfx::Point3F(eye.eye_in_world_space.x(), eye.eye_in_world_space.y(),
550 eye.eye_in_world_space.z());
551
552 return pose;
553 }
554
// A head pose plus the per-eye offsets (head-from-eye transforms) derived
// from a stereo view transform.
struct PoseAndEyeTransform {
  mojom::VRPosePtr pose;
  gfx::Transform head_from_left_eye;
  gfx::Transform head_from_right_eye;
};
560
// Derives a single head pose (position = midpoint of the eyes, orientation =
// slerp of the per-eye rotations) from a stereo view transform, plus the
// translation-only head-from-eye transforms.
PoseAndEyeTransform GetStereoViewData(const HolographicStereoTransform& view) {
  auto left_eye = DecomposeViewMatrix(view.Left);
  auto right_eye = DecomposeViewMatrix(view.Right);
  // Head position is the midpoint between the two eye positions.
  auto center = gfx::Point3F(
      (left_eye.eye_in_world_space.x() + right_eye.eye_in_world_space.x()) / 2,
      (left_eye.eye_in_world_space.y() + right_eye.eye_in_world_space.y()) / 2,
      (left_eye.eye_in_world_space.z() + right_eye.eye_in_world_space.z()) / 2);

  // We calculate the overal headset pose to be the slerp of per-eye poses as
  // calculated by the view transform's decompositions.
  gfx::Quaternion world_to_view_rotation = left_eye.world_to_eye_rotation;
  world_to_view_rotation.Slerp(right_eye.world_to_eye_rotation, 0.5f);

  // Calculate new eye offsets.
  PoseAndEyeTransform ret;
  gfx::Vector3dF left_offset = left_eye.eye_in_world_space - center;
  gfx::Vector3dF right_offset = right_eye.eye_in_world_space - center;

  gfx::Transform transform(world_to_view_rotation);  // World to view.
  transform.Transpose();  // Now it is view to world.

  // TODO(crbug.com/980791): Get the actual eye-to-head transforms instead of
  // building them from just the translation components so that angled screens
  // are handled properly.
  transform.TransformVector(&left_offset);  // Offset is now in view space
  transform.TransformVector(&right_offset);
  ret.head_from_left_eye = vr_utils::MakeTranslationTransform(left_offset);
  ret.head_from_right_eye = vr_utils::MakeTranslationTransform(right_offset);

  ret.pose = mojom::VRPose::New();

  // World to device orientation.
  ret.pose->orientation = world_to_view_rotation;

  // Position in world space.
  ret.pose->position = gfx::Point3F(center.x(), center.y(), center.z());

  return ret;
}
600
// Builds a frame-data skeleton (frame id and time delta) from a WMR
// timestamp; pose and eye data are filled in by the caller when available.
mojom::XRFrameDataPtr CreateDefaultFrameData(const WMRTimestamp* timestamp,
                                             int16_t frame_id) {
  mojom::XRFrameDataPtr ret = mojom::XRFrameData::New();

  // relative_time.Duration is a count of 100ns units, so multiply by 100
  // to get a count of nanoseconds.
  TimeSpan relative_time = timestamp->PredictionAmount();
  double milliseconds =
      base::TimeDelta::FromNanosecondsD(100.0 * relative_time.Duration)
          .InMillisecondsF();
  TRACE_EVENT_INSTANT1("gpu", "WebXR pose prediction", TRACE_EVENT_SCOPE_THREAD,
                       "milliseconds", milliseconds);

  // DateTime.UniversalTime is also in 100ns units; divide by 10 to get
  // microseconds.
  DateTime date_time = timestamp->TargetTime();
  ret->time_delta =
      base::TimeDelta::FromMicroseconds(date_time.UniversalTime / 10);
  ret->frame_id = frame_id;
  return ret;
}
620
// Drops last frame's WMR objects and acquires this frame's: holographic
// frame, prediction timestamp, camera pose, rendering parameters, and camera.
// Any of these may end up null; later stages check for that.
void MixedRealityRenderLoop::UpdateWMRDataForNextFrame() {
  holographic_frame_ = nullptr;
  pose_ = nullptr;
  rendering_params_ = nullptr;
  camera_ = nullptr;
  timestamp_ = nullptr;

  // Start populating this frame's data.
  holographic_frame_ = holographic_space_->TryCreateNextFrame();
  if (!holographic_frame_)
    return;

  auto prediction = holographic_frame_->CurrentPrediction();
  timestamp_ = prediction->Timestamp();

  auto poses = prediction->CameraPoses();

  // We expect there to only be one pose
  if (poses.size() != 1)
    return;
  pose_ = std::move(poses[0]);
  rendering_params_ =
      holographic_frame_->TryGetRenderingParameters(pose_.get());
  if (!rendering_params_)
    return;

  // Make sure we have an origin.
  if (!anchor_origin_) {
    InitializeOrigin();
  }

  // Make sure we have a stage origin.
  if (!stage_origin_)
    InitializeStageOrigin();

  camera_ = pose_->HolographicCamera();
}
658
UpdateDisplayInfo()659 bool MixedRealityRenderLoop::UpdateDisplayInfo() {
660 if (!pose_)
661 return false;
662 if (!camera_)
663 return false;
664
665 ABI::Windows::Graphics::Holographic::HolographicStereoTransform projection =
666 pose_->ProjectionTransform();
667
668 ABI::Windows::Foundation::Size size = camera_->RenderTargetSize();
669 bool stereo = camera_->IsStereo();
670 bool changed = false;
671
672 if (!current_display_info_) {
673 current_display_info_ = mojom::VRDisplayInfo::New();
674 current_display_info_->id =
675 device::mojom::XRDeviceId::WINDOWS_MIXED_REALITY_ID;
676 changed = true;
677 }
678
679 if (!stereo && current_display_info_->right_eye) {
680 changed = true;
681 current_display_info_->right_eye = nullptr;
682 }
683
684 if (!current_display_info_->left_eye) {
685 current_display_info_->left_eye = mojom::VREyeParameters::New();
686 changed = true;
687 }
688
689 if (current_display_info_->left_eye->render_width != size.Width ||
690 current_display_info_->left_eye->render_height != size.Height) {
691 changed = true;
692 current_display_info_->left_eye->render_width = size.Width;
693 current_display_info_->left_eye->render_height = size.Height;
694 }
695
696 auto left_fov = ParseProjection(projection.Left);
697 if (!current_display_info_->left_eye->field_of_view ||
698 !left_fov->Equals(*current_display_info_->left_eye->field_of_view)) {
699 current_display_info_->left_eye->field_of_view = std::move(left_fov);
700 changed = true;
701 }
702
703 if (stereo) {
704 if (!current_display_info_->right_eye) {
705 current_display_info_->right_eye = mojom::VREyeParameters::New();
706 changed = true;
707 }
708
709 if (current_display_info_->right_eye->render_width != size.Width ||
710 current_display_info_->right_eye->render_height != size.Height) {
711 changed = true;
712 current_display_info_->right_eye->render_width = size.Width;
713 current_display_info_->right_eye->render_height = size.Height;
714 }
715
716 auto right_fov = ParseProjection(projection.Right);
717 if (!current_display_info_->right_eye->field_of_view ||
718 !right_fov->Equals(*current_display_info_->right_eye->field_of_view)) {
719 current_display_info_->right_eye->field_of_view = std::move(right_fov);
720 changed = true;
721 }
722 }
723
724 return changed;
725 }
726
// Recomputes the stage (floor) transform and bounds in
// |current_display_info_->stage_parameters|. Returns true if anything
// changed.
bool MixedRealityRenderLoop::UpdateStageParameters() {
  // TODO(https://crbug.com/945408): We should consider subscribing to
  // SpatialStageFrameOfReference.CurrentChanged to also re-calculate this.
  bool changed = false;
  if (stage_transform_needs_updating_) {
    if (!(stage_origin_ && anchor_origin_) &&
        current_display_info_->stage_parameters) {
      // One of the origins is missing; drop stale stage parameters.
      changed = true;
      current_display_info_->stage_parameters = nullptr;
    } else if (stage_origin_ && anchor_origin_) {
      changed = true;
      current_display_info_->stage_parameters = nullptr;

      mojom::VRStageParametersPtr stage_parameters =
          mojom::VRStageParameters::New();

      Matrix4x4 origin_to_stage;
      if (!anchor_origin_->TryGetTransformTo(stage_origin_.get(),
                                             &origin_to_stage)) {
        // We failed to get a transform between the two, so force a
        // recalculation of the stage origin and leave the stage_parameters
        // null.
        ClearStageOrigin();
        return changed;
      }

      stage_parameters->standing_transform =
          ConvertToGfxTransform(origin_to_stage);

      current_display_info_->stage_parameters = std::move(stage_parameters);
    }

    stage_transform_needs_updating_ = false;
  }

  // Refresh bounds if they were (re)computed since last report.
  EnsureStageBounds();
  if (bounds_updated_ && current_display_info_->stage_parameters) {
    current_display_info_->stage_parameters->bounds = bounds_;
    changed = true;
    bounds_updated_ = false;
  }
  return changed;
}
770
// Produces the frame data handed to the renderer: acquires WMR per-frame
// state, computes the head pose (stereo or mono), updates display info and
// stage parameters, gathers input state, and — when only the attached
// (head-locked) space could be located — re-expresses the pose in the last
// known world origin and marks the position as emulated.
mojom::XRFrameDataPtr MixedRealityRenderLoop::GetNextFrameData() {
  UpdateWMRDataForNextFrame();
  if (!timestamp_) {
    TRACE_EVENT_INSTANT0("xr", "No Timestamp", TRACE_EVENT_SCOPE_THREAD);
    // Without a timestamp we can only hand back a frame id.
    mojom::XRFrameDataPtr frame_data = mojom::XRFrameData::New();
    frame_data->frame_id = next_frame_id_;
    // TODO(crbug.com/838515): Fix inaccurate time delta reporting in
    // VRMagicWindowProvider::GetFrameData
    return frame_data;
  }

  // Once we have a prediction, we can generate a frame data.
  mojom::XRFrameDataPtr ret =
      CreateDefaultFrameData(timestamp_.get(), next_frame_id_);

  if ((!attached_ && !anchor_origin_) || !pose_) {
    TRACE_EVENT_INSTANT0("xr", "No origin or no pose",
                         TRACE_EVENT_SCOPE_THREAD);
    // If we don't have an origin or pose for this frame, we can still give out
    // a timestamp and frame to render head-locked content.
    return ret;
  }

  std::unique_ptr<WMRCoordinateSystem> attached_coordinates =
      attached_->TryGetCoordinatesAtTimestamp(timestamp_.get());
  if (!attached_coordinates)
    return ret;

  // Prefer a view transform in the anchor (world) space; fall back to the
  // attached (head-locked) space with emulated position.
  ABI::Windows::Graphics::Holographic::HolographicStereoTransform view;
  bool got_view = false;
  if (anchor_origin_ &&
      pose_->TryGetViewTransform(anchor_origin_.get(), &view)) {
    got_view = true;
    emulated_position_ = false;
    // Remember origin-from-attached so future attached-only frames can be
    // re-expressed in world space.
    ABI::Windows::Foundation::Numerics::Matrix4x4 origin_from_attached;
    if (attached_coordinates->TryGetTransformTo(anchor_origin_.get(),
                                                &origin_from_attached)) {
      last_origin_from_attached_ = ConvertToGfxTransform(origin_from_attached);
    }
  } else {
    emulated_position_ = true;
    if (!pose_->TryGetViewTransform(attached_coordinates.get(), &view)) {
      TRACE_EVENT_INSTANT0("xr", "Failed to locate origin",
                           TRACE_EVENT_SCOPE_THREAD);
      return ret;
    } else {
      got_view = true;
    }
  }

  // NOTE(review): both paths above either return early or set |got_view|, so
  // this branch appears unreachable; kept as a defensive check.
  if (!got_view) {
    TRACE_EVENT_INSTANT0("xr", "No view transform", TRACE_EVENT_SCOPE_THREAD);
    return ret;
  }

  bool send_new_display_info = UpdateDisplayInfo();
  if (!current_display_info_) {
    TRACE_EVENT_INSTANT0("xr", "No display info", TRACE_EVENT_SCOPE_THREAD);
    return ret;
  }

  if (current_display_info_->right_eye) {
    // If we have a right eye, we are stereo.
    PoseAndEyeTransform pose_and_eye_transform = GetStereoViewData(view);
    ret->pose = std::move(pose_and_eye_transform.pose);

    if (current_display_info_->left_eye->head_from_eye !=
            pose_and_eye_transform.head_from_left_eye ||
        current_display_info_->right_eye->head_from_eye !=
            pose_and_eye_transform.head_from_right_eye) {
      current_display_info_->left_eye->head_from_eye =
          std::move(pose_and_eye_transform.head_from_left_eye);
      current_display_info_->right_eye->head_from_eye =
          std::move(pose_and_eye_transform.head_from_right_eye);
      send_new_display_info = true;
    }
  } else {
    // Mono: identity head-from-eye.
    ret->pose = GetMonoViewData(view);
    gfx::Transform head_from_eye;
    if (current_display_info_->left_eye->head_from_eye != head_from_eye) {
      current_display_info_->left_eye->head_from_eye = head_from_eye;
      send_new_display_info = true;
    }
  }

  // The only display info we've updated so far is the eye info.
  if (send_new_display_info) {
    // Update the eye info for this frame.
    ret->left_eye = current_display_info_->left_eye.Clone();
    ret->right_eye = current_display_info_->right_eye.Clone();
  }

  bool stage_parameters_updated = UpdateStageParameters();
  if (stage_parameters_updated) {
    ret->stage_parameters_updated = true;
    ret->stage_parameters = current_display_info_->stage_parameters.Clone();
  }

  if (send_new_display_info || stage_parameters_updated) {
    // Notify the device about the display info change.
    main_thread_task_runner_->PostTask(
        FROM_HERE, base::BindOnce(on_display_info_changed_,
                                  current_display_info_.Clone()));
  }

  ret->input_state =
      input_helper_->GetInputState(anchor_origin_.get(), timestamp_.get());

  ret->pose->emulated_position = emulated_position_;

  // If we only located the attached space, re-express the pose in the last
  // known world origin: origin_from_view = origin_from_attached *
  // attached_from_view.
  if (emulated_position_ && last_origin_from_attached_) {
    gfx::DecomposedTransform attached_from_view_decomp;
    attached_from_view_decomp.quaternion = (*ret->pose->orientation);

    attached_from_view_decomp.translate[0] = ret->pose->position->x();
    attached_from_view_decomp.translate[1] = ret->pose->position->y();
    attached_from_view_decomp.translate[2] = ret->pose->position->z();

    gfx::Transform attached_from_view =
        gfx::ComposeTransform(attached_from_view_decomp);
    gfx::Transform origin_from_view =
        (*last_origin_from_attached_) * attached_from_view;
    gfx::DecomposedTransform origin_from_view_decomposed;
    bool success =
        gfx::DecomposeTransform(&origin_from_view_decomposed, origin_from_view);
    DCHECK(success);
    ret->pose->orientation = origin_from_view_decomposed.quaternion;
    ret->pose->position = gfx::Point3F(
        static_cast<float>(origin_from_view_decomposed.translate[0]),
        static_cast<float>(origin_from_view_decomposed.translate[1]),
        static_cast<float>(origin_from_view_decomposed.translate[2]));
  }

  return ret;
}
906
907 } // namespace device
908