// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "device/vr/android/gvr/gvr_device.h"

#include <math.h>
#include <algorithm>

#include "base/time/time.h"
#include "base/trace_event/trace_event.h"
#include "third_party/gvr-android-sdk/src/ndk-beta/include/vr/gvr/capi/include/gvr.h"
#include "third_party/gvr-android-sdk/src/ndk-beta/include/vr/gvr/capi/include/gvr_types.h"
#include "ui/gfx/transform.h"
#include "ui/gfx/transform_util.h"

namespace device {

namespace {

static const uint64_t kPredictionTimeWithoutVsyncNanos = 50000000;

}  // namespace

GvrDevice::GvrDevice(VRDeviceProvider* provider, gvr::GvrApi* gvr_api)
    : VRDevice(provider), gvr_api_(gvr_api) {}

GvrDevice::~GvrDevice() {}

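// Builds the WebVR display description (capabilities, per-eye field of view,
// eye offsets, and recommended render target size) from the GVR API.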
VRDisplayPtr GvrDevice::GetVRDevice() {
  TRACE_EVENT0("input", "GvrDevice::GetVRDevice");

  VRDisplayPtr device = VRDisplay::New();

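  // GVR viewers track orientation only and render on the phone's own screen,
  // so positional tracking and an external display are not reported.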
  device->capabilities = VRDisplayCapabilities::New();
  device->capabilities->hasOrientation = true;
  device->capabilities->hasPosition = false;
  device->capabilities->hasExternalDisplay = false;
  device->capabilities->canPresent = true;

  device->leftEye = VREyeParameters::New();
  device->rightEye = VREyeParameters::New();
  VREyeParametersPtr& left_eye = device->leftEye;
  VREyeParametersPtr& right_eye = device->rightEye;

  device->displayName = gvr_api_->GetViewerModel();

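  // Read the recommended per-eye field of view, in degrees, from the buffer
  // viewport list.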
  gvr::BufferViewportList gvr_buffer_viewports =
      gvr_api_->CreateEmptyBufferViewportList();
  gvr_buffer_viewports.SetToRecommendedBufferViewports();

  gvr::BufferViewport eye_viewport = gvr_api_->CreateBufferViewport();
  gvr_buffer_viewports.GetBufferViewport(GVR_LEFT_EYE, &eye_viewport);
  gvr::Rectf eye_fov = eye_viewport.GetSourceFov();
  left_eye->fieldOfView = VRFieldOfView::New();
  left_eye->fieldOfView->upDegrees = eye_fov.top;
  left_eye->fieldOfView->downDegrees = eye_fov.bottom;
  left_eye->fieldOfView->leftDegrees = eye_fov.left;
  left_eye->fieldOfView->rightDegrees = eye_fov.right;

  eye_viewport = gvr_api_->CreateBufferViewport();
  gvr_buffer_viewports.GetBufferViewport(GVR_RIGHT_EYE, &eye_viewport);
  eye_fov = eye_viewport.GetSourceFov();
  right_eye->fieldOfView = VRFieldOfView::New();
  right_eye->fieldOfView->upDegrees = eye_fov.top;
  right_eye->fieldOfView->downDegrees = eye_fov.bottom;
  right_eye->fieldOfView->leftDegrees = eye_fov.left;
  right_eye->fieldOfView->rightDegrees = eye_fov.right;

  gvr::Mat4f left_eye_mat = gvr_api_->GetEyeFromHeadMatrix(GVR_LEFT_EYE);
  gvr::Mat4f right_eye_mat = gvr_api_->GetEyeFromHeadMatrix(GVR_RIGHT_EYE);

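  // The last column of each eye-from-head matrix holds the translation;
  // negate it to get the eye's offset from the center of the head.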
  left_eye->offset = mojo::Array<float>::New(3);
  left_eye->offset[0] = -left_eye_mat.m[0][3];
  left_eye->offset[1] = -left_eye_mat.m[1][3];
  left_eye->offset[2] = -left_eye_mat.m[2][3];

  right_eye->offset = mojo::Array<float>::New(3);
  right_eye->offset[0] = -right_eye_mat.m[0][3];
  right_eye->offset[1] = -right_eye_mat.m[1][3];
  right_eye->offset[2] = -right_eye_mat.m[2][3];

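  // Split the recommended render target size evenly between the two eyes.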
  gvr::Sizei render_target_size = gvr_api_->GetRecommendedRenderTargetSize();

  left_eye->renderWidth = render_target_size.width / 2;
  left_eye->renderHeight = render_target_size.height;

  right_eye->renderWidth = render_target_size.width / 2;
  right_eye->renderHeight = render_target_size.height;

  return device;
}

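// Samples the head pose, predicted slightly into the future, and converts it
// into a WebVR pose (orientation quaternion plus position).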
VRPosePtr GvrDevice::GetPose() {
  TRACE_EVENT0("input", "GvrDevice::GetPose");

  VRPosePtr pose = VRPose::New();

  pose->timestamp = base::Time::Now().ToJsTime();

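  // Predict the pose a fixed interval ahead of now, since no vsync-based
  // target time is available here.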
  gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow();
  target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos;

  gvr::Mat4f head_mat = gvr_api_->GetHeadPoseInStartSpace(target_time);

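  // The head matrix maps start space into head space; invert it and decompose
  // the inverse into the head's orientation and position in start space.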
  gfx::Transform inv_transform(
      head_mat.m[0][0], head_mat.m[0][1], head_mat.m[0][2], head_mat.m[0][3],
      head_mat.m[1][0], head_mat.m[1][1], head_mat.m[1][2], head_mat.m[1][3],
      head_mat.m[2][0], head_mat.m[2][1], head_mat.m[2][2], head_mat.m[2][3],
      head_mat.m[3][0], head_mat.m[3][1], head_mat.m[3][2], head_mat.m[3][3]);

  gfx::Transform transform;
  if (inv_transform.GetInverse(&transform)) {
    gfx::DecomposedTransform decomposed_transform;
    gfx::DecomposeTransform(&decomposed_transform, transform);

    pose->orientation = mojo::Array<float>::New(4);
    pose->orientation[0] = decomposed_transform.quaternion[0];
    pose->orientation[1] = decomposed_transform.quaternion[1];
    pose->orientation[2] = decomposed_transform.quaternion[2];
    pose->orientation[3] = decomposed_transform.quaternion[3];

    pose->position = mojo::Array<float>::New(3);
    pose->position[0] = decomposed_transform.translate[0];
    pose->position[1] = decomposed_transform.translate[1];
    pose->position[2] = decomposed_transform.translate[2];
  }

  return pose;
}

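// Recenters tracking so that the user's current heading becomes forward.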
void GvrDevice::ResetPose() {
  gvr_api_->ResetTracking();
}

}  // namespace device