Chromium Code Reviews
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "device/vr/android/gvr/gvr_device.h"

#include <math.h>
#include <algorithm>

#include "base/strings/string_util.h"
#include "base/strings/utf_string_conversions.h"
#include "base/time/time.h"
#include "base/trace_event/trace_event.h"
#include "third_party/gvr-android-sdk/src/ndk-beta/include/vr/gvr/capi/include/gvr.h"
#include "third_party/gvr-android-sdk/src/ndk-beta/include/vr/gvr/capi/include/gvr_types.h"
#include "ui/gfx/transform.h"
#include "ui/gfx/transform_util.h"

namespace device {

GvrDevice::GvrDevice(VRDeviceProvider* provider, gvr::GvrApi* gvr)
    : VRDevice(provider), gvr_(gvr) {}

GvrDevice::~GvrDevice() {}

VRDisplayPtr GvrDevice::GetVRDevice() {
  TRACE_EVENT0("input", "GvrDevice::GetVRDevice");
  VRDisplayPtr device = VRDisplay::New();

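  // Phone-based GVR provides orientation-only (3DoF) tracking with no
  // external display, but it can present.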
  device->capabilities = VRDisplayCapabilities::New();
  device->capabilities->hasOrientation = true;
  device->capabilities->hasPosition = false;
  device->capabilities->hasExternalDisplay = false;
  device->capabilities->canPresent = true;

  device->leftEye = VREyeParameters::New();
  device->rightEye = VREyeParameters::New();
  VREyeParametersPtr& left_eye = device->leftEye;
  VREyeParametersPtr& right_eye = device->rightEye;

  device->displayName = gvr_->GetViewerModel();

  gvr::BufferViewportList gvr_buffer_viewports =
      gvr_->CreateEmptyBufferViewportList();
  gvr_buffer_viewports.SetToRecommendedBufferViewports();

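  // The recommended buffer viewports carry the per-eye source field of view,
  // which maps directly onto the WebVR VRFieldOfView (in degrees).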
  gvr::BufferViewport eye_viewport = gvr_->CreateBufferViewport();
  gvr_buffer_viewports.GetBufferViewport(GVR_LEFT_EYE, &eye_viewport);
  gvr::Rectf eye_fov = eye_viewport.GetSourceFov();
  left_eye->fieldOfView = VRFieldOfView::New();
  left_eye->fieldOfView->upDegrees = eye_fov.top;
  left_eye->fieldOfView->downDegrees = eye_fov.bottom;
  left_eye->fieldOfView->leftDegrees = eye_fov.left;
  left_eye->fieldOfView->rightDegrees = eye_fov.right;

  eye_viewport = gvr_->CreateBufferViewport();
  gvr_buffer_viewports.GetBufferViewport(GVR_RIGHT_EYE, &eye_viewport);
  eye_fov = eye_viewport.GetSourceFov();
  right_eye->fieldOfView = VRFieldOfView::New();
  right_eye->fieldOfView->upDegrees = eye_fov.top;
  right_eye->fieldOfView->downDegrees = eye_fov.bottom;
  right_eye->fieldOfView->leftDegrees = eye_fov.left;
  right_eye->fieldOfView->rightDegrees = eye_fov.right;

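  // The eye-from-head matrices place the head relative to each eye; the
  // translation column is negated so the reported offset is the eye's
  // position in head space, as WebVR expects.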
  gvr::Mat4f left_eye_mat = gvr_->GetEyeFromHeadMatrix(GVR_LEFT_EYE);
  gvr::Mat4f right_eye_mat = gvr_->GetEyeFromHeadMatrix(GVR_RIGHT_EYE);

  left_eye->offset = mojo::Array<float>::New(3);
  left_eye->offset[0] = -left_eye_mat.m[0][3];
  left_eye->offset[1] = -left_eye_mat.m[1][3];
  left_eye->offset[2] = -left_eye_mat.m[2][3];

  right_eye->offset = mojo::Array<float>::New(3);
  right_eye->offset[0] = -right_eye_mat.m[0][3];
  right_eye->offset[1] = -right_eye_mat.m[1][3];
  right_eye->offset[2] = -right_eye_mat.m[2][3];

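  // The recommended render target covers both eyes; split its width evenly.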
  gvr::Sizei render_target_size = gvr_->GetRecommendedRenderTargetSize();

  left_eye->renderWidth = render_target_size.width / 2;
  left_eye->renderHeight = render_target_size.height;

  right_eye->renderWidth = render_target_size.width / 2;
  right_eye->renderHeight = render_target_size.height;

  return device;
}

VRPosePtr GvrDevice::GetPose() {
  TRACE_EVENT0("input", "GvrDevice::GetPose");
  VRPosePtr pose = VRPose::New();

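  // base::Time::ToJsTime() reports milliseconds since the Unix epoch.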
  pose->timestamp = base::Time::Now().ToJsTime();

  gvr::Mat4f head_mat = gvr_->GetHeadPoseInStartSpace(gvr_get_time_point_now());
mthiesse 2016/08/09 14:51:55
You're not doing any prediction? Most apps would c
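A minimal sketch of the prediction the comment is asking about, assuming a fixed look-ahead; the 50 ms horizon is a placeholder chosen for illustration, not a value taken from the GVR SDK. Only gvr_get_time_point_now() and GetHeadPoseInStartSpace(), already used above, are assumed:

    // Sample the head pose slightly in the future rather than at "now", so the
    // rendered frame better matches where the head will be when it is shown.
    gvr_clock_time_point target_time = gvr_get_time_point_now();
    target_time.monotonic_system_time_nanos += 50 * 1000 * 1000;  // ~50 ms.
    gvr::Mat4f head_mat = gvr_->GetHeadPoseInStartSpace(target_time);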
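  // The head matrix maps start space into head space; invert it and decompose
  // the result so the quaternion and translation describe the head's pose in
  // start space.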
  gfx::Transform inv_transform(
      head_mat.m[0][0], head_mat.m[0][1], head_mat.m[0][2], head_mat.m[0][3],
      head_mat.m[1][0], head_mat.m[1][1], head_mat.m[1][2], head_mat.m[1][3],
      head_mat.m[2][0], head_mat.m[2][1], head_mat.m[2][2], head_mat.m[2][3],
      head_mat.m[3][0], head_mat.m[3][1], head_mat.m[3][2], head_mat.m[3][3]);

  gfx::Transform transform;
  if (inv_transform.GetInverse(&transform)) {
    gfx::DecomposedTransform decomposed_transform;
    gfx::DecomposeTransform(&decomposed_transform, transform);

    pose->orientation = mojo::Array<float>::New(4);
    pose->orientation[0] = decomposed_transform.quaternion[0];
    pose->orientation[1] = decomposed_transform.quaternion[1];
    pose->orientation[2] = decomposed_transform.quaternion[2];
    pose->orientation[3] = decomposed_transform.quaternion[3];

    pose->position = mojo::Array<float>::New(3);
    pose->position[0] = decomposed_transform.translate[0];
    pose->position[1] = decomposed_transform.translate[1];
    pose->position[2] = decomposed_transform.translate[2];
  }

  return pose;
}

void GvrDevice::ResetPose() {
  gvr_->ResetTracking();
}

}  // namespace device