Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2016 The Chromium Authors. All rights reserved. | 1 // Copyright 2016 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "device/vr/android/gvr/gvr_delegate.h" | 5 #include "device/vr/android/gvr/gvr_delegate.h" |
| 6 | 6 |
| 7 #include "base/trace_event/trace_event.h" | 7 #include "base/trace_event/trace_event.h" |
| 8 #include "third_party/gvr-android-sdk/src/libraries/headers/vr/gvr/capi/include/gvr.h" | 8 #include "third_party/gvr-android-sdk/src/libraries/headers/vr/gvr/capi/include/gvr.h" |
| 9 #include "ui/gfx/transform.h" | 9 #include "ui/gfx/transform.h" |
| 10 #include "ui/gfx/transform_util.h" | 10 #include "ui/gfx/transform_util.h" |
| 11 | 11 |
| 12 namespace device { | 12 namespace device { |
| 13 | 13 |
| 14 namespace { | 14 namespace { |
| 15 // Default downscale factor for computing the recommended WebVR | 15 // Default downscale factor for computing the recommended WebVR |
| 16 // renderWidth/Height from the 1:1 pixel mapped size. Using a rather | 16 // renderWidth/Height from the 1:1 pixel mapped size. Using a rather |
| 17 // aggressive downscale due to the high overhead of copying pixels | 17 // aggressive downscale due to the high overhead of copying pixels |
| 18 // twice before handing off to GVR. For comparison, the polyfill | 18 // twice before handing off to GVR. For comparison, the polyfill |
| 19 // uses approximately 0.55 on a Pixel XL. | 19 // uses approximately 0.55 on a Pixel XL. |
| 20 static constexpr float kWebVrRecommendedResolutionScale = 0.5; | 20 static constexpr float kWebVrRecommendedResolutionScale = 0.5; |
| 21 | |
| 22 // TODO(mthiesse): If gvr::PlatformInfo().GetPosePredictionTime() is ever | |
| 23 // exposed, use that instead (it defaults to 50ms on most platforms). | |
| 24 static constexpr int64_t kPredictionTimeWithoutVsyncNanos = 50000000; | |
| 25 | |
| 26 // Time offset used for calculating angular velocity from a pair of predicted | |
| 27 // poses. The precise value shouldn't matter as long as it's nonzero and much | |
| 28 // less than a frame. | |
| 29 static constexpr int64_t kAngularVelocityEpsilonNanos = 1000000; | |
| 30 | |
| 31 // Matrix math copied from vr_shell's vr_math.cc, can't use that here | |
| 32 // due to dependency ordering. TODO(mthiesse): move the vr_math code | |
| 33 // to this directory so that both locations can use it. | |
| 34 | |
| 35 // Rotation only, ignore translation components. | |
| 36 gvr::Vec3f MatrixVectorRotate(const gvr::Mat4f& m, const gvr::Vec3f& v) { | |
| 37 gvr::Vec3f res; | |
| 38 res.x = m.m[0][0] * v.x + m.m[0][1] * v.y + m.m[0][2] * v.z; | |
| 39 res.y = m.m[1][0] * v.x + m.m[1][1] * v.y + m.m[1][2] * v.z; | |
| 40 res.z = m.m[2][0] * v.x + m.m[2][1] * v.y + m.m[2][2] * v.z; | |
| 41 return res; | |
| 42 } | |
| 43 | |
| 44 gvr::Mat4f MatrixMul(const gvr::Mat4f& matrix1, const gvr::Mat4f& matrix2) { | |
| 45 gvr::Mat4f result; | |
| 46 for (int i = 0; i < 4; ++i) { | |
| 47 for (int j = 0; j < 4; ++j) { | |
| 48 result.m[i][j] = 0.0f; | |
| 49 for (int k = 0; k < 4; ++k) { | |
| 50 result.m[i][j] += matrix1.m[i][k] * matrix2.m[k][j]; | |
| 51 } | |
| 52 } | |
| 53 } | |
| 54 return result; | |
| 55 } | |
| 56 | |
| 21 } // namespace | 57 } // namespace |
| 22 | 58 |
| 23 /* static */ | 59 /* static */ |
| 24 mojom::VRPosePtr GvrDelegate::VRPosePtrFromGvrPose(gvr::Mat4f head_mat) { | 60 mojom::VRPosePtr GvrDelegate::VRPosePtrFromGvrPose(gvr::Mat4f head_mat) { |
| 25 mojom::VRPosePtr pose = mojom::VRPose::New(); | 61 mojom::VRPosePtr pose = mojom::VRPose::New(); |
| 26 | 62 |
| 27 pose->orientation.emplace(4); | 63 pose->orientation.emplace(4); |
| 28 | 64 |
| 29 gfx::Transform inv_transform( | 65 gfx::Transform inv_transform( |
| 30 head_mat.m[0][0], head_mat.m[0][1], head_mat.m[0][2], head_mat.m[0][3], | 66 head_mat.m[0][0], head_mat.m[0][1], head_mat.m[0][2], head_mat.m[0][3], |
| (...skipping 14 matching lines...) | |
| 45 pose->position.emplace(3); | 81 pose->position.emplace(3); |
| 46 pose->position.value()[0] = decomposed_transform.translate[0]; | 82 pose->position.value()[0] = decomposed_transform.translate[0]; |
| 47 pose->position.value()[1] = decomposed_transform.translate[1]; | 83 pose->position.value()[1] = decomposed_transform.translate[1]; |
| 48 pose->position.value()[2] = decomposed_transform.translate[2]; | 84 pose->position.value()[2] = decomposed_transform.translate[2]; |
| 49 } | 85 } |
| 50 | 86 |
| 51 return pose; | 87 return pose; |
| 52 } | 88 } |
| 53 | 89 |
| 54 /* static */ | 90 /* static */ |
| 91 gvr::Mat4f GvrDelegate::GetGvrPoseWithNeckModel(gvr::GvrApi* gvr_api) { | |
| 92 gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow(); | |
| 93 target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos; | |
| 94 | |
| 95 gvr::Mat4f head_mat = gvr_api->ApplyNeckModel( | |
| 96 gvr_api->GetHeadSpaceFromStartSpaceRotation(target_time), 1.0f); | |
| 97 | |
| 98 return head_mat; | |
| 99 } | |
| 100 | |
| 101 /* static */ | |
| 102 mojom::VRPosePtr GvrDelegate::GetVRPosePtrWithNeckModel( | |
| 103 gvr::GvrApi* gvr_api, | |
| 104 gvr::Mat4f* head_mat_out) { | |
| 105 gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow(); | |
| 106 target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos; | |
| 107 | |
| 108 gvr::Mat4f head_mat = gvr_api->ApplyNeckModel( | |
| 109 gvr_api->GetHeadSpaceFromStartSpaceRotation(target_time), 1.0f); | |
| 110 | |
| 111 if (head_mat_out) | |
| 112 *head_mat_out = head_mat; | |
| 113 | |
| 114 target_time.monotonic_system_time_nanos += kAngularVelocityEpsilonNanos; | |
| 115 gvr::Mat4f head_mat_2 = | |
| 116 gvr_api->GetHeadSpaceFromStartSpaceRotation(target_time); | |
| 117 | |
| 118 mojom::VRPosePtr pose = GvrDelegate::VRPosePtrFromGvrPose(head_mat); | |
| 119 | |
| 120 // Add headset angular velocity to the pose. | |

mthiesse 2017/04/06 00:15:37: nit: Split this out into a separate function for r
klausw 2017/04/06 01:28:57: Done.
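
A minimal sketch of how that split-out helper might look, assuming a free function in the same anonymous namespace. The name and signature are illustrative guesses rather than the landed follow-up (which is not shown in this diff); the body mirrors the finite-difference code in the new lines below.

```cpp
// Hypothetical helper (name and signature are assumptions, not from this CL).
// Estimates angular velocity from two predicted head poses sampled
// |epsilon_seconds| apart, using a first-order finite difference.
gvr::Vec3f GetAngularVelocityFromPoses(const gvr::Mat4f& head_mat,
                                       const gvr::Mat4f& head_mat_2,
                                       double epsilon_seconds) {
  gvr::Mat4f delta_mat;
  gvr::Mat4f inverse_head_mat;
  // The difference quotient approximates the rotation's time derivative; the
  // inverse of a rotation is its transpose, so only the 3x3 part is flipped.
  for (int j = 0; j < 3; ++j) {
    for (int i = 0; i < 3; ++i) {
      delta_mat.m[j][i] =
          (head_mat_2.m[j][i] - head_mat.m[j][i]) / epsilon_seconds;
      inverse_head_mat.m[j][i] = head_mat.m[i][j];
    }
    delta_mat.m[j][3] = delta_mat.m[3][j] = 0.0;
    inverse_head_mat.m[j][3] = inverse_head_mat.m[3][j] = 0.0;
  }
  delta_mat.m[3][3] = 1.0;
  inverse_head_mat.m[3][3] = 1.0;
  // delta * R^T approximates the skew-symmetric angular velocity matrix.
  gvr::Mat4f omega_mat = MatrixMul(delta_mat, inverse_head_mat);
  gvr::Vec3f omega_vec;
  omega_vec.x = -omega_mat.m[2][1];
  omega_vec.y = omega_mat.m[2][0];
  omega_vec.z = -omega_mat.m[1][0];
  // Rotate by the inverse head matrix to express the result in seated space.
  return MatrixVectorRotate(inverse_head_mat, omega_vec);
}
```
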
| 121 // | |
| 122 // The angular velocity is a 3-element vector pointing along the rotation | |
| 123 // axis with magnitude equal to rotation speed in radians/second, expressed | |
| 124 // in the seated frame of reference. | |
| 125 // | |
| 126 // The 1.1 spec isn't very clear on details, clarification requested in | |
| 127 // https://github.com/w3c/webvr/issues/212 . For now, assuming that we | |
| 128 // want a vector in the sitting reference frame. | |
| 129 // | |
| 130 // Assuming that pose prediction is simply based on adding a time * angular | |
| 131 // velocity rotation to the pose, we can approximate the angular velocity | |
| 132 // from the difference between two successive poses. This is a first order | |
| 133 // estimate that assumes small enough rotations so that we can do linear | |
| 134 // approximation. | |
| 135 // | |
| 136 // See: | |
| 137 // https://en.wikipedia.org/wiki/Angular_velocity#Calculation_from_the_orientation_matrix | |
| 138 pose->angularVelocity.emplace(3); | |
| 139 // Assume that epsilon is nonzero since it's a compile-time constant | |
| 140 // provided by the caller. | |
| 141 double epsilon_seconds = kAngularVelocityEpsilonNanos * 1e-9; | |
| 142 gvr::Mat4f delta_mat; | |
| 143 gvr::Mat4f inverse_head_mat; | |
| 144 // Calculate difference matrix, and inverse head matrix rotation. | |
| 145 // For the inverse rotation, just transpose the 3x3 subsection. | |
| 146 for (int j = 0; j < 3; ++j) { | |
| 147 for (int i = 0; i < 3; ++i) { | |
| 148 delta_mat.m[j][i] = | |
| 149 (head_mat_2.m[j][i] - head_mat.m[j][i]) / epsilon_seconds; | |
| 150 inverse_head_mat.m[j][i] = head_mat.m[i][j]; | |
| 151 } | |
| 152 delta_mat.m[j][3] = delta_mat.m[3][j] = 0.0; | |
| 153 inverse_head_mat.m[j][3] = inverse_head_mat.m[3][j] = 0.0; | |
| 154 } | |
| 155 delta_mat.m[3][3] = 1.0; | |
| 156 inverse_head_mat.m[3][3] = 1.0; | |
| 157 gvr::Mat4f omega_mat = device::MatrixMul(delta_mat, inverse_head_mat); | |
| 158 gvr::Vec3f omega_vec; | |
| 159 omega_vec.x = -omega_mat.m[2][1]; | |
| 160 omega_vec.y = omega_mat.m[2][0]; | |
| 161 omega_vec.z = -omega_mat.m[1][0]; | |
| 162 | |
| 163 // Rotate by inverse head matrix to bring into seated space. | |
| 164 gvr::Vec3f angular_velocity = | |
| 165 device::MatrixVectorRotate(inverse_head_mat, omega_vec); | |
| 166 pose->angularVelocity.value()[0] = angular_velocity.x; | |
| 167 pose->angularVelocity.value()[1] = angular_velocity.y; | |
| 168 pose->angularVelocity.value()[2] = angular_velocity.z; | |
| 169 | |
| 170 return pose; | |
| 171 } | |
| 172 | |
| 173 /* static */ | |
| 55 gvr::Sizei GvrDelegate::GetRecommendedWebVrSize(gvr::GvrApi* gvr_api) { | 174 gvr::Sizei GvrDelegate::GetRecommendedWebVrSize(gvr::GvrApi* gvr_api) { |
| 56 // Pick a reasonable default size for the WebVR transfer surface | 175 // Pick a reasonable default size for the WebVR transfer surface |
| 57 // based on a downscaled 1:1 render resolution. This size will also | 176 // based on a downscaled 1:1 render resolution. This size will also |
| 58 // be reported to the client via CreateVRDisplayInfo as the | 177 // be reported to the client via CreateVRDisplayInfo as the |
| 59 // client-recommended renderWidth/renderHeight and for the GVR | 178 // client-recommended renderWidth/renderHeight and for the GVR |
| 60 // framebuffer. If the client chooses a different size or resizes it | 179 // framebuffer. If the client chooses a different size or resizes it |
| 61 // while presenting, we'll resize the transfer surface and GVR | 180 // while presenting, we'll resize the transfer surface and GVR |
| 62 // framebuffer to match. | 181 // framebuffer to match. |
| 63 gvr::Sizei render_target_size = | 182 gvr::Sizei render_target_size = |
| 64 gvr_api->GetMaximumEffectiveRenderTargetSize(); | 183 gvr_api->GetMaximumEffectiveRenderTargetSize(); |
| (...skipping 58 matching lines...) | |
| 123 gvr::Mat4f eye_mat = gvr_api->GetEyeFromHeadMatrix(eye); | 242 gvr::Mat4f eye_mat = gvr_api->GetEyeFromHeadMatrix(eye); |
| 124 eye_params->offset[0] = -eye_mat.m[0][3]; | 243 eye_params->offset[0] = -eye_mat.m[0][3]; |
| 125 eye_params->offset[1] = -eye_mat.m[1][3]; | 244 eye_params->offset[1] = -eye_mat.m[1][3]; |
| 126 eye_params->offset[2] = -eye_mat.m[2][3]; | 245 eye_params->offset[2] = -eye_mat.m[2][3]; |
| 127 } | 246 } |
| 128 | 247 |
| 129 return device; | 248 return device; |
| 130 } | 249 } |
| 131 | 250 |
| 132 } // namespace device | 251 } // namespace device |
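
For reference, the comments in the new `GetVRPosePtrWithNeckModel` code describe a first-order estimate of angular velocity from two closely spaced predicted poses. A sketch of the underlying relation, where R(t) is the rotation part of the head matrix and epsilon is kAngularVelocityEpsilonNanos converted to seconds (sign conventions depend on whether the matrix maps start space to head space or the reverse):

```latex
% Angular velocity as a skew-symmetric matrix, estimated by finite differences:
%   R_1 = R(t),  R_2 = R(t + \epsilon)
\Omega \;=\; \dot{R}\,R^{\mathsf{T}}
       \;\approx\; \frac{R_2 - R_1}{\epsilon}\, R_1^{\mathsf{T}},
\qquad
\Omega \;=\;
\begin{pmatrix}
  0        & -\omega_z &  \omega_y \\
  \omega_z &  0        & -\omega_x \\
 -\omega_y &  \omega_x &  0
\end{pmatrix}
```

The new code builds this product (delta_mat times the transposed rotation), reads the three independent components out of the resulting matrix with signs that follow its head-from-start convention, and then rotates them by the inverse head matrix to express the angular velocity in the seated reference frame.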