| OLD | NEW |
| 1 // Copyright 2016 The Chromium Authors. All rights reserved. | 1 // Copyright 2016 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "device/vr/android/gvr/gvr_delegate.h" | 5 #include "device/vr/android/gvr/gvr_delegate.h" |
| 6 | 6 |
| 7 #include "base/trace_event/trace_event.h" | 7 #include "base/trace_event/trace_event.h" |
| 8 #include "third_party/gvr-android-sdk/src/libraries/headers/vr/gvr/capi/include/
gvr.h" | 8 #include "third_party/gvr-android-sdk/src/libraries/headers/vr/gvr/capi/include/
gvr.h" |
| 9 #include "ui/gfx/transform.h" | 9 #include "ui/gfx/transform.h" |
| 10 #include "ui/gfx/transform_util.h" | 10 #include "ui/gfx/transform_util.h" |
| 11 | 11 |
| 12 namespace device { | 12 namespace device { |
| 13 | 13 |
| 14 namespace { | 14 namespace { |
| 15 // Default downscale factor for computing the recommended WebVR | 15 // Default downscale factor for computing the recommended WebVR |
| 16 // renderWidth/Height from the 1:1 pixel mapped size. Using a rather | 16 // renderWidth/Height from the 1:1 pixel mapped size. Using a rather |
| 17 // aggressive downscale due to the high overhead of copying pixels | 17 // aggressive downscale due to the high overhead of copying pixels |
| 18 // twice before handing off to GVR. For comparison, the polyfill | 18 // twice before handing off to GVR. For comparison, the polyfill |
| 19 // uses approximately 0.55 on a Pixel XL. | 19 // uses approximately 0.55 on a Pixel XL. |
| 20 static constexpr float kWebVrRecommendedResolutionScale = 0.5; | 20 static constexpr float kWebVrRecommendedResolutionScale = 0.5; |
| 21 |
| 22 // TODO(mthiesse): If gvr::PlatformInfo().GetPosePredictionTime() is ever |
| 23 // exposed, use that instead (it defaults to 50ms on most platforms). |
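| // 50000000 nanoseconds = 50 milliseconds. |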
| 24 static constexpr int64_t kPredictionTimeWithoutVsyncNanos = 50000000; |
| 25 |
| 26 // Time offset used for calculating angular velocity from a pair of predicted |
| 27 // poses. The precise value shouldn't matter as long as it's nonzero and much |
| 28 // less than a frame. |
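| // (1000000 ns = 1 ms, versus roughly 11-16 ms per frame at 60-90 Hz.) |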
| 29 static constexpr int64_t kAngularVelocityEpsilonNanos = 1000000; |
| 30 |
| 31 // Matrix math copied from vr_shell's vr_math.cc; it can't be used here |
| 32 // due to dependency ordering. TODO(mthiesse): move the vr_math code |
| 33 // to this directory so that both locations can use it. |
| 34 |
| 35 // Rotation only, ignore translation components. |
| 36 gvr::Vec3f MatrixVectorRotate(const gvr::Mat4f& m, const gvr::Vec3f& v) { |
| 37 gvr::Vec3f res; |
| 38 res.x = m.m[0][0] * v.x + m.m[0][1] * v.y + m.m[0][2] * v.z; |
| 39 res.y = m.m[1][0] * v.x + m.m[1][1] * v.y + m.m[1][2] * v.z; |
| 40 res.z = m.m[2][0] * v.x + m.m[2][1] * v.y + m.m[2][2] * v.z; |
| 41 return res; |
| 42 } |
| 43 |
| 44 gvr::Mat4f MatrixMul(const gvr::Mat4f& matrix1, const gvr::Mat4f& matrix2) { |
| 45 gvr::Mat4f result; |
| 46 for (int i = 0; i < 4; ++i) { |
| 47 for (int j = 0; j < 4; ++j) { |
| 48 result.m[i][j] = 0.0f; |
| 49 for (int k = 0; k < 4; ++k) { |
| 50 result.m[i][j] += matrix1.m[i][k] * matrix2.m[k][j]; |
| 51 } |
| 52 } |
| 53 } |
| 54 return result; |
| 55 } |
| 56 |
| 57 gvr::Vec3f GetAngularVelocityFromPoses(gvr::Mat4f head_mat, |
| 58 gvr::Mat4f head_mat_2, |
| 59 double epsilon_seconds) { |
| 60 // The angular velocity is a 3-element vector pointing along the rotation |
| 61 // axis with magnitude equal to rotation speed in radians/second, expressed |
| 62 // in the seated frame of reference. |
| 63 // |
| 64 // The 1.1 spec isn't very clear on the details; clarification was |
| 65 // requested in https://github.com/w3c/webvr/issues/212. For now, assume |
| 66 // that we want a vector in the seated reference frame. |
| 67 // |
| 68 // Assuming that pose prediction is simply based on adding a time * angular |
| 69 // velocity rotation to the pose, we can approximate the angular velocity |
| 70 // from the difference between two successive poses. This is a first order |
| 71 // estimate that assumes small enough rotations so that we can do linear |
| 72 // approximation. |
| 73 // |
| 74 // See: |
| 75 // https://en.wikipedia.org/wiki/Angular_velocity#Calculation_from_the_orientation_matrix |
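| // In matrix terms: with R(t) the rotation part of the head matrix, the code |
| // below approximates the angular velocity tensor W = dR/dt * R^T by the |
| // finite difference ((R(t + epsilon) - R(t)) / epsilon) * R(t)^T. |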
| 76 |
| 77 gvr::Mat4f delta_mat; |
| 78 gvr::Mat4f inverse_head_mat; |
| 79 // Calculate difference matrix, and inverse head matrix rotation. |
| 80 // For the inverse rotation, just transpose the 3x3 subsection. |
| 81 // |
| 82 // Assume that epsilon is nonzero since it's based on a compile-time constant |
| 83 // provided by the caller. |
| 84 for (int j = 0; j < 3; ++j) { |
| 85 for (int i = 0; i < 3; ++i) { |
| 86 delta_mat.m[j][i] = |
| 87 (head_mat_2.m[j][i] - head_mat.m[j][i]) / epsilon_seconds; |
| 88 inverse_head_mat.m[j][i] = head_mat.m[i][j]; |
| 89 } |
| 90 delta_mat.m[j][3] = delta_mat.m[3][j] = 0.0; |
| 91 inverse_head_mat.m[j][3] = inverse_head_mat.m[3][j] = 0.0; |
| 92 } |
| 93 delta_mat.m[3][3] = 1.0; |
| 94 inverse_head_mat.m[3][3] = 1.0; |
| 95 gvr::Mat4f omega_mat = device::MatrixMul(delta_mat, inverse_head_mat); |
| 96 gvr::Vec3f omega_vec; |
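| // omega_mat is approximately skew-symmetric; the rotation-axis components |
| // are read from its off-diagonal entries. |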
| 97 omega_vec.x = -omega_mat.m[2][1]; |
| 98 omega_vec.y = omega_mat.m[2][0]; |
| 99 omega_vec.z = -omega_mat.m[1][0]; |
| 100 |
| 101 // Rotate by inverse head matrix to bring into seated space. |
| 102 gvr::Vec3f angular_velocity = |
| 103 device::MatrixVectorRotate(inverse_head_mat, omega_vec); |
| 104 |
| 105 return angular_velocity; |
| 106 } |
| 107 |
| 21 } // namespace | 108 } // namespace |
| 22 | 109 |
| 23 /* static */ | 110 /* static */ |
| 24 mojom::VRPosePtr GvrDelegate::VRPosePtrFromGvrPose(gvr::Mat4f head_mat) { | 111 mojom::VRPosePtr GvrDelegate::VRPosePtrFromGvrPose(gvr::Mat4f head_mat) { |
| 25 mojom::VRPosePtr pose = mojom::VRPose::New(); | 112 mojom::VRPosePtr pose = mojom::VRPose::New(); |
| 26 | 113 |
| 27 pose->orientation.emplace(4); | 114 pose->orientation.emplace(4); |
| 28 | 115 |
| 29 gfx::Transform inv_transform( | 116 gfx::Transform inv_transform( |
| 30 head_mat.m[0][0], head_mat.m[0][1], head_mat.m[0][2], head_mat.m[0][3], | 117 head_mat.m[0][0], head_mat.m[0][1], head_mat.m[0][2], head_mat.m[0][3], |
| (...skipping 14 matching lines...) |
| 45 pose->position.emplace(3); | 132 pose->position.emplace(3); |
| 46 pose->position.value()[0] = decomposed_transform.translate[0]; | 133 pose->position.value()[0] = decomposed_transform.translate[0]; |
| 47 pose->position.value()[1] = decomposed_transform.translate[1]; | 134 pose->position.value()[1] = decomposed_transform.translate[1]; |
| 48 pose->position.value()[2] = decomposed_transform.translate[2]; | 135 pose->position.value()[2] = decomposed_transform.translate[2]; |
| 49 } | 136 } |
| 50 | 137 |
| 51 return pose; | 138 return pose; |
| 52 } | 139 } |
| 53 | 140 |
| 54 /* static */ | 141 /* static */ |
| 142 gvr::Mat4f GvrDelegate::GetGvrPoseWithNeckModel(gvr::GvrApi* gvr_api) { |
| 143 gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow(); |
| 144 target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos; |
| 145 |
| 146 gvr::Mat4f head_mat = gvr_api->ApplyNeckModel( |
| 147 gvr_api->GetHeadSpaceFromStartSpaceRotation(target_time), 1.0f); |
| 148 |
| 149 return head_mat; |
| 150 } |
| 151 |
| 152 /* static */ |
| 153 mojom::VRPosePtr GvrDelegate::GetVRPosePtrWithNeckModel( |
| 154 gvr::GvrApi* gvr_api, |
| 155 gvr::Mat4f* head_mat_out) { |
| 156 gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow(); |
| 157 target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos; |
| 158 |
| 159 gvr::Mat4f head_mat = gvr_api->ApplyNeckModel( |
| 160 gvr_api->GetHeadSpaceFromStartSpaceRotation(target_time), 1.0f); |
| 161 |
| 162 if (head_mat_out) |
| 163 *head_mat_out = head_mat; |
| 164 |
| 165 mojom::VRPosePtr pose = GvrDelegate::VRPosePtrFromGvrPose(head_mat); |
| 166 |
| 167 // Get a second pose a bit later to calculate angular velocity. |
| 168 target_time.monotonic_system_time_nanos += kAngularVelocityEpsilonNanos; |
| 169 gvr::Mat4f head_mat_2 = |
| 170 gvr_api->GetHeadSpaceFromStartSpaceRotation(target_time); |
| 171 |
| 172 // Add headset angular velocity to the pose. |
| 173 pose->angularVelocity.emplace(3); |
| 174 double epsilon_seconds = kAngularVelocityEpsilonNanos * 1e-9; |
| 175 gvr::Vec3f angular_velocity = |
| 176 GetAngularVelocityFromPoses(head_mat, head_mat_2, epsilon_seconds); |
| 177 pose->angularVelocity.value()[0] = angular_velocity.x; |
| 178 pose->angularVelocity.value()[1] = angular_velocity.y; |
| 179 pose->angularVelocity.value()[2] = angular_velocity.z; |
| 180 |
| 181 return pose; |
| 182 } |
| 183 |
| 184 /* static */ |
| 55 gvr::Sizei GvrDelegate::GetRecommendedWebVrSize(gvr::GvrApi* gvr_api) { | 185 gvr::Sizei GvrDelegate::GetRecommendedWebVrSize(gvr::GvrApi* gvr_api) { |
| 56 // Pick a reasonable default size for the WebVR transfer surface | 186 // Pick a reasonable default size for the WebVR transfer surface |
| 57 // based on a downscaled 1:1 render resolution. This size will also | 187 // based on a downscaled 1:1 render resolution. This size will also |
| 58 // be reported to the client via CreateVRDisplayInfo as the | 188 // be reported to the client via CreateVRDisplayInfo as the |
| 59 // client-recommended renderWidth/renderHeight and for the GVR | 189 // client-recommended renderWidth/renderHeight and for the GVR |
| 60 // framebuffer. If the client chooses a different size or resizes it | 190 // framebuffer. If the client chooses a different size or resizes it |
| 61 // while presenting, we'll resize the transfer surface and GVR | 191 // while presenting, we'll resize the transfer surface and GVR |
| 62 // framebuffer to match. | 192 // framebuffer to match. |
| 63 gvr::Sizei render_target_size = | 193 gvr::Sizei render_target_size = |
| 64 gvr_api->GetMaximumEffectiveRenderTargetSize(); | 194 gvr_api->GetMaximumEffectiveRenderTargetSize(); |
| (...skipping 58 matching lines...) |
| 123 gvr::Mat4f eye_mat = gvr_api->GetEyeFromHeadMatrix(eye); | 253 gvr::Mat4f eye_mat = gvr_api->GetEyeFromHeadMatrix(eye); |
| 124 eye_params->offset[0] = -eye_mat.m[0][3]; | 254 eye_params->offset[0] = -eye_mat.m[0][3]; |
| 125 eye_params->offset[1] = -eye_mat.m[1][3]; | 255 eye_params->offset[1] = -eye_mat.m[1][3]; |
| 126 eye_params->offset[2] = -eye_mat.m[2][3]; | 256 eye_params->offset[2] = -eye_mat.m[2][3]; |
| 127 } | 257 } |
| 128 | 258 |
| 129 return device; | 259 return device; |
| 130 } | 260 } |
| 131 | 261 |
| 132 } // namespace device | 262 } // namespace device |