Chromium Code Reviews

Unified Diff: device/vr/android/gvr/gvr_delegate.cc

Issue 2814443004: Refactor VR math off of GVR types, onto gfx types where possible. (Closed)
Patch Set: Fix tests (created 3 years, 8 months ago)

The patch is rendered below as a unified diff: lines prefixed with '-' are removed, lines prefixed with '+' are added, and unprefixed lines are unchanged context.
 // Copyright 2016 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "device/vr/android/gvr/gvr_delegate.h"

 #include "base/trace_event/trace_event.h"
+#include "device/vr/vr_math.h"
 #include "third_party/gvr-android-sdk/src/libraries/headers/vr/gvr/capi/include/gvr.h"
+#include "third_party/gvr-android-sdk/src/libraries/headers/vr/gvr/capi/include/gvr_types.h"
 #include "ui/gfx/transform.h"
 #include "ui/gfx/transform_util.h"
 namespace device {

 namespace {
 // Default downscale factor for computing the recommended WebVR
 // renderWidth/Height from the 1:1 pixel mapped size. Using a rather
 // aggressive downscale due to the high overhead of copying pixels
 // twice before handing off to GVR. For comparison, the polyfill
 // uses approximately 0.55 on a Pixel XL.
 static constexpr float kWebVrRecommendedResolutionScale = 0.5;

 // TODO(mthiesse): If gvr::PlatformInfo().GetPosePredictionTime() is ever
 // exposed, use that instead (it defaults to 50ms on most platforms).
 static constexpr int64_t kPredictionTimeWithoutVsyncNanos = 50000000;

 // Time offset used for calculating angular velocity from a pair of predicted
 // poses. The precise value shouldn't matter as long as it's nonzero and much
 // less than a frame.
 static constexpr int64_t kAngularVelocityEpsilonNanos = 1000000;
-// Matrix math copied from vr_shell's vr_math.cc, can't use that here
-// due to dependency ordering. TODO(mthiesse): move the vr_math code
-// to this directory so that both locations can use it.
-
-// Rotation only, ignore translation components.
-gvr::Vec3f MatrixVectorRotate(const gvr::Mat4f& m, const gvr::Vec3f& v) {
-  gvr::Vec3f res;
-  res.x = m.m[0][0] * v.x + m.m[0][1] * v.y + m.m[0][2] * v.z;
-  res.y = m.m[1][0] * v.x + m.m[1][1] * v.y + m.m[1][2] * v.z;
-  res.z = m.m[2][0] * v.x + m.m[2][1] * v.y + m.m[2][2] * v.z;
-  return res;
-}
+void GvrMatToMatf(const gvr::Mat4f& in, vr::Mat4f* out) {
+  // If our std::array implementation doesn't have any non-data members, we
+  // can just cast the gvr matrix to an std::array.
+  static_assert(sizeof(in) == sizeof(*out),
+                "Cannot reinterpret gvr::Mat4f as vr::Mat4f");
+  *out = *reinterpret_cast<vr::Mat4f*>(const_cast<gvr::Mat4f*>(&in));
+}
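The cast in GvrMatToMatf is only sound if vr::Mat4f (an std::array-backed matrix type in this patch) is layout-compatible with gvr::Mat4f's plain float m[4][4]. Below is a standalone sketch of the same pattern using hypothetical MatA/MatB stand-ins; it adds a trivially-copyable guard on top of the sizeof check, and uses memcpy, which sidesteps the strict-aliasing questions raised by reinterpret_cast and compiles to the same code:

    #include <array>
    #include <cstring>
    #include <type_traits>

    // Hypothetical stand-ins for gvr::Mat4f and vr::Mat4f.
    struct MatA { float m[4][4]; };
    using MatB = std::array<std::array<float, 4>, 4>;

    void MatAToMatB(const MatA& in, MatB* out) {
      // Reinterpreting one type's bytes as the other is only defensible when
      // the sizes match and both types are trivially copyable.
      static_assert(sizeof(MatA) == sizeof(MatB), "size mismatch");
      static_assert(std::is_trivially_copyable<MatA>::value &&
                        std::is_trivially_copyable<MatB>::value,
                    "matrix types must be trivially copyable");
      std::memcpy(out->data(), in.m, sizeof(MatA));
    }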
-gvr::Mat4f MatrixMul(const gvr::Mat4f& matrix1, const gvr::Mat4f& matrix2) {
-  gvr::Mat4f result;
-  for (int i = 0; i < 4; ++i) {
-    for (int j = 0; j < 4; ++j) {
-      result.m[i][j] = 0.0f;
-      for (int k = 0; k < 4; ++k) {
-        result.m[i][j] += matrix1.m[i][k] * matrix2.m[k][j];
-      }
-    }
-  }
-  return result;
-}
-
-gvr::Vec3f GetAngularVelocityFromPoses(gvr::Mat4f head_mat,
-                                       gvr::Mat4f head_mat_2,
-                                       double epsilon_seconds) {
+gfx::Vector3dF GetAngularVelocityFromPoses(vr::Mat4f head_mat,
+                                           vr::Mat4f head_mat_2,
+                                           double epsilon_seconds) {
   // The angular velocity is a 3-element vector pointing along the rotation
   // axis with magnitude equal to rotation speed in radians/second, expressed
   // in the seated frame of reference.
   //
   // The 1.1 spec isn't very clear on details, clarification requested in
   // https://github.com/w3c/webvr/issues/212 . For now, assuming that we
   // want a vector in the sitting reference frame.
   //
   // Assuming that pose prediction is simply based on adding a time * angular
   // velocity rotation to the pose, we can approximate the angular velocity
   // from the difference between two successive poses. This is a first order
   // estimate that assumes small enough rotations so that we can do linear
   // approximation.
   //
   // See:
   // https://en.wikipedia.org/wiki/Angular_velocity#Calculation_from_the_orientation_matrix

-  gvr::Mat4f delta_mat;
-  gvr::Mat4f inverse_head_mat;
+  vr::Mat4f delta_mat;
+  vr::Mat4f inverse_head_mat;
   // Calculate difference matrix, and inverse head matrix rotation.
   // For the inverse rotation, just transpose the 3x3 subsection.
   //
   // Assume that epsilon is nonzero since it's based on a compile-time constant
   // provided by the caller.
   for (int j = 0; j < 3; ++j) {
     for (int i = 0; i < 3; ++i) {
-      delta_mat.m[j][i] =
-          (head_mat_2.m[j][i] - head_mat.m[j][i]) / epsilon_seconds;
-      inverse_head_mat.m[j][i] = head_mat.m[i][j];
+      delta_mat[j][i] = (head_mat_2[j][i] - head_mat[j][i]) / epsilon_seconds;
+      inverse_head_mat[j][i] = head_mat[i][j];
     }
-    delta_mat.m[j][3] = delta_mat.m[3][j] = 0.0;
-    inverse_head_mat.m[j][3] = inverse_head_mat.m[3][j] = 0.0;
+    delta_mat[j][3] = delta_mat[3][j] = 0.0;
+    inverse_head_mat[j][3] = inverse_head_mat[3][j] = 0.0;
   }
-  delta_mat.m[3][3] = 1.0;
-  inverse_head_mat.m[3][3] = 1.0;
-  gvr::Mat4f omega_mat = device::MatrixMul(delta_mat, inverse_head_mat);
-  gvr::Vec3f omega_vec;
-  omega_vec.x = -omega_mat.m[2][1];
-  omega_vec.y = omega_mat.m[2][0];
-  omega_vec.z = -omega_mat.m[1][0];
+  delta_mat[3][3] = 1.0;
+  inverse_head_mat[3][3] = 1.0;
+  vr::Mat4f omega_mat;
+  vr::MatrixMul(delta_mat, inverse_head_mat, &omega_mat);
+  gfx::Vector3dF omega_vec(-omega_mat[2][1], omega_mat[2][0], -omega_mat[1][0]);

   // Rotate by inverse head matrix to bring into seated space.
-  gvr::Vec3f angular_velocity =
-      device::MatrixVectorRotate(inverse_head_mat, omega_vec);
-
-  return angular_velocity;
+  return vr::MatrixVectorRotate(inverse_head_mat, omega_vec);
 }
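The loop above is the discrete form of the identity dR/dt = [omega]x * R for a time-varying rotation R(t): the matrix [omega]x ~= ((R2 - R1) / epsilon) * R1^T is skew-symmetric, and the angular velocity components can be read off its off-diagonal entries. A self-contained sketch using the standard convention [omega]x = [[0, -wz, wy], [wz, 0, -wx], [-wy, wx, 0]] (the patch reads the entries with flipped signs because its head matrix is the inverse, head-from-start rotation):

    #include <array>

    using Mat3 = std::array<std::array<double, 3>, 3>;

    // First-order angular velocity estimate from two orientation matrices
    // r1, r2 sampled dt seconds apart.
    std::array<double, 3> AngularVelocity(const Mat3& r1, const Mat3& r2,
                                          double dt) {
      Mat3 omega{};  // omega = ((r2 - r1) / dt) * transpose(r1)
      for (int i = 0; i < 3; ++i)
        for (int j = 0; j < 3; ++j)
          for (int k = 0; k < 3; ++k)
            omega[i][j] += (r2[i][k] - r1[i][k]) / dt * r1[j][k];
      // Read (wx, wy, wz) off the skew-symmetric result.
      return {omega[2][1], omega[0][2], omega[1][0]};
    }

For r1 = identity and r2 a rotation of 0.001 rad about z taken dt = 0.001 s apart, this returns approximately (0, 0, 1) rad/s, as the first-order estimate predicts.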

 }  // namespace

 /* static */
-mojom::VRPosePtr GvrDelegate::VRPosePtrFromGvrPose(gvr::Mat4f head_mat) {
+mojom::VRPosePtr GvrDelegate::VRPosePtrFromGvrPose(const vr::Mat4f& head_mat) {
   mojom::VRPosePtr pose = mojom::VRPose::New();

   pose->orientation.emplace(4);

   gfx::Transform inv_transform(
-      head_mat.m[0][0], head_mat.m[0][1], head_mat.m[0][2], head_mat.m[0][3],
-      head_mat.m[1][0], head_mat.m[1][1], head_mat.m[1][2], head_mat.m[1][3],
-      head_mat.m[2][0], head_mat.m[2][1], head_mat.m[2][2], head_mat.m[2][3],
-      head_mat.m[3][0], head_mat.m[3][1], head_mat.m[3][2], head_mat.m[3][3]);
+      head_mat[0][0], head_mat[0][1], head_mat[0][2], head_mat[0][3],
+      head_mat[1][0], head_mat[1][1], head_mat[1][2], head_mat[1][3],
+      head_mat[2][0], head_mat[2][1], head_mat[2][2], head_mat[2][3],
+      head_mat[3][0], head_mat[3][1], head_mat[3][2], head_mat[3][3]);

   gfx::Transform transform;
   if (inv_transform.GetInverse(&transform)) {
     gfx::DecomposedTransform decomposed_transform;
     gfx::DecomposeTransform(&decomposed_transform, transform);

     pose->orientation.value()[0] = decomposed_transform.quaternion[0];
     pose->orientation.value()[1] = decomposed_transform.quaternion[1];
     pose->orientation.value()[2] = decomposed_transform.quaternion[2];
     pose->orientation.value()[3] = decomposed_transform.quaternion[3];

     pose->position.emplace(3);
     pose->position.value()[0] = decomposed_transform.translate[0];
     pose->position.value()[1] = decomposed_transform.translate[1];
     pose->position.value()[2] = decomposed_transform.translate[2];
   }

   return pose;
 }
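VRPosePtrFromGvrPose leans on gfx::Transform::GetInverse and gfx::DecomposeTransform to turn the inverted head matrix into the quaternion and translation WebVR expects. For a rigid head pose the interesting step is quaternion extraction from the 3x3 rotation block; here is a minimal sketch of the standard trace-based method, ordered (x, y, z, w). It is not the actual gfx implementation, which also handles scale, skew, and perspective:

    #include <array>
    #include <cmath>

    using Mat4 = std::array<std::array<float, 4>, 4>;

    // Quaternion from the rotation block of a rigid 4x4 transform.
    std::array<float, 4> QuaternionFromRotation(const Mat4& m) {
      float trace = m[0][0] + m[1][1] + m[2][2];
      if (trace > 0.0f) {
        float s = 0.5f / std::sqrt(trace + 1.0f);
        return {(m[2][1] - m[1][2]) * s, (m[0][2] - m[2][0]) * s,
                (m[1][0] - m[0][1]) * s, 0.25f / s};
      }
      // Small-trace fallback, shown only for the case where m[0][0] is the
      // largest diagonal entry; a full implementation needs all three cases.
      float s = 2.0f * std::sqrt(1.0f + m[0][0] - m[1][1] - m[2][2]);
      return {0.25f * s, (m[0][1] + m[1][0]) / s, (m[0][2] + m[2][0]) / s,
              (m[2][1] - m[1][2]) / s};
    }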

 /* static */
-gvr::Mat4f GvrDelegate::GetGvrPoseWithNeckModel(gvr::GvrApi* gvr_api) {
+void GvrDelegate::GetGvrPoseWithNeckModel(gvr::GvrApi* gvr_api,
+                                          vr::Mat4f* out) {
   gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow();
   target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos;

   gvr::Mat4f head_mat = gvr_api->ApplyNeckModel(
       gvr_api->GetHeadSpaceFromStartSpaceRotation(target_time), 1.0f);

-  return head_mat;
+  GvrMatToMatf(head_mat, out);
 }

 /* static */
 mojom::VRPosePtr GvrDelegate::GetVRPosePtrWithNeckModel(
     gvr::GvrApi* gvr_api,
-    gvr::Mat4f* head_mat_out) {
+    vr::Mat4f* head_mat_out) {
   gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow();
   target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos;

-  gvr::Mat4f head_mat = gvr_api->ApplyNeckModel(
+  gvr::Mat4f gvr_head_mat = gvr_api->ApplyNeckModel(
       gvr_api->GetHeadSpaceFromStartSpaceRotation(target_time), 1.0f);

-  if (head_mat_out)
-    *head_mat_out = head_mat;
+  vr::Mat4f* head_mat_ptr = head_mat_out;
+  vr::Mat4f head_mat;
+  if (!head_mat_ptr)
+    head_mat_ptr = &head_mat;
+  GvrMatToMatf(gvr_head_mat, head_mat_ptr);

-  mojom::VRPosePtr pose = GvrDelegate::VRPosePtrFromGvrPose(head_mat);
+  mojom::VRPosePtr pose = GvrDelegate::VRPosePtrFromGvrPose(*head_mat_ptr);

   // Get a second pose a bit later to calculate angular velocity.
   target_time.monotonic_system_time_nanos += kAngularVelocityEpsilonNanos;
-  gvr::Mat4f head_mat_2 =
+  gvr::Mat4f gvr_head_mat_2 =
       gvr_api->GetHeadSpaceFromStartSpaceRotation(target_time);
+  vr::Mat4f head_mat_2;
+  GvrMatToMatf(gvr_head_mat_2, &head_mat_2);

   // Add headset angular velocity to the pose.
   pose->angularVelocity.emplace(3);
   double epsilon_seconds = kAngularVelocityEpsilonNanos * 1e-9;
-  gvr::Vec3f angular_velocity =
-      GetAngularVelocityFromPoses(head_mat, head_mat_2, epsilon_seconds);
-  pose->angularVelocity.value()[0] = angular_velocity.x;
-  pose->angularVelocity.value()[1] = angular_velocity.y;
-  pose->angularVelocity.value()[2] = angular_velocity.z;
+  gfx::Vector3dF angular_velocity =
+      GetAngularVelocityFromPoses(*head_mat_ptr, head_mat_2, epsilon_seconds);
+  pose->angularVelocity.value()[0] = angular_velocity.x();
+  pose->angularVelocity.value()[1] = angular_velocity.y();
+  pose->angularVelocity.value()[2] = angular_velocity.z();

   return pose;
 }
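A small design note on the rewritten function: rather than branching on head_mat_out at every use, it falls back to a stack-local so the rest of the body writes through one always-valid pointer. The pattern in isolation, with hypothetical Pose/FillPose names:

    struct Pose { float mat[16]; };                     // hypothetical payload
    void FillPose(Pose* pose) { pose->mat[0] = 1.0f; }  // stand-in for real work

    // Callers that don't need the intermediate result pass nullptr; the
    // function still computes into a local it can keep using afterwards.
    void ComputeAndUsePose(Pose* pose_out) {
      Pose local;
      Pose* pose = pose_out ? pose_out : &local;
      FillPose(pose);
      // ... continue using *pose either way ...
    }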

 /* static */
-gvr::Sizei GvrDelegate::GetRecommendedWebVrSize(gvr::GvrApi* gvr_api) {
+gfx::Size GvrDelegate::GetRecommendedWebVrSize(gvr::GvrApi* gvr_api) {
   // Pick a reasonable default size for the WebVR transfer surface
   // based on a downscaled 1:1 render resolution. This size will also
   // be reported to the client via CreateVRDisplayInfo as the
   // client-recommended renderWidth/renderHeight and for the GVR
   // framebuffer. If the client chooses a different size or resizes it
   // while presenting, we'll resize the transfer surface and GVR
   // framebuffer to match.
   gvr::Sizei render_target_size =
       gvr_api->GetMaximumEffectiveRenderTargetSize();
-  gvr::Sizei webvr_size = {static_cast<int>(render_target_size.width *
-                                            kWebVrRecommendedResolutionScale),
-                           static_cast<int>(render_target_size.height *
-                                            kWebVrRecommendedResolutionScale)};
+
+  gfx::Size webvr_size(
+      render_target_size.width * kWebVrRecommendedResolutionScale,
+      render_target_size.height * kWebVrRecommendedResolutionScale);
+
   // Ensure that the width is an even number so that the eyes each
   // get the same size, the recommended renderWidth is per eye
   // and the client will use the sum of the left and right width.
   //
   // TODO(klausw,crbug.com/699350): should we round the recommended
   // size to a multiple of 2^N pixels to be friendlier to the GPU? The
   // exact size doesn't matter, and it might be more efficient.
-  webvr_size.width &= ~1;
-
+  webvr_size.set_width(webvr_size.width() & ~1);
   return webvr_size;
 }
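The final masking step rounds the width down to an even number because renderWidth is reported per eye and the client sums the left and right halves. The TODO's 2^N idea generalizes the same bit trick; a hedged sketch of that variant (not what the patch ships):

    // Round a dimension down to a multiple of 2^n. n = 1 reproduces the
    // even-width rounding above; larger n is the TODO's GPU-friendlier idea.
    int RoundDownToPow2Multiple(int value, int n) {
      return value & ~((1 << n) - 1);
    }

    // RoundDownToPow2Multiple(1481, 1) == 1480
    // RoundDownToPow2Multiple(1481, 4) == 1472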

 /* static */
 mojom::VRDisplayInfoPtr GvrDelegate::CreateVRDisplayInfo(
     gvr::GvrApi* gvr_api,
-    gvr::Sizei recommended_size,
+    gfx::Size recommended_size,
     uint32_t device_id) {
   TRACE_EVENT0("input", "GvrDelegate::CreateVRDisplayInfo");

   mojom::VRDisplayInfoPtr device = mojom::VRDisplayInfo::New();

   device->index = device_id;

   device->capabilities = mojom::VRDisplayCapabilities::New();
   device->capabilities->hasPosition = false;
   device->capabilities->hasExternalDisplay = false;
   device->capabilities->canPresent = true;

   std::string vendor = gvr_api->GetViewerVendor();
   std::string model = gvr_api->GetViewerModel();
   device->displayName = vendor + " " + model;

   gvr::BufferViewportList gvr_buffer_viewports =
       gvr_api->CreateEmptyBufferViewportList();
   gvr_buffer_viewports.SetToRecommendedBufferViewports();

   device->leftEye = mojom::VREyeParameters::New();
   device->rightEye = mojom::VREyeParameters::New();
   for (auto eye : {GVR_LEFT_EYE, GVR_RIGHT_EYE}) {
     mojom::VREyeParametersPtr& eye_params =
         (eye == GVR_LEFT_EYE) ? device->leftEye : device->rightEye;
     eye_params->fieldOfView = mojom::VRFieldOfView::New();
     eye_params->offset.resize(3);
-    eye_params->renderWidth = recommended_size.width / 2;
-    eye_params->renderHeight = recommended_size.height;
+    eye_params->renderWidth = recommended_size.width() / 2;
+    eye_params->renderHeight = recommended_size.height();

     gvr::BufferViewport eye_viewport = gvr_api->CreateBufferViewport();
     gvr_buffer_viewports.GetBufferViewport(eye, &eye_viewport);
     gvr::Rectf eye_fov = eye_viewport.GetSourceFov();
     eye_params->fieldOfView->upDegrees = eye_fov.top;
     eye_params->fieldOfView->downDegrees = eye_fov.bottom;
     eye_params->fieldOfView->leftDegrees = eye_fov.left;
     eye_params->fieldOfView->rightDegrees = eye_fov.right;

     gvr::Mat4f eye_mat = gvr_api->GetEyeFromHeadMatrix(eye);
     eye_params->offset[0] = -eye_mat.m[0][3];
     eye_params->offset[1] = -eye_mat.m[1][3];
     eye_params->offset[2] = -eye_mat.m[2][3];
   }

   return device;
 }
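The per-eye offset is the negated translation column of GVR's eye-from-head matrix, i.e. the eye's position in head space, assuming that transform is a pure translation (typical for GVR viewers). Under that reading, the distance between the two offsets approximates the interpupillary distance; a small illustrative helper:

    #include <cmath>

    // Distance between left/right eye offsets (each the eye position in head
    // space, in meters) approximates the viewer's interpupillary distance.
    float IpdFromOffsets(const float left[3], const float right[3]) {
      float dx = right[0] - left[0];
      float dy = right[1] - left[1];
      float dz = right[2] - left[2];
      return std::sqrt(dx * dx + dy * dy + dz * dz);
    }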

 }  // namespace device