Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2016 The Chromium Authors. All rights reserved. | 1 // Copyright 2016 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "device/vr/android/gvr/gvr_device.h" | 5 #include "device/vr/android/gvr/gvr_device.h" |
| 6 | 6 |
| 7 #include <math.h> | 7 #include <math.h> |
| 8 #include <algorithm> | 8 #include <algorithm> |
| 9 | 9 |
| 10 #include "base/time/time.h" | 10 #include "base/time/time.h" |
| 11 #include "base/trace_event/trace_event.h" | 11 #include "base/trace_event/trace_event.h" |
| 12 #include "device/vr/android/gvr/gvr_delegate.h" | 12 #include "device/vr/android/gvr/gvr_delegate.h" |
| 13 #include "device/vr/android/gvr/gvr_device_provider.h" | 13 #include "device/vr/android/gvr/gvr_device_provider.h" |
| 14 #include "device/vr/vr_device_manager.h" | 14 #include "device/vr/vr_device_manager.h" |
| 15 #include "third_party/gvr-android-sdk/src/libraries/headers/vr/gvr/capi/include/gvr.h" | 15 #include "third_party/gvr-android-sdk/src/libraries/headers/vr/gvr/capi/include/gvr.h" |
| 16 #include "third_party/gvr-android-sdk/src/libraries/headers/vr/gvr/capi/include/gvr_types.h" | 16 #include "third_party/gvr-android-sdk/src/libraries/headers/vr/gvr/capi/include/gvr_types.h" |
| 17 #include "ui/gfx/transform.h" | 17 #include "ui/gfx/transform.h" |
| 18 #include "ui/gfx/transform_util.h" | 18 #include "ui/gfx/transform_util.h" |
| 19 | 19 |
| 20 namespace device { | 20 namespace device { |
| 21 | 21 |
| 22 namespace { | 22 namespace { |
| 23 | 23 |
| 24 static const uint64_t kPredictionTimeWithoutVsyncNanos = 50000000; | 24 // TODO(klausw): adjust this based on historic render times |
| 25 static const double kDefaultRenderTimeMillis = 15.0; | |
| 26 static const double kPredictionTimeWithoutVsyncMillis = 24.0; | |
| 25 | 27 |
| 26 } // namespace | 28 } // namespace |
| 27 | 29 |
| 30 // THIS IS A COPY, keep in sync with VRDisplay.cpp ? | |
| 31 StatTracker::StatTracker(unsigned int capacity) : m_capacity(capacity) {} | |
| 32 | |
| 33 StatTracker::~StatTracker() = default; | |
| 34 | |
| 35 void StatTracker::add(double item) { | |
| 36 if (m_items.size() >= m_capacity) { | |
| 37 m_items.pop_front(); | |
| 38 } | |
| 39 m_items.push_back(item); | |
| 40 } | |
| 41 | |
| 42 void StatTracker::clear() { | |
| 43 m_items.clear(); | |
| 44 } | |
| 45 | |
| 46 bool StatTracker::hasPrediction() { | |
| 47 return m_items.size() > 0; | |
| 48 } | |
| 49 | |
| 50 double StatTracker::getPrediction() { | |
| 51 assert(hasPrediction()); | |
| 52 | |
| 53 // If we have 3 or more items, ignore min and max outliers and | |
| 54 // average the rest. For 2 or less, minmax.first and minmax.second | |
| 55 // will both be m_items.end(), so it's just a plain average. | |
| 56 auto minmax = m_items.size() > 2 ? | |
| 57 std::minmax_element(m_items.begin(), m_items.end()) : | |
| 58 std::minmax_element(m_items.end(), m_items.end()); | |
| 59 | |
| 60 double sum = 0.0; | |
| 61 int count = 0; | |
| 62 //VLOG(2) << __FUNCTION__ << ": stat start"; | |
| 63 for (auto it = m_items.begin(); it != m_items.end(); ++it) { | |
| 64 //VLOG(2) << __FUNCTION__ << ": val=" << *it; | |
| 65 if (it == minmax.first || it == minmax.second) continue; | |
| 66 sum += *it; | |
| 67 ++count; | |
| 68 } | |
| 69 //VLOG(2) << __FUNCTION__ << ": stat return " << sum / count; | |
| 70 return sum / count; | |
| 71 } | |
| 72 // COPY end | |
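For readers skimming the StatTracker copy above, here is a minimal standalone sketch of the same outlier-trimmed average; the helper name and the use of std::deque directly are illustrative, not part of the patch:

```cpp
#include <algorithm>
#include <deque>

// Outlier-trimmed average: with more than two samples, drop one minimum and
// one maximum and average the rest; otherwise average everything.
double TrimmedAverage(const std::deque<double>& items) {
  auto minmax = items.size() > 2
                    ? std::minmax_element(items.begin(), items.end())
                    : std::make_pair(items.end(), items.end());
  double sum = 0.0;
  int count = 0;
  for (auto it = items.begin(); it != items.end(); ++it) {
    if (it == minmax.first || it == minmax.second)
      continue;
    sum += *it;
    ++count;
  }
  return count > 0 ? sum / count : 0.0;
}
```

For example, with recent samples {14, 15, 40} ms the 14 and 40 are discarded and the prediction is 15 ms; getPrediction() in the patch behaves the same way except that it asserts a non-empty history instead of returning 0.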
| 73 | |
| 28 GvrDevice::GvrDevice(GvrDeviceProvider* provider, GvrDelegate* delegate) | 74 GvrDevice::GvrDevice(GvrDeviceProvider* provider, GvrDelegate* delegate) |
| 29 : VRDevice(), delegate_(delegate), gvr_provider_(provider) {} | 75 : VRDevice(), delegate_(delegate), gvr_provider_(provider) { |
| 76 VLOG(1) << __FUNCTION__ << ": CONSTRUCTOR this=" << (void*)this << " *******************************************************************************************************************"; | |
| 77 } | |
| 30 | 78 |
| 31 GvrDevice::~GvrDevice() {} | 79 GvrDevice::~GvrDevice() { |
| 80 VLOG(1) << __FUNCTION__ << ": DESTRUCTOR this=" << (void*)this << " *******************************************************************************************************************"; | |

artem.bolgar 2017/02/14 05:04:24: It is possible that callback_map_ is not empty her

| 81 } | |
| 32 | 82 |
| 33 mojom::VRDisplayInfoPtr GvrDevice::GetVRDevice() { | 83 mojom::VRDisplayInfoPtr GvrDevice::GetVRDevice() { |
| 34 TRACE_EVENT0("input", "GvrDevice::GetVRDevice"); | 84 TRACE_EVENT0("input", "GvrDevice::GetVRDevice"); |
| 35 | 85 |
| 36 mojom::VRDisplayInfoPtr device = mojom::VRDisplayInfo::New(); | 86 mojom::VRDisplayInfoPtr device = mojom::VRDisplayInfo::New(); |
| 37 | 87 |
| 38 device->index = id(); | 88 device->index = id(); |
| 39 | 89 |
| 40 device->capabilities = mojom::VRDisplayCapabilities::New(); | 90 device->capabilities = mojom::VRDisplayCapabilities::New(); |
| 41 device->capabilities->hasOrientation = true; | 91 device->capabilities->hasOrientation = true; |
| (...skipping 16 matching lines...) | |
| 58 // we can't render into it yet. Other code uses this to check | 108 // we can't render into it yet. Other code uses this to check |
| 59 // for valid state. | 109 // for valid state. |
| 60 gvr::Sizei render_target_size = kInvalidRenderTargetSize; | 110 gvr::Sizei render_target_size = kInvalidRenderTargetSize; |
| 61 left_eye->renderWidth = render_target_size.width / 2; | 111 left_eye->renderWidth = render_target_size.width / 2; |
| 62 left_eye->renderHeight = render_target_size.height; | 112 left_eye->renderHeight = render_target_size.height; |
| 63 | 113 |
| 64 right_eye->renderWidth = left_eye->renderWidth; | 114 right_eye->renderWidth = left_eye->renderWidth; |
| 65 right_eye->renderHeight = left_eye->renderHeight; | 115 right_eye->renderHeight = left_eye->renderHeight; |
| 66 | 116 |
| 67 gvr::GvrApi* gvr_api = GetGvrApi(); | 117 gvr::GvrApi* gvr_api = GetGvrApi(); |
| 118 VLOG(1) << __FUNCTION__ << ": gvr_api=" << gvr_api; | |
| 68 if (!gvr_api) { | 119 if (!gvr_api) { |
| 69 // We may not be able to get an instance of GvrApi right away, so | 120 // We may not be able to get an instance of GvrApi right away, so |
| 70 // stub in some data till we have one. | 121 // stub in some data till we have one. |
| 122 | |
| 123 // TODO(klausw): I think we never get here ?! | |
| 71 device->displayName = "Unknown"; | 124 device->displayName = "Unknown"; |
| 72 | 125 |
| 73 left_eye->fieldOfView->upDegrees = 45; | 126 left_eye->fieldOfView->upDegrees = 45; |
| 74 left_eye->fieldOfView->downDegrees = 45; | 127 left_eye->fieldOfView->downDegrees = 45; |
| 75 left_eye->fieldOfView->leftDegrees = 45; | 128 left_eye->fieldOfView->leftDegrees = 45; |
| 76 left_eye->fieldOfView->rightDegrees = 45; | 129 left_eye->fieldOfView->rightDegrees = 45; |
| 77 | 130 |
| 78 right_eye->fieldOfView->upDegrees = 45; | 131 right_eye->fieldOfView->upDegrees = 45; |
| 79 right_eye->fieldOfView->downDegrees = 45; | 132 right_eye->fieldOfView->downDegrees = 45; |
| 80 right_eye->fieldOfView->leftDegrees = 45; | 133 right_eye->fieldOfView->leftDegrees = 45; |
| 81 right_eye->fieldOfView->rightDegrees = 45; | 134 right_eye->fieldOfView->rightDegrees = 45; |
| 82 | 135 |
| 83 left_eye->offset[0] = -0.0; | 136 left_eye->offset[0] = -0.0; |
| 84 left_eye->offset[1] = -0.0; | 137 left_eye->offset[1] = -0.0; |
| 85 left_eye->offset[2] = -0.03; | 138 left_eye->offset[2] = -0.03; |
| 86 | 139 |
| 87 right_eye->offset[0] = 0.0; | 140 right_eye->offset[0] = 0.0; |
| 88 right_eye->offset[1] = 0.0; | 141 right_eye->offset[1] = 0.0; |
| 89 right_eye->offset[2] = 0.03; | 142 right_eye->offset[2] = 0.03; |
| 90 | 143 |
| 144 #if 0 | |
| 91 // Tell the delegate not to draw yet, to avoid a race condition | 145 // Tell the delegate not to draw yet, to avoid a race condition |
| 92 // (and visible wobble) on entering VR. | 146 // (and visible wobble) on entering VR. |
| 93 if (delegate_) { | 147 if (delegate_) { |
| 94 delegate_->SetWebVRRenderSurfaceSize(kInvalidRenderTargetSize.width, | 148 delegate_->SetWebVRRenderSurfaceSize(kInvalidRenderTargetSize.width, |
| 95 kInvalidRenderTargetSize.height); | 149 kInvalidRenderTargetSize.height); |
| 96 } | 150 } |
| 97 | 151 #endif |
| 98 return device; | 152 return device; |
| 99 } | 153 } |
| 100 | 154 |
| 101 // In compositor mode, we have to use the current compositor window's | 155 render_target_size = gvr_api->GetMaximumEffectiveRenderTargetSize(); |
| 102 // surface size. Would be nice to change it, but that needs more browser | 156 // Render at less than the maximum effective render target size as a |
| 103 // internals to be modified. TODO(klausw,crbug.com/655722): remove this once | 157 // compromise between image quality and performance, similar to |
| 104 // we can pick our own surface size. | 158 // current polyfill. TODO(klausw): allow clients to override this to |
| 105 gvr::Sizei compositor_size = delegate_->GetWebVRCompositorSurfaceSize(); | 159 // get full resolution. |
| 106 left_eye->renderWidth = compositor_size.width / 2; | 160 left_eye->renderWidth = render_target_size.width * 55 / 100 / 2; |
| 107 left_eye->renderHeight = compositor_size.height; | 161 left_eye->renderHeight = render_target_size.height * 55 / 100; |
| 108 right_eye->renderWidth = left_eye->renderWidth; | 162 right_eye->renderWidth = left_eye->renderWidth; |
| 109 right_eye->renderHeight = left_eye->renderHeight; | 163 right_eye->renderHeight = left_eye->renderHeight; |
| 164 LOG(INFO) << "klausw: render_target_size=" << render_target_size.width << "x" | |
| 165 << render_target_size.height; | |
| 110 | 166 |
| 111 std::string vendor = gvr_api->GetViewerVendor(); | 167 std::string vendor = gvr_api->GetViewerVendor(); |
| 112 std::string model = gvr_api->GetViewerModel(); | 168 std::string model = gvr_api->GetViewerModel(); |
| 113 device->displayName = vendor + " " + model; | 169 device->displayName = vendor + " " + model; |
| 114 | 170 |
| 171 VLOG(1) << __FUNCTION__ << ": gvr_api creating buffer viewports"; | |
| 115 gvr::BufferViewportList gvr_buffer_viewports = | 172 gvr::BufferViewportList gvr_buffer_viewports = |
| 116 gvr_api->CreateEmptyBufferViewportList(); | 173 gvr_api->CreateEmptyBufferViewportList(); |
| 117 gvr_buffer_viewports.SetToRecommendedBufferViewports(); | 174 gvr_buffer_viewports.SetToRecommendedBufferViewports(); |
| 118 | 175 |
| 119 gvr::BufferViewport eye_viewport = gvr_api->CreateBufferViewport(); | 176 gvr::BufferViewport eye_viewport = gvr_api->CreateBufferViewport(); |
| 120 gvr_buffer_viewports.GetBufferViewport(GVR_LEFT_EYE, &eye_viewport); | 177 gvr_buffer_viewports.GetBufferViewport(GVR_LEFT_EYE, &eye_viewport); |
| 121 gvr::Rectf eye_fov = eye_viewport.GetSourceFov(); | 178 gvr::Rectf eye_fov = eye_viewport.GetSourceFov(); |
| 122 left_eye->fieldOfView->upDegrees = eye_fov.top; | 179 left_eye->fieldOfView->upDegrees = eye_fov.top; |
| 123 left_eye->fieldOfView->downDegrees = eye_fov.bottom; | 180 left_eye->fieldOfView->downDegrees = eye_fov.bottom; |
| 124 left_eye->fieldOfView->leftDegrees = eye_fov.left; | 181 left_eye->fieldOfView->leftDegrees = eye_fov.left; |
| (...skipping 10 matching lines...) | |
| 135 gvr::Mat4f left_eye_mat = gvr_api->GetEyeFromHeadMatrix(GVR_LEFT_EYE); | 192 gvr::Mat4f left_eye_mat = gvr_api->GetEyeFromHeadMatrix(GVR_LEFT_EYE); |
| 136 left_eye->offset[0] = -left_eye_mat.m[0][3]; | 193 left_eye->offset[0] = -left_eye_mat.m[0][3]; |
| 137 left_eye->offset[1] = -left_eye_mat.m[1][3]; | 194 left_eye->offset[1] = -left_eye_mat.m[1][3]; |
| 138 left_eye->offset[2] = -left_eye_mat.m[2][3]; | 195 left_eye->offset[2] = -left_eye_mat.m[2][3]; |
| 139 | 196 |
| 140 gvr::Mat4f right_eye_mat = gvr_api->GetEyeFromHeadMatrix(GVR_RIGHT_EYE); | 197 gvr::Mat4f right_eye_mat = gvr_api->GetEyeFromHeadMatrix(GVR_RIGHT_EYE); |
| 141 right_eye->offset[0] = -right_eye_mat.m[0][3]; | 198 right_eye->offset[0] = -right_eye_mat.m[0][3]; |
| 142 right_eye->offset[1] = -right_eye_mat.m[1][3]; | 199 right_eye->offset[1] = -right_eye_mat.m[1][3]; |
| 143 right_eye->offset[2] = -right_eye_mat.m[2][3]; | 200 right_eye->offset[2] = -right_eye_mat.m[2][3]; |
| 144 | 201 |
| 145 if (delegate_) { | |
| 146 delegate_->SetWebVRRenderSurfaceSize(2 * left_eye->renderWidth, | |
| 147 left_eye->renderHeight); | |
| 148 } | |
| 149 | |
| 150 return device; | 202 return device; |
| 151 } | 203 } |
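To make the integer arithmetic in the new sizing code easier to check, a hypothetical example; the 2560x1440 figure is illustrative, not a value taken from the patch:

```cpp
#include <cstdint>

// Hypothetical maximum effective render target size, used only to
// illustrate the 55% scaling in GetVRDevice above.
constexpr int32_t kMaxWidth = 2560;
constexpr int32_t kMaxHeight = 1440;

constexpr int32_t kEyeWidth = kMaxWidth * 55 / 100 / 2;  // 1408 / 2 = 704
constexpr int32_t kEyeHeight = kMaxHeight * 55 / 100;    // 792

static_assert(kEyeWidth == 704 && kEyeHeight == 792,
              "55% of a 2560x1440 target is 704x792 per eye");
```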
| 152 | 204 |
| 153 mojom::VRPosePtr GvrDevice::GetPose() { | 205 mojom::VRPosePtr GvrDevice::GetPose() { |
| 154 TRACE_EVENT0("input", "GvrDevice::GetSensorState"); | 206 TRACE_EVENT0("input", "GvrDevice::GetSensorState"); |
| 155 | 207 |
| 208 // Increment pose frame counter always, even if it's a faked pose. | |
| 209 ++pose_index_; | |
| 210 // Don't allow an actual pose index to be negative or zero, those | |
| 211 // are reserved for error or "no pose" states. | |
| 212 if (pose_index_ <= 0) pose_index_ = 1; | |
| 213 | |
| 214 VLOG(2) << __FUNCTION__ << ": frame " << pose_index_; | |
| 215 TRACE_EVENT1("media", "klausw:GetPose", "frame", pose_index_); | |
| 216 | |
| 156 mojom::VRPosePtr pose = mojom::VRPose::New(); | 217 mojom::VRPosePtr pose = mojom::VRPose::New(); |
| 157 | |
| 158 pose->timestamp = base::Time::Now().ToJsTime(); | 218 pose->timestamp = base::Time::Now().ToJsTime(); |
| 159 | 219 pose->poseIndex = pose_index_; |
| 160 // Increment pose frame counter always, even if it's a faked pose. | |
| 161 pose->poseIndex = ++pose_index_; | |
| 162 | |
| 163 pose->orientation.emplace(4); | 220 pose->orientation.emplace(4); |
| 164 | 221 |
| 165 gvr::GvrApi* gvr_api = GetGvrApi(); | 222 gvr::GvrApi* gvr_api = GetGvrApi(); |
| 166 if (!gvr_api) { | 223 if (!gvr_api) { |
| 167 // If we don't have a GvrApi instance return a static forward orientation. | 224 // If we don't have a GvrApi instance return a static forward orientation. |
| 168 pose->orientation.value()[0] = 0.0; | 225 pose->orientation.value()[0] = 0.0; |
| 169 pose->orientation.value()[1] = 0.0; | 226 pose->orientation.value()[1] = 0.0; |
| 170 pose->orientation.value()[2] = 0.0; | 227 pose->orientation.value()[2] = 0.0; |
| 171 pose->orientation.value()[3] = 1.0; | 228 pose->orientation.value()[3] = 1.0; |
| 172 | 229 |
| 173 return pose; | 230 return pose; |
| 174 } | 231 } |
| 175 | 232 |
| 176 if (!delegate_) | 233 if (!delegate_) { |
| 234 VLOG(2) << __FUNCTION__ << ": no delegate, no pose!"; | |
| 177 return nullptr; | 235 return nullptr; |
| 236 } | |
| 178 | 237 |
| 179 gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow(); | 238 gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow(); |
| 180 target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos; | 239 int64_t pose_time = target_time.monotonic_system_time_nanos; |
| 240 | |
| 241 #if 0 | |
| 242 int64_t nstime = std::chrono::duration_cast<std::chrono::nanoseconds>(std::chrono::steady_clock::now().time_since_epoch()).count(); | |
| 243 LOG(INFO) << "klausw: gvr ns=" << pose_time << " steady ns=" << steady_nanos << " delta=" << pose_time - nstime; | |
| 244 #endif | |
| 245 | |
| 246 // TODO(klausw): sanity check | |
| 247 double render_time_ms = last_processing_ms_.hasPrediction() && last_render_ms_.hasPrediction() ? | |
| 248 last_processing_ms_.getPrediction() + last_render_ms_.getPrediction() : | |
| 249 kDefaultRenderTimeMillis; | |
| 250 double predict_time_ms = render_time_ms + kPredictionTimeWithoutVsyncMillis; | |
| 251 VLOG(2) << __FUNCTION__ << ": predict_time_ms=" << predict_time_ms; | |
| 252 target_time.monotonic_system_time_nanos += static_cast<uint64_t>(predict_time_ms * 1e6); | |
| 181 | 253 |
| 182 gvr::Mat4f head_mat = | 254 gvr::Mat4f head_mat = |
| 183 gvr_api->GetHeadSpaceFromStartSpaceRotation(target_time); | 255 gvr_api->GetHeadSpaceFromStartSpaceRotation(target_time); |
| 184 head_mat = gvr_api->ApplyNeckModel(head_mat, 1.0f); | 256 head_mat = gvr_api->ApplyNeckModel(head_mat, 1.0f); |
| 185 | 257 |
| 186 gfx::Transform inv_transform( | 258 gfx::Transform inv_transform( |
| 187 head_mat.m[0][0], head_mat.m[0][1], head_mat.m[0][2], head_mat.m[0][3], | 259 head_mat.m[0][0], head_mat.m[0][1], head_mat.m[0][2], head_mat.m[0][3], |
| 188 head_mat.m[1][0], head_mat.m[1][1], head_mat.m[1][2], head_mat.m[1][3], | 260 head_mat.m[1][0], head_mat.m[1][1], head_mat.m[1][2], head_mat.m[1][3], |
| 189 head_mat.m[2][0], head_mat.m[2][1], head_mat.m[2][2], head_mat.m[2][3], | 261 head_mat.m[2][0], head_mat.m[2][1], head_mat.m[2][2], head_mat.m[2][3], |
| 190 head_mat.m[3][0], head_mat.m[3][1], head_mat.m[3][2], head_mat.m[3][3]); | 262 head_mat.m[3][0], head_mat.m[3][1], head_mat.m[3][2], head_mat.m[3][3]); |
| 191 | 263 |
| 192 gfx::Transform transform; | 264 gfx::Transform transform; |
| 193 if (inv_transform.GetInverse(&transform)) { | 265 if (inv_transform.GetInverse(&transform)) { |
| 194 gfx::DecomposedTransform decomposed_transform; | 266 gfx::DecomposedTransform decomposed_transform; |
| 195 gfx::DecomposeTransform(&decomposed_transform, transform); | 267 gfx::DecomposeTransform(&decomposed_transform, transform); |
| 196 | 268 |
| 197 pose->orientation.value()[0] = decomposed_transform.quaternion[0]; | 269 pose->orientation.value()[0] = decomposed_transform.quaternion[0]; |
| 198 pose->orientation.value()[1] = decomposed_transform.quaternion[1]; | 270 pose->orientation.value()[1] = decomposed_transform.quaternion[1]; |
| 199 pose->orientation.value()[2] = decomposed_transform.quaternion[2]; | 271 pose->orientation.value()[2] = decomposed_transform.quaternion[2]; |
| 200 pose->orientation.value()[3] = decomposed_transform.quaternion[3]; | 272 pose->orientation.value()[3] = decomposed_transform.quaternion[3]; |
| 201 | 273 |
| 202 pose->position.emplace(3); | 274 pose->position.emplace(3); |
| 203 pose->position.value()[0] = decomposed_transform.translate[0]; | 275 pose->position.value()[0] = decomposed_transform.translate[0]; |
| 204 pose->position.value()[1] = decomposed_transform.translate[1]; | 276 pose->position.value()[1] = decomposed_transform.translate[1]; |
| 205 pose->position.value()[2] = decomposed_transform.translate[2]; | 277 pose->position.value()[2] = decomposed_transform.translate[2]; |
| 206 } | 278 } |
| 207 | 279 |
| 208 // Save the underlying GVR pose for use by rendering. It can't use a | 280 // Save the underlying GVR pose for use by rendering. It can't use a |
| 209 // VRPosePtr since that's a different data type. | 281 // VRPosePtr since that's a different data type. |
| 210 delegate_->SetGvrPoseForWebVr(head_mat, pose_index_); | 282 delegate_->SetWebVRGvrPose(head_mat, pose_index_, pose_time); |
| 211 | 283 |
| 212 return pose; | 284 return pose; |
| 213 } | 285 } |
| 214 | 286 |
| 215 void GvrDevice::ResetPose() { | 287 void GvrDevice::ResetPose() { |
| 216 gvr::GvrApi* gvr_api = GetGvrApi(); | 288 gvr::GvrApi* gvr_api = GetGvrApi(); |
| 217 | 289 |
| 218 // Should never call RecenterTracking when using with Daydream viewers. On | 290 // Should never call RecenterTracking when using with Daydream viewers. On |
| 219 // those devices recentering should only be done via the controller. | 291 // those devices recentering should only be done via the controller. |
| 220 if (gvr_api && gvr_api->GetViewerType() == GVR_VIEWER_TYPE_CARDBOARD) | 292 if (gvr_api && gvr_api->GetViewerType() == GVR_VIEWER_TYPE_CARDBOARD) |
| 221 gvr_api->RecenterTracking(); | 293 gvr_api->RecenterTracking(); |
| 222 } | 294 } |
| 223 | 295 |
| 224 void GvrDevice::RequestPresent(const base::Callback<void(bool)>& callback) { | 296 void GvrDevice::RequestPresent(const base::Callback<void(bool)>& callback) { |
| 225 gvr_provider_->RequestPresent(callback); | 297 gvr_provider_->RequestPresent(callback); |
| 226 } | 298 } |
| 227 | 299 |
| 228 void GvrDevice::SetSecureOrigin(bool secure_origin) { | 300 void GvrDevice::SetSecureOrigin(bool secure_origin) { |
| 229 secure_origin_ = secure_origin; | 301 secure_origin_ = secure_origin; |
| 230 if (delegate_) | 302 if (delegate_) |
| 231 delegate_->SetWebVRSecureOrigin(secure_origin_); | 303 delegate_->SetWebVRSecureOrigin(secure_origin_); |
| 232 } | 304 } |
| 233 | 305 |
| 234 void GvrDevice::ExitPresent() { | 306 void GvrDevice::ExitPresent() { |
| 307 // Run pending "frame complete" callbacks | |
| 308 for (auto it : callback_map_) { | |

artem.bolgar 2017/02/14 05:04:24: This cleanup code block should be moved to GvrDevi

| 309 VLOG(2) << __FUNCTION__ << ": ExitPresent running pending callback for frame: " << it.first; | |
| 310 it.second.Run(0, it.first, -1.0); | |
| 311 } | |
| 312 callback_map_.clear(); | |
| 235 gvr_provider_->ExitPresent(); | 313 gvr_provider_->ExitPresent(); |
| 236 OnExitPresent(); | 314 OnExitPresent(); |
| 237 } | 315 } |
| 238 | 316 |
| 239 void GvrDevice::SubmitFrame(mojom::VRPosePtr pose) { | 317 void GvrDevice::SubmitFrame(int32_t surfaceHandle, |
| 240 if (delegate_) | 318 mojom::VRPosePtr pose, |
| 241 delegate_->SubmitWebVRFrame(); | 319 const mojom::VRDisplay::SubmitFrameCallback& callback) { |
| 320 TRACE_EVENT1("media", "klausw:SubmitFrame", "frame", pose->poseIndex); | |
| 321 VLOG(2) << __FUNCTION__ << ": frame " << pose->poseIndex; | |
| 322 if (delegate_) { | |
| 323 VLOG(2) << __FUNCTION__ << ": save callback for frame index " << pose->poseIndex; | |
| 324 callback_map_.insert(std::make_pair(pose->poseIndex, std::move(callback))); | |
| 325 last_processing_ms_.add(pose->ts_submit - pose->ts_getPose); | |
| 326 delegate_->SubmitWebVRFrame(surfaceHandle, std::move(pose)); | |
| 327 } else { | |
| 328 VLOG(2) << __FUNCTION__ << ": No delegate, calling callback now for frame index " << pose->poseIndex; | |
| 329 callback.Run(surfaceHandle, pose->poseIndex, -1.0); | |
| 330 VLOG(2) << __FUNCTION__ << ": run callback done"; | |
| 331 } | |
| 332 } | |
| 333 | |
| 334 void GvrDevice::OnFrameSubmitted(int32_t surfaceHandle, uint32_t frame_index, double elapsed) { | |
| 335 auto it = callback_map_.find(frame_index); | |
| 336 if (it == callback_map_.end()) { | |
| 337 VLOG(2) << __FUNCTION__ << ": no callback found for frame index " << frame_index; | |
| 338 } else { | |
| 339 VLOG(2) << __FUNCTION__ << ": run callback for frame index " << frame_index; | |
| 340 auto callback = std::move(it->second); | |
| 341 callback.Run(surfaceHandle, frame_index, elapsed); | |
| 342 last_render_ms_.add(elapsed); | |
| 343 VLOG(2) << __FUNCTION__ << ": run callback done, erasing"; | |
| 344 callback_map_.erase(it); | |
| 345 VLOG(2) << __FUNCTION__ << ": erasing done"; | |
| 346 } | |
| 347 | |
| 348 for (auto it : callback_map_) { | |
| 349 VLOG(2) << __FUNCTION__ << ": pending frame: " << it.first; | |
| 350 } | |
| 242 } | 351 } |
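The new per-frame callback bookkeeping in SubmitFrame / OnFrameSubmitted / ExitPresent boils down to a small map keyed by pose index. This sketch uses simplified std::function types rather than the mojo callback type in the patch, and the class name is illustrative:

```cpp
#include <cstdint>
#include <functional>
#include <map>
#include <utility>

using FrameCallback =
    std::function<void(int32_t surface, uint32_t frame, double elapsed)>;

class FrameCallbackTracker {
 public:
  // SubmitFrame: remember the completion callback for this pose index.
  void OnSubmit(uint32_t frame_index, FrameCallback callback) {
    pending_[frame_index] = std::move(callback);
  }
  // OnFrameSubmitted: run and erase the callback, if one was recorded.
  void OnFrameSubmitted(int32_t surface, uint32_t frame_index, double elapsed) {
    auto it = pending_.find(frame_index);
    if (it == pending_.end())
      return;
    it->second(surface, frame_index, elapsed);
    pending_.erase(it);
  }
  // ExitPresent: flush anything still pending with sentinel values.
  void Flush() {
    for (auto& entry : pending_)
      entry.second(0, entry.first, -1.0);
    pending_.clear();
  }

 private:
  std::map<uint32_t, FrameCallback> pending_;
};
```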
| 243 | 352 |
| 244 void GvrDevice::UpdateLayerBounds(mojom::VRLayerBoundsPtr left_bounds, | 353 void GvrDevice::UpdateLayerBounds(mojom::VRLayerBoundsPtr left_bounds, |
| 245 mojom::VRLayerBoundsPtr right_bounds) { | 354 mojom::VRLayerBoundsPtr right_bounds) { |
| 246 if (!delegate_) | 355 if (!delegate_) |
| 247 return; | 356 return; |
| 248 | 357 |
| 249 gvr::Rectf left_gvr_bounds; | 358 gvr::Rectf left_gvr_bounds; |
| 250 left_gvr_bounds.left = left_bounds->left; | 359 left_gvr_bounds.left = left_bounds->left; |
| 251 left_gvr_bounds.top = 1.0f - left_bounds->top; | 360 left_gvr_bounds.top = 1.0f - left_bounds->top; |
| 252 left_gvr_bounds.right = left_bounds->left + left_bounds->width; | 361 left_gvr_bounds.right = left_bounds->left + left_bounds->width; |
| 253 left_gvr_bounds.bottom = 1.0f - (left_bounds->top + left_bounds->height); | 362 left_gvr_bounds.bottom = 1.0f - (left_bounds->top + left_bounds->height); |
| 254 | 363 |
| 255 gvr::Rectf right_gvr_bounds; | 364 gvr::Rectf right_gvr_bounds; |
| 256 right_gvr_bounds.left = right_bounds->left; | 365 right_gvr_bounds.left = right_bounds->left; |
| 257 right_gvr_bounds.top = 1.0f - right_bounds->top; | 366 right_gvr_bounds.top = 1.0f - right_bounds->top; |
| 258 right_gvr_bounds.right = right_bounds->left + right_bounds->width; | 367 right_gvr_bounds.right = right_bounds->left + right_bounds->width; |
| 259 right_gvr_bounds.bottom = 1.0f - (right_bounds->top + right_bounds->height); | 368 right_gvr_bounds.bottom = 1.0f - (right_bounds->top + right_bounds->height); |
| 260 | 369 |
| 261 delegate_->UpdateWebVRTextureBounds(left_gvr_bounds, right_gvr_bounds); | 370 // TODO(klausw): tie this to pose_index_ for future execution |
| 371 delegate_->UpdateWebVRTextureBounds(left_bounds->forPoseIndex, left_gvr_bounds, right_gvr_bounds); | |
| 372 } | |
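For clarity, the bounds conversion above in a hypothetical standalone form: WebVR layer bounds are fractions of the texture with a top-left origin, while GVR expects UV rectangles with a bottom-left origin, so the vertical axis is flipped. The struct and function names here are placeholders, not the real gvr::Rectf:

```cpp
// Placeholder struct standing in for gvr::Rectf in this sketch.
struct UvRect {
  float left, top, right, bottom;
};

// Convert WebVR layer bounds (left, top, width, height in [0, 1],
// top-left origin) to a bottom-left-origin UV rect.
UvRect WebVrBoundsToGvrUv(float left, float top, float width, float height) {
  UvRect uv;
  uv.left = left;
  uv.top = 1.0f - top;
  uv.right = left + width;
  uv.bottom = 1.0f - (top + height);
  return uv;
}
// Example: the default left-eye bounds (0, 0, 0.5, 1) become
// {left = 0, top = 1, right = 0.5, bottom = 0}.
```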
| 373 | |
| 374 void GvrDevice::GetSurfaceHandle(int32_t width, int32_t height, const mojom::VRDisplay::GetSurfaceHandleCallback& callback) { | |
| 375 // LOG(ERROR) << "klausw:GvrDevice.GetSurfaceHandle, delegate=" << delegate_; | |
| 376 if (!delegate_) | |
| 377 callback.Run(0); | |
| 378 | |
| 379 delegate_->GetWebVRSurfaceHandle(width, height, callback); | |
| 262 } | 380 } |
| 263 | 381 |
| 264 void GvrDevice::SetDelegate(GvrDelegate* delegate) { | 382 void GvrDevice::SetDelegate(GvrDelegate* delegate) { |
| 265 delegate_ = delegate; | 383 delegate_ = delegate; |
| 266 | 384 |
| 267 // Notify the clients that this device has changed | 385 // Notify the clients that this device has changed |
| 268 if (delegate_) { | 386 if (delegate_) { |
| 269 delegate_->SetWebVRSecureOrigin(secure_origin_); | 387 delegate_->SetWebVRSecureOrigin(secure_origin_); |
| 270 OnChanged(); | 388 OnChanged(); |
| 271 } | 389 } |
| 272 } | 390 } |
| 273 | 391 |
| 274 gvr::GvrApi* GvrDevice::GetGvrApi() { | 392 gvr::GvrApi* GvrDevice::GetGvrApi() { |
| 275 if (!delegate_) | 393 if (!delegate_) |
| 276 return nullptr; | 394 return nullptr; |
| 277 | 395 |
| 278 return delegate_->gvr_api(); | 396 return delegate_->gvr_api(); |
| 279 } | 397 } |
| 280 | 398 |
| 281 } // namespace device | 399 } // namespace device |