Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2016 The Chromium Authors. All rights reserved. | 1 // Copyright 2016 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "chrome/browser/android/vr_shell/vr_shell_gl.h" | 5 #include "chrome/browser/android/vr_shell/vr_shell_gl.h" |
| 6 | 6 |
| 7 #include "base/android/jni_android.h" | 7 #include "base/android/jni_android.h" |
| 8 #include "base/memory/ptr_util.h" | 8 #include "base/memory/ptr_util.h" |
| 9 #include "base/metrics/histogram_macros.h" | 9 #include "base/metrics/histogram_macros.h" |
| 10 #include "base/threading/thread_task_runner_handle.h" | 10 #include "base/threading/thread_task_runner_handle.h" |
| 11 #include "chrome/browser/android/vr_shell/ui_elements.h" | 11 #include "chrome/browser/android/vr_shell/ui_elements.h" |
| 12 #include "chrome/browser/android/vr_shell/ui_scene.h" | 12 #include "chrome/browser/android/vr_shell/ui_scene.h" |
| 13 #include "chrome/browser/android/vr_shell/vr_controller.h" | 13 #include "chrome/browser/android/vr_shell/vr_controller.h" |
| 14 #include "chrome/browser/android/vr_shell/vr_gl_util.h" | 14 #include "chrome/browser/android/vr_shell/vr_gl_util.h" |
| 15 #include "chrome/browser/android/vr_shell/vr_input_manager.h" | 15 #include "chrome/browser/android/vr_shell/vr_input_manager.h" |
| 16 #include "chrome/browser/android/vr_shell/vr_math.h" | 16 #include "chrome/browser/android/vr_shell/vr_math.h" |
| 17 #include "chrome/browser/android/vr_shell/vr_shell.h" | 17 #include "chrome/browser/android/vr_shell/vr_shell.h" |
| 18 #include "chrome/browser/android/vr_shell/vr_shell_renderer.h" | 18 #include "chrome/browser/android/vr_shell/vr_shell_renderer.h" |
| 19 #include "device/vr/android/gvr/gvr_device.h" | |
| 19 #include "third_party/WebKit/public/platform/WebInputEvent.h" | 20 #include "third_party/WebKit/public/platform/WebInputEvent.h" |
| 20 #include "ui/gfx/vsync_provider.h" | |
| 21 #include "ui/gl/android/scoped_java_surface.h" | 21 #include "ui/gl/android/scoped_java_surface.h" |
| 22 #include "ui/gl/android/surface_texture.h" | 22 #include "ui/gl/android/surface_texture.h" |
| 23 #include "ui/gl/gl_bindings.h" | 23 #include "ui/gl/gl_bindings.h" |
| 24 #include "ui/gl/gl_context.h" | 24 #include "ui/gl/gl_context.h" |
| 25 #include "ui/gl/gl_surface.h" | 25 #include "ui/gl/gl_surface.h" |
| 26 #include "ui/gl/init/gl_factory.h" | 26 #include "ui/gl/init/gl_factory.h" |
| 27 | 27 |
| 28 namespace vr_shell { | 28 namespace vr_shell { |
| 29 | 29 |
| 30 namespace { | 30 namespace { |
| 31 // Constant taken from treasure_hunt demo. | 31 // TODO(mthiesse): If gvr::PlatformInfo().GetPosePredictionTime() is ever |
| 32 // exposed, use that instead (it defaults to 50ms on most platforms). | |
| 32 static constexpr long kPredictionTimeWithoutVsyncNanos = 50000000; | 33 static constexpr long kPredictionTimeWithoutVsyncNanos = 50000000; |
| 33 | 34 |
| 34 static constexpr float kZNear = 0.1f; | 35 static constexpr float kZNear = 0.1f; |
| 35 static constexpr float kZFar = 1000.0f; | 36 static constexpr float kZFar = 1000.0f; |
| 36 | 37 |
| 37 // Screen angle in degrees. 0 = vertical, positive = top closer. | 38 // Screen angle in degrees. 0 = vertical, positive = top closer. |
| 38 static constexpr float kDesktopScreenTiltDefault = 0; | 39 static constexpr float kDesktopScreenTiltDefault = 0; |
| 39 | 40 |
| 40 static constexpr float kReticleWidth = 0.025f; | 41 static constexpr float kReticleWidth = 0.025f; |
| 41 static constexpr float kReticleHeight = 0.025f; | 42 static constexpr float kReticleHeight = 0.025f; |
| (...skipping 93 matching lines...) | |
| 135 return mouse_event; | 136 return mouse_event; |
| 136 } | 137 } |
| 137 | 138 |
| 138 enum class ViewerType { | 139 enum class ViewerType { |
| 139 UNKNOWN_TYPE = 0, | 140 UNKNOWN_TYPE = 0, |
| 140 CARDBOARD = 1, | 141 CARDBOARD = 1, |
| 141 DAYDREAM = 2, | 142 DAYDREAM = 2, |
| 142 VIEWER_TYPE_MAX, | 143 VIEWER_TYPE_MAX, |
| 143 }; | 144 }; |
| 144 | 145 |
| 145 int GetPixelEncodedPoseIndexByte() { | |
| 146 TRACE_EVENT0("gpu", "VrShellGl::GetPixelEncodedPoseIndex"); | |
| 147 // Read the pose index encoded in a bottom left pixel as color values. | |
| 148 // See also third_party/WebKit/Source/modules/vr/VRDisplay.cpp which | |
| 149 // encodes the pose index, and device/vr/android/gvr/gvr_device.cc | |
| 150 // which tracks poses. Returns the low byte (0..255) if valid, or -1 | |
| 151 // if not valid due to bad magic number. | |
| 152 uint8_t pixels[4]; | |
| 153 // Assume we're reading from the framebuffer we just wrote to. | |
| 154 // That's true currently, we may need to use glReadBuffer(GL_BACK) | |
| 155 // or equivalent if the rendering setup changes in the future. | |
| 156 glReadPixels(0, 0, 1, 1, GL_RGBA, GL_UNSIGNED_BYTE, pixels); | |
| 157 | |
| 158 // Check for the magic number written by VRDevice.cpp on submit. | |
| 159 // This helps avoid glitches from garbage data in the render | |
| 160 // buffer that can appear during initialization or resizing. These | |
| 161 // often appear as flashes of all-black or all-white pixels. | |
| 162 if (pixels[1] == kWebVrPosePixelMagicNumbers[0] && | |
| 163 pixels[2] == kWebVrPosePixelMagicNumbers[1]) { | |
| 164 // Pose is good. | |
| 165 return pixels[0]; | |
| 166 } | |
| 167 VLOG(1) << "WebVR: reject decoded pose index " << (int)pixels[0] << | |
| 168 ", bad magic number " << (int)pixels[1] << ", " << (int)pixels[2]; | |
| 169 return -1; | |
| 170 } | |
| 171 | |
| 172 int64_t TimeInMicroseconds() { | 146 int64_t TimeInMicroseconds() { |
| 173 return std::chrono::duration_cast<std::chrono::microseconds>( | 147 return std::chrono::duration_cast<std::chrono::microseconds>( |
| 174 std::chrono::steady_clock::now().time_since_epoch()).count(); | 148 std::chrono::steady_clock::now().time_since_epoch()).count(); |
| 175 } | 149 } |
| 176 | 150 |
| 177 void WaitForSwapAck(const base::Closure& callback, gfx::SwapResult result) { | |
| 178 callback.Run(); | |
| 179 } | |
| 180 | |
| 181 } // namespace | 151 } // namespace |
| 182 | 152 |
| 183 VrShellGl::VrShellGl( | 153 VrShellGl::VrShellGl( |
| 184 const base::WeakPtr<VrShell>& weak_vr_shell, | 154 const base::WeakPtr<VrShell>& weak_vr_shell, |
| 185 const base::WeakPtr<VrInputManager>& content_input_manager, | 155 const base::WeakPtr<VrInputManager>& content_input_manager, |
| 186 const base::WeakPtr<VrInputManager>& ui_input_manager, | 156 const base::WeakPtr<VrInputManager>& ui_input_manager, |
| 187 scoped_refptr<base::SingleThreadTaskRunner> main_thread_task_runner, | 157 scoped_refptr<base::SingleThreadTaskRunner> main_thread_task_runner, |
| 188 gvr_context* gvr_api, | 158 gvr_context* gvr_api, |
| 189 bool initially_web_vr, | 159 bool initially_web_vr, |
| 190 bool reprojected_rendering) | 160 bool reprojected_rendering) |
| 191 : web_vr_mode_(initially_web_vr), | 161 : web_vr_mode_(initially_web_vr), |
| 192 surfaceless_rendering_(reprojected_rendering), | 162 surfaceless_rendering_(reprojected_rendering), |
| 193 task_runner_(base::ThreadTaskRunnerHandle::Get()), | 163 task_runner_(base::ThreadTaskRunnerHandle::Get()), |
| 164 binding_(this), | |
| 194 weak_vr_shell_(weak_vr_shell), | 165 weak_vr_shell_(weak_vr_shell), |
| 195 content_input_manager_(content_input_manager), | 166 content_input_manager_(content_input_manager), |
| 196 ui_input_manager_(ui_input_manager), | 167 ui_input_manager_(ui_input_manager), |
| 197 main_thread_task_runner_(std::move(main_thread_task_runner)), | 168 main_thread_task_runner_(std::move(main_thread_task_runner)), |
| 198 weak_ptr_factory_(this) { | 169 weak_ptr_factory_(this) { |
| 199 GvrInit(gvr_api); | 170 GvrInit(gvr_api); |
| 200 } | 171 } |
| 201 | 172 |
| 202 VrShellGl::~VrShellGl() { | 173 VrShellGl::~VrShellGl() { |
| 203 draw_task_.Cancel(); | 174 vsync_task_.Cancel(); |
| 204 } | 175 } |
| 205 | 176 |
| 206 void VrShellGl::Initialize() { | 177 void VrShellGl::Initialize() { |
| 207 gvr::Mat4f identity; | |
| 208 SetIdentityM(identity); | |
| 209 webvr_head_pose_.resize(kPoseRingBufferSize, identity); | |
| 210 webvr_head_pose_valid_.resize(kPoseRingBufferSize, false); | |
| 211 | |
| 212 scene_.reset(new UiScene); | 178 scene_.reset(new UiScene); |
| 213 | 179 |
| 214 if (surfaceless_rendering_) { | 180 if (surfaceless_rendering_) { |
| 215 // If we're rendering surfaceless, we'll never get a java surface to render | 181 // If we're rendering surfaceless, we'll never get a java surface to render |
| 216 // into, so we can initialize GL right away. | 182 // into, so we can initialize GL right away. |
| 217 InitializeGl(nullptr); | 183 InitializeGl(nullptr); |
| 218 } | 184 } |
| 219 } | 185 } |
| 220 | 186 |
| 221 void VrShellGl::InitializeGl(gfx::AcceleratedWidget window) { | 187 void VrShellGl::InitializeGl(gfx::AcceleratedWidget window) { |
| (...skipping 22 matching lines...) | |
| 244 LOG(ERROR) << "gl::init::CreateGLContext failed"; | 210 LOG(ERROR) << "gl::init::CreateGLContext failed"; |
| 245 ForceExitVr(); | 211 ForceExitVr(); |
| 246 return; | 212 return; |
| 247 } | 213 } |
| 248 if (!context_->MakeCurrent(surface_.get())) { | 214 if (!context_->MakeCurrent(surface_.get())) { |
| 249 LOG(ERROR) << "gl::GLContext::MakeCurrent() failed"; | 215 LOG(ERROR) << "gl::GLContext::MakeCurrent() failed"; |
| 250 ForceExitVr(); | 216 ForceExitVr(); |
| 251 return; | 217 return; |
| 252 } | 218 } |
| 253 | 219 |
| 254 // TODO(mthiesse): We don't appear to have a VSync provider ever here. This is | |
| 255 // sort of okay, because the GVR swap chain will block if we render too fast, | |
| 256 // but we should address this properly. | |
| 257 if (surface_->GetVSyncProvider()) { | |
| 258 surface_->GetVSyncProvider()->GetVSyncParameters(base::Bind( | |
| 259 &VrShellGl::UpdateVSyncParameters, weak_ptr_factory_.GetWeakPtr())); | |
| 260 } else { | |
| 261 LOG(ERROR) << "No VSync Provider"; | |
| 262 } | |
| 263 | |
| 264 unsigned int textures[2]; | 220 unsigned int textures[2]; |
| 265 glGenTextures(2, textures); | 221 glGenTextures(2, textures); |
| 266 ui_texture_id_ = textures[0]; | 222 ui_texture_id_ = textures[0]; |
| 267 content_texture_id_ = textures[1]; | 223 content_texture_id_ = textures[1]; |
| 268 ui_surface_texture_ = gl::SurfaceTexture::Create(ui_texture_id_); | 224 ui_surface_texture_ = gl::SurfaceTexture::Create(ui_texture_id_); |
| 269 content_surface_texture_ = gl::SurfaceTexture::Create(content_texture_id_); | 225 content_surface_texture_ = gl::SurfaceTexture::Create(content_texture_id_); |
| 270 ui_surface_.reset(new gl::ScopedJavaSurface(ui_surface_texture_.get())); | 226 ui_surface_.reset(new gl::ScopedJavaSurface(ui_surface_texture_.get())); |
| 271 content_surface_.reset(new gl::ScopedJavaSurface( | 227 content_surface_.reset(new gl::ScopedJavaSurface( |
| 272 content_surface_texture_.get())); | 228 content_surface_texture_.get())); |
| 273 ui_surface_texture_->SetFrameAvailableCallback(base::Bind( | 229 ui_surface_texture_->SetFrameAvailableCallback(base::Bind( |
| 274 &VrShellGl::OnUIFrameAvailable, weak_ptr_factory_.GetWeakPtr())); | 230 &VrShellGl::OnUIFrameAvailable, weak_ptr_factory_.GetWeakPtr())); |
| 275 content_surface_texture_->SetFrameAvailableCallback(base::Bind( | 231 content_surface_texture_->SetFrameAvailableCallback(base::Bind( |
| 276 &VrShellGl::OnContentFrameAvailable, weak_ptr_factory_.GetWeakPtr())); | 232 &VrShellGl::OnContentFrameAvailable, weak_ptr_factory_.GetWeakPtr())); |
| 277 | 233 |
| 278 content_surface_texture_->SetDefaultBufferSize( | 234 content_surface_texture_->SetDefaultBufferSize( |
| 279 content_tex_physical_size_.width, content_tex_physical_size_.height); | 235 content_tex_physical_size_.width, content_tex_physical_size_.height); |
| 280 ui_surface_texture_->SetDefaultBufferSize(ui_tex_physical_size_.width, | 236 ui_surface_texture_->SetDefaultBufferSize(ui_tex_physical_size_.width, |
| 281 ui_tex_physical_size_.height); | 237 ui_tex_physical_size_.height); |
| 282 | 238 |
| 283 main_thread_task_runner_->PostTask(FROM_HERE, base::Bind( | 239 main_thread_task_runner_->PostTask(FROM_HERE, base::Bind( |
| 284 &VrShell::SurfacesChanged, weak_vr_shell_, | 240 &VrShell::SurfacesChanged, weak_vr_shell_, |
| 285 content_surface_->j_surface().obj(), | 241 content_surface_->j_surface().obj(), |
| 286 ui_surface_->j_surface().obj())); | 242 ui_surface_->j_surface().obj())); |
| 287 | 243 |
| 288 InitializeRenderer(); | 244 InitializeRenderer(); |
| 289 | 245 |
| 290 draw_task_.Reset(base::Bind(&VrShellGl::DrawFrame, base::Unretained(this))); | 246 vsync_task_.Reset(base::Bind(&VrShellGl::OnVSync, base::Unretained(this))); |
| 291 ScheduleNextDrawFrame(); | 247 OnVSync(); |
| 292 | 248 |
| 293 ready_to_draw_ = true; | 249 ready_to_draw_ = true; |
| 294 } | 250 } |
| 295 | 251 |
| 296 void VrShellGl::OnUIFrameAvailable() { | 252 void VrShellGl::OnUIFrameAvailable() { |
| 297 ui_surface_texture_->UpdateTexImage(); | 253 ui_surface_texture_->UpdateTexImage(); |
| 298 } | 254 } |
| 299 | 255 |
| 300 void VrShellGl::OnContentFrameAvailable() { | 256 void VrShellGl::OnContentFrameAvailable() { |
| 301 content_surface_texture_->UpdateTexImage(); | 257 content_surface_texture_->UpdateTexImage(); |
| 258 received_frame_ = true; | |
| 259 } | |
| 260 | |
| 261 bool VrShellGl::GetPixelEncodedPoseIndexByte(int* pose_index) { | |
| 262 TRACE_EVENT0("gpu", "VrShellGl::GetPixelEncodedPoseIndex"); | |
| 263 if (!received_frame_) { | |
| 264 *pose_index = last_pose_; | |
| 265 return true; | |
| 266 } | |
| 267 received_frame_ = false; | |
| 268 | |
| 269 // Read the pose index encoded in a bottom left pixel as color values. | |
| 270 // See also third_party/WebKit/Source/modules/vr/VRDisplay.cpp which | |
| 271 // encodes the pose index, and device/vr/android/gvr/gvr_device.cc | |
| 272 // which tracks poses. Returns the low byte (0..255) if valid, or -1 | |
| 273 // if not valid due to bad magic number. | |
| 274 uint8_t pixels[4]; | |
| 275 // Assume we're reading from the framebuffer we just wrote to. | |
| 276 // That's true currently, but we may need to use glReadBuffer(GL_BACK) | |
| 277 // or equivalent if the rendering setup changes in the future. | |
| 278 glReadPixels(0, 0, 1, 1, GL_RGBA, GL_UNSIGNED_BYTE, pixels); | |
| 279 | |
| 280 // Check for the magic number written by VRDevice.cpp on submit. | |
| 281 // This helps avoid glitches from garbage data in the render | |
| 282 // buffer that can appear during initialization or resizing. These | |
| 283 // often appear as flashes of all-black or all-white pixels. | |
| 284 if (pixels[1] == kWebVrPosePixelMagicNumbers[0] && | |
| 285 pixels[2] == kWebVrPosePixelMagicNumbers[1]) { | |
| 286 // Pose is good. | |
| 287 *pose_index = pixels[0]; | |
| 288 last_pose_ = pixels[0]; | |
| 289 return true; | |
| 290 } | |
| 291 VLOG(1) << "WebVR: reject decoded pose index " << (int)pixels[0] | |
| 292 << ", bad magic number " << (int)pixels[1] << ", " | |
| 293 << (int)pixels[2]; | |
| 294 return false; | |
| 302 } | 295 } |
| 303 | 296 |
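
Note on the decode above: the comments point at third_party/WebKit/Source/modules/vr/VRDisplay.cpp as the code that writes the pose index into the bottom-left pixel. The sketch below is not that implementation; it is a minimal, hypothetical illustration of how a submitter could stamp the low byte of the pose index plus two magic-number channels into one corner pixel with a scissored clear. The magic values and the function name are placeholders.

```cpp
// Hypothetical writer-side sketch (NOT the actual VRDisplay.cpp code).
// After rendering a WebVR frame, stamp the bottom-left pixel with
// (pose_index_low_byte, magic0, magic1) so that the consumer's glReadPixels
// in GetPixelEncodedPoseIndexByte() can recover which pose was used.
#include <GLES2/gl2.h>
#include <cstdint>

namespace {
// Placeholder values; the real kWebVrPosePixelMagicNumbers are defined in a
// part of the file not shown in this diff.
constexpr uint8_t kMagic0 = 42;
constexpr uint8_t kMagic1 = 142;
}  // namespace

void StampPoseIndexPixel(uint32_t pose_index) {
  const uint8_t low_byte = pose_index & 0xFF;
  GLboolean had_scissor = glIsEnabled(GL_SCISSOR_TEST);
  glEnable(GL_SCISSOR_TEST);
  glScissor(0, 0, 1, 1);  // Restrict the clear to the bottom-left pixel.
  // For simplicity this clobbers the current clear color.
  glClearColor(low_byte / 255.0f, kMagic0 / 255.0f, kMagic1 / 255.0f, 1.0f);
  glClear(GL_COLOR_BUFFER_BIT);
  if (!had_scissor)
    glDisable(GL_SCISSOR_TEST);
}
```
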
| 304 void VrShellGl::GvrInit(gvr_context* gvr_api) { | 297 void VrShellGl::GvrInit(gvr_context* gvr_api) { |
| 305 gvr_api_ = gvr::GvrApi::WrapNonOwned(gvr_api); | 298 gvr_api_ = gvr::GvrApi::WrapNonOwned(gvr_api); |
| 306 controller_.reset(new VrController(gvr_api)); | 299 controller_.reset(new VrController(gvr_api)); |
| 307 | 300 |
| 308 ViewerType viewerType; | 301 ViewerType viewerType; |
| 309 switch (gvr_api_->GetViewerType()) { | 302 switch (gvr_api_->GetViewerType()) { |
| 310 case gvr::ViewerType::GVR_VIEWER_TYPE_DAYDREAM: | 303 case gvr::ViewerType::GVR_VIEWER_TYPE_DAYDREAM: |
| 311 viewerType = ViewerType::DAYDREAM; | 304 viewerType = ViewerType::DAYDREAM; |
| 312 break; | 305 break; |
| 313 case gvr::ViewerType::GVR_VIEWER_TYPE_CARDBOARD: | 306 case gvr::ViewerType::GVR_VIEWER_TYPE_CARDBOARD: |
| 314 viewerType = ViewerType::CARDBOARD; | 307 viewerType = ViewerType::CARDBOARD; |
| 315 break; | 308 break; |
| 316 default: | 309 default: |
| 317 NOTREACHED(); | 310 NOTREACHED(); |
| 318 viewerType = ViewerType::UNKNOWN_TYPE; | 311 viewerType = ViewerType::UNKNOWN_TYPE; |
| 319 break; | 312 break; |
| 320 } | 313 } |
| 321 UMA_HISTOGRAM_ENUMERATION("VRViewerType", static_cast<int>(viewerType), | 314 UMA_HISTOGRAM_ENUMERATION("VRViewerType", static_cast<int>(viewerType), |
| 322 static_cast<int>(ViewerType::VIEWER_TYPE_MAX)); | 315 static_cast<int>(ViewerType::VIEWER_TYPE_MAX)); |
| 323 } | 316 } |
| 324 | 317 |
| 325 void VrShellGl::InitializeRenderer() { | 318 void VrShellGl::InitializeRenderer() { |
| 326 // While WebVR is going through the compositor path, it shares | 319 // While WebVR is going through the compositor path, it shares |
| 327 // the same texture ID. This will change once it gets its own | 320 // the same texture ID. This will change once it gets its own |
| 328 // surface, but store it separately to avoid future confusion. | 321 // surface, but store it separately to avoid future confusion. |
| 329 // TODO(klausw,crbug.com/655722): remove this. | 322 // TODO(klausw,crbug.com/655722): remove this. |
| 330 webvr_texture_id_ = content_texture_id_; | 323 webvr_texture_id_ = content_texture_id_; |
| 331 // Out of paranoia, explicitly reset the "pose valid" flags to false | |
| 332 // from the GL thread. The constructor ran in the UI thread. | |
| 333 // TODO(klausw,crbug.com/655722): remove this. | |
| 334 webvr_head_pose_valid_.assign(kPoseRingBufferSize, false); | |
| 335 | 324 |
| 336 gvr_api_->InitializeGl(); | 325 gvr_api_->InitializeGl(); |
| 326 webvr_head_pose_.assign(kPoseRingBufferSize, | |
| 327 gvr_api_->GetHeadSpaceFromStartSpaceRotation( | |
| 328 gvr::GvrApi::GetTimePointNow())); | |
| 329 | |
| 337 std::vector<gvr::BufferSpec> specs; | 330 std::vector<gvr::BufferSpec> specs; |
| 338 // For kFramePrimaryBuffer (primary VrShell and WebVR content) | 331 // For kFramePrimaryBuffer (primary VrShell and WebVR content) |
| 339 specs.push_back(gvr_api_->CreateBufferSpec()); | 332 specs.push_back(gvr_api_->CreateBufferSpec()); |
| 340 render_size_primary_ = specs[kFramePrimaryBuffer].GetSize(); | 333 render_size_primary_ = specs[kFramePrimaryBuffer].GetSize(); |
| 341 | 334 |
| 342 // For kFrameHeadlockedBuffer (for WebVR insecure content warning). | 335 // For kFrameHeadlockedBuffer (for WebVR insecure content warning). |
| 343 // Set this up at fixed resolution, the (smaller) FOV gets set below. | 336 // Set this up at fixed resolution, the (smaller) FOV gets set below. |
| 344 specs.push_back(gvr_api_->CreateBufferSpec()); | 337 specs.push_back(gvr_api_->CreateBufferSpec()); |
| 345 specs.back().SetSize(kHeadlockedBufferDimensions); | 338 specs.back().SetSize(kHeadlockedBufferDimensions); |
| 346 render_size_headlocked_ = specs[kFrameHeadlockedBuffer].GetSize(); | 339 render_size_headlocked_ = specs[kFrameHeadlockedBuffer].GetSize(); |
| (...skipping 239 matching lines...) | |
| 586 DCHECK(input_target != InputTarget::NONE); | 579 DCHECK(input_target != InputTarget::NONE); |
| 587 const base::WeakPtr<VrInputManager>& weak_ptr = | 580 const base::WeakPtr<VrInputManager>& weak_ptr = |
| 588 input_target == InputTarget::CONTENT ? content_input_manager_ | 581 input_target == InputTarget::CONTENT ? content_input_manager_ |
| 589 : ui_input_manager_; | 582 : ui_input_manager_; |
| 590 main_thread_task_runner_->PostTask( | 583 main_thread_task_runner_->PostTask( |
| 591 FROM_HERE, | 584 FROM_HERE, |
| 592 base::Bind(&VrInputManager::ProcessUpdatedGesture, weak_ptr, | 585 base::Bind(&VrInputManager::ProcessUpdatedGesture, weak_ptr, |
| 593 base::Passed(std::move(event)))); | 586 base::Passed(std::move(event)))); |
| 594 } | 587 } |
| 595 | 588 |
| 596 void VrShellGl::SetGvrPoseForWebVr(const gvr::Mat4f& pose, uint32_t pose_num) { | |
| 597 webvr_head_pose_[pose_num % kPoseRingBufferSize] = pose; | |
| 598 webvr_head_pose_valid_[pose_num % kPoseRingBufferSize] = true; | |
| 599 } | |
| 600 | |
| 601 bool VrShellGl::WebVrPoseByteIsValid(int pose_index_byte) { | |
| 602 if (pose_index_byte < 0) { | |
| 603 return false; | |
| 604 } | |
| 605 if (!webvr_head_pose_valid_[pose_index_byte % kPoseRingBufferSize]) { | |
| 606 VLOG(1) << "WebVR: reject decoded pose index " << pose_index_byte << | |
| 607 ", not a valid pose"; | |
| 608 return false; | |
| 609 } | |
| 610 return true; | |
| 611 } | |
| 612 | |
| 613 void VrShellGl::DrawFrame() { | 589 void VrShellGl::DrawFrame() { |
| 614 TRACE_EVENT0("gpu", "VrShellGl::DrawFrame"); | 590 TRACE_EVENT0("gpu", "VrShellGl::DrawFrame"); |
| 591 | |
| 615 // Reset the viewport list to just the pair of viewports for the | 592 // Reset the viewport list to just the pair of viewports for the |
| 616 // primary buffer each frame. Head-locked viewports get added by | 593 // primary buffer each frame. Head-locked viewports get added by |
| 617 // DrawVrShell if needed. | 594 // DrawVrShell if needed. |
| 618 buffer_viewport_list_->SetToRecommendedBufferViewports(); | 595 buffer_viewport_list_->SetToRecommendedBufferViewports(); |
| 619 | 596 |
| 620 gvr::Frame frame = swap_chain_->AcquireFrame(); | 597 gvr::Frame frame = swap_chain_->AcquireFrame(); |
| 621 gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow(); | 598 if (!frame.is_valid()) { |
| 622 target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos; | 599 return; |
| 600 } | |
| 601 frame.BindBuffer(kFramePrimaryBuffer); | |
| 602 if (web_vr_mode_) { | |
| 603 DrawWebVr(); | |
| 604 } | |
| 623 | 605 |
| 624 gvr::Mat4f head_pose = | 606 int pose_index; |
| 625 gvr_api_->GetHeadSpaceFromStartSpaceRotation(target_time); | 607 gvr::Mat4f head_pose; |
| 608 | |
| 609 // When using async reprojection, we need to know which pose was used in | |
| 610 // the WebVR app for drawing this frame. Due to unknown amounts of | |
| 611 // buffering in the compositor and SurfaceTexture, we read the pose number | |
| 612 // from a corner pixel. There's no point in doing this for legacy | |
| 613 // distortion rendering since that doesn't need a pose, and reading back | |
| 614 // pixels is an expensive operation. TODO(klausw,crbug.com/655722): stop | |
| 615 // doing this once we have working no-compositor rendering for WebVR. | |
| 616 if (web_vr_mode_ && gvr_api_->GetAsyncReprojectionEnabled() && | |
| 617 GetPixelEncodedPoseIndexByte(&pose_index)) { | |
| 618 head_pose = webvr_head_pose_[pose_index % kPoseRingBufferSize]; | |
| 619 } else { | |
| 620 gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow(); | |
| 621 target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos; | |
| 622 head_pose = gvr_api_->GetHeadSpaceFromStartSpaceRotation(target_time); | |
| 623 } | |
| 626 | 624 |
| 627 gvr::Vec3f position = GetTranslation(head_pose); | 625 gvr::Vec3f position = GetTranslation(head_pose); |
| 628 if (position.x == 0.0f && position.y == 0.0f && position.z == 0.0f) { | 626 if (position.x == 0.0f && position.y == 0.0f && position.z == 0.0f) { |
| 629 // This appears to be a 3DOF pose without a neck model. Add one. | 627 // This appears to be a 3DOF pose without a neck model. Add one. |
| 630 // The head pose has redundant data. Assume we're only using the | 628 // The head pose has redundant data. Assume we're only using the |
| 631 // object_from_reference_matrix, we're not updating position_external. | 629 // object_from_reference_matrix, we're not updating position_external. |
| 632 // TODO: Not sure what object_from_reference_matrix is. The new API removed | 630 // TODO: Not sure what object_from_reference_matrix is. The new API removed |
| 633 // it. For now, removing it seems to work fine. | 631 // it. For now, removing it seems to work fine. |
| 634 gvr_api_->ApplyNeckModel(head_pose, 1.0f); | 632 gvr_api_->ApplyNeckModel(head_pose, 1.0f); |
| 635 } | 633 } |
| 636 | 634 |
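
The branch above applies a neck model only when the translation is exactly zero, i.e. a 3DOF orientation-only pose. The real math lives inside gvr_api_->ApplyNeckModel(); the snippet below is just a simplified, self-contained sketch of the usual idea, rotating a fixed neck-to-head offset by the head orientation so that pure rotation still yields a small, plausible position. The offset values are illustrative, not GVR's.

```cpp
// Simplified neck-model illustration (assumed offsets, not GVR's values).
#include <array>
#include <cmath>
#include <cstdio>

struct Vec3 { float x, y, z; };

// Rotate v by a 3x3 row-major rotation matrix.
Vec3 Rotate(const std::array<float, 9>& m, const Vec3& v) {
  return {m[0] * v.x + m[1] * v.y + m[2] * v.z,
          m[3] * v.x + m[4] * v.y + m[5] * v.z,
          m[6] * v.x + m[7] * v.y + m[8] * v.z};
}

// Given only an orientation, synthesize a head position by rotating a fixed
// neck-to-head offset and subtracting the rest-pose offset, so the identity
// orientation yields zero translation.
Vec3 NeckModelPosition(const std::array<float, 9>& head_rotation) {
  const Vec3 neck_to_head = {0.0f, 0.075f, 0.08f};  // metres; illustrative only
  Vec3 rotated = Rotate(head_rotation, neck_to_head);
  return {rotated.x - neck_to_head.x,
          rotated.y - neck_to_head.y,
          rotated.z - neck_to_head.z};
}

int main() {
  // 30-degree yaw about the Y axis.
  const float kPi = 3.14159265f;
  const float c = std::cos(kPi / 6.0f), s = std::sin(kPi / 6.0f);
  std::array<float, 9> yaw = {c, 0, s, 0, 1, 0, -s, 0, c};
  Vec3 p = NeckModelPosition(yaw);
  std::printf("synthesized position: %.3f %.3f %.3f\n", p.x, p.y, p.z);
}
```
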
| 637 frame.BindBuffer(kFramePrimaryBuffer); | |
| 638 | |
| 639 // Update the render position of all UI elements (including desktop). | 635 // Update the render position of all UI elements (including desktop). |
| 640 const float screen_tilt = kDesktopScreenTiltDefault * M_PI / 180.0f; | 636 const float screen_tilt = kDesktopScreenTiltDefault * M_PI / 180.0f; |
| 641 scene_->UpdateTransforms(screen_tilt, TimeInMicroseconds()); | 637 scene_->UpdateTransforms(screen_tilt, TimeInMicroseconds()); |
| 642 | 638 |
| 643 UpdateController(GetForwardVector(head_pose)); | 639 UpdateController(GetForwardVector(head_pose)); |
| 644 | 640 |
| 645 if (web_vr_mode_) { | |
| 646 DrawWebVr(); | |
| 647 | |
| 648 // When using async reprojection, we need to know which pose was used in | |
| 649 // the WebVR app for drawing this frame. Due to unknown amounts of | |
| 650 // buffering in the compositor and SurfaceTexture, we read the pose number | |
| 651 // from a corner pixel. There's no point in doing this for legacy | |
| 652 // distortion rendering since that doesn't need a pose, and reading back | |
| 653 // pixels is an expensive operation. TODO(klausw,crbug.com/655722): stop | |
| 654 // doing this once we have working no-compositor rendering for WebVR. | |
| 655 if (gvr_api_->GetAsyncReprojectionEnabled()) { | |
| 656 int pose_index_byte = GetPixelEncodedPoseIndexByte(); | |
| 657 if (WebVrPoseByteIsValid(pose_index_byte)) { | |
| 658 // We have a valid pose, use it for reprojection. | |
| 659 webvr_left_viewport_->SetReprojection(GVR_REPROJECTION_FULL); | |
| 660 webvr_right_viewport_->SetReprojection(GVR_REPROJECTION_FULL); | |
| 661 head_pose = webvr_head_pose_[pose_index_byte % kPoseRingBufferSize]; | |
| 662 // We can't mark the used pose as invalid since unfortunately | |
| 663 // we have to reuse them. The compositor will re-submit stale | |
| 664 // frames on vsync, and we can't tell that this has happened | |
| 665 // until we've read the pose index from it, and at that point | |
| 666 // it's too late to skip rendering. | |
| 667 } else { | |
| 668 // If we don't get a valid frame ID back we shouldn't attempt | |
| 669 // to reproject by an invalid matrix, so turn off reprojection | |
| 670 // instead. Invalid poses can permanently break reprojection | |
| 671 // for this GVR instance: http://crbug.com/667327 | |
| 672 webvr_left_viewport_->SetReprojection(GVR_REPROJECTION_NONE); | |
| 673 webvr_right_viewport_->SetReprojection(GVR_REPROJECTION_NONE); | |
| 674 } | |
| 675 } | |
| 676 } | |
| 677 | |
| 678 DrawVrShell(head_pose, frame); | 641 DrawVrShell(head_pose, frame); |
| 679 | 642 |
| 680 frame.Unbind(); | 643 frame.Unbind(); |
| 681 frame.Submit(*buffer_viewport_list_, head_pose); | 644 frame.Submit(*buffer_viewport_list_, head_pose); |
| 682 | 645 |
| 683 // No need to swap buffers for surfaceless rendering. | 646 // No need to swap buffers for surfaceless rendering. |
| 684 if (surfaceless_rendering_) { | 647 if (!surfaceless_rendering_) { |
| 685 ScheduleNextDrawFrame(); | 648 // TODO(mthiesse): Support asynchronous SwapBuffers. |
| 686 return; | |
| 687 } | |
| 688 | |
| 689 if (surface_->SupportsAsyncSwap()) { | |
| 690 surface_->SwapBuffersAsync(base::Bind(&WaitForSwapAck, base::Bind( | |
| 691 &VrShellGl::ScheduleNextDrawFrame, weak_ptr_factory_.GetWeakPtr()))); | |
| 692 } else { | |
| 693 surface_->SwapBuffers(); | 649 surface_->SwapBuffers(); |
| 694 ScheduleNextDrawFrame(); | |
| 695 } | 650 } |
| 696 } | 651 } |
| 697 | 652 |
| 698 void VrShellGl::DrawVrShell(const gvr::Mat4f& head_pose, | 653 void VrShellGl::DrawVrShell(const gvr::Mat4f& head_pose, |
| 699 gvr::Frame &frame) { | 654 gvr::Frame &frame) { |
| 700 TRACE_EVENT0("gpu", "VrShellGl::DrawVrShell"); | 655 TRACE_EVENT0("gpu", "VrShellGl::DrawVrShell"); |
| 701 std::vector<const ContentRectangle*> head_locked_elements; | 656 std::vector<const ContentRectangle*> head_locked_elements; |
| 702 std::vector<const ContentRectangle*> world_elements; | 657 std::vector<const ContentRectangle*> world_elements; |
| 703 for (const auto& rect : scene_->GetUiElements()) { | 658 for (const auto& rect : scene_->GetUiElements()) { |
| 704 if (!rect->visible) { | 659 if (!rect->visible) { |
| (...skipping 203 matching lines...) | |
| 908 buffer_viewport_list_->SetBufferViewport(GVR_RIGHT_EYE, | 863 buffer_viewport_list_->SetBufferViewport(GVR_RIGHT_EYE, |
| 909 *webvr_right_viewport_); | 864 *webvr_right_viewport_); |
| 910 } | 865 } |
| 911 | 866 |
| 912 void VrShellGl::OnTriggerEvent() { | 867 void VrShellGl::OnTriggerEvent() { |
| 913 // Set a flag to handle this on the render thread at the next frame. | 868 // Set a flag to handle this on the render thread at the next frame. |
| 914 touch_pending_ = true; | 869 touch_pending_ = true; |
| 915 } | 870 } |
| 916 | 871 |
| 917 void VrShellGl::OnPause() { | 872 void VrShellGl::OnPause() { |
| 918 draw_task_.Cancel(); | 873 vsync_task_.Cancel(); |
| 919 controller_->OnPause(); | 874 controller_->OnPause(); |
| 920 gvr_api_->PauseTracking(); | 875 gvr_api_->PauseTracking(); |
| 921 } | 876 } |
| 922 | 877 |
| 923 void VrShellGl::OnResume() { | 878 void VrShellGl::OnResume() { |
| 924 gvr_api_->RefreshViewerProfile(); | 879 gvr_api_->RefreshViewerProfile(); |
| 925 gvr_api_->ResumeTracking(); | 880 gvr_api_->ResumeTracking(); |
| 926 controller_->OnResume(); | 881 controller_->OnResume(); |
| 927 if (ready_to_draw_) { | 882 if (ready_to_draw_) { |
| 928 draw_task_.Reset(base::Bind(&VrShellGl::DrawFrame, base::Unretained(this))); | 883 vsync_task_.Reset(base::Bind(&VrShellGl::OnVSync, base::Unretained(this))); |
| 929 ScheduleNextDrawFrame(); | 884 OnVSync(); |
| 930 } | 885 } |
| 931 } | 886 } |
| 932 | 887 |
| 933 void VrShellGl::SetWebVrMode(bool enabled) { | 888 void VrShellGl::SetWebVrMode(bool enabled) { |
| 934 web_vr_mode_ = enabled; | 889 web_vr_mode_ = enabled; |
| 935 } | 890 } |
| 936 | 891 |
| 937 void VrShellGl::UpdateWebVRTextureBounds(const gvr::Rectf& left_bounds, | 892 void VrShellGl::UpdateWebVRTextureBounds(const gvr::Rectf& left_bounds, |
| 938 const gvr::Rectf& right_bounds) { | 893 const gvr::Rectf& right_bounds) { |
| 939 webvr_left_viewport_->SetSourceUv(left_bounds); | 894 webvr_left_viewport_->SetSourceUv(left_bounds); |
| (...skipping 26 matching lines...) | |
| 966 if (ui_surface_texture_.get()) | 921 if (ui_surface_texture_.get()) |
| 967 ui_surface_texture_->SetDefaultBufferSize(width, height); | 922 ui_surface_texture_->SetDefaultBufferSize(width, height); |
| 968 ui_tex_physical_size_.width = width; | 923 ui_tex_physical_size_.width = width; |
| 969 ui_tex_physical_size_.height = height; | 924 ui_tex_physical_size_.height = height; |
| 970 } | 925 } |
| 971 | 926 |
| 972 base::WeakPtr<VrShellGl> VrShellGl::GetWeakPtr() { | 927 base::WeakPtr<VrShellGl> VrShellGl::GetWeakPtr() { |
| 973 return weak_ptr_factory_.GetWeakPtr(); | 928 return weak_ptr_factory_.GetWeakPtr(); |
| 974 } | 929 } |
| 975 | 930 |
| 976 void VrShellGl::UpdateVSyncParameters(const base::TimeTicks timebase, | 931 void VrShellGl::OnVSync() { |
| 977 const base::TimeDelta interval) { | |
| 978 vsync_timebase_ = timebase; | |
| 979 vsync_interval_ = interval; | |
| 980 } | |
| 981 | |
| 982 void VrShellGl::ScheduleNextDrawFrame() { | |
| 983 base::TimeTicks now = base::TimeTicks::Now(); | 932 base::TimeTicks now = base::TimeTicks::Now(); |
| 984 base::TimeTicks target; | 933 base::TimeTicks target; |
| 985 | 934 |
| 986 if (vsync_interval_.is_zero()) { | 935 // Don't send VSyncs until we have a timebase/interval. |
| 987 target = now; | 936 if (vsync_interval_.is_zero()) |
| 937 return; | |
| 938 target = now + vsync_interval_; | |
| 939 int64_t intervals = (target - vsync_timebase_) / vsync_interval_; | |
| 940 target = vsync_timebase_ + intervals * vsync_interval_; | |
| 941 task_runner_->PostDelayedTask(FROM_HERE, vsync_task_.callback(), | |
| 942 target - now); | |
| 943 | |
| 944 double time = (intervals * vsync_interval_).InSecondsF(); | |
| 945 if (!callback_.is_null()) { | |
| 946 callback_.Run(GetPose(), time); | |
| 947 callback_.Reset(); | |
| 988 } else { | 948 } else { |
| 989 target = now + vsync_interval_; | 949 pending_vsync_ = true; |
| 990 int64_t intervals = (target - vsync_timebase_) / vsync_interval_; | 950 pending_time_ = time; |
| 991 target = vsync_timebase_ + intervals * vsync_interval_; | |
| 992 } | 951 } |
| 952 DrawFrame(); | |
| 953 } | |
| 993 | 954 |
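
A short aside on the scheduling in OnVSync() above: rather than posting the next task one interval after "now", the deadline is snapped to the vsync grid by counting whole intervals from the timebase, which keeps the loop phase-locked even when a frame runs long. A standalone sketch of the same arithmetic, using std::chrono in place of base::TimeTicks (names are illustrative):

```cpp
// Standalone illustration of the vsync-aligned deadline computation used in
// VrShellGl::OnVSync().
#include <chrono>
#include <cstdio>

using Clock = std::chrono::steady_clock;

Clock::time_point NextVSyncDeadline(Clock::time_point now,
                                    Clock::time_point timebase,
                                    Clock::duration interval) {
  // Tentative target one interval ahead, then snap down to the vsync grid.
  auto target = now + interval;
  auto intervals = (target - timebase) / interval;  // integer division
  return timebase + intervals * interval;
}

int main() {
  using namespace std::chrono_literals;
  auto timebase = Clock::now();
  auto interval = 16667us;  // ~60 Hz
  // Pretend the current frame finished 3.7 intervals after the timebase.
  auto now = timebase + 3 * interval + 11667us;
  auto deadline = NextVSyncDeadline(now, timebase, interval);
  std::printf("sleep for %lld us\n",
              static_cast<long long>(
                  std::chrono::duration_cast<std::chrono::microseconds>(
                      deadline - now).count()));
}
```
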
| 994 task_runner_->PostDelayedTask(FROM_HERE, draw_task_.callback(), target - now); | 955 void VrShellGl::OnRequest(device::mojom::VRVSyncProviderRequest request) { |
| 956 binding_.Close(); | |
| 957 binding_.Bind(std::move(request)); | |
| 958 } | |
| 959 | |
| 960 void VrShellGl::GetVSync(const GetVSyncCallback& callback) { | |
| 961 if (!pending_vsync_) { | |
| 962 callback_ = std::move(callback); | |
| (review comment) dcheng, 2017/01/12 09:56:56: One thing I'm not certain about is if it's OK to j… | |
| (review comment) mthiesse, 2017/01/12 15:49:03: It's not okay to just drop pending callbacks. I've… | |
| 963 return; | |
| 964 } | |
| 965 pending_vsync_ = false; | |
| 966 callback.Run(GetPose(), pending_time_); | |
| 967 } | |
| 968 | |
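
GetVSync() above pairs with OnVSync() as a deliver-or-defer handshake: if a vsync has already fired (pending_vsync_), the request is answered immediately with the stored time; otherwise the callback is stashed and completed on the next tick. The review thread flags that the stashing path must not silently drop an earlier pending callback. A minimal, framework-free sketch of that pattern (hypothetical class, no mojo):

```cpp
// Minimal sketch of the "deliver or defer" handshake between a vsync producer
// and a single outstanding GetVSync request. Hypothetical, not the mojo code.
#include <functional>
#include <utility>

class VSyncRelay {
 public:
  using Callback = std::function<void(double /*frame_time_seconds*/)>;

  // Producer side: called on every vsync tick.
  void OnVSync(double frame_time_seconds) {
    if (callback_) {
      // A request is already waiting; answer it right away.
      Callback cb = std::move(callback_);
      callback_ = nullptr;
      cb(frame_time_seconds);
    } else {
      // No request outstanding; remember the tick for the next request.
      pending_vsync_ = true;
      pending_time_ = frame_time_seconds;
    }
  }

  // Consumer side: one outstanding request at a time.
  void GetVSync(Callback callback) {
    if (!pending_vsync_) {
      // NOTE: a real implementation must decide what to do if callback_ is
      // already set, rather than silently overwriting it (see review thread).
      callback_ = std::move(callback);
      return;
    }
    pending_vsync_ = false;
    callback(pending_time_);
  }

 private:
  Callback callback_;
  bool pending_vsync_ = false;
  double pending_time_ = 0.0;
};
```
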
| 969 void VrShellGl::UpdateVSyncInterval(long timebase_nanos, | |
| 970 double interval_seconds) { | |
| 971 vsync_timebase_ = base::TimeTicks(); | |
| 972 vsync_timebase_ += base::TimeDelta::FromMicroseconds(timebase_nanos / 1000); | |
| 973 vsync_interval_ = base::TimeDelta::FromSecondsD(interval_seconds); | |
| 974 vsync_task_.Reset(base::Bind(&VrShellGl::OnVSync, base::Unretained(this))); | |
| 975 OnVSync(); | |
| 995 } | 976 } |
| 996 | 977 |
| 997 void VrShellGl::ForceExitVr() { | 978 void VrShellGl::ForceExitVr() { |
| 998 main_thread_task_runner_->PostTask( | 979 main_thread_task_runner_->PostTask( |
| 999 FROM_HERE, base::Bind(&VrShell::ForceExitVr, weak_vr_shell_)); | 980 FROM_HERE, base::Bind(&VrShell::ForceExitVr, weak_vr_shell_)); |
| 1000 } | 981 } |
| 1001 | 982 |
| 1002 void VrShellGl::UpdateScene(std::unique_ptr<base::ListValue> commands) { | 983 void VrShellGl::UpdateScene(std::unique_ptr<base::ListValue> commands) { |
| 1003 scene_->HandleCommands(std::move(commands), TimeInMicroseconds()); | 984 scene_->HandleCommands(std::move(commands), TimeInMicroseconds()); |
| 1004 } | 985 } |
| 1005 | 986 |
| 987 device::mojom::VRPosePtr VrShellGl::GetPose() { | |
| 988 TRACE_EVENT0("input", "VrShellGl::GetPose"); | |
| 989 | |
| 990 gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow(); | |
| 991 target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos; | |
| 992 | |
| 993 gvr::Mat4f head_mat = | |
| 994 gvr_api_->GetHeadSpaceFromStartSpaceRotation(target_time); | |
| 995 head_mat = gvr_api_->ApplyNeckModel(head_mat, 1.0f); | |
| 996 | |
| 997 uint32_t pose_index = pose_index_++; | |
| 998 webvr_head_pose_[pose_index % kPoseRingBufferSize] = head_mat; | |
| 999 | |
| 1000 return VrShell::VRPosePtrFromGvrPose(head_mat, pose_index); | |
| 1001 } | |
| 1002 | |
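
GetPose() above stores each predicted pose at pose_index % kPoseRingBufferSize, while the round trip through the corner pixel preserves only the low byte (0..255) of the index. The lookup in DrawFrame() stays consistent because, provided the ring size divides 256, reducing the low byte modulo the ring size lands on the same slot as reducing the full index. A small check of that arithmetic (the ring size of 8 is an assumption; the real constant is defined outside the lines shown in this diff):

```cpp
// Demonstrates why encoding only the low byte of the pose index is enough:
// if the ring size divides 256, (index & 0xFF) % ring == index % ring.
#include <cassert>
#include <cstdint>
#include <cstdio>

int main() {
  constexpr uint32_t kPoseRingBufferSize = 8;  // assumed value for this example
  static_assert(256 % kPoseRingBufferSize == 0,
                "low-byte encoding requires the ring size to divide 256");
  for (uint32_t index = 0; index < 5000; ++index) {
    uint8_t low_byte = index & 0xFF;  // what the corner pixel carries
    assert(low_byte % kPoseRingBufferSize == index % kPoseRingBufferSize);
  }
  std::printf("low-byte indexing matches full-index indexing\n");
  return 0;
}
```
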
| 1006 } // namespace vr_shell | 1003 } // namespace vr_shell |