| OLD | NEW |
| 1 // Copyright 2016 The Chromium Authors. All rights reserved. | 1 // Copyright 2016 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "chrome/browser/android/vr_shell/vr_shell_gl.h" | 5 #include "chrome/browser/android/vr_shell/vr_shell_gl.h" |
| 6 | 6 |
| 7 #include <limits> | 7 #include <limits> |
| 8 #include <utility> | 8 #include <utility> |
| 9 | 9 |
| 10 #include "base/android/jni_android.h" | 10 #include "base/android/jni_android.h" |
| 11 #include "base/callback_helpers.h" | 11 #include "base/callback_helpers.h" |
| 12 #include "base/memory/ptr_util.h" | 12 #include "base/memory/ptr_util.h" |
| 13 #include "base/metrics/histogram_macros.h" | 13 #include "base/metrics/histogram_macros.h" |
| 14 #include "base/threading/thread_task_runner_handle.h" | 14 #include "base/threading/thread_task_runner_handle.h" |
| 15 #include "chrome/browser/android/vr_shell/ui_elements.h" | 15 #include "chrome/browser/android/vr_shell/ui_elements.h" |
| 16 #include "chrome/browser/android/vr_shell/ui_scene.h" | 16 #include "chrome/browser/android/vr_shell/ui_scene.h" |
| 17 #include "chrome/browser/android/vr_shell/vr_controller.h" | 17 #include "chrome/browser/android/vr_shell/vr_controller.h" |
| 18 #include "chrome/browser/android/vr_shell/vr_gl_util.h" | 18 #include "chrome/browser/android/vr_shell/vr_gl_util.h" |
| 19 #include "chrome/browser/android/vr_shell/vr_math.h" | 19 #include "chrome/browser/android/vr_shell/vr_math.h" |
| 20 #include "chrome/browser/android/vr_shell/vr_shell.h" | 20 #include "chrome/browser/android/vr_shell/vr_shell.h" |
| 21 #include "chrome/browser/android/vr_shell/vr_shell_command_buffer_gl.h" |
| 21 #include "chrome/browser/android/vr_shell/vr_shell_delegate.h" | 22 #include "chrome/browser/android/vr_shell/vr_shell_delegate.h" |
| 22 #include "chrome/browser/android/vr_shell/vr_shell_renderer.h" | 23 #include "chrome/browser/android/vr_shell/vr_shell_renderer.h" |
| 23 #include "device/vr/android/gvr/gvr_device.h" | 24 #include "device/vr/android/gvr/gvr_device.h" |
| 24 #include "third_party/WebKit/public/platform/WebInputEvent.h" | 25 #include "third_party/WebKit/public/platform/WebInputEvent.h" |
| 25 #include "third_party/WebKit/public/platform/WebMouseEvent.h" | 26 #include "third_party/WebKit/public/platform/WebMouseEvent.h" |
| 26 #include "ui/gl/android/scoped_java_surface.h" | 27 #include "ui/gl/android/scoped_java_surface.h" |
| 27 #include "ui/gl/android/surface_texture.h" | 28 #include "ui/gl/android/surface_texture.h" |
| 28 #include "ui/gl/gl_bindings.h" | 29 #include "ui/gl/gl_bindings.h" |
| 29 #include "ui/gl/gl_context.h" | 30 #include "ui/gl/gl_context.h" |
| 30 #include "ui/gl/gl_surface.h" | 31 #include "ui/gl/gl_surface.h" |
| (...skipping 45 matching lines...) |
| 76 | 77 |
| 77 // The GVR viewport list has two entries (left eye and right eye) for each | 78 // The GVR viewport list has two entries (left eye and right eye) for each |
| 78 // GVR buffer. | 79 // GVR buffer. |
| 79 static constexpr int kViewportListPrimaryOffset = 0; | 80 static constexpr int kViewportListPrimaryOffset = 0; |
| 80 static constexpr int kViewportListHeadlockedOffset = 2; | 81 static constexpr int kViewportListHeadlockedOffset = 2; |
| 81 | 82 |
| 82 // Buffer size large enough to handle the current backlog of poses which is | 83 // Buffer size large enough to handle the current backlog of poses which is |
| 83 // 2-3 frames. | 84 // 2-3 frames. |
| 84 static constexpr unsigned kPoseRingBufferSize = 8; | 85 static constexpr unsigned kPoseRingBufferSize = 8; |
| 85 | 86 |
| 86 // Magic numbers used to mark valid pose index values encoded in frame | 87 // Default downscale factor for computing the recommended WebVR |
| 87 // data. Must match the magic numbers used in blink's VRDisplay.cpp. | 88 // renderWidth/Height from the 1:1 pixel mapped size. Using a rather |
| 88 static constexpr std::array<uint8_t, 2> kWebVrPosePixelMagicNumbers{{42, 142}}; | 89 // aggressive downscale due to the high overhead of copying pixels |
| 90 // twice before handing off to GVR. For comparison, the polyfill |
| 91 // uses approximately 0.55 on a Pixel XL. |
| 92 static constexpr float kWebVrRecommendedResolutionScale = 0.5; |
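To make the constant above concrete, here is a minimal standalone sketch of the downscale arithmetic that InitializeGl performs further down; the 2560x1440 starting size is a hypothetical example, not a value reported by GetMaximumEffectiveRenderTargetSize.

    #include <cstdio>

    int main() {
      constexpr float kWebVrRecommendedResolutionScale = 0.5;
      // Hypothetical maximum effective render target size; a real device
      // reports its own value via GetMaximumEffectiveRenderTargetSize().
      constexpr int max_width = 2560;
      constexpr int max_height = 1440;
      const int render_width =
          static_cast<int>(max_width * kWebVrRecommendedResolutionScale);
      const int render_height =
          static_cast<int>(max_height * kWebVrRecommendedResolutionScale);
      // A 0.5 scale halves each dimension, so WebVR renders one quarter of
      // the 1:1 pixel count: 2560x1440 -> 1280x720.
      std::printf("%d x %d\n", render_width, render_height);
      return 0;
    }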
| 89 | 93 |
| 90 float Distance(const gvr::Vec3f& vec1, const gvr::Vec3f& vec2) { | 94 float Distance(const gvr::Vec3f& vec1, const gvr::Vec3f& vec2) { |
| 91 float xdiff = (vec1.x - vec2.x); | 95 float xdiff = (vec1.x - vec2.x); |
| 92 float ydiff = (vec1.y - vec2.y); | 96 float ydiff = (vec1.y - vec2.y); |
| 93 float zdiff = (vec1.z - vec2.z); | 97 float zdiff = (vec1.z - vec2.z); |
| 94 float scale = xdiff * xdiff + ydiff * ydiff + zdiff * zdiff; | 98 float scale = xdiff * xdiff + ydiff * ydiff + zdiff * zdiff; |
| 95 return std::sqrt(scale); | 99 return std::sqrt(scale); |
| 96 } | 100 } |
| 97 | 101 |
| 98 // Generate a quaternion representing the rotation from the negative Z axis | 102 // Generate a quaternion representing the rotation from the negative Z axis |
| (...skipping 129 matching lines...) |
| 228 LOG(ERROR) << "gl::init::CreateGLContext failed"; | 232 LOG(ERROR) << "gl::init::CreateGLContext failed"; |
| 229 ForceExitVr(); | 233 ForceExitVr(); |
| 230 return; | 234 return; |
| 231 } | 235 } |
| 232 if (!context_->MakeCurrent(surface_.get())) { | 236 if (!context_->MakeCurrent(surface_.get())) { |
| 233 LOG(ERROR) << "gl::GLContext::MakeCurrent() failed"; | 237 LOG(ERROR) << "gl::GLContext::MakeCurrent() failed"; |
| 234 ForceExitVr(); | 238 ForceExitVr(); |
| 235 return; | 239 return; |
| 236 } | 240 } |
| 237 | 241 |
| 238 unsigned int textures[2]; | 242 unsigned int textures[3]; |
| 239 glGenTextures(2, textures); | 243 glGenTextures(3, textures); |
| 240 ui_texture_id_ = textures[0]; | 244 ui_texture_id_ = textures[0]; |
| 241 content_texture_id_ = textures[1]; | 245 content_texture_id_ = textures[1]; |
| 246 webvr_texture_id_ = textures[2]; |
| 242 ui_surface_texture_ = gl::SurfaceTexture::Create(ui_texture_id_); | 247 ui_surface_texture_ = gl::SurfaceTexture::Create(ui_texture_id_); |
| 243 content_surface_texture_ = gl::SurfaceTexture::Create(content_texture_id_); | 248 content_surface_texture_ = gl::SurfaceTexture::Create(content_texture_id_); |
| 249 webvr_surface_texture_ = gl::SurfaceTexture::Create(webvr_texture_id_); |
| 244 CreateUiSurface(); | 250 CreateUiSurface(); |
| 245 CreateContentSurface(); | 251 CreateContentSurface(); |
| 252 // WebVR surface is created below. |
| 246 ui_surface_texture_->SetFrameAvailableCallback(base::Bind( | 253 ui_surface_texture_->SetFrameAvailableCallback(base::Bind( |
| 247 &VrShellGl::OnUIFrameAvailable, weak_ptr_factory_.GetWeakPtr())); | 254 &VrShellGl::OnUIFrameAvailable, weak_ptr_factory_.GetWeakPtr())); |
| 248 content_surface_texture_->SetFrameAvailableCallback(base::Bind( | 255 content_surface_texture_->SetFrameAvailableCallback(base::Bind( |
| 249 &VrShellGl::OnContentFrameAvailable, weak_ptr_factory_.GetWeakPtr())); | 256 &VrShellGl::OnContentFrameAvailable, weak_ptr_factory_.GetWeakPtr())); |
| 257 webvr_surface_texture_->SetFrameAvailableCallback(base::Bind( |
| 258 &VrShellGl::OnWebVRFrameAvailable, weak_ptr_factory_.GetWeakPtr())); |
| 259 ui_surface_texture_->SetDefaultBufferSize(ui_tex_physical_size_.width, |
| 260 ui_tex_physical_size_.height); |
| 250 content_surface_texture_->SetDefaultBufferSize( | 261 content_surface_texture_->SetDefaultBufferSize( |
| 251 content_tex_physical_size_.width, content_tex_physical_size_.height); | 262 content_tex_physical_size_.width, content_tex_physical_size_.height); |
| 252 ui_surface_texture_->SetDefaultBufferSize(ui_tex_physical_size_.width, | |
| 253 ui_tex_physical_size_.height); | |
| 254 InitializeRenderer(); | 263 InitializeRenderer(); |
| 255 | 264 |
| 265 // Pick a size for the WebVR transfer surface based on a downscaled |
| 266 // recommended render resolution, and also use that size as the |
| 267 // client-recommended renderWidth/renderHeight and for the GVR |
| 268 // framebuffer. If the client chooses a different size or resizes |
| 269 // it while presenting, we'll resize the transfer surface and GVR |
| 270 // framebuffer to match. |
| 271 |
| 272 auto render_target_size = gvr_api_->GetMaximumEffectiveRenderTargetSize(); |
| 273 |
| 274 gvr::Sizei webvr_size = {static_cast<int>(render_target_size.width * |
| 275 kWebVrRecommendedResolutionScale), |
| 276 static_cast<int>(render_target_size.height * |
| 277 kWebVrRecommendedResolutionScale)}; |
| 278 |
| 279 // TODO(klausw): should the size be rounded to a multiple of N pixels |
| 280 // to be friendlier to the GPU? The exact size doesn't matter. |
| 281 |
| 282 CreateOrResizeWebVRSurface(webvr_size); |
| 283 |
| 256 vsync_task_.Reset(base::Bind(&VrShellGl::OnVSync, base::Unretained(this))); | 284 vsync_task_.Reset(base::Bind(&VrShellGl::OnVSync, base::Unretained(this))); |
| 257 OnVSync(); | 285 OnVSync(); |
| 258 | 286 |
| 259 ready_to_draw_ = true; | 287 ready_to_draw_ = true; |
| 260 } | 288 } |
| 261 | 289 |
| 262 void VrShellGl::CreateContentSurface() { | 290 void VrShellGl::CreateContentSurface() { |
| 263 content_surface_ = | 291 content_surface_ = |
| 264 base::MakeUnique<gl::ScopedJavaSurface>(content_surface_texture_.get()); | 292 base::MakeUnique<gl::ScopedJavaSurface>(content_surface_texture_.get()); |
| 265 main_thread_task_runner_->PostTask( | 293 main_thread_task_runner_->PostTask( |
| 266 FROM_HERE, base::Bind(&VrShell::ContentSurfaceChanged, weak_vr_shell_, | 294 FROM_HERE, base::Bind(&VrShell::ContentSurfaceChanged, weak_vr_shell_, |
| 267 content_surface_->j_surface().obj())); | 295 content_surface_->j_surface().obj())); |
| 268 } | 296 } |
| 269 | 297 |
| 270 void VrShellGl::CreateUiSurface() { | 298 void VrShellGl::CreateUiSurface() { |
| 271 ui_surface_ = | 299 ui_surface_ = |
| 272 base::MakeUnique<gl::ScopedJavaSurface>(ui_surface_texture_.get()); | 300 base::MakeUnique<gl::ScopedJavaSurface>(ui_surface_texture_.get()); |
| 273 main_thread_task_runner_->PostTask( | 301 main_thread_task_runner_->PostTask( |
| 274 FROM_HERE, base::Bind(&VrShell::UiSurfaceChanged, weak_vr_shell_, | 302 FROM_HERE, base::Bind(&VrShell::UiSurfaceChanged, weak_vr_shell_, |
| 275 ui_surface_->j_surface().obj())); | 303 ui_surface_->j_surface().obj())); |
| 276 } | 304 } |
| 277 | 305 |
| 306 void VrShellGl::CreateOrResizeWebVRSurface(const gvr::Sizei& size) { |
| 307 if (!webvr_surface_texture_) { |
| 308 LOG(ERROR) << "No WebVR surface texture available"; |
| 309 return; |
| 310 } |
| 311 |
| 312 // ContentPhysicalBoundsChanged can get called twice in a row with |
| 313 // identical sizes; avoid thrashing the existing context in that case. |
| 314 if (size == webvr_surface_size_) { |
| 315 return; |
| 316 } |
| 317 |
| 318 if (!size.width || !size.height) { |
| 319 // Invalid size, defer until a new size arrives on a future bounds update. |
| 320 return; |
| 321 } |
| 322 |
| 323 webvr_surface_texture_->SetDefaultBufferSize(size.width, size.height); |
| 324 webvr_surface_size_ = size; |
| 325 |
| 326 if (command_buffer_gl_) { |
| 327 command_buffer_gl_->ResizeSurface(size.width, size.height); |
| 328 } else { |
| 329 command_buffer_gl_ = base::MakeUnique<VrShellCommandBufferGl>(); |
| 330 webvr_surface_ = command_buffer_gl_->CreateSurface(webvr_surface_texture_); |
| 331 } |
| 332 } |
| 333 |
| 334 void VrShellGl::SubmitWebVRFrame(int16_t frame_index, |
| 335 const gpu::MailboxHolder& mailbox) { |
| 336 TRACE_EVENT0("gpu", "VrShellGl::SubmitWebVRFrame"); |
| 337 |
| 338 bool swapped = command_buffer_gl_->CopyFrameToSurface( |
| 339 frame_index, mailbox, !pending_frames_.empty()); |
| 340 // Expect a new frame on the surface queue if draw was successful. |
| 341 if (swapped) { |
| 342 main_thread_task_runner_->PostTask( |
| 343 FROM_HERE, base::Bind(&VrShell::OnSubmitWebVRFrameTransferred, |
| 344 weak_vr_shell_, frame_index)); |
| 345 pending_frames_.emplace(frame_index); |
| 346 } |
| 347 |
| 348 TRACE_EVENT0("gpu", "VrShellGl::glFinish"); |
| 349 // This is a load-bearing glFinish; please don't remove it without |
| 350 // before/after timing comparisons. The goal is to clear the GPU queue |
| 351 // of the native GL context to avoid stalls later in GVR frame |
| 352 // acquire/submit. |
| 353 glFinish(); |
| 354 } |
| 355 |
| 278 void VrShellGl::OnUIFrameAvailable() { | 356 void VrShellGl::OnUIFrameAvailable() { |
| 279 ui_surface_texture_->UpdateTexImage(); | 357 ui_surface_texture_->UpdateTexImage(); |
| 280 } | 358 } |
| 281 | 359 |
| 282 void VrShellGl::OnContentFrameAvailable() { | 360 void VrShellGl::OnContentFrameAvailable() { |
| 283 content_surface_texture_->UpdateTexImage(); | 361 content_surface_texture_->UpdateTexImage(); |
| 284 received_frame_ = true; | 362 received_frame_ = true; |
| 285 } | 363 } |
| 286 | 364 |
| 287 bool VrShellGl::GetPixelEncodedFrameIndex(uint16_t* frame_index) { | 365 void VrShellGl::OnWebVRFrameAvailable() { |
| 288 TRACE_EVENT0("gpu", "VrShellGl::GetPixelEncodedFrameIndex"); | 366 // A "while" loop here is a bad idea. It's legal to call |
| 289 if (!received_frame_) { | 367 // UpdateTexImage repeatedly even if no frames are available, but |
| 290 if (last_frame_index_ == (uint16_t)-1) | 368 // that does *not* wait for a new frame; it just reuses the most |
| 291 return false; | 369 // recent one. That would mess up the count. |
| 292 *frame_index = last_frame_index_; | 370 if (pending_frames_.empty()) { |
| 293 return true; | 371 // We're expecting a frame, but it's not here yet. Retry in OnVSync. |
| 372 ++premature_received_frames_; |
| 373 return; |
| 294 } | 374 } |
| 295 received_frame_ = false; | |
| 296 | 375 |
| 297 // Read the pose index encoded in a bottom left pixel as color values. | 376 webvr_surface_texture_->UpdateTexImage(); |
| 298 // See also third_party/WebKit/Source/modules/vr/VRDisplay.cpp which | 377 int frame_index = pending_frames_.front(); |
| 299 // encodes the pose index, and device/vr/android/gvr/gvr_device.cc | 378 TRACE_EVENT1("gpu", "VrShellGl::OnWebVRFrameAvailable", "frame", frame_index); |
| 300 // which tracks poses. Returns the low byte (0..255) if valid, or -1 | 379 pending_frames_.pop(); |
| 301 // if not valid due to bad magic number. | |
| 302 uint8_t pixels[4]; | |
| 303 // Assume we're reading from the framebuffer we just wrote to. | |
| 304 // That's true currently, we may need to use glReadBuffer(GL_BACK) | |
| 305 // or equivalent if the rendering setup changes in the future. | |
| 306 glReadPixels(0, 0, 1, 1, GL_RGBA, GL_UNSIGNED_BYTE, pixels); | |
| 307 | 380 |
| 308 // Check for the magic number written by VRDevice.cpp on submit. | 381 // It is legal for the WebVR client to submit a new frame now, |
| 309 // This helps avoid glitches from garbage data in the render | 382 // since we've consumed the image. TODO(klausw): would it be |
| 310 // buffer that can appear during initialization or resizing. These | 383 // better for timing to move the rendered notification after the draw? |
| 311 // often appear as flashes of all-black or all-white pixels. | 384 |
| 312 if (pixels[1] == kWebVrPosePixelMagicNumbers[0] && | 385 main_thread_task_runner_->PostTask( |
| 313 pixels[2] == kWebVrPosePixelMagicNumbers[1]) { | 386 FROM_HERE, base::Bind(&VrShell::OnSubmitWebVRFrameRendered, |
| 314 // Pose is good. | 387 weak_vr_shell_, frame_index)); |
| 315 *frame_index = pixels[0]; | 388 |
| 316 last_frame_index_ = pixels[0]; | 389 DrawFrame(frame_index); |
| 317 return true; | |
| 318 } | |
| 319 VLOG(1) << "WebVR: reject decoded pose index " << static_cast<int>(pixels[0]) | |
| 320 << ", bad magic number " << static_cast<int>(pixels[1]) << ", " | |
| 321 << static_cast<int>(pixels[2]); | |
| 322 return false; | |
| 323 } | 390 } |
| 324 | 391 |
| 325 void VrShellGl::GvrInit(gvr_context* gvr_api) { | 392 void VrShellGl::GvrInit(gvr_context* gvr_api) { |
| 326 gvr_api_ = gvr::GvrApi::WrapNonOwned(gvr_api); | 393 gvr_api_ = gvr::GvrApi::WrapNonOwned(gvr_api); |
| 327 controller_.reset(new VrController(gvr_api)); | 394 controller_.reset(new VrController(gvr_api)); |
| 328 | 395 |
| 329 ViewerType viewerType; | 396 ViewerType viewerType; |
| 330 switch (gvr_api_->GetViewerType()) { | 397 switch (gvr_api_->GetViewerType()) { |
| 331 case gvr::ViewerType::GVR_VIEWER_TYPE_DAYDREAM: | 398 case gvr::ViewerType::GVR_VIEWER_TYPE_DAYDREAM: |
| 332 viewerType = ViewerType::DAYDREAM; | 399 viewerType = ViewerType::DAYDREAM; |
| 333 break; | 400 break; |
| 334 case gvr::ViewerType::GVR_VIEWER_TYPE_CARDBOARD: | 401 case gvr::ViewerType::GVR_VIEWER_TYPE_CARDBOARD: |
| 335 viewerType = ViewerType::CARDBOARD; | 402 viewerType = ViewerType::CARDBOARD; |
| 336 break; | 403 break; |
| 337 default: | 404 default: |
| 338 NOTREACHED(); | 405 NOTREACHED(); |
| 339 viewerType = ViewerType::UNKNOWN_TYPE; | 406 viewerType = ViewerType::UNKNOWN_TYPE; |
| 340 break; | 407 break; |
| 341 } | 408 } |
| 342 UMA_HISTOGRAM_ENUMERATION("VRViewerType", static_cast<int>(viewerType), | 409 UMA_HISTOGRAM_ENUMERATION("VRViewerType", static_cast<int>(viewerType), |
| 343 static_cast<int>(ViewerType::VIEWER_TYPE_MAX)); | 410 static_cast<int>(ViewerType::VIEWER_TYPE_MAX)); |
| 344 } | 411 } |
| 345 | 412 |
| 346 void VrShellGl::InitializeRenderer() { | 413 void VrShellGl::InitializeRenderer() { |
| 347 // While WebVR is going through the compositor path, it shares | |
| 348 // the same texture ID. This will change once it gets its own | |
| 349 // surface, but store it separately to avoid future confusion. | |
| 350 // TODO(klausw,crbug.com/655722): remove this. | |
| 351 webvr_texture_id_ = content_texture_id_; | |
| 352 | |
| 353 gvr_api_->InitializeGl(); | 414 gvr_api_->InitializeGl(); |
| 354 webvr_head_pose_.assign(kPoseRingBufferSize, | 415 webvr_head_pose_.assign(kPoseRingBufferSize, |
| 355 gvr_api_->GetHeadSpaceFromStartSpaceRotation( | 416 gvr_api_->GetHeadSpaceFromStartSpaceRotation( |
| 356 gvr::GvrApi::GetTimePointNow())); | 417 gvr::GvrApi::GetTimePointNow())); |
| 357 | 418 |
| 358 std::vector<gvr::BufferSpec> specs; | 419 std::vector<gvr::BufferSpec> specs; |
| 359 // For kFramePrimaryBuffer (primary VrShell and WebVR content) | 420 // For kFramePrimaryBuffer (primary VrShell and WebVR content) |
| 360 specs.push_back(gvr_api_->CreateBufferSpec()); | 421 specs.push_back(gvr_api_->CreateBufferSpec()); |
| 361 render_size_primary_ = specs[kFramePrimaryBuffer].GetSize(); | 422 render_size_primary_ = specs[kFramePrimaryBuffer].GetSize(); |
| 423 render_size_vrshell_ = render_size_primary_; |
| 362 | 424 |
| 363 // For kFrameHeadlockedBuffer (for WebVR insecure content warning). | 425 // For kFrameHeadlockedBuffer (for WebVR insecure content warning). |
| 364 // Set this up at fixed resolution, the (smaller) FOV gets set below. | 426 // Set this up at fixed resolution, the (smaller) FOV gets set below. |
| 365 specs.push_back(gvr_api_->CreateBufferSpec()); | 427 specs.push_back(gvr_api_->CreateBufferSpec()); |
| 366 specs.back().SetSize(kHeadlockedBufferDimensions); | 428 specs.back().SetSize(kHeadlockedBufferDimensions); |
| 367 render_size_headlocked_ = specs[kFrameHeadlockedBuffer].GetSize(); | 429 render_size_headlocked_ = specs[kFrameHeadlockedBuffer].GetSize(); |
| 368 | 430 |
| 369 swap_chain_.reset(new gvr::SwapChain(gvr_api_->CreateSwapChain(specs))); | 431 swap_chain_.reset(new gvr::SwapChain(gvr_api_->CreateSwapChain(specs))); |
| 370 | 432 |
| 371 vr_shell_renderer_.reset(new VrShellRenderer()); | 433 vr_shell_renderer_.reset(new VrShellRenderer()); |
| (...skipping 251 matching lines...) |
| 623 std::unique_ptr<blink::WebInputEvent> event) { | 685 std::unique_ptr<blink::WebInputEvent> event) { |
| 624 DCHECK(input_target != InputTarget::NONE); | 686 DCHECK(input_target != InputTarget::NONE); |
| 625 auto&& target = input_target == InputTarget::CONTENT | 687 auto&& target = input_target == InputTarget::CONTENT |
| 626 ? &VrShell::ProcessContentGesture | 688 ? &VrShell::ProcessContentGesture |
| 627 : &VrShell::ProcessUIGesture; | 689 : &VrShell::ProcessUIGesture; |
| 628 main_thread_task_runner_->PostTask( | 690 main_thread_task_runner_->PostTask( |
| 629 FROM_HERE, | 691 FROM_HERE, |
| 630 base::Bind(target, weak_vr_shell_, base::Passed(std::move(event)))); | 692 base::Bind(target, weak_vr_shell_, base::Passed(std::move(event)))); |
| 631 } | 693 } |
| 632 | 694 |
| 633 void VrShellGl::DrawFrame() { | 695 void VrShellGl::DrawFrame(int frame_index) { |
| 634 TRACE_EVENT0("gpu", "VrShellGl::DrawFrame"); | 696 TRACE_EVENT1("gpu", "VrShellGl::DrawFrame", "frame", frame_index); |
| 635 | 697 |
| 636 // Reset the viewport list to just the pair of viewports for the | 698 // Reset the viewport list to just the pair of viewports for the |
| 637 // primary buffer each frame. Head-locked viewports get added by | 699 // primary buffer each frame. Head-locked viewports get added by |
| 638 // DrawVrShell if needed. | 700 // DrawVrShell if needed. |
| 639 buffer_viewport_list_->SetToRecommendedBufferViewports(); | 701 buffer_viewport_list_->SetToRecommendedBufferViewports(); |
| 640 | 702 |
| 703 // If needed, resize the primary buffer for use with WebVR. |
| 704 if (web_vr_mode_) { |
| 705 if (render_size_primary_ != webvr_surface_size_) { |
| 706 if (!webvr_surface_size_.width) { |
| 707 return; |
| 708 } |
| 709 render_size_primary_ = webvr_surface_size_; |
| 710 swap_chain_->ResizeBuffer(kFramePrimaryBuffer, render_size_primary_); |
| 711 } |
| 712 } else { |
| 713 if (render_size_primary_ != render_size_vrshell_) { |
| 714 render_size_primary_ = render_size_vrshell_; |
| 715 swap_chain_->ResizeBuffer(kFramePrimaryBuffer, render_size_primary_); |
| 716 } |
| 717 } |
| 718 |
| 719 TRACE_EVENT_BEGIN0("gpu", "VrShellGl::AcquireFrame"); |
| 641 gvr::Frame frame = swap_chain_->AcquireFrame(); | 720 gvr::Frame frame = swap_chain_->AcquireFrame(); |
| 721 TRACE_EVENT_END0("gpu", "VrShellGl::AcquireFrame"); |
| 642 if (!frame.is_valid()) { | 722 if (!frame.is_valid()) { |
| 643 return; | 723 return; |
| 644 } | 724 } |
| 645 frame.BindBuffer(kFramePrimaryBuffer); | 725 frame.BindBuffer(kFramePrimaryBuffer); |
| 646 if (web_vr_mode_) { | 726 if (web_vr_mode_) { |
| 647 DrawWebVr(); | 727 DrawWebVr(); |
| 648 } | 728 } |
| 649 | 729 |
| 650 uint16_t frame_index; | |
| 651 gvr::Mat4f head_pose; | 730 gvr::Mat4f head_pose; |
| 652 | 731 |
| 653 // When using async reprojection, we need to know which pose was used in | 732 // When using async reprojection, we need to know which pose was used in |
| 654 // the WebVR app for drawing this frame. Due to unknown amounts of | 733 // the WebVR app for drawing this frame. Only needed if reprojection is |
| 655 // buffering in the compositor and SurfaceTexture, we read the pose number | 734 // in use. |
| 656 // from a corner pixel. There's no point in doing this for legacy | 735 if (web_vr_mode_ && gvr_api_->GetAsyncReprojectionEnabled()) { |
| 657 // distortion rendering since that doesn't need a pose, and reading back | |
| 658 // pixels is an expensive operation. TODO(klausw,crbug.com/655722): stop | |
| 659 // doing this once we have working no-compositor rendering for WebVR. | |
| 660 if (web_vr_mode_ && gvr_api_->GetAsyncReprojectionEnabled() && | |
| 661 GetPixelEncodedFrameIndex(&frame_index)) { | |
| 662 static_assert(!((kPoseRingBufferSize - 1) & kPoseRingBufferSize), | 736 static_assert(!((kPoseRingBufferSize - 1) & kPoseRingBufferSize), |
| 663 "kPoseRingBufferSize must be a power of 2"); | 737 "kPoseRingBufferSize must be a power of 2"); |
| 664 head_pose = webvr_head_pose_[frame_index % kPoseRingBufferSize]; | 738 head_pose = webvr_head_pose_[frame_index % kPoseRingBufferSize]; |
| 665 // Process all pending_bounds_ changes targeted for before this frame, being | 739 // Process all pending_bounds_ changes targeted for before this frame, being |
| 666 // careful of wrapping frame indices. | 740 // careful of wrapping frame indices. |
| 667 static constexpr unsigned max = | 741 static constexpr unsigned max = |
| 668 std::numeric_limits<decltype(frame_index_)>::max(); | 742 std::numeric_limits<decltype(frame_index_)>::max(); |
| 669 static_assert(max > kPoseRingBufferSize * 2, | 743 static_assert(max > kPoseRingBufferSize * 2, |
| 670 "To detect wrapping, kPoseRingBufferSize must be smaller " | 744 "To detect wrapping, kPoseRingBufferSize must be smaller " |
| 671 "than half of frame_index_ range."); | 745 "than half of frame_index_ range."); |
| 672 while (!pending_bounds_.empty()) { | 746 while (!pending_bounds_.empty()) { |
| 673 uint16_t index = pending_bounds_.front().first; | 747 uint16_t index = pending_bounds_.front().first; |
| 674 // If index is less than the frame_index it's possible we've wrapped, so | 748 // If index is less than the frame_index it's possible we've wrapped, so |
| 675 // we extend the range and 'un-wrap' to account for this. | 749 // we extend the range and 'un-wrap' to account for this. |
| 676 if (index < frame_index) | 750 if (index < frame_index) |
| 677 index += max; | 751 index += max; |
| 678 // If the pending bounds change is for an upcoming frame within our buffer | 752 // If the pending bounds change is for an upcoming frame within our buffer |
| 679 // size, wait to apply it. Otherwise, apply it immediately. This | 753 // size, wait to apply it. Otherwise, apply it immediately. This |
| 680 // guarantees that even if we miss many frames, the queue can't fill up | 754 // guarantees that even if we miss many frames, the queue can't fill up |
| 681 // with stale bounds. | 755 // with stale bounds. |
| 682 if (index > frame_index && index <= frame_index + kPoseRingBufferSize) | 756 if (index > frame_index && index <= frame_index + kPoseRingBufferSize) |
| 683 break; | 757 break; |
| 684 | 758 |
| 685 const BoundsPair& bounds = pending_bounds_.front().second; | 759 const WebVrBounds& bounds = pending_bounds_.front().second; |
| 686 webvr_left_viewport_->SetSourceUv(bounds.first); | 760 webvr_left_viewport_->SetSourceUv(bounds.left_bounds); |
| 687 webvr_right_viewport_->SetSourceUv(bounds.second); | 761 webvr_right_viewport_->SetSourceUv(bounds.right_bounds); |
| 762 CreateOrResizeWebVRSurface(bounds.source_size); |
| 688 pending_bounds_.pop(); | 763 pending_bounds_.pop(); |
| 689 } | 764 } |
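The wrap-around handling above is subtle: frame indices are uint16_t, so a bounds update tagged just before the counter wraps can compare as smaller than the current frame index even though it targets a future frame. (The power-of-two requirement on kPoseRingBufferSize serves a related purpose: it keeps frame_index % kPoseRingBufferSize consistent across the wrap, since 65536 is a multiple of the buffer size.) Below is a minimal standalone sketch of the defer-or-apply decision with hypothetical frame numbers; the ShouldDefer helper is illustrative only and not part of this change.

    #include <cstdint>
    #include <cstdio>
    #include <limits>

    static constexpr unsigned kPoseRingBufferSize = 8;

    // Returns true if the bounds update tagged with |index| targets an
    // upcoming frame (defer it); false if it is current or stale (apply now).
    bool ShouldDefer(uint16_t index, uint16_t frame_index) {
      static constexpr unsigned max = std::numeric_limits<uint16_t>::max();
      unsigned unwrapped = index;
      if (index < frame_index)
        unwrapped += max;  // "un-wrap" an index that may have rolled over
      return unwrapped > frame_index &&
             unwrapped <= frame_index + kPoseRingBufferSize;
    }

    int main() {
      // Tagged for frame 2 while we are still on frame 65534: the counter
      // wrapped, but the bounds are still within the upcoming-frame window,
      // so defer them.
      std::printf("%d\n", ShouldDefer(2, 65534));  // prints 1
      // Tagged for frame 100 when we are already on frame 105: stale, so
      // apply immediately and keep the queue from filling with old bounds.
      std::printf("%d\n", ShouldDefer(100, 105));  // prints 0
      return 0;
    }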
| 690 buffer_viewport_list_->SetBufferViewport(GVR_LEFT_EYE, | 765 buffer_viewport_list_->SetBufferViewport(GVR_LEFT_EYE, |
| 691 *webvr_left_viewport_); | 766 *webvr_left_viewport_); |
| 692 buffer_viewport_list_->SetBufferViewport(GVR_RIGHT_EYE, | 767 buffer_viewport_list_->SetBufferViewport(GVR_RIGHT_EYE, |
| 693 *webvr_right_viewport_); | 768 *webvr_right_viewport_); |
| 694 } else { | 769 } else { |
| 695 gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow(); | 770 gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow(); |
| 696 target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos; | 771 target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos; |
| 697 head_pose = gvr_api_->GetHeadSpaceFromStartSpaceRotation(target_time); | 772 head_pose = gvr_api_->GetHeadSpaceFromStartSpaceRotation(target_time); |
| 698 } | 773 } |
| 699 | 774 |
| 700 gvr::Vec3f position = GetTranslation(head_pose); | 775 gvr::Vec3f position = GetTranslation(head_pose); |
| 701 if (position.x == 0.0f && position.y == 0.0f && position.z == 0.0f) { | 776 if (position.x == 0.0f && position.y == 0.0f && position.z == 0.0f) { |
| 702 // This appears to be a 3DOF pose without a neck model. Add one. | 777 // This appears to be a 3DOF pose without a neck model. Add one. |
| 703 // The head pose has redundant data. Assume we're only using the | 778 // The head pose has redundant data. Assume we're only using the |
| 704 // object_from_reference_matrix; we're not updating position_external. | 779 // object_from_reference_matrix; we're not updating position_external. |
| 705 // TODO: Not sure what object_from_reference_matrix is. The new API removed | 780 // TODO: Not sure what object_from_reference_matrix is. The new API removed |
| 706 // it. For now, removing it seems to work fine. | 781 // it. For now, removing it seems to work fine. |
| 707 gvr_api_->ApplyNeckModel(head_pose, 1.0f); | 782 gvr_api_->ApplyNeckModel(head_pose, 1.0f); |
| 708 } | 783 } |
| 709 | 784 |
| 710 // Update the render position of all UI elements (including desktop). | 785 // Update the render position of all UI elements (including desktop). |
| 711 const float screen_tilt = kDesktopScreenTiltDefault * M_PI / 180.0f; | 786 const float screen_tilt = kDesktopScreenTiltDefault * M_PI / 180.0f; |
| 712 scene_->UpdateTransforms(screen_tilt, TimeInMicroseconds()); | 787 scene_->UpdateTransforms(screen_tilt, TimeInMicroseconds()); |
| 713 | 788 |
| 714 UpdateController(GetForwardVector(head_pose)); | 789 { |
| 790 TRACE_EVENT0("gpu", "VrShellGl::UpdateController"); |
| 791 UpdateController(GetForwardVector(head_pose)); |
| 792 } |
| 715 | 793 |
| 716 DrawVrShell(head_pose, frame); | 794 // Finish drawing in the primary buffer, and draw the headlocked buffer |
| 795 // if needed. This must be the last drawing call; the method |
| 796 // returns with no buffer bound. |
| 797 DrawVrShellAndUnbind(head_pose, frame); |
| 717 | 798 |
| 718 frame.Unbind(); | 799 { |
| 719 frame.Submit(*buffer_viewport_list_, head_pose); | 800 TRACE_EVENT0("gpu", "VrShellGl::Submit"); |
| 801 frame.Submit(*buffer_viewport_list_, head_pose); |
| 802 } |
| 720 | 803 |
| 721 // No need to swap buffers for surfaceless rendering. | 804 // No need to swap buffers for surfaceless rendering. |
| 722 if (!surfaceless_rendering_) { | 805 if (!surfaceless_rendering_) { |
| 723 // TODO(mthiesse): Support asynchronous SwapBuffers. | 806 // TODO(mthiesse): Support asynchronous SwapBuffers. |
| 807 TRACE_EVENT0("gpu", "VrShellGl::SwapBuffers"); |
| 724 surface_->SwapBuffers(); | 808 surface_->SwapBuffers(); |
| 725 } | 809 } |
| 726 } | 810 } |
| 727 | 811 |
| 728 void VrShellGl::DrawVrShell(const gvr::Mat4f& head_pose, gvr::Frame& frame) { | 812 void VrShellGl::DrawVrShellAndUnbind(const gvr::Mat4f& head_pose, |
| 813 gvr::Frame& frame) { |
| 729 TRACE_EVENT0("gpu", "VrShellGl::DrawVrShell"); | 814 TRACE_EVENT0("gpu", "VrShellGl::DrawVrShell"); |
| 730 std::vector<const ContentRectangle*> head_locked_elements; | 815 std::vector<const ContentRectangle*> head_locked_elements; |
| 731 std::vector<const ContentRectangle*> world_elements; | 816 std::vector<const ContentRectangle*> world_elements; |
| 732 for (const auto& rect : scene_->GetUiElements()) { | 817 for (const auto& rect : scene_->GetUiElements()) { |
| 733 if (!rect->IsVisible()) | 818 if (!rect->IsVisible()) |
| 734 continue; | 819 continue; |
| 735 if (rect->lock_to_fov) { | 820 if (rect->lock_to_fov) { |
| 736 head_locked_elements.push_back(rect.get()); | 821 head_locked_elements.push_back(rect.get()); |
| 737 } else { | 822 } else { |
| 738 world_elements.push_back(rect.get()); | 823 world_elements.push_back(rect.get()); |
| (...skipping 16 matching lines...) |
| 755 | 840 |
| 756 const Colorf& backgroundColor = scene_->GetBackgroundColor(); | 841 const Colorf& backgroundColor = scene_->GetBackgroundColor(); |
| 757 glClearColor(backgroundColor.r, backgroundColor.g, backgroundColor.b, | 842 glClearColor(backgroundColor.r, backgroundColor.g, backgroundColor.b, |
| 758 backgroundColor.a); | 843 backgroundColor.a); |
| 759 glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); | 844 glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); |
| 760 } | 845 } |
| 761 if (!world_elements.empty()) { | 846 if (!world_elements.empty()) { |
| 762 DrawUiView(&head_pose, world_elements, render_size_primary_, | 847 DrawUiView(&head_pose, world_elements, render_size_primary_, |
| 763 kViewportListPrimaryOffset); | 848 kViewportListPrimaryOffset); |
| 764 } | 849 } |
| 850 frame.Unbind(); // Done with the primary buffer. |
| 765 | 851 |
| 766 if (!head_locked_elements.empty()) { | 852 if (!head_locked_elements.empty()) { |
| 767 // Add head-locked viewports. The list gets reset to just | 853 // Add head-locked viewports. The list gets reset to just |
| 768 // the recommended viewports (for the primary buffer) each frame. | 854 // the recommended viewports (for the primary buffer) each frame. |
| 769 buffer_viewport_list_->SetBufferViewport( | 855 buffer_viewport_list_->SetBufferViewport( |
| 770 kViewportListHeadlockedOffset + GVR_LEFT_EYE, | 856 kViewportListHeadlockedOffset + GVR_LEFT_EYE, |
| 771 *headlocked_left_viewport_); | 857 *headlocked_left_viewport_); |
| 772 buffer_viewport_list_->SetBufferViewport( | 858 buffer_viewport_list_->SetBufferViewport( |
| 773 kViewportListHeadlockedOffset + GVR_RIGHT_EYE, | 859 kViewportListHeadlockedOffset + GVR_RIGHT_EYE, |
| 774 *headlocked_right_viewport_); | 860 *headlocked_right_viewport_); |
| 775 | 861 |
| 776 // Bind the headlocked framebuffer. | 862 // Bind the headlocked framebuffer. |
| 777 // TODO(mthiesse): We don't unbind this? Maybe some cleanup is in order | |
| 778 // here. | |
| 779 frame.BindBuffer(kFrameHeadlockedBuffer); | 863 frame.BindBuffer(kFrameHeadlockedBuffer); |
| 780 glClearColor(0.0f, 0.0f, 0.0f, 0.0f); | 864 glClearColor(0.0f, 0.0f, 0.0f, 0.0f); |
| 781 glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); | 865 glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); |
| 782 DrawUiView(nullptr, head_locked_elements, render_size_headlocked_, | 866 DrawUiView(nullptr, head_locked_elements, render_size_headlocked_, |
| 783 kViewportListHeadlockedOffset); | 867 kViewportListHeadlockedOffset); |
| 868 frame.Unbind(); // Done with the headlocked buffer. |
| 784 } | 869 } |
| 785 } | 870 } |
| 786 | 871 |
| 787 gvr::Sizei VrShellGl::GetWebVRCompositorSurfaceSize() { | |
| 788 // This is a stopgap while we're using the WebVR compositor rendering path. | |
| 789 // TODO(klausw,crbug.com/655722): Remove this method and member once we're | |
| 790 // using a separate WebVR render surface. | |
| 791 return content_tex_physical_size_; | |
| 792 } | |
| 793 | |
| 794 void VrShellGl::DrawUiView(const gvr::Mat4f* head_pose, | 872 void VrShellGl::DrawUiView(const gvr::Mat4f* head_pose, |
| 795 const std::vector<const ContentRectangle*>& elements, | 873 const std::vector<const ContentRectangle*>& elements, |
| 796 const gvr::Sizei& render_size, | 874 const gvr::Sizei& render_size, |
| 797 int viewport_offset) { | 875 int viewport_offset) { |
| 798 TRACE_EVENT0("gpu", "VrShellGl::DrawUiView"); | 876 TRACE_EVENT0("gpu", "VrShellGl::DrawUiView"); |
| 799 | 877 |
| 800 gvr::Mat4f view_matrix; | 878 gvr::Mat4f view_matrix; |
| 801 if (head_pose) { | 879 if (head_pose) { |
| 802 view_matrix = *head_pose; | 880 view_matrix = *head_pose; |
| 803 } else { | 881 } else { |
| (...skipping 186 matching lines...) |
| 990 void VrShellGl::DrawWebVr() { | 1068 void VrShellGl::DrawWebVr() { |
| 991 TRACE_EVENT0("gpu", "VrShellGl::DrawWebVr"); | 1069 TRACE_EVENT0("gpu", "VrShellGl::DrawWebVr"); |
| 992 // Don't need face culling, depth testing, blending, etc. Turn it all off. | 1070 // Don't need face culling, depth testing, blending, etc. Turn it all off. |
| 993 glDisable(GL_CULL_FACE); | 1071 glDisable(GL_CULL_FACE); |
| 994 glDepthMask(GL_FALSE); | 1072 glDepthMask(GL_FALSE); |
| 995 glDisable(GL_DEPTH_TEST); | 1073 glDisable(GL_DEPTH_TEST); |
| 996 glDisable(GL_SCISSOR_TEST); | 1074 glDisable(GL_SCISSOR_TEST); |
| 997 glDisable(GL_BLEND); | 1075 glDisable(GL_BLEND); |
| 998 glDisable(GL_POLYGON_OFFSET_FILL); | 1076 glDisable(GL_POLYGON_OFFSET_FILL); |
| 999 | 1077 |
| 1000 glViewport(0, 0, render_size_primary_.width, render_size_primary_.height); | 1078 glViewport(0, 0, webvr_surface_size_.width, webvr_surface_size_.height); |
| 1001 vr_shell_renderer_->GetWebVrRenderer()->Draw(webvr_texture_id_); | 1079 vr_shell_renderer_->GetWebVrRenderer()->Draw(webvr_texture_id_); |
| 1002 } | 1080 } |
| 1003 | 1081 |
| 1004 void VrShellGl::OnTriggerEvent() { | 1082 void VrShellGl::OnTriggerEvent() { |
| 1005 // Set a flag to handle this on the render thread at the next frame. | 1083 // Set a flag to handle this on the render thread at the next frame. |
| 1006 touch_pending_ = true; | 1084 touch_pending_ = true; |
| 1007 } | 1085 } |
| 1008 | 1086 |
| 1009 void VrShellGl::OnPause() { | 1087 void VrShellGl::OnPause() { |
| 1010 vsync_task_.Cancel(); | 1088 vsync_task_.Cancel(); |
| (...skipping 10 matching lines...) |
| 1021 OnVSync(); | 1099 OnVSync(); |
| 1022 } | 1100 } |
| 1023 } | 1101 } |
| 1024 | 1102 |
| 1025 void VrShellGl::SetWebVrMode(bool enabled) { | 1103 void VrShellGl::SetWebVrMode(bool enabled) { |
| 1026 web_vr_mode_ = enabled; | 1104 web_vr_mode_ = enabled; |
| 1027 } | 1105 } |
| 1028 | 1106 |
| 1029 void VrShellGl::UpdateWebVRTextureBounds(int16_t frame_index, | 1107 void VrShellGl::UpdateWebVRTextureBounds(int16_t frame_index, |
| 1030 const gvr::Rectf& left_bounds, | 1108 const gvr::Rectf& left_bounds, |
| 1031 const gvr::Rectf& right_bounds) { | 1109 const gvr::Rectf& right_bounds, |
| 1110 const gvr::Sizei& source_size) { |
| 1032 if (frame_index < 0) { | 1111 if (frame_index < 0) { |
| 1033 webvr_left_viewport_->SetSourceUv(left_bounds); | 1112 webvr_left_viewport_->SetSourceUv(left_bounds); |
| 1034 webvr_right_viewport_->SetSourceUv(right_bounds); | 1113 webvr_right_viewport_->SetSourceUv(right_bounds); |
| 1035 } else { | 1114 } else { |
| 1036 pending_bounds_.emplace( | 1115 pending_bounds_.emplace(std::make_pair( |
| 1037 std::make_pair(frame_index, std::make_pair(left_bounds, right_bounds))); | 1116 frame_index, WebVrBounds(left_bounds, right_bounds, source_size))); |
| 1038 } | 1117 } |
| 1039 } | 1118 } |
| 1040 | 1119 |
| 1041 void VrShellGl::ContentBoundsChanged(int width, int height) { | 1120 void VrShellGl::ContentBoundsChanged(int width, int height) { |
| 1042 TRACE_EVENT0("gpu", "VrShellGl::ContentBoundsChanged"); | 1121 TRACE_EVENT0("gpu", "VrShellGl::ContentBoundsChanged"); |
| 1043 content_tex_css_width_ = width; | 1122 content_tex_css_width_ = width; |
| 1044 content_tex_css_height_ = height; | 1123 content_tex_css_height_ = height; |
| 1045 } | 1124 } |
| 1046 | 1125 |
| 1047 void VrShellGl::ContentPhysicalBoundsChanged(int width, int height) { | 1126 void VrShellGl::ContentPhysicalBoundsChanged(int width, int height) { |
| (...skipping 13 matching lines...) |
| 1061 ui_surface_texture_->SetDefaultBufferSize(width, height); | 1140 ui_surface_texture_->SetDefaultBufferSize(width, height); |
| 1062 ui_tex_physical_size_.width = width; | 1141 ui_tex_physical_size_.width = width; |
| 1063 ui_tex_physical_size_.height = height; | 1142 ui_tex_physical_size_.height = height; |
| 1064 } | 1143 } |
| 1065 | 1144 |
| 1066 base::WeakPtr<VrShellGl> VrShellGl::GetWeakPtr() { | 1145 base::WeakPtr<VrShellGl> VrShellGl::GetWeakPtr() { |
| 1067 return weak_ptr_factory_.GetWeakPtr(); | 1146 return weak_ptr_factory_.GetWeakPtr(); |
| 1068 } | 1147 } |
| 1069 | 1148 |
| 1070 void VrShellGl::OnVSync() { | 1149 void VrShellGl::OnVSync() { |
| 1150 while (premature_received_frames_ > 0) { |
| 1151 TRACE_EVENT0("gpu", "VrShellGl::OnWebVRFrameAvailableRetry"); |
| 1152 --premature_received_frames_; |
| 1153 OnWebVRFrameAvailable(); |
| 1154 } |
| 1155 |
| 1071 base::TimeTicks now = base::TimeTicks::Now(); | 1156 base::TimeTicks now = base::TimeTicks::Now(); |
| 1072 base::TimeTicks target; | 1157 base::TimeTicks target; |
| 1073 | 1158 |
| 1074 // Don't send VSyncs until we have a timebase/interval. | 1159 // Don't send VSyncs until we have a timebase/interval. |
| 1075 if (vsync_interval_.is_zero()) | 1160 if (vsync_interval_.is_zero()) |
| 1076 return; | 1161 return; |
| 1077 target = now + vsync_interval_; | 1162 target = now + vsync_interval_; |
| 1078 int64_t intervals = (target - vsync_timebase_) / vsync_interval_; | 1163 int64_t intervals = (target - vsync_timebase_) / vsync_interval_; |
| 1079 target = vsync_timebase_ + intervals * vsync_interval_; | 1164 target = vsync_timebase_ + intervals * vsync_interval_; |
| 1080 task_runner_->PostDelayedTask(FROM_HERE, vsync_task_.callback(), | 1165 task_runner_->PostDelayedTask(FROM_HERE, vsync_task_.callback(), |
| 1081 target - now); | 1166 target - now); |
| 1082 | 1167 |
| 1083 base::TimeDelta time = intervals * vsync_interval_; | 1168 base::TimeDelta time = intervals * vsync_interval_; |
| 1084 if (!callback_.is_null()) { | 1169 if (!callback_.is_null()) { |
| 1085 SendVSync(time, base::ResetAndReturn(&callback_)); | 1170 SendVSync(time, base::ResetAndReturn(&callback_)); |
| 1086 } else { | 1171 } else { |
| 1087 pending_vsync_ = true; | 1172 pending_vsync_ = true; |
| 1088 pending_time_ = time; | 1173 pending_time_ = time; |
| 1089 } | 1174 } |
| 1090 DrawFrame(); | 1175 if (!web_vr_mode_) { |
| 1176 DrawFrame(-1); |
| 1177 } |
| 1091 } | 1178 } |
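As a worked example of the vsync alignment in OnVSync above, using hypothetical numbers: with vsync_timebase_ = 0 ms and vsync_interval_ = 16.67 ms, a callback that fires slightly late at now = 20 ms first computes target = 36.67 ms, then intervals = (36.67 - 0) / 16.67 truncates to 2, and the snapped target becomes 0 + 2 * 16.67 = 33.33 ms, so the next task is posted with a 13.33 ms delay. Callbacks therefore stay locked to the display's vsync grid instead of accumulating wakeup jitter, and the time value handed to SendVSync advances in whole intervals.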
| 1092 | 1179 |
| 1093 void VrShellGl::OnRequest(device::mojom::VRVSyncProviderRequest request) { | 1180 void VrShellGl::OnRequest(device::mojom::VRVSyncProviderRequest request) { |
| 1094 binding_.Close(); | 1181 binding_.Close(); |
| 1095 binding_.Bind(std::move(request)); | 1182 binding_.Bind(std::move(request)); |
| 1096 } | 1183 } |
| 1097 | 1184 |
| 1098 void VrShellGl::GetVSync(const GetVSyncCallback& callback) { | 1185 void VrShellGl::GetVSync(const GetVSyncCallback& callback) { |
| 1099 if (!pending_vsync_) { | 1186 if (!pending_vsync_) { |
| 1100 if (!callback_.is_null()) { | 1187 if (!callback_.is_null()) { |
| (...skipping 49 matching lines...) |
| 1150 void VrShellGl::ResetPose() { | 1237 void VrShellGl::ResetPose() { |
| 1151 // Should never call RecenterTracking when using with Daydream viewers. On | 1238 // Should never call RecenterTracking when using with Daydream viewers. On |
| 1152 // those devices recentering should only be done via the controller. | 1239 // those devices recentering should only be done via the controller. |
| 1153 if (gvr_api_ && gvr_api_->GetViewerType() == GVR_VIEWER_TYPE_CARDBOARD) | 1240 if (gvr_api_ && gvr_api_->GetViewerType() == GVR_VIEWER_TYPE_CARDBOARD) |
| 1154 gvr_api_->RecenterTracking(); | 1241 gvr_api_->RecenterTracking(); |
| 1155 } | 1242 } |
| 1156 | 1243 |
| 1157 void VrShellGl::CreateVRDisplayInfo( | 1244 void VrShellGl::CreateVRDisplayInfo( |
| 1158 const base::Callback<void(device::mojom::VRDisplayInfoPtr)>& callback, | 1245 const base::Callback<void(device::mojom::VRDisplayInfoPtr)>& callback, |
| 1159 uint32_t device_id) { | 1246 uint32_t device_id) { |
| 1247 // This assumes that the initial webvr_surface_size_ was set to the |
| 1248 // appropriate recommended render resolution as the default size during |
| 1249 // InitializeGl. Revisit if the initialization order changes. |
| 1160 device::mojom::VRDisplayInfoPtr info = VrShell::CreateVRDisplayInfo( | 1250 device::mojom::VRDisplayInfoPtr info = VrShell::CreateVRDisplayInfo( |
| 1161 gvr_api_.get(), content_tex_physical_size_, device_id); | 1251 gvr_api_.get(), webvr_surface_size_, device_id); |
| 1162 main_thread_task_runner_->PostTask( | 1252 main_thread_task_runner_->PostTask( |
| 1163 FROM_HERE, | 1253 FROM_HERE, |
| 1164 base::Bind(&RunVRDisplayInfoCallback, callback, base::Passed(&info))); | 1254 base::Bind(&RunVRDisplayInfoCallback, callback, base::Passed(&info))); |
| 1165 } | 1255 } |
| 1166 | 1256 |
| 1167 } // namespace vr_shell | 1257 } // namespace vr_shell |
| OLD | NEW |