OLD | NEW |
---|---|
1 // Copyright 2016 The Chromium Authors. All rights reserved. | 1 // Copyright 2016 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "chrome/browser/android/vr_shell/vr_shell_gl.h" | 5 #include "chrome/browser/android/vr_shell/vr_shell_gl.h" |
6 | 6 |
7 #include <limits> | 7 #include <limits> |
8 #include <utility> | 8 #include <utility> |
9 | 9 |
10 #include "base/android/jni_android.h" | 10 #include "base/android/jni_android.h" |
11 #include "base/callback_helpers.h" | 11 #include "base/callback_helpers.h" |
12 #include "base/memory/ptr_util.h" | 12 #include "base/memory/ptr_util.h" |
13 #include "base/metrics/histogram_macros.h" | 13 #include "base/metrics/histogram_macros.h" |
14 #include "base/threading/thread_task_runner_handle.h" | 14 #include "base/threading/thread_task_runner_handle.h" |
15 #include "chrome/browser/android/vr_shell/ui_elements.h" | 15 #include "chrome/browser/android/vr_shell/ui_elements.h" |
16 #include "chrome/browser/android/vr_shell/ui_scene.h" | 16 #include "chrome/browser/android/vr_shell/ui_scene.h" |
17 #include "chrome/browser/android/vr_shell/vr_controller.h" | 17 #include "chrome/browser/android/vr_shell/vr_controller.h" |
18 #include "chrome/browser/android/vr_shell/vr_gl_util.h" | 18 #include "chrome/browser/android/vr_shell/vr_gl_util.h" |
19 #include "chrome/browser/android/vr_shell/vr_math.h" | 19 #include "chrome/browser/android/vr_shell/vr_math.h" |
20 #include "chrome/browser/android/vr_shell/vr_shell.h" | 20 #include "chrome/browser/android/vr_shell/vr_shell.h" |
21 #include "chrome/browser/android/vr_shell/vr_shell_command_buffer_gl.h" | |
21 #include "chrome/browser/android/vr_shell/vr_shell_delegate.h" | 22 #include "chrome/browser/android/vr_shell/vr_shell_delegate.h" |
22 #include "chrome/browser/android/vr_shell/vr_shell_renderer.h" | 23 #include "chrome/browser/android/vr_shell/vr_shell_renderer.h" |
23 #include "device/vr/android/gvr/gvr_device.h" | 24 #include "device/vr/android/gvr/gvr_device.h" |
24 #include "third_party/WebKit/public/platform/WebInputEvent.h" | 25 #include "third_party/WebKit/public/platform/WebInputEvent.h" |
25 #include "third_party/WebKit/public/platform/WebMouseEvent.h" | 26 #include "third_party/WebKit/public/platform/WebMouseEvent.h" |
26 #include "ui/gl/android/scoped_java_surface.h" | 27 #include "ui/gl/android/scoped_java_surface.h" |
27 #include "ui/gl/android/surface_texture.h" | 28 #include "ui/gl/android/surface_texture.h" |
28 #include "ui/gl/gl_bindings.h" | 29 #include "ui/gl/gl_bindings.h" |
29 #include "ui/gl/gl_context.h" | 30 #include "ui/gl/gl_context.h" |
30 #include "ui/gl/gl_surface.h" | 31 #include "ui/gl/gl_surface.h" |
(...skipping 45 matching lines...) | |
76 | 77 |
77 // The GVR viewport list has two entries (left eye and right eye) for each | 78 // The GVR viewport list has two entries (left eye and right eye) for each |
78 // GVR buffer. | 79 // GVR buffer. |
79 static constexpr int kViewportListPrimaryOffset = 0; | 80 static constexpr int kViewportListPrimaryOffset = 0; |
80 static constexpr int kViewportListHeadlockedOffset = 2; | 81 static constexpr int kViewportListHeadlockedOffset = 2; |
81 | 82 |
82 // Buffer size large enough to handle the current backlog of poses which is | 83 // Buffer size large enough to handle the current backlog of poses which is |
83 // 2-3 frames. | 84 // 2-3 frames. |
84 static constexpr unsigned kPoseRingBufferSize = 8; | 85 static constexpr unsigned kPoseRingBufferSize = 8; |
85 | 86 |
86 // Magic numbers used to mark valid pose index values encoded in frame | 87 // Default downscale percentage for computing the recommended WebVR |
87 // data. Must match the magic numbers used in blink's VRDisplay.cpp. | 88 // renderWidth/Height from the 1:1 pixel mapped size. Using a rather |
88 static constexpr std::array<uint8_t, 2> kWebVrPosePixelMagicNumbers{{42, 142}}; | 89 // aggressive downscale due to the high overhead of copying pixels |
90 // twice before handing off to GVR. For comparison, the polyfill | |
91 // uses approximately 55% on a Pixel XL. | |
92 static constexpr unsigned kWebVrRecommendedResolutionPercent = 50; | |
bajones 2017/03/07 00:48:07: Nit: I'd recommend making this float kWebVrRecomme
klausw 2017/03/07 02:55:55: Done.
| |
89 | 93 |
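As a rough sketch of how the percentage above is applied (not part of the patch; the helper name is illustrative, but gvr::GvrApi::GetMaximumEffectiveRenderTargetSize and gvr::Sizei are the GVR types used later in this file):

    // Illustrative only: derive the downscaled recommended WebVR render size
    // from GVR's 1:1 pixel-mapped maximum, as done in InitializeGl below.
    gvr::Sizei RecommendedWebVrSize(gvr::GvrApi* gvr_api) {
      gvr::Sizei max_size = gvr_api->GetMaximumEffectiveRenderTargetSize();
      gvr::Sizei size;
      size.width = max_size.width * kWebVrRecommendedResolutionPercent / 100;
      size.height = max_size.height * kWebVrRecommendedResolutionPercent / 100;
      return size;
    }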
90 float Distance(const gvr::Vec3f& vec1, const gvr::Vec3f& vec2) { | 94 float Distance(const gvr::Vec3f& vec1, const gvr::Vec3f& vec2) { |
91 float xdiff = (vec1.x - vec2.x); | 95 float xdiff = (vec1.x - vec2.x); |
92 float ydiff = (vec1.y - vec2.y); | 96 float ydiff = (vec1.y - vec2.y); |
93 float zdiff = (vec1.z - vec2.z); | 97 float zdiff = (vec1.z - vec2.z); |
94 float scale = xdiff * xdiff + ydiff * ydiff + zdiff * zdiff; | 98 float scale = xdiff * xdiff + ydiff * ydiff + zdiff * zdiff; |
95 return std::sqrt(scale); | 99 return std::sqrt(scale); |
96 } | 100 } |
97 | 101 |
98 // Generate a quaternion representing the rotation from the negative Z axis | 102 // Generate a quaternion representing the rotation from the negative Z axis |
(...skipping 129 matching lines...) | |
228 LOG(ERROR) << "gl::init::CreateGLContext failed"; | 232 LOG(ERROR) << "gl::init::CreateGLContext failed"; |
229 ForceExitVr(); | 233 ForceExitVr(); |
230 return; | 234 return; |
231 } | 235 } |
232 if (!context_->MakeCurrent(surface_.get())) { | 236 if (!context_->MakeCurrent(surface_.get())) { |
233 LOG(ERROR) << "gl::GLContext::MakeCurrent() failed"; | 237 LOG(ERROR) << "gl::GLContext::MakeCurrent() failed"; |
234 ForceExitVr(); | 238 ForceExitVr(); |
235 return; | 239 return; |
236 } | 240 } |
237 | 241 |
238 unsigned int textures[2]; | 242 unsigned int textures[3]; |
239 glGenTextures(2, textures); | 243 glGenTextures(3, textures); |
240 ui_texture_id_ = textures[0]; | 244 ui_texture_id_ = textures[0]; |
241 content_texture_id_ = textures[1]; | 245 content_texture_id_ = textures[1]; |
246 webvr_texture_id_ = textures[2]; | |
242 ui_surface_texture_ = gl::SurfaceTexture::Create(ui_texture_id_); | 247 ui_surface_texture_ = gl::SurfaceTexture::Create(ui_texture_id_); |
243 content_surface_texture_ = gl::SurfaceTexture::Create(content_texture_id_); | 248 content_surface_texture_ = gl::SurfaceTexture::Create(content_texture_id_); |
249 webvr_surface_texture_ = gl::SurfaceTexture::Create(webvr_texture_id_); | |
244 CreateUiSurface(); | 250 CreateUiSurface(); |
245 CreateContentSurface(); | 251 CreateContentSurface(); |
252 // Don't create the WebVR surface yet, wait until we need it. | |
bajones 2017/03/07 00:48:07: Comment seems to imply WebVR surface will be creat
klausw 2017/03/07 02:55:55: Changed to: WebVR surface is created below.
| |
246 ui_surface_texture_->SetFrameAvailableCallback(base::Bind( | 253 ui_surface_texture_->SetFrameAvailableCallback(base::Bind( |
247 &VrShellGl::OnUIFrameAvailable, weak_ptr_factory_.GetWeakPtr())); | 254 &VrShellGl::OnUIFrameAvailable, weak_ptr_factory_.GetWeakPtr())); |
248 content_surface_texture_->SetFrameAvailableCallback(base::Bind( | 255 content_surface_texture_->SetFrameAvailableCallback(base::Bind( |
249 &VrShellGl::OnContentFrameAvailable, weak_ptr_factory_.GetWeakPtr())); | 256 &VrShellGl::OnContentFrameAvailable, weak_ptr_factory_.GetWeakPtr())); |
257 webvr_surface_texture_->SetFrameAvailableCallback(base::Bind( | |
258 &VrShellGl::OnWebVRFrameAvailable, weak_ptr_factory_.GetWeakPtr())); | |
259 ui_surface_texture_->SetDefaultBufferSize(ui_tex_physical_size_.width, | |
260 ui_tex_physical_size_.height); | |
250 content_surface_texture_->SetDefaultBufferSize( | 261 content_surface_texture_->SetDefaultBufferSize( |
251 content_tex_physical_size_.width, content_tex_physical_size_.height); | 262 content_tex_physical_size_.width, content_tex_physical_size_.height); |
252 ui_surface_texture_->SetDefaultBufferSize(ui_tex_physical_size_.width, | |
253 ui_tex_physical_size_.height); | |
254 InitializeRenderer(); | 263 InitializeRenderer(); |
255 | 264 |
265 // Pick a size for the WebVR transfer surface based on a downscaled | |
266 // recommended render resolution, and also use that size as the | |
267 // client-recommended renderWidth/renderHeight and for the GVR | |
268 // framebuffer. If the client chooses a different size or resizes | |
269 // it while presenting, we'll resize the transfer surface and GVR | |
270 // framebuffer to match. | |
271 | |
272 auto render_target_size = gvr_api_->GetMaximumEffectiveRenderTargetSize(); | |
273 | |
274 // There are some rendering artifacts in the right eye where straight lines | |
275 // have small dents in them. Round size to a multiple of 16 pixels per eye | |
276 // to compensate? | |
277 gvr::Sizei size; | |
278 size.width = | |
279 render_target_size.width * kWebVrRecommendedResolutionPercent / 100; | |
280 size.height = | |
281 render_target_size.height * kWebVrRecommendedResolutionPercent / 100; | |
282 size.width &= ~0x1f; | |
bajones 2017/03/07 00:48:07: Is this supposed to be 0xf instead of 0x1f? Also,
klausw 2017/03/07 02:55:55: I've removed the rounding and replaced with a TODO
| |
283 size.height &= ~0xf; | |
284 CreateOrResizeWebVRSurface(size); | |
285 | |
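A sketch of the alignment questioned in the thread above, assuming the transfer buffer holds both eyes side by side (so 16 pixels per eye means the total width is aligned to 32, while the shared height is aligned to 16); the patch ultimately dropped the rounding in favor of a TODO, and this helper is illustrative only:

    // Illustrative only: align a side-by-side stereo buffer so each eye's
    // half-width and the shared height are multiples of 16 pixels.
    gvr::Sizei AlignForStereo(gvr::Sizei size) {
      size.width &= ~0x1f;  // 32-pixel total width = 16 pixels per eye.
      size.height &= ~0xf;  // 16-pixel height alignment.
      return size;
    }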
256 vsync_task_.Reset(base::Bind(&VrShellGl::OnVSync, base::Unretained(this))); | 286 vsync_task_.Reset(base::Bind(&VrShellGl::OnVSync, base::Unretained(this))); |
257 OnVSync(); | 287 OnVSync(); |
258 | 288 |
259 ready_to_draw_ = true; | 289 ready_to_draw_ = true; |
260 } | 290 } |
261 | 291 |
262 void VrShellGl::CreateContentSurface() { | 292 void VrShellGl::CreateContentSurface() { |
263 content_surface_ = | 293 content_surface_ = |
264 base::MakeUnique<gl::ScopedJavaSurface>(content_surface_texture_.get()); | 294 base::MakeUnique<gl::ScopedJavaSurface>(content_surface_texture_.get()); |
265 main_thread_task_runner_->PostTask( | 295 main_thread_task_runner_->PostTask( |
266 FROM_HERE, base::Bind(&VrShell::ContentSurfaceChanged, weak_vr_shell_, | 296 FROM_HERE, base::Bind(&VrShell::ContentSurfaceChanged, weak_vr_shell_, |
267 content_surface_->j_surface().obj())); | 297 content_surface_->j_surface().obj())); |
268 } | 298 } |
269 | 299 |
270 void VrShellGl::CreateUiSurface() { | 300 void VrShellGl::CreateUiSurface() { |
271 ui_surface_ = | 301 ui_surface_ = |
272 base::MakeUnique<gl::ScopedJavaSurface>(ui_surface_texture_.get()); | 302 base::MakeUnique<gl::ScopedJavaSurface>(ui_surface_texture_.get()); |
273 main_thread_task_runner_->PostTask( | 303 main_thread_task_runner_->PostTask( |
274 FROM_HERE, base::Bind(&VrShell::UiSurfaceChanged, weak_vr_shell_, | 304 FROM_HERE, base::Bind(&VrShell::UiSurfaceChanged, weak_vr_shell_, |
275 ui_surface_->j_surface().obj())); | 305 ui_surface_->j_surface().obj())); |
276 } | 306 } |
277 | 307 |
308 void VrShellGl::CreateOrResizeWebVRSurface(const gvr::Sizei& size) { | |
309 if (!webvr_surface_texture_) { | |
310 LOG(ERROR) << "No WebVR surface texture available"; | |
311 return; | |
312 } | |
313 | |
314 // ContentPhysicalBoundsChanged is getting called twice with | |
315 // identical sizes? Avoid thrashing the existing context. | |
316 if (size == webvr_surface_size_) { | |
317 return; | |
318 } | |
319 | |
320 if (!size.width || !size.height) { | |
321 // Invalid size, defer until a new size arrives on a future bounds update. | |
322 return; | |
323 } | |
324 | |
325 webvr_surface_texture_->SetDefaultBufferSize(size.width, size.height); | |
326 webvr_surface_size_ = size; | |
327 | |
328 if (command_buffer_gl_) { | |
329 command_buffer_gl_->ResizeSurface(size.width, size.height); | |
330 } else { | |
331 command_buffer_gl_ = base::MakeUnique<VrShellCommandBufferGl>(); | |
332 webvr_surface_ = command_buffer_gl_->CreateSurface(webvr_surface_texture_); | |
333 } | |
334 } | |
335 | |
336 void VrShellGl::SubmitWebVRFrame(int16_t frame_index, | |
337 gpu::Mailbox mailbox, | |
338 gpu::SyncToken sync_token) { | |
339 TRACE_EVENT0("gpu", "VrShellGl::SubmitWebVRFrame"); | |
340 | |
341 bool swapped = command_buffer_gl_->CopyFrameToSurface( | |
342 frame_index, mailbox, sync_token, !pending_frames_.empty()); | |
343 // Expect a new frame on the surface queue if draw was successful. | |
344 if (swapped) { | |
345 main_thread_task_runner_->PostTask( | |
346 FROM_HERE, base::Bind(&VrShell::OnSubmitWebVRFrameTransferred, | |
347 weak_vr_shell_, frame_index)); | |
348 pending_frames_.emplace(frame_index); | |
349 } | |
350 | |
351 TRACE_EVENT0("gpu", "VrShellGl::glFinish"); | |
352 // This is a load-bearing glFinish, please don't remove it without | |
353 // before/after timing comparisons. Goal is to clear the GPU queue | |
354 // of the native GL context to avoid stalls later in GVR frame | |
355 // acquire/submit. | |
356 glFinish(); | |
357 } | |
358 | |
278 void VrShellGl::OnUIFrameAvailable() { | 359 void VrShellGl::OnUIFrameAvailable() { |
279 ui_surface_texture_->UpdateTexImage(); | 360 ui_surface_texture_->UpdateTexImage(); |
280 } | 361 } |
281 | 362 |
282 void VrShellGl::OnContentFrameAvailable() { | 363 void VrShellGl::OnContentFrameAvailable() { |
283 content_surface_texture_->UpdateTexImage(); | 364 content_surface_texture_->UpdateTexImage(); |
284 received_frame_ = true; | 365 received_frame_ = true; |
285 } | 366 } |
286 | 367 |
287 bool VrShellGl::GetPixelEncodedFrameIndex(uint16_t* frame_index) { | 368 void VrShellGl::OnWebVRFrameAvailable() { |
288 TRACE_EVENT0("gpu", "VrShellGl::GetPixelEncodedFrameIndex"); | 369 // A "while" loop here is a bad idea. It's legal to call |
289 if (!received_frame_) { | 370 // UpdateTexImage repeatedly even if no frames are available, but |
290 if (last_frame_index_ == (uint16_t)-1) | 371 // that does *not* wait for a new frame, it just reuses the most |
291 return false; | 372 // recent one. That would mess up the count. |
292 *frame_index = last_frame_index_; | 373 if (pending_frames_.empty()) { |
293 return true; | 374 // We're expecting a frame, but it's not here yet. Retry in OnVsync. |
375 ++premature_received_frames_; | |
376 return; | |
294 } | 377 } |
295 received_frame_ = false; | |
296 | 378 |
297 // Read the pose index encoded in a bottom left pixel as color values. | 379 webvr_surface_texture_->UpdateTexImage(); |
298 // See also third_party/WebKit/Source/modules/vr/VRDisplay.cpp which | 380 int frame_index = pending_frames_.front(); |
299 // encodes the pose index, and device/vr/android/gvr/gvr_device.cc | 381 TRACE_EVENT1("gpu", "VrShellGl::OnWebVRFrameAvailable", "frame", frame_index); |
300 // which tracks poses. Returns the low byte (0..255) if valid, or -1 | 382 pending_frames_.pop(); |
301 // if not valid due to bad magic number. | |
302 uint8_t pixels[4]; | |
303 // Assume we're reading from the framebuffer we just wrote to. | |
304 // That's true currently, we may need to use glReadBuffer(GL_BACK) | |
305 // or equivalent if the rendering setup changes in the future. | |
306 glReadPixels(0, 0, 1, 1, GL_RGBA, GL_UNSIGNED_BYTE, pixels); | |
307 | 383 |
308 // Check for the magic number written by VRDevice.cpp on submit. | 384 // It is legal for the WebVR client to submit a new frame now, |
309 // This helps avoid glitches from garbage data in the render | 385 // since we've consumed the image. TODO(klausw): would timing be |
310 // buffer that can appear during initialization or resizing. These | 386 // better to move the rendered notification after draw? |
311 // often appear as flashes of all-black or all-white pixels. | 387 |
312 if (pixels[1] == kWebVrPosePixelMagicNumbers[0] && | 388 main_thread_task_runner_->PostTask( |
313 pixels[2] == kWebVrPosePixelMagicNumbers[1]) { | 389 FROM_HERE, base::Bind(&VrShell::OnSubmitWebVRFrameRendered, |
314 // Pose is good. | 390 weak_vr_shell_, frame_index)); |
315 *frame_index = pixels[0]; | 391 |
316 last_frame_index_ = pixels[0]; | 392 DrawFrame(frame_index); |
317 return true; | |
318 } | |
319 VLOG(1) << "WebVR: reject decoded pose index " << static_cast<int>(pixels[0]) | |
320 << ", bad magic number " << static_cast<int>(pixels[1]) << ", " | |
321 << static_cast<int>(pixels[2]); | |
322 return false; | |
323 } | 393 } |
324 | 394 |
325 void VrShellGl::GvrInit(gvr_context* gvr_api) { | 395 void VrShellGl::GvrInit(gvr_context* gvr_api) { |
326 gvr_api_ = gvr::GvrApi::WrapNonOwned(gvr_api); | 396 gvr_api_ = gvr::GvrApi::WrapNonOwned(gvr_api); |
327 controller_.reset(new VrController(gvr_api)); | 397 controller_.reset(new VrController(gvr_api)); |
328 | 398 |
329 ViewerType viewerType; | 399 ViewerType viewerType; |
330 switch (gvr_api_->GetViewerType()) { | 400 switch (gvr_api_->GetViewerType()) { |
331 case gvr::ViewerType::GVR_VIEWER_TYPE_DAYDREAM: | 401 case gvr::ViewerType::GVR_VIEWER_TYPE_DAYDREAM: |
332 viewerType = ViewerType::DAYDREAM; | 402 viewerType = ViewerType::DAYDREAM; |
333 break; | 403 break; |
334 case gvr::ViewerType::GVR_VIEWER_TYPE_CARDBOARD: | 404 case gvr::ViewerType::GVR_VIEWER_TYPE_CARDBOARD: |
335 viewerType = ViewerType::CARDBOARD; | 405 viewerType = ViewerType::CARDBOARD; |
336 break; | 406 break; |
337 default: | 407 default: |
338 NOTREACHED(); | 408 NOTREACHED(); |
339 viewerType = ViewerType::UNKNOWN_TYPE; | 409 viewerType = ViewerType::UNKNOWN_TYPE; |
340 break; | 410 break; |
341 } | 411 } |
342 UMA_HISTOGRAM_ENUMERATION("VRViewerType", static_cast<int>(viewerType), | 412 UMA_HISTOGRAM_ENUMERATION("VRViewerType", static_cast<int>(viewerType), |
343 static_cast<int>(ViewerType::VIEWER_TYPE_MAX)); | 413 static_cast<int>(ViewerType::VIEWER_TYPE_MAX)); |
344 } | 414 } |
345 | 415 |
346 void VrShellGl::InitializeRenderer() { | 416 void VrShellGl::InitializeRenderer() { |
347 // While WebVR is going through the compositor path, it shares | |
348 // the same texture ID. This will change once it gets its own | |
349 // surface, but store it separately to avoid future confusion. | |
350 // TODO(klausw,crbug.com/655722): remove this. | |
351 webvr_texture_id_ = content_texture_id_; | |
352 | |
353 gvr_api_->InitializeGl(); | 417 gvr_api_->InitializeGl(); |
354 webvr_head_pose_.assign(kPoseRingBufferSize, | 418 webvr_head_pose_.assign(kPoseRingBufferSize, |
355 gvr_api_->GetHeadSpaceFromStartSpaceRotation( | 419 gvr_api_->GetHeadSpaceFromStartSpaceRotation( |
356 gvr::GvrApi::GetTimePointNow())); | 420 gvr::GvrApi::GetTimePointNow())); |
357 | 421 |
358 std::vector<gvr::BufferSpec> specs; | 422 std::vector<gvr::BufferSpec> specs; |
359 // For kFramePrimaryBuffer (primary VrShell and WebVR content) | 423 // For kFramePrimaryBuffer (primary VrShell and WebVR content) |
360 specs.push_back(gvr_api_->CreateBufferSpec()); | 424 specs.push_back(gvr_api_->CreateBufferSpec()); |
361 render_size_primary_ = specs[kFramePrimaryBuffer].GetSize(); | 425 render_size_primary_ = specs[kFramePrimaryBuffer].GetSize(); |
426 render_size_vrshell_ = render_size_primary_; | |
362 | 427 |
363 // For kFrameHeadlockedBuffer (for WebVR insecure content warning). | 428 // For kFrameHeadlockedBuffer (for WebVR insecure content warning). |
364 // Set this up at fixed resolution, the (smaller) FOV gets set below. | 429 // Set this up at fixed resolution, the (smaller) FOV gets set below. |
365 specs.push_back(gvr_api_->CreateBufferSpec()); | 430 specs.push_back(gvr_api_->CreateBufferSpec()); |
366 specs.back().SetSize(kHeadlockedBufferDimensions); | 431 specs.back().SetSize(kHeadlockedBufferDimensions); |
367 render_size_headlocked_ = specs[kFrameHeadlockedBuffer].GetSize(); | 432 render_size_headlocked_ = specs[kFrameHeadlockedBuffer].GetSize(); |
368 | 433 |
369 swap_chain_.reset(new gvr::SwapChain(gvr_api_->CreateSwapChain(specs))); | 434 swap_chain_.reset(new gvr::SwapChain(gvr_api_->CreateSwapChain(specs))); |
370 | 435 |
371 vr_shell_renderer_.reset(new VrShellRenderer()); | 436 vr_shell_renderer_.reset(new VrShellRenderer()); |
(...skipping 251 matching lines...) | |
623 std::unique_ptr<blink::WebInputEvent> event) { | 688 std::unique_ptr<blink::WebInputEvent> event) { |
624 DCHECK(input_target != InputTarget::NONE); | 689 DCHECK(input_target != InputTarget::NONE); |
625 auto&& target = input_target == InputTarget::CONTENT | 690 auto&& target = input_target == InputTarget::CONTENT |
626 ? &VrShell::ProcessContentGesture | 691 ? &VrShell::ProcessContentGesture |
627 : &VrShell::ProcessUIGesture; | 692 : &VrShell::ProcessUIGesture; |
628 main_thread_task_runner_->PostTask( | 693 main_thread_task_runner_->PostTask( |
629 FROM_HERE, | 694 FROM_HERE, |
630 base::Bind(target, weak_vr_shell_, base::Passed(std::move(event)))); | 695 base::Bind(target, weak_vr_shell_, base::Passed(std::move(event)))); |
631 } | 696 } |
632 | 697 |
633 void VrShellGl::DrawFrame() { | 698 void VrShellGl::DrawFrame(int frame_index) { |
634 TRACE_EVENT0("gpu", "VrShellGl::DrawFrame"); | 699 TRACE_EVENT1("gpu", "VrShellGl::DrawFrame", "frame", frame_index); |
635 | 700 |
636 // Reset the viewport list to just the pair of viewports for the | 701 // Reset the viewport list to just the pair of viewports for the |
637 // primary buffer each frame. Head-locked viewports get added by | 702 // primary buffer each frame. Head-locked viewports get added by |
638 // DrawVrShell if needed. | 703 // DrawVrShell if needed. |
639 buffer_viewport_list_->SetToRecommendedBufferViewports(); | 704 buffer_viewport_list_->SetToRecommendedBufferViewports(); |
640 | 705 |
706 // If needed, resize the primary buffer for use with WebVR. | |
707 if (web_vr_mode_) { | |
708 if (render_size_primary_ != webvr_surface_size_) { | |
709 if (!webvr_surface_size_.width) { | |
710 return; | |
711 } | |
712 render_size_primary_ = webvr_surface_size_; | |
713 swap_chain_->ResizeBuffer(kFramePrimaryBuffer, render_size_primary_); | |
714 } | |
715 } else { | |
716 if (render_size_primary_ != render_size_vrshell_) { | |
717 render_size_primary_ = render_size_vrshell_; | |
718 swap_chain_->ResizeBuffer(kFramePrimaryBuffer, render_size_primary_); | |
719 } | |
720 } | |
721 | |
722 TRACE_EVENT_BEGIN0("gpu", "VrShellGl::AcquireFrame"); | |
641 gvr::Frame frame = swap_chain_->AcquireFrame(); | 723 gvr::Frame frame = swap_chain_->AcquireFrame(); |
724 TRACE_EVENT_END0("gpu", "VrShellGl::AcquireFrame"); | |
642 if (!frame.is_valid()) { | 725 if (!frame.is_valid()) { |
643 return; | 726 return; |
644 } | 727 } |
645 frame.BindBuffer(kFramePrimaryBuffer); | 728 frame.BindBuffer(kFramePrimaryBuffer); |
646 if (web_vr_mode_) { | 729 if (web_vr_mode_) { |
647 DrawWebVr(); | 730 DrawWebVr(); |
648 } | 731 } |
649 | 732 |
650 uint16_t frame_index; | |
651 gvr::Mat4f head_pose; | 733 gvr::Mat4f head_pose; |
652 | 734 |
653 // When using async reprojection, we need to know which pose was used in | 735 // When using async reprojection, we need to know which pose was used in |
654 // the WebVR app for drawing this frame. Due to unknown amounts of | 736 // the WebVR app for drawing this frame. Only needed if reprojection is |
655 // buffering in the compositor and SurfaceTexture, we read the pose number | 737 // in use. |
656 // from a corner pixel. There's no point in doing this for legacy | 738 if (web_vr_mode_ && gvr_api_->GetAsyncReprojectionEnabled()) { |
657 // distortion rendering since that doesn't need a pose, and reading back | |
658 // pixels is an expensive operation. TODO(klausw,crbug.com/655722): stop | |
659 // doing this once we have working no-compositor rendering for WebVR. | |
660 if (web_vr_mode_ && gvr_api_->GetAsyncReprojectionEnabled() && | |
661 GetPixelEncodedFrameIndex(&frame_index)) { | |
662 static_assert(!((kPoseRingBufferSize - 1) & kPoseRingBufferSize), | 739 static_assert(!((kPoseRingBufferSize - 1) & kPoseRingBufferSize), |
663 "kPoseRingBufferSize must be a power of 2"); | 740 "kPoseRingBufferSize must be a power of 2"); |
664 head_pose = webvr_head_pose_[frame_index % kPoseRingBufferSize]; | 741 head_pose = webvr_head_pose_[frame_index % kPoseRingBufferSize]; |
665 // Process all pending_bounds_ changes targeted for before this frame, being | 742 // Process all pending_bounds_ changes targeted for before this frame, being |
666 // careful of wrapping frame indices. | 743 // careful of wrapping frame indices. |
667 static constexpr unsigned max = | 744 static constexpr unsigned max = |
668 std::numeric_limits<decltype(frame_index_)>::max(); | 745 std::numeric_limits<decltype(frame_index_)>::max(); |
669 static_assert(max > kPoseRingBufferSize * 2, | 746 static_assert(max > kPoseRingBufferSize * 2, |
670 "To detect wrapping, kPoseRingBufferSize must be smaller " | 747 "To detect wrapping, kPoseRingBufferSize must be smaller " |
671 "than half of frame_index_ range."); | 748 "than half of frame_index_ range."); |
672 while (!pending_bounds_.empty()) { | 749 while (!pending_bounds_.empty()) { |
673 uint16_t index = pending_bounds_.front().first; | 750 uint16_t index = pending_bounds_.front().first; |
674 // If index is less than the frame_index it's possible we've wrapped, so | 751 // If index is less than the frame_index it's possible we've wrapped, so |
675 // we extend the range and 'un-wrap' to account for this. | 752 // we extend the range and 'un-wrap' to account for this. |
676 if (index < frame_index) | 753 if (index < frame_index) |
677 index += max; | 754 index += max; |
678 // If the pending bounds change is for an upcoming frame within our buffer | 755 // If the pending bounds change is for an upcoming frame within our buffer |
679 // size, wait to apply it. Otherwise, apply it immediately. This | 756 // size, wait to apply it. Otherwise, apply it immediately. This |
680 // guarantees that even if we miss many frames, the queue can't fill up | 757 // guarantees that even if we miss many frames, the queue can't fill up |
681 // with stale bounds. | 758 // with stale bounds. |
682 if (index > frame_index && index <= frame_index + kPoseRingBufferSize) | 759 if (index > frame_index && index <= frame_index + kPoseRingBufferSize) |
683 break; | 760 break; |
684 | 761 |
685 const BoundsPair& bounds = pending_bounds_.front().second; | 762 const WebVrBounds& bounds = pending_bounds_.front().second; |
686 webvr_left_viewport_->SetSourceUv(bounds.first); | 763 webvr_left_viewport_->SetSourceUv(bounds.left_bounds); |
687 webvr_right_viewport_->SetSourceUv(bounds.second); | 764 webvr_right_viewport_->SetSourceUv(bounds.right_bounds); |
765 CreateOrResizeWebVRSurface(bounds.source_size); | |
688 pending_bounds_.pop(); | 766 pending_bounds_.pop(); |
689 } | 767 } |
690 buffer_viewport_list_->SetBufferViewport(GVR_LEFT_EYE, | 768 buffer_viewport_list_->SetBufferViewport(GVR_LEFT_EYE, |
691 *webvr_left_viewport_); | 769 *webvr_left_viewport_); |
692 buffer_viewport_list_->SetBufferViewport(GVR_RIGHT_EYE, | 770 buffer_viewport_list_->SetBufferViewport(GVR_RIGHT_EYE, |
693 *webvr_right_viewport_); | 771 *webvr_right_viewport_); |
694 } else { | 772 } else { |
695 gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow(); | 773 gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow(); |
696 target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos; | 774 target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos; |
697 head_pose = gvr_api_->GetHeadSpaceFromStartSpaceRotation(target_time); | 775 head_pose = gvr_api_->GetHeadSpaceFromStartSpaceRotation(target_time); |
698 } | 776 } |
699 | 777 |
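For clarity, a standalone sketch of the wrap-around test used above when deciding whether a queued bounds update applies before the current frame; it assumes a uint16_t frame counter and the kPoseRingBufferSize constant defined near the top of the file, and the helper name is illustrative:

    #include <cstdint>
    #include <limits>

    // Illustrative only: returns true if a bounds update queued for |index|
    // should be applied before drawing |frame_index|. Updates aimed at an
    // upcoming frame within the ring-buffer window stay queued; everything
    // else (past frames, including wrapped ones) applies immediately.
    bool ShouldApplyBoundsNow(uint16_t index, uint16_t frame_index) {
      constexpr unsigned max = std::numeric_limits<uint16_t>::max();
      unsigned unwrapped = index;
      if (index < frame_index)
        unwrapped += max;  // The counter may have wrapped since queuing.
      return !(unwrapped > frame_index &&
               unwrapped <= frame_index + kPoseRingBufferSize);
    }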
700 gvr::Vec3f position = GetTranslation(head_pose); | 778 gvr::Vec3f position = GetTranslation(head_pose); |
701 if (position.x == 0.0f && position.y == 0.0f && position.z == 0.0f) { | 779 if (position.x == 0.0f && position.y == 0.0f && position.z == 0.0f) { |
702 // This appears to be a 3DOF pose without a neck model. Add one. | 780 // This appears to be a 3DOF pose without a neck model. Add one. |
703 // The head pose has redundant data. Assume we're only using the | 781 // The head pose has redundant data. Assume we're only using the |
704 // object_from_reference_matrix, we're not updating position_external. | 782 // object_from_reference_matrix, we're not updating position_external. |
705 // TODO: Not sure what object_from_reference_matrix is. The new api removed | 783 // TODO: Not sure what object_from_reference_matrix is. The new api removed |
706 // it. For now, removing it seems to work fine. | 784 // it. For now, removing it seems to work fine. |
707 gvr_api_->ApplyNeckModel(head_pose, 1.0f); | 785 gvr_api_->ApplyNeckModel(head_pose, 1.0f); |
708 } | 786 } |
709 | 787 |
710 // Update the render position of all UI elements (including desktop). | 788 // Update the render position of all UI elements (including desktop). |
711 const float screen_tilt = kDesktopScreenTiltDefault * M_PI / 180.0f; | 789 const float screen_tilt = kDesktopScreenTiltDefault * M_PI / 180.0f; |
712 scene_->UpdateTransforms(screen_tilt, TimeInMicroseconds()); | 790 scene_->UpdateTransforms(screen_tilt, TimeInMicroseconds()); |
713 | 791 |
714 UpdateController(GetForwardVector(head_pose)); | 792 { |
793 TRACE_EVENT0("gpu", "VrShellGl::UpdateController"); | |
794 UpdateController(GetForwardVector(head_pose)); | |
795 } | |
715 | 796 |
716 DrawVrShell(head_pose, frame); | 797 // Finish drawing in the primary buffer, and draw the headlocked buffer |
798 // if needed. This must be the last drawing call, this method will | |
799 // return with no frame being bound. | |
800 DrawVrShellAndUnbind(head_pose, frame); | |
717 | 801 |
718 frame.Unbind(); | 802 { |
719 frame.Submit(*buffer_viewport_list_, head_pose); | 803 TRACE_EVENT0("gpu", "VrShellGl::Submit"); |
804 frame.Submit(*buffer_viewport_list_, head_pose); | |
805 } | |
720 | 806 |
721 // No need to swap buffers for surfaceless rendering. | 807 // No need to swap buffers for surfaceless rendering. |
722 if (!surfaceless_rendering_) { | 808 if (!surfaceless_rendering_) { |
723 // TODO(mthiesse): Support asynchronous SwapBuffers. | 809 // TODO(mthiesse): Support asynchronous SwapBuffers. |
810 TRACE_EVENT0("gpu", "VrShellGl::SwapBuffers"); | |
724 surface_->SwapBuffers(); | 811 surface_->SwapBuffers(); |
725 } | 812 } |
726 } | 813 } |
727 | 814 |
728 void VrShellGl::DrawVrShell(const gvr::Mat4f& head_pose, gvr::Frame& frame) { | 815 void VrShellGl::DrawVrShellAndUnbind(const gvr::Mat4f& head_pose, |
816 gvr::Frame& frame) { | |
729 TRACE_EVENT0("gpu", "VrShellGl::DrawVrShell"); | 817 TRACE_EVENT0("gpu", "VrShellGl::DrawVrShell"); |
730 std::vector<const ContentRectangle*> head_locked_elements; | 818 std::vector<const ContentRectangle*> head_locked_elements; |
731 std::vector<const ContentRectangle*> world_elements; | 819 std::vector<const ContentRectangle*> world_elements; |
732 for (const auto& rect : scene_->GetUiElements()) { | 820 for (const auto& rect : scene_->GetUiElements()) { |
733 if (!rect->IsVisible()) | 821 if (!rect->IsVisible()) |
734 continue; | 822 continue; |
735 if (rect->lock_to_fov) { | 823 if (rect->lock_to_fov) { |
736 head_locked_elements.push_back(rect.get()); | 824 head_locked_elements.push_back(rect.get()); |
737 } else { | 825 } else { |
738 world_elements.push_back(rect.get()); | 826 world_elements.push_back(rect.get()); |
(...skipping 16 matching lines...) | |
755 | 843 |
756 const Colorf& backgroundColor = scene_->GetBackgroundColor(); | 844 const Colorf& backgroundColor = scene_->GetBackgroundColor(); |
757 glClearColor(backgroundColor.r, backgroundColor.g, backgroundColor.b, | 845 glClearColor(backgroundColor.r, backgroundColor.g, backgroundColor.b, |
758 backgroundColor.a); | 846 backgroundColor.a); |
759 glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); | 847 glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); |
760 } | 848 } |
761 if (!world_elements.empty()) { | 849 if (!world_elements.empty()) { |
762 DrawUiView(&head_pose, world_elements, render_size_primary_, | 850 DrawUiView(&head_pose, world_elements, render_size_primary_, |
763 kViewportListPrimaryOffset); | 851 kViewportListPrimaryOffset); |
764 } | 852 } |
853 frame.Unbind(); // Done with the primary buffer. | |
765 | 854 |
766 if (!head_locked_elements.empty()) { | 855 if (!head_locked_elements.empty()) { |
767 // Add head-locked viewports. The list gets reset to just | 856 // Add head-locked viewports. The list gets reset to just |
768 // the recommended viewports (for the primary buffer) each frame. | 857 // the recommended viewports (for the primary buffer) each frame. |
769 buffer_viewport_list_->SetBufferViewport( | 858 buffer_viewport_list_->SetBufferViewport( |
770 kViewportListHeadlockedOffset + GVR_LEFT_EYE, | 859 kViewportListHeadlockedOffset + GVR_LEFT_EYE, |
771 *headlocked_left_viewport_); | 860 *headlocked_left_viewport_); |
772 buffer_viewport_list_->SetBufferViewport( | 861 buffer_viewport_list_->SetBufferViewport( |
773 kViewportListHeadlockedOffset + GVR_RIGHT_EYE, | 862 kViewportListHeadlockedOffset + GVR_RIGHT_EYE, |
774 *headlocked_right_viewport_); | 863 *headlocked_right_viewport_); |
775 | 864 |
776 // Bind the headlocked framebuffer. | 865 // Bind the headlocked framebuffer. |
777 // TODO(mthiesse): We don't unbind this? Maybe some cleanup is in order | |
778 // here. | |
779 frame.BindBuffer(kFrameHeadlockedBuffer); | 866 frame.BindBuffer(kFrameHeadlockedBuffer); |
780 glClearColor(0.0f, 0.0f, 0.0f, 0.0f); | 867 glClearColor(0.0f, 0.0f, 0.0f, 0.0f); |
781 glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); | 868 glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); |
782 DrawUiView(nullptr, head_locked_elements, render_size_headlocked_, | 869 DrawUiView(nullptr, head_locked_elements, render_size_headlocked_, |
783 kViewportListHeadlockedOffset); | 870 kViewportListHeadlockedOffset); |
871 frame.Unbind(); // Done with the headlocked buffer. | |
784 } | 872 } |
785 } | 873 } |
786 | 874 |
787 gvr::Sizei VrShellGl::GetWebVRCompositorSurfaceSize() { | |
788 // This is a stopgap while we're using the WebVR compositor rendering path. | |
789 // TODO(klausw,crbug.com/655722): Remove this method and member once we're | |
790 // using a separate WebVR render surface. | |
791 return content_tex_physical_size_; | |
792 } | |
793 | |
794 void VrShellGl::DrawUiView(const gvr::Mat4f* head_pose, | 875 void VrShellGl::DrawUiView(const gvr::Mat4f* head_pose, |
795 const std::vector<const ContentRectangle*>& elements, | 876 const std::vector<const ContentRectangle*>& elements, |
796 const gvr::Sizei& render_size, | 877 const gvr::Sizei& render_size, |
797 int viewport_offset) { | 878 int viewport_offset) { |
798 TRACE_EVENT0("gpu", "VrShellGl::DrawUiView"); | 879 TRACE_EVENT0("gpu", "VrShellGl::DrawUiView"); |
799 | 880 |
800 gvr::Mat4f view_matrix; | 881 gvr::Mat4f view_matrix; |
801 if (head_pose) { | 882 if (head_pose) { |
802 view_matrix = *head_pose; | 883 view_matrix = *head_pose; |
803 } else { | 884 } else { |
(...skipping 186 matching lines...) | |
990 void VrShellGl::DrawWebVr() { | 1071 void VrShellGl::DrawWebVr() { |
991 TRACE_EVENT0("gpu", "VrShellGl::DrawWebVr"); | 1072 TRACE_EVENT0("gpu", "VrShellGl::DrawWebVr"); |
992 // Don't need face culling, depth testing, blending, etc. Turn it all off. | 1073 // Don't need face culling, depth testing, blending, etc. Turn it all off. |
993 glDisable(GL_CULL_FACE); | 1074 glDisable(GL_CULL_FACE); |
994 glDepthMask(GL_FALSE); | 1075 glDepthMask(GL_FALSE); |
995 glDisable(GL_DEPTH_TEST); | 1076 glDisable(GL_DEPTH_TEST); |
996 glDisable(GL_SCISSOR_TEST); | 1077 glDisable(GL_SCISSOR_TEST); |
997 glDisable(GL_BLEND); | 1078 glDisable(GL_BLEND); |
998 glDisable(GL_POLYGON_OFFSET_FILL); | 1079 glDisable(GL_POLYGON_OFFSET_FILL); |
999 | 1080 |
1000 glViewport(0, 0, render_size_primary_.width, render_size_primary_.height); | 1081 glViewport(0, 0, webvr_surface_size_.width, webvr_surface_size_.height); |
1001 vr_shell_renderer_->GetWebVrRenderer()->Draw(webvr_texture_id_); | 1082 vr_shell_renderer_->GetWebVrRenderer()->Draw(webvr_texture_id_); |
1002 } | 1083 } |
1003 | 1084 |
1004 void VrShellGl::OnTriggerEvent() { | 1085 void VrShellGl::OnTriggerEvent() { |
1005 // Set a flag to handle this on the render thread at the next frame. | 1086 // Set a flag to handle this on the render thread at the next frame. |
1006 touch_pending_ = true; | 1087 touch_pending_ = true; |
1007 } | 1088 } |
1008 | 1089 |
1009 void VrShellGl::OnPause() { | 1090 void VrShellGl::OnPause() { |
1010 vsync_task_.Cancel(); | 1091 vsync_task_.Cancel(); |
(...skipping 10 matching lines...) | |
1021 OnVSync(); | 1102 OnVSync(); |
1022 } | 1103 } |
1023 } | 1104 } |
1024 | 1105 |
1025 void VrShellGl::SetWebVrMode(bool enabled) { | 1106 void VrShellGl::SetWebVrMode(bool enabled) { |
1026 web_vr_mode_ = enabled; | 1107 web_vr_mode_ = enabled; |
1027 } | 1108 } |
1028 | 1109 |
1029 void VrShellGl::UpdateWebVRTextureBounds(int16_t frame_index, | 1110 void VrShellGl::UpdateWebVRTextureBounds(int16_t frame_index, |
1030 const gvr::Rectf& left_bounds, | 1111 const gvr::Rectf& left_bounds, |
1031 const gvr::Rectf& right_bounds) { | 1112 const gvr::Rectf& right_bounds, |
1113 const gvr::Sizei& source_size) { | |
1032 if (frame_index < 0) { | 1114 if (frame_index < 0) { |
1033 webvr_left_viewport_->SetSourceUv(left_bounds); | 1115 webvr_left_viewport_->SetSourceUv(left_bounds); |
1034 webvr_right_viewport_->SetSourceUv(right_bounds); | 1116 webvr_right_viewport_->SetSourceUv(right_bounds); |
1035 } else { | 1117 } else { |
1036 pending_bounds_.emplace( | 1118 pending_bounds_.emplace(std::make_pair( |
1037 std::make_pair(frame_index, std::make_pair(left_bounds, right_bounds))); | 1119 frame_index, WebVrBounds(left_bounds, right_bounds, source_size))); |
1038 } | 1120 } |
1039 } | 1121 } |
1040 | 1122 |
1041 void VrShellGl::ContentBoundsChanged(int width, int height) { | 1123 void VrShellGl::ContentBoundsChanged(int width, int height) { |
1042 TRACE_EVENT0("gpu", "VrShellGl::ContentBoundsChanged"); | 1124 TRACE_EVENT0("gpu", "VrShellGl::ContentBoundsChanged"); |
1043 content_tex_css_width_ = width; | 1125 content_tex_css_width_ = width; |
1044 content_tex_css_height_ = height; | 1126 content_tex_css_height_ = height; |
1045 } | 1127 } |
1046 | 1128 |
1047 void VrShellGl::ContentPhysicalBoundsChanged(int width, int height) { | 1129 void VrShellGl::ContentPhysicalBoundsChanged(int width, int height) { |
(...skipping 13 matching lines...) | |
1061 ui_surface_texture_->SetDefaultBufferSize(width, height); | 1143 ui_surface_texture_->SetDefaultBufferSize(width, height); |
1062 ui_tex_physical_size_.width = width; | 1144 ui_tex_physical_size_.width = width; |
1063 ui_tex_physical_size_.height = height; | 1145 ui_tex_physical_size_.height = height; |
1064 } | 1146 } |
1065 | 1147 |
1066 base::WeakPtr<VrShellGl> VrShellGl::GetWeakPtr() { | 1148 base::WeakPtr<VrShellGl> VrShellGl::GetWeakPtr() { |
1067 return weak_ptr_factory_.GetWeakPtr(); | 1149 return weak_ptr_factory_.GetWeakPtr(); |
1068 } | 1150 } |
1069 | 1151 |
1070 void VrShellGl::OnVSync() { | 1152 void VrShellGl::OnVSync() { |
1153 while (premature_received_frames_ > 0) { | |
1154 TRACE_EVENT0("gpu", "VrShellGl::OnWebVRFrameAvailableRetry"); | |
1155 --premature_received_frames_; | |
1156 OnWebVRFrameAvailable(); | |
1157 } | |
1158 | |
1071 base::TimeTicks now = base::TimeTicks::Now(); | 1159 base::TimeTicks now = base::TimeTicks::Now(); |
1072 base::TimeTicks target; | 1160 base::TimeTicks target; |
1073 | 1161 |
1074 // Don't send VSyncs until we have a timebase/interval. | 1162 // Don't send VSyncs until we have a timebase/interval. |
1075 if (vsync_interval_.is_zero()) | 1163 if (vsync_interval_.is_zero()) |
1076 return; | 1164 return; |
1077 target = now + vsync_interval_; | 1165 target = now + vsync_interval_; |
1078 int64_t intervals = (target - vsync_timebase_) / vsync_interval_; | 1166 int64_t intervals = (target - vsync_timebase_) / vsync_interval_; |
1079 target = vsync_timebase_ + intervals * vsync_interval_; | 1167 target = vsync_timebase_ + intervals * vsync_interval_; |
1080 task_runner_->PostDelayedTask(FROM_HERE, vsync_task_.callback(), | 1168 task_runner_->PostDelayedTask(FROM_HERE, vsync_task_.callback(), |
1081 target - now); | 1169 target - now); |
1082 | 1170 |
1083 base::TimeDelta time = intervals * vsync_interval_; | 1171 base::TimeDelta time = intervals * vsync_interval_; |
1084 if (!callback_.is_null()) { | 1172 if (!callback_.is_null()) { |
1085 SendVSync(time, base::ResetAndReturn(&callback_)); | 1173 SendVSync(time, base::ResetAndReturn(&callback_)); |
1086 } else { | 1174 } else { |
1087 pending_vsync_ = true; | 1175 pending_vsync_ = true; |
1088 pending_time_ = time; | 1176 pending_time_ = time; |
1089 } | 1177 } |
1090 DrawFrame(); | 1178 if (!web_vr_mode_) { |
1179 DrawFrame(-1); | |
1180 } | |
1091 } | 1181 } |
1092 | 1182 |
1093 void VrShellGl::OnRequest(device::mojom::VRVSyncProviderRequest request) { | 1183 void VrShellGl::OnRequest(device::mojom::VRVSyncProviderRequest request) { |
1094 binding_.Close(); | 1184 binding_.Close(); |
1095 binding_.Bind(std::move(request)); | 1185 binding_.Bind(std::move(request)); |
1096 } | 1186 } |
1097 | 1187 |
1098 void VrShellGl::GetVSync(const GetVSyncCallback& callback) { | 1188 void VrShellGl::GetVSync(const GetVSyncCallback& callback) { |
1099 if (!pending_vsync_) { | 1189 if (!pending_vsync_) { |
1100 if (!callback_.is_null()) { | 1190 if (!callback_.is_null()) { |
(...skipping 49 matching lines...) | |
1150 void VrShellGl::ResetPose() { | 1240 void VrShellGl::ResetPose() { |
1151 // Should never call RecenterTracking when using with Daydream viewers. On | 1241 // Should never call RecenterTracking when using with Daydream viewers. On |
1152 // those devices recentering should only be done via the controller. | 1242 // those devices recentering should only be done via the controller. |
1153 if (gvr_api_ && gvr_api_->GetViewerType() == GVR_VIEWER_TYPE_CARDBOARD) | 1243 if (gvr_api_ && gvr_api_->GetViewerType() == GVR_VIEWER_TYPE_CARDBOARD) |
1154 gvr_api_->RecenterTracking(); | 1244 gvr_api_->RecenterTracking(); |
1155 } | 1245 } |
1156 | 1246 |
1157 void VrShellGl::CreateVRDisplayInfo( | 1247 void VrShellGl::CreateVRDisplayInfo( |
1158 const base::Callback<void(device::mojom::VRDisplayInfoPtr)>& callback, | 1248 const base::Callback<void(device::mojom::VRDisplayInfoPtr)>& callback, |
1159 uint32_t device_id) { | 1249 uint32_t device_id) { |
1250 // This assumes that the initial webvr_surface_size_ was set to the | |
1251 // appropriate recommended render resolution as the default size during | |
1252 // InitializeGl. Revisit if the initialization order changes. | |
1160 device::mojom::VRDisplayInfoPtr info = VrShell::CreateVRDisplayInfo( | 1253 device::mojom::VRDisplayInfoPtr info = VrShell::CreateVRDisplayInfo( |
1161 gvr_api_.get(), content_tex_physical_size_, device_id); | 1254 gvr_api_.get(), webvr_surface_size_, device_id); |
1162 main_thread_task_runner_->PostTask( | 1255 main_thread_task_runner_->PostTask( |
1163 FROM_HERE, | 1256 FROM_HERE, |
1164 base::Bind(&RunVRDisplayInfoCallback, callback, base::Passed(&info))); | 1257 base::Bind(&RunVRDisplayInfoCallback, callback, base::Passed(&info))); |
1165 } | 1258 } |
1166 | 1259 |
1167 } // namespace vr_shell | 1260 } // namespace vr_shell |