OLD | NEW |
---|---|
1 // Copyright 2016 The Chromium Authors. All rights reserved. | 1 // Copyright 2016 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "chrome/browser/android/vr_shell/vr_shell_gl.h" | 5 #include "chrome/browser/android/vr_shell/vr_shell_gl.h" |
6 | 6 |
7 #include <limits> | 7 #include <limits> |
8 #include <utility> | 8 #include <utility> |
9 | 9 |
10 #include "base/android/jni_android.h" | 10 #include "base/android/jni_android.h" |
11 #include "base/callback_helpers.h" | 11 #include "base/callback_helpers.h" |
12 #include "base/memory/ptr_util.h" | 12 #include "base/memory/ptr_util.h" |
13 #include "base/metrics/histogram_macros.h" | 13 #include "base/metrics/histogram_macros.h" |
14 #include "base/threading/thread_task_runner_handle.h" | 14 #include "base/threading/thread_task_runner_handle.h" |
15 #include "chrome/browser/android/vr_shell/mailbox_to_surface_bridge.h" | |
15 #include "chrome/browser/android/vr_shell/ui_elements.h" | 16 #include "chrome/browser/android/vr_shell/ui_elements.h" |
16 #include "chrome/browser/android/vr_shell/ui_scene.h" | 17 #include "chrome/browser/android/vr_shell/ui_scene.h" |
17 #include "chrome/browser/android/vr_shell/vr_controller.h" | 18 #include "chrome/browser/android/vr_shell/vr_controller.h" |
18 #include "chrome/browser/android/vr_shell/vr_gl_util.h" | 19 #include "chrome/browser/android/vr_shell/vr_gl_util.h" |
19 #include "chrome/browser/android/vr_shell/vr_math.h" | 20 #include "chrome/browser/android/vr_shell/vr_math.h" |
20 #include "chrome/browser/android/vr_shell/vr_shell.h" | 21 #include "chrome/browser/android/vr_shell/vr_shell.h" |
21 #include "chrome/browser/android/vr_shell/vr_shell_delegate.h" | 22 #include "chrome/browser/android/vr_shell/vr_shell_delegate.h" |
22 #include "chrome/browser/android/vr_shell/vr_shell_renderer.h" | 23 #include "chrome/browser/android/vr_shell/vr_shell_renderer.h" |
23 #include "device/vr/android/gvr/gvr_device.h" | 24 #include "device/vr/android/gvr/gvr_device.h" |
24 #include "third_party/WebKit/public/platform/WebInputEvent.h" | 25 #include "third_party/WebKit/public/platform/WebInputEvent.h" |
(...skipping 51 matching lines...) | |
76 | 77 |
77 // The GVR viewport list has two entries (left eye and right eye) for each | 78 // The GVR viewport list has two entries (left eye and right eye) for each |
78 // GVR buffer. | 79 // GVR buffer. |
79 static constexpr int kViewportListPrimaryOffset = 0; | 80 static constexpr int kViewportListPrimaryOffset = 0; |
80 static constexpr int kViewportListHeadlockedOffset = 2; | 81 static constexpr int kViewportListHeadlockedOffset = 2; |
81 | 82 |
82 // Buffer size large enough to handle the current backlog of poses which is | 83 // Buffer size large enough to handle the current backlog of poses which is |
83 // 2-3 frames. | 84 // 2-3 frames. |
84 static constexpr unsigned kPoseRingBufferSize = 8; | 85 static constexpr unsigned kPoseRingBufferSize = 8; |
85 | 86 |
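// Illustrative sketch, not part of this change: poses are looked up by
// frame index modulo the ring buffer size (as DrawFrame does below), so
// the buffer only needs to be large enough to cover the in-flight backlog:
//
//   head_pose = webvr_head_pose_[frame_index % kPoseRingBufferSize];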
86 // Magic numbers used to mark valid pose index values encoded in frame | 87 // Default downscale factor for computing the recommended WebVR |
87 // data. Must match the magic numbers used in blink's VRDisplay.cpp. | 88 // renderWidth/Height from the 1:1 pixel mapped size. Using a rather |
88 static constexpr std::array<uint8_t, 2> kWebVrPosePixelMagicNumbers{{42, 142}}; | 89 // aggressive downscale due to the high overhead of copying pixels |
90 // twice before handing off to GVR. For comparison, the polyfill | |
91 // uses approximately 0.55 on a Pixel XL. | |
92 static constexpr float kWebVrRecommendedResolutionScale = 0.5; | |
mthiesse
2017/03/08 01:00:05
Why does gvr choose a size so large that we have t
klausw
2017/03/08 02:59:22
GVR reports a maximum recommended render resolutio
| |
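// Illustrative sketch, not part of this change: the scale factor above is
// applied to GVR's maximum effective render target size to pick the default
// WebVR transfer surface size (the same computation appears in InitializeGl
// below):
//
//   auto max_size = gvr_api_->GetMaximumEffectiveRenderTargetSize();
//   gvr::Sizei webvr_size = {
//       static_cast<int>(max_size.width * kWebVrRecommendedResolutionScale),
//       static_cast<int>(max_size.height * kWebVrRecommendedResolutionScale)};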
89 | 93 |
90 float Distance(const gvr::Vec3f& vec1, const gvr::Vec3f& vec2) { | 94 float Distance(const gvr::Vec3f& vec1, const gvr::Vec3f& vec2) { |
91 float xdiff = (vec1.x - vec2.x); | 95 float xdiff = (vec1.x - vec2.x); |
92 float ydiff = (vec1.y - vec2.y); | 96 float ydiff = (vec1.y - vec2.y); |
93 float zdiff = (vec1.z - vec2.z); | 97 float zdiff = (vec1.z - vec2.z); |
94 float scale = xdiff * xdiff + ydiff * ydiff + zdiff * zdiff; | 98 float scale = xdiff * xdiff + ydiff * ydiff + zdiff * zdiff; |
95 return std::sqrt(scale); | 99 return std::sqrt(scale); |
96 } | 100 } |
97 | 101 |
98 // Generate a quaternion representing the rotation from the negative Z axis | 102 // Generate a quaternion representing the rotation from the negative Z axis |
(...skipping 69 matching lines...) | |
168 binding_(this), | 172 binding_(this), |
169 weak_vr_shell_(weak_vr_shell), | 173 weak_vr_shell_(weak_vr_shell), |
170 delegate_provider_(delegate_provider), | 174 delegate_provider_(delegate_provider), |
171 main_thread_task_runner_(std::move(main_thread_task_runner)), | 175 main_thread_task_runner_(std::move(main_thread_task_runner)), |
172 weak_ptr_factory_(this) { | 176 weak_ptr_factory_(this) { |
173 GvrInit(gvr_api); | 177 GvrInit(gvr_api); |
174 } | 178 } |
175 | 179 |
176 VrShellGl::~VrShellGl() { | 180 VrShellGl::~VrShellGl() { |
177 vsync_task_.Cancel(); | 181 vsync_task_.Cancel(); |
182 binding_.Close(); | |
178 if (!callback_.is_null()) { | 183 if (!callback_.is_null()) { |
179 // When this VSync provider is going away we have to respond to pending | 184 // When this VSync provider is going away we have to respond to pending |
180 // callbacks, so instead of providing a VSync, tell the requester to try | 185 // callbacks, so instead of providing a VSync, tell the requester to try |
181 // again. A VSyncProvider is guaranteed to exist, so the request in response | 186 // again. A VSyncProvider is guaranteed to exist, so the request in response |
182 // to this message will go through some other VSyncProvider. | 187 // to this message will go through some other VSyncProvider. |
183 base::ResetAndReturn(&callback_) | 188 base::ResetAndReturn(&callback_) |
184 .Run(nullptr, base::TimeDelta(), -1, | 189 .Run(nullptr, base::TimeDelta(), -1, |
185 device::mojom::VRVSyncProvider::Status::RETRY); | 190 device::mojom::VRVSyncProvider::Status::CLOSING); |
186 } | |
187 if (binding_.is_bound()) { | |
188 main_thread_task_runner_->PostTask( | |
189 FROM_HERE, | |
190 base::Bind(&VrShellDelegate::OnVRVsyncProviderRequest, | |
191 delegate_provider_, base::Passed(binding_.Unbind()))); | |
192 } | 191 } |
193 } | 192 } |
194 | 193 |
195 void VrShellGl::Initialize() { | 194 void VrShellGl::Initialize() { |
196 scene_.reset(new UiScene); | 195 scene_.reset(new UiScene); |
197 | 196 |
198 if (surfaceless_rendering_) { | 197 if (surfaceless_rendering_) { |
199 // If we're rendering surfaceless, we'll never get a java surface to render | 198 // If we're rendering surfaceless, we'll never get a java surface to render |
200 // into, so we can initialize GL right away. | 199 // into, so we can initialize GL right away. |
201 InitializeGl(nullptr); | 200 InitializeGl(nullptr); |
(...skipping 26 matching lines...) | |
228 LOG(ERROR) << "gl::init::CreateGLContext failed"; | 227 LOG(ERROR) << "gl::init::CreateGLContext failed"; |
229 ForceExitVr(); | 228 ForceExitVr(); |
230 return; | 229 return; |
231 } | 230 } |
232 if (!context_->MakeCurrent(surface_.get())) { | 231 if (!context_->MakeCurrent(surface_.get())) { |
233 LOG(ERROR) << "gl::GLContext::MakeCurrent() failed"; | 232 LOG(ERROR) << "gl::GLContext::MakeCurrent() failed"; |
234 ForceExitVr(); | 233 ForceExitVr(); |
235 return; | 234 return; |
236 } | 235 } |
237 | 236 |
238 unsigned int textures[2]; | 237 unsigned int textures[3]; |
239 glGenTextures(2, textures); | 238 glGenTextures(3, textures); |
240 ui_texture_id_ = textures[0]; | 239 ui_texture_id_ = textures[0]; |
241 content_texture_id_ = textures[1]; | 240 content_texture_id_ = textures[1]; |
241 webvr_texture_id_ = textures[2]; | |
242 ui_surface_texture_ = gl::SurfaceTexture::Create(ui_texture_id_); | 242 ui_surface_texture_ = gl::SurfaceTexture::Create(ui_texture_id_); |
243 content_surface_texture_ = gl::SurfaceTexture::Create(content_texture_id_); | 243 content_surface_texture_ = gl::SurfaceTexture::Create(content_texture_id_); |
244 webvr_surface_texture_ = gl::SurfaceTexture::Create(webvr_texture_id_); | |
244 CreateUiSurface(); | 245 CreateUiSurface(); |
245 CreateContentSurface(); | 246 CreateContentSurface(); |
247 // WebVR surface is created below. | |
mthiesse
2017/03/08 01:00:02
nit: useless comment
klausw
2017/03/08 02:59:22
Done.
| |
246 ui_surface_texture_->SetFrameAvailableCallback(base::Bind( | 248 ui_surface_texture_->SetFrameAvailableCallback(base::Bind( |
247 &VrShellGl::OnUIFrameAvailable, weak_ptr_factory_.GetWeakPtr())); | 249 &VrShellGl::OnUIFrameAvailable, weak_ptr_factory_.GetWeakPtr())); |
248 content_surface_texture_->SetFrameAvailableCallback(base::Bind( | 250 content_surface_texture_->SetFrameAvailableCallback(base::Bind( |
249 &VrShellGl::OnContentFrameAvailable, weak_ptr_factory_.GetWeakPtr())); | 251 &VrShellGl::OnContentFrameAvailable, weak_ptr_factory_.GetWeakPtr())); |
252 webvr_surface_texture_->SetFrameAvailableCallback(base::Bind( | |
253 &VrShellGl::OnWebVRFrameAvailable, weak_ptr_factory_.GetWeakPtr())); | |
254 ui_surface_texture_->SetDefaultBufferSize(ui_tex_physical_size_.width, | |
255 ui_tex_physical_size_.height); | |
250 content_surface_texture_->SetDefaultBufferSize( | 256 content_surface_texture_->SetDefaultBufferSize( |
251 content_tex_physical_size_.width, content_tex_physical_size_.height); | 257 content_tex_physical_size_.width, content_tex_physical_size_.height); |
252 ui_surface_texture_->SetDefaultBufferSize(ui_tex_physical_size_.width, | |
253 ui_tex_physical_size_.height); | |
254 InitializeRenderer(); | 258 InitializeRenderer(); |
255 | 259 |
260 // Pick a size for the WebVR transfer surface based on a downscaled | |
mthiesse
2017/03/08 01:00:05
Can you comment on why it's okay to do this?
klausw
2017/03/08 02:59:22
Expanded the comment, it's arbitrary and just need
| |
261 // recommended render resolution, and also use that size as the | |
262 // client-recommended renderWidth/renderHeight and for the GVR | |
263 // framebuffer. If the client chooses a different size or resizes | |
264 // it while presenting, we'll resize the transfer surface and GVR | |
265 // framebuffer to match. | |
266 | |
mthiesse
2017/03/08 01:00:04
nit: remove blank line
klausw
2017/03/08 02:59:22
Done.
| |
267 auto render_target_size = gvr_api_->GetMaximumEffectiveRenderTargetSize(); | |
268 | |
269 gvr::Sizei webvr_size = {static_cast<int>(render_target_size.width * | |
270 kWebVrRecommendedResolutionScale), | |
271 static_cast<int>(render_target_size.height * | |
272 kWebVrRecommendedResolutionScale)}; | |
273 | |
274 // TODO(klausw): should the size be rounded to a multiple of N pixels | |
mthiesse
2017/03/08 01:00:03
File a bug?
klausw
2017/03/08 02:59:22
Done, crbug.com/699350
| |
275 // to be friendlier to the GPU? The exact size doesn't matter. | |
276 | |
277 CreateOrResizeWebVRSurface(webvr_size); | |
278 | |
256 vsync_task_.Reset(base::Bind(&VrShellGl::OnVSync, base::Unretained(this))); | 279 vsync_task_.Reset(base::Bind(&VrShellGl::OnVSync, base::Unretained(this))); |
257 OnVSync(); | 280 OnVSync(); |
258 | 281 |
259 ready_to_draw_ = true; | 282 ready_to_draw_ = true; |
260 } | 283 } |
261 | 284 |
262 void VrShellGl::CreateContentSurface() { | 285 void VrShellGl::CreateContentSurface() { |
263 content_surface_ = | 286 content_surface_ = |
264 base::MakeUnique<gl::ScopedJavaSurface>(content_surface_texture_.get()); | 287 base::MakeUnique<gl::ScopedJavaSurface>(content_surface_texture_.get()); |
265 main_thread_task_runner_->PostTask( | 288 main_thread_task_runner_->PostTask( |
266 FROM_HERE, base::Bind(&VrShell::ContentSurfaceChanged, weak_vr_shell_, | 289 FROM_HERE, base::Bind(&VrShell::ContentSurfaceChanged, weak_vr_shell_, |
267 content_surface_->j_surface().obj())); | 290 content_surface_->j_surface().obj())); |
268 } | 291 } |
269 | 292 |
270 void VrShellGl::CreateUiSurface() { | 293 void VrShellGl::CreateUiSurface() { |
271 ui_surface_ = | 294 ui_surface_ = |
272 base::MakeUnique<gl::ScopedJavaSurface>(ui_surface_texture_.get()); | 295 base::MakeUnique<gl::ScopedJavaSurface>(ui_surface_texture_.get()); |
273 main_thread_task_runner_->PostTask( | 296 main_thread_task_runner_->PostTask( |
274 FROM_HERE, base::Bind(&VrShell::UiSurfaceChanged, weak_vr_shell_, | 297 FROM_HERE, base::Bind(&VrShell::UiSurfaceChanged, weak_vr_shell_, |
275 ui_surface_->j_surface().obj())); | 298 ui_surface_->j_surface().obj())); |
276 } | 299 } |
277 | 300 |
301 void VrShellGl::CreateOrResizeWebVRSurface(const gvr::Sizei& size) { | |
302 if (!webvr_surface_texture_) { | |
303 LOG(ERROR) << "No WebVR surface texture available"; | |
mthiesse
2017/03/08 01:00:05
nit: DLOG?
klausw
2017/03/08 02:59:22
Done.
| |
304 return; | |
305 } | |
306 | |
307 // ContentPhysicalBoundsChanged is getting called twice with | |
308 // identical sizes? Avoid thrashing the existing context. | |
309 if (size == webvr_surface_size_) { | |
310 return; | |
311 } | |
312 | |
313 if (!size.width || !size.height) { | |
314 // Invalid size, defer until a new size arrives on a future bounds update. | |
315 return; | |
316 } | |
317 | |
318 webvr_surface_texture_->SetDefaultBufferSize(size.width, size.height); | |
319 webvr_surface_size_ = size; | |
320 | |
321 if (mailbox_bridge_) { | |
322 mailbox_bridge_->ResizeSurface(size.width, size.height); | |
323 } else { | |
324 mailbox_bridge_ = base::MakeUnique<MailboxToSurfaceBridge>(); | |
325 webvr_surface_ = mailbox_bridge_->CreateSurface(webvr_surface_texture_); | |
326 } | |
327 } | |
328 | |
329 void VrShellGl::SubmitWebVRFrame(int16_t frame_index, | |
330 const gpu::MailboxHolder& mailbox) { | |
331 TRACE_EVENT0("gpu", "VrShellGl::SubmitWebVRFrame"); | |
332 | |
333 bool swapped = mailbox_bridge_->CopyFrameToSurface(frame_index, mailbox, | |
334 !pending_frames_.empty()); | |
335 // Expect a new frame on the surface queue if draw was successful. | |
336 if (swapped) { | |
337 submit_client_->OnSubmitFrameTransferred(); | |
338 pending_frames_.emplace(frame_index); | |
339 } | |
340 | |
341 TRACE_EVENT0("gpu", "VrShellGl::glFinish"); | |
342 // This is a load-bearing glFinish, please don't remove it without | |
343 // before/after timing comparisons. Goal is to clear the GPU queue | |
344 // of the native GL context to avoid stalls later in GVR frame | |
345 // acquire/submit. | |
346 glFinish(); | |
347 } | |
348 | |
349 void VrShellGl::SetSubmitClient( | |
350 device::mojom::VRSubmitFrameClientPtrInfo submit_client_info) { | |
351 submit_client_.Bind(std::move(submit_client_info)); | |
352 } | |
353 | |
278 void VrShellGl::OnUIFrameAvailable() { | 354 void VrShellGl::OnUIFrameAvailable() { |
279 ui_surface_texture_->UpdateTexImage(); | 355 ui_surface_texture_->UpdateTexImage(); |
280 } | 356 } |
281 | 357 |
282 void VrShellGl::OnContentFrameAvailable() { | 358 void VrShellGl::OnContentFrameAvailable() { |
283 content_surface_texture_->UpdateTexImage(); | 359 content_surface_texture_->UpdateTexImage(); |
284 received_frame_ = true; | 360 received_frame_ = true; |
285 } | 361 } |
286 | 362 |
287 bool VrShellGl::GetPixelEncodedFrameIndex(uint16_t* frame_index) { | 363 void VrShellGl::OnWebVRFrameAvailable() { |
288 TRACE_EVENT0("gpu", "VrShellGl::GetPixelEncodedFrameIndex"); | 364 // A "while" loop here is a bad idea. It's legal to call |
289 if (!received_frame_) { | 365 // UpdateTexImage repeatedly even if no frames are available, but |
290 if (last_frame_index_ == (uint16_t)-1) | 366 // that does *not* wait for a new frame, it just reuses the most |
291 return false; | 367 // recent one. That would mess up the count. |
292 *frame_index = last_frame_index_; | 368 if (pending_frames_.empty()) { |
293 return true; | 369 // We're expecting a frame, but it's not here yet. Retry in OnVsync. |
370 ++premature_received_frames_; | |
371 return; | |
294 } | 372 } |
295 received_frame_ = false; | |
296 | 373 |
297 // Read the pose index encoded in a bottom left pixel as color values. | 374 webvr_surface_texture_->UpdateTexImage(); |
298 // See also third_party/WebKit/Source/modules/vr/VRDisplay.cpp which | 375 int frame_index = pending_frames_.front(); |
299 // encodes the pose index, and device/vr/android/gvr/gvr_device.cc | 376 TRACE_EVENT1("gpu", "VrShellGl::OnWebVRFrameAvailable", "frame", frame_index); |
300 // which tracks poses. Returns the low byte (0..255) if valid, or -1 | 377 pending_frames_.pop(); |
301 // if not valid due to bad magic number. | |
302 uint8_t pixels[4]; | |
303 // Assume we're reading from the framebuffer we just wrote to. | |
304 // That's true currently, we may need to use glReadBuffer(GL_BACK) | |
305 // or equivalent if the rendering setup changes in the future. | |
306 glReadPixels(0, 0, 1, 1, GL_RGBA, GL_UNSIGNED_BYTE, pixels); | |
307 | 378 |
308 // Check for the magic number written by VRDevice.cpp on submit. | 379 // It is be legal for the WebVR client to submit a new frame now, |
mthiesse
2017/03/08 01:00:02
nit: s/It is be/It is/
klausw
2017/03/08 02:59:22
Done.
| |
309 // This helps avoid glitches from garbage data in the render | 380 // since we've consumed the image. TODO(klausw): would timing be |
310 // buffer that can appear during initialization or resizing. These | 381 // better to move the rendered notification after draw? |
311 // often appear as flashes of all-black or all-white pixels. | 382 |
312 if (pixels[1] == kWebVrPosePixelMagicNumbers[0] && | 383 submit_client_->OnSubmitFrameRendered(); |
313 pixels[2] == kWebVrPosePixelMagicNumbers[1]) { | 384 |
314 // Pose is good. | 385 DrawFrame(frame_index); |
315 *frame_index = pixels[0]; | |
316 last_frame_index_ = pixels[0]; | |
317 return true; | |
318 } | |
319 VLOG(1) << "WebVR: reject decoded pose index " << static_cast<int>(pixels[0]) | |
320 << ", bad magic number " << static_cast<int>(pixels[1]) << ", " | |
321 << static_cast<int>(pixels[2]); | |
322 return false; | |
323 } | 386 } |
324 | 387 |
325 void VrShellGl::GvrInit(gvr_context* gvr_api) { | 388 void VrShellGl::GvrInit(gvr_context* gvr_api) { |
326 gvr_api_ = gvr::GvrApi::WrapNonOwned(gvr_api); | 389 gvr_api_ = gvr::GvrApi::WrapNonOwned(gvr_api); |
327 controller_.reset(new VrController(gvr_api)); | 390 controller_.reset(new VrController(gvr_api)); |
328 | 391 |
329 ViewerType viewerType; | 392 ViewerType viewerType; |
330 switch (gvr_api_->GetViewerType()) { | 393 switch (gvr_api_->GetViewerType()) { |
331 case gvr::ViewerType::GVR_VIEWER_TYPE_DAYDREAM: | 394 case gvr::ViewerType::GVR_VIEWER_TYPE_DAYDREAM: |
332 viewerType = ViewerType::DAYDREAM; | 395 viewerType = ViewerType::DAYDREAM; |
333 break; | 396 break; |
334 case gvr::ViewerType::GVR_VIEWER_TYPE_CARDBOARD: | 397 case gvr::ViewerType::GVR_VIEWER_TYPE_CARDBOARD: |
335 viewerType = ViewerType::CARDBOARD; | 398 viewerType = ViewerType::CARDBOARD; |
336 break; | 399 break; |
337 default: | 400 default: |
338 NOTREACHED(); | 401 NOTREACHED(); |
339 viewerType = ViewerType::UNKNOWN_TYPE; | 402 viewerType = ViewerType::UNKNOWN_TYPE; |
340 break; | 403 break; |
341 } | 404 } |
342 UMA_HISTOGRAM_ENUMERATION("VRViewerType", static_cast<int>(viewerType), | 405 UMA_HISTOGRAM_ENUMERATION("VRViewerType", static_cast<int>(viewerType), |
343 static_cast<int>(ViewerType::VIEWER_TYPE_MAX)); | 406 static_cast<int>(ViewerType::VIEWER_TYPE_MAX)); |
344 } | 407 } |
345 | 408 |
346 void VrShellGl::InitializeRenderer() { | 409 void VrShellGl::InitializeRenderer() { |
347 // While WebVR is going through the compositor path, it shares | |
348 // the same texture ID. This will change once it gets its own | |
349 // surface, but store it separately to avoid future confusion. | |
350 // TODO(klausw,crbug.com/655722): remove this. | |
351 webvr_texture_id_ = content_texture_id_; | |
352 | |
353 gvr_api_->InitializeGl(); | 410 gvr_api_->InitializeGl(); |
354 webvr_head_pose_.assign(kPoseRingBufferSize, | 411 webvr_head_pose_.assign(kPoseRingBufferSize, |
355 gvr_api_->GetHeadSpaceFromStartSpaceRotation( | 412 gvr_api_->GetHeadSpaceFromStartSpaceRotation( |
356 gvr::GvrApi::GetTimePointNow())); | 413 gvr::GvrApi::GetTimePointNow())); |
357 | 414 |
358 std::vector<gvr::BufferSpec> specs; | 415 std::vector<gvr::BufferSpec> specs; |
359 // For kFramePrimaryBuffer (primary VrShell and WebVR content) | 416 // For kFramePrimaryBuffer (primary VrShell and WebVR content) |
360 specs.push_back(gvr_api_->CreateBufferSpec()); | 417 specs.push_back(gvr_api_->CreateBufferSpec()); |
361 render_size_primary_ = specs[kFramePrimaryBuffer].GetSize(); | 418 render_size_primary_ = specs[kFramePrimaryBuffer].GetSize(); |
419 render_size_vrshell_ = render_size_primary_; | |
362 | 420 |
363 // For kFrameHeadlockedBuffer (for WebVR insecure content warning). | 421 // For kFrameHeadlockedBuffer (for WebVR insecure content warning). |
364 // Set this up at fixed resolution, the (smaller) FOV gets set below. | 422 // Set this up at fixed resolution, the (smaller) FOV gets set below. |
365 specs.push_back(gvr_api_->CreateBufferSpec()); | 423 specs.push_back(gvr_api_->CreateBufferSpec()); |
366 specs.back().SetSize(kHeadlockedBufferDimensions); | 424 specs.back().SetSize(kHeadlockedBufferDimensions); |
367 render_size_headlocked_ = specs[kFrameHeadlockedBuffer].GetSize(); | 425 render_size_headlocked_ = specs[kFrameHeadlockedBuffer].GetSize(); |
368 | 426 |
369 swap_chain_.reset(new gvr::SwapChain(gvr_api_->CreateSwapChain(specs))); | 427 swap_chain_.reset(new gvr::SwapChain(gvr_api_->CreateSwapChain(specs))); |
370 | 428 |
371 vr_shell_renderer_.reset(new VrShellRenderer()); | 429 vr_shell_renderer_.reset(new VrShellRenderer()); |
(...skipping 251 matching lines...) | |
623 std::unique_ptr<blink::WebInputEvent> event) { | 681 std::unique_ptr<blink::WebInputEvent> event) { |
624 DCHECK(input_target != InputTarget::NONE); | 682 DCHECK(input_target != InputTarget::NONE); |
625 auto&& target = input_target == InputTarget::CONTENT | 683 auto&& target = input_target == InputTarget::CONTENT |
626 ? &VrShell::ProcessContentGesture | 684 ? &VrShell::ProcessContentGesture |
627 : &VrShell::ProcessUIGesture; | 685 : &VrShell::ProcessUIGesture; |
628 main_thread_task_runner_->PostTask( | 686 main_thread_task_runner_->PostTask( |
629 FROM_HERE, | 687 FROM_HERE, |
630 base::Bind(target, weak_vr_shell_, base::Passed(std::move(event)))); | 688 base::Bind(target, weak_vr_shell_, base::Passed(std::move(event)))); |
631 } | 689 } |
632 | 690 |
633 void VrShellGl::DrawFrame() { | 691 void VrShellGl::DrawFrame(int frame_index) { |
634 TRACE_EVENT0("gpu", "VrShellGl::DrawFrame"); | 692 TRACE_EVENT1("gpu", "VrShellGl::DrawFrame", "frame", frame_index); |
635 | 693 |
636 // Reset the viewport list to just the pair of viewports for the | 694 // Reset the viewport list to just the pair of viewports for the |
637 // primary buffer each frame. Head-locked viewports get added by | 695 // primary buffer each frame. Head-locked viewports get added by |
638 // DrawVrShell if needed. | 696 // DrawVrShell if needed. |
639 buffer_viewport_list_->SetToRecommendedBufferViewports(); | 697 buffer_viewport_list_->SetToRecommendedBufferViewports(); |
640 | 698 |
699 // If needed, resize the primary buffer for use with WebVR. | |
700 if (web_vr_mode_) { | |
701 if (render_size_primary_ != webvr_surface_size_) { | |
702 if (!webvr_surface_size_.width) { | |
703 return; | |
704 } | |
705 render_size_primary_ = webvr_surface_size_; | |
706 swap_chain_->ResizeBuffer(kFramePrimaryBuffer, render_size_primary_); | |
707 } | |
708 } else { | |
709 if (render_size_primary_ != render_size_vrshell_) { | |
710 render_size_primary_ = render_size_vrshell_; | |
711 swap_chain_->ResizeBuffer(kFramePrimaryBuffer, render_size_primary_); | |
712 } | |
713 } | |
714 | |
715 TRACE_EVENT_BEGIN0("gpu", "VrShellGl::AcquireFrame"); | |
641 gvr::Frame frame = swap_chain_->AcquireFrame(); | 716 gvr::Frame frame = swap_chain_->AcquireFrame(); |
717 TRACE_EVENT_END0("gpu", "VrShellGl::AcquireFrame"); | |
642 if (!frame.is_valid()) { | 718 if (!frame.is_valid()) { |
643 return; | 719 return; |
644 } | 720 } |
645 frame.BindBuffer(kFramePrimaryBuffer); | 721 frame.BindBuffer(kFramePrimaryBuffer); |
646 if (web_vr_mode_) { | 722 if (web_vr_mode_) { |
647 DrawWebVr(); | 723 DrawWebVr(); |
648 } | 724 } |
649 | 725 |
650 uint16_t frame_index; | |
651 gvr::Mat4f head_pose; | 726 gvr::Mat4f head_pose; |
652 | 727 |
653 // When using async reprojection, we need to know which pose was used in | 728 // When using async reprojection, we need to know which pose was used in |
654 // the WebVR app for drawing this frame. Due to unknown amounts of | 729 // the WebVR app for drawing this frame. Only needed if reprojection is |
655 // buffering in the compositor and SurfaceTexture, we read the pose number | 730 // in use. |
656 // from a corner pixel. There's no point in doing this for legacy | 731 if (web_vr_mode_ && gvr_api_->GetAsyncReprojectionEnabled()) { |
657 // distortion rendering since that doesn't need a pose, and reading back | |
658 // pixels is an expensive operation. TODO(klausw,crbug.com/655722): stop | |
659 // doing this once we have working no-compositor rendering for WebVR. | |
660 if (web_vr_mode_ && gvr_api_->GetAsyncReprojectionEnabled() && | |
661 GetPixelEncodedFrameIndex(&frame_index)) { | |
662 static_assert(!((kPoseRingBufferSize - 1) & kPoseRingBufferSize), | 732 static_assert(!((kPoseRingBufferSize - 1) & kPoseRingBufferSize), |
663 "kPoseRingBufferSize must be a power of 2"); | 733 "kPoseRingBufferSize must be a power of 2"); |
664 head_pose = webvr_head_pose_[frame_index % kPoseRingBufferSize]; | 734 head_pose = webvr_head_pose_[frame_index % kPoseRingBufferSize]; |
665 // Process all pending_bounds_ changes targeted for before this frame, being | 735 // Process all pending_bounds_ changes targeted for before this frame, being |
666 // careful of wrapping frame indices. | 736 // careful of wrapping frame indices. |
667 static constexpr unsigned max = | 737 static constexpr unsigned max = |
668 std::numeric_limits<decltype(frame_index_)>::max(); | 738 std::numeric_limits<decltype(frame_index_)>::max(); |
669 static_assert(max > kPoseRingBufferSize * 2, | 739 static_assert(max > kPoseRingBufferSize * 2, |
670 "To detect wrapping, kPoseRingBufferSize must be smaller " | 740 "To detect wrapping, kPoseRingBufferSize must be smaller " |
671 "than half of frame_index_ range."); | 741 "than half of frame_index_ range."); |
672 while (!pending_bounds_.empty()) { | 742 while (!pending_bounds_.empty()) { |
673 uint16_t index = pending_bounds_.front().first; | 743 uint16_t index = pending_bounds_.front().first; |
674 // If index is less than the frame_index it's possible we've wrapped, so | 744 // If index is less than the frame_index it's possible we've wrapped, so |
675 // we extend the range and 'un-wrap' to account for this. | 745 // we extend the range and 'un-wrap' to account for this. |
676 if (index < frame_index) | 746 if (index < frame_index) |
677 index += max; | 747 index += max; |
678 // If the pending bounds change is for an upcoming frame within our buffer | 748 // If the pending bounds change is for an upcoming frame within our buffer |
679 // size, wait to apply it. Otherwise, apply it immediately. This | 749 // size, wait to apply it. Otherwise, apply it immediately. This |
680 // guarantees that even if we miss many frames, the queue can't fill up | 750 // guarantees that even if we miss many frames, the queue can't fill up |
681 // with stale bounds. | 751 // with stale bounds. |
682 if (index > frame_index && index <= frame_index + kPoseRingBufferSize) | 752 if (index > frame_index && index <= frame_index + kPoseRingBufferSize) |
683 break; | 753 break; |
684 | 754 |
685 const BoundsPair& bounds = pending_bounds_.front().second; | 755 const WebVrBounds& bounds = pending_bounds_.front().second; |
686 webvr_left_viewport_->SetSourceUv(bounds.first); | 756 webvr_left_viewport_->SetSourceUv(bounds.left_bounds); |
687 webvr_right_viewport_->SetSourceUv(bounds.second); | 757 webvr_right_viewport_->SetSourceUv(bounds.right_bounds); |
758 CreateOrResizeWebVRSurface(bounds.source_size); | |
688 pending_bounds_.pop(); | 759 pending_bounds_.pop(); |
689 } | 760 } |
690 buffer_viewport_list_->SetBufferViewport(GVR_LEFT_EYE, | 761 buffer_viewport_list_->SetBufferViewport(GVR_LEFT_EYE, |
691 *webvr_left_viewport_); | 762 *webvr_left_viewport_); |
692 buffer_viewport_list_->SetBufferViewport(GVR_RIGHT_EYE, | 763 buffer_viewport_list_->SetBufferViewport(GVR_RIGHT_EYE, |
693 *webvr_right_viewport_); | 764 *webvr_right_viewport_); |
694 } else { | 765 } else { |
695 gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow(); | 766 gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow(); |
696 target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos; | 767 target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos; |
697 head_pose = gvr_api_->GetHeadSpaceFromStartSpaceRotation(target_time); | 768 head_pose = gvr_api_->GetHeadSpaceFromStartSpaceRotation(target_time); |
698 } | 769 } |
699 | 770 |
700 gvr::Vec3f position = GetTranslation(head_pose); | 771 gvr::Vec3f position = GetTranslation(head_pose); |
701 if (position.x == 0.0f && position.y == 0.0f && position.z == 0.0f) { | 772 if (position.x == 0.0f && position.y == 0.0f && position.z == 0.0f) { |
702 // This appears to be a 3DOF pose without a neck model. Add one. | 773 // This appears to be a 3DOF pose without a neck model. Add one. |
703 // The head pose has redundant data. Assume we're only using the | 774 // The head pose has redundant data. Assume we're only using the |
704 // object_from_reference_matrix, we're not updating position_external. | 775 // object_from_reference_matrix, we're not updating position_external. |
705 // TODO: Not sure what object_from_reference_matrix is. The new api removed | 776 // TODO: Not sure what object_from_reference_matrix is. The new api removed |
706 // it. For now, removing it seems working fine. | 777 // it. For now, removing it seems working fine. |
707 gvr_api_->ApplyNeckModel(head_pose, 1.0f); | 778 gvr_api_->ApplyNeckModel(head_pose, 1.0f); |
708 } | 779 } |
709 | 780 |
710 // Update the render position of all UI elements (including desktop). | 781 // Update the render position of all UI elements (including desktop). |
711 const float screen_tilt = kDesktopScreenTiltDefault * M_PI / 180.0f; | 782 const float screen_tilt = kDesktopScreenTiltDefault * M_PI / 180.0f; |
712 scene_->UpdateTransforms(screen_tilt, TimeInMicroseconds()); | 783 scene_->UpdateTransforms(screen_tilt, TimeInMicroseconds()); |
713 | 784 |
714 UpdateController(GetForwardVector(head_pose)); | 785 { |
786 TRACE_EVENT0("gpu", "VrShellGl::UpdateController"); | |
787 UpdateController(GetForwardVector(head_pose)); | |
788 } | |
715 | 789 |
716 DrawVrShell(head_pose, frame); | 790 // Finish drawing in the primary buffer, and draw the headlocked buffer |
791 // if needed. This must be the last drawing call, this method will | |
792 // return with no frame being bound. | |
793 DrawVrShellAndUnbind(head_pose, frame); | |
717 | 794 |
718 frame.Unbind(); | 795 { |
719 frame.Submit(*buffer_viewport_list_, head_pose); | 796 TRACE_EVENT0("gpu", "VrShellGl::Submit"); |
797 frame.Submit(*buffer_viewport_list_, head_pose); | |
798 } | |
720 | 799 |
721 // No need to swap buffers for surfaceless rendering. | 800 // No need to swap buffers for surfaceless rendering. |
722 if (!surfaceless_rendering_) { | 801 if (!surfaceless_rendering_) { |
723 // TODO(mthiesse): Support asynchronous SwapBuffers. | 802 // TODO(mthiesse): Support asynchronous SwapBuffers. |
803 TRACE_EVENT0("gpu", "VrShellGl::SwapBuffers"); | |
724 surface_->SwapBuffers(); | 804 surface_->SwapBuffers(); |
725 } | 805 } |
726 } | 806 } |
727 | 807 |
728 void VrShellGl::DrawVrShell(const gvr::Mat4f& head_pose, gvr::Frame& frame) { | 808 void VrShellGl::DrawVrShellAndUnbind(const gvr::Mat4f& head_pose, |
809 gvr::Frame& frame) { | |
729 TRACE_EVENT0("gpu", "VrShellGl::DrawVrShell"); | 810 TRACE_EVENT0("gpu", "VrShellGl::DrawVrShell"); |
730 std::vector<const ContentRectangle*> head_locked_elements; | 811 std::vector<const ContentRectangle*> head_locked_elements; |
731 std::vector<const ContentRectangle*> world_elements; | 812 std::vector<const ContentRectangle*> world_elements; |
732 for (const auto& rect : scene_->GetUiElements()) { | 813 for (const auto& rect : scene_->GetUiElements()) { |
733 if (!rect->IsVisible()) | 814 if (!rect->IsVisible()) |
734 continue; | 815 continue; |
735 if (rect->lock_to_fov) { | 816 if (rect->lock_to_fov) { |
736 head_locked_elements.push_back(rect.get()); | 817 head_locked_elements.push_back(rect.get()); |
737 } else { | 818 } else { |
738 world_elements.push_back(rect.get()); | 819 world_elements.push_back(rect.get()); |
(...skipping 16 matching lines...) | |
755 | 836 |
756 const Colorf& backgroundColor = scene_->GetBackgroundColor(); | 837 const Colorf& backgroundColor = scene_->GetBackgroundColor(); |
757 glClearColor(backgroundColor.r, backgroundColor.g, backgroundColor.b, | 838 glClearColor(backgroundColor.r, backgroundColor.g, backgroundColor.b, |
758 backgroundColor.a); | 839 backgroundColor.a); |
759 glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); | 840 glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); |
760 } | 841 } |
761 if (!world_elements.empty()) { | 842 if (!world_elements.empty()) { |
762 DrawUiView(&head_pose, world_elements, render_size_primary_, | 843 DrawUiView(&head_pose, world_elements, render_size_primary_, |
763 kViewportListPrimaryOffset); | 844 kViewportListPrimaryOffset); |
764 } | 845 } |
846 frame.Unbind(); // Done with the primary buffer. | |
765 | 847 |
766 if (!head_locked_elements.empty()) { | 848 if (!head_locked_elements.empty()) { |
767 // Add head-locked viewports. The list gets reset to just | 849 // Add head-locked viewports. The list gets reset to just |
768 // the recommended viewports (for the primary buffer) each frame. | 850 // the recommended viewports (for the primary buffer) each frame. |
769 buffer_viewport_list_->SetBufferViewport( | 851 buffer_viewport_list_->SetBufferViewport( |
770 kViewportListHeadlockedOffset + GVR_LEFT_EYE, | 852 kViewportListHeadlockedOffset + GVR_LEFT_EYE, |
771 *headlocked_left_viewport_); | 853 *headlocked_left_viewport_); |
772 buffer_viewport_list_->SetBufferViewport( | 854 buffer_viewport_list_->SetBufferViewport( |
773 kViewportListHeadlockedOffset + GVR_RIGHT_EYE, | 855 kViewportListHeadlockedOffset + GVR_RIGHT_EYE, |
774 *headlocked_right_viewport_); | 856 *headlocked_right_viewport_); |
775 | 857 |
776 // Bind the headlocked framebuffer. | 858 // Bind the headlocked framebuffer. |
777 // TODO(mthiesse): We don't unbind this? Maybe some cleanup is in order | |
778 // here. | |
779 frame.BindBuffer(kFrameHeadlockedBuffer); | 859 frame.BindBuffer(kFrameHeadlockedBuffer); |
780 glClearColor(0.0f, 0.0f, 0.0f, 0.0f); | 860 glClearColor(0.0f, 0.0f, 0.0f, 0.0f); |
781 glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); | 861 glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); |
782 DrawUiView(nullptr, head_locked_elements, render_size_headlocked_, | 862 DrawUiView(nullptr, head_locked_elements, render_size_headlocked_, |
783 kViewportListHeadlockedOffset); | 863 kViewportListHeadlockedOffset); |
864 frame.Unbind(); // Done with the headlocked buffer. | |
784 } | 865 } |
785 } | 866 } |
786 | 867 |
787 gvr::Sizei VrShellGl::GetWebVRCompositorSurfaceSize() { | |
788 // This is a stopgap while we're using the WebVR compositor rendering path. | |
789 // TODO(klausw,crbug.com/655722): Remove this method and member once we're | |
790 // using a separate WebVR render surface. | |
791 return content_tex_physical_size_; | |
792 } | |
793 | |
794 void VrShellGl::DrawUiView(const gvr::Mat4f* head_pose, | 868 void VrShellGl::DrawUiView(const gvr::Mat4f* head_pose, |
795 const std::vector<const ContentRectangle*>& elements, | 869 const std::vector<const ContentRectangle*>& elements, |
796 const gvr::Sizei& render_size, | 870 const gvr::Sizei& render_size, |
797 int viewport_offset) { | 871 int viewport_offset) { |
798 TRACE_EVENT0("gpu", "VrShellGl::DrawUiView"); | 872 TRACE_EVENT0("gpu", "VrShellGl::DrawUiView"); |
799 | 873 |
800 gvr::Mat4f view_matrix; | 874 gvr::Mat4f view_matrix; |
801 if (head_pose) { | 875 if (head_pose) { |
802 view_matrix = *head_pose; | 876 view_matrix = *head_pose; |
803 } else { | 877 } else { |
(...skipping 186 matching lines...) | |
990 void VrShellGl::DrawWebVr() { | 1064 void VrShellGl::DrawWebVr() { |
991 TRACE_EVENT0("gpu", "VrShellGl::DrawWebVr"); | 1065 TRACE_EVENT0("gpu", "VrShellGl::DrawWebVr"); |
992 // Don't need face culling, depth testing, blending, etc. Turn it all off. | 1066 // Don't need face culling, depth testing, blending, etc. Turn it all off. |
993 glDisable(GL_CULL_FACE); | 1067 glDisable(GL_CULL_FACE); |
994 glDepthMask(GL_FALSE); | 1068 glDepthMask(GL_FALSE); |
995 glDisable(GL_DEPTH_TEST); | 1069 glDisable(GL_DEPTH_TEST); |
996 glDisable(GL_SCISSOR_TEST); | 1070 glDisable(GL_SCISSOR_TEST); |
997 glDisable(GL_BLEND); | 1071 glDisable(GL_BLEND); |
998 glDisable(GL_POLYGON_OFFSET_FILL); | 1072 glDisable(GL_POLYGON_OFFSET_FILL); |
999 | 1073 |
1000 glViewport(0, 0, render_size_primary_.width, render_size_primary_.height); | 1074 glViewport(0, 0, webvr_surface_size_.width, webvr_surface_size_.height); |
1001 vr_shell_renderer_->GetWebVrRenderer()->Draw(webvr_texture_id_); | 1075 vr_shell_renderer_->GetWebVrRenderer()->Draw(webvr_texture_id_); |
1002 } | 1076 } |
1003 | 1077 |
1004 void VrShellGl::OnTriggerEvent() { | 1078 void VrShellGl::OnTriggerEvent() { |
1005 // Set a flag to handle this on the render thread at the next frame. | 1079 // Set a flag to handle this on the render thread at the next frame. |
1006 touch_pending_ = true; | 1080 touch_pending_ = true; |
1007 } | 1081 } |
1008 | 1082 |
1009 void VrShellGl::OnPause() { | 1083 void VrShellGl::OnPause() { |
1010 vsync_task_.Cancel(); | 1084 vsync_task_.Cancel(); |
(...skipping 10 matching lines...) | |
1021 OnVSync(); | 1095 OnVSync(); |
1022 } | 1096 } |
1023 } | 1097 } |
1024 | 1098 |
1025 void VrShellGl::SetWebVrMode(bool enabled) { | 1099 void VrShellGl::SetWebVrMode(bool enabled) { |
1026 web_vr_mode_ = enabled; | 1100 web_vr_mode_ = enabled; |
1027 } | 1101 } |
1028 | 1102 |
1029 void VrShellGl::UpdateWebVRTextureBounds(int16_t frame_index, | 1103 void VrShellGl::UpdateWebVRTextureBounds(int16_t frame_index, |
1030 const gvr::Rectf& left_bounds, | 1104 const gvr::Rectf& left_bounds, |
1031 const gvr::Rectf& right_bounds) { | 1105 const gvr::Rectf& right_bounds, |
1106 const gvr::Sizei& source_size) { | |
1032 if (frame_index < 0) { | 1107 if (frame_index < 0) { |
1033 webvr_left_viewport_->SetSourceUv(left_bounds); | 1108 webvr_left_viewport_->SetSourceUv(left_bounds); |
1034 webvr_right_viewport_->SetSourceUv(right_bounds); | 1109 webvr_right_viewport_->SetSourceUv(right_bounds); |
1035 } else { | 1110 } else { |
1036 pending_bounds_.emplace( | 1111 pending_bounds_.emplace(std::make_pair( |
1037 std::make_pair(frame_index, std::make_pair(left_bounds, right_bounds))); | 1112 frame_index, WebVrBounds(left_bounds, right_bounds, source_size))); |
1038 } | 1113 } |
1039 } | 1114 } |
1040 | 1115 |
1041 void VrShellGl::ContentBoundsChanged(int width, int height) { | 1116 void VrShellGl::ContentBoundsChanged(int width, int height) { |
1042 TRACE_EVENT0("gpu", "VrShellGl::ContentBoundsChanged"); | 1117 TRACE_EVENT0("gpu", "VrShellGl::ContentBoundsChanged"); |
1043 content_tex_css_width_ = width; | 1118 content_tex_css_width_ = width; |
1044 content_tex_css_height_ = height; | 1119 content_tex_css_height_ = height; |
1045 } | 1120 } |
1046 | 1121 |
1047 void VrShellGl::ContentPhysicalBoundsChanged(int width, int height) { | 1122 void VrShellGl::ContentPhysicalBoundsChanged(int width, int height) { |
(...skipping 13 matching lines...) | |
1061 ui_surface_texture_->SetDefaultBufferSize(width, height); | 1136 ui_surface_texture_->SetDefaultBufferSize(width, height); |
1062 ui_tex_physical_size_.width = width; | 1137 ui_tex_physical_size_.width = width; |
1063 ui_tex_physical_size_.height = height; | 1138 ui_tex_physical_size_.height = height; |
1064 } | 1139 } |
1065 | 1140 |
1066 base::WeakPtr<VrShellGl> VrShellGl::GetWeakPtr() { | 1141 base::WeakPtr<VrShellGl> VrShellGl::GetWeakPtr() { |
1067 return weak_ptr_factory_.GetWeakPtr(); | 1142 return weak_ptr_factory_.GetWeakPtr(); |
1068 } | 1143 } |
1069 | 1144 |
1070 void VrShellGl::OnVSync() { | 1145 void VrShellGl::OnVSync() { |
1146 while (premature_received_frames_ > 0) { | |
1147 TRACE_EVENT0("gpu", "VrShellGl::OnWebVRFrameAvailableRetry"); | |
1148 --premature_received_frames_; | |
1149 OnWebVRFrameAvailable(); | |
1150 } | |
1151 | |
1071 base::TimeTicks now = base::TimeTicks::Now(); | 1152 base::TimeTicks now = base::TimeTicks::Now(); |
1072 base::TimeTicks target; | 1153 base::TimeTicks target; |
1073 | 1154 |
1074 // Don't send VSyncs until we have a timebase/interval. | 1155 // Don't send VSyncs until we have a timebase/interval. |
1075 if (vsync_interval_.is_zero()) | 1156 if (vsync_interval_.is_zero()) |
1076 return; | 1157 return; |
1077 target = now + vsync_interval_; | 1158 target = now + vsync_interval_; |
1078 int64_t intervals = (target - vsync_timebase_) / vsync_interval_; | 1159 int64_t intervals = (target - vsync_timebase_) / vsync_interval_; |
1079 target = vsync_timebase_ + intervals * vsync_interval_; | 1160 target = vsync_timebase_ + intervals * vsync_interval_; |
1080 task_runner_->PostDelayedTask(FROM_HERE, vsync_task_.callback(), | 1161 task_runner_->PostDelayedTask(FROM_HERE, vsync_task_.callback(), |
1081 target - now); | 1162 target - now); |
1082 | 1163 |
1083 base::TimeDelta time = intervals * vsync_interval_; | 1164 base::TimeDelta time = intervals * vsync_interval_; |
1084 if (!callback_.is_null()) { | 1165 if (!callback_.is_null()) { |
1085 SendVSync(time, base::ResetAndReturn(&callback_)); | 1166 SendVSync(time, base::ResetAndReturn(&callback_)); |
1086 } else { | 1167 } else { |
1087 pending_vsync_ = true; | 1168 pending_vsync_ = true; |
1088 pending_time_ = time; | 1169 pending_time_ = time; |
1089 } | 1170 } |
1090 DrawFrame(); | 1171 if (!web_vr_mode_) { |
1172 DrawFrame(-1); | |
1173 } | |
1091 } | 1174 } |
1092 | 1175 |
1093 void VrShellGl::OnRequest(device::mojom::VRVSyncProviderRequest request) { | 1176 void VrShellGl::OnRequest(device::mojom::VRVSyncProviderRequest request) { |
1094 binding_.Close(); | 1177 binding_.Close(); |
1095 binding_.Bind(std::move(request)); | 1178 binding_.Bind(std::move(request)); |
1096 } | 1179 } |
1097 | 1180 |
1098 void VrShellGl::GetVSync(const GetVSyncCallback& callback) { | 1181 void VrShellGl::GetVSync(const GetVSyncCallback& callback) { |
1099 if (!pending_vsync_) { | 1182 if (!pending_vsync_) { |
1100 if (!callback_.is_null()) { | 1183 if (!callback_.is_null()) { |
(...skipping 49 matching lines...) | |
1150 void VrShellGl::ResetPose() { | 1233 void VrShellGl::ResetPose() { |
1151 // Should never call RecenterTracking when using with Daydream viewers. On | 1234 // Should never call RecenterTracking when using with Daydream viewers. On |
1152 // those devices recentering should only be done via the controller. | 1235 // those devices recentering should only be done via the controller. |
1153 if (gvr_api_ && gvr_api_->GetViewerType() == GVR_VIEWER_TYPE_CARDBOARD) | 1236 if (gvr_api_ && gvr_api_->GetViewerType() == GVR_VIEWER_TYPE_CARDBOARD) |
1154 gvr_api_->RecenterTracking(); | 1237 gvr_api_->RecenterTracking(); |
1155 } | 1238 } |
1156 | 1239 |
1157 void VrShellGl::CreateVRDisplayInfo( | 1240 void VrShellGl::CreateVRDisplayInfo( |
1158 const base::Callback<void(device::mojom::VRDisplayInfoPtr)>& callback, | 1241 const base::Callback<void(device::mojom::VRDisplayInfoPtr)>& callback, |
1159 uint32_t device_id) { | 1242 uint32_t device_id) { |
1243 // This assumes that the initial webvr_surface_size_ was set to the | |
1244 // appropriate recommended render resolution as the default size during | |
1245 // InitializeGl. Revisit if the initialization order changes. | |
1160 device::mojom::VRDisplayInfoPtr info = VrShell::CreateVRDisplayInfo( | 1246 device::mojom::VRDisplayInfoPtr info = VrShell::CreateVRDisplayInfo( |
1161 gvr_api_.get(), content_tex_physical_size_, device_id); | 1247 gvr_api_.get(), webvr_surface_size_, device_id); |
1162 main_thread_task_runner_->PostTask( | 1248 main_thread_task_runner_->PostTask( |
1163 FROM_HERE, | 1249 FROM_HERE, |
1164 base::Bind(&RunVRDisplayInfoCallback, callback, base::Passed(&info))); | 1250 base::Bind(&RunVRDisplayInfoCallback, callback, base::Passed(&info))); |
1165 } | 1251 } |
1166 | 1252 |
1167 } // namespace vr_shell | 1253 } // namespace vr_shell |