OLD | NEW |
1 // Copyright 2016 The Chromium Authors. All rights reserved. | 1 // Copyright 2016 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "chrome/browser/android/vr_shell/vr_shell_gl.h" | 5 #include "chrome/browser/android/vr_shell/vr_shell_gl.h" |
6 | 6 |
7 #include <limits> | 7 #include <limits> |
8 #include <utility> | 8 #include <utility> |
9 | 9 |
10 #include "base/android/jni_android.h" | 10 #include "base/android/jni_android.h" |
11 #include "base/callback_helpers.h" | 11 #include "base/callback_helpers.h" |
12 #include "base/memory/ptr_util.h" | 12 #include "base/memory/ptr_util.h" |
13 #include "base/metrics/histogram_macros.h" | 13 #include "base/metrics/histogram_macros.h" |
14 #include "base/threading/thread_task_runner_handle.h" | 14 #include "base/threading/thread_task_runner_handle.h" |
15 #include "chrome/browser/android/vr_shell/mailbox_to_surface_bridge.h" | |
16 #include "chrome/browser/android/vr_shell/ui_elements.h" | 15 #include "chrome/browser/android/vr_shell/ui_elements.h" |
17 #include "chrome/browser/android/vr_shell/ui_scene.h" | 16 #include "chrome/browser/android/vr_shell/ui_scene.h" |
18 #include "chrome/browser/android/vr_shell/vr_controller.h" | 17 #include "chrome/browser/android/vr_shell/vr_controller.h" |
19 #include "chrome/browser/android/vr_shell/vr_gl_util.h" | 18 #include "chrome/browser/android/vr_shell/vr_gl_util.h" |
20 #include "chrome/browser/android/vr_shell/vr_math.h" | 19 #include "chrome/browser/android/vr_shell/vr_math.h" |
21 #include "chrome/browser/android/vr_shell/vr_shell.h" | 20 #include "chrome/browser/android/vr_shell/vr_shell.h" |
22 #include "chrome/browser/android/vr_shell/vr_shell_delegate.h" | 21 #include "chrome/browser/android/vr_shell/vr_shell_delegate.h" |
23 #include "chrome/browser/android/vr_shell/vr_shell_renderer.h" | 22 #include "chrome/browser/android/vr_shell/vr_shell_renderer.h" |
24 #include "device/vr/android/gvr/gvr_device.h" | 23 #include "device/vr/android/gvr/gvr_device.h" |
25 #include "third_party/WebKit/public/platform/WebInputEvent.h" | 24 #include "third_party/WebKit/public/platform/WebInputEvent.h" |
(...skipping 48 matching lines...) |
74 | 73 |
75 // The GVR viewport list has two entries (left eye and right eye) for each | 74 // The GVR viewport list has two entries (left eye and right eye) for each |
76 // GVR buffer. | 75 // GVR buffer. |
77 static constexpr int kViewportListPrimaryOffset = 0; | 76 static constexpr int kViewportListPrimaryOffset = 0; |
78 static constexpr int kViewportListHeadlockedOffset = 2; | 77 static constexpr int kViewportListHeadlockedOffset = 2; |
79 | 78 |
80 // Buffer size large enough to handle the current backlog of poses which is | 79 // Buffer size large enough to handle the current backlog of poses which is |
81 // 2-3 frames. | 80 // 2-3 frames. |
82 static constexpr unsigned kPoseRingBufferSize = 8; | 81 static constexpr unsigned kPoseRingBufferSize = 8; |
83 | 82 |
| 83 // Magic numbers used to mark valid pose index values encoded in frame |
| 84 // data. Must match the magic numbers used in blink's VRDisplay.cpp. |
| 85 static constexpr std::array<uint8_t, 2> kWebVrPosePixelMagicNumbers{{42, 142}}; |
| 86 |
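The magic-number handshake above has a producer side in Blink that this file never shows. Below is a hedged, illustrative sketch (not the actual VRDisplay.cpp code) of what writing that bottom-left pixel could look like with plain GLES2 calls: red carries the low byte of the pose index, green and blue carry the two magic bytes, matching what GetPixelEncodedFrameIndex() reads back further down.

#include <GLES2/gl2.h>
#include <cstdint>

// Hypothetical producer-side sketch; the real encoder lives in
// third_party/WebKit/Source/modules/vr/VRDisplay.cpp and may differ.
void EncodePoseIndexPixel(uint16_t pose_index) {
  glEnable(GL_SCISSOR_TEST);
  glScissor(0, 0, 1, 1);  // Touch only the bottom-left pixel.
  glClearColor((pose_index & 0xFF) / 255.0f,  // pixels[0]: low byte of index.
               42.0f / 255.0f,                // pixels[1]: magic number 0.
               142.0f / 255.0f,               // pixels[2]: magic number 1.
               1.0f);
  glClear(GL_COLOR_BUFFER_BIT);
  glDisable(GL_SCISSOR_TEST);
}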
84 float Distance(const gvr::Vec3f& vec1, const gvr::Vec3f& vec2) { | 87 float Distance(const gvr::Vec3f& vec1, const gvr::Vec3f& vec2) { |
85 float xdiff = (vec1.x - vec2.x); | 88 float xdiff = (vec1.x - vec2.x); |
86 float ydiff = (vec1.y - vec2.y); | 89 float ydiff = (vec1.y - vec2.y); |
87 float zdiff = (vec1.z - vec2.z); | 90 float zdiff = (vec1.z - vec2.z); |
88 float scale = xdiff * xdiff + ydiff * ydiff + zdiff * zdiff; | 91 float scale = xdiff * xdiff + ydiff * ydiff + zdiff * zdiff; |
89 return std::sqrt(scale); | 92 return std::sqrt(scale); |
90 } | 93 } |
91 | 94 |
92 // Generate a quaternion representing the rotation from the negative Z axis | 95 // Generate a quaternion representing the rotation from the negative Z axis |
93 // (0, 0, -1) to a specified vector. This is an optimized version of a more | 96 // (0, 0, -1) to a specified vector. This is an optimized version of a more |
(...skipping 68 matching lines...) |
162 binding_(this), | 165 binding_(this), |
163 weak_vr_shell_(weak_vr_shell), | 166 weak_vr_shell_(weak_vr_shell), |
164 delegate_provider_(delegate_provider), | 167 delegate_provider_(delegate_provider), |
165 main_thread_task_runner_(std::move(main_thread_task_runner)), | 168 main_thread_task_runner_(std::move(main_thread_task_runner)), |
166 weak_ptr_factory_(this) { | 169 weak_ptr_factory_(this) { |
167 GvrInit(gvr_api); | 170 GvrInit(gvr_api); |
168 } | 171 } |
169 | 172 |
170 VrShellGl::~VrShellGl() { | 173 VrShellGl::~VrShellGl() { |
171 vsync_task_.Cancel(); | 174 vsync_task_.Cancel(); |
172 // TODO(mthiesse): Can we omit the Close() here? Concern is that if | |
173 // both ends of the connection ever live in the same process for | |
174 // some reason, we could receive another VSync request in response | |
175 // to the closing message in the destructor but fail to respond to | |
176 // the callback. | |
177 binding_.Close(); | |
178 if (!callback_.is_null()) { | 175 if (!callback_.is_null()) { |
179 // When this VSync provider is going away we have to respond to pending | 176 // When this VSync provider is going away we have to respond to pending |
180 // callbacks, so instead of providing a VSync, tell the requester to try | 177 // callbacks, so instead of providing a VSync, tell the requester to try |
181 // again. A VSyncProvider is guaranteed to exist, so the request in response | 178 // again. A VSyncProvider is guaranteed to exist, so the request in response |
182 // to this message will go through some other VSyncProvider. | 179 // to this message will go through some other VSyncProvider. |
183 base::ResetAndReturn(&callback_) | 180 base::ResetAndReturn(&callback_) |
184 .Run(nullptr, base::TimeDelta(), -1, | 181 .Run(nullptr, base::TimeDelta(), -1, |
185 device::mojom::VRVSyncProvider::Status::CLOSING); | 182 device::mojom::VRVSyncProvider::Status::RETRY); |
| 183 } |
| 184 if (binding_.is_bound()) { |
| 185 main_thread_task_runner_->PostTask( |
| 186 FROM_HERE, |
| 187 base::Bind(&VrShellDelegate::OnVRVsyncProviderRequest, |
| 188 delegate_provider_, base::Passed(binding_.Unbind()))); |
186 } | 189 } |
187 } | 190 } |
188 | 191 |
189 void VrShellGl::Initialize() { | 192 void VrShellGl::Initialize() { |
190 scene_.reset(new UiScene); | 193 scene_.reset(new UiScene); |
191 | 194 |
192 if (surfaceless_rendering_) { | 195 if (surfaceless_rendering_) { |
193 // If we're rendering surfaceless, we'll never get a java surface to render | 196 // If we're rendering surfaceless, we'll never get a java surface to render |
194 // into, so we can initialize GL right away. | 197 // into, so we can initialize GL right away. |
195 InitializeGl(nullptr); | 198 InitializeGl(nullptr); |
(...skipping 26 matching lines...) |
222 LOG(ERROR) << "gl::init::CreateGLContext failed"; | 225 LOG(ERROR) << "gl::init::CreateGLContext failed"; |
223 ForceExitVr(); | 226 ForceExitVr(); |
224 return; | 227 return; |
225 } | 228 } |
226 if (!context_->MakeCurrent(surface_.get())) { | 229 if (!context_->MakeCurrent(surface_.get())) { |
227 LOG(ERROR) << "gl::GLContext::MakeCurrent() failed"; | 230 LOG(ERROR) << "gl::GLContext::MakeCurrent() failed"; |
228 ForceExitVr(); | 231 ForceExitVr(); |
229 return; | 232 return; |
230 } | 233 } |
231 | 234 |
232 unsigned int textures[3]; | 235 unsigned int textures[2]; |
233 glGenTextures(3, textures); | 236 glGenTextures(2, textures); |
234 ui_texture_id_ = textures[0]; | 237 ui_texture_id_ = textures[0]; |
235 content_texture_id_ = textures[1]; | 238 content_texture_id_ = textures[1]; |
236 webvr_texture_id_ = textures[2]; | |
237 ui_surface_texture_ = gl::SurfaceTexture::Create(ui_texture_id_); | 239 ui_surface_texture_ = gl::SurfaceTexture::Create(ui_texture_id_); |
238 content_surface_texture_ = gl::SurfaceTexture::Create(content_texture_id_); | 240 content_surface_texture_ = gl::SurfaceTexture::Create(content_texture_id_); |
239 webvr_surface_texture_ = gl::SurfaceTexture::Create(webvr_texture_id_); | |
240 CreateUiSurface(); | 241 CreateUiSurface(); |
241 CreateContentSurface(); | 242 CreateContentSurface(); |
242 ui_surface_texture_->SetFrameAvailableCallback(base::Bind( | 243 ui_surface_texture_->SetFrameAvailableCallback(base::Bind( |
243 &VrShellGl::OnUIFrameAvailable, weak_ptr_factory_.GetWeakPtr())); | 244 &VrShellGl::OnUIFrameAvailable, weak_ptr_factory_.GetWeakPtr())); |
244 content_surface_texture_->SetFrameAvailableCallback(base::Bind( | 245 content_surface_texture_->SetFrameAvailableCallback(base::Bind( |
245 &VrShellGl::OnContentFrameAvailable, weak_ptr_factory_.GetWeakPtr())); | 246 &VrShellGl::OnContentFrameAvailable, weak_ptr_factory_.GetWeakPtr())); |
246 webvr_surface_texture_->SetFrameAvailableCallback(base::Bind( | 247 content_surface_texture_->SetDefaultBufferSize( |
247 &VrShellGl::OnWebVRFrameAvailable, weak_ptr_factory_.GetWeakPtr())); | 248 content_tex_physical_size_.width, content_tex_physical_size_.height); |
248 ui_surface_texture_->SetDefaultBufferSize(ui_tex_physical_size_.width, | 249 ui_surface_texture_->SetDefaultBufferSize(ui_tex_physical_size_.width, |
249 ui_tex_physical_size_.height); | 250 ui_tex_physical_size_.height); |
250 content_surface_texture_->SetDefaultBufferSize( | |
251 content_tex_physical_size_.width, content_tex_physical_size_.height); | |
252 InitializeRenderer(); | 251 InitializeRenderer(); |
253 | 252 |
254 gvr::Sizei webvr_size = VrShell::GetRecommendedWebVrSize(gvr_api_.get()); | |
255 DVLOG(1) << __FUNCTION__ << ": resize initial to " << webvr_size.width << "x" | |
256 << webvr_size.height; | |
257 | |
258 CreateOrResizeWebVRSurface(webvr_size); | |
259 | |
260 vsync_task_.Reset(base::Bind(&VrShellGl::OnVSync, base::Unretained(this))); | 253 vsync_task_.Reset(base::Bind(&VrShellGl::OnVSync, base::Unretained(this))); |
261 OnVSync(); | 254 OnVSync(); |
262 | 255 |
263 ready_to_draw_ = true; | 256 ready_to_draw_ = true; |
264 } | 257 } |
265 | 258 |
266 void VrShellGl::CreateContentSurface() { | 259 void VrShellGl::CreateContentSurface() { |
267 content_surface_ = | 260 content_surface_ = |
268 base::MakeUnique<gl::ScopedJavaSurface>(content_surface_texture_.get()); | 261 base::MakeUnique<gl::ScopedJavaSurface>(content_surface_texture_.get()); |
269 main_thread_task_runner_->PostTask( | 262 main_thread_task_runner_->PostTask( |
270 FROM_HERE, base::Bind(&VrShell::ContentSurfaceChanged, weak_vr_shell_, | 263 FROM_HERE, base::Bind(&VrShell::ContentSurfaceChanged, weak_vr_shell_, |
271 content_surface_->j_surface().obj())); | 264 content_surface_->j_surface().obj())); |
272 } | 265 } |
273 | 266 |
274 void VrShellGl::CreateUiSurface() { | 267 void VrShellGl::CreateUiSurface() { |
275 ui_surface_ = | 268 ui_surface_ = |
276 base::MakeUnique<gl::ScopedJavaSurface>(ui_surface_texture_.get()); | 269 base::MakeUnique<gl::ScopedJavaSurface>(ui_surface_texture_.get()); |
277 main_thread_task_runner_->PostTask( | 270 main_thread_task_runner_->PostTask( |
278 FROM_HERE, base::Bind(&VrShell::UiSurfaceChanged, weak_vr_shell_, | 271 FROM_HERE, base::Bind(&VrShell::UiSurfaceChanged, weak_vr_shell_, |
279 ui_surface_->j_surface().obj())); | 272 ui_surface_->j_surface().obj())); |
280 } | 273 } |
281 | 274 |
282 void VrShellGl::CreateOrResizeWebVRSurface(const gvr::Sizei& size) { | |
283 if (!webvr_surface_texture_) { | |
284 DLOG(ERROR) << "No WebVR surface texture available"; | |
285 return; | |
286 } | |
287 | |
288 // ContentPhysicalBoundsChanged is getting called twice with | |
289 // identical sizes? Avoid thrashing the existing context. | |
290 if (size == webvr_surface_size_) { | |
291 return; | |
292 } | |
293 | |
294 if (!size.width || !size.height) { | |
295 // Invalid size, defer until a new size arrives on a future bounds update. | |
296 return; | |
297 } | |
298 | |
299 webvr_surface_texture_->SetDefaultBufferSize(size.width, size.height); | |
300 webvr_surface_size_ = size; | |
301 | |
302 if (mailbox_bridge_) { | |
303 mailbox_bridge_->ResizeSurface(size.width, size.height); | |
304 } else { | |
305 mailbox_bridge_ = base::MakeUnique<MailboxToSurfaceBridge>(); | |
306 mailbox_bridge_->CreateSurface(webvr_surface_texture_.get()); | |
307 } | |
308 } | |
309 | |
310 void VrShellGl::SubmitWebVRFrame(int16_t frame_index, | |
311 const gpu::MailboxHolder& mailbox) { | |
312 TRACE_EVENT0("gpu", "VrShellGl::SubmitWebVRFrame"); | |
313 | |
314 // Swapping twice on a Surface without calling updateTexImage in | |
315 // between can lose frames, so don't draw+swap if we already have | |
316 // a pending frame we haven't consumed yet. | |
317 bool swapped = false; | |
318 if (pending_frames_.empty()) { | |
319 swapped = mailbox_bridge_->CopyMailboxToSurfaceAndSwap(mailbox); | |
320 if (swapped) { | |
321 // Tell OnWebVRFrameAvailable to expect a new frame to arrive on | |
322 // the SurfaceTexture, and save the associated frame index. | |
323 pending_frames_.emplace(frame_index); | |
324 } | |
325 } | |
326 // Always notify the client that we're done with the mailbox even | |
327 // if we haven't drawn it, so that it's eligible for destruction. | |
328 submit_client_->OnSubmitFrameTransferred(); | |
329 if (!swapped) { | |
330 // We dropped without drawing, report this as completed rendering | |
331 // now to unblock the client. We're not going to receive it in | |
332 // OnWebVRFrameAvailable where we'd normally report that. | |
333 submit_client_->OnSubmitFrameRendered(); | |
334 } | |
335 | |
336 TRACE_EVENT0("gpu", "VrShellGl::glFinish"); | |
337 // This is a load-bearing glFinish, please don't remove it without | |
338 // before/after timing comparisons. Goal is to clear the GPU queue | |
339 // of the native GL context to avoid stalls later in GVR frame | |
340 // acquire/submit. | |
341 glFinish(); | |
342 } | |
343 | |
344 void VrShellGl::SetSubmitClient( | |
345 device::mojom::VRSubmitFrameClientPtrInfo submit_client_info) { | |
346 submit_client_.Bind(std::move(submit_client_info)); | |
347 } | |
348 | |
349 void VrShellGl::OnUIFrameAvailable() { | 275 void VrShellGl::OnUIFrameAvailable() { |
350 ui_surface_texture_->UpdateTexImage(); | 276 ui_surface_texture_->UpdateTexImage(); |
351 } | 277 } |
352 | 278 |
353 void VrShellGl::OnContentFrameAvailable() { | 279 void VrShellGl::OnContentFrameAvailable() { |
354 content_surface_texture_->UpdateTexImage(); | 280 content_surface_texture_->UpdateTexImage(); |
355 received_frame_ = true; | 281 received_frame_ = true; |
356 } | 282 } |
357 | 283 |
358 void VrShellGl::OnWebVRFrameAvailable() { | 284 bool VrShellGl::GetPixelEncodedFrameIndex(uint16_t* frame_index) { |
359 // A "while" loop here is a bad idea. It's legal to call | 285 TRACE_EVENT0("gpu", "VrShellGl::GetPixelEncodedFrameIndex"); |
360 // UpdateTexImage repeatedly even if no frames are available, but | 286 if (!received_frame_) { |
361 // that does *not* wait for a new frame, it just reuses the most | 287 if (last_frame_index_ == (uint16_t)-1) |
362 // recent one. That would mess up the count. | 288 return false; |
363 if (pending_frames_.empty()) { | 289 *frame_index = last_frame_index_; |
364 // We're expecting a frame, but it's not here yet. Retry in OnVsync. | 290 return true; |
365 ++premature_received_frames_; | |
366 return; | |
367 } | 291 } |
| 292 received_frame_ = false; |
368 | 293 |
369 webvr_surface_texture_->UpdateTexImage(); | 294 // Read the pose index encoded in a bottom left pixel as color values. |
370 int frame_index = pending_frames_.front(); | 295 // See also third_party/WebKit/Source/modules/vr/VRDisplay.cpp which |
371 TRACE_EVENT1("gpu", "VrShellGl::OnWebVRFrameAvailable", "frame", frame_index); | 296 // encodes the pose index, and device/vr/android/gvr/gvr_device.cc |
372 pending_frames_.pop(); | 297 // which tracks poses. Returns the low byte (0..255) if valid, or -1 |
| 298 // if not valid due to bad magic number. |
| 299 uint8_t pixels[4]; |
| 300 // Assume we're reading from the framebuffer we just wrote to. |
| 301 // That's true currently, we may need to use glReadBuffer(GL_BACK) |
| 302 // or equivalent if the rendering setup changes in the future. |
| 303 glReadPixels(0, 0, 1, 1, GL_RGBA, GL_UNSIGNED_BYTE, pixels); |
373 | 304 |
374 // It is legal for the WebVR client to submit a new frame now, since | 305 // Check for the magic number written by VRDevice.cpp on submit. |
375 // we've consumed the image. TODO(klausw): would timing be better if | 306 // This helps avoid glitches from garbage data in the render |
376 // we move the "rendered" notification after draw, or suppress | 307 // buffer that can appear during initialization or resizing. These |
377 // the next vsync until that's done? | 308 // often appear as flashes of all-black or all-white pixels. |
378 | 309 if (pixels[1] == kWebVrPosePixelMagicNumbers[0] && |
379 submit_client_->OnSubmitFrameRendered(); | 310 pixels[2] == kWebVrPosePixelMagicNumbers[1]) { |
380 | 311 // Pose is good. |
381 DrawFrame(frame_index); | 312 *frame_index = pixels[0]; |
| 313 last_frame_index_ = pixels[0]; |
| 314 return true; |
| 315 } |
| 316 VLOG(1) << "WebVR: reject decoded pose index " << static_cast<int>(pixels[0]) |
| 317 << ", bad magic number " << static_cast<int>(pixels[1]) << ", " |
| 318 << static_cast<int>(pixels[2]); |
| 319 return false; |
382 } | 320 } |
383 | 321 |
384 void VrShellGl::GvrInit(gvr_context* gvr_api) { | 322 void VrShellGl::GvrInit(gvr_context* gvr_api) { |
385 gvr_api_ = gvr::GvrApi::WrapNonOwned(gvr_api); | 323 gvr_api_ = gvr::GvrApi::WrapNonOwned(gvr_api); |
386 controller_.reset(new VrController(gvr_api)); | 324 controller_.reset(new VrController(gvr_api)); |
387 | 325 |
388 ViewerType viewerType; | 326 ViewerType viewerType; |
389 switch (gvr_api_->GetViewerType()) { | 327 switch (gvr_api_->GetViewerType()) { |
390 case gvr::ViewerType::GVR_VIEWER_TYPE_DAYDREAM: | 328 case gvr::ViewerType::GVR_VIEWER_TYPE_DAYDREAM: |
391 viewerType = ViewerType::DAYDREAM; | 329 viewerType = ViewerType::DAYDREAM; |
392 break; | 330 break; |
393 case gvr::ViewerType::GVR_VIEWER_TYPE_CARDBOARD: | 331 case gvr::ViewerType::GVR_VIEWER_TYPE_CARDBOARD: |
394 viewerType = ViewerType::CARDBOARD; | 332 viewerType = ViewerType::CARDBOARD; |
395 break; | 333 break; |
396 default: | 334 default: |
397 NOTREACHED(); | 335 NOTREACHED(); |
398 viewerType = ViewerType::UNKNOWN_TYPE; | 336 viewerType = ViewerType::UNKNOWN_TYPE; |
399 break; | 337 break; |
400 } | 338 } |
401 UMA_HISTOGRAM_ENUMERATION("VRViewerType", static_cast<int>(viewerType), | 339 UMA_HISTOGRAM_ENUMERATION("VRViewerType", static_cast<int>(viewerType), |
402 static_cast<int>(ViewerType::VIEWER_TYPE_MAX)); | 340 static_cast<int>(ViewerType::VIEWER_TYPE_MAX)); |
403 } | 341 } |
404 | 342 |
405 void VrShellGl::InitializeRenderer() { | 343 void VrShellGl::InitializeRenderer() { |
| 344 // While WebVR is going through the compositor path, it shares |
| 345 // the same texture ID. This will change once it gets its own |
| 346 // surface, but store it separately to avoid future confusion. |
| 347 // TODO(klausw,crbug.com/655722): remove this. |
| 348 webvr_texture_id_ = content_texture_id_; |
| 349 |
406 gvr_api_->InitializeGl(); | 350 gvr_api_->InitializeGl(); |
407 webvr_head_pose_.assign(kPoseRingBufferSize, | 351 webvr_head_pose_.assign(kPoseRingBufferSize, |
408 gvr_api_->GetHeadSpaceFromStartSpaceRotation( | 352 gvr_api_->GetHeadSpaceFromStartSpaceRotation( |
409 gvr::GvrApi::GetTimePointNow())); | 353 gvr::GvrApi::GetTimePointNow())); |
410 | 354 |
411 std::vector<gvr::BufferSpec> specs; | 355 std::vector<gvr::BufferSpec> specs; |
412 // For kFramePrimaryBuffer (primary VrShell and WebVR content) | 356 // For kFramePrimaryBuffer (primary VrShell and WebVR content) |
413 specs.push_back(gvr_api_->CreateBufferSpec()); | 357 specs.push_back(gvr_api_->CreateBufferSpec()); |
414 render_size_primary_ = specs[kFramePrimaryBuffer].GetSize(); | 358 render_size_primary_ = specs[kFramePrimaryBuffer].GetSize(); |
415 render_size_vrshell_ = render_size_primary_; | |
416 | 359 |
417 // For kFrameHeadlockedBuffer (for WebVR insecure content warning). | 360 // For kFrameHeadlockedBuffer (for WebVR insecure content warning). |
418 // Set this up at fixed resolution, the (smaller) FOV gets set below. | 361 // Set this up at fixed resolution, the (smaller) FOV gets set below. |
419 specs.push_back(gvr_api_->CreateBufferSpec()); | 362 specs.push_back(gvr_api_->CreateBufferSpec()); |
420 specs.back().SetSize(kHeadlockedBufferDimensions); | 363 specs.back().SetSize(kHeadlockedBufferDimensions); |
421 render_size_headlocked_ = specs[kFrameHeadlockedBuffer].GetSize(); | 364 render_size_headlocked_ = specs[kFrameHeadlockedBuffer].GetSize(); |
422 | 365 |
423 swap_chain_.reset(new gvr::SwapChain(gvr_api_->CreateSwapChain(specs))); | 366 swap_chain_.reset(new gvr::SwapChain(gvr_api_->CreateSwapChain(specs))); |
424 | 367 |
425 vr_shell_renderer_.reset(new VrShellRenderer()); | 368 vr_shell_renderer_.reset(new VrShellRenderer()); |
(...skipping 256 matching lines...) |
682 std::unique_ptr<blink::WebInputEvent> event) { | 625 std::unique_ptr<blink::WebInputEvent> event) { |
683 DCHECK(input_target != InputTarget::NONE); | 626 DCHECK(input_target != InputTarget::NONE); |
684 auto&& target = input_target == InputTarget::CONTENT | 627 auto&& target = input_target == InputTarget::CONTENT |
685 ? &VrShell::ProcessContentGesture | 628 ? &VrShell::ProcessContentGesture |
686 : &VrShell::ProcessUIGesture; | 629 : &VrShell::ProcessUIGesture; |
687 main_thread_task_runner_->PostTask( | 630 main_thread_task_runner_->PostTask( |
688 FROM_HERE, | 631 FROM_HERE, |
689 base::Bind(target, weak_vr_shell_, base::Passed(std::move(event)))); | 632 base::Bind(target, weak_vr_shell_, base::Passed(std::move(event)))); |
690 } | 633 } |
691 | 634 |
692 void VrShellGl::DrawFrame(int16_t frame_index) { | 635 void VrShellGl::DrawFrame() { |
693 TRACE_EVENT1("gpu", "VrShellGl::DrawFrame", "frame", frame_index); | 636 TRACE_EVENT0("gpu", "VrShellGl::DrawFrame"); |
694 | 637 |
695 // Reset the viewport list to just the pair of viewports for the | 638 // Reset the viewport list to just the pair of viewports for the |
696 // primary buffer each frame. Head-locked viewports get added by | 639 // primary buffer each frame. Head-locked viewports get added by |
697 // DrawVrShell if needed. | 640 // DrawVrShell if needed. |
698 buffer_viewport_list_->SetToRecommendedBufferViewports(); | 641 buffer_viewport_list_->SetToRecommendedBufferViewports(); |
699 | 642 |
700 // If needed, resize the primary buffer for use with WebVR. Resizing | 643 gvr::Frame frame = swap_chain_->AcquireFrame(); |
701 // needs to happen before acquiring a frame. | 644 if (!frame.is_valid()) { |
| 645 return; |
| 646 } |
| 647 frame.BindBuffer(kFramePrimaryBuffer); |
702 if (web_vr_mode_) { | 648 if (web_vr_mode_) { |
703 // Process all pending_bounds_ changes targeted for before this | 649 DrawWebVr(); |
704 // frame, being careful of wrapping frame indices. | 650 } |
| 651 |
| 652 uint16_t frame_index; |
| 653 gvr::Mat4f head_pose; |
| 654 |
| 655 // When using async reprojection, we need to know which pose was used in |
| 656 // the WebVR app for drawing this frame. Due to unknown amounts of |
| 657 // buffering in the compositor and SurfaceTexture, we read the pose number |
| 658 // from a corner pixel. There's no point in doing this for legacy |
| 659 // distortion rendering since that doesn't need a pose, and reading back |
| 660 // pixels is an expensive operation. TODO(klausw,crbug.com/655722): stop |
| 661 // doing this once we have working no-compositor rendering for WebVR. |
| 662 if (web_vr_mode_ && gvr_api_->GetAsyncReprojectionEnabled() && |
| 663 GetPixelEncodedFrameIndex(&frame_index)) { |
| 664 static_assert(!((kPoseRingBufferSize - 1) & kPoseRingBufferSize), |
| 665 "kPoseRingBufferSize must be a power of 2"); |
| 666 head_pose = webvr_head_pose_[frame_index % kPoseRingBufferSize]; |
| 667 // Process all pending_bounds_ changes targeted for before this frame, being |
| 668 // careful of wrapping frame indices. |
705 static constexpr unsigned max = | 669 static constexpr unsigned max = |
706 std::numeric_limits<decltype(frame_index_)>::max(); | 670 std::numeric_limits<decltype(frame_index_)>::max(); |
707 static_assert(max > kPoseRingBufferSize * 2, | 671 static_assert(max > kPoseRingBufferSize * 2, |
708 "To detect wrapping, kPoseRingBufferSize must be smaller " | 672 "To detect wrapping, kPoseRingBufferSize must be smaller " |
709 "than half of frame_index_ range."); | 673 "than half of frame_index_ range."); |
710 while (!pending_bounds_.empty()) { | 674 while (!pending_bounds_.empty()) { |
711 uint16_t index = pending_bounds_.front().first; | 675 uint16_t index = pending_bounds_.front().first; |
712 // If index is less than the frame_index it's possible we've | 676 // If index is less than the frame_index it's possible we've wrapped, so |
713 // wrapped, so we extend the range and 'un-wrap' to account | 677 // we extend the range and 'un-wrap' to account for this. |
714 // for this. | |
715 if (index < frame_index) | 678 if (index < frame_index) |
716 index += max; | 679 index += max; |
717 // If the pending bounds change is for an upcoming frame | 680 // If the pending bounds change is for an upcoming frame within our buffer |
718 // within our buffer size, wait to apply it. Otherwise, apply | 681 // size, wait to apply it. Otherwise, apply it immediately. This |
719 // it immediately. This guarantees that even if we miss many | 682 // guarantees that even if we miss many frames, the queue can't fill up |
720 // frames, the queue can't fill up with stale bounds. | 683 // with stale bounds. |
721 if (index > frame_index && index <= frame_index + kPoseRingBufferSize) | 684 if (index > frame_index && index <= frame_index + kPoseRingBufferSize) |
722 break; | 685 break; |
723 | 686 |
724 const WebVrBounds& bounds = pending_bounds_.front().second; | 687 const BoundsPair& bounds = pending_bounds_.front().second; |
725 webvr_left_viewport_->SetSourceUv(bounds.left_bounds); | 688 webvr_left_viewport_->SetSourceUv(bounds.first); |
726 webvr_right_viewport_->SetSourceUv(bounds.right_bounds); | 689 webvr_right_viewport_->SetSourceUv(bounds.second); |
727 DVLOG(1) << __FUNCTION__ << ": resize from pending_bounds to " | |
728 << bounds.source_size.width << "x" << bounds.source_size.height; | |
729 CreateOrResizeWebVRSurface(bounds.source_size); | |
730 pending_bounds_.pop(); | 690 pending_bounds_.pop(); |
731 } | 691 } |
732 buffer_viewport_list_->SetBufferViewport(GVR_LEFT_EYE, | 692 buffer_viewport_list_->SetBufferViewport(GVR_LEFT_EYE, |
733 *webvr_left_viewport_); | 693 *webvr_left_viewport_); |
734 buffer_viewport_list_->SetBufferViewport(GVR_RIGHT_EYE, | 694 buffer_viewport_list_->SetBufferViewport(GVR_RIGHT_EYE, |
735 *webvr_right_viewport_); | 695 *webvr_right_viewport_); |
736 if (render_size_primary_ != webvr_surface_size_) { | |
737 if (!webvr_surface_size_.width) { | |
738 // Don't try to resize to 0x0 pixels, drop frames until we get a | |
739 // valid size. | |
740 return; | |
741 } | |
742 | |
743 render_size_primary_ = webvr_surface_size_; | |
744 DVLOG(1) << __FUNCTION__ << ": resize GVR to " | |
745 << render_size_primary_.width << "x" | |
746 << render_size_primary_.height; | |
747 swap_chain_->ResizeBuffer(kFramePrimaryBuffer, render_size_primary_); | |
748 } | |
749 } else { | |
750 if (render_size_primary_ != render_size_vrshell_) { | |
751 render_size_primary_ = render_size_vrshell_; | |
752 swap_chain_->ResizeBuffer(kFramePrimaryBuffer, render_size_primary_); | |
753 } | |
754 } | |
755 | |
756 TRACE_EVENT_BEGIN0("gpu", "VrShellGl::AcquireFrame"); | |
757 gvr::Frame frame = swap_chain_->AcquireFrame(); | |
758 TRACE_EVENT_END0("gpu", "VrShellGl::AcquireFrame"); | |
759 if (!frame.is_valid()) { | |
760 return; | |
761 } | |
762 frame.BindBuffer(kFramePrimaryBuffer); | |
763 | |
764 if (web_vr_mode_) { | |
765 DrawWebVr(); | |
766 } | |
767 | |
768 gvr::Mat4f head_pose; | |
769 | |
770 // When using async reprojection, we need to know which pose was | |
771 // used in the WebVR app for drawing this frame and supply it when | |
772 // submitting. Technically we don't need a pose if not reprojecting, | |
773 // but keeping it uninitialized seems likely to cause problems down | |
774 // the road. Copying it is cheaper than fetching a new one. | |
775 if (web_vr_mode_) { | |
776 static_assert(!((kPoseRingBufferSize - 1) & kPoseRingBufferSize), | |
777 "kPoseRingBufferSize must be a power of 2"); | |
778 head_pose = webvr_head_pose_[frame_index % kPoseRingBufferSize]; | |
779 } else { | 696 } else { |
780 gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow(); | 697 gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow(); |
781 target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos; | 698 target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos; |
782 head_pose = gvr_api_->GetHeadSpaceFromStartSpaceRotation(target_time); | 699 head_pose = gvr_api_->GetHeadSpaceFromStartSpaceRotation(target_time); |
783 } | 700 } |
784 | 701 |
785 gvr::Vec3f position = GetTranslation(head_pose); | 702 gvr::Vec3f position = GetTranslation(head_pose); |
786 if (position.x == 0.0f && position.y == 0.0f && position.z == 0.0f) { | 703 if (position.x == 0.0f && position.y == 0.0f && position.z == 0.0f) { |
787 // This appears to be a 3DOF pose without a neck model. Add one. | 704 // This appears to be a 3DOF pose without a neck model. Add one. |
788 // The head pose has redundant data. Assume we're only using the | 705 // The head pose has redundant data. Assume we're only using the |
789 // object_from_reference_matrix, we're not updating position_external. | 706 // object_from_reference_matrix, we're not updating position_external. |
790 // TODO: Not sure what object_from_reference_matrix is. The new API removed | 707 // TODO: Not sure what object_from_reference_matrix is. The new API removed |
791 // it. For now, removing it seems to work fine. | 708 // it. For now, removing it seems to work fine. |
792 gvr_api_->ApplyNeckModel(head_pose, 1.0f); | 709 gvr_api_->ApplyNeckModel(head_pose, 1.0f); |
793 } | 710 } |
794 | 711 |
795 // Update the render position of all UI elements (including desktop). | 712 // Update the render position of all UI elements (including desktop). |
796 scene_->UpdateTransforms(TimeInMicroseconds()); | 713 scene_->UpdateTransforms(TimeInMicroseconds()); |
797 | 714 |
798 { | 715 UpdateController(GetForwardVector(head_pose)); |
799 TRACE_EVENT0("gpu", "VrShellGl::UpdateController"); | |
800 UpdateController(GetForwardVector(head_pose)); | |
801 } | |
802 | 716 |
803 // Finish drawing in the primary buffer, and draw the headlocked buffer | 717 DrawVrShell(head_pose, frame); |
804 // if needed. This must be the last drawing call, this method will | |
805 // return with no frame being bound. | |
806 DrawVrShellAndUnbind(head_pose, frame); | |
807 | 718 |
808 { | 719 frame.Unbind(); |
809 TRACE_EVENT0("gpu", "VrShellGl::Submit"); | 720 frame.Submit(*buffer_viewport_list_, head_pose); |
810 frame.Submit(*buffer_viewport_list_, head_pose); | |
811 } | |
812 | 721 |
813 // No need to swap buffers for surfaceless rendering. | 722 // No need to swap buffers for surfaceless rendering. |
814 if (!surfaceless_rendering_) { | 723 if (!surfaceless_rendering_) { |
815 // TODO(mthiesse): Support asynchronous SwapBuffers. | 724 // TODO(mthiesse): Support asynchronous SwapBuffers. |
816 TRACE_EVENT0("gpu", "VrShellGl::SwapBuffers"); | |
817 surface_->SwapBuffers(); | 725 surface_->SwapBuffers(); |
818 } | 726 } |
819 } | 727 } |
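The wrap handling in the pending_bounds_ loop above is easier to see with concrete numbers; the ring size must be a power of two so that frame_index % kPoseRingBufferSize stays consistent when the 16-bit index wraps. A minimal standalone sketch of the same comparison, assuming 16-bit frame indices and the ring size of 8 used in this file:

#include <cstdint>

// Returns true if a queued bounds update should stay queued (it targets an
// upcoming frame within the ring), false if it should be applied now.
bool ShouldDeferBounds(uint16_t pending_index, uint16_t frame_index) {
  constexpr unsigned kRingSize = 8;  // kPoseRingBufferSize.
  constexpr unsigned kMax = 65535;   // numeric_limits<uint16_t>::max().
  unsigned index = pending_index;
  if (index < frame_index)
    index += kMax;  // 'Un-wrap' an index that already rolled over.
  return index > frame_index && index <= frame_index + kRingSize;
}

// Example: at frame 65534, an update tagged for frame 1 has wrapped;
// un-wrapping maps it near 65536, within 8 frames, so it is deferred.
// An update tagged for frame 65500 is stale and gets applied immediately.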
820 | 728 |
821 void VrShellGl::DrawVrShellAndUnbind(const gvr::Mat4f& head_pose, | 729 void VrShellGl::DrawVrShell(const gvr::Mat4f& head_pose, gvr::Frame& frame) { |
822 gvr::Frame& frame) { | |
823 TRACE_EVENT0("gpu", "VrShellGl::DrawVrShell"); | 730 TRACE_EVENT0("gpu", "VrShellGl::DrawVrShell"); |
824 std::vector<const ContentRectangle*> head_locked_elements; | 731 std::vector<const ContentRectangle*> head_locked_elements; |
825 std::vector<const ContentRectangle*> world_elements; | 732 std::vector<const ContentRectangle*> world_elements; |
826 for (const auto& rect : scene_->GetUiElements()) { | 733 for (const auto& rect : scene_->GetUiElements()) { |
827 if (!rect->IsVisible()) | 734 if (!rect->IsVisible()) |
828 continue; | 735 continue; |
829 if (rect->lock_to_fov) { | 736 if (rect->lock_to_fov) { |
830 head_locked_elements.push_back(rect.get()); | 737 head_locked_elements.push_back(rect.get()); |
831 } else { | 738 } else { |
832 world_elements.push_back(rect.get()); | 739 world_elements.push_back(rect.get()); |
(...skipping 16 matching lines...) |
849 | 756 |
850 const Colorf& backgroundColor = scene_->GetBackgroundColor(); | 757 const Colorf& backgroundColor = scene_->GetBackgroundColor(); |
851 glClearColor(backgroundColor.r, backgroundColor.g, backgroundColor.b, | 758 glClearColor(backgroundColor.r, backgroundColor.g, backgroundColor.b, |
852 backgroundColor.a); | 759 backgroundColor.a); |
853 glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); | 760 glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); |
854 } | 761 } |
855 if (!world_elements.empty()) { | 762 if (!world_elements.empty()) { |
856 DrawUiView(&head_pose, world_elements, render_size_primary_, | 763 DrawUiView(&head_pose, world_elements, render_size_primary_, |
857 kViewportListPrimaryOffset); | 764 kViewportListPrimaryOffset); |
858 } | 765 } |
859 frame.Unbind(); // Done with the primary buffer. | |
860 | 766 |
861 if (!head_locked_elements.empty()) { | 767 if (!head_locked_elements.empty()) { |
862 // Add head-locked viewports. The list gets reset to just | 768 // Add head-locked viewports. The list gets reset to just |
863 // the recommended viewports (for the primary buffer) each frame. | 769 // the recommended viewports (for the primary buffer) each frame. |
864 buffer_viewport_list_->SetBufferViewport( | 770 buffer_viewport_list_->SetBufferViewport( |
865 kViewportListHeadlockedOffset + GVR_LEFT_EYE, | 771 kViewportListHeadlockedOffset + GVR_LEFT_EYE, |
866 *headlocked_left_viewport_); | 772 *headlocked_left_viewport_); |
867 buffer_viewport_list_->SetBufferViewport( | 773 buffer_viewport_list_->SetBufferViewport( |
868 kViewportListHeadlockedOffset + GVR_RIGHT_EYE, | 774 kViewportListHeadlockedOffset + GVR_RIGHT_EYE, |
869 *headlocked_right_viewport_); | 775 *headlocked_right_viewport_); |
870 | 776 |
871 // Bind the headlocked framebuffer. | 777 // Bind the headlocked framebuffer. |
| 778 // TODO(mthiesse): We don't unbind this? Maybe some cleanup is in order |
| 779 // here. |
872 frame.BindBuffer(kFrameHeadlockedBuffer); | 780 frame.BindBuffer(kFrameHeadlockedBuffer); |
873 glClearColor(0.0f, 0.0f, 0.0f, 0.0f); | 781 glClearColor(0.0f, 0.0f, 0.0f, 0.0f); |
874 glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); | 782 glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); |
875 DrawUiView(nullptr, head_locked_elements, render_size_headlocked_, | 783 DrawUiView(nullptr, head_locked_elements, render_size_headlocked_, |
876 kViewportListHeadlockedOffset); | 784 kViewportListHeadlockedOffset); |
877 frame.Unbind(); // Done with the headlocked buffer. | |
878 } | 785 } |
879 } | 786 } |
880 | 787 |
| 788 gvr::Sizei VrShellGl::GetWebVRCompositorSurfaceSize() { |
| 789 // This is a stopgap while we're using the WebVR compositor rendering path. |
| 790 // TODO(klausw,crbug.com/655722): Remove this method and member once we're |
| 791 // using a separate WebVR render surface. |
| 792 return content_tex_physical_size_; |
| 793 } |
| 794 |
881 void VrShellGl::DrawUiView(const gvr::Mat4f* head_pose, | 795 void VrShellGl::DrawUiView(const gvr::Mat4f* head_pose, |
882 const std::vector<const ContentRectangle*>& elements, | 796 const std::vector<const ContentRectangle*>& elements, |
883 const gvr::Sizei& render_size, | 797 const gvr::Sizei& render_size, |
884 int viewport_offset) { | 798 int viewport_offset) { |
885 TRACE_EVENT0("gpu", "VrShellGl::DrawUiView"); | 799 TRACE_EVENT0("gpu", "VrShellGl::DrawUiView"); |
886 | 800 |
887 gvr::Mat4f view_matrix; | 801 gvr::Mat4f view_matrix; |
888 if (head_pose) { | 802 if (head_pose) { |
889 view_matrix = *head_pose; | 803 view_matrix = *head_pose; |
890 } else { | 804 } else { |
(...skipping 186 matching lines...) |
1077 void VrShellGl::DrawWebVr() { | 991 void VrShellGl::DrawWebVr() { |
1078 TRACE_EVENT0("gpu", "VrShellGl::DrawWebVr"); | 992 TRACE_EVENT0("gpu", "VrShellGl::DrawWebVr"); |
1079 // Don't need face culling, depth testing, blending, etc. Turn it all off. | 993 // Don't need face culling, depth testing, blending, etc. Turn it all off. |
1080 glDisable(GL_CULL_FACE); | 994 glDisable(GL_CULL_FACE); |
1081 glDepthMask(GL_FALSE); | 995 glDepthMask(GL_FALSE); |
1082 glDisable(GL_DEPTH_TEST); | 996 glDisable(GL_DEPTH_TEST); |
1083 glDisable(GL_SCISSOR_TEST); | 997 glDisable(GL_SCISSOR_TEST); |
1084 glDisable(GL_BLEND); | 998 glDisable(GL_BLEND); |
1085 glDisable(GL_POLYGON_OFFSET_FILL); | 999 glDisable(GL_POLYGON_OFFSET_FILL); |
1086 | 1000 |
1087 // We're redrawing over the entire viewport, but it's generally more | 1001 glViewport(0, 0, render_size_primary_.width, render_size_primary_.height); |
1088 // efficient on mobile tiling GPUs to clear anyway as a hint that | |
1089 // we're done with the old content. TODO(klausw,crbug.com/700389): | |
1090 // investigate using glDiscardFramebufferEXT here since that's more | |
1091 // efficient on desktop, but it would need a capability check since | |
1092 // it's not supported on older devices such as Nexus 5X. | |
1093 glClear(GL_COLOR_BUFFER_BIT); | |
1094 | |
1095 glViewport(0, 0, webvr_surface_size_.width, webvr_surface_size_.height); | |
1096 vr_shell_renderer_->GetWebVrRenderer()->Draw(webvr_texture_id_); | 1002 vr_shell_renderer_->GetWebVrRenderer()->Draw(webvr_texture_id_); |
1097 } | 1003 } |
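The TODO removed above (crbug.com/700389) suggests glDiscardFramebufferEXT as a cheaper "old contents are dead" hint than glClear on hardware that supports it. A hedged sketch of that capability-checked alternative, assuming the draw target is a color-attachment FBO like the one GVR binds; this is not part of the change under review:

#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>
#include <cstring>

// Hint to a tiling GPU that the previous framebuffer contents are not needed,
// falling back to a clear where GL_EXT_discard_framebuffer is unavailable
// (e.g. older devices such as the Nexus 5X mentioned above). On Android the
// EXT entry point may need to be looked up via eglGetProcAddress.
void HintOldContentsUnneeded() {
  const char* extensions =
      reinterpret_cast<const char*>(glGetString(GL_EXTENSIONS));
  const GLenum attachments[] = {GL_COLOR_ATTACHMENT0};
  if (extensions && std::strstr(extensions, "GL_EXT_discard_framebuffer")) {
    glDiscardFramebufferEXT(GL_FRAMEBUFFER, 1, attachments);
  } else {
    glClear(GL_COLOR_BUFFER_BIT);
  }
}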
1098 | 1004 |
1099 void VrShellGl::OnTriggerEvent() { | 1005 void VrShellGl::OnTriggerEvent() { |
1100 // Set a flag to handle this on the render thread at the next frame. | 1006 // Set a flag to handle this on the render thread at the next frame. |
1101 touch_pending_ = true; | 1007 touch_pending_ = true; |
1102 } | 1008 } |
1103 | 1009 |
1104 void VrShellGl::OnPause() { | 1010 void VrShellGl::OnPause() { |
1105 vsync_task_.Cancel(); | 1011 vsync_task_.Cancel(); |
(...skipping 10 matching lines...) |
1116 OnVSync(); | 1022 OnVSync(); |
1117 } | 1023 } |
1118 } | 1024 } |
1119 | 1025 |
1120 void VrShellGl::SetWebVrMode(bool enabled) { | 1026 void VrShellGl::SetWebVrMode(bool enabled) { |
1121 web_vr_mode_ = enabled; | 1027 web_vr_mode_ = enabled; |
1122 } | 1028 } |
1123 | 1029 |
1124 void VrShellGl::UpdateWebVRTextureBounds(int16_t frame_index, | 1030 void VrShellGl::UpdateWebVRTextureBounds(int16_t frame_index, |
1125 const gvr::Rectf& left_bounds, | 1031 const gvr::Rectf& left_bounds, |
1126 const gvr::Rectf& right_bounds, | 1032 const gvr::Rectf& right_bounds) { |
1127 const gvr::Sizei& source_size) { | |
1128 if (frame_index < 0) { | 1033 if (frame_index < 0) { |
1129 webvr_left_viewport_->SetSourceUv(left_bounds); | 1034 webvr_left_viewport_->SetSourceUv(left_bounds); |
1130 webvr_right_viewport_->SetSourceUv(right_bounds); | 1035 webvr_right_viewport_->SetSourceUv(right_bounds); |
1131 CreateOrResizeWebVRSurface(source_size); | |
1132 } else { | 1036 } else { |
1133 pending_bounds_.emplace( | 1037 pending_bounds_.emplace( |
1134 frame_index, WebVrBounds(left_bounds, right_bounds, source_size)); | 1038 std::make_pair(frame_index, std::make_pair(left_bounds, right_bounds))); |
1135 } | 1039 } |
1136 } | 1040 } |
1137 | 1041 |
1138 void VrShellGl::ContentBoundsChanged(int width, int height) { | 1042 void VrShellGl::ContentBoundsChanged(int width, int height) { |
1139 TRACE_EVENT0("gpu", "VrShellGl::ContentBoundsChanged"); | 1043 TRACE_EVENT0("gpu", "VrShellGl::ContentBoundsChanged"); |
1140 content_tex_css_width_ = width; | 1044 content_tex_css_width_ = width; |
1141 content_tex_css_height_ = height; | 1045 content_tex_css_height_ = height; |
1142 } | 1046 } |
1143 | 1047 |
1144 void VrShellGl::ContentPhysicalBoundsChanged(int width, int height) { | 1048 void VrShellGl::ContentPhysicalBoundsChanged(int width, int height) { |
(...skipping 13 matching lines...) |
1158 ui_surface_texture_->SetDefaultBufferSize(width, height); | 1062 ui_surface_texture_->SetDefaultBufferSize(width, height); |
1159 ui_tex_physical_size_.width = width; | 1063 ui_tex_physical_size_.width = width; |
1160 ui_tex_physical_size_.height = height; | 1064 ui_tex_physical_size_.height = height; |
1161 } | 1065 } |
1162 | 1066 |
1163 base::WeakPtr<VrShellGl> VrShellGl::GetWeakPtr() { | 1067 base::WeakPtr<VrShellGl> VrShellGl::GetWeakPtr() { |
1164 return weak_ptr_factory_.GetWeakPtr(); | 1068 return weak_ptr_factory_.GetWeakPtr(); |
1165 } | 1069 } |
1166 | 1070 |
1167 void VrShellGl::OnVSync() { | 1071 void VrShellGl::OnVSync() { |
1168 while (premature_received_frames_ > 0) { | |
1169 TRACE_EVENT0("gpu", "VrShellGl::OnWebVRFrameAvailableRetry"); | |
1170 --premature_received_frames_; | |
1171 OnWebVRFrameAvailable(); | |
1172 } | |
1173 | |
1174 base::TimeTicks now = base::TimeTicks::Now(); | 1072 base::TimeTicks now = base::TimeTicks::Now(); |
1175 base::TimeTicks target; | 1073 base::TimeTicks target; |
1176 | 1074 |
1177 // Don't send VSyncs until we have a timebase/interval. | 1075 // Don't send VSyncs until we have a timebase/interval. |
1178 if (vsync_interval_.is_zero()) | 1076 if (vsync_interval_.is_zero()) |
1179 return; | 1077 return; |
1180 target = now + vsync_interval_; | 1078 target = now + vsync_interval_; |
1181 int64_t intervals = (target - vsync_timebase_) / vsync_interval_; | 1079 int64_t intervals = (target - vsync_timebase_) / vsync_interval_; |
1182 target = vsync_timebase_ + intervals * vsync_interval_; | 1080 target = vsync_timebase_ + intervals * vsync_interval_; |
1183 task_runner_->PostDelayedTask(FROM_HERE, vsync_task_.callback(), | 1081 task_runner_->PostDelayedTask(FROM_HERE, vsync_task_.callback(), |
1184 target - now); | 1082 target - now); |
1185 | 1083 |
1186 base::TimeDelta time = intervals * vsync_interval_; | 1084 base::TimeDelta time = intervals * vsync_interval_; |
1187 if (!callback_.is_null()) { | 1085 if (!callback_.is_null()) { |
1188 SendVSync(time, base::ResetAndReturn(&callback_)); | 1086 SendVSync(time, base::ResetAndReturn(&callback_)); |
1189 } else { | 1087 } else { |
1190 pending_vsync_ = true; | 1088 pending_vsync_ = true; |
1191 pending_time_ = time; | 1089 pending_time_ = time; |
1192 } | 1090 } |
1193 if (!web_vr_mode_) { | 1091 DrawFrame(); |
1194 DrawFrame(-1); | |
1195 } | |
1196 } | 1092 } |
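The timebase arithmetic in OnVSync() snaps the next wakeup onto the display's vsync grid. A small standalone sketch of the same math with raw nanosecond counts, purely illustrative:

#include <cstdint>

// Push the target one interval past 'now', then snap it back onto the grid
// defined by (timebase + k * interval), mirroring the TimeTicks math above.
int64_t NextVsyncTarget(int64_t now_ns, int64_t timebase_ns,
                        int64_t interval_ns) {
  int64_t target_ns = now_ns + interval_ns;
  int64_t intervals = (target_ns - timebase_ns) / interval_ns;
  return timebase_ns + intervals * interval_ns;
}

// Example at ~60 Hz (interval 16,666,667 ns, timebase 0): for now =
// 40,000,000 ns the raw target is 56,666,667 ns, intervals = 3, and the
// snapped target is 50,000,001 ns, so the task is posted roughly 10 ms out.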
1197 | 1093 |
1198 void VrShellGl::OnRequest(device::mojom::VRVSyncProviderRequest request) { | 1094 void VrShellGl::OnRequest(device::mojom::VRVSyncProviderRequest request) { |
1199 binding_.Close(); | 1095 binding_.Close(); |
1200 binding_.Bind(std::move(request)); | 1096 binding_.Bind(std::move(request)); |
1201 } | 1097 } |
1202 | 1098 |
1203 void VrShellGl::GetVSync(const GetVSyncCallback& callback) { | 1099 void VrShellGl::GetVSync(const GetVSyncCallback& callback) { |
1204 if (!pending_vsync_) { | 1100 if (!pending_vsync_) { |
1205 if (!callback_.is_null()) { | 1101 if (!callback_.is_null()) { |
(...skipping 49 matching lines...) |
1255 void VrShellGl::ResetPose() { | 1151 void VrShellGl::ResetPose() { |
1256 // Should never call RecenterTracking when using with Daydream viewers. On | 1152 // Should never call RecenterTracking when using with Daydream viewers. On |
1257 // those devices recentering should only be done via the controller. | 1153 // those devices recentering should only be done via the controller. |
1258 if (gvr_api_ && gvr_api_->GetViewerType() == GVR_VIEWER_TYPE_CARDBOARD) | 1154 if (gvr_api_ && gvr_api_->GetViewerType() == GVR_VIEWER_TYPE_CARDBOARD) |
1259 gvr_api_->RecenterTracking(); | 1155 gvr_api_->RecenterTracking(); |
1260 } | 1156 } |
1261 | 1157 |
1262 void VrShellGl::CreateVRDisplayInfo( | 1158 void VrShellGl::CreateVRDisplayInfo( |
1263 const base::Callback<void(device::mojom::VRDisplayInfoPtr)>& callback, | 1159 const base::Callback<void(device::mojom::VRDisplayInfoPtr)>& callback, |
1264 uint32_t device_id) { | 1160 uint32_t device_id) { |
1265 // This assumes that the initial webvr_surface_size_ was set to the | |
1266 // appropriate recommended render resolution as the default size during | |
1267 // InitializeGl. Revisit if the initialization order changes. | |
1268 device::mojom::VRDisplayInfoPtr info = VrShell::CreateVRDisplayInfo( | 1161 device::mojom::VRDisplayInfoPtr info = VrShell::CreateVRDisplayInfo( |
1269 gvr_api_.get(), webvr_surface_size_, device_id); | 1162 gvr_api_.get(), content_tex_physical_size_, device_id); |
1270 main_thread_task_runner_->PostTask( | 1163 main_thread_task_runner_->PostTask( |
1271 FROM_HERE, | 1164 FROM_HERE, |
1272 base::Bind(&RunVRDisplayInfoCallback, callback, base::Passed(&info))); | 1165 base::Bind(&RunVRDisplayInfoCallback, callback, base::Passed(&info))); |
1273 } | 1166 } |
1274 | 1167 |
1275 } // namespace vr_shell | 1168 } // namespace vr_shell |