| OLD | NEW |
| 1 // Copyright 2016 The Chromium Authors. All rights reserved. | 1 // Copyright 2016 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "chrome/browser/android/vr_shell/vr_shell_gl.h" | 5 #include "chrome/browser/android/vr_shell/vr_shell_gl.h" |
| 6 | 6 |
| 7 #include <utility> |
| 8 |
| 7 #include "base/android/jni_android.h" | 9 #include "base/android/jni_android.h" |
| 8 #include "base/memory/ptr_util.h" | 10 #include "base/memory/ptr_util.h" |
| 9 #include "base/metrics/histogram_macros.h" | 11 #include "base/metrics/histogram_macros.h" |
| 10 #include "base/threading/thread_task_runner_handle.h" | 12 #include "base/threading/thread_task_runner_handle.h" |
| 11 #include "chrome/browser/android/vr_shell/ui_elements.h" | 13 #include "chrome/browser/android/vr_shell/ui_elements.h" |
| 12 #include "chrome/browser/android/vr_shell/ui_scene.h" | 14 #include "chrome/browser/android/vr_shell/ui_scene.h" |
| 13 #include "chrome/browser/android/vr_shell/vr_controller.h" | 15 #include "chrome/browser/android/vr_shell/vr_controller.h" |
| 14 #include "chrome/browser/android/vr_shell/vr_gl_util.h" | 16 #include "chrome/browser/android/vr_shell/vr_gl_util.h" |
| 15 #include "chrome/browser/android/vr_shell/vr_input_manager.h" | 17 #include "chrome/browser/android/vr_shell/vr_input_manager.h" |
| 16 #include "chrome/browser/android/vr_shell/vr_math.h" | 18 #include "chrome/browser/android/vr_shell/vr_math.h" |
| 17 #include "chrome/browser/android/vr_shell/vr_shell.h" | 19 #include "chrome/browser/android/vr_shell/vr_shell.h" |
| 20 #include "chrome/browser/android/vr_shell/vr_shell_delegate.h" |
| 18 #include "chrome/browser/android/vr_shell/vr_shell_renderer.h" | 21 #include "chrome/browser/android/vr_shell/vr_shell_renderer.h" |
| 22 #include "device/vr/android/gvr/gvr_device.h" |
| 19 #include "third_party/WebKit/public/platform/WebInputEvent.h" | 23 #include "third_party/WebKit/public/platform/WebInputEvent.h" |
| 20 #include "ui/gfx/vsync_provider.h" | |
| 21 #include "ui/gl/android/scoped_java_surface.h" | 24 #include "ui/gl/android/scoped_java_surface.h" |
| 22 #include "ui/gl/android/surface_texture.h" | 25 #include "ui/gl/android/surface_texture.h" |
| 23 #include "ui/gl/gl_bindings.h" | 26 #include "ui/gl/gl_bindings.h" |
| 24 #include "ui/gl/gl_context.h" | 27 #include "ui/gl/gl_context.h" |
| 25 #include "ui/gl/gl_surface.h" | 28 #include "ui/gl/gl_surface.h" |
| 26 #include "ui/gl/init/gl_factory.h" | 29 #include "ui/gl/init/gl_factory.h" |
| 27 | 30 |
| 28 namespace vr_shell { | 31 namespace vr_shell { |
| 29 | 32 |
| 30 namespace { | 33 namespace { |
| 31 // Constant taken from treasure_hunt demo. | 34 // TODO(mthiesse): If gvr::PlatformInfo().GetPosePredictionTime() is ever |
| 35 // exposed, use that instead (it defaults to 50ms on most platforms). |
| 32 static constexpr long kPredictionTimeWithoutVsyncNanos = 50000000; | 36 static constexpr long kPredictionTimeWithoutVsyncNanos = 50000000; |
| 33 | 37 |
| 34 static constexpr float kZNear = 0.1f; | 38 static constexpr float kZNear = 0.1f; |
| 35 static constexpr float kZFar = 1000.0f; | 39 static constexpr float kZFar = 1000.0f; |
| 36 | 40 |
| 37 // Screen angle in degrees. 0 = vertical, positive = top closer. | 41 // Screen angle in degrees. 0 = vertical, positive = top closer. |
| 38 static constexpr float kDesktopScreenTiltDefault = 0; | 42 static constexpr float kDesktopScreenTiltDefault = 0; |
| 39 | 43 |
| 40 static constexpr float kReticleWidth = 0.025f; | 44 static constexpr float kReticleWidth = 0.025f; |
| 41 static constexpr float kReticleHeight = 0.025f; | 45 static constexpr float kReticleHeight = 0.025f; |
| (...skipping 91 matching lines...) | |
| 133 return mouse_event; | 137 return mouse_event; |
| 134 } | 138 } |
| 135 | 139 |
| 136 enum class ViewerType { | 140 enum class ViewerType { |
| 137 UNKNOWN_TYPE = 0, | 141 UNKNOWN_TYPE = 0, |
| 138 CARDBOARD = 1, | 142 CARDBOARD = 1, |
| 139 DAYDREAM = 2, | 143 DAYDREAM = 2, |
| 140 VIEWER_TYPE_MAX, | 144 VIEWER_TYPE_MAX, |
| 141 }; | 145 }; |
| 142 | 146 |
| 143 int GetPixelEncodedPoseIndexByte() { | |
| 144 TRACE_EVENT0("gpu", "VrShellGl::GetPixelEncodedPoseIndex"); | |
| 145 // Read the pose index encoded in a bottom left pixel as color values. | |
| 146 // See also third_party/WebKit/Source/modules/vr/VRDisplay.cpp which | |
| 147 // encodes the pose index, and device/vr/android/gvr/gvr_device.cc | |
| 148 // which tracks poses. Returns the low byte (0..255) if valid, or -1 | |
| 149 // if not valid due to bad magic number. | |
| 150 uint8_t pixels[4]; | |
| 151 // Assume we're reading from the framebuffer we just wrote to. | |
| 152 // That's true currently, we may need to use glReadBuffer(GL_BACK) | |
| 153 // or equivalent if the rendering setup changes in the future. | |
| 154 glReadPixels(0, 0, 1, 1, GL_RGBA, GL_UNSIGNED_BYTE, pixels); | |
| 155 | |
| 156 // Check for the magic number written by VRDevice.cpp on submit. | |
| 157 // This helps avoid glitches from garbage data in the render | |
| 158 // buffer that can appear during initialization or resizing. These | |
| 159 // often appear as flashes of all-black or all-white pixels. | |
| 160 if (pixels[1] == kWebVrPosePixelMagicNumbers[0] && | |
| 161 pixels[2] == kWebVrPosePixelMagicNumbers[1]) { | |
| 162 // Pose is good. | |
| 163 return pixels[0]; | |
| 164 } | |
| 165 VLOG(1) << "WebVR: reject decoded pose index " << (int)pixels[0] << | |
| 166 ", bad magic number " << (int)pixels[1] << ", " << (int)pixels[2]; | |
| 167 return -1; | |
| 168 } | |
| 169 | |
| 170 int64_t TimeInMicroseconds() { | 147 int64_t TimeInMicroseconds() { |
| 171 return std::chrono::duration_cast<std::chrono::microseconds>( | 148 return std::chrono::duration_cast<std::chrono::microseconds>( |
| 172 std::chrono::steady_clock::now().time_since_epoch()).count(); | 149 std::chrono::steady_clock::now().time_since_epoch()).count(); |
| 173 } | 150 } |
| 174 | 151 |
| 175 void WaitForSwapAck(const base::Closure& callback, gfx::SwapResult result) { | |
| 176 callback.Run(); | |
| 177 } | |
| 178 | |
| 179 } // namespace | 152 } // namespace |
| 180 | 153 |
| 181 VrShellGl::VrShellGl( | 154 VrShellGl::VrShellGl( |
| 182 const base::WeakPtr<VrShell>& weak_vr_shell, | 155 const base::WeakPtr<VrShell>& weak_vr_shell, |
| 183 const base::WeakPtr<VrInputManager>& content_input_manager, | 156 const base::WeakPtr<VrInputManager>& content_input_manager, |
| 184 const base::WeakPtr<VrInputManager>& ui_input_manager, | 157 const base::WeakPtr<VrInputManager>& ui_input_manager, |
| 158 const base::WeakPtr<VrShellDelegate>& delegate_provider, |
| 185 scoped_refptr<base::SingleThreadTaskRunner> main_thread_task_runner, | 159 scoped_refptr<base::SingleThreadTaskRunner> main_thread_task_runner, |
| 186 gvr_context* gvr_api, | 160 gvr_context* gvr_api, |
| 187 bool initially_web_vr, | 161 bool initially_web_vr, |
| 188 bool reprojected_rendering) | 162 bool reprojected_rendering) |
| 189 : web_vr_mode_(initially_web_vr), | 163 : web_vr_mode_(initially_web_vr), |
| 190 surfaceless_rendering_(reprojected_rendering), | 164 surfaceless_rendering_(reprojected_rendering), |
| 191 task_runner_(base::ThreadTaskRunnerHandle::Get()), | 165 task_runner_(base::ThreadTaskRunnerHandle::Get()), |
| 166 binding_(this), |
| 192 weak_vr_shell_(weak_vr_shell), | 167 weak_vr_shell_(weak_vr_shell), |
| 193 content_input_manager_(content_input_manager), | 168 content_input_manager_(content_input_manager), |
| 194 ui_input_manager_(ui_input_manager), | 169 ui_input_manager_(ui_input_manager), |
| 170 delegate_provider_(delegate_provider), |
| 195 main_thread_task_runner_(std::move(main_thread_task_runner)), | 171 main_thread_task_runner_(std::move(main_thread_task_runner)), |
| 196 weak_ptr_factory_(this) { | 172 weak_ptr_factory_(this) { |
| 197 GvrInit(gvr_api); | 173 GvrInit(gvr_api); |
| 198 } | 174 } |
| 199 | 175 |
| 200 VrShellGl::~VrShellGl() { | 176 VrShellGl::~VrShellGl() { |
| 201 draw_task_.Cancel(); | 177 vsync_task_.Cancel(); |
| 178 if (!callback_.is_null()) |
| 179 callback_.Run(nullptr, base::TimeDelta()); |
| 180 if (binding_.is_bound()) { |
| 181 main_thread_task_runner_->PostTask(FROM_HERE, base::Bind( |
| 182 &VrShellDelegate::OnVRVsyncProviderRequest, delegate_provider_, |
| 183 base::Passed(binding_.Unbind()))); |
| 184 } |
| 202 } | 185 } |
| 203 | 186 |
| 204 void VrShellGl::Initialize() { | 187 void VrShellGl::Initialize() { |
| 205 gvr::Mat4f identity; | |
| 206 SetIdentityM(identity); | |
| 207 webvr_head_pose_.resize(kPoseRingBufferSize, identity); | |
| 208 webvr_head_pose_valid_.resize(kPoseRingBufferSize, false); | |
| 209 | |
| 210 scene_.reset(new UiScene); | 188 scene_.reset(new UiScene); |
| 211 | 189 |
| 212 if (surfaceless_rendering_) { | 190 if (surfaceless_rendering_) { |
| 213 // If we're rendering surfaceless, we'll never get a java surface to render | 191 // If we're rendering surfaceless, we'll never get a java surface to render |
| 214 // into, so we can initialize GL right away. | 192 // into, so we can initialize GL right away. |
| 215 InitializeGl(nullptr); | 193 InitializeGl(nullptr); |
| 216 } | 194 } |
| 217 } | 195 } |
| 218 | 196 |
| 219 void VrShellGl::InitializeGl(gfx::AcceleratedWidget window) { | 197 void VrShellGl::InitializeGl(gfx::AcceleratedWidget window) { |
| (...skipping 22 matching lines...) | |
| 242 LOG(ERROR) << "gl::init::CreateGLContext failed"; | 220 LOG(ERROR) << "gl::init::CreateGLContext failed"; |
| 243 ForceExitVr(); | 221 ForceExitVr(); |
| 244 return; | 222 return; |
| 245 } | 223 } |
| 246 if (!context_->MakeCurrent(surface_.get())) { | 224 if (!context_->MakeCurrent(surface_.get())) { |
| 247 LOG(ERROR) << "gl::GLContext::MakeCurrent() failed"; | 225 LOG(ERROR) << "gl::GLContext::MakeCurrent() failed"; |
| 248 ForceExitVr(); | 226 ForceExitVr(); |
| 249 return; | 227 return; |
| 250 } | 228 } |
| 251 | 229 |
| 252 // TODO(mthiesse): We don't appear to have a VSync provider ever here. This is | |
| 253 // sort of okay, because the GVR swap chain will block if we render too fast, | |
| 254 // but we should address this properly. | |
| 255 if (surface_->GetVSyncProvider()) { | |
| 256 surface_->GetVSyncProvider()->GetVSyncParameters(base::Bind( | |
| 257 &VrShellGl::UpdateVSyncParameters, weak_ptr_factory_.GetWeakPtr())); | |
| 258 } else { | |
| 259 LOG(ERROR) << "No VSync Provider"; | |
| 260 } | |
| 261 | |
| 262 unsigned int textures[2]; | 230 unsigned int textures[2]; |
| 263 glGenTextures(2, textures); | 231 glGenTextures(2, textures); |
| 264 ui_texture_id_ = textures[0]; | 232 ui_texture_id_ = textures[0]; |
| 265 content_texture_id_ = textures[1]; | 233 content_texture_id_ = textures[1]; |
| 266 ui_surface_texture_ = gl::SurfaceTexture::Create(ui_texture_id_); | 234 ui_surface_texture_ = gl::SurfaceTexture::Create(ui_texture_id_); |
| 267 content_surface_texture_ = gl::SurfaceTexture::Create(content_texture_id_); | 235 content_surface_texture_ = gl::SurfaceTexture::Create(content_texture_id_); |
| 268 ui_surface_.reset(new gl::ScopedJavaSurface(ui_surface_texture_.get())); | 236 ui_surface_.reset(new gl::ScopedJavaSurface(ui_surface_texture_.get())); |
| 269 content_surface_.reset(new gl::ScopedJavaSurface( | 237 content_surface_.reset(new gl::ScopedJavaSurface( |
| 270 content_surface_texture_.get())); | 238 content_surface_texture_.get())); |
| 271 ui_surface_texture_->SetFrameAvailableCallback(base::Bind( | 239 ui_surface_texture_->SetFrameAvailableCallback(base::Bind( |
| 272 &VrShellGl::OnUIFrameAvailable, weak_ptr_factory_.GetWeakPtr())); | 240 &VrShellGl::OnUIFrameAvailable, weak_ptr_factory_.GetWeakPtr())); |
| 273 content_surface_texture_->SetFrameAvailableCallback(base::Bind( | 241 content_surface_texture_->SetFrameAvailableCallback(base::Bind( |
| 274 &VrShellGl::OnContentFrameAvailable, weak_ptr_factory_.GetWeakPtr())); | 242 &VrShellGl::OnContentFrameAvailable, weak_ptr_factory_.GetWeakPtr())); |
| 275 | 243 |
| 276 content_surface_texture_->SetDefaultBufferSize( | 244 content_surface_texture_->SetDefaultBufferSize( |
| 277 content_tex_physical_size_.width, content_tex_physical_size_.height); | 245 content_tex_physical_size_.width, content_tex_physical_size_.height); |
| 278 ui_surface_texture_->SetDefaultBufferSize(ui_tex_physical_size_.width, | 246 ui_surface_texture_->SetDefaultBufferSize(ui_tex_physical_size_.width, |
| 279 ui_tex_physical_size_.height); | 247 ui_tex_physical_size_.height); |
| 280 | 248 |
| 281 main_thread_task_runner_->PostTask(FROM_HERE, base::Bind( | 249 main_thread_task_runner_->PostTask(FROM_HERE, base::Bind( |
| 282 &VrShell::SurfacesChanged, weak_vr_shell_, | 250 &VrShell::SurfacesChanged, weak_vr_shell_, |
| 283 content_surface_->j_surface().obj(), | 251 content_surface_->j_surface().obj(), |
| 284 ui_surface_->j_surface().obj())); | 252 ui_surface_->j_surface().obj())); |
| 285 | 253 |
| 286 InitializeRenderer(); | 254 InitializeRenderer(); |
| 287 | 255 |
| 288 draw_task_.Reset(base::Bind(&VrShellGl::DrawFrame, base::Unretained(this))); | 256 vsync_task_.Reset(base::Bind(&VrShellGl::OnVSync, base::Unretained(this))); |
| 289 ScheduleNextDrawFrame(); | 257 OnVSync(); |
| 290 | 258 |
| 291 ready_to_draw_ = true; | 259 ready_to_draw_ = true; |
| 292 } | 260 } |
| 293 | 261 |
| 294 void VrShellGl::OnUIFrameAvailable() { | 262 void VrShellGl::OnUIFrameAvailable() { |
| 295 ui_surface_texture_->UpdateTexImage(); | 263 ui_surface_texture_->UpdateTexImage(); |
| 296 } | 264 } |
| 297 | 265 |
| 298 void VrShellGl::OnContentFrameAvailable() { | 266 void VrShellGl::OnContentFrameAvailable() { |
| 299 content_surface_texture_->UpdateTexImage(); | 267 content_surface_texture_->UpdateTexImage(); |
| 268 received_frame_ = true; |
| 269 } |
| 270 |
| 271 bool VrShellGl::GetPixelEncodedPoseIndexByte(int* pose_index) { |
| 272 TRACE_EVENT0("gpu", "VrShellGl::GetPixelEncodedPoseIndex"); |
| 273 if (!received_frame_) { |
| 274 *pose_index = last_pose_; |
| 275 return true; |
| 276 } |
| 277 received_frame_ = false; |
| 278 |
| 279 // Read the pose index encoded in a bottom left pixel as color values. |
| 280 // See also third_party/WebKit/Source/modules/vr/VRDisplay.cpp which |
| 281 // encodes the pose index, and device/vr/android/gvr/gvr_device.cc |
| | 282 // which tracks poses. Returns true and sets *pose_index to the |
| | 283 // low byte (0..255) if valid, or false on a bad magic number. |
| 284 uint8_t pixels[4]; |
| 285 // Assume we're reading from the framebuffer we just wrote to. |
| | 286 // That's true currently, but we may need to use glReadBuffer(GL_BACK) |
| 287 // or equivalent if the rendering setup changes in the future. |
| 288 glReadPixels(0, 0, 1, 1, GL_RGBA, GL_UNSIGNED_BYTE, pixels); |
| 289 |
| 290 // Check for the magic number written by VRDevice.cpp on submit. |
| 291 // This helps avoid glitches from garbage data in the render |
| 292 // buffer that can appear during initialization or resizing. These |
| 293 // often appear as flashes of all-black or all-white pixels. |
| 294 if (pixels[1] == kWebVrPosePixelMagicNumbers[0] && |
| 295 pixels[2] == kWebVrPosePixelMagicNumbers[1]) { |
| 296 // Pose is good. |
| 297 *pose_index = pixels[0]; |
| 298 last_pose_ = pixels[0]; |
| 299 return true; |
| 300 } |
| 301 VLOG(1) << "WebVR: reject decoded pose index " << (int)pixels[0] |
| 302 << ", bad magic number " << (int)pixels[1] << ", " |
| 303 << (int)pixels[2]; |
| 304 return false; |
| 300 } | 305 } |
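
For context, here is a hedged sketch of the submit-side encoding that this read-back expects. The real writer lives in Blink's VRDisplay.cpp and is not part of this file; the function name and the two magic byte values below are placeholders (the real values are the kWebVrPosePixelMagicNumbers defined above the visible hunk), and the channel layout is inferred from the decode: R carries the low byte of the pose index, G/B carry the two magic bytes.

    #include <cstdint>

    // Placeholder magic values; the real ones are kWebVrPosePixelMagicNumbers.
    constexpr uint8_t kMagic0 = 0xAA;
    constexpr uint8_t kMagic1 = 0x55;

    // Write the pose index's low byte plus the magic bytes into one RGBA pixel,
    // matching what GetPixelEncodedPoseIndexByte() reads back. Alpha is not
    // checked by the reader, so 0xFF here is just a convention.
    void EncodePoseIndexPixel(uint32_t pose_index, uint8_t rgba[4]) {
      rgba[0] = static_cast<uint8_t>(pose_index);  // low byte, 0..255
      rgba[1] = kMagic0;
      rgba[2] = kMagic1;
      rgba[3] = 0xFF;
    }
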
| 301 | 306 |
| 302 void VrShellGl::GvrInit(gvr_context* gvr_api) { | 307 void VrShellGl::GvrInit(gvr_context* gvr_api) { |
| 303 gvr_api_ = gvr::GvrApi::WrapNonOwned(gvr_api); | 308 gvr_api_ = gvr::GvrApi::WrapNonOwned(gvr_api); |
| 304 controller_.reset(new VrController(gvr_api)); | 309 controller_.reset(new VrController(gvr_api)); |
| 305 | 310 |
| 306 ViewerType viewerType; | 311 ViewerType viewerType; |
| 307 switch (gvr_api_->GetViewerType()) { | 312 switch (gvr_api_->GetViewerType()) { |
| 308 case gvr::ViewerType::GVR_VIEWER_TYPE_DAYDREAM: | 313 case gvr::ViewerType::GVR_VIEWER_TYPE_DAYDREAM: |
| 309 viewerType = ViewerType::DAYDREAM; | 314 viewerType = ViewerType::DAYDREAM; |
| 310 break; | 315 break; |
| 311 case gvr::ViewerType::GVR_VIEWER_TYPE_CARDBOARD: | 316 case gvr::ViewerType::GVR_VIEWER_TYPE_CARDBOARD: |
| 312 viewerType = ViewerType::CARDBOARD; | 317 viewerType = ViewerType::CARDBOARD; |
| 313 break; | 318 break; |
| 314 default: | 319 default: |
| 315 NOTREACHED(); | 320 NOTREACHED(); |
| 316 viewerType = ViewerType::UNKNOWN_TYPE; | 321 viewerType = ViewerType::UNKNOWN_TYPE; |
| 317 break; | 322 break; |
| 318 } | 323 } |
| 319 UMA_HISTOGRAM_ENUMERATION("VRViewerType", static_cast<int>(viewerType), | 324 UMA_HISTOGRAM_ENUMERATION("VRViewerType", static_cast<int>(viewerType), |
| 320 static_cast<int>(ViewerType::VIEWER_TYPE_MAX)); | 325 static_cast<int>(ViewerType::VIEWER_TYPE_MAX)); |
| 321 } | 326 } |
| 322 | 327 |
| 323 void VrShellGl::InitializeRenderer() { | 328 void VrShellGl::InitializeRenderer() { |
| 324 // While WebVR is going through the compositor path, it shares | 329 // While WebVR is going through the compositor path, it shares |
| 325 // the same texture ID. This will change once it gets its own | 330 // the same texture ID. This will change once it gets its own |
| 326 // surface, but store it separately to avoid future confusion. | 331 // surface, but store it separately to avoid future confusion. |
| 327 // TODO(klausw,crbug.com/655722): remove this. | 332 // TODO(klausw,crbug.com/655722): remove this. |
| 328 webvr_texture_id_ = content_texture_id_; | 333 webvr_texture_id_ = content_texture_id_; |
| 329 // Out of paranoia, explicitly reset the "pose valid" flags to false | |
| 330 // from the GL thread. The constructor ran in the UI thread. | |
| 331 // TODO(klausw,crbug.com/655722): remove this. | |
| 332 webvr_head_pose_valid_.assign(kPoseRingBufferSize, false); | |
| 333 | 334 |
| 334 gvr_api_->InitializeGl(); | 335 gvr_api_->InitializeGl(); |
| 336 webvr_head_pose_.assign(kPoseRingBufferSize, |
| 337 gvr_api_->GetHeadSpaceFromStartSpaceRotation( |
| 338 gvr::GvrApi::GetTimePointNow())); |
| 339 |
| 335 std::vector<gvr::BufferSpec> specs; | 340 std::vector<gvr::BufferSpec> specs; |
| 336 // For kFramePrimaryBuffer (primary VrShell and WebVR content) | 341 // For kFramePrimaryBuffer (primary VrShell and WebVR content) |
| 337 specs.push_back(gvr_api_->CreateBufferSpec()); | 342 specs.push_back(gvr_api_->CreateBufferSpec()); |
| 338 render_size_primary_ = specs[kFramePrimaryBuffer].GetSize(); | 343 render_size_primary_ = specs[kFramePrimaryBuffer].GetSize(); |
| 339 | 344 |
| 340 // For kFrameHeadlockedBuffer (for WebVR insecure content warning). | 345 // For kFrameHeadlockedBuffer (for WebVR insecure content warning). |
| 341 // Set this up at a fixed resolution; the (smaller) FOV gets set below. | 346 // Set this up at a fixed resolution; the (smaller) FOV gets set below. |
| 342 specs.push_back(gvr_api_->CreateBufferSpec()); | 347 specs.push_back(gvr_api_->CreateBufferSpec()); |
| 343 specs.back().SetSize(kHeadlockedBufferDimensions); | 348 specs.back().SetSize(kHeadlockedBufferDimensions); |
| 344 render_size_headlocked_ = specs[kFrameHeadlockedBuffer].GetSize(); | 349 render_size_headlocked_ = specs[kFrameHeadlockedBuffer].GetSize(); |
| (...skipping 237 matching lines...) | |
| 582 DCHECK(input_target != InputTarget::NONE); | 587 DCHECK(input_target != InputTarget::NONE); |
| 583 const base::WeakPtr<VrInputManager>& weak_ptr = | 588 const base::WeakPtr<VrInputManager>& weak_ptr = |
| 584 input_target == InputTarget::CONTENT ? content_input_manager_ | 589 input_target == InputTarget::CONTENT ? content_input_manager_ |
| 585 : ui_input_manager_; | 590 : ui_input_manager_; |
| 586 main_thread_task_runner_->PostTask( | 591 main_thread_task_runner_->PostTask( |
| 587 FROM_HERE, | 592 FROM_HERE, |
| 588 base::Bind(&VrInputManager::ProcessUpdatedGesture, weak_ptr, | 593 base::Bind(&VrInputManager::ProcessUpdatedGesture, weak_ptr, |
| 589 base::Passed(std::move(event)))); | 594 base::Passed(std::move(event)))); |
| 590 } | 595 } |
| 591 | 596 |
| 592 void VrShellGl::SetGvrPoseForWebVr(const gvr::Mat4f& pose, uint32_t pose_num) { | |
| 593 webvr_head_pose_[pose_num % kPoseRingBufferSize] = pose; | |
| 594 webvr_head_pose_valid_[pose_num % kPoseRingBufferSize] = true; | |
| 595 } | |
| 596 | |
| 597 bool VrShellGl::WebVrPoseByteIsValid(int pose_index_byte) { | |
| 598 if (pose_index_byte < 0) { | |
| 599 return false; | |
| 600 } | |
| 601 if (!webvr_head_pose_valid_[pose_index_byte % kPoseRingBufferSize]) { | |
| 602 VLOG(1) << "WebVR: reject decoded pose index " << pose_index_byte << | |
| 603 ", not a valid pose"; | |
| 604 return false; | |
| 605 } | |
| 606 return true; | |
| 607 } | |
| 608 | |
| 609 void VrShellGl::DrawFrame() { | 597 void VrShellGl::DrawFrame() { |
| 610 TRACE_EVENT0("gpu", "VrShellGl::DrawFrame"); | 598 TRACE_EVENT0("gpu", "VrShellGl::DrawFrame"); |
| 599 |
| 611 // Reset the viewport list to just the pair of viewports for the | 600 // Reset the viewport list to just the pair of viewports for the |
| 612 // primary buffer each frame. Head-locked viewports get added by | 601 // primary buffer each frame. Head-locked viewports get added by |
| 613 // DrawVrShell if needed. | 602 // DrawVrShell if needed. |
| 614 buffer_viewport_list_->SetToRecommendedBufferViewports(); | 603 buffer_viewport_list_->SetToRecommendedBufferViewports(); |
| 615 | 604 |
| 616 gvr::Frame frame = swap_chain_->AcquireFrame(); | 605 gvr::Frame frame = swap_chain_->AcquireFrame(); |
| 617 gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow(); | 606 if (!frame.is_valid()) { |
| 618 target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos; | 607 return; |
| 608 } |
| 609 frame.BindBuffer(kFramePrimaryBuffer); |
| 610 if (web_vr_mode_) { |
| 611 DrawWebVr(); |
| 612 } |
| 619 | 613 |
| 620 gvr::Mat4f head_pose = | 614 int pose_index; |
| 621 gvr_api_->GetHeadSpaceFromStartSpaceRotation(target_time); | 615 gvr::Mat4f head_pose; |
| 616 |
| 617 // When using async reprojection, we need to know which pose was used in |
| 618 // the WebVR app for drawing this frame. Due to unknown amounts of |
| 619 // buffering in the compositor and SurfaceTexture, we read the pose number |
| 620 // from a corner pixel. There's no point in doing this for legacy |
| 621 // distortion rendering since that doesn't need a pose, and reading back |
| 622 // pixels is an expensive operation. TODO(klausw,crbug.com/655722): stop |
| 623 // doing this once we have working no-compositor rendering for WebVR. |
| 624 if (web_vr_mode_ && gvr_api_->GetAsyncReprojectionEnabled() && |
| 625 GetPixelEncodedPoseIndexByte(&pose_index)) { |
| 626 head_pose = webvr_head_pose_[pose_index % kPoseRingBufferSize]; |
| 627 } else { |
| 628 gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow(); |
| 629 target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos; |
| 630 head_pose = gvr_api_->GetHeadSpaceFromStartSpaceRotation(target_time); |
| 631 } |
| 622 | 632 |
| 623 gvr::Vec3f position = GetTranslation(head_pose); | 633 gvr::Vec3f position = GetTranslation(head_pose); |
| 624 if (position.x == 0.0f && position.y == 0.0f && position.z == 0.0f) { | 634 if (position.x == 0.0f && position.y == 0.0f && position.z == 0.0f) { |
| 625 // This appears to be a 3DOF pose without a neck model. Add one. | 635 // This appears to be a 3DOF pose without a neck model. Add one. |
| 626 // The head pose has redundant data. Assume we're only using the | 636 // The head pose has redundant data. Assume we're only using the |
| 627 // object_from_reference_matrix, we're not updating position_external. | 637 // object_from_reference_matrix, we're not updating position_external. |
| 628 // TODO: Not sure what object_from_reference_matrix is. The new API removed | 638 // TODO: Not sure what object_from_reference_matrix is. The new API removed |
| 629 // it. For now, removing it seems to work fine. | 639 // it. For now, removing it seems to work fine. |
| 630 gvr_api_->ApplyNeckModel(head_pose, 1.0f); | 640 gvr_api_->ApplyNeckModel(head_pose, 1.0f); |
| 631 } | 641 } |
| 632 | 642 |
| 633 frame.BindBuffer(kFramePrimaryBuffer); | |
| 634 | |
| 635 // Update the render position of all UI elements (including desktop). | 643 // Update the render position of all UI elements (including desktop). |
| 636 const float screen_tilt = kDesktopScreenTiltDefault * M_PI / 180.0f; | 644 const float screen_tilt = kDesktopScreenTiltDefault * M_PI / 180.0f; |
| 637 scene_->UpdateTransforms(screen_tilt, TimeInMicroseconds()); | 645 scene_->UpdateTransforms(screen_tilt, TimeInMicroseconds()); |
| 638 | 646 |
| 639 UpdateController(GetForwardVector(head_pose)); | 647 UpdateController(GetForwardVector(head_pose)); |
| 640 | 648 |
| 641 if (web_vr_mode_) { | |
| 642 DrawWebVr(); | |
| 643 | |
| 644 // When using async reprojection, we need to know which pose was used in | |
| 645 // the WebVR app for drawing this frame. Due to unknown amounts of | |
| 646 // buffering in the compositor and SurfaceTexture, we read the pose number | |
| 647 // from a corner pixel. There's no point in doing this for legacy | |
| 648 // distortion rendering since that doesn't need a pose, and reading back | |
| 649 // pixels is an expensive operation. TODO(klausw,crbug.com/655722): stop | |
| 650 // doing this once we have working no-compositor rendering for WebVR. | |
| 651 if (gvr_api_->GetAsyncReprojectionEnabled()) { | |
| 652 int pose_index_byte = GetPixelEncodedPoseIndexByte(); | |
| 653 if (WebVrPoseByteIsValid(pose_index_byte)) { | |
| 654 // We have a valid pose, use it for reprojection. | |
| 655 webvr_left_viewport_->SetReprojection(GVR_REPROJECTION_FULL); | |
| 656 webvr_right_viewport_->SetReprojection(GVR_REPROJECTION_FULL); | |
| 657 head_pose = webvr_head_pose_[pose_index_byte % kPoseRingBufferSize]; | |
| 658 // We can't mark the used pose as invalid since unfortunately | |
| 659 // we have to reuse them. The compositor will re-submit stale | |
| 660 // frames on vsync, and we can't tell that this has happened | |
| 661 // until we've read the pose index from it, and at that point | |
| 662 // it's too late to skip rendering. | |
| 663 } else { | |
| 664 // If we don't get a valid frame ID back we shouldn't attempt | |
| 665 // to reproject by an invalid matrix, so turn off reprojection | |
| 666 // instead. Invalid poses can permanently break reprojection | |
| 667 // for this GVR instance: http://crbug.com/667327 | |
| 668 webvr_left_viewport_->SetReprojection(GVR_REPROJECTION_NONE); | |
| 669 webvr_right_viewport_->SetReprojection(GVR_REPROJECTION_NONE); | |
| 670 } | |
| 671 } | |
| 672 } | |
| 673 | |
| 674 DrawVrShell(head_pose, frame); | 649 DrawVrShell(head_pose, frame); |
| 675 | 650 |
| 676 frame.Unbind(); | 651 frame.Unbind(); |
| 677 frame.Submit(*buffer_viewport_list_, head_pose); | 652 frame.Submit(*buffer_viewport_list_, head_pose); |
| 678 | 653 |
| 679 // No need to swap buffers for surfaceless rendering. | 654 // No need to swap buffers for surfaceless rendering. |
| 680 if (surfaceless_rendering_) { | 655 if (!surfaceless_rendering_) { |
| 681 ScheduleNextDrawFrame(); | 656 // TODO(mthiesse): Support asynchronous SwapBuffers. |
| 682 return; | |
| 683 } | |
| 684 | |
| 685 if (surface_->SupportsAsyncSwap()) { | |
| 686 surface_->SwapBuffersAsync(base::Bind(&WaitForSwapAck, base::Bind( | |
| 687 &VrShellGl::ScheduleNextDrawFrame, weak_ptr_factory_.GetWeakPtr()))); | |
| 688 } else { | |
| 689 surface_->SwapBuffers(); | 657 surface_->SwapBuffers(); |
| 690 ScheduleNextDrawFrame(); | |
| 691 } | 658 } |
| 692 } | 659 } |
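
A note on why the single byte recovered in DrawFrame() is enough to find the matching pose: GetPose() (added near the end of this file) stamps each pose with a monotonically increasing uint32_t and stores it in a small ring buffer, and the lookup stays unambiguous as long as the ring size divides 256 and fewer than a ring's worth of frames are in flight. The real kPoseRingBufferSize is defined outside the visible hunk; the sketch below assumes a size of 8 and uses a stand-in matrix type, purely for illustration.

    #include <array>
    #include <cstddef>
    #include <cstdint>

    constexpr size_t kRingSize = 8;      // assumed; real value is kPoseRingBufferSize
    struct Mat4 { float m[4][4]; };      // stand-in for gvr::Mat4f
    std::array<Mat4, kRingSize> g_poses;

    // Producer: GetPose() stores pose N at slot N % kRingSize.
    void StorePose(uint32_t pose_index, const Mat4& pose) {
      g_poses[pose_index % kRingSize] = pose;
    }

    // Consumer: DrawFrame() only sees the low byte (N % 256) read back from the
    // pixel, but (N % 256) % kRingSize == N % kRingSize whenever kRingSize
    // divides 256, so the lookup lands on the same slot the producer wrote.
    const Mat4& LookUpSubmittedPose(int pose_index_byte) {
      return g_poses[pose_index_byte % kRingSize];
    }
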
| 693 | 660 |
| 694 void VrShellGl::DrawVrShell(const gvr::Mat4f& head_pose, | 661 void VrShellGl::DrawVrShell(const gvr::Mat4f& head_pose, |
| 695 gvr::Frame &frame) { | 662 gvr::Frame &frame) { |
| 696 TRACE_EVENT0("gpu", "VrShellGl::DrawVrShell"); | 663 TRACE_EVENT0("gpu", "VrShellGl::DrawVrShell"); |
| 697 std::vector<const ContentRectangle*> head_locked_elements; | 664 std::vector<const ContentRectangle*> head_locked_elements; |
| 698 std::vector<const ContentRectangle*> world_elements; | 665 std::vector<const ContentRectangle*> world_elements; |
| 699 for (const auto& rect : scene_->GetUiElements()) { | 666 for (const auto& rect : scene_->GetUiElements()) { |
| 700 if (!rect->IsVisible()) | 667 if (!rect->IsVisible()) |
| (...skipping 202 matching lines...) | |
| 903 buffer_viewport_list_->SetBufferViewport(GVR_RIGHT_EYE, | 870 buffer_viewport_list_->SetBufferViewport(GVR_RIGHT_EYE, |
| 904 *webvr_right_viewport_); | 871 *webvr_right_viewport_); |
| 905 } | 872 } |
| 906 | 873 |
| 907 void VrShellGl::OnTriggerEvent() { | 874 void VrShellGl::OnTriggerEvent() { |
| 908 // Set a flag to handle this on the render thread at the next frame. | 875 // Set a flag to handle this on the render thread at the next frame. |
| 909 touch_pending_ = true; | 876 touch_pending_ = true; |
| 910 } | 877 } |
| 911 | 878 |
| 912 void VrShellGl::OnPause() { | 879 void VrShellGl::OnPause() { |
| 913 draw_task_.Cancel(); | 880 vsync_task_.Cancel(); |
| 914 controller_->OnPause(); | 881 controller_->OnPause(); |
| 915 gvr_api_->PauseTracking(); | 882 gvr_api_->PauseTracking(); |
| 916 } | 883 } |
| 917 | 884 |
| 918 void VrShellGl::OnResume() { | 885 void VrShellGl::OnResume() { |
| 919 gvr_api_->RefreshViewerProfile(); | 886 gvr_api_->RefreshViewerProfile(); |
| 920 gvr_api_->ResumeTracking(); | 887 gvr_api_->ResumeTracking(); |
| 921 controller_->OnResume(); | 888 controller_->OnResume(); |
| 922 if (ready_to_draw_) { | 889 if (ready_to_draw_) { |
| 923 draw_task_.Reset(base::Bind(&VrShellGl::DrawFrame, base::Unretained(this))); | 890 vsync_task_.Reset(base::Bind(&VrShellGl::OnVSync, base::Unretained(this))); |
| 924 ScheduleNextDrawFrame(); | 891 OnVSync(); |
| 925 } | 892 } |
| 926 } | 893 } |
| 927 | 894 |
| 928 void VrShellGl::SetWebVrMode(bool enabled) { | 895 void VrShellGl::SetWebVrMode(bool enabled) { |
| 929 web_vr_mode_ = enabled; | 896 web_vr_mode_ = enabled; |
| 930 } | 897 } |
| 931 | 898 |
| 932 void VrShellGl::UpdateWebVRTextureBounds(const gvr::Rectf& left_bounds, | 899 void VrShellGl::UpdateWebVRTextureBounds(const gvr::Rectf& left_bounds, |
| 933 const gvr::Rectf& right_bounds) { | 900 const gvr::Rectf& right_bounds) { |
| 934 webvr_left_viewport_->SetSourceUv(left_bounds); | 901 webvr_left_viewport_->SetSourceUv(left_bounds); |
| (...skipping 26 matching lines...) | |
| 961 if (ui_surface_texture_.get()) | 928 if (ui_surface_texture_.get()) |
| 962 ui_surface_texture_->SetDefaultBufferSize(width, height); | 929 ui_surface_texture_->SetDefaultBufferSize(width, height); |
| 963 ui_tex_physical_size_.width = width; | 930 ui_tex_physical_size_.width = width; |
| 964 ui_tex_physical_size_.height = height; | 931 ui_tex_physical_size_.height = height; |
| 965 } | 932 } |
| 966 | 933 |
| 967 base::WeakPtr<VrShellGl> VrShellGl::GetWeakPtr() { | 934 base::WeakPtr<VrShellGl> VrShellGl::GetWeakPtr() { |
| 968 return weak_ptr_factory_.GetWeakPtr(); | 935 return weak_ptr_factory_.GetWeakPtr(); |
| 969 } | 936 } |
| 970 | 937 |
| 971 void VrShellGl::UpdateVSyncParameters(const base::TimeTicks timebase, | 938 void VrShellGl::OnVSync() { |
| 972 const base::TimeDelta interval) { | |
| 973 vsync_timebase_ = timebase; | |
| 974 vsync_interval_ = interval; | |
| 975 } | |
| 976 | |
| 977 void VrShellGl::ScheduleNextDrawFrame() { | |
| 978 base::TimeTicks now = base::TimeTicks::Now(); | 939 base::TimeTicks now = base::TimeTicks::Now(); |
| 979 base::TimeTicks target; | 940 base::TimeTicks target; |
| 980 | 941 |
| 981 if (vsync_interval_.is_zero()) { | 942 // Don't send VSyncs until we have a timebase/interval. |
| 982 target = now; | 943 if (vsync_interval_.is_zero()) |
| 944 return; |
| 945 target = now + vsync_interval_; |
| 946 int64_t intervals = (target - vsync_timebase_) / vsync_interval_; |
| 947 target = vsync_timebase_ + intervals * vsync_interval_; |
| 948 task_runner_->PostDelayedTask(FROM_HERE, vsync_task_.callback(), |
| 949 target - now); |
| 950 |
| 951 base::TimeDelta time = intervals * vsync_interval_; |
| 952 if (!callback_.is_null()) { |
| 953 callback_.Run(GetPose(), time); |
| 954 callback_.Reset(); |
| 983 } else { | 955 } else { |
| 984 target = now + vsync_interval_; | 956 pending_vsync_ = true; |
| 985 int64_t intervals = (target - vsync_timebase_) / vsync_interval_; | 957 pending_time_ = time; |
| 986 target = vsync_timebase_ + intervals * vsync_interval_; | |
| 987 } | 958 } |
| 959 DrawFrame(); |
| 960 } |
| 988 | 961 |
| 989 task_runner_->PostDelayedTask(FROM_HERE, draw_task_.callback(), target - now); | 962 void VrShellGl::OnRequest(device::mojom::VRVSyncProviderRequest request) { |
| 963 binding_.Close(); |
| 964 binding_.Bind(std::move(request)); |
| 965 } |
| 966 |
| 967 void VrShellGl::GetVSync(const GetVSyncCallback& callback) { |
| 968 if (!pending_vsync_) { |
| 969 if (!callback_.is_null()) { |
| 970 mojo::ReportBadMessage("Requested VSync before waiting for response to " |
| 971 "previous request."); |
| 972 return; |
| 973 } |
| 974 callback_ = callback; |
| 975 return; |
| 976 } |
| 977 pending_vsync_ = false; |
| 978 callback.Run(GetPose(), pending_time_); |
| 979 } |
| 980 |
| 981 void VrShellGl::UpdateVSyncInterval(long timebase_nanos, |
| 982 double interval_seconds) { |
| 983 vsync_timebase_ = base::TimeTicks(); |
| 984 vsync_timebase_ += base::TimeDelta::FromMicroseconds(timebase_nanos / 1000); |
| 985 vsync_interval_ = base::TimeDelta::FromSecondsD(interval_seconds); |
| 986 vsync_task_.Reset(base::Bind(&VrShellGl::OnVSync, base::Unretained(this))); |
| 987 OnVSync(); |
| 990 } | 988 } |
| 991 | 989 |
| 992 void VrShellGl::ForceExitVr() { | 990 void VrShellGl::ForceExitVr() { |
| 993 main_thread_task_runner_->PostTask( | 991 main_thread_task_runner_->PostTask( |
| 994 FROM_HERE, base::Bind(&VrShell::ForceExitVr, weak_vr_shell_)); | 992 FROM_HERE, base::Bind(&VrShell::ForceExitVr, weak_vr_shell_)); |
| 995 } | 993 } |
| 996 | 994 |
| 997 void VrShellGl::UpdateScene(std::unique_ptr<base::ListValue> commands) { | 995 void VrShellGl::UpdateScene(std::unique_ptr<base::ListValue> commands) { |
| 998 scene_->HandleCommands(std::move(commands), TimeInMicroseconds()); | 996 scene_->HandleCommands(std::move(commands), TimeInMicroseconds()); |
| 999 } | 997 } |
| 1000 | 998 |
| 999 device::mojom::VRPosePtr VrShellGl::GetPose() { |
| 1000 TRACE_EVENT0("input", "VrShellGl::GetPose"); |
| 1001 |
| 1002 gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow(); |
| 1003 target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos; |
| 1004 |
| 1005 gvr::Mat4f head_mat = |
| 1006 gvr_api_->GetHeadSpaceFromStartSpaceRotation(target_time); |
| 1007 head_mat = gvr_api_->ApplyNeckModel(head_mat, 1.0f); |
| 1008 |
| 1009 uint32_t pose_index = pose_index_++; |
| 1010 webvr_head_pose_[pose_index % kPoseRingBufferSize] = head_mat; |
| 1011 |
| 1012 return VrShell::VRPosePtrFromGvrPose(head_mat, pose_index); |
| 1013 } |
| 1014 |
| 1001 } // namespace vr_shell | 1015 } // namespace vr_shell |