| OLD | NEW |
| 1 // Copyright 2016 The Chromium Authors. All rights reserved. | 1 // Copyright 2016 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "chrome/browser/android/vr_shell/vr_shell_gl.h" | 5 #include "chrome/browser/android/vr_shell/vr_shell_gl.h" |
| 6 | 6 |
| 7 #include "base/android/jni_android.h" | 7 #include "base/android/jni_android.h" |
| 8 #include "base/memory/ptr_util.h" | 8 #include "base/memory/ptr_util.h" |
| 9 #include "base/metrics/histogram_macros.h" | 9 #include "base/metrics/histogram_macros.h" |
| 10 #include "base/threading/thread_task_runner_handle.h" | 10 #include "base/threading/thread_task_runner_handle.h" |
| 11 #include "chrome/browser/android/vr_shell/ui_elements.h" | 11 #include "chrome/browser/android/vr_shell/ui_elements.h" |
| 12 #include "chrome/browser/android/vr_shell/ui_scene.h" | 12 #include "chrome/browser/android/vr_shell/ui_scene.h" |
| 13 #include "chrome/browser/android/vr_shell/vr_controller.h" | 13 #include "chrome/browser/android/vr_shell/vr_controller.h" |
| 14 #include "chrome/browser/android/vr_shell/vr_gl_util.h" | 14 #include "chrome/browser/android/vr_shell/vr_gl_util.h" |
| 15 #include "chrome/browser/android/vr_shell/vr_input_manager.h" | 15 #include "chrome/browser/android/vr_shell/vr_input_manager.h" |
| 16 #include "chrome/browser/android/vr_shell/vr_math.h" | 16 #include "chrome/browser/android/vr_shell/vr_math.h" |
| 17 #include "chrome/browser/android/vr_shell/vr_shell.h" | 17 #include "chrome/browser/android/vr_shell/vr_shell.h" |
| 18 #include "chrome/browser/android/vr_shell/vr_shell_delegate.h" |
| 18 #include "chrome/browser/android/vr_shell/vr_shell_renderer.h" | 19 #include "chrome/browser/android/vr_shell/vr_shell_renderer.h" |
| 20 #include "device/vr/android/gvr/gvr_device.h" |
| 19 #include "third_party/WebKit/public/platform/WebInputEvent.h" | 21 #include "third_party/WebKit/public/platform/WebInputEvent.h" |
| 20 #include "ui/gfx/vsync_provider.h" | |
| 21 #include "ui/gl/android/scoped_java_surface.h" | 22 #include "ui/gl/android/scoped_java_surface.h" |
| 22 #include "ui/gl/android/surface_texture.h" | 23 #include "ui/gl/android/surface_texture.h" |
| 23 #include "ui/gl/gl_bindings.h" | 24 #include "ui/gl/gl_bindings.h" |
| 24 #include "ui/gl/gl_context.h" | 25 #include "ui/gl/gl_context.h" |
| 25 #include "ui/gl/gl_surface.h" | 26 #include "ui/gl/gl_surface.h" |
| 26 #include "ui/gl/init/gl_factory.h" | 27 #include "ui/gl/init/gl_factory.h" |
| 27 | 28 |
| 28 namespace vr_shell { | 29 namespace vr_shell { |
| 29 | 30 |
| 30 namespace { | 31 namespace { |
| 31 // Constant taken from treasure_hunt demo. | 32 // TODO(mthiesse): If gvr::PlatformInfo().GetPosePredictionTime() is ever |
| 33 // exposed, use that instead (it defaults to 50ms on most platforms). |
| 32 static constexpr long kPredictionTimeWithoutVsyncNanos = 50000000; | 34 static constexpr long kPredictionTimeWithoutVsyncNanos = 50000000; |
| 33 | 35 |
| 34 static constexpr float kZNear = 0.1f; | 36 static constexpr float kZNear = 0.1f; |
| 35 static constexpr float kZFar = 1000.0f; | 37 static constexpr float kZFar = 1000.0f; |
| 36 | 38 |
| 37 // Screen angle in degrees. 0 = vertical, positive = top closer. | 39 // Screen angle in degrees. 0 = vertical, positive = top closer. |
| 38 static constexpr float kDesktopScreenTiltDefault = 0; | 40 static constexpr float kDesktopScreenTiltDefault = 0; |
| 39 | 41 |
| 40 static constexpr float kReticleWidth = 0.025f; | 42 static constexpr float kReticleWidth = 0.025f; |
| 41 static constexpr float kReticleHeight = 0.025f; | 43 static constexpr float kReticleHeight = 0.025f; |
| (...skipping 91 matching lines...) |
| 133 return mouse_event; | 135 return mouse_event; |
| 134 } | 136 } |
| 135 | 137 |
| 136 enum class ViewerType { | 138 enum class ViewerType { |
| 137 UNKNOWN_TYPE = 0, | 139 UNKNOWN_TYPE = 0, |
| 138 CARDBOARD = 1, | 140 CARDBOARD = 1, |
| 139 DAYDREAM = 2, | 141 DAYDREAM = 2, |
| 140 VIEWER_TYPE_MAX, | 142 VIEWER_TYPE_MAX, |
| 141 }; | 143 }; |
| 142 | 144 |
| 143 int GetPixelEncodedPoseIndexByte() { | |
| 144 TRACE_EVENT0("gpu", "VrShellGl::GetPixelEncodedPoseIndex"); | |
| 145 // Read the pose index encoded in a bottom left pixel as color values. | |
| 146 // See also third_party/WebKit/Source/modules/vr/VRDisplay.cpp which | |
| 147 // encodes the pose index, and device/vr/android/gvr/gvr_device.cc | |
| 148 // which tracks poses. Returns the low byte (0..255) if valid, or -1 | |
| 149 // if not valid due to bad magic number. | |
| 150 uint8_t pixels[4]; | |
| 151 // Assume we're reading from the framebuffer we just wrote to. | |
| 152 // That's true currently, we may need to use glReadBuffer(GL_BACK) | |
| 153 // or equivalent if the rendering setup changes in the future. | |
| 154 glReadPixels(0, 0, 1, 1, GL_RGBA, GL_UNSIGNED_BYTE, pixels); | |
| 155 | |
| 156 // Check for the magic number written by VRDevice.cpp on submit. | |
| 157 // This helps avoid glitches from garbage data in the render | |
| 158 // buffer that can appear during initialization or resizing. These | |
| 159 // often appear as flashes of all-black or all-white pixels. | |
| 160 if (pixels[1] == kWebVrPosePixelMagicNumbers[0] && | |
| 161 pixels[2] == kWebVrPosePixelMagicNumbers[1]) { | |
| 162 // Pose is good. | |
| 163 return pixels[0]; | |
| 164 } | |
| 165 VLOG(1) << "WebVR: reject decoded pose index " << (int)pixels[0] << | |
| 166 ", bad magic number " << (int)pixels[1] << ", " << (int)pixels[2]; | |
| 167 return -1; | |
| 168 } | |
| 169 | |
| 170 int64_t TimeInMicroseconds() { | 145 int64_t TimeInMicroseconds() { |
| 171 return std::chrono::duration_cast<std::chrono::microseconds>( | 146 return std::chrono::duration_cast<std::chrono::microseconds>( |
| 172 std::chrono::steady_clock::now().time_since_epoch()).count(); | 147 std::chrono::steady_clock::now().time_since_epoch()).count(); |
| 173 } | 148 } |
| 174 | 149 |
| 175 void WaitForSwapAck(const base::Closure& callback, gfx::SwapResult result) { | |
| 176 callback.Run(); | |
| 177 } | |
| 178 | |
| 179 } // namespace | 150 } // namespace |
| 180 | 151 |
| 181 VrShellGl::VrShellGl( | 152 VrShellGl::VrShellGl( |
| 182 const base::WeakPtr<VrShell>& weak_vr_shell, | 153 const base::WeakPtr<VrShell>& weak_vr_shell, |
| 183 const base::WeakPtr<VrInputManager>& content_input_manager, | 154 const base::WeakPtr<VrInputManager>& content_input_manager, |
| 184 const base::WeakPtr<VrInputManager>& ui_input_manager, | 155 const base::WeakPtr<VrInputManager>& ui_input_manager, |
| 156 const base::WeakPtr<VrShellDelegate>& delegate_provider, |
| 185 scoped_refptr<base::SingleThreadTaskRunner> main_thread_task_runner, | 157 scoped_refptr<base::SingleThreadTaskRunner> main_thread_task_runner, |
| 186 gvr_context* gvr_api, | 158 gvr_context* gvr_api, |
| 187 bool initially_web_vr, | 159 bool initially_web_vr, |
| 188 bool reprojected_rendering) | 160 bool reprojected_rendering) |
| 189 : web_vr_mode_(initially_web_vr), | 161 : web_vr_mode_(initially_web_vr), |
| 190 surfaceless_rendering_(reprojected_rendering), | 162 surfaceless_rendering_(reprojected_rendering), |
| 191 task_runner_(base::ThreadTaskRunnerHandle::Get()), | 163 task_runner_(base::ThreadTaskRunnerHandle::Get()), |
| 164 binding_(this), |
| 192 weak_vr_shell_(weak_vr_shell), | 165 weak_vr_shell_(weak_vr_shell), |
| 193 content_input_manager_(content_input_manager), | 166 content_input_manager_(content_input_manager), |
| 194 ui_input_manager_(ui_input_manager), | 167 ui_input_manager_(ui_input_manager), |
| 168 delegate_provider_(delegate_provider), |
| 195 main_thread_task_runner_(std::move(main_thread_task_runner)), | 169 main_thread_task_runner_(std::move(main_thread_task_runner)), |
| 196 weak_ptr_factory_(this) { | 170 weak_ptr_factory_(this) { |
| 197 GvrInit(gvr_api); | 171 GvrInit(gvr_api); |
| 198 } | 172 } |
| 199 | 173 |
| 200 VrShellGl::~VrShellGl() { | 174 VrShellGl::~VrShellGl() { |
| 201 draw_task_.Cancel(); | 175 vsync_task_.Cancel(); |
| 176 if (!callback_.is_null()) |
| 177 callback_.Run(nullptr, base::TimeDelta()); |
| 178 if (binding_.is_bound()) { |
| 179 main_thread_task_runner_->PostTask(FROM_HERE, base::Bind( |
| 180 &VrShellDelegate::OnVRVsyncProviderRequest, delegate_provider_, |
| 181 base::Passed(binding_.Unbind()))); |
| 182 } |
| 202 } | 183 } |
| 203 | 184 |
| 204 void VrShellGl::Initialize() { | 185 void VrShellGl::Initialize() { |
| 205 gvr::Mat4f identity; | |
| 206 SetIdentityM(identity); | |
| 207 webvr_head_pose_.resize(kPoseRingBufferSize, identity); | |
| 208 webvr_head_pose_valid_.resize(kPoseRingBufferSize, false); | |
| 209 | |
| 210 scene_.reset(new UiScene); | 186 scene_.reset(new UiScene); |
| 211 | 187 |
| 212 if (surfaceless_rendering_) { | 188 if (surfaceless_rendering_) { |
| 213 // If we're rendering surfaceless, we'll never get a java surface to render | 189 // If we're rendering surfaceless, we'll never get a java surface to render |
| 214 // into, so we can initialize GL right away. | 190 // into, so we can initialize GL right away. |
| 215 InitializeGl(nullptr); | 191 InitializeGl(nullptr); |
| 216 } | 192 } |
| 217 } | 193 } |
| 218 | 194 |
| 219 void VrShellGl::InitializeGl(gfx::AcceleratedWidget window) { | 195 void VrShellGl::InitializeGl(gfx::AcceleratedWidget window) { |
| (...skipping 22 matching lines...) |
| 242 LOG(ERROR) << "gl::init::CreateGLContext failed"; | 218 LOG(ERROR) << "gl::init::CreateGLContext failed"; |
| 243 ForceExitVr(); | 219 ForceExitVr(); |
| 244 return; | 220 return; |
| 245 } | 221 } |
| 246 if (!context_->MakeCurrent(surface_.get())) { | 222 if (!context_->MakeCurrent(surface_.get())) { |
| 247 LOG(ERROR) << "gl::GLContext::MakeCurrent() failed"; | 223 LOG(ERROR) << "gl::GLContext::MakeCurrent() failed"; |
| 248 ForceExitVr(); | 224 ForceExitVr(); |
| 249 return; | 225 return; |
| 250 } | 226 } |
| 251 | 227 |
| 252 // TODO(mthiesse): We don't appear to have a VSync provider ever here. This is | |
| 253 // sort of okay, because the GVR swap chain will block if we render too fast, | |
| 254 // but we should address this properly. | |
| 255 if (surface_->GetVSyncProvider()) { | |
| 256 surface_->GetVSyncProvider()->GetVSyncParameters(base::Bind( | |
| 257 &VrShellGl::UpdateVSyncParameters, weak_ptr_factory_.GetWeakPtr())); | |
| 258 } else { | |
| 259 LOG(ERROR) << "No VSync Provider"; | |
| 260 } | |
| 261 | |
| 262 unsigned int textures[2]; | 228 unsigned int textures[2]; |
| 263 glGenTextures(2, textures); | 229 glGenTextures(2, textures); |
| 264 ui_texture_id_ = textures[0]; | 230 ui_texture_id_ = textures[0]; |
| 265 content_texture_id_ = textures[1]; | 231 content_texture_id_ = textures[1]; |
| 266 ui_surface_texture_ = gl::SurfaceTexture::Create(ui_texture_id_); | 232 ui_surface_texture_ = gl::SurfaceTexture::Create(ui_texture_id_); |
| 267 content_surface_texture_ = gl::SurfaceTexture::Create(content_texture_id_); | 233 content_surface_texture_ = gl::SurfaceTexture::Create(content_texture_id_); |
| 268 ui_surface_.reset(new gl::ScopedJavaSurface(ui_surface_texture_.get())); | 234 ui_surface_.reset(new gl::ScopedJavaSurface(ui_surface_texture_.get())); |
| 269 content_surface_.reset(new gl::ScopedJavaSurface( | 235 content_surface_.reset(new gl::ScopedJavaSurface( |
| 270 content_surface_texture_.get())); | 236 content_surface_texture_.get())); |
| 271 ui_surface_texture_->SetFrameAvailableCallback(base::Bind( | 237 ui_surface_texture_->SetFrameAvailableCallback(base::Bind( |
| 272 &VrShellGl::OnUIFrameAvailable, weak_ptr_factory_.GetWeakPtr())); | 238 &VrShellGl::OnUIFrameAvailable, weak_ptr_factory_.GetWeakPtr())); |
| 273 content_surface_texture_->SetFrameAvailableCallback(base::Bind( | 239 content_surface_texture_->SetFrameAvailableCallback(base::Bind( |
| 274 &VrShellGl::OnContentFrameAvailable, weak_ptr_factory_.GetWeakPtr())); | 240 &VrShellGl::OnContentFrameAvailable, weak_ptr_factory_.GetWeakPtr())); |
| 275 | 241 |
| 276 content_surface_texture_->SetDefaultBufferSize( | 242 content_surface_texture_->SetDefaultBufferSize( |
| 277 content_tex_physical_size_.width, content_tex_physical_size_.height); | 243 content_tex_physical_size_.width, content_tex_physical_size_.height); |
| 278 ui_surface_texture_->SetDefaultBufferSize(ui_tex_physical_size_.width, | 244 ui_surface_texture_->SetDefaultBufferSize(ui_tex_physical_size_.width, |
| 279 ui_tex_physical_size_.height); | 245 ui_tex_physical_size_.height); |
| 280 | 246 |
| 281 main_thread_task_runner_->PostTask(FROM_HERE, base::Bind( | 247 main_thread_task_runner_->PostTask(FROM_HERE, base::Bind( |
| 282 &VrShell::SurfacesChanged, weak_vr_shell_, | 248 &VrShell::SurfacesChanged, weak_vr_shell_, |
| 283 content_surface_->j_surface().obj(), | 249 content_surface_->j_surface().obj(), |
| 284 ui_surface_->j_surface().obj())); | 250 ui_surface_->j_surface().obj())); |
| 285 | 251 |
| 286 InitializeRenderer(); | 252 InitializeRenderer(); |
| 287 | 253 |
| 288 draw_task_.Reset(base::Bind(&VrShellGl::DrawFrame, base::Unretained(this))); | 254 vsync_task_.Reset(base::Bind(&VrShellGl::OnVSync, base::Unretained(this))); |
| 289 ScheduleNextDrawFrame(); | 255 OnVSync(); |
| 290 | 256 |
| 291 ready_to_draw_ = true; | 257 ready_to_draw_ = true; |
| 292 } | 258 } |
| 293 | 259 |
| 294 void VrShellGl::OnUIFrameAvailable() { | 260 void VrShellGl::OnUIFrameAvailable() { |
| 295 ui_surface_texture_->UpdateTexImage(); | 261 ui_surface_texture_->UpdateTexImage(); |
| 296 } | 262 } |
| 297 | 263 |
| 298 void VrShellGl::OnContentFrameAvailable() { | 264 void VrShellGl::OnContentFrameAvailable() { |
| 299 content_surface_texture_->UpdateTexImage(); | 265 content_surface_texture_->UpdateTexImage(); |
| 266 received_frame_ = true; |
| 267 } |
| 268 |
| 269 bool VrShellGl::GetPixelEncodedPoseIndexByte(int* pose_index) { |
| 270 TRACE_EVENT0("gpu", "VrShellGl::GetPixelEncodedPoseIndex"); |
| 271 if (!received_frame_) { |
| 272 *pose_index = last_pose_; |
| 273 return true; |
| 274 } |
| 275 received_frame_ = false; |
| 276 |
| 277 // Read the pose index encoded in a bottom left pixel as color values. |
| 278 // See also third_party/WebKit/Source/modules/vr/VRDisplay.cpp which |
| 279 // encodes the pose index, and device/vr/android/gvr/gvr_device.cc |
| 280 // which tracks poses. Returns the low byte (0..255) if valid, or -1 |
| 281 // if not valid due to bad magic number. |
| 282 uint8_t pixels[4]; |
| 283 // Assume we're reading from the framebuffer we just wrote to. |
| 284 // That's true currently, we may need to use glReadBuffer(GL_BACK) |
| 285 // or equivalent if the rendering setup changes in the future. |
| 286 glReadPixels(0, 0, 1, 1, GL_RGBA, GL_UNSIGNED_BYTE, pixels); |
| 287 |
| 288 // Check for the magic number written by VRDevice.cpp on submit. |
| 289 // This helps avoid glitches from garbage data in the render |
| 290 // buffer that can appear during initialization or resizing. These |
| 291 // often appear as flashes of all-black or all-white pixels. |
| 292 if (pixels[1] == kWebVrPosePixelMagicNumbers[0] && |
| 293 pixels[2] == kWebVrPosePixelMagicNumbers[1]) { |
| 294 // Pose is good. |
| 295 *pose_index = pixels[0]; |
| 296 last_pose_ = pixels[0]; |
| 297 return true; |
| 298 } |
| 299 VLOG(1) << "WebVR: reject decoded pose index " << (int)pixels[0] |
| 300 << ", bad magic number " << (int)pixels[1] << ", " |
| 301 << (int)pixels[2]; |
| 302 return false; |
| 300 } | 303 } |
| 301 | 304 |
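A note on the pixel handshake above: the submitting side (VRDisplay.cpp in Blink) stamps the low byte of the pose index plus two magic bytes into the bottom-left pixel, and GetPixelEncodedPoseIndexByte() reads them back after compositing. Below is a minimal standalone sketch of that round trip, assuming plain GLES2; the magic values, helper names, and the scissored-clear encoding are illustrative, not the actual Blink implementation.

```cpp
// Illustrative encode/decode pair for the pose-index pixel. The real encoder
// lives in third_party/WebKit/Source/modules/vr/VRDisplay.cpp; the constants
// here are placeholders for kWebVrPosePixelMagicNumbers.
#include <GLES2/gl2.h>
#include <cstdint>

namespace {
constexpr uint8_t kMagic0 = 42;   // Placeholder magic values.
constexpr uint8_t kMagic1 = 142;
}  // namespace

void EncodePoseIndex(uint8_t pose_index_low_byte) {
  // Clear just the (0, 0) pixel: red carries the index, green/blue the magic.
  glEnable(GL_SCISSOR_TEST);
  glScissor(0, 0, 1, 1);
  glClearColor(pose_index_low_byte / 255.0f, kMagic0 / 255.0f,
               kMagic1 / 255.0f, 1.0f);
  glClear(GL_COLOR_BUFFER_BIT);
  glDisable(GL_SCISSOR_TEST);
}

bool DecodePoseIndex(int* pose_index) {
  uint8_t pixels[4];
  glReadPixels(0, 0, 1, 1, GL_RGBA, GL_UNSIGNED_BYTE, pixels);
  if (pixels[1] != kMagic0 || pixels[2] != kMagic1)
    return false;  // Garbage frame; the caller falls back to a predicted pose.
  *pose_index = pixels[0];
  return true;
}
```
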
| 302 void VrShellGl::GvrInit(gvr_context* gvr_api) { | 305 void VrShellGl::GvrInit(gvr_context* gvr_api) { |
| 303 gvr_api_ = gvr::GvrApi::WrapNonOwned(gvr_api); | 306 gvr_api_ = gvr::GvrApi::WrapNonOwned(gvr_api); |
| 304 controller_.reset(new VrController(gvr_api)); | 307 controller_.reset(new VrController(gvr_api)); |
| 305 | 308 |
| 306 ViewerType viewerType; | 309 ViewerType viewerType; |
| 307 switch (gvr_api_->GetViewerType()) { | 310 switch (gvr_api_->GetViewerType()) { |
| 308 case gvr::ViewerType::GVR_VIEWER_TYPE_DAYDREAM: | 311 case gvr::ViewerType::GVR_VIEWER_TYPE_DAYDREAM: |
| 309 viewerType = ViewerType::DAYDREAM; | 312 viewerType = ViewerType::DAYDREAM; |
| 310 break; | 313 break; |
| 311 case gvr::ViewerType::GVR_VIEWER_TYPE_CARDBOARD: | 314 case gvr::ViewerType::GVR_VIEWER_TYPE_CARDBOARD: |
| 312 viewerType = ViewerType::CARDBOARD; | 315 viewerType = ViewerType::CARDBOARD; |
| 313 break; | 316 break; |
| 314 default: | 317 default: |
| 315 NOTREACHED(); | 318 NOTREACHED(); |
| 316 viewerType = ViewerType::UNKNOWN_TYPE; | 319 viewerType = ViewerType::UNKNOWN_TYPE; |
| 317 break; | 320 break; |
| 318 } | 321 } |
| 319 UMA_HISTOGRAM_ENUMERATION("VRViewerType", static_cast<int>(viewerType), | 322 UMA_HISTOGRAM_ENUMERATION("VRViewerType", static_cast<int>(viewerType), |
| 320 static_cast<int>(ViewerType::VIEWER_TYPE_MAX)); | 323 static_cast<int>(ViewerType::VIEWER_TYPE_MAX)); |
| 321 } | 324 } |
| 322 | 325 |
| 323 void VrShellGl::InitializeRenderer() { | 326 void VrShellGl::InitializeRenderer() { |
| 324 // While WebVR is going through the compositor path, it shares | 327 // While WebVR is going through the compositor path, it shares |
| 325 // the same texture ID. This will change once it gets its own | 328 // the same texture ID. This will change once it gets its own |
| 326 // surface, but store it separately to avoid future confusion. | 329 // surface, but store it separately to avoid future confusion. |
| 327 // TODO(klausw,crbug.com/655722): remove this. | 330 // TODO(klausw,crbug.com/655722): remove this. |
| 328 webvr_texture_id_ = content_texture_id_; | 331 webvr_texture_id_ = content_texture_id_; |
| 329 // Out of paranoia, explicitly reset the "pose valid" flags to false | |
| 330 // from the GL thread. The constructor ran in the UI thread. | |
| 331 // TODO(klausw,crbug.com/655722): remove this. | |
| 332 webvr_head_pose_valid_.assign(kPoseRingBufferSize, false); | |
| 333 | 332 |
| 334 gvr_api_->InitializeGl(); | 333 gvr_api_->InitializeGl(); |
| 334 webvr_head_pose_.assign(kPoseRingBufferSize, |
| 335 gvr_api_->GetHeadSpaceFromStartSpaceRotation( |
| 336 gvr::GvrApi::GetTimePointNow())); |
| 337 |
| 335 std::vector<gvr::BufferSpec> specs; | 338 std::vector<gvr::BufferSpec> specs; |
| 336 // For kFramePrimaryBuffer (primary VrShell and WebVR content) | 339 // For kFramePrimaryBuffer (primary VrShell and WebVR content) |
| 337 specs.push_back(gvr_api_->CreateBufferSpec()); | 340 specs.push_back(gvr_api_->CreateBufferSpec()); |
| 338 render_size_primary_ = specs[kFramePrimaryBuffer].GetSize(); | 341 render_size_primary_ = specs[kFramePrimaryBuffer].GetSize(); |
| 339 | 342 |
| 340 // For kFrameHeadlockedBuffer (for WebVR insecure content warning). | 343 // For kFrameHeadlockedBuffer (for WebVR insecure content warning). |
| 341 // Set this up at fixed resolution, the (smaller) FOV gets set below. | 344 // Set this up at fixed resolution, the (smaller) FOV gets set below. |
| 342 specs.push_back(gvr_api_->CreateBufferSpec()); | 345 specs.push_back(gvr_api_->CreateBufferSpec()); |
| 343 specs.back().SetSize(kHeadlockedBufferDimensions); | 346 specs.back().SetSize(kHeadlockedBufferDimensions); |
| 344 render_size_headlocked_ = specs[kFrameHeadlockedBuffer].GetSize(); | 347 render_size_headlocked_ = specs[kFrameHeadlockedBuffer].GetSize(); |
| (...skipping 237 matching lines...) |
| 582 DCHECK(input_target != InputTarget::NONE); | 585 DCHECK(input_target != InputTarget::NONE); |
| 583 const base::WeakPtr<VrInputManager>& weak_ptr = | 586 const base::WeakPtr<VrInputManager>& weak_ptr = |
| 584 input_target == InputTarget::CONTENT ? content_input_manager_ | 587 input_target == InputTarget::CONTENT ? content_input_manager_ |
| 585 : ui_input_manager_; | 588 : ui_input_manager_; |
| 586 main_thread_task_runner_->PostTask( | 589 main_thread_task_runner_->PostTask( |
| 587 FROM_HERE, | 590 FROM_HERE, |
| 588 base::Bind(&VrInputManager::ProcessUpdatedGesture, weak_ptr, | 591 base::Bind(&VrInputManager::ProcessUpdatedGesture, weak_ptr, |
| 589 base::Passed(std::move(event)))); | 592 base::Passed(std::move(event)))); |
| 590 } | 593 } |
| 591 | 594 |
| 592 void VrShellGl::SetGvrPoseForWebVr(const gvr::Mat4f& pose, uint32_t pose_num) { | |
| 593 webvr_head_pose_[pose_num % kPoseRingBufferSize] = pose; | |
| 594 webvr_head_pose_valid_[pose_num % kPoseRingBufferSize] = true; | |
| 595 } | |
| 596 | |
| 597 bool VrShellGl::WebVrPoseByteIsValid(int pose_index_byte) { | |
| 598 if (pose_index_byte < 0) { | |
| 599 return false; | |
| 600 } | |
| 601 if (!webvr_head_pose_valid_[pose_index_byte % kPoseRingBufferSize]) { | |
| 602 VLOG(1) << "WebVR: reject decoded pose index " << pose_index_byte << | |
| 603 ", not a valid pose"; | |
| 604 return false; | |
| 605 } | |
| 606 return true; | |
| 607 } | |
| 608 | |
| 609 void VrShellGl::DrawFrame() { | 595 void VrShellGl::DrawFrame() { |
| 610 TRACE_EVENT0("gpu", "VrShellGl::DrawFrame"); | 596 TRACE_EVENT0("gpu", "VrShellGl::DrawFrame"); |
| 597 |
| 611 // Reset the viewport list to just the pair of viewports for the | 598 // Reset the viewport list to just the pair of viewports for the |
| 612 // primary buffer each frame. Head-locked viewports get added by | 599 // primary buffer each frame. Head-locked viewports get added by |
| 613 // DrawVrShell if needed. | 600 // DrawVrShell if needed. |
| 614 buffer_viewport_list_->SetToRecommendedBufferViewports(); | 601 buffer_viewport_list_->SetToRecommendedBufferViewports(); |
| 615 | 602 |
| 616 gvr::Frame frame = swap_chain_->AcquireFrame(); | 603 gvr::Frame frame = swap_chain_->AcquireFrame(); |
| 617 gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow(); | 604 if (!frame.is_valid()) { |
| 618 target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos; | 605 return; |
| 606 } |
| 607 frame.BindBuffer(kFramePrimaryBuffer); |
| 608 if (web_vr_mode_) { |
| 609 DrawWebVr(); |
| 610 } |
| 619 | 611 |
| 620 gvr::Mat4f head_pose = | 612 int pose_index; |
| 621 gvr_api_->GetHeadSpaceFromStartSpaceRotation(target_time); | 613 gvr::Mat4f head_pose; |
| 614 |
| 615 // When using async reprojection, we need to know which pose was used in |
| 616 // the WebVR app for drawing this frame. Due to unknown amounts of |
| 617 // buffering in the compositor and SurfaceTexture, we read the pose number |
| 618 // from a corner pixel. There's no point in doing this for legacy |
| 619 // distortion rendering since that doesn't need a pose, and reading back |
| 620 // pixels is an expensive operation. TODO(klausw,crbug.com/655722): stop |
| 621 // doing this once we have working no-compositor rendering for WebVR. |
| 622 if (web_vr_mode_ && gvr_api_->GetAsyncReprojectionEnabled() && |
| 623 GetPixelEncodedPoseIndexByte(&pose_index)) { |
| 624 head_pose = webvr_head_pose_[pose_index % kPoseRingBufferSize]; |
| 625 } else { |
| 626 gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow(); |
| 627 target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos; |
| 628 head_pose = gvr_api_->GetHeadSpaceFromStartSpaceRotation(target_time); |
| 629 } |
| 622 | 630 |
| 623 gvr::Vec3f position = GetTranslation(head_pose); | 631 gvr::Vec3f position = GetTranslation(head_pose); |
| 624 if (position.x == 0.0f && position.y == 0.0f && position.z == 0.0f) { | 632 if (position.x == 0.0f && position.y == 0.0f && position.z == 0.0f) { |
| 625 // This appears to be a 3DOF pose without a neck model. Add one. | 633 // This appears to be a 3DOF pose without a neck model. Add one. |
| 626 // The head pose has redundant data. Assume we're only using the | 634 // The head pose has redundant data. Assume we're only using the |
| 627 // object_from_reference_matrix, we're not updating position_external. | 635 // object_from_reference_matrix, we're not updating position_external. |
| 628 // TODO: Not sure what object_from_reference_matrix is. The new api removed | 636 // TODO: Not sure what object_from_reference_matrix is. The new api removed |
| 629 // it. For now, removing it seems working fine. | 637 // it. For now, removing it seems working fine. |
| 630 gvr_api_->ApplyNeckModel(head_pose, 1.0f); | 638 gvr_api_->ApplyNeckModel(head_pose, 1.0f); |
| 631 } | 639 } |
| 632 | 640 |
| 633 frame.BindBuffer(kFramePrimaryBuffer); | |
| 634 | |
| 635 // Update the render position of all UI elements (including desktop). | 641 // Update the render position of all UI elements (including desktop). |
| 636 const float screen_tilt = kDesktopScreenTiltDefault * M_PI / 180.0f; | 642 const float screen_tilt = kDesktopScreenTiltDefault * M_PI / 180.0f; |
| 637 scene_->UpdateTransforms(screen_tilt, TimeInMicroseconds()); | 643 scene_->UpdateTransforms(screen_tilt, TimeInMicroseconds()); |
| 638 | 644 |
| 639 UpdateController(GetForwardVector(head_pose)); | 645 UpdateController(GetForwardVector(head_pose)); |
| 640 | 646 |
| 641 if (web_vr_mode_) { | |
| 642 DrawWebVr(); | |
| 643 | |
| 644 // When using async reprojection, we need to know which pose was used in | |
| 645 // the WebVR app for drawing this frame. Due to unknown amounts of | |
| 646 // buffering in the compositor and SurfaceTexture, we read the pose number | |
| 647 // from a corner pixel. There's no point in doing this for legacy | |
| 648 // distortion rendering since that doesn't need a pose, and reading back | |
| 649 // pixels is an expensive operation. TODO(klausw,crbug.com/655722): stop | |
| 650 // doing this once we have working no-compositor rendering for WebVR. | |
| 651 if (gvr_api_->GetAsyncReprojectionEnabled()) { | |
| 652 int pose_index_byte = GetPixelEncodedPoseIndexByte(); | |
| 653 if (WebVrPoseByteIsValid(pose_index_byte)) { | |
| 654 // We have a valid pose, use it for reprojection. | |
| 655 webvr_left_viewport_->SetReprojection(GVR_REPROJECTION_FULL); | |
| 656 webvr_right_viewport_->SetReprojection(GVR_REPROJECTION_FULL); | |
| 657 head_pose = webvr_head_pose_[pose_index_byte % kPoseRingBufferSize]; | |
| 658 // We can't mark the used pose as invalid since unfortunately | |
| 659 // we have to reuse them. The compositor will re-submit stale | |
| 660 // frames on vsync, and we can't tell that this has happened | |
| 661 // until we've read the pose index from it, and at that point | |
| 662 // it's too late to skip rendering. | |
| 663 } else { | |
| 664 // If we don't get a valid frame ID back we shouldn't attempt | |
| 665 // to reproject by an invalid matrix, so turn off reprojection | |
| 666 // instead. Invalid poses can permanently break reprojection | |
| 667 // for this GVR instance: http://crbug.com/667327 | |
| 668 webvr_left_viewport_->SetReprojection(GVR_REPROJECTION_NONE); | |
| 669 webvr_right_viewport_->SetReprojection(GVR_REPROJECTION_NONE); | |
| 670 } | |
| 671 } | |
| 672 } | |
| 673 | |
| 674 DrawVrShell(head_pose, frame); | 647 DrawVrShell(head_pose, frame); |
| 675 | 648 |
| 676 frame.Unbind(); | 649 frame.Unbind(); |
| 677 frame.Submit(*buffer_viewport_list_, head_pose); | 650 frame.Submit(*buffer_viewport_list_, head_pose); |
| 678 | 651 |
| 679 // No need to swap buffers for surfaceless rendering. | 652 // No need to swap buffers for surfaceless rendering. |
| 680 if (surfaceless_rendering_) { | 653 if (!surfaceless_rendering_) { |
| 681 ScheduleNextDrawFrame(); | 654 // TODO(mthiesse): Support asynchronous SwapBuffers. |
| 682 return; | |
| 683 } | |
| 684 | |
| 685 if (surface_->SupportsAsyncSwap()) { | |
| 686 surface_->SwapBuffersAsync(base::Bind(&WaitForSwapAck, base::Bind( | |
| 687 &VrShellGl::ScheduleNextDrawFrame, weak_ptr_factory_.GetWeakPtr()))); | |
| 688 } else { | |
| 689 surface_->SwapBuffers(); | 655 surface_->SwapBuffers(); |
| 690 ScheduleNextDrawFrame(); | |
| 691 } | 656 } |
| 692 } | 657 } |
| 693 | 658 |
| 694 void VrShellGl::DrawVrShell(const gvr::Mat4f& head_pose, | 659 void VrShellGl::DrawVrShell(const gvr::Mat4f& head_pose, |
| 695 gvr::Frame &frame) { | 660 gvr::Frame &frame) { |
| 696 TRACE_EVENT0("gpu", "VrShellGl::DrawVrShell"); | 661 TRACE_EVENT0("gpu", "VrShellGl::DrawVrShell"); |
| 697 std::vector<const ContentRectangle*> head_locked_elements; | 662 std::vector<const ContentRectangle*> head_locked_elements; |
| 698 std::vector<const ContentRectangle*> world_elements; | 663 std::vector<const ContentRectangle*> world_elements; |
| 699 for (const auto& rect : scene_->GetUiElements()) { | 664 for (const auto& rect : scene_->GetUiElements()) { |
| 700 if (!rect->IsVisible()) | 665 if (!rect->IsVisible()) |
| (...skipping 202 matching lines...) |
| 903 buffer_viewport_list_->SetBufferViewport(GVR_RIGHT_EYE, | 868 buffer_viewport_list_->SetBufferViewport(GVR_RIGHT_EYE, |
| 904 *webvr_right_viewport_); | 869 *webvr_right_viewport_); |
| 905 } | 870 } |
| 906 | 871 |
| 907 void VrShellGl::OnTriggerEvent() { | 872 void VrShellGl::OnTriggerEvent() { |
| 908 // Set a flag to handle this on the render thread at the next frame. | 873 // Set a flag to handle this on the render thread at the next frame. |
| 909 touch_pending_ = true; | 874 touch_pending_ = true; |
| 910 } | 875 } |
| 911 | 876 |
| 912 void VrShellGl::OnPause() { | 877 void VrShellGl::OnPause() { |
| 913 draw_task_.Cancel(); | 878 vsync_task_.Cancel(); |
| 914 controller_->OnPause(); | 879 controller_->OnPause(); |
| 915 gvr_api_->PauseTracking(); | 880 gvr_api_->PauseTracking(); |
| 916 } | 881 } |
| 917 | 882 |
| 918 void VrShellGl::OnResume() { | 883 void VrShellGl::OnResume() { |
| 919 gvr_api_->RefreshViewerProfile(); | 884 gvr_api_->RefreshViewerProfile(); |
| 920 gvr_api_->ResumeTracking(); | 885 gvr_api_->ResumeTracking(); |
| 921 controller_->OnResume(); | 886 controller_->OnResume(); |
| 922 if (ready_to_draw_) { | 887 if (ready_to_draw_) { |
| 923 draw_task_.Reset(base::Bind(&VrShellGl::DrawFrame, base::Unretained(this))); | 888 vsync_task_.Reset(base::Bind(&VrShellGl::OnVSync, base::Unretained(this))); |
| 924 ScheduleNextDrawFrame(); | 889 OnVSync(); |
| 925 } | 890 } |
| 926 } | 891 } |
| 927 | 892 |
| 928 void VrShellGl::SetWebVrMode(bool enabled) { | 893 void VrShellGl::SetWebVrMode(bool enabled) { |
| 929 web_vr_mode_ = enabled; | 894 web_vr_mode_ = enabled; |
| 930 } | 895 } |
| 931 | 896 |
| 932 void VrShellGl::UpdateWebVRTextureBounds(const gvr::Rectf& left_bounds, | 897 void VrShellGl::UpdateWebVRTextureBounds(const gvr::Rectf& left_bounds, |
| 933 const gvr::Rectf& right_bounds) { | 898 const gvr::Rectf& right_bounds) { |
| 934 webvr_left_viewport_->SetSourceUv(left_bounds); | 899 webvr_left_viewport_->SetSourceUv(left_bounds); |
| (...skipping 26 matching lines...) Expand all Loading... |
| 961 if (ui_surface_texture_.get()) | 926 if (ui_surface_texture_.get()) |
| 962 ui_surface_texture_->SetDefaultBufferSize(width, height); | 927 ui_surface_texture_->SetDefaultBufferSize(width, height); |
| 963 ui_tex_physical_size_.width = width; | 928 ui_tex_physical_size_.width = width; |
| 964 ui_tex_physical_size_.height = height; | 929 ui_tex_physical_size_.height = height; |
| 965 } | 930 } |
| 966 | 931 |
| 967 base::WeakPtr<VrShellGl> VrShellGl::GetWeakPtr() { | 932 base::WeakPtr<VrShellGl> VrShellGl::GetWeakPtr() { |
| 968 return weak_ptr_factory_.GetWeakPtr(); | 933 return weak_ptr_factory_.GetWeakPtr(); |
| 969 } | 934 } |
| 970 | 935 |
| 971 void VrShellGl::UpdateVSyncParameters(const base::TimeTicks timebase, | 936 void VrShellGl::OnVSync() { |
| 972 const base::TimeDelta interval) { | |
| 973 vsync_timebase_ = timebase; | |
| 974 vsync_interval_ = interval; | |
| 975 } | |
| 976 | |
| 977 void VrShellGl::ScheduleNextDrawFrame() { | |
| 978 base::TimeTicks now = base::TimeTicks::Now(); | 937 base::TimeTicks now = base::TimeTicks::Now(); |
| 979 base::TimeTicks target; | 938 base::TimeTicks target; |
| 980 | 939 |
| 981 if (vsync_interval_.is_zero()) { | 940 // Don't send VSyncs until we have a timebase/interval. |
| 982 target = now; | 941 if (vsync_interval_.is_zero()) |
| 942 return; |
| 943 target = now + vsync_interval_; |
| 944 int64_t intervals = (target - vsync_timebase_) / vsync_interval_; |
| 945 target = vsync_timebase_ + intervals * vsync_interval_; |
| 946 task_runner_->PostDelayedTask(FROM_HERE, vsync_task_.callback(), |
| 947 target - now); |
| 948 |
| 949 base::TimeDelta time = intervals * vsync_interval_; |
| 950 if (!callback_.is_null()) { |
| 951 callback_.Run(GetPose(), time); |
| 952 callback_.Reset(); |
| 983 } else { | 953 } else { |
| 984 target = now + vsync_interval_; | 954 pending_vsync_ = true; |
| 985 int64_t intervals = (target - vsync_timebase_) / vsync_interval_; | 955 pending_time_ = time; |
| 986 target = vsync_timebase_ + intervals * vsync_interval_; | |
| 987 } | 956 } |
| 957 DrawFrame(); |
| 958 } |
| 988 | 959 |
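The arithmetic in OnVSync() above snaps the next wakeup onto the vsync grid defined by vsync_timebase_ and vsync_interval_, and the same interval count becomes the frame time handed to the callback. A self-contained sketch of that calculation using microsecond integers (names are illustrative):

```cpp
// Vsync-aligned scheduling, as in OnVSync(), with int64 microseconds standing
// in for base::TimeTicks / base::TimeDelta.
#include <cstdint>

// Returns the first point on the vsync grid strictly after |now_us|.
// Example: timebase 0, interval 16667us (~60Hz), now 25000us
//   -> target 41667, intervals 2, snapped result 33334 (a delay of 8334us).
int64_t NextVSyncUs(int64_t now_us, int64_t timebase_us, int64_t interval_us) {
  int64_t target = now_us + interval_us;
  // Integer division truncates, matching base::TimeDelta division for
  // positive deltas.
  int64_t intervals = (target - timebase_us) / interval_us;
  return timebase_us + intervals * interval_us;
}
```
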
| 989 task_runner_->PostDelayedTask(FROM_HERE, draw_task_.callback(), target - now); | 960 void VrShellGl::OnRequest(device::mojom::VRVSyncProviderRequest request) { |
| 961 if (binding_.is_bound()) |
| 962 binding_.Close(); |
| 963 binding_.Bind(std::move(request)); |
| 964 } |
| 965 |
| 966 void VrShellGl::GetVSync(const GetVSyncCallback& callback) { |
| 967 if (!pending_vsync_) { |
| 968 if (!callback_.is_null()) { |
| 969 mojo::ReportBadMessage("Requested VSync before waiting for response to " |
| 970 "previous request."); |
| 971 return; |
| 972 } |
| 973 callback_ = std::move(callback); |
| 974 return; |
| 975 } |
| 976 pending_vsync_ = false; |
| 977 callback.Run(GetPose(), pending_time_); |
| 978 } |
| 979 |
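GetVSync() and OnVSync() above form a small rendezvous: whichever side arrives second completes the exchange, with pending_vsync_/pending_time_ buffering a tick that fired before the renderer asked, and callback_ buffering a request that arrived before the tick. A simplified, mojo-free sketch of that state machine, with std::function standing in for the real GetVSyncCallback and single-threaded use assumed:

```cpp
#include <cstdint>
#include <functional>

class VSyncRendezvous {
 public:
  using Callback = std::function<void(int64_t /* frame_time_us */)>;

  // Renderer side: ask for the next vsync tick.
  void GetVSync(Callback callback) {
    if (!pending_vsync_) {
      // No tick buffered yet; reply later from OnVSync(). (The real code
      // treats a second early request as a bad mojo message.)
      callback_ = std::move(callback);
      return;
    }
    pending_vsync_ = false;
    callback(pending_time_us_);
  }

  // Compositor side: a vsync tick happened at |time_us|.
  void OnVSync(int64_t time_us) {
    if (callback_) {
      callback_(time_us);
      callback_ = nullptr;
    } else {
      pending_vsync_ = true;
      pending_time_us_ = time_us;
    }
  }

 private:
  Callback callback_;
  bool pending_vsync_ = false;
  int64_t pending_time_us_ = 0;
};
```
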
| 980 void VrShellGl::UpdateVSyncInterval(long timebase_nanos, |
| 981 double interval_seconds) { |
| 982 vsync_timebase_ = base::TimeTicks(); |
| 983 vsync_timebase_ += base::TimeDelta::FromMicroseconds(timebase_nanos / 1000); |
| 984 vsync_interval_ = base::TimeDelta::FromSecondsD(interval_seconds); |
| 985 vsync_task_.Reset(base::Bind(&VrShellGl::OnVSync, base::Unretained(this))); |
| 986 OnVSync(); |
| 990 } | 987 } |
| 991 | 988 |
| 992 void VrShellGl::ForceExitVr() { | 989 void VrShellGl::ForceExitVr() { |
| 993 main_thread_task_runner_->PostTask( | 990 main_thread_task_runner_->PostTask( |
| 994 FROM_HERE, base::Bind(&VrShell::ForceExitVr, weak_vr_shell_)); | 991 FROM_HERE, base::Bind(&VrShell::ForceExitVr, weak_vr_shell_)); |
| 995 } | 992 } |
| 996 | 993 |
| 997 void VrShellGl::UpdateScene(std::unique_ptr<base::ListValue> commands) { | 994 void VrShellGl::UpdateScene(std::unique_ptr<base::ListValue> commands) { |
| 998 scene_->HandleCommands(std::move(commands), TimeInMicroseconds()); | 995 scene_->HandleCommands(std::move(commands), TimeInMicroseconds()); |
| 999 } | 996 } |
| 1000 | 997 |
| 998 device::mojom::VRPosePtr VrShellGl::GetPose() { |
| 999 TRACE_EVENT0("input", "VrShellGl::GetPose"); |
| 1000 |
| 1001 gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow(); |
| 1002 target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos; |
| 1003 |
| 1004 gvr::Mat4f head_mat = |
| 1005 gvr_api_->GetHeadSpaceFromStartSpaceRotation(target_time); |
| 1006 head_mat = gvr_api_->ApplyNeckModel(head_mat, 1.0f); |
| 1007 |
| 1008 uint32_t pose_index = pose_index_++; |
| 1009 webvr_head_pose_[pose_index % kPoseRingBufferSize] = head_mat; |
| 1010 |
| 1011 return VrShell::VRPosePtrFromGvrPose(head_mat, pose_index); |
| 1012 } |
| 1013 |
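GetPose() above is the producer side of the webvr_head_pose_ ring buffer that DrawFrame() consumes via the byte decoded from the corner pixel. A compact sketch of that pattern with illustrative names; because only the low byte of the index survives the round trip, the sketch assumes a size that evenly divides 256 so indexing by the recovered byte and by the full uint32 index land on the same slot:

```cpp
#include <array>
#include <cstddef>
#include <cstdint>

struct Mat4f { float m[4][4]; };

constexpr size_t kRingSize = 8;  // Must divide 256 for low-byte lookups.

class PoseRing {
 public:
  // Producer (GetPose): store the pose, hand out its monotonically
  // increasing index.
  uint32_t Push(const Mat4f& pose) {
    uint32_t index = next_index_++;
    poses_[index % kRingSize] = pose;
    return index;
  }

  // Consumer (DrawFrame): look the pose back up from the decoded low byte.
  const Mat4f& Lookup(int low_byte) const {
    return poses_[static_cast<size_t>(low_byte) % kRingSize];
  }

 private:
  std::array<Mat4f, kRingSize> poses_ = {};
  uint32_t next_index_ = 0;
};
```
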
| 1001 } // namespace vr_shell | 1014 } // namespace vr_shell |