Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2016 The Chromium Authors. All rights reserved. | 1 // Copyright 2016 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "chrome/browser/android/vr_shell/vr_shell.h" | 5 #include "chrome/browser/android/vr_shell/vr_shell_gl.h" |
| 6 | 6 |
| 7 #include "base/memory/ptr_util.h" | |
| 7 #include "base/metrics/histogram_macros.h" | 8 #include "base/metrics/histogram_macros.h" |
| 9 #include "base/threading/thread_task_runner_handle.h" | |
| 8 #include "chrome/browser/android/vr_shell/ui_elements.h" | 10 #include "chrome/browser/android/vr_shell/ui_elements.h" |
| 9 #include "chrome/browser/android/vr_shell/ui_interface.h" | 11 #include "chrome/browser/android/vr_shell/ui_interface.h" |
| 10 #include "chrome/browser/android/vr_shell/ui_scene.h" | 12 #include "chrome/browser/android/vr_shell/ui_scene.h" |
| 11 #include "chrome/browser/android/vr_shell/vr_compositor.h" | |
| 12 #include "chrome/browser/android/vr_shell/vr_controller.h" | 13 #include "chrome/browser/android/vr_shell/vr_controller.h" |
| 13 #include "chrome/browser/android/vr_shell/vr_gl_util.h" | 14 #include "chrome/browser/android/vr_shell/vr_gl_util.h" |
| 14 #include "chrome/browser/android/vr_shell/vr_input_manager.h" | 15 #include "chrome/browser/android/vr_shell/vr_input_manager.h" |
| 15 #include "chrome/browser/android/vr_shell/vr_shell_delegate.h" | 16 #include "chrome/browser/android/vr_shell/vr_math.h" |
| 17 #include "chrome/browser/android/vr_shell/vr_shell.h" | |
| 16 #include "chrome/browser/android/vr_shell/vr_shell_renderer.h" | 18 #include "chrome/browser/android/vr_shell/vr_shell_renderer.h" |
| 17 #include "chrome/browser/android/vr_shell/vr_usage_monitor.h" | 19 #include "third_party/WebKit/public/platform/WebInputEvent.h" |
| 18 #include "chrome/browser/android/vr_shell/vr_web_contents_observer.h" | 20 #include "ui/gfx/vsync_provider.h" |
| 19 #include "content/public/browser/navigation_controller.h" | 21 #include "ui/gl/android/scoped_java_surface.h" |
| 20 #include "content/public/browser/render_view_host.h" | 22 #include "ui/gl/android/surface_texture.h" |
| 21 #include "content/public/browser/render_widget_host.h" | |
| 22 #include "content/public/browser/render_widget_host_view.h" | |
| 23 #include "content/public/browser/web_contents.h" | |
| 24 #include "content/public/common/referrer.h" | |
| 25 #include "device/vr/android/gvr/gvr_device_provider.h" | |
| 26 #include "jni/VrShellImpl_jni.h" | |
| 27 #include "ui/android/view_android.h" | |
| 28 #include "ui/android/window_android.h" | |
| 29 #include "ui/base/page_transition_types.h" | |
| 30 #include "ui/display/display.h" | |
| 31 #include "ui/display/screen.h" | |
| 32 #include "ui/gl/gl_bindings.h" | 23 #include "ui/gl/gl_bindings.h" |
| 24 #include "ui/gl/gl_context.h" | |
| 25 #include "ui/gl/gl_surface.h" | |
| 33 #include "ui/gl/init/gl_factory.h" | 26 #include "ui/gl/init/gl_factory.h" |
| 34 | 27 |
| 35 using base::android::JavaParamRef; | |
| 36 | |
| 37 namespace vr_shell { | 28 namespace vr_shell { |
| 38 | 29 |
| 39 namespace { | 30 namespace { |
| 40 // Constant taken from treasure_hunt demo. | 31 // Constant taken from treasure_hunt demo. |
| 41 static constexpr long kPredictionTimeWithoutVsyncNanos = 50000000; | 32 static constexpr long kPredictionTimeWithoutVsyncNanos = 50000000; |
| 42 | 33 |
| 43 static constexpr float kZNear = 0.1f; | 34 static constexpr float kZNear = 0.1f; |
| 44 static constexpr float kZFar = 1000.0f; | 35 static constexpr float kZFar = 1000.0f; |
| 45 | 36 |
| 46 // Screen angle in degrees. 0 = vertical, positive = top closer. | 37 // Screen angle in degrees. 0 = vertical, positive = top closer. |
| (...skipping 41 matching lines...) | |
| 88 | 79 |
| 89 // The GVR viewport list has two entries (left eye and right eye) for each | 80 // The GVR viewport list has two entries (left eye and right eye) for each |
| 90 // GVR buffer. | 81 // GVR buffer. |
| 91 static constexpr int kViewportListPrimaryOffset = 0; | 82 static constexpr int kViewportListPrimaryOffset = 0; |
| 92 static constexpr int kViewportListHeadlockedOffset = 2; | 83 static constexpr int kViewportListHeadlockedOffset = 2; |
| 93 | 84 |
| 94 // Magic numbers used to mark valid pose index values encoded in frame | 85 // Magic numbers used to mark valid pose index values encoded in frame |
| 95 // data. Must match the magic numbers used in blink's VRDisplay.cpp. | 86 // data. Must match the magic numbers used in blink's VRDisplay.cpp. |
| 96 static constexpr std::array<uint8_t, 2> kWebVrPosePixelMagicNumbers{{42, 142}}; | 87 static constexpr std::array<uint8_t, 2> kWebVrPosePixelMagicNumbers{{42, 142}}; |
| 97 | 88 |
| 98 vr_shell::VrShell* g_instance; | |
| 99 | |
| 100 static const char kVrShellUIURL[] = "chrome://vr-shell-ui"; | |
| 101 | |
| 102 float Distance(const gvr::Vec3f& vec1, const gvr::Vec3f& vec2) { | 89 float Distance(const gvr::Vec3f& vec1, const gvr::Vec3f& vec2) { |
| 103 float xdiff = (vec1.x - vec2.x); | 90 float xdiff = (vec1.x - vec2.x); |
| 104 float ydiff = (vec1.y - vec2.y); | 91 float ydiff = (vec1.y - vec2.y); |
| 105 float zdiff = (vec1.z - vec2.z); | 92 float zdiff = (vec1.z - vec2.z); |
| 106 float scale = xdiff * xdiff + ydiff * ydiff + zdiff * zdiff; | 93 float scale = xdiff * xdiff + ydiff * ydiff + zdiff * zdiff; |
| 107 return std::sqrt(scale); | 94 return std::sqrt(scale); |
| 108 } | 95 } |
| 109 | 96 |
| 110 // Generate a quaternion representing the rotation from the negative Z axis | 97 // Generate a quaternion representing the rotation from the negative Z axis |
| 111 // (0, 0, -1) to a specified vector. This is an optimized version of a more | 98 // (0, 0, -1) to a specified vector. This is an optimized version of a more |
| (...skipping 28 matching lines...) | |
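
The body of the quaternion helper described above is collapsed in this diff. As a rough illustration of the underlying identity (not the CL's optimized code), a rotation quaternion taking (0, 0, -1) onto a unit vector can be built from axis = from x to and w = 1 + from . to, then normalized. The sketch below assumes the GVR SDK's `gvr::Quatf`/`gvr::Vec3f` structs and a pre-normalized input; the function name is hypothetical.

```cpp
#include <cmath>

// Sketch only (not the elided optimized body): rotation from (0,0,-1) to a
// unit vector "to". With from = (0,0,-1), the general cross/dot construction
// reduces to axis = (to.y, -to.x, 0) and w = 1 - to.z.
gvr::Quatf QuatFromNegativeZ(const gvr::Vec3f& to) {  // hypothetical name
  gvr::Quatf q;
  q.qx = to.y;         // x component of (0,0,-1) x to
  q.qy = -to.x;        // y component of (0,0,-1) x to
  q.qz = 0.0f;
  q.qw = 1.0f - to.z;  // 1 + dot((0,0,-1), to)
  // Degenerates when to == (0,0,1); a real implementation needs a fallback.
  const float len =
      std::sqrt(q.qx * q.qx + q.qy * q.qy + q.qz * q.qz + q.qw * q.qw);
  q.qx /= len;
  q.qy /= len;
  q.qz /= len;
  q.qw /= len;
  return q;
}
```
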
| 140 mouse_event->x = x; | 127 mouse_event->x = x; |
| 141 mouse_event->y = y; | 128 mouse_event->y = y; |
| 142 mouse_event->windowX = x; | 129 mouse_event->windowX = x; |
| 143 mouse_event->windowY = y; | 130 mouse_event->windowY = y; |
| 144 mouse_event->timeStampSeconds = timestamp; | 131 mouse_event->timeStampSeconds = timestamp; |
| 145 mouse_event->clickCount = 1; | 132 mouse_event->clickCount = 1; |
| 146 mouse_event->modifiers = 0; | 133 mouse_event->modifiers = 0; |
| 147 | 134 |
| 148 return mouse_event; | 135 return mouse_event; |
| 149 } | 136 } |
| 150 } // namespace | |
| 151 | |
| 152 VrShell::VrShell(JNIEnv* env, | |
| 153 jobject obj, | |
| 154 content::WebContents* main_contents, | |
| 155 ui::WindowAndroid* content_window, | |
| 156 content::WebContents* ui_contents, | |
| 157 ui::WindowAndroid* ui_window, | |
| 158 bool for_web_vr) | |
| 159 : WebContentsObserver(ui_contents), | |
| 160 main_contents_(main_contents), | |
| 161 ui_contents_(ui_contents), | |
| 162 metrics_helper_(new VrMetricsHelper(main_contents)), | |
| 163 main_thread_task_runner_(base::ThreadTaskRunnerHandle::Get()), | |
| 164 weak_ptr_factory_(this) { | |
| 165 DCHECK(g_instance == nullptr); | |
| 166 g_instance = this; | |
| 167 j_vr_shell_.Reset(env, obj); | |
| 168 scene_.reset(new UiScene); | |
| 169 | |
| 170 if (for_web_vr) | |
| 171 metrics_helper_->SetWebVREnabled(true); | |
| 172 html_interface_.reset(new UiInterface( | |
| 173 for_web_vr ? UiInterface::Mode::WEB_VR : UiInterface::Mode::STANDARD, | |
| 174 main_contents_->IsFullscreen())); | |
| 175 content_compositor_.reset(new VrCompositor(content_window, false)); | |
| 176 ui_compositor_.reset(new VrCompositor(ui_window, true)); | |
| 177 vr_web_contents_observer_.reset(new VrWebContentsObserver( | |
| 178 main_contents, html_interface_.get(), this)); | |
| 179 | |
| 180 LoadUIContentOnUI(); | |
| 181 | |
| 182 gvr::Mat4f identity; | |
| 183 SetIdentityM(identity); | |
| 184 webvr_head_pose_.resize(kPoseRingBufferSize, identity); | |
| 185 webvr_head_pose_valid_.resize(kPoseRingBufferSize, false); | |
| 186 | |
| 187 content_input_manager_.reset(new VrInputManager(main_contents_)); | |
| 188 ui_input_manager_.reset(new VrInputManager(ui_contents_)); | |
| 189 weak_content_input_manager_ = content_input_manager_->GetWeakPtr(); | |
| 190 weak_ui_input_manager_ = ui_input_manager_->GetWeakPtr(); | |
| 191 | |
| 192 SetShowingOverscrollGlowOnUI(false); | |
| 193 } | |
| 194 | |
| 195 void VrShell::UpdateCompositorLayersOnUI(JNIEnv* env, | |
| 196 const JavaParamRef<jobject>& obj) { | |
| 197 content_compositor_->SetLayer(main_contents_); | |
| 198 ui_compositor_->SetLayer(ui_contents_); | |
| 199 } | |
| 200 | |
| 201 void VrShell::DestroyOnUI(JNIEnv* env, const JavaParamRef<jobject>& obj) { | |
| 202 delete this; | |
| 203 } | |
| 204 | |
| 205 void VrShell::LoadUIContentOnUI() { | |
| 206 GURL url(kVrShellUIURL); | |
| 207 ui_contents_->GetController().LoadURL( | |
| 208 url, content::Referrer(), | |
| 209 ui::PageTransition::PAGE_TRANSITION_AUTO_TOPLEVEL, std::string("")); | |
| 210 } | |
| 211 | |
| 212 bool RegisterVrShell(JNIEnv* env) { | |
| 213 return RegisterNativesImpl(env); | |
| 214 } | |
| 215 | |
| 216 VrShell::~VrShell() { | |
| 217 if (delegate_ && delegate_->GetDeviceProvider()) { | |
| 218 delegate_->GetDeviceProvider()->OnGvrDelegateRemoved(); | |
| 219 } | |
| 220 g_instance = nullptr; | |
| 221 gl::init::ShutdownGL(); | |
| 222 } | |
| 223 | |
| 224 void VrShell::SetDelegateOnUI(JNIEnv* env, | |
| 225 const base::android::JavaParamRef<jobject>& obj, | |
| 226 const base::android::JavaParamRef<jobject>& delegate) { | |
| 227 base::AutoLock lock(gvr_init_lock_); | |
| 228 delegate_ = VrShellDelegate::GetNativeDelegate(env, delegate); | |
| 229 if (swap_chain_.get()) { | |
| 230 delegate_->GetDeviceProvider()->OnGvrDelegateReady( | |
| 231 weak_ptr_factory_.GetWeakPtr()); | |
| 232 } | |
| 233 } | |
| 234 | 137 |
| 235 enum class ViewerType { | 138 enum class ViewerType { |
| 236 UNKNOWN_TYPE = 0, | 139 UNKNOWN_TYPE = 0, |
| 237 CARDBOARD = 1, | 140 CARDBOARD = 1, |
| 238 DAYDREAM = 2, | 141 DAYDREAM = 2, |
| 239 VIEWER_TYPE_MAX, | 142 VIEWER_TYPE_MAX, |
| 240 }; | 143 }; |
| 241 | 144 |
| 242 void VrShell::GvrInitOnGL(JNIEnv* env, | 145 int GetPixelEncodedPoseIndexByte() { |
| 243 const JavaParamRef<jobject>& obj, | 146 TRACE_EVENT0("gpu", "VrShellGl::GetPixelEncodedPoseIndex"); |
| 244 jlong native_gvr_api) { | 147 // Read the pose index encoded in a bottom left pixel as color values. |
| 245 // set the initial webvr state | 148 // See also third_party/WebKit/Source/modules/vr/VRDisplay.cpp which |
| 246 metrics_helper_->SetVRActive(true); | 149 // encodes the pose index, and device/vr/android/gvr/gvr_device.cc |
| 150 // which tracks poses. Returns the low byte (0..255) if valid, or -1 | |
| 151 // if not valid due to bad magic number. | |
| 152 uint8_t pixels[4]; | |
| 153 // Assume we're reading from the framebuffer we just wrote to. | |
| 154 // That's true currently, we may need to use glReadBuffer(GL_BACK) | |
| 155 // or equivalent if the rendering setup changes in the future. | |
| 156 glReadPixels(0, 0, 1, 1, GL_RGBA, GL_UNSIGNED_BYTE, pixels); | |
| 247 | 157 |
| 248 gvr_api_ = | 158 // Check for the magic number written by VRDevice.cpp on submit. |
| 249 gvr::GvrApi::WrapNonOwned(reinterpret_cast<gvr_context*>(native_gvr_api)); | 159 // This helps avoid glitches from garbage data in the render |
| 250 // TODO(klausw,crbug.com/655722): should report OnGvrDelegateReady here once | 160 // buffer that can appear during initialization or resizing. These |
| 251 // we switch to using a WebVR render surface. We currently need to wait for | 161 // often appear as flashes of all-black or all-white pixels. |
| 252 // the compositor window's size to be known first. See also | 162 if (pixels[1] == kWebVrPosePixelMagicNumbers[0] && |
| 253 // ContentSurfaceChanged. | 163 pixels[2] == kWebVrPosePixelMagicNumbers[1]) { |
| 254 controller_.reset( | 164 // Pose is good. |
| 255 new VrController(reinterpret_cast<gvr_context*>(native_gvr_api))); | 165 return pixels[0]; |
| 166 } | |
| 167 VLOG(1) << "WebVR: reject decoded pose index " << (int)pixels[0] << | |
| 168 ", bad magic number " << (int)pixels[1] << ", " << (int)pixels[2]; | |
| 169 return -1; | |
| 170 } | |
| 256 | 171 |
| 172 } // namespace | |
| 173 | |
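
`GetPixelEncodedPoseIndexByte()` above recovers a pose index that the submitted WebVR frame carries in its bottom-left pixel. For context, the sketch below illustrates what the submitting side conceptually does; it is not Blink's actual VRDisplay.cpp code, and the helper name is hypothetical. It only demonstrates the scheme the reader checks: R holds the index byte, G and B hold the magic numbers.

```cpp
// Illustration only: stamp the low byte of the pose index into the
// bottom-left pixel of the currently bound framebuffer, so that
// GetPixelEncodedPoseIndexByte() can read it back with glReadPixels(0,0,1,1).
void StampPoseIndexPixel(uint32_t pose_index) {  // hypothetical helper
  glEnable(GL_SCISSOR_TEST);
  glScissor(0, 0, 1, 1);  // restrict the clear to the single corner pixel
  // R carries the pose index byte; G and B carry the magic numbers {42, 142}
  // (kWebVrPosePixelMagicNumbers) used to reject garbage frames.
  glClearColor((pose_index % 256) / 255.0f, 42.0f / 255.0f, 142.0f / 255.0f,
               1.0f);
  glClear(GL_COLOR_BUFFER_BIT);
  glDisable(GL_SCISSOR_TEST);
}
```
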
| 174 VrShellGl::VrShellGl( | |
| 175 VrShell* vr_shell, | |
| 176 base::WeakPtr<VrShell> weak_vr_shell, | |
| 177 base::WeakPtr<VrInputManager> content_input_manager, | |
| 178 base::WeakPtr<VrInputManager> ui_input_manager, | |
| 179 scoped_refptr<base::SingleThreadTaskRunner> main_thread_task_runner, | |
| 180 gvr_context* gvr_api) | |
| 181 : task_runner_(base::ThreadTaskRunnerHandle::Get()), | |
| 182 vr_shell_(vr_shell), | |
| 183 weak_vr_shell_(weak_vr_shell), | |
| 184 content_input_manager_(content_input_manager), | |
| 185 ui_input_manager_(ui_input_manager), | |
| 186 main_thread_task_runner_(std::move(main_thread_task_runner)), | |
| 187 weak_ptr_factory_(this) { | |
| 188 | |
| 189 GvrInit(gvr_api); | |
| 190 InitializeGl(); | |
| 191 | |
| 192 gvr::Mat4f identity; | |
| 193 SetIdentityM(identity); | |
| 194 webvr_head_pose_.resize(kPoseRingBufferSize, identity); | |
| 195 webvr_head_pose_valid_.resize(kPoseRingBufferSize, false); | |
| 196 | |
| 197 draw_task_.Reset(base::Bind(&VrShellGl::DrawFrame, base::Unretained(this))); | |
| 198 | |
| 199 scene_.reset(new UiScene); | |
| 200 | |
| 201 InitializeRenderer(); | |
| 202 | |
| 203 ScheduleNextDrawFrame(); | |
| 204 } | |
| 205 | |
| 206 VrShellGl::~VrShellGl() { | |
| 207 draw_task_.Cancel(); | |
| 208 } | |
| 209 | |
| 210 void VrShellGl::InitializeGl() { | |
| 211 if (gl::GetGLImplementation() == gl::kGLImplementationNone && | |
| 212 !gl::init::InitializeGLOneOff()) { | |
| 213 LOG(ERROR) << "gl::init::InitializeGLOneOff failed"; | |
| 214 ForceExitVR(); | |
| *piman (2016/12/09 01:32:13): You would need to return here, if initialization fails.* | |
| *mthiesse (2016/12/09 15:47:59): whoops. Done.* | |
| 215 } | |
| 216 surface_ = gl::init::CreateOffscreenGLSurface(gfx::Size()); | |
| 217 if (!surface_.get()) { | |
| 218 LOG(ERROR) << "gl::init::CreateOffscreenGLSurface failed"; | |
| 219 ForceExitVR(); | |
| 220 } | |
| 221 context_ = gl::init::CreateGLContext(nullptr, surface_.get(), | |
| 222 gl::GLContextAttribs()); | |
| 223 if (!context_.get()) { | |
| 224 LOG(ERROR) << "gl::init::CreateGLContext failed"; | |
| 225 ForceExitVR(); | |
| 226 } | |
| 227 if (!context_->MakeCurrent(surface_.get())) { | |
| 228 LOG(ERROR) << "gl::GLContext::MakeCurrent() failed"; | |
| 229 ForceExitVR(); | |
| 230 } | |
| 231 | |
| 232 // TODO(mthiesse): We don't appear to have a VSync provider ever here. This is | |
| 233 // sort of okay, because the GVR swap chain will block if we render too fast, | |
| 234 // but we should address this properly. | |
| 235 if (surface_->GetVSyncProvider()) { | |
| 236 surface_->GetVSyncProvider()->GetVSyncParameters(base::Bind( | |
| 237 &VrShellGl::UpdateVSyncParameters, weak_ptr_factory_.GetWeakPtr())); | |
| 238 } else { | |
| 239 LOG(ERROR) << "No VSync Provider"; | |
| 240 } | |
| 241 | |
| 242 unsigned int textures[2]; | |
| 243 glGenTextures(2, textures); | |
| 244 ui_texture_id_ = textures[0]; | |
| 245 content_texture_id_ = textures[1]; | |
| 246 ui_surface_texture_ = gl::SurfaceTexture::Create(ui_texture_id_); | |
| 247 content_surface_texture_ = gl::SurfaceTexture::Create(content_texture_id_); | |
| 248 ui_surface_.reset(new gl::ScopedJavaSurface(ui_surface_texture_.get())); | |
| 249 content_surface_.reset(new gl::ScopedJavaSurface( | |
| 250 content_surface_texture_.get())); | |
| 251 ui_surface_texture_->SetFrameAvailableCallback(base::Bind( | |
| 252 &VrShellGl::OnUIFrameAvailable, weak_ptr_factory_.GetWeakPtr())); | |
| 253 content_surface_texture_->SetFrameAvailableCallback(base::Bind( | |
| 254 &VrShellGl::OnContentFrameAvailable, weak_ptr_factory_.GetWeakPtr())); | |
| 255 | |
| 256 content_surface_texture_->SetDefaultBufferSize( | |
| 257 content_tex_physical_size_.width, content_tex_physical_size_.height); | |
| 258 ui_surface_texture_->SetDefaultBufferSize(ui_tex_physical_size_.width, | |
| 259 ui_tex_physical_size_.height); | |
| 260 | |
| 261 main_thread_task_runner_->PostTask(FROM_HERE, base::Bind( | |
| 262 &VrShell::SurfacesChanged, weak_vr_shell_, | |
| 263 content_surface_->j_surface().obj(), | |
| 264 ui_surface_->j_surface().obj())); | |
| 265 } | |
| 266 | |
| 267 void VrShellGl::OnUIFrameAvailable() { | |
| 268 ui_surface_texture_->UpdateTexImage(); | |
| 269 } | |
| 270 | |
| 271 void VrShellGl::OnContentFrameAvailable() { | |
| 272 content_surface_texture_->UpdateTexImage(); | |
| 273 } | |
| 274 | |
| 275 void VrShellGl::GvrInit(gvr_context* gvr_api) { | |
| 276 gvr_api_ = gvr::GvrApi::WrapNonOwned(gvr_api); | |
| 277 controller_.reset(new VrController(gvr_api)); | |
| 257 | 278 |
| 258 ViewerType viewerType; | 279 ViewerType viewerType; |
| 259 switch (gvr_api_->GetViewerType()) { | 280 switch (gvr_api_->GetViewerType()) { |
| 260 case gvr::ViewerType::GVR_VIEWER_TYPE_DAYDREAM: | 281 case gvr::ViewerType::GVR_VIEWER_TYPE_DAYDREAM: |
| 261 viewerType = ViewerType::DAYDREAM; | 282 viewerType = ViewerType::DAYDREAM; |
| 262 break; | 283 break; |
| 263 case gvr::ViewerType::GVR_VIEWER_TYPE_CARDBOARD: | 284 case gvr::ViewerType::GVR_VIEWER_TYPE_CARDBOARD: |
| 264 viewerType = ViewerType::CARDBOARD; | 285 viewerType = ViewerType::CARDBOARD; |
| 265 break; | 286 break; |
| 266 default: | 287 default: |
| 267 NOTREACHED(); | 288 NOTREACHED(); |
| 268 viewerType = ViewerType::UNKNOWN_TYPE; | 289 viewerType = ViewerType::UNKNOWN_TYPE; |
| 269 break; | 290 break; |
| 270 } | 291 } |
| 271 UMA_HISTOGRAM_ENUMERATION("VRViewerType", static_cast<int>(viewerType), | 292 UMA_HISTOGRAM_ENUMERATION("VRViewerType", static_cast<int>(viewerType), |
| 272 static_cast<int>(ViewerType::VIEWER_TYPE_MAX)); | 293 static_cast<int>(ViewerType::VIEWER_TYPE_MAX)); |
| 273 } | 294 } |
| 274 | 295 |
| 275 void VrShell::InitializeGlOnGL(JNIEnv* env, | 296 void VrShellGl::InitializeRenderer() { |
| 276 const JavaParamRef<jobject>& obj, | |
| 277 jint content_texture_handle, | |
| 278 jint ui_texture_handle) { | |
| 279 base::AutoLock lock(gvr_init_lock_); | |
| 280 CHECK(gl::GetGLImplementation() != gl::kGLImplementationNone || | |
| 281 gl::init::InitializeGLOneOff()); | |
| 282 | |
| 283 content_texture_id_ = content_texture_handle; | |
| 284 ui_texture_id_ = ui_texture_handle; | |
| 285 | |
| 286 // While WebVR is going through the compositor path, it shares | 297 // While WebVR is going through the compositor path, it shares |
| 287 // the same texture ID. This will change once it gets its own | 298 // the same texture ID. This will change once it gets its own |
| 288 // surface, but store it separately to avoid future confusion. | 299 // surface, but store it separately to avoid future confusion. |
| 289 // TODO(klausw,crbug.com/655722): remove this. | 300 // TODO(klausw,crbug.com/655722): remove this. |
| 290 webvr_texture_id_ = content_texture_id_; | 301 webvr_texture_id_ = content_texture_id_; |
| 291 // Out of paranoia, explicitly reset the "pose valid" flags to false | 302 // Out of paranoia, explicitly reset the "pose valid" flags to false |
| 292 // from the GL thread. The constructor ran in the UI thread. | 303 // from the GL thread. The constructor ran in the UI thread. |
| 293 // TODO(klausw,crbug.com/655722): remove this. | 304 // TODO(klausw,crbug.com/655722): remove this. |
| 294 webvr_head_pose_valid_.assign(kPoseRingBufferSize, false); | 305 webvr_head_pose_valid_.assign(kPoseRingBufferSize, false); |
| 295 | 306 |
| (...skipping 54 matching lines...) | |
| 350 buffer_viewport_list_->GetBufferViewport(GVR_LEFT_EYE, | 361 buffer_viewport_list_->GetBufferViewport(GVR_LEFT_EYE, |
| 351 webvr_left_viewport_.get()); | 362 webvr_left_viewport_.get()); |
| 352 webvr_left_viewport_->SetSourceBufferIndex(kFramePrimaryBuffer); | 363 webvr_left_viewport_->SetSourceBufferIndex(kFramePrimaryBuffer); |
| 353 | 364 |
| 354 webvr_right_viewport_.reset( | 365 webvr_right_viewport_.reset( |
| 355 new gvr::BufferViewport(gvr_api_->CreateBufferViewport())); | 366 new gvr::BufferViewport(gvr_api_->CreateBufferViewport())); |
| 356 buffer_viewport_list_->GetBufferViewport(GVR_RIGHT_EYE, | 367 buffer_viewport_list_->GetBufferViewport(GVR_RIGHT_EYE, |
| 357 webvr_right_viewport_.get()); | 368 webvr_right_viewport_.get()); |
| 358 webvr_right_viewport_->SetSourceBufferIndex(kFramePrimaryBuffer); | 369 webvr_right_viewport_->SetSourceBufferIndex(kFramePrimaryBuffer); |
| 359 | 370 |
| 360 if (delegate_) { | 371 main_thread_task_runner_->PostTask(FROM_HERE, base::Bind( |
| 361 main_thread_task_runner_->PostTask( | 372 &VrShell::GvrDelegateReady, weak_vr_shell_)); |
| 362 FROM_HERE, base::Bind(&device::GvrDeviceProvider::OnGvrDelegateReady, | |
| 363 delegate_->GetDeviceProvider(), | |
| 364 weak_ptr_factory_.GetWeakPtr())); | |
| 365 } | |
| 366 } | 373 } |
| 367 | 374 |
| 368 void VrShell::UpdateControllerOnGL(const gvr::Vec3f& forward_vector) { | 375 void VrShellGl::UpdateController(const gvr::Vec3f& forward_vector) { |
| 369 controller_->UpdateState(); | 376 controller_->UpdateState(); |
| 370 | 377 |
| 371 #if defined(ENABLE_VR_SHELL) | 378 #if defined(ENABLE_VR_SHELL) |
| 379 // TODO(mthiesse): Fix menu button handling, which should be posted to the UI | |
| 380 // thread instead of handled here. | |
| 381 | |
| 372 // Note that button up/down state is transient, so ButtonUpHappened only | 382 // Note that button up/down state is transient, so ButtonUpHappened only |
| 373 // returns | 383 // returns true for a single frame (and we're guaranteed not to miss it). |
| 374 // true for a single frame (and we're guaranteed not to miss it). | |
| 375 if (controller_->ButtonUpHappened( | 384 if (controller_->ButtonUpHappened( |
| 376 gvr::ControllerButton::GVR_CONTROLLER_BUTTON_APP)) { | 385 gvr::ControllerButton::GVR_CONTROLLER_BUTTON_APP)) { |
| 377 html_interface_->SetMenuMode(!html_interface_->GetMenuMode()); | 386 // html_interface_->SetMenuMode(!html_interface_->GetMenuMode()); |
| 378 | 387 |
| 379 // TODO(mthiesse): The page is no longer visible when in menu mode. We | 388 // TODO(mthiesse): The page is no longer visible when in menu mode. We |
| 380 // should unfocus or otherwise let it know it's hidden. | 389 // should unfocus or otherwise let it know it's hidden. |
| 381 if (html_interface_->GetMode() == UiInterface::Mode::WEB_VR) { | 390 // if (html_interface_->GetMode() == UiInterface::Mode::WEB_VR) { |
| 382 const auto&& task = html_interface_->GetMenuMode() ? | 391 // const auto&& task = html_interface_->GetMenuMode() ? |
| 383 &device::GvrDeviceProvider::OnDisplayBlur : | 392 // &device::GvrDeviceProvider::OnDisplayBlur : |
| 384 &device::GvrDeviceProvider::OnDisplayFocus; | 393 // &device::GvrDeviceProvider::OnDisplayFocus; |
| 385 main_thread_task_runner_->PostTask( | 394 // main_thread_task_runner_->PostTask( |
| 386 FROM_HERE, base::Bind(task, delegate_->GetDeviceProvider())); | 395 // FROM_HERE, base::Bind(task, delegate_->GetDeviceProvider())); |
| 387 } | 396 // } |
| 388 } | 397 } |
| 389 #endif | 398 #endif |
| 390 if (html_interface_->GetMode() == UiInterface::Mode::WEB_VR) { | 399 if (vr_shell_->GetUiInterface()->GetMode() == UiInterface::Mode::WEB_VR) { |
| 391 // Process screen touch events for Cardboard button compatibility. | 400 // Process screen touch events for Cardboard button compatibility. |
| 392 // Also send tap events for controller "touchpad click" events. | 401 // Also send tap events for controller "touchpad click" events. |
| 393 if (touch_pending_ || | 402 if (touch_pending_ || controller_->ButtonUpHappened( |
| 394 controller_->ButtonUpHappened( | |
| 395 gvr::ControllerButton::GVR_CONTROLLER_BUTTON_CLICK)) { | 403 gvr::ControllerButton::GVR_CONTROLLER_BUTTON_CLICK)) { |
| 396 touch_pending_ = false; | 404 touch_pending_ = false; |
| 397 std::unique_ptr<WebGestureEvent> gesture(new WebGestureEvent()); | 405 std::unique_ptr<WebGestureEvent> gesture(new WebGestureEvent()); |
| 398 gesture->sourceDevice = blink::WebGestureDeviceTouchpad; | 406 gesture->sourceDevice = blink::WebGestureDeviceTouchpad; |
| 399 gesture->timeStampSeconds = | 407 gesture->timeStampSeconds = |
| 400 (base::TimeTicks::Now() - base::TimeTicks()).InSecondsF(); | 408 (base::TimeTicks::Now() - base::TimeTicks()).InSecondsF(); |
| 401 gesture->type = WebInputEvent::GestureTapDown; | 409 gesture->type = WebInputEvent::GestureTapDown; |
| 402 gesture->x = 0; | 410 gesture->x = 0; |
| 403 gesture->y = 0; | 411 gesture->y = 0; |
| 404 SendGestureOnGL(CONTENT, std::move(gesture)); | 412 SendGesture(CONTENT, std::move(gesture)); |
| 405 } | 413 } |
| 406 | 414 |
| 407 return; | 415 return; |
| 408 } | 416 } |
| 409 | 417 |
| 410 gvr::Vec3f ergo_neutral_pose; | 418 gvr::Vec3f ergo_neutral_pose; |
| 411 if (!controller_->IsConnected()) { | 419 if (!controller_->IsConnected()) { |
| 412 // No controller detected, set up a gaze cursor that tracks the | 420 // No controller detected, set up a gaze cursor that tracks the |
| 413 // forward direction. | 421 // forward direction. |
| 414 ergo_neutral_pose = {0.0f, 0.0f, -1.0f}; | 422 ergo_neutral_pose = {0.0f, 0.0f, -1.0f}; |
| (...skipping 74 matching lines...) | |
| 489 plane->copy_rect.width, plane->copy_rect.height}; | 497 plane->copy_rect.width, plane->copy_rect.height}; |
| 490 } | 498 } |
| 491 pixel_x = pixel_rect.width * x + pixel_rect.x; | 499 pixel_x = pixel_rect.width * x + pixel_rect.x; |
| 492 pixel_y = pixel_rect.height * y + pixel_rect.y; | 500 pixel_y = pixel_rect.height * y + pixel_rect.y; |
| 493 | 501 |
| 494 target_point_ = plane_intersection_point; | 502 target_point_ = plane_intersection_point; |
| 495 target_element_ = plane.get(); | 503 target_element_ = plane.get(); |
| 496 input_target = plane->content_quad ? CONTENT : UI; | 504 input_target = plane->content_quad ? CONTENT : UI; |
| 497 } | 505 } |
| 498 } | 506 } |
| 499 SendEventsToTargetOnGL(input_target, pixel_x, pixel_y); | 507 SendEventsToTarget(input_target, pixel_x, pixel_y); |
| 500 } | 508 } |
| 501 | 509 |
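
Most of the hit-testing inside UpdateController is collapsed in the diff above; only the final mapping from quad-local (x, y) in [0, 1] to texture pixels is visible. As a generic illustration of the ray-versus-quad step that precedes it (not the CL's exact math, and with illustrative names), assuming the quad is described by its center, unit right/up/normal vectors, and half-extents:

```cpp
#include <cmath>

// Sketch: intersect a ray (origin, unit direction) with an oriented quad and
// return normalized quad coordinates in [0, 1] x [0, 1]. All names here are
// illustrative; the CL derives equivalent quantities from rect->transform.
struct QuadHit {
  bool hit;
  float x;  // 0 = left edge, 1 = right edge
  float y;  // 0 = top edge, 1 = bottom edge (texture convention)
  float distance;
};

QuadHit IntersectQuad(const gvr::Vec3f& origin, const gvr::Vec3f& dir,
                      const gvr::Vec3f& center, const gvr::Vec3f& normal,
                      const gvr::Vec3f& right, const gvr::Vec3f& up,
                      float half_width, float half_height) {
  QuadHit result = {false, 0.0f, 0.0f, 0.0f};
  const float denom = dir.x * normal.x + dir.y * normal.y + dir.z * normal.z;
  if (std::fabs(denom) < 1e-6f)
    return result;  // Ray is parallel to the quad's plane.
  const gvr::Vec3f to_center = {center.x - origin.x, center.y - origin.y,
                                center.z - origin.z};
  const float t = (to_center.x * normal.x + to_center.y * normal.y +
                   to_center.z * normal.z) / denom;
  if (t < 0.0f)
    return result;  // Intersection is behind the ray origin.
  const gvr::Vec3f local = {origin.x + t * dir.x - center.x,
                            origin.y + t * dir.y - center.y,
                            origin.z + t * dir.z - center.z};
  const float lx = local.x * right.x + local.y * right.y + local.z * right.z;
  const float ly = local.x * up.x + local.y * up.y + local.z * up.z;
  if (std::fabs(lx) > half_width || std::fabs(ly) > half_height)
    return result;  // Hit the plane but outside the quad.
  result.hit = true;
  result.x = 0.5f + 0.5f * lx / half_width;
  result.y = 0.5f - 0.5f * ly / half_height;
  result.distance = t;
  return result;
}
```
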
| 502 void VrShell::SendEventsToTargetOnGL(InputTarget input_target, | 510 void VrShellGl::SendEventsToTarget(InputTarget input_target, |
| 503 int pixel_x, | 511 int pixel_x, |
| 504 int pixel_y) { | 512 int pixel_y) { |
| 505 std::vector<std::unique_ptr<WebGestureEvent>> gesture_list = | 513 std::vector<std::unique_ptr<WebGestureEvent>> gesture_list = |
| 506 controller_->DetectGestures(); | 514 controller_->DetectGestures(); |
| 507 double timestamp = gesture_list.front()->timeStampSeconds; | 515 double timestamp = gesture_list.front()->timeStampSeconds; |
| 508 | 516 |
| 509 if (touch_pending_) { | 517 if (touch_pending_) { |
| 510 touch_pending_ = false; | 518 touch_pending_ = false; |
| 511 std::unique_ptr<WebGestureEvent> event(new WebGestureEvent()); | 519 std::unique_ptr<WebGestureEvent> event(new WebGestureEvent()); |
| 512 event->type = WebInputEvent::GestureTapDown; | 520 event->type = WebInputEvent::GestureTapDown; |
| 513 event->sourceDevice = blink::WebGestureDeviceTouchpad; | 521 event->sourceDevice = blink::WebGestureDeviceTouchpad; |
| 514 event->timeStampSeconds = timestamp; | 522 event->timeStampSeconds = timestamp; |
| 515 event->x = pixel_x; | 523 event->x = pixel_x; |
| 516 event->y = pixel_y; | 524 event->y = pixel_y; |
| 517 gesture_list.push_back(std::move(event)); | 525 gesture_list.push_back(std::move(event)); |
| 518 } | 526 } |
| 519 | 527 |
| 520 for (const auto& gesture : gesture_list) { | 528 for (const auto& gesture : gesture_list) { |
| 521 switch (gesture->type) { | 529 switch (gesture->type) { |
| 522 case WebInputEvent::GestureScrollBegin: | 530 case WebInputEvent::GestureScrollBegin: |
| 523 case WebInputEvent::GestureScrollUpdate: | 531 case WebInputEvent::GestureScrollUpdate: |
| 524 case WebInputEvent::GestureScrollEnd: | 532 case WebInputEvent::GestureScrollEnd: |
| 525 case WebInputEvent::GestureFlingCancel: | 533 case WebInputEvent::GestureFlingCancel: |
| 526 case WebInputEvent::GestureFlingStart: | 534 case WebInputEvent::GestureFlingStart: |
| 527 SendGestureOnGL(CONTENT, | 535 SendGesture(CONTENT, base::WrapUnique(new WebGestureEvent(*gesture))); |
| 528 base::WrapUnique(new WebGestureEvent(*gesture))); | |
| 529 break; | 536 break; |
| 530 case WebInputEvent::GestureTapDown: | 537 case WebInputEvent::GestureTapDown: |
| 531 gesture->x = pixel_x; | 538 gesture->x = pixel_x; |
| 532 gesture->y = pixel_y; | 539 gesture->y = pixel_y; |
| 533 if (input_target != NONE) | 540 if (input_target != NONE) |
| 534 SendGestureOnGL(input_target, | 541 SendGesture(input_target, |
| 535 base::WrapUnique(new WebGestureEvent(*gesture))); | 542 base::WrapUnique(new WebGestureEvent(*gesture))); |
| 536 break; | 543 break; |
| 537 case WebInputEvent::Undefined: | 544 case WebInputEvent::Undefined: |
| 538 break; | 545 break; |
| 539 default: | 546 default: |
| 540 NOTREACHED(); | 547 NOTREACHED(); |
| 541 } | 548 } |
| 542 } | 549 } |
| 543 | 550 |
| 544 // Hover support | 551 // Hover support |
| 545 bool new_target = input_target != current_input_target_; | 552 bool new_target = input_target != current_input_target_; |
| 546 if (new_target && current_input_target_ != NONE) { | 553 if (new_target && current_input_target_ != NONE) { |
| 547 // Send a move event indicating that the pointer moved off of an element. | 554 // Send a move event indicating that the pointer moved off of an element. |
| 548 SendGestureOnGL(current_input_target_, | 555 SendGesture(current_input_target_, |
| 549 MakeMouseEvent(WebInputEvent::MouseLeave, timestamp, 0, 0)); | 556 MakeMouseEvent(WebInputEvent::MouseLeave, timestamp, 0, 0)); |
| 550 } | 557 } |
| 551 | |
| 552 current_input_target_ = input_target; | 558 current_input_target_ = input_target; |
| 553 if (current_input_target_ != NONE) { | 559 if (current_input_target_ != NONE) { |
| 554 WebInputEvent::Type type = | 560 WebInputEvent::Type type = |
| 555 new_target ? WebInputEvent::MouseEnter : WebInputEvent::MouseMove; | 561 new_target ? WebInputEvent::MouseEnter : WebInputEvent::MouseMove; |
| 556 SendGestureOnGL(input_target, | 562 SendGesture(input_target, |
| 557 MakeMouseEvent(type, timestamp, pixel_x, pixel_y)); | 563 MakeMouseEvent(type, timestamp, pixel_x, pixel_y)); |
| 558 } | 564 } |
| 559 } | 565 } |
| 560 | 566 |
| 561 void VrShell::SendGestureOnGL(InputTarget input_target, | 567 void VrShellGl::SendGesture(InputTarget input_target, |
| 562 std::unique_ptr<blink::WebInputEvent> event) { | 568 std::unique_ptr<blink::WebInputEvent> event) { |
| 563 DCHECK(input_target != NONE); | 569 DCHECK(input_target != NONE); |
| 564 const base::WeakPtr<VrInputManager>& weak_ptr = | 570 const base::WeakPtr<VrInputManager>& weak_ptr = |
| 565 input_target == CONTENT ? weak_content_input_manager_ | 571 input_target == CONTENT ? content_input_manager_: ui_input_manager_; |
| 566 : weak_ui_input_manager_; | |
| 567 main_thread_task_runner_->PostTask( | 572 main_thread_task_runner_->PostTask( |
| 568 FROM_HERE, | 573 FROM_HERE, |
| 569 base::Bind(&VrInputManager::ProcessUpdatedGesture, weak_ptr, | 574 base::Bind(&VrInputManager::ProcessUpdatedGesture, weak_ptr, |
| 570 base::Passed(std::move(event)))); | 575 base::Passed(std::move(event)))); |
| 571 } | 576 } |
| 572 | 577 |
| 573 void VrShell::SetGvrPoseForWebVr(const gvr::Mat4f& pose, uint32_t pose_num) { | 578 void VrShellGl::SetGvrPoseForWebVr(const gvr::Mat4f& pose, uint32_t pose_num) { |
| 574 webvr_head_pose_[pose_num % kPoseRingBufferSize] = pose; | 579 webvr_head_pose_[pose_num % kPoseRingBufferSize] = pose; |
| 575 webvr_head_pose_valid_[pose_num % kPoseRingBufferSize] = true; | 580 webvr_head_pose_valid_[pose_num % kPoseRingBufferSize] = true; |
| 576 } | 581 } |
| 577 | 582 |
| 578 int GetPixelEncodedPoseIndexByte() { | 583 bool VrShellGl::WebVrPoseByteIsValid(int pose_index_byte) { |
| 579 TRACE_EVENT0("gpu", "VrShell::GetPixelEncodedPoseIndex"); | |
| 580 // Read the pose index encoded in a bottom left pixel as color values. | |
| 581 // See also third_party/WebKit/Source/modules/vr/VRDisplay.cpp which | |
| 582 // encodes the pose index, and device/vr/android/gvr/gvr_device.cc | |
| 583 // which tracks poses. Returns the low byte (0..255) if valid, or -1 | |
| 584 // if not valid due to bad magic number. | |
| 585 uint8_t pixels[4]; | |
| 586 // Assume we're reading from the framebuffer we just wrote to. | |
| 587 // That's true currently, we may need to use glReadBuffer(GL_BACK) | |
| 588 // or equivalent if the rendering setup changes in the future. | |
| 589 glReadPixels(0, 0, 1, 1, GL_RGBA, GL_UNSIGNED_BYTE, pixels); | |
| 590 | |
| 591 // Check for the magic number written by VRDevice.cpp on submit. | |
| 592 // This helps avoid glitches from garbage data in the render | |
| 593 // buffer that can appear during initialization or resizing. These | |
| 594 // often appear as flashes of all-black or all-white pixels. | |
| 595 if (pixels[1] == kWebVrPosePixelMagicNumbers[0] && | |
| 596 pixels[2] == kWebVrPosePixelMagicNumbers[1]) { | |
| 597 // Pose is good. | |
| 598 return pixels[0]; | |
| 599 } | |
| 600 VLOG(1) << "WebVR: reject decoded pose index " << (int)pixels[0] << | |
| 601 ", bad magic number " << (int)pixels[1] << ", " << (int)pixels[2]; | |
| 602 return -1; | |
| 603 } | |
| 604 | |
| 605 bool VrShell::WebVrPoseByteIsValidOnGL(int pose_index_byte) { | |
| 606 if (pose_index_byte < 0) { | 584 if (pose_index_byte < 0) { |
| 607 return false; | 585 return false; |
| 608 } | 586 } |
| 609 if (!webvr_head_pose_valid_[pose_index_byte % kPoseRingBufferSize]) { | 587 if (!webvr_head_pose_valid_[pose_index_byte % kPoseRingBufferSize]) { |
| 610 VLOG(1) << "WebVR: reject decoded pose index " << pose_index_byte << | 588 VLOG(1) << "WebVR: reject decoded pose index " << pose_index_byte << |
| 611 ", not a valid pose"; | 589 ", not a valid pose"; |
| 612 return false; | 590 return false; |
| 613 } | 591 } |
| 614 return true; | 592 return true; |
| 615 } | 593 } |
| 616 | 594 |
| 617 void VrShell::DrawFrameOnGL(JNIEnv* env, const JavaParamRef<jobject>& obj) { | 595 void VrShellGl::DrawFrame() { |
| 618 TRACE_EVENT0("gpu", "VrShell::DrawFrame"); | 596 TRACE_EVENT0("gpu", "VrShellGl::DrawFrame"); |
| 619 // Reset the viewport list to just the pair of viewports for the | 597 // Reset the viewport list to just the pair of viewports for the |
| 620 // primary buffer each frame. Head-locked viewports get added by | 598 // primary buffer each frame. Head-locked viewports get added by |
| 621 // DrawVrShell if needed. | 599 // DrawVrShell if needed. |
| 622 buffer_viewport_list_->SetToRecommendedBufferViewports(); | 600 buffer_viewport_list_->SetToRecommendedBufferViewports(); |
| 623 | 601 |
| 624 if (html_interface_->GetMode() == UiInterface::Mode::WEB_VR) { | 602 // TODO(klausw): Fix this. Resizing buffers here leads to webVR mode showing |
| 625 // If needed, resize the primary buffer for use with WebVR. | 603 // nothing but a black screen. |
| 626 if (render_size_primary_ != render_size_primary_webvr_) { | 604 // if (vr_shell_->GetUiInterface()->GetMode() == UiInterface::Mode::WEB_VR) { |
| 627 if (!render_size_primary_webvr_.width) { | 605 // // If needed, resize the primary buffer for use with WebVR. |
| 628 VLOG(2) << "WebVR rendering size not known yet, dropping frame"; | 606 // if (render_size_primary_ != render_size_primary_webvr_) { |
| 629 return; | 607 // if (!render_size_primary_webvr_.width) { |
| 630 } | 608 // VLOG(2) << "WebVR rendering size not known yet, dropping frame"; |
| 631 render_size_primary_ = render_size_primary_webvr_; | 609 // return; |
| 632 swap_chain_->ResizeBuffer(kFramePrimaryBuffer, render_size_primary_); | 610 // } |
| 633 } | 611 // render_size_primary_ = render_size_primary_webvr_; |
| 634 } else { | 612 // swap_chain_->ResizeBuffer(kFramePrimaryBuffer, render_size_primary_); |
| 635 if (render_size_primary_ != render_size_primary_vrshell_) { | 613 // } |
| 636 render_size_primary_ = render_size_primary_vrshell_; | 614 // } else { |
| 637 swap_chain_->ResizeBuffer(kFramePrimaryBuffer, render_size_primary_); | 615 // if (render_size_primary_ != render_size_primary_vrshell_) { |
| 638 } | 616 // render_size_primary_ = render_size_primary_vrshell_; |
| 639 } | 617 // swap_chain_->ResizeBuffer(kFramePrimaryBuffer, render_size_primary_); |
| 618 // } | |
| 619 // } | |
| 640 | 620 |
| 641 gvr::Frame frame = swap_chain_->AcquireFrame(); | 621 gvr::Frame frame = swap_chain_->AcquireFrame(); |
| 642 gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow(); | 622 gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow(); |
| 643 target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos; | 623 target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos; |
| 644 | 624 |
| 645 gvr::Mat4f head_pose = | 625 gvr::Mat4f head_pose = |
| 646 gvr_api_->GetHeadSpaceFromStartSpaceRotation(target_time); | 626 gvr_api_->GetHeadSpaceFromStartSpaceRotation(target_time); |
| 647 | 627 |
| 648 gvr::Vec3f position = GetTranslation(head_pose); | 628 gvr::Vec3f position = GetTranslation(head_pose); |
| 649 if (position.x == 0.0f && position.y == 0.0f && position.z == 0.0f) { | 629 if (position.x == 0.0f && position.y == 0.0f && position.z == 0.0f) { |
| 650 // This appears to be a 3DOF pose without a neck model. Add one. | 630 // This appears to be a 3DOF pose without a neck model. Add one. |
| 651 // The head pose has redundant data. Assume we're only using the | 631 // The head pose has redundant data. Assume we're only using the |
| 652 // object_from_reference_matrix, we're not updating position_external. | 632 // object_from_reference_matrix, we're not updating position_external. |
| 653 // TODO: Not sure what object_from_reference_matrix is. The new api removed | 633 // TODO: Not sure what object_from_reference_matrix is. The new api removed |
| 654 // it. For now, removing it seems to work fine. | 634 // it. For now, removing it seems to work fine. |
| 655 gvr_api_->ApplyNeckModel(head_pose, 1.0f); | 635 gvr_api_->ApplyNeckModel(head_pose, 1.0f); |
| 656 } | 636 } |
| 657 | 637 |
| 658 // Bind the primary framebuffer. | |
| 659 frame.BindBuffer(kFramePrimaryBuffer); | 638 frame.BindBuffer(kFramePrimaryBuffer); |
| 660 | 639 |
| 661 HandleQueuedTasksOnGL(); | |
| 662 | |
| 663 // Update the render position of all UI elements (including desktop). | 640 // Update the render position of all UI elements (including desktop). |
| 664 const float screen_tilt = kDesktopScreenTiltDefault * M_PI / 180.0f; | 641 const float screen_tilt = kDesktopScreenTiltDefault * M_PI / 180.0f; |
| 665 scene_->UpdateTransforms(screen_tilt, UiScene::TimeInMicroseconds()); | 642 scene_->UpdateTransforms(screen_tilt, UiScene::TimeInMicroseconds()); |
| 666 | 643 |
| 667 UpdateControllerOnGL(GetForwardVector(head_pose)); | 644 UpdateController(GetForwardVector(head_pose)); |
| 668 | 645 |
| 669 if (html_interface_->GetMode() == UiInterface::Mode::WEB_VR) { | 646 if (vr_shell_->GetUiInterface()->GetMode() == UiInterface::Mode::WEB_VR) { |
| 670 DrawWebVrOnGL(); | 647 DrawWebVr(); |
| 671 | 648 |
| 672 // When using async reprojection, we need to know which pose was used in | 649 // When using async reprojection, we need to know which pose was used in |
| 673 // the WebVR app for drawing this frame. Due to unknown amounts of | 650 // the WebVR app for drawing this frame. Due to unknown amounts of |
| 674 // buffering in the compositor and SurfaceTexture, we read the pose number | 651 // buffering in the compositor and SurfaceTexture, we read the pose number |
| 675 // from a corner pixel. There's no point in doing this for legacy | 652 // from a corner pixel. There's no point in doing this for legacy |
| 676 // distortion rendering since that doesn't need a pose, and reading back | 653 // distortion rendering since that doesn't need a pose, and reading back |
| 677 // pixels is an expensive operation. TODO(klausw,crbug.com/655722): stop | 654 // pixels is an expensive operation. TODO(klausw,crbug.com/655722): stop |
| 678 // doing this once we have working no-compositor rendering for WebVR. | 655 // doing this once we have working no-compositor rendering for WebVR. |
| 679 if (gvr_api_->GetAsyncReprojectionEnabled()) { | 656 if (gvr_api_->GetAsyncReprojectionEnabled()) { |
| 680 int pose_index_byte = GetPixelEncodedPoseIndexByte(); | 657 int pose_index_byte = GetPixelEncodedPoseIndexByte(); |
| 681 if (WebVrPoseByteIsValidOnGL(pose_index_byte)) { | 658 if (WebVrPoseByteIsValid(pose_index_byte)) { |
| 682 // We have a valid pose, use it for reprojection. | 659 // We have a valid pose, use it for reprojection. |
| 683 webvr_left_viewport_->SetReprojection(GVR_REPROJECTION_FULL); | 660 webvr_left_viewport_->SetReprojection(GVR_REPROJECTION_FULL); |
| 684 webvr_right_viewport_->SetReprojection(GVR_REPROJECTION_FULL); | 661 webvr_right_viewport_->SetReprojection(GVR_REPROJECTION_FULL); |
| 685 head_pose = webvr_head_pose_[pose_index_byte % kPoseRingBufferSize]; | 662 head_pose = webvr_head_pose_[pose_index_byte % kPoseRingBufferSize]; |
| 686 // We can't mark the used pose as invalid since unfortunately | 663 // We can't mark the used pose as invalid since unfortunately |
| 687 // we have to reuse them. The compositor will re-submit stale | 664 // we have to reuse them. The compositor will re-submit stale |
| 688 // frames on vsync, and we can't tell that this has happened | 665 // frames on vsync, and we can't tell that this has happened |
| 689 // until we've read the pose index from it, and at that point | 666 // until we've read the pose index from it, and at that point |
| 690 // it's too late to skip rendering. | 667 // it's too late to skip rendering. |
| 691 } else { | 668 } else { |
| 692 // If we don't get a valid frame ID back we shouldn't attempt | 669 // If we don't get a valid frame ID back we shouldn't attempt |
| 693 // to reproject by an invalid matrix, so turn off reprojection | 670 // to reproject by an invalid matrix, so turn off reprojection |
| 694 // instead. Invalid poses can permanently break reprojection | 671 // instead. Invalid poses can permanently break reprojection |
| 695 // for this GVR instance: http://crbug.com/667327 | 672 // for this GVR instance: http://crbug.com/667327 |
| 696 webvr_left_viewport_->SetReprojection(GVR_REPROJECTION_NONE); | 673 webvr_left_viewport_->SetReprojection(GVR_REPROJECTION_NONE); |
| 697 webvr_right_viewport_->SetReprojection(GVR_REPROJECTION_NONE); | 674 webvr_right_viewport_->SetReprojection(GVR_REPROJECTION_NONE); |
| 698 } | 675 } |
| 699 } | 676 } |
| 700 } | 677 } |
| 701 | 678 |
| 702 DrawVrShellOnGL(head_pose, frame); | 679 DrawVrShell(head_pose, frame); |
| 703 | 680 |
| 704 frame.Unbind(); | 681 frame.Unbind(); |
| 705 frame.Submit(*buffer_viewport_list_, head_pose); | 682 frame.Submit(*buffer_viewport_list_, head_pose); |
| 683 | |
| 684 // No need to SwapBuffers for an offscreen surface. | |
| 685 ScheduleNextDrawFrame(); | |
| 706 } | 686 } |
| 707 | 687 |
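
DrawFrame() re-arms itself through ScheduleNextDrawFrame(), whose body is not part of this hunk. The sketch below shows one plausible shape for that step, given the draw_task_ CancelableClosure bound in the constructor; the vsync_interval_ member and the 60 Hz fallback are assumptions for illustration, not necessarily what the CL implements (as the TODO in InitializeGl notes, a VSync provider may not be available at all).

```cpp
// Hypothetical sketch, not the CL's implementation: post the cancelable draw
// task back onto the GL thread's task runner, roughly once per vsync.
void VrShellGl::ScheduleNextDrawFrame() {
  const base::TimeDelta interval =
      vsync_interval_.is_zero() ? base::TimeDelta::FromSecondsD(1.0 / 60.0)
                                : vsync_interval_;  // assumed member
  task_runner_->PostDelayedTask(FROM_HERE, draw_task_.callback(), interval);
}
```
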
| 708 void VrShell::DrawVrShellOnGL(const gvr::Mat4f& head_pose, | 688 void VrShellGl::DrawVrShell(const gvr::Mat4f& head_pose, |
| 709 gvr::Frame &frame) { | 689 gvr::Frame &frame) { |
| 710 TRACE_EVENT0("gpu", "VrShell::DrawVrShell"); | 690 TRACE_EVENT0("gpu", "VrShellGl::DrawVrShell"); |
| 711 std::vector<const ContentRectangle*> head_locked_elements; | 691 std::vector<const ContentRectangle*> head_locked_elements; |
| 712 std::vector<const ContentRectangle*> world_elements; | 692 std::vector<const ContentRectangle*> world_elements; |
| 713 for (const auto& rect : scene_->GetUiElements()) { | 693 for (const auto& rect : scene_->GetUiElements()) { |
| 714 if (!rect->visible) { | 694 if (!rect->visible) { |
| 715 continue; | 695 continue; |
| 716 } | 696 } |
| 717 if (rect->lock_to_fov) { | 697 if (rect->lock_to_fov) { |
| 718 head_locked_elements.push_back(rect.get()); | 698 head_locked_elements.push_back(rect.get()); |
| 719 } else { | 699 } else { |
| 720 world_elements.push_back(rect.get()); | 700 world_elements.push_back(rect.get()); |
| 721 } | 701 } |
| 722 } | 702 } |
| 723 | 703 |
| 724 if (html_interface_->GetMode() == UiInterface::Mode::WEB_VR) { | 704 if (vr_shell_->GetUiInterface()->GetMode() == UiInterface::Mode::WEB_VR) { |
| 725 // WebVR is incompatible with 3D world compositing since the | 705 // WebVR is incompatible with 3D world compositing since the |
| 726 // depth buffer was already populated with unknown scaling - the | 706 // depth buffer was already populated with unknown scaling - the |
| 727 // WebVR app has full control over zNear/zFar. Just leave the | 707 // WebVR app has full control over zNear/zFar. Just leave the |
| 728 // existing content in place in the primary buffer without | 708 // existing content in place in the primary buffer without |
| 729 // clearing. Currently, there aren't any world elements in WebVR | 709 // clearing. Currently, there aren't any world elements in WebVR |
| 730 // mode, this will need further testing if those get added | 710 // mode, this will need further testing if those get added |
| 731 // later. | 711 // later. |
| 732 } else { | 712 } else { |
| 733 // Non-WebVR mode, enable depth testing and clear the primary buffers. | 713 // Non-WebVR mode, enable depth testing and clear the primary buffers. |
| 734 glEnable(GL_CULL_FACE); | 714 glEnable(GL_CULL_FACE); |
| 735 glEnable(GL_DEPTH_TEST); | 715 glEnable(GL_DEPTH_TEST); |
| 736 glDepthMask(GL_TRUE); | 716 glDepthMask(GL_TRUE); |
| 737 | 717 |
| 738 glClearColor(0.1f, 0.1f, 0.1f, 1.0f); | 718 glClearColor(0.1f, 0.1f, 0.1f, 1.0f); |
| 739 glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); | 719 glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); |
| 740 } | 720 } |
| 741 | |
| 742 if (!world_elements.empty()) { | 721 if (!world_elements.empty()) { |
| 743 DrawUiViewOnGL(&head_pose, world_elements, render_size_primary_, | 722 DrawUiView(&head_pose, world_elements, render_size_primary_, |
| 744 kViewportListPrimaryOffset); | 723 kViewportListPrimaryOffset); |
| 745 } | 724 } |
| 746 | 725 |
| 747 if (!head_locked_elements.empty()) { | 726 if (!head_locked_elements.empty()) { |
| 748 // Add head-locked viewports. The list gets reset to just | 727 // Add head-locked viewports. The list gets reset to just |
| 749 // the recommended viewports (for the primary buffer) each frame. | 728 // the recommended viewports (for the primary buffer) each frame. |
| 750 buffer_viewport_list_->SetBufferViewport( | 729 buffer_viewport_list_->SetBufferViewport( |
| 751 kViewportListHeadlockedOffset + GVR_LEFT_EYE, | 730 kViewportListHeadlockedOffset + GVR_LEFT_EYE, |
| 752 *headlocked_left_viewport_); | 731 *headlocked_left_viewport_); |
| 753 buffer_viewport_list_->SetBufferViewport( | 732 buffer_viewport_list_->SetBufferViewport( |
| 754 kViewportListHeadlockedOffset + GVR_RIGHT_EYE, | 733 kViewportListHeadlockedOffset + GVR_RIGHT_EYE, |
| 755 *headlocked_right_viewport_); | 734 *headlocked_right_viewport_); |
| 756 | 735 |
| 757 // Bind the headlocked framebuffer. | 736 // Bind the headlocked framebuffer. |
| 737 // TODO(mthiesse): We don't unbind this? Maybe some cleanup is in order | |
| 738 // here. | |
| 758 frame.BindBuffer(kFrameHeadlockedBuffer); | 739 frame.BindBuffer(kFrameHeadlockedBuffer); |
| 759 glClearColor(0.0f, 0.0f, 0.0f, 0.0f); | 740 glClearColor(0.0f, 0.0f, 0.0f, 0.0f); |
| 760 glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); | 741 glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); |
| 761 DrawUiViewOnGL(nullptr, head_locked_elements, render_size_headlocked_, | 742 DrawUiView(nullptr, head_locked_elements, render_size_headlocked_, |
| 762 kViewportListHeadlockedOffset); | 743 kViewportListHeadlockedOffset); |
| 763 } | 744 } |
| 764 } | 745 } |
| 765 | 746 |
| 766 void VrShell::SetWebVRRenderSurfaceSize(int width, int height) { | 747 void VrShellGl::SetWebVRRenderSurfaceSize(int width, int height) { |
| 767 render_size_primary_webvr_.width = width; | 748 render_size_primary_webvr_.width = width; |
| 768 render_size_primary_webvr_.height = height; | 749 render_size_primary_webvr_.height = height; |
| 769 // TODO(klausw,crbug.com/655722): set the WebVR render surface size here once | 750 // TODO(klausw,crbug.com/655722): set the WebVR render surface size here once |
| 770 // we have that. | 751 // we have that. |
| 771 } | 752 } |
| 772 | 753 |
| 773 gvr::Sizei VrShell::GetWebVRCompositorSurfaceSize() { | 754 gvr::Sizei VrShellGl::GetWebVRCompositorSurfaceSize() { |
| 774 // This is a stopgap while we're using the WebVR compositor rendering path. | 755 // This is a stopgap while we're using the WebVR compositor rendering path. |
| 775 // TODO(klausw,crbug.com/655722): Remove this method and member once we're | 756 // TODO(klausw,crbug.com/655722): Remove this method and member once we're |
| 776 // using a separate WebVR render surface. | 757 // using a separate WebVR render surface. |
| 777 return content_tex_physical_size_; | 758 return content_tex_physical_size_; |
| 778 } | 759 } |
| 779 | 760 |
| 780 | 761 void VrShellGl::DrawUiView(const gvr::Mat4f* head_pose, |
| 781 void VrShell::DrawUiViewOnGL(const gvr::Mat4f* head_pose, | 762 const std::vector<const ContentRectangle*>& elements, |
| 782 const std::vector<const ContentRectangle*>& elements, | 763 const gvr::Sizei& render_size, |
| 783 const gvr::Sizei& render_size, int viewport_offset) { | 764 int viewport_offset) { |
| 784 TRACE_EVENT0("gpu", "VrShell::DrawUiView"); | 765 TRACE_EVENT0("gpu", "VrShellGl::DrawUiView"); |
| 785 for (auto eye : {GVR_LEFT_EYE, GVR_RIGHT_EYE}) { | 766 for (auto eye : {GVR_LEFT_EYE, GVR_RIGHT_EYE}) { |
| 786 buffer_viewport_list_->GetBufferViewport( | 767 buffer_viewport_list_->GetBufferViewport( |
| 787 eye + viewport_offset, buffer_viewport_.get()); | 768 eye + viewport_offset, buffer_viewport_.get()); |
| 788 | 769 |
| 789 gvr::Mat4f view_matrix = gvr_api_->GetEyeFromHeadMatrix(eye); | 770 gvr::Mat4f view_matrix = gvr_api_->GetEyeFromHeadMatrix(eye); |
| 790 if (head_pose != nullptr) { | 771 if (head_pose != nullptr) { |
| 791 view_matrix = MatrixMul(view_matrix, *head_pose); | 772 view_matrix = MatrixMul(view_matrix, *head_pose); |
| 792 } | 773 } |
| 793 | 774 |
| 794 gvr::Recti pixel_rect = | 775 gvr::Recti pixel_rect = |
| 795 CalculatePixelSpaceRect(render_size, buffer_viewport_->GetSourceUv()); | 776 CalculatePixelSpaceRect(render_size, buffer_viewport_->GetSourceUv()); |
| 796 glViewport(pixel_rect.left, pixel_rect.bottom, | 777 glViewport(pixel_rect.left, pixel_rect.bottom, |
| 797 pixel_rect.right - pixel_rect.left, | 778 pixel_rect.right - pixel_rect.left, |
| 798 pixel_rect.top - pixel_rect.bottom); | 779 pixel_rect.top - pixel_rect.bottom); |
| 799 | 780 |
| 800 const gvr::Mat4f render_matrix = MatrixMul( | 781 const gvr::Mat4f render_matrix = MatrixMul( |
| 801 PerspectiveMatrixFromView( | 782 PerspectiveMatrixFromView( |
| 802 buffer_viewport_->GetSourceFov(), kZNear, kZFar), | 783 buffer_viewport_->GetSourceFov(), kZNear, kZFar), |
| 803 view_matrix); | 784 view_matrix); |
| 804 | 785 |
| 805 DrawElementsOnGL(render_matrix, elements); | 786 DrawElements(render_matrix, elements); |
| 806 if (head_pose != nullptr && | 787 if (head_pose != nullptr && |
| 807 html_interface_->GetMode() != UiInterface::Mode::WEB_VR) { | 788 vr_shell_->GetUiInterface()->GetMode() != UiInterface::Mode::WEB_VR) { |
| 808 DrawCursorOnGL(render_matrix); | 789 DrawCursor(render_matrix); |
| 809 } | 790 } |
| 810 } | 791 } |
| 811 } | 792 } |
| 812 | 793 |
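
DrawUiView builds each eye's projection with PerspectiveMatrixFromView(fov, kZNear, kZFar) from vr_math.h, which is not shown in this diff. For reference, the sketch below shows the standard asymmetric-frustum construction such a helper typically performs, assuming GVR reports the field of view as half-angles in degrees; the function name and exact form are illustrative, not the actual vr_math.h code.

```cpp
#include <cmath>

// Sketch of a GVR-style projection matrix from a per-eye field of view
// (half-angles in degrees) and near/far clip planes. gvr::Mat4f is row-major.
gvr::Mat4f ProjectionFromFov(const gvr::Rectf& fov, float z_near, float z_far) {
  const float kDegToRad = static_cast<float>(M_PI) / 180.0f;
  const float left = -std::tan(fov.left * kDegToRad) * z_near;
  const float right = std::tan(fov.right * kDegToRad) * z_near;
  const float bottom = -std::tan(fov.bottom * kDegToRad) * z_near;
  const float top = std::tan(fov.top * kDegToRad) * z_near;
  gvr::Mat4f p = {};  // all entries zero
  p.m[0][0] = 2.0f * z_near / (right - left);
  p.m[0][2] = (right + left) / (right - left);
  p.m[1][1] = 2.0f * z_near / (top - bottom);
  p.m[1][2] = (top + bottom) / (top - bottom);
  p.m[2][2] = -(z_far + z_near) / (z_far - z_near);
  p.m[2][3] = -2.0f * z_far * z_near / (z_far - z_near);
  p.m[3][2] = -1.0f;
  return p;
}
```
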
| 813 void VrShell::DrawElementsOnGL( | 794 void VrShellGl::DrawElements( |
| 814 const gvr::Mat4f& render_matrix, | 795 const gvr::Mat4f& render_matrix, |
| 815 const std::vector<const ContentRectangle*>& elements) { | 796 const std::vector<const ContentRectangle*>& elements) { |
| 816 for (const auto& rect : elements) { | 797 for (const auto& rect : elements) { |
| 817 Rectf copy_rect; | 798 Rectf copy_rect; |
| 818 jint texture_handle; | 799 jint texture_handle; |
| 819 if (rect->content_quad) { | 800 if (rect->content_quad) { |
| 820 copy_rect = {0, 0, 1, 1}; | 801 copy_rect = {0, 0, 1, 1}; |
| 821 texture_handle = content_texture_id_; | 802 texture_handle = content_texture_id_; |
| 822 } else { | 803 } else { |
| 823 copy_rect.x = static_cast<float>(rect->copy_rect.x) / ui_tex_css_width_; | 804 copy_rect.x = static_cast<float>(rect->copy_rect.x) / ui_tex_css_width_; |
| 824 copy_rect.y = static_cast<float>(rect->copy_rect.y) / ui_tex_css_height_; | 805 copy_rect.y = static_cast<float>(rect->copy_rect.y) / ui_tex_css_height_; |
| 825 copy_rect.width = static_cast<float>(rect->copy_rect.width) / | 806 copy_rect.width = static_cast<float>(rect->copy_rect.width) / |
| 826 ui_tex_css_width_; | 807 ui_tex_css_width_; |
| 827 copy_rect.height = static_cast<float>(rect->copy_rect.height) / | 808 copy_rect.height = static_cast<float>(rect->copy_rect.height) / |
| 828 ui_tex_css_height_; | 809 ui_tex_css_height_; |
| 829 texture_handle = ui_texture_id_; | 810 texture_handle = ui_texture_id_; |
| 830 } | 811 } |
| 831 gvr::Mat4f transform = MatrixMul(render_matrix, rect->transform.to_world); | 812 gvr::Mat4f transform = MatrixMul(render_matrix, rect->transform.to_world); |
| 832 vr_shell_renderer_->GetTexturedQuadRenderer()->Draw( | 813 vr_shell_renderer_->GetTexturedQuadRenderer()->Draw( |
| 833 texture_handle, transform, copy_rect); | 814 texture_handle, transform, copy_rect); |
| 834 } | 815 } |
| 835 } | 816 } |
| 836 | 817 |
| 837 void VrShell::DrawCursorOnGL(const gvr::Mat4f& render_matrix) { | 818 void VrShellGl::DrawCursor(const gvr::Mat4f& render_matrix) { |
| 838 gvr::Mat4f mat; | 819 gvr::Mat4f mat; |
| 839 SetIdentityM(mat); | 820 SetIdentityM(mat); |
| 840 | 821 |
| 841 // Draw the reticle. | 822 // Draw the reticle. |
| 842 | 823 |
| 843 // Scale the pointer to have a fixed FOV size at any distance. | 824 // Scale the pointer to have a fixed FOV size at any distance. |
| 844 const float eye_to_target = Distance(target_point_, kOrigin); | 825 const float eye_to_target = Distance(target_point_, kOrigin); |
| 845 ScaleM(mat, mat, kReticleWidth * eye_to_target, | 826 ScaleM(mat, mat, kReticleWidth * eye_to_target, |
| 846 kReticleHeight * eye_to_target, 1.0f); | 827 kReticleHeight * eye_to_target, 1.0f); |
| 847 | 828 |
| (...skipping 52 matching lines...) | |
| 900 | 881 |
| 901 // Move the beam origin to the hand. | 882 // Move the beam origin to the hand. |
| 902 TranslateM(face_transform, face_transform, kHandPosition.x, kHandPosition.y, | 883 TranslateM(face_transform, face_transform, kHandPosition.x, kHandPosition.y, |
| 903 kHandPosition.z); | 884 kHandPosition.z); |
| 904 | 885 |
| 905 transform = MatrixMul(render_matrix, face_transform); | 886 transform = MatrixMul(render_matrix, face_transform); |
| 906 vr_shell_renderer_->GetLaserRenderer()->Draw(transform); | 887 vr_shell_renderer_->GetLaserRenderer()->Draw(transform); |
| 907 } | 888 } |
| 908 } | 889 } |
| 909 | 890 |
| 910 void VrShell::DrawWebVrOnGL() { | 891 void VrShellGl::DrawWebVr() { |
| 911 TRACE_EVENT0("gpu", "VrShell::DrawWebVr"); | 892 TRACE_EVENT0("gpu", "VrShellGl::DrawWebVr"); |
| 912 // Don't need face culling, depth testing, blending, etc. Turn it all off. | 893 // Don't need face culling, depth testing, blending, etc. Turn it all off. |
| 913 glDisable(GL_CULL_FACE); | 894 glDisable(GL_CULL_FACE); |
| 914 glDepthMask(GL_FALSE); | 895 glDepthMask(GL_FALSE); |
| 915 glDisable(GL_DEPTH_TEST); | 896 glDisable(GL_DEPTH_TEST); |
| 916 glDisable(GL_SCISSOR_TEST); | 897 glDisable(GL_SCISSOR_TEST); |
| 917 glDisable(GL_BLEND); | 898 glDisable(GL_BLEND); |
| 918 glDisable(GL_POLYGON_OFFSET_FILL); | 899 glDisable(GL_POLYGON_OFFSET_FILL); |
| 919 | 900 |
| 920 glViewport(0, 0, render_size_primary_.width, render_size_primary_.height); | 901 glViewport(0, 0, render_size_primary_.width, render_size_primary_.height); |
| 921 vr_shell_renderer_->GetWebVrRenderer()->Draw(webvr_texture_id_); | 902 vr_shell_renderer_->GetWebVrRenderer()->Draw(webvr_texture_id_); |
| 922 | 903 |
| 923 buffer_viewport_list_->SetBufferViewport(GVR_LEFT_EYE, | 904 buffer_viewport_list_->SetBufferViewport(GVR_LEFT_EYE, |
| 924 *webvr_left_viewport_); | 905 *webvr_left_viewport_); |
| 925 buffer_viewport_list_->SetBufferViewport(GVR_RIGHT_EYE, | 906 buffer_viewport_list_->SetBufferViewport(GVR_RIGHT_EYE, |
| 926 *webvr_right_viewport_); | 907 *webvr_right_viewport_); |
| 927 } | 908 } |
| 928 | 909 |
| 929 void VrShell::OnTriggerEventOnUI(JNIEnv* env, | 910 void VrShellGl::OnTriggerEvent() { |
| 930 const JavaParamRef<jobject>& obj) { | |
| 931 // Set a flag to handle this on the render thread at the next frame. | 911 // Set a flag to handle this on the render thread at the next frame. |
| 932 touch_pending_ = true; | 912 touch_pending_ = true; |
| 933 } | 913 } |
| 934 | 914 |
| 935 void VrShell::OnPauseOnUI(JNIEnv* env, const JavaParamRef<jobject>& obj) { | 915 void VrShellGl::OnPause() { |
| 936 if (gvr_api_ == nullptr) | |
| 937 return; | |
| 938 | |
| 939 // TODO(mthiesse): Clean up threading here. | |
| 940 controller_->OnPause(); | 916 controller_->OnPause(); |
| 941 gvr_api_->PauseTracking(); | 917 gvr_api_->PauseTracking(); |
| 942 SetShowingOverscrollGlowOnUI(true); | |
| 943 | |
| 944 // exit vr session | |
| 945 metrics_helper_->SetVRActive(false); | |
| 946 } | 918 } |
| 947 | 919 |
| 948 void VrShell::OnResumeOnUI(JNIEnv* env, const JavaParamRef<jobject>& obj) { | 920 void VrShellGl::OnResume() { |
| 949 if (gvr_api_ == nullptr) | |
| 950 return; | |
| 951 | |
| 952 // TODO(mthiesse): Clean up threading here. | |
| 953 gvr_api_->RefreshViewerProfile(); | 921 gvr_api_->RefreshViewerProfile(); |
| 954 gvr_api_->ResumeTracking(); | 922 gvr_api_->ResumeTracking(); |
| 955 controller_->OnResume(); | 923 controller_->OnResume(); |
| 956 SetShowingOverscrollGlowOnUI(false); | |
| 957 | |
| 958 // enter vr session | |
| 959 metrics_helper_->SetVRActive(true); | |
| 960 } | 924 } |
| 961 | 925 |
| 962 void VrShell::SetShowingOverscrollGlowOnUI(bool showing_glow) { | 926 void VrShellGl::SetWebVrMode(bool enabled) { |
| 963 main_contents_->GetRenderWidgetHostView()->SetShowingOverscrollGlow( | |
| 964 showing_glow); | |
| 965 } | |
| 966 | |
| 967 base::WeakPtr<VrShell> VrShell::GetWeakPtrOnUI( | |
| 968 const content::WebContents* web_contents) { | |
| 969 // Ensure that the WebContents requesting the VrShell instance is the one | |
| 970 // we created. | |
| 971 if (g_instance != nullptr && g_instance->ui_contents_ == web_contents) | |
| 972 return g_instance->weak_ptr_factory_.GetWeakPtr(); | |
| 973 return base::WeakPtr<VrShell>(nullptr); | |
| 974 } | |
| 975 | |
| 976 void VrShell::OnDomContentsLoadedOnUI() { | |
| 977 html_interface_->SetURL(main_contents_->GetVisibleURL()); | |
| 978 html_interface_->SetLoading(main_contents_->IsLoading()); | |
| 979 html_interface_->OnDomContentsLoaded(); | |
| 980 } | |
| 981 | |
| 982 void VrShell::SetWebVrModeOnUI(JNIEnv* env, | |
| 983 const base::android::JavaParamRef<jobject>& obj, | |
| 984 bool enabled) { | |
| 985 metrics_helper_->SetWebVREnabled(enabled); | |
| 986 if (enabled) { | 927 if (enabled) { |
| 987 html_interface_->SetMode(UiInterface::Mode::WEB_VR); | 928 vr_shell_->GetUiInterface()->SetMode(UiInterface::Mode::WEB_VR); |
| 988 } else { | 929 } else { |
| 989 html_interface_->SetMode(UiInterface::Mode::STANDARD); | 930 vr_shell_->GetUiInterface()->SetMode(UiInterface::Mode::STANDARD); |
| 990 } | 931 } |
| 991 } | 932 } |
| 992 | 933 |
| 993 void VrShell::SetWebVRSecureOrigin(bool secure_origin) { | 934 void VrShellGl::UpdateWebVRTextureBounds(const gvr::Rectf& left_bounds, |
| 994 // TODO(cjgrant): Align this state with the logic that drives the omnibox. | 935 const gvr::Rectf& right_bounds) { |
| 995 html_interface_->SetWebVRSecureOrigin(secure_origin); | |
| 996 } | |
| 997 | |
| 998 void VrShell::SubmitWebVRFrame() {} | |
| 999 | |
| 1000 void VrShell::UpdateWebVRTextureBounds(const gvr::Rectf& left_bounds, | |
| 1001 const gvr::Rectf& right_bounds) { | |
| 1002 webvr_left_viewport_->SetSourceUv(left_bounds); | 936 webvr_left_viewport_->SetSourceUv(left_bounds); |
| 1003 webvr_right_viewport_->SetSourceUv(right_bounds); | 937 webvr_right_viewport_->SetSourceUv(right_bounds); |
| 1004 } | 938 } |
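Both viewports sample the single texture the WebVR page renders for both eyes; SetSourceUv just selects each eye's UV sub-rectangle. A sketch of the conventional side-by-side split, offered as an assumption about typical bounds rather than anything this patch defines:

```cpp
// Sketch of typical side-by-side eye bounds for a single WebVR texture.
// The {left, right, bottom, top} layout mirrors a UV rect; whether these
// exact defaults apply is an assumption, not something this patch sets.
#include <cstdio>

struct UvRect { float left, right, bottom, top; };

int main() {
  const UvRect left_eye  = {0.0f, 0.5f, 0.0f, 1.0f};  // left half of texture
  const UvRect right_eye = {0.5f, 1.0f, 0.0f, 1.0f};  // right half of texture
  std::printf("left eye  u:[%.1f,%.1f] v:[%.1f,%.1f]\n",
              left_eye.left, left_eye.right, left_eye.bottom, left_eye.top);
  std::printf("right eye u:[%.1f,%.1f] v:[%.1f,%.1f]\n",
              right_eye.left, right_eye.right, right_eye.bottom, right_eye.top);
}
```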
| 1005 | 939 |
| 1006 gvr::GvrApi* VrShell::gvr_api() { | 940 gvr::GvrApi* VrShellGl::gvr_api() { |
| 1007 return gvr_api_.get(); | 941 return gvr_api_.get(); |
| 1008 } | 942 } |
| 1009 | 943 |
| 1010 void VrShell::SurfacesChangedOnUI(JNIEnv* env, | 944 void VrShellGl::ContentBoundsChanged(int width, int height) { |
| 1011 const JavaParamRef<jobject>& object, | 945 TRACE_EVENT0("gpu", "VrShellGl::ContentBoundsChanged"); |
| 1012 const JavaParamRef<jobject>& content_surface, | 946 content_tex_css_width_ = width; |
| 1013 const JavaParamRef<jobject>& ui_surface) { | 947 content_tex_css_height_ = height; |
| 1014 content_compositor_->SurfaceChanged(content_surface); | |
| 1015 ui_compositor_->SurfaceChanged(ui_surface); | |
| 1016 } | 948 } |
| 1017 | 949 |
| 1018 void VrShell::ContentBoundsChangedOnUI(JNIEnv* env, | 950 void VrShellGl::ContentPhysicalBoundsChanged(int width, int height) { |
| 1019 const JavaParamRef<jobject>& object, | 951 if (content_surface_texture_.get()) |
| 1020 jint width, jint height, jfloat dpr) { | 952 content_surface_texture_->SetDefaultBufferSize(width, height); |
| 1021 TRACE_EVENT0("gpu", "VrShell::ContentBoundsChanged"); | |
| 1022 content_tex_physical_size_.width = width; | 953 content_tex_physical_size_.width = width; |
| 1023 content_tex_physical_size_.height = height; | 954 content_tex_physical_size_.height = height; |
| 1024 // TODO(mthiesse): Synchronize with GL thread, and update tex css size in | |
| 1025 // response to MainFrameWasResized, not here. | |
| 1026 content_tex_css_width_ = width / dpr; | |
| 1027 content_tex_css_height_ = height / dpr; | |
| 1028 | |
| 1029 content_compositor_->SetWindowBounds(width, height); | |
| 1030 } | 955 } |
| 1031 | 956 |
| 1032 void VrShell::UIBoundsChangedOnUI(JNIEnv* env, | 957 void VrShellGl::UIBoundsChanged(int width, int height) { |
| 1033 const JavaParamRef<jobject>& object, | 958 ui_tex_css_width_ = width; |
| 1034 jint width, jint height, jfloat dpr) { | 959 ui_tex_css_height_ = height; |
| 1035 ui_compositor_->SetWindowBounds(width, height); | |
| 1036 } | 960 } |
| 1037 | 961 |
| 1038 UiScene* VrShell::GetSceneOnGL() { | 962 void VrShellGl::UIPhysicalBoundsChanged(int width, int height) { |
| 1039 return scene_.get(); | 963 if (ui_surface_texture_.get()) |
| 964 ui_surface_texture_->SetDefaultBufferSize(width, height); | |
| 965 ui_tex_physical_size_.width = width; | |
| 966 ui_tex_physical_size_.height = height; | |
| 1040 } | 967 } |
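These handlers track two sizes per texture: the physical size in device pixels, which the SurfaceTexture buffer is resized to, and the CSS size, which input positions and copy rects are normalized against. The two differ by the device pixel ratio, as the removed ContentBoundsChangedOnUI code shows. A standalone sketch of that bookkeeping, with illustrative values:

```cpp
// Sketch of the CSS-size vs. physical-size bookkeeping above. The physical
// size is what the SurfaceTexture buffer would be set to; the CSS size is
// what input coordinates and copy rects are normalized against. dpr is the
// device pixel ratio passed in from the Java side (value is illustrative).
#include <cstdio>

struct TexSizes { int css_width, css_height, physical_width, physical_height; };

TexSizes SizesFromPhysical(int physical_width, int physical_height, float dpr) {
  return {static_cast<int>(physical_width / dpr),
          static_cast<int>(physical_height / dpr),
          physical_width, physical_height};
}

int main() {
  TexSizes s = SizesFromPhysical(2560, 1440, 2.0f);
  std::printf("css=%dx%d physical=%dx%d\n",
              s.css_width, s.css_height, s.physical_width, s.physical_height);
}
```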
| 1041 | 968 |
| 1042 UiInterface* VrShell::GetUiInterfaceOnGL() { | 969 base::WeakPtr<VrShellGl> VrShellGl::GetWeakPtr() { |
| 1043 return html_interface_.get(); | 970 return weak_ptr_factory_.GetWeakPtr(); |
| 1044 } | 971 } |
| 1045 | 972 |
| 1046 void VrShell::QueueTaskOnUI(base::Callback<void()>& callback) { | 973 void VrShellGl::UpdateVSyncParameters(const base::TimeTicks timebase, |
| 1047 base::AutoLock lock(task_queue_lock_); | 974 const base::TimeDelta interval) { |
| 1048 task_queue_.push(callback); | 975 vsync_timebase_ = timebase; |
| 976 vsync_interval_ = interval; | |
| 1049 } | 977 } |
| 1050 | 978 |
| 1051 void VrShell::HandleQueuedTasksOnGL() { | 979 void VrShellGl::ScheduleNextDrawFrame() { |
| 1052 // To protect a stream of tasks from blocking rendering indefinitely, | 980 base::TimeTicks now = base::TimeTicks::Now(); |
| 1053 // process only the number of tasks present when first checked. | 981 base::TimeTicks target; |
| 1054 std::vector<base::Callback<void()>> tasks; | 982 |
| 1055 { | 983 if (vsync_interval_.is_zero()) { |
| 1056 base::AutoLock lock(task_queue_lock_); | 984 target = now; |
| 1057 const size_t count = task_queue_.size(); | 985 } else { |
| 1058 for (size_t i = 0; i < count; i++) { | 986 target = now + vsync_interval_; |
| 1059 tasks.push_back(task_queue_.front()); | 987 int64_t intervals = (target - vsync_timebase_) / vsync_interval_; |
| 1060 task_queue_.pop(); | 988 target = vsync_timebase_ + intervals * vsync_interval_; |
| 1061 } | |
| 1062 } | 989 } |
| 1063 for (auto &task : tasks) { | 990 |
| 1064 task.Run(); | 991 task_runner_->PostDelayedTask(FROM_HERE, draw_task_.callback(), target - now); |
| 1065 } | |
| 1066 } | 992 } |
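ScheduleNextDrawFrame aligns the next draw with the display's vsync grid: it steps one interval past now, rounds down to a whole number of intervals from the timebase (TimeDelta division truncates), and posts the draw task with the resulting delay. The same arithmetic in a standalone std::chrono sketch:

```cpp
// Standalone sketch of the vsync-aligned scheduling in ScheduleNextDrawFrame,
// using std::chrono instead of base::TimeTicks/TimeDelta. The truncating
// division means the target lands on the last vsync boundary at or before
// now + interval.
#include <chrono>
#include <cstdint>
#include <cstdio>

using Micros = std::chrono::microseconds;

Micros NextVsyncAlignedDelay(Micros now, Micros timebase, Micros interval) {
  if (interval.count() == 0)
    return Micros(0);                                   // no vsync info yet
  Micros target = now + interval;
  const int64_t intervals = (target - timebase) / interval;  // truncates
  target = timebase + intervals * interval;
  return target - now;
}

int main() {
  // 60 Hz vsync (16667 us), timebase at t=0, "now" 3 ms past a vsync.
  Micros delay = NextVsyncAlignedDelay(Micros(5 * 16667 + 3000), Micros(0),
                                       Micros(16667));
  std::printf("post draw task in %lld us\n",
              static_cast<long long>(delay.count()));  // 13667 us
}
```

With a 60 Hz interval and "now" 3 ms past a vsync, the task is posted about 13.7 ms out, i.e. exactly at the next vsync boundary.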
| 1067 | 993 |
| 1068 void VrShell::DoUiActionOnUI(const UiAction action) { | 994 void VrShellGl::ForceExitVR() { |
| 1069 content::NavigationController& controller = main_contents_->GetController(); | 995 main_thread_task_runner_->PostTask( |
| 1070 switch (action) { | 996 FROM_HERE, base::Bind(&VrShell::ForceExitVR, weak_vr_shell_)); |
| 1071 case HISTORY_BACK: | |
| 1072 if (main_contents_->IsFullscreen()) { | |
| 1073 main_contents_->ExitFullscreen(true /* will_cause_resize */); | |
| 1074 } else if (controller.CanGoBack()) { | |
| 1075 controller.GoBack(); | |
| 1076 } | |
| 1077 break; | |
| 1078 case HISTORY_FORWARD: | |
| 1079 if (controller.CanGoForward()) | |
| 1080 controller.GoForward(); | |
| 1081 break; | |
| 1082 case RELOAD: | |
| 1083 controller.Reload(false); | |
| 1084 break; | |
| 1085 #if defined(ENABLE_VR_SHELL_UI_DEV) | |
| 1086 case RELOAD_UI: | |
| 1087 ui_contents_->GetController().Reload(false); | |
| 1088 html_interface_.reset(new UiInterface(UiInterface::Mode::STANDARD, | |
| 1089 main_contents_->IsFullscreen())); | |
| 1090 vr_web_contents_observer_->SetUiInterface(html_interface_.get()); | |
| 1091 break; | |
| 1092 #endif | |
| 1093 case ZOOM_OUT: // Not handled yet. | |
| 1094 case ZOOM_IN: // Not handled yet. | |
| 1095 break; | |
| 1096 default: | |
| 1097 NOTREACHED(); | |
| 1098 } | |
| 1099 } | |
| 1100 | |
| 1101 void VrShell::RenderViewHostChanged(content::RenderViewHost* old_host, | |
| 1102 content::RenderViewHost* new_host) { | |
| 1103 new_host->GetWidget()->GetView()->SetBackgroundColor(SK_ColorTRANSPARENT); | |
| 1104 } | |
| 1105 | |
| 1106 void VrShell::MainFrameWasResized(bool width_changed) { | |
| 1107 display::Display display = display::Screen::GetScreen() | |
| 1108 ->GetDisplayNearestWindow(ui_contents_->GetNativeView()); | |
| 1109 // TODO(mthiesse): Synchronize with GL thread. | |
| 1110 ui_tex_css_width_ = display.size().width(); | |
| 1111 ui_tex_css_height_ = display.size().height(); | |
| 1112 } | |
| 1113 | |
| 1114 void VrShell::WebContentsDestroyed() { | |
| 1115 ui_input_manager_.reset(); | |
| 1116 ui_contents_ = nullptr; | |
| 1117 // TODO(mthiesse): Handle web contents being destroyed. | |
| 1118 delegate_->ForceExitVr(); | |
| 1119 } | |
| 1120 | |
| 1121 void VrShell::ContentWebContentsDestroyedOnUI() { | |
| 1122 content_input_manager_.reset(); | |
| 1123 main_contents_ = nullptr; | |
| 1124 // TODO(mthiesse): Handle web contents being destroyed. | |
| 1125 delegate_->ForceExitVr(); | |
| 1126 } | |
| 1127 | |
| 1128 void VrShell::ContentWasHiddenOnUI() { | |
| 1129 // Ensure we don't continue sending input to it. | |
| 1130 content_input_manager_.reset(); | |
| 1131 // TODO(mthiesse): Handle web contents being hidden. | |
| 1132 delegate_->ForceExitVr(); | |
| 1133 } | |
| 1134 | |
| 1135 void VrShell::SetContentCssSizeOnUI(float width, float height, float dpr) { | |
| 1136 JNIEnv* env = base::android::AttachCurrentThread(); | |
| 1137 Java_VrShellImpl_setContentCssSizeOnUI(env, j_vr_shell_.obj(), width, height, | |
| 1138 dpr); | |
| 1139 } | |
| 1140 | |
| 1141 void VrShell::SetUiCssSizeOnUI(float width, float height, float dpr) { | |
| 1142 JNIEnv* env = base::android::AttachCurrentThread(); | |
| 1143 Java_VrShellImpl_setUiCssSizeOnUI(env, j_vr_shell_.obj(), width, height, dpr); | |
| 1144 } | |
| 1145 | |
| 1146 // ---------------------------------------------------------------------------- | |
| 1147 // Native JNI methods | |
| 1148 // ---------------------------------------------------------------------------- | |
| 1149 | |
| 1150 jlong InitOnUI(JNIEnv* env, | |
| 1151 const JavaParamRef<jobject>& obj, | |
| 1152 const JavaParamRef<jobject>& content_web_contents, | |
| 1153 jlong content_window_android, | |
| 1154 const JavaParamRef<jobject>& ui_web_contents, | |
| 1155 jlong ui_window_android, | |
| 1156 jboolean for_web_vr) { | |
| 1157 return reinterpret_cast<intptr_t>(new VrShell( | |
| 1158 env, obj, content::WebContents::FromJavaWebContents(content_web_contents), | |
| 1159 reinterpret_cast<ui::WindowAndroid*>(content_window_android), | |
| 1160 content::WebContents::FromJavaWebContents(ui_web_contents), | |
| 1161 reinterpret_cast<ui::WindowAndroid*>(ui_window_android), | |
| 1162 for_web_vr)); | |
| 1163 } | 997 } |
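ForceExitVR illustrates the threading contract the new VrShellGl relies on: it never calls into VrShell directly but posts a task to the main thread's task runner, bound through weak_vr_shell_, so the call is dropped if VrShell has already gone away. A standalone analogy using std::weak_ptr and a toy task queue; this mirrors the intent only and is not the base:: API:

```cpp
// Analogy (std::weak_ptr + a toy task queue) for the PostTask +
// base::Bind(..., weak_vr_shell_) pattern above: if the target object is gone
// by the time the posted task runs, the call is dropped instead of touching
// freed memory.
#include <cstdio>
#include <functional>
#include <memory>
#include <queue>

struct MainThreadObject {
  void ForceExitVR() { std::printf("exiting VR on the main thread\n"); }
};

int main() {
  std::queue<std::function<void()>> main_thread_tasks;  // toy task runner

  auto shell = std::make_shared<MainThreadObject>();
  std::weak_ptr<MainThreadObject> weak_shell = shell;

  // "Posted" from the GL thread.
  main_thread_tasks.push([weak_shell] {
    if (auto locked = weak_shell.lock())
      locked->ForceExitVR();  // target still alive: run the call
    // otherwise: drop the task, like an invalidated base::WeakPtr
  });

  shell.reset();  // object destroyed before the task runs

  while (!main_thread_tasks.empty()) {
    main_thread_tasks.front()();  // prints nothing: the weak_ptr has expired
    main_thread_tasks.pop();
  }
}
```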
| 1164 | 998 |
| 1165 } // namespace vr_shell | 999 } // namespace vr_shell |