Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2016 The Chromium Authors. All rights reserved. | 1 // Copyright 2016 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "chrome/browser/android/vr_shell/vr_shell.h" | 5 #include "chrome/browser/android/vr_shell/vr_shell_gl.h" |
| 6 | 6 |
| 7 #include "base/memory/ptr_util.h" | |
| 7 #include "base/metrics/histogram_macros.h" | 8 #include "base/metrics/histogram_macros.h" |
| 9 #include "base/threading/thread_task_runner_handle.h" | |
| 8 #include "chrome/browser/android/vr_shell/ui_elements.h" | 10 #include "chrome/browser/android/vr_shell/ui_elements.h" |
| 9 #include "chrome/browser/android/vr_shell/ui_interface.h" | 11 #include "chrome/browser/android/vr_shell/ui_interface.h" |
| 10 #include "chrome/browser/android/vr_shell/ui_scene.h" | 12 #include "chrome/browser/android/vr_shell/ui_scene.h" |
| 11 #include "chrome/browser/android/vr_shell/vr_compositor.h" | |
| 12 #include "chrome/browser/android/vr_shell/vr_controller.h" | 13 #include "chrome/browser/android/vr_shell/vr_controller.h" |
| 13 #include "chrome/browser/android/vr_shell/vr_gl_util.h" | 14 #include "chrome/browser/android/vr_shell/vr_gl_util.h" |
| 14 #include "chrome/browser/android/vr_shell/vr_input_manager.h" | 15 #include "chrome/browser/android/vr_shell/vr_input_manager.h" |
| 15 #include "chrome/browser/android/vr_shell/vr_shell_delegate.h" | 16 #include "chrome/browser/android/vr_shell/vr_math.h" |
| 17 #include "chrome/browser/android/vr_shell/vr_shell.h" | |
| 16 #include "chrome/browser/android/vr_shell/vr_shell_renderer.h" | 18 #include "chrome/browser/android/vr_shell/vr_shell_renderer.h" |
| 17 #include "chrome/browser/android/vr_shell/vr_usage_monitor.h" | 19 #include "third_party/WebKit/public/platform/WebInputEvent.h" |
| 18 #include "chrome/browser/android/vr_shell/vr_web_contents_observer.h" | 20 #include "ui/gfx/vsync_provider.h" |
| 19 #include "content/public/browser/navigation_controller.h" | 21 #include "ui/gl/android/scoped_java_surface.h" |
| 20 #include "content/public/browser/render_view_host.h" | 22 #include "ui/gl/android/surface_texture.h" |
| 21 #include "content/public/browser/render_widget_host.h" | |
| 22 #include "content/public/browser/render_widget_host_view.h" | |
| 23 #include "content/public/browser/web_contents.h" | |
| 24 #include "content/public/common/referrer.h" | |
| 25 #include "device/vr/android/gvr/gvr_device_provider.h" | |
| 26 #include "jni/VrShellImpl_jni.h" | |
| 27 #include "ui/android/view_android.h" | |
| 28 #include "ui/android/window_android.h" | |
| 29 #include "ui/base/page_transition_types.h" | |
| 30 #include "ui/display/display.h" | |
| 31 #include "ui/display/screen.h" | |
| 32 #include "ui/gl/gl_bindings.h" | 23 #include "ui/gl/gl_bindings.h" |
| 24 #include "ui/gl/gl_context.h" | |
| 25 #include "ui/gl/gl_surface.h" | |
| 33 #include "ui/gl/init/gl_factory.h" | 26 #include "ui/gl/init/gl_factory.h" |
| 34 | 27 |
| 35 using base::android::JavaParamRef; | |
| 36 | |
| 37 namespace vr_shell { | 28 namespace vr_shell { |
| 38 | 29 |
| 39 namespace { | 30 namespace { |
| 40 // Constant taken from treasure_hunt demo. | 31 // Constant taken from treasure_hunt demo. |
| 41 static constexpr long kPredictionTimeWithoutVsyncNanos = 50000000; | 32 static constexpr long kPredictionTimeWithoutVsyncNanos = 50000000; |
| 42 | 33 |
| 43 static constexpr float kZNear = 0.1f; | 34 static constexpr float kZNear = 0.1f; |
| 44 static constexpr float kZFar = 1000.0f; | 35 static constexpr float kZFar = 1000.0f; |
| 45 | 36 |
| 46 // Screen angle in degrees. 0 = vertical, positive = top closer. | 37 // Screen angle in degrees. 0 = vertical, positive = top closer. |
| (...skipping 41 matching lines...) | |
| 88 | 79 |
| 89 // The GVR viewport list has two entries (left eye and right eye) for each | 80 // The GVR viewport list has two entries (left eye and right eye) for each |
| 90 // GVR buffer. | 81 // GVR buffer. |
| 91 static constexpr int kViewportListPrimaryOffset = 0; | 82 static constexpr int kViewportListPrimaryOffset = 0; |
| 92 static constexpr int kViewportListHeadlockedOffset = 2; | 83 static constexpr int kViewportListHeadlockedOffset = 2; |
| 93 | 84 |
| 94 // Magic numbers used to mark valid pose index values encoded in frame | 85 // Magic numbers used to mark valid pose index values encoded in frame |
| 95 // data. Must match the magic numbers used in blink's VRDisplay.cpp. | 86 // data. Must match the magic numbers used in blink's VRDisplay.cpp. |
| 96 static constexpr std::array<uint8_t, 2> kWebVrPosePixelMagicNumbers{{42, 142}}; | 87 static constexpr std::array<uint8_t, 2> kWebVrPosePixelMagicNumbers{{42, 142}}; |
| 97 | 88 |
| 98 vr_shell::VrShell* g_instance; | |
| 99 | |
| 100 static const char kVrShellUIURL[] = "chrome://vr-shell-ui"; | |
| 101 | |
| 102 float Distance(const gvr::Vec3f& vec1, const gvr::Vec3f& vec2) { | 89 float Distance(const gvr::Vec3f& vec1, const gvr::Vec3f& vec2) { |
| 103 float xdiff = (vec1.x - vec2.x); | 90 float xdiff = (vec1.x - vec2.x); |
| 104 float ydiff = (vec1.y - vec2.y); | 91 float ydiff = (vec1.y - vec2.y); |
| 105 float zdiff = (vec1.z - vec2.z); | 92 float zdiff = (vec1.z - vec2.z); |
| 106 float scale = xdiff * xdiff + ydiff * ydiff + zdiff * zdiff; | 93 float scale = xdiff * xdiff + ydiff * ydiff + zdiff * zdiff; |
| 107 return std::sqrt(scale); | 94 return std::sqrt(scale); |
| 108 } | 95 } |
| 109 | 96 |
| 110 // Generate a quaternion representing the rotation from the negative Z axis | 97 // Generate a quaternion representing the rotation from the negative Z axis |
| 111 // (0, 0, -1) to a specified vector. This is an optimized version of a more | 98 // (0, 0, -1) to a specified vector. This is an optimized version of a more |
| (...skipping 28 matching lines...) | |
| 140 mouse_event->x = x; | 127 mouse_event->x = x; |
| 141 mouse_event->y = y; | 128 mouse_event->y = y; |
| 142 mouse_event->windowX = x; | 129 mouse_event->windowX = x; |
| 143 mouse_event->windowY = y; | 130 mouse_event->windowY = y; |
| 144 mouse_event->timeStampSeconds = timestamp; | 131 mouse_event->timeStampSeconds = timestamp; |
| 145 mouse_event->clickCount = 1; | 132 mouse_event->clickCount = 1; |
| 146 mouse_event->modifiers = 0; | 133 mouse_event->modifiers = 0; |
| 147 | 134 |
| 148 return mouse_event; | 135 return mouse_event; |
| 149 } | 136 } |
| 150 } // namespace | |
| 151 | |
| 152 VrShell::VrShell(JNIEnv* env, | |
| 153 jobject obj, | |
| 154 content::WebContents* main_contents, | |
| 155 ui::WindowAndroid* content_window, | |
| 156 content::WebContents* ui_contents, | |
| 157 ui::WindowAndroid* ui_window, | |
| 158 bool for_web_vr) | |
| 159 : WebContentsObserver(ui_contents), | |
| 160 main_contents_(main_contents), | |
| 161 ui_contents_(ui_contents), | |
| 162 metrics_helper_(new VrMetricsHelper(main_contents)), | |
| 163 main_thread_task_runner_(base::ThreadTaskRunnerHandle::Get()), | |
| 164 weak_ptr_factory_(this) { | |
| 165 DCHECK(g_instance == nullptr); | |
| 166 g_instance = this; | |
| 167 j_vr_shell_.Reset(env, obj); | |
| 168 scene_.reset(new UiScene); | |
| 169 | |
| 170 if (for_web_vr) | |
| 171 metrics_helper_->SetWebVREnabled(true); | |
| 172 html_interface_.reset(new UiInterface( | |
| 173 for_web_vr ? UiInterface::Mode::WEB_VR : UiInterface::Mode::STANDARD, | |
| 174 main_contents_->IsFullscreen())); | |
| 175 content_compositor_.reset(new VrCompositor(content_window, false)); | |
| 176 ui_compositor_.reset(new VrCompositor(ui_window, true)); | |
| 177 vr_web_contents_observer_.reset(new VrWebContentsObserver( | |
| 178 main_contents, html_interface_.get(), this)); | |
| 179 | |
| 180 LoadUIContentOnUI(); | |
| 181 | |
| 182 gvr::Mat4f identity; | |
| 183 SetIdentityM(identity); | |
| 184 webvr_head_pose_.resize(kPoseRingBufferSize, identity); | |
| 185 webvr_head_pose_valid_.resize(kPoseRingBufferSize, false); | |
| 186 | |
| 187 content_input_manager_.reset(new VrInputManager(main_contents_)); | |
| 188 ui_input_manager_.reset(new VrInputManager(ui_contents_)); | |
| 189 weak_content_input_manager_ = content_input_manager_->GetWeakPtr(); | |
| 190 weak_ui_input_manager_ = ui_input_manager_->GetWeakPtr(); | |
| 191 } | |
| 192 | |
| 193 void VrShell::UpdateCompositorLayersOnUI(JNIEnv* env, | |
| 194 const JavaParamRef<jobject>& obj) { | |
| 195 content_compositor_->SetLayer(main_contents_); | |
| 196 ui_compositor_->SetLayer(ui_contents_); | |
| 197 } | |
| 198 | |
| 199 void VrShell::DestroyOnUI(JNIEnv* env, const JavaParamRef<jobject>& obj) { | |
| 200 delete this; | |
| 201 } | |
| 202 | |
| 203 void VrShell::LoadUIContentOnUI() { | |
| 204 GURL url(kVrShellUIURL); | |
| 205 ui_contents_->GetController().LoadURL( | |
| 206 url, content::Referrer(), | |
| 207 ui::PageTransition::PAGE_TRANSITION_AUTO_TOPLEVEL, std::string("")); | |
| 208 } | |
| 209 | |
| 210 bool RegisterVrShell(JNIEnv* env) { | |
| 211 return RegisterNativesImpl(env); | |
| 212 } | |
| 213 | |
| 214 VrShell::~VrShell() { | |
| 215 if (delegate_ && delegate_->GetDeviceProvider()) { | |
| 216 delegate_->GetDeviceProvider()->OnGvrDelegateRemoved(); | |
| 217 } | |
| 218 g_instance = nullptr; | |
| 219 gl::init::ShutdownGL(); | |
| 220 } | |
| 221 | |
| 222 void VrShell::SetDelegateOnUI(JNIEnv* env, | |
| 223 const base::android::JavaParamRef<jobject>& obj, | |
| 224 const base::android::JavaParamRef<jobject>& delegate) { | |
| 225 base::AutoLock lock(gvr_init_lock_); | |
| 226 delegate_ = VrShellDelegate::GetNativeDelegate(env, delegate); | |
| 227 if (swap_chain_.get()) { | |
| 228 delegate_->GetDeviceProvider()->OnGvrDelegateReady( | |
| 229 weak_ptr_factory_.GetWeakPtr()); | |
| 230 } | |
| 231 } | |
| 232 | 137 |
| 233 enum class ViewerType { | 138 enum class ViewerType { |
| 234 UNKNOWN_TYPE = 0, | 139 UNKNOWN_TYPE = 0, |
| 235 CARDBOARD = 1, | 140 CARDBOARD = 1, |
| 236 DAYDREAM = 2, | 141 DAYDREAM = 2, |
| 237 VIEWER_TYPE_MAX, | 142 VIEWER_TYPE_MAX, |
| 238 }; | 143 }; |
| 239 | 144 |
| 240 void VrShell::GvrInitOnGL(JNIEnv* env, | 145 int GetPixelEncodedPoseIndexByte() { |
| 241 const JavaParamRef<jobject>& obj, | 146 TRACE_EVENT0("gpu", "VrShellGl::GetPixelEncodedPoseIndex"); |
| 242 jlong native_gvr_api) { | 147 // Read the pose index encoded in a bottom left pixel as color values. |
| 243 // set the initial webvr state | 148 // See also third_party/WebKit/Source/modules/vr/VRDisplay.cpp which |
| 244 metrics_helper_->SetVRActive(true); | 149 // encodes the pose index, and device/vr/android/gvr/gvr_device.cc |
| 150 // which tracks poses. Returns the low byte (0..255) if valid, or -1 | |
| 151 // if not valid due to bad magic number. | |
| 152 uint8_t pixels[4]; | |
| 153 // Assume we're reading from the framebuffer we just wrote to. | |
| 154 // That's true currently, we may need to use glReadBuffer(GL_BACK) | |
| 155 // or equivalent if the rendering setup changes in the future. | |
| 156 glReadPixels(0, 0, 1, 1, GL_RGBA, GL_UNSIGNED_BYTE, pixels); | |
| 245 | 157 |
| 246 gvr_api_ = | 158 // Check for the magic number written by VRDevice.cpp on submit. |
| 247 gvr::GvrApi::WrapNonOwned(reinterpret_cast<gvr_context*>(native_gvr_api)); | 159 // This helps avoid glitches from garbage data in the render |
| 248 // TODO(klausw,crbug.com/655722): should report OnGvrDelegateReady here once | 160 // buffer that can appear during initialization or resizing. These |
| 249 // we switch to using a WebVR render surface. We currently need to wait for | 161 // often appear as flashes of all-black or all-white pixels. |
| 250 // the compositor window's size to be known first. See also | 162 if (pixels[1] == kWebVrPosePixelMagicNumbers[0] && |
| 251 // ContentSurfaceChanged. | 163 pixels[2] == kWebVrPosePixelMagicNumbers[1]) { |
| 252 controller_.reset( | 164 // Pose is good. |
| 253 new VrController(reinterpret_cast<gvr_context*>(native_gvr_api))); | 165 return pixels[0]; |
| 166 } | |
| 167 VLOG(1) << "WebVR: reject decoded pose index " << (int)pixels[0] << | |
| 168 ", bad magic number " << (int)pixels[1] << ", " << (int)pixels[2]; | |
| 169 return -1; | |
| 170 } | |
| 254 | 171 |
| 172 } // namespace | |
| 173 | |
| 174 VrShellGl::VrShellGl( | |
| 175 VrShell* vr_shell, | |
| 176 base::WeakPtr<VrShell> weak_vr_shell, | |
| 177 base::WeakPtr<VrInputManager> content_input_manager, | |
| 178 base::WeakPtr<VrInputManager> ui_input_manager, | |
| 179 scoped_refptr<base::SingleThreadTaskRunner> main_thread_task_runner, | |
| 180 gvr_context* gvr_api) | |
| 181 : task_runner_(base::ThreadTaskRunnerHandle::Get()), | |
| 182 vr_shell_(vr_shell), | |
| 183 weak_vr_shell_(weak_vr_shell), | |
| 184 content_input_manager_(content_input_manager), | |
| 185 ui_input_manager_(ui_input_manager), | |
| 186 main_thread_task_runner_(std::move(main_thread_task_runner)), | |
| 187 weak_ptr_factory_(this) { | |
| 188 | |
| 189 GvrInit(gvr_api); | |
| 190 InitializeGl(); | |
| 191 | |
| 192 gvr::Mat4f identity; | |
| 193 SetIdentityM(identity); | |
| 194 webvr_head_pose_.resize(kPoseRingBufferSize, identity); | |
| 195 webvr_head_pose_valid_.resize(kPoseRingBufferSize, false); | |
| 196 | |
| 197 draw_task_.Reset(base::Bind(&VrShellGl::DrawFrame, base::Unretained(this))); | |
| 198 | |
| 199 scene_.reset(new UiScene); | |
| 200 | |
| 201 InitializeRenderer(); | |
| 202 | |
| 203 ScheduleNextDrawFrame(); | |
| 204 } | |
| 205 | |
| 206 VrShellGl::~VrShellGl() { | |
| 207 draw_task_.Cancel(); | |
| 208 } | |
| 209 | |
| 210 void VrShellGl::InitializeGl() { | |
| 211 CHECK(gl::GetGLImplementation() != gl::kGLImplementationNone || | |
| 212 gl::init::InitializeGLOneOff()); | |
| 213 surface_ = gl::init::CreateOffscreenGLSurface(gfx::Size()); | |
| 214 CHECK(surface_.get()) << "gl::init::CreateOffscreenGLSurface failed"; | |
| 215 context_ = gl::init::CreateGLContext(nullptr, surface_.get(), | |
| 216 gl::GLContextAttribs()); | |
| 217 CHECK(context_.get()) << "gl::init::CreateGLContext failed"; | |
| 218 | |
| 219 CHECK(context_->MakeCurrent(surface_.get())) | |
| 220 << "gl::GLContext::MakeCurrent() failed"; | |
| 221 | |
| 222 // TODO(mthiesse): We don't appear to have a VSync provider ever here. This is | |
| 223 // sort of okay, because the GVR swap chain will block if we render too fast, | |
| 224 // but we should address this properly. | |
| 225 if (surface_->GetVSyncProvider()) { | |
| 226 surface_->GetVSyncProvider()->GetVSyncParameters(base::Bind( | |
| 227 &VrShellGl::UpdateVSyncParameters, weak_ptr_factory_.GetWeakPtr())); | |
| 228 } else { | |
| 229 LOG(ERROR) << "No VSync Provider."; | |
cjgrant (2016/12/08 17:02:49): Typically no periods in logging sentences.
mthiesse (2016/12/09 01:28:42): Done
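For context on the render loop this CL moves onto the GL thread: the TODO above notes that no VSync provider is available for the offscreen surface, so the cancelable draw_task_ has to repost itself (see ScheduleNextDrawFrame elsewhere in this file). The sketch below shows one way that self-scheduling can look; the fallback interval and the method body are assumptions for illustration, not necessarily what this CL implements.

```cpp
// Minimal sketch (assumed, not the actual CL code): repost the cancelable
// draw task on the GL thread's task runner at roughly display rate. The GVR
// swap chain still blocks if frames are produced faster than they can be
// presented, as the TODO above points out.
void VrShellGl::ScheduleNextDrawFrame() {
  constexpr int64_t kFallbackFrameIntervalMicros = 16666;  // ~60 Hz, assumed.
  task_runner_->PostDelayedTask(
      FROM_HERE, draw_task_.callback(),
      base::TimeDelta::FromMicroseconds(kFallbackFrameIntervalMicros));
}
```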
| 230 } | |
| 231 | |
| 232 unsigned int textures[2]; | |
| 233 glGenTextures(2, textures); | |
| 234 ui_texture_id_ = textures[0]; | |
| 235 content_texture_id_ = textures[1]; | |
| 236 ui_surface_texture_ = gl::SurfaceTexture::Create(ui_texture_id_); | |
| 237 content_surface_texture_ = gl::SurfaceTexture::Create(content_texture_id_); | |
| 238 ui_surface_.reset(new gl::ScopedJavaSurface(ui_surface_texture_.get())); | |
| 239 content_surface_.reset(new gl::ScopedJavaSurface( | |
| 240 content_surface_texture_.get())); | |
| 241 ui_surface_texture_->SetFrameAvailableCallback(base::Bind( | |
| 242 &VrShellGl::OnUIFrameAvailable, weak_ptr_factory_.GetWeakPtr())); | |
| 243 content_surface_texture_->SetFrameAvailableCallback(base::Bind( | |
| 244 &VrShellGl::OnContentFrameAvailable, weak_ptr_factory_.GetWeakPtr())); | |
| 245 | |
| 246 content_surface_texture_->SetDefaultBufferSize( | |
| 247 content_tex_physical_size_.width, content_tex_physical_size_.height); | |
| 248 ui_surface_texture_->SetDefaultBufferSize(ui_tex_physical_size_.width, | |
| 249 ui_tex_physical_size_.height); | |
| 250 | |
| 251 main_thread_task_runner_->PostTask(FROM_HERE, base::Bind( | |
| 252 &VrShell::SurfacesChanged, weak_vr_shell_, | |
| 253 content_surface_->j_surface().obj(), | |
| 254 ui_surface_->j_surface().obj())); | |
| 255 } | |
| 256 | |
| 257 void VrShellGl::OnUIFrameAvailable() { | |
| 258 ui_surface_texture_->UpdateTexImage(); | |
| 259 } | |
| 260 | |
| 261 void VrShellGl::OnContentFrameAvailable() { | |
| 262 content_surface_texture_->UpdateTexImage(); | |
| 263 } | |
| 264 | |
| 265 void VrShellGl::GvrInit(gvr_context* gvr_api) { | |
| 266 gvr_api_ = gvr::GvrApi::WrapNonOwned(gvr_api); | |
| 267 controller_.reset(new VrController(gvr_api)); | |
| 255 | 268 |
| 256 ViewerType viewerType; | 269 ViewerType viewerType; |
| 257 switch (gvr_api_->GetViewerType()) { | 270 switch (gvr_api_->GetViewerType()) { |
| 258 case gvr::ViewerType::GVR_VIEWER_TYPE_DAYDREAM: | 271 case gvr::ViewerType::GVR_VIEWER_TYPE_DAYDREAM: |
| 259 viewerType = ViewerType::DAYDREAM; | 272 viewerType = ViewerType::DAYDREAM; |
| 260 break; | 273 break; |
| 261 case gvr::ViewerType::GVR_VIEWER_TYPE_CARDBOARD: | 274 case gvr::ViewerType::GVR_VIEWER_TYPE_CARDBOARD: |
| 262 viewerType = ViewerType::CARDBOARD; | 275 viewerType = ViewerType::CARDBOARD; |
| 263 break; | 276 break; |
| 264 default: | 277 default: |
| 265 NOTREACHED(); | 278 NOTREACHED(); |
| 266 viewerType = ViewerType::UNKNOWN_TYPE; | 279 viewerType = ViewerType::UNKNOWN_TYPE; |
| 267 break; | 280 break; |
| 268 } | 281 } |
| 269 UMA_HISTOGRAM_ENUMERATION("VRViewerType", static_cast<int>(viewerType), | 282 UMA_HISTOGRAM_ENUMERATION("VRViewerType", static_cast<int>(viewerType), |
| 270 static_cast<int>(ViewerType::VIEWER_TYPE_MAX)); | 283 static_cast<int>(ViewerType::VIEWER_TYPE_MAX)); |
| 271 } | 284 } |
| 272 | 285 |
| 273 void VrShell::InitializeGlOnGL(JNIEnv* env, | 286 void VrShellGl::InitializeRenderer() { |
| 274 const JavaParamRef<jobject>& obj, | |
| 275 jint content_texture_handle, | |
| 276 jint ui_texture_handle) { | |
| 277 base::AutoLock lock(gvr_init_lock_); | |
| 278 CHECK(gl::GetGLImplementation() != gl::kGLImplementationNone || | |
| 279 gl::init::InitializeGLOneOff()); | |
| 280 | |
| 281 content_texture_id_ = content_texture_handle; | |
| 282 ui_texture_id_ = ui_texture_handle; | |
| 283 | |
| 284 // While WebVR is going through the compositor path, it shares | 287 // While WebVR is going through the compositor path, it shares |
| 285 // the same texture ID. This will change once it gets its own | 288 // the same texture ID. This will change once it gets its own |
| 286 // surface, but store it separately to avoid future confusion. | 289 // surface, but store it separately to avoid future confusion. |
| 287 // TODO(klausw,crbug.com/655722): remove this. | 290 // TODO(klausw,crbug.com/655722): remove this. |
| 288 webvr_texture_id_ = content_texture_id_; | 291 webvr_texture_id_ = content_texture_id_; |
| 289 // Out of paranoia, explicitly reset the "pose valid" flags to false | 292 // Out of paranoia, explicitly reset the "pose valid" flags to false |
| 290 // from the GL thread. The constructor ran in the UI thread. | 293 // from the GL thread. The constructor ran in the UI thread. |
| 291 // TODO(klausw,crbug.com/655722): remove this. | 294 // TODO(klausw,crbug.com/655722): remove this. |
| 292 webvr_head_pose_valid_.assign(kPoseRingBufferSize, false); | 295 webvr_head_pose_valid_.assign(kPoseRingBufferSize, false); |
| 293 | 296 |
| (...skipping 54 matching lines...) | |
| 348 buffer_viewport_list_->GetBufferViewport(GVR_LEFT_EYE, | 351 buffer_viewport_list_->GetBufferViewport(GVR_LEFT_EYE, |
| 349 webvr_left_viewport_.get()); | 352 webvr_left_viewport_.get()); |
| 350 webvr_left_viewport_->SetSourceBufferIndex(kFramePrimaryBuffer); | 353 webvr_left_viewport_->SetSourceBufferIndex(kFramePrimaryBuffer); |
| 351 | 354 |
| 352 webvr_right_viewport_.reset( | 355 webvr_right_viewport_.reset( |
| 353 new gvr::BufferViewport(gvr_api_->CreateBufferViewport())); | 356 new gvr::BufferViewport(gvr_api_->CreateBufferViewport())); |
| 354 buffer_viewport_list_->GetBufferViewport(GVR_RIGHT_EYE, | 357 buffer_viewport_list_->GetBufferViewport(GVR_RIGHT_EYE, |
| 355 webvr_right_viewport_.get()); | 358 webvr_right_viewport_.get()); |
| 356 webvr_right_viewport_->SetSourceBufferIndex(kFramePrimaryBuffer); | 359 webvr_right_viewport_->SetSourceBufferIndex(kFramePrimaryBuffer); |
| 357 | 360 |
| 358 if (delegate_) { | 361 main_thread_task_runner_->PostTask(FROM_HERE, base::Bind( |
| 359 main_thread_task_runner_->PostTask( | 362 &VrShell::GvrDelegateReady, weak_vr_shell_)); |
| 360 FROM_HERE, base::Bind(&device::GvrDeviceProvider::OnGvrDelegateReady, | |
| 361 delegate_->GetDeviceProvider(), | |
| 362 weak_ptr_factory_.GetWeakPtr())); | |
| 363 } | |
| 364 } | 363 } |
| 365 | 364 |
| 366 void VrShell::UpdateControllerOnGL(const gvr::Vec3f& forward_vector) { | 365 void VrShellGl::UpdateController(const gvr::Vec3f& forward_vector) { |
| 367 controller_->UpdateState(); | 366 controller_->UpdateState(); |
| 368 | 367 |
| 369 #if defined(ENABLE_VR_SHELL) | 368 #if defined(ENABLE_VR_SHELL) |
| 369 // TODO(mthiesse): Fix this properly. | |
cjgrant (2016/12/08 17:02:49): Should elaborate.
mthiesse (2016/12/09 01:28:42): Done.
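On the edge-triggered behavior the comment below relies on ("ButtonUpHappened only returns true for a single frame"): that is the usual compare-against-last-frame pattern. The following sketch is illustrative only, with assumed names; it is not VrController's real implementation.

```cpp
// Illustrative per-frame edge detection for a controller button: the state
// sampled this frame is compared against last frame's, so a release (or
// press) is reported exactly once, on the frame where the transition occurs.
class ButtonEdgeDetector {
 public:
  // Call exactly once per frame with the currently sampled "pressed" state.
  void Update(bool pressed_now) {
    up_happened_ = was_pressed_ && !pressed_now;
    down_happened_ = !was_pressed_ && pressed_now;
    was_pressed_ = pressed_now;
  }
  bool UpHappened() const { return up_happened_; }
  bool DownHappened() const { return down_happened_; }

 private:
  bool was_pressed_ = false;
  bool up_happened_ = false;
  bool down_happened_ = false;
};
```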
| 370 | |
| 370 // Note that button up/down state is transient, so ButtonUpHappened only | 371 // Note that button up/down state is transient, so ButtonUpHappened only |
| 371 // returns | 372 // returns true for a single frame (and we're guaranteed not to miss it). |
| 372 // true for a single frame (and we're guaranteed not to miss it). | |
| 373 if (controller_->ButtonUpHappened( | 373 if (controller_->ButtonUpHappened( |
| 374 gvr::ControllerButton::GVR_CONTROLLER_BUTTON_APP)) { | 374 gvr::ControllerButton::GVR_CONTROLLER_BUTTON_APP)) { |
| 375 html_interface_->SetMenuMode(!html_interface_->GetMenuMode()); | 375 // html_interface_->SetMenuMode(!html_interface_->GetMenuMode()); |
| 376 | 376 |
| 377 // TODO(mthiesse): The page is no longer visible when in menu mode. We | 377 // TODO(mthiesse): The page is no longer visible when in menu mode. We |
| 378 // should unfocus or otherwise let it know it's hidden. | 378 // should unfocus or otherwise let it know it's hidden. |
| 379 if (html_interface_->GetMode() == UiInterface::Mode::WEB_VR) { | 379 // if (html_interface_->GetMode() == UiInterface::Mode::WEB_VR) { |
|
cjgrant
2016/12/08 17:02:49
Remove this block?
mthiesse
2016/12/09 01:28:42
For clarity, I'd like to leave it here as a sign t
| |
| 380 const auto&& task = html_interface_->GetMenuMode() ? | 380 // const auto&& task = html_interface_->GetMenuMode() ? |
| 381 &device::GvrDeviceProvider::OnDisplayBlur : | 381 // &device::GvrDeviceProvider::OnDisplayBlur : |
| 382 &device::GvrDeviceProvider::OnDisplayFocus; | 382 // &device::GvrDeviceProvider::OnDisplayFocus; |
| 383 main_thread_task_runner_->PostTask( | 383 // main_thread_task_runner_->PostTask( |
| 384 FROM_HERE, base::Bind(task, delegate_->GetDeviceProvider())); | 384 // FROM_HERE, base::Bind(task, delegate_->GetDeviceProvider())); |
| 385 } | 385 // } |
| 386 } | 386 } |
| 387 #endif | 387 #endif |
| 388 if (html_interface_->GetMode() == UiInterface::Mode::WEB_VR) { | 388 if (vr_shell_->GetUiInterface()->GetMode() == UiInterface::Mode::WEB_VR) { |
| 389 // Process screen touch events for Cardboard button compatibility. | 389 // Process screen touch events for Cardboard button compatibility. |
| 390 // Also send tap events for controller "touchpad click" events. | 390 // Also send tap events for controller "touchpad click" events. |
| 391 if (touch_pending_ || | 391 if (touch_pending_ || controller_->ButtonUpHappened( |
| 392 controller_->ButtonUpHappened( | |
| 393 gvr::ControllerButton::GVR_CONTROLLER_BUTTON_CLICK)) { | 392 gvr::ControllerButton::GVR_CONTROLLER_BUTTON_CLICK)) { |
| 394 touch_pending_ = false; | 393 touch_pending_ = false; |
| 395 std::unique_ptr<WebGestureEvent> gesture(new WebGestureEvent()); | 394 std::unique_ptr<WebGestureEvent> gesture(new WebGestureEvent()); |
| 396 gesture->sourceDevice = blink::WebGestureDeviceTouchpad; | 395 gesture->sourceDevice = blink::WebGestureDeviceTouchpad; |
| 397 gesture->timeStampSeconds = | 396 gesture->timeStampSeconds = |
| 398 (base::TimeTicks::Now() - base::TimeTicks()).InSecondsF(); | 397 (base::TimeTicks::Now() - base::TimeTicks()).InSecondsF(); |
| 399 gesture->type = WebInputEvent::GestureTapDown; | 398 gesture->type = WebInputEvent::GestureTapDown; |
| 400 gesture->x = 0; | 399 gesture->x = 0; |
| 401 gesture->y = 0; | 400 gesture->y = 0; |
| 402 SendGestureOnGL(CONTENT, std::move(gesture)); | 401 SendGesture(CONTENT, std::move(gesture)); |
| 403 } | 402 } |
| 404 | 403 |
| 405 return; | 404 return; |
| 406 } | 405 } |
| 407 | 406 |
| 408 gvr::Vec3f ergo_neutral_pose; | 407 gvr::Vec3f ergo_neutral_pose; |
| 409 if (!controller_->IsConnected()) { | 408 if (!controller_->IsConnected()) { |
| 410 // No controller detected, set up a gaze cursor that tracks the | 409 // No controller detected, set up a gaze cursor that tracks the |
| 411 // forward direction. | 410 // forward direction. |
| 412 ergo_neutral_pose = {0.0f, 0.0f, -1.0f}; | 411 ergo_neutral_pose = {0.0f, 0.0f, -1.0f}; |
| (...skipping 74 matching lines...) | |
| 487 plane->copy_rect.width, plane->copy_rect.height}; | 486 plane->copy_rect.width, plane->copy_rect.height}; |
| 488 } | 487 } |
| 489 pixel_x = pixel_rect.width * x + pixel_rect.x; | 488 pixel_x = pixel_rect.width * x + pixel_rect.x; |
| 490 pixel_y = pixel_rect.height * y + pixel_rect.y; | 489 pixel_y = pixel_rect.height * y + pixel_rect.y; |
| 491 | 490 |
| 492 target_point_ = plane_intersection_point; | 491 target_point_ = plane_intersection_point; |
| 493 target_element_ = plane.get(); | 492 target_element_ = plane.get(); |
| 494 input_target = plane->content_quad ? CONTENT : UI; | 493 input_target = plane->content_quad ? CONTENT : UI; |
| 495 } | 494 } |
| 496 } | 495 } |
| 497 SendEventsToTargetOnGL(input_target, pixel_x, pixel_y); | 496 SendEventsToTarget(input_target, pixel_x, pixel_y); |
| 498 } | 497 } |
| 499 | 498 |
| 500 void VrShell::SendEventsToTargetOnGL(InputTarget input_target, | 499 void VrShellGl::SendEventsToTarget(InputTarget input_target, |
| 501 int pixel_x, | 500 int pixel_x, |
| 502 int pixel_y) { | 501 int pixel_y) { |
| 503 std::vector<std::unique_ptr<WebGestureEvent>> gesture_list = | 502 std::vector<std::unique_ptr<WebGestureEvent>> gesture_list = |
| 504 controller_->DetectGestures(); | 503 controller_->DetectGestures(); |
| 505 std::unique_ptr<WebGestureEvent> gesture = std::move(gesture_list.front()); | 504 std::unique_ptr<WebGestureEvent> gesture = std::move(gesture_list.front()); |
| 506 | 505 |
| 507 // TODO(asimjour) for now, scroll is sent to the main content. | 506 // TODO(asimjour) for now, scroll is sent to the main content. |
| 508 if (gesture->type == WebInputEvent::GestureScrollBegin || | 507 if (gesture->type == WebInputEvent::GestureScrollBegin || |
| 509 gesture->type == WebInputEvent::GestureScrollUpdate || | 508 gesture->type == WebInputEvent::GestureScrollUpdate || |
| 510 gesture->type == WebInputEvent::GestureScrollEnd || | 509 gesture->type == WebInputEvent::GestureScrollEnd || |
| 511 gesture->type == WebInputEvent::GestureFlingCancel) { | 510 gesture->type == WebInputEvent::GestureFlingCancel) { |
| 512 SendGestureOnGL(CONTENT, base::WrapUnique(new WebGestureEvent(*gesture))); | 511 SendGesture(CONTENT, base::WrapUnique(new WebGestureEvent(*gesture))); |
| 513 } | 512 } |
| 514 | 513 |
| 515 if (gesture->type == WebInputEvent::GestureScrollEnd) { | 514 if (gesture->type == WebInputEvent::GestureScrollEnd) { |
| 516 CHECK(gesture_list.size() == 2); | 515 CHECK(gesture_list.size() == 2); |
| 517 if (gesture_list.back()->type == WebInputEvent::GestureTapDown) { | 516 if (gesture_list.back()->type == WebInputEvent::GestureTapDown) { |
| 518 gesture_list.back()->x = pixel_x; | 517 gesture_list.back()->x = pixel_x; |
| 519 gesture_list.back()->y = pixel_y; | 518 gesture_list.back()->y = pixel_y; |
| 520 if (input_target != NONE) | 519 if (input_target != NONE) |
| 521 SendGestureOnGL(input_target, std::move(gesture_list.back())); | 520 SendGesture(input_target, std::move(gesture_list.back())); |
| 522 } else if (gesture_list.back()->type == WebInputEvent::GestureFlingStart) { | 521 } else if (gesture_list.back()->type == WebInputEvent::GestureFlingStart) { |
| 523 SendGestureOnGL(CONTENT, std::move(gesture_list.back())); | 522 SendGesture(CONTENT, std::move(gesture_list.back())); |
| 524 } else { | 523 } else { |
| 525 NOTREACHED(); | 524 NOTREACHED(); |
| 526 } | 525 } |
| 527 } | 526 } |
| 528 | 527 |
| 529 WebInputEvent::Type original_type = gesture->type; | 528 WebInputEvent::Type original_type = gesture->type; |
| 530 | 529 |
| 531 bool new_target = input_target != current_input_target_; | 530 bool new_target = input_target != current_input_target_; |
| 532 if (new_target && current_input_target_ != NONE) { | 531 if (new_target && current_input_target_ != NONE) { |
| 533 // Send a move event indicating that the pointer moved off of an element. | 532 // Send a move event indicating that the pointer moved off of an element. |
| 534 SendGestureOnGL(current_input_target_, MakeMouseEvent( | 533 SendGesture(current_input_target_, MakeMouseEvent( |
| 535 WebInputEvent::MouseLeave, gesture->timeStampSeconds, 0, 0)); | 534 WebInputEvent::MouseLeave, gesture->timeStampSeconds, 0, 0)); |
| 536 } | 535 } |
| 537 current_input_target_ = input_target; | 536 current_input_target_ = input_target; |
| 538 if (current_input_target_ == NONE) { | 537 if (current_input_target_ == NONE) { |
| 539 return; | 538 return; |
| 540 } | 539 } |
| 541 WebInputEvent::Type type = | 540 WebInputEvent::Type type = |
| 542 new_target ? WebInputEvent::MouseEnter : WebInputEvent::MouseMove; | 541 new_target ? WebInputEvent::MouseEnter : WebInputEvent::MouseMove; |
| 543 SendGestureOnGL(current_input_target_, MakeMouseEvent( | 542 SendGesture(current_input_target_, MakeMouseEvent( |
| 544 type, gesture->timeStampSeconds, pixel_x, pixel_y)); | 543 type, gesture->timeStampSeconds, pixel_x, pixel_y)); |
| 545 | 544 |
| 546 if (original_type == WebInputEvent::GestureTapDown || touch_pending_) { | 545 if (original_type == WebInputEvent::GestureTapDown || touch_pending_) { |
| 547 std::unique_ptr<WebGestureEvent> event(new WebGestureEvent(*gesture)); | 546 std::unique_ptr<WebGestureEvent> event(new WebGestureEvent(*gesture)); |
| 548 if (touch_pending_) { | 547 if (touch_pending_) { |
| 549 touch_pending_ = false; | 548 touch_pending_ = false; |
| 550 event->sourceDevice = blink::WebGestureDeviceTouchpad; | 549 event->sourceDevice = blink::WebGestureDeviceTouchpad; |
| 551 event->timeStampSeconds = | 550 event->timeStampSeconds = |
| 552 (base::TimeTicks::Now() - base::TimeTicks()).InSecondsF(); | 551 (base::TimeTicks::Now() - base::TimeTicks()).InSecondsF(); |
| 553 } | 552 } |
| 554 event->type = WebInputEvent::GestureTapDown; | 553 event->type = WebInputEvent::GestureTapDown; |
| 555 event->x = pixel_x; | 554 event->x = pixel_x; |
| 556 event->y = pixel_y; | 555 event->y = pixel_y; |
| 557 SendGestureOnGL(current_input_target_, std::move(event)); | 556 SendGesture(current_input_target_, std::move(event)); |
| 558 } | 557 } |
| 559 } | 558 } |
| 560 | 559 |
| 561 void VrShell::SendGestureOnGL(InputTarget input_target, | 560 void VrShellGl::SendGesture(InputTarget input_target, |
| 562 std::unique_ptr<blink::WebInputEvent> event) { | 561 std::unique_ptr<blink::WebInputEvent> event) { |
| 563 DCHECK(input_target != NONE); | 562 DCHECK(input_target != NONE); |
| 564 const base::WeakPtr<VrInputManager>& weak_ptr = | 563 const base::WeakPtr<VrInputManager>& weak_ptr = |
| 565 input_target == CONTENT ? weak_content_input_manager_ | 564 input_target == CONTENT ? content_input_manager_: ui_input_manager_; |
| 566 : weak_ui_input_manager_; | |
| 567 main_thread_task_runner_->PostTask( | 565 main_thread_task_runner_->PostTask( |
| 568 FROM_HERE, | 566 FROM_HERE, |
| 569 base::Bind(&VrInputManager::ProcessUpdatedGesture, weak_ptr, | 567 base::Bind(&VrInputManager::ProcessUpdatedGesture, weak_ptr, |
| 570 base::Passed(std::move(event)))); | 568 base::Passed(std::move(event)))); |
| 571 } | 569 } |
| 572 | 570 |
| 573 void VrShell::SetGvrPoseForWebVr(const gvr::Mat4f& pose, uint32_t pose_num) { | 571 void VrShellGl::SetGvrPoseForWebVr(const gvr::Mat4f& pose, uint32_t pose_num) { |
| 574 webvr_head_pose_[pose_num % kPoseRingBufferSize] = pose; | 572 webvr_head_pose_[pose_num % kPoseRingBufferSize] = pose; |
| 575 webvr_head_pose_valid_[pose_num % kPoseRingBufferSize] = true; | 573 webvr_head_pose_valid_[pose_num % kPoseRingBufferSize] = true; |
| 576 } | 574 } |
| 577 | 575 |
| 578 int GetPixelEncodedPoseIndexByte() { | 576 bool VrShellGl::WebVrPoseByteIsValid(int pose_index_byte) { |
| 579 TRACE_EVENT0("gpu", "VrShell::GetPixelEncodedPoseIndex"); | |
| 580 // Read the pose index encoded in a bottom left pixel as color values. | |
| 581 // See also third_party/WebKit/Source/modules/vr/VRDisplay.cpp which | |
| 582 // encodes the pose index, and device/vr/android/gvr/gvr_device.cc | |
| 583 // which tracks poses. Returns the low byte (0..255) if valid, or -1 | |
| 584 // if not valid due to bad magic number. | |
| 585 uint8_t pixels[4]; | |
| 586 // Assume we're reading from the framebuffer we just wrote to. | |
| 587 // That's true currently, we may need to use glReadBuffer(GL_BACK) | |
| 588 // or equivalent if the rendering setup changes in the future. | |
| 589 glReadPixels(0, 0, 1, 1, GL_RGBA, GL_UNSIGNED_BYTE, pixels); | |
| 590 | |
| 591 // Check for the magic number written by VRDevice.cpp on submit. | |
| 592 // This helps avoid glitches from garbage data in the render | |
| 593 // buffer that can appear during initialization or resizing. These | |
| 594 // often appear as flashes of all-black or all-white pixels. | |
| 595 if (pixels[1] == kWebVrPosePixelMagicNumbers[0] && | |
| 596 pixels[2] == kWebVrPosePixelMagicNumbers[1]) { | |
| 597 // Pose is good. | |
| 598 return pixels[0]; | |
| 599 } | |
| 600 VLOG(1) << "WebVR: reject decoded pose index " << (int)pixels[0] << | |
| 601 ", bad magic number " << (int)pixels[1] << ", " << (int)pixels[2]; | |
| 602 return -1; | |
| 603 } | |
| 604 | |
| 605 bool VrShell::WebVrPoseByteIsValidOnGL(int pose_index_byte) { | |
| 606 if (pose_index_byte < 0) { | 577 if (pose_index_byte < 0) { |
| 607 return false; | 578 return false; |
| 608 } | 579 } |
| 609 if (!webvr_head_pose_valid_[pose_index_byte % kPoseRingBufferSize]) { | 580 if (!webvr_head_pose_valid_[pose_index_byte % kPoseRingBufferSize]) { |
| 610 VLOG(1) << "WebVR: reject decoded pose index " << pose_index_byte << | 581 VLOG(1) << "WebVR: reject decoded pose index " << pose_index_byte << |
| 611 ", not a valid pose"; | 582 ", not a valid pose"; |
| 612 return false; | 583 return false; |
| 613 } | 584 } |
| 614 return true; | 585 return true; |
| 615 } | 586 } |
| 616 | 587 |
| 617 void VrShell::DrawFrameOnGL(JNIEnv* env, const JavaParamRef<jobject>& obj) { | 588 void VrShellGl::DrawFrame() { |
| 618 TRACE_EVENT0("gpu", "VrShell::DrawFrame"); | 589 TRACE_EVENT0("gpu", "VrShellGl::DrawFrame"); |
| 619 // Reset the viewport list to just the pair of viewports for the | 590 // Reset the viewport list to just the pair of viewports for the |
| 620 // primary buffer each frame. Head-locked viewports get added by | 591 // primary buffer each frame. Head-locked viewports get added by |
| 621 // DrawVrShell if needed. | 592 // DrawVrShell if needed. |
| 622 buffer_viewport_list_->SetToRecommendedBufferViewports(); | 593 buffer_viewport_list_->SetToRecommendedBufferViewports(); |
| 623 | 594 |
| 624 if (html_interface_->GetMode() == UiInterface::Mode::WEB_VR) { | 595 // TODO(klausw): Fix this. Resizing buffers here leads to webVR mode showing |
| 625 // If needed, resize the primary buffer for use with WebVR. | 596 // nothing but a black screen. |
| 626 if (render_size_primary_ != render_size_primary_webvr_) { | 597 // if (vr_shell_->GetUiInterface()->GetMode() == UiInterface::Mode::WEB_VR) { |
| 627 if (!render_size_primary_webvr_.width) { | 598 // // If needed, resize the primary buffer for use with WebVR. |
| 628 VLOG(2) << "WebVR rendering size not known yet, dropping frame"; | 599 // if (render_size_primary_ != render_size_primary_webvr_) { |
| 629 return; | 600 // if (!render_size_primary_webvr_.width) { |
| 630 } | 601 // VLOG(2) << "WebVR rendering size not known yet, dropping frame"; |
| 631 render_size_primary_ = render_size_primary_webvr_; | 602 // return; |
| 632 swap_chain_->ResizeBuffer(kFramePrimaryBuffer, render_size_primary_); | 603 // } |
| 633 } | 604 // render_size_primary_ = render_size_primary_webvr_; |
| 634 } else { | 605 // swap_chain_->ResizeBuffer(kFramePrimaryBuffer, render_size_primary_); |
| 635 if (render_size_primary_ != render_size_primary_vrshell_) { | 606 // } |
| 636 render_size_primary_ = render_size_primary_vrshell_; | 607 // } else { |
| 637 swap_chain_->ResizeBuffer(kFramePrimaryBuffer, render_size_primary_); | 608 // if (render_size_primary_ != render_size_primary_vrshell_) { |
| 638 } | 609 // render_size_primary_ = render_size_primary_vrshell_; |
| 639 } | 610 // swap_chain_->ResizeBuffer(kFramePrimaryBuffer, render_size_primary_); |
| 611 // } | |
| 612 // } | |
| 640 | 613 |
| 641 gvr::Frame frame = swap_chain_->AcquireFrame(); | 614 gvr::Frame frame = swap_chain_->AcquireFrame(); |
| 642 gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow(); | 615 gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow(); |
| 643 target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos; | 616 target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos; |
| 644 | 617 |
| 645 gvr::Mat4f head_pose = | 618 gvr::Mat4f head_pose = |
| 646 gvr_api_->GetHeadSpaceFromStartSpaceRotation(target_time); | 619 gvr_api_->GetHeadSpaceFromStartSpaceRotation(target_time); |
| 647 | 620 |
| 648 gvr::Vec3f position = GetTranslation(head_pose); | 621 gvr::Vec3f position = GetTranslation(head_pose); |
| 649 if (position.x == 0.0f && position.y == 0.0f && position.z == 0.0f) { | 622 if (position.x == 0.0f && position.y == 0.0f && position.z == 0.0f) { |
| 650 // This appears to be a 3DOF pose without a neck model. Add one. | 623 // This appears to be a 3DOF pose without a neck model. Add one. |
| 651 // The head pose has redundant data. Assume we're only using the | 624 // The head pose has redundant data. Assume we're only using the |
| 652 // object_from_reference_matrix, we're not updating position_external. | 625 // object_from_reference_matrix, we're not updating position_external. |
| 653 // TODO: Not sure what object_from_reference_matrix is. The new api removed | 626 // TODO: Not sure what object_from_reference_matrix is. The new api removed |
| 654 // it. For now, removing it seems working fine. | 627 // it. For now, removing it seems working fine. |
| 655 gvr_api_->ApplyNeckModel(head_pose, 1.0f); | 628 gvr_api_->ApplyNeckModel(head_pose, 1.0f); |
| 656 } | 629 } |
| 657 | 630 |
| 658 // Bind the primary framebuffer. | |
| 659 frame.BindBuffer(kFramePrimaryBuffer); | 631 frame.BindBuffer(kFramePrimaryBuffer); |
| 660 | 632 |
| 661 HandleQueuedTasksOnGL(); | |
| 662 | |
| 663 // Update the render position of all UI elements (including desktop). | 633 // Update the render position of all UI elements (including desktop). |
| 664 const float screen_tilt = kDesktopScreenTiltDefault * M_PI / 180.0f; | 634 const float screen_tilt = kDesktopScreenTiltDefault * M_PI / 180.0f; |
| 665 scene_->UpdateTransforms(screen_tilt, UiScene::TimeInMicroseconds()); | 635 scene_->UpdateTransforms(screen_tilt, UiScene::TimeInMicroseconds()); |
| 666 | 636 |
| 667 UpdateControllerOnGL(GetForwardVector(head_pose)); | 637 UpdateController(GetForwardVector(head_pose)); |
| 668 | 638 |
| 669 if (html_interface_->GetMode() == UiInterface::Mode::WEB_VR) { | 639 if (vr_shell_->GetUiInterface()->GetMode() == UiInterface::Mode::WEB_VR) { |
| 670 DrawWebVrOnGL(); | 640 DrawWebVr(); |
| 671 | 641 |
| 672 // When using async reprojection, we need to know which pose was used in | 642 // When using async reprojection, we need to know which pose was used in |
| 673 // the WebVR app for drawing this frame. Due to unknown amounts of | 643 // the WebVR app for drawing this frame. Due to unknown amounts of |
| 674 // buffering in the compositor and SurfaceTexture, we read the pose number | 644 // buffering in the compositor and SurfaceTexture, we read the pose number |
| 675 // from a corner pixel. There's no point in doing this for legacy | 645 // from a corner pixel. There's no point in doing this for legacy |
| 676 // distortion rendering since that doesn't need a pose, and reading back | 646 // distortion rendering since that doesn't need a pose, and reading back |
| 677 // pixels is an expensive operation. TODO(klausw,crbug.com/655722): stop | 647 // pixels is an expensive operation. TODO(klausw,crbug.com/655722): stop |
| 678 // doing this once we have working no-compositor rendering for WebVR. | 648 // doing this once we have working no-compositor rendering for WebVR. |
| 679 if (gvr_api_->GetAsyncReprojectionEnabled()) { | 649 if (gvr_api_->GetAsyncReprojectionEnabled()) { |
| 680 int pose_index_byte = GetPixelEncodedPoseIndexByte(); | 650 int pose_index_byte = GetPixelEncodedPoseIndexByte(); |
| 681 if (WebVrPoseByteIsValidOnGL(pose_index_byte)) { | 651 if (WebVrPoseByteIsValid(pose_index_byte)) { |
| 682 // We have a valid pose, use it for reprojection. | 652 // We have a valid pose, use it for reprojection. |
| 683 webvr_left_viewport_->SetReprojection(GVR_REPROJECTION_FULL); | 653 webvr_left_viewport_->SetReprojection(GVR_REPROJECTION_FULL); |
| 684 webvr_right_viewport_->SetReprojection(GVR_REPROJECTION_FULL); | 654 webvr_right_viewport_->SetReprojection(GVR_REPROJECTION_FULL); |
| 685 head_pose = webvr_head_pose_[pose_index_byte % kPoseRingBufferSize]; | 655 head_pose = webvr_head_pose_[pose_index_byte % kPoseRingBufferSize]; |
| 686 // We can't mark the used pose as invalid since unfortunately | 656 // We can't mark the used pose as invalid since unfortunately |
| 687 // we have to reuse them. The compositor will re-submit stale | 657 // we have to reuse them. The compositor will re-submit stale |
| 688 // frames on vsync, and we can't tell that this has happened | 658 // frames on vsync, and we can't tell that this has happened |
| 689 // until we've read the pose index from it, and at that point | 659 // until we've read the pose index from it, and at that point |
| 690 // it's too late to skip rendering. | 660 // it's too late to skip rendering. |
| 691 } else { | 661 } else { |
| 692 // If we don't get a valid frame ID back we shouldn't attempt | 662 // If we don't get a valid frame ID back we shouldn't attempt |
| 693 // to reproject by an invalid matrix, so turn off reprojection | 663 // to reproject by an invalid matrix, so turn off reprojection |
| 694 // instead. Invalid poses can permanently break reprojection | 664 // instead. Invalid poses can permanently break reprojection |
| 695 // for this GVR instance: http://crbug.com/667327 | 665 // for this GVR instance: http://crbug.com/667327 |
| 696 webvr_left_viewport_->SetReprojection(GVR_REPROJECTION_NONE); | 666 webvr_left_viewport_->SetReprojection(GVR_REPROJECTION_NONE); |
| 697 webvr_right_viewport_->SetReprojection(GVR_REPROJECTION_NONE); | 667 webvr_right_viewport_->SetReprojection(GVR_REPROJECTION_NONE); |
| 698 } | 668 } |
| 699 } | 669 } |
| 700 } | 670 } |
| 701 | 671 |
| 702 DrawVrShellOnGL(head_pose, frame); | 672 DrawVrShell(head_pose, frame); |
| 703 | 673 |
| 704 frame.Unbind(); | 674 frame.Unbind(); |
| 705 frame.Submit(*buffer_viewport_list_, head_pose); | 675 frame.Submit(*buffer_viewport_list_, head_pose); |
| 676 | |
| 677 // No need to SwapBuffers for an offscreen surface. | |
| 678 ScheduleNextDrawFrame(); | |
| 706 } | 679 } |
| 707 | 680 |
| 708 void VrShell::DrawVrShellOnGL(const gvr::Mat4f& head_pose, | 681 void VrShellGl::DrawVrShell(const gvr::Mat4f& head_pose, |
| 709 gvr::Frame &frame) { | 682 gvr::Frame &frame) { |
| 710 TRACE_EVENT0("gpu", "VrShell::DrawVrShell"); | 683 TRACE_EVENT0("gpu", "VrShellGl::DrawVrShell"); |
| 711 std::vector<const ContentRectangle*> head_locked_elements; | 684 std::vector<const ContentRectangle*> head_locked_elements; |
| 712 std::vector<const ContentRectangle*> world_elements; | 685 std::vector<const ContentRectangle*> world_elements; |
| 713 for (const auto& rect : scene_->GetUiElements()) { | 686 for (const auto& rect : scene_->GetUiElements()) { |
| 714 if (!rect->visible) { | 687 if (!rect->visible) { |
| 715 continue; | 688 continue; |
| 716 } | 689 } |
| 717 if (rect->lock_to_fov) { | 690 if (rect->lock_to_fov) { |
| 718 head_locked_elements.push_back(rect.get()); | 691 head_locked_elements.push_back(rect.get()); |
| 719 } else { | 692 } else { |
| 720 world_elements.push_back(rect.get()); | 693 world_elements.push_back(rect.get()); |
| 721 } | 694 } |
| 722 } | 695 } |
| 723 | 696 |
| 724 if (html_interface_->GetMode() == UiInterface::Mode::WEB_VR) { | 697 if (vr_shell_->GetUiInterface()->GetMode() == UiInterface::Mode::WEB_VR) { |
| 725 // WebVR is incompatible with 3D world compositing since the | 698 // WebVR is incompatible with 3D world compositing since the |
| 726 // depth buffer was already populated with unknown scaling - the | 699 // depth buffer was already populated with unknown scaling - the |
| 727 // WebVR app has full control over zNear/zFar. Just leave the | 700 // WebVR app has full control over zNear/zFar. Just leave the |
| 728 // existing content in place in the primary buffer without | 701 // existing content in place in the primary buffer without |
| 729 // clearing. Currently, there aren't any world elements in WebVR | 702 // clearing. Currently, there aren't any world elements in WebVR |
| 730 // mode, this will need further testing if those get added | 703 // mode, this will need further testing if those get added |
| 731 // later. | 704 // later. |
| 732 } else { | 705 } else { |
| 733 // Non-WebVR mode, enable depth testing and clear the primary buffers. | 706 // Non-WebVR mode, enable depth testing and clear the primary buffers. |
| 734 glEnable(GL_CULL_FACE); | 707 glEnable(GL_CULL_FACE); |
| 735 glEnable(GL_DEPTH_TEST); | 708 glEnable(GL_DEPTH_TEST); |
| 736 glDepthMask(GL_TRUE); | 709 glDepthMask(GL_TRUE); |
| 737 | 710 |
| 738 glClearColor(0.1f, 0.1f, 0.1f, 1.0f); | 711 glClearColor(0.1f, 0.1f, 0.1f, 1.0f); |
| 739 glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); | 712 glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); |
| 740 } | 713 } |
| 741 | |
| 742 if (!world_elements.empty()) { | 714 if (!world_elements.empty()) { |
| 743 DrawUiViewOnGL(&head_pose, world_elements, render_size_primary_, | 715 DrawUiView(&head_pose, world_elements, render_size_primary_, |
| 744 kViewportListPrimaryOffset); | 716 kViewportListPrimaryOffset); |
| 745 } | 717 } |
| 746 | 718 |
| 747 if (!head_locked_elements.empty()) { | 719 if (!head_locked_elements.empty()) { |
| 748 // Add head-locked viewports. The list gets reset to just | 720 // Add head-locked viewports. The list gets reset to just |
| 749 // the recommended viewports (for the primary buffer) each frame. | 721 // the recommended viewports (for the primary buffer) each frame. |
| 750 buffer_viewport_list_->SetBufferViewport( | 722 buffer_viewport_list_->SetBufferViewport( |
| 751 kViewportListHeadlockedOffset + GVR_LEFT_EYE, | 723 kViewportListHeadlockedOffset + GVR_LEFT_EYE, |
| 752 *headlocked_left_viewport_); | 724 *headlocked_left_viewport_); |
| 753 buffer_viewport_list_->SetBufferViewport( | 725 buffer_viewport_list_->SetBufferViewport( |
| 754 kViewportListHeadlockedOffset + GVR_RIGHT_EYE, | 726 kViewportListHeadlockedOffset + GVR_RIGHT_EYE, |
| 755 *headlocked_right_viewport_); | 727 *headlocked_right_viewport_); |
| 756 | 728 |
| 757 // Bind the headlocked framebuffer. | 729 // Bind the headlocked framebuffer. |
| 730 // TODO(mthiesse): We don't unbind this? Maybe some cleanup is in order | |
| 731 // here. | |
| 758 frame.BindBuffer(kFrameHeadlockedBuffer); | 732 frame.BindBuffer(kFrameHeadlockedBuffer); |
| 759 glClearColor(0.0f, 0.0f, 0.0f, 0.0f); | 733 glClearColor(0.0f, 0.0f, 0.0f, 0.0f); |
| 760 glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); | 734 glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); |
| 761 DrawUiViewOnGL(nullptr, head_locked_elements, render_size_headlocked_, | 735 DrawUiView(nullptr, head_locked_elements, render_size_headlocked_, |
| 762 kViewportListHeadlockedOffset); | 736 kViewportListHeadlockedOffset); |
| 763 } | 737 } |
| 764 } | 738 } |
| 765 | 739 |
| 766 void VrShell::SetWebVRRenderSurfaceSize(int width, int height) { | 740 void VrShellGl::SetWebVRRenderSurfaceSize(int width, int height) { |
| 767 render_size_primary_webvr_.width = width; | 741 render_size_primary_webvr_.width = width; |
| 768 render_size_primary_webvr_.height = height; | 742 render_size_primary_webvr_.height = height; |
| 769 // TODO(klausw,crbug.com/655722): set the WebVR render surface size here once | 743 // TODO(klausw,crbug.com/655722): set the WebVR render surface size here once |
| 770 // we have that. | 744 // we have that. |
| 771 } | 745 } |
| 772 | 746 |
| 773 gvr::Sizei VrShell::GetWebVRCompositorSurfaceSize() { | 747 gvr::Sizei VrShellGl::GetWebVRCompositorSurfaceSize() { |
| 774 // This is a stopgap while we're using the WebVR compositor rendering path. | 748 // This is a stopgap while we're using the WebVR compositor rendering path. |
| 775 // TODO(klausw,crbug.com/655722): Remove this method and member once we're | 749 // TODO(klausw,crbug.com/655722): Remove this method and member once we're |
| 776 // using a separate WebVR render surface. | 750 // using a separate WebVR render surface. |
| 777 return content_tex_physical_size_; | 751 return content_tex_physical_size_; |
| 778 } | 752 } |
| 779 | 753 |
| 780 | 754 void VrShellGl::DrawUiView(const gvr::Mat4f* head_pose, |
| 781 void VrShell::DrawUiViewOnGL(const gvr::Mat4f* head_pose, | 755 const std::vector<const ContentRectangle*>& elements, |
| 782 const std::vector<const ContentRectangle*>& elements, | 756 const gvr::Sizei& render_size, |
| 783 const gvr::Sizei& render_size, int viewport_offset) { | 757 int viewport_offset) { |
| 784 TRACE_EVENT0("gpu", "VrShell::DrawUiView"); | 758 TRACE_EVENT0("gpu", "VrShellGl::DrawUiView"); |
| 785 for (auto eye : {GVR_LEFT_EYE, GVR_RIGHT_EYE}) { | 759 for (auto eye : {GVR_LEFT_EYE, GVR_RIGHT_EYE}) { |
| 786 buffer_viewport_list_->GetBufferViewport( | 760 buffer_viewport_list_->GetBufferViewport( |
| 787 eye + viewport_offset, buffer_viewport_.get()); | 761 eye + viewport_offset, buffer_viewport_.get()); |
| 788 | 762 |
| 789 gvr::Mat4f view_matrix = gvr_api_->GetEyeFromHeadMatrix(eye); | 763 gvr::Mat4f view_matrix = gvr_api_->GetEyeFromHeadMatrix(eye); |
| 790 if (head_pose != nullptr) { | 764 if (head_pose != nullptr) { |
| 791 view_matrix = MatrixMul(view_matrix, *head_pose); | 765 view_matrix = MatrixMul(view_matrix, *head_pose); |
| 792 } | 766 } |
| 793 | 767 |
| 794 gvr::Recti pixel_rect = | 768 gvr::Recti pixel_rect = |
| 795 CalculatePixelSpaceRect(render_size, buffer_viewport_->GetSourceUv()); | 769 CalculatePixelSpaceRect(render_size, buffer_viewport_->GetSourceUv()); |
| 796 glViewport(pixel_rect.left, pixel_rect.bottom, | 770 glViewport(pixel_rect.left, pixel_rect.bottom, |
| 797 pixel_rect.right - pixel_rect.left, | 771 pixel_rect.right - pixel_rect.left, |
| 798 pixel_rect.top - pixel_rect.bottom); | 772 pixel_rect.top - pixel_rect.bottom); |
| 799 | 773 |
| 800 const gvr::Mat4f render_matrix = MatrixMul( | 774 const gvr::Mat4f render_matrix = MatrixMul( |
| 801 PerspectiveMatrixFromView( | 775 PerspectiveMatrixFromView( |
| 802 buffer_viewport_->GetSourceFov(), kZNear, kZFar), | 776 buffer_viewport_->GetSourceFov(), kZNear, kZFar), |
| 803 view_matrix); | 777 view_matrix); |
| 804 | 778 |
| 805 DrawElementsOnGL(render_matrix, elements); | 779 DrawElements(render_matrix, elements); |
| 806 if (head_pose != nullptr && | 780 if (head_pose != nullptr && |
| 807 html_interface_->GetMode() != UiInterface::Mode::WEB_VR) { | 781 vr_shell_->GetUiInterface()->GetMode() != |
| 808 DrawCursorOnGL(render_matrix); | 782 UiInterface::Mode::WEB_VR) { |
| 783 DrawCursor(render_matrix); | |
| 809 } | 784 } |
| 810 } | 785 } |
| 811 } | 786 } |
| 812 | 787 |
| 813 void VrShell::DrawElementsOnGL( | 788 void VrShellGl::DrawElements( |
| 814 const gvr::Mat4f& render_matrix, | 789 const gvr::Mat4f& render_matrix, |
| 815 const std::vector<const ContentRectangle*>& elements) { | 790 const std::vector<const ContentRectangle*>& elements) { |
| 816 for (const auto& rect : elements) { | 791 for (const auto& rect : elements) { |
| 817 Rectf copy_rect; | 792 Rectf copy_rect; |
| 818 jint texture_handle; | 793 jint texture_handle; |
| 819 if (rect->content_quad) { | 794 if (rect->content_quad) { |
| 820 copy_rect = {0, 0, 1, 1}; | 795 copy_rect = {0, 0, 1, 1}; |
| 821 texture_handle = content_texture_id_; | 796 texture_handle = content_texture_id_; |
| 822 } else { | 797 } else { |
| 823 copy_rect.x = static_cast<float>(rect->copy_rect.x) / ui_tex_css_width_; | 798 copy_rect.x = static_cast<float>(rect->copy_rect.x) / ui_tex_css_width_; |
| 824 copy_rect.y = static_cast<float>(rect->copy_rect.y) / ui_tex_css_height_; | 799 copy_rect.y = static_cast<float>(rect->copy_rect.y) / ui_tex_css_height_; |
| 825 copy_rect.width = static_cast<float>(rect->copy_rect.width) / | 800 copy_rect.width = static_cast<float>(rect->copy_rect.width) / |
| 826 ui_tex_css_width_; | 801 ui_tex_css_width_; |
| 827 copy_rect.height = static_cast<float>(rect->copy_rect.height) / | 802 copy_rect.height = static_cast<float>(rect->copy_rect.height) / |
| 828 ui_tex_css_height_; | 803 ui_tex_css_height_; |
| 829 texture_handle = ui_texture_id_; | 804 texture_handle = ui_texture_id_; |
| 830 } | 805 } |
| 831 gvr::Mat4f transform = MatrixMul(render_matrix, rect->transform.to_world); | 806 gvr::Mat4f transform = MatrixMul(render_matrix, rect->transform.to_world); |
| 832 vr_shell_renderer_->GetTexturedQuadRenderer()->Draw( | 807 vr_shell_renderer_->GetTexturedQuadRenderer()->Draw( |
| 833 texture_handle, transform, copy_rect); | 808 texture_handle, transform, copy_rect); |
| 834 } | 809 } |
| 835 } | 810 } |
| 836 | 811 |
| 837 void VrShell::DrawCursorOnGL(const gvr::Mat4f& render_matrix) { | 812 void VrShellGl::DrawCursor(const gvr::Mat4f& render_matrix) { |
| 838 gvr::Mat4f mat; | 813 gvr::Mat4f mat; |
| 839 SetIdentityM(mat); | 814 SetIdentityM(mat); |
| 840 | 815 |
| 841 // Draw the reticle. | 816 // Draw the reticle. |
| 842 | 817 |
| 843 // Scale the pointer to have a fixed FOV size at any distance. | 818 // Scale the pointer to have a fixed FOV size at any distance. |
| 844 const float eye_to_target = Distance(target_point_, kOrigin); | 819 const float eye_to_target = Distance(target_point_, kOrigin); |
| 845 ScaleM(mat, mat, kReticleWidth * eye_to_target, | 820 ScaleM(mat, mat, kReticleWidth * eye_to_target, |
| 846 kReticleHeight * eye_to_target, 1.0f); | 821 kReticleHeight * eye_to_target, 1.0f); |
| 847 | 822 |
| (...skipping 52 matching lines...) | |
| 900 | 875 |
| 901 // Move the beam origin to the hand. | 876 // Move the beam origin to the hand. |
| 902 TranslateM(face_transform, face_transform, kHandPosition.x, kHandPosition.y, | 877 TranslateM(face_transform, face_transform, kHandPosition.x, kHandPosition.y, |
| 903 kHandPosition.z); | 878 kHandPosition.z); |
| 904 | 879 |
| 905 transform = MatrixMul(render_matrix, face_transform); | 880 transform = MatrixMul(render_matrix, face_transform); |
| 906 vr_shell_renderer_->GetLaserRenderer()->Draw(transform); | 881 vr_shell_renderer_->GetLaserRenderer()->Draw(transform); |
| 907 } | 882 } |
| 908 } | 883 } |
| 909 | 884 |
| 910 void VrShell::DrawWebVrOnGL() { | 885 void VrShellGl::DrawWebVr() { |
| 911 TRACE_EVENT0("gpu", "VrShell::DrawWebVr"); | 886 TRACE_EVENT0("gpu", "VrShellGl::DrawWebVr"); |
| 912 // Don't need face culling, depth testing, blending, etc. Turn it all off. | 887 // Don't need face culling, depth testing, blending, etc. Turn it all off. |
| 913 glDisable(GL_CULL_FACE); | 888 glDisable(GL_CULL_FACE); |
| 914 glDepthMask(GL_FALSE); | 889 glDepthMask(GL_FALSE); |
| 915 glDisable(GL_DEPTH_TEST); | 890 glDisable(GL_DEPTH_TEST); |
| 916 glDisable(GL_SCISSOR_TEST); | 891 glDisable(GL_SCISSOR_TEST); |
| 917 glDisable(GL_BLEND); | 892 glDisable(GL_BLEND); |
| 918 glDisable(GL_POLYGON_OFFSET_FILL); | 893 glDisable(GL_POLYGON_OFFSET_FILL); |
| 919 | 894 |
| 920 glViewport(0, 0, render_size_primary_.width, render_size_primary_.height); | 895 glViewport(0, 0, render_size_primary_.width, render_size_primary_.height); |
| 921 vr_shell_renderer_->GetWebVrRenderer()->Draw(webvr_texture_id_); | 896 vr_shell_renderer_->GetWebVrRenderer()->Draw(webvr_texture_id_); |
| 922 | 897 |
| 923 buffer_viewport_list_->SetBufferViewport(GVR_LEFT_EYE, | 898 buffer_viewport_list_->SetBufferViewport(GVR_LEFT_EYE, |
| 924 *webvr_left_viewport_); | 899 *webvr_left_viewport_); |
| 925 buffer_viewport_list_->SetBufferViewport(GVR_RIGHT_EYE, | 900 buffer_viewport_list_->SetBufferViewport(GVR_RIGHT_EYE, |
| 926 *webvr_right_viewport_); | 901 *webvr_right_viewport_); |
| 927 } | 902 } |
| 928 | 903 |
| 929 void VrShell::OnTriggerEventOnUI(JNIEnv* env, | 904 void VrShellGl::OnTriggerEvent() { |
| 930 const JavaParamRef<jobject>& obj) { | |
| 931 // Set a flag to handle this on the render thread at the next frame. | 905 // Set a flag to handle this on the render thread at the next frame. |
| 932 touch_pending_ = true; | 906 touch_pending_ = true; |
| 933 } | 907 } |
| 934 | 908 |
| 935 void VrShell::OnPauseOnUI(JNIEnv* env, const JavaParamRef<jobject>& obj) { | 909 void VrShellGl::OnPause() { |
| 936 if (gvr_api_ == nullptr) | |
| 937 return; | |
| 938 | |
| 939 // TODO(mthiesse): Clean up threading here. | |
| 940 controller_->OnPause(); | 910 controller_->OnPause(); |
| 941 gvr_api_->PauseTracking(); | 911 gvr_api_->PauseTracking(); |
| 942 | |
| 943 // exit vr session | |
| 944 metrics_helper_->SetVRActive(false); | |
| 945 } | 912 } |
| 946 | 913 |
| 947 void VrShell::OnResumeOnUI(JNIEnv* env, const JavaParamRef<jobject>& obj) { | 914 void VrShellGl::OnResume() { |
| 948 if (gvr_api_ == nullptr) | |
| 949 return; | |
| 950 | |
| 951 // TODO(mthiesse): Clean up threading here. | |
| 952 gvr_api_->RefreshViewerProfile(); | 915 gvr_api_->RefreshViewerProfile(); |
| 953 gvr_api_->ResumeTracking(); | 916 gvr_api_->ResumeTracking(); |
| 954 controller_->OnResume(); | 917 controller_->OnResume(); |
| 955 | |
| 956 // enter vr session | |
| 957 metrics_helper_->SetVRActive(true); | |
| 958 } | 918 } |
| 959 | 919 |
| 960 base::WeakPtr<VrShell> VrShell::GetWeakPtrOnUI( | 920 void VrShellGl::SetWebVrMode(bool enabled) { |
| 961 const content::WebContents* web_contents) { | |
| 962 // Ensure that the WebContents requesting the VrShell instance is the one | |
| 963 // we created. | |
| 964 if (g_instance != nullptr && g_instance->ui_contents_ == web_contents) | |
| 965 return g_instance->weak_ptr_factory_.GetWeakPtr(); | |
| 966 return base::WeakPtr<VrShell>(nullptr); | |
| 967 } | |
| 968 | |
| 969 void VrShell::OnDomContentsLoadedOnUI() { | |
| 970 html_interface_->SetURL(main_contents_->GetVisibleURL()); | |
| 971 html_interface_->SetLoading(main_contents_->IsLoading()); | |
| 972 html_interface_->OnDomContentsLoaded(); | |
| 973 } | |
| 974 | |
| 975 void VrShell::SetWebVrModeOnUI(JNIEnv* env, | |
| 976 const base::android::JavaParamRef<jobject>& obj, | |
| 977 bool enabled) { | |
| 978 metrics_helper_->SetWebVREnabled(enabled); | |
| 979 if (enabled) { | 921 if (enabled) { |
| 980 html_interface_->SetMode(UiInterface::Mode::WEB_VR); | 922 vr_shell_->GetUiInterface()->SetMode(UiInterface::Mode::WEB_VR); |
| 981 } else { | 923 } else { |
| 982 html_interface_->SetMode(UiInterface::Mode::STANDARD); | 924 vr_shell_->GetUiInterface()->SetMode(UiInterface::Mode::STANDARD); |
| 983 } | 925 } |
| 984 } | 926 } |
| 985 | 927 |
| 986 void VrShell::SetWebVRSecureOrigin(bool secure_origin) { | 928 void VrShellGl::UpdateWebVRTextureBounds(const gvr::Rectf& left_bounds, |
| 987 // TODO(cjgrant): Align this state with the logic that drives the omnibox. | 929 const gvr::Rectf& right_bounds) { |
| 988 html_interface_->SetWebVRSecureOrigin(secure_origin); | |
| 989 } | |
| 990 | |
| 991 void VrShell::SubmitWebVRFrame() {} | |
| 992 | |
| 993 void VrShell::UpdateWebVRTextureBounds(const gvr::Rectf& left_bounds, | |
| 994 const gvr::Rectf& right_bounds) { | |
| 995 webvr_left_viewport_->SetSourceUv(left_bounds); | 930 webvr_left_viewport_->SetSourceUv(left_bounds); |
| 996 webvr_right_viewport_->SetSourceUv(right_bounds); | 931 webvr_right_viewport_->SetSourceUv(right_bounds); |
| 997 } | 932 } |
| 998 | 933 |
| 999 gvr::GvrApi* VrShell::gvr_api() { | 934 gvr::GvrApi* VrShellGl::gvr_api() { |
| 1000 return gvr_api_.get(); | 935 return gvr_api_.get(); |
| 1001 } | 936 } |
| 1002 | 937 |
| 1003 void VrShell::SurfacesChangedOnUI(JNIEnv* env, | 938 void VrShellGl::ContentBoundsChanged(int width, int height) { |
| 1004 const JavaParamRef<jobject>& object, | 939 TRACE_EVENT0("gpu", "VrShellGl::ContentBoundsChanged"); |
| 1005 const JavaParamRef<jobject>& content_surface, | 940 content_tex_css_width_ = width; |
| 1006 const JavaParamRef<jobject>& ui_surface) { | 941 content_tex_css_height_ = height; |
| 1007 content_compositor_->SurfaceChanged(content_surface); | |
| 1008 ui_compositor_->SurfaceChanged(ui_surface); | |
| 1009 } | 942 } |
| 1010 | 943 |
| 1011 void VrShell::ContentBoundsChangedOnUI(JNIEnv* env, | 944 void VrShellGl::ContentPhysicalBoundsChanged(int width, int height) { |
| 1012 const JavaParamRef<jobject>& object, | 945 if (content_surface_texture_.get()) |
| 1013 jint width, jint height, jfloat dpr) { | 946 content_surface_texture_->SetDefaultBufferSize(width, height); |
| 1014 TRACE_EVENT0("gpu", "VrShell::ContentBoundsChanged"); | |
| 1015 content_tex_physical_size_.width = width; | 947 content_tex_physical_size_.width = width; |
| 1016 content_tex_physical_size_.height = height; | 948 content_tex_physical_size_.height = height; |
| 1017 // TODO(mthiesse): Synchronize with GL thread, and update tex css size in | |
| 1018 // response to MainFrameWasResized, not here. | |
| 1019 content_tex_css_width_ = width / dpr; | |
| 1020 content_tex_css_height_ = height / dpr; | |
| 1021 | |
| 1022 content_compositor_->SetWindowBounds(width, height); | |
| 1023 } | 949 } |
| 1024 | 950 |
| 1025 void VrShell::UIBoundsChangedOnUI(JNIEnv* env, | 951 void VrShellGl::UIBoundsChanged(int width, int height) { |
| 1026 const JavaParamRef<jobject>& object, | 952 ui_tex_css_width_ = width; |
| 1027 jint width, jint height, jfloat dpr) { | 953 ui_tex_css_height_ = height; |
| 1028 ui_compositor_->SetWindowBounds(width, height); | |
| 1029 } | 954 } |
| 1030 | 955 |
| 1031 UiScene* VrShell::GetSceneOnGL() { | 956 void VrShellGl::UIPhysicalBoundsChanged(int width, int height) { |
| 1032 return scene_.get(); | 957 if (ui_surface_texture_.get()) |
| 958 ui_surface_texture_->SetDefaultBufferSize(width, height); | |
| 959 ui_tex_physical_size_.width = width; | |
| 960 ui_tex_physical_size_.height = height; | |
| 1033 } | 961 } |
| 1034 | 962 |
| 1035 UiInterface* VrShell::GetUiInterfaceOnGL() { | 963 base::WeakPtr<VrShellGl> VrShellGl::GetWeakPtr() { |
| 1036 return html_interface_.get(); | 964 return weak_ptr_factory_.GetWeakPtr(); |
| 1037 } | 965 } |
| 1038 | 966 |
| 1039 void VrShell::QueueTaskOnUI(base::Callback<void()>& callback) { | 967 void VrShellGl::UpdateVSyncParameters(const base::TimeTicks timebase, |
| 1040 base::AutoLock lock(task_queue_lock_); | 968 const base::TimeDelta interval) { |
| 1041 task_queue_.push(callback); | 969 vsync_timebase_ = timebase; |
| 970 vsync_interval_ = interval; | |
| 1042 } | 971 } |
| 1043 | 972 |
| 1044 void VrShell::HandleQueuedTasksOnGL() { | 973 void VrShellGl::ScheduleNextDrawFrame() { |
| 1045 // To protect a stream of tasks from blocking rendering indefinitely, | 974 base::TimeTicks now = base::TimeTicks::Now(); |
| 1046 // process only the number of tasks present when first checked. | 975 base::TimeTicks target; |
| 1047 std::vector<base::Callback<void()>> tasks; | 976 |
| 1048 { | 977 if (vsync_interval_.is_zero()) { |
| 1049 base::AutoLock lock(task_queue_lock_); | 978 target = now; |
| 1050 const size_t count = task_queue_.size(); | 979 } else { |
| 1051 for (size_t i = 0; i < count; i++) { | 980 target = now + vsync_interval_; |
| 1052 tasks.push_back(task_queue_.front()); | 981 int64_t intervals = (target - vsync_timebase_) / vsync_interval_; |
| 1053 task_queue_.pop(); | 982 target = vsync_timebase_ + intervals * vsync_interval_; |
| 1054 } | |
| 1055 } | 983 } |
| 1056 for (auto &task : tasks) { | |
| 1057 task.Run(); | |
| 1058 } | |
| 1059 } | |
| 1060 | 984 |
| 1061 void VrShell::DoUiActionOnUI(const UiAction action) { | 985 task_runner_->PostDelayedTask(FROM_HERE, draw_task_.callback(), target - now); |
| 1062 content::NavigationController& controller = main_contents_->GetController(); | |
| 1063 switch (action) { | |
| 1064 case HISTORY_BACK: | |
| 1065 if (main_contents_->IsFullscreen()) { | |
| 1066 main_contents_->ExitFullscreen(true /* will_cause_resize */); | |
| 1067 } else if (controller.CanGoBack()) { | |
| 1068 controller.GoBack(); | |
| 1069 } | |
| 1070 break; | |
| 1071 case HISTORY_FORWARD: | |
| 1072 if (controller.CanGoForward()) | |
| 1073 controller.GoForward(); | |
| 1074 break; | |
| 1075 case RELOAD: | |
| 1076 controller.Reload(false); | |
| 1077 break; | |
| 1078 #if defined(ENABLE_VR_SHELL_UI_DEV) | |
| 1079 case RELOAD_UI: | |
| 1080 ui_contents_->GetController().Reload(false); | |
| 1081 html_interface_.reset(new UiInterface(UiInterface::Mode::STANDARD, | |
| 1082 main_contents_->IsFullscreen())); | |
| 1083 vr_web_contents_observer_->SetUiInterface(html_interface_.get()); | |
| 1084 break; | |
| 1085 #endif | |
| 1086 case ZOOM_OUT: // Not handled yet. | |
| 1087 case ZOOM_IN: // Not handled yet. | |
| 1088 break; | |
| 1089 default: | |
| 1090 NOTREACHED(); | |
| 1091 } | |
| 1092 } | |
| 1093 | |
| 1094 void VrShell::RenderViewHostChanged(content::RenderViewHost* old_host, | |
| 1095 content::RenderViewHost* new_host) { | |
| 1096 new_host->GetWidget()->GetView()->SetBackgroundColor(SK_ColorTRANSPARENT); | |
| 1097 } | |
| 1098 | |
| 1099 void VrShell::MainFrameWasResized(bool width_changed) { | |
| 1100 display::Display display = display::Screen::GetScreen() | |
| 1101 ->GetDisplayNearestWindow(ui_contents_->GetNativeView()); | |
| 1102 // TODO(mthiesse): Synchronize with GL thread. | |
| 1103 ui_tex_css_width_ = display.size().width(); | |
| 1104 ui_tex_css_height_ = display.size().height(); | |
| 1105 } | |
| 1106 | |
| 1107 void VrShell::WebContentsDestroyed() { | |
| 1108 ui_input_manager_.reset(); | |
| 1109 ui_contents_ = nullptr; | |
| 1110 // TODO(mthiesse): Handle web contents being destroyed. | |
| 1111 delegate_->ForceExitVr(); | |
| 1112 } | |
| 1113 | |
| 1114 void VrShell::ContentWebContentsDestroyedOnUI() { | |
| 1115 content_input_manager_.reset(); | |
| 1116 main_contents_ = nullptr; | |
| 1117 // TODO(mthiesse): Handle web contents being destroyed. | |
| 1118 delegate_->ForceExitVr(); | |
| 1119 } | |
| 1120 | |
| 1121 void VrShell::ContentWasHiddenOnUI() { | |
| 1122 // Ensure we don't continue sending input to it. | |
| 1123 content_input_manager_.reset(); | |
| 1124 // TODO(mthiesse): Handle web contents being hidden. | |
| 1125 delegate_->ForceExitVr(); | |
| 1126 } | |
| 1127 | |
| 1128 void VrShell::SetContentCssSizeOnUI(float width, float height, float dpr) { | |
| 1129 JNIEnv* env = base::android::AttachCurrentThread(); | |
| 1130 Java_VrShellImpl_setContentCssSizeOnUI(env, j_vr_shell_.obj(), width, height, | |
| 1131 dpr); | |
| 1132 } | |
| 1133 | |
| 1134 void VrShell::SetUiCssSizeOnUI(float width, float height, float dpr) { | |
| 1135 JNIEnv* env = base::android::AttachCurrentThread(); | |
| 1136 Java_VrShellImpl_setUiCssSizeOnUI(env, j_vr_shell_.obj(), width, height, dpr); | |
| 1137 } | |
| 1138 | |
| 1139 // ---------------------------------------------------------------------------- | |
| 1140 // Native JNI methods | |
| 1141 // ---------------------------------------------------------------------------- | |
| 1142 | |
| 1143 jlong InitOnUI(JNIEnv* env, | |
| 1144 const JavaParamRef<jobject>& obj, | |
| 1145 const JavaParamRef<jobject>& content_web_contents, | |
| 1146 jlong content_window_android, | |
| 1147 const JavaParamRef<jobject>& ui_web_contents, | |
| 1148 jlong ui_window_android, | |
| 1149 jboolean for_web_vr) { | |
| 1150 return reinterpret_cast<intptr_t>(new VrShell( | |
| 1151 env, obj, content::WebContents::FromJavaWebContents(content_web_contents), | |
| 1152 reinterpret_cast<ui::WindowAndroid*>(content_window_android), | |
| 1153 content::WebContents::FromJavaWebContents(ui_web_contents), | |
| 1154 reinterpret_cast<ui::WindowAndroid*>(ui_window_android), | |
| 1155 for_web_vr)); | |
| 1156 } | 986 } |
| 1157 | 987 |
| 1158 } // namespace vr_shell | 988 } // namespace vr_shell |
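
In `VrShellGl::DrawCursor` above, the reticle quad is scaled by `kReticleWidth * eye_to_target` (and likewise for height), so its world-space size grows linearly with the distance to the target point. Assuming the reticle constants encode an angular size (in radians), the small-angle relation angle ≈ width / distance explains why this keeps the reticle's apparent size fixed at any distance. A tiny illustrative helper, not part of the patch:

```cpp
// Width, in world units, that a quad placed `distance` meters from the eye
// must have to subtend `angle` radians (small-angle approximation).
// DrawCursor applies exactly this scaling: kReticleWidth * eye_to_target.
float WidthForConstantAngle(float angle, float distance) {
  return angle * distance;
}
// Recovering the angle as width / distance returns `angle` for any positive
// distance, which is why the reticle looks the same size whether the target
// point is near or far.
```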
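
The deleted `VrShell::QueueTaskOnUI` / `HandleQueuedTasksOnGL` pair drained cross-thread tasks on the GL thread while capping the work done per frame: it snapshotted the queue size under the lock, popped exactly that many callbacks, and ran them after releasing the lock so a steady stream of new posts could not starve rendering. A minimal standalone sketch of the same bounded-drain pattern, using the standard library in place of Chromium types (std::mutex and std::function stand in for base::Lock and base::Callback; GlTaskQueue is an illustrative name, not a Chromium class):

```cpp
#include <cstddef>
#include <functional>
#include <mutex>
#include <queue>
#include <vector>

class GlTaskQueue {
 public:
  // Called from any thread to queue work for the GL thread.
  void Post(std::function<void()> task) {
    std::lock_guard<std::mutex> lock(lock_);
    queue_.push(std::move(task));
  }

  // Runs only the tasks that were already queued when this call started,
  // so tasks posted while draining wait for the next frame instead of
  // blocking rendering indefinitely.
  void DrainOnce() {
    std::vector<std::function<void()>> tasks;
    {
      std::lock_guard<std::mutex> lock(lock_);
      const size_t count = queue_.size();
      for (size_t i = 0; i < count; ++i) {
        tasks.push_back(std::move(queue_.front()));
        queue_.pop();
      }
    }
    // Run outside the lock so a task may safely post more work.
    for (auto& task : tasks)
      task();
  }

 private:
  std::mutex lock_;
  std::queue<std::function<void()>> queue_;
};
```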
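
`VrShellGl::ScheduleNextDrawFrame` in the new file aligns the next draw with the display's vsync grid: it adds one interval to `now`, counts how many whole intervals have elapsed since the recorded timebase, and snaps the target back onto that grid before posting the delayed task. A hedged sketch of the same arithmetic using std::chrono (NextVsyncAlignedTarget is an illustrative helper; the real code works with base::TimeTicks/base::TimeDelta and posts `draw_task_` with the delay `target - now`):

```cpp
#include <chrono>
#include <cstdint>

using Clock = std::chrono::steady_clock;

// Returns the next vsync-aligned time at or after now + interval, given the
// timebase/interval pair reported via UpdateVSyncParameters. If no interval
// is known yet, the caller simply draws as soon as possible.
Clock::time_point NextVsyncAlignedTarget(Clock::time_point now,
                                         Clock::time_point vsync_timebase,
                                         Clock::duration vsync_interval) {
  if (vsync_interval == Clock::duration::zero())
    return now;
  Clock::time_point target = now + vsync_interval;
  // Integer division floors to the last whole interval, so the result lands
  // exactly on the vsync grid rather than drifting by the scheduling jitter.
  const int64_t intervals = (target - vsync_timebase) / vsync_interval;
  return vsync_timebase + intervals * vsync_interval;
}
```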