Chromium Code Reviews

Side by Side Diff: chrome/browser/android/vr_shell/vr_shell_gl.cc

Issue 2562733002: Implement our own GLThread for VR Shell. (Closed)
Patch Set: "sigh" (created 4 years ago)
1 // Copyright 2016 The Chromium Authors. All rights reserved. 1 // Copyright 2016 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "chrome/browser/android/vr_shell/vr_shell.h" 5 #include "chrome/browser/android/vr_shell/vr_shell_gl.h"
6 6
7 #include "base/memory/ptr_util.h"
7 #include "base/metrics/histogram_macros.h" 8 #include "base/metrics/histogram_macros.h"
9 #include "base/threading/thread_task_runner_handle.h"
8 #include "chrome/browser/android/vr_shell/ui_elements.h" 10 #include "chrome/browser/android/vr_shell/ui_elements.h"
9 #include "chrome/browser/android/vr_shell/ui_interface.h" 11 #include "chrome/browser/android/vr_shell/ui_interface.h"
10 #include "chrome/browser/android/vr_shell/ui_scene.h" 12 #include "chrome/browser/android/vr_shell/ui_scene.h"
11 #include "chrome/browser/android/vr_shell/vr_compositor.h"
12 #include "chrome/browser/android/vr_shell/vr_controller.h" 13 #include "chrome/browser/android/vr_shell/vr_controller.h"
13 #include "chrome/browser/android/vr_shell/vr_gl_util.h" 14 #include "chrome/browser/android/vr_shell/vr_gl_util.h"
14 #include "chrome/browser/android/vr_shell/vr_input_manager.h" 15 #include "chrome/browser/android/vr_shell/vr_input_manager.h"
15 #include "chrome/browser/android/vr_shell/vr_shell_delegate.h" 16 #include "chrome/browser/android/vr_shell/vr_math.h"
17 #include "chrome/browser/android/vr_shell/vr_shell.h"
16 #include "chrome/browser/android/vr_shell/vr_shell_renderer.h" 18 #include "chrome/browser/android/vr_shell/vr_shell_renderer.h"
17 #include "chrome/browser/android/vr_shell/vr_usage_monitor.h" 19 #include "third_party/WebKit/public/platform/WebInputEvent.h"
18 #include "chrome/browser/android/vr_shell/vr_web_contents_observer.h" 20 #include "ui/gfx/vsync_provider.h"
19 #include "content/public/browser/navigation_controller.h" 21 #include "ui/gl/android/scoped_java_surface.h"
20 #include "content/public/browser/render_view_host.h" 22 #include "ui/gl/android/surface_texture.h"
21 #include "content/public/browser/render_widget_host.h"
22 #include "content/public/browser/render_widget_host_view.h"
23 #include "content/public/browser/web_contents.h"
24 #include "content/public/common/referrer.h"
25 #include "device/vr/android/gvr/gvr_device_provider.h"
26 #include "jni/VrShellImpl_jni.h"
27 #include "ui/android/view_android.h"
28 #include "ui/android/window_android.h"
29 #include "ui/base/page_transition_types.h"
30 #include "ui/display/display.h"
31 #include "ui/display/screen.h"
32 #include "ui/gl/gl_bindings.h" 23 #include "ui/gl/gl_bindings.h"
24 #include "ui/gl/gl_context.h"
25 #include "ui/gl/gl_surface.h"
33 #include "ui/gl/init/gl_factory.h" 26 #include "ui/gl/init/gl_factory.h"
34 27
35 using base::android::JavaParamRef;
36
37 namespace vr_shell { 28 namespace vr_shell {
38 29
39 namespace { 30 namespace {
40 // Constant taken from treasure_hunt demo. 31 // Constant taken from treasure_hunt demo.
41 static constexpr long kPredictionTimeWithoutVsyncNanos = 50000000; 32 static constexpr long kPredictionTimeWithoutVsyncNanos = 50000000;
42 33
43 static constexpr float kZNear = 0.1f; 34 static constexpr float kZNear = 0.1f;
44 static constexpr float kZFar = 1000.0f; 35 static constexpr float kZFar = 1000.0f;
45 36
46 // Screen angle in degrees. 0 = vertical, positive = top closer. 37 // Screen angle in degrees. 0 = vertical, positive = top closer.
(...skipping 41 matching lines...)
88 79
89 // The GVR viewport list has two entries (left eye and right eye) for each 80 // The GVR viewport list has two entries (left eye and right eye) for each
90 // GVR buffer. 81 // GVR buffer.
91 static constexpr int kViewportListPrimaryOffset = 0; 82 static constexpr int kViewportListPrimaryOffset = 0;
92 static constexpr int kViewportListHeadlockedOffset = 2; 83 static constexpr int kViewportListHeadlockedOffset = 2;
93 84
94 // Magic numbers used to mark valid pose index values encoded in frame 85 // Magic numbers used to mark valid pose index values encoded in frame
95 // data. Must match the magic numbers used in blink's VRDisplay.cpp. 86 // data. Must match the magic numbers used in blink's VRDisplay.cpp.
96 static constexpr std::array<uint8_t, 2> kWebVrPosePixelMagicNumbers{{42, 142}}; 87 static constexpr std::array<uint8_t, 2> kWebVrPosePixelMagicNumbers{{42, 142}};
97 88
98 vr_shell::VrShell* g_instance;
99
100 static const char kVrShellUIURL[] = "chrome://vr-shell-ui";
101
102 float Distance(const gvr::Vec3f& vec1, const gvr::Vec3f& vec2) { 89 float Distance(const gvr::Vec3f& vec1, const gvr::Vec3f& vec2) {
103 float xdiff = (vec1.x - vec2.x); 90 float xdiff = (vec1.x - vec2.x);
104 float ydiff = (vec1.y - vec2.y); 91 float ydiff = (vec1.y - vec2.y);
105 float zdiff = (vec1.z - vec2.z); 92 float zdiff = (vec1.z - vec2.z);
106 float scale = xdiff * xdiff + ydiff * ydiff + zdiff * zdiff; 93 float scale = xdiff * xdiff + ydiff * ydiff + zdiff * zdiff;
107 return std::sqrt(scale); 94 return std::sqrt(scale);
108 } 95 }
109 96
110 // Generate a quaternion representing the rotation from the negative Z axis 97 // Generate a quaternion representing the rotation from the negative Z axis
111 // (0, 0, -1) to a specified vector. This is an optimized version of a more 98 // (0, 0, -1) to a specified vector. This is an optimized version of a more
(...skipping 28 matching lines...)
140 mouse_event->x = x; 127 mouse_event->x = x;
141 mouse_event->y = y; 128 mouse_event->y = y;
142 mouse_event->windowX = x; 129 mouse_event->windowX = x;
143 mouse_event->windowY = y; 130 mouse_event->windowY = y;
144 mouse_event->timeStampSeconds = timestamp; 131 mouse_event->timeStampSeconds = timestamp;
145 mouse_event->clickCount = 1; 132 mouse_event->clickCount = 1;
146 mouse_event->modifiers = 0; 133 mouse_event->modifiers = 0;
147 134
148 return mouse_event; 135 return mouse_event;
149 } 136 }
150 } // namespace
151
152 VrShell::VrShell(JNIEnv* env,
153 jobject obj,
154 content::WebContents* main_contents,
155 ui::WindowAndroid* content_window,
156 content::WebContents* ui_contents,
157 ui::WindowAndroid* ui_window,
158 bool for_web_vr)
159 : WebContentsObserver(ui_contents),
160 main_contents_(main_contents),
161 ui_contents_(ui_contents),
162 metrics_helper_(new VrMetricsHelper(main_contents)),
163 main_thread_task_runner_(base::ThreadTaskRunnerHandle::Get()),
164 weak_ptr_factory_(this) {
165 DCHECK(g_instance == nullptr);
166 g_instance = this;
167 j_vr_shell_.Reset(env, obj);
168 scene_.reset(new UiScene);
169
170 if (for_web_vr)
171 metrics_helper_->SetWebVREnabled(true);
172 html_interface_.reset(new UiInterface(
173 for_web_vr ? UiInterface::Mode::WEB_VR : UiInterface::Mode::STANDARD,
174 main_contents_->IsFullscreen()));
175 content_compositor_.reset(new VrCompositor(content_window, false));
176 ui_compositor_.reset(new VrCompositor(ui_window, true));
177 vr_web_contents_observer_.reset(new VrWebContentsObserver(
178 main_contents, html_interface_.get(), this));
179
180 LoadUIContentOnUI();
181
182 gvr::Mat4f identity;
183 SetIdentityM(identity);
184 webvr_head_pose_.resize(kPoseRingBufferSize, identity);
185 webvr_head_pose_valid_.resize(kPoseRingBufferSize, false);
186
187 content_input_manager_.reset(new VrInputManager(main_contents_));
188 ui_input_manager_.reset(new VrInputManager(ui_contents_));
189 weak_content_input_manager_ = content_input_manager_->GetWeakPtr();
190 weak_ui_input_manager_ = ui_input_manager_->GetWeakPtr();
191
192 SetShowingOverscrollGlowOnUI(false);
193 }
194
195 void VrShell::UpdateCompositorLayersOnUI(JNIEnv* env,
196 const JavaParamRef<jobject>& obj) {
197 content_compositor_->SetLayer(main_contents_);
198 ui_compositor_->SetLayer(ui_contents_);
199 }
200
201 void VrShell::DestroyOnUI(JNIEnv* env, const JavaParamRef<jobject>& obj) {
202 delete this;
203 }
204
205 void VrShell::LoadUIContentOnUI() {
206 GURL url(kVrShellUIURL);
207 ui_contents_->GetController().LoadURL(
208 url, content::Referrer(),
209 ui::PageTransition::PAGE_TRANSITION_AUTO_TOPLEVEL, std::string(""));
210 }
211
212 bool RegisterVrShell(JNIEnv* env) {
213 return RegisterNativesImpl(env);
214 }
215
216 VrShell::~VrShell() {
217 if (delegate_ && delegate_->GetDeviceProvider()) {
218 delegate_->GetDeviceProvider()->OnGvrDelegateRemoved();
219 }
220 g_instance = nullptr;
221 gl::init::ShutdownGL();
222 }
223
224 void VrShell::SetDelegateOnUI(JNIEnv* env,
225 const base::android::JavaParamRef<jobject>& obj,
226 const base::android::JavaParamRef<jobject>& delegate) {
227 base::AutoLock lock(gvr_init_lock_);
228 delegate_ = VrShellDelegate::GetNativeDelegate(env, delegate);
229 if (swap_chain_.get()) {
230 delegate_->GetDeviceProvider()->OnGvrDelegateReady(
231 weak_ptr_factory_.GetWeakPtr());
232 }
233 }
234 137
235 enum class ViewerType { 138 enum class ViewerType {
236 UNKNOWN_TYPE = 0, 139 UNKNOWN_TYPE = 0,
237 CARDBOARD = 1, 140 CARDBOARD = 1,
238 DAYDREAM = 2, 141 DAYDREAM = 2,
239 VIEWER_TYPE_MAX, 142 VIEWER_TYPE_MAX,
240 }; 143 };
241 144
242 void VrShell::GvrInitOnGL(JNIEnv* env, 145 int GetPixelEncodedPoseIndexByte() {
243 const JavaParamRef<jobject>& obj, 146 TRACE_EVENT0("gpu", "VrShellGl::GetPixelEncodedPoseIndex");
244 jlong native_gvr_api) { 147 // Read the pose index encoded in a bottom left pixel as color values.
245 // set the initial webvr state 148 // See also third_party/WebKit/Source/modules/vr/VRDisplay.cpp which
246 metrics_helper_->SetVRActive(true); 149 // encodes the pose index, and device/vr/android/gvr/gvr_device.cc
150 // which tracks poses. Returns the low byte (0..255) if valid, or -1
151 // if not valid due to bad magic number.
152 uint8_t pixels[4];
153 // Assume we're reading from the framebuffer we just wrote to.
154 // That's true currently, we may need to use glReadBuffer(GL_BACK)
155 // or equivalent if the rendering setup changes in the future.
156 glReadPixels(0, 0, 1, 1, GL_RGBA, GL_UNSIGNED_BYTE, pixels);
247 157
248 gvr_api_ = 158 // Check for the magic number written by VRDevice.cpp on submit.
249 gvr::GvrApi::WrapNonOwned(reinterpret_cast<gvr_context*>(native_gvr_api)); 159 // This helps avoid glitches from garbage data in the render
250 // TODO(klausw,crbug.com/655722): should report OnGvrDelegateReady here once 160 // buffer that can appear during initialization or resizing. These
251 // we switch to using a WebVR render surface. We currently need to wait for 161 // often appear as flashes of all-black or all-white pixels.
252 // the compositor window's size to be known first. See also 162 if (pixels[1] == kWebVrPosePixelMagicNumbers[0] &&
253 // ContentSurfaceChanged. 163 pixels[2] == kWebVrPosePixelMagicNumbers[1]) {
254 controller_.reset( 164 // Pose is good.
255 new VrController(reinterpret_cast<gvr_context*>(native_gvr_api))); 165 return pixels[0];
166 }
167 VLOG(1) << "WebVR: reject decoded pose index " << (int)pixels[0] <<
168 ", bad magic number " << (int)pixels[1] << ", " << (int)pixels[2];
169 return -1;
170 }
256 171
172 } // namespace
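The encoding that GetPixelEncodedPoseIndexByte() undoes is simple: the producer (Blink's VRDisplay.cpp, per the comments above) writes the low byte of the pose index into the red channel of a corner pixel and the two magic numbers into green and blue. A minimal, self-contained sketch of both halves of that scheme, with hypothetical helper names:

    #include <array>
    #include <cstdint>

    // Must match kWebVrPosePixelMagicNumbers above.
    constexpr std::array<uint8_t, 2> kMagic{{42, 142}};

    // Producer side (hypothetical): pack the low byte of the pose index and
    // the magic numbers into one RGBA corner pixel before frame submit.
    std::array<uint8_t, 4> EncodePosePixel(uint32_t pose_index) {
      return {{static_cast<uint8_t>(pose_index & 0xFF), kMagic[0], kMagic[1], 255}};
    }

    // Consumer side: mirrors GetPixelEncodedPoseIndexByte(). Returns the low
    // byte (0..255) if the magic numbers check out, or -1 otherwise.
    int DecodePosePixel(const std::array<uint8_t, 4>& px) {
      if (px[1] == kMagic[0] && px[2] == kMagic[1])
        return px[0];
      return -1;
    }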
173
174 VrShellGl::VrShellGl(
175 VrShell* vr_shell,
176 const base::WeakPtr<VrShell>& weak_vr_shell,
177 const base::WeakPtr<VrInputManager>& content_input_manager,
178 const base::WeakPtr<VrInputManager>& ui_input_manager,
179 scoped_refptr<base::SingleThreadTaskRunner> main_thread_task_runner,
180 gvr_context* gvr_api)
181 : task_runner_(base::ThreadTaskRunnerHandle::Get()),
182 vr_shell_(vr_shell),
183 weak_vr_shell_(weak_vr_shell),
184 content_input_manager_(content_input_manager),
185 ui_input_manager_(ui_input_manager),
186 main_thread_task_runner_(std::move(main_thread_task_runner)),
187 weak_ptr_factory_(this) {
188 GvrInit(gvr_api);
189 }
190
191 VrShellGl::~VrShellGl() {
192 draw_task_.Cancel();
193 }
194
195 bool VrShellGl::Initialize() {
196 if (!InitializeGl()) return false;
197
198 gvr::Mat4f identity;
199 SetIdentityM(identity);
200 webvr_head_pose_.resize(kPoseRingBufferSize, identity);
201 webvr_head_pose_valid_.resize(kPoseRingBufferSize, false);
202
203 draw_task_.Reset(base::Bind(&VrShellGl::DrawFrame, base::Unretained(this)));
204
205 scene_.reset(new UiScene);
206
207 InitializeRenderer();
208
209 ScheduleNextDrawFrame();
210 return true;
211 }
212
213 bool VrShellGl::InitializeGl() {
214 if (gl::GetGLImplementation() == gl::kGLImplementationNone &&
215 !gl::init::InitializeGLOneOff()) {
216 LOG(ERROR) << "gl::init::InitializeGLOneOff failed";
217 ForceExitVR();
218 return false;
219 }
220 surface_ = gl::init::CreateOffscreenGLSurface(gfx::Size());
221 if (!surface_.get()) {
222 LOG(ERROR) << "gl::init::CreateOffscreenGLSurface failed";
223 ForceExitVR();
224 return false;
225 }
226 context_ = gl::init::CreateGLContext(nullptr, surface_.get(),
227 gl::GLContextAttribs());
228 if (!context_.get()) {
229 LOG(ERROR) << "gl::init::CreateGLContext failed";
230 ForceExitVR();
231 return false;
232 }
233 if (!context_->MakeCurrent(surface_.get())) {
234 LOG(ERROR) << "gl::GLContext::MakeCurrent() failed";
235 ForceExitVR();
236 return false;
237 }
238
239 // TODO(mthiesse): We don't appear to have a VSync provider ever here. This is
240 // sort of okay, because the GVR swap chain will block if we render too fast,
241 // but we should address this properly.
242 if (surface_->GetVSyncProvider()) {
243 surface_->GetVSyncProvider()->GetVSyncParameters(base::Bind(
244 &VrShellGl::UpdateVSyncParameters, weak_ptr_factory_.GetWeakPtr()));
245 } else {
246 LOG(ERROR) << "No VSync Provider";
247 }
248
249 unsigned int textures[2];
250 glGenTextures(2, textures);
251 ui_texture_id_ = textures[0];
252 content_texture_id_ = textures[1];
253 ui_surface_texture_ = gl::SurfaceTexture::Create(ui_texture_id_);
254 content_surface_texture_ = gl::SurfaceTexture::Create(content_texture_id_);
255 ui_surface_.reset(new gl::ScopedJavaSurface(ui_surface_texture_.get()));
256 content_surface_.reset(new gl::ScopedJavaSurface(
257 content_surface_texture_.get()));
258 ui_surface_texture_->SetFrameAvailableCallback(base::Bind(
259 &VrShellGl::OnUIFrameAvailable, weak_ptr_factory_.GetWeakPtr()));
260 content_surface_texture_->SetFrameAvailableCallback(base::Bind(
261 &VrShellGl::OnContentFrameAvailable, weak_ptr_factory_.GetWeakPtr()));
262
263 content_surface_texture_->SetDefaultBufferSize(
264 content_tex_physical_size_.width, content_tex_physical_size_.height);
265 ui_surface_texture_->SetDefaultBufferSize(ui_tex_physical_size_.width,
266 ui_tex_physical_size_.height);
267
268 main_thread_task_runner_->PostTask(FROM_HERE, base::Bind(
269 &VrShell::SurfacesChanged, weak_vr_shell_,
270 content_surface_->j_surface().obj(),
271 ui_surface_->j_surface().obj()));
272 return true;
273 }
274
275 void VrShellGl::OnUIFrameAvailable() {
276 ui_surface_texture_->UpdateTexImage();
277 }
278
279 void VrShellGl::OnContentFrameAvailable() {
280 content_surface_texture_->UpdateTexImage();
281 }
282
283 void VrShellGl::GvrInit(gvr_context* gvr_api) {
284 gvr_api_ = gvr::GvrApi::WrapNonOwned(gvr_api);
285 controller_.reset(new VrController(gvr_api));
257 286
258 ViewerType viewerType; 287 ViewerType viewerType;
259 switch (gvr_api_->GetViewerType()) { 288 switch (gvr_api_->GetViewerType()) {
260 case gvr::ViewerType::GVR_VIEWER_TYPE_DAYDREAM: 289 case gvr::ViewerType::GVR_VIEWER_TYPE_DAYDREAM:
261 viewerType = ViewerType::DAYDREAM; 290 viewerType = ViewerType::DAYDREAM;
262 break; 291 break;
263 case gvr::ViewerType::GVR_VIEWER_TYPE_CARDBOARD: 292 case gvr::ViewerType::GVR_VIEWER_TYPE_CARDBOARD:
264 viewerType = ViewerType::CARDBOARD; 293 viewerType = ViewerType::CARDBOARD;
265 break; 294 break;
266 default: 295 default:
267 NOTREACHED(); 296 NOTREACHED();
268 viewerType = ViewerType::UNKNOWN_TYPE; 297 viewerType = ViewerType::UNKNOWN_TYPE;
269 break; 298 break;
270 } 299 }
271 UMA_HISTOGRAM_ENUMERATION("VRViewerType", static_cast<int>(viewerType), 300 UMA_HISTOGRAM_ENUMERATION("VRViewerType", static_cast<int>(viewerType),
272 static_cast<int>(ViewerType::VIEWER_TYPE_MAX)); 301 static_cast<int>(ViewerType::VIEWER_TYPE_MAX));
273 } 302 }
274 303
275 void VrShell::InitializeGlOnGL(JNIEnv* env, 304 void VrShellGl::InitializeRenderer() {
276 const JavaParamRef<jobject>& obj,
277 jint content_texture_handle,
278 jint ui_texture_handle) {
279 base::AutoLock lock(gvr_init_lock_);
280 CHECK(gl::GetGLImplementation() != gl::kGLImplementationNone ||
281 gl::init::InitializeGLOneOff());
282
283 content_texture_id_ = content_texture_handle;
284 ui_texture_id_ = ui_texture_handle;
285
286 // While WebVR is going through the compositor path, it shares 305 // While WebVR is going through the compositor path, it shares
287 // the same texture ID. This will change once it gets its own 306 // the same texture ID. This will change once it gets its own
288 // surface, but store it separately to avoid future confusion. 307 // surface, but store it separately to avoid future confusion.
289 // TODO(klausw,crbug.com/655722): remove this. 308 // TODO(klausw,crbug.com/655722): remove this.
290 webvr_texture_id_ = content_texture_id_; 309 webvr_texture_id_ = content_texture_id_;
291 // Out of paranoia, explicitly reset the "pose valid" flags to false 310 // Out of paranoia, explicitly reset the "pose valid" flags to false
292 // from the GL thread. The constructor ran in the UI thread. 311 // from the GL thread. The constructor ran in the UI thread.
293 // TODO(klausw,crbug.com/655722): remove this. 312 // TODO(klausw,crbug.com/655722): remove this.
294 webvr_head_pose_valid_.assign(kPoseRingBufferSize, false); 313 webvr_head_pose_valid_.assign(kPoseRingBufferSize, false);
295 314
(...skipping 54 matching lines...)
350 buffer_viewport_list_->GetBufferViewport(GVR_LEFT_EYE, 369 buffer_viewport_list_->GetBufferViewport(GVR_LEFT_EYE,
351 webvr_left_viewport_.get()); 370 webvr_left_viewport_.get());
352 webvr_left_viewport_->SetSourceBufferIndex(kFramePrimaryBuffer); 371 webvr_left_viewport_->SetSourceBufferIndex(kFramePrimaryBuffer);
353 372
354 webvr_right_viewport_.reset( 373 webvr_right_viewport_.reset(
355 new gvr::BufferViewport(gvr_api_->CreateBufferViewport())); 374 new gvr::BufferViewport(gvr_api_->CreateBufferViewport()));
356 buffer_viewport_list_->GetBufferViewport(GVR_RIGHT_EYE, 375 buffer_viewport_list_->GetBufferViewport(GVR_RIGHT_EYE,
357 webvr_right_viewport_.get()); 376 webvr_right_viewport_.get());
358 webvr_right_viewport_->SetSourceBufferIndex(kFramePrimaryBuffer); 377 webvr_right_viewport_->SetSourceBufferIndex(kFramePrimaryBuffer);
359 378
360 if (delegate_) { 379 main_thread_task_runner_->PostTask(FROM_HERE, base::Bind(
361 main_thread_task_runner_->PostTask( 380 &VrShell::GvrDelegateReady, weak_vr_shell_));
362 FROM_HERE, base::Bind(&device::GvrDeviceProvider::OnGvrDelegateReady,
363 delegate_->GetDeviceProvider(),
364 weak_ptr_factory_.GetWeakPtr()));
365 }
366 } 381 }
367 382
368 void VrShell::UpdateControllerOnGL(const gvr::Vec3f& forward_vector) { 383 void VrShellGl::UpdateController(const gvr::Vec3f& forward_vector) {
369 controller_->UpdateState(); 384 controller_->UpdateState();
370 385
371 #if defined(ENABLE_VR_SHELL) 386 #if defined(ENABLE_VR_SHELL)
387 // TODO(mthiesse): Fix menu button handling, which should be posted to the UI
388 // thread instead of handled here.
389
372 // Note that button up/down state is transient, so ButtonUpHappened only 390 // Note that button up/down state is transient, so ButtonUpHappened only
373 // returns 391 // returns true for a single frame (and we're guaranteed not to miss it).
374 // true for a single frame (and we're guaranteed not to miss it).
375 if (controller_->ButtonUpHappened( 392 if (controller_->ButtonUpHappened(
376 gvr::ControllerButton::GVR_CONTROLLER_BUTTON_APP)) { 393 gvr::ControllerButton::GVR_CONTROLLER_BUTTON_APP)) {
377 html_interface_->SetMenuMode(!html_interface_->GetMenuMode()); 394 // html_interface_->SetMenuMode(!html_interface_->GetMenuMode());
378 395
379 // TODO(mthiesse): The page is no longer visible when in menu mode. We 396 // TODO(mthiesse): The page is no longer visible when in menu mode. We
380 // should unfocus or otherwise let it know it's hidden. 397 // should unfocus or otherwise let it know it's hidden.
381 if (html_interface_->GetMode() == UiInterface::Mode::WEB_VR) { 398 // if (html_interface_->GetMode() == UiInterface::Mode::WEB_VR) {
382 const auto&& task = html_interface_->GetMenuMode() ? 399 // const auto&& task = html_interface_->GetMenuMode() ?
383 &device::GvrDeviceProvider::OnDisplayBlur : 400 // &device::GvrDeviceProvider::OnDisplayBlur :
384 &device::GvrDeviceProvider::OnDisplayFocus; 401 // &device::GvrDeviceProvider::OnDisplayFocus;
385 main_thread_task_runner_->PostTask( 402 // main_thread_task_runner_->PostTask(
386 FROM_HERE, base::Bind(task, delegate_->GetDeviceProvider())); 403 // FROM_HERE, base::Bind(task, delegate_->GetDeviceProvider()));
387 } 404 // }
388 } 405 }
389 #endif 406 #endif
390 if (html_interface_->GetMode() == UiInterface::Mode::WEB_VR) { 407 if (vr_shell_->GetUiInterface()->GetMode() == UiInterface::Mode::WEB_VR) {
391 // Process screen touch events for Cardboard button compatibility. 408 // Process screen touch events for Cardboard button compatibility.
392 // Also send tap events for controller "touchpad click" events. 409 // Also send tap events for controller "touchpad click" events.
393 if (touch_pending_ || 410 if (touch_pending_ || controller_->ButtonUpHappened(
394 controller_->ButtonUpHappened(
395 gvr::ControllerButton::GVR_CONTROLLER_BUTTON_CLICK)) { 411 gvr::ControllerButton::GVR_CONTROLLER_BUTTON_CLICK)) {
396 touch_pending_ = false; 412 touch_pending_ = false;
397 std::unique_ptr<WebGestureEvent> gesture(new WebGestureEvent()); 413 std::unique_ptr<WebGestureEvent> gesture(new WebGestureEvent());
398 gesture->sourceDevice = blink::WebGestureDeviceTouchpad; 414 gesture->sourceDevice = blink::WebGestureDeviceTouchpad;
399 gesture->timeStampSeconds = 415 gesture->timeStampSeconds =
400 (base::TimeTicks::Now() - base::TimeTicks()).InSecondsF(); 416 (base::TimeTicks::Now() - base::TimeTicks()).InSecondsF();
401 gesture->type = WebInputEvent::GestureTapDown; 417 gesture->type = WebInputEvent::GestureTapDown;
402 gesture->x = 0; 418 gesture->x = 0;
403 gesture->y = 0; 419 gesture->y = 0;
404 SendGestureOnGL(CONTENT, std::move(gesture)); 420 SendGesture(InputTarget::CONTENT, std::move(gesture));
405 } 421 }
406 422
407 return; 423 return;
408 } 424 }
409 425
410 gvr::Vec3f ergo_neutral_pose; 426 gvr::Vec3f ergo_neutral_pose;
411 if (!controller_->IsConnected()) { 427 if (!controller_->IsConnected()) {
412 // No controller detected, set up a gaze cursor that tracks the 428 // No controller detected, set up a gaze cursor that tracks the
413 // forward direction. 429 // forward direction.
414 ergo_neutral_pose = {0.0f, 0.0f, -1.0f}; 430 ergo_neutral_pose = {0.0f, 0.0f, -1.0f};
(...skipping 39 matching lines...)
454 target_point_ = GetRayPoint(origin, forward, distance); 470 target_point_ = GetRayPoint(origin, forward, distance);
455 gvr::Vec3f eye_to_target = target_point_; 471 gvr::Vec3f eye_to_target = target_point_;
456 NormalizeVector(eye_to_target); 472 NormalizeVector(eye_to_target);
457 473
458 // Determine which UI element (if any) intersects the line between the eyes 474 // Determine which UI element (if any) intersects the line between the eyes
459 // and the controller target position. 475 // and the controller target position.
460 float closest_element_distance = std::numeric_limits<float>::infinity(); 476 float closest_element_distance = std::numeric_limits<float>::infinity();
461 int pixel_x = 0; 477 int pixel_x = 0;
462 int pixel_y = 0; 478 int pixel_y = 0;
463 target_element_ = nullptr; 479 target_element_ = nullptr;
464 InputTarget input_target = NONE; 480 InputTarget input_target = InputTarget::NONE;
465 481
466 for (const auto& plane : scene_->GetUiElements()) { 482 for (const auto& plane : scene_->GetUiElements()) {
467 if (!plane->visible || !plane->hit_testable) { 483 if (!plane->visible || !plane->hit_testable) {
468 continue; 484 continue;
469 } 485 }
470 float distance_to_plane = plane->GetRayDistance(kOrigin, eye_to_target); 486 float distance_to_plane = plane->GetRayDistance(kOrigin, eye_to_target);
471 gvr::Vec3f plane_intersection_point = 487 gvr::Vec3f plane_intersection_point =
472 GetRayPoint(kOrigin, eye_to_target, distance_to_plane); 488 GetRayPoint(kOrigin, eye_to_target, distance_to_plane);
473 489
474 gvr::Vec3f rect_2d_point = 490 gvr::Vec3f rect_2d_point =
(...skipping 11 matching lines...)
486 pixel_rect = {0, 0, content_tex_css_width_, content_tex_css_height_}; 502 pixel_rect = {0, 0, content_tex_css_width_, content_tex_css_height_};
487 } else { 503 } else {
488 pixel_rect = {plane->copy_rect.x, plane->copy_rect.y, 504 pixel_rect = {plane->copy_rect.x, plane->copy_rect.y,
489 plane->copy_rect.width, plane->copy_rect.height}; 505 plane->copy_rect.width, plane->copy_rect.height};
490 } 506 }
491 pixel_x = pixel_rect.width * x + pixel_rect.x; 507 pixel_x = pixel_rect.width * x + pixel_rect.x;
492 pixel_y = pixel_rect.height * y + pixel_rect.y; 508 pixel_y = pixel_rect.height * y + pixel_rect.y;
493 509
494 target_point_ = plane_intersection_point; 510 target_point_ = plane_intersection_point;
495 target_element_ = plane.get(); 511 target_element_ = plane.get();
496 input_target = plane->content_quad ? CONTENT : UI; 512 input_target = plane->content_quad ? InputTarget::CONTENT
513 : InputTarget::UI;
497 } 514 }
498 } 515 }
499 SendEventsToTargetOnGL(input_target, pixel_x, pixel_y); 516 SendEventsToTarget(input_target, pixel_x, pixel_y);
500 } 517 }
501 518
502 void VrShell::SendEventsToTargetOnGL(InputTarget input_target, 519 void VrShellGl::SendEventsToTarget(InputTarget input_target,
503 int pixel_x, 520 int pixel_x,
504 int pixel_y) { 521 int pixel_y) {
505 std::vector<std::unique_ptr<WebGestureEvent>> gesture_list = 522 std::vector<std::unique_ptr<WebGestureEvent>> gesture_list =
506 controller_->DetectGestures(); 523 controller_->DetectGestures();
507 double timestamp = gesture_list.front()->timeStampSeconds; 524 double timestamp = gesture_list.front()->timeStampSeconds;
508 525
509 if (touch_pending_) { 526 if (touch_pending_) {
510 touch_pending_ = false; 527 touch_pending_ = false;
511 std::unique_ptr<WebGestureEvent> event(new WebGestureEvent()); 528 std::unique_ptr<WebGestureEvent> event(new WebGestureEvent());
512 event->type = WebInputEvent::GestureTapDown; 529 event->type = WebInputEvent::GestureTapDown;
513 event->sourceDevice = blink::WebGestureDeviceTouchpad; 530 event->sourceDevice = blink::WebGestureDeviceTouchpad;
514 event->timeStampSeconds = timestamp; 531 event->timeStampSeconds = timestamp;
515 event->x = pixel_x; 532 event->x = pixel_x;
516 event->y = pixel_y; 533 event->y = pixel_y;
517 gesture_list.push_back(std::move(event)); 534 gesture_list.push_back(std::move(event));
518 } 535 }
519 536
520 for (const auto& gesture : gesture_list) { 537 for (const auto& gesture : gesture_list) {
521 switch (gesture->type) { 538 switch (gesture->type) {
522 case WebInputEvent::GestureScrollBegin: 539 case WebInputEvent::GestureScrollBegin:
523 case WebInputEvent::GestureScrollUpdate: 540 case WebInputEvent::GestureScrollUpdate:
524 case WebInputEvent::GestureScrollEnd: 541 case WebInputEvent::GestureScrollEnd:
525 case WebInputEvent::GestureFlingCancel: 542 case WebInputEvent::GestureFlingCancel:
526 case WebInputEvent::GestureFlingStart: 543 case WebInputEvent::GestureFlingStart:
527 SendGestureOnGL(CONTENT, 544 SendGesture(InputTarget::CONTENT,
528 base::WrapUnique(new WebGestureEvent(*gesture))); 545 base::WrapUnique(new WebGestureEvent(*gesture)));
529 break; 546 break;
530 case WebInputEvent::GestureTapDown: 547 case WebInputEvent::GestureTapDown:
531 gesture->x = pixel_x; 548 gesture->x = pixel_x;
532 gesture->y = pixel_y; 549 gesture->y = pixel_y;
533 if (input_target != NONE) 550 if (input_target != InputTarget::NONE)
534 SendGestureOnGL(input_target, 551 SendGesture(input_target,
535 base::WrapUnique(new WebGestureEvent(*gesture))); 552 base::WrapUnique(new WebGestureEvent(*gesture)));
536 break; 553 break;
537 case WebInputEvent::Undefined: 554 case WebInputEvent::Undefined:
538 break; 555 break;
539 default: 556 default:
540 NOTREACHED(); 557 NOTREACHED();
541 } 558 }
542 } 559 }
543 560
544 // Hover support 561 // Hover support
545 bool new_target = input_target != current_input_target_; 562 bool new_target = input_target != current_input_target_;
546 if (new_target && current_input_target_ != NONE) { 563 if (new_target && current_input_target_ != InputTarget::NONE) {
547 // Send a move event indicating that the pointer moved off of an element. 564 // Send a move event indicating that the pointer moved off of an element.
548 SendGestureOnGL(current_input_target_, 565 SendGesture(current_input_target_,
549 MakeMouseEvent(WebInputEvent::MouseLeave, timestamp, 0, 0)); 566 MakeMouseEvent(WebInputEvent::MouseLeave, timestamp, 0, 0));
550 } 567 }
551
552 current_input_target_ = input_target; 568 current_input_target_ = input_target;
553 if (current_input_target_ != NONE) { 569 if (current_input_target_ != InputTarget::NONE) {
554 WebInputEvent::Type type = 570 WebInputEvent::Type type =
555 new_target ? WebInputEvent::MouseEnter : WebInputEvent::MouseMove; 571 new_target ? WebInputEvent::MouseEnter : WebInputEvent::MouseMove;
556 SendGestureOnGL(input_target, 572 SendGesture(input_target,
557 MakeMouseEvent(type, timestamp, pixel_x, pixel_y)); 573 MakeMouseEvent(type, timestamp, pixel_x, pixel_y));
558 } 574 }
559 } 575 }
560 576
561 void VrShell::SendGestureOnGL(InputTarget input_target, 577 void VrShellGl::SendGesture(InputTarget input_target,
562 std::unique_ptr<blink::WebInputEvent> event) { 578 std::unique_ptr<blink::WebInputEvent> event) {
563 DCHECK(input_target != NONE); 579 DCHECK(input_target != InputTarget::NONE);
564 const base::WeakPtr<VrInputManager>& weak_ptr = 580 const base::WeakPtr<VrInputManager>& weak_ptr =
565 input_target == CONTENT ? weak_content_input_manager_ 581 input_target == InputTarget::CONTENT ? content_input_manager_
566 : weak_ui_input_manager_; 582 : ui_input_manager_;
567 main_thread_task_runner_->PostTask( 583 main_thread_task_runner_->PostTask(
568 FROM_HERE, 584 FROM_HERE,
569 base::Bind(&VrInputManager::ProcessUpdatedGesture, weak_ptr, 585 base::Bind(&VrInputManager::ProcessUpdatedGesture, weak_ptr,
570 base::Passed(std::move(event)))); 586 base::Passed(std::move(event))));
571 } 587 }
572 588
573 void VrShell::SetGvrPoseForWebVr(const gvr::Mat4f& pose, uint32_t pose_num) { 589 void VrShellGl::SetGvrPoseForWebVr(const gvr::Mat4f& pose, uint32_t pose_num) {
574 webvr_head_pose_[pose_num % kPoseRingBufferSize] = pose; 590 webvr_head_pose_[pose_num % kPoseRingBufferSize] = pose;
575 webvr_head_pose_valid_[pose_num % kPoseRingBufferSize] = true; 591 webvr_head_pose_valid_[pose_num % kPoseRingBufferSize] = true;
576 } 592 }
577 593
578 int GetPixelEncodedPoseIndexByte() { 594 bool VrShellGl::WebVrPoseByteIsValid(int pose_index_byte) {
579 TRACE_EVENT0("gpu", "VrShell::GetPixelEncodedPoseIndex");
580 // Read the pose index encoded in a bottom left pixel as color values.
581 // See also third_party/WebKit/Source/modules/vr/VRDisplay.cpp which
582 // encodes the pose index, and device/vr/android/gvr/gvr_device.cc
583 // which tracks poses. Returns the low byte (0..255) if valid, or -1
584 // if not valid due to bad magic number.
585 uint8_t pixels[4];
586 // Assume we're reading from the framebuffer we just wrote to.
587 // That's true currently, we may need to use glReadBuffer(GL_BACK)
588 // or equivalent if the rendering setup changes in the future.
589 glReadPixels(0, 0, 1, 1, GL_RGBA, GL_UNSIGNED_BYTE, pixels);
590
591 // Check for the magic number written by VRDevice.cpp on submit.
592 // This helps avoid glitches from garbage data in the render
593 // buffer that can appear during initialization or resizing. These
594 // often appear as flashes of all-black or all-white pixels.
595 if (pixels[1] == kWebVrPosePixelMagicNumbers[0] &&
596 pixels[2] == kWebVrPosePixelMagicNumbers[1]) {
597 // Pose is good.
598 return pixels[0];
599 }
600 VLOG(1) << "WebVR: reject decoded pose index " << (int)pixels[0] <<
601 ", bad magic number " << (int)pixels[1] << ", " << (int)pixels[2];
602 return -1;
603 }
604
605 bool VrShell::WebVrPoseByteIsValidOnGL(int pose_index_byte) {
606 if (pose_index_byte < 0) { 595 if (pose_index_byte < 0) {
607 return false; 596 return false;
608 } 597 }
609 if (!webvr_head_pose_valid_[pose_index_byte % kPoseRingBufferSize]) { 598 if (!webvr_head_pose_valid_[pose_index_byte % kPoseRingBufferSize]) {
610 VLOG(1) << "WebVR: reject decoded pose index " << pose_index_byte << 599 VLOG(1) << "WebVR: reject decoded pose index " << pose_index_byte <<
611 ", not a valid pose"; 600 ", not a valid pose";
612 return false; 601 return false;
613 } 602 }
614 return true; 603 return true;
615 } 604 }
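Taken together, SetGvrPoseForWebVr and WebVrPoseByteIsValid implement a small ring buffer keyed by the low byte of the pose number; the per-slot validity flags exist so that a garbage or stale index byte read back from the frame cannot select a slot that was never written, or one written before the GL thread reset the flags in InitializeRenderer(). A condensed, self-contained sketch of that structure, assuming a hypothetical buffer size (the real kPoseRingBufferSize is defined in the header, not shown in this diff):

    #include <array>
    #include <cstddef>
    #include <cstdint>

    // Hypothetical size; the real constant is kPoseRingBufferSize.
    constexpr size_t kRingSize = 8;

    template <typename Pose>
    class PoseRingBuffer {
     public:
      // Producer: store a pose under its sequence number.
      void Set(uint32_t pose_num, const Pose& pose) {
        poses_[pose_num % kRingSize] = pose;
        valid_[pose_num % kRingSize] = true;
      }
      // Consumer: look up by the (possibly invalid) byte read back from the
      // frame. Returns false if that slot holds no real pose.
      bool Get(int index_byte, Pose* out) const {
        if (index_byte < 0 || !valid_[index_byte % kRingSize])
          return false;
        *out = poses_[index_byte % kRingSize];
        return true;
      }
      // Mirrors webvr_head_pose_valid_.assign(..., false): drop everything
      // written so far.
      void Invalidate() { valid_.fill(false); }

     private:
      std::array<Pose, kRingSize> poses_{};
      std::array<bool, kRingSize> valid_{};
    };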
616 605
617 void VrShell::DrawFrameOnGL(JNIEnv* env, const JavaParamRef<jobject>& obj) { 606 void VrShellGl::DrawFrame() {
618 TRACE_EVENT0("gpu", "VrShell::DrawFrame"); 607 TRACE_EVENT0("gpu", "VrShellGl::DrawFrame");
619 // Reset the viewport list to just the pair of viewports for the 608 // Reset the viewport list to just the pair of viewports for the
620 // primary buffer each frame. Head-locked viewports get added by 609 // primary buffer each frame. Head-locked viewports get added by
621 // DrawVrShell if needed. 610 // DrawVrShell if needed.
622 buffer_viewport_list_->SetToRecommendedBufferViewports(); 611 buffer_viewport_list_->SetToRecommendedBufferViewports();
623 612
624 if (html_interface_->GetMode() == UiInterface::Mode::WEB_VR) { 613 // TODO(klausw): Fix this. Resizing buffers here leads to webVR mode showing
625 // If needed, resize the primary buffer for use with WebVR. 614 // nothing but a black screen.
626 if (render_size_primary_ != render_size_primary_webvr_) { 615 // if (vr_shell_->GetUiInterface()->GetMode() == UiInterface::Mode::WEB_VR) {
627 if (!render_size_primary_webvr_.width) { 616 // // If needed, resize the primary buffer for use with WebVR.
628 VLOG(2) << "WebVR rendering size not known yet, dropping frame"; 617 // if (render_size_primary_ != render_size_primary_webvr_) {
629 return; 618 // if (!render_size_primary_webvr_.width) {
630 } 619 // VLOG(2) << "WebVR rendering size not known yet, dropping frame";
631 render_size_primary_ = render_size_primary_webvr_; 620 // return;
632 swap_chain_->ResizeBuffer(kFramePrimaryBuffer, render_size_primary_); 621 // }
633 } 622 // render_size_primary_ = render_size_primary_webvr_;
634 } else { 623 // swap_chain_->ResizeBuffer(kFramePrimaryBuffer, render_size_primary_);
635 if (render_size_primary_ != render_size_primary_vrshell_) { 624 // }
636 render_size_primary_ = render_size_primary_vrshell_; 625 // } else {
637 swap_chain_->ResizeBuffer(kFramePrimaryBuffer, render_size_primary_); 626 // if (render_size_primary_ != render_size_primary_vrshell_) {
638 } 627 // render_size_primary_ = render_size_primary_vrshell_;
639 } 628 // swap_chain_->ResizeBuffer(kFramePrimaryBuffer, render_size_primary_);
629 // }
630 // }
640 631
641 gvr::Frame frame = swap_chain_->AcquireFrame(); 632 gvr::Frame frame = swap_chain_->AcquireFrame();
642 gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow(); 633 gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow();
643 target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos; 634 target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos;
644 635
645 gvr::Mat4f head_pose = 636 gvr::Mat4f head_pose =
646 gvr_api_->GetHeadSpaceFromStartSpaceRotation(target_time); 637 gvr_api_->GetHeadSpaceFromStartSpaceRotation(target_time);
647 638
648 gvr::Vec3f position = GetTranslation(head_pose); 639 gvr::Vec3f position = GetTranslation(head_pose);
649 if (position.x == 0.0f && position.y == 0.0f && position.z == 0.0f) { 640 if (position.x == 0.0f && position.y == 0.0f && position.z == 0.0f) {
650 // This appears to be a 3DOF pose without a neck model. Add one. 641 // This appears to be a 3DOF pose without a neck model. Add one.
651 // The head pose has redundant data. Assume we're only using the 642 // The head pose has redundant data. Assume we're only using the
652 // object_from_reference_matrix, we're not updating position_external. 643 // object_from_reference_matrix, we're not updating position_external.
653 // TODO: Not sure what object_from_reference_matrix is. The new api removed 644 // TODO: Not sure what object_from_reference_matrix is. The new api removed
654 // it. For now, removing it seems to work fine. 645 // it. For now, removing it seems to work fine.
655 gvr_api_->ApplyNeckModel(head_pose, 1.0f); 646 gvr_api_->ApplyNeckModel(head_pose, 1.0f);
656 } 647 }
657 648
658 // Bind the primary framebuffer.
659 frame.BindBuffer(kFramePrimaryBuffer); 649 frame.BindBuffer(kFramePrimaryBuffer);
660 650
661 HandleQueuedTasksOnGL();
662
663 // Update the render position of all UI elements (including desktop). 651 // Update the render position of all UI elements (including desktop).
664 const float screen_tilt = kDesktopScreenTiltDefault * M_PI / 180.0f; 652 const float screen_tilt = kDesktopScreenTiltDefault * M_PI / 180.0f;
665 scene_->UpdateTransforms(screen_tilt, UiScene::TimeInMicroseconds()); 653 scene_->UpdateTransforms(screen_tilt, UiScene::TimeInMicroseconds());
666 654
667 UpdateControllerOnGL(GetForwardVector(head_pose)); 655 UpdateController(GetForwardVector(head_pose));
668 656
669 if (html_interface_->GetMode() == UiInterface::Mode::WEB_VR) { 657 if (vr_shell_->GetUiInterface()->GetMode() == UiInterface::Mode::WEB_VR) {
670 DrawWebVrOnGL(); 658 DrawWebVr();
671 659
672 // When using async reprojection, we need to know which pose was used in 660 // When using async reprojection, we need to know which pose was used in
673 // the WebVR app for drawing this frame. Due to unknown amounts of 661 // the WebVR app for drawing this frame. Due to unknown amounts of
674 // buffering in the compositor and SurfaceTexture, we read the pose number 662 // buffering in the compositor and SurfaceTexture, we read the pose number
675 // from a corner pixel. There's no point in doing this for legacy 663 // from a corner pixel. There's no point in doing this for legacy
676 // distortion rendering since that doesn't need a pose, and reading back 664 // distortion rendering since that doesn't need a pose, and reading back
677 // pixels is an expensive operation. TODO(klausw,crbug.com/655722): stop 665 // pixels is an expensive operation. TODO(klausw,crbug.com/655722): stop
678 // doing this once we have working no-compositor rendering for WebVR. 666 // doing this once we have working no-compositor rendering for WebVR.
679 if (gvr_api_->GetAsyncReprojectionEnabled()) { 667 if (gvr_api_->GetAsyncReprojectionEnabled()) {
680 int pose_index_byte = GetPixelEncodedPoseIndexByte(); 668 int pose_index_byte = GetPixelEncodedPoseIndexByte();
681 if (WebVrPoseByteIsValidOnGL(pose_index_byte)) { 669 if (WebVrPoseByteIsValid(pose_index_byte)) {
682 // We have a valid pose, use it for reprojection. 670 // We have a valid pose, use it for reprojection.
683 webvr_left_viewport_->SetReprojection(GVR_REPROJECTION_FULL); 671 webvr_left_viewport_->SetReprojection(GVR_REPROJECTION_FULL);
684 webvr_right_viewport_->SetReprojection(GVR_REPROJECTION_FULL); 672 webvr_right_viewport_->SetReprojection(GVR_REPROJECTION_FULL);
685 head_pose = webvr_head_pose_[pose_index_byte % kPoseRingBufferSize]; 673 head_pose = webvr_head_pose_[pose_index_byte % kPoseRingBufferSize];
686 // We can't mark the used pose as invalid since unfortunately 674 // We can't mark the used pose as invalid since unfortunately
687 // we have to reuse them. The compositor will re-submit stale 675 // we have to reuse them. The compositor will re-submit stale
688 // frames on vsync, and we can't tell that this has happened 676 // frames on vsync, and we can't tell that this has happened
689 // until we've read the pose index from it, and at that point 677 // until we've read the pose index from it, and at that point
690 // it's too late to skip rendering. 678 // it's too late to skip rendering.
691 } else { 679 } else {
692 // If we don't get a valid frame ID back we shouldn't attempt 680 // If we don't get a valid frame ID back we shouldn't attempt
693 // to reproject by an invalid matrix, so turn off reprojection 681 // to reproject by an invalid matrix, so turn off reprojection
694 // instead. Invalid poses can permanently break reprojection 682 // instead. Invalid poses can permanently break reprojection
695 // for this GVR instance: http://crbug.com/667327 683 // for this GVR instance: http://crbug.com/667327
696 webvr_left_viewport_->SetReprojection(GVR_REPROJECTION_NONE); 684 webvr_left_viewport_->SetReprojection(GVR_REPROJECTION_NONE);
697 webvr_right_viewport_->SetReprojection(GVR_REPROJECTION_NONE); 685 webvr_right_viewport_->SetReprojection(GVR_REPROJECTION_NONE);
698 } 686 }
699 } 687 }
700 } 688 }
701 689
702 DrawVrShellOnGL(head_pose, frame); 690 DrawVrShell(head_pose, frame);
703 691
704 frame.Unbind(); 692 frame.Unbind();
705 frame.Submit(*buffer_viewport_list_, head_pose); 693 frame.Submit(*buffer_viewport_list_, head_pose);
694
695 // No need to SwapBuffers for an offscreen surface.
696 ScheduleNextDrawFrame();
706 } 697 }
707 698
708 void VrShell::DrawVrShellOnGL(const gvr::Mat4f& head_pose, 699 void VrShellGl::DrawVrShell(const gvr::Mat4f& head_pose,
709 gvr::Frame &frame) { 700 gvr::Frame &frame) {
710 TRACE_EVENT0("gpu", "VrShell::DrawVrShell"); 701 TRACE_EVENT0("gpu", "VrShellGl::DrawVrShell");
711 std::vector<const ContentRectangle*> head_locked_elements; 702 std::vector<const ContentRectangle*> head_locked_elements;
712 std::vector<const ContentRectangle*> world_elements; 703 std::vector<const ContentRectangle*> world_elements;
713 for (const auto& rect : scene_->GetUiElements()) { 704 for (const auto& rect : scene_->GetUiElements()) {
714 if (!rect->visible) { 705 if (!rect->visible) {
715 continue; 706 continue;
716 } 707 }
717 if (rect->lock_to_fov) { 708 if (rect->lock_to_fov) {
718 head_locked_elements.push_back(rect.get()); 709 head_locked_elements.push_back(rect.get());
719 } else { 710 } else {
720 world_elements.push_back(rect.get()); 711 world_elements.push_back(rect.get());
721 } 712 }
722 } 713 }
723 714
724 if (html_interface_->GetMode() == UiInterface::Mode::WEB_VR) { 715 if (vr_shell_->GetUiInterface()->GetMode() == UiInterface::Mode::WEB_VR) {
725 // WebVR is incompatible with 3D world compositing since the 716 // WebVR is incompatible with 3D world compositing since the
726 // depth buffer was already populated with unknown scaling - the 717 // depth buffer was already populated with unknown scaling - the
727 // WebVR app has full control over zNear/zFar. Just leave the 718 // WebVR app has full control over zNear/zFar. Just leave the
728 // existing content in place in the primary buffer without 719 // existing content in place in the primary buffer without
729 // clearing. Currently, there aren't any world elements in WebVR 720 // clearing. Currently, there aren't any world elements in WebVR
730 // mode, this will need further testing if those get added 721 // mode, this will need further testing if those get added
731 // later. 722 // later.
732 } else { 723 } else {
733 // Non-WebVR mode, enable depth testing and clear the primary buffers. 724 // Non-WebVR mode, enable depth testing and clear the primary buffers.
734 glEnable(GL_CULL_FACE); 725 glEnable(GL_CULL_FACE);
735 glEnable(GL_DEPTH_TEST); 726 glEnable(GL_DEPTH_TEST);
736 glDepthMask(GL_TRUE); 727 glDepthMask(GL_TRUE);
737 728
738 glClearColor(0.1f, 0.1f, 0.1f, 1.0f); 729 glClearColor(0.1f, 0.1f, 0.1f, 1.0f);
739 glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); 730 glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
740 } 731 }
741
742 if (!world_elements.empty()) { 732 if (!world_elements.empty()) {
743 DrawUiViewOnGL(&head_pose, world_elements, render_size_primary_, 733 DrawUiView(&head_pose, world_elements, render_size_primary_,
744 kViewportListPrimaryOffset); 734 kViewportListPrimaryOffset);
745 } 735 }
746 736
747 if (!head_locked_elements.empty()) { 737 if (!head_locked_elements.empty()) {
748 // Add head-locked viewports. The list gets reset to just 738 // Add head-locked viewports. The list gets reset to just
749 // the recommended viewports (for the primary buffer) each frame. 739 // the recommended viewports (for the primary buffer) each frame.
750 buffer_viewport_list_->SetBufferViewport( 740 buffer_viewport_list_->SetBufferViewport(
751 kViewportListHeadlockedOffset + GVR_LEFT_EYE, 741 kViewportListHeadlockedOffset + GVR_LEFT_EYE,
752 *headlocked_left_viewport_); 742 *headlocked_left_viewport_);
753 buffer_viewport_list_->SetBufferViewport( 743 buffer_viewport_list_->SetBufferViewport(
754 kViewportListHeadlockedOffset + GVR_RIGHT_EYE, 744 kViewportListHeadlockedOffset + GVR_RIGHT_EYE,
755 *headlocked_right_viewport_); 745 *headlocked_right_viewport_);
756 746
757 // Bind the headlocked framebuffer. 747 // Bind the headlocked framebuffer.
748 // TODO(mthiesse): We don't unbind this? Maybe some cleanup is in order
749 // here.
758 frame.BindBuffer(kFrameHeadlockedBuffer); 750 frame.BindBuffer(kFrameHeadlockedBuffer);
759 glClearColor(0.0f, 0.0f, 0.0f, 0.0f); 751 glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
760 glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); 752 glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
761 DrawUiViewOnGL(nullptr, head_locked_elements, render_size_headlocked_, 753 DrawUiView(nullptr, head_locked_elements, render_size_headlocked_,
762 kViewportListHeadlockedOffset); 754 kViewportListHeadlockedOffset);
763 } 755 }
764 } 756 }
765 757
766 void VrShell::SetWebVRRenderSurfaceSize(int width, int height) { 758 void VrShellGl::SetWebVRRenderSurfaceSize(int width, int height) {
767 render_size_primary_webvr_.width = width; 759 render_size_primary_webvr_.width = width;
768 render_size_primary_webvr_.height = height; 760 render_size_primary_webvr_.height = height;
769 // TODO(klausw,crbug.com/655722): set the WebVR render surface size here once 761 // TODO(klausw,crbug.com/655722): set the WebVR render surface size here once
770 // we have that. 762 // we have that.
771 } 763 }
772 764
773 gvr::Sizei VrShell::GetWebVRCompositorSurfaceSize() { 765 gvr::Sizei VrShellGl::GetWebVRCompositorSurfaceSize() {
774 // This is a stopgap while we're using the WebVR compositor rendering path. 766 // This is a stopgap while we're using the WebVR compositor rendering path.
775 // TODO(klausw,crbug.com/655722): Remove this method and member once we're 767 // TODO(klausw,crbug.com/655722): Remove this method and member once we're
776 // using a separate WebVR render surface. 768 // using a separate WebVR render surface.
777 return content_tex_physical_size_; 769 return content_tex_physical_size_;
778 } 770 }
779 771
780 772 void VrShellGl::DrawUiView(const gvr::Mat4f* head_pose,
781 void VrShell::DrawUiViewOnGL(const gvr::Mat4f* head_pose, 773 const std::vector<const ContentRectangle*>& elements,
782 const std::vector<const ContentRectangle*>& elements, 774 const gvr::Sizei& render_size,
783 const gvr::Sizei& render_size, int viewport_offset) { 775 int viewport_offset) {
784 TRACE_EVENT0("gpu", "VrShell::DrawUiView"); 776 TRACE_EVENT0("gpu", "VrShellGl::DrawUiView");
785 for (auto eye : {GVR_LEFT_EYE, GVR_RIGHT_EYE}) { 777 for (auto eye : {GVR_LEFT_EYE, GVR_RIGHT_EYE}) {
786 buffer_viewport_list_->GetBufferViewport( 778 buffer_viewport_list_->GetBufferViewport(
787 eye + viewport_offset, buffer_viewport_.get()); 779 eye + viewport_offset, buffer_viewport_.get());
788 780
789 gvr::Mat4f view_matrix = gvr_api_->GetEyeFromHeadMatrix(eye); 781 gvr::Mat4f view_matrix = gvr_api_->GetEyeFromHeadMatrix(eye);
790 if (head_pose != nullptr) { 782 if (head_pose != nullptr) {
791 view_matrix = MatrixMul(view_matrix, *head_pose); 783 view_matrix = MatrixMul(view_matrix, *head_pose);
792 } 784 }
793 785
794 gvr::Recti pixel_rect = 786 gvr::Recti pixel_rect =
795 CalculatePixelSpaceRect(render_size, buffer_viewport_->GetSourceUv()); 787 CalculatePixelSpaceRect(render_size, buffer_viewport_->GetSourceUv());
796 glViewport(pixel_rect.left, pixel_rect.bottom, 788 glViewport(pixel_rect.left, pixel_rect.bottom,
797 pixel_rect.right - pixel_rect.left, 789 pixel_rect.right - pixel_rect.left,
798 pixel_rect.top - pixel_rect.bottom); 790 pixel_rect.top - pixel_rect.bottom);
799 791
800 const gvr::Mat4f render_matrix = MatrixMul( 792 const gvr::Mat4f render_matrix = MatrixMul(
801 PerspectiveMatrixFromView( 793 PerspectiveMatrixFromView(
802 buffer_viewport_->GetSourceFov(), kZNear, kZFar), 794 buffer_viewport_->GetSourceFov(), kZNear, kZFar),
803 view_matrix); 795 view_matrix);
804 796
805 DrawElementsOnGL(render_matrix, elements); 797 DrawElements(render_matrix, elements);
806 if (head_pose != nullptr && 798 if (head_pose != nullptr &&
807 html_interface_->GetMode() != UiInterface::Mode::WEB_VR) { 799 vr_shell_->GetUiInterface()->GetMode() != UiInterface::Mode::WEB_VR) {
808 DrawCursorOnGL(render_matrix); 800 DrawCursor(render_matrix);
809 } 801 }
810 } 802 }
811 } 803 }
812 804
813 void VrShell::DrawElementsOnGL( 805 void VrShellGl::DrawElements(
814 const gvr::Mat4f& render_matrix, 806 const gvr::Mat4f& render_matrix,
815 const std::vector<const ContentRectangle*>& elements) { 807 const std::vector<const ContentRectangle*>& elements) {
816 for (const auto& rect : elements) { 808 for (const auto& rect : elements) {
817 Rectf copy_rect; 809 Rectf copy_rect;
818 jint texture_handle; 810 jint texture_handle;
819 if (rect->content_quad) { 811 if (rect->content_quad) {
820 copy_rect = {0, 0, 1, 1}; 812 copy_rect = {0, 0, 1, 1};
821 texture_handle = content_texture_id_; 813 texture_handle = content_texture_id_;
822 } else { 814 } else {
823 copy_rect.x = static_cast<float>(rect->copy_rect.x) / ui_tex_css_width_; 815 copy_rect.x = static_cast<float>(rect->copy_rect.x) / ui_tex_css_width_;
824 copy_rect.y = static_cast<float>(rect->copy_rect.y) / ui_tex_css_height_; 816 copy_rect.y = static_cast<float>(rect->copy_rect.y) / ui_tex_css_height_;
825 copy_rect.width = static_cast<float>(rect->copy_rect.width) / 817 copy_rect.width = static_cast<float>(rect->copy_rect.width) /
826 ui_tex_css_width_; 818 ui_tex_css_width_;
827 copy_rect.height = static_cast<float>(rect->copy_rect.height) / 819 copy_rect.height = static_cast<float>(rect->copy_rect.height) /
828 ui_tex_css_height_; 820 ui_tex_css_height_;
829 texture_handle = ui_texture_id_; 821 texture_handle = ui_texture_id_;
830 } 822 }
831 gvr::Mat4f transform = MatrixMul(render_matrix, rect->transform.to_world); 823 gvr::Mat4f transform = MatrixMul(render_matrix, rect->transform.to_world);
832 vr_shell_renderer_->GetTexturedQuadRenderer()->Draw( 824 vr_shell_renderer_->GetTexturedQuadRenderer()->Draw(
833 texture_handle, transform, copy_rect); 825 texture_handle, transform, copy_rect);
834 } 826 }
835 } 827 }
836 828
837 void VrShell::DrawCursorOnGL(const gvr::Mat4f& render_matrix) { 829 void VrShellGl::DrawCursor(const gvr::Mat4f& render_matrix) {
838 gvr::Mat4f mat; 830 gvr::Mat4f mat;
839 SetIdentityM(mat); 831 SetIdentityM(mat);
840 832
841 // Draw the reticle. 833 // Draw the reticle.
842 834
843 // Scale the pointer to have a fixed FOV size at any distance. 835 // Scale the pointer to have a fixed FOV size at any distance.
844 const float eye_to_target = Distance(target_point_, kOrigin); 836 const float eye_to_target = Distance(target_point_, kOrigin);
845 ScaleM(mat, mat, kReticleWidth * eye_to_target, 837 ScaleM(mat, mat, kReticleWidth * eye_to_target,
846 kReticleHeight * eye_to_target, 1.0f); 838 kReticleHeight * eye_to_target, 1.0f);
847 839
(...skipping 52 matching lines...)
900 892
901 // Move the beam origin to the hand. 893 // Move the beam origin to the hand.
902 TranslateM(face_transform, face_transform, kHandPosition.x, kHandPosition.y, 894 TranslateM(face_transform, face_transform, kHandPosition.x, kHandPosition.y,
903 kHandPosition.z); 895 kHandPosition.z);
904 896
905 transform = MatrixMul(render_matrix, face_transform); 897 transform = MatrixMul(render_matrix, face_transform);
906 vr_shell_renderer_->GetLaserRenderer()->Draw(transform); 898 vr_shell_renderer_->GetLaserRenderer()->Draw(transform);
907 } 899 }
908 } 900 }
909 901
910 void VrShell::DrawWebVrOnGL() { 902 void VrShellGl::DrawWebVr() {
911 TRACE_EVENT0("gpu", "VrShell::DrawWebVr"); 903 TRACE_EVENT0("gpu", "VrShellGl::DrawWebVr");
912 // Don't need face culling, depth testing, blending, etc. Turn it all off. 904 // Don't need face culling, depth testing, blending, etc. Turn it all off.
913 glDisable(GL_CULL_FACE); 905 glDisable(GL_CULL_FACE);
914 glDepthMask(GL_FALSE); 906 glDepthMask(GL_FALSE);
915 glDisable(GL_DEPTH_TEST); 907 glDisable(GL_DEPTH_TEST);
916 glDisable(GL_SCISSOR_TEST); 908 glDisable(GL_SCISSOR_TEST);
917 glDisable(GL_BLEND); 909 glDisable(GL_BLEND);
918 glDisable(GL_POLYGON_OFFSET_FILL); 910 glDisable(GL_POLYGON_OFFSET_FILL);
919 911
920 glViewport(0, 0, render_size_primary_.width, render_size_primary_.height); 912 glViewport(0, 0, render_size_primary_.width, render_size_primary_.height);
921 vr_shell_renderer_->GetWebVrRenderer()->Draw(webvr_texture_id_); 913 vr_shell_renderer_->GetWebVrRenderer()->Draw(webvr_texture_id_);
922 914
923 buffer_viewport_list_->SetBufferViewport(GVR_LEFT_EYE, 915 buffer_viewport_list_->SetBufferViewport(GVR_LEFT_EYE,
924 *webvr_left_viewport_); 916 *webvr_left_viewport_);
925 buffer_viewport_list_->SetBufferViewport(GVR_RIGHT_EYE, 917 buffer_viewport_list_->SetBufferViewport(GVR_RIGHT_EYE,
926 *webvr_right_viewport_); 918 *webvr_right_viewport_);
927 } 919 }
928 920
929 void VrShell::OnTriggerEventOnUI(JNIEnv* env, 921 void VrShellGl::OnTriggerEvent() {
930 const JavaParamRef<jobject>& obj) {
931 // Set a flag to handle this on the render thread at the next frame. 922 // Set a flag to handle this on the render thread at the next frame.
932 touch_pending_ = true; 923 touch_pending_ = true;
933 } 924 }
934 925
935 void VrShell::OnPauseOnUI(JNIEnv* env, const JavaParamRef<jobject>& obj) { 926 void VrShellGl::OnPause() {
936 if (gvr_api_ == nullptr) 927 draw_task_.Cancel();
937 return;
938
939 // TODO(mthiesse): Clean up threading here.
940 controller_->OnPause(); 928 controller_->OnPause();
941 gvr_api_->PauseTracking(); 929 gvr_api_->PauseTracking();
942 SetShowingOverscrollGlowOnUI(true);
943
944 // exit vr session
945 metrics_helper_->SetVRActive(false);
946 } 930 }
947 931
948 void VrShell::OnResumeOnUI(JNIEnv* env, const JavaParamRef<jobject>& obj) { 932 void VrShellGl::OnResume() {
949 if (gvr_api_ == nullptr)
950 return;
951
952 // TODO(mthiesse): Clean up threading here.
953 gvr_api_->RefreshViewerProfile(); 933 gvr_api_->RefreshViewerProfile();
954 gvr_api_->ResumeTracking(); 934 gvr_api_->ResumeTracking();
955 controller_->OnResume(); 935 controller_->OnResume();
956 SetShowingOverscrollGlowOnUI(false); 936 draw_task_.Reset(base::Bind(&VrShellGl::DrawFrame, base::Unretained(this)));
957 937 ScheduleNextDrawFrame();
958 // exit vr session
959 metrics_helper_->SetVRActive(true);
960 } 938 }
961 939
962 void VrShell::SetShowingOverscrollGlowOnUI(bool showing_glow) { 940 void VrShellGl::SetWebVrMode(bool enabled) {
963 main_contents_->GetRenderWidgetHostView()->SetShowingOverscrollGlow(
964 showing_glow);
965 }
966
967 base::WeakPtr<VrShell> VrShell::GetWeakPtrOnUI(
968 const content::WebContents* web_contents) {
969 // Ensure that the WebContents requesting the VrShell instance is the one
970 // we created.
971 if (g_instance != nullptr && g_instance->ui_contents_ == web_contents)
972 return g_instance->weak_ptr_factory_.GetWeakPtr();
973 return base::WeakPtr<VrShell>(nullptr);
974 }
975
976 void VrShell::OnDomContentsLoadedOnUI() {
977 html_interface_->SetURL(main_contents_->GetVisibleURL());
978 html_interface_->SetLoading(main_contents_->IsLoading());
979 html_interface_->OnDomContentsLoaded();
980 }
981
982 void VrShell::SetWebVrModeOnUI(JNIEnv* env,
983 const base::android::JavaParamRef<jobject>& obj,
984 bool enabled) {
985 metrics_helper_->SetWebVREnabled(enabled);
986 if (enabled) { 941 if (enabled) {
987 html_interface_->SetMode(UiInterface::Mode::WEB_VR); 942 vr_shell_->GetUiInterface()->SetMode(UiInterface::Mode::WEB_VR);
988 } else { 943 } else {
989 html_interface_->SetMode(UiInterface::Mode::STANDARD); 944 vr_shell_->GetUiInterface()->SetMode(UiInterface::Mode::STANDARD);
990 } 945 }
991 } 946 }
992 947
993 void VrShell::SetWebVRSecureOrigin(bool secure_origin) { 948 void VrShellGl::UpdateWebVRTextureBounds(const gvr::Rectf& left_bounds,
994 // TODO(cjgrant): Align this state with the logic that drives the omnibox. 949 const gvr::Rectf& right_bounds) {
995 html_interface_->SetWebVRSecureOrigin(secure_origin);
996 }
997
998 void VrShell::SubmitWebVRFrame() {}
999
1000 void VrShell::UpdateWebVRTextureBounds(const gvr::Rectf& left_bounds,
1001 const gvr::Rectf& right_bounds) {
1002 webvr_left_viewport_->SetSourceUv(left_bounds); 950 webvr_left_viewport_->SetSourceUv(left_bounds);
1003 webvr_right_viewport_->SetSourceUv(right_bounds); 951 webvr_right_viewport_->SetSourceUv(right_bounds);
1004 } 952 }
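[Editor's note] UpdateWebVRTextureBounds narrows the UV rectangle each eye's viewport samples from the shared WebVR texture. The sketch below shows the conventional side-by-side layout, assuming gvr::Rectf's {left, right, bottom, top} field order from the GVR SDK; the concrete values are illustrative, not taken from this CL.

// Sketch only: conventional side-by-side eye layout for one WebVR canvas.
// gvr_rectf field order is left, right, bottom, top.
gvr::Rectf LeftEyeBounds() {
  return {0.0f, 0.5f, 0.0f, 1.0f};  // left half of the texture
}
gvr::Rectf RightEyeBounds() {
  return {0.5f, 1.0f, 0.0f, 1.0f};  // right half of the texture
}
// A page may pass smaller bounds (e.g. {0.0f, 0.4f, 0.0f, 0.8f}) to render
// at reduced resolution; SetSourceUv() then samples only that sub-rectangle.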
1005 953
1006 gvr::GvrApi* VrShell::gvr_api() { 954 gvr::GvrApi* VrShellGl::gvr_api() {
1007 return gvr_api_.get(); 955 return gvr_api_.get();
1008 } 956 }
1009 957
1010 void VrShell::SurfacesChangedOnUI(JNIEnv* env, 958 void VrShellGl::ContentBoundsChanged(int width, int height) {
1011 const JavaParamRef<jobject>& object, 959 TRACE_EVENT0("gpu", "VrShellGl::ContentBoundsChanged");
1012 const JavaParamRef<jobject>& content_surface, 960 content_tex_css_width_ = width;
1013 const JavaParamRef<jobject>& ui_surface) { 961 content_tex_css_height_ = height;
1014 content_compositor_->SurfaceChanged(content_surface);
1015 ui_compositor_->SurfaceChanged(ui_surface);
1016 } 962 }
1017 963
1018 void VrShell::ContentBoundsChangedOnUI(JNIEnv* env, 964 void VrShellGl::ContentPhysicalBoundsChanged(int width, int height) {
1019 const JavaParamRef<jobject>& object, 965 if (content_surface_texture_.get())
1020 jint width, jint height, jfloat dpr) { 966 content_surface_texture_->SetDefaultBufferSize(width, height);
1021 TRACE_EVENT0("gpu", "VrShell::ContentBoundsChanged");
1022 content_tex_physical_size_.width = width; 967 content_tex_physical_size_.width = width;
1023 content_tex_physical_size_.height = height; 968 content_tex_physical_size_.height = height;
1024 // TODO(mthiesse): Synchronize with GL thread, and update tex css size in
1025 // response to MainFrameWasResized, not here.
1026 content_tex_css_width_ = width / dpr;
1027 content_tex_css_height_ = height / dpr;
1028
1029 content_compositor_->SetWindowBounds(width, height);
1030 } 969 }
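[Editor's note] In the old path the CSS size was derived from the physical size by dividing out the device pixel ratio, while the new VrShellGl receives CSS and physical bounds through separate calls. A one-line illustration of that relation (values are examples, not from the CL):

// Illustrative only: CSS px = physical px / device pixel ratio.
// E.g. a 1920x1080 physical surface at dpr 2.0 is a 960x540 CSS layout.
int CssFromPhysical(int physical_px, float dpr) {
  return static_cast<int>(physical_px / dpr);
}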
1031 970
1032 void VrShell::UIBoundsChangedOnUI(JNIEnv* env, 971 void VrShellGl::UIBoundsChanged(int width, int height) {
1033 const JavaParamRef<jobject>& object, 972 ui_tex_css_width_ = width;
1034 jint width, jint height, jfloat dpr) { 973 ui_tex_css_height_ = height;
1035 ui_compositor_->SetWindowBounds(width, height);
1036 } 974 }
1037 975
1038 UiScene* VrShell::GetSceneOnGL() { 976 void VrShellGl::UIPhysicalBoundsChanged(int width, int height) {
1039 return scene_.get(); 977 if (ui_surface_texture_.get())
978 ui_surface_texture_->SetDefaultBufferSize(width, height);
979 ui_tex_physical_size_.width = width;
980 ui_tex_physical_size_.height = height;
1040 } 981 }
1041 982
1042 UiInterface* VrShell::GetUiInterfaceOnGL() { 983 base::WeakPtr<VrShellGl> VrShellGl::GetWeakPtr() {
1043 return html_interface_.get(); 984 return weak_ptr_factory_.GetWeakPtr();
1044 } 985 }
1045 986
1046 void VrShell::QueueTaskOnUI(base::Callback<void()>& callback) { 987 void VrShellGl::UpdateVSyncParameters(const base::TimeTicks timebase,
1047 base::AutoLock lock(task_queue_lock_); 988 const base::TimeDelta interval) {
1048 task_queue_.push(callback); 989 vsync_timebase_ = timebase;
990 vsync_interval_ = interval;
1049 } 991 }
1050 992
1051 void VrShell::HandleQueuedTasksOnGL() { 993 void VrShellGl::ScheduleNextDrawFrame() {
1052 // To protect a stream of tasks from blocking rendering indefinitely, 994 base::TimeTicks now = base::TimeTicks::Now();
1053 // process only the number of tasks present when first checked. 995 base::TimeTicks target;
1054 std::vector<base::Callback<void()>> tasks; 996
1055 { 997 if (vsync_interval_.is_zero()) {
1056 base::AutoLock lock(task_queue_lock_); 998 target = now;
1057 const size_t count = task_queue_.size(); 999 } else {
1058 for (size_t i = 0; i < count; i++) { 1000 target = now + vsync_interval_;
1059 tasks.push_back(task_queue_.front()); 1001 int64_t intervals = (target - vsync_timebase_) / vsync_interval_;
1060 task_queue_.pop(); 1002 target = vsync_timebase_ + intervals * vsync_interval_;
1061 }
1062 } 1003 }
1063 for (auto &task : tasks) { 1004
1064 task.Run(); 1005 task_runner_->PostDelayedTask(FROM_HERE, draw_task_.callback(), target - now);
1065 }
1066 } 1006 }
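[Editor's note] ScheduleNextDrawFrame aims one interval past the current time and then snaps that target back onto the vsync grid anchored at the timebase, so successive frames stay phase-aligned with the display. The same arithmetic in a self-contained std::chrono sketch (illustration only, not Chromium code):

#include <chrono>

using Clock = std::chrono::steady_clock;

// Aim one interval ahead of |now|, then snap down onto the vsync grid
// anchored at |timebase|. With a ~16.6 ms interval this yields a delay of
// at most one interval and keeps frames phase-aligned with vsync.
Clock::duration DelayToNextVSync(Clock::time_point now,
                                 Clock::time_point timebase,
                                 Clock::duration interval) {
  if (interval == Clock::duration::zero())
    return Clock::duration::zero();  // no vsync info: draw immediately
  Clock::time_point target = now + interval;
  auto intervals = (target - timebase) / interval;  // integer division
  target = timebase + intervals * interval;
  return target - now;
}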
1067 1007
1068 void VrShell::DoUiActionOnUI(const UiAction action) { 1008 void VrShellGl::ForceExitVR() {
1069 content::NavigationController& controller = main_contents_->GetController(); 1009 main_thread_task_runner_->PostTask(
1070 switch (action) { 1010 FROM_HERE, base::Bind(&VrShell::ForceExitVR, weak_vr_shell_));
1071 case HISTORY_BACK:
1072 if (main_contents_->IsFullscreen()) {
1073 main_contents_->ExitFullscreen(true /* will_cause_resize */);
1074 } else if (controller.CanGoBack()) {
1075 controller.GoBack();
1076 }
1077 break;
1078 case HISTORY_FORWARD:
1079 if (controller.CanGoForward())
1080 controller.GoForward();
1081 break;
1082 case RELOAD:
1083 controller.Reload(false);
1084 break;
1085 #if defined(ENABLE_VR_SHELL_UI_DEV)
1086 case RELOAD_UI:
1087 ui_contents_->GetController().Reload(false);
1088 html_interface_.reset(new UiInterface(UiInterface::Mode::STANDARD,
1089 main_contents_->IsFullscreen()));
1090 vr_web_contents_observer_->SetUiInterface(html_interface_.get());
1091 break;
1092 #endif
1093 case ZOOM_OUT: // Not handled yet.
1094 case ZOOM_IN: // Not handled yet.
1095 break;
1096 default:
1097 NOTREACHED();
1098 }
1099 }
1100
1101 void VrShell::RenderViewHostChanged(content::RenderViewHost* old_host,
1102 content::RenderViewHost* new_host) {
1103 new_host->GetWidget()->GetView()->SetBackgroundColor(SK_ColorTRANSPARENT);
1104 }
1105
1106 void VrShell::MainFrameWasResized(bool width_changed) {
1107 display::Display display = display::Screen::GetScreen()
1108 ->GetDisplayNearestWindow(ui_contents_->GetNativeView());
1109 // TODO(mthiesse): Synchronize with GL thread.
1110 ui_tex_css_width_ = display.size().width();
1111 ui_tex_css_height_ = display.size().height();
1112 }
1113
1114 void VrShell::WebContentsDestroyed() {
1115 ui_input_manager_.reset();
1116 ui_contents_ = nullptr;
1117 // TODO(mthiesse): Handle web contents being destroyed.
1118 delegate_->ForceExitVr();
1119 }
1120
1121 void VrShell::ContentWebContentsDestroyedOnUI() {
1122 content_input_manager_.reset();
1123 main_contents_ = nullptr;
1124 // TODO(mthiesse): Handle web contents being destroyed.
1125 delegate_->ForceExitVr();
1126 }
1127
1128 void VrShell::ContentWasHiddenOnUI() {
1129 // Ensure we don't continue sending input to it.
1130 content_input_manager_.reset();
1131 // TODO(mthiesse): Handle web contents being hidden.
1132 delegate_->ForceExitVr();
1133 }
1134
1135 void VrShell::SetContentCssSizeOnUI(float width, float height, float dpr) {
1136 JNIEnv* env = base::android::AttachCurrentThread();
1137 Java_VrShellImpl_setContentCssSizeOnUI(env, j_vr_shell_.obj(), width, height,
1138 dpr);
1139 }
1140
1141 void VrShell::SetUiCssSizeOnUI(float width, float height, float dpr) {
1142 JNIEnv* env = base::android::AttachCurrentThread();
1143 Java_VrShellImpl_setUiCssSizeOnUI(env, j_vr_shell_.obj(), width, height, dpr);
1144 }
1145
1146 // ----------------------------------------------------------------------------
1147 // Native JNI methods
1148 // ----------------------------------------------------------------------------
1149
1150 jlong InitOnUI(JNIEnv* env,
1151 const JavaParamRef<jobject>& obj,
1152 const JavaParamRef<jobject>& content_web_contents,
1153 jlong content_window_android,
1154 const JavaParamRef<jobject>& ui_web_contents,
1155 jlong ui_window_android,
1156 jboolean for_web_vr) {
1157 return reinterpret_cast<intptr_t>(new VrShell(
1158 env, obj, content::WebContents::FromJavaWebContents(content_web_contents),
1159 reinterpret_cast<ui::WindowAndroid*>(content_window_android),
1160 content::WebContents::FromJavaWebContents(ui_web_contents),
1161 reinterpret_cast<ui::WindowAndroid*>(ui_window_android),
1162 for_web_vr));
1163 } 1011 }
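[Editor's note] ForceExitVR hops from the GL thread back to the main thread and binds through a WeakPtr, so the posted task becomes a no-op if VrShell is gone by the time it runs. A minimal sketch of that pattern with a hypothetical MainThreadObject; only base::Bind's WeakPtr semantics and PostTask are assumed here.

#include "base/bind.h"
#include "base/location.h"
#include "base/memory/ref_counted.h"
#include "base/memory/weak_ptr.h"
#include "base/single_thread_task_runner.h"

// Hypothetical object that lives on (and is deleted on) the main thread.
class MainThreadObject {
 public:
  MainThreadObject() : weak_factory_(this) {}
  void ForceExitVr() { /* tear down VR state on the main thread */ }
  base::WeakPtr<MainThreadObject> GetWeakPtr() {
    return weak_factory_.GetWeakPtr();
  }

 private:
  base::WeakPtrFactory<MainThreadObject> weak_factory_;
};

// Called from the GL thread. If the WeakPtr has been invalidated before
// the task runs on the main thread, base::Bind turns the call into a
// no-op instead of dereferencing a dangling pointer.
void PostForceExit(scoped_refptr<base::SingleThreadTaskRunner> main_runner,
                   base::WeakPtr<MainThreadObject> weak_object) {
  main_runner->PostTask(
      FROM_HERE, base::Bind(&MainThreadObject::ForceExitVr, weak_object));
}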
1164 1012
1165 } // namespace vr_shell 1013 } // namespace vr_shell