Chromium Code Reviews

Side by Side Diff: chrome/browser/android/vr_shell/vr_shell_gl.cc

Issue 2562733002: Implement our own GLThread for VR Shell. (Closed)
Patch Set: Address comment Created 4 years ago
1 // Copyright 2016 The Chromium Authors. All rights reserved. 1 // Copyright 2016 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "chrome/browser/android/vr_shell/vr_shell.h" 5 #include "chrome/browser/android/vr_shell/vr_shell_gl.h"
6 6
7 #include "base/memory/ptr_util.h"
7 #include "base/metrics/histogram_macros.h" 8 #include "base/metrics/histogram_macros.h"
9 #include "base/threading/thread_task_runner_handle.h"
8 #include "chrome/browser/android/vr_shell/ui_elements.h" 10 #include "chrome/browser/android/vr_shell/ui_elements.h"
9 #include "chrome/browser/android/vr_shell/ui_interface.h" 11 #include "chrome/browser/android/vr_shell/ui_interface.h"
10 #include "chrome/browser/android/vr_shell/ui_scene.h" 12 #include "chrome/browser/android/vr_shell/ui_scene.h"
11 #include "chrome/browser/android/vr_shell/vr_compositor.h"
12 #include "chrome/browser/android/vr_shell/vr_controller.h" 13 #include "chrome/browser/android/vr_shell/vr_controller.h"
13 #include "chrome/browser/android/vr_shell/vr_gl_util.h" 14 #include "chrome/browser/android/vr_shell/vr_gl_util.h"
14 #include "chrome/browser/android/vr_shell/vr_input_manager.h" 15 #include "chrome/browser/android/vr_shell/vr_input_manager.h"
15 #include "chrome/browser/android/vr_shell/vr_shell_delegate.h" 16 #include "chrome/browser/android/vr_shell/vr_math.h"
17 #include "chrome/browser/android/vr_shell/vr_shell.h"
16 #include "chrome/browser/android/vr_shell/vr_shell_renderer.h" 18 #include "chrome/browser/android/vr_shell/vr_shell_renderer.h"
17 #include "chrome/browser/android/vr_shell/vr_usage_monitor.h" 19 #include "third_party/WebKit/public/platform/WebInputEvent.h"
18 #include "chrome/browser/android/vr_shell/vr_web_contents_observer.h" 20 #include "ui/gfx/vsync_provider.h"
19 #include "content/public/browser/navigation_controller.h" 21 #include "ui/gl/android/scoped_java_surface.h"
20 #include "content/public/browser/render_view_host.h" 22 #include "ui/gl/android/surface_texture.h"
21 #include "content/public/browser/render_widget_host.h"
22 #include "content/public/browser/render_widget_host_view.h"
23 #include "content/public/browser/web_contents.h"
24 #include "content/public/common/referrer.h"
25 #include "device/vr/android/gvr/gvr_device_provider.h"
26 #include "jni/VrShellImpl_jni.h"
27 #include "ui/android/view_android.h"
28 #include "ui/android/window_android.h"
29 #include "ui/base/page_transition_types.h"
30 #include "ui/display/display.h"
31 #include "ui/display/screen.h"
32 #include "ui/gl/gl_bindings.h" 23 #include "ui/gl/gl_bindings.h"
24 #include "ui/gl/gl_context.h"
25 #include "ui/gl/gl_surface.h"
33 #include "ui/gl/init/gl_factory.h" 26 #include "ui/gl/init/gl_factory.h"
34 27
35 using base::android::JavaParamRef;
36
37 namespace vr_shell { 28 namespace vr_shell {
38 29
39 namespace { 30 namespace {
40 // Constant taken from treasure_hunt demo. 31 // Constant taken from treasure_hunt demo.
41 static constexpr long kPredictionTimeWithoutVsyncNanos = 50000000; 32 static constexpr long kPredictionTimeWithoutVsyncNanos = 50000000;
42 33
43 static constexpr float kZNear = 0.1f; 34 static constexpr float kZNear = 0.1f;
44 static constexpr float kZFar = 1000.0f; 35 static constexpr float kZFar = 1000.0f;
45 36
46 // Screen angle in degrees. 0 = vertical, positive = top closer. 37 // Screen angle in degrees. 0 = vertical, positive = top closer.
(...skipping 41 matching lines...)
88 79
89 // The GVR viewport list has two entries (left eye and right eye) for each 80 // The GVR viewport list has two entries (left eye and right eye) for each
90 // GVR buffer. 81 // GVR buffer.
91 static constexpr int kViewportListPrimaryOffset = 0; 82 static constexpr int kViewportListPrimaryOffset = 0;
92 static constexpr int kViewportListHeadlockedOffset = 2; 83 static constexpr int kViewportListHeadlockedOffset = 2;
93 84
94 // Magic numbers used to mark valid pose index values encoded in frame 85 // Magic numbers used to mark valid pose index values encoded in frame
95 // data. Must match the magic numbers used in blink's VRDisplay.cpp. 86 // data. Must match the magic numbers used in blink's VRDisplay.cpp.
96 static constexpr std::array<uint8_t, 2> kWebVrPosePixelMagicNumbers{{42, 142}}; 87 static constexpr std::array<uint8_t, 2> kWebVrPosePixelMagicNumbers{{42, 142}};
97 88
98 vr_shell::VrShell* g_instance;
99
100 static const char kVrShellUIURL[] = "chrome://vr-shell-ui";
101
102 float Distance(const gvr::Vec3f& vec1, const gvr::Vec3f& vec2) { 89 float Distance(const gvr::Vec3f& vec1, const gvr::Vec3f& vec2) {
103 float xdiff = (vec1.x - vec2.x); 90 float xdiff = (vec1.x - vec2.x);
104 float ydiff = (vec1.y - vec2.y); 91 float ydiff = (vec1.y - vec2.y);
105 float zdiff = (vec1.z - vec2.z); 92 float zdiff = (vec1.z - vec2.z);
106 float scale = xdiff * xdiff + ydiff * ydiff + zdiff * zdiff; 93 float scale = xdiff * xdiff + ydiff * ydiff + zdiff * zdiff;
107 return std::sqrt(scale); 94 return std::sqrt(scale);
108 } 95 }
109 96
110 // Generate a quaternion representing the rotation from the negative Z axis 97 // Generate a quaternion representing the rotation from the negative Z axis
111 // (0, 0, -1) to a specified vector. This is an optimized version of a more 98 // (0, 0, -1) to a specified vector. This is an optimized version of a more
(...skipping 28 matching lines...)
140 mouse_event->x = x; 127 mouse_event->x = x;
141 mouse_event->y = y; 128 mouse_event->y = y;
142 mouse_event->windowX = x; 129 mouse_event->windowX = x;
143 mouse_event->windowY = y; 130 mouse_event->windowY = y;
144 mouse_event->timeStampSeconds = timestamp; 131 mouse_event->timeStampSeconds = timestamp;
145 mouse_event->clickCount = 1; 132 mouse_event->clickCount = 1;
146 mouse_event->modifiers = 0; 133 mouse_event->modifiers = 0;
147 134
148 return mouse_event; 135 return mouse_event;
149 } 136 }
150 } // namespace
151
152 VrShell::VrShell(JNIEnv* env,
153 jobject obj,
154 content::WebContents* main_contents,
155 ui::WindowAndroid* content_window,
156 content::WebContents* ui_contents,
157 ui::WindowAndroid* ui_window,
158 bool for_web_vr)
159 : WebContentsObserver(ui_contents),
160 main_contents_(main_contents),
161 ui_contents_(ui_contents),
162 metrics_helper_(new VrMetricsHelper(main_contents)),
163 main_thread_task_runner_(base::ThreadTaskRunnerHandle::Get()),
164 weak_ptr_factory_(this) {
165 DCHECK(g_instance == nullptr);
166 g_instance = this;
167 j_vr_shell_.Reset(env, obj);
168 scene_.reset(new UiScene);
169
170 if (for_web_vr)
171 metrics_helper_->SetWebVREnabled(true);
172 html_interface_.reset(new UiInterface(
173 for_web_vr ? UiInterface::Mode::WEB_VR : UiInterface::Mode::STANDARD,
174 main_contents_->IsFullscreen()));
175 content_compositor_.reset(new VrCompositor(content_window, false));
176 ui_compositor_.reset(new VrCompositor(ui_window, true));
177 vr_web_contents_observer_.reset(new VrWebContentsObserver(
178 main_contents, html_interface_.get(), this));
179
180 LoadUIContentOnUI();
181
182 gvr::Mat4f identity;
183 SetIdentityM(identity);
184 webvr_head_pose_.resize(kPoseRingBufferSize, identity);
185 webvr_head_pose_valid_.resize(kPoseRingBufferSize, false);
186
187 content_input_manager_.reset(new VrInputManager(main_contents_));
188 ui_input_manager_.reset(new VrInputManager(ui_contents_));
189 weak_content_input_manager_ = content_input_manager_->GetWeakPtr();
190 weak_ui_input_manager_ = ui_input_manager_->GetWeakPtr();
191
192 SetShowingOverscrollGlowOnUI(false);
193 }
194
195 void VrShell::UpdateCompositorLayersOnUI(JNIEnv* env,
196 const JavaParamRef<jobject>& obj) {
197 content_compositor_->SetLayer(main_contents_);
198 ui_compositor_->SetLayer(ui_contents_);
199 }
200
201 void VrShell::DestroyOnUI(JNIEnv* env, const JavaParamRef<jobject>& obj) {
202 delete this;
203 }
204
205 void VrShell::LoadUIContentOnUI() {
206 GURL url(kVrShellUIURL);
207 ui_contents_->GetController().LoadURL(
208 url, content::Referrer(),
209 ui::PageTransition::PAGE_TRANSITION_AUTO_TOPLEVEL, std::string(""));
210 }
211
212 bool RegisterVrShell(JNIEnv* env) {
213 return RegisterNativesImpl(env);
214 }
215
216 VrShell::~VrShell() {
217 if (delegate_ && delegate_->GetDeviceProvider()) {
218 delegate_->GetDeviceProvider()->OnGvrDelegateRemoved();
219 }
220 g_instance = nullptr;
221 gl::init::ShutdownGL();
222 }
223
224 void VrShell::SetDelegateOnUI(JNIEnv* env,
225 const base::android::JavaParamRef<jobject>& obj,
226 const base::android::JavaParamRef<jobject>& delegate) {
227 base::AutoLock lock(gvr_init_lock_);
228 delegate_ = VrShellDelegate::GetNativeDelegate(env, delegate);
229 if (swap_chain_.get()) {
230 delegate_->GetDeviceProvider()->OnGvrDelegateReady(
231 weak_ptr_factory_.GetWeakPtr());
232 }
233 }
234 137
235 enum class ViewerType { 138 enum class ViewerType {
236 UNKNOWN_TYPE = 0, 139 UNKNOWN_TYPE = 0,
237 CARDBOARD = 1, 140 CARDBOARD = 1,
238 DAYDREAM = 2, 141 DAYDREAM = 2,
239 VIEWER_TYPE_MAX, 142 VIEWER_TYPE_MAX,
240 }; 143 };
241 144
242 void VrShell::GvrInitOnGL(JNIEnv* env, 145 int GetPixelEncodedPoseIndexByte() {
243 const JavaParamRef<jobject>& obj, 146 TRACE_EVENT0("gpu", "VrShellGl::GetPixelEncodedPoseIndex");
244 jlong native_gvr_api) { 147 // Read the pose index encoded in a bottom left pixel as color values.
245 // set the initial webvr state 148 // See also third_party/WebKit/Source/modules/vr/VRDisplay.cpp which
246 metrics_helper_->SetVRActive(true); 149 // encodes the pose index, and device/vr/android/gvr/gvr_device.cc
150 // which tracks poses. Returns the low byte (0..255) if valid, or -1
151 // if not valid due to bad magic number.
152 uint8_t pixels[4];
153 // Assume we're reading from the framebuffer we just wrote to.
154 // That's true currently, we may need to use glReadBuffer(GL_BACK)
155 // or equivalent if the rendering setup changes in the future.
156 glReadPixels(0, 0, 1, 1, GL_RGBA, GL_UNSIGNED_BYTE, pixels);
247 157
248 gvr_api_ = 158 // Check for the magic number written by VRDevice.cpp on submit.
249 gvr::GvrApi::WrapNonOwned(reinterpret_cast<gvr_context*>(native_gvr_api)); 159 // This helps avoid glitches from garbage data in the render
250 // TODO(klausw,crbug.com/655722): should report OnGvrDelegateReady here once 160 // buffer that can appear during initialization or resizing. These
251 // we switch to using a WebVR render surface. We currently need to wait for 161 // often appear as flashes of all-black or all-white pixels.
252 // the compositor window's size to be known first. See also 162 if (pixels[1] == kWebVrPosePixelMagicNumbers[0] &&
253 // ContentSurfaceChanged. 163 pixels[2] == kWebVrPosePixelMagicNumbers[1]) {
254 controller_.reset( 164 // Pose is good.
255 new VrController(reinterpret_cast<gvr_context*>(native_gvr_api))); 165 return pixels[0];
166 }
167 VLOG(1) << "WebVR: reject decoded pose index " << (int)pixels[0] <<
168 ", bad magic number " << (int)pixels[1] << ", " << (int)pixels[2];
169 return -1;
170 }
256 171
172 } // namespace
173
174 VrShellGl::VrShellGl(
175 VrShell* vr_shell,
176 base::WeakPtr<VrShell> weak_vr_shell,
177 base::WeakPtr<VrInputManager> content_input_manager,
178 base::WeakPtr<VrInputManager> ui_input_manager,
179 scoped_refptr<base::SingleThreadTaskRunner> main_thread_task_runner,
180 gvr_context* gvr_api)
181 : task_runner_(base::ThreadTaskRunnerHandle::Get()),
182 vr_shell_(vr_shell),
183 weak_vr_shell_(weak_vr_shell),
184 content_input_manager_(content_input_manager),
185 ui_input_manager_(ui_input_manager),
186 main_thread_task_runner_(std::move(main_thread_task_runner)),
187 weak_ptr_factory_(this) {
188 GvrInit(gvr_api);
189 }
190
191 VrShellGl::~VrShellGl() {
192 draw_task_.Cancel();
bshe 2016/12/09 19:36:25 Do you need to delete textures and shut down GL here?
mthiesse 2016/12/09 20:23:18 No, calling ShutdownGL here will lead to segfaults
193 }
194
195 bool VrShellGl::Initialize() {
196 if (!InitializeGl()) return false;
197
198 gvr::Mat4f identity;
199 SetIdentityM(identity);
200 webvr_head_pose_.resize(kPoseRingBufferSize, identity);
201 webvr_head_pose_valid_.resize(kPoseRingBufferSize, false);
202
203 draw_task_.Reset(base::Bind(&VrShellGl::DrawFrame, base::Unretained(this)));
204
205 scene_.reset(new UiScene);
206
207 InitializeRenderer();
208
209 ScheduleNextDrawFrame();
210 return true;
211 }
212
213 bool VrShellGl::InitializeGl() {
214 if (gl::GetGLImplementation() == gl::kGLImplementationNone &&
215 !gl::init::InitializeGLOneOff()) {
216 LOG(ERROR) << "gl::init::InitializeGLOneOff failed";
217 ForceExitVR();
218 return false;
219 }
220 surface_ = gl::init::CreateOffscreenGLSurface(gfx::Size());
221 if (!surface_.get()) {
222 LOG(ERROR) << "gl::init::CreateOffscreenGLSurface failed";
223 ForceExitVR();
224 return false;
225 }
226 context_ = gl::init::CreateGLContext(nullptr, surface_.get(),
227 gl::GLContextAttribs());
228 if (!context_.get()) {
229 LOG(ERROR) << "gl::init::CreateGLContext failed";
230 ForceExitVR();
231 return false;
232 }
233 if (!context_->MakeCurrent(surface_.get())) {
234 LOG(ERROR) << "gl::GLContext::MakeCurrent() failed";
235 ForceExitVR();
236 return false;
237 }
238
239 // TODO(mthiesse): We don't appear to have a VSync provider ever here. This is
240 // sort of okay, because the GVR swap chain will block if we render too fast,
241 // but we should address this properly.
242 if (surface_->GetVSyncProvider()) {
243 surface_->GetVSyncProvider()->GetVSyncParameters(base::Bind(
244 &VrShellGl::UpdateVSyncParameters, weak_ptr_factory_.GetWeakPtr()));
245 } else {
246 LOG(ERROR) << "No VSync Provider";
247 }
248
249 unsigned int textures[2];
250 glGenTextures(2, textures);
251 ui_texture_id_ = textures[0];
252 content_texture_id_ = textures[1];
253 ui_surface_texture_ = gl::SurfaceTexture::Create(ui_texture_id_);
254 content_surface_texture_ = gl::SurfaceTexture::Create(content_texture_id_);
255 ui_surface_.reset(new gl::ScopedJavaSurface(ui_surface_texture_.get()));
256 content_surface_.reset(new gl::ScopedJavaSurface(
257 content_surface_texture_.get()));
258 ui_surface_texture_->SetFrameAvailableCallback(base::Bind(
259 &VrShellGl::OnUIFrameAvailable, weak_ptr_factory_.GetWeakPtr()));
260 content_surface_texture_->SetFrameAvailableCallback(base::Bind(
261 &VrShellGl::OnContentFrameAvailable, weak_ptr_factory_.GetWeakPtr()));
262
263 content_surface_texture_->SetDefaultBufferSize(
264 content_tex_physical_size_.width, content_tex_physical_size_.height);
265 ui_surface_texture_->SetDefaultBufferSize(ui_tex_physical_size_.width,
266 ui_tex_physical_size_.height);
267
268 main_thread_task_runner_->PostTask(FROM_HERE, base::Bind(
269 &VrShell::SurfacesChanged, weak_vr_shell_,
270 content_surface_->j_surface().obj(),
271 ui_surface_->j_surface().obj()));
272 return true;
273 }
274
275 void VrShellGl::OnUIFrameAvailable() {
276 ui_surface_texture_->UpdateTexImage();
277 }
278
279 void VrShellGl::OnContentFrameAvailable() {
280 content_surface_texture_->UpdateTexImage();
281 }
282
283 void VrShellGl::GvrInit(gvr_context* gvr_api) {
284 gvr_api_ = gvr::GvrApi::WrapNonOwned(gvr_api);
285 controller_.reset(new VrController(gvr_api));
257 286
258 ViewerType viewerType; 287 ViewerType viewerType;
259 switch (gvr_api_->GetViewerType()) { 288 switch (gvr_api_->GetViewerType()) {
260 case gvr::ViewerType::GVR_VIEWER_TYPE_DAYDREAM: 289 case gvr::ViewerType::GVR_VIEWER_TYPE_DAYDREAM:
261 viewerType = ViewerType::DAYDREAM; 290 viewerType = ViewerType::DAYDREAM;
262 break; 291 break;
263 case gvr::ViewerType::GVR_VIEWER_TYPE_CARDBOARD: 292 case gvr::ViewerType::GVR_VIEWER_TYPE_CARDBOARD:
264 viewerType = ViewerType::CARDBOARD; 293 viewerType = ViewerType::CARDBOARD;
265 break; 294 break;
266 default: 295 default:
267 NOTREACHED(); 296 NOTREACHED();
268 viewerType = ViewerType::UNKNOWN_TYPE; 297 viewerType = ViewerType::UNKNOWN_TYPE;
269 break; 298 break;
270 } 299 }
271 UMA_HISTOGRAM_ENUMERATION("VRViewerType", static_cast<int>(viewerType), 300 UMA_HISTOGRAM_ENUMERATION("VRViewerType", static_cast<int>(viewerType),
272 static_cast<int>(ViewerType::VIEWER_TYPE_MAX)); 301 static_cast<int>(ViewerType::VIEWER_TYPE_MAX));
273 } 302 }
274 303
275 void VrShell::InitializeGlOnGL(JNIEnv* env, 304 void VrShellGl::InitializeRenderer() {
276 const JavaParamRef<jobject>& obj,
277 jint content_texture_handle,
278 jint ui_texture_handle) {
279 base::AutoLock lock(gvr_init_lock_);
280 CHECK(gl::GetGLImplementation() != gl::kGLImplementationNone ||
281 gl::init::InitializeGLOneOff());
282
283 content_texture_id_ = content_texture_handle;
284 ui_texture_id_ = ui_texture_handle;
285
286 // While WebVR is going through the compositor path, it shares 305 // While WebVR is going through the compositor path, it shares
287 // the same texture ID. This will change once it gets its own 306 // the same texture ID. This will change once it gets its own
288 // surface, but store it separately to avoid future confusion. 307 // surface, but store it separately to avoid future confusion.
289 // TODO(klausw,crbug.com/655722): remove this. 308 // TODO(klausw,crbug.com/655722): remove this.
290 webvr_texture_id_ = content_texture_id_; 309 webvr_texture_id_ = content_texture_id_;
291 // Out of paranoia, explicitly reset the "pose valid" flags to false 310 // Out of paranoia, explicitly reset the "pose valid" flags to false
292 // from the GL thread. The constructor ran in the UI thread. 311 // from the GL thread. The constructor ran in the UI thread.
293 // TODO(klausw,crbug.com/655722): remove this. 312 // TODO(klausw,crbug.com/655722): remove this.
294 webvr_head_pose_valid_.assign(kPoseRingBufferSize, false); 313 webvr_head_pose_valid_.assign(kPoseRingBufferSize, false);
295 314
(...skipping 54 matching lines...)
350 buffer_viewport_list_->GetBufferViewport(GVR_LEFT_EYE, 369 buffer_viewport_list_->GetBufferViewport(GVR_LEFT_EYE,
351 webvr_left_viewport_.get()); 370 webvr_left_viewport_.get());
352 webvr_left_viewport_->SetSourceBufferIndex(kFramePrimaryBuffer); 371 webvr_left_viewport_->SetSourceBufferIndex(kFramePrimaryBuffer);
353 372
354 webvr_right_viewport_.reset( 373 webvr_right_viewport_.reset(
355 new gvr::BufferViewport(gvr_api_->CreateBufferViewport())); 374 new gvr::BufferViewport(gvr_api_->CreateBufferViewport()));
356 buffer_viewport_list_->GetBufferViewport(GVR_RIGHT_EYE, 375 buffer_viewport_list_->GetBufferViewport(GVR_RIGHT_EYE,
357 webvr_right_viewport_.get()); 376 webvr_right_viewport_.get());
358 webvr_right_viewport_->SetSourceBufferIndex(kFramePrimaryBuffer); 377 webvr_right_viewport_->SetSourceBufferIndex(kFramePrimaryBuffer);
359 378
360 if (delegate_) { 379 main_thread_task_runner_->PostTask(FROM_HERE, base::Bind(
361 main_thread_task_runner_->PostTask( 380 &VrShell::GvrDelegateReady, weak_vr_shell_));
362 FROM_HERE, base::Bind(&device::GvrDeviceProvider::OnGvrDelegateReady,
363 delegate_->GetDeviceProvider(),
364 weak_ptr_factory_.GetWeakPtr()));
365 }
366 } 381 }
367 382
368 void VrShell::UpdateControllerOnGL(const gvr::Vec3f& forward_vector) { 383 void VrShellGl::UpdateController(const gvr::Vec3f& forward_vector) {
369 controller_->UpdateState(); 384 controller_->UpdateState();
370 385
371 #if defined(ENABLE_VR_SHELL) 386 #if defined(ENABLE_VR_SHELL)
387 // TODO(mthiesse): Fix menu button handling, which should be posted to the UI
388 // thread instead of handled here.
389
372 // Note that button up/down state is transient, so ButtonUpHappened only 390 // Note that button up/down state is transient, so ButtonUpHappened only
373 // returns 391 // returns true for a single frame (and we're guaranteed not to miss it).
374 // true for a single frame (and we're guaranteed not to miss it).
375 if (controller_->ButtonUpHappened( 392 if (controller_->ButtonUpHappened(
376 gvr::ControllerButton::GVR_CONTROLLER_BUTTON_APP)) { 393 gvr::ControllerButton::GVR_CONTROLLER_BUTTON_APP)) {
377 html_interface_->SetMenuMode(!html_interface_->GetMenuMode()); 394 // html_interface_->SetMenuMode(!html_interface_->GetMenuMode());
378 395
379 // TODO(mthiesse): The page is no longer visible when in menu mode. We 396 // TODO(mthiesse): The page is no longer visible when in menu mode. We
380 // should unfocus or otherwise let it know it's hidden. 397 // should unfocus or otherwise let it know it's hidden.
381 if (html_interface_->GetMode() == UiInterface::Mode::WEB_VR) { 398 // if (html_interface_->GetMode() == UiInterface::Mode::WEB_VR) {
382 const auto&& task = html_interface_->GetMenuMode() ? 399 // const auto&& task = html_interface_->GetMenuMode() ?
383 &device::GvrDeviceProvider::OnDisplayBlur : 400 // &device::GvrDeviceProvider::OnDisplayBlur :
384 &device::GvrDeviceProvider::OnDisplayFocus; 401 // &device::GvrDeviceProvider::OnDisplayFocus;
385 main_thread_task_runner_->PostTask( 402 // main_thread_task_runner_->PostTask(
386 FROM_HERE, base::Bind(task, delegate_->GetDeviceProvider())); 403 // FROM_HERE, base::Bind(task, delegate_->GetDeviceProvider()));
387 } 404 // }
388 } 405 }
389 #endif 406 #endif
390 if (html_interface_->GetMode() == UiInterface::Mode::WEB_VR) { 407 if (vr_shell_->GetUiInterface()->GetMode() == UiInterface::Mode::WEB_VR) {
391 // Process screen touch events for Cardboard button compatibility. 408 // Process screen touch events for Cardboard button compatibility.
392 // Also send tap events for controller "touchpad click" events. 409 // Also send tap events for controller "touchpad click" events.
393 if (touch_pending_ || 410 if (touch_pending_ || controller_->ButtonUpHappened(
394 controller_->ButtonUpHappened(
395 gvr::ControllerButton::GVR_CONTROLLER_BUTTON_CLICK)) { 411 gvr::ControllerButton::GVR_CONTROLLER_BUTTON_CLICK)) {
396 touch_pending_ = false; 412 touch_pending_ = false;
397 std::unique_ptr<WebGestureEvent> gesture(new WebGestureEvent()); 413 std::unique_ptr<WebGestureEvent> gesture(new WebGestureEvent());
398 gesture->sourceDevice = blink::WebGestureDeviceTouchpad; 414 gesture->sourceDevice = blink::WebGestureDeviceTouchpad;
399 gesture->timeStampSeconds = 415 gesture->timeStampSeconds =
400 (base::TimeTicks::Now() - base::TimeTicks()).InSecondsF(); 416 (base::TimeTicks::Now() - base::TimeTicks()).InSecondsF();
401 gesture->type = WebInputEvent::GestureTapDown; 417 gesture->type = WebInputEvent::GestureTapDown;
402 gesture->x = 0; 418 gesture->x = 0;
403 gesture->y = 0; 419 gesture->y = 0;
404 SendGestureOnGL(CONTENT, std::move(gesture)); 420 SendGesture(CONTENT, std::move(gesture));
405 } 421 }
406 422
407 return; 423 return;
408 } 424 }
409 425
410 gvr::Vec3f ergo_neutral_pose; 426 gvr::Vec3f ergo_neutral_pose;
411 if (!controller_->IsConnected()) { 427 if (!controller_->IsConnected()) {
412 // No controller detected, set up a gaze cursor that tracks the 428 // No controller detected, set up a gaze cursor that tracks the
413 // forward direction. 429 // forward direction.
414 ergo_neutral_pose = {0.0f, 0.0f, -1.0f}; 430 ergo_neutral_pose = {0.0f, 0.0f, -1.0f};
(...skipping 74 matching lines...)
489 plane->copy_rect.width, plane->copy_rect.height}; 505 plane->copy_rect.width, plane->copy_rect.height};
490 } 506 }
491 pixel_x = pixel_rect.width * x + pixel_rect.x; 507 pixel_x = pixel_rect.width * x + pixel_rect.x;
492 pixel_y = pixel_rect.height * y + pixel_rect.y; 508 pixel_y = pixel_rect.height * y + pixel_rect.y;
493 509
494 target_point_ = plane_intersection_point; 510 target_point_ = plane_intersection_point;
495 target_element_ = plane.get(); 511 target_element_ = plane.get();
496 input_target = plane->content_quad ? CONTENT : UI; 512 input_target = plane->content_quad ? CONTENT : UI;
497 } 513 }
498 } 514 }
499 SendEventsToTargetOnGL(input_target, pixel_x, pixel_y); 515 SendEventsToTarget(input_target, pixel_x, pixel_y);
500 } 516 }
501 517
502 void VrShell::SendEventsToTargetOnGL(InputTarget input_target, 518 void VrShellGl::SendEventsToTarget(InputTarget input_target,
503 int pixel_x, 519 int pixel_x,
504 int pixel_y) { 520 int pixel_y) {
505 std::vector<std::unique_ptr<WebGestureEvent>> gesture_list = 521 std::vector<std::unique_ptr<WebGestureEvent>> gesture_list =
506 controller_->DetectGestures(); 522 controller_->DetectGestures();
507 double timestamp = gesture_list.front()->timeStampSeconds; 523 double timestamp = gesture_list.front()->timeStampSeconds;
508 524
509 if (touch_pending_) { 525 if (touch_pending_) {
510 touch_pending_ = false; 526 touch_pending_ = false;
511 std::unique_ptr<WebGestureEvent> event(new WebGestureEvent()); 527 std::unique_ptr<WebGestureEvent> event(new WebGestureEvent());
512 event->type = WebInputEvent::GestureTapDown; 528 event->type = WebInputEvent::GestureTapDown;
513 event->sourceDevice = blink::WebGestureDeviceTouchpad; 529 event->sourceDevice = blink::WebGestureDeviceTouchpad;
514 event->timeStampSeconds = timestamp; 530 event->timeStampSeconds = timestamp;
515 event->x = pixel_x; 531 event->x = pixel_x;
516 event->y = pixel_y; 532 event->y = pixel_y;
517 gesture_list.push_back(std::move(event)); 533 gesture_list.push_back(std::move(event));
518 } 534 }
519 535
520 for (const auto& gesture : gesture_list) { 536 for (const auto& gesture : gesture_list) {
521 switch (gesture->type) { 537 switch (gesture->type) {
522 case WebInputEvent::GestureScrollBegin: 538 case WebInputEvent::GestureScrollBegin:
523 case WebInputEvent::GestureScrollUpdate: 539 case WebInputEvent::GestureScrollUpdate:
524 case WebInputEvent::GestureScrollEnd: 540 case WebInputEvent::GestureScrollEnd:
525 case WebInputEvent::GestureFlingCancel: 541 case WebInputEvent::GestureFlingCancel:
526 case WebInputEvent::GestureFlingStart: 542 case WebInputEvent::GestureFlingStart:
527 SendGestureOnGL(CONTENT, 543 SendGesture(CONTENT, base::WrapUnique(new WebGestureEvent(*gesture)));
528 base::WrapUnique(new WebGestureEvent(*gesture)));
529 break; 544 break;
530 case WebInputEvent::GestureTapDown: 545 case WebInputEvent::GestureTapDown:
531 gesture->x = pixel_x; 546 gesture->x = pixel_x;
532 gesture->y = pixel_y; 547 gesture->y = pixel_y;
533 if (input_target != NONE) 548 if (input_target != NONE)
534 SendGestureOnGL(input_target, 549 SendGesture(input_target,
535 base::WrapUnique(new WebGestureEvent(*gesture))); 550 base::WrapUnique(new WebGestureEvent(*gesture)));
536 break; 551 break;
537 case WebInputEvent::Undefined: 552 case WebInputEvent::Undefined:
538 break; 553 break;
539 default: 554 default:
540 NOTREACHED(); 555 NOTREACHED();
541 } 556 }
542 } 557 }
543 558
544 // Hover support 559 // Hover support
545 bool new_target = input_target != current_input_target_; 560 bool new_target = input_target != current_input_target_;
546 if (new_target && current_input_target_ != NONE) { 561 if (new_target && current_input_target_ != NONE) {
547 // Send a move event indicating that the pointer moved off of an element. 562 // Send a move event indicating that the pointer moved off of an element.
548 SendGestureOnGL(current_input_target_, 563 SendGesture(current_input_target_,
549 MakeMouseEvent(WebInputEvent::MouseLeave, timestamp, 0, 0)); 564 MakeMouseEvent(WebInputEvent::MouseLeave, timestamp, 0, 0));
550 } 565 }
551
552 current_input_target_ = input_target; 566 current_input_target_ = input_target;
553 if (current_input_target_ != NONE) { 567 if (current_input_target_ != NONE) {
554 WebInputEvent::Type type = 568 WebInputEvent::Type type =
555 new_target ? WebInputEvent::MouseEnter : WebInputEvent::MouseMove; 569 new_target ? WebInputEvent::MouseEnter : WebInputEvent::MouseMove;
556 SendGestureOnGL(input_target, 570 SendGesture(input_target,
557 MakeMouseEvent(type, timestamp, pixel_x, pixel_y)); 571 MakeMouseEvent(type, timestamp, pixel_x, pixel_y));
558 } 572 }
559 } 573 }
560 574
561 void VrShell::SendGestureOnGL(InputTarget input_target, 575 void VrShellGl::SendGesture(InputTarget input_target,
562 std::unique_ptr<blink::WebInputEvent> event) { 576 std::unique_ptr<blink::WebInputEvent> event) {
563 DCHECK(input_target != NONE); 577 DCHECK(input_target != NONE);
564 const base::WeakPtr<VrInputManager>& weak_ptr = 578 const base::WeakPtr<VrInputManager>& weak_ptr =
565 input_target == CONTENT ? weak_content_input_manager_ 579 input_target == CONTENT ? content_input_manager_: ui_input_manager_;
566 : weak_ui_input_manager_;
567 main_thread_task_runner_->PostTask( 580 main_thread_task_runner_->PostTask(
568 FROM_HERE, 581 FROM_HERE,
569 base::Bind(&VrInputManager::ProcessUpdatedGesture, weak_ptr, 582 base::Bind(&VrInputManager::ProcessUpdatedGesture, weak_ptr,
570 base::Passed(std::move(event)))); 583 base::Passed(std::move(event))));
571 } 584 }
572 585
573 void VrShell::SetGvrPoseForWebVr(const gvr::Mat4f& pose, uint32_t pose_num) { 586 void VrShellGl::SetGvrPoseForWebVr(const gvr::Mat4f& pose, uint32_t pose_num) {
574 webvr_head_pose_[pose_num % kPoseRingBufferSize] = pose; 587 webvr_head_pose_[pose_num % kPoseRingBufferSize] = pose;
575 webvr_head_pose_valid_[pose_num % kPoseRingBufferSize] = true; 588 webvr_head_pose_valid_[pose_num % kPoseRingBufferSize] = true;
576 } 589 }
577 590
578 int GetPixelEncodedPoseIndexByte() { 591 bool VrShellGl::WebVrPoseByteIsValid(int pose_index_byte) {
579 TRACE_EVENT0("gpu", "VrShell::GetPixelEncodedPoseIndex");
580 // Read the pose index encoded in a bottom left pixel as color values.
581 // See also third_party/WebKit/Source/modules/vr/VRDisplay.cpp which
582 // encodes the pose index, and device/vr/android/gvr/gvr_device.cc
583 // which tracks poses. Returns the low byte (0..255) if valid, or -1
584 // if not valid due to bad magic number.
585 uint8_t pixels[4];
586 // Assume we're reading from the framebuffer we just wrote to.
587 // That's true currently, we may need to use glReadBuffer(GL_BACK)
588 // or equivalent if the rendering setup changes in the future.
589 glReadPixels(0, 0, 1, 1, GL_RGBA, GL_UNSIGNED_BYTE, pixels);
590
591 // Check for the magic number written by VRDevice.cpp on submit.
592 // This helps avoid glitches from garbage data in the render
593 // buffer that can appear during initialization or resizing. These
594 // often appear as flashes of all-black or all-white pixels.
595 if (pixels[1] == kWebVrPosePixelMagicNumbers[0] &&
596 pixels[2] == kWebVrPosePixelMagicNumbers[1]) {
597 // Pose is good.
598 return pixels[0];
599 }
600 VLOG(1) << "WebVR: reject decoded pose index " << (int)pixels[0] <<
601 ", bad magic number " << (int)pixels[1] << ", " << (int)pixels[2];
602 return -1;
603 }
604
605 bool VrShell::WebVrPoseByteIsValidOnGL(int pose_index_byte) {
606 if (pose_index_byte < 0) { 592 if (pose_index_byte < 0) {
607 return false; 593 return false;
608 } 594 }
609 if (!webvr_head_pose_valid_[pose_index_byte % kPoseRingBufferSize]) { 595 if (!webvr_head_pose_valid_[pose_index_byte % kPoseRingBufferSize]) {
610 VLOG(1) << "WebVR: reject decoded pose index " << pose_index_byte << 596 VLOG(1) << "WebVR: reject decoded pose index " << pose_index_byte <<
611 ", not a valid pose"; 597 ", not a valid pose";
612 return false; 598 return false;
613 } 599 }
614 return true; 600 return true;
615 } 601 }
616 602
617 void VrShell::DrawFrameOnGL(JNIEnv* env, const JavaParamRef<jobject>& obj) { 603 void VrShellGl::DrawFrame() {
618 TRACE_EVENT0("gpu", "VrShell::DrawFrame"); 604 TRACE_EVENT0("gpu", "VrShellGl::DrawFrame");
619 // Reset the viewport list to just the pair of viewports for the 605 // Reset the viewport list to just the pair of viewports for the
620 // primary buffer each frame. Head-locked viewports get added by 606 // primary buffer each frame. Head-locked viewports get added by
621 // DrawVrShell if needed. 607 // DrawVrShell if needed.
622 buffer_viewport_list_->SetToRecommendedBufferViewports(); 608 buffer_viewport_list_->SetToRecommendedBufferViewports();
623 609
624 if (html_interface_->GetMode() == UiInterface::Mode::WEB_VR) { 610 // TODO(klausw): Fix this. Resizing buffers here leads to webVR mode showing
625 // If needed, resize the primary buffer for use with WebVR. 611 // nothing but a black screen.
626 if (render_size_primary_ != render_size_primary_webvr_) { 612 // if (vr_shell_->GetUiInterface()->GetMode() == UiInterface::Mode::WEB_VR) {
627 if (!render_size_primary_webvr_.width) { 613 // // If needed, resize the primary buffer for use with WebVR.
628 VLOG(2) << "WebVR rendering size not known yet, dropping frame"; 614 // if (render_size_primary_ != render_size_primary_webvr_) {
629 return; 615 // if (!render_size_primary_webvr_.width) {
630 } 616 // VLOG(2) << "WebVR rendering size not known yet, dropping frame";
631 render_size_primary_ = render_size_primary_webvr_; 617 // return;
632 swap_chain_->ResizeBuffer(kFramePrimaryBuffer, render_size_primary_); 618 // }
633 } 619 // render_size_primary_ = render_size_primary_webvr_;
634 } else { 620 // swap_chain_->ResizeBuffer(kFramePrimaryBuffer, render_size_primary_);
635 if (render_size_primary_ != render_size_primary_vrshell_) { 621 // }
636 render_size_primary_ = render_size_primary_vrshell_; 622 // } else {
637 swap_chain_->ResizeBuffer(kFramePrimaryBuffer, render_size_primary_); 623 // if (render_size_primary_ != render_size_primary_vrshell_) {
638 } 624 // render_size_primary_ = render_size_primary_vrshell_;
639 } 625 // swap_chain_->ResizeBuffer(kFramePrimaryBuffer, render_size_primary_);
626 // }
627 // }
640 628
641 gvr::Frame frame = swap_chain_->AcquireFrame(); 629 gvr::Frame frame = swap_chain_->AcquireFrame();
642 gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow(); 630 gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow();
643 target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos; 631 target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos;
644 632
645 gvr::Mat4f head_pose = 633 gvr::Mat4f head_pose =
646 gvr_api_->GetHeadSpaceFromStartSpaceRotation(target_time); 634 gvr_api_->GetHeadSpaceFromStartSpaceRotation(target_time);
647 635
648 gvr::Vec3f position = GetTranslation(head_pose); 636 gvr::Vec3f position = GetTranslation(head_pose);
649 if (position.x == 0.0f && position.y == 0.0f && position.z == 0.0f) { 637 if (position.x == 0.0f && position.y == 0.0f && position.z == 0.0f) {
650 // This appears to be a 3DOF pose without a neck model. Add one. 638 // This appears to be a 3DOF pose without a neck model. Add one.
651 // The head pose has redundant data. Assume we're only using the 639 // The head pose has redundant data. Assume we're only using the
652 // object_from_reference_matrix, we're not updating position_external. 640 // object_from_reference_matrix, we're not updating position_external.
653 // TODO: Not sure what object_from_reference_matrix is. The new api removed 641 // TODO: Not sure what object_from_reference_matrix is. The new api removed
654 // it. For now, removing it seems working fine. 642 // it. For now, removing it seems working fine.
655 gvr_api_->ApplyNeckModel(head_pose, 1.0f); 643 gvr_api_->ApplyNeckModel(head_pose, 1.0f);
656 } 644 }
657 645
658 // Bind the primary framebuffer.
659 frame.BindBuffer(kFramePrimaryBuffer); 646 frame.BindBuffer(kFramePrimaryBuffer);
660 647
661 HandleQueuedTasksOnGL();
662
663 // Update the render position of all UI elements (including desktop). 648 // Update the render position of all UI elements (including desktop).
664 const float screen_tilt = kDesktopScreenTiltDefault * M_PI / 180.0f; 649 const float screen_tilt = kDesktopScreenTiltDefault * M_PI / 180.0f;
665 scene_->UpdateTransforms(screen_tilt, UiScene::TimeInMicroseconds()); 650 scene_->UpdateTransforms(screen_tilt, UiScene::TimeInMicroseconds());
666 651
667 UpdateControllerOnGL(GetForwardVector(head_pose)); 652 UpdateController(GetForwardVector(head_pose));
668 653
669 if (html_interface_->GetMode() == UiInterface::Mode::WEB_VR) { 654 if (vr_shell_->GetUiInterface()->GetMode() == UiInterface::Mode::WEB_VR) {
670 DrawWebVrOnGL(); 655 DrawWebVr();
671 656
672 // When using async reprojection, we need to know which pose was used in 657 // When using async reprojection, we need to know which pose was used in
673 // the WebVR app for drawing this frame. Due to unknown amounts of 658 // the WebVR app for drawing this frame. Due to unknown amounts of
674 // buffering in the compositor and SurfaceTexture, we read the pose number 659 // buffering in the compositor and SurfaceTexture, we read the pose number
675 // from a corner pixel. There's no point in doing this for legacy 660 // from a corner pixel. There's no point in doing this for legacy
676 // distortion rendering since that doesn't need a pose, and reading back 661 // distortion rendering since that doesn't need a pose, and reading back
677 // pixels is an expensive operation. TODO(klausw,crbug.com/655722): stop 662 // pixels is an expensive operation. TODO(klausw,crbug.com/655722): stop
678 // doing this once we have working no-compositor rendering for WebVR. 663 // doing this once we have working no-compositor rendering for WebVR.
679 if (gvr_api_->GetAsyncReprojectionEnabled()) { 664 if (gvr_api_->GetAsyncReprojectionEnabled()) {
680 int pose_index_byte = GetPixelEncodedPoseIndexByte(); 665 int pose_index_byte = GetPixelEncodedPoseIndexByte();
681 if (WebVrPoseByteIsValidOnGL(pose_index_byte)) { 666 if (WebVrPoseByteIsValid(pose_index_byte)) {
682 // We have a valid pose, use it for reprojection. 667 // We have a valid pose, use it for reprojection.
683 webvr_left_viewport_->SetReprojection(GVR_REPROJECTION_FULL); 668 webvr_left_viewport_->SetReprojection(GVR_REPROJECTION_FULL);
684 webvr_right_viewport_->SetReprojection(GVR_REPROJECTION_FULL); 669 webvr_right_viewport_->SetReprojection(GVR_REPROJECTION_FULL);
685 head_pose = webvr_head_pose_[pose_index_byte % kPoseRingBufferSize]; 670 head_pose = webvr_head_pose_[pose_index_byte % kPoseRingBufferSize];
686 // We can't mark the used pose as invalid since unfortunately 671 // We can't mark the used pose as invalid since unfortunately
687 // we have to reuse them. The compositor will re-submit stale 672 // we have to reuse them. The compositor will re-submit stale
688 // frames on vsync, and we can't tell that this has happened 673 // frames on vsync, and we can't tell that this has happened
689 // until we've read the pose index from it, and at that point 674 // until we've read the pose index from it, and at that point
690 // it's too late to skip rendering. 675 // it's too late to skip rendering.
691 } else { 676 } else {
692 // If we don't get a valid frame ID back we shouldn't attempt 677 // If we don't get a valid frame ID back we shouldn't attempt
693 // to reproject by an invalid matrix, so turn off reprojection 678 // to reproject by an invalid matrix, so turn off reprojection
694 // instead. Invalid poses can permanently break reprojection 679 // instead. Invalid poses can permanently break reprojection
695 // for this GVR instance: http://crbug.com/667327 680 // for this GVR instance: http://crbug.com/667327
696 webvr_left_viewport_->SetReprojection(GVR_REPROJECTION_NONE); 681 webvr_left_viewport_->SetReprojection(GVR_REPROJECTION_NONE);
697 webvr_right_viewport_->SetReprojection(GVR_REPROJECTION_NONE); 682 webvr_right_viewport_->SetReprojection(GVR_REPROJECTION_NONE);
698 } 683 }
699 } 684 }
700 } 685 }
701 686
702 DrawVrShellOnGL(head_pose, frame); 687 DrawVrShell(head_pose, frame);
703 688
704 frame.Unbind(); 689 frame.Unbind();
705 frame.Submit(*buffer_viewport_list_, head_pose); 690 frame.Submit(*buffer_viewport_list_, head_pose);
691
692 // No need to SwapBuffers for an offscreen surface.
693 ScheduleNextDrawFrame();
706 } 694 }
707 695
708 void VrShell::DrawVrShellOnGL(const gvr::Mat4f& head_pose, 696 void VrShellGl::DrawVrShell(const gvr::Mat4f& head_pose,
709 gvr::Frame &frame) { 697 gvr::Frame &frame) {
710 TRACE_EVENT0("gpu", "VrShell::DrawVrShell"); 698 TRACE_EVENT0("gpu", "VrShellGl::DrawVrShell");
711 std::vector<const ContentRectangle*> head_locked_elements; 699 std::vector<const ContentRectangle*> head_locked_elements;
712 std::vector<const ContentRectangle*> world_elements; 700 std::vector<const ContentRectangle*> world_elements;
713 for (const auto& rect : scene_->GetUiElements()) { 701 for (const auto& rect : scene_->GetUiElements()) {
714 if (!rect->visible) { 702 if (!rect->visible) {
715 continue; 703 continue;
716 } 704 }
717 if (rect->lock_to_fov) { 705 if (rect->lock_to_fov) {
718 head_locked_elements.push_back(rect.get()); 706 head_locked_elements.push_back(rect.get());
719 } else { 707 } else {
720 world_elements.push_back(rect.get()); 708 world_elements.push_back(rect.get());
721 } 709 }
722 } 710 }
723 711
724 if (html_interface_->GetMode() == UiInterface::Mode::WEB_VR) { 712 if (vr_shell_->GetUiInterface()->GetMode() == UiInterface::Mode::WEB_VR) {
725 // WebVR is incompatible with 3D world compositing since the 713 // WebVR is incompatible with 3D world compositing since the
726 // depth buffer was already populated with unknown scaling - the 714 // depth buffer was already populated with unknown scaling - the
727 // WebVR app has full control over zNear/zFar. Just leave the 715 // WebVR app has full control over zNear/zFar. Just leave the
728 // existing content in place in the primary buffer without 716 // existing content in place in the primary buffer without
729 // clearing. Currently, there aren't any world elements in WebVR 717 // clearing. Currently, there aren't any world elements in WebVR
730 // mode, this will need further testing if those get added 718 // mode, this will need further testing if those get added
731 // later. 719 // later.
732 } else { 720 } else {
733 // Non-WebVR mode, enable depth testing and clear the primary buffers. 721 // Non-WebVR mode, enable depth testing and clear the primary buffers.
734 glEnable(GL_CULL_FACE); 722 glEnable(GL_CULL_FACE);
735 glEnable(GL_DEPTH_TEST); 723 glEnable(GL_DEPTH_TEST);
736 glDepthMask(GL_TRUE); 724 glDepthMask(GL_TRUE);
737 725
738 glClearColor(0.1f, 0.1f, 0.1f, 1.0f); 726 glClearColor(0.1f, 0.1f, 0.1f, 1.0f);
739 glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); 727 glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
740 } 728 }
741
742 if (!world_elements.empty()) { 729 if (!world_elements.empty()) {
743 DrawUiViewOnGL(&head_pose, world_elements, render_size_primary_, 730 DrawUiView(&head_pose, world_elements, render_size_primary_,
744 kViewportListPrimaryOffset); 731 kViewportListPrimaryOffset);
745 } 732 }
746 733
747 if (!head_locked_elements.empty()) { 734 if (!head_locked_elements.empty()) {
748 // Add head-locked viewports. The list gets reset to just 735 // Add head-locked viewports. The list gets reset to just
749 // the recommended viewports (for the primary buffer) each frame. 736 // the recommended viewports (for the primary buffer) each frame.
750 buffer_viewport_list_->SetBufferViewport( 737 buffer_viewport_list_->SetBufferViewport(
751 kViewportListHeadlockedOffset + GVR_LEFT_EYE, 738 kViewportListHeadlockedOffset + GVR_LEFT_EYE,
752 *headlocked_left_viewport_); 739 *headlocked_left_viewport_);
753 buffer_viewport_list_->SetBufferViewport( 740 buffer_viewport_list_->SetBufferViewport(
754 kViewportListHeadlockedOffset + GVR_RIGHT_EYE, 741 kViewportListHeadlockedOffset + GVR_RIGHT_EYE,
755 *headlocked_right_viewport_); 742 *headlocked_right_viewport_);
756 743
757 // Bind the headlocked framebuffer. 744 // Bind the headlocked framebuffer.
745 // TODO(mthiesse): We don't unbind this? Maybe some cleanup is in order
746 // here.
758 frame.BindBuffer(kFrameHeadlockedBuffer); 747 frame.BindBuffer(kFrameHeadlockedBuffer);
759 glClearColor(0.0f, 0.0f, 0.0f, 0.0f); 748 glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
760 glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); 749 glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
761 DrawUiViewOnGL(nullptr, head_locked_elements, render_size_headlocked_, 750 DrawUiView(nullptr, head_locked_elements, render_size_headlocked_,
762 kViewportListHeadlockedOffset); 751 kViewportListHeadlockedOffset);
763 } 752 }
764 } 753 }
765 754
766 void VrShell::SetWebVRRenderSurfaceSize(int width, int height) { 755 void VrShellGl::SetWebVRRenderSurfaceSize(int width, int height) {
767 render_size_primary_webvr_.width = width; 756 render_size_primary_webvr_.width = width;
768 render_size_primary_webvr_.height = height; 757 render_size_primary_webvr_.height = height;
769 // TODO(klausw,crbug.com/655722): set the WebVR render surface size here once 758 // TODO(klausw,crbug.com/655722): set the WebVR render surface size here once
770 // we have that. 759 // we have that.
771 } 760 }
772 761
773 gvr::Sizei VrShell::GetWebVRCompositorSurfaceSize() { 762 gvr::Sizei VrShellGl::GetWebVRCompositorSurfaceSize() {
774 // This is a stopgap while we're using the WebVR compositor rendering path. 763 // This is a stopgap while we're using the WebVR compositor rendering path.
775 // TODO(klausw,crbug.com/655722): Remove this method and member once we're 764 // TODO(klausw,crbug.com/655722): Remove this method and member once we're
776 // using a separate WebVR render surface. 765 // using a separate WebVR render surface.
777 return content_tex_physical_size_; 766 return content_tex_physical_size_;
778 } 767 }
779 768
780 769 void VrShellGl::DrawUiView(const gvr::Mat4f* head_pose,
781 void VrShell::DrawUiViewOnGL(const gvr::Mat4f* head_pose, 770 const std::vector<const ContentRectangle*>& elements,
782 const std::vector<const ContentRectangle*>& elements, 771 const gvr::Sizei& render_size,
783 const gvr::Sizei& render_size, int viewport_offset) { 772 int viewport_offset) {
784 TRACE_EVENT0("gpu", "VrShell::DrawUiView"); 773 TRACE_EVENT0("gpu", "VrShellGl::DrawUiView");
785 for (auto eye : {GVR_LEFT_EYE, GVR_RIGHT_EYE}) { 774 for (auto eye : {GVR_LEFT_EYE, GVR_RIGHT_EYE}) {
786 buffer_viewport_list_->GetBufferViewport( 775 buffer_viewport_list_->GetBufferViewport(
787 eye + viewport_offset, buffer_viewport_.get()); 776 eye + viewport_offset, buffer_viewport_.get());
788 777
789 gvr::Mat4f view_matrix = gvr_api_->GetEyeFromHeadMatrix(eye); 778 gvr::Mat4f view_matrix = gvr_api_->GetEyeFromHeadMatrix(eye);
790 if (head_pose != nullptr) { 779 if (head_pose != nullptr) {
791 view_matrix = MatrixMul(view_matrix, *head_pose); 780 view_matrix = MatrixMul(view_matrix, *head_pose);
792 } 781 }
793 782
794 gvr::Recti pixel_rect = 783 gvr::Recti pixel_rect =
795 CalculatePixelSpaceRect(render_size, buffer_viewport_->GetSourceUv()); 784 CalculatePixelSpaceRect(render_size, buffer_viewport_->GetSourceUv());
796 glViewport(pixel_rect.left, pixel_rect.bottom, 785 glViewport(pixel_rect.left, pixel_rect.bottom,
797 pixel_rect.right - pixel_rect.left, 786 pixel_rect.right - pixel_rect.left,
798 pixel_rect.top - pixel_rect.bottom); 787 pixel_rect.top - pixel_rect.bottom);
799 788
800 const gvr::Mat4f render_matrix = MatrixMul( 789 const gvr::Mat4f render_matrix = MatrixMul(
801 PerspectiveMatrixFromView( 790 PerspectiveMatrixFromView(
802 buffer_viewport_->GetSourceFov(), kZNear, kZFar), 791 buffer_viewport_->GetSourceFov(), kZNear, kZFar),
803 view_matrix); 792 view_matrix);
804 793
805 DrawElementsOnGL(render_matrix, elements); 794 DrawElements(render_matrix, elements);
806 if (head_pose != nullptr && 795 if (head_pose != nullptr &&
807 html_interface_->GetMode() != UiInterface::Mode::WEB_VR) { 796 vr_shell_->GetUiInterface()->GetMode() != UiInterface::Mode::WEB_VR) {
808 DrawCursorOnGL(render_matrix); 797 DrawCursor(render_matrix);
809 } 798 }
810 } 799 }
811 } 800 }
812 801
813 void VrShell::DrawElementsOnGL( 802 void VrShellGl::DrawElements(
814 const gvr::Mat4f& render_matrix, 803 const gvr::Mat4f& render_matrix,
815 const std::vector<const ContentRectangle*>& elements) { 804 const std::vector<const ContentRectangle*>& elements) {
816 for (const auto& rect : elements) { 805 for (const auto& rect : elements) {
817 Rectf copy_rect; 806 Rectf copy_rect;
818 jint texture_handle; 807 jint texture_handle;
819 if (rect->content_quad) { 808 if (rect->content_quad) {
820 copy_rect = {0, 0, 1, 1}; 809 copy_rect = {0, 0, 1, 1};
821 texture_handle = content_texture_id_; 810 texture_handle = content_texture_id_;
822 } else { 811 } else {
823 copy_rect.x = static_cast<float>(rect->copy_rect.x) / ui_tex_css_width_; 812 copy_rect.x = static_cast<float>(rect->copy_rect.x) / ui_tex_css_width_;
824 copy_rect.y = static_cast<float>(rect->copy_rect.y) / ui_tex_css_height_; 813 copy_rect.y = static_cast<float>(rect->copy_rect.y) / ui_tex_css_height_;
825 copy_rect.width = static_cast<float>(rect->copy_rect.width) / 814 copy_rect.width = static_cast<float>(rect->copy_rect.width) /
826 ui_tex_css_width_; 815 ui_tex_css_width_;
827 copy_rect.height = static_cast<float>(rect->copy_rect.height) / 816 copy_rect.height = static_cast<float>(rect->copy_rect.height) /
828 ui_tex_css_height_; 817 ui_tex_css_height_;
829 texture_handle = ui_texture_id_; 818 texture_handle = ui_texture_id_;
830 } 819 }
831 gvr::Mat4f transform = MatrixMul(render_matrix, rect->transform.to_world); 820 gvr::Mat4f transform = MatrixMul(render_matrix, rect->transform.to_world);
832 vr_shell_renderer_->GetTexturedQuadRenderer()->Draw( 821 vr_shell_renderer_->GetTexturedQuadRenderer()->Draw(
833 texture_handle, transform, copy_rect); 822 texture_handle, transform, copy_rect);
834 } 823 }
835 } 824 }
836 825
837 void VrShell::DrawCursorOnGL(const gvr::Mat4f& render_matrix) { 826 void VrShellGl::DrawCursor(const gvr::Mat4f& render_matrix) {
838 gvr::Mat4f mat; 827 gvr::Mat4f mat;
839 SetIdentityM(mat); 828 SetIdentityM(mat);
840 829
841 // Draw the reticle. 830 // Draw the reticle.
842 831
843 // Scale the pointer to have a fixed FOV size at any distance. 832 // Scale the pointer to have a fixed FOV size at any distance.
844 const float eye_to_target = Distance(target_point_, kOrigin); 833 const float eye_to_target = Distance(target_point_, kOrigin);
845 ScaleM(mat, mat, kReticleWidth * eye_to_target, 834 ScaleM(mat, mat, kReticleWidth * eye_to_target,
846 kReticleHeight * eye_to_target, 1.0f); 835 kReticleHeight * eye_to_target, 1.0f);
847 836
(...skipping 52 matching lines...)
900 889
901 // Move the beam origin to the hand. 890 // Move the beam origin to the hand.
902 TranslateM(face_transform, face_transform, kHandPosition.x, kHandPosition.y, 891 TranslateM(face_transform, face_transform, kHandPosition.x, kHandPosition.y,
903 kHandPosition.z); 892 kHandPosition.z);
904 893
905 transform = MatrixMul(render_matrix, face_transform); 894 transform = MatrixMul(render_matrix, face_transform);
906 vr_shell_renderer_->GetLaserRenderer()->Draw(transform); 895 vr_shell_renderer_->GetLaserRenderer()->Draw(transform);
907 } 896 }
908 } 897 }
909 898
910 void VrShell::DrawWebVrOnGL() { 899 void VrShellGl::DrawWebVr() {
911 TRACE_EVENT0("gpu", "VrShell::DrawWebVr"); 900 TRACE_EVENT0("gpu", "VrShellGl::DrawWebVr");
912 // Don't need face culling, depth testing, blending, etc. Turn it all off. 901 // Don't need face culling, depth testing, blending, etc. Turn it all off.
913 glDisable(GL_CULL_FACE); 902 glDisable(GL_CULL_FACE);
914 glDepthMask(GL_FALSE); 903 glDepthMask(GL_FALSE);
915 glDisable(GL_DEPTH_TEST); 904 glDisable(GL_DEPTH_TEST);
916 glDisable(GL_SCISSOR_TEST); 905 glDisable(GL_SCISSOR_TEST);
917 glDisable(GL_BLEND); 906 glDisable(GL_BLEND);
918 glDisable(GL_POLYGON_OFFSET_FILL); 907 glDisable(GL_POLYGON_OFFSET_FILL);
919 908
920 glViewport(0, 0, render_size_primary_.width, render_size_primary_.height); 909 glViewport(0, 0, render_size_primary_.width, render_size_primary_.height);
921 vr_shell_renderer_->GetWebVrRenderer()->Draw(webvr_texture_id_); 910 vr_shell_renderer_->GetWebVrRenderer()->Draw(webvr_texture_id_);
922 911
923 buffer_viewport_list_->SetBufferViewport(GVR_LEFT_EYE, 912 buffer_viewport_list_->SetBufferViewport(GVR_LEFT_EYE,
924 *webvr_left_viewport_); 913 *webvr_left_viewport_);
925 buffer_viewport_list_->SetBufferViewport(GVR_RIGHT_EYE, 914 buffer_viewport_list_->SetBufferViewport(GVR_RIGHT_EYE,
926 *webvr_right_viewport_); 915 *webvr_right_viewport_);
927 } 916 }
928 917
929 void VrShell::OnTriggerEventOnUI(JNIEnv* env, 918 void VrShellGl::OnTriggerEvent() {
930 const JavaParamRef<jobject>& obj) {
931 // Set a flag to handle this on the render thread at the next frame. 919 // Set a flag to handle this on the render thread at the next frame.
932 touch_pending_ = true; 920 touch_pending_ = true;
933 } 921 }
934 922
935 void VrShell::OnPauseOnUI(JNIEnv* env, const JavaParamRef<jobject>& obj) { 923 void VrShellGl::OnPause() {
936 if (gvr_api_ == nullptr)
937 return;
938
939 // TODO(mthiesse): Clean up threading here.
940 controller_->OnPause(); 924 controller_->OnPause();
941 gvr_api_->PauseTracking(); 925 gvr_api_->PauseTracking();
bshe 2016/12/09 19:36:25 Do you need to pause DrawFrame too?
mthiesse 2016/12/09 20:23:18 Good question, I'm not sure. To be on the safe side...
942 SetShowingOverscrollGlowOnUI(true);
943
944 // exit vr session
945 metrics_helper_->SetVRActive(false);
946 } 926 }
947 927
948 void VrShell::OnResumeOnUI(JNIEnv* env, const JavaParamRef<jobject>& obj) { 928 void VrShellGl::OnResume() {
949 if (gvr_api_ == nullptr)
950 return;
951
952 // TODO(mthiesse): Clean up threading here.
953 gvr_api_->RefreshViewerProfile(); 929 gvr_api_->RefreshViewerProfile();
954 gvr_api_->ResumeTracking(); 930 gvr_api_->ResumeTracking();
955 controller_->OnResume(); 931 controller_->OnResume();
956 SetShowingOverscrollGlowOnUI(false);
957
958 // exit vr session
959 metrics_helper_->SetVRActive(true);
960 } 932 }
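A minimal sketch (not part of this patch) of how the draw loop itself could be paused and resumed, as raised in the review comment on OnPause above. It assumes draw_task_ is the base::CancelableClosure that ScheduleNextDrawFrame posts to task_runner_; the pause/resume wiring shown here is hypothetical, not what the patch does.

  // Hypothetical: drop any pending DrawFrame task while paused.
  void VrShellGl::OnPause() {
    draw_task_.Cancel();
    controller_->OnPause();
    gvr_api_->PauseTracking();
  }

  // Hypothetical: rebind the cancelable closure and restart the loop on resume.
  void VrShellGl::OnResume() {
    gvr_api_->RefreshViewerProfile();
    gvr_api_->ResumeTracking();
    controller_->OnResume();
    draw_task_.Reset(base::Bind(&VrShellGl::DrawFrame, base::Unretained(this)));
    ScheduleNextDrawFrame();
  }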
961 933
962 void VrShell::SetShowingOverscrollGlowOnUI(bool showing_glow) { 934 void VrShellGl::SetWebVrMode(bool enabled) {
963 main_contents_->GetRenderWidgetHostView()->SetShowingOverscrollGlow(
964 showing_glow);
965 }
966
967 base::WeakPtr<VrShell> VrShell::GetWeakPtrOnUI(
968 const content::WebContents* web_contents) {
969 // Ensure that the WebContents requesting the VrShell instance is the one
970 // we created.
971 if (g_instance != nullptr && g_instance->ui_contents_ == web_contents)
972 return g_instance->weak_ptr_factory_.GetWeakPtr();
973 return base::WeakPtr<VrShell>(nullptr);
974 }
975
976 void VrShell::OnDomContentsLoadedOnUI() {
977 html_interface_->SetURL(main_contents_->GetVisibleURL());
978 html_interface_->SetLoading(main_contents_->IsLoading());
979 html_interface_->OnDomContentsLoaded();
980 }
981
982 void VrShell::SetWebVrModeOnUI(JNIEnv* env,
983 const base::android::JavaParamRef<jobject>& obj,
984 bool enabled) {
985 metrics_helper_->SetWebVREnabled(enabled);
986 if (enabled) { 935 if (enabled) {
987 html_interface_->SetMode(UiInterface::Mode::WEB_VR); 936 vr_shell_->GetUiInterface()->SetMode(UiInterface::Mode::WEB_VR);
988 } else { 937 } else {
989 html_interface_->SetMode(UiInterface::Mode::STANDARD); 938 vr_shell_->GetUiInterface()->SetMode(UiInterface::Mode::STANDARD);
990 } 939 }
991 } 940 }
992 941
993 void VrShell::SetWebVRSecureOrigin(bool secure_origin) { 942 void VrShellGl::UpdateWebVRTextureBounds(const gvr::Rectf& left_bounds,
994 // TODO(cjgrant): Align this state with the logic that drives the omnibox. 943 const gvr::Rectf& right_bounds) {
995 html_interface_->SetWebVRSecureOrigin(secure_origin);
996 }
997
998 void VrShell::SubmitWebVRFrame() {}
999
1000 void VrShell::UpdateWebVRTextureBounds(const gvr::Rectf& left_bounds,
1001 const gvr::Rectf& right_bounds) {
1002 webvr_left_viewport_->SetSourceUv(left_bounds); 944 webvr_left_viewport_->SetSourceUv(left_bounds);
1003 webvr_right_viewport_->SetSourceUv(right_bounds); 945 webvr_right_viewport_->SetSourceUv(right_bounds);
1004 } 946 }
1005 947
1006 gvr::GvrApi* VrShell::gvr_api() { 948 gvr::GvrApi* VrShellGl::gvr_api() {
1007 return gvr_api_.get(); 949 return gvr_api_.get();
1008 } 950 }
1009 951
1010 void VrShell::SurfacesChangedOnUI(JNIEnv* env, 952 void VrShellGl::ContentBoundsChanged(int width, int height) {
1011 const JavaParamRef<jobject>& object, 953 TRACE_EVENT0("gpu", "VrShellGl::ContentBoundsChanged");
1012 const JavaParamRef<jobject>& content_surface, 954 content_tex_css_width_ = width;
1013 const JavaParamRef<jobject>& ui_surface) { 955 content_tex_css_height_ = height;
1014 content_compositor_->SurfaceChanged(content_surface);
1015 ui_compositor_->SurfaceChanged(ui_surface);
1016 } 956 }
1017 957
1018 void VrShell::ContentBoundsChangedOnUI(JNIEnv* env, 958 void VrShellGl::ContentPhysicalBoundsChanged(int width, int height) {
1019 const JavaParamRef<jobject>& object, 959 if (content_surface_texture_.get())
1020 jint width, jint height, jfloat dpr) { 960 content_surface_texture_->SetDefaultBufferSize(width, height);
1021 TRACE_EVENT0("gpu", "VrShell::ContentBoundsChanged");
1022 content_tex_physical_size_.width = width; 961 content_tex_physical_size_.width = width;
1023 content_tex_physical_size_.height = height; 962 content_tex_physical_size_.height = height;
1024 // TODO(mthiesse): Synchronize with GL thread, and update tex css size in
1025 // response to MainFrameWasResized, not here.
1026 content_tex_css_width_ = width / dpr;
1027 content_tex_css_height_ = height / dpr;
1028
1029 content_compositor_->SetWindowBounds(width, height);
1030 } 963 }
1031 964
1032 void VrShell::UIBoundsChangedOnUI(JNIEnv* env, 965 void VrShellGl::UIBoundsChanged(int width, int height) {
1033 const JavaParamRef<jobject>& object, 966 ui_tex_css_width_ = width;
1034 jint width, jint height, jfloat dpr) { 967 ui_tex_css_height_ = height;
1035 ui_compositor_->SetWindowBounds(width, height);
1036 } 968 }
1037 969
1038 UiScene* VrShell::GetSceneOnGL() { 970 void VrShellGl::UIPhysicalBoundsChanged(int width, int height) {
1039 return scene_.get(); 971 if (ui_surface_texture_.get())
972 ui_surface_texture_->SetDefaultBufferSize(width, height);
973 ui_tex_physical_size_.width = width;
974 ui_tex_physical_size_.height = height;
1040 } 975 }
1041 976
1042 UiInterface* VrShell::GetUiInterfaceOnGL() { 977 base::WeakPtr<VrShellGl> VrShellGl::GetWeakPtr() {
1043 return html_interface_.get(); 978 return weak_ptr_factory_.GetWeakPtr();
1044 } 979 }
1045 980
1046 void VrShell::QueueTaskOnUI(base::Callback<void()>& callback) { 981 void VrShellGl::UpdateVSyncParameters(const base::TimeTicks timebase,
1047 base::AutoLock lock(task_queue_lock_); 982 const base::TimeDelta interval) {
1048 task_queue_.push(callback); 983 vsync_timebase_ = timebase;
984 vsync_interval_ = interval;
1049 } 985 }
1050 986
1051 void VrShell::HandleQueuedTasksOnGL() { 987 void VrShellGl::ScheduleNextDrawFrame() {
1052 // To protect a stream of tasks from blocking rendering indefinitely, 988 base::TimeTicks now = base::TimeTicks::Now();
1053 // process only the number of tasks present when first checked. 989 base::TimeTicks target;
1054 std::vector<base::Callback<void()>> tasks; 990
1055 { 991 if (vsync_interval_.is_zero()) {
1056 base::AutoLock lock(task_queue_lock_); 992 target = now;
1057 const size_t count = task_queue_.size(); 993 } else {
1058 for (size_t i = 0; i < count; i++) { 994 target = now + vsync_interval_;
1059 tasks.push_back(task_queue_.front()); 995 int64_t intervals = (target - vsync_timebase_) / vsync_interval_;
1060 task_queue_.pop(); 996 target = vsync_timebase_ + intervals * vsync_interval_;
1061 }
1062 } 997 }
1063 for (auto &task : tasks) { 998
1064 task.Run(); 999 task_runner_->PostDelayedTask(FROM_HERE, draw_task_.callback(), target - now);
1065 }
1066 } 1000 }
1067 1001
1068 void VrShell::DoUiActionOnUI(const UiAction action) { 1002 void VrShellGl::ForceExitVR() {
1069 content::NavigationController& controller = main_contents_->GetController(); 1003 main_thread_task_runner_->PostTask(
1070 switch (action) { 1004 FROM_HERE, base::Bind(&VrShell::ForceExitVR, weak_vr_shell_));
1071 case HISTORY_BACK:
1072 if (main_contents_->IsFullscreen()) {
1073 main_contents_->ExitFullscreen(true /* will_cause_resize */);
1074 } else if (controller.CanGoBack()) {
1075 controller.GoBack();
1076 }
1077 break;
1078 case HISTORY_FORWARD:
1079 if (controller.CanGoForward())
1080 controller.GoForward();
1081 break;
1082 case RELOAD:
1083 controller.Reload(false);
1084 break;
1085 #if defined(ENABLE_VR_SHELL_UI_DEV)
1086 case RELOAD_UI:
1087 ui_contents_->GetController().Reload(false);
1088 html_interface_.reset(new UiInterface(UiInterface::Mode::STANDARD,
1089 main_contents_->IsFullscreen()));
1090 vr_web_contents_observer_->SetUiInterface(html_interface_.get());
1091 break;
1092 #endif
1093 case ZOOM_OUT: // Not handled yet.
1094 case ZOOM_IN: // Not handled yet.
1095 break;
1096 default:
1097 NOTREACHED();
1098 }
1099 }
1100
1101 void VrShell::RenderViewHostChanged(content::RenderViewHost* old_host,
1102 content::RenderViewHost* new_host) {
1103 new_host->GetWidget()->GetView()->SetBackgroundColor(SK_ColorTRANSPARENT);
1104 }
1105
1106 void VrShell::MainFrameWasResized(bool width_changed) {
1107 display::Display display = display::Screen::GetScreen()
1108 ->GetDisplayNearestWindow(ui_contents_->GetNativeView());
1109 // TODO(mthiesse): Synchronize with GL thread.
1110 ui_tex_css_width_ = display.size().width();
1111 ui_tex_css_height_ = display.size().height();
1112 }
1113
1114 void VrShell::WebContentsDestroyed() {
1115 ui_input_manager_.reset();
1116 ui_contents_ = nullptr;
1117 // TODO(mthiesse): Handle web contents being destroyed.
1118 delegate_->ForceExitVr();
1119 }
1120
1121 void VrShell::ContentWebContentsDestroyedOnUI() {
1122 content_input_manager_.reset();
1123 main_contents_ = nullptr;
1124 // TODO(mthiesse): Handle web contents being destroyed.
1125 delegate_->ForceExitVr();
1126 }
1127
1128 void VrShell::ContentWasHiddenOnUI() {
1129 // Ensure we don't continue sending input to it.
1130 content_input_manager_.reset();
1131 // TODO(mthiesse): Handle web contents being hidden.
1132 delegate_->ForceExitVr();
1133 }
1134
1135 void VrShell::SetContentCssSizeOnUI(float width, float height, float dpr) {
1136 JNIEnv* env = base::android::AttachCurrentThread();
1137 Java_VrShellImpl_setContentCssSizeOnUI(env, j_vr_shell_.obj(), width, height,
1138 dpr);
1139 }
1140
1141 void VrShell::SetUiCssSizeOnUI(float width, float height, float dpr) {
1142 JNIEnv* env = base::android::AttachCurrentThread();
1143 Java_VrShellImpl_setUiCssSizeOnUI(env, j_vr_shell_.obj(), width, height, dpr);
1144 }
1145
1146 // ----------------------------------------------------------------------------
1147 // Native JNI methods
1148 // ----------------------------------------------------------------------------
1149
1150 jlong InitOnUI(JNIEnv* env,
1151 const JavaParamRef<jobject>& obj,
1152 const JavaParamRef<jobject>& content_web_contents,
1153 jlong content_window_android,
1154 const JavaParamRef<jobject>& ui_web_contents,
1155 jlong ui_window_android,
1156 jboolean for_web_vr) {
1157 return reinterpret_cast<intptr_t>(new VrShell(
1158 env, obj, content::WebContents::FromJavaWebContents(content_web_contents),
1159 reinterpret_cast<ui::WindowAndroid*>(content_window_android),
1160 content::WebContents::FromJavaWebContents(ui_web_contents),
1161 reinterpret_cast<ui::WindowAndroid*>(ui_window_android),
1162 for_web_vr));
1163 } 1005 }
1164 1006
1165 } // namespace vr_shell 1007 } // namespace vr_shell