Side by Side Diff: chrome/browser/android/vr_shell/vr_shell.cc

Issue 2562733002: Implement our own GLThread for VR Shell. (Closed)
Patch Set: Created 4 years ago
1 // Copyright 2016 The Chromium Authors. All rights reserved. 1 // Copyright 2016 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "chrome/browser/android/vr_shell/vr_shell.h" 5 #include "chrome/browser/android/vr_shell/vr_shell.h"
6 6
7 #include "base/metrics/histogram_macros.h" 7 #include "base/metrics/histogram_macros.h"
8 #include "chrome/browser/android/vr_shell/ui_elements.h" 8 #include "base/threading/platform_thread.h"
9 #include "base/threading/thread.h"
10 #include "base/threading/thread_task_runner_handle.h"
9 #include "chrome/browser/android/vr_shell/ui_interface.h" 11 #include "chrome/browser/android/vr_shell/ui_interface.h"
10 #include "chrome/browser/android/vr_shell/ui_scene.h"
11 #include "chrome/browser/android/vr_shell/vr_compositor.h" 12 #include "chrome/browser/android/vr_shell/vr_compositor.h"
12 #include "chrome/browser/android/vr_shell/vr_controller.h"
13 #include "chrome/browser/android/vr_shell/vr_gl_util.h"
14 #include "chrome/browser/android/vr_shell/vr_input_manager.h" 13 #include "chrome/browser/android/vr_shell/vr_input_manager.h"
15 #include "chrome/browser/android/vr_shell/vr_shell_delegate.h" 14 #include "chrome/browser/android/vr_shell/vr_shell_delegate.h"
16 #include "chrome/browser/android/vr_shell/vr_shell_renderer.h" 15 #include "chrome/browser/android/vr_shell/vr_shell_gl.h"
17 #include "chrome/browser/android/vr_shell/vr_usage_monitor.h" 16 #include "chrome/browser/android/vr_shell/vr_usage_monitor.h"
18 #include "chrome/browser/android/vr_shell/vr_web_contents_observer.h" 17 #include "chrome/browser/android/vr_shell/vr_web_contents_observer.h"
19 #include "content/public/browser/navigation_controller.h" 18 #include "content/public/browser/navigation_controller.h"
20 #include "content/public/browser/render_view_host.h" 19 #include "content/public/browser/render_view_host.h"
21 #include "content/public/browser/render_widget_host.h" 20 #include "content/public/browser/render_widget_host.h"
22 #include "content/public/browser/render_widget_host_view.h" 21 #include "content/public/browser/render_widget_host_view.h"
23 #include "content/public/browser/web_contents.h" 22 #include "content/public/browser/web_contents.h"
24 #include "content/public/common/referrer.h" 23 #include "content/public/common/referrer.h"
25 #include "device/vr/android/gvr/gvr_device_provider.h" 24 #include "device/vr/android/gvr/gvr_device_provider.h"
26 #include "jni/VrShellImpl_jni.h" 25 #include "jni/VrShellImpl_jni.h"
27 #include "ui/android/view_android.h" 26 #include "ui/android/view_android.h"
28 #include "ui/android/window_android.h" 27 #include "ui/android/window_android.h"
29 #include "ui/base/page_transition_types.h" 28 #include "ui/base/page_transition_types.h"
30 #include "ui/display/display.h" 29 #include "ui/display/display.h"
31 #include "ui/display/screen.h" 30 #include "ui/display/screen.h"
32 #include "ui/gl/gl_bindings.h"
33 #include "ui/gl/init/gl_factory.h"
34 31
35 using base::android::JavaParamRef; 32 using base::android::JavaParamRef;
33 using base::android::JavaRef;
36 34
37 namespace vr_shell { 35 namespace vr_shell {
38 36
39 namespace { 37 namespace {
40 // Constant taken from treasure_hunt demo.
41 static constexpr long kPredictionTimeWithoutVsyncNanos = 50000000;
42
43 static constexpr float kZNear = 0.1f;
44 static constexpr float kZFar = 1000.0f;
45
46 // Screen angle in degrees. 0 = vertical, positive = top closer.
47 static constexpr float kDesktopScreenTiltDefault = 0;
48
49 static constexpr float kReticleWidth = 0.025f;
50 static constexpr float kReticleHeight = 0.025f;
51
52 static constexpr float kLaserWidth = 0.01f;
53
54 // Angle (radians) the beam down from the controller axis, for wrist comfort.
55 static constexpr float kErgoAngleOffset = 0.26f;
56
57 static constexpr gvr::Vec3f kOrigin = {0.0f, 0.0f, 0.0f};
58
59 // In lieu of an elbow model, we assume a position for the user's hand.
60 // TODO(mthiesse): Handedness options.
61 static constexpr gvr::Vec3f kHandPosition = {0.2f, -0.5f, -0.2f};
62
63 // If there is no content quad, and the reticle isn't hitting another element,
64 // draw the reticle at this distance.
65 static constexpr float kDefaultReticleDistance = 2.0f;
66
67 // Fraction of the distance to the object the cursor is drawn at to avoid
68 // rounding errors drawing the cursor behind the object.
69 static constexpr float kReticleOffset = 0.99f;
70
71 // Limit the rendering distance of the reticle to the distance to a corner of
72 // the content quad, times this value. This lets the rendering distance
73 // adjust according to content quad placement.
74 static constexpr float kReticleDistanceMultiplier = 1.5f;
75
76 // GVR buffer indices for use with viewport->SetSourceBufferIndex
77 // or frame.BindBuffer. We use one for world content (with reprojection)
78 // including main VrShell and WebVR content plus world-space UI.
79 // The headlocked buffer is for UI that should not use reprojection.
80 static constexpr int kFramePrimaryBuffer = 0;
81 static constexpr int kFrameHeadlockedBuffer = 1;
82
83 // Pixel dimensions and field of view for the head-locked content. This
84 // is currently sized to fit the WebVR "insecure transport" warnings,
85 // adjust it as needed if there is additional content.
86 static constexpr gvr::Sizei kHeadlockedBufferDimensions = {1024, 1024};
87 static constexpr gvr::Rectf kHeadlockedBufferFov = {20.f, 20.f, 20.f, 20.f};
88
89 // The GVR viewport list has two entries (left eye and right eye) for each
90 // GVR buffer.
91 static constexpr int kViewportListPrimaryOffset = 0;
92 static constexpr int kViewportListHeadlockedOffset = 2;
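(Aside, inferred from the two offsets above plus the GVR_LEFT_EYE/GVR_RIGHT_EYE constants added to them later when calling SetBufferViewport: the resulting viewport list layout is index 0 = primary buffer, left eye; 1 = primary buffer, right eye; 2 = headlocked buffer, left eye; 3 = headlocked buffer, right eye.)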
93
94 // Magic numbers used to mark valid pose index values encoded in frame
95 // data. Must match the magic numbers used in blink's VRDisplay.cpp.
96 static constexpr std::array<uint8_t, 2> kWebVrPosePixelMagicNumbers{{42, 142}};
97
98 vr_shell::VrShell* g_instance; 38 vr_shell::VrShell* g_instance;
99 39
100 static const char kVrShellUIURL[] = "chrome://vr-shell-ui"; 40 static const char kVrShellUIURL[] = "chrome://vr-shell-ui";
101 41
102 float Distance(const gvr::Vec3f& vec1, const gvr::Vec3f& vec2) { 42 class GLThread : public base::Thread {
103 float xdiff = (vec1.x - vec2.x); 43 public:
104 float ydiff = (vec1.y - vec2.y); 44 GLThread(VrShell* vr_shell, base::WeakPtr<VrShell> weak_vr_shell,
105 float zdiff = (vec1.z - vec2.z); 45 base::WeakPtr<VrInputManager> content_input_manager,
106 float scale = xdiff * xdiff + ydiff * ydiff + zdiff * zdiff; 46 base::WeakPtr<VrInputManager> ui_input_manager,
107 return std::sqrt(scale); 47 scoped_refptr<base::SingleThreadTaskRunner> main_thread_task_runner,
108 } 48 gvr_context* gvr_api)
49 : base::Thread("VrShellGL"),
50 vr_shell_(vr_shell),
51 weak_vr_shell_(weak_vr_shell),
52 content_input_manager_(content_input_manager),
53 ui_input_manager_(ui_input_manager),
54 main_thread_task_runner_(std::move(main_thread_task_runner)),
55 gvr_api_(gvr_api) {}
109 56
110 // Generate a quaternion representing the rotation from the negative Z axis 57 base::WeakPtr<VrShellGl> GetVrShellGl() { return weak_vr_shell_gl_; }
111 // (0, 0, -1) to a specified vector. This is an optimized version of a more 58 VrShellGl* GetVrShellGlUnsafe() { return vr_shell_gl_.get(); }
112 // general vector-to-vector calculation. 59 void StopRenderLoop() {
113 gvr::Quatf GetRotationFromZAxis(gvr::Vec3f vec) { 60 vr_shell_gl_.reset();
114 vr_shell::NormalizeVector(vec);
115 gvr::Quatf quat;
116 quat.qw = 1.0f - vec.z;
117 if (quat.qw < 1e-6f) {
118 // Degenerate case: vectors are exactly opposite. Replace by an
119 // arbitrary 180 degree rotation to avoid invalid normalization.
120 quat.qx = 1.0f;
121 quat.qy = 0.0f;
122 quat.qz = 0.0f;
123 quat.qw = 0.0f;
124 } else {
125 quat.qx = vec.y;
126 quat.qy = -vec.x;
127 quat.qz = 0.0f;
128 vr_shell::NormalizeQuat(quat);
129 } 61 }
130 return quat;
131 }
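
A short note on the math above: the shortest-arc rotation between unit vectors a and b can be written, before normalization, as q = (a × b, 1 + a · b) with the cross product as the vector part and 1 + a · b as the scalar part. Substituting a = (0, 0, -1) gives a × v = (v_y, -v_x, 0) and 1 + a · v = 1 - v_z, which are exactly the components assigned to (qx, qy, qz, qw) before NormalizeQuat(). The degenerate branch covers v pointing along +Z, where 1 - v_z vanishes and any 180-degree rotation about an axis perpendicular to Z (here, the X axis) is a valid answer.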
132 62
133 std::unique_ptr<blink::WebMouseEvent> MakeMouseEvent(WebInputEvent::Type type, 63 protected:
134 double timestamp, 64 void Init() override {
135 float x, 65 vr_shell_gl_.reset(new VrShellGl(vr_shell_,
136 float y) { 66 std::move(weak_vr_shell_),
137 std::unique_ptr<blink::WebMouseEvent> mouse_event(new blink::WebMouseEvent); 67 std::move(content_input_manager_),
138 mouse_event->type = type; 68 std::move(ui_input_manager_),
139 mouse_event->pointerType = blink::WebPointerProperties::PointerType::Mouse; 69 std::move(main_thread_task_runner_),
140 mouse_event->x = x; 70 gvr_api_));
141 mouse_event->y = y; 71 weak_vr_shell_gl_ = vr_shell_gl_->GetWeakPtr();
142 mouse_event->windowX = x; 72 }
143 mouse_event->windowY = y;
144 mouse_event->timeStampSeconds = timestamp;
145 mouse_event->clickCount = 1;
146 mouse_event->modifiers = 0;
147 73
148 return mouse_event; 74 private:
149 } 75 std::unique_ptr<VrShellGl> vr_shell_gl_;
76 base::WeakPtr<VrShellGl> weak_vr_shell_gl_;
77
78 // TODO(mthiesse): Remove vr_shell_.
79 VrShell* vr_shell_;
80 base::WeakPtr<VrShell> weak_vr_shell_;
81 base::WeakPtr<VrInputManager> content_input_manager_;
82 base::WeakPtr<VrInputManager> ui_input_manager_;
83 scoped_refptr<base::SingleThreadTaskRunner> main_thread_task_runner_;
84 gvr_context* gvr_api_;
85 };
86
150 } // namespace 87 } // namespace
151 88
152 VrShell::VrShell(JNIEnv* env, 89 VrShell::VrShell(JNIEnv* env,
153 jobject obj, 90 jobject obj,
154 content::WebContents* main_contents, 91 content::WebContents* main_contents,
155 ui::WindowAndroid* content_window, 92 ui::WindowAndroid* content_window,
156 content::WebContents* ui_contents, 93 content::WebContents* ui_contents,
157 ui::WindowAndroid* ui_window, 94 ui::WindowAndroid* ui_window,
158 bool for_web_vr) 95 bool for_web_vr,
96 VrShellDelegate* delegate,
97 gvr_context* gvr_api)
159 : WebContentsObserver(ui_contents), 98 : WebContentsObserver(ui_contents),
160 main_contents_(main_contents), 99 main_contents_(main_contents),
161 ui_contents_(ui_contents), 100 ui_contents_(ui_contents),
101 delegate_(delegate),
162 metrics_helper_(new VrMetricsHelper(main_contents)), 102 metrics_helper_(new VrMetricsHelper(main_contents)),
163 main_thread_task_runner_(base::ThreadTaskRunnerHandle::Get()), 103 main_thread_task_runner_(base::ThreadTaskRunnerHandle::Get()),
164 weak_ptr_factory_(this) { 104 weak_ptr_factory_(this) {
165 DCHECK(g_instance == nullptr); 105 DCHECK(g_instance == nullptr);
166 g_instance = this; 106 g_instance = this;
167 j_vr_shell_.Reset(env, obj); 107 j_vr_shell_.Reset(env, obj);
168 scene_.reset(new UiScene); 108
109 content_input_manager_.reset(new VrInputManager(main_contents_));
110 ui_input_manager_.reset(new VrInputManager(ui_contents_));
111
112 gl_thread_.reset(new GLThread(this, weak_ptr_factory_.GetWeakPtr(),
113 content_input_manager_->GetWeakPtr(),
114 ui_input_manager_->GetWeakPtr(),
115 main_thread_task_runner_,
116 gvr_api));
117
118 base::Thread::Options options(base::MessageLoop::TYPE_DEFAULT, 0);
119 options.priority = base::ThreadPriority::DISPLAY;
120 gl_thread_->StartWithOptions(options);
169 121
170 if (for_web_vr) 122 if (for_web_vr)
171 metrics_helper_->SetWebVREnabled(true); 123 metrics_helper_->SetWebVREnabled(true);
172 html_interface_.reset(new UiInterface( 124 html_interface_.reset(new UiInterface(
173 for_web_vr ? UiInterface::Mode::WEB_VR : UiInterface::Mode::STANDARD, 125 for_web_vr ? UiInterface::Mode::WEB_VR : UiInterface::Mode::STANDARD,
174 main_contents_->IsFullscreen())); 126 main_contents_->IsFullscreen()));
175 content_compositor_.reset(new VrCompositor(content_window, false)); 127 content_compositor_.reset(new VrCompositor(content_window, false));
128 content_compositor_->SetLayer(main_contents_);
176 ui_compositor_.reset(new VrCompositor(ui_window, true)); 129 ui_compositor_.reset(new VrCompositor(ui_window, true));
130 ui_compositor_->SetLayer(ui_contents_);
177 vr_web_contents_observer_.reset(new VrWebContentsObserver( 131 vr_web_contents_observer_.reset(new VrWebContentsObserver(
178 main_contents, html_interface_.get(), this)); 132 main_contents, html_interface_.get(), this));
179
180 LoadUIContentOnUI();
181
182 gvr::Mat4f identity;
183 SetIdentityM(identity);
184 webvr_head_pose_.resize(kPoseRingBufferSize, identity);
185 webvr_head_pose_valid_.resize(kPoseRingBufferSize, false);
186
187 content_input_manager_.reset(new VrInputManager(main_contents_));
188 ui_input_manager_.reset(new VrInputManager(ui_contents_));
189 weak_content_input_manager_ = content_input_manager_->GetWeakPtr();
190 weak_ui_input_manager_ = ui_input_manager_->GetWeakPtr();
191 } 133 }
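
For readers unfamiliar with base::Thread, the lifecycle used here is: Init() runs on the newly started thread after StartWithOptions(), so the GL-side object is created, used, and destroyed on that thread (the destructor below posts StopRenderLoop() before calling Stop() for the same reason). Below is a minimal sketch of this pattern, with hypothetical Worker/DoWork names standing in for VrShellGl; it is an illustration of the base::Thread idiom, not the code under review.

    #include <memory>

    #include "base/bind.h"
    #include "base/location.h"
    #include "base/memory/weak_ptr.h"
    #include "base/message_loop/message_loop.h"
    #include "base/threading/thread.h"

    // Hypothetical stand-in for VrShellGl: lives entirely on the worker thread.
    class Worker {
     public:
      Worker() : weak_ptr_factory_(this) {}
      void DoWork(int value) { /* runs on the worker thread */ }
      base::WeakPtr<Worker> GetWeakPtr() { return weak_ptr_factory_.GetWeakPtr(); }

     private:
      base::WeakPtrFactory<Worker> weak_ptr_factory_;
    };

    class WorkerThread : public base::Thread {
     public:
      WorkerThread() : base::Thread("Worker") {}
      // base::Thread requires an explicit Stop() before destruction when
      // CleanUp() is overridden.
      ~WorkerThread() override { Stop(); }

      base::WeakPtr<Worker> GetWorker() { return weak_worker_; }

     protected:
      // Runs on the new thread after StartWithOptions().
      void Init() override {
        worker_.reset(new Worker());
        weak_worker_ = worker_->GetWeakPtr();
      }
      // Also runs on the new thread, just before it exits.
      void CleanUp() override { worker_.reset(); }

     private:
      std::unique_ptr<Worker> worker_;
      base::WeakPtr<Worker> weak_worker_;
    };

    // Usage, mirroring the constructor above:
    //   WorkerThread thread;
    //   base::Thread::Options options(base::MessageLoop::TYPE_DEFAULT, 0);
    //   options.priority = base::ThreadPriority::DISPLAY;
    //   thread.StartWithOptions(options);
    //   thread.WaitUntilThreadStarted();
    //   thread.task_runner()->PostTask(
    //       FROM_HERE, base::Bind(&Worker::DoWork, thread.GetWorker(), 42));

Posting through a base::WeakPtr obtained on the worker thread, as GLThread does with weak_vr_shell_gl_, means tasks bound before or after teardown are silently dropped instead of dereferencing a destroyed object.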
192 134
193 void VrShell::UpdateCompositorLayersOnUI(JNIEnv* env, 135 void VrShell::Destroy(JNIEnv* env, const JavaParamRef<jobject>& obj) {
194 const JavaParamRef<jobject>& obj) {
195 content_compositor_->SetLayer(main_contents_);
196 ui_compositor_->SetLayer(ui_contents_);
197 }
198
199 void VrShell::DestroyOnUI(JNIEnv* env, const JavaParamRef<jobject>& obj) {
200 delete this; 136 delete this;
201 } 137 }
202 138
203 void VrShell::LoadUIContentOnUI() { 139 void VrShell::LoadUIContent(JNIEnv* env,
140 const JavaParamRef<jobject>& obj) {
204 GURL url(kVrShellUIURL); 141 GURL url(kVrShellUIURL);
205 ui_contents_->GetController().LoadURL( 142 ui_contents_->GetController().LoadURL(
206 url, content::Referrer(), 143 url, content::Referrer(),
207 ui::PageTransition::PAGE_TRANSITION_AUTO_TOPLEVEL, std::string("")); 144 ui::PageTransition::PAGE_TRANSITION_AUTO_TOPLEVEL, std::string(""));
208 } 145 }
209 146
210 bool RegisterVrShell(JNIEnv* env) { 147 bool RegisterVrShell(JNIEnv* env) {
211 return RegisterNativesImpl(env); 148 return RegisterNativesImpl(env);
212 } 149 }
213 150
214 VrShell::~VrShell() { 151 VrShell::~VrShell() {
215 if (delegate_ && delegate_->GetDeviceProvider()) { 152 GLThread* thread = static_cast<GLThread*>(gl_thread_.get());
216 delegate_->GetDeviceProvider()->OnGvrDelegateRemoved(); 153 thread->task_runner()->PostTask(
217 } 154 FROM_HERE,
155 base::Bind(&GLThread::StopRenderLoop, base::Unretained(thread)));
156 thread->Stop();
157 delegate_->RemoveDelegate();
218 g_instance = nullptr; 158 g_instance = nullptr;
219 gl::init::ShutdownGL();
220 }
221
222 void VrShell::SetDelegateOnUI(JNIEnv* env,
223 const base::android::JavaParamRef<jobject>& obj,
224 const base::android::JavaParamRef<jobject>& delegate) {
225 base::AutoLock lock(gvr_init_lock_);
226 delegate_ = VrShellDelegate::GetNativeDelegate(env, delegate);
227 if (swap_chain_.get()) {
228 delegate_->GetDeviceProvider()->OnGvrDelegateReady(
229 weak_ptr_factory_.GetWeakPtr());
230 }
231 }
232
233 enum class ViewerType {
234 UNKNOWN_TYPE = 0,
235 CARDBOARD = 1,
236 DAYDREAM = 2,
237 VIEWER_TYPE_MAX,
238 };
239
240 void VrShell::GvrInitOnGL(JNIEnv* env,
241 const JavaParamRef<jobject>& obj,
242 jlong native_gvr_api) {
243 // set the initial webvr state
244 metrics_helper_->SetVRActive(true);
245
246 gvr_api_ =
247 gvr::GvrApi::WrapNonOwned(reinterpret_cast<gvr_context*>(native_gvr_api));
248 // TODO(klausw,crbug.com/655722): should report OnGvrDelegateReady here once
249 // we switch to using a WebVR render surface. We currently need to wait for
250 // the compositor window's size to be known first. See also
251 // ContentSurfaceChanged.
252 controller_.reset(
253 new VrController(reinterpret_cast<gvr_context*>(native_gvr_api)));
254
255
256 ViewerType viewerType;
257 switch (gvr_api_->GetViewerType()) {
258 case gvr::ViewerType::GVR_VIEWER_TYPE_DAYDREAM:
259 viewerType = ViewerType::DAYDREAM;
260 break;
261 case gvr::ViewerType::GVR_VIEWER_TYPE_CARDBOARD:
262 viewerType = ViewerType::CARDBOARD;
263 break;
264 default:
265 NOTREACHED();
266 viewerType = ViewerType::UNKNOWN_TYPE;
267 break;
268 }
269 UMA_HISTOGRAM_ENUMERATION("VRViewerType", static_cast<int>(viewerType),
270 static_cast<int>(ViewerType::VIEWER_TYPE_MAX));
271 }
272
273 void VrShell::InitializeGlOnGL(JNIEnv* env,
274 const JavaParamRef<jobject>& obj,
275 jint content_texture_handle,
276 jint ui_texture_handle) {
277 base::AutoLock lock(gvr_init_lock_);
278 CHECK(gl::GetGLImplementation() != gl::kGLImplementationNone ||
279 gl::init::InitializeGLOneOff());
280
281 content_texture_id_ = content_texture_handle;
282 ui_texture_id_ = ui_texture_handle;
283
284 // While WebVR is going through the compositor path, it shares
285 // the same texture ID. This will change once it gets its own
286 // surface, but store it separately to avoid future confusion.
287 // TODO(klausw,crbug.com/655722): remove this.
288 webvr_texture_id_ = content_texture_id_;
289 // Out of paranoia, explicitly reset the "pose valid" flags to false
290 // from the GL thread. The constructor ran in the UI thread.
291 // TODO(klausw,crbug.com/655722): remove this.
292 webvr_head_pose_valid_.assign(kPoseRingBufferSize, false);
293
294 gvr_api_->InitializeGl();
295 std::vector<gvr::BufferSpec> specs;
296 // For kFramePrimaryBuffer (primary VrShell and WebVR content)
297 specs.push_back(gvr_api_->CreateBufferSpec());
298 render_size_primary_ = specs[kFramePrimaryBuffer].GetSize();
299 render_size_primary_vrshell_ = render_size_primary_;
300
301 // For kFrameHeadlockedBuffer (for WebVR insecure content warning).
302 // Set this up at fixed resolution, the (smaller) FOV gets set below.
303 specs.push_back(gvr_api_->CreateBufferSpec());
304 specs.back().SetSize(kHeadlockedBufferDimensions);
305 render_size_headlocked_ = specs[kFrameHeadlockedBuffer].GetSize();
306
307 swap_chain_.reset(new gvr::SwapChain(gvr_api_->CreateSwapChain(specs)));
308
309 vr_shell_renderer_.reset(new VrShellRenderer());
310
311 // Allocate a buffer viewport for use in UI drawing. This isn't
312 // initialized at this point, it'll be set from other viewport list
313 // entries as needed.
314 buffer_viewport_.reset(
315 new gvr::BufferViewport(gvr_api_->CreateBufferViewport()));
316
317 // Set up main content viewports. The list has two elements, 0=left
318 // eye and 1=right eye.
319 buffer_viewport_list_.reset(
320 new gvr::BufferViewportList(gvr_api_->CreateEmptyBufferViewportList()));
321 buffer_viewport_list_->SetToRecommendedBufferViewports();
322
323 // Set up head-locked UI viewports, these will be elements 2=left eye
324 // and 3=right eye. For now, use a hardcoded 20-degree-from-center FOV
325 // frustum to reduce rendering cost for this overlay. This fits the
326 // current content, but will need to be adjusted once there's more dynamic
327 // head-locked content that could be larger.
328 headlocked_left_viewport_.reset(
329 new gvr::BufferViewport(gvr_api_->CreateBufferViewport()));
330 buffer_viewport_list_->GetBufferViewport(GVR_LEFT_EYE,
331 headlocked_left_viewport_.get());
332 headlocked_left_viewport_->SetSourceBufferIndex(kFrameHeadlockedBuffer);
333 headlocked_left_viewport_->SetReprojection(GVR_REPROJECTION_NONE);
334 headlocked_left_viewport_->SetSourceFov(kHeadlockedBufferFov);
335
336 headlocked_right_viewport_.reset(
337 new gvr::BufferViewport(gvr_api_->CreateBufferViewport()));
338 buffer_viewport_list_->GetBufferViewport(GVR_RIGHT_EYE,
339 headlocked_right_viewport_.get());
340 headlocked_right_viewport_->SetSourceBufferIndex(kFrameHeadlockedBuffer);
341 headlocked_right_viewport_->SetReprojection(GVR_REPROJECTION_NONE);
342 headlocked_right_viewport_->SetSourceFov(kHeadlockedBufferFov);
343
344 // Save copies of the first two viewport items for use by WebVR, it
345 // sets its own UV bounds.
346 webvr_left_viewport_.reset(
347 new gvr::BufferViewport(gvr_api_->CreateBufferViewport()));
348 buffer_viewport_list_->GetBufferViewport(GVR_LEFT_EYE,
349 webvr_left_viewport_.get());
350 webvr_left_viewport_->SetSourceBufferIndex(kFramePrimaryBuffer);
351
352 webvr_right_viewport_.reset(
353 new gvr::BufferViewport(gvr_api_->CreateBufferViewport()));
354 buffer_viewport_list_->GetBufferViewport(GVR_RIGHT_EYE,
355 webvr_right_viewport_.get());
356 webvr_right_viewport_->SetSourceBufferIndex(kFramePrimaryBuffer);
357
358 if (delegate_) {
359 main_thread_task_runner_->PostTask(
360 FROM_HERE, base::Bind(&device::GvrDeviceProvider::OnGvrDelegateReady,
361 delegate_->GetDeviceProvider(),
362 weak_ptr_factory_.GetWeakPtr()));
363 }
364 }
365
366 void VrShell::UpdateControllerOnGL(const gvr::Vec3f& forward_vector) {
367 controller_->UpdateState();
368
369 #if defined(ENABLE_VR_SHELL)
370 // Note that button up/down state is transient, so ButtonUpHappened only
371 // returns
372 // true for a single frame (and we're guaranteed not to miss it).
373 if (controller_->ButtonUpHappened(
374 gvr::ControllerButton::GVR_CONTROLLER_BUTTON_APP)) {
375 html_interface_->SetMenuMode(!html_interface_->GetMenuMode());
376
377 // TODO(mthiesse): The page is no longer visible when in menu mode. We
378 // should unfocus or otherwise let it know it's hidden.
379 if (html_interface_->GetMode() == UiInterface::Mode::WEB_VR) {
380 const auto&& task = html_interface_->GetMenuMode() ?
381 &device::GvrDeviceProvider::OnDisplayBlur :
382 &device::GvrDeviceProvider::OnDisplayFocus;
383 main_thread_task_runner_->PostTask(
384 FROM_HERE, base::Bind(task, delegate_->GetDeviceProvider()));
385 }
386 }
387 #endif
388 if (html_interface_->GetMode() == UiInterface::Mode::WEB_VR) {
389 // Process screen touch events for Cardboard button compatibility.
390 // Also send tap events for controller "touchpad click" events.
391 if (touch_pending_ ||
392 controller_->ButtonUpHappened(
393 gvr::ControllerButton::GVR_CONTROLLER_BUTTON_CLICK)) {
394 touch_pending_ = false;
395 std::unique_ptr<WebGestureEvent> gesture(new WebGestureEvent());
396 gesture->sourceDevice = blink::WebGestureDeviceTouchpad;
397 gesture->timeStampSeconds =
398 (base::TimeTicks::Now() - base::TimeTicks()).InSecondsF();
399 gesture->type = WebInputEvent::GestureTapDown;
400 gesture->x = 0;
401 gesture->y = 0;
402 SendGestureOnGL(CONTENT, std::move(gesture));
403 }
404
405 return;
406 }
407
408 gvr::Vec3f ergo_neutral_pose;
409 if (!controller_->IsConnected()) {
410 // No controller detected, set up a gaze cursor that tracks the
411 // forward direction.
412 ergo_neutral_pose = {0.0f, 0.0f, -1.0f};
413 controller_quat_ = GetRotationFromZAxis(forward_vector);
414 } else {
415 ergo_neutral_pose = {0.0f, -sin(kErgoAngleOffset), -cos(kErgoAngleOffset)};
416 controller_quat_ = controller_->Orientation();
417 }
418
419 gvr::Mat4f mat = QuatToMatrix(controller_quat_);
420 gvr::Vec3f forward = MatrixVectorMul(mat, ergo_neutral_pose);
421 gvr::Vec3f origin = kHandPosition;
422
423 // If we place the reticle based on elements intersecting the controller beam,
424 // we can end up with the reticle hiding behind elements, or jumping laterally
425 // in the field of view. This is physically correct, but hard to use. For
426 // usability, do the following instead:
427 //
428 // - Project the controller laser onto an outer surface, which is the
429 // closer of the desktop plane, or a distance-limiting sphere.
430 // - Create a vector between the eyes and the outer surface point.
431 // - If any UI elements intersect this vector, choose the closest to the eyes,
432 // and place the reticle at the intersection point.
433
434 // Find distance to a corner of the content quad, and limit the cursor
435 // distance to a multiple of that distance. This lets us keep the reticle on
436 // the content plane near the content window, and on the surface of a sphere
437 // in other directions. Note that this approach uses distance from controller,
438 // rather than eye, for simplicity. This will make the sphere slightly
439 // off-center.
440 float distance = kDefaultReticleDistance;
441 ContentRectangle* content_plane = scene_->GetContentQuad();
442 if (content_plane) {
443 distance = content_plane->GetRayDistance(origin, forward);
444 gvr::Vec3f corner = {0.5f, 0.5f, 0.0f};
445 corner = MatrixVectorMul(content_plane->transform.to_world, corner);
446 float max_distance = Distance(origin, corner) * kReticleDistanceMultiplier;
447 if (distance > max_distance || distance <= 0.0f) {
448 distance = max_distance;
449 }
450 }
451
452 target_point_ = GetRayPoint(origin, forward, distance);
453 gvr::Vec3f eye_to_target = target_point_;
454 NormalizeVector(eye_to_target);
455
456 // Determine which UI element (if any) intersects the line between the eyes
457 // and the controller target position.
458 float closest_element_distance = std::numeric_limits<float>::infinity();
459 int pixel_x = 0;
460 int pixel_y = 0;
461 target_element_ = nullptr;
462 InputTarget input_target = NONE;
463
464 for (const auto& plane : scene_->GetUiElements()) {
465 if (!plane->visible || !plane->hit_testable) {
466 continue;
467 }
468 float distance_to_plane = plane->GetRayDistance(kOrigin, eye_to_target);
469 gvr::Vec3f plane_intersection_point =
470 GetRayPoint(kOrigin, eye_to_target, distance_to_plane);
471
472 gvr::Vec3f rect_2d_point =
473 MatrixVectorMul(plane->transform.from_world, plane_intersection_point);
474 if (distance_to_plane > 0 && distance_to_plane < closest_element_distance) {
475 float x = rect_2d_point.x + 0.5f;
476 float y = 0.5f - rect_2d_point.y;
477 bool is_inside = x >= 0.0f && x < 1.0f && y >= 0.0f && y < 1.0f;
478 if (!is_inside)
479 continue;
480
481 closest_element_distance = distance_to_plane;
482 Rectf pixel_rect;
483 if (plane->content_quad) {
484 pixel_rect = {0, 0, content_tex_css_width_, content_tex_css_height_};
485 } else {
486 pixel_rect = {plane->copy_rect.x, plane->copy_rect.y,
487 plane->copy_rect.width, plane->copy_rect.height};
488 }
489 pixel_x = pixel_rect.width * x + pixel_rect.x;
490 pixel_y = pixel_rect.height * y + pixel_rect.y;
491
492 target_point_ = plane_intersection_point;
493 target_element_ = plane.get();
494 input_target = plane->content_quad ? CONTENT : UI;
495 }
496 }
497 SendEventsToTargetOnGL(input_target, pixel_x, pixel_y);
498 }
499
500 void VrShell::SendEventsToTargetOnGL(InputTarget input_target,
501 int pixel_x,
502 int pixel_y) {
503 std::vector<std::unique_ptr<WebGestureEvent>> gesture_list =
504 controller_->DetectGestures();
505 std::unique_ptr<WebGestureEvent> gesture = std::move(gesture_list.front());
506
507 // TODO(asimjour) for now, scroll is sent to the main content.
508 if (gesture->type == WebInputEvent::GestureScrollBegin ||
509 gesture->type == WebInputEvent::GestureScrollUpdate ||
510 gesture->type == WebInputEvent::GestureScrollEnd ||
511 gesture->type == WebInputEvent::GestureFlingCancel) {
512 SendGestureOnGL(CONTENT, base::WrapUnique(new WebGestureEvent(*gesture)));
513 }
514
515 if (gesture->type == WebInputEvent::GestureScrollEnd) {
516 CHECK(gesture_list.size() == 2);
517 if (gesture_list.back()->type == WebInputEvent::GestureTapDown) {
518 gesture_list.back()->x = pixel_x;
519 gesture_list.back()->y = pixel_y;
520 if (input_target != NONE)
521 SendGestureOnGL(input_target, std::move(gesture_list.back()));
522 } else if (gesture_list.back()->type == WebInputEvent::GestureFlingStart) {
523 SendGestureOnGL(CONTENT, std::move(gesture_list.back()));
524 } else {
525 NOTREACHED();
526 }
527 }
528
529 WebInputEvent::Type original_type = gesture->type;
530
531 bool new_target = input_target != current_input_target_;
532 if (new_target && current_input_target_ != NONE) {
533 // Send a move event indicating that the pointer moved off of an element.
534 SendGestureOnGL(current_input_target_, MakeMouseEvent(
535 WebInputEvent::MouseLeave, gesture->timeStampSeconds, 0, 0));
536 }
537 current_input_target_ = input_target;
538 if (current_input_target_ == NONE) {
539 return;
540 }
541 WebInputEvent::Type type =
542 new_target ? WebInputEvent::MouseEnter : WebInputEvent::MouseMove;
543 SendGestureOnGL(current_input_target_, MakeMouseEvent(
544 type, gesture->timeStampSeconds, pixel_x, pixel_y));
545
546 if (original_type == WebInputEvent::GestureTapDown || touch_pending_) {
547 std::unique_ptr<WebGestureEvent> event(new WebGestureEvent(*gesture));
548 if (touch_pending_) {
549 touch_pending_ = false;
550 event->sourceDevice = blink::WebGestureDeviceTouchpad;
551 event->timeStampSeconds =
552 (base::TimeTicks::Now() - base::TimeTicks()).InSecondsF();
553 }
554 event->type = WebInputEvent::GestureTapDown;
555 event->x = pixel_x;
556 event->y = pixel_y;
557 SendGestureOnGL(current_input_target_, std::move(event));
558 }
559 }
560
561 void VrShell::SendGestureOnGL(InputTarget input_target,
562 std::unique_ptr<blink::WebInputEvent> event) {
563 DCHECK(input_target != NONE);
564 const base::WeakPtr<VrInputManager>& weak_ptr =
565 input_target == CONTENT ? weak_content_input_manager_
566 : weak_ui_input_manager_;
567 main_thread_task_runner_->PostTask(
568 FROM_HERE,
569 base::Bind(&VrInputManager::ProcessUpdatedGesture, weak_ptr,
570 base::Passed(std::move(event))));
571 } 159 }
572 160
573 void VrShell::SetGvrPoseForWebVr(const gvr::Mat4f& pose, uint32_t pose_num) { 161 void VrShell::SetGvrPoseForWebVr(const gvr::Mat4f& pose, uint32_t pose_num) {
574 webvr_head_pose_[pose_num % kPoseRingBufferSize] = pose; 162 GLThread* thread = static_cast<GLThread*>(gl_thread_.get());
575 webvr_head_pose_valid_[pose_num % kPoseRingBufferSize] = true; 163 if (thread->GetVrShellGlUnsafe()) {
576 } 164 thread->GetVrShellGlUnsafe()->SetGvrPoseForWebVr(pose, pose_num);
577
578 int GetPixelEncodedPoseIndexByte() {
579 TRACE_EVENT0("gpu", "VrShell::GetPixelEncodedPoseIndex");
580 // Read the pose index encoded in a bottom left pixel as color values.
581 // See also third_party/WebKit/Source/modules/vr/VRDisplay.cpp which
582 // encodes the pose index, and device/vr/android/gvr/gvr_device.cc
583 // which tracks poses. Returns the low byte (0..255) if valid, or -1
584 // if not valid due to bad magic number.
585 uint8_t pixels[4];
586 // Assume we're reading from the framebuffer we just wrote to.
587 // That's true currently, we may need to use glReadBuffer(GL_BACK)
588 // or equivalent if the rendering setup changes in the future.
589 glReadPixels(0, 0, 1, 1, GL_RGBA, GL_UNSIGNED_BYTE, pixels);
590
591 // Check for the magic number written by VRDevice.cpp on submit.
592 // This helps avoid glitches from garbage data in the render
593 // buffer that can appear during initialization or resizing. These
594 // often appear as flashes of all-black or all-white pixels.
595 if (pixels[1] == kWebVrPosePixelMagicNumbers[0] &&
596 pixels[2] == kWebVrPosePixelMagicNumbers[1]) {
597 // Pose is good.
598 return pixels[0];
599 }
600 VLOG(1) << "WebVR: reject decoded pose index " << (int)pixels[0] <<
601 ", bad magic number " << (int)pixels[1] << ", " << (int)pixels[2];
602 return -1;
603 }
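
For context, the decoder above implies the following layout for the bottom-left pixel of a submitted WebVR frame. The actual write happens in blink's VRDisplay.cpp; this is only an illustrative sketch of the convention, with pose_index as a placeholder name.

    uint8_t encoded_pixel[4] = {
        static_cast<uint8_t>(pose_index & 0xFF),  // low byte of the pose index
        42, 142,                                  // kWebVrPosePixelMagicNumbers
        255};                                     // alpha, ignored by the decoder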
604
605 bool VrShell::WebVrPoseByteIsValidOnGL(int pose_index_byte) {
606 if (pose_index_byte < 0) {
607 return false;
608 }
609 if (!webvr_head_pose_valid_[pose_index_byte % kPoseRingBufferSize]) {
610 VLOG(1) << "WebVR: reject decoded pose index " << pose_index_byte <<
611 ", not a valid pose";
612 return false;
613 }
614 return true;
615 }
616
617 void VrShell::DrawFrameOnGL(JNIEnv* env, const JavaParamRef<jobject>& obj) {
618 TRACE_EVENT0("gpu", "VrShell::DrawFrame");
619 // Reset the viewport list to just the pair of viewports for the
620 // primary buffer each frame. Head-locked viewports get added by
621 // DrawVrShell if needed.
622 buffer_viewport_list_->SetToRecommendedBufferViewports();
623
624 if (html_interface_->GetMode() == UiInterface::Mode::WEB_VR) {
625 // If needed, resize the primary buffer for use with WebVR.
626 if (render_size_primary_ != render_size_primary_webvr_) {
627 if (!render_size_primary_webvr_.width) {
628 VLOG(2) << "WebVR rendering size not known yet, dropping frame";
629 return;
630 }
631 render_size_primary_ = render_size_primary_webvr_;
632 swap_chain_->ResizeBuffer(kFramePrimaryBuffer, render_size_primary_);
633 }
634 } else {
635 if (render_size_primary_ != render_size_primary_vrshell_) {
636 render_size_primary_ = render_size_primary_vrshell_;
637 swap_chain_->ResizeBuffer(kFramePrimaryBuffer, render_size_primary_);
638 }
639 }
640
641 gvr::Frame frame = swap_chain_->AcquireFrame();
642 gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow();
643 target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos;
644
645 gvr::Mat4f head_pose =
646 gvr_api_->GetHeadSpaceFromStartSpaceRotation(target_time);
647
648 gvr::Vec3f position = GetTranslation(head_pose);
649 if (position.x == 0.0f && position.y == 0.0f && position.z == 0.0f) {
650 // This appears to be a 3DOF pose without a neck model. Add one.
651 // The head pose has redundant data. Assume we're only using the
652 // object_from_reference_matrix, we're not updating position_external.
653 // TODO: Not sure what object_from_reference_matrix is. The new api removed
 654 // it. For now, removing it seems to work fine.
655 gvr_api_->ApplyNeckModel(head_pose, 1.0f);
656 }
657
658 // Bind the primary framebuffer.
659 frame.BindBuffer(kFramePrimaryBuffer);
660
661 HandleQueuedTasksOnGL();
662
663 // Update the render position of all UI elements (including desktop).
664 const float screen_tilt = kDesktopScreenTiltDefault * M_PI / 180.0f;
665 scene_->UpdateTransforms(screen_tilt, UiScene::TimeInMicroseconds());
666
667 UpdateControllerOnGL(GetForwardVector(head_pose));
668
669 if (html_interface_->GetMode() == UiInterface::Mode::WEB_VR) {
670 DrawWebVrOnGL();
671
672 // When using async reprojection, we need to know which pose was used in
673 // the WebVR app for drawing this frame. Due to unknown amounts of
674 // buffering in the compositor and SurfaceTexture, we read the pose number
675 // from a corner pixel. There's no point in doing this for legacy
676 // distortion rendering since that doesn't need a pose, and reading back
677 // pixels is an expensive operation. TODO(klausw,crbug.com/655722): stop
678 // doing this once we have working no-compositor rendering for WebVR.
679 if (gvr_api_->GetAsyncReprojectionEnabled()) {
680 int pose_index_byte = GetPixelEncodedPoseIndexByte();
681 if (WebVrPoseByteIsValidOnGL(pose_index_byte)) {
682 // We have a valid pose, use it for reprojection.
683 webvr_left_viewport_->SetReprojection(GVR_REPROJECTION_FULL);
684 webvr_right_viewport_->SetReprojection(GVR_REPROJECTION_FULL);
685 head_pose = webvr_head_pose_[pose_index_byte % kPoseRingBufferSize];
686 // We can't mark the used pose as invalid since unfortunately
687 // we have to reuse them. The compositor will re-submit stale
688 // frames on vsync, and we can't tell that this has happened
689 // until we've read the pose index from it, and at that point
690 // it's too late to skip rendering.
691 } else {
692 // If we don't get a valid frame ID back we shouldn't attempt
693 // to reproject by an invalid matrix, so turn off reprojection
694 // instead. Invalid poses can permanently break reprojection
695 // for this GVR instance: http://crbug.com/667327
696 webvr_left_viewport_->SetReprojection(GVR_REPROJECTION_NONE);
697 webvr_right_viewport_->SetReprojection(GVR_REPROJECTION_NONE);
698 }
699 }
700 }
701
702 DrawVrShellOnGL(head_pose, frame);
703
704 frame.Unbind();
705 frame.Submit(*buffer_viewport_list_, head_pose);
706 }
707
708 void VrShell::DrawVrShellOnGL(const gvr::Mat4f& head_pose,
709 gvr::Frame &frame) {
710 TRACE_EVENT0("gpu", "VrShell::DrawVrShell");
711 std::vector<const ContentRectangle*> head_locked_elements;
712 std::vector<const ContentRectangle*> world_elements;
713 for (const auto& rect : scene_->GetUiElements()) {
714 if (!rect->visible) {
715 continue;
716 }
717 if (rect->lock_to_fov) {
718 head_locked_elements.push_back(rect.get());
719 } else {
720 world_elements.push_back(rect.get());
721 }
722 }
723
724 if (html_interface_->GetMode() == UiInterface::Mode::WEB_VR) {
725 // WebVR is incompatible with 3D world compositing since the
726 // depth buffer was already populated with unknown scaling - the
727 // WebVR app has full control over zNear/zFar. Just leave the
728 // existing content in place in the primary buffer without
729 // clearing. Currently, there aren't any world elements in WebVR
730 // mode, this will need further testing if those get added
731 // later.
732 } else {
733 // Non-WebVR mode, enable depth testing and clear the primary buffers.
734 glEnable(GL_CULL_FACE);
735 glEnable(GL_DEPTH_TEST);
736 glDepthMask(GL_TRUE);
737
738 glClearColor(0.1f, 0.1f, 0.1f, 1.0f);
739 glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
740 }
741
742 if (!world_elements.empty()) {
743 DrawUiViewOnGL(&head_pose, world_elements, render_size_primary_,
744 kViewportListPrimaryOffset);
745 }
746
747 if (!head_locked_elements.empty()) {
748 // Add head-locked viewports. The list gets reset to just
749 // the recommended viewports (for the primary buffer) each frame.
750 buffer_viewport_list_->SetBufferViewport(
751 kViewportListHeadlockedOffset + GVR_LEFT_EYE,
752 *headlocked_left_viewport_);
753 buffer_viewport_list_->SetBufferViewport(
754 kViewportListHeadlockedOffset + GVR_RIGHT_EYE,
755 *headlocked_right_viewport_);
756
757 // Bind the headlocked framebuffer.
758 frame.BindBuffer(kFrameHeadlockedBuffer);
759 glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
760 glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
761 DrawUiViewOnGL(nullptr, head_locked_elements, render_size_headlocked_,
762 kViewportListHeadlockedOffset);
763 } 165 }
764 } 166 }
765 167
766 void VrShell::SetWebVRRenderSurfaceSize(int width, int height) { 168 void VrShell::SetWebVRRenderSurfaceSize(int width, int height) {
767 render_size_primary_webvr_.width = width; 169 GLThread* thread = static_cast<GLThread*>(gl_thread_.get());
768 render_size_primary_webvr_.height = height; 170 if (thread->GetVrShellGlUnsafe()) {
769 // TODO(klausw,crbug.com/655722): set the WebVR render surface size here once 171 thread->GetVrShellGlUnsafe()->SetWebVRRenderSurfaceSize(width, height);
770 // we have that. 172 }
771 } 173 }
772 174
773 gvr::Sizei VrShell::GetWebVRCompositorSurfaceSize() { 175 gvr::Sizei VrShell::GetWebVRCompositorSurfaceSize() {
774 // This is a stopgap while we're using the WebVR compositor rendering path. 176 GLThread* thread = static_cast<GLThread*>(gl_thread_.get());
775 // TODO(klausw,crbug.com/655722): Remove this method and member once we're 177 if (thread->GetVrShellGlUnsafe()) {
776 // using a separate WebVR render surface. 178 return thread->GetVrShellGlUnsafe()->GetWebVRCompositorSurfaceSize();
777 return content_tex_physical_size_; 179 }
180 return gvr::Sizei();
778 } 181 }
779 182
780 183 void VrShell::OnTriggerEvent(JNIEnv* env,
781 void VrShell::DrawUiViewOnGL(const gvr::Mat4f* head_pose, 184 const JavaParamRef<jobject>& obj) {
782 const std::vector<const ContentRectangle*>& elements, 185 GLThread* thread = static_cast<GLThread*>(gl_thread_.get());
783 const gvr::Sizei& render_size, int viewport_offset) { 186 thread->task_runner()->PostTask(FROM_HERE,
784 TRACE_EVENT0("gpu", "VrShell::DrawUiView"); 187 base::Bind(&VrShellGl::OnTriggerEvent,
785 for (auto eye : {GVR_LEFT_EYE, GVR_RIGHT_EYE}) { 188 thread->GetVrShellGl()));
786 buffer_viewport_list_->GetBufferViewport(
787 eye + viewport_offset, buffer_viewport_.get());
788
789 gvr::Mat4f view_matrix = gvr_api_->GetEyeFromHeadMatrix(eye);
790 if (head_pose != nullptr) {
791 view_matrix = MatrixMul(view_matrix, *head_pose);
792 }
793
794 gvr::Recti pixel_rect =
795 CalculatePixelSpaceRect(render_size, buffer_viewport_->GetSourceUv());
796 glViewport(pixel_rect.left, pixel_rect.bottom,
797 pixel_rect.right - pixel_rect.left,
798 pixel_rect.top - pixel_rect.bottom);
799
800 const gvr::Mat4f render_matrix = MatrixMul(
801 PerspectiveMatrixFromView(
802 buffer_viewport_->GetSourceFov(), kZNear, kZFar),
803 view_matrix);
804
805 DrawElementsOnGL(render_matrix, elements);
806 if (head_pose != nullptr &&
807 html_interface_->GetMode() != UiInterface::Mode::WEB_VR) {
808 DrawCursorOnGL(render_matrix);
809 }
810 }
811 } 189 }
812 190
813 void VrShell::DrawElementsOnGL( 191 void VrShell::OnPause(JNIEnv* env, const JavaParamRef<jobject>& obj) {
814 const gvr::Mat4f& render_matrix, 192 GLThread* thread = static_cast<GLThread*>(gl_thread_.get());
815 const std::vector<const ContentRectangle*>& elements) { 193 thread->task_runner()->PostTask(
816 for (const auto& rect : elements) { 194 FROM_HERE, base::Bind(&VrShellGl::OnPause, thread->GetVrShellGl()));
817 Rectf copy_rect;
818 jint texture_handle;
819 if (rect->content_quad) {
820 copy_rect = {0, 0, 1, 1};
821 texture_handle = content_texture_id_;
822 } else {
823 copy_rect.x = static_cast<float>(rect->copy_rect.x) / ui_tex_css_width_;
824 copy_rect.y = static_cast<float>(rect->copy_rect.y) / ui_tex_css_height_;
825 copy_rect.width = static_cast<float>(rect->copy_rect.width) /
826 ui_tex_css_width_;
827 copy_rect.height = static_cast<float>(rect->copy_rect.height) /
828 ui_tex_css_height_;
829 texture_handle = ui_texture_id_;
830 }
831 gvr::Mat4f transform = MatrixMul(render_matrix, rect->transform.to_world);
832 vr_shell_renderer_->GetTexturedQuadRenderer()->Draw(
833 texture_handle, transform, copy_rect);
834 }
835 }
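
Putting DrawUiViewOnGL and DrawElementsOnGL together, each visible element is drawn with the composite matrix perspective(eye FOV, kZNear, kZFar) * eye_from_head * head_pose * element.to_world; the first three factors are folded into render_matrix once per eye (the head_pose factor is omitted for the head-locked pass), and the per-element to_world factor is multiplied in here.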
836
837 void VrShell::DrawCursorOnGL(const gvr::Mat4f& render_matrix) {
838 gvr::Mat4f mat;
839 SetIdentityM(mat);
840
841 // Draw the reticle.
842
843 // Scale the pointer to have a fixed FOV size at any distance.
844 const float eye_to_target = Distance(target_point_, kOrigin);
845 ScaleM(mat, mat, kReticleWidth * eye_to_target,
846 kReticleHeight * eye_to_target, 1.0f);
847
848 gvr::Quatf rotation;
849 if (target_element_ != nullptr) {
850 // Make the reticle planar to the element it's hitting.
851 rotation = GetRotationFromZAxis(target_element_->GetNormal());
852 } else {
853 // Rotate the cursor to directly face the eyes.
854 rotation = GetRotationFromZAxis(target_point_);
855 }
856 mat = MatrixMul(QuatToMatrix(rotation), mat);
857
858 // Place the pointer slightly in front of the plane intersection point.
859 TranslateM(mat, mat, target_point_.x * kReticleOffset,
860 target_point_.y * kReticleOffset,
861 target_point_.z * kReticleOffset);
862
863 gvr::Mat4f transform = MatrixMul(render_matrix, mat);
864 vr_shell_renderer_->GetReticleRenderer()->Draw(transform);
865
866 // Draw the laser.
867
868 // Find the length of the beam (from hand to target).
869 const float laser_length = Distance(kHandPosition, target_point_);
870
871 // Build a beam, originating from the origin.
872 SetIdentityM(mat);
873
874 // Move the beam half its height so that its end sits on the origin.
875 TranslateM(mat, mat, 0.0f, 0.5f, 0.0f);
876 ScaleM(mat, mat, kLaserWidth, laser_length, 1);
877
878 // Tip back 90 degrees to flat, pointing at the scene.
879 const gvr::Quatf q = QuatFromAxisAngle({1.0f, 0.0f, 0.0f}, -M_PI / 2);
880 mat = MatrixMul(QuatToMatrix(q), mat);
881
882 const gvr::Vec3f beam_direction = {
883 target_point_.x - kHandPosition.x,
884 target_point_.y - kHandPosition.y,
885 target_point_.z - kHandPosition.z
886 };
887 const gvr::Mat4f beam_direction_mat =
888 QuatToMatrix(GetRotationFromZAxis(beam_direction));
889
890 // Render multiple faces to make the laser appear cylindrical.
891 const int faces = 4;
892 for (int i = 0; i < faces; i++) {
893 // Rotate around Z.
894 const float angle = M_PI * 2 * i / faces;
895 const gvr::Quatf rot = QuatFromAxisAngle({0.0f, 0.0f, 1.0f}, angle);
896 gvr::Mat4f face_transform = MatrixMul(QuatToMatrix(rot), mat);
897
898 // Orient according to target direction.
899 face_transform = MatrixMul(beam_direction_mat, face_transform);
900
901 // Move the beam origin to the hand.
902 TranslateM(face_transform, face_transform, kHandPosition.x, kHandPosition.y,
903 kHandPosition.z);
904
905 transform = MatrixMul(render_matrix, face_transform);
906 vr_shell_renderer_->GetLaserRenderer()->Draw(transform);
907 }
908 }
909
910 void VrShell::DrawWebVrOnGL() {
911 TRACE_EVENT0("gpu", "VrShell::DrawWebVr");
912 // Don't need face culling, depth testing, blending, etc. Turn it all off.
913 glDisable(GL_CULL_FACE);
914 glDepthMask(GL_FALSE);
915 glDisable(GL_DEPTH_TEST);
916 glDisable(GL_SCISSOR_TEST);
917 glDisable(GL_BLEND);
918 glDisable(GL_POLYGON_OFFSET_FILL);
919
920 glViewport(0, 0, render_size_primary_.width, render_size_primary_.height);
921 vr_shell_renderer_->GetWebVrRenderer()->Draw(webvr_texture_id_);
922
923 buffer_viewport_list_->SetBufferViewport(GVR_LEFT_EYE,
924 *webvr_left_viewport_);
925 buffer_viewport_list_->SetBufferViewport(GVR_RIGHT_EYE,
926 *webvr_right_viewport_);
927 }
928
929 void VrShell::OnTriggerEventOnUI(JNIEnv* env,
930 const JavaParamRef<jobject>& obj) {
931 // Set a flag to handle this on the render thread at the next frame.
932 touch_pending_ = true;
933 }
934
935 void VrShell::OnPauseOnUI(JNIEnv* env, const JavaParamRef<jobject>& obj) {
936 if (gvr_api_ == nullptr)
937 return;
938
939 // TODO(mthiesse): Clean up threading here.
940 controller_->OnPause();
941 gvr_api_->PauseTracking();
942 195
943 // exit vr session 196 // exit vr session
944 metrics_helper_->SetVRActive(false); 197 metrics_helper_->SetVRActive(false);
945 } 198 }
946 199
947 void VrShell::OnResumeOnUI(JNIEnv* env, const JavaParamRef<jobject>& obj) { 200 void VrShell::OnResume(JNIEnv* env, const JavaParamRef<jobject>& obj) {
948 if (gvr_api_ == nullptr) 201 GLThread* thread = static_cast<GLThread*>(gl_thread_.get());
949 return; 202 thread->task_runner()->PostTask(
950 203 FROM_HERE, base::Bind(&VrShellGl::OnResume, thread->GetVrShellGl()));
951 // TODO(mthiesse): Clean up threading here.
952 gvr_api_->RefreshViewerProfile();
953 gvr_api_->ResumeTracking();
954 controller_->OnResume();
955 204
956 // exit vr session 205 // exit vr session
957 metrics_helper_->SetVRActive(true); 206 metrics_helper_->SetVRActive(true);
958 } 207 }
959 208
960 base::WeakPtr<VrShell> VrShell::GetWeakPtrOnUI( 209 base::WeakPtr<VrShell> VrShell::GetWeakPtr(
961 const content::WebContents* web_contents) { 210 const content::WebContents* web_contents) {
962 // Ensure that the WebContents requesting the VrShell instance is the one 211 // Ensure that the WebContents requesting the VrShell instance is the one
963 // we created. 212 // we created.
964 if (g_instance != nullptr && g_instance->ui_contents_ == web_contents) 213 if (g_instance != nullptr && g_instance->ui_contents_ == web_contents)
965 return g_instance->weak_ptr_factory_.GetWeakPtr(); 214 return g_instance->weak_ptr_factory_.GetWeakPtr();
966 return base::WeakPtr<VrShell>(nullptr); 215 return base::WeakPtr<VrShell>(nullptr);
967 } 216 }
968 217
969 void VrShell::OnDomContentsLoadedOnUI() { 218 void VrShell::OnDomContentsLoaded() {
970 html_interface_->SetURL(main_contents_->GetVisibleURL()); 219 html_interface_->SetURL(main_contents_->GetVisibleURL());
971 html_interface_->SetLoading(main_contents_->IsLoading()); 220 html_interface_->SetLoading(main_contents_->IsLoading());
972 html_interface_->OnDomContentsLoaded(); 221 html_interface_->OnDomContentsLoaded();
973 } 222 }
974 223
975 void VrShell::SetWebVrModeOnUI(JNIEnv* env, 224 void VrShell::SetWebVrMode(JNIEnv* env,
976 const base::android::JavaParamRef<jobject>& obj, 225 const base::android::JavaParamRef<jobject>& obj,
977 bool enabled) { 226 bool enabled) {
978 metrics_helper_->SetWebVREnabled(enabled); 227 metrics_helper_->SetWebVREnabled(enabled);
979 if (enabled) { 228 if (enabled) {
980 html_interface_->SetMode(UiInterface::Mode::WEB_VR); 229 html_interface_->SetMode(UiInterface::Mode::WEB_VR);
981 } else { 230 } else {
982 html_interface_->SetMode(UiInterface::Mode::STANDARD); 231 html_interface_->SetMode(UiInterface::Mode::STANDARD);
983 } 232 }
984 } 233 }
985 234
986 void VrShell::SetWebVRSecureOrigin(bool secure_origin) { 235 void VrShell::SetWebVRSecureOrigin(bool secure_origin) {
987 // TODO(cjgrant): Align this state with the logic that drives the omnibox. 236 // TODO(cjgrant): Align this state with the logic that drives the omnibox.
988 html_interface_->SetWebVRSecureOrigin(secure_origin); 237 html_interface_->SetWebVRSecureOrigin(secure_origin);
989 } 238 }
990 239
991 void VrShell::SubmitWebVRFrame() {} 240 void VrShell::SubmitWebVRFrame() {}
992 241
993 void VrShell::UpdateWebVRTextureBounds(const gvr::Rectf& left_bounds, 242 void VrShell::UpdateWebVRTextureBounds(const gvr::Rectf& left_bounds,
994 const gvr::Rectf& right_bounds) { 243 const gvr::Rectf& right_bounds) {
995 webvr_left_viewport_->SetSourceUv(left_bounds); 244 GLThread* thread = static_cast<GLThread*>(gl_thread_.get());
996 webvr_right_viewport_->SetSourceUv(right_bounds); 245 if (thread->GetVrShellGlUnsafe()) {
246 thread->GetVrShellGlUnsafe()->UpdateWebVRTextureBounds(left_bounds,
247 right_bounds);
248 }
997 } 249 }
998 250
251 // TODO(mthiesse): Do not expose GVR API outside of GL thread.
252 // It's not thread-safe.
999 gvr::GvrApi* VrShell::gvr_api() { 253 gvr::GvrApi* VrShell::gvr_api() {
1000 return gvr_api_.get(); 254 GLThread* thread = static_cast<GLThread*>(gl_thread_.get());
255 if (thread->GetVrShellGlUnsafe()) {
256 return thread->GetVrShellGlUnsafe()->gvr_api();
257 }
258 CHECK(false);
259 return nullptr;
1001 } 260 }
1002 261
1003 void VrShell::SurfacesChangedOnUI(JNIEnv* env, 262 void VrShell::SurfacesChanged(jobject content_surface, jobject ui_surface) {
1004 const JavaParamRef<jobject>& object,
1005 const JavaParamRef<jobject>& content_surface,
1006 const JavaParamRef<jobject>& ui_surface) {
1007 content_compositor_->SurfaceChanged(content_surface); 263 content_compositor_->SurfaceChanged(content_surface);
1008 ui_compositor_->SurfaceChanged(ui_surface); 264 ui_compositor_->SurfaceChanged(ui_surface);
1009 } 265 }
1010 266
1011 void VrShell::ContentBoundsChangedOnUI(JNIEnv* env, 267 void VrShell::GvrDelegateReady() {
268 delegate_->SetDelegate(weak_ptr_factory_.GetWeakPtr());
269 }
270
271 void VrShell::ContentBoundsChanged(JNIEnv* env,
1012 const JavaParamRef<jobject>& object, 272 const JavaParamRef<jobject>& object,
1013 jint width, jint height, jfloat dpr) { 273 jint width, jint height, jfloat dpr) {
1014 TRACE_EVENT0("gpu", "VrShell::ContentBoundsChanged"); 274 TRACE_EVENT0("gpu", "VrShell::ContentBoundsChanged");
1015 content_tex_physical_size_.width = width; 275 GLThread* thread = static_cast<GLThread*>(gl_thread_.get());
1016 content_tex_physical_size_.height = height; 276 // TODO(mthiesse): Remove this blocking wait. Queue up events if thread isn't
1017 // TODO(mthiesse): Synchronize with GL thread, and update tex css size in 277 // finished starting?
1018 // response to MainFrameWasResized, not here. 278 thread->WaitUntilThreadStarted();
1019 content_tex_css_width_ = width / dpr; 279 CHECK(thread->task_runner()->PostTask(
1020 content_tex_css_height_ = height / dpr; 280 FROM_HERE, base::Bind(&VrShellGl::ContentPhysicalBoundsChanged,
1021 281 thread->GetVrShellGl(),
282 width, height)));
1022 content_compositor_->SetWindowBounds(width, height); 283 content_compositor_->SetWindowBounds(width, height);
1023 } 284 }
1024 285
1025 void VrShell::UIBoundsChangedOnUI(JNIEnv* env, 286 void VrShell::UIBoundsChanged(JNIEnv* env,
1026 const JavaParamRef<jobject>& object, 287 const JavaParamRef<jobject>& object,
1027 jint width, jint height, jfloat dpr) { 288 jint width, jint height, jfloat dpr) {
289 GLThread* thread = static_cast<GLThread*>(gl_thread_.get());
290 // TODO(mthiesse): Remove this blocking wait. Queue up events if thread isn't
291 // finished starting?
292 thread->WaitUntilThreadStarted();
293 thread->task_runner()->PostTask(
294 FROM_HERE, base::Bind(&VrShellGl::UIPhysicalBoundsChanged,
295 thread->GetVrShellGl(),
296 width, height));
1028 ui_compositor_->SetWindowBounds(width, height); 297 ui_compositor_->SetWindowBounds(width, height);
1029 } 298 }
1030 299
1031 UiScene* VrShell::GetSceneOnGL() { 300 UiScene* VrShell::GetScene() {
1032 return scene_.get(); 301 GLThread* thread = static_cast<GLThread*>(gl_thread_.get());
302 // TODO(mthiesse): Remove this blocking wait. Queue up events if thread isn't
303 // finished starting?
304 thread->WaitUntilThreadStarted();
305 if (thread->GetVrShellGlUnsafe()) {
306 return thread->GetVrShellGlUnsafe()->GetScene();
307 }
308 return nullptr;
1033 } 309 }
1034 310
1035 UiInterface* VrShell::GetUiInterfaceOnGL() { 311 UiInterface* VrShell::GetUiInterface() {
1036 return html_interface_.get(); 312 return html_interface_.get();
1037 } 313 }
1038 314
1039 void VrShell::QueueTaskOnUI(base::Callback<void()>& callback) { 315 void VrShell::QueueTask(base::Callback<void()>& callback) {
1040 base::AutoLock lock(task_queue_lock_); 316 gl_thread_->task_runner()->PostTask(FROM_HERE, callback);
1041 task_queue_.push(callback);
1042 } 317 }
1043 318
1044 void VrShell::HandleQueuedTasksOnGL() { 319 void VrShell::DoUiAction(const UiAction action) {
1045 // To protect a stream of tasks from blocking rendering indefinitely,
1046 // process only the number of tasks present when first checked.
1047 std::vector<base::Callback<void()>> tasks;
1048 {
1049 base::AutoLock lock(task_queue_lock_);
1050 const size_t count = task_queue_.size();
1051 for (size_t i = 0; i < count; i++) {
1052 tasks.push_back(task_queue_.front());
1053 task_queue_.pop();
1054 }
1055 }
1056 for (auto &task : tasks) {
1057 task.Run();
1058 }
1059 }
1060
1061 void VrShell::DoUiActionOnUI(const UiAction action) {
1062 content::NavigationController& controller = main_contents_->GetController(); 320 content::NavigationController& controller = main_contents_->GetController();
1063 switch (action) { 321 switch (action) {
1064 case HISTORY_BACK: 322 case HISTORY_BACK:
1065 if (main_contents_->IsFullscreen()) { 323 if (main_contents_->IsFullscreen()) {
1066 main_contents_->ExitFullscreen(true /* will_cause_resize */); 324 main_contents_->ExitFullscreen(true /* will_cause_resize */);
1067 } else if (controller.CanGoBack()) { 325 } else if (controller.CanGoBack()) {
1068 controller.GoBack(); 326 controller.GoBack();
1069 } 327 }
1070 break; 328 break;
1071 case HISTORY_FORWARD: 329 case HISTORY_FORWARD:
(...skipping 20 matching lines...)
1092 } 350 }
1093 351
1094 void VrShell::RenderViewHostChanged(content::RenderViewHost* old_host, 352 void VrShell::RenderViewHostChanged(content::RenderViewHost* old_host,
1095 content::RenderViewHost* new_host) { 353 content::RenderViewHost* new_host) {
1096 new_host->GetWidget()->GetView()->SetBackgroundColor(SK_ColorTRANSPARENT); 354 new_host->GetWidget()->GetView()->SetBackgroundColor(SK_ColorTRANSPARENT);
1097 } 355 }
1098 356
1099 void VrShell::MainFrameWasResized(bool width_changed) { 357 void VrShell::MainFrameWasResized(bool width_changed) {
1100 display::Display display = display::Screen::GetScreen() 358 display::Display display = display::Screen::GetScreen()
1101 ->GetDisplayNearestWindow(ui_contents_->GetNativeView()); 359 ->GetDisplayNearestWindow(ui_contents_->GetNativeView());
1102 // TODO(mthiesse): Synchronize with GL thread. 360 GLThread* thread = static_cast<GLThread*>(gl_thread_.get());
1103 ui_tex_css_width_ = display.size().width(); 361 // TODO(mthiesse): Remove this blocking wait. Queue up events if thread isn't
1104 ui_tex_css_height_ = display.size().height(); 362 // finished starting?
363 thread->WaitUntilThreadStarted();
364 thread->task_runner()->PostTask(
365 FROM_HERE, base::Bind(&VrShellGl::UIBoundsChanged,
366 thread->GetVrShellGl(),
367 display.size().width(), display.size().height()));
368 }
369
370 void VrShell::ContentFrameWasResized(bool width_changed) {
371 display::Display display = display::Screen::GetScreen()
372 ->GetDisplayNearestWindow(main_contents_->GetNativeView());
373 GLThread* thread = static_cast<GLThread*>(gl_thread_.get());
374 // TODO(mthiesse): Remove this blocking wait. Queue up events if thread isn't
375 // finished starting?
376 thread->WaitUntilThreadStarted();
377 thread->task_runner()->PostTask(
378 FROM_HERE, base::Bind(&VrShellGl::ContentBoundsChanged,
379 thread->GetVrShellGl(),
380 display.size().width(), display.size().height()));
1105 } 381 }
1106 382
1107 void VrShell::WebContentsDestroyed() { 383 void VrShell::WebContentsDestroyed() {
1108 ui_input_manager_.reset(); 384 ui_input_manager_.reset();
1109 ui_contents_ = nullptr; 385 ui_contents_ = nullptr;
1110 // TODO(mthiesse): Handle web contents being destroyed. 386 // TODO(mthiesse): Handle web contents being destroyed.
1111 delegate_->ForceExitVr(); 387 delegate_->ForceExitVr();
1112 } 388 }
1113 389
1114 void VrShell::ContentWebContentsDestroyedOnUI() { 390 void VrShell::ContentWebContentsDestroyed() {
1115 content_input_manager_.reset(); 391 content_input_manager_.reset();
1116 main_contents_ = nullptr; 392 main_contents_ = nullptr;
1117 // TODO(mthiesse): Handle web contents being destroyed. 393 // TODO(mthiesse): Handle web contents being destroyed.
1118 delegate_->ForceExitVr(); 394 delegate_->ForceExitVr();
1119 } 395 }
1120 396
1121 void VrShell::ContentWasHiddenOnUI() { 397 void VrShell::ContentWasHidden() {
1122 // Ensure we don't continue sending input to it. 398 // Ensure we don't continue sending input to it.
1123 content_input_manager_.reset(); 399 content_input_manager_.reset();
1124 // TODO(mthiesse): Handle web contents being hidden. 400 // TODO(mthiesse): Handle web contents being hidden.
1125 delegate_->ForceExitVr(); 401 delegate_->ForceExitVr();
1126 } 402 }
1127 403
1128 void VrShell::SetContentCssSizeOnUI(float width, float height, float dpr) { 404 void VrShell::SetContentCssSize(float width, float height, float dpr) {
1129 JNIEnv* env = base::android::AttachCurrentThread(); 405 JNIEnv* env = base::android::AttachCurrentThread();
1130 Java_VrShellImpl_setContentCssSizeOnUI(env, j_vr_shell_.obj(), width, height, 406 Java_VrShellImpl_setContentCssSize(env, j_vr_shell_.obj(), width, height,
1131 dpr); 407 dpr);
1132 } 408 }
1133 409
1134 void VrShell::SetUiCssSizeOnUI(float width, float height, float dpr) { 410 void VrShell::SetUiCssSize(float width, float height, float dpr) {
1135 JNIEnv* env = base::android::AttachCurrentThread(); 411 JNIEnv* env = base::android::AttachCurrentThread();
1136 Java_VrShellImpl_setUiCssSizeOnUI(env, j_vr_shell_.obj(), width, height, dpr); 412 Java_VrShellImpl_setUiCssSize(env, j_vr_shell_.obj(), width, height, dpr);
1137 } 413 }
1138 414
1139 // ---------------------------------------------------------------------------- 415 // ----------------------------------------------------------------------------
1140 // Native JNI methods 416 // Native JNI methods
1141 // ---------------------------------------------------------------------------- 417 // ----------------------------------------------------------------------------
1142 418
1143 jlong InitOnUI(JNIEnv* env, 419 jlong Init(JNIEnv* env, const JavaParamRef<jobject>& obj,
1144 const JavaParamRef<jobject>& obj, 420 const JavaParamRef<jobject>& content_web_contents,
1145 const JavaParamRef<jobject>& content_web_contents, 421 jlong content_window_android,
1146 jlong content_window_android, 422 const JavaParamRef<jobject>& ui_web_contents,
1147 const JavaParamRef<jobject>& ui_web_contents, 423 jlong ui_window_android, jboolean for_web_vr,
1148 jlong ui_window_android, 424 const base::android::JavaParamRef<jobject>& delegate,
1149 jboolean for_web_vr) { 425 jlong gvr_api) {
1150 return reinterpret_cast<intptr_t>(new VrShell( 426 return reinterpret_cast<intptr_t>(new VrShell(
1151 env, obj, content::WebContents::FromJavaWebContents(content_web_contents), 427 env, obj, content::WebContents::FromJavaWebContents(content_web_contents),
1152 reinterpret_cast<ui::WindowAndroid*>(content_window_android), 428 reinterpret_cast<ui::WindowAndroid*>(content_window_android),
1153 content::WebContents::FromJavaWebContents(ui_web_contents), 429 content::WebContents::FromJavaWebContents(ui_web_contents),
1154 reinterpret_cast<ui::WindowAndroid*>(ui_window_android), 430 reinterpret_cast<ui::WindowAndroid*>(ui_window_android),
1155 for_web_vr)); 431 for_web_vr, VrShellDelegate::GetNativeDelegate(env, delegate),
432 reinterpret_cast<gvr_context*>(gvr_api)));
1156 } 433 }
1157 434
1158 } // namespace vr_shell 435 } // namespace vr_shell