| OLD | NEW |
| 1 // Copyright 2016 The Chromium Authors. All rights reserved. | 1 // Copyright 2016 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "chrome/browser/android/vr_shell/vr_shell.h" | 5 #include "chrome/browser/android/vr_shell/vr_shell.h" |
| 6 | 6 |
| 7 #include "base/metrics/histogram_macros.h" | 7 #include "base/metrics/histogram_macros.h" |
| 8 #include "chrome/browser/android/vr_shell/ui_elements.h" | 8 #include "chrome/browser/android/vr_shell/ui_elements.h" |
| 9 #include "chrome/browser/android/vr_shell/ui_interface.h" | 9 #include "chrome/browser/android/vr_shell/ui_interface.h" |
| 10 #include "chrome/browser/android/vr_shell/ui_scene.h" | 10 #include "chrome/browser/android/vr_shell/ui_scene.h" |
| (...skipping 159 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 170 if (for_web_vr) | 170 if (for_web_vr) |
| 171 metrics_helper_->SetWebVREnabled(true); | 171 metrics_helper_->SetWebVREnabled(true); |
| 172 html_interface_.reset(new UiInterface( | 172 html_interface_.reset(new UiInterface( |
| 173 for_web_vr ? UiInterface::Mode::WEB_VR : UiInterface::Mode::STANDARD, | 173 for_web_vr ? UiInterface::Mode::WEB_VR : UiInterface::Mode::STANDARD, |
| 174 main_contents_->IsFullscreen())); | 174 main_contents_->IsFullscreen())); |
| 175 content_compositor_.reset(new VrCompositor(content_window, false)); | 175 content_compositor_.reset(new VrCompositor(content_window, false)); |
| 176 ui_compositor_.reset(new VrCompositor(ui_window, true)); | 176 ui_compositor_.reset(new VrCompositor(ui_window, true)); |
| 177 vr_web_contents_observer_.reset(new VrWebContentsObserver( | 177 vr_web_contents_observer_.reset(new VrWebContentsObserver( |
| 178 main_contents, html_interface_.get(), this)); | 178 main_contents, html_interface_.get(), this)); |
| 179 | 179 |
| 180 LoadUIContent(); | 180 LoadUIContentOnUI(); |
| 181 | 181 |
| 182 gvr::Mat4f identity; | 182 gvr::Mat4f identity; |
| 183 SetIdentityM(identity); | 183 SetIdentityM(identity); |
| 184 webvr_head_pose_.resize(kPoseRingBufferSize, identity); | 184 webvr_head_pose_.resize(kPoseRingBufferSize, identity); |
| 185 webvr_head_pose_valid_.resize(kPoseRingBufferSize, false); | 185 webvr_head_pose_valid_.resize(kPoseRingBufferSize, false); |
| 186 | 186 |
| 187 content_input_manager_.reset(new VrInputManager(main_contents_)); | 187 content_input_manager_.reset(new VrInputManager(main_contents_)); |
| 188 ui_input_manager_.reset(new VrInputManager(ui_contents_)); | 188 ui_input_manager_.reset(new VrInputManager(ui_contents_)); |
| 189 weak_content_input_manager_ = content_input_manager_->GetWeakPtr(); | 189 weak_content_input_manager_ = content_input_manager_->GetWeakPtr(); |
| 190 weak_ui_input_manager_ = ui_input_manager_->GetWeakPtr(); | 190 weak_ui_input_manager_ = ui_input_manager_->GetWeakPtr(); |
| 191 } | 191 } |
| 192 | 192 |
| 193 void VrShell::UpdateCompositorLayers(JNIEnv* env, | 193 void VrShell::UpdateCompositorLayersOnUI(JNIEnv* env, |
| 194 const JavaParamRef<jobject>& obj) { | 194 const JavaParamRef<jobject>& obj) { |
| 195 content_compositor_->SetLayer(main_contents_); | 195 content_compositor_->SetLayer(main_contents_); |
| 196 ui_compositor_->SetLayer(ui_contents_); | 196 ui_compositor_->SetLayer(ui_contents_); |
| 197 } | 197 } |
| 198 | 198 |
| 199 void VrShell::Destroy(JNIEnv* env, const JavaParamRef<jobject>& obj) { | 199 void VrShell::DestroyOnUI(JNIEnv* env, const JavaParamRef<jobject>& obj) { |
| 200 delete this; | 200 delete this; |
| 201 } | 201 } |
| 202 | 202 |
| 203 void VrShell::LoadUIContent() { | 203 void VrShell::LoadUIContentOnUI() { |
| 204 GURL url(kVrShellUIURL); | 204 GURL url(kVrShellUIURL); |
| 205 ui_contents_->GetController().LoadURL( | 205 ui_contents_->GetController().LoadURL( |
| 206 url, content::Referrer(), | 206 url, content::Referrer(), |
| 207 ui::PageTransition::PAGE_TRANSITION_AUTO_TOPLEVEL, std::string("")); | 207 ui::PageTransition::PAGE_TRANSITION_AUTO_TOPLEVEL, std::string("")); |
| 208 } | 208 } |
| 209 | 209 |
| 210 bool RegisterVrShell(JNIEnv* env) { | 210 bool RegisterVrShell(JNIEnv* env) { |
| 211 return RegisterNativesImpl(env); | 211 return RegisterNativesImpl(env); |
| 212 } | 212 } |
| 213 | 213 |
| 214 VrShell::~VrShell() { | 214 VrShell::~VrShell() { |
| 215 if (delegate_ && delegate_->GetDeviceProvider()) { | 215 if (delegate_ && delegate_->GetDeviceProvider()) { |
| 216 delegate_->GetDeviceProvider()->OnGvrDelegateRemoved(); | 216 delegate_->GetDeviceProvider()->OnGvrDelegateRemoved(); |
| 217 } | 217 } |
| 218 g_instance = nullptr; | 218 g_instance = nullptr; |
| 219 gl::init::ShutdownGL(); | 219 gl::init::ShutdownGL(); |
| 220 } | 220 } |
| 221 | 221 |
| 222 void VrShell::SetDelegate(JNIEnv* env, | 222 void VrShell::SetDelegateOnUI(JNIEnv* env, |
| 223 const base::android::JavaParamRef<jobject>& obj, | 223 const base::android::JavaParamRef<jobject>& obj, |
| 224 const base::android::JavaParamRef<jobject>& delegate) { | 224 const base::android::JavaParamRef<jobject>& delegate) { |
| 225 base::AutoLock lock(gvr_init_lock_); | 225 base::AutoLock lock(gvr_init_lock_); |
| 226 delegate_ = VrShellDelegate::GetNativeDelegate(env, delegate); | 226 delegate_ = VrShellDelegate::GetNativeDelegate(env, delegate); |
| 227 if (swap_chain_.get()) { | 227 if (swap_chain_.get()) { |
| 228 main_thread_task_runner_->PostTask( | 228 delegate_->GetDeviceProvider()->OnGvrDelegateReady( |
| 229 FROM_HERE, base::Bind(&device::GvrDeviceProvider::OnGvrDelegateReady, | 229 weak_ptr_factory_.GetWeakPtr()); |
| 230 delegate_->GetDeviceProvider(), | |
| 231 weak_ptr_factory_.GetWeakPtr())); | |
| 232 } | 230 } |
| 233 } | 231 } |
| 234 | 232 |
| 235 enum class ViewerType { | 233 enum class ViewerType { |
| 236 UNKNOWN_TYPE = 0, | 234 UNKNOWN_TYPE = 0, |
| 237 CARDBOARD = 1, | 235 CARDBOARD = 1, |
| 238 DAYDREAM = 2, | 236 DAYDREAM = 2, |
| 239 VIEWER_TYPE_MAX, | 237 VIEWER_TYPE_MAX, |
| 240 }; | 238 }; |
| 241 | 239 |
| 242 void VrShell::GvrInit(JNIEnv* env, | 240 void VrShell::GvrInitOnGL(JNIEnv* env, |
| 243 const JavaParamRef<jobject>& obj, | 241 const JavaParamRef<jobject>& obj, |
| 244 jlong native_gvr_api) { | 242 jlong native_gvr_api) { |
| 245 // set the initial webvr state | 243 // set the initial webvr state |
| 246 metrics_helper_->SetVRActive(true); | 244 metrics_helper_->SetVRActive(true); |
| 247 | 245 |
| 248 gvr_api_ = | 246 gvr_api_ = |
| 249 gvr::GvrApi::WrapNonOwned(reinterpret_cast<gvr_context*>(native_gvr_api)); | 247 gvr::GvrApi::WrapNonOwned(reinterpret_cast<gvr_context*>(native_gvr_api)); |
| 250 // TODO(klausw,crbug.com/655722): should report OnGvrDelegateReady here once | 248 // TODO(klausw,crbug.com/655722): should report OnGvrDelegateReady here once |
| 251 // we switch to using a WebVR render surface. We currently need to wait for | 249 // we switch to using a WebVR render surface. We currently need to wait for |
| 252 // the compositor window's size to be known first. See also | 250 // the compositor window's size to be known first. See also |
| (...skipping 12 matching lines...) Expand all Loading... |
| 265 break; | 263 break; |
| 266 default: | 264 default: |
| 267 NOTREACHED(); | 265 NOTREACHED(); |
| 268 viewerType = ViewerType::UNKNOWN_TYPE; | 266 viewerType = ViewerType::UNKNOWN_TYPE; |
| 269 break; | 267 break; |
| 270 } | 268 } |
| 271 UMA_HISTOGRAM_ENUMERATION("VRViewerType", static_cast<int>(viewerType), | 269 UMA_HISTOGRAM_ENUMERATION("VRViewerType", static_cast<int>(viewerType), |
| 272 static_cast<int>(ViewerType::VIEWER_TYPE_MAX)); | 270 static_cast<int>(ViewerType::VIEWER_TYPE_MAX)); |
| 273 } | 271 } |
| 274 | 272 |
| 275 void VrShell::InitializeGl(JNIEnv* env, | 273 void VrShell::InitializeGlOnGL(JNIEnv* env, |
| 276 const JavaParamRef<jobject>& obj, | 274 const JavaParamRef<jobject>& obj, |
| 277 jint content_texture_handle, | 275 jint content_texture_handle, |
| 278 jint ui_texture_handle) { | 276 jint ui_texture_handle) { |
| 279 base::AutoLock lock(gvr_init_lock_); | 277 base::AutoLock lock(gvr_init_lock_); |
| 280 CHECK(gl::GetGLImplementation() != gl::kGLImplementationNone || | 278 CHECK(gl::GetGLImplementation() != gl::kGLImplementationNone || |
| 281 gl::init::InitializeGLOneOff()); | 279 gl::init::InitializeGLOneOff()); |
| 282 | 280 |
| 283 content_texture_id_ = content_texture_handle; | 281 content_texture_id_ = content_texture_handle; |
| 284 ui_texture_id_ = ui_texture_handle; | 282 ui_texture_id_ = ui_texture_handle; |
| 285 | 283 |
| (...skipping 72 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 358 webvr_right_viewport_->SetSourceBufferIndex(kFramePrimaryBuffer); | 356 webvr_right_viewport_->SetSourceBufferIndex(kFramePrimaryBuffer); |
| 359 | 357 |
| 360 if (delegate_) { | 358 if (delegate_) { |
| 361 main_thread_task_runner_->PostTask( | 359 main_thread_task_runner_->PostTask( |
| 362 FROM_HERE, base::Bind(&device::GvrDeviceProvider::OnGvrDelegateReady, | 360 FROM_HERE, base::Bind(&device::GvrDeviceProvider::OnGvrDelegateReady, |
| 363 delegate_->GetDeviceProvider(), | 361 delegate_->GetDeviceProvider(), |
| 364 weak_ptr_factory_.GetWeakPtr())); | 362 weak_ptr_factory_.GetWeakPtr())); |
| 365 } | 363 } |
| 366 } | 364 } |
| 367 | 365 |
| 368 void VrShell::UpdateController(const gvr::Vec3f& forward_vector) { | 366 void VrShell::UpdateControllerOnGL(const gvr::Vec3f& forward_vector) { |
| 369 controller_->UpdateState(); | 367 controller_->UpdateState(); |
| 370 | 368 |
| 371 #if defined(ENABLE_VR_SHELL) | 369 #if defined(ENABLE_VR_SHELL) |
| 372 // Note that button up/down state is transient, so IsButtonUp only returns | 370 // Note that button up/down state is transient, so IsButtonUp only returns |
| 373 // true for a single frame (and we're guaranteed not to miss it). | 371 // true for a single frame (and we're guaranteed not to miss it). |
| 374 if (controller_->IsButtonUp( | 372 if (controller_->IsButtonUp( |
| 375 gvr::ControllerButton::GVR_CONTROLLER_BUTTON_APP)) { | 373 gvr::ControllerButton::GVR_CONTROLLER_BUTTON_APP)) { |
| 376 html_interface_->SetMenuMode(!html_interface_->GetMenuMode()); | 374 html_interface_->SetMenuMode(!html_interface_->GetMenuMode()); |
| 377 | 375 |
| 378 // TODO(mthiesse): The page is no longer visible when in menu mode. We | 376 // TODO(mthiesse): The page is no longer visible when in menu mode. We |
| (...skipping 13 matching lines...) Expand all Loading... |
| 392 if (touch_pending_ || controller_->IsButtonUp( | 390 if (touch_pending_ || controller_->IsButtonUp( |
| 393 gvr::ControllerButton::GVR_CONTROLLER_BUTTON_CLICK)) { | 391 gvr::ControllerButton::GVR_CONTROLLER_BUTTON_CLICK)) { |
| 394 touch_pending_ = false; | 392 touch_pending_ = false; |
| 395 std::unique_ptr<WebGestureEvent> gesture(new WebGestureEvent()); | 393 std::unique_ptr<WebGestureEvent> gesture(new WebGestureEvent()); |
| 396 gesture->sourceDevice = blink::WebGestureDeviceTouchpad; | 394 gesture->sourceDevice = blink::WebGestureDeviceTouchpad; |
| 397 gesture->timeStampSeconds = | 395 gesture->timeStampSeconds = |
| 398 (base::TimeTicks::Now() - base::TimeTicks()).InSecondsF(); | 396 (base::TimeTicks::Now() - base::TimeTicks()).InSecondsF(); |
| 399 gesture->type = WebInputEvent::GestureTapDown; | 397 gesture->type = WebInputEvent::GestureTapDown; |
| 400 gesture->data.tapDown.width = 0; | 398 gesture->data.tapDown.width = 0; |
| 401 gesture->data.tapDown.height = 0; | 399 gesture->data.tapDown.height = 0; |
| 402 SendGesture(CONTENT, std::move(gesture)); | 400 SendGestureOnGL(CONTENT, std::move(gesture)); |
| 403 } | 401 } |
| 404 | 402 |
| 405 return; | 403 return; |
| 406 } | 404 } |
| 407 | 405 |
| 408 gvr::Vec3f ergo_neutral_pose; | 406 gvr::Vec3f ergo_neutral_pose; |
| 409 if (!controller_->IsConnected()) { | 407 if (!controller_->IsConnected()) { |
| 410 // No controller detected, set up a gaze cursor that tracks the | 408 // No controller detected, set up a gaze cursor that tracks the |
| 411 // forward direction. | 409 // forward direction. |
| 412 ergo_neutral_pose = {0.0f, 0.0f, -1.0f}; | 410 ergo_neutral_pose = {0.0f, 0.0f, -1.0f}; |
| (...skipping 74 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 487 plane->copy_rect.width, plane->copy_rect.height}; | 485 plane->copy_rect.width, plane->copy_rect.height}; |
| 488 } | 486 } |
| 489 pixel_x = pixel_rect.width * x + pixel_rect.x; | 487 pixel_x = pixel_rect.width * x + pixel_rect.x; |
| 490 pixel_y = pixel_rect.height * y + pixel_rect.y; | 488 pixel_y = pixel_rect.height * y + pixel_rect.y; |
| 491 | 489 |
| 492 target_point_ = plane_intersection_point; | 490 target_point_ = plane_intersection_point; |
| 493 target_element_ = plane.get(); | 491 target_element_ = plane.get(); |
| 494 input_target = plane->content_quad ? CONTENT : UI; | 492 input_target = plane->content_quad ? CONTENT : UI; |
| 495 } | 493 } |
| 496 } | 494 } |
| 497 SendEventsToTarget(input_target, pixel_x, pixel_y); | 495 SendEventsToTargetOnGL(input_target, pixel_x, pixel_y); |
| 498 } | 496 } |
| 499 | 497 |
| 500 void VrShell::SendEventsToTarget(InputTarget input_target, | 498 void VrShell::SendEventsToTargetOnGL(InputTarget input_target, |
| 501 int pixel_x, | 499 int pixel_x, |
| 502 int pixel_y) { | 500 int pixel_y) { |
| 503 std::vector<std::unique_ptr<WebGestureEvent>> gesture_list = | 501 std::vector<std::unique_ptr<WebGestureEvent>> gesture_list = |
| 504 controller_->DetectGestures(); | 502 controller_->DetectGestures(); |
| 505 std::unique_ptr<WebGestureEvent> gesture = std::move(gesture_list.front()); | 503 std::unique_ptr<WebGestureEvent> gesture = std::move(gesture_list.front()); |
| 506 | 504 |
| 507 // TODO(asimjour) for now, scroll is sent to the main content. | 505 // TODO(asimjour) for now, scroll is sent to the main content. |
| 508 if (gesture->type == WebInputEvent::GestureScrollBegin || | 506 if (gesture->type == WebInputEvent::GestureScrollBegin || |
| 509 gesture->type == WebInputEvent::GestureScrollUpdate || | 507 gesture->type == WebInputEvent::GestureScrollUpdate || |
| 510 gesture->type == WebInputEvent::GestureScrollEnd || | 508 gesture->type == WebInputEvent::GestureScrollEnd || |
| 511 gesture->type == WebInputEvent::GestureFlingCancel) { | 509 gesture->type == WebInputEvent::GestureFlingCancel) { |
| 512 SendGesture(CONTENT, base::WrapUnique(new WebGestureEvent(*gesture))); | 510 SendGestureOnGL(CONTENT, base::WrapUnique(new WebGestureEvent(*gesture))); |
| 513 } | 511 } |
| 514 | 512 |
| 515 if (gesture->type == WebInputEvent::GestureScrollEnd) { | 513 if (gesture->type == WebInputEvent::GestureScrollEnd) { |
| 516 CHECK(gesture_list.size() == 2); | 514 CHECK(gesture_list.size() == 2); |
| 517 if (gesture_list.back()->type == WebInputEvent::GestureTapDown) { | 515 if (gesture_list.back()->type == WebInputEvent::GestureTapDown) { |
| 518 gesture_list.back()->data.tapDown.width = pixel_x; | 516 gesture_list.back()->data.tapDown.width = pixel_x; |
| 519 gesture_list.back()->data.tapDown.height = pixel_y; | 517 gesture_list.back()->data.tapDown.height = pixel_y; |
| 520 if (input_target != NONE) | 518 if (input_target != NONE) |
| 521 SendGesture(input_target, std::move(gesture_list.back())); | 519 SendGestureOnGL(input_target, std::move(gesture_list.back())); |
| 522 } else if (gesture_list.back()->type == WebInputEvent::GestureFlingStart) { | 520 } else if (gesture_list.back()->type == WebInputEvent::GestureFlingStart) { |
| 523 SendGesture(CONTENT, std::move(gesture_list.back())); | 521 SendGestureOnGL(CONTENT, std::move(gesture_list.back())); |
| 524 } else { | 522 } else { |
| 525 NOTREACHED(); | 523 NOTREACHED(); |
| 526 } | 524 } |
| 527 } | 525 } |
| 528 | 526 |
| 529 WebInputEvent::Type original_type = gesture->type; | 527 WebInputEvent::Type original_type = gesture->type; |
| 530 | 528 |
| 531 bool new_target = input_target != current_input_target_; | 529 bool new_target = input_target != current_input_target_; |
| 532 if (new_target && current_input_target_ != NONE) { | 530 if (new_target && current_input_target_ != NONE) { |
| 533 // Send a move event indicating that the pointer moved off of an element. | 531 // Send a move event indicating that the pointer moved off of an element. |
| 534 SendGesture(current_input_target_, MakeMouseEvent( | 532 SendGestureOnGL(current_input_target_, MakeMouseEvent( |
| 535 WebInputEvent::MouseLeave, gesture->timeStampSeconds, 0, 0)); | 533 WebInputEvent::MouseLeave, gesture->timeStampSeconds, 0, 0)); |
| 536 } | 534 } |
| 537 current_input_target_ = input_target; | 535 current_input_target_ = input_target; |
| 538 if (current_input_target_ == NONE) { | 536 if (current_input_target_ == NONE) { |
| 539 return; | 537 return; |
| 540 } | 538 } |
| 541 WebInputEvent::Type type = | 539 WebInputEvent::Type type = |
| 542 new_target ? WebInputEvent::MouseEnter : WebInputEvent::MouseMove; | 540 new_target ? WebInputEvent::MouseEnter : WebInputEvent::MouseMove; |
| 543 SendGesture(current_input_target_, MakeMouseEvent( | 541 SendGestureOnGL(current_input_target_, MakeMouseEvent( |
| 544 type, gesture->timeStampSeconds, pixel_x, pixel_y)); | 542 type, gesture->timeStampSeconds, pixel_x, pixel_y)); |
| 545 | 543 |
| 546 if (original_type == WebInputEvent::GestureTapDown || touch_pending_) { | 544 if (original_type == WebInputEvent::GestureTapDown || touch_pending_) { |
| 547 std::unique_ptr<WebGestureEvent> event(new WebGestureEvent(*gesture)); | 545 std::unique_ptr<WebGestureEvent> event(new WebGestureEvent(*gesture)); |
| 548 if (touch_pending_) { | 546 if (touch_pending_) { |
| 549 touch_pending_ = false; | 547 touch_pending_ = false; |
| 550 event->sourceDevice = blink::WebGestureDeviceTouchpad; | 548 event->sourceDevice = blink::WebGestureDeviceTouchpad; |
| 551 event->timeStampSeconds = | 549 event->timeStampSeconds = |
| 552 (base::TimeTicks::Now() - base::TimeTicks()).InSecondsF(); | 550 (base::TimeTicks::Now() - base::TimeTicks()).InSecondsF(); |
| 553 } | 551 } |
| 554 event->type = WebInputEvent::GestureTapDown; | 552 event->type = WebInputEvent::GestureTapDown; |
| 555 event->data.tapDown.width = pixel_x; | 553 event->data.tapDown.width = pixel_x; |
| 556 event->data.tapDown.height = pixel_y; | 554 event->data.tapDown.height = pixel_y; |
| 557 SendGesture(current_input_target_, std::move(event)); | 555 SendGestureOnGL(current_input_target_, std::move(event)); |
| 558 } | 556 } |
| 559 } | 557 } |
| 560 | 558 |
| 561 void VrShell::SendGesture(InputTarget input_target, | 559 void VrShell::SendGestureOnGL(InputTarget input_target, |
| 562 std::unique_ptr<blink::WebInputEvent> event) { | 560 std::unique_ptr<blink::WebInputEvent> event) { |
| 563 DCHECK(input_target != NONE); | 561 DCHECK(input_target != NONE); |
| 564 const base::WeakPtr<VrInputManager>& weak_ptr = | 562 const base::WeakPtr<VrInputManager>& weak_ptr = |
| 565 input_target == CONTENT ? weak_content_input_manager_ | 563 input_target == CONTENT ? weak_content_input_manager_ |
| 566 : weak_ui_input_manager_; | 564 : weak_ui_input_manager_; |
| 567 main_thread_task_runner_->PostTask( | 565 main_thread_task_runner_->PostTask( |
| 568 FROM_HERE, | 566 FROM_HERE, |
| 569 base::Bind(&VrInputManager::ProcessUpdatedGesture, weak_ptr, | 567 base::Bind(&VrInputManager::ProcessUpdatedGesture, weak_ptr, |
| 570 base::Passed(std::move(event)))); | 568 base::Passed(std::move(event)))); |
| 571 } | 569 } |
| (...skipping 23 matching lines...) Expand all Loading... |
| 595 if (pixels[1] == kWebVrPosePixelMagicNumbers[0] && | 593 if (pixels[1] == kWebVrPosePixelMagicNumbers[0] && |
| 596 pixels[2] == kWebVrPosePixelMagicNumbers[1]) { | 594 pixels[2] == kWebVrPosePixelMagicNumbers[1]) { |
| 597 // Pose is good. | 595 // Pose is good. |
| 598 return pixels[0]; | 596 return pixels[0]; |
| 599 } | 597 } |
| 600 VLOG(1) << "WebVR: reject decoded pose index " << (int)pixels[0] << | 598 VLOG(1) << "WebVR: reject decoded pose index " << (int)pixels[0] << |
| 601 ", bad magic number " << (int)pixels[1] << ", " << (int)pixels[2]; | 599 ", bad magic number " << (int)pixels[1] << ", " << (int)pixels[2]; |
| 602 return -1; | 600 return -1; |
| 603 } | 601 } |
| 604 | 602 |
| 605 bool VrShell::WebVrPoseByteIsValid(int pose_index_byte) { | 603 bool VrShell::WebVrPoseByteIsValidOnGL(int pose_index_byte) { |
| 606 if (pose_index_byte < 0) { | 604 if (pose_index_byte < 0) { |
| 607 return false; | 605 return false; |
| 608 } | 606 } |
| 609 if (!webvr_head_pose_valid_[pose_index_byte % kPoseRingBufferSize]) { | 607 if (!webvr_head_pose_valid_[pose_index_byte % kPoseRingBufferSize]) { |
| 610 VLOG(1) << "WebVR: reject decoded pose index " << pose_index_byte << | 608 VLOG(1) << "WebVR: reject decoded pose index " << pose_index_byte << |
| 611 ", not a valid pose"; | 609 ", not a valid pose"; |
| 612 return false; | 610 return false; |
| 613 } | 611 } |
| 614 return true; | 612 return true; |
| 615 } | 613 } |
| 616 | 614 |
| 617 void VrShell::DrawFrame(JNIEnv* env, const JavaParamRef<jobject>& obj) { | 615 void VrShell::DrawFrameOnGL(JNIEnv* env, const JavaParamRef<jobject>& obj) { |
| 618 TRACE_EVENT0("gpu", "VrShell::DrawFrame"); | 616 TRACE_EVENT0("gpu", "VrShell::DrawFrame"); |
| 619 // Reset the viewport list to just the pair of viewports for the | 617 // Reset the viewport list to just the pair of viewports for the |
| 620 // primary buffer each frame. Head-locked viewports get added by | 618 // primary buffer each frame. Head-locked viewports get added by |
| 621 // DrawVrShell if needed. | 619 // DrawVrShell if needed. |
| 622 buffer_viewport_list_->SetToRecommendedBufferViewports(); | 620 buffer_viewport_list_->SetToRecommendedBufferViewports(); |
| 623 | 621 |
| 624 if (html_interface_->GetMode() == UiInterface::Mode::WEB_VR) { | 622 if (html_interface_->GetMode() == UiInterface::Mode::WEB_VR) { |
| 625 // If needed, resize the primary buffer for use with WebVR. | 623 // If needed, resize the primary buffer for use with WebVR. |
| 626 if (render_size_primary_ != render_size_primary_webvr_) { | 624 if (render_size_primary_ != render_size_primary_webvr_) { |
| 627 if (!render_size_primary_webvr_.width) { | 625 if (!render_size_primary_webvr_.width) { |
| (...skipping 23 matching lines...) Expand all Loading... |
| 651 // The head pose has redundant data. Assume we're only using the | 649 // The head pose has redundant data. Assume we're only using the |
| 652 // object_from_reference_matrix, we're not updating position_external. | 650 // object_from_reference_matrix, we're not updating position_external. |
| 653 // TODO: Not sure what object_from_reference_matrix is. The new api removed | 651 // TODO: Not sure what object_from_reference_matrix is. The new api removed |
| 654 // it. For now, removing it seems working fine. | 652 // it. For now, removing it seems working fine. |
| 655 gvr_api_->ApplyNeckModel(head_pose, 1.0f); | 653 gvr_api_->ApplyNeckModel(head_pose, 1.0f); |
| 656 } | 654 } |
| 657 | 655 |
| 658 // Bind the primary framebuffer. | 656 // Bind the primary framebuffer. |
| 659 frame.BindBuffer(kFramePrimaryBuffer); | 657 frame.BindBuffer(kFramePrimaryBuffer); |
| 660 | 658 |
| 661 HandleQueuedTasks(); | 659 HandleQueuedTasksOnGL(); |
| 662 | 660 |
| 663 // Update the render position of all UI elements (including desktop). | 661 // Update the render position of all UI elements (including desktop). |
| 664 const float screen_tilt = kDesktopScreenTiltDefault * M_PI / 180.0f; | 662 const float screen_tilt = kDesktopScreenTiltDefault * M_PI / 180.0f; |
| 665 scene_->UpdateTransforms(screen_tilt, UiScene::TimeInMicroseconds()); | 663 scene_->UpdateTransforms(screen_tilt, UiScene::TimeInMicroseconds()); |
| 666 | 664 |
| 667 UpdateController(GetForwardVector(head_pose)); | 665 UpdateControllerOnGL(GetForwardVector(head_pose)); |
| 668 | 666 |
| 669 if (html_interface_->GetMode() == UiInterface::Mode::WEB_VR) { | 667 if (html_interface_->GetMode() == UiInterface::Mode::WEB_VR) { |
| 670 DrawWebVr(); | 668 DrawWebVrOnGL(); |
| 671 | 669 |
| 672 // When using async reprojection, we need to know which pose was used in | 670 // When using async reprojection, we need to know which pose was used in |
| 673 // the WebVR app for drawing this frame. Due to unknown amounts of | 671 // the WebVR app for drawing this frame. Due to unknown amounts of |
| 674 // buffering in the compositor and SurfaceTexture, we read the pose number | 672 // buffering in the compositor and SurfaceTexture, we read the pose number |
| 675 // from a corner pixel. There's no point in doing this for legacy | 673 // from a corner pixel. There's no point in doing this for legacy |
| 676 // distortion rendering since that doesn't need a pose, and reading back | 674 // distortion rendering since that doesn't need a pose, and reading back |
| 677 // pixels is an expensive operation. TODO(klausw,crbug.com/655722): stop | 675 // pixels is an expensive operation. TODO(klausw,crbug.com/655722): stop |
| 678 // doing this once we have working no-compositor rendering for WebVR. | 676 // doing this once we have working no-compositor rendering for WebVR. |
| 679 if (gvr_api_->GetAsyncReprojectionEnabled()) { | 677 if (gvr_api_->GetAsyncReprojectionEnabled()) { |
| 680 int pose_index_byte = GetPixelEncodedPoseIndexByte(); | 678 int pose_index_byte = GetPixelEncodedPoseIndexByte(); |
| 681 if (WebVrPoseByteIsValid(pose_index_byte)) { | 679 if (WebVrPoseByteIsValidOnGL(pose_index_byte)) { |
| 682 // We have a valid pose, use it for reprojection. | 680 // We have a valid pose, use it for reprojection. |
| 683 webvr_left_viewport_->SetReprojection(GVR_REPROJECTION_FULL); | 681 webvr_left_viewport_->SetReprojection(GVR_REPROJECTION_FULL); |
| 684 webvr_right_viewport_->SetReprojection(GVR_REPROJECTION_FULL); | 682 webvr_right_viewport_->SetReprojection(GVR_REPROJECTION_FULL); |
| 685 head_pose = webvr_head_pose_[pose_index_byte % kPoseRingBufferSize]; | 683 head_pose = webvr_head_pose_[pose_index_byte % kPoseRingBufferSize]; |
| 686 // We can't mark the used pose as invalid since unfortunately | 684 // We can't mark the used pose as invalid since unfortunately |
| 687 // we have to reuse them. The compositor will re-submit stale | 685 // we have to reuse them. The compositor will re-submit stale |
| 688 // frames on vsync, and we can't tell that this has happened | 686 // frames on vsync, and we can't tell that this has happened |
| 689 // until we've read the pose index from it, and at that point | 687 // until we've read the pose index from it, and at that point |
| 690 // it's too late to skip rendering. | 688 // it's too late to skip rendering. |
| 691 } else { | 689 } else { |
| 692 // If we don't get a valid frame ID back we shouldn't attempt | 690 // If we don't get a valid frame ID back we shouldn't attempt |
| 693 // to reproject by an invalid matrix, so turn off reprojection | 691 // to reproject by an invalid matrix, so turn off reprojection |
| 694 // instead. Invalid poses can permanently break reprojection | 692 // instead. Invalid poses can permanently break reprojection |
| 695 // for this GVR instance: http://crbug.com/667327 | 693 // for this GVR instance: http://crbug.com/667327 |
| 696 webvr_left_viewport_->SetReprojection(GVR_REPROJECTION_NONE); | 694 webvr_left_viewport_->SetReprojection(GVR_REPROJECTION_NONE); |
| 697 webvr_right_viewport_->SetReprojection(GVR_REPROJECTION_NONE); | 695 webvr_right_viewport_->SetReprojection(GVR_REPROJECTION_NONE); |
| 698 } | 696 } |
| 699 } | 697 } |
| 700 } | 698 } |
| 701 | 699 |
| 702 DrawVrShell(head_pose, frame); | 700 DrawVrShellOnGL(head_pose, frame); |
| 703 | 701 |
| 704 frame.Unbind(); | 702 frame.Unbind(); |
| 705 frame.Submit(*buffer_viewport_list_, head_pose); | 703 frame.Submit(*buffer_viewport_list_, head_pose); |
| 706 } | 704 } |
| 707 | 705 |
| 708 void VrShell::DrawVrShell(const gvr::Mat4f& head_pose, | 706 void VrShell::DrawVrShellOnGL(const gvr::Mat4f& head_pose, |
| 709 gvr::Frame &frame) { | 707 gvr::Frame &frame) { |
| 710 TRACE_EVENT0("gpu", "VrShell::DrawVrShell"); | 708 TRACE_EVENT0("gpu", "VrShell::DrawVrShell"); |
| 711 std::vector<const ContentRectangle*> head_locked_elements; | 709 std::vector<const ContentRectangle*> head_locked_elements; |
| 712 std::vector<const ContentRectangle*> world_elements; | 710 std::vector<const ContentRectangle*> world_elements; |
| 713 for (const auto& rect : scene_->GetUiElements()) { | 711 for (const auto& rect : scene_->GetUiElements()) { |
| 714 if (!rect->visible) { | 712 if (!rect->visible) { |
| 715 continue; | 713 continue; |
| 716 } | 714 } |
| 717 if (rect->lock_to_fov) { | 715 if (rect->lock_to_fov) { |
| 718 head_locked_elements.push_back(rect.get()); | 716 head_locked_elements.push_back(rect.get()); |
| (...skipping 14 matching lines...) Expand all Loading... |
| 733 // Non-WebVR mode, enable depth testing and clear the primary buffers. | 731 // Non-WebVR mode, enable depth testing and clear the primary buffers. |
| 734 glEnable(GL_CULL_FACE); | 732 glEnable(GL_CULL_FACE); |
| 735 glEnable(GL_DEPTH_TEST); | 733 glEnable(GL_DEPTH_TEST); |
| 736 glDepthMask(GL_TRUE); | 734 glDepthMask(GL_TRUE); |
| 737 | 735 |
| 738 glClearColor(0.1f, 0.1f, 0.1f, 1.0f); | 736 glClearColor(0.1f, 0.1f, 0.1f, 1.0f); |
| 739 glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); | 737 glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); |
| 740 } | 738 } |
| 741 | 739 |
| 742 if (!world_elements.empty()) { | 740 if (!world_elements.empty()) { |
| 743 DrawUiView(&head_pose, world_elements, render_size_primary_, | 741 DrawUiViewOnGL(&head_pose, world_elements, render_size_primary_, |
| 744 kViewportListPrimaryOffset); | 742 kViewportListPrimaryOffset); |
| 745 } | 743 } |
| 746 | 744 |
| 747 if (!head_locked_elements.empty()) { | 745 if (!head_locked_elements.empty()) { |
| 748 // Add head-locked viewports. The list gets reset to just | 746 // Add head-locked viewports. The list gets reset to just |
| 749 // the recommended viewports (for the primary buffer) each frame. | 747 // the recommended viewports (for the primary buffer) each frame. |
| 750 buffer_viewport_list_->SetBufferViewport( | 748 buffer_viewport_list_->SetBufferViewport( |
| 751 kViewportListHeadlockedOffset + GVR_LEFT_EYE, | 749 kViewportListHeadlockedOffset + GVR_LEFT_EYE, |
| 752 *headlocked_left_viewport_); | 750 *headlocked_left_viewport_); |
| 753 buffer_viewport_list_->SetBufferViewport( | 751 buffer_viewport_list_->SetBufferViewport( |
| 754 kViewportListHeadlockedOffset + GVR_RIGHT_EYE, | 752 kViewportListHeadlockedOffset + GVR_RIGHT_EYE, |
| 755 *headlocked_right_viewport_); | 753 *headlocked_right_viewport_); |
| 756 | 754 |
| 757 // Bind the headlocked framebuffer. | 755 // Bind the headlocked framebuffer. |
| 758 frame.BindBuffer(kFrameHeadlockedBuffer); | 756 frame.BindBuffer(kFrameHeadlockedBuffer); |
| 759 glClearColor(0.0f, 0.0f, 0.0f, 0.0f); | 757 glClearColor(0.0f, 0.0f, 0.0f, 0.0f); |
| 760 glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); | 758 glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); |
| 761 DrawUiView(nullptr, head_locked_elements, render_size_headlocked_, | 759 DrawUiViewOnGL(nullptr, head_locked_elements, render_size_headlocked_, |
| 762 kViewportListHeadlockedOffset); | 760 kViewportListHeadlockedOffset); |
| 763 } | 761 } |
| 764 } | 762 } |
| 765 | 763 |
| 766 void VrShell::SetWebVRRenderSurfaceSize(int width, int height) { | 764 void VrShell::SetWebVRRenderSurfaceSize(int width, int height) { |
| 767 render_size_primary_webvr_.width = width; | 765 render_size_primary_webvr_.width = width; |
| 768 render_size_primary_webvr_.height = height; | 766 render_size_primary_webvr_.height = height; |
| 769 // TODO(klausw,crbug.com/655722): set the WebVR render surface size here once | 767 // TODO(klausw,crbug.com/655722): set the WebVR render surface size here once |
| 770 // we have that. | 768 // we have that. |
| 771 } | 769 } |
| 772 | 770 |
// Returns the physical size of the content texture, which currently doubles
// as the WebVR compositor surface size.
gvr::Sizei VrShell::GetWebVRCompositorSurfaceSize() {
  // This is a stopgap while we're using the WebVR compositor rendering path.
  // TODO(klausw,crbug.com/655722): Remove this method and member once we're
  // using a separate WebVR render surface.
  return content_tex_physical_size_;
}
| 779 | 777 |
| 780 | 778 |
| 781 void VrShell::DrawUiView(const gvr::Mat4f* head_pose, | 779 void VrShell::DrawUiViewOnGL(const gvr::Mat4f* head_pose, |
| 782 const std::vector<const ContentRectangle*>& elements, | 780 const std::vector<const ContentRectangle*>& elements, |
| 783 const gvr::Sizei& render_size, int viewport_offset) { | 781 const gvr::Sizei& render_size, int viewport_offset) { |
| 784 TRACE_EVENT0("gpu", "VrShell::DrawUiView"); | 782 TRACE_EVENT0("gpu", "VrShell::DrawUiView"); |
| 785 for (auto eye : {GVR_LEFT_EYE, GVR_RIGHT_EYE}) { | 783 for (auto eye : {GVR_LEFT_EYE, GVR_RIGHT_EYE}) { |
| 786 buffer_viewport_list_->GetBufferViewport( | 784 buffer_viewport_list_->GetBufferViewport( |
| 787 eye + viewport_offset, buffer_viewport_.get()); | 785 eye + viewport_offset, buffer_viewport_.get()); |
| 788 | 786 |
| 789 gvr::Mat4f view_matrix = gvr_api_->GetEyeFromHeadMatrix(eye); | 787 gvr::Mat4f view_matrix = gvr_api_->GetEyeFromHeadMatrix(eye); |
| 790 if (head_pose != nullptr) { | 788 if (head_pose != nullptr) { |
| 791 view_matrix = MatrixMul(view_matrix, *head_pose); | 789 view_matrix = MatrixMul(view_matrix, *head_pose); |
| 792 } | 790 } |
| 793 | 791 |
| 794 gvr::Recti pixel_rect = | 792 gvr::Recti pixel_rect = |
| 795 CalculatePixelSpaceRect(render_size, buffer_viewport_->GetSourceUv()); | 793 CalculatePixelSpaceRect(render_size, buffer_viewport_->GetSourceUv()); |
| 796 glViewport(pixel_rect.left, pixel_rect.bottom, | 794 glViewport(pixel_rect.left, pixel_rect.bottom, |
| 797 pixel_rect.right - pixel_rect.left, | 795 pixel_rect.right - pixel_rect.left, |
| 798 pixel_rect.top - pixel_rect.bottom); | 796 pixel_rect.top - pixel_rect.bottom); |
| 799 | 797 |
| 800 const gvr::Mat4f render_matrix = MatrixMul( | 798 const gvr::Mat4f render_matrix = MatrixMul( |
| 801 PerspectiveMatrixFromView( | 799 PerspectiveMatrixFromView( |
| 802 buffer_viewport_->GetSourceFov(), kZNear, kZFar), | 800 buffer_viewport_->GetSourceFov(), kZNear, kZFar), |
| 803 view_matrix); | 801 view_matrix); |
| 804 | 802 |
| 805 DrawElements(render_matrix, elements); | 803 DrawElementsOnGL(render_matrix, elements); |
| 806 if (head_pose != nullptr && | 804 if (head_pose != nullptr && |
| 807 html_interface_->GetMode() != UiInterface::Mode::WEB_VR) { | 805 html_interface_->GetMode() != UiInterface::Mode::WEB_VR) { |
| 808 DrawCursor(render_matrix); | 806 DrawCursorOnGL(render_matrix); |
| 809 } | 807 } |
| 810 } | 808 } |
| 811 } | 809 } |
| 812 | 810 |
| 813 void VrShell::DrawElements( | 811 void VrShell::DrawElementsOnGL( |
| 814 const gvr::Mat4f& render_matrix, | 812 const gvr::Mat4f& render_matrix, |
| 815 const std::vector<const ContentRectangle*>& elements) { | 813 const std::vector<const ContentRectangle*>& elements) { |
| 816 for (const auto& rect : elements) { | 814 for (const auto& rect : elements) { |
| 817 Rectf copy_rect; | 815 Rectf copy_rect; |
| 818 jint texture_handle; | 816 jint texture_handle; |
| 819 if (rect->content_quad) { | 817 if (rect->content_quad) { |
| 820 copy_rect = {0, 0, 1, 1}; | 818 copy_rect = {0, 0, 1, 1}; |
| 821 texture_handle = content_texture_id_; | 819 texture_handle = content_texture_id_; |
| 822 } else { | 820 } else { |
| 823 copy_rect.x = static_cast<float>(rect->copy_rect.x) / ui_tex_css_width_; | 821 copy_rect.x = static_cast<float>(rect->copy_rect.x) / ui_tex_css_width_; |
| 824 copy_rect.y = static_cast<float>(rect->copy_rect.y) / ui_tex_css_height_; | 822 copy_rect.y = static_cast<float>(rect->copy_rect.y) / ui_tex_css_height_; |
| 825 copy_rect.width = static_cast<float>(rect->copy_rect.width) / | 823 copy_rect.width = static_cast<float>(rect->copy_rect.width) / |
| 826 ui_tex_css_width_; | 824 ui_tex_css_width_; |
| 827 copy_rect.height = static_cast<float>(rect->copy_rect.height) / | 825 copy_rect.height = static_cast<float>(rect->copy_rect.height) / |
| 828 ui_tex_css_height_; | 826 ui_tex_css_height_; |
| 829 texture_handle = ui_texture_id_; | 827 texture_handle = ui_texture_id_; |
| 830 } | 828 } |
| 831 gvr::Mat4f transform = MatrixMul(render_matrix, rect->transform.to_world); | 829 gvr::Mat4f transform = MatrixMul(render_matrix, rect->transform.to_world); |
| 832 vr_shell_renderer_->GetTexturedQuadRenderer()->Draw( | 830 vr_shell_renderer_->GetTexturedQuadRenderer()->Draw( |
| 833 texture_handle, transform, copy_rect); | 831 texture_handle, transform, copy_rect); |
| 834 } | 832 } |
| 835 } | 833 } |
| 836 | 834 |
| 837 void VrShell::DrawCursor(const gvr::Mat4f& render_matrix) { | 835 void VrShell::DrawCursorOnGL(const gvr::Mat4f& render_matrix) { |
| 838 gvr::Mat4f mat; | 836 gvr::Mat4f mat; |
| 839 SetIdentityM(mat); | 837 SetIdentityM(mat); |
| 840 | 838 |
| 841 // Draw the reticle. | 839 // Draw the reticle. |
| 842 | 840 |
| 843 // Scale the pointer to have a fixed FOV size at any distance. | 841 // Scale the pointer to have a fixed FOV size at any distance. |
| 844 const float eye_to_target = Distance(target_point_, kOrigin); | 842 const float eye_to_target = Distance(target_point_, kOrigin); |
| 845 ScaleM(mat, mat, kReticleWidth * eye_to_target, | 843 ScaleM(mat, mat, kReticleWidth * eye_to_target, |
| 846 kReticleHeight * eye_to_target, 1.0f); | 844 kReticleHeight * eye_to_target, 1.0f); |
| 847 | 845 |
| (...skipping 32 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 880 mat = MatrixMul(QuatToMatrix(q), mat); | 878 mat = MatrixMul(QuatToMatrix(q), mat); |
| 881 | 879 |
| 882 const gvr::Vec3f beam_direction = { | 880 const gvr::Vec3f beam_direction = { |
| 883 target_point_.x - kHandPosition.x, | 881 target_point_.x - kHandPosition.x, |
| 884 target_point_.y - kHandPosition.y, | 882 target_point_.y - kHandPosition.y, |
| 885 target_point_.z - kHandPosition.z | 883 target_point_.z - kHandPosition.z |
| 886 }; | 884 }; |
| 887 const gvr::Mat4f beam_direction_mat = | 885 const gvr::Mat4f beam_direction_mat = |
| 888 QuatToMatrix(GetRotationFromZAxis(beam_direction)); | 886 QuatToMatrix(GetRotationFromZAxis(beam_direction)); |
| 889 | 887 |
| 890 | |
| 891 // Render multiple faces to make the laser appear cylindrical. | 888 // Render multiple faces to make the laser appear cylindrical. |
| 892 const int faces = 4; | 889 const int faces = 4; |
| 893 for (int i = 0; i < faces; i++) { | 890 for (int i = 0; i < faces; i++) { |
| 894 // Rotate around Z. | 891 // Rotate around Z. |
| 895 const float angle = M_PI * 2 * i / faces; | 892 const float angle = M_PI * 2 * i / faces; |
| 896 const gvr::Quatf rot = QuatFromAxisAngle({0.0f, 0.0f, 1.0f}, angle); | 893 const gvr::Quatf rot = QuatFromAxisAngle({0.0f, 0.0f, 1.0f}, angle); |
| 897 gvr::Mat4f face_transform = MatrixMul(QuatToMatrix(rot), mat); | 894 gvr::Mat4f face_transform = MatrixMul(QuatToMatrix(rot), mat); |
| 898 | 895 |
| 899 // Orient according to target direction. | 896 // Orient according to target direction. |
| 900 face_transform = MatrixMul(beam_direction_mat, face_transform); | 897 face_transform = MatrixMul(beam_direction_mat, face_transform); |
| 901 | 898 |
| 902 // Move the beam origin to the hand. | 899 // Move the beam origin to the hand. |
| 903 TranslateM(face_transform, face_transform, kHandPosition.x, kHandPosition.y, | 900 TranslateM(face_transform, face_transform, kHandPosition.x, kHandPosition.y, |
| 904 kHandPosition.z); | 901 kHandPosition.z); |
| 905 | 902 |
| 906 transform = MatrixMul(render_matrix, face_transform); | 903 transform = MatrixMul(render_matrix, face_transform); |
| 907 vr_shell_renderer_->GetLaserRenderer()->Draw(transform); | 904 vr_shell_renderer_->GetLaserRenderer()->Draw(transform); |
| 908 } | 905 } |
| 909 } | 906 } |
| 910 | 907 |
| 911 void VrShell::DrawWebVr() { | 908 void VrShell::DrawWebVrOnGL() { |
| 912 TRACE_EVENT0("gpu", "VrShell::DrawWebVr"); | 909 TRACE_EVENT0("gpu", "VrShell::DrawWebVr"); |
| 913 // Don't need face culling, depth testing, blending, etc. Turn it all off. | 910 // Don't need face culling, depth testing, blending, etc. Turn it all off. |
| 914 glDisable(GL_CULL_FACE); | 911 glDisable(GL_CULL_FACE); |
| 915 glDepthMask(GL_FALSE); | 912 glDepthMask(GL_FALSE); |
| 916 glDisable(GL_DEPTH_TEST); | 913 glDisable(GL_DEPTH_TEST); |
| 917 glDisable(GL_SCISSOR_TEST); | 914 glDisable(GL_SCISSOR_TEST); |
| 918 glDisable(GL_BLEND); | 915 glDisable(GL_BLEND); |
| 919 glDisable(GL_POLYGON_OFFSET_FILL); | 916 glDisable(GL_POLYGON_OFFSET_FILL); |
| 920 | 917 |
| 921 glViewport(0, 0, render_size_primary_.width, render_size_primary_.height); | 918 glViewport(0, 0, render_size_primary_.width, render_size_primary_.height); |
| 922 vr_shell_renderer_->GetWebVrRenderer()->Draw(webvr_texture_id_); | 919 vr_shell_renderer_->GetWebVrRenderer()->Draw(webvr_texture_id_); |
| 923 | 920 |
| 924 buffer_viewport_list_->SetBufferViewport(GVR_LEFT_EYE, | 921 buffer_viewport_list_->SetBufferViewport(GVR_LEFT_EYE, |
| 925 *webvr_left_viewport_); | 922 *webvr_left_viewport_); |
| 926 buffer_viewport_list_->SetBufferViewport(GVR_RIGHT_EYE, | 923 buffer_viewport_list_->SetBufferViewport(GVR_RIGHT_EYE, |
| 927 *webvr_right_viewport_); | 924 *webvr_right_viewport_); |
| 928 } | 925 } |
| 929 | 926 |
// JNI entry point for a controller trigger event; defers all handling to the
// render thread via |touch_pending_|.
void VrShell::OnTriggerEventOnUI(JNIEnv* env,
                                 const JavaParamRef<jobject>& obj) {
  // Set a flag to handle this on the render thread at the next frame.
  touch_pending_ = true;
}
| 934 | 932 |
| 935 void VrShell::OnPause(JNIEnv* env, const JavaParamRef<jobject>& obj) { | 933 void VrShell::OnPauseOnUI(JNIEnv* env, const JavaParamRef<jobject>& obj) { |
| 936 if (gvr_api_ == nullptr) | 934 if (gvr_api_ == nullptr) |
| 937 return; | 935 return; |
| 936 |
| 937 // TODO(mthiesse): Clean up threading here. |
| 938 controller_->OnPause(); | 938 controller_->OnPause(); |
| 939 gvr_api_->PauseTracking(); | 939 gvr_api_->PauseTracking(); |
| 940 | 940 |
| 941 // exit vr session | 941 // exit vr session |
| 942 metrics_helper_->SetVRActive(false); | 942 metrics_helper_->SetVRActive(false); |
| 943 } | 943 } |
| 944 | 944 |
// JNI entry point invoked when the activity resumes. Restores viewer profile
// and tracking state and marks the VR session active for metrics.
void VrShell::OnResumeOnUI(JNIEnv* env, const JavaParamRef<jobject>& obj) {
  if (gvr_api_ == nullptr)
    return;

  // TODO(mthiesse): Clean up threading here.
  gvr_api_->RefreshViewerProfile();
  gvr_api_->ResumeTracking();
  controller_->OnResume();

  // Resume the VR session for metrics. (The previous "exit vr session"
  // comment here was stale: this sets the session active.)
  metrics_helper_->SetVRActive(true);
}
| 956 | 957 |
| 957 base::WeakPtr<VrShell> VrShell::GetWeakPtr( | 958 base::WeakPtr<VrShell> VrShell::GetWeakPtrOnUI( |
| 958 const content::WebContents* web_contents) { | 959 const content::WebContents* web_contents) { |
| 959 // Ensure that the WebContents requesting the VrShell instance is the one | 960 // Ensure that the WebContents requesting the VrShell instance is the one |
| 960 // we created. | 961 // we created. |
| 961 if (g_instance != nullptr && g_instance->ui_contents_ == web_contents) | 962 if (g_instance != nullptr && g_instance->ui_contents_ == web_contents) |
| 962 return g_instance->weak_ptr_factory_.GetWeakPtr(); | 963 return g_instance->weak_ptr_factory_.GetWeakPtr(); |
| 963 return base::WeakPtr<VrShell>(nullptr); | 964 return base::WeakPtr<VrShell>(nullptr); |
| 964 } | 965 } |
| 965 | 966 |
| 966 void VrShell::OnDomContentsLoaded() { | 967 void VrShell::OnDomContentsLoadedOnUI() { |
| 967 html_interface_->SetURL(main_contents_->GetVisibleURL()); | 968 html_interface_->SetURL(main_contents_->GetVisibleURL()); |
| 968 html_interface_->SetLoading(main_contents_->IsLoading()); | 969 html_interface_->SetLoading(main_contents_->IsLoading()); |
| 969 html_interface_->OnDomContentsLoaded(); | 970 html_interface_->OnDomContentsLoaded(); |
| 970 } | 971 } |
| 971 | 972 |
| 972 void VrShell::SetWebVrMode(JNIEnv* env, | 973 void VrShell::SetWebVrModeOnUI(JNIEnv* env, |
| 973 const base::android::JavaParamRef<jobject>& obj, | 974 const base::android::JavaParamRef<jobject>& obj, |
| 974 bool enabled) { | 975 bool enabled) { |
| 975 metrics_helper_->SetWebVREnabled(enabled); | 976 metrics_helper_->SetWebVREnabled(enabled); |
| 976 if (enabled) { | 977 if (enabled) { |
| 977 html_interface_->SetMode(UiInterface::Mode::WEB_VR); | 978 html_interface_->SetMode(UiInterface::Mode::WEB_VR); |
| 978 } else { | 979 } else { |
| 979 html_interface_->SetMode(UiInterface::Mode::STANDARD); | 980 html_interface_->SetMode(UiInterface::Mode::STANDARD); |
| 980 } | 981 } |
| 981 } | 982 } |
| 982 | 983 |
// Forwards the page's secure-origin state to the HTML UI.
void VrShell::SetWebVRSecureOrigin(bool secure_origin) {
  html_interface_->SetSecureOrigin(secure_origin);
}
| 986 | 987 |
// Intentionally a no-op while WebVR rendering goes through the compositor
// path.
void VrShell::SubmitWebVRFrame() {}
| 988 | 989 |
// Updates the UV bounds used to sample each eye's WebVR texture.
void VrShell::UpdateWebVRTextureBounds(const gvr::Rectf& left_bounds,
                                       const gvr::Rectf& right_bounds) {
  webvr_left_viewport_->SetSourceUv(left_bounds);
  webvr_right_viewport_->SetSourceUv(right_bounds);
}
| 994 | 995 |
// Returns the raw GVR API handle; ownership stays with VrShell.
gvr::GvrApi* VrShell::gvr_api() {
  return gvr_api_.get();
}
| 998 | 999 |
// JNI notification that the Android Surfaces backing the content and UI
// compositors changed; forwards each surface to its compositor.
void VrShell::SurfacesChangedOnUI(JNIEnv* env,
                                  const JavaParamRef<jobject>& object,
                                  const JavaParamRef<jobject>& content_surface,
                                  const JavaParamRef<jobject>& ui_surface) {
  content_compositor_->SurfaceChanged(content_surface);
  ui_compositor_->SurfaceChanged(ui_surface);
}
| 1006 | 1007 |
| 1007 void VrShell::ContentBoundsChanged(JNIEnv* env, | 1008 void VrShell::ContentBoundsChangedOnUI(JNIEnv* env, |
| 1008 const JavaParamRef<jobject>& object, | 1009 const JavaParamRef<jobject>& object, |
| 1009 jint width, jint height, jfloat dpr) { | 1010 jint width, jint height, jfloat dpr) { |
| 1010 TRACE_EVENT0("gpu", "VrShell::ContentBoundsChanged"); | 1011 TRACE_EVENT0("gpu", "VrShell::ContentBoundsChanged"); |
| 1011 content_tex_physical_size_.width = width; | 1012 content_tex_physical_size_.width = width; |
| 1012 content_tex_physical_size_.height = height; | 1013 content_tex_physical_size_.height = height; |
| 1013 // TODO(mthiesse): Synchronize with GL thread, and update tex css size in | 1014 // TODO(mthiesse): Synchronize with GL thread, and update tex css size in |
| 1014 // response to MainFrameWasResized, not here. | 1015 // response to MainFrameWasResized, not here. |
| 1015 content_tex_css_width_ = width / dpr; | 1016 content_tex_css_width_ = width / dpr; |
| 1016 content_tex_css_height_ = height / dpr; | 1017 content_tex_css_height_ = height / dpr; |
| 1017 | 1018 |
| 1018 content_compositor_->SetWindowBounds(width, height); | 1019 content_compositor_->SetWindowBounds(width, height); |
| 1019 } | 1020 } |
| 1020 | 1021 |
// JNI notification that the UI view was resized; resizes the UI compositor
// window. The device pixel ratio is currently unused here.
void VrShell::UIBoundsChangedOnUI(JNIEnv* env,
                                  const JavaParamRef<jobject>& object,
                                  jint width, jint height, jfloat dpr) {
  ui_compositor_->SetWindowBounds(width, height);
}
| 1026 | 1027 |
// Returns the UI scene graph; ownership stays with VrShell.
UiScene* VrShell::GetSceneOnGL() {
  return scene_.get();
}
| 1030 | 1031 |
// Returns the HTML UI interface; ownership stays with VrShell.
UiInterface* VrShell::GetUiInterfaceOnGL() {
  return html_interface_.get();
}
| 1034 | 1035 |
// Enqueues |callback| to be run on the GL thread by HandleQueuedTasksOnGL().
// The queue is guarded by |task_queue_lock_|, so this is safe to call from
// other threads.
// NOTE(review): the parameter could likely be const& — confirm against the
// header declaration before changing.
void VrShell::QueueTaskOnUI(base::Callback<void()>& callback) {
  base::AutoLock lock(task_queue_lock_);
  task_queue_.push(callback);
}
| 1039 | 1040 |
| 1040 void VrShell::HandleQueuedTasks() { | 1041 void VrShell::HandleQueuedTasksOnGL() { |
| 1041 // To protect a stream of tasks from blocking rendering indefinitely, | 1042 // To protect a stream of tasks from blocking rendering indefinitely, |
| 1042 // process only the number of tasks present when first checked. | 1043 // process only the number of tasks present when first checked. |
| 1043 std::vector<base::Callback<void()>> tasks; | 1044 std::vector<base::Callback<void()>> tasks; |
| 1044 { | 1045 { |
| 1045 base::AutoLock lock(task_queue_lock_); | 1046 base::AutoLock lock(task_queue_lock_); |
| 1046 const size_t count = task_queue_.size(); | 1047 const size_t count = task_queue_.size(); |
| 1047 for (size_t i = 0; i < count; i++) { | 1048 for (size_t i = 0; i < count; i++) { |
| 1048 tasks.push_back(task_queue_.front()); | 1049 tasks.push_back(task_queue_.front()); |
| 1049 task_queue_.pop(); | 1050 task_queue_.pop(); |
| 1050 } | 1051 } |
| 1051 } | 1052 } |
| 1052 for (auto &task : tasks) { | 1053 for (auto &task : tasks) { |
| 1053 task.Run(); | 1054 task.Run(); |
| 1054 } | 1055 } |
| 1055 } | 1056 } |
| 1056 | 1057 |
| 1057 void VrShell::DoUiAction(const UiAction action) { | 1058 void VrShell::DoUiActionOnUI(const UiAction action) { |
| 1058 content::NavigationController& controller = main_contents_->GetController(); | 1059 content::NavigationController& controller = main_contents_->GetController(); |
| 1059 switch (action) { | 1060 switch (action) { |
| 1060 case HISTORY_BACK: | 1061 case HISTORY_BACK: |
| 1061 if (main_contents_->IsFullscreen()) { | 1062 if (main_contents_->IsFullscreen()) { |
| 1062 main_contents_->ExitFullscreen(true /* will_cause_resize */); | 1063 main_contents_->ExitFullscreen(true /* will_cause_resize */); |
| 1063 } else if (controller.CanGoBack()) { | 1064 } else if (controller.CanGoBack()) { |
| 1064 controller.GoBack(); | 1065 controller.GoBack(); |
| 1065 } | 1066 } |
| 1066 break; | 1067 break; |
| 1067 case HISTORY_FORWARD: | 1068 case HISTORY_FORWARD: |
| (...skipping 32 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1100 ui_tex_css_height_ = display.size().height(); | 1101 ui_tex_css_height_ = display.size().height(); |
| 1101 } | 1102 } |
| 1102 | 1103 |
// Invoked when the UI WebContents (|ui_contents_|) is destroyed. Drops the
// UI input routing and forces an exit from VR.
void VrShell::WebContentsDestroyed() {
  ui_input_manager_.reset();
  ui_contents_ = nullptr;
  // TODO(mthiesse): Handle web contents being destroyed.
  delegate_->ForceExitVr();
}
| 1109 | 1110 |
// Invoked when the content WebContents (|main_contents_|) is destroyed.
// Drops the content input routing and forces an exit from VR.
void VrShell::ContentWebContentsDestroyedOnUI() {
  content_input_manager_.reset();
  main_contents_ = nullptr;
  // TODO(mthiesse): Handle web contents being destroyed.
  delegate_->ForceExitVr();
}
| 1116 | 1117 |
// Invoked when the content WebContents becomes hidden. Stops routing input
// to it and forces an exit from VR.
void VrShell::ContentWasHiddenOnUI() {
  // Ensure we don't continue sending input to it.
  content_input_manager_.reset();
  // TODO(mthiesse): Handle web contents being hidden.
  delegate_->ForceExitVr();
}
| 1123 | 1124 |
// Pushes a new content size (CSS pixels plus device pixel ratio) to the Java
// VrShellImpl.
// NOTE(review): Java_VrShellImpl_setContentCssSizeOnUI is a generated JNI
// stub whose name must match a Java method setContentCssSizeOnUI — confirm
// the Java side was renamed too, or this will not compile against the
// generated header.
void VrShell::SetContentCssSizeOnUI(float width, float height, float dpr) {
  JNIEnv* env = base::android::AttachCurrentThread();
  Java_VrShellImpl_setContentCssSizeOnUI(env, j_vr_shell_.obj(), width, height,
                                         dpr);
}
| 1129 | 1130 |
// Pushes a new UI size (CSS pixels plus device pixel ratio) to the Java
// VrShellImpl.
// NOTE(review): Java_VrShellImpl_setUiCssSizeOnUI is a generated JNI stub —
// confirm the corresponding Java method was renamed to setUiCssSizeOnUI.
void VrShell::SetUiCssSizeOnUI(float width, float height, float dpr) {
  JNIEnv* env = base::android::AttachCurrentThread();
  Java_VrShellImpl_setUiCssSizeOnUI(env, j_vr_shell_.obj(), width, height, dpr);
}
| 1134 | 1135 |
| 1135 // ---------------------------------------------------------------------------- | 1136 // ---------------------------------------------------------------------------- |
| 1136 // Native JNI methods | 1137 // Native JNI methods |
| 1137 // ---------------------------------------------------------------------------- | 1138 // ---------------------------------------------------------------------------- |
| 1138 | 1139 |
// JNI entry point that constructs the native VrShell and returns its address
// as a jlong for the Java side to hold.
// NOTE(review): renaming the native function from Init to InitOnUI must be
// mirrored in the Java native declaration / JNI registration — confirm.
jlong InitOnUI(JNIEnv* env,
               const JavaParamRef<jobject>& obj,
               const JavaParamRef<jobject>& content_web_contents,
               jlong content_window_android,
               const JavaParamRef<jobject>& ui_web_contents,
               jlong ui_window_android,
               jboolean for_web_vr) {
  return reinterpret_cast<intptr_t>(new VrShell(
      env, obj, content::WebContents::FromJavaWebContents(content_web_contents),
      reinterpret_cast<ui::WindowAndroid*>(content_window_android),
      content::WebContents::FromJavaWebContents(ui_web_contents),
      reinterpret_cast<ui::WindowAndroid*>(ui_window_android),
      for_web_vr));
}
| 1153 | 1154 |
| 1154 } // namespace vr_shell | 1155 } // namespace vr_shell |
| OLD | NEW |