| Index: chrome/browser/android/vr_shell/vr_shell_gl.cc
|
| diff --git a/chrome/browser/android/vr_shell/vr_shell.cc b/chrome/browser/android/vr_shell/vr_shell_gl.cc
|
| similarity index 62%
|
| copy from chrome/browser/android/vr_shell/vr_shell.cc
|
| copy to chrome/browser/android/vr_shell/vr_shell_gl.cc
|
| index 8fad473cc54708ffc5cedc0da52b4d5bcb9dd974..a2ac61588d22344527cf9a215cd35587f0e756c6 100644
|
| --- a/chrome/browser/android/vr_shell/vr_shell.cc
|
| +++ b/chrome/browser/android/vr_shell/vr_shell_gl.cc
|
| @@ -2,38 +2,29 @@
|
| // Use of this source code is governed by a BSD-style license that can be
|
| // found in the LICENSE file.
|
|
|
| -#include "chrome/browser/android/vr_shell/vr_shell.h"
|
| +#include "chrome/browser/android/vr_shell/vr_shell_gl.h"
|
|
|
| +#include "base/memory/ptr_util.h"
|
| #include "base/metrics/histogram_macros.h"
|
| +#include "base/threading/thread_task_runner_handle.h"
|
| #include "chrome/browser/android/vr_shell/ui_elements.h"
|
| #include "chrome/browser/android/vr_shell/ui_interface.h"
|
| #include "chrome/browser/android/vr_shell/ui_scene.h"
|
| -#include "chrome/browser/android/vr_shell/vr_compositor.h"
|
| #include "chrome/browser/android/vr_shell/vr_controller.h"
|
| #include "chrome/browser/android/vr_shell/vr_gl_util.h"
|
| #include "chrome/browser/android/vr_shell/vr_input_manager.h"
|
| -#include "chrome/browser/android/vr_shell/vr_shell_delegate.h"
|
| +#include "chrome/browser/android/vr_shell/vr_math.h"
|
| +#include "chrome/browser/android/vr_shell/vr_shell.h"
|
| #include "chrome/browser/android/vr_shell/vr_shell_renderer.h"
|
| -#include "chrome/browser/android/vr_shell/vr_usage_monitor.h"
|
| -#include "chrome/browser/android/vr_shell/vr_web_contents_observer.h"
|
| -#include "content/public/browser/navigation_controller.h"
|
| -#include "content/public/browser/render_view_host.h"
|
| -#include "content/public/browser/render_widget_host.h"
|
| -#include "content/public/browser/render_widget_host_view.h"
|
| -#include "content/public/browser/web_contents.h"
|
| -#include "content/public/common/referrer.h"
|
| -#include "device/vr/android/gvr/gvr_device_provider.h"
|
| -#include "jni/VrShellImpl_jni.h"
|
| -#include "ui/android/view_android.h"
|
| -#include "ui/android/window_android.h"
|
| -#include "ui/base/page_transition_types.h"
|
| -#include "ui/display/display.h"
|
| -#include "ui/display/screen.h"
|
| +#include "third_party/WebKit/public/platform/WebInputEvent.h"
|
| +#include "ui/gfx/vsync_provider.h"
|
| +#include "ui/gl/android/scoped_java_surface.h"
|
| +#include "ui/gl/android/surface_texture.h"
|
| #include "ui/gl/gl_bindings.h"
|
| +#include "ui/gl/gl_context.h"
|
| +#include "ui/gl/gl_surface.h"
|
| #include "ui/gl/init/gl_factory.h"
|
|
|
| -using base::android::JavaParamRef;
|
| -
|
| namespace vr_shell {
|
|
|
| namespace {
|
| @@ -95,10 +86,6 @@ static constexpr int kViewportListHeadlockedOffset = 2;
|
| // data. Must match the magic numbers used in blink's VRDisplay.cpp.
|
| static constexpr std::array<uint8_t, 2> kWebVrPosePixelMagicNumbers{{42, 142}};
|
|
|
| -vr_shell::VrShell* g_instance;
|
| -
|
| -static const char kVrShellUIURL[] = "chrome://vr-shell-ui";
|
| -
|
| float Distance(const gvr::Vec3f& vec1, const gvr::Vec3f& vec2) {
|
| float xdiff = (vec1.x - vec2.x);
|
| float ydiff = (vec1.y - vec2.y);
|
| @@ -147,113 +134,155 @@ std::unique_ptr<blink::WebMouseEvent> MakeMouseEvent(WebInputEvent::Type type,
|
|
|
| return mouse_event;
|
| }
|
| +
|
| +enum class ViewerType {
|
| + UNKNOWN_TYPE = 0,
|
| + CARDBOARD = 1,
|
| + DAYDREAM = 2,
|
| + VIEWER_TYPE_MAX,
|
| +};
|
| +
|
| +int GetPixelEncodedPoseIndexByte() {
|
| + TRACE_EVENT0("gpu", "VrShellGl::GetPixelEncodedPoseIndex");
|
| + // Read the pose index encoded in a bottom left pixel as color values.
|
| + // See also third_party/WebKit/Source/modules/vr/VRDisplay.cpp which
|
| + // encodes the pose index, and device/vr/android/gvr/gvr_device.cc
|
| + // which tracks poses. Returns the low byte (0..255) if valid, or -1
|
| + // if not valid due to bad magic number.
|
| + uint8_t pixels[4];
|
| + // Assume we're reading from the framebuffer we just wrote to.
|
| + // That's true currently, but we may need to use glReadBuffer(GL_BACK)
|
| + // or equivalent if the rendering setup changes in the future.
|
| + glReadPixels(0, 0, 1, 1, GL_RGBA, GL_UNSIGNED_BYTE, pixels);
|
| +
|
| + // Check for the magic numbers written by VRDisplay.cpp on submit.
|
| + // This helps avoid glitches from garbage data in the render
|
| + // buffer that can appear during initialization or resizing. These
|
| + // often appear as flashes of all-black or all-white pixels.
|
| + if (pixels[1] == kWebVrPosePixelMagicNumbers[0] &&
|
| + pixels[2] == kWebVrPosePixelMagicNumbers[1]) {
|
| + // Pose is good.
|
| + return pixels[0];
|
| + }
|
| + VLOG(1) << "WebVR: reject decoded pose index " << (int)pixels[0] <<
|
| + ", bad magic number " << (int)pixels[1] << ", " << (int)pixels[2];
|
| + return -1;
|
| +}
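
For reference, the encoding side that this decoder reverses would look roughly
like the sketch below. It is illustrative only: the helper name
EncodePoseIndexPixel is hypothetical, the real encoder lives in Blink's
VRDisplay.cpp and may not use a scissored clear, and the snippet assumes the
same GL bindings used elsewhere in this file (ui/gl/gl_bindings.h).

    // Write the low byte of |pose_index| plus the two magic bytes {42, 142}
    // into the bottom-left pixel of the framebuffer about to be submitted,
    // matching what GetPixelEncodedPoseIndexByte() reads back.
    void EncodePoseIndexPixel(uint32_t pose_index) {
      glEnable(GL_SCISSOR_TEST);
      glScissor(0, 0, 1, 1);  // Limit the clear to pixel (0, 0).
      glClearColor((pose_index % 256) / 255.0f,  // R: pose index low byte.
                   42 / 255.0f,                  // G: magic number 0.
                   142 / 255.0f,                 // B: magic number 1.
                   1.0f);
      glClear(GL_COLOR_BUFFER_BIT);
      glDisable(GL_SCISSOR_TEST);
    }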
|
| +
|
| } // namespace
|
|
|
| -VrShell::VrShell(JNIEnv* env,
|
| - jobject obj,
|
| - content::WebContents* main_contents,
|
| - ui::WindowAndroid* content_window,
|
| - content::WebContents* ui_contents,
|
| - ui::WindowAndroid* ui_window,
|
| - bool for_web_vr)
|
| - : WebContentsObserver(ui_contents),
|
| - main_contents_(main_contents),
|
| - ui_contents_(ui_contents),
|
| - metrics_helper_(new VrMetricsHelper(main_contents)),
|
| - main_thread_task_runner_(base::ThreadTaskRunnerHandle::Get()),
|
| - weak_ptr_factory_(this) {
|
| - DCHECK(g_instance == nullptr);
|
| - g_instance = this;
|
| - j_vr_shell_.Reset(env, obj);
|
| - scene_.reset(new UiScene);
|
| +VrShellGl::VrShellGl(
|
| + VrShell* vr_shell,
|
| + const base::WeakPtr<VrShell>& weak_vr_shell,
|
| + const base::WeakPtr<VrInputManager>& content_input_manager,
|
| + const base::WeakPtr<VrInputManager>& ui_input_manager,
|
| + scoped_refptr<base::SingleThreadTaskRunner> main_thread_task_runner,
|
| + gvr_context* gvr_api)
|
| + : task_runner_(base::ThreadTaskRunnerHandle::Get()),
|
| + vr_shell_(vr_shell),
|
| + weak_vr_shell_(weak_vr_shell),
|
| + content_input_manager_(content_input_manager),
|
| + ui_input_manager_(ui_input_manager),
|
| + main_thread_task_runner_(std::move(main_thread_task_runner)),
|
| + weak_ptr_factory_(this) {
|
| + GvrInit(gvr_api);
|
| +}
|
|
|
| - if (for_web_vr)
|
| - metrics_helper_->SetWebVREnabled(true);
|
| - html_interface_.reset(new UiInterface(
|
| - for_web_vr ? UiInterface::Mode::WEB_VR : UiInterface::Mode::STANDARD,
|
| - main_contents_->IsFullscreen()));
|
| - content_compositor_.reset(new VrCompositor(content_window, false));
|
| - ui_compositor_.reset(new VrCompositor(ui_window, true));
|
| - vr_web_contents_observer_.reset(new VrWebContentsObserver(
|
| - main_contents, html_interface_.get(), this));
|
| +VrShellGl::~VrShellGl() {
|
| + draw_task_.Cancel();
|
| +}
|
|
|
| - LoadUIContentOnUI();
|
| +bool VrShellGl::Initialize() {
|
| + if (!InitializeGl()) return false;
|
|
|
| gvr::Mat4f identity;
|
| SetIdentityM(identity);
|
| webvr_head_pose_.resize(kPoseRingBufferSize, identity);
|
| webvr_head_pose_valid_.resize(kPoseRingBufferSize, false);
|
|
|
| - content_input_manager_.reset(new VrInputManager(main_contents_));
|
| - ui_input_manager_.reset(new VrInputManager(ui_contents_));
|
| - weak_content_input_manager_ = content_input_manager_->GetWeakPtr();
|
| - weak_ui_input_manager_ = ui_input_manager_->GetWeakPtr();
|
| + draw_task_.Reset(base::Bind(&VrShellGl::DrawFrame, base::Unretained(this)));
|
|
|
| - SetShowingOverscrollGlowOnUI(false);
|
| -}
|
| + scene_.reset(new UiScene);
|
|
|
| -void VrShell::UpdateCompositorLayersOnUI(JNIEnv* env,
|
| - const JavaParamRef<jobject>& obj) {
|
| - content_compositor_->SetLayer(main_contents_);
|
| - ui_compositor_->SetLayer(ui_contents_);
|
| -}
|
| + InitializeRenderer();
|
|
|
| -void VrShell::DestroyOnUI(JNIEnv* env, const JavaParamRef<jobject>& obj) {
|
| - delete this;
|
| + ScheduleNextDrawFrame();
|
| + return true;
|
| }
|
|
|
| -void VrShell::LoadUIContentOnUI() {
|
| - GURL url(kVrShellUIURL);
|
| - ui_contents_->GetController().LoadURL(
|
| - url, content::Referrer(),
|
| - ui::PageTransition::PAGE_TRANSITION_AUTO_TOPLEVEL, std::string(""));
|
| -}
|
| +bool VrShellGl::InitializeGl() {
|
| + if (gl::GetGLImplementation() == gl::kGLImplementationNone &&
|
| + !gl::init::InitializeGLOneOff()) {
|
| + LOG(ERROR) << "gl::init::InitializeGLOneOff failed";
|
| + ForceExitVR();
|
| + return false;
|
| + }
|
| + surface_ = gl::init::CreateOffscreenGLSurface(gfx::Size());
|
| + if (!surface_.get()) {
|
| + LOG(ERROR) << "gl::init::CreateOffscreenGLSurface failed";
|
| + ForceExitVR();
|
| + return false;
|
| + }
|
| + context_ = gl::init::CreateGLContext(nullptr, surface_.get(),
|
| + gl::GLContextAttribs());
|
| + if (!context_.get()) {
|
| + LOG(ERROR) << "gl::init::CreateGLContext failed";
|
| + ForceExitVR();
|
| + return false;
|
| + }
|
| + if (!context_->MakeCurrent(surface_.get())) {
|
| + LOG(ERROR) << "gl::GLContext::MakeCurrent() failed";
|
| + ForceExitVR();
|
| + return false;
|
| + }
|
|
|
| -bool RegisterVrShell(JNIEnv* env) {
|
| - return RegisterNativesImpl(env);
|
| + // TODO(mthiesse): We don't appear to have a VSync provider ever here. This is
|
| + // sort of okay, because the GVR swap chain will block if we render too fast,
|
| + // but we should address this properly.
|
| + if (surface_->GetVSyncProvider()) {
|
| + surface_->GetVSyncProvider()->GetVSyncParameters(base::Bind(
|
| + &VrShellGl::UpdateVSyncParameters, weak_ptr_factory_.GetWeakPtr()));
|
| + } else {
|
| + LOG(ERROR) << "No VSync Provider";
|
| + }
|
| +
|
| + unsigned int textures[2];
|
| + glGenTextures(2, textures);
|
| + ui_texture_id_ = textures[0];
|
| + content_texture_id_ = textures[1];
|
| + ui_surface_texture_ = gl::SurfaceTexture::Create(ui_texture_id_);
|
| + content_surface_texture_ = gl::SurfaceTexture::Create(content_texture_id_);
|
| + ui_surface_.reset(new gl::ScopedJavaSurface(ui_surface_texture_.get()));
|
| + content_surface_.reset(new gl::ScopedJavaSurface(
|
| + content_surface_texture_.get()));
|
| + ui_surface_texture_->SetFrameAvailableCallback(base::Bind(
|
| + &VrShellGl::OnUIFrameAvailable, weak_ptr_factory_.GetWeakPtr()));
|
| + content_surface_texture_->SetFrameAvailableCallback(base::Bind(
|
| + &VrShellGl::OnContentFrameAvailable, weak_ptr_factory_.GetWeakPtr()));
|
| +
|
| + content_surface_texture_->SetDefaultBufferSize(
|
| + content_tex_physical_size_.width, content_tex_physical_size_.height);
|
| + ui_surface_texture_->SetDefaultBufferSize(ui_tex_physical_size_.width,
|
| + ui_tex_physical_size_.height);
|
| +
|
| + main_thread_task_runner_->PostTask(FROM_HERE, base::Bind(
|
| + &VrShell::SurfacesChanged, weak_vr_shell_,
|
| + content_surface_->j_surface().obj(),
|
| + ui_surface_->j_surface().obj()));
|
| + return true;
|
| }
|
|
|
| -VrShell::~VrShell() {
|
| - if (delegate_ && delegate_->GetDeviceProvider()) {
|
| - delegate_->GetDeviceProvider()->OnGvrDelegateRemoved();
|
| - }
|
| - g_instance = nullptr;
|
| - gl::init::ShutdownGL();
|
| +void VrShellGl::OnUIFrameAvailable() {
|
| + ui_surface_texture_->UpdateTexImage();
|
| }
|
|
|
| -void VrShell::SetDelegateOnUI(JNIEnv* env,
|
| - const base::android::JavaParamRef<jobject>& obj,
|
| - const base::android::JavaParamRef<jobject>& delegate) {
|
| - base::AutoLock lock(gvr_init_lock_);
|
| - delegate_ = VrShellDelegate::GetNativeDelegate(env, delegate);
|
| - if (swap_chain_.get()) {
|
| - delegate_->GetDeviceProvider()->OnGvrDelegateReady(
|
| - weak_ptr_factory_.GetWeakPtr());
|
| - }
|
| +void VrShellGl::OnContentFrameAvailable() {
|
| + content_surface_texture_->UpdateTexImage();
|
| }
|
|
|
| -enum class ViewerType {
|
| - UNKNOWN_TYPE = 0,
|
| - CARDBOARD = 1,
|
| - DAYDREAM = 2,
|
| - VIEWER_TYPE_MAX,
|
| -};
|
| -
|
| -void VrShell::GvrInitOnGL(JNIEnv* env,
|
| - const JavaParamRef<jobject>& obj,
|
| - jlong native_gvr_api) {
|
| - // set the initial webvr state
|
| - metrics_helper_->SetVRActive(true);
|
| -
|
| - gvr_api_ =
|
| - gvr::GvrApi::WrapNonOwned(reinterpret_cast<gvr_context*>(native_gvr_api));
|
| - // TODO(klausw,crbug.com/655722): should report OnGvrDelegateReady here once
|
| - // we switch to using a WebVR render surface. We currently need to wait for
|
| - // the compositor window's size to be known first. See also
|
| - // ContentSurfaceChanged.
|
| - controller_.reset(
|
| - new VrController(reinterpret_cast<gvr_context*>(native_gvr_api)));
|
| -
|
| +void VrShellGl::GvrInit(gvr_context* gvr_api) {
|
| + gvr_api_ = gvr::GvrApi::WrapNonOwned(gvr_api);
|
| + controller_.reset(new VrController(gvr_api));
|
|
|
| ViewerType viewerType;
|
| switch (gvr_api_->GetViewerType()) {
|
| @@ -272,17 +301,7 @@ void VrShell::GvrInitOnGL(JNIEnv* env,
|
| static_cast<int>(ViewerType::VIEWER_TYPE_MAX));
|
| }
|
|
|
| -void VrShell::InitializeGlOnGL(JNIEnv* env,
|
| - const JavaParamRef<jobject>& obj,
|
| - jint content_texture_handle,
|
| - jint ui_texture_handle) {
|
| - base::AutoLock lock(gvr_init_lock_);
|
| - CHECK(gl::GetGLImplementation() != gl::kGLImplementationNone ||
|
| - gl::init::InitializeGLOneOff());
|
| -
|
| - content_texture_id_ = content_texture_handle;
|
| - ui_texture_id_ = ui_texture_handle;
|
| -
|
| +void VrShellGl::InitializeRenderer() {
|
| // While WebVR is going through the compositor path, it shares
|
| // the same texture ID. This will change once it gets its own
|
| // surface, but store it separately to avoid future confusion.
|
| @@ -357,41 +376,38 @@ void VrShell::InitializeGlOnGL(JNIEnv* env,
|
| webvr_right_viewport_.get());
|
| webvr_right_viewport_->SetSourceBufferIndex(kFramePrimaryBuffer);
|
|
|
| - if (delegate_) {
|
| - main_thread_task_runner_->PostTask(
|
| - FROM_HERE, base::Bind(&device::GvrDeviceProvider::OnGvrDelegateReady,
|
| - delegate_->GetDeviceProvider(),
|
| - weak_ptr_factory_.GetWeakPtr()));
|
| - }
|
| + main_thread_task_runner_->PostTask(FROM_HERE, base::Bind(
|
| + &VrShell::GvrDelegateReady, weak_vr_shell_));
|
| }
|
|
|
| -void VrShell::UpdateControllerOnGL(const gvr::Vec3f& forward_vector) {
|
| +void VrShellGl::UpdateController(const gvr::Vec3f& forward_vector) {
|
| controller_->UpdateState();
|
|
|
| #if defined(ENABLE_VR_SHELL)
|
| + // TODO(mthiesse): Fix menu button handling, which should be posted to the UI
|
| + // thread instead of handled here.
|
| +
|
| // Note that button up/down state is transient, so ButtonUpHappened only
|
| - // returns
|
| - // true for a single frame (and we're guaranteed not to miss it).
|
| + // returns true for a single frame (and we're guaranteed not to miss it).
|
| if (controller_->ButtonUpHappened(
|
| gvr::ControllerButton::GVR_CONTROLLER_BUTTON_APP)) {
|
| - html_interface_->SetMenuMode(!html_interface_->GetMenuMode());
|
| +// html_interface_->SetMenuMode(!html_interface_->GetMenuMode());
|
|
|
| // TODO(mthiesse): The page is no longer visible when in menu mode. We
|
| // should unfocus or otherwise let it know it's hidden.
|
| - if (html_interface_->GetMode() == UiInterface::Mode::WEB_VR) {
|
| - const auto&& task = html_interface_->GetMenuMode() ?
|
| - &device::GvrDeviceProvider::OnDisplayBlur :
|
| - &device::GvrDeviceProvider::OnDisplayFocus;
|
| - main_thread_task_runner_->PostTask(
|
| - FROM_HERE, base::Bind(task, delegate_->GetDeviceProvider()));
|
| - }
|
| +// if (html_interface_->GetMode() == UiInterface::Mode::WEB_VR) {
|
| +// const auto&& task = html_interface_->GetMenuMode() ?
|
| +// &device::GvrDeviceProvider::OnDisplayBlur :
|
| +// &device::GvrDeviceProvider::OnDisplayFocus;
|
| +// main_thread_task_runner_->PostTask(
|
| +// FROM_HERE, base::Bind(task, delegate_->GetDeviceProvider()));
|
| +// }
|
| }
|
| #endif
|
| - if (html_interface_->GetMode() == UiInterface::Mode::WEB_VR) {
|
| + if (vr_shell_->GetUiInterface()->GetMode() == UiInterface::Mode::WEB_VR) {
|
| // Process screen touch events for Cardboard button compatibility.
|
| // Also send tap events for controller "touchpad click" events.
|
| - if (touch_pending_ ||
|
| - controller_->ButtonUpHappened(
|
| + if (touch_pending_ || controller_->ButtonUpHappened(
|
| gvr::ControllerButton::GVR_CONTROLLER_BUTTON_CLICK)) {
|
| touch_pending_ = false;
|
| std::unique_ptr<WebGestureEvent> gesture(new WebGestureEvent());
|
| @@ -401,7 +417,7 @@ void VrShell::UpdateControllerOnGL(const gvr::Vec3f& forward_vector) {
|
| gesture->type = WebInputEvent::GestureTapDown;
|
| gesture->x = 0;
|
| gesture->y = 0;
|
| - SendGestureOnGL(CONTENT, std::move(gesture));
|
| + SendGesture(InputTarget::CONTENT, std::move(gesture));
|
| }
|
|
|
| return;
|
| @@ -461,7 +477,7 @@ void VrShell::UpdateControllerOnGL(const gvr::Vec3f& forward_vector) {
|
| int pixel_x = 0;
|
| int pixel_y = 0;
|
| target_element_ = nullptr;
|
| - InputTarget input_target = NONE;
|
| + InputTarget input_target = InputTarget::NONE;
|
|
|
| for (const auto& plane : scene_->GetUiElements()) {
|
| if (!plane->visible || !plane->hit_testable) {
|
| @@ -493,15 +509,16 @@ void VrShell::UpdateControllerOnGL(const gvr::Vec3f& forward_vector) {
|
|
|
| target_point_ = plane_intersection_point;
|
| target_element_ = plane.get();
|
| - input_target = plane->content_quad ? CONTENT : UI;
|
| + input_target = plane->content_quad ? InputTarget::CONTENT
|
| + : InputTarget::UI;
|
| }
|
| }
|
| - SendEventsToTargetOnGL(input_target, pixel_x, pixel_y);
|
| + SendEventsToTarget(input_target, pixel_x, pixel_y);
|
| }
|
|
|
| -void VrShell::SendEventsToTargetOnGL(InputTarget input_target,
|
| - int pixel_x,
|
| - int pixel_y) {
|
| +void VrShellGl::SendEventsToTarget(InputTarget input_target,
|
| + int pixel_x,
|
| + int pixel_y) {
|
| std::vector<std::unique_ptr<WebGestureEvent>> gesture_list =
|
| controller_->DetectGestures();
|
| double timestamp = gesture_list.front()->timeStampSeconds;
|
| @@ -524,15 +541,15 @@ void VrShell::SendEventsToTargetOnGL(InputTarget input_target,
|
| case WebInputEvent::GestureScrollEnd:
|
| case WebInputEvent::GestureFlingCancel:
|
| case WebInputEvent::GestureFlingStart:
|
| - SendGestureOnGL(CONTENT,
|
| - base::WrapUnique(new WebGestureEvent(*gesture)));
|
| + SendGesture(InputTarget::CONTENT,
|
| + base::WrapUnique(new WebGestureEvent(*gesture)));
|
| break;
|
| case WebInputEvent::GestureTapDown:
|
| gesture->x = pixel_x;
|
| gesture->y = pixel_y;
|
| - if (input_target != NONE)
|
| - SendGestureOnGL(input_target,
|
| - base::WrapUnique(new WebGestureEvent(*gesture)));
|
| + if (input_target != InputTarget::NONE)
|
| + SendGesture(input_target,
|
| + base::WrapUnique(new WebGestureEvent(*gesture)));
|
| break;
|
| case WebInputEvent::Undefined:
|
| break;
|
| @@ -543,66 +560,38 @@ void VrShell::SendEventsToTargetOnGL(InputTarget input_target,
|
|
|
| // Hover support
|
| bool new_target = input_target != current_input_target_;
|
| - if (new_target && current_input_target_ != NONE) {
|
| + if (new_target && current_input_target_ != InputTarget::NONE) {
|
| // Send a move event indicating that the pointer moved off of an element.
|
| - SendGestureOnGL(current_input_target_,
|
| - MakeMouseEvent(WebInputEvent::MouseLeave, timestamp, 0, 0));
|
| + SendGesture(current_input_target_,
|
| + MakeMouseEvent(WebInputEvent::MouseLeave, timestamp, 0, 0));
|
| }
|
| -
|
| current_input_target_ = input_target;
|
| - if (current_input_target_ != NONE) {
|
| + if (current_input_target_ != InputTarget::NONE) {
|
| WebInputEvent::Type type =
|
| new_target ? WebInputEvent::MouseEnter : WebInputEvent::MouseMove;
|
| - SendGestureOnGL(input_target,
|
| - MakeMouseEvent(type, timestamp, pixel_x, pixel_y));
|
| + SendGesture(input_target,
|
| + MakeMouseEvent(type, timestamp, pixel_x, pixel_y));
|
| }
|
| }
|
|
|
| -void VrShell::SendGestureOnGL(InputTarget input_target,
|
| - std::unique_ptr<blink::WebInputEvent> event) {
|
| - DCHECK(input_target != NONE);
|
| +void VrShellGl::SendGesture(InputTarget input_target,
|
| + std::unique_ptr<blink::WebInputEvent> event) {
|
| + DCHECK(input_target != InputTarget::NONE);
|
| const base::WeakPtr<VrInputManager>& weak_ptr =
|
| - input_target == CONTENT ? weak_content_input_manager_
|
| - : weak_ui_input_manager_;
|
| + input_target == InputTarget::CONTENT ? content_input_manager_
|
| + : ui_input_manager_;
|
| main_thread_task_runner_->PostTask(
|
| FROM_HERE,
|
| base::Bind(&VrInputManager::ProcessUpdatedGesture, weak_ptr,
|
| base::Passed(std::move(event))));
|
| }
|
|
|
| -void VrShell::SetGvrPoseForWebVr(const gvr::Mat4f& pose, uint32_t pose_num) {
|
| +void VrShellGl::SetGvrPoseForWebVr(const gvr::Mat4f& pose, uint32_t pose_num) {
|
| webvr_head_pose_[pose_num % kPoseRingBufferSize] = pose;
|
| webvr_head_pose_valid_[pose_num % kPoseRingBufferSize] = true;
|
| }
|
|
|
| -int GetPixelEncodedPoseIndexByte() {
|
| - TRACE_EVENT0("gpu", "VrShell::GetPixelEncodedPoseIndex");
|
| - // Read the pose index encoded in a bottom left pixel as color values.
|
| - // See also third_party/WebKit/Source/modules/vr/VRDisplay.cpp which
|
| - // encodes the pose index, and device/vr/android/gvr/gvr_device.cc
|
| - // which tracks poses. Returns the low byte (0..255) if valid, or -1
|
| - // if not valid due to bad magic number.
|
| - uint8_t pixels[4];
|
| - // Assume we're reading from the framebuffer we just wrote to.
|
| - // That's true currently, we may need to use glReadBuffer(GL_BACK)
|
| - // or equivalent if the rendering setup changes in the future.
|
| - glReadPixels(0, 0, 1, 1, GL_RGBA, GL_UNSIGNED_BYTE, pixels);
|
| -
|
| - // Check for the magic number written by VRDevice.cpp on submit.
|
| - // This helps avoid glitches from garbage data in the render
|
| - // buffer that can appear during initialization or resizing. These
|
| - // often appear as flashes of all-black or all-white pixels.
|
| - if (pixels[1] == kWebVrPosePixelMagicNumbers[0] &&
|
| - pixels[2] == kWebVrPosePixelMagicNumbers[1]) {
|
| - // Pose is good.
|
| - return pixels[0];
|
| - }
|
| - VLOG(1) << "WebVR: reject decoded pose index " << (int)pixels[0] <<
|
| - ", bad magic number " << (int)pixels[1] << ", " << (int)pixels[2];
|
| - return -1;
|
| -}
|
| -
|
| -bool VrShell::WebVrPoseByteIsValidOnGL(int pose_index_byte) {
|
| +bool VrShellGl::WebVrPoseByteIsValid(int pose_index_byte) {
|
| if (pose_index_byte < 0) {
|
| return false;
|
| }
|
| @@ -614,29 +603,31 @@ bool VrShell::WebVrPoseByteIsValidOnGL(int pose_index_byte) {
|
| return true;
|
| }
|
|
|
| -void VrShell::DrawFrameOnGL(JNIEnv* env, const JavaParamRef<jobject>& obj) {
|
| - TRACE_EVENT0("gpu", "VrShell::DrawFrame");
|
| +void VrShellGl::DrawFrame() {
|
| + TRACE_EVENT0("gpu", "VrShellGl::DrawFrame");
|
| // Reset the viewport list to just the pair of viewports for the
|
| // primary buffer each frame. Head-locked viewports get added by
|
| // DrawVrShell if needed.
|
| buffer_viewport_list_->SetToRecommendedBufferViewports();
|
|
|
| - if (html_interface_->GetMode() == UiInterface::Mode::WEB_VR) {
|
| - // If needed, resize the primary buffer for use with WebVR.
|
| - if (render_size_primary_ != render_size_primary_webvr_) {
|
| - if (!render_size_primary_webvr_.width) {
|
| - VLOG(2) << "WebVR rendering size not known yet, dropping frame";
|
| - return;
|
| - }
|
| - render_size_primary_ = render_size_primary_webvr_;
|
| - swap_chain_->ResizeBuffer(kFramePrimaryBuffer, render_size_primary_);
|
| - }
|
| - } else {
|
| - if (render_size_primary_ != render_size_primary_vrshell_) {
|
| - render_size_primary_ = render_size_primary_vrshell_;
|
| - swap_chain_->ResizeBuffer(kFramePrimaryBuffer, render_size_primary_);
|
| - }
|
| - }
|
| + // TODO(klausw): Fix this. Resizing buffers here leads to WebVR mode showing
|
| + // nothing but a black screen.
|
| +// if (vr_shell_->GetUiInterface()->GetMode() == UiInterface::Mode::WEB_VR) {
|
| +// // If needed, resize the primary buffer for use with WebVR.
|
| +// if (render_size_primary_ != render_size_primary_webvr_) {
|
| +// if (!render_size_primary_webvr_.width) {
|
| +// VLOG(2) << "WebVR rendering size not known yet, dropping frame";
|
| +// return;
|
| +// }
|
| +// render_size_primary_ = render_size_primary_webvr_;
|
| +// swap_chain_->ResizeBuffer(kFramePrimaryBuffer, render_size_primary_);
|
| +// }
|
| +// } else {
|
| +// if (render_size_primary_ != render_size_primary_vrshell_) {
|
| +// render_size_primary_ = render_size_primary_vrshell_;
|
| +// swap_chain_->ResizeBuffer(kFramePrimaryBuffer, render_size_primary_);
|
| +// }
|
| +// }
|
|
|
| gvr::Frame frame = swap_chain_->AcquireFrame();
|
| gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow();
|
| @@ -655,19 +646,16 @@ void VrShell::DrawFrameOnGL(JNIEnv* env, const JavaParamRef<jobject>& obj) {
|
| gvr_api_->ApplyNeckModel(head_pose, 1.0f);
|
| }
|
|
|
| - // Bind the primary framebuffer.
|
| frame.BindBuffer(kFramePrimaryBuffer);
|
|
|
| - HandleQueuedTasksOnGL();
|
| -
|
| // Update the render position of all UI elements (including desktop).
|
| const float screen_tilt = kDesktopScreenTiltDefault * M_PI / 180.0f;
|
| scene_->UpdateTransforms(screen_tilt, UiScene::TimeInMicroseconds());
|
|
|
| - UpdateControllerOnGL(GetForwardVector(head_pose));
|
| + UpdateController(GetForwardVector(head_pose));
|
|
|
| - if (html_interface_->GetMode() == UiInterface::Mode::WEB_VR) {
|
| - DrawWebVrOnGL();
|
| + if (vr_shell_->GetUiInterface()->GetMode() == UiInterface::Mode::WEB_VR) {
|
| + DrawWebVr();
|
|
|
| // When using async reprojection, we need to know which pose was used in
|
| // the WebVR app for drawing this frame. Due to unknown amounts of
|
| @@ -678,7 +666,7 @@ void VrShell::DrawFrameOnGL(JNIEnv* env, const JavaParamRef<jobject>& obj) {
|
| // doing this once we have working no-compositor rendering for WebVR.
|
| if (gvr_api_->GetAsyncReprojectionEnabled()) {
|
| int pose_index_byte = GetPixelEncodedPoseIndexByte();
|
| - if (WebVrPoseByteIsValidOnGL(pose_index_byte)) {
|
| + if (WebVrPoseByteIsValid(pose_index_byte)) {
|
| // We have a valid pose, use it for reprojection.
|
| webvr_left_viewport_->SetReprojection(GVR_REPROJECTION_FULL);
|
| webvr_right_viewport_->SetReprojection(GVR_REPROJECTION_FULL);
|
| @@ -699,15 +687,18 @@ void VrShell::DrawFrameOnGL(JNIEnv* env, const JavaParamRef<jobject>& obj) {
|
| }
|
| }
|
|
|
| - DrawVrShellOnGL(head_pose, frame);
|
| + DrawVrShell(head_pose, frame);
|
|
|
| frame.Unbind();
|
| frame.Submit(*buffer_viewport_list_, head_pose);
|
| +
|
| + // No need to SwapBuffers for an offscreen surface.
|
| + ScheduleNextDrawFrame();
|
| }
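
Stripped of the UI, controller, and WebVR handling, the per-frame GVR flow that
DrawFrame() implements reduces to the sketch below. The free-standing function
is illustrative; it only uses the gvr:: calls visible in this patch plus the
file's kFramePrimaryBuffer constant, and it assumes a head pose has already
been obtained from gvr_api_.

    void DrawOneFrame(gvr::SwapChain* swap_chain,
                      gvr::BufferViewportList* viewport_list,
                      const gvr::Mat4f& head_pose) {
      // Acquire a frame from the swap chain, render into its primary buffer,
      // then hand it back to GVR for distortion and presentation.
      gvr::Frame frame = swap_chain->AcquireFrame();
      frame.BindBuffer(kFramePrimaryBuffer);
      // ... GL draw calls for the scene go here ...
      frame.Unbind();
      frame.Submit(*viewport_list, head_pose);
    }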
|
|
|
| -void VrShell::DrawVrShellOnGL(const gvr::Mat4f& head_pose,
|
| - gvr::Frame &frame) {
|
| - TRACE_EVENT0("gpu", "VrShell::DrawVrShell");
|
| +void VrShellGl::DrawVrShell(const gvr::Mat4f& head_pose,
|
| + gvr::Frame &frame) {
|
| + TRACE_EVENT0("gpu", "VrShellGl::DrawVrShell");
|
| std::vector<const ContentRectangle*> head_locked_elements;
|
| std::vector<const ContentRectangle*> world_elements;
|
| for (const auto& rect : scene_->GetUiElements()) {
|
| @@ -721,7 +712,7 @@ void VrShell::DrawVrShellOnGL(const gvr::Mat4f& head_pose,
|
| }
|
| }
|
|
|
| - if (html_interface_->GetMode() == UiInterface::Mode::WEB_VR) {
|
| + if (vr_shell_->GetUiInterface()->GetMode() == UiInterface::Mode::WEB_VR) {
|
| // WebVR is incompatible with 3D world compositing since the
|
| // depth buffer was already populated with unknown scaling - the
|
| // WebVR app has full control over zNear/zFar. Just leave the
|
| @@ -738,9 +729,8 @@ void VrShell::DrawVrShellOnGL(const gvr::Mat4f& head_pose,
|
| glClearColor(0.1f, 0.1f, 0.1f, 1.0f);
|
| glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
|
| }
|
| -
|
| if (!world_elements.empty()) {
|
| - DrawUiViewOnGL(&head_pose, world_elements, render_size_primary_,
|
| + DrawUiView(&head_pose, world_elements, render_size_primary_,
|
| kViewportListPrimaryOffset);
|
| }
|
|
|
| @@ -755,33 +745,35 @@ void VrShell::DrawVrShellOnGL(const gvr::Mat4f& head_pose,
|
| *headlocked_right_viewport_);
|
|
|
| // Bind the headlocked framebuffer.
|
| + // TODO(mthiesse): We don't unbind this? Maybe some cleanup is in order
|
| + // here.
|
| frame.BindBuffer(kFrameHeadlockedBuffer);
|
| glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
|
| glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
|
| - DrawUiViewOnGL(nullptr, head_locked_elements, render_size_headlocked_,
|
| + DrawUiView(nullptr, head_locked_elements, render_size_headlocked_,
|
| kViewportListHeadlockedOffset);
|
| }
|
| }
|
|
|
| -void VrShell::SetWebVRRenderSurfaceSize(int width, int height) {
|
| +void VrShellGl::SetWebVRRenderSurfaceSize(int width, int height) {
|
| render_size_primary_webvr_.width = width;
|
| render_size_primary_webvr_.height = height;
|
| // TODO(klausw,crbug.com/655722): set the WebVR render surface size here once
|
| // we have that.
|
| }
|
|
|
| -gvr::Sizei VrShell::GetWebVRCompositorSurfaceSize() {
|
| +gvr::Sizei VrShellGl::GetWebVRCompositorSurfaceSize() {
|
| // This is a stopgap while we're using the WebVR compositor rendering path.
|
| // TODO(klausw,crbug.com/655722): Remove this method and member once we're
|
| // using a separate WebVR render surface.
|
| return content_tex_physical_size_;
|
| }
|
|
|
| -
|
| -void VrShell::DrawUiViewOnGL(const gvr::Mat4f* head_pose,
|
| - const std::vector<const ContentRectangle*>& elements,
|
| - const gvr::Sizei& render_size, int viewport_offset) {
|
| - TRACE_EVENT0("gpu", "VrShell::DrawUiView");
|
| +void VrShellGl::DrawUiView(const gvr::Mat4f* head_pose,
|
| + const std::vector<const ContentRectangle*>& elements,
|
| + const gvr::Sizei& render_size,
|
| + int viewport_offset) {
|
| + TRACE_EVENT0("gpu", "VrShellGl::DrawUiView");
|
| for (auto eye : {GVR_LEFT_EYE, GVR_RIGHT_EYE}) {
|
| buffer_viewport_list_->GetBufferViewport(
|
| eye + viewport_offset, buffer_viewport_.get());
|
| @@ -802,15 +794,15 @@ void VrShell::DrawUiViewOnGL(const gvr::Mat4f* head_pose,
|
| buffer_viewport_->GetSourceFov(), kZNear, kZFar),
|
| view_matrix);
|
|
|
| - DrawElementsOnGL(render_matrix, elements);
|
| + DrawElements(render_matrix, elements);
|
| if (head_pose != nullptr &&
|
| - html_interface_->GetMode() != UiInterface::Mode::WEB_VR) {
|
| - DrawCursorOnGL(render_matrix);
|
| + vr_shell_->GetUiInterface()->GetMode() != UiInterface::Mode::WEB_VR) {
|
| + DrawCursor(render_matrix);
|
| }
|
| }
|
| }
|
|
|
| -void VrShell::DrawElementsOnGL(
|
| +void VrShellGl::DrawElements(
|
| const gvr::Mat4f& render_matrix,
|
| const std::vector<const ContentRectangle*>& elements) {
|
| for (const auto& rect : elements) {
|
| @@ -834,7 +826,7 @@ void VrShell::DrawElementsOnGL(
|
| }
|
| }
|
|
|
| -void VrShell::DrawCursorOnGL(const gvr::Mat4f& render_matrix) {
|
| +void VrShellGl::DrawCursor(const gvr::Mat4f& render_matrix) {
|
| gvr::Mat4f mat;
|
| SetIdentityM(mat);
|
|
|
| @@ -907,8 +899,8 @@ void VrShell::DrawCursorOnGL(const gvr::Mat4f& render_matrix) {
|
| }
|
| }
|
|
|
| -void VrShell::DrawWebVrOnGL() {
|
| - TRACE_EVENT0("gpu", "VrShell::DrawWebVr");
|
| +void VrShellGl::DrawWebVr() {
|
| + TRACE_EVENT0("gpu", "VrShellGl::DrawWebVr");
|
| // Don't need face culling, depth testing, blending, etc. Turn it all off.
|
| glDisable(GL_CULL_FACE);
|
| glDepthMask(GL_FALSE);
|
| @@ -926,240 +918,96 @@ void VrShell::DrawWebVrOnGL() {
|
| *webvr_right_viewport_);
|
| }
|
|
|
| -void VrShell::OnTriggerEventOnUI(JNIEnv* env,
|
| - const JavaParamRef<jobject>& obj) {
|
| +void VrShellGl::OnTriggerEvent() {
|
| // Set a flag to handle this on the render thread at the next frame.
|
| touch_pending_ = true;
|
| }
|
|
|
| -void VrShell::OnPauseOnUI(JNIEnv* env, const JavaParamRef<jobject>& obj) {
|
| - if (gvr_api_ == nullptr)
|
| - return;
|
| -
|
| - // TODO(mthiesse): Clean up threading here.
|
| +void VrShellGl::OnPause() {
|
| + draw_task_.Cancel();
|
| controller_->OnPause();
|
| gvr_api_->PauseTracking();
|
| - SetShowingOverscrollGlowOnUI(true);
|
| -
|
| - // exit vr session
|
| - metrics_helper_->SetVRActive(false);
|
| }
|
|
|
| -void VrShell::OnResumeOnUI(JNIEnv* env, const JavaParamRef<jobject>& obj) {
|
| - if (gvr_api_ == nullptr)
|
| - return;
|
| -
|
| - // TODO(mthiesse): Clean up threading here.
|
| +void VrShellGl::OnResume() {
|
| gvr_api_->RefreshViewerProfile();
|
| gvr_api_->ResumeTracking();
|
| controller_->OnResume();
|
| - SetShowingOverscrollGlowOnUI(false);
|
| -
|
| - // exit vr session
|
| - metrics_helper_->SetVRActive(true);
|
| + draw_task_.Reset(base::Bind(&VrShellGl::DrawFrame, base::Unretained(this)));
|
| + ScheduleNextDrawFrame();
|
| }
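
The pause/resume handling above leans on base::CancelableCallback semantics:
Cancel() also invalidates copies of callback() that were already posted, so a
DrawFrame task still sitting in the queue becomes a no-op instead of racing
with OnPause(). A minimal standalone sketch of that pattern (the class, method
names, and the 16 ms fallback delay are illustrative; the actual member type of
draw_task_ is declared in vr_shell_gl.h, which is not part of this hunk):

    #include "base/bind.h"
    #include "base/cancelable_callback.h"
    #include "base/location.h"
    #include "base/threading/thread_task_runner_handle.h"
    #include "base/time/time.h"

    class RenderLoop {
     public:
      void Start() {
        // Re-arm the cancelable callback and kick off the first frame.
        draw_task_.Reset(base::Bind(&RenderLoop::Draw, base::Unretained(this)));
        ScheduleDraw();
      }
      // Invalidates any already-posted copy of draw_task_.callback().
      void Stop() { draw_task_.Cancel(); }

     private:
      void Draw() {
        // ... render one frame ...
        ScheduleDraw();
      }
      void ScheduleDraw() {
        base::ThreadTaskRunnerHandle::Get()->PostDelayedTask(
            FROM_HERE, draw_task_.callback(),
            base::TimeDelta::FromMilliseconds(16));
      }
      base::CancelableClosure draw_task_;
    };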
|
|
|
| -void VrShell::SetShowingOverscrollGlowOnUI(bool showing_glow) {
|
| - main_contents_->GetRenderWidgetHostView()->SetShowingOverscrollGlow(
|
| - showing_glow);
|
| -}
|
| -
|
| -base::WeakPtr<VrShell> VrShell::GetWeakPtrOnUI(
|
| - const content::WebContents* web_contents) {
|
| - // Ensure that the WebContents requesting the VrShell instance is the one
|
| - // we created.
|
| - if (g_instance != nullptr && g_instance->ui_contents_ == web_contents)
|
| - return g_instance->weak_ptr_factory_.GetWeakPtr();
|
| - return base::WeakPtr<VrShell>(nullptr);
|
| -}
|
| -
|
| -void VrShell::OnDomContentsLoadedOnUI() {
|
| - html_interface_->SetURL(main_contents_->GetVisibleURL());
|
| - html_interface_->SetLoading(main_contents_->IsLoading());
|
| - html_interface_->OnDomContentsLoaded();
|
| -}
|
| -
|
| -void VrShell::SetWebVrModeOnUI(JNIEnv* env,
|
| - const base::android::JavaParamRef<jobject>& obj,
|
| - bool enabled) {
|
| - metrics_helper_->SetWebVREnabled(enabled);
|
| +void VrShellGl::SetWebVrMode(bool enabled) {
|
| if (enabled) {
|
| - html_interface_->SetMode(UiInterface::Mode::WEB_VR);
|
| + vr_shell_->GetUiInterface()->SetMode(UiInterface::Mode::WEB_VR);
|
| } else {
|
| - html_interface_->SetMode(UiInterface::Mode::STANDARD);
|
| + vr_shell_->GetUiInterface()->SetMode(UiInterface::Mode::STANDARD);
|
| }
|
| }
|
|
|
| -void VrShell::SetWebVRSecureOrigin(bool secure_origin) {
|
| - // TODO(cjgrant): Align this state with the logic that drives the omnibox.
|
| - html_interface_->SetWebVRSecureOrigin(secure_origin);
|
| -}
|
| -
|
| -void VrShell::SubmitWebVRFrame() {}
|
| -
|
| -void VrShell::UpdateWebVRTextureBounds(const gvr::Rectf& left_bounds,
|
| - const gvr::Rectf& right_bounds) {
|
| +void VrShellGl::UpdateWebVRTextureBounds(const gvr::Rectf& left_bounds,
|
| + const gvr::Rectf& right_bounds) {
|
| webvr_left_viewport_->SetSourceUv(left_bounds);
|
| webvr_right_viewport_->SetSourceUv(right_bounds);
|
| }
|
|
|
| -gvr::GvrApi* VrShell::gvr_api() {
|
| +gvr::GvrApi* VrShellGl::gvr_api() {
|
| return gvr_api_.get();
|
| }
|
|
|
| -void VrShell::SurfacesChangedOnUI(JNIEnv* env,
|
| - const JavaParamRef<jobject>& object,
|
| - const JavaParamRef<jobject>& content_surface,
|
| - const JavaParamRef<jobject>& ui_surface) {
|
| - content_compositor_->SurfaceChanged(content_surface);
|
| - ui_compositor_->SurfaceChanged(ui_surface);
|
| +void VrShellGl::ContentBoundsChanged(int width, int height) {
|
| + TRACE_EVENT0("gpu", "VrShellGl::ContentBoundsChanged");
|
| + content_tex_css_width_ = width;
|
| + content_tex_css_height_ = height;
|
| }
|
|
|
| -void VrShell::ContentBoundsChangedOnUI(JNIEnv* env,
|
| - const JavaParamRef<jobject>& object,
|
| - jint width, jint height, jfloat dpr) {
|
| - TRACE_EVENT0("gpu", "VrShell::ContentBoundsChanged");
|
| +void VrShellGl::ContentPhysicalBoundsChanged(int width, int height) {
|
| + if (content_surface_texture_.get())
|
| + content_surface_texture_->SetDefaultBufferSize(width, height);
|
| content_tex_physical_size_.width = width;
|
| content_tex_physical_size_.height = height;
|
| - // TODO(mthiesse): Synchronize with GL thread, and update tex css size in
|
| - // response to MainFrameWasResized, not here.
|
| - content_tex_css_width_ = width / dpr;
|
| - content_tex_css_height_ = height / dpr;
|
| -
|
| - content_compositor_->SetWindowBounds(width, height);
|
| }
|
|
|
| -void VrShell::UIBoundsChangedOnUI(JNIEnv* env,
|
| - const JavaParamRef<jobject>& object,
|
| - jint width, jint height, jfloat dpr) {
|
| - ui_compositor_->SetWindowBounds(width, height);
|
| +void VrShellGl::UIBoundsChanged(int width, int height) {
|
| + ui_tex_css_width_ = width;
|
| + ui_tex_css_height_ = height;
|
| }
|
|
|
| -UiScene* VrShell::GetSceneOnGL() {
|
| - return scene_.get();
|
| +void VrShellGl::UIPhysicalBoundsChanged(int width, int height) {
|
| + if (ui_surface_texture_.get())
|
| + ui_surface_texture_->SetDefaultBufferSize(width, height);
|
| + ui_tex_physical_size_.width = width;
|
| + ui_tex_physical_size_.height = height;
|
| }
|
|
|
| -UiInterface* VrShell::GetUiInterfaceOnGL() {
|
| - return html_interface_.get();
|
| +base::WeakPtr<VrShellGl> VrShellGl::GetWeakPtr() {
|
| + return weak_ptr_factory_.GetWeakPtr();
|
| }
|
|
|
| -void VrShell::QueueTaskOnUI(base::Callback<void()>& callback) {
|
| - base::AutoLock lock(task_queue_lock_);
|
| - task_queue_.push(callback);
|
| +void VrShellGl::UpdateVSyncParameters(const base::TimeTicks timebase,
|
| + const base::TimeDelta interval) {
|
| + vsync_timebase_ = timebase;
|
| + vsync_interval_ = interval;
|
| }
|
|
|
| -void VrShell::HandleQueuedTasksOnGL() {
|
| - // To protect a stream of tasks from blocking rendering indefinitely,
|
| - // process only the number of tasks present when first checked.
|
| - std::vector<base::Callback<void()>> tasks;
|
| - {
|
| - base::AutoLock lock(task_queue_lock_);
|
| - const size_t count = task_queue_.size();
|
| - for (size_t i = 0; i < count; i++) {
|
| - tasks.push_back(task_queue_.front());
|
| - task_queue_.pop();
|
| - }
|
| - }
|
| - for (auto &task : tasks) {
|
| - task.Run();
|
| - }
|
| -}
|
| +void VrShellGl::ScheduleNextDrawFrame() {
|
| + base::TimeTicks now = base::TimeTicks::Now();
|
| + base::TimeTicks target;
|
|
|
| -void VrShell::DoUiActionOnUI(const UiAction action) {
|
| - content::NavigationController& controller = main_contents_->GetController();
|
| - switch (action) {
|
| - case HISTORY_BACK:
|
| - if (main_contents_->IsFullscreen()) {
|
| - main_contents_->ExitFullscreen(true /* will_cause_resize */);
|
| - } else if (controller.CanGoBack()) {
|
| - controller.GoBack();
|
| - }
|
| - break;
|
| - case HISTORY_FORWARD:
|
| - if (controller.CanGoForward())
|
| - controller.GoForward();
|
| - break;
|
| - case RELOAD:
|
| - controller.Reload(false);
|
| - break;
|
| -#if defined(ENABLE_VR_SHELL_UI_DEV)
|
| - case RELOAD_UI:
|
| - ui_contents_->GetController().Reload(false);
|
| - html_interface_.reset(new UiInterface(UiInterface::Mode::STANDARD,
|
| - main_contents_->IsFullscreen()));
|
| - vr_web_contents_observer_->SetUiInterface(html_interface_.get());
|
| - break;
|
| -#endif
|
| - case ZOOM_OUT: // Not handled yet.
|
| - case ZOOM_IN: // Not handled yet.
|
| - break;
|
| - default:
|
| - NOTREACHED();
|
| + if (vsync_interval_.is_zero()) {
|
| + target = now;
|
| + } else {
|
| + target = now + vsync_interval_;
|
| + int64_t intervals = (target - vsync_timebase_) / vsync_interval_;
|
| + target = vsync_timebase_ + intervals * vsync_interval_;
|
| }
|
| -}
|
| -
|
| -void VrShell::RenderViewHostChanged(content::RenderViewHost* old_host,
|
| - content::RenderViewHost* new_host) {
|
| - new_host->GetWidget()->GetView()->SetBackgroundColor(SK_ColorTRANSPARENT);
|
| -}
|
| -
|
| -void VrShell::MainFrameWasResized(bool width_changed) {
|
| - display::Display display = display::Screen::GetScreen()
|
| - ->GetDisplayNearestWindow(ui_contents_->GetNativeView());
|
| - // TODO(mthiesse): Synchronize with GL thread.
|
| - ui_tex_css_width_ = display.size().width();
|
| - ui_tex_css_height_ = display.size().height();
|
| -}
|
|
|
| -void VrShell::WebContentsDestroyed() {
|
| - ui_input_manager_.reset();
|
| - ui_contents_ = nullptr;
|
| - // TODO(mthiesse): Handle web contents being destroyed.
|
| - delegate_->ForceExitVr();
|
| + task_runner_->PostDelayedTask(FROM_HERE, draw_task_.callback(), target - now);
|
| }
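
The arithmetic above snaps the wakeup time onto the vsync grid defined by
(vsync_timebase_, vsync_interval_): it advances one interval past |now| and
then rounds down to the nearest vsync boundary. A worked example, factored
into a standalone helper with an illustrative name (same math, assuming only
the base::TimeTicks/TimeDelta operators already used above):

    #include "base/time/time.h"

    // With timebase t0 = 0us, interval I = 16667us and now = 20000us:
    // now + I = 36667us, which is 2 whole intervals past t0, so the target is
    // 33334us and the returned delay is 13334us.
    base::TimeDelta DelayToNextVSync(base::TimeTicks now,
                                     base::TimeTicks timebase,
                                     base::TimeDelta interval) {
      if (interval.is_zero())
        return base::TimeDelta();  // No vsync info yet; run immediately.
      int64_t intervals = (now + interval - timebase) / interval;
      return timebase + intervals * interval - now;
    }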
|
|
|
| -void VrShell::ContentWebContentsDestroyedOnUI() {
|
| - content_input_manager_.reset();
|
| - main_contents_ = nullptr;
|
| - // TODO(mthiesse): Handle web contents being destroyed.
|
| - delegate_->ForceExitVr();
|
| -}
|
| -
|
| -void VrShell::ContentWasHiddenOnUI() {
|
| - // Ensure we don't continue sending input to it.
|
| - content_input_manager_.reset();
|
| - // TODO(mthiesse): Handle web contents being hidden.
|
| - delegate_->ForceExitVr();
|
| -}
|
| -
|
| -void VrShell::SetContentCssSizeOnUI(float width, float height, float dpr) {
|
| - JNIEnv* env = base::android::AttachCurrentThread();
|
| - Java_VrShellImpl_setContentCssSizeOnUI(env, j_vr_shell_.obj(), width, height,
|
| - dpr);
|
| -}
|
| -
|
| -void VrShell::SetUiCssSizeOnUI(float width, float height, float dpr) {
|
| - JNIEnv* env = base::android::AttachCurrentThread();
|
| - Java_VrShellImpl_setUiCssSizeOnUI(env, j_vr_shell_.obj(), width, height, dpr);
|
| -}
|
| -
|
| -// ----------------------------------------------------------------------------
|
| -// Native JNI methods
|
| -// ----------------------------------------------------------------------------
|
| -
|
| -jlong InitOnUI(JNIEnv* env,
|
| - const JavaParamRef<jobject>& obj,
|
| - const JavaParamRef<jobject>& content_web_contents,
|
| - jlong content_window_android,
|
| - const JavaParamRef<jobject>& ui_web_contents,
|
| - jlong ui_window_android,
|
| - jboolean for_web_vr) {
|
| - return reinterpret_cast<intptr_t>(new VrShell(
|
| - env, obj, content::WebContents::FromJavaWebContents(content_web_contents),
|
| - reinterpret_cast<ui::WindowAndroid*>(content_window_android),
|
| - content::WebContents::FromJavaWebContents(ui_web_contents),
|
| - reinterpret_cast<ui::WindowAndroid*>(ui_window_android),
|
| - for_web_vr));
|
| +void VrShellGl::ForceExitVR() {
|
| + main_thread_task_runner_->PostTask(
|
| + FROM_HERE, base::Bind(&VrShell::ForceExitVR, weak_vr_shell_));
|
| }
|
|
|
| } // namespace vr_shell
|
|
|