Chromium Code Reviews

Unified Diff: chrome/browser/android/vr_shell/vr_shell_gl.cc

Issue 2562733002: Implement our own GLThread for VR Shell. (Closed)
Patch Set: Created 4 years ago
Index: chrome/browser/android/vr_shell/vr_shell_gl.cc
diff --git a/chrome/browser/android/vr_shell/vr_shell.cc b/chrome/browser/android/vr_shell/vr_shell_gl.cc
similarity index 64%
copy from chrome/browser/android/vr_shell/vr_shell.cc
copy to chrome/browser/android/vr_shell/vr_shell_gl.cc
index f9152a5bd3a640c629600919a9a47562f9ce33e4..b0c28c2d8f485893d810361338bc57c19a95b7fe 100644
--- a/chrome/browser/android/vr_shell/vr_shell.cc
+++ b/chrome/browser/android/vr_shell/vr_shell_gl.cc
@@ -2,38 +2,29 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#include "chrome/browser/android/vr_shell/vr_shell.h"
+#include "chrome/browser/android/vr_shell/vr_shell_gl.h"
+#include "base/memory/ptr_util.h"
#include "base/metrics/histogram_macros.h"
+#include "base/threading/thread_task_runner_handle.h"
#include "chrome/browser/android/vr_shell/ui_elements.h"
#include "chrome/browser/android/vr_shell/ui_interface.h"
#include "chrome/browser/android/vr_shell/ui_scene.h"
-#include "chrome/browser/android/vr_shell/vr_compositor.h"
#include "chrome/browser/android/vr_shell/vr_controller.h"
#include "chrome/browser/android/vr_shell/vr_gl_util.h"
#include "chrome/browser/android/vr_shell/vr_input_manager.h"
-#include "chrome/browser/android/vr_shell/vr_shell_delegate.h"
+#include "chrome/browser/android/vr_shell/vr_math.h"
+#include "chrome/browser/android/vr_shell/vr_shell.h"
#include "chrome/browser/android/vr_shell/vr_shell_renderer.h"
-#include "chrome/browser/android/vr_shell/vr_usage_monitor.h"
-#include "chrome/browser/android/vr_shell/vr_web_contents_observer.h"
-#include "content/public/browser/navigation_controller.h"
-#include "content/public/browser/render_view_host.h"
-#include "content/public/browser/render_widget_host.h"
-#include "content/public/browser/render_widget_host_view.h"
-#include "content/public/browser/web_contents.h"
-#include "content/public/common/referrer.h"
-#include "device/vr/android/gvr/gvr_device_provider.h"
-#include "jni/VrShellImpl_jni.h"
-#include "ui/android/view_android.h"
-#include "ui/android/window_android.h"
-#include "ui/base/page_transition_types.h"
-#include "ui/display/display.h"
-#include "ui/display/screen.h"
+#include "third_party/WebKit/public/platform/WebInputEvent.h"
+#include "ui/gfx/vsync_provider.h"
+#include "ui/gl/android/scoped_java_surface.h"
+#include "ui/gl/android/surface_texture.h"
#include "ui/gl/gl_bindings.h"
+#include "ui/gl/gl_context.h"
+#include "ui/gl/gl_surface.h"
#include "ui/gl/init/gl_factory.h"
-using base::android::JavaParamRef;
-
namespace vr_shell {
namespace {
@@ -95,10 +86,6 @@ static constexpr int kViewportListHeadlockedOffset = 2;
// data. Must match the magic numbers used in blink's VRDisplay.cpp.
static constexpr std::array<uint8_t, 2> kWebVrPosePixelMagicNumbers{{42, 142}};
-vr_shell::VrShell* g_instance;
-
-static const char kVrShellUIURL[] = "chrome://vr-shell-ui";
-
float Distance(const gvr::Vec3f& vec1, const gvr::Vec3f& vec2) {
float xdiff = (vec1.x - vec2.x);
float ydiff = (vec1.y - vec2.y);
@@ -147,111 +134,137 @@ std::unique_ptr<blink::WebMouseEvent> MakeMouseEvent(WebInputEvent::Type type,
return mouse_event;
}
-} // namespace
-VrShell::VrShell(JNIEnv* env,
- jobject obj,
- content::WebContents* main_contents,
- ui::WindowAndroid* content_window,
- content::WebContents* ui_contents,
- ui::WindowAndroid* ui_window,
- bool for_web_vr)
- : WebContentsObserver(ui_contents),
- main_contents_(main_contents),
- ui_contents_(ui_contents),
- metrics_helper_(new VrMetricsHelper(main_contents)),
- main_thread_task_runner_(base::ThreadTaskRunnerHandle::Get()),
- weak_ptr_factory_(this) {
- DCHECK(g_instance == nullptr);
- g_instance = this;
- j_vr_shell_.Reset(env, obj);
- scene_.reset(new UiScene);
+enum class ViewerType {
+ UNKNOWN_TYPE = 0,
+ CARDBOARD = 1,
+ DAYDREAM = 2,
+ VIEWER_TYPE_MAX,
+};
- if (for_web_vr)
- metrics_helper_->SetWebVREnabled(true);
- html_interface_.reset(new UiInterface(
- for_web_vr ? UiInterface::Mode::WEB_VR : UiInterface::Mode::STANDARD,
- main_contents_->IsFullscreen()));
- content_compositor_.reset(new VrCompositor(content_window, false));
- ui_compositor_.reset(new VrCompositor(ui_window, true));
- vr_web_contents_observer_.reset(new VrWebContentsObserver(
- main_contents, html_interface_.get(), this));
+int GetPixelEncodedPoseIndexByte() {
+ TRACE_EVENT0("gpu", "VrShellGl::GetPixelEncodedPoseIndex");
+ // Read the pose index encoded in a bottom left pixel as color values.
+ // See also third_party/WebKit/Source/modules/vr/VRDisplay.cpp which
+ // encodes the pose index, and device/vr/android/gvr/gvr_device.cc
+ // which tracks poses. Returns the low byte (0..255) if valid, or -1
+ // if not valid due to bad magic number.
+ uint8_t pixels[4];
+ // Assume we're reading from the framebuffer we just wrote to.
+ // That's true currently, but we may need to use glReadBuffer(GL_BACK)
+ // or equivalent if the rendering setup changes in the future.
+ glReadPixels(0, 0, 1, 1, GL_RGBA, GL_UNSIGNED_BYTE, pixels);
- LoadUIContentOnUI();
+ // Check for the magic number written by VRDevice.cpp on submit.
+ // This helps avoid glitches from garbage data in the render
+ // buffer that can appear during initialization or resizing. These
+ // often appear as flashes of all-black or all-white pixels.
+ if (pixels[1] == kWebVrPosePixelMagicNumbers[0] &&
+ pixels[2] == kWebVrPosePixelMagicNumbers[1]) {
+ // Pose is good.
+ return pixels[0];
+ }
+ VLOG(1) << "WebVR: reject decoded pose index " << (int)pixels[0] <<
+ ", bad magic number " << (int)pixels[1] << ", " << (int)pixels[2];
+ return -1;
+}
+
+} // namespace
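
For reference, the producer side of this pixel handshake lives in blink's VRDisplay.cpp; this patch only contains the reader. A minimal sketch of the convention GetPixelEncodedPoseIndexByte() expects (function name hypothetical, not part of the patch):

#include <cstdint>

// Illustration only: the submitting side writes the low byte of the pose
// index plus the two magic bytes into the RGBA channels of the bottom-left
// pixel, which the reader above recovers with glReadPixels.
void EncodePoseIndexPixel(uint32_t pose_index, uint8_t out_rgba[4]) {
  out_rgba[0] = pose_index % 256;  // low byte, read back as pixels[0]
  out_rgba[1] = 42;                // kWebVrPosePixelMagicNumbers[0]
  out_rgba[2] = 142;               // kWebVrPosePixelMagicNumbers[1]
  out_rgba[3] = 255;               // alpha, not checked by the reader
}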
+
+VrShellGl::VrShellGl(
+ VrShell* vr_shell,
+ base::WeakPtr<VrShell> weak_vr_shell,
+ base::WeakPtr<VrInputManager> content_input_manager,
+ base::WeakPtr<VrInputManager> ui_input_manager,
+ scoped_refptr<base::SingleThreadTaskRunner> main_thread_task_runner,
+ gvr_context* gvr_api)
+ : task_runner_(base::ThreadTaskRunnerHandle::Get()),
+ vr_shell_(vr_shell),
+ weak_vr_shell_(weak_vr_shell),
+ content_input_manager_(content_input_manager),
+ ui_input_manager_(ui_input_manager),
+ main_thread_task_runner_(std::move(main_thread_task_runner)),
+ weak_ptr_factory_(this) {
+
+ GvrInit(gvr_api);
+ InitializeGl();
gvr::Mat4f identity;
SetIdentityM(identity);
webvr_head_pose_.resize(kPoseRingBufferSize, identity);
webvr_head_pose_valid_.resize(kPoseRingBufferSize, false);
- content_input_manager_.reset(new VrInputManager(main_contents_));
- ui_input_manager_.reset(new VrInputManager(ui_contents_));
- weak_content_input_manager_ = content_input_manager_->GetWeakPtr();
- weak_ui_input_manager_ = ui_input_manager_->GetWeakPtr();
-}
+ draw_task_.Reset(base::Bind(&VrShellGl::DrawFrame, base::Unretained(this)));
-void VrShell::UpdateCompositorLayersOnUI(JNIEnv* env,
- const JavaParamRef<jobject>& obj) {
- content_compositor_->SetLayer(main_contents_);
- ui_compositor_->SetLayer(ui_contents_);
-}
+ scene_.reset(new UiScene);
-void VrShell::DestroyOnUI(JNIEnv* env, const JavaParamRef<jobject>& obj) {
- delete this;
-}
+ InitializeRenderer();
-void VrShell::LoadUIContentOnUI() {
- GURL url(kVrShellUIURL);
- ui_contents_->GetController().LoadURL(
- url, content::Referrer(),
- ui::PageTransition::PAGE_TRANSITION_AUTO_TOPLEVEL, std::string(""));
+ ScheduleNextDrawFrame();
}
-bool RegisterVrShell(JNIEnv* env) {
- return RegisterNativesImpl(env);
+VrShellGl::~VrShellGl() {
+ draw_task_.Cancel();
}
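
The constructor binds DrawFrame into draw_task_ with base::Unretained and the destructor cancels it, which is what makes the self-rescheduling loop safe. A standalone sketch of that pattern, assuming draw_task_ is a base::CancelableClosure (its declaration is in the header, not shown in this diff):

#include "base/bind.h"
#include "base/cancelable_callback.h"
#include "base/location.h"
#include "base/threading/thread_task_runner_handle.h"
#include "base/time/time.h"

// Sketch only: a frame loop that re-posts itself on its own thread.
// base::Unretained is safe because Cancel() runs in the destructor on the
// same thread that would run the callback, so it can never fire afterwards.
class DrawLoop {
 public:
  DrawLoop() : task_runner_(base::ThreadTaskRunnerHandle::Get()) {
    draw_task_.Reset(base::Bind(&DrawLoop::Draw, base::Unretained(this)));
    task_runner_->PostTask(FROM_HERE, draw_task_.callback());
  }
  ~DrawLoop() { draw_task_.Cancel(); }

 private:
  void Draw() {
    // ... render one frame, then schedule the next one ...
    task_runner_->PostDelayedTask(FROM_HERE, draw_task_.callback(),
                                  base::TimeDelta::FromMilliseconds(16));
  }
  scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
  base::CancelableClosure draw_task_;
};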
-VrShell::~VrShell() {
- if (delegate_ && delegate_->GetDeviceProvider()) {
- delegate_->GetDeviceProvider()->OnGvrDelegateRemoved();
+void VrShellGl::InitializeGl() {
+ CHECK(gl::GetGLImplementation() != gl::kGLImplementationNone ||
+ gl::init::InitializeGLOneOff());
+ surface_ = gl::init::CreateOffscreenGLSurface(gfx::Size());
+ CHECK(surface_.get()) << "gl::init::CreateOffscreenGLSurface failed";
+ context_ = gl::init::CreateGLContext(nullptr, surface_.get(),
+ gl::GLContextAttribs());
+ CHECK(context_.get()) << "gl::init::CreateGLContext failed";
+
+ CHECK(context_->MakeCurrent(surface_.get()))
+ << "gl::GLContext::MakeCurrent() failed";
+
+ // TODO(mthiesse): We don't appear to ever have a VSync provider here. This is
+ // sort of okay, because the GVR swap chain will block if we render too fast,
+ // but we should address this properly.
+ if (surface_->GetVSyncProvider()) {
+ surface_->GetVSyncProvider()->GetVSyncParameters(base::Bind(
+ &VrShellGl::UpdateVSyncParameters, weak_ptr_factory_.GetWeakPtr()));
+ } else {
+ LOG(ERROR) << "No VSync Provider.";
cjgrant 2016/12/08 17:02:49 Typically no periods in logging sentences.
mthiesse 2016/12/09 01:28:42 Done
}
- g_instance = nullptr;
- gl::init::ShutdownGL();
-}
-void VrShell::SetDelegateOnUI(JNIEnv* env,
- const base::android::JavaParamRef<jobject>& obj,
- const base::android::JavaParamRef<jobject>& delegate) {
- base::AutoLock lock(gvr_init_lock_);
- delegate_ = VrShellDelegate::GetNativeDelegate(env, delegate);
- if (swap_chain_.get()) {
- delegate_->GetDeviceProvider()->OnGvrDelegateReady(
- weak_ptr_factory_.GetWeakPtr());
- }
-}
+ unsigned int textures[2];
+ glGenTextures(2, textures);
+ ui_texture_id_ = textures[0];
+ content_texture_id_ = textures[1];
+ ui_surface_texture_ = gl::SurfaceTexture::Create(ui_texture_id_);
+ content_surface_texture_ = gl::SurfaceTexture::Create(content_texture_id_);
+ ui_surface_.reset(new gl::ScopedJavaSurface(ui_surface_texture_.get()));
+ content_surface_.reset(new gl::ScopedJavaSurface(
+ content_surface_texture_.get()));
+ ui_surface_texture_->SetFrameAvailableCallback(base::Bind(
+ &VrShellGl::OnUIFrameAvailable, weak_ptr_factory_.GetWeakPtr()));
+ content_surface_texture_->SetFrameAvailableCallback(base::Bind(
+ &VrShellGl::OnContentFrameAvailable, weak_ptr_factory_.GetWeakPtr()));
-enum class ViewerType {
- UNKNOWN_TYPE = 0,
- CARDBOARD = 1,
- DAYDREAM = 2,
- VIEWER_TYPE_MAX,
-};
+ content_surface_texture_->SetDefaultBufferSize(
+ content_tex_physical_size_.width, content_tex_physical_size_.height);
+ ui_surface_texture_->SetDefaultBufferSize(ui_tex_physical_size_.width,
+ ui_tex_physical_size_.height);
-void VrShell::GvrInitOnGL(JNIEnv* env,
- const JavaParamRef<jobject>& obj,
- jlong native_gvr_api) {
- // set the initial webvr state
- metrics_helper_->SetVRActive(true);
+ main_thread_task_runner_->PostTask(FROM_HERE, base::Bind(
+ &VrShell::SurfacesChanged, weak_vr_shell_,
+ content_surface_->j_surface().obj(),
+ ui_surface_->j_surface().obj()));
+}
- gvr_api_ =
- gvr::GvrApi::WrapNonOwned(reinterpret_cast<gvr_context*>(native_gvr_api));
- // TODO(klausw,crbug.com/655722): should report OnGvrDelegateReady here once
- // we switch to using a WebVR render surface. We currently need to wait for
- // the compositor window's size to be known first. See also
- // ContentSurfaceChanged.
- controller_.reset(
- new VrController(reinterpret_cast<gvr_context*>(native_gvr_api)));
+void VrShellGl::OnUIFrameAvailable() {
+ ui_surface_texture_->UpdateTexImage();
+}
+
+void VrShellGl::OnContentFrameAvailable() {
+ content_surface_texture_->UpdateTexImage();
+}
+void VrShellGl::GvrInit(gvr_context* gvr_api) {
+ gvr_api_ = gvr::GvrApi::WrapNonOwned(gvr_api);
+ controller_.reset(new VrController(gvr_api));
ViewerType viewerType;
switch (gvr_api_->GetViewerType()) {
@@ -270,17 +283,7 @@ void VrShell::GvrInitOnGL(JNIEnv* env,
static_cast<int>(ViewerType::VIEWER_TYPE_MAX));
}
-void VrShell::InitializeGlOnGL(JNIEnv* env,
- const JavaParamRef<jobject>& obj,
- jint content_texture_handle,
- jint ui_texture_handle) {
- base::AutoLock lock(gvr_init_lock_);
- CHECK(gl::GetGLImplementation() != gl::kGLImplementationNone ||
- gl::init::InitializeGLOneOff());
-
- content_texture_id_ = content_texture_handle;
- ui_texture_id_ = ui_texture_handle;
-
+void VrShellGl::InitializeRenderer() {
// While WebVR is going through the compositor path, it shares
// the same texture ID. This will change once it gets its own
// surface, but store it separately to avoid future confusion.
@@ -355,41 +358,37 @@ void VrShell::InitializeGlOnGL(JNIEnv* env,
webvr_right_viewport_.get());
webvr_right_viewport_->SetSourceBufferIndex(kFramePrimaryBuffer);
- if (delegate_) {
- main_thread_task_runner_->PostTask(
- FROM_HERE, base::Bind(&device::GvrDeviceProvider::OnGvrDelegateReady,
- delegate_->GetDeviceProvider(),
- weak_ptr_factory_.GetWeakPtr()));
- }
+ main_thread_task_runner_->PostTask(FROM_HERE, base::Bind(
+ &VrShell::GvrDelegateReady, weak_vr_shell_));
}
-void VrShell::UpdateControllerOnGL(const gvr::Vec3f& forward_vector) {
+void VrShellGl::UpdateController(const gvr::Vec3f& forward_vector) {
controller_->UpdateState();
#if defined(ENABLE_VR_SHELL)
+ // TODO(mthiesse): Fix this properly.
cjgrant 2016/12/08 17:02:49 Should elaborate.
mthiesse 2016/12/09 01:28:42 Done.
+
// Note that button up/down state is transient, so ButtonUpHappened only
- // returns
- // true for a single frame (and we're guaranteed not to miss it).
+ // returns true for a single frame (and we're guaranteed not to miss it).
if (controller_->ButtonUpHappened(
gvr::ControllerButton::GVR_CONTROLLER_BUTTON_APP)) {
- html_interface_->SetMenuMode(!html_interface_->GetMenuMode());
+// html_interface_->SetMenuMode(!html_interface_->GetMenuMode());
// TODO(mthiesse): The page is no longer visible when in menu mode. We
// should unfocus or otherwise let it know it's hidden.
- if (html_interface_->GetMode() == UiInterface::Mode::WEB_VR) {
- const auto&& task = html_interface_->GetMenuMode() ?
- &device::GvrDeviceProvider::OnDisplayBlur :
- &device::GvrDeviceProvider::OnDisplayFocus;
- main_thread_task_runner_->PostTask(
- FROM_HERE, base::Bind(task, delegate_->GetDeviceProvider()));
- }
+// if (html_interface_->GetMode() == UiInterface::Mode::WEB_VR) {
cjgrant 2016/12/08 17:02:49 Remove this block?
mthiesse 2016/12/09 01:28:42 For clarity, I'd like to leave it here as a sign t
+// const auto&& task = html_interface_->GetMenuMode() ?
+// &device::GvrDeviceProvider::OnDisplayBlur :
+// &device::GvrDeviceProvider::OnDisplayFocus;
+// main_thread_task_runner_->PostTask(
+// FROM_HERE, base::Bind(task, delegate_->GetDeviceProvider()));
+// }
}
#endif
- if (html_interface_->GetMode() == UiInterface::Mode::WEB_VR) {
+ if (vr_shell_->GetUiInterface()->GetMode() == UiInterface::Mode::WEB_VR) {
// Process screen touch events for Cardboard button compatibility.
// Also send tap events for controller "touchpad click" events.
- if (touch_pending_ ||
- controller_->ButtonUpHappened(
+ if (touch_pending_ || controller_->ButtonUpHappened(
gvr::ControllerButton::GVR_CONTROLLER_BUTTON_CLICK)) {
touch_pending_ = false;
std::unique_ptr<WebGestureEvent> gesture(new WebGestureEvent());
@@ -399,7 +398,7 @@ void VrShell::UpdateControllerOnGL(const gvr::Vec3f& forward_vector) {
gesture->type = WebInputEvent::GestureTapDown;
gesture->x = 0;
gesture->y = 0;
- SendGestureOnGL(CONTENT, std::move(gesture));
+ SendGesture(CONTENT, std::move(gesture));
}
return;
@@ -494,12 +493,12 @@ void VrShell::UpdateControllerOnGL(const gvr::Vec3f& forward_vector) {
input_target = plane->content_quad ? CONTENT : UI;
}
}
- SendEventsToTargetOnGL(input_target, pixel_x, pixel_y);
+ SendEventsToTarget(input_target, pixel_x, pixel_y);
}
-void VrShell::SendEventsToTargetOnGL(InputTarget input_target,
- int pixel_x,
- int pixel_y) {
+void VrShellGl::SendEventsToTarget(InputTarget input_target,
+ int pixel_x,
+ int pixel_y) {
std::vector<std::unique_ptr<WebGestureEvent>> gesture_list =
controller_->DetectGestures();
std::unique_ptr<WebGestureEvent> gesture = std::move(gesture_list.front());
@@ -509,7 +508,7 @@ void VrShell::SendEventsToTargetOnGL(InputTarget input_target,
gesture->type == WebInputEvent::GestureScrollUpdate ||
gesture->type == WebInputEvent::GestureScrollEnd ||
gesture->type == WebInputEvent::GestureFlingCancel) {
- SendGestureOnGL(CONTENT, base::WrapUnique(new WebGestureEvent(*gesture)));
+ SendGesture(CONTENT, base::WrapUnique(new WebGestureEvent(*gesture)));
}
if (gesture->type == WebInputEvent::GestureScrollEnd) {
@@ -518,9 +517,9 @@ void VrShell::SendEventsToTargetOnGL(InputTarget input_target,
gesture_list.back()->x = pixel_x;
gesture_list.back()->y = pixel_y;
if (input_target != NONE)
- SendGestureOnGL(input_target, std::move(gesture_list.back()));
+ SendGesture(input_target, std::move(gesture_list.back()));
} else if (gesture_list.back()->type == WebInputEvent::GestureFlingStart) {
- SendGestureOnGL(CONTENT, std::move(gesture_list.back()));
+ SendGesture(CONTENT, std::move(gesture_list.back()));
} else {
NOTREACHED();
}
@@ -531,7 +530,7 @@ void VrShell::SendEventsToTargetOnGL(InputTarget input_target,
bool new_target = input_target != current_input_target_;
if (new_target && current_input_target_ != NONE) {
// Send a move event indicating that the pointer moved off of an element.
- SendGestureOnGL(current_input_target_, MakeMouseEvent(
+ SendGesture(current_input_target_, MakeMouseEvent(
WebInputEvent::MouseLeave, gesture->timeStampSeconds, 0, 0));
}
current_input_target_ = input_target;
@@ -540,7 +539,7 @@ void VrShell::SendEventsToTargetOnGL(InputTarget input_target,
}
WebInputEvent::Type type =
new_target ? WebInputEvent::MouseEnter : WebInputEvent::MouseMove;
- SendGestureOnGL(current_input_target_, MakeMouseEvent(
+ SendGesture(current_input_target_, MakeMouseEvent(
type, gesture->timeStampSeconds, pixel_x, pixel_y));
if (original_type == WebInputEvent::GestureTapDown || touch_pending_) {
@@ -554,55 +553,27 @@ void VrShell::SendEventsToTargetOnGL(InputTarget input_target,
event->type = WebInputEvent::GestureTapDown;
event->x = pixel_x;
event->y = pixel_y;
- SendGestureOnGL(current_input_target_, std::move(event));
+ SendGesture(current_input_target_, std::move(event));
}
}
-void VrShell::SendGestureOnGL(InputTarget input_target,
- std::unique_ptr<blink::WebInputEvent> event) {
+void VrShellGl::SendGesture(InputTarget input_target,
+ std::unique_ptr<blink::WebInputEvent> event) {
DCHECK(input_target != NONE);
const base::WeakPtr<VrInputManager>& weak_ptr =
- input_target == CONTENT ? weak_content_input_manager_
- : weak_ui_input_manager_;
+ input_target == CONTENT ? content_input_manager_ : ui_input_manager_;
main_thread_task_runner_->PostTask(
FROM_HERE,
base::Bind(&VrInputManager::ProcessUpdatedGesture, weak_ptr,
base::Passed(std::move(event))));
}
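
SendGesture() is where input crosses back from the GL thread to the main thread. Two details carry the safety here: base::Passed moves the unique_ptr into the task exactly once, and binding to a base::WeakPtr receiver makes the posted task a silent no-op if the VrInputManager has already been destroyed. Restated as a standalone helper (name hypothetical, types taken from the call above):

// Sketch: hand a move-only event to the main thread. If |manager| was
// invalidated before the task runs, base::Bind skips the call entirely.
void PostGestureToMainThread(
    const scoped_refptr<base::SingleThreadTaskRunner>& main_runner,
    const base::WeakPtr<VrInputManager>& manager,
    std::unique_ptr<blink::WebInputEvent> event) {
  main_runner->PostTask(
      FROM_HERE,
      base::Bind(&VrInputManager::ProcessUpdatedGesture, manager,
                 base::Passed(std::move(event))));
}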
-void VrShell::SetGvrPoseForWebVr(const gvr::Mat4f& pose, uint32_t pose_num) {
+void VrShellGl::SetGvrPoseForWebVr(const gvr::Mat4f& pose, uint32_t pose_num) {
webvr_head_pose_[pose_num % kPoseRingBufferSize] = pose;
webvr_head_pose_valid_[pose_num % kPoseRingBufferSize] = true;
}
-int GetPixelEncodedPoseIndexByte() {
- TRACE_EVENT0("gpu", "VrShell::GetPixelEncodedPoseIndex");
- // Read the pose index encoded in a bottom left pixel as color values.
- // See also third_party/WebKit/Source/modules/vr/VRDisplay.cpp which
- // encodes the pose index, and device/vr/android/gvr/gvr_device.cc
- // which tracks poses. Returns the low byte (0..255) if valid, or -1
- // if not valid due to bad magic number.
- uint8_t pixels[4];
- // Assume we're reading from the framebuffer we just wrote to.
- // That's true currently, we may need to use glReadBuffer(GL_BACK)
- // or equivalent if the rendering setup changes in the future.
- glReadPixels(0, 0, 1, 1, GL_RGBA, GL_UNSIGNED_BYTE, pixels);
-
- // Check for the magic number written by VRDevice.cpp on submit.
- // This helps avoid glitches from garbage data in the render
- // buffer that can appear during initialization or resizing. These
- // often appear as flashes of all-black or all-white pixels.
- if (pixels[1] == kWebVrPosePixelMagicNumbers[0] &&
- pixels[2] == kWebVrPosePixelMagicNumbers[1]) {
- // Pose is good.
- return pixels[0];
- }
- VLOG(1) << "WebVR: reject decoded pose index " << (int)pixels[0] <<
- ", bad magic number " << (int)pixels[1] << ", " << (int)pixels[2];
- return -1;
-}
-
-bool VrShell::WebVrPoseByteIsValidOnGL(int pose_index_byte) {
+bool VrShellGl::WebVrPoseByteIsValid(int pose_index_byte) {
if (pose_index_byte < 0) {
return false;
}
@@ -614,29 +585,31 @@ bool VrShell::WebVrPoseByteIsValidOnGL(int pose_index_byte) {
return true;
}
-void VrShell::DrawFrameOnGL(JNIEnv* env, const JavaParamRef<jobject>& obj) {
- TRACE_EVENT0("gpu", "VrShell::DrawFrame");
+void VrShellGl::DrawFrame() {
+ TRACE_EVENT0("gpu", "VrShellGl::DrawFrame");
// Reset the viewport list to just the pair of viewports for the
// primary buffer each frame. Head-locked viewports get added by
// DrawVrShell if needed.
buffer_viewport_list_->SetToRecommendedBufferViewports();
- if (html_interface_->GetMode() == UiInterface::Mode::WEB_VR) {
- // If needed, resize the primary buffer for use with WebVR.
- if (render_size_primary_ != render_size_primary_webvr_) {
- if (!render_size_primary_webvr_.width) {
- VLOG(2) << "WebVR rendering size not known yet, dropping frame";
- return;
- }
- render_size_primary_ = render_size_primary_webvr_;
- swap_chain_->ResizeBuffer(kFramePrimaryBuffer, render_size_primary_);
- }
- } else {
- if (render_size_primary_ != render_size_primary_vrshell_) {
- render_size_primary_ = render_size_primary_vrshell_;
- swap_chain_->ResizeBuffer(kFramePrimaryBuffer, render_size_primary_);
- }
- }
+ // TODO(klausw): Fix this. Resizing buffers here leads to webVR mode showing
+ // nothing but a black screen.
+// if (vr_shell_->GetUiInterface()->GetMode() == UiInterface::Mode::WEB_VR) {
+// // If needed, resize the primary buffer for use with WebVR.
+// if (render_size_primary_ != render_size_primary_webvr_) {
+// if (!render_size_primary_webvr_.width) {
+// VLOG(2) << "WebVR rendering size not known yet, dropping frame";
+// return;
+// }
+// render_size_primary_ = render_size_primary_webvr_;
+// swap_chain_->ResizeBuffer(kFramePrimaryBuffer, render_size_primary_);
+// }
+// } else {
+// if (render_size_primary_ != render_size_primary_vrshell_) {
+// render_size_primary_ = render_size_primary_vrshell_;
+// swap_chain_->ResizeBuffer(kFramePrimaryBuffer, render_size_primary_);
+// }
+// }
gvr::Frame frame = swap_chain_->AcquireFrame();
gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow();
@@ -655,19 +628,16 @@ void VrShell::DrawFrameOnGL(JNIEnv* env, const JavaParamRef<jobject>& obj) {
gvr_api_->ApplyNeckModel(head_pose, 1.0f);
}
- // Bind the primary framebuffer.
frame.BindBuffer(kFramePrimaryBuffer);
- HandleQueuedTasksOnGL();
-
// Update the render position of all UI elements (including desktop).
const float screen_tilt = kDesktopScreenTiltDefault * M_PI / 180.0f;
scene_->UpdateTransforms(screen_tilt, UiScene::TimeInMicroseconds());
- UpdateControllerOnGL(GetForwardVector(head_pose));
+ UpdateController(GetForwardVector(head_pose));
- if (html_interface_->GetMode() == UiInterface::Mode::WEB_VR) {
- DrawWebVrOnGL();
+ if (vr_shell_->GetUiInterface()->GetMode() == UiInterface::Mode::WEB_VR) {
+ DrawWebVr();
// When using async reprojection, we need to know which pose was used in
// the WebVR app for drawing this frame. Due to unknown amounts of
@@ -678,7 +648,7 @@ void VrShell::DrawFrameOnGL(JNIEnv* env, const JavaParamRef<jobject>& obj) {
// doing this once we have working no-compositor rendering for WebVR.
if (gvr_api_->GetAsyncReprojectionEnabled()) {
int pose_index_byte = GetPixelEncodedPoseIndexByte();
- if (WebVrPoseByteIsValidOnGL(pose_index_byte)) {
+ if (WebVrPoseByteIsValid(pose_index_byte)) {
// We have a valid pose, use it for reprojection.
webvr_left_viewport_->SetReprojection(GVR_REPROJECTION_FULL);
webvr_right_viewport_->SetReprojection(GVR_REPROJECTION_FULL);
@@ -699,15 +669,18 @@ void VrShell::DrawFrameOnGL(JNIEnv* env, const JavaParamRef<jobject>& obj) {
}
}
- DrawVrShellOnGL(head_pose, frame);
+ DrawVrShell(head_pose, frame);
frame.Unbind();
frame.Submit(*buffer_viewport_list_, head_pose);
+
+ // No need to SwapBuffers for an offscreen surface.
+ ScheduleNextDrawFrame();
}
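
Stripped of the WebVR branches, the per-frame contract in DrawFrame() is: acquire a frame from the swap chain, bind the primary buffer, draw, unbind, submit with the same head pose used for drawing, then schedule the next iteration. A condensed sketch using only calls that appear above (head-pose prediction elided, as it is in this hunk):

// Sketch of the per-frame GVR sequence; error handling, buffer resizing and
// the headlocked pass omitted. kFramePrimaryBuffer is the constant used
// throughout this file.
void DrawOneFrame(gvr::SwapChain* swap_chain,
                  gvr::BufferViewportList* viewport_list,
                  const gvr::Mat4f& head_pose) {
  gvr::Frame frame = swap_chain->AcquireFrame();
  frame.BindBuffer(kFramePrimaryBuffer);  // render target for both eyes
  // ... draw the scene and/or the WebVR texture here ...
  frame.Unbind();
  frame.Submit(*viewport_list, head_pose);  // hand off for (re)projection
}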
-void VrShell::DrawVrShellOnGL(const gvr::Mat4f& head_pose,
- gvr::Frame &frame) {
- TRACE_EVENT0("gpu", "VrShell::DrawVrShell");
+void VrShellGl::DrawVrShell(const gvr::Mat4f& head_pose,
+ gvr::Frame &frame) {
+ TRACE_EVENT0("gpu", "VrShellGl::DrawVrShell");
std::vector<const ContentRectangle*> head_locked_elements;
std::vector<const ContentRectangle*> world_elements;
for (const auto& rect : scene_->GetUiElements()) {
@@ -721,7 +694,7 @@ void VrShell::DrawVrShellOnGL(const gvr::Mat4f& head_pose,
}
}
- if (html_interface_->GetMode() == UiInterface::Mode::WEB_VR) {
+ if (vr_shell_->GetUiInterface()->GetMode() == UiInterface::Mode::WEB_VR) {
// WebVR is incompatible with 3D world compositing since the
// depth buffer was already populated with unknown scaling - the
// WebVR app has full control over zNear/zFar. Just leave the
@@ -738,9 +711,8 @@ void VrShell::DrawVrShellOnGL(const gvr::Mat4f& head_pose,
glClearColor(0.1f, 0.1f, 0.1f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
}
-
if (!world_elements.empty()) {
- DrawUiViewOnGL(&head_pose, world_elements, render_size_primary_,
+ DrawUiView(&head_pose, world_elements, render_size_primary_,
kViewportListPrimaryOffset);
}
@@ -755,33 +727,35 @@ void VrShell::DrawVrShellOnGL(const gvr::Mat4f& head_pose,
*headlocked_right_viewport_);
// Bind the headlocked framebuffer.
+ // TODO(mthiesse): We don't unbind this? Maybe some cleanup is in order
+ // here.
frame.BindBuffer(kFrameHeadlockedBuffer);
glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
- DrawUiViewOnGL(nullptr, head_locked_elements, render_size_headlocked_,
+ DrawUiView(nullptr, head_locked_elements, render_size_headlocked_,
kViewportListHeadlockedOffset);
}
}
-void VrShell::SetWebVRRenderSurfaceSize(int width, int height) {
+void VrShellGl::SetWebVRRenderSurfaceSize(int width, int height) {
render_size_primary_webvr_.width = width;
render_size_primary_webvr_.height = height;
// TODO(klausw,crbug.com/655722): set the WebVR render surface size here once
// we have that.
}
-gvr::Sizei VrShell::GetWebVRCompositorSurfaceSize() {
+gvr::Sizei VrShellGl::GetWebVRCompositorSurfaceSize() {
// This is a stopgap while we're using the WebVR compositor rendering path.
// TODO(klausw,crbug.com/655722): Remove this method and member once we're
// using a separate WebVR render surface.
return content_tex_physical_size_;
}
-
-void VrShell::DrawUiViewOnGL(const gvr::Mat4f* head_pose,
- const std::vector<const ContentRectangle*>& elements,
- const gvr::Sizei& render_size, int viewport_offset) {
- TRACE_EVENT0("gpu", "VrShell::DrawUiView");
+void VrShellGl::DrawUiView(const gvr::Mat4f* head_pose,
+ const std::vector<const ContentRectangle*>& elements,
+ const gvr::Sizei& render_size,
+ int viewport_offset) {
+ TRACE_EVENT0("gpu", "VrShellGl::DrawUiView");
for (auto eye : {GVR_LEFT_EYE, GVR_RIGHT_EYE}) {
buffer_viewport_list_->GetBufferViewport(
eye + viewport_offset, buffer_viewport_.get());
@@ -802,15 +776,16 @@ void VrShell::DrawUiViewOnGL(const gvr::Mat4f* head_pose,
buffer_viewport_->GetSourceFov(), kZNear, kZFar),
view_matrix);
- DrawElementsOnGL(render_matrix, elements);
+ DrawElements(render_matrix, elements);
if (head_pose != nullptr &&
- html_interface_->GetMode() != UiInterface::Mode::WEB_VR) {
- DrawCursorOnGL(render_matrix);
+ vr_shell_->GetUiInterface()->GetMode() !=
+ UiInterface::Mode::WEB_VR) {
+ DrawCursor(render_matrix);
}
}
}
-void VrShell::DrawElementsOnGL(
+void VrShellGl::DrawElements(
const gvr::Mat4f& render_matrix,
const std::vector<const ContentRectangle*>& elements) {
for (const auto& rect : elements) {
@@ -834,7 +809,7 @@ void VrShell::DrawElementsOnGL(
}
}
-void VrShell::DrawCursorOnGL(const gvr::Mat4f& render_matrix) {
+void VrShellGl::DrawCursor(const gvr::Mat4f& render_matrix) {
gvr::Mat4f mat;
SetIdentityM(mat);
@@ -907,8 +882,8 @@ void VrShell::DrawCursorOnGL(const gvr::Mat4f& render_matrix) {
}
}
-void VrShell::DrawWebVrOnGL() {
- TRACE_EVENT0("gpu", "VrShell::DrawWebVr");
+void VrShellGl::DrawWebVr() {
+ TRACE_EVENT0("gpu", "VrShellGl::DrawWebVr");
// Don't need face culling, depth testing, blending, etc. Turn it all off.
glDisable(GL_CULL_FACE);
glDepthMask(GL_FALSE);
@@ -926,233 +901,88 @@ void VrShell::DrawWebVrOnGL() {
*webvr_right_viewport_);
}
-void VrShell::OnTriggerEventOnUI(JNIEnv* env,
- const JavaParamRef<jobject>& obj) {
+void VrShellGl::OnTriggerEvent() {
// Set a flag to handle this on the render thread at the next frame.
touch_pending_ = true;
}
-void VrShell::OnPauseOnUI(JNIEnv* env, const JavaParamRef<jobject>& obj) {
- if (gvr_api_ == nullptr)
- return;
-
- // TODO(mthiesse): Clean up threading here.
+void VrShellGl::OnPause() {
controller_->OnPause();
gvr_api_->PauseTracking();
-
- // exit vr session
- metrics_helper_->SetVRActive(false);
}
-void VrShell::OnResumeOnUI(JNIEnv* env, const JavaParamRef<jobject>& obj) {
- if (gvr_api_ == nullptr)
- return;
-
- // TODO(mthiesse): Clean up threading here.
+void VrShellGl::OnResume() {
gvr_api_->RefreshViewerProfile();
gvr_api_->ResumeTracking();
controller_->OnResume();
-
- // exit vr session
- metrics_helper_->SetVRActive(true);
-}
-
-base::WeakPtr<VrShell> VrShell::GetWeakPtrOnUI(
- const content::WebContents* web_contents) {
- // Ensure that the WebContents requesting the VrShell instance is the one
- // we created.
- if (g_instance != nullptr && g_instance->ui_contents_ == web_contents)
- return g_instance->weak_ptr_factory_.GetWeakPtr();
- return base::WeakPtr<VrShell>(nullptr);
-}
-
-void VrShell::OnDomContentsLoadedOnUI() {
- html_interface_->SetURL(main_contents_->GetVisibleURL());
- html_interface_->SetLoading(main_contents_->IsLoading());
- html_interface_->OnDomContentsLoaded();
}
-void VrShell::SetWebVrModeOnUI(JNIEnv* env,
- const base::android::JavaParamRef<jobject>& obj,
- bool enabled) {
- metrics_helper_->SetWebVREnabled(enabled);
+void VrShellGl::SetWebVrMode(bool enabled) {
if (enabled) {
- html_interface_->SetMode(UiInterface::Mode::WEB_VR);
+ vr_shell_->GetUiInterface()->SetMode(UiInterface::Mode::WEB_VR);
} else {
- html_interface_->SetMode(UiInterface::Mode::STANDARD);
+ vr_shell_->GetUiInterface()->SetMode(UiInterface::Mode::STANDARD);
}
}
-void VrShell::SetWebVRSecureOrigin(bool secure_origin) {
- // TODO(cjgrant): Align this state with the logic that drives the omnibox.
- html_interface_->SetWebVRSecureOrigin(secure_origin);
-}
-
-void VrShell::SubmitWebVRFrame() {}
-
-void VrShell::UpdateWebVRTextureBounds(const gvr::Rectf& left_bounds,
- const gvr::Rectf& right_bounds) {
+void VrShellGl::UpdateWebVRTextureBounds(const gvr::Rectf& left_bounds,
+ const gvr::Rectf& right_bounds) {
webvr_left_viewport_->SetSourceUv(left_bounds);
webvr_right_viewport_->SetSourceUv(right_bounds);
}
-gvr::GvrApi* VrShell::gvr_api() {
+gvr::GvrApi* VrShellGl::gvr_api() {
return gvr_api_.get();
}
-void VrShell::SurfacesChangedOnUI(JNIEnv* env,
- const JavaParamRef<jobject>& object,
- const JavaParamRef<jobject>& content_surface,
- const JavaParamRef<jobject>& ui_surface) {
- content_compositor_->SurfaceChanged(content_surface);
- ui_compositor_->SurfaceChanged(ui_surface);
+void VrShellGl::ContentBoundsChanged(int width, int height) {
+ TRACE_EVENT0("gpu", "VrShellGl::ContentBoundsChanged");
+ content_tex_css_width_ = width;
+ content_tex_css_height_ = height;
}
-void VrShell::ContentBoundsChangedOnUI(JNIEnv* env,
- const JavaParamRef<jobject>& object,
- jint width, jint height, jfloat dpr) {
- TRACE_EVENT0("gpu", "VrShell::ContentBoundsChanged");
+void VrShellGl::ContentPhysicalBoundsChanged(int width, int height) {
+ if (content_surface_texture_.get())
+ content_surface_texture_->SetDefaultBufferSize(width, height);
content_tex_physical_size_.width = width;
content_tex_physical_size_.height = height;
- // TODO(mthiesse): Synchronize with GL thread, and update tex css size in
- // response to MainFrameWasResized, not here.
- content_tex_css_width_ = width / dpr;
- content_tex_css_height_ = height / dpr;
-
- content_compositor_->SetWindowBounds(width, height);
}
-void VrShell::UIBoundsChangedOnUI(JNIEnv* env,
- const JavaParamRef<jobject>& object,
- jint width, jint height, jfloat dpr) {
- ui_compositor_->SetWindowBounds(width, height);
+void VrShellGl::UIBoundsChanged(int width, int height) {
+ ui_tex_css_width_ = width;
+ ui_tex_css_height_ = height;
}
-UiScene* VrShell::GetSceneOnGL() {
- return scene_.get();
+void VrShellGl::UIPhysicalBoundsChanged(int width, int height) {
+ if (ui_surface_texture_.get())
+ ui_surface_texture_->SetDefaultBufferSize(width, height);
+ ui_tex_physical_size_.width = width;
+ ui_tex_physical_size_.height = height;
}
-UiInterface* VrShell::GetUiInterfaceOnGL() {
- return html_interface_.get();
+base::WeakPtr<VrShellGl> VrShellGl::GetWeakPtr() {
+ return weak_ptr_factory_.GetWeakPtr();
}
-void VrShell::QueueTaskOnUI(base::Callback<void()>& callback) {
- base::AutoLock lock(task_queue_lock_);
- task_queue_.push(callback);
+void VrShellGl::UpdateVSyncParameters(const base::TimeTicks timebase,
+ const base::TimeDelta interval) {
+ vsync_timebase_ = timebase;
+ vsync_interval_ = interval;
}
-void VrShell::HandleQueuedTasksOnGL() {
- // To protect a stream of tasks from blocking rendering indefinitely,
- // process only the number of tasks present when first checked.
- std::vector<base::Callback<void()>> tasks;
- {
- base::AutoLock lock(task_queue_lock_);
- const size_t count = task_queue_.size();
- for (size_t i = 0; i < count; i++) {
- tasks.push_back(task_queue_.front());
- task_queue_.pop();
- }
- }
- for (auto &task : tasks) {
- task.Run();
- }
-}
+void VrShellGl::ScheduleNextDrawFrame() {
+ base::TimeTicks now = base::TimeTicks::Now();
+ base::TimeTicks target;
-void VrShell::DoUiActionOnUI(const UiAction action) {
- content::NavigationController& controller = main_contents_->GetController();
- switch (action) {
- case HISTORY_BACK:
- if (main_contents_->IsFullscreen()) {
- main_contents_->ExitFullscreen(true /* will_cause_resize */);
- } else if (controller.CanGoBack()) {
- controller.GoBack();
- }
- break;
- case HISTORY_FORWARD:
- if (controller.CanGoForward())
- controller.GoForward();
- break;
- case RELOAD:
- controller.Reload(false);
- break;
-#if defined(ENABLE_VR_SHELL_UI_DEV)
- case RELOAD_UI:
- ui_contents_->GetController().Reload(false);
- html_interface_.reset(new UiInterface(UiInterface::Mode::STANDARD,
- main_contents_->IsFullscreen()));
- vr_web_contents_observer_->SetUiInterface(html_interface_.get());
- break;
-#endif
- case ZOOM_OUT: // Not handled yet.
- case ZOOM_IN: // Not handled yet.
- break;
- default:
- NOTREACHED();
+ if (vsync_interval_.is_zero()) {
+ target = now;
+ } else {
+ target = now + vsync_interval_;
+ int64_t intervals = (target - vsync_timebase_) / vsync_interval_;
+ target = vsync_timebase_ + intervals * vsync_interval_;
}
-}
-
-void VrShell::RenderViewHostChanged(content::RenderViewHost* old_host,
- content::RenderViewHost* new_host) {
- new_host->GetWidget()->GetView()->SetBackgroundColor(SK_ColorTRANSPARENT);
-}
-
-void VrShell::MainFrameWasResized(bool width_changed) {
- display::Display display = display::Screen::GetScreen()
- ->GetDisplayNearestWindow(ui_contents_->GetNativeView());
- // TODO(mthiesse): Synchronize with GL thread.
- ui_tex_css_width_ = display.size().width();
- ui_tex_css_height_ = display.size().height();
-}
-
-void VrShell::WebContentsDestroyed() {
- ui_input_manager_.reset();
- ui_contents_ = nullptr;
- // TODO(mthiesse): Handle web contents being destroyed.
- delegate_->ForceExitVr();
-}
-
-void VrShell::ContentWebContentsDestroyedOnUI() {
- content_input_manager_.reset();
- main_contents_ = nullptr;
- // TODO(mthiesse): Handle web contents being destroyed.
- delegate_->ForceExitVr();
-}
-
-void VrShell::ContentWasHiddenOnUI() {
- // Ensure we don't continue sending input to it.
- content_input_manager_.reset();
- // TODO(mthiesse): Handle web contents being hidden.
- delegate_->ForceExitVr();
-}
-
-void VrShell::SetContentCssSizeOnUI(float width, float height, float dpr) {
- JNIEnv* env = base::android::AttachCurrentThread();
- Java_VrShellImpl_setContentCssSizeOnUI(env, j_vr_shell_.obj(), width, height,
- dpr);
-}
-
-void VrShell::SetUiCssSizeOnUI(float width, float height, float dpr) {
- JNIEnv* env = base::android::AttachCurrentThread();
- Java_VrShellImpl_setUiCssSizeOnUI(env, j_vr_shell_.obj(), width, height, dpr);
-}
-// ----------------------------------------------------------------------------
-// Native JNI methods
-// ----------------------------------------------------------------------------
-
-jlong InitOnUI(JNIEnv* env,
- const JavaParamRef<jobject>& obj,
- const JavaParamRef<jobject>& content_web_contents,
- jlong content_window_android,
- const JavaParamRef<jobject>& ui_web_contents,
- jlong ui_window_android,
- jboolean for_web_vr) {
- return reinterpret_cast<intptr_t>(new VrShell(
- env, obj, content::WebContents::FromJavaWebContents(content_web_contents),
- reinterpret_cast<ui::WindowAndroid*>(content_window_android),
- content::WebContents::FromJavaWebContents(ui_web_contents),
- reinterpret_cast<ui::WindowAndroid*>(ui_window_android),
- for_web_vr));
+ task_runner_->PostDelayedTask(FROM_HERE, draw_task_.callback(), target - now);
}
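
ScheduleNextDrawFrame() snaps the next draw onto a vsync boundary derived from the timebase and interval delivered by UpdateVSyncParameters(), and falls back to drawing immediately when no interval is known yet. The same arithmetic in isolation, with raw microsecond values for clarity (helper name hypothetical):

#include <cstdint>

// Sketch: delay from |now_us| until the next vsync-aligned target, mirroring
// the base::TimeTicks math above. Returns 0 when no vsync interval is known.
int64_t DelayToNextVSyncMicros(int64_t now_us,
                               int64_t vsync_timebase_us,
                               int64_t vsync_interval_us) {
  if (vsync_interval_us == 0)
    return 0;
  int64_t target = now_us + vsync_interval_us;
  int64_t intervals = (target - vsync_timebase_us) / vsync_interval_us;
  target = vsync_timebase_us + intervals * vsync_interval_us;
  return target - now_us;
}

For example, with a 60 Hz interval of 16667 us, a timebase of 0, and now_us = 40000, intervals is 3 and the returned delay is 10001 us, so the next draw lands on the 50001 us vsync boundary instead of drifting.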
} // namespace vr_shell
