Chromium Code Reviews

Unified Diff: chrome/browser/android/vr_shell/vr_shell.cc

Issue 2562733002: Implement our own GLThread for VR Shell. (Closed)
Patch Set: Address bshe's comments (Created 4 years ago)
Index: chrome/browser/android/vr_shell/vr_shell.cc
diff --git a/chrome/browser/android/vr_shell/vr_shell.cc b/chrome/browser/android/vr_shell/vr_shell.cc
index 8fad473cc54708ffc5cedc0da52b4d5bcb9dd974..d9cbb274bed26298813a7463e7b9e2297b4fcd11 100644
--- a/chrome/browser/android/vr_shell/vr_shell.cc
+++ b/chrome/browser/android/vr_shell/vr_shell.cc
@@ -5,15 +5,15 @@
#include "chrome/browser/android/vr_shell/vr_shell.h"
#include "base/metrics/histogram_macros.h"
-#include "chrome/browser/android/vr_shell/ui_elements.h"
+#include "base/threading/platform_thread.h"
+#include "base/threading/thread.h"
+#include "base/threading/thread_restrictions.h"
+#include "base/threading/thread_task_runner_handle.h"
#include "chrome/browser/android/vr_shell/ui_interface.h"
-#include "chrome/browser/android/vr_shell/ui_scene.h"
#include "chrome/browser/android/vr_shell/vr_compositor.h"
-#include "chrome/browser/android/vr_shell/vr_controller.h"
-#include "chrome/browser/android/vr_shell/vr_gl_util.h"
#include "chrome/browser/android/vr_shell/vr_input_manager.h"
#include "chrome/browser/android/vr_shell/vr_shell_delegate.h"
-#include "chrome/browser/android/vr_shell/vr_shell_renderer.h"
+#include "chrome/browser/android/vr_shell/vr_shell_gl.h"
#include "chrome/browser/android/vr_shell/vr_usage_monitor.h"
#include "chrome/browser/android/vr_shell/vr_web_contents_observer.h"
#include "content/public/browser/navigation_controller.h"
@@ -29,124 +29,65 @@
#include "ui/base/page_transition_types.h"
#include "ui/display/display.h"
#include "ui/display/screen.h"
-#include "ui/gl/gl_bindings.h"
-#include "ui/gl/init/gl_factory.h"
using base::android::JavaParamRef;
+using base::android::JavaRef;
namespace vr_shell {
namespace {
-// Constant taken from treasure_hunt demo.
-static constexpr long kPredictionTimeWithoutVsyncNanos = 50000000;
-
-static constexpr float kZNear = 0.1f;
-static constexpr float kZFar = 1000.0f;
-
-// Screen angle in degrees. 0 = vertical, positive = top closer.
-static constexpr float kDesktopScreenTiltDefault = 0;
-
-static constexpr float kReticleWidth = 0.025f;
-static constexpr float kReticleHeight = 0.025f;
-
-static constexpr float kLaserWidth = 0.01f;
-
-// Angle (radians) the beam down from the controller axis, for wrist comfort.
-static constexpr float kErgoAngleOffset = 0.26f;
-
-static constexpr gvr::Vec3f kOrigin = {0.0f, 0.0f, 0.0f};
-
-// In lieu of an elbow model, we assume a position for the user's hand.
-// TODO(mthiesse): Handedness options.
-static constexpr gvr::Vec3f kHandPosition = {0.2f, -0.5f, -0.2f};
-
-// If there is no content quad, and the reticle isn't hitting another element,
-// draw the reticle at this distance.
-static constexpr float kDefaultReticleDistance = 2.0f;
-
-// Fraction of the distance to the object the cursor is drawn at to avoid
-// rounding errors drawing the cursor behind the object.
-static constexpr float kReticleOffset = 0.99f;
-
-// Limit the rendering distance of the reticle to the distance to a corner of
-// the content quad, times this value. This lets the rendering distance
-// adjust according to content quad placement.
-static constexpr float kReticleDistanceMultiplier = 1.5f;
-
-// GVR buffer indices for use with viewport->SetSourceBufferIndex
-// or frame.BindBuffer. We use one for world content (with reprojection)
-// including main VrShell and WebVR content plus world-space UI.
-// The headlocked buffer is for UI that should not use reprojection.
-static constexpr int kFramePrimaryBuffer = 0;
-static constexpr int kFrameHeadlockedBuffer = 1;
-
-// Pixel dimensions and field of view for the head-locked content. This
-// is currently sized to fit the WebVR "insecure transport" warnings,
-// adjust it as needed if there is additional content.
-static constexpr gvr::Sizei kHeadlockedBufferDimensions = {1024, 1024};
-static constexpr gvr::Rectf kHeadlockedBufferFov = {20.f, 20.f, 20.f, 20.f};
-
-// The GVR viewport list has two entries (left eye and right eye) for each
-// GVR buffer.
-static constexpr int kViewportListPrimaryOffset = 0;
-static constexpr int kViewportListHeadlockedOffset = 2;
-
-// Magic numbers used to mark valid pose index values encoded in frame
-// data. Must match the magic numbers used in blink's VRDisplay.cpp.
-static constexpr std::array<uint8_t, 2> kWebVrPosePixelMagicNumbers{{42, 142}};
-
vr_shell::VrShell* g_instance;
static const char kVrShellUIURL[] = "chrome://vr-shell-ui";
-float Distance(const gvr::Vec3f& vec1, const gvr::Vec3f& vec2) {
- float xdiff = (vec1.x - vec2.x);
- float ydiff = (vec1.y - vec2.y);
- float zdiff = (vec1.z - vec2.z);
- float scale = xdiff * xdiff + ydiff * ydiff + zdiff * zdiff;
- return std::sqrt(scale);
-}
-
-// Generate a quaternion representing the rotation from the negative Z axis
-// (0, 0, -1) to a specified vector. This is an optimized version of a more
-// general vector-to-vector calculation.
-gvr::Quatf GetRotationFromZAxis(gvr::Vec3f vec) {
- vr_shell::NormalizeVector(vec);
- gvr::Quatf quat;
- quat.qw = 1.0f - vec.z;
- if (quat.qw < 1e-6f) {
- // Degenerate case: vectors are exactly opposite. Replace by an
- // arbitrary 180 degree rotation to avoid invalid normalization.
- quat.qx = 1.0f;
- quat.qy = 0.0f;
- quat.qz = 0.0f;
- quat.qw = 0.0f;
- } else {
- quat.qx = vec.y;
- quat.qy = -vec.x;
- quat.qz = 0.0f;
- vr_shell::NormalizeQuat(quat);
+class GLThread : public base::Thread {
+ public:
+ GLThread(VrShell* vr_shell, base::WeakPtr<VrShell> weak_vr_shell,
+ base::WeakPtr<VrInputManager> content_input_manager,
+ base::WeakPtr<VrInputManager> ui_input_manager,
+ scoped_refptr<base::SingleThreadTaskRunner> main_thread_task_runner,
+ gvr_context* gvr_api)
+ : base::Thread("VrShellGL"),
+ vr_shell_(vr_shell),
+ weak_vr_shell_(weak_vr_shell),
+ content_input_manager_(content_input_manager),
+ ui_input_manager_(ui_input_manager),
+ main_thread_task_runner_(std::move(main_thread_task_runner)),
+ gvr_api_(gvr_api) {}
+
+ base::WeakPtr<VrShellGl> GetVrShellGl() { return weak_vr_shell_gl_; }
+ VrShellGl* GetVrShellGlUnsafe() { return vr_shell_gl_.get(); }
+ void StopRenderLoop() {
+ vr_shell_gl_.reset();
+ }
+
+ protected:
+ void Init() override {
+ vr_shell_gl_.reset(new VrShellGl(vr_shell_,
+ std::move(weak_vr_shell_),
+ std::move(content_input_manager_),
+ std::move(ui_input_manager_),
+ std::move(main_thread_task_runner_),
+ gvr_api_));
+ weak_vr_shell_gl_ = vr_shell_gl_->GetWeakPtr();
+ if (!vr_shell_gl_->Initialize()) {
+ vr_shell_gl_.reset();
+ }
}
- return quat;
-}
-std::unique_ptr<blink::WebMouseEvent> MakeMouseEvent(WebInputEvent::Type type,
- double timestamp,
- float x,
- float y) {
- std::unique_ptr<blink::WebMouseEvent> mouse_event(new blink::WebMouseEvent);
- mouse_event->type = type;
- mouse_event->pointerType = blink::WebPointerProperties::PointerType::Mouse;
- mouse_event->x = x;
- mouse_event->y = y;
- mouse_event->windowX = x;
- mouse_event->windowY = y;
- mouse_event->timeStampSeconds = timestamp;
- mouse_event->clickCount = 1;
- mouse_event->modifiers = 0;
+ private:
+ std::unique_ptr<VrShellGl> vr_shell_gl_;
+ base::WeakPtr<VrShellGl> weak_vr_shell_gl_;
+
+ // TODO(mthiesse): Remove vr_shell_.
+ VrShell* vr_shell_;
+ base::WeakPtr<VrShell> weak_vr_shell_;
+ base::WeakPtr<VrInputManager> content_input_manager_;
+ base::WeakPtr<VrInputManager> ui_input_manager_;
+ scoped_refptr<base::SingleThreadTaskRunner> main_thread_task_runner_;
+ gvr_context* gvr_api_;
+};
- return mouse_event;
-}
} // namespace
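
Editor's note on the pattern above (a sketch, not part of this CL): GLThread creates VrShellGl inside Init() so the object is constructed, used, and destroyed on the GL thread, and other threads reach it only through a WeakPtr bound into posted tasks. The fragment below shows the same ownership pattern in isolation, assuming Chromium's base::Thread and base::WeakPtrFactory APIs; the names Worker and WorkerThread are illustrative, not from the CL.

#include <memory>

#include "base/bind.h"
#include "base/memory/weak_ptr.h"
#include "base/threading/thread.h"

class Worker {
 public:
  Worker() : weak_ptr_factory_(this) {}
  void DoWork() {}  // Runs on the worker thread.
  base::WeakPtr<Worker> GetWeakPtr() { return weak_ptr_factory_.GetWeakPtr(); }

 private:
  base::WeakPtrFactory<Worker> weak_ptr_factory_;
};

class WorkerThread : public base::Thread {
 public:
  WorkerThread() : base::Thread("Worker") {}
  ~WorkerThread() override { Stop(); }
  base::WeakPtr<Worker> GetWorker() { return weak_worker_; }

 protected:
  // Init() and CleanUp() run on the new thread, so |worker_| lives and dies
  // there, just like VrShellGl does on GLThread above.
  void Init() override {
    worker_.reset(new Worker());
    weak_worker_ = worker_->GetWeakPtr();
  }
  void CleanUp() override { worker_.reset(); }

 private:
  std::unique_ptr<Worker> worker_;
  base::WeakPtr<Worker> weak_worker_;
};

// Usage from the owning thread: if the weak pointer is not yet (or no longer)
// valid, the posted task silently does nothing.
//   WorkerThread thread;
//   thread.Start();
//   thread.task_runner()->PostTask(
//       FROM_HERE, base::Bind(&Worker::DoWork, thread.GetWorker()));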
VrShell::VrShell(JNIEnv* env,
@@ -155,17 +96,32 @@ VrShell::VrShell(JNIEnv* env,
ui::WindowAndroid* content_window,
content::WebContents* ui_contents,
ui::WindowAndroid* ui_window,
- bool for_web_vr)
+ bool for_web_vr,
+ VrShellDelegate* delegate,
+ gvr_context* gvr_api)
: WebContentsObserver(ui_contents),
main_contents_(main_contents),
ui_contents_(ui_contents),
+ delegate_(delegate),
metrics_helper_(new VrMetricsHelper(main_contents)),
main_thread_task_runner_(base::ThreadTaskRunnerHandle::Get()),
weak_ptr_factory_(this) {
DCHECK(g_instance == nullptr);
g_instance = this;
j_vr_shell_.Reset(env, obj);
- scene_.reset(new UiScene);
+
+ content_input_manager_.reset(new VrInputManager(main_contents_));
+ ui_input_manager_.reset(new VrInputManager(ui_contents_));
+
+ gl_thread_.reset(new GLThread(this, weak_ptr_factory_.GetWeakPtr(),
+ content_input_manager_->GetWeakPtr(),
+ ui_input_manager_->GetWeakPtr(),
+ main_thread_task_runner_,
+ gvr_api));
+
+ base::Thread::Options options(base::MessageLoop::TYPE_DEFAULT, 0);
+ options.priority = base::ThreadPriority::DISPLAY;
+ gl_thread_->StartWithOptions(options);
if (for_web_vr)
metrics_helper_->SetWebVREnabled(true);
@@ -173,36 +129,20 @@ VrShell::VrShell(JNIEnv* env,
for_web_vr ? UiInterface::Mode::WEB_VR : UiInterface::Mode::STANDARD,
main_contents_->IsFullscreen()));
content_compositor_.reset(new VrCompositor(content_window, false));
+ content_compositor_->SetLayer(main_contents_);
ui_compositor_.reset(new VrCompositor(ui_window, true));
+ ui_compositor_->SetLayer(ui_contents_);
vr_web_contents_observer_.reset(new VrWebContentsObserver(
main_contents, html_interface_.get(), this));
- LoadUIContentOnUI();
-
- gvr::Mat4f identity;
- SetIdentityM(identity);
- webvr_head_pose_.resize(kPoseRingBufferSize, identity);
- webvr_head_pose_valid_.resize(kPoseRingBufferSize, false);
-
- content_input_manager_.reset(new VrInputManager(main_contents_));
- ui_input_manager_.reset(new VrInputManager(ui_contents_));
- weak_content_input_manager_ = content_input_manager_->GetWeakPtr();
- weak_ui_input_manager_ = ui_input_manager_->GetWeakPtr();
-
- SetShowingOverscrollGlowOnUI(false);
+ SetShowingOverscrollGlow(false);
}
-void VrShell::UpdateCompositorLayersOnUI(JNIEnv* env,
- const JavaParamRef<jobject>& obj) {
- content_compositor_->SetLayer(main_contents_);
- ui_compositor_->SetLayer(ui_contents_);
-}
-
-void VrShell::DestroyOnUI(JNIEnv* env, const JavaParamRef<jobject>& obj) {
+void VrShell::Destroy(JNIEnv* env, const JavaParamRef<jobject>& obj) {
delete this;
}
-void VrShell::LoadUIContentOnUI() {
+void VrShell::LoadUIContent(JNIEnv* env, const JavaParamRef<jobject>& obj) {
GURL url(kVrShellUIURL);
ui_contents_->GetController().LoadURL(
url, content::Referrer(),
@@ -214,757 +154,90 @@ bool RegisterVrShell(JNIEnv* env) {
}
VrShell::~VrShell() {
- if (delegate_ && delegate_->GetDeviceProvider()) {
- delegate_->GetDeviceProvider()->OnGvrDelegateRemoved();
- }
- g_instance = nullptr;
- gl::init::ShutdownGL();
-}
-
-void VrShell::SetDelegateOnUI(JNIEnv* env,
- const base::android::JavaParamRef<jobject>& obj,
- const base::android::JavaParamRef<jobject>& delegate) {
- base::AutoLock lock(gvr_init_lock_);
- delegate_ = VrShellDelegate::GetNativeDelegate(env, delegate);
- if (swap_chain_.get()) {
- delegate_->GetDeviceProvider()->OnGvrDelegateReady(
- weak_ptr_factory_.GetWeakPtr());
- }
-}
-
-enum class ViewerType {
- UNKNOWN_TYPE = 0,
- CARDBOARD = 1,
- DAYDREAM = 2,
- VIEWER_TYPE_MAX,
-};
-
-void VrShell::GvrInitOnGL(JNIEnv* env,
- const JavaParamRef<jobject>& obj,
- jlong native_gvr_api) {
- // set the initial webvr state
- metrics_helper_->SetVRActive(true);
-
- gvr_api_ =
- gvr::GvrApi::WrapNonOwned(reinterpret_cast<gvr_context*>(native_gvr_api));
- // TODO(klausw,crbug.com/655722): should report OnGvrDelegateReady here once
- // we switch to using a WebVR render surface. We currently need to wait for
- // the compositor window's size to be known first. See also
- // ContentSurfaceChanged.
- controller_.reset(
- new VrController(reinterpret_cast<gvr_context*>(native_gvr_api)));
-
-
- ViewerType viewerType;
- switch (gvr_api_->GetViewerType()) {
- case gvr::ViewerType::GVR_VIEWER_TYPE_DAYDREAM:
- viewerType = ViewerType::DAYDREAM;
- break;
- case gvr::ViewerType::GVR_VIEWER_TYPE_CARDBOARD:
- viewerType = ViewerType::CARDBOARD;
- break;
- default:
- NOTREACHED();
- viewerType = ViewerType::UNKNOWN_TYPE;
- break;
- }
- UMA_HISTOGRAM_ENUMERATION("VRViewerType", static_cast<int>(viewerType),
- static_cast<int>(ViewerType::VIEWER_TYPE_MAX));
-}
-
-void VrShell::InitializeGlOnGL(JNIEnv* env,
- const JavaParamRef<jobject>& obj,
- jint content_texture_handle,
- jint ui_texture_handle) {
- base::AutoLock lock(gvr_init_lock_);
- CHECK(gl::GetGLImplementation() != gl::kGLImplementationNone ||
- gl::init::InitializeGLOneOff());
-
- content_texture_id_ = content_texture_handle;
- ui_texture_id_ = ui_texture_handle;
-
- // While WebVR is going through the compositor path, it shares
- // the same texture ID. This will change once it gets its own
- // surface, but store it separately to avoid future confusion.
- // TODO(klausw,crbug.com/655722): remove this.
- webvr_texture_id_ = content_texture_id_;
- // Out of paranoia, explicitly reset the "pose valid" flags to false
- // from the GL thread. The constructor ran in the UI thread.
- // TODO(klausw,crbug.com/655722): remove this.
- webvr_head_pose_valid_.assign(kPoseRingBufferSize, false);
-
- gvr_api_->InitializeGl();
- std::vector<gvr::BufferSpec> specs;
- // For kFramePrimaryBuffer (primary VrShell and WebVR content)
- specs.push_back(gvr_api_->CreateBufferSpec());
- render_size_primary_ = specs[kFramePrimaryBuffer].GetSize();
- render_size_primary_vrshell_ = render_size_primary_;
-
- // For kFrameHeadlockedBuffer (for WebVR insecure content warning).
- // Set this up at fixed resolution, the (smaller) FOV gets set below.
- specs.push_back(gvr_api_->CreateBufferSpec());
- specs.back().SetSize(kHeadlockedBufferDimensions);
- render_size_headlocked_ = specs[kFrameHeadlockedBuffer].GetSize();
-
- swap_chain_.reset(new gvr::SwapChain(gvr_api_->CreateSwapChain(specs)));
-
- vr_shell_renderer_.reset(new VrShellRenderer());
-
- // Allocate a buffer viewport for use in UI drawing. This isn't
- // initialized at this point, it'll be set from other viewport list
- // entries as needed.
- buffer_viewport_.reset(
- new gvr::BufferViewport(gvr_api_->CreateBufferViewport()));
-
- // Set up main content viewports. The list has two elements, 0=left
- // eye and 1=right eye.
- buffer_viewport_list_.reset(
- new gvr::BufferViewportList(gvr_api_->CreateEmptyBufferViewportList()));
- buffer_viewport_list_->SetToRecommendedBufferViewports();
-
- // Set up head-locked UI viewports, these will be elements 2=left eye
- // and 3=right eye. For now, use a hardcoded 20-degree-from-center FOV
- // frustum to reduce rendering cost for this overlay. This fits the
- // current content, but will need to be adjusted once there's more dynamic
- // head-locked content that could be larger.
- headlocked_left_viewport_.reset(
- new gvr::BufferViewport(gvr_api_->CreateBufferViewport()));
- buffer_viewport_list_->GetBufferViewport(GVR_LEFT_EYE,
- headlocked_left_viewport_.get());
- headlocked_left_viewport_->SetSourceBufferIndex(kFrameHeadlockedBuffer);
- headlocked_left_viewport_->SetReprojection(GVR_REPROJECTION_NONE);
- headlocked_left_viewport_->SetSourceFov(kHeadlockedBufferFov);
-
- headlocked_right_viewport_.reset(
- new gvr::BufferViewport(gvr_api_->CreateBufferViewport()));
- buffer_viewport_list_->GetBufferViewport(GVR_RIGHT_EYE,
- headlocked_right_viewport_.get());
- headlocked_right_viewport_->SetSourceBufferIndex(kFrameHeadlockedBuffer);
- headlocked_right_viewport_->SetReprojection(GVR_REPROJECTION_NONE);
- headlocked_right_viewport_->SetSourceFov(kHeadlockedBufferFov);
-
- // Save copies of the first two viewport items for use by WebVR, it
- // sets its own UV bounds.
- webvr_left_viewport_.reset(
- new gvr::BufferViewport(gvr_api_->CreateBufferViewport()));
- buffer_viewport_list_->GetBufferViewport(GVR_LEFT_EYE,
- webvr_left_viewport_.get());
- webvr_left_viewport_->SetSourceBufferIndex(kFramePrimaryBuffer);
-
- webvr_right_viewport_.reset(
- new gvr::BufferViewport(gvr_api_->CreateBufferViewport()));
- buffer_viewport_list_->GetBufferViewport(GVR_RIGHT_EYE,
- webvr_right_viewport_.get());
- webvr_right_viewport_->SetSourceBufferIndex(kFramePrimaryBuffer);
-
- if (delegate_) {
- main_thread_task_runner_->PostTask(
- FROM_HERE, base::Bind(&device::GvrDeviceProvider::OnGvrDelegateReady,
- delegate_->GetDeviceProvider(),
- weak_ptr_factory_.GetWeakPtr()));
- }
-}
-
-void VrShell::UpdateControllerOnGL(const gvr::Vec3f& forward_vector) {
- controller_->UpdateState();
-
-#if defined(ENABLE_VR_SHELL)
- // Note that button up/down state is transient, so ButtonUpHappened only
- // returns
- // true for a single frame (and we're guaranteed not to miss it).
- if (controller_->ButtonUpHappened(
- gvr::ControllerButton::GVR_CONTROLLER_BUTTON_APP)) {
- html_interface_->SetMenuMode(!html_interface_->GetMenuMode());
-
- // TODO(mthiesse): The page is no longer visible when in menu mode. We
- // should unfocus or otherwise let it know it's hidden.
- if (html_interface_->GetMode() == UiInterface::Mode::WEB_VR) {
- const auto&& task = html_interface_->GetMenuMode() ?
- &device::GvrDeviceProvider::OnDisplayBlur :
- &device::GvrDeviceProvider::OnDisplayFocus;
- main_thread_task_runner_->PostTask(
- FROM_HERE, base::Bind(task, delegate_->GetDeviceProvider()));
- }
- }
-#endif
- if (html_interface_->GetMode() == UiInterface::Mode::WEB_VR) {
- // Process screen touch events for Cardboard button compatibility.
- // Also send tap events for controller "touchpad click" events.
- if (touch_pending_ ||
- controller_->ButtonUpHappened(
- gvr::ControllerButton::GVR_CONTROLLER_BUTTON_CLICK)) {
- touch_pending_ = false;
- std::unique_ptr<WebGestureEvent> gesture(new WebGestureEvent());
- gesture->sourceDevice = blink::WebGestureDeviceTouchpad;
- gesture->timeStampSeconds =
- (base::TimeTicks::Now() - base::TimeTicks()).InSecondsF();
- gesture->type = WebInputEvent::GestureTapDown;
- gesture->x = 0;
- gesture->y = 0;
- SendGestureOnGL(CONTENT, std::move(gesture));
- }
-
- return;
- }
-
- gvr::Vec3f ergo_neutral_pose;
- if (!controller_->IsConnected()) {
- // No controller detected, set up a gaze cursor that tracks the
- // forward direction.
- ergo_neutral_pose = {0.0f, 0.0f, -1.0f};
- controller_quat_ = GetRotationFromZAxis(forward_vector);
- } else {
- ergo_neutral_pose = {0.0f, -sin(kErgoAngleOffset), -cos(kErgoAngleOffset)};
- controller_quat_ = controller_->Orientation();
- }
-
- gvr::Mat4f mat = QuatToMatrix(controller_quat_);
- gvr::Vec3f forward = MatrixVectorMul(mat, ergo_neutral_pose);
- gvr::Vec3f origin = kHandPosition;
-
- // If we place the reticle based on elements intersecting the controller beam,
- // we can end up with the reticle hiding behind elements, or jumping laterally
- // in the field of view. This is physically correct, but hard to use. For
- // usability, do the following instead:
- //
- // - Project the controller laser onto an outer surface, which is the
- // closer of the desktop plane, or a distance-limiting sphere.
- // - Create a vector between the eyes and the outer surface point.
- // - If any UI elements intersect this vector, choose the closest to the eyes,
- // and place the reticle at the intersection point.
-
- // Find distance to a corner of the content quad, and limit the cursor
- // distance to a multiple of that distance. This lets us keep the reticle on
- // the content plane near the content window, and on the surface of a sphere
- // in other directions. Note that this approach uses distance from controller,
- // rather than eye, for simplicity. This will make the sphere slightly
- // off-center.
- float distance = kDefaultReticleDistance;
- ContentRectangle* content_plane = scene_->GetContentQuad();
- if (content_plane) {
- distance = content_plane->GetRayDistance(origin, forward);
- gvr::Vec3f corner = {0.5f, 0.5f, 0.0f};
- corner = MatrixVectorMul(content_plane->transform.to_world, corner);
- float max_distance = Distance(origin, corner) * kReticleDistanceMultiplier;
- if (distance > max_distance || distance <= 0.0f) {
- distance = max_distance;
- }
- }
-
- target_point_ = GetRayPoint(origin, forward, distance);
- gvr::Vec3f eye_to_target = target_point_;
- NormalizeVector(eye_to_target);
-
- // Determine which UI element (if any) intersects the line between the eyes
- // and the controller target position.
- float closest_element_distance = std::numeric_limits<float>::infinity();
- int pixel_x = 0;
- int pixel_y = 0;
- target_element_ = nullptr;
- InputTarget input_target = NONE;
-
- for (const auto& plane : scene_->GetUiElements()) {
- if (!plane->visible || !plane->hit_testable) {
- continue;
- }
- float distance_to_plane = plane->GetRayDistance(kOrigin, eye_to_target);
- gvr::Vec3f plane_intersection_point =
- GetRayPoint(kOrigin, eye_to_target, distance_to_plane);
-
- gvr::Vec3f rect_2d_point =
- MatrixVectorMul(plane->transform.from_world, plane_intersection_point);
- if (distance_to_plane > 0 && distance_to_plane < closest_element_distance) {
- float x = rect_2d_point.x + 0.5f;
- float y = 0.5f - rect_2d_point.y;
- bool is_inside = x >= 0.0f && x < 1.0f && y >= 0.0f && y < 1.0f;
- if (!is_inside)
- continue;
-
- closest_element_distance = distance_to_plane;
- Rectf pixel_rect;
- if (plane->content_quad) {
- pixel_rect = {0, 0, content_tex_css_width_, content_tex_css_height_};
- } else {
- pixel_rect = {plane->copy_rect.x, plane->copy_rect.y,
- plane->copy_rect.width, plane->copy_rect.height};
- }
- pixel_x = pixel_rect.width * x + pixel_rect.x;
- pixel_y = pixel_rect.height * y + pixel_rect.y;
-
- target_point_ = plane_intersection_point;
- target_element_ = plane.get();
- input_target = plane->content_quad ? CONTENT : UI;
- }
- }
- SendEventsToTargetOnGL(input_target, pixel_x, pixel_y);
-}
-
-void VrShell::SendEventsToTargetOnGL(InputTarget input_target,
- int pixel_x,
- int pixel_y) {
- std::vector<std::unique_ptr<WebGestureEvent>> gesture_list =
- controller_->DetectGestures();
- double timestamp = gesture_list.front()->timeStampSeconds;
-
- if (touch_pending_) {
- touch_pending_ = false;
- std::unique_ptr<WebGestureEvent> event(new WebGestureEvent());
- event->type = WebInputEvent::GestureTapDown;
- event->sourceDevice = blink::WebGestureDeviceTouchpad;
- event->timeStampSeconds = timestamp;
- event->x = pixel_x;
- event->y = pixel_y;
- gesture_list.push_back(std::move(event));
- }
-
- for (const auto& gesture : gesture_list) {
- switch (gesture->type) {
- case WebInputEvent::GestureScrollBegin:
- case WebInputEvent::GestureScrollUpdate:
- case WebInputEvent::GestureScrollEnd:
- case WebInputEvent::GestureFlingCancel:
- case WebInputEvent::GestureFlingStart:
- SendGestureOnGL(CONTENT,
- base::WrapUnique(new WebGestureEvent(*gesture)));
- break;
- case WebInputEvent::GestureTapDown:
- gesture->x = pixel_x;
- gesture->y = pixel_y;
- if (input_target != NONE)
- SendGestureOnGL(input_target,
- base::WrapUnique(new WebGestureEvent(*gesture)));
- break;
- case WebInputEvent::Undefined:
- break;
- default:
- NOTREACHED();
- }
- }
-
- // Hover support
- bool new_target = input_target != current_input_target_;
- if (new_target && current_input_target_ != NONE) {
- // Send a move event indicating that the pointer moved off of an element.
- SendGestureOnGL(current_input_target_,
- MakeMouseEvent(WebInputEvent::MouseLeave, timestamp, 0, 0));
- }
-
- current_input_target_ = input_target;
- if (current_input_target_ != NONE) {
- WebInputEvent::Type type =
- new_target ? WebInputEvent::MouseEnter : WebInputEvent::MouseMove;
- SendGestureOnGL(input_target,
- MakeMouseEvent(type, timestamp, pixel_x, pixel_y));
- }
-}
-
-void VrShell::SendGestureOnGL(InputTarget input_target,
- std::unique_ptr<blink::WebInputEvent> event) {
- DCHECK(input_target != NONE);
- const base::WeakPtr<VrInputManager>& weak_ptr =
- input_target == CONTENT ? weak_content_input_manager_
- : weak_ui_input_manager_;
- main_thread_task_runner_->PostTask(
+ GLThread* thread = static_cast<GLThread*>(gl_thread_.get());
+ thread->task_runner()->PostTask(
FROM_HERE,
- base::Bind(&VrInputManager::ProcessUpdatedGesture, weak_ptr,
- base::Passed(std::move(event))));
+ base::Bind(&GLThread::StopRenderLoop, base::Unretained(thread)));
+ {
+ // Unfortunately stopping threads asserts that IO is allowed, and on the
+ // Android UI thread, IO is not allowed by default. However, we can't allow
+ // the thread to keep running, because the GVR instance is about to be
+ // deleted and we'll segfault if the thread keeps running for any amount of
+ // time.
+ // Our options are:
brettw 2016/12/12 19:12:40 Can you replace this "options" section with the in
mthiesse 2016/12/12 19:49:30 Done.
+ // 1. Use the *heavily* discouraged ScopedAllowIO.
+  //  2. a. Post a task to the GLThread telling it to stop and delete vr_shell_gl_.
+ // b. Wait for a signal that this has been completed.
+ // c. Call thread->StopSoon().
+ // d. Post a task to the current thread to check if GLThread has stopped
+ // yet.
+  //      e. If the thread has stopped, delete it; otherwise, post a delayed
+  //         task to the current thread to check again. Repeat as needed.
+ //
+ // TODO(mthiesse): I've gone with option 1. Are there better options?
+ base::ThreadRestrictions::ScopedAllowIO allow_io;
+ thread->Stop();
+ }
+ delegate_->RemoveDelegate();
+ g_instance = nullptr;
}
void VrShell::SetGvrPoseForWebVr(const gvr::Mat4f& pose, uint32_t pose_num) {
- webvr_head_pose_[pose_num % kPoseRingBufferSize] = pose;
- webvr_head_pose_valid_[pose_num % kPoseRingBufferSize] = true;
-}
-
-int GetPixelEncodedPoseIndexByte() {
- TRACE_EVENT0("gpu", "VrShell::GetPixelEncodedPoseIndex");
- // Read the pose index encoded in a bottom left pixel as color values.
- // See also third_party/WebKit/Source/modules/vr/VRDisplay.cpp which
- // encodes the pose index, and device/vr/android/gvr/gvr_device.cc
- // which tracks poses. Returns the low byte (0..255) if valid, or -1
- // if not valid due to bad magic number.
- uint8_t pixels[4];
- // Assume we're reading from the framebuffer we just wrote to.
- // That's true currently, we may need to use glReadBuffer(GL_BACK)
- // or equivalent if the rendering setup changes in the future.
- glReadPixels(0, 0, 1, 1, GL_RGBA, GL_UNSIGNED_BYTE, pixels);
-
- // Check for the magic number written by VRDevice.cpp on submit.
- // This helps avoid glitches from garbage data in the render
- // buffer that can appear during initialization or resizing. These
- // often appear as flashes of all-black or all-white pixels.
- if (pixels[1] == kWebVrPosePixelMagicNumbers[0] &&
- pixels[2] == kWebVrPosePixelMagicNumbers[1]) {
- // Pose is good.
- return pixels[0];
- }
- VLOG(1) << "WebVR: reject decoded pose index " << (int)pixels[0] <<
- ", bad magic number " << (int)pixels[1] << ", " << (int)pixels[2];
- return -1;
-}
-
-bool VrShell::WebVrPoseByteIsValidOnGL(int pose_index_byte) {
- if (pose_index_byte < 0) {
- return false;
- }
- if (!webvr_head_pose_valid_[pose_index_byte % kPoseRingBufferSize]) {
- VLOG(1) << "WebVR: reject decoded pose index " << pose_index_byte <<
- ", not a valid pose";
- return false;
- }
- return true;
-}
-
-void VrShell::DrawFrameOnGL(JNIEnv* env, const JavaParamRef<jobject>& obj) {
- TRACE_EVENT0("gpu", "VrShell::DrawFrame");
- // Reset the viewport list to just the pair of viewports for the
- // primary buffer each frame. Head-locked viewports get added by
- // DrawVrShell if needed.
- buffer_viewport_list_->SetToRecommendedBufferViewports();
-
- if (html_interface_->GetMode() == UiInterface::Mode::WEB_VR) {
- // If needed, resize the primary buffer for use with WebVR.
- if (render_size_primary_ != render_size_primary_webvr_) {
- if (!render_size_primary_webvr_.width) {
- VLOG(2) << "WebVR rendering size not known yet, dropping frame";
- return;
- }
- render_size_primary_ = render_size_primary_webvr_;
- swap_chain_->ResizeBuffer(kFramePrimaryBuffer, render_size_primary_);
- }
- } else {
- if (render_size_primary_ != render_size_primary_vrshell_) {
- render_size_primary_ = render_size_primary_vrshell_;
- swap_chain_->ResizeBuffer(kFramePrimaryBuffer, render_size_primary_);
- }
- }
-
- gvr::Frame frame = swap_chain_->AcquireFrame();
- gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow();
- target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos;
-
- gvr::Mat4f head_pose =
- gvr_api_->GetHeadSpaceFromStartSpaceRotation(target_time);
-
- gvr::Vec3f position = GetTranslation(head_pose);
- if (position.x == 0.0f && position.y == 0.0f && position.z == 0.0f) {
- // This appears to be a 3DOF pose without a neck model. Add one.
- // The head pose has redundant data. Assume we're only using the
- // object_from_reference_matrix, we're not updating position_external.
- // TODO: Not sure what object_from_reference_matrix is. The new api removed
- // it. For now, removing it seems working fine.
- gvr_api_->ApplyNeckModel(head_pose, 1.0f);
- }
-
- // Bind the primary framebuffer.
- frame.BindBuffer(kFramePrimaryBuffer);
-
- HandleQueuedTasksOnGL();
-
- // Update the render position of all UI elements (including desktop).
- const float screen_tilt = kDesktopScreenTiltDefault * M_PI / 180.0f;
- scene_->UpdateTransforms(screen_tilt, UiScene::TimeInMicroseconds());
-
- UpdateControllerOnGL(GetForwardVector(head_pose));
-
- if (html_interface_->GetMode() == UiInterface::Mode::WEB_VR) {
- DrawWebVrOnGL();
-
- // When using async reprojection, we need to know which pose was used in
- // the WebVR app for drawing this frame. Due to unknown amounts of
- // buffering in the compositor and SurfaceTexture, we read the pose number
- // from a corner pixel. There's no point in doing this for legacy
- // distortion rendering since that doesn't need a pose, and reading back
- // pixels is an expensive operation. TODO(klausw,crbug.com/655722): stop
- // doing this once we have working no-compositor rendering for WebVR.
- if (gvr_api_->GetAsyncReprojectionEnabled()) {
- int pose_index_byte = GetPixelEncodedPoseIndexByte();
- if (WebVrPoseByteIsValidOnGL(pose_index_byte)) {
- // We have a valid pose, use it for reprojection.
- webvr_left_viewport_->SetReprojection(GVR_REPROJECTION_FULL);
- webvr_right_viewport_->SetReprojection(GVR_REPROJECTION_FULL);
- head_pose = webvr_head_pose_[pose_index_byte % kPoseRingBufferSize];
- // We can't mark the used pose as invalid since unfortunately
- // we have to reuse them. The compositor will re-submit stale
- // frames on vsync, and we can't tell that this has happened
- // until we've read the pose index from it, and at that point
- // it's too late to skip rendering.
- } else {
- // If we don't get a valid frame ID back we shouldn't attempt
- // to reproject by an invalid matrix, so turn off reprojection
- // instead. Invalid poses can permanently break reprojection
- // for this GVR instance: http://crbug.com/667327
- webvr_left_viewport_->SetReprojection(GVR_REPROJECTION_NONE);
- webvr_right_viewport_->SetReprojection(GVR_REPROJECTION_NONE);
- }
- }
- }
-
- DrawVrShellOnGL(head_pose, frame);
-
- frame.Unbind();
- frame.Submit(*buffer_viewport_list_, head_pose);
-}
-
-void VrShell::DrawVrShellOnGL(const gvr::Mat4f& head_pose,
- gvr::Frame &frame) {
- TRACE_EVENT0("gpu", "VrShell::DrawVrShell");
- std::vector<const ContentRectangle*> head_locked_elements;
- std::vector<const ContentRectangle*> world_elements;
- for (const auto& rect : scene_->GetUiElements()) {
- if (!rect->visible) {
- continue;
- }
- if (rect->lock_to_fov) {
- head_locked_elements.push_back(rect.get());
- } else {
- world_elements.push_back(rect.get());
- }
- }
-
- if (html_interface_->GetMode() == UiInterface::Mode::WEB_VR) {
- // WebVR is incompatible with 3D world compositing since the
- // depth buffer was already populated with unknown scaling - the
- // WebVR app has full control over zNear/zFar. Just leave the
- // existing content in place in the primary buffer without
- // clearing. Currently, there aren't any world elements in WebVR
- // mode, this will need further testing if those get added
- // later.
- } else {
- // Non-WebVR mode, enable depth testing and clear the primary buffers.
- glEnable(GL_CULL_FACE);
- glEnable(GL_DEPTH_TEST);
- glDepthMask(GL_TRUE);
-
- glClearColor(0.1f, 0.1f, 0.1f, 1.0f);
- glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
- }
-
- if (!world_elements.empty()) {
- DrawUiViewOnGL(&head_pose, world_elements, render_size_primary_,
- kViewportListPrimaryOffset);
- }
-
- if (!head_locked_elements.empty()) {
- // Add head-locked viewports. The list gets reset to just
- // the recommended viewports (for the primary buffer) each frame.
- buffer_viewport_list_->SetBufferViewport(
- kViewportListHeadlockedOffset + GVR_LEFT_EYE,
- *headlocked_left_viewport_);
- buffer_viewport_list_->SetBufferViewport(
- kViewportListHeadlockedOffset + GVR_RIGHT_EYE,
- *headlocked_right_viewport_);
-
- // Bind the headlocked framebuffer.
- frame.BindBuffer(kFrameHeadlockedBuffer);
- glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
- glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
- DrawUiViewOnGL(nullptr, head_locked_elements, render_size_headlocked_,
- kViewportListHeadlockedOffset);
+ GLThread* thread = static_cast<GLThread*>(gl_thread_.get());
+ if (thread->GetVrShellGlUnsafe()) {
bshe 2016/12/12 20:30:20 why do you use Unsafe here instead of post to GL t
+ thread->GetVrShellGlUnsafe()->SetGvrPoseForWebVr(pose, pose_num);
}
}
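
For reference, a sketch (editor's, not part of this patch) of the posted-task form the review comment above asks about, following the same pattern OnTriggerEvent, OnPause, and OnResume use further down. Whether it fits here depends on whether the pose may be applied asynchronously on the GL thread; the GetVrShellGlUnsafe() call above applies it synchronously instead.

  GLThread* thread = static_cast<GLThread*>(gl_thread_.get());
  thread->task_runner()->PostTask(
      FROM_HERE, base::Bind(&VrShellGl::SetGvrPoseForWebVr,
                            thread->GetVrShellGl(), pose, pose_num));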
void VrShell::SetWebVRRenderSurfaceSize(int width, int height) {
- render_size_primary_webvr_.width = width;
- render_size_primary_webvr_.height = height;
- // TODO(klausw,crbug.com/655722): set the WebVR render surface size here once
- // we have that.
-}
-
-gvr::Sizei VrShell::GetWebVRCompositorSurfaceSize() {
- // This is a stopgap while we're using the WebVR compositor rendering path.
- // TODO(klausw,crbug.com/655722): Remove this method and member once we're
- // using a separate WebVR render surface.
- return content_tex_physical_size_;
-}
-
-
-void VrShell::DrawUiViewOnGL(const gvr::Mat4f* head_pose,
- const std::vector<const ContentRectangle*>& elements,
- const gvr::Sizei& render_size, int viewport_offset) {
- TRACE_EVENT0("gpu", "VrShell::DrawUiView");
- for (auto eye : {GVR_LEFT_EYE, GVR_RIGHT_EYE}) {
- buffer_viewport_list_->GetBufferViewport(
- eye + viewport_offset, buffer_viewport_.get());
-
- gvr::Mat4f view_matrix = gvr_api_->GetEyeFromHeadMatrix(eye);
- if (head_pose != nullptr) {
- view_matrix = MatrixMul(view_matrix, *head_pose);
- }
-
- gvr::Recti pixel_rect =
- CalculatePixelSpaceRect(render_size, buffer_viewport_->GetSourceUv());
- glViewport(pixel_rect.left, pixel_rect.bottom,
- pixel_rect.right - pixel_rect.left,
- pixel_rect.top - pixel_rect.bottom);
-
- const gvr::Mat4f render_matrix = MatrixMul(
- PerspectiveMatrixFromView(
- buffer_viewport_->GetSourceFov(), kZNear, kZFar),
- view_matrix);
-
- DrawElementsOnGL(render_matrix, elements);
- if (head_pose != nullptr &&
- html_interface_->GetMode() != UiInterface::Mode::WEB_VR) {
- DrawCursorOnGL(render_matrix);
- }
+ GLThread* thread = static_cast<GLThread*>(gl_thread_.get());
+ if (thread->GetVrShellGlUnsafe()) {
+ thread->GetVrShellGlUnsafe()->SetWebVRRenderSurfaceSize(width, height);
}
}
-void VrShell::DrawElementsOnGL(
- const gvr::Mat4f& render_matrix,
- const std::vector<const ContentRectangle*>& elements) {
- for (const auto& rect : elements) {
- Rectf copy_rect;
- jint texture_handle;
- if (rect->content_quad) {
- copy_rect = {0, 0, 1, 1};
- texture_handle = content_texture_id_;
- } else {
- copy_rect.x = static_cast<float>(rect->copy_rect.x) / ui_tex_css_width_;
- copy_rect.y = static_cast<float>(rect->copy_rect.y) / ui_tex_css_height_;
- copy_rect.width = static_cast<float>(rect->copy_rect.width) /
- ui_tex_css_width_;
- copy_rect.height = static_cast<float>(rect->copy_rect.height) /
- ui_tex_css_height_;
- texture_handle = ui_texture_id_;
- }
- gvr::Mat4f transform = MatrixMul(render_matrix, rect->transform.to_world);
- vr_shell_renderer_->GetTexturedQuadRenderer()->Draw(
- texture_handle, transform, copy_rect);
- }
-}
-
-void VrShell::DrawCursorOnGL(const gvr::Mat4f& render_matrix) {
- gvr::Mat4f mat;
- SetIdentityM(mat);
-
- // Draw the reticle.
-
- // Scale the pointer to have a fixed FOV size at any distance.
- const float eye_to_target = Distance(target_point_, kOrigin);
- ScaleM(mat, mat, kReticleWidth * eye_to_target,
- kReticleHeight * eye_to_target, 1.0f);
-
- gvr::Quatf rotation;
- if (target_element_ != nullptr) {
- // Make the reticle planar to the element it's hitting.
- rotation = GetRotationFromZAxis(target_element_->GetNormal());
- } else {
- // Rotate the cursor to directly face the eyes.
- rotation = GetRotationFromZAxis(target_point_);
- }
- mat = MatrixMul(QuatToMatrix(rotation), mat);
-
- // Place the pointer slightly in front of the plane intersection point.
- TranslateM(mat, mat, target_point_.x * kReticleOffset,
- target_point_.y * kReticleOffset,
- target_point_.z * kReticleOffset);
-
- gvr::Mat4f transform = MatrixMul(render_matrix, mat);
- vr_shell_renderer_->GetReticleRenderer()->Draw(transform);
-
- // Draw the laser.
-
- // Find the length of the beam (from hand to target).
- const float laser_length = Distance(kHandPosition, target_point_);
-
- // Build a beam, originating from the origin.
- SetIdentityM(mat);
-
- // Move the beam half its height so that its end sits on the origin.
- TranslateM(mat, mat, 0.0f, 0.5f, 0.0f);
- ScaleM(mat, mat, kLaserWidth, laser_length, 1);
-
- // Tip back 90 degrees to flat, pointing at the scene.
- const gvr::Quatf q = QuatFromAxisAngle({1.0f, 0.0f, 0.0f}, -M_PI / 2);
- mat = MatrixMul(QuatToMatrix(q), mat);
-
- const gvr::Vec3f beam_direction = {
- target_point_.x - kHandPosition.x,
- target_point_.y - kHandPosition.y,
- target_point_.z - kHandPosition.z
- };
- const gvr::Mat4f beam_direction_mat =
- QuatToMatrix(GetRotationFromZAxis(beam_direction));
-
- // Render multiple faces to make the laser appear cylindrical.
- const int faces = 4;
- for (int i = 0; i < faces; i++) {
- // Rotate around Z.
- const float angle = M_PI * 2 * i / faces;
- const gvr::Quatf rot = QuatFromAxisAngle({0.0f, 0.0f, 1.0f}, angle);
- gvr::Mat4f face_transform = MatrixMul(QuatToMatrix(rot), mat);
-
- // Orient according to target direction.
- face_transform = MatrixMul(beam_direction_mat, face_transform);
-
- // Move the beam origin to the hand.
- TranslateM(face_transform, face_transform, kHandPosition.x, kHandPosition.y,
- kHandPosition.z);
-
- transform = MatrixMul(render_matrix, face_transform);
- vr_shell_renderer_->GetLaserRenderer()->Draw(transform);
+gvr::Sizei VrShell::GetWebVRCompositorSurfaceSize() {
+ GLThread* thread = static_cast<GLThread*>(gl_thread_.get());
+ if (thread->GetVrShellGlUnsafe()) {
+ return thread->GetVrShellGlUnsafe()->GetWebVRCompositorSurfaceSize();
}
+ return gvr::Sizei();
}
-void VrShell::DrawWebVrOnGL() {
- TRACE_EVENT0("gpu", "VrShell::DrawWebVr");
- // Don't need face culling, depth testing, blending, etc. Turn it all off.
- glDisable(GL_CULL_FACE);
- glDepthMask(GL_FALSE);
- glDisable(GL_DEPTH_TEST);
- glDisable(GL_SCISSOR_TEST);
- glDisable(GL_BLEND);
- glDisable(GL_POLYGON_OFFSET_FILL);
-
- glViewport(0, 0, render_size_primary_.width, render_size_primary_.height);
- vr_shell_renderer_->GetWebVrRenderer()->Draw(webvr_texture_id_);
-
- buffer_viewport_list_->SetBufferViewport(GVR_LEFT_EYE,
- *webvr_left_viewport_);
- buffer_viewport_list_->SetBufferViewport(GVR_RIGHT_EYE,
- *webvr_right_viewport_);
+void VrShell::OnTriggerEvent(JNIEnv* env,
+ const JavaParamRef<jobject>& obj) {
+ GLThread* thread = static_cast<GLThread*>(gl_thread_.get());
+ thread->task_runner()->PostTask(FROM_HERE,
+ base::Bind(&VrShellGl::OnTriggerEvent,
+ thread->GetVrShellGl()));
}
-void VrShell::OnTriggerEventOnUI(JNIEnv* env,
- const JavaParamRef<jobject>& obj) {
- // Set a flag to handle this on the render thread at the next frame.
- touch_pending_ = true;
-}
-
-void VrShell::OnPauseOnUI(JNIEnv* env, const JavaParamRef<jobject>& obj) {
- if (gvr_api_ == nullptr)
- return;
-
- // TODO(mthiesse): Clean up threading here.
- controller_->OnPause();
- gvr_api_->PauseTracking();
- SetShowingOverscrollGlowOnUI(true);
+void VrShell::OnPause(JNIEnv* env, const JavaParamRef<jobject>& obj) {
+ GLThread* thread = static_cast<GLThread*>(gl_thread_.get());
+ thread->task_runner()->PostTask(
+ FROM_HERE, base::Bind(&VrShellGl::OnPause, thread->GetVrShellGl()));
// exit vr session
metrics_helper_->SetVRActive(false);
+ SetShowingOverscrollGlow(true);
}
-void VrShell::OnResumeOnUI(JNIEnv* env, const JavaParamRef<jobject>& obj) {
- if (gvr_api_ == nullptr)
- return;
-
- // TODO(mthiesse): Clean up threading here.
- gvr_api_->RefreshViewerProfile();
- gvr_api_->ResumeTracking();
- controller_->OnResume();
- SetShowingOverscrollGlowOnUI(false);
+void VrShell::OnResume(JNIEnv* env, const JavaParamRef<jobject>& obj) {
+ GLThread* thread = static_cast<GLThread*>(gl_thread_.get());
+ thread->task_runner()->PostTask(
+ FROM_HERE, base::Bind(&VrShellGl::OnResume, thread->GetVrShellGl()));
  // enter vr session
metrics_helper_->SetVRActive(true);
+ SetShowingOverscrollGlow(false);
}
-void VrShell::SetShowingOverscrollGlowOnUI(bool showing_glow) {
+void VrShell::SetShowingOverscrollGlow(bool showing_glow) {
main_contents_->GetRenderWidgetHostView()->SetShowingOverscrollGlow(
showing_glow);
}
-base::WeakPtr<VrShell> VrShell::GetWeakPtrOnUI(
+base::WeakPtr<VrShell> VrShell::GetWeakPtr(
const content::WebContents* web_contents) {
// Ensure that the WebContents requesting the VrShell instance is the one
// we created.
@@ -973,13 +246,13 @@ base::WeakPtr<VrShell> VrShell::GetWeakPtrOnUI(
return base::WeakPtr<VrShell>(nullptr);
}
-void VrShell::OnDomContentsLoadedOnUI() {
+void VrShell::OnDomContentsLoaded() {
html_interface_->SetURL(main_contents_->GetVisibleURL());
html_interface_->SetLoading(main_contents_->IsLoading());
html_interface_->OnDomContentsLoaded();
}
-void VrShell::SetWebVrModeOnUI(JNIEnv* env,
+void VrShell::SetWebVrMode(JNIEnv* env,
const base::android::JavaParamRef<jobject>& obj,
bool enabled) {
metrics_helper_->SetWebVREnabled(enabled);
@@ -999,73 +272,82 @@ void VrShell::SubmitWebVRFrame() {}
void VrShell::UpdateWebVRTextureBounds(const gvr::Rectf& left_bounds,
const gvr::Rectf& right_bounds) {
- webvr_left_viewport_->SetSourceUv(left_bounds);
- webvr_right_viewport_->SetSourceUv(right_bounds);
+ GLThread* thread = static_cast<GLThread*>(gl_thread_.get());
+ if (thread->GetVrShellGlUnsafe()) {
+ thread->GetVrShellGlUnsafe()->UpdateWebVRTextureBounds(left_bounds,
+ right_bounds);
+ }
}
+// TODO(mthiesse): Do not expose GVR API outside of GL thread.
+// It's not thread-safe.
gvr::GvrApi* VrShell::gvr_api() {
- return gvr_api_.get();
+ GLThread* thread = static_cast<GLThread*>(gl_thread_.get());
+ if (thread->GetVrShellGlUnsafe()) {
+ return thread->GetVrShellGlUnsafe()->gvr_api();
+ }
+ CHECK(false);
+ return nullptr;
}
-void VrShell::SurfacesChangedOnUI(JNIEnv* env,
- const JavaParamRef<jobject>& object,
- const JavaParamRef<jobject>& content_surface,
- const JavaParamRef<jobject>& ui_surface) {
+void VrShell::SurfacesChanged(jobject content_surface, jobject ui_surface) {
content_compositor_->SurfaceChanged(content_surface);
ui_compositor_->SurfaceChanged(ui_surface);
}
-void VrShell::ContentBoundsChangedOnUI(JNIEnv* env,
+void VrShell::GvrDelegateReady() {
+ delegate_->SetDelegate(weak_ptr_factory_.GetWeakPtr());
+}
+
+void VrShell::ContentBoundsChanged(JNIEnv* env,
const JavaParamRef<jobject>& object,
jint width, jint height, jfloat dpr) {
TRACE_EVENT0("gpu", "VrShell::ContentBoundsChanged");
- content_tex_physical_size_.width = width;
- content_tex_physical_size_.height = height;
- // TODO(mthiesse): Synchronize with GL thread, and update tex css size in
- // response to MainFrameWasResized, not here.
- content_tex_css_width_ = width / dpr;
- content_tex_css_height_ = height / dpr;
-
+ GLThread* thread = static_cast<GLThread*>(gl_thread_.get());
+ // TODO(mthiesse): Remove this blocking wait. Queue up events if thread isn't
+ // finished starting?
+ thread->WaitUntilThreadStarted();
+ CHECK(thread->task_runner()->PostTask(
+ FROM_HERE, base::Bind(&VrShellGl::ContentPhysicalBoundsChanged,
+ thread->GetVrShellGl(),
+ width, height)));
content_compositor_->SetWindowBounds(width, height);
}
-void VrShell::UIBoundsChangedOnUI(JNIEnv* env,
+void VrShell::UIBoundsChanged(JNIEnv* env,
const JavaParamRef<jobject>& object,
jint width, jint height, jfloat dpr) {
+ GLThread* thread = static_cast<GLThread*>(gl_thread_.get());
+ // TODO(mthiesse): Remove this blocking wait. Queue up events if thread isn't
+ // finished starting?
+ thread->WaitUntilThreadStarted();
+ thread->task_runner()->PostTask(
+ FROM_HERE, base::Bind(&VrShellGl::UIPhysicalBoundsChanged,
+ thread->GetVrShellGl(),
+ width, height));
ui_compositor_->SetWindowBounds(width, height);
}
-UiScene* VrShell::GetSceneOnGL() {
- return scene_.get();
+UiScene* VrShell::GetScene() {
+ GLThread* thread = static_cast<GLThread*>(gl_thread_.get());
+ // TODO(mthiesse): Remove this blocking wait. Queue up events if thread isn't
+ // finished starting?
+ thread->WaitUntilThreadStarted();
+ if (thread->GetVrShellGlUnsafe()) {
+ return thread->GetVrShellGlUnsafe()->GetScene();
+ }
+ return nullptr;
}
-UiInterface* VrShell::GetUiInterfaceOnGL() {
+UiInterface* VrShell::GetUiInterface() {
return html_interface_.get();
}
-void VrShell::QueueTaskOnUI(base::Callback<void()>& callback) {
- base::AutoLock lock(task_queue_lock_);
- task_queue_.push(callback);
+void VrShell::QueueTask(base::Callback<void()>& callback) {
+ gl_thread_->task_runner()->PostTask(FROM_HERE, callback);
}
-void VrShell::HandleQueuedTasksOnGL() {
- // To protect a stream of tasks from blocking rendering indefinitely,
- // process only the number of tasks present when first checked.
- std::vector<base::Callback<void()>> tasks;
- {
- base::AutoLock lock(task_queue_lock_);
- const size_t count = task_queue_.size();
- for (size_t i = 0; i < count; i++) {
- tasks.push_back(task_queue_.front());
- task_queue_.pop();
- }
- }
- for (auto &task : tasks) {
- task.Run();
- }
-}
-
-void VrShell::DoUiActionOnUI(const UiAction action) {
+void VrShell::DoUiAction(const UiAction action) {
content::NavigationController& controller = main_contents_->GetController();
switch (action) {
case HISTORY_BACK:
@@ -1106,60 +388,83 @@ void VrShell::RenderViewHostChanged(content::RenderViewHost* old_host,
void VrShell::MainFrameWasResized(bool width_changed) {
display::Display display = display::Screen::GetScreen()
->GetDisplayNearestWindow(ui_contents_->GetNativeView());
- // TODO(mthiesse): Synchronize with GL thread.
- ui_tex_css_width_ = display.size().width();
- ui_tex_css_height_ = display.size().height();
+ GLThread* thread = static_cast<GLThread*>(gl_thread_.get());
+ // TODO(mthiesse): Remove this blocking wait. Queue up events if thread isn't
+ // finished starting?
+ thread->WaitUntilThreadStarted();
+ thread->task_runner()->PostTask(
+ FROM_HERE, base::Bind(&VrShellGl::UIBoundsChanged,
+ thread->GetVrShellGl(),
+ display.size().width(), display.size().height()));
+}
+
+void VrShell::ContentFrameWasResized(bool width_changed) {
+ display::Display display = display::Screen::GetScreen()
+ ->GetDisplayNearestWindow(main_contents_->GetNativeView());
+ GLThread* thread = static_cast<GLThread*>(gl_thread_.get());
+ // TODO(mthiesse): Remove this blocking wait. Queue up events if thread isn't
+ // finished starting?
+ thread->WaitUntilThreadStarted();
+ thread->task_runner()->PostTask(
+ FROM_HERE, base::Bind(&VrShellGl::ContentBoundsChanged,
+ thread->GetVrShellGl(),
+ display.size().width(), display.size().height()));
}
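
An aside on the "Queue up events" TODO repeated above: a minimal sketch (editor's, not in this CL) of one way to drop the WaitUntilThreadStarted() calls. The names PostToGlThread, OnGlThreadReady, gl_thread_ready_ (a bool) and pending_gl_tasks_ (a std::vector<base::Closure>) are hypothetical; GLThread::Init() would post OnGlThreadReady back through main_thread_task_runner_ once VrShellGl exists.

// Hypothetical helper on VrShell, called on the UI thread only.
void VrShell::PostToGlThread(const base::Closure& task) {
  if (!gl_thread_ready_) {
    // GL thread not ready yet: queue instead of blocking the UI thread.
    pending_gl_tasks_.push_back(task);
    return;
  }
  gl_thread_->task_runner()->PostTask(FROM_HERE, task);
}

// Hypothetical callback, posted from GLThread::Init() via
// main_thread_task_runner_ once VrShellGl has been created.
void VrShell::OnGlThreadReady() {
  gl_thread_ready_ = true;
  for (const auto& task : pending_gl_tasks_)
    gl_thread_->task_runner()->PostTask(FROM_HERE, task);
  pending_gl_tasks_.clear();
}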
void VrShell::WebContentsDestroyed() {
ui_input_manager_.reset();
ui_contents_ = nullptr;
// TODO(mthiesse): Handle web contents being destroyed.
- delegate_->ForceExitVr();
+ ForceExitVR();
}
-void VrShell::ContentWebContentsDestroyedOnUI() {
+void VrShell::ContentWebContentsDestroyed() {
content_input_manager_.reset();
main_contents_ = nullptr;
// TODO(mthiesse): Handle web contents being destroyed.
- delegate_->ForceExitVr();
+ ForceExitVR();
}
-void VrShell::ContentWasHiddenOnUI() {
+void VrShell::ContentWasHidden() {
// Ensure we don't continue sending input to it.
content_input_manager_.reset();
// TODO(mthiesse): Handle web contents being hidden.
+ ForceExitVR();
+}
+
+void VrShell::ForceExitVR() {
bshe 2016/12/12 20:30:20 nit: VR/Vr to be consistent
delegate_->ForceExitVr();
}
-void VrShell::SetContentCssSizeOnUI(float width, float height, float dpr) {
+void VrShell::SetContentCssSize(float width, float height, float dpr) {
JNIEnv* env = base::android::AttachCurrentThread();
- Java_VrShellImpl_setContentCssSizeOnUI(env, j_vr_shell_.obj(), width, height,
- dpr);
+ Java_VrShellImpl_setContentCssSize(env, j_vr_shell_.obj(), width, height,
+ dpr);
}
-void VrShell::SetUiCssSizeOnUI(float width, float height, float dpr) {
+void VrShell::SetUiCssSize(float width, float height, float dpr) {
JNIEnv* env = base::android::AttachCurrentThread();
- Java_VrShellImpl_setUiCssSizeOnUI(env, j_vr_shell_.obj(), width, height, dpr);
+ Java_VrShellImpl_setUiCssSize(env, j_vr_shell_.obj(), width, height, dpr);
}
// ----------------------------------------------------------------------------
// Native JNI methods
// ----------------------------------------------------------------------------
-jlong InitOnUI(JNIEnv* env,
- const JavaParamRef<jobject>& obj,
- const JavaParamRef<jobject>& content_web_contents,
- jlong content_window_android,
- const JavaParamRef<jobject>& ui_web_contents,
- jlong ui_window_android,
- jboolean for_web_vr) {
+jlong Init(JNIEnv* env, const JavaParamRef<jobject>& obj,
+ const JavaParamRef<jobject>& content_web_contents,
+ jlong content_window_android,
+ const JavaParamRef<jobject>& ui_web_contents,
+ jlong ui_window_android, jboolean for_web_vr,
+ const base::android::JavaParamRef<jobject>& delegate,
+ jlong gvr_api) {
return reinterpret_cast<intptr_t>(new VrShell(
env, obj, content::WebContents::FromJavaWebContents(content_web_contents),
reinterpret_cast<ui::WindowAndroid*>(content_window_android),
content::WebContents::FromJavaWebContents(ui_web_contents),
reinterpret_cast<ui::WindowAndroid*>(ui_window_android),
- for_web_vr));
+ for_web_vr, VrShellDelegate::GetNativeDelegate(env, delegate),
+ reinterpret_cast<gvr_context*>(gvr_api)));
}
} // namespace vr_shell
