Chromium Code Reviews

Index: chrome/browser/android/vr_shell/vr_shell.cc
diff --git a/chrome/browser/android/vr_shell/vr_shell.cc b/chrome/browser/android/vr_shell/vr_shell.cc
index cbc58091fe6421241007f0f10fd073949453836f..4299b38c6c5d2ceaf594d235f54e2bc81906f12d 100644
--- a/chrome/browser/android/vr_shell/vr_shell.cc
+++ b/chrome/browser/android/vr_shell/vr_shell.cc
@@ -56,7 +56,8 @@ void ContentRect::Translate(float x, float y, float z) {
   transfrom_to_world.m[2][3] += z;
 }
-VrShell::VrShell(JNIEnv* env, jobject obj) {
+VrShell::VrShell(JNIEnv* env, jobject obj) :
+    webvr_mode_(false) {
   j_vr_shell_.Reset(env, obj);
 }
@@ -69,13 +70,17 @@ bool RegisterVrShell(JNIEnv* env) {
   return RegisterNativesImpl(env);
 }
-VrShell::~VrShell() {}
+VrShell::~VrShell() {
+  device::GvrDelegateManager::GetInstance()->Shutdown();
+}
 void VrShell::GvrInit(JNIEnv* env,
                       const base::android::JavaParamRef<jobject>& obj,
                       jlong native_gvr_api) {
   gvr_api_ =
       gvr::GvrApi::WrapNonOwned(reinterpret_cast<gvr_context*>(native_gvr_api));
+
+  device::GvrDelegateManager::GetInstance()->Initialize(this);
 }
 void VrShell::InitializeGl(JNIEnv* env,
@@ -101,43 +106,58 @@ void VrShell::DrawFrame(JNIEnv* env,
                         const base::android::JavaParamRef<jobject>& obj) {
   buffer_viewport_list_->SetToRecommendedBufferViewports();
   gvr::Frame frame = swap_chain_->AcquireFrame();
-  gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow();
-  target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos;
-  head_pose_ = gvr_api_->GetHeadPoseInStartSpace(target_time);
-
-  // Content area positioning.
-  content_rect_->SetIdentity();
-  content_rect_->Translate(kContentRectPositionDefault.x,
-                           kContentRectPositionDefault.y,
-                           kContentRectPositionDefault.z);
-
-  gvr::Mat4f left_eye_view_matrix =
-      MatrixMul(gvr_api_->GetEyeFromHeadMatrix(GVR_LEFT_EYE), head_pose_);
-  gvr::Mat4f right_eye_view_matrix =
-      MatrixMul(gvr_api_->GetEyeFromHeadMatrix(GVR_RIGHT_EYE), head_pose_);
   // Bind back to the default framebuffer.
   frame.BindBuffer(0);
-  // Use culling to remove back faces.
-  glEnable(GL_CULL_FACE);
+  if (webvr_mode_) {
+    // Don't need face culling, depth testing, blending, etc. Turn it all off.
+    glDisable(GL_CULL_FACE);
+    glDepthMask(GL_FALSE);
+    glDisable(GL_DEPTH_TEST);
+    glDisable(GL_SCISSOR_TEST);
+    glDisable(GL_BLEND);
+    glDisable(GL_POLYGON_OFFSET_FILL);
+
+    // Don't need to clear, since we're drawing over the entire render target.
+
+    DrawWebVR();
+  } else {
+    gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow();
[Inline comment from mthiesse, 2016/09/04 16:04:03]
Can you move this branch out to a DrawVRShell() function?
+    target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos;
+    head_pose_ = gvr_api_->GetHeadPoseInStartSpace(target_time);
+
+    // Content area positioning.
+    content_rect_->SetIdentity();
+    content_rect_->Translate(kContentRectPositionDefault.x,
+                             kContentRectPositionDefault.y,
+                             kContentRectPositionDefault.z);
+
+    gvr::Mat4f left_eye_view_matrix =
+        MatrixMul(gvr_api_->GetEyeFromHeadMatrix(GVR_LEFT_EYE), head_pose_);
+    gvr::Mat4f right_eye_view_matrix =
+        MatrixMul(gvr_api_->GetEyeFromHeadMatrix(GVR_RIGHT_EYE), head_pose_);
+
+    // Use culling to remove back faces.
+    glEnable(GL_CULL_FACE);
-  // Enable depth testing.
-  glEnable(GL_DEPTH_TEST);
-  glEnable(GL_SCISSOR_TEST);
+    // Enable depth testing.
+    glEnable(GL_DEPTH_TEST);
+    glEnable(GL_SCISSOR_TEST);
-  glClearColor(0.1f, 0.1f, 0.1f, 1.0f);
+    glClearColor(0.1f, 0.1f, 0.1f, 1.0f);
-  // Enable transparency.
-  glEnable(GL_BLEND);
-  glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
+    // Enable transparency.
+    glEnable(GL_BLEND);
+    glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
-  buffer_viewport_list_->GetBufferViewport(GVR_LEFT_EYE,
-                                           buffer_viewport_.get());
-  DrawEye(left_eye_view_matrix, *buffer_viewport_);
-  buffer_viewport_list_->GetBufferViewport(GVR_RIGHT_EYE,
-                                           buffer_viewport_.get());
-  DrawEye(right_eye_view_matrix, *buffer_viewport_);
+    buffer_viewport_list_->GetBufferViewport(GVR_LEFT_EYE,
+                                             buffer_viewport_.get());
+    DrawEye(left_eye_view_matrix, *buffer_viewport_);
+    buffer_viewport_list_->GetBufferViewport(GVR_RIGHT_EYE,
+                                             buffer_viewport_.get());
+    DrawEye(right_eye_view_matrix, *buffer_viewport_);
+  }
   frame.Unbind();
   frame.Submit(*buffer_viewport_list_, head_pose_);
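
A minimal sketch of the refactoring mthiesse asks for above, reusing only the members and helpers already visible in this diff (head_pose_, content_rect_, buffer_viewport_list_, DrawEye(), MatrixMul()); the DrawVRShell() name comes from the comment, and the exact signature is an assumption:

// Hypothetical extraction of the non-WebVR branch of DrawFrame().
// Assumes it runs after frame.BindBuffer(0) and before frame.Unbind().
void VrShell::DrawVRShell() {
  gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow();
  target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos;
  head_pose_ = gvr_api_->GetHeadPoseInStartSpace(target_time);

  // Content area positioning.
  content_rect_->SetIdentity();
  content_rect_->Translate(kContentRectPositionDefault.x,
                           kContentRectPositionDefault.y,
                           kContentRectPositionDefault.z);

  gvr::Mat4f left_eye_view_matrix =
      MatrixMul(gvr_api_->GetEyeFromHeadMatrix(GVR_LEFT_EYE), head_pose_);
  gvr::Mat4f right_eye_view_matrix =
      MatrixMul(gvr_api_->GetEyeFromHeadMatrix(GVR_RIGHT_EYE), head_pose_);

  // Re-enable the GL state the WebVR branch turns off: back-face culling,
  // depth and scissor testing, and blending for transparency.
  glEnable(GL_CULL_FACE);
  glEnable(GL_DEPTH_TEST);
  glEnable(GL_SCISSOR_TEST);
  glClearColor(0.1f, 0.1f, 0.1f, 1.0f);
  glEnable(GL_BLEND);
  glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);

  buffer_viewport_list_->GetBufferViewport(GVR_LEFT_EYE,
                                           buffer_viewport_.get());
  DrawEye(left_eye_view_matrix, *buffer_viewport_);
  buffer_viewport_list_->GetBufferViewport(GVR_RIGHT_EYE,
                                           buffer_viewport_.get());
  DrawEye(right_eye_view_matrix, *buffer_viewport_);
}

With that in place, DrawFrame() would only acquire the frame, bind buffer 0, branch on webvr_mode_ to call either DrawWebVR() or DrawVRShell(), then Unbind() and Submit().
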
@@ -172,6 +192,12 @@ void VrShell::DrawContentRect() {
       content_rect_->content_texture_handle, content_rect_combined_matrix);
 }
+void VrShell::DrawWebVR() {
+  glViewport(0, 0, render_size_.width, render_size_.height);
+  vr_shell_renderer_->GetWebVrRenderer()->Draw(
+      reinterpret_cast<int>(content_rect_->content_texture_handle));
+}
+
 void VrShell::OnPause(JNIEnv* env,
                       const base::android::JavaParamRef<jobject>& obj) {
   if (gvr_api_ == nullptr)
@@ -187,6 +213,27 @@ void VrShell::OnResume(JNIEnv* env,
   gvr_api_->ResumeTracking();
 }
+void VrShell::RequestWebVRPresent() {
+  webvr_mode_ = true;
+}
+
+void VrShell::ExitWebVRPresent() {
+  webvr_mode_ = false;
+}
+
+void VrShell::SubmitWebVRFrame() {
+}
+
+void VrShell::UpdateWebVRTextureBounds(
+    int eye, float left, float top, float width, float height) {
+  gvr::Rectf bounds = { left, top, width, height };
+  vr_shell_renderer_->GetWebVrRenderer()->UpdateTextureBounds(eye, bounds);
+}
+
+gvr::GvrApi* VrShell::gvr_api() {
+  return gvr_api_.get();
+}
+
 // ----------------------------------------------------------------------------
 // Native JNI methods
 // ----------------------------------------------------------------------------
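
For orientation only, a hedged sketch of how the entry points added in this patch might be driven. The caller functions shown here are hypothetical and not part of this CL, and the half-texture bounds are just the conventional WebVR left-eye/right-eye split expressed as left/top/width/height fractions of the texture:

// Hypothetical driver code (not in this CL): enter WebVR presentation so
// DrawFrame() takes the webvr_mode_ branch, describe which sub-rectangle of
// the shared texture each eye samples, then leave presentation again.
void EnterWebVrPresentation(VrShell* shell) {
  shell->RequestWebVRPresent();
  // eye 0 = left, eye 1 = right; bounds match the (left, top, width, height)
  // parameters of UpdateWebVRTextureBounds().
  shell->UpdateWebVRTextureBounds(0, 0.0f, 0.0f, 0.5f, 1.0f);
  shell->UpdateWebVRTextureBounds(1, 0.5f, 0.0f, 0.5f, 1.0f);
}

void ExitWebVrPresentation(VrShell* shell) {
  shell->ExitWebVRPresent();  // DrawFrame() returns to the VR shell UI path.
}
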