OLD | NEW |
1 // Copyright 2016 The Chromium Authors. All rights reserved. | 1 // Copyright 2016 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "chrome/browser/android/vr_shell/vr_shell.h" | 5 #include "chrome/browser/android/vr_shell/vr_shell.h" |
6 | 6 |
7 #include "chrome/browser/android/vr_shell/ui_scene.h" | 7 #include "chrome/browser/android/vr_shell/ui_scene.h" |
8 #include "chrome/browser/android/vr_shell/vr_compositor.h" | 8 #include "chrome/browser/android/vr_shell/vr_compositor.h" |
9 #include "chrome/browser/android/vr_shell/vr_controller.h" | 9 #include "chrome/browser/android/vr_shell/vr_controller.h" |
10 #include "chrome/browser/android/vr_shell/vr_gl_util.h" | 10 #include "chrome/browser/android/vr_shell/vr_gl_util.h" |
(...skipping 138 matching lines...)
149 float screen_height = kScreenHeightRatio * desktop_height_; | 149 float screen_height = kScreenHeightRatio * desktop_height_; |
150 std::unique_ptr<ContentRectangle> rect(new ContentRectangle()); | 150 std::unique_ptr<ContentRectangle> rect(new ContentRectangle()); |
151 rect->id = kBrowserUiElementId; | 151 rect->id = kBrowserUiElementId; |
152 rect->size = {screen_width, screen_height, 1.0f}; | 152 rect->size = {screen_width, screen_height, 1.0f}; |
153 rect->translation = kDesktopPositionDefault; | 153 rect->translation = kDesktopPositionDefault; |
154 scene_.AddUiElement(rect); | 154 scene_.AddUiElement(rect); |
155 | 155 |
156 desktop_plane_ = scene_.GetUiElementById(kBrowserUiElementId); | 156 desktop_plane_ = scene_.GetUiElementById(kBrowserUiElementId); |
157 | 157 |
158 LoadUIContent(); | 158 LoadUIContent(); |
| 159 |
| 160 gvr::Mat4f identity; |
| 161 SetIdentityM(identity); |
| 162 webvr_head_pose_.resize(kPoseRingBufferSize, identity); |
159 } | 163 } |
160 | 164 |
161 void VrShell::UpdateCompositorLayers(JNIEnv* env, | 165 void VrShell::UpdateCompositorLayers(JNIEnv* env, |
162 const JavaParamRef<jobject>& obj) { | 166 const JavaParamRef<jobject>& obj) { |
163 content_compositor_->SetLayer(content_cvc_); | 167 content_compositor_->SetLayer(content_cvc_); |
164 ui_compositor_->SetLayer(ui_cvc_); | 168 ui_compositor_->SetLayer(ui_cvc_); |
165 } | 169 } |
166 | 170 |
167 void VrShell::Destroy(JNIEnv* env, const JavaParamRef<jobject>& obj) { | 171 void VrShell::Destroy(JNIEnv* env, const JavaParamRef<jobject>& obj) { |
168 delete this; | 172 delete this; |
(...skipping 170 matching lines...)
339 | 343 |
340 if (original_type == WebInputEvent::GestureTap || touch_pending_) { | 344 if (original_type == WebInputEvent::GestureTap || touch_pending_) { |
341 touch_pending_ = false; | 345 touch_pending_ = false; |
342 gesture->type = WebInputEvent::GestureTap; | 346 gesture->type = WebInputEvent::GestureTap; |
343 gesture->details.buttons.pos.x = pixel_x; | 347 gesture->details.buttons.pos.x = pixel_x; |
344 gesture->details.buttons.pos.y = pixel_y; | 348 gesture->details.buttons.pos.y = pixel_y; |
345 current_input_target_->ProcessUpdatedGesture(*gesture.get()); | 349 current_input_target_->ProcessUpdatedGesture(*gesture.get()); |
346 } | 350 } |
347 } | 351 } |
348 | 352 |
| 353 void VrShell::SetGvrPoseForWebVr(const gvr::Mat4f& pose, uint32_t pose_num) { |
| 354 webvr_head_pose_[pose_num % kPoseRingBufferSize] = pose; |
| 355 } |
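For context on SetGvrPoseForWebVr() and the buffer it shares with the
constructor: the WebVR page tags each frame with a pose number, VrShell
records the head pose under that number, and DrawFrame() later fetches it
back by the same index. A self-contained sketch of that bookkeeping,
assuming a stand-in matrix type and buffer size (not the real constants):

#include <array>
#include <cstdint>

struct Mat4f { float m[4][4]; };             // stand-in for gvr::Mat4f
constexpr uint32_t kPoseRingBufferSize = 8;  // assumed size

class PoseRingBuffer {
 public:
  PoseRingBuffer() {
    Mat4f identity = {};
    for (int i = 0; i < 4; ++i)
      identity.m[i][i] = 1.0f;
    poses_.fill(identity);  // mirrors the identity init in the constructor
  }
  // Store the pose the WebVR page used for frame |pose_num|.
  void Set(uint32_t pose_num, const Mat4f& pose) {
    poses_[pose_num % kPoseRingBufferSize] = pose;
  }
  // Fetch it back once the frame surfaces in the compositor.
  const Mat4f& Get(uint32_t pose_num) const {
    return poses_[pose_num % kPoseRingBufferSize];
  }

 private:
  std::array<Mat4f, kPoseRingBufferSize> poses_;
};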
| 356 |
| 357 uint32_t GetPixelEncodedPoseIndex() { |
| 358 // Read the pose index, encoded as color values in the bottom-left pixel. |
| 359 // See also third_party/WebKit/Source/modules/vr/VRDisplay.cpp which |
| 360 // encodes the pose index, and device/vr/android/gvr/gvr_device.cc |
| 361 // which tracks poses. |
| 362 uint8_t pixels[4]; |
| 363 // Assume we're reading from the framebuffer we just wrote to. |
| 364 // That's true currently; if the rendering setup changes in the |
| 365 // future, we may need to use glReadBuffer(GL_BACK) or equivalent. |
| 366 glReadPixels(0, 0, 1, 1, GL_RGBA, GL_UNSIGNED_BYTE, pixels); |
| 367 return pixels[0] | (pixels[1] << 8) | (pixels[2] << 16); |
| 368 } |
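The decode above only works if the page packed the index the same way.
The real encoder lives in VRDisplay.cpp, as the comment notes; the sketch
below shows only the byte packing being assumed here and its mirror-image
decode:

#include <cstdint>

// Pack a pose index into one RGBA pixel (alpha unused by the decoder).
void EncodePoseIndex(uint32_t index, uint8_t pixel[4]) {
  pixel[0] = index & 0xFF;          // R: low byte
  pixel[1] = (index >> 8) & 0xFF;   // G
  pixel[2] = (index >> 16) & 0xFF;  // B
  pixel[3] = 0xFF;                  // A: opaque
}

// Mirror of the glReadPixels() decode in GetPixelEncodedPoseIndex().
uint32_t DecodePoseIndex(const uint8_t pixel[4]) {
  return pixel[0] | (pixel[1] << 8) | (pixel[2] << 16);
}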
| 369 |
349 void VrShell::DrawFrame(JNIEnv* env, const JavaParamRef<jobject>& obj) { | 370 void VrShell::DrawFrame(JNIEnv* env, const JavaParamRef<jobject>& obj) { |
350 buffer_viewport_list_->SetToRecommendedBufferViewports(); | 371 buffer_viewport_list_->SetToRecommendedBufferViewports(); |
351 | 372 |
352 gvr::Frame frame = swap_chain_->AcquireFrame(); | 373 gvr::Frame frame = swap_chain_->AcquireFrame(); |
353 gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow(); | 374 gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow(); |
354 target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos; | 375 target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos; |
355 | 376 |
356 gvr::Mat4f head_pose = | 377 gvr::Mat4f head_pose = |
357 gvr_api_->GetHeadSpaceFromStartSpaceRotation(target_time); | 378 gvr_api_->GetHeadSpaceFromStartSpaceRotation(target_time); |
358 | 379 |
359 gvr::Vec3f position = GetTranslation(head_pose); | 380 gvr::Vec3f position = GetTranslation(head_pose); |
360 if (position.x == 0.0f && position.y == 0.0f && position.z == 0.0f) { | 381 if (position.x == 0.0f && position.y == 0.0f && position.z == 0.0f) { |
361 // This appears to be a 3DOF pose without a neck model. Add one. | 382 // This appears to be a 3DOF pose without a neck model. Add one. |
362 // The head pose has redundant data. Assume we're only using the | 383 // The head pose has redundant data. Assume we're only using the |
363 // object_from_reference_matrix; we're not updating position_external. | 384 // object_from_reference_matrix; we're not updating position_external. |
364 // TODO: Not sure what object_from_reference_matrix is. The new API | 385 // TODO: Not sure what object_from_reference_matrix is. The new API |
365 // removed it; so far, dropping it seems to work fine. | 386 // removed it; so far, dropping it seems to work fine. |
366 gvr_api_->ApplyNeckModel(head_pose, 1.0f); | 387 gvr_api_->ApplyNeckModel(head_pose, 1.0f); |
367 } | 388 } |
368 | 389 |
369 // Bind back to the default framebuffer. | 390 // Bind back to the default framebuffer. |
370 frame.BindBuffer(0); | 391 frame.BindBuffer(0); |
371 | 392 |
372 if (webvr_mode_) { | 393 if (webvr_mode_) { |
373 DrawWebVr(); | 394 DrawWebVr(); |
374 if (!webvr_secure_origin_) { | 395 if (!webvr_secure_origin_) { |
375 DrawWebVrOverlay(target_time.monotonic_system_time_nanos); | 396 DrawWebVrOverlay(target_time.monotonic_system_time_nanos); |
376 } | 397 } |
| 398 |
| 399 // When using async reprojection, we need to know which pose was used in |
| 400 // the WebVR app for drawing this frame. Due to unknown amounts of |
| 401 // buffering in the compositor and SurfaceTexture, we read the pose number |
| 402 // from a corner pixel. There's no point in doing this for legacy |
| 403 // distortion rendering since that doesn't need a pose, and reading back |
| 404 // pixels is an expensive operation. TODO(klausw): stop doing this once we |
| 405 // have working no-compositor rendering for WebVR. |
| 406 if (gvr_api_->GetAsyncReprojectionEnabled()) { |
| 407 uint32_t webvr_pose_frame = GetPixelEncodedPoseIndex(); |
| 408 head_pose = webvr_head_pose_[webvr_pose_frame % kPoseRingBufferSize]; |
| 409 } |
377 } else { | 410 } else { |
378 DrawVrShell(head_pose); | 411 DrawVrShell(head_pose); |
379 } | 412 } |
380 | 413 |
381 frame.Unbind(); | 414 frame.Unbind(); |
382 frame.Submit(*buffer_viewport_list_, head_pose); | 415 frame.Submit(*buffer_viewport_list_, head_pose); |
383 } | 416 } |
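A short illustration of the neck-model branch in DrawFrame(): a 3DOF pose
carries rotation only, so a neck model synthesizes a plausible head
translation by rotating a fixed neck-to-eye offset. A minimal sketch with
assumed pivot values; gvr_api_->ApplyNeckModel() is the real
implementation:

struct Vec3f { float x, y, z; };

// Assume the head pivots about a neck point a few centimeters below and
// behind the eyes (values are illustrative, not gvr's constants).
constexpr float kNeckVerticalOffset = 0.075f;  // meters, assumed
constexpr float kNeckDepthOffset = 0.080f;     // meters, assumed

// |rotation| is the row-major 3x3 rotation orienting the head in start
// space. Rotating the fixed offset yields a small synthetic translation;
// subtracting the rest-pose offset keeps the neutral, forward-facing
// pose centered at the origin.
Vec3f NeckModelPosition(const float rotation[3][3]) {
  const float offset[3] = {0.0f, kNeckVerticalOffset, kNeckDepthOffset};
  float rotated[3];
  for (int i = 0; i < 3; ++i) {
    rotated[i] = rotation[i][0] * offset[0] + rotation[i][1] * offset[1] +
                 rotation[i][2] * offset[2];
  }
  return {rotated[0], rotated[1] - offset[1], rotated[2] - offset[2]};
}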
384 | 417 |
385 void VrShell::DrawVrShell(const gvr::Mat4f& head_pose) { | 418 void VrShell::DrawVrShell(const gvr::Mat4f& head_pose) { |
386 float screen_tilt = desktop_screen_tilt_ * M_PI / 180.0f; | 419 float screen_tilt = desktop_screen_tilt_ * M_PI / 180.0f; |
(...skipping 405 matching lines...)
792 content::WebContents::FromJavaWebContents(content_web_contents)); | 825 content::WebContents::FromJavaWebContents(content_web_contents)); |
793 content::ContentViewCore* ui_core = content::ContentViewCore::FromWebContents( | 826 content::ContentViewCore* ui_core = content::ContentViewCore::FromWebContents( |
794 content::WebContents::FromJavaWebContents(ui_web_contents)); | 827 content::WebContents::FromJavaWebContents(ui_web_contents)); |
795 return reinterpret_cast<intptr_t>(new VrShell( | 828 return reinterpret_cast<intptr_t>(new VrShell( |
796 env, obj, c_core, | 829 env, obj, c_core, |
797 reinterpret_cast<ui::WindowAndroid*>(content_window_android), ui_core, | 830 reinterpret_cast<ui::WindowAndroid*>(content_window_android), ui_core, |
798 reinterpret_cast<ui::WindowAndroid*>(ui_window_android))); | 831 reinterpret_cast<ui::WindowAndroid*>(ui_window_android))); |
799 } | 832 } |
800 | 833 |
801 } // namespace vr_shell | 834 } // namespace vr_shell |