Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 // Copyright 2016 The Chromium Authors. All rights reserved. | 1 // Copyright 2016 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "chrome/browser/android/vr_shell/vr_shell.h" | 5 #include "chrome/browser/android/vr_shell/vr_shell.h" |
| 6 | 6 |
| 7 #include <thread> | 7 #include <thread> |
| 8 | 8 |
| 9 #include "chrome/browser/android/vr_shell/ui_scene.h" | 9 #include "chrome/browser/android/vr_shell/ui_scene.h" |
| 10 #include "chrome/browser/android/vr_shell/vr_compositor.h" | 10 #include "chrome/browser/android/vr_shell/vr_compositor.h" |
| (...skipping 258 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 269 bool is_inside = x >= 0.0f && x < 1.0f && y >= 0.0f && y < 1.0f; | 269 bool is_inside = x >= 0.0f && x < 1.0f && y >= 0.0f && y < 1.0f; |
| 270 if (is_inside) { | 270 if (is_inside) { |
| 271 closest_element = distance_to_plane; | 271 closest_element = distance_to_plane; |
| 272 target_point_ = plane_intersection_point; | 272 target_point_ = plane_intersection_point; |
| 273 target_element_ = &plane; | 273 target_element_ = &plane; |
| 274 } | 274 } |
| 275 } | 275 } |
| 276 } | 276 } |
| 277 } | 277 } |
| 278 | 278 |
| 279 void VrShell::SetGvrPoseForWebVr(gvr::Mat4f pose, uint32_t pose_num) { | |
|
dcheng
2016/10/04 23:17:53
Nit: pass |pose| by const ref
klausw
2016/10/04 23:56:42
Done.
| |
| 280 webvr_head_pose_[pose_num % POSE_QUEUE_SIZE] = pose; | |
| 281 webvr_newest_pose_num_ = pose_num; | |
| 282 } | |
| 283 | |
| 284 int32_t GetPixelEncodedFrameNumber() { | |
|
dcheng
2016/10/04 23:17:53
Let's be uniform with our types through (uint32 ev
klausw
2016/10/04 23:56:42
Done, using pose index consistently.
| |
| 285 // Read the frame number encoded in a bottom left pixel as color values. | |
| 286 // See also third_party/WebKit/Source/modules/vr/VRDisplay.cpp which | |
| 287 // encodes the frame number, and device/vr/android/gvr/gvr_device.cc | |
| 288 // which tracks poses. | |
| 289 uint8_t pixels[4]; | |
| 290 // Assume we're reading from the framebuffer we just wrote to. | |
| 291 // That's true currently, we may need to use glReadBuffer(GL_BACK) | |
| 292 // or equivalent if the rendering setup changes in the future. | |
| 293 glReadPixels(0, 0, 1, 1, GL_RGBA, GL_UNSIGNED_BYTE, pixels); | |
| 294 return pixels[0] | (pixels[1] << 8) | (pixels[2] << 16); | |
|
mthiesse
2016/10/04 21:19:16
Optionally, you really only need 3 bits, so you co
klausw
2016/10/04 23:08:14
Not sure that would make a big difference - for no
| |
| 295 } | |
| 296 | |
| 279 void VrShell::DrawFrame(JNIEnv* env, const JavaParamRef<jobject>& obj) { | 297 void VrShell::DrawFrame(JNIEnv* env, const JavaParamRef<jobject>& obj) { |
| 280 buffer_viewport_list_->SetToRecommendedBufferViewports(); | 298 buffer_viewport_list_->SetToRecommendedBufferViewports(); |
| 281 | 299 |
| 282 gvr::Frame frame = swap_chain_->AcquireFrame(); | 300 gvr::Frame frame = swap_chain_->AcquireFrame(); |
| 283 gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow(); | 301 gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow(); |
| 284 target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos; | 302 target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos; |
| 285 gvr::Mat4f head_pose = gvr_api_->GetHeadSpaceFromStartSpaceRotation(target_time); | 303 gvr::Mat4f head_pose = gvr_api_->GetHeadSpaceFromStartSpaceRotation(target_time); |
| 286 head_pose = gvr_api_->ApplyNeckModel(head_pose, 1.0f); | 304 head_pose = gvr_api_->ApplyNeckModel(head_pose, 1.0f); |
| 287 | 305 |
| 288 // Bind back to the default framebuffer. | 306 // Bind back to the default framebuffer. |
| 289 frame.BindBuffer(0); | 307 frame.BindBuffer(0); |
| 290 | 308 |
| 291 if (webvr_mode_) { | 309 if (webvr_mode_) { |
| 292 DrawWebVr(); | 310 DrawWebVr(); |
| 293 if (!webvr_secure_origin_) { | 311 if (!webvr_secure_origin_) { |
| 294 DrawWebVrOverlay(target_time.monotonic_system_time_nanos); | 312 DrawWebVrOverlay(target_time.monotonic_system_time_nanos); |
| 295 } | 313 } |
| 314 | |
| 315 // When using async reprojection, we need to know which pose was used in | |
| 316 // the WebVR app for drawing this frame. Due to unknown amounts of | |
| 317 // buffering in the compositor and SurfaceTexture, we read the pose number | |
| 318 // from a corner pixel. There's no point in doing this for legacy | |
| 319 // distortion rendering since that doesn't need a pose, and reading back | |
| 320 // pixels is an expensive operation. TODO(klausw): stop doing this once we | |
| 321 // have working no-compositor rendering for WebVR. | |
| 322 if (gvr_api_->GetAsyncReprojectionEnabled()) { | |
| 323 int32_t webvr_pose_frame = GetPixelEncodedFrameNumber(); | |
| 324 // LOG << "klausw: newest_pose=" << webvr_newest_pose_num_ << | |
|
mthiesse
2016/10/04 21:19:16
Remove this
klausw
2016/10/04 23:08:14
Done.
| |
| 325 // " pixel=" << webvr_pose_frame << | |
| 326 // "(" << webvr_pose_frame - webvr_newest_pose_num_ << ")"; | |
| 327 head_pose = webvr_head_pose_[webvr_pose_frame % POSE_QUEUE_SIZE]; | |
| 328 } | |
| 296 } else { | 329 } else { |
| 297 DrawVrShell(head_pose); | 330 DrawVrShell(head_pose); |
| 298 } | 331 } |
| 299 | 332 |
| 300 frame.Unbind(); | 333 frame.Unbind(); |
| 301 frame.Submit(*buffer_viewport_list_, head_pose); | 334 frame.Submit(*buffer_viewport_list_, head_pose); |
| 302 } | 335 } |
| 303 | 336 |
| 304 void VrShell::DrawVrShell(const gvr::Mat4f& head_pose) { | 337 void VrShell::DrawVrShell(const gvr::Mat4f& head_pose) { |
| 305 float screen_tilt = desktop_screen_tilt_ * M_PI / 180.0f; | 338 float screen_tilt = desktop_screen_tilt_ * M_PI / 180.0f; |
| (...skipping 368 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 674 content::WebContents::FromJavaWebContents(content_web_contents)); | 707 content::WebContents::FromJavaWebContents(content_web_contents)); |
| 675 content::ContentViewCore* ui_core = content::ContentViewCore::FromWebContents( | 708 content::ContentViewCore* ui_core = content::ContentViewCore::FromWebContents( |
| 676 content::WebContents::FromJavaWebContents(ui_web_contents)); | 709 content::WebContents::FromJavaWebContents(ui_web_contents)); |
| 677 return reinterpret_cast<intptr_t>(new VrShell( | 710 return reinterpret_cast<intptr_t>(new VrShell( |
| 678 env, obj, c_core, | 711 env, obj, c_core, |
| 679 reinterpret_cast<ui::WindowAndroid*>(content_window_android), ui_core, | 712 reinterpret_cast<ui::WindowAndroid*>(content_window_android), ui_core, |
| 680 reinterpret_cast<ui::WindowAndroid*>(ui_window_android))); | 713 reinterpret_cast<ui::WindowAndroid*>(ui_window_android))); |
| 681 } | 714 } |
| 682 | 715 |
| 683 } // namespace vr_shell | 716 } // namespace vr_shell |
| OLD | NEW |