Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 // Copyright 2016 The Chromium Authors. All rights reserved. | 1 // Copyright 2016 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "chrome/browser/android/vr_shell/vr_shell_gl.h" | 5 #include "chrome/browser/android/vr_shell/vr_shell_gl.h" |
| 6 | 6 |
| 7 #include "base/android/jni_android.h" | 7 #include "base/android/jni_android.h" |
| 8 #include "base/memory/ptr_util.h" | 8 #include "base/memory/ptr_util.h" |
| 9 #include "base/metrics/histogram_macros.h" | 9 #include "base/metrics/histogram_macros.h" |
| 10 #include "base/threading/thread_task_runner_handle.h" | 10 #include "base/threading/thread_task_runner_handle.h" |
| (...skipping 66 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 77 // is currently sized to fit the WebVR "insecure transport" warnings, | 77 // is currently sized to fit the WebVR "insecure transport" warnings, |
| 78 // adjust it as needed if there is additional content. | 78 // adjust it as needed if there is additional content. |
| 79 static constexpr gvr::Sizei kHeadlockedBufferDimensions = {1024, 1024}; | 79 static constexpr gvr::Sizei kHeadlockedBufferDimensions = {1024, 1024}; |
| 80 static constexpr gvr::Rectf kHeadlockedBufferFov = {20.f, 20.f, 20.f, 20.f}; | 80 static constexpr gvr::Rectf kHeadlockedBufferFov = {20.f, 20.f, 20.f, 20.f}; |
| 81 | 81 |
| 82 // The GVR viewport list has two entries (left eye and right eye) for each | 82 // The GVR viewport list has two entries (left eye and right eye) for each |
| 83 // GVR buffer. | 83 // GVR buffer. |
| 84 static constexpr int kViewportListPrimaryOffset = 0; | 84 static constexpr int kViewportListPrimaryOffset = 0; |
| 85 static constexpr int kViewportListHeadlockedOffset = 2; | 85 static constexpr int kViewportListHeadlockedOffset = 2; |
| 86 | 86 |
| 87 // Buffer size large enough to handle the current backlog of poses which is | |
| 88 // 2-3 frames. | |
| 89 static constexpr int kPoseRingBufferSize = 8; | |
|
dcheng
2017/01/18 23:58:07
Nit: unsigned?
mthiesse
2017/01/19 01:19:08
Done.
| |
| 90 | |
| 87 // Magic numbers used to mark valid pose index values encoded in frame | 91 // Magic numbers used to mark valid pose index values encoded in frame |
| 88 // data. Must match the magic numbers used in blink's VRDisplay.cpp. | 92 // data. Must match the magic numbers used in blink's VRDisplay.cpp. |
| 89 static constexpr std::array<uint8_t, 2> kWebVrPosePixelMagicNumbers{{42, 142}}; | 93 static constexpr std::array<uint8_t, 2> kWebVrPosePixelMagicNumbers{{42, 142}}; |
| 90 | 94 |
| 91 float Distance(const gvr::Vec3f& vec1, const gvr::Vec3f& vec2) { | 95 float Distance(const gvr::Vec3f& vec1, const gvr::Vec3f& vec2) { |
| 92 float xdiff = (vec1.x - vec2.x); | 96 float xdiff = (vec1.x - vec2.x); |
| 93 float ydiff = (vec1.y - vec2.y); | 97 float ydiff = (vec1.y - vec2.y); |
| 94 float zdiff = (vec1.z - vec2.z); | 98 float zdiff = (vec1.z - vec2.z); |
| 95 float scale = xdiff * xdiff + ydiff * ydiff + zdiff * zdiff; | 99 float scale = xdiff * xdiff + ydiff * ydiff + zdiff * zdiff; |
| 96 return std::sqrt(scale); | 100 return std::sqrt(scale); |
| (...skipping 70 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 167 ui_input_manager_(ui_input_manager), | 171 ui_input_manager_(ui_input_manager), |
| 168 delegate_provider_(delegate_provider), | 172 delegate_provider_(delegate_provider), |
| 169 main_thread_task_runner_(std::move(main_thread_task_runner)), | 173 main_thread_task_runner_(std::move(main_thread_task_runner)), |
| 170 weak_ptr_factory_(this) { | 174 weak_ptr_factory_(this) { |
| 171 GvrInit(gvr_api); | 175 GvrInit(gvr_api); |
| 172 } | 176 } |
| 173 | 177 |
| 174 VrShellGl::~VrShellGl() { | 178 VrShellGl::~VrShellGl() { |
| 175 vsync_task_.Cancel(); | 179 vsync_task_.Cancel(); |
| 176 if (!callback_.is_null()) | 180 if (!callback_.is_null()) |
| 177 callback_.Run(nullptr, base::TimeDelta()); | 181 callback_.Run(nullptr, base::TimeDelta(), -1); |
| 178 if (binding_.is_bound()) { | 182 if (binding_.is_bound()) { |
| 179 main_thread_task_runner_->PostTask(FROM_HERE, base::Bind( | 183 main_thread_task_runner_->PostTask(FROM_HERE, base::Bind( |
| 180 &VrShellDelegate::OnVRVsyncProviderRequest, delegate_provider_, | 184 &VrShellDelegate::OnVRVsyncProviderRequest, delegate_provider_, |
| 181 base::Passed(binding_.Unbind()))); | 185 base::Passed(binding_.Unbind()))); |
| 182 } | 186 } |
| 183 } | 187 } |
| 184 | 188 |
| 185 void VrShellGl::Initialize() { | 189 void VrShellGl::Initialize() { |
| 186 scene_.reset(new UiScene); | 190 scene_.reset(new UiScene); |
| 187 | 191 |
| (...skipping 71 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 259 | 263 |
| 260 void VrShellGl::OnUIFrameAvailable() { | 264 void VrShellGl::OnUIFrameAvailable() { |
| 261 ui_surface_texture_->UpdateTexImage(); | 265 ui_surface_texture_->UpdateTexImage(); |
| 262 } | 266 } |
| 263 | 267 |
| 264 void VrShellGl::OnContentFrameAvailable() { | 268 void VrShellGl::OnContentFrameAvailable() { |
| 265 content_surface_texture_->UpdateTexImage(); | 269 content_surface_texture_->UpdateTexImage(); |
| 266 received_frame_ = true; | 270 received_frame_ = true; |
| 267 } | 271 } |
| 268 | 272 |
| 269 bool VrShellGl::GetPixelEncodedPoseIndexByte(int* pose_index) { | 273 bool VrShellGl::GetPixelEncodedFrameIndex(int* frame_index) { |
| 270 TRACE_EVENT0("gpu", "VrShellGl::GetPixelEncodedPoseIndex"); | 274 TRACE_EVENT0("gpu", "VrShellGl::GetPixelEncodedFrameIndex"); |
| 271 if (!received_frame_) { | 275 if (!received_frame_) { |
| 272 *pose_index = last_pose_; | 276 if (last_frame_index_ == -1) |
| 277 return false; | |
| 278 *frame_index = last_frame_index_; | |
| 273 return true; | 279 return true; |
| 274 } | 280 } |
| 275 received_frame_ = false; | 281 received_frame_ = false; |
| 276 | 282 |
| 277 // Read the pose index encoded in a bottom left pixel as color values. | 283 // Read the pose index encoded in a bottom left pixel as color values. |
| 278 // See also third_party/WebKit/Source/modules/vr/VRDisplay.cpp which | 284 // See also third_party/WebKit/Source/modules/vr/VRDisplay.cpp which |
| 279 // encodes the pose index, and device/vr/android/gvr/gvr_device.cc | 285 // encodes the pose index, and device/vr/android/gvr/gvr_device.cc |
| 280 // which tracks poses. Returns the low byte (0..255) if valid, or -1 | 286 // which tracks poses. Returns the low byte (0..255) if valid, or -1 |
| 281 // if not valid due to bad magic number. | 287 // if not valid due to bad magic number. |
| 282 uint8_t pixels[4]; | 288 uint8_t pixels[4]; |
| 283 // Assume we're reading from the framebuffer we just wrote to. | 289 // Assume we're reading from the framebuffer we just wrote to. |
| 284 // That's true currently, we may need to use glReadBuffer(GL_BACK) | 290 // That's true currently, we may need to use glReadBuffer(GL_BACK) |
| 285 // or equivalent if the rendering setup changes in the future. | 291 // or equivalent if the rendering setup changes in the future. |
| 286 glReadPixels(0, 0, 1, 1, GL_RGBA, GL_UNSIGNED_BYTE, pixels); | 292 glReadPixels(0, 0, 1, 1, GL_RGBA, GL_UNSIGNED_BYTE, pixels); |
| 287 | 293 |
| 288 // Check for the magic number written by VRDevice.cpp on submit. | 294 // Check for the magic number written by VRDevice.cpp on submit. |
| 289 // This helps avoid glitches from garbage data in the render | 295 // This helps avoid glitches from garbage data in the render |
| 290 // buffer that can appear during initialization or resizing. These | 296 // buffer that can appear during initialization or resizing. These |
| 291 // often appear as flashes of all-black or all-white pixels. | 297 // often appear as flashes of all-black or all-white pixels. |
| 292 if (pixels[1] == kWebVrPosePixelMagicNumbers[0] && | 298 if (pixels[1] == kWebVrPosePixelMagicNumbers[0] && |
| 293 pixels[2] == kWebVrPosePixelMagicNumbers[1]) { | 299 pixels[2] == kWebVrPosePixelMagicNumbers[1]) { |
| 294 // Pose is good. | 300 // Pose is good. |
| 295 *pose_index = pixels[0]; | 301 *frame_index = pixels[0]; |
| 296 last_pose_ = pixels[0]; | 302 last_frame_index_ = pixels[0]; |
| 297 return true; | 303 return true; |
| 298 } | 304 } |
| 299 VLOG(1) << "WebVR: reject decoded pose index " << (int)pixels[0] | 305 VLOG(1) << "WebVR: reject decoded pose index " << (int)pixels[0] |
| 300 << ", bad magic number " << (int)pixels[1] << ", " | 306 << ", bad magic number " << (int)pixels[1] << ", " |
| 301 << (int)pixels[2]; | 307 << (int)pixels[2]; |
| 302 return false; | 308 return false; |
| 303 } | 309 } |
| 304 | 310 |
| 305 void VrShellGl::GvrInit(gvr_context* gvr_api) { | 311 void VrShellGl::GvrInit(gvr_context* gvr_api) { |
| 306 gvr_api_ = gvr::GvrApi::WrapNonOwned(gvr_api); | 312 gvr_api_ = gvr::GvrApi::WrapNonOwned(gvr_api); |
| (...skipping 295 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 602 | 608 |
| 603 gvr::Frame frame = swap_chain_->AcquireFrame(); | 609 gvr::Frame frame = swap_chain_->AcquireFrame(); |
| 604 if (!frame.is_valid()) { | 610 if (!frame.is_valid()) { |
| 605 return; | 611 return; |
| 606 } | 612 } |
| 607 frame.BindBuffer(kFramePrimaryBuffer); | 613 frame.BindBuffer(kFramePrimaryBuffer); |
| 608 if (web_vr_mode_) { | 614 if (web_vr_mode_) { |
| 609 DrawWebVr(); | 615 DrawWebVr(); |
| 610 } | 616 } |
| 611 | 617 |
| 612 int pose_index; | 618 int frame_index; |
| 613 gvr::Mat4f head_pose; | 619 gvr::Mat4f head_pose; |
| 614 | 620 |
| 615 // When using async reprojection, we need to know which pose was used in | 621 // When using async reprojection, we need to know which pose was used in |
| 616 // the WebVR app for drawing this frame. Due to unknown amounts of | 622 // the WebVR app for drawing this frame. Due to unknown amounts of |
| 617 // buffering in the compositor and SurfaceTexture, we read the pose number | 623 // buffering in the compositor and SurfaceTexture, we read the pose number |
| 618 // from a corner pixel. There's no point in doing this for legacy | 624 // from a corner pixel. There's no point in doing this for legacy |
| 619 // distortion rendering since that doesn't need a pose, and reading back | 625 // distortion rendering since that doesn't need a pose, and reading back |
| 620 // pixels is an expensive operation. TODO(klausw,crbug.com/655722): stop | 626 // pixels is an expensive operation. TODO(klausw,crbug.com/655722): stop |
| 621 // doing this once we have working no-compositor rendering for WebVR. | 627 // doing this once we have working no-compositor rendering for WebVR. |
| 622 if (web_vr_mode_ && gvr_api_->GetAsyncReprojectionEnabled() && | 628 if (web_vr_mode_ && gvr_api_->GetAsyncReprojectionEnabled() && |
| 623 GetPixelEncodedPoseIndexByte(&pose_index)) { | 629 GetPixelEncodedFrameIndex(&frame_index)) { |
| 624 head_pose = webvr_head_pose_[pose_index % kPoseRingBufferSize]; | 630 static_assert(!((kPoseRingBufferSize - 1) & kPoseRingBufferSize), |
| 631 "kPoseRingBufferSize must be a power of 2"); | |
| 632 head_pose = webvr_head_pose_[frame_index % kPoseRingBufferSize]; | |
| 633 // Process all pending_bounds_ changes targeted for before this frame, being | |
| 634 // careful of wrapping frame indices. | |
| 635 static constexpr int max = | |
| 636 (int) std::numeric_limits<typeof(frame_index_)>::max(); | |
|
dcheng
2017/01/18 23:58:07
Nit: let's be consistent and not mix signed/unsigned
mthiesse
2017/01/19 01:19:08
Done.
| |
| 637 static_assert(max > kPoseRingBufferSize * 2, | |
| 638 "To detect wrapping, kPoseRingBufferSize must be smaller " | |
| 639 "than half of frame_index_ range."); | |
| 640 while (!pending_bounds_.empty()) { | |
| 641 int index = pending_bounds_.front().first; | |
| 642 if (index < frame_index) index += max; | |
| 643 if (index > frame_index && index <= frame_index + kPoseRingBufferSize) | |
| 644 break; | |
| 645 | |
| 646 const BoundsPair& bounds = pending_bounds_.front().second; | |
| 647 webvr_left_viewport_->SetSourceUv(bounds.first); | |
| 648 webvr_right_viewport_->SetSourceUv(bounds.second); | |
| 649 pending_bounds_.pop(); | |
| 650 } | |
| 651 buffer_viewport_list_->SetBufferViewport(GVR_LEFT_EYE, | |
| 652 *webvr_left_viewport_); | |
| 653 buffer_viewport_list_->SetBufferViewport(GVR_RIGHT_EYE, | |
| 654 *webvr_right_viewport_); | |
| 625 } else { | 655 } else { |
| 626 gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow(); | 656 gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow(); |
| 627 target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos; | 657 target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos; |
| 628 head_pose = gvr_api_->GetHeadSpaceFromStartSpaceRotation(target_time); | 658 head_pose = gvr_api_->GetHeadSpaceFromStartSpaceRotation(target_time); |
| 629 } | 659 } |
| 630 | 660 |
| 631 gvr::Vec3f position = GetTranslation(head_pose); | 661 gvr::Vec3f position = GetTranslation(head_pose); |
| 632 if (position.x == 0.0f && position.y == 0.0f && position.z == 0.0f) { | 662 if (position.x == 0.0f && position.y == 0.0f && position.z == 0.0f) { |
| 633 // This appears to be a 3DOF pose without a neck model. Add one. | 663 // This appears to be a 3DOF pose without a neck model. Add one. |
| 634 // The head pose has redundant data. Assume we're only using the | 664 // The head pose has redundant data. Assume we're only using the |
| (...skipping 220 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 855 // Don't need face culling, depth testing, blending, etc. Turn it all off. | 885 // Don't need face culling, depth testing, blending, etc. Turn it all off. |
| 856 glDisable(GL_CULL_FACE); | 886 glDisable(GL_CULL_FACE); |
| 857 glDepthMask(GL_FALSE); | 887 glDepthMask(GL_FALSE); |
| 858 glDisable(GL_DEPTH_TEST); | 888 glDisable(GL_DEPTH_TEST); |
| 859 glDisable(GL_SCISSOR_TEST); | 889 glDisable(GL_SCISSOR_TEST); |
| 860 glDisable(GL_BLEND); | 890 glDisable(GL_BLEND); |
| 861 glDisable(GL_POLYGON_OFFSET_FILL); | 891 glDisable(GL_POLYGON_OFFSET_FILL); |
| 862 | 892 |
| 863 glViewport(0, 0, render_size_primary_.width, render_size_primary_.height); | 893 glViewport(0, 0, render_size_primary_.width, render_size_primary_.height); |
| 864 vr_shell_renderer_->GetWebVrRenderer()->Draw(webvr_texture_id_); | 894 vr_shell_renderer_->GetWebVrRenderer()->Draw(webvr_texture_id_); |
| 865 | |
| 866 buffer_viewport_list_->SetBufferViewport(GVR_LEFT_EYE, | |
| 867 *webvr_left_viewport_); | |
| 868 buffer_viewport_list_->SetBufferViewport(GVR_RIGHT_EYE, | |
| 869 *webvr_right_viewport_); | |
| 870 } | 895 } |
| 871 | 896 |
| 872 void VrShellGl::OnTriggerEvent() { | 897 void VrShellGl::OnTriggerEvent() { |
| 873 // Set a flag to handle this on the render thread at the next frame. | 898 // Set a flag to handle this on the render thread at the next frame. |
| 874 touch_pending_ = true; | 899 touch_pending_ = true; |
| 875 } | 900 } |
| 876 | 901 |
| 877 void VrShellGl::OnPause() { | 902 void VrShellGl::OnPause() { |
| 878 vsync_task_.Cancel(); | 903 vsync_task_.Cancel(); |
| 879 controller_->OnPause(); | 904 controller_->OnPause(); |
| 880 gvr_api_->PauseTracking(); | 905 gvr_api_->PauseTracking(); |
| 881 } | 906 } |
| 882 | 907 |
| 883 void VrShellGl::OnResume() { | 908 void VrShellGl::OnResume() { |
| 884 gvr_api_->RefreshViewerProfile(); | 909 gvr_api_->RefreshViewerProfile(); |
| 885 gvr_api_->ResumeTracking(); | 910 gvr_api_->ResumeTracking(); |
| 886 controller_->OnResume(); | 911 controller_->OnResume(); |
| 887 if (ready_to_draw_) { | 912 if (ready_to_draw_) { |
| 888 vsync_task_.Reset(base::Bind(&VrShellGl::OnVSync, base::Unretained(this))); | 913 vsync_task_.Reset(base::Bind(&VrShellGl::OnVSync, base::Unretained(this))); |
| 889 OnVSync(); | 914 OnVSync(); |
| 890 } | 915 } |
| 891 } | 916 } |
| 892 | 917 |
| 893 void VrShellGl::SetWebVrMode(bool enabled) { | 918 void VrShellGl::SetWebVrMode(bool enabled) { |
| 894 web_vr_mode_ = enabled; | 919 web_vr_mode_ = enabled; |
| 895 } | 920 } |
| 896 | 921 |
| 897 void VrShellGl::UpdateWebVRTextureBounds(const gvr::Rectf& left_bounds, | 922 void VrShellGl::UpdateWebVRTextureBounds(int16_t frame_index, |
| 923 const gvr::Rectf& left_bounds, | |
| 898 const gvr::Rectf& right_bounds) { | 924 const gvr::Rectf& right_bounds) { |
| 899 webvr_left_viewport_->SetSourceUv(left_bounds); | 925 if (frame_index < 0) { |
| 900 webvr_right_viewport_->SetSourceUv(right_bounds); | 926 webvr_left_viewport_->SetSourceUv(left_bounds); |
| 927 webvr_right_viewport_->SetSourceUv(right_bounds); | |
| 928 } else { | |
| 929 pending_bounds_.emplace( | |
| 930 std::make_pair(frame_index, std::make_pair(left_bounds, right_bounds))); | |
| 931 } | |
| 901 } | 932 } |
| 902 | 933 |
| 903 gvr::GvrApi* VrShellGl::gvr_api() { | 934 gvr::GvrApi* VrShellGl::gvr_api() { |
| 904 return gvr_api_.get(); | 935 return gvr_api_.get(); |
| 905 } | 936 } |
| 906 | 937 |
| 907 void VrShellGl::ContentBoundsChanged(int width, int height) { | 938 void VrShellGl::ContentBoundsChanged(int width, int height) { |
| 908 TRACE_EVENT0("gpu", "VrShellGl::ContentBoundsChanged"); | 939 TRACE_EVENT0("gpu", "VrShellGl::ContentBoundsChanged"); |
| 909 content_tex_css_width_ = width; | 940 content_tex_css_width_ = width; |
| 910 content_tex_css_height_ = height; | 941 content_tex_css_height_ = height; |
| (...skipping 30 matching lines...) Expand all Loading... | |
| 941 if (vsync_interval_.is_zero()) | 972 if (vsync_interval_.is_zero()) |
| 942 return; | 973 return; |
| 943 target = now + vsync_interval_; | 974 target = now + vsync_interval_; |
| 944 int64_t intervals = (target - vsync_timebase_) / vsync_interval_; | 975 int64_t intervals = (target - vsync_timebase_) / vsync_interval_; |
| 945 target = vsync_timebase_ + intervals * vsync_interval_; | 976 target = vsync_timebase_ + intervals * vsync_interval_; |
| 946 task_runner_->PostDelayedTask(FROM_HERE, vsync_task_.callback(), | 977 task_runner_->PostDelayedTask(FROM_HERE, vsync_task_.callback(), |
| 947 target - now); | 978 target - now); |
| 948 | 979 |
| 949 base::TimeDelta time = intervals * vsync_interval_; | 980 base::TimeDelta time = intervals * vsync_interval_; |
| 950 if (!callback_.is_null()) { | 981 if (!callback_.is_null()) { |
| 951 callback_.Run(GetPose(), time); | 982 SendVSync(time, std::move(callback_)); |
| 952 callback_.Reset(); | |
| 953 } else { | 983 } else { |
| 954 pending_vsync_ = true; | 984 pending_vsync_ = true; |
| 955 pending_time_ = time; | 985 pending_time_ = time; |
| 956 } | 986 } |
| 957 DrawFrame(); | 987 DrawFrame(); |
| 958 } | 988 } |
| 959 | 989 |
| 960 void VrShellGl::OnRequest(device::mojom::VRVSyncProviderRequest request) { | 990 void VrShellGl::OnRequest(device::mojom::VRVSyncProviderRequest request) { |
| 961 if (binding_.is_bound()) | 991 if (binding_.is_bound()) |
| 962 binding_.Close(); | 992 binding_.Close(); |
| 963 binding_.Bind(std::move(request)); | 993 binding_.Bind(std::move(request)); |
| 964 } | 994 } |
| 965 | 995 |
| 966 void VrShellGl::GetVSync(const GetVSyncCallback& callback) { | 996 void VrShellGl::GetVSync(const GetVSyncCallback& callback) { |
| 967 if (!pending_vsync_) { | 997 if (!pending_vsync_) { |
| 968 if (!callback_.is_null()) { | 998 if (!callback_.is_null()) { |
| 969 mojo::ReportBadMessage("Requested VSync before waiting for response to " | 999 mojo::ReportBadMessage("Requested VSync before waiting for response to " |
| 970 "previous request."); | 1000 "previous request."); |
| 971 return; | 1001 return; |
| 972 } | 1002 } |
| 973 callback_ = std::move(callback); | 1003 callback_ = std::move(callback); |
| 974 return; | 1004 return; |
| 975 } | 1005 } |
| 976 pending_vsync_ = false; | 1006 pending_vsync_ = false; |
| 977 callback.Run(GetPose(), pending_time_); | 1007 SendVSync(pending_time_, std::move(callback)); |
|
dcheng
2017/01/18 23:58:07
Same comment about std::move() on a const ref being ineffective (it still copies).
mthiesse
2017/01/19 01:19:08
Done.
| |
| 978 } | 1008 } |
| 979 | 1009 |
| 980 void VrShellGl::UpdateVSyncInterval(long timebase_nanos, | 1010 void VrShellGl::UpdateVSyncInterval(long timebase_nanos, |
| 981 double interval_seconds) { | 1011 double interval_seconds) { |
| 982 vsync_timebase_ = base::TimeTicks(); | 1012 vsync_timebase_ = base::TimeTicks(); |
| 983 vsync_timebase_ += base::TimeDelta::FromMicroseconds(timebase_nanos / 1000); | 1013 vsync_timebase_ += base::TimeDelta::FromMicroseconds(timebase_nanos / 1000); |
| 984 vsync_interval_ = base::TimeDelta::FromSecondsD(interval_seconds); | 1014 vsync_interval_ = base::TimeDelta::FromSecondsD(interval_seconds); |
| 985 vsync_task_.Reset(base::Bind(&VrShellGl::OnVSync, base::Unretained(this))); | 1015 vsync_task_.Reset(base::Bind(&VrShellGl::OnVSync, base::Unretained(this))); |
| 986 OnVSync(); | 1016 OnVSync(); |
| 987 } | 1017 } |
| 988 | 1018 |
| 989 void VrShellGl::ForceExitVr() { | 1019 void VrShellGl::ForceExitVr() { |
| 990 main_thread_task_runner_->PostTask( | 1020 main_thread_task_runner_->PostTask( |
| 991 FROM_HERE, base::Bind(&VrShell::ForceExitVr, weak_vr_shell_)); | 1021 FROM_HERE, base::Bind(&VrShell::ForceExitVr, weak_vr_shell_)); |
| 992 } | 1022 } |
| 993 | 1023 |
| 994 void VrShellGl::UpdateScene(std::unique_ptr<base::ListValue> commands) { | 1024 void VrShellGl::UpdateScene(std::unique_ptr<base::ListValue> commands) { |
| 995 scene_->HandleCommands(std::move(commands), TimeInMicroseconds()); | 1025 scene_->HandleCommands(std::move(commands), TimeInMicroseconds()); |
| 996 } | 1026 } |
| 997 | 1027 |
| 998 device::mojom::VRPosePtr VrShellGl::GetPose() { | 1028 void VrShellGl::SendVSync(const base::TimeDelta& time, |
| 999 TRACE_EVENT0("input", "VrShellGl::GetPose"); | 1029 GetVSyncCallback callback) { |
| 1030 TRACE_EVENT0("input", "VrShellGl::SendVSync"); | |
| 1031 | |
| 1032 uint8_t frame_index = frame_index_++; | |
| 1000 | 1033 |
| 1001 gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow(); | 1034 gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow(); |
| 1002 target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos; | 1035 target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos; |
| 1003 | 1036 |
| 1004 gvr::Mat4f head_mat = | 1037 gvr::Mat4f head_mat = |
| 1005 gvr_api_->GetHeadSpaceFromStartSpaceRotation(target_time); | 1038 gvr_api_->GetHeadSpaceFromStartSpaceRotation(target_time); |
| 1006 head_mat = gvr_api_->ApplyNeckModel(head_mat, 1.0f); | 1039 head_mat = gvr_api_->ApplyNeckModel(head_mat, 1.0f); |
| 1007 | 1040 |
| 1008 uint32_t pose_index = pose_index_++; | 1041 webvr_head_pose_[frame_index % kPoseRingBufferSize] = head_mat; |
| 1009 webvr_head_pose_[pose_index % kPoseRingBufferSize] = head_mat; | |
| 1010 | 1042 |
| 1011 return VrShell::VRPosePtrFromGvrPose(head_mat, pose_index); | 1043 callback.Run(VrShell::VRPosePtrFromGvrPose(head_mat), time, frame_index); |
| 1012 } | 1044 } |
| 1013 | 1045 |
| 1014 } // namespace vr_shell | 1046 } // namespace vr_shell |
| OLD | NEW |