OLD | NEW |
---|---|
1 // Copyright 2016 The Chromium Authors. All rights reserved. | 1 // Copyright 2016 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "chrome/browser/android/vr_shell/vr_shell_gl.h" | 5 #include "chrome/browser/android/vr_shell/vr_shell_gl.h" |
6 | 6 |
7 #include "base/android/jni_android.h" | 7 #include "base/android/jni_android.h" |
8 #include "base/memory/ptr_util.h" | 8 #include "base/memory/ptr_util.h" |
9 #include "base/metrics/histogram_macros.h" | 9 #include "base/metrics/histogram_macros.h" |
10 #include "base/threading/thread_task_runner_handle.h" | 10 #include "base/threading/thread_task_runner_handle.h" |
(...skipping 155 matching lines...) | |
166 content_input_manager_(content_input_manager), | 166 content_input_manager_(content_input_manager), |
167 ui_input_manager_(ui_input_manager), | 167 ui_input_manager_(ui_input_manager), |
168 main_thread_task_runner_(std::move(main_thread_task_runner)), | 168 main_thread_task_runner_(std::move(main_thread_task_runner)), |
169 weak_ptr_factory_(this) { | 169 weak_ptr_factory_(this) { |
170 GvrInit(gvr_api); | 170 GvrInit(gvr_api); |
171 } | 171 } |
172 | 172 |
173 VrShellGl::~VrShellGl() { | 173 VrShellGl::~VrShellGl() { |
174 vsync_task_.Cancel(); | 174 vsync_task_.Cancel(); |
175 if (!callback_.is_null()) | 175 if (!callback_.is_null()) |
176 callback_.Run(nullptr, 0); | 176 callback_.Run(nullptr, 0, -1); |
177 } | 177 } |
178 | 178 |
179 void VrShellGl::Initialize() { | 179 void VrShellGl::Initialize() { |
180 scene_.reset(new UiScene); | 180 scene_.reset(new UiScene); |
181 | 181 |
182 if (surfaceless_rendering_) { | 182 if (surfaceless_rendering_) { |
183 // If we're rendering surfaceless, we'll never get a java surface to render | 183 // If we're rendering surfaceless, we'll never get a java surface to render |
184 // into, so we can initialize GL right away. | 184 // into, so we can initialize GL right away. |
185 InitializeGl(nullptr); | 185 InitializeGl(nullptr); |
186 } | 186 } |
(...skipping 66 matching lines...) | |
253 | 253 |
254 void VrShellGl::OnUIFrameAvailable() { | 254 void VrShellGl::OnUIFrameAvailable() { |
255 ui_surface_texture_->UpdateTexImage(); | 255 ui_surface_texture_->UpdateTexImage(); |
256 } | 256 } |
257 | 257 |
258 void VrShellGl::OnContentFrameAvailable() { | 258 void VrShellGl::OnContentFrameAvailable() { |
259 content_surface_texture_->UpdateTexImage(); | 259 content_surface_texture_->UpdateTexImage(); |
260 received_frame_ = true; | 260 received_frame_ = true; |
261 } | 261 } |
262 | 262 |
263 bool VrShellGl::GetPixelEncodedPoseIndexByte(int* pose_index) { | 263 bool VrShellGl::GetPixelEncodedFrameIndex(uint8_t* frame_index) { |
264 TRACE_EVENT0("gpu", "VrShellGl::GetPixelEncodedPoseIndex"); | 264 TRACE_EVENT0("gpu", "VrShellGl::GetPixelEncodedFrameIndex"); |
265 if (!received_frame_) { | 265 if (!received_frame_) { |
266 *pose_index = last_pose_; | 266 *frame_index = last_frame_index_; |
267 return true; | 267 return true; |
268 } | 268 } |
269 received_frame_ = false; | 269 received_frame_ = false; |
270 | 270 |
271 // Read the pose index encoded in a bottom left pixel as color values. | 271 // Read the pose index encoded in a bottom left pixel as color values. |
272 // See also third_party/WebKit/Source/modules/vr/VRDisplay.cpp which | 272 // See also third_party/WebKit/Source/modules/vr/VRDisplay.cpp which |
273 // encodes the pose index, and device/vr/android/gvr/gvr_device.cc | 273 // encodes the pose index, and device/vr/android/gvr/gvr_device.cc |
274 // which tracks poses. Returns the low byte (0..255) if valid, or -1 | 274 // which tracks poses. Returns the low byte (0..255) if valid, or -1 |
275 // if not valid due to bad magic number. | 275 // if not valid due to bad magic number. |
276 uint8_t pixels[4]; | 276 uint8_t pixels[4]; |
277 // Assume we're reading from the framebuffer we just wrote to. | 277 // Assume we're reading from the framebuffer we just wrote to. |
278 // That's true currently, we may need to use glReadBuffer(GL_BACK) | 278 // That's true currently, we may need to use glReadBuffer(GL_BACK) |
279 // or equivalent if the rendering setup changes in the future. | 279 // or equivalent if the rendering setup changes in the future. |
280 glReadPixels(0, 0, 1, 1, GL_RGBA, GL_UNSIGNED_BYTE, pixels); | 280 glReadPixels(0, 0, 1, 1, GL_RGBA, GL_UNSIGNED_BYTE, pixels); |
281 | 281 |
282 // Check for the magic number written by VRDevice.cpp on submit. | 282 // Check for the magic number written by VRDevice.cpp on submit. |
283 // This helps avoid glitches from garbage data in the render | 283 // This helps avoid glitches from garbage data in the render |
284 // buffer that can appear during initialization or resizing. These | 284 // buffer that can appear during initialization or resizing. These |
285 // often appear as flashes of all-black or all-white pixels. | 285 // often appear as flashes of all-black or all-white pixels. |
286 if (pixels[1] == kWebVrPosePixelMagicNumbers[0] && | 286 if (pixels[1] == kWebVrPosePixelMagicNumbers[0] && |
287 pixels[2] == kWebVrPosePixelMagicNumbers[1]) { | 287 pixels[2] == kWebVrPosePixelMagicNumbers[1]) { |
288 // Pose is good. | 288 // Pose is good. |
289 *pose_index = pixels[0]; | 289 *frame_index = pixels[0]; |
290 last_pose_ = pixels[0]; | |
291 return true; | 290 return true; |
292 } | 291 } |
293 VLOG(1) << "WebVR: reject decoded pose index " << (int)pixels[0] | 292 VLOG(1) << "WebVR: reject decoded pose index " << (int)pixels[0] |
294 << ", bad magic number " << (int)pixels[1] << ", " | 293 << ", bad magic number " << (int)pixels[1] << ", " |
295 << (int)pixels[2]; | 294 << (int)pixels[2]; |
296 return false; | 295 return false; |
297 } | 296 } |
298 | 297 |
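The new-side reader above trusts the bottom-left pixel only when the green and blue channels carry the expected magic bytes, with the frame index in the red channel. As rough orientation only, this is the kind of stamping the encoding side (VRDisplay.cpp, per the comment) has to perform; the helper name and the magic values here are placeholders, not the actual Blink code:

```cpp
// Illustrative sketch: stamp a frame index into the bottom-left pixel so a
// later glReadPixels() in the compositor path can recover it. Placeholder
// constants; the real magic values live in vr_shell_gl.cc / VRDisplay.cpp.
#include <GLES2/gl2.h>
#include <cstdint>

namespace {
const uint8_t kMagic[2] = {0x2C, 0xBF};  // placeholder magic bytes
}

void StampFrameIndexPixel(uint8_t frame_index) {
  glEnable(GL_SCISSOR_TEST);
  glScissor(0, 0, 1, 1);  // restrict the clear to the single corner pixel
  // R = frame index low byte, G/B = magic bytes checked by the reader, A = 1.
  glClearColor(frame_index / 255.0f, kMagic[0] / 255.0f, kMagic[1] / 255.0f,
               1.0f);
  glClear(GL_COLOR_BUFFER_BIT);
  glDisable(GL_SCISSOR_TEST);
}
```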
299 void VrShellGl::GvrInit(gvr_context* gvr_api) { | 298 void VrShellGl::GvrInit(gvr_context* gvr_api) { |
300 gvr_api_ = gvr::GvrApi::WrapNonOwned(gvr_api); | 299 gvr_api_ = gvr::GvrApi::WrapNonOwned(gvr_api); |
(...skipping 297 matching lines...) | |
598 | 597 |
599 gvr::Frame frame = swap_chain_->AcquireFrame(); | 598 gvr::Frame frame = swap_chain_->AcquireFrame(); |
600 if (!frame.is_valid()) { | 599 if (!frame.is_valid()) { |
601 return; | 600 return; |
602 } | 601 } |
603 frame.BindBuffer(kFramePrimaryBuffer); | 602 frame.BindBuffer(kFramePrimaryBuffer); |
604 if (web_vr_mode_) { | 603 if (web_vr_mode_) { |
605 DrawWebVr(); | 604 DrawWebVr(); |
606 } | 605 } |
607 | 606 |
608 int pose_index; | 607 uint8_t frame_index; |
609 gvr::Mat4f head_pose; | 608 gvr::Mat4f head_pose; |
610 | 609 |
611 // When using async reprojection, we need to know which pose was used in | 610 // When using async reprojection, we need to know which pose was used in |
612 // the WebVR app for drawing this frame. Due to unknown amounts of | 611 // the WebVR app for drawing this frame. Due to unknown amounts of |
613 // buffering in the compositor and SurfaceTexture, we read the pose number | 612 // buffering in the compositor and SurfaceTexture, we read the pose number |
614 // from a corner pixel. There's no point in doing this for legacy | 613 // from a corner pixel. There's no point in doing this for legacy |
615 // distortion rendering since that doesn't need a pose, and reading back | 614 // distortion rendering since that doesn't need a pose, and reading back |
616 // pixels is an expensive operation. TODO(klausw,crbug.com/655722): stop | 615 // pixels is an expensive operation. TODO(klausw,crbug.com/655722): stop |
617 // doing this once we have working no-compositor rendering for WebVR. | 616 // doing this once we have working no-compositor rendering for WebVR. |
618 if (web_vr_mode_ && gvr_api_->GetAsyncReprojectionEnabled() && | 617 if (web_vr_mode_ && gvr_api_->GetAsyncReprojectionEnabled() && |
619 GetPixelEncodedPoseIndexByte(&pose_index)) { | 618 GetPixelEncodedFrameIndex(&frame_index)) { |
620 head_pose = webvr_head_pose_[pose_index % kPoseRingBufferSize]; | 619 static_assert(!((kPoseRingBufferSize - 1) & kPoseRingBufferSize), |
620 "kPoseRingBufferSize must be a power of 2"); | |
621 head_pose = webvr_head_pose_[frame_index % kPoseRingBufferSize]; | |
622 // Process all pending_bounds_ changes targeted for before this frame, being | |
623 // careful of wrapping frame indices. | |
624 static_assert(std::numeric_limits<typeof(frame_index_)>::max() > | |
625 kPoseRingBufferSize * 2, | |
626 "To detect wrapping, kPoseRingBufferSize must be smaller " | |
627 "than half of frame_index_ range."); | |
628 while (!pending_bounds_.empty() && | |
629 pending_bounds_.front().first <= frame_index && | |
630 frame_index - pending_bounds_.front().first < kPoseRingBufferSize) { | |
631 const BoundsPair& bounds = pending_bounds_.front().second; | |
632 webvr_left_viewport_->SetSourceUv(bounds.first); | |
633 webvr_right_viewport_->SetSourceUv(bounds.second); | |
634 pending_bounds_.pop(); | |
635 } | |
636 buffer_viewport_list_->SetBufferViewport(GVR_LEFT_EYE, | |
637 *webvr_left_viewport_); | |
638 buffer_viewport_list_->SetBufferViewport(GVR_RIGHT_EYE, | |
639 *webvr_right_viewport_); | |
621 } else { | 640 } else { |
622 gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow(); | 641 gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow(); |
623 target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos; | 642 target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos; |
624 head_pose = gvr_api_->GetHeadSpaceFromStartSpaceRotation(target_time); | 643 head_pose = gvr_api_->GetHeadSpaceFromStartSpaceRotation(target_time); |
625 } | 644 } |
626 | 645 |
627 gvr::Vec3f position = GetTranslation(head_pose); | 646 gvr::Vec3f position = GetTranslation(head_pose); |
628 if (position.x == 0.0f && position.y == 0.0f && position.z == 0.0f) { | 647 if (position.x == 0.0f && position.y == 0.0f && position.z == 0.0f) { |
629 // This appears to be a 3DOF pose without a neck model. Add one. | 648 // This appears to be a 3DOF pose without a neck model. Add one. |
630 // The head pose has redundant data. Assume we're only using the | 649 // The head pose has redundant data. Assume we're only using the |
(...skipping 221 matching lines...) | |
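The two static_asserts added above encode the assumptions that make the uint8_t frame index safe: the ring buffer size is a power of two, and it is much smaller than the index range. A minimal standalone sketch of why the power-of-two requirement matters (the Mat4f stand-in and helper names are hypothetical; only kPoseRingBufferSize comes from the real code):

```cpp
// Because 256 (the number of uint8_t values) is an exact multiple of any
// power-of-two size up to 256, "frame_index % kPoseRingBufferSize" maps a
// given frame to the same slot before and after the counter wraps past 255,
// so a pose stored in SendVSync() is still found by DrawFrame() later.
#include <array>
#include <cstdint>

struct Mat4f { float m[4][4]; };  // stand-in for gvr::Mat4f

constexpr size_t kPoseRingBufferSize = 8;  // illustrative; must be 2^n
static_assert((kPoseRingBufferSize & (kPoseRingBufferSize - 1)) == 0,
              "kPoseRingBufferSize must be a power of 2");

std::array<Mat4f, kPoseRingBufferSize> webvr_head_pose;

void StorePose(uint8_t frame_index, const Mat4f& pose) {
  webvr_head_pose[frame_index % kPoseRingBufferSize] = pose;
}

const Mat4f& LookUpPose(uint8_t frame_index) {
  return webvr_head_pose[frame_index % kPoseRingBufferSize];
}
```

With a non-power-of-two size the mapping would jump at the wrap (for a size of 10, frame 255 lands in slot 5 but the very next frame lands in slot 0), which is exactly what the first static_assert rules out.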
852 // Don't need face culling, depth testing, blending, etc. Turn it all off. | 871 // Don't need face culling, depth testing, blending, etc. Turn it all off. |
853 glDisable(GL_CULL_FACE); | 872 glDisable(GL_CULL_FACE); |
854 glDepthMask(GL_FALSE); | 873 glDepthMask(GL_FALSE); |
855 glDisable(GL_DEPTH_TEST); | 874 glDisable(GL_DEPTH_TEST); |
856 glDisable(GL_SCISSOR_TEST); | 875 glDisable(GL_SCISSOR_TEST); |
857 glDisable(GL_BLEND); | 876 glDisable(GL_BLEND); |
858 glDisable(GL_POLYGON_OFFSET_FILL); | 877 glDisable(GL_POLYGON_OFFSET_FILL); |
859 | 878 |
860 glViewport(0, 0, render_size_primary_.width, render_size_primary_.height); | 879 glViewport(0, 0, render_size_primary_.width, render_size_primary_.height); |
861 vr_shell_renderer_->GetWebVrRenderer()->Draw(webvr_texture_id_); | 880 vr_shell_renderer_->GetWebVrRenderer()->Draw(webvr_texture_id_); |
862 | |
863 buffer_viewport_list_->SetBufferViewport(GVR_LEFT_EYE, | |
864 *webvr_left_viewport_); | |
865 buffer_viewport_list_->SetBufferViewport(GVR_RIGHT_EYE, | |
866 *webvr_right_viewport_); | |
867 } | 881 } |
868 | 882 |
869 void VrShellGl::OnTriggerEvent() { | 883 void VrShellGl::OnTriggerEvent() { |
870 // Set a flag to handle this on the render thread at the next frame. | 884 // Set a flag to handle this on the render thread at the next frame. |
871 touch_pending_ = true; | 885 touch_pending_ = true; |
872 } | 886 } |
873 | 887 |
874 void VrShellGl::OnPause() { | 888 void VrShellGl::OnPause() { |
875 vsync_task_.Cancel(); | 889 vsync_task_.Cancel(); |
876 controller_->OnPause(); | 890 controller_->OnPause(); |
877 gvr_api_->PauseTracking(); | 891 gvr_api_->PauseTracking(); |
878 } | 892 } |
879 | 893 |
880 void VrShellGl::OnResume() { | 894 void VrShellGl::OnResume() { |
881 gvr_api_->RefreshViewerProfile(); | 895 gvr_api_->RefreshViewerProfile(); |
882 gvr_api_->ResumeTracking(); | 896 gvr_api_->ResumeTracking(); |
883 controller_->OnResume(); | 897 controller_->OnResume(); |
884 if (ready_to_draw_) { | 898 if (ready_to_draw_) { |
885 vsync_task_.Reset(base::Bind(&VrShellGl::OnVSync, base::Unretained(this))); | 899 vsync_task_.Reset(base::Bind(&VrShellGl::OnVSync, base::Unretained(this))); |
886 OnVSync(); | 900 OnVSync(); |
887 } | 901 } |
888 } | 902 } |
889 | 903 |
890 void VrShellGl::SetWebVrMode(bool enabled) { | 904 void VrShellGl::SetWebVrMode(bool enabled) { |
891 web_vr_mode_ = enabled; | 905 web_vr_mode_ = enabled; |
892 } | 906 } |
893 | 907 |
894 void VrShellGl::UpdateWebVRTextureBounds(const gvr::Rectf& left_bounds, | 908 void VrShellGl::UpdateWebVRTextureBounds(int16_t frame_index, |
909 const gvr::Rectf& left_bounds, | |
895 const gvr::Rectf& right_bounds) { | 910 const gvr::Rectf& right_bounds) { |
896 webvr_left_viewport_->SetSourceUv(left_bounds); | 911 if (frame_index < 0) { |
897 webvr_right_viewport_->SetSourceUv(right_bounds); | 912 webvr_left_viewport_->SetSourceUv(left_bounds); |
913 webvr_right_viewport_->SetSourceUv(right_bounds); | |
914 } else { | |
915 pending_bounds_.emplace( | |
916 std::make_pair(frame_index, std::make_pair(left_bounds, right_bounds))); | |
917 } | |
898 } | 918 } |
899 | 919 |
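The deferred path above queues bounds keyed by the frame index they belong to, and DrawFrame() drains entries up to the frame it is about to submit. The member declarations are not part of this file, so the following is only a guess at their shape in vr_shell_gl.h (Rectf here is a mock of gvr::Rectf, and the key type is assumed):

```cpp
// Hypothetical sketch of the queue backing pending_bounds_: a FIFO of
// {frame index, {left-eye UV bounds, right-eye UV bounds}} entries.
#include <cstdint>
#include <queue>
#include <utility>

struct Rectf { float left, right, bottom, top; };  // stand-in for gvr::Rectf

using BoundsPair = std::pair<Rectf, Rectf>;  // first = left eye, second = right

// Entries are pushed in UpdateWebVRTextureBounds() when frame_index >= 0 and
// popped in DrawFrame() once that frame (or a later one) is being drawn.
std::queue<std::pair<uint16_t, BoundsPair>> pending_bounds_;
```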
900 gvr::GvrApi* VrShellGl::gvr_api() { | 920 gvr::GvrApi* VrShellGl::gvr_api() { |
901 return gvr_api_.get(); | 921 return gvr_api_.get(); |
902 } | 922 } |
903 | 923 |
904 void VrShellGl::ContentBoundsChanged(int width, int height) { | 924 void VrShellGl::ContentBoundsChanged(int width, int height) { |
905 TRACE_EVENT0("gpu", "VrShellGl::ContentBoundsChanged"); | 925 TRACE_EVENT0("gpu", "VrShellGl::ContentBoundsChanged"); |
906 content_tex_css_width_ = width; | 926 content_tex_css_width_ = width; |
907 content_tex_css_height_ = height; | 927 content_tex_css_height_ = height; |
(...skipping 30 matching lines...) | |
938 if (vsync_interval_.is_zero()) | 958 if (vsync_interval_.is_zero()) |
939 return; | 959 return; |
940 target = now + vsync_interval_; | 960 target = now + vsync_interval_; |
941 int64_t intervals = (target - vsync_timebase_) / vsync_interval_; | 961 int64_t intervals = (target - vsync_timebase_) / vsync_interval_; |
942 target = vsync_timebase_ + intervals * vsync_interval_; | 962 target = vsync_timebase_ + intervals * vsync_interval_; |
943 task_runner_->PostDelayedTask(FROM_HERE, vsync_task_.callback(), | 963 task_runner_->PostDelayedTask(FROM_HERE, vsync_task_.callback(), |
944 target - now); | 964 target - now); |
945 | 965 |
946 double time = (intervals * vsync_interval_).InSecondsF(); | 966 double time = (intervals * vsync_interval_).InSecondsF(); |
947 if (!callback_.is_null()) { | 967 if (!callback_.is_null()) { |
948 callback_.Run(GetPose(), time); | 968 SendVSync(time, std::move(callback_)); |
949 callback_.Reset(); | |
950 } else { | 969 } else { |
951 pending_vsync_ = true; | 970 pending_vsync_ = true; |
952 pending_time_ = time; | 971 pending_time_ = time; |
953 } | 972 } |
954 DrawFrame(); | 973 DrawFrame(); |
955 } | 974 } |
956 | 975 |
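OnVSync() schedules its next run by computing now + interval and then snapping that target back onto the grid defined by the vsync timebase, so task-posting latency cannot accumulate into drift. A standalone sketch of the same arithmetic, with std::chrono standing in for base::TimeTicks/base::TimeDelta:

```cpp
// Snap "now + interval" down to the nearest vsync boundary at or before it.
// The integer division floors, which is what keeps the schedule phase-locked
// to the timebase rather than sliding later each frame.
#include <chrono>

using Clock = std::chrono::steady_clock;

Clock::time_point NextVSyncTarget(Clock::time_point now,
                                  Clock::time_point timebase,
                                  Clock::duration interval) {
  auto target = now + interval;
  auto intervals = (target - timebase) / interval;  // whole intervals elapsed
  return timebase + intervals * interval;
}
```

For instance, with the timebase at 0 ms, a 16.6 ms interval and now = 40 ms, the raw target of 56.6 ms is snapped down to 49.8 ms, the third vsync after the timebase, which is still in the future relative to now.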
957 void VrShellGl::OnRequest(device::mojom::VRVSyncProviderRequest request) { | 976 void VrShellGl::OnRequest(device::mojom::VRVSyncProviderRequest request) { |
958 binding_.Close(); | 977 binding_.Close(); |
959 binding_.Bind(std::move(request)); | 978 binding_.Bind(std::move(request)); |
960 } | 979 } |
961 | 980 |
962 void VrShellGl::GetVSync(const GetVSyncCallback& callback) { | 981 void VrShellGl::GetVSync(const GetVSyncCallback& callback) { |
963 if (!pending_vsync_) { | 982 if (!pending_vsync_) { |
964 CHECK(callback_.is_null()); | 983 CHECK(callback_.is_null()); |
dcheng 2017/01/14 11:17:01:
Since this is getting called by the renderer, we c
mthiesse 2017/01/16 22:21:32:
Fixed in the CL this depends on.
965 callback_ = std::move(callback); | 984 callback_ = std::move(callback); |
966 return; | 985 return; |
967 } | 986 } |
968 pending_vsync_ = false; | 987 pending_vsync_ = false; |
969 callback.Run(GetPose(), pending_time_); | 988 SendVSync(pending_time_, std::move(callback)); |
970 } | 989 } |
971 | 990 |
972 void VrShellGl::UpdateVSyncInterval(long timebase_nanos, | 991 void VrShellGl::UpdateVSyncInterval(long timebase_nanos, |
973 double interval_seconds) { | 992 double interval_seconds) { |
974 vsync_timebase_ = base::TimeTicks(); | 993 vsync_timebase_ = base::TimeTicks(); |
975 vsync_timebase_ += base::TimeDelta::FromMicroseconds(timebase_nanos / 1000); | 994 vsync_timebase_ += base::TimeDelta::FromMicroseconds(timebase_nanos / 1000); |
976 vsync_interval_ = base::TimeDelta::FromSecondsD(interval_seconds); | 995 vsync_interval_ = base::TimeDelta::FromSecondsD(interval_seconds); |
977 vsync_task_.Reset(base::Bind(&VrShellGl::OnVSync, base::Unretained(this))); | 996 vsync_task_.Reset(base::Bind(&VrShellGl::OnVSync, base::Unretained(this))); |
978 OnVSync(); | 997 OnVSync(); |
979 } | 998 } |
980 | 999 |
981 void VrShellGl::ForceExitVr() { | 1000 void VrShellGl::ForceExitVr() { |
982 main_thread_task_runner_->PostTask( | 1001 main_thread_task_runner_->PostTask( |
983 FROM_HERE, base::Bind(&VrShell::ForceExitVr, weak_vr_shell_)); | 1002 FROM_HERE, base::Bind(&VrShell::ForceExitVr, weak_vr_shell_)); |
984 } | 1003 } |
985 | 1004 |
986 void VrShellGl::UpdateScene(std::unique_ptr<base::ListValue> commands) { | 1005 void VrShellGl::UpdateScene(std::unique_ptr<base::ListValue> commands) { |
987 scene_->HandleCommands(std::move(commands), TimeInMicroseconds()); | 1006 scene_->HandleCommands(std::move(commands), TimeInMicroseconds()); |
988 } | 1007 } |
989 | 1008 |
990 device::mojom::VRPosePtr VrShellGl::GetPose() { | 1009 void VrShellGl::SendVSync(double time, GetVSyncCallback callback) { |
991 TRACE_EVENT0("input", "VrShellGl::GetPose"); | 1010 TRACE_EVENT0("input", "VrShellGl::SendVSync"); |
1011 | |
1012 uint8_t frame_index = frame_index_++; | |
992 | 1013 |
993 gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow(); | 1014 gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow(); |
994 target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos; | 1015 target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos; |
995 | 1016 |
996 gvr::Mat4f head_mat = | 1017 gvr::Mat4f head_mat = |
997 gvr_api_->GetHeadSpaceFromStartSpaceRotation(target_time); | 1018 gvr_api_->GetHeadSpaceFromStartSpaceRotation(target_time); |
998 head_mat = gvr_api_->ApplyNeckModel(head_mat, 1.0f); | 1019 head_mat = gvr_api_->ApplyNeckModel(head_mat, 1.0f); |
999 | 1020 |
1000 uint32_t pose_index = pose_index_++; | 1021 webvr_head_pose_[frame_index % kPoseRingBufferSize] = head_mat; |
1001 webvr_head_pose_[pose_index % kPoseRingBufferSize] = head_mat; | |
1002 | 1022 |
1003 return VrShell::VRPosePtrFromGvrPose(head_mat, pose_index); | 1023 callback.Run(VrShell::VRPosePtrFromGvrPose(head_mat), time, frame_index); |
1004 } | 1024 } |
1005 | 1025 |
1006 } // namespace vr_shell | 1026 } // namespace vr_shell |