Chromium Code Reviews
| Index: chrome/browser/android/vr_shell/vr_shell_gl.cc |
| diff --git a/chrome/browser/android/vr_shell/vr_shell_gl.cc b/chrome/browser/android/vr_shell/vr_shell_gl.cc |
| index c8b00a69edb41a2690255c0250449bd9ea9eb0e1..5becab650c9007cc66d219250d18a02775c8b3d3 100644 |
| --- a/chrome/browser/android/vr_shell/vr_shell_gl.cc |
| +++ b/chrome/browser/android/vr_shell/vr_shell_gl.cc |
| @@ -86,6 +86,10 @@ static constexpr gvr::Rectf kHeadlockedBufferFov = {20.f, 20.f, 20.f, 20.f}; |
| static constexpr int kViewportListPrimaryOffset = 0; |
| static constexpr int kViewportListHeadlockedOffset = 2; |
| +// Buffer size large enough to handle the current backlog of poses which is |
| +// 2-3 frames. |
| +static constexpr unsigned kPoseRingBufferSize = 8; |
| + |
| // Magic numbers used to mark valid pose index values encoded in frame |
| // data. Must match the magic numbers used in blink's VRDisplay.cpp. |
| static constexpr std::array<uint8_t, 2> kWebVrPosePixelMagicNumbers{{42, 142}}; |
| @@ -175,8 +179,10 @@ VrShellGl::VrShellGl( |
| VrShellGl::~VrShellGl() { |
| vsync_task_.Cancel(); |
| - if (!callback_.is_null()) |
| - callback_.Run(nullptr, base::TimeDelta()); |
| + if (!callback_.is_null()) { |
| + callback_.Run(nullptr, base::TimeDelta(), -1); |
| + callback_.Reset(); |
| + } |
| if (binding_.is_bound()) { |
| main_thread_task_runner_->PostTask(FROM_HERE, base::Bind( |
| &VrShellDelegate::OnVRVsyncProviderRequest, delegate_provider_, |
| @@ -268,10 +274,12 @@ void VrShellGl::OnContentFrameAvailable() { |
| received_frame_ = true; |
| } |
| -bool VrShellGl::GetPixelEncodedPoseIndexByte(int* pose_index) { |
| - TRACE_EVENT0("gpu", "VrShellGl::GetPixelEncodedPoseIndex"); |
| +bool VrShellGl::GetPixelEncodedFrameIndex(uint16_t* frame_index) { |
| + TRACE_EVENT0("gpu", "VrShellGl::GetPixelEncodedFrameIndex"); |
| if (!received_frame_) { |
| - *pose_index = last_pose_; |
| + if (last_frame_index_ == -1) |
| + return false; |
| + *frame_index = last_frame_index_; |
| return true; |
| } |
| received_frame_ = false; |
| @@ -294,8 +302,8 @@ bool VrShellGl::GetPixelEncodedPoseIndexByte(int* pose_index) { |
| if (pixels[1] == kWebVrPosePixelMagicNumbers[0] && |
| pixels[2] == kWebVrPosePixelMagicNumbers[1]) { |
| // Pose is good. |
| - *pose_index = pixels[0]; |
| - last_pose_ = pixels[0]; |
| + *frame_index = pixels[0]; |
| + last_frame_index_ = pixels[0]; |
| return true; |
| } |
| VLOG(1) << "WebVR: reject decoded pose index " << (int)pixels[0] |
| @@ -611,7 +619,7 @@ void VrShellGl::DrawFrame() { |
| DrawWebVr(); |
| } |
| - int pose_index; |
| + uint16_t frame_index; |
| gvr::Mat4f head_pose; |
| // When using async reprojection, we need to know which pose was used in |
| @@ -622,8 +630,32 @@ void VrShellGl::DrawFrame() { |
| // pixels is an expensive operation. TODO(klausw,crbug.com/655722): stop |
| // doing this once we have working no-compositor rendering for WebVR. |
| if (web_vr_mode_ && gvr_api_->GetAsyncReprojectionEnabled() && |
| - GetPixelEncodedPoseIndexByte(&pose_index)) { |
| - head_pose = webvr_head_pose_[pose_index % kPoseRingBufferSize]; |
| + GetPixelEncodedFrameIndex(&frame_index)) { |
| + static_assert(!((kPoseRingBufferSize - 1) & kPoseRingBufferSize), |
| + "kPoseRingBufferSize must be a power of 2"); |
| + head_pose = webvr_head_pose_[frame_index % kPoseRingBufferSize]; |
| + // Process all pending_bounds_ changes targeted for before this frame, being |
| + // careful of wrapping frame indices. |
| + static constexpr unsigned max = |
| + std::numeric_limits<decltype(frame_index_)>::max(); |
| + static_assert(max > kPoseRingBufferSize * 2, |
| + "To detect wrapping, kPoseRingBufferSize must be smaller " |
| + "than half of frame_index_ range."); |
| + while (!pending_bounds_.empty()) { |
| + uint16_t index = pending_bounds_.front().first; |
| + if (index < frame_index) index += max; |
|
dcheng
2017/01/27 09:41:33
Nit: a few more comments here could be helpful for understanding the index-wrapping logic. [comment truncated in extraction — original tail unrecoverable]
mthiesse
2017/01/27 16:15:31
Done.
|
| + if (index > frame_index && index <= frame_index + kPoseRingBufferSize) |
| + break; |
| + |
| + const BoundsPair& bounds = pending_bounds_.front().second; |
| + webvr_left_viewport_->SetSourceUv(bounds.first); |
| + webvr_right_viewport_->SetSourceUv(bounds.second); |
| + pending_bounds_.pop(); |
| + } |
| + buffer_viewport_list_->SetBufferViewport(GVR_LEFT_EYE, |
| + *webvr_left_viewport_); |
| + buffer_viewport_list_->SetBufferViewport(GVR_RIGHT_EYE, |
| + *webvr_right_viewport_); |
| } else { |
| gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow(); |
| target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos; |
| @@ -864,11 +896,6 @@ void VrShellGl::DrawWebVr() { |
| glViewport(0, 0, render_size_primary_.width, render_size_primary_.height); |
| vr_shell_renderer_->GetWebVrRenderer()->Draw(webvr_texture_id_); |
| - |
| - buffer_viewport_list_->SetBufferViewport(GVR_LEFT_EYE, |
| - *webvr_left_viewport_); |
| - buffer_viewport_list_->SetBufferViewport(GVR_RIGHT_EYE, |
| - *webvr_right_viewport_); |
| } |
| void VrShellGl::OnTriggerEvent() { |
| @@ -896,10 +923,16 @@ void VrShellGl::SetWebVrMode(bool enabled) { |
| web_vr_mode_ = enabled; |
| } |
| -void VrShellGl::UpdateWebVRTextureBounds(const gvr::Rectf& left_bounds, |
| +void VrShellGl::UpdateWebVRTextureBounds(int16_t frame_index, |
| + const gvr::Rectf& left_bounds, |
| const gvr::Rectf& right_bounds) { |
| - webvr_left_viewport_->SetSourceUv(left_bounds); |
| - webvr_right_viewport_->SetSourceUv(right_bounds); |
| + if (frame_index < 0) { |
| + webvr_left_viewport_->SetSourceUv(left_bounds); |
| + webvr_right_viewport_->SetSourceUv(right_bounds); |
| + } else { |
| + pending_bounds_.emplace( |
| + std::make_pair(frame_index, std::make_pair(left_bounds, right_bounds))); |
| + } |
| } |
| gvr::GvrApi* VrShellGl::gvr_api() { |
| @@ -950,7 +983,7 @@ void VrShellGl::OnVSync() { |
| base::TimeDelta time = intervals * vsync_interval_; |
| if (!callback_.is_null()) { |
| - callback_.Run(GetPose(), time); |
| + SendVSync(time, callback_); |
| callback_.Reset(); |
| } else { |
| pending_vsync_ = true; |
| @@ -975,7 +1008,7 @@ void VrShellGl::GetVSync(const GetVSyncCallback& callback) { |
| return; |
| } |
| pending_vsync_ = false; |
| - callback.Run(GetPose(), pending_time_); |
| + SendVSync(pending_time_, callback); |
| } |
| void VrShellGl::UpdateVSyncInterval(long timebase_nanos, |
| @@ -996,8 +1029,11 @@ void VrShellGl::UpdateScene(std::unique_ptr<base::ListValue> commands) { |
| scene_->HandleCommands(std::move(commands), TimeInMicroseconds()); |
| } |
| -device::mojom::VRPosePtr VrShellGl::GetPose() { |
| - TRACE_EVENT0("input", "VrShellGl::GetPose"); |
| +void VrShellGl::SendVSync(base::TimeDelta time, |
| + const GetVSyncCallback& callback) { |
| + TRACE_EVENT0("input", "VrShellGl::SendVSync"); |
| + |
| + uint8_t frame_index = frame_index_++; |
| gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow(); |
| target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos; |
| @@ -1006,10 +1042,9 @@ device::mojom::VRPosePtr VrShellGl::GetPose() { |
| gvr_api_->GetHeadSpaceFromStartSpaceRotation(target_time); |
| head_mat = gvr_api_->ApplyNeckModel(head_mat, 1.0f); |
| - uint32_t pose_index = pose_index_++; |
| - webvr_head_pose_[pose_index % kPoseRingBufferSize] = head_mat; |
| + webvr_head_pose_[frame_index % kPoseRingBufferSize] = head_mat; |
| - return VrShell::VRPosePtrFromGvrPose(head_mat, pose_index); |
| + callback.Run(VrShell::VRPosePtrFromGvrPose(head_mat), time, frame_index); |
| } |
| } // namespace vr_shell |