Chromium Code Reviews — Index: chrome/browser/android/vr_shell/vr_shell_gl.cc |
| diff --git a/chrome/browser/android/vr_shell/vr_shell_gl.cc b/chrome/browser/android/vr_shell/vr_shell_gl.cc |
| index 23ee2a512d9626ae769e8a79956bd16b8fc66a66..a80259a2110e1957b2f14d73396a6ce192db4023 100644 |
| --- a/chrome/browser/android/vr_shell/vr_shell_gl.cc |
| +++ b/chrome/browser/android/vr_shell/vr_shell_gl.cc |
| @@ -84,6 +84,10 @@ static constexpr gvr::Rectf kHeadlockedBufferFov = {20.f, 20.f, 20.f, 20.f}; |
| static constexpr int kViewportListPrimaryOffset = 0; |
| static constexpr int kViewportListHeadlockedOffset = 2; |
| +// Buffer size large enough to handle the current backlog of poses which is |
| +// 2-3 frames. |
| +static constexpr int kPoseRingBufferSize = 8; |
|
dcheng
2017/01/18 23:58:07
Nit: unsigned?
mthiesse
2017/01/19 01:19:08
Done.
|
| + |
| // Magic numbers used to mark valid pose index values encoded in frame |
| // data. Must match the magic numbers used in blink's VRDisplay.cpp. |
| static constexpr std::array<uint8_t, 2> kWebVrPosePixelMagicNumbers{{42, 142}}; |
| @@ -174,7 +178,7 @@ VrShellGl::VrShellGl( |
| VrShellGl::~VrShellGl() { |
| vsync_task_.Cancel(); |
| if (!callback_.is_null()) |
| - callback_.Run(nullptr, base::TimeDelta()); |
| + callback_.Run(nullptr, base::TimeDelta(), -1); |
| if (binding_.is_bound()) { |
| main_thread_task_runner_->PostTask(FROM_HERE, base::Bind( |
| &VrShellDelegate::OnVRVsyncProviderRequest, delegate_provider_, |
| @@ -266,10 +270,12 @@ void VrShellGl::OnContentFrameAvailable() { |
| received_frame_ = true; |
| } |
| -bool VrShellGl::GetPixelEncodedPoseIndexByte(int* pose_index) { |
| - TRACE_EVENT0("gpu", "VrShellGl::GetPixelEncodedPoseIndex"); |
| +bool VrShellGl::GetPixelEncodedFrameIndex(int* frame_index) { |
| + TRACE_EVENT0("gpu", "VrShellGl::GetPixelEncodedFrameIndex"); |
| if (!received_frame_) { |
| - *pose_index = last_pose_; |
| + if (last_frame_index_ == -1) |
| + return false; |
| + *frame_index = last_frame_index_; |
| return true; |
| } |
| received_frame_ = false; |
| @@ -292,8 +298,8 @@ bool VrShellGl::GetPixelEncodedPoseIndexByte(int* pose_index) { |
| if (pixels[1] == kWebVrPosePixelMagicNumbers[0] && |
| pixels[2] == kWebVrPosePixelMagicNumbers[1]) { |
| // Pose is good. |
| - *pose_index = pixels[0]; |
| - last_pose_ = pixels[0]; |
| + *frame_index = pixels[0]; |
| + last_frame_index_ = pixels[0]; |
| return true; |
| } |
| VLOG(1) << "WebVR: reject decoded pose index " << (int)pixels[0] |
| @@ -609,7 +615,7 @@ void VrShellGl::DrawFrame() { |
| DrawWebVr(); |
| } |
| - int pose_index; |
| + int frame_index; |
| gvr::Mat4f head_pose; |
| // When using async reprojection, we need to know which pose was used in |
| @@ -620,8 +626,32 @@ void VrShellGl::DrawFrame() { |
| // pixels is an expensive operation. TODO(klausw,crbug.com/655722): stop |
| // doing this once we have working no-compositor rendering for WebVR. |
| if (web_vr_mode_ && gvr_api_->GetAsyncReprojectionEnabled() && |
| - GetPixelEncodedPoseIndexByte(&pose_index)) { |
| - head_pose = webvr_head_pose_[pose_index % kPoseRingBufferSize]; |
| + GetPixelEncodedFrameIndex(&frame_index)) { |
| + static_assert(!((kPoseRingBufferSize - 1) & kPoseRingBufferSize), |
| + "kPoseRingBufferSize must be a power of 2"); |
| + head_pose = webvr_head_pose_[frame_index % kPoseRingBufferSize]; |
| + // Process all pending_bounds_ changes targeted for before this frame, being |
| + // careful of wrapping frame indices. |
| + static constexpr int max = |
| + (int) std::numeric_limits<typeof(frame_index_)>::max(); |
|
dcheng
2017/01/18 23:58:07
Nit: let's be consistent and not mix signed/unsigned types here.
mthiesse
2017/01/19 01:19:08
Done.
|
| + static_assert(max > kPoseRingBufferSize * 2, |
| + "To detect wrapping, kPoseRingBufferSize must be smaller " |
| + "than half of frame_index_ range."); |
| + while (!pending_bounds_.empty()) { |
| + int index = pending_bounds_.front().first; |
| + if (index < frame_index) index += max; |
| + if (index > frame_index && index <= frame_index + kPoseRingBufferSize) |
| + break; |
| + |
| + const BoundsPair& bounds = pending_bounds_.front().second; |
| + webvr_left_viewport_->SetSourceUv(bounds.first); |
| + webvr_right_viewport_->SetSourceUv(bounds.second); |
| + pending_bounds_.pop(); |
| + } |
| + buffer_viewport_list_->SetBufferViewport(GVR_LEFT_EYE, |
| + *webvr_left_viewport_); |
| + buffer_viewport_list_->SetBufferViewport(GVR_RIGHT_EYE, |
| + *webvr_right_viewport_); |
| } else { |
| gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow(); |
| target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos; |
| @@ -862,11 +892,6 @@ void VrShellGl::DrawWebVr() { |
| glViewport(0, 0, render_size_primary_.width, render_size_primary_.height); |
| vr_shell_renderer_->GetWebVrRenderer()->Draw(webvr_texture_id_); |
| - |
| - buffer_viewport_list_->SetBufferViewport(GVR_LEFT_EYE, |
| - *webvr_left_viewport_); |
| - buffer_viewport_list_->SetBufferViewport(GVR_RIGHT_EYE, |
| - *webvr_right_viewport_); |
| } |
| void VrShellGl::OnTriggerEvent() { |
| @@ -894,10 +919,16 @@ void VrShellGl::SetWebVrMode(bool enabled) { |
| web_vr_mode_ = enabled; |
| } |
| -void VrShellGl::UpdateWebVRTextureBounds(const gvr::Rectf& left_bounds, |
| +void VrShellGl::UpdateWebVRTextureBounds(int16_t frame_index, |
| + const gvr::Rectf& left_bounds, |
| const gvr::Rectf& right_bounds) { |
| - webvr_left_viewport_->SetSourceUv(left_bounds); |
| - webvr_right_viewport_->SetSourceUv(right_bounds); |
| + if (frame_index < 0) { |
| + webvr_left_viewport_->SetSourceUv(left_bounds); |
| + webvr_right_viewport_->SetSourceUv(right_bounds); |
| + } else { |
| + pending_bounds_.emplace( |
| + std::make_pair(frame_index, std::make_pair(left_bounds, right_bounds))); |
| + } |
| } |
| gvr::GvrApi* VrShellGl::gvr_api() { |
| @@ -948,8 +979,7 @@ void VrShellGl::OnVSync() { |
| base::TimeDelta time = intervals * vsync_interval_; |
| if (!callback_.is_null()) { |
| - callback_.Run(GetPose(), time); |
| - callback_.Reset(); |
| + SendVSync(time, std::move(callback_)); |
| } else { |
| pending_vsync_ = true; |
| pending_time_ = time; |
| @@ -974,7 +1004,7 @@ void VrShellGl::GetVSync(const GetVSyncCallback& callback) { |
| return; |
| } |
| pending_vsync_ = false; |
| - callback.Run(GetPose(), pending_time_); |
| + SendVSync(pending_time_, std::move(callback)); |
|
dcheng
2017/01/18 23:58:07
Same comment about std::move() on a const ref being ineffective applies here.
mthiesse
2017/01/19 01:19:08
Done.
|
| } |
| void VrShellGl::UpdateVSyncInterval(long timebase_nanos, |
| @@ -995,8 +1025,11 @@ void VrShellGl::UpdateScene(std::unique_ptr<base::ListValue> commands) { |
| scene_->HandleCommands(std::move(commands), TimeInMicroseconds()); |
| } |
| -device::mojom::VRPosePtr VrShellGl::GetPose() { |
| - TRACE_EVENT0("input", "VrShellGl::GetPose"); |
| +void VrShellGl::SendVSync(const base::TimeDelta& time, |
| + GetVSyncCallback callback) { |
| + TRACE_EVENT0("input", "VrShellGl::SendVSync"); |
| + |
| + uint8_t frame_index = frame_index_++; |
| gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow(); |
| target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos; |
| @@ -1005,10 +1038,9 @@ device::mojom::VRPosePtr VrShellGl::GetPose() { |
| gvr_api_->GetHeadSpaceFromStartSpaceRotation(target_time); |
| head_mat = gvr_api_->ApplyNeckModel(head_mat, 1.0f); |
| - uint32_t pose_index = pose_index_++; |
| - webvr_head_pose_[pose_index % kPoseRingBufferSize] = head_mat; |
| + webvr_head_pose_[frame_index % kPoseRingBufferSize] = head_mat; |
| - return VrShell::VRPosePtrFromGvrPose(head_mat, pose_index); |
| + callback.Run(VrShell::VRPosePtrFromGvrPose(head_mat), time, frame_index); |
| } |
| } // namespace vr_shell |