| Index: chrome/browser/android/vr_shell/vr_shell_gl.cc
|
| diff --git a/chrome/browser/android/vr_shell/vr_shell_gl.cc b/chrome/browser/android/vr_shell/vr_shell_gl.cc
|
| index 2fa6c6adb7e6dcd704d48872c7510be0b069c462..3e94c66d24713a46688a8ff916ac743eca5af100 100644
|
| --- a/chrome/browser/android/vr_shell/vr_shell_gl.cc
|
| +++ b/chrome/browser/android/vr_shell/vr_shell_gl.cc
|
| @@ -173,7 +173,7 @@ VrShellGl::VrShellGl(
|
| VrShellGl::~VrShellGl() {
|
| vsync_task_.Cancel();
|
| if (!callback_.is_null())
|
| - callback_.Run(nullptr, 0);
|
| + callback_.Run(nullptr, 0, -1);
|
| }
|
|
|
| void VrShellGl::Initialize() {
|
| @@ -260,10 +260,10 @@ void VrShellGl::OnContentFrameAvailable() {
|
| received_frame_ = true;
|
| }
|
|
|
| -bool VrShellGl::GetPixelEncodedPoseIndexByte(int* pose_index) {
|
| - TRACE_EVENT0("gpu", "VrShellGl::GetPixelEncodedPoseIndex");
|
| +bool VrShellGl::GetPixelEncodedFrameIndex(uint8_t* frame_index) {
|
| + TRACE_EVENT0("gpu", "VrShellGl::GetPixelEncodedFrameIndex");
|
| if (!received_frame_) {
|
| - *pose_index = last_pose_;
|
| + *frame_index = last_frame_index_;
|
| return true;
|
| }
|
| received_frame_ = false;
|
| @@ -286,8 +286,7 @@ bool VrShellGl::GetPixelEncodedPoseIndexByte(int* pose_index) {
|
| if (pixels[1] == kWebVrPosePixelMagicNumbers[0] &&
|
| pixels[2] == kWebVrPosePixelMagicNumbers[1]) {
|
| // Pose is good.
|
| - *pose_index = pixels[0];
|
| - last_pose_ = pixels[0];
|
| + *frame_index = pixels[0];
|
| return true;
|
| }
|
| VLOG(1) << "WebVR: reject decoded pose index " << (int)pixels[0]
|
| @@ -605,7 +604,7 @@ void VrShellGl::DrawFrame() {
|
| DrawWebVr();
|
| }
|
|
|
| - int pose_index;
|
| + uint8_t frame_index;
|
| gvr::Mat4f head_pose;
|
|
|
| // When using async reprojection, we need to know which pose was used in
|
| @@ -616,8 +615,28 @@ void VrShellGl::DrawFrame() {
|
| // pixels is an expensive operation. TODO(klausw,crbug.com/655722): stop
|
| // doing this once we have working no-compositor rendering for WebVR.
|
| if (web_vr_mode_ && gvr_api_->GetAsyncReprojectionEnabled() &&
|
| - GetPixelEncodedPoseIndexByte(&pose_index)) {
|
| - head_pose = webvr_head_pose_[pose_index % kPoseRingBufferSize];
|
| + GetPixelEncodedFrameIndex(&frame_index)) {
|
| + static_assert(!((kPoseRingBufferSize - 1) & kPoseRingBufferSize),
|
| + "kPoseRingBufferSize must be a power of 2");
|
| + head_pose = webvr_head_pose_[frame_index % kPoseRingBufferSize];
|
| + // Process all pending_bounds_ changes targeted for before this frame, being
|
| + // careful of wrapping frame indices.
|
| + static_assert(std::numeric_limits<decltype(frame_index_)>::max() >
|
| + kPoseRingBufferSize * 2,
|
| + "To detect wrapping, kPoseRingBufferSize must be smaller "
|
| + "than half of frame_index_ range.");
|
| + while (!pending_bounds_.empty() &&
|
| + pending_bounds_.front().first <= frame_index &&
|
| + frame_index - pending_bounds_.front().first < kPoseRingBufferSize) {
|
| + const BoundsPair& bounds = pending_bounds_.front().second;
|
| + webvr_left_viewport_->SetSourceUv(bounds.first);
|
| + webvr_right_viewport_->SetSourceUv(bounds.second);
|
| + pending_bounds_.pop();
|
| + }
|
| + buffer_viewport_list_->SetBufferViewport(GVR_LEFT_EYE,
|
| + *webvr_left_viewport_);
|
| + buffer_viewport_list_->SetBufferViewport(GVR_RIGHT_EYE,
|
| + *webvr_right_viewport_);
|
| } else {
|
| gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow();
|
| target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos;
|
| @@ -859,11 +878,6 @@ void VrShellGl::DrawWebVr() {
|
|
|
| glViewport(0, 0, render_size_primary_.width, render_size_primary_.height);
|
| vr_shell_renderer_->GetWebVrRenderer()->Draw(webvr_texture_id_);
|
| -
|
| - buffer_viewport_list_->SetBufferViewport(GVR_LEFT_EYE,
|
| - *webvr_left_viewport_);
|
| - buffer_viewport_list_->SetBufferViewport(GVR_RIGHT_EYE,
|
| - *webvr_right_viewport_);
|
| }
|
|
|
| void VrShellGl::OnTriggerEvent() {
|
| @@ -891,10 +905,16 @@ void VrShellGl::SetWebVrMode(bool enabled) {
|
| web_vr_mode_ = enabled;
|
| }
|
|
|
| -void VrShellGl::UpdateWebVRTextureBounds(const gvr::Rectf& left_bounds,
|
| +void VrShellGl::UpdateWebVRTextureBounds(int16_t frame_index,
|
| + const gvr::Rectf& left_bounds,
|
| const gvr::Rectf& right_bounds) {
|
| - webvr_left_viewport_->SetSourceUv(left_bounds);
|
| - webvr_right_viewport_->SetSourceUv(right_bounds);
|
| + if (frame_index < 0) {
|
| + webvr_left_viewport_->SetSourceUv(left_bounds);
|
| + webvr_right_viewport_->SetSourceUv(right_bounds);
|
| + } else {
|
| + pending_bounds_.emplace(frame_index,
|
| + std::make_pair(left_bounds, right_bounds));
|
| + }
|
| }
|
|
|
| gvr::GvrApi* VrShellGl::gvr_api() {
|
| @@ -945,8 +965,7 @@ void VrShellGl::OnVSync() {
|
|
|
| double time = (intervals * vsync_interval_).InSecondsF();
|
| if (!callback_.is_null()) {
|
| - callback_.Run(GetPose(), time);
|
| - callback_.Reset();
|
| + SendVSync(time, std::move(callback_));
|
| } else {
|
| pending_vsync_ = true;
|
| pending_time_ = time;
|
| @@ -966,7 +985,7 @@ void VrShellGl::GetVSync(const GetVSyncCallback& callback) {
|
| return;
|
| }
|
| pending_vsync_ = false;
|
| - callback.Run(GetPose(), pending_time_);
|
| + SendVSync(pending_time_, callback);
|
| }
|
|
|
| void VrShellGl::UpdateVSyncInterval(long timebase_nanos,
|
| @@ -987,8 +1006,10 @@ void VrShellGl::UpdateScene(std::unique_ptr<base::ListValue> commands) {
|
| scene_->HandleCommands(std::move(commands), TimeInMicroseconds());
|
| }
|
|
|
| -device::mojom::VRPosePtr VrShellGl::GetPose() {
|
| - TRACE_EVENT0("input", "VrShellGl::GetPose");
|
| +void VrShellGl::SendVSync(double time, GetVSyncCallback callback) {
|
| + TRACE_EVENT0("input", "VrShellGl::SendVSync");
|
| +
|
| + uint8_t frame_index = frame_index_++;
|
|
|
| gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow();
|
| target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos;
|
| @@ -997,10 +1018,9 @@ device::mojom::VRPosePtr VrShellGl::GetPose() {
|
| gvr_api_->GetHeadSpaceFromStartSpaceRotation(target_time);
|
| head_mat = gvr_api_->ApplyNeckModel(head_mat, 1.0f);
|
|
|
| - uint32_t pose_index = pose_index_++;
|
| - webvr_head_pose_[pose_index % kPoseRingBufferSize] = head_mat;
|
| + webvr_head_pose_[frame_index % kPoseRingBufferSize] = head_mat;
|
|
|
| - return VrShell::VRPosePtrFromGvrPose(head_mat, pose_index);
|
| + callback.Run(VrShell::VRPosePtrFromGvrPose(head_mat), time, frame_index);
|
| }
|
|
|
| } // namespace vr_shell
|
|
|