Chromium Code Reviews

Index: chrome/browser/android/vr_shell/vr_shell_gl.cc
diff --git a/chrome/browser/android/vr_shell/vr_shell_gl.cc b/chrome/browser/android/vr_shell/vr_shell_gl.cc
index 14a22becd74bfc2730231dd912f501a23375bc54..6bd8fd32cc178bf762c018f687976a7c0949c58e 100644
--- a/chrome/browser/android/vr_shell/vr_shell_gl.cc
+++ b/chrome/browser/android/vr_shell/vr_shell_gl.cc
@@ -82,6 +82,10 @@ static constexpr int kViewportListHeadlockedOffset = 2;
 // 2-3 frames.
 static constexpr unsigned kPoseRingBufferSize = 8;
+// Number of frames to use for sliding averages for pose timings,
+// as used for estimating prediction times.
+static constexpr unsigned kWebVRSlidingAverageSize = 5;
+
 // Criteria for considering holding the app button in combination with
 // controller movement as a gesture.
 static constexpr float kMinAppButtonGestureAngleRad = 0.25;
@@ -226,6 +230,8 @@ VrShellGl::VrShellGl(VrBrowserInterface* browser,
 #if DCHECK_IS_ON()
       fps_meter_(new FPSMeter()),
 #endif
+      webvr_js_time_(new SlidingAverage(kWebVRSlidingAverageSize)),
+      webvr_render_time_(new SlidingAverage(kWebVRSlidingAverageSize)),
       weak_ptr_factory_(this) {
   GvrInit(gvr_api);
 }
@@ -1524,9 +1530,20 @@ void VrShellGl::SendVSync(base::TimeDelta time,
   TRACE_EVENT1("input", "VrShellGl::SendVSync", "frame", frame_index);
+  int64_t frame_time_micros = vsync_interval_.InMicroseconds();

cjgrant 2017/05/24 13:37:30:
How do you feel about dropping this block into VrS

klausw 2017/05/24 16:32:23:
Done, I've also added TRACE_COUNTER output so that

+  // If we aim to submit at vsync, that frame will start scanning out
+  // one vsync later. Add a half frame to split the difference between
+  // left and right eye.
+  int64_t overhead_micros = frame_time_micros * 3 / 2;
+  int64_t expected_frame_time =
+      webvr_js_time_->GetAverageOrDefault(frame_time_micros) +
+      webvr_render_time_->GetAverageOrDefault(frame_time_micros);
+  int64_t prediction_nanos = (expected_frame_time + overhead_micros) * 1000;
+
   vr::Mat4f head_mat;
   device::mojom::VRPosePtr pose =
-      device::GvrDelegate::GetVRPosePtrWithNeckModel(gvr_api_.get(), &head_mat);
+      device::GvrDelegate::GetVRPosePtrWithNeckModel(gvr_api_.get(), &head_mat,
+                                                     prediction_nanos);
   webvr_head_pose_[frame_index % kPoseRingBufferSize] = head_mat;
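
To make the prediction-time arithmetic concrete: assuming a 60 Hz vsync (vsync_interval_ of roughly 16,667 microseconds) and empty sliding averages, so both GetAverageOrDefault() calls fall back to one frame time, the pose is requested about 58 ms ahead. The standalone sketch below just replays that arithmetic; the 60 Hz figure and the fallback values are assumptions, and real numbers depend on the device's vsync interval and the measured JS and render times.

#include <cstdint>
#include <cstdio>

int main() {
  // Assumed 60 Hz vsync interval in microseconds.
  int64_t frame_time_micros = 16667;
  // Scanout starts one vsync after submit; add half a frame to split the
  // difference between the left and right eye scanout times.
  int64_t overhead_micros = frame_time_micros * 3 / 2;  // 25,000 us
  // With no samples yet, JS and render each default to one frame time.
  int64_t expected_frame_time = frame_time_micros * 2;  // 33,334 us
  int64_t prediction_nanos = (expected_frame_time + overhead_micros) * 1000;
  printf("prediction = %lld ns (~%.1f ms)\n",
         static_cast<long long>(prediction_nanos), prediction_nanos / 1e6);
  // Prints: prediction = 58334000 ns (~58.3 ms)
  return 0;
}

Once measured samples accumulate, expected_frame_time shrinks or grows toward the actual JS-plus-render duration, so the prediction horizon tracks the app's real frame timing instead of a fixed multiple of the vsync interval.
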