Index: chrome/browser/android/vr_shell/vr_shell.cc |
diff --git a/chrome/browser/android/vr_shell/vr_shell.cc b/chrome/browser/android/vr_shell/vr_shell.cc |
index 642a0caf3d58dff62f8dc17f676845f69d1d6f45..ce24f2e386be2f0c214ddfb6bc8c3792c4b34e6f 100644 |
--- a/chrome/browser/android/vr_shell/vr_shell.cc |
+++ b/chrome/browser/android/vr_shell/vr_shell.cc |
@@ -276,6 +276,24 @@ void VrShell::UpdateController(const gvr::Vec3f& forward_vector) { |
} |
} |
+void VrShell::SetGvrPoseForWebVr(gvr::Mat4f pose, uint32_t pose_num) { |
dcheng
2016/10/04 23:17:53
Nit: pass |pose| by const ref
klausw
2016/10/04 23:56:42
Done.
|
+ webvr_head_pose_[pose_num % POSE_QUEUE_SIZE] = pose; |
+ webvr_newest_pose_num_ = pose_num; |
+} |
+ |
+int32_t GetPixelEncodedFrameNumber() { |
dcheng
2016/10/04 23:17:53
Let's be uniform with our types through (uint32 ev
klausw
2016/10/04 23:56:42
Done, using pose index consistently.
|
+ // Read the frame number encoded in a bottom left pixel as color values. |
+ // See also third_party/WebKit/Source/modules/vr/VRDisplay.cpp which |
+ // encodes the frame number, and device/vr/android/gvr/gvr_device.cc |
+ // which tracks poses. |
+ uint8_t pixels[4]; |
+ // Assume we're reading from the frambebuffer we just wrote to. |
+ // That's true currently, we may need to use glReadBuffer(GL_BACK) |
+ // or equivalent if the rendering setup changes in the future. |
+ glReadPixels(0, 0, 1, 1, GL_RGBA, GL_UNSIGNED_BYTE, pixels); |
+ return pixels[0] | (pixels[1] << 8) | (pixels[2] << 16); |
mthiesse
2016/10/04 21:19:16
Optionally, you really only need 3 bits, so you could encode the frame number in fewer color channels.
klausw
2016/10/04 23:08:14
Not sure that would make a big difference - for now, a full pixel read is needed either way.
|
+} |
+ |
void VrShell::DrawFrame(JNIEnv* env, const JavaParamRef<jobject>& obj) { |
buffer_viewport_list_->SetToRecommendedBufferViewports(); |
@@ -293,6 +311,21 @@ void VrShell::DrawFrame(JNIEnv* env, const JavaParamRef<jobject>& obj) { |
if (!webvr_secure_origin_) { |
DrawWebVrOverlay(target_time.monotonic_system_time_nanos); |
} |
+ |
+ // When using async reprojection, we need to know which pose was used in |
+ // the WebVR app for drawing this frame. Due to unknown amounts of |
+ // buffering in the compositor and SurfaceTexture, we read the pose number |
+ // from a corner pixel. There's no point in doing this for legacy |
+ // distortion rendering since that doesn't need a pose, and reading back |
+ // pixels is an expensive operation. TODO(klausw): stop doing this once we |
+ // have working no-compositor rendering for WebVR. |
+ if (gvr_api_->GetAsyncReprojectionEnabled()) { |
+ int32_t webvr_pose_frame = GetPixelEncodedFrameNumber(); |
+ // LOG << "klausw: newest_pose=" << webvr_newest_pose_num_ << |
mthiesse
2016/10/04 21:19:16
Remove this
klausw
2016/10/04 23:08:14
Done.
|
+ // " pixel=" << webvr_pose_frame << |
+ // "(" << webvr_pose_frame - webvr_newest_pose_num_ << ")"; |
+ head_pose = webvr_head_pose_[webvr_pose_frame % POSE_QUEUE_SIZE]; |
+ } |
} else { |
DrawVrShell(head_pose); |
} |