Index: chrome/browser/android/vr_shell/vr_shell_gl.cc |
diff --git a/chrome/browser/android/vr_shell/vr_shell_gl.cc b/chrome/browser/android/vr_shell/vr_shell_gl.cc |
index effea8744a33b0116316f64481f8d79c751d5497..3f18f80bee498e4ca43963fac2ff2b662e002941 100644 |
--- a/chrome/browser/android/vr_shell/vr_shell_gl.cc |
+++ b/chrome/browser/android/vr_shell/vr_shell_gl.cc |
@@ -17,6 +17,8 @@ |
#include "chrome/browser/android/vr_shell/vr_shell.h" |
#include "chrome/browser/android/vr_shell/vr_shell_renderer.h" |
#include "third_party/WebKit/public/platform/WebInputEvent.h" |
+#include "ui/gfx/transform.h" |
+#include "ui/gfx/transform_util.h" |
#include "ui/gfx/vsync_provider.h" |
#include "ui/gl/android/scoped_java_surface.h" |
#include "ui/gl/android/surface_texture.h" |
@@ -28,7 +30,8 @@ |
namespace vr_shell { |
namespace { |
-// Constant taken from treasure_hunt demo. |
+// TODO(mthiesse): If gvr::PlatformInfo().GetPosePredictionTime() is ever |
+// exposed, use that instead (it defaults to 50ms on most platforms). |
static constexpr long kPredictionTimeWithoutVsyncNanos = 50000000; |
static constexpr float kZNear = 0.1f; |
@@ -142,7 +145,7 @@ enum class ViewerType { |
VIEWER_TYPE_MAX, |
}; |
-int GetPixelEncodedPoseIndexByte() { |
+bool GetPixelEncodedPoseIndexByte(int* pose_index) { |
TRACE_EVENT0("gpu", "VrShellGl::GetPixelEncodedPoseIndex"); |
// Read the pose index encoded in a bottom left pixel as color values. |
// See also third_party/WebKit/Source/modules/vr/VRDisplay.cpp which |
@@ -162,11 +165,13 @@ int GetPixelEncodedPoseIndexByte() { |
if (pixels[1] == kWebVrPosePixelMagicNumbers[0] && |
pixels[2] == kWebVrPosePixelMagicNumbers[1]) { |
// Pose is good. |
- return pixels[0]; |
+ *pose_index = pixels[0]; |
+ return true; |
} |
- VLOG(1) << "WebVR: reject decoded pose index " << (int)pixels[0] << |
- ", bad magic number " << (int)pixels[1] << ", " << (int)pixels[2]; |
- return -1; |
+ VLOG(1) << "WebVR: reject decoded pose index " << (int)pixels[0] |
+ << ", bad magic number " << (int)pixels[1] << ", " |
+ << (int)pixels[2]; |
+ return false; |
} |
int64_t TimeInMicroseconds() { |
@@ -204,11 +209,6 @@ VrShellGl::~VrShellGl() { |
} |
void VrShellGl::Initialize() { |
- gvr::Mat4f identity; |
- SetIdentityM(identity); |
- webvr_head_pose_.resize(kPoseRingBufferSize, identity); |
- webvr_head_pose_valid_.resize(kPoseRingBufferSize, false); |
- |
scene_.reset(new UiScene); |
if (surfaceless_rendering_) { |
@@ -251,16 +251,6 @@ void VrShellGl::InitializeGl(gfx::AcceleratedWidget window) { |
return; |
} |
- // TODO(mthiesse): We don't appear to have a VSync provider ever here. This is |
- // sort of okay, because the GVR swap chain will block if we render too fast, |
- // but we should address this properly. |
- if (surface_->GetVSyncProvider()) { |
- surface_->GetVSyncProvider()->GetVSyncParameters(base::Bind( |
- &VrShellGl::UpdateVSyncParameters, weak_ptr_factory_.GetWeakPtr())); |
- } else { |
- LOG(ERROR) << "No VSync Provider"; |
- } |
- |
unsigned int textures[2]; |
glGenTextures(2, textures); |
ui_texture_id_ = textures[0]; |
@@ -290,6 +280,9 @@ void VrShellGl::InitializeGl(gfx::AcceleratedWidget window) { |
draw_task_.Reset(base::Bind(&VrShellGl::DrawFrame, base::Unretained(this))); |
ScheduleNextDrawFrame(); |
+ main_thread_task_runner_->PostTask(FROM_HERE, base::Bind( |
+ &VrShell::OnVRVsyncProviderReady, weak_vr_shell_)); |
+ |
ready_to_draw_ = true; |
} |
@@ -328,12 +321,12 @@ void VrShellGl::InitializeRenderer() { |
// surface, but store it separately to avoid future confusion. |
// TODO(klausw,crbug.com/655722): remove this. |
webvr_texture_id_ = content_texture_id_; |
- // Out of paranoia, explicitly reset the "pose valid" flags to false |
- // from the GL thread. The constructor ran in the UI thread. |
- // TODO(klausw,crbug.com/655722): remove this. |
- webvr_head_pose_valid_.assign(kPoseRingBufferSize, false); |
gvr_api_->InitializeGl(); |
+ webvr_head_pose_.assign(kPoseRingBufferSize, |
+ gvr_api_->GetHeadSpaceFromStartSpaceRotation( |
+ gvr::GvrApi::GetTimePointNow())); |
+ |
std::vector<gvr::BufferSpec> specs; |
// For kFramePrimaryBuffer (primary VrShell and WebVR content) |
specs.push_back(gvr_api_->CreateBufferSpec()); |
@@ -593,36 +586,39 @@ void VrShellGl::SendGesture(InputTarget input_target, |
base::Passed(std::move(event)))); |
} |
-void VrShellGl::SetGvrPoseForWebVr(const gvr::Mat4f& pose, uint32_t pose_num) { |
- webvr_head_pose_[pose_num % kPoseRingBufferSize] = pose; |
- webvr_head_pose_valid_[pose_num % kPoseRingBufferSize] = true; |
-} |
- |
-bool VrShellGl::WebVrPoseByteIsValid(int pose_index_byte) { |
- if (pose_index_byte < 0) { |
- return false; |
- } |
- if (!webvr_head_pose_valid_[pose_index_byte % kPoseRingBufferSize]) { |
- VLOG(1) << "WebVR: reject decoded pose index " << pose_index_byte << |
- ", not a valid pose"; |
- return false; |
- } |
- return true; |
-} |
- |
void VrShellGl::DrawFrame() { |
TRACE_EVENT0("gpu", "VrShellGl::DrawFrame"); |
+ |
// Reset the viewport list to just the pair of viewports for the |
// primary buffer each frame. Head-locked viewports get added by |
// DrawVrShell if needed. |
buffer_viewport_list_->SetToRecommendedBufferViewports(); |
gvr::Frame frame = swap_chain_->AcquireFrame(); |
- gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow(); |
- target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos; |
- gvr::Mat4f head_pose = |
- gvr_api_->GetHeadSpaceFromStartSpaceRotation(target_time); |
+ frame.BindBuffer(kFramePrimaryBuffer); |
+ if (web_vr_mode_) { |
+ DrawWebVr(); |
+ } |
+ |
+ int pose_index; |
+ gvr::Mat4f head_pose; |
+ |
+ // When using async reprojection, we need to know which pose was used in |
+ // the WebVR app for drawing this frame. Due to unknown amounts of |
+ // buffering in the compositor and SurfaceTexture, we read the pose number |
+ // from a corner pixel. There's no point in doing this for legacy |
+ // distortion rendering since that doesn't need a pose, and reading back |
+ // pixels is an expensive operation. TODO(klausw,crbug.com/655722): stop |
+ // doing this once we have working no-compositor rendering for WebVR. |
+ if (web_vr_mode_ && gvr_api_->GetAsyncReprojectionEnabled() && |
+ GetPixelEncodedPoseIndexByte(&pose_index)) { |
+ head_pose = webvr_head_pose_[pose_index % kPoseRingBufferSize]; |
+ } else { |
+ gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow(); |
+ target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos; |
+ head_pose = gvr_api_->GetHeadSpaceFromStartSpaceRotation(target_time); |
+ } |
gvr::Vec3f position = GetTranslation(head_pose); |
if (position.x == 0.0f && position.y == 0.0f && position.z == 0.0f) { |
@@ -634,47 +630,12 @@ void VrShellGl::DrawFrame() { |
gvr_api_->ApplyNeckModel(head_pose, 1.0f); |
} |
- frame.BindBuffer(kFramePrimaryBuffer); |
- |
// Update the render position of all UI elements (including desktop). |
const float screen_tilt = kDesktopScreenTiltDefault * M_PI / 180.0f; |
scene_->UpdateTransforms(screen_tilt, TimeInMicroseconds()); |
UpdateController(GetForwardVector(head_pose)); |
- if (web_vr_mode_) { |
- DrawWebVr(); |
- |
- // When using async reprojection, we need to know which pose was used in |
- // the WebVR app for drawing this frame. Due to unknown amounts of |
- // buffering in the compositor and SurfaceTexture, we read the pose number |
- // from a corner pixel. There's no point in doing this for legacy |
- // distortion rendering since that doesn't need a pose, and reading back |
- // pixels is an expensive operation. TODO(klausw,crbug.com/655722): stop |
- // doing this once we have working no-compositor rendering for WebVR. |
- if (gvr_api_->GetAsyncReprojectionEnabled()) { |
- int pose_index_byte = GetPixelEncodedPoseIndexByte(); |
- if (WebVrPoseByteIsValid(pose_index_byte)) { |
- // We have a valid pose, use it for reprojection. |
- webvr_left_viewport_->SetReprojection(GVR_REPROJECTION_FULL); |
- webvr_right_viewport_->SetReprojection(GVR_REPROJECTION_FULL); |
- head_pose = webvr_head_pose_[pose_index_byte % kPoseRingBufferSize]; |
- // We can't mark the used pose as invalid since unfortunately |
- // we have to reuse them. The compositor will re-submit stale |
- // frames on vsync, and we can't tell that this has happened |
- // until we've read the pose index from it, and at that point |
- // it's too late to skip rendering. |
- } else { |
- // If we don't get a valid frame ID back we shouldn't attempt |
- // to reproject by an invalid matrix, so turn off reprojection |
- // instead. Invalid poses can permanently break reprojection |
- // for this GVR instance: http://crbug.com/667327 |
- webvr_left_viewport_->SetReprojection(GVR_REPROJECTION_NONE); |
- webvr_right_viewport_->SetReprojection(GVR_REPROJECTION_NONE); |
- } |
- } |
- } |
- |
DrawVrShell(head_pose, frame); |
frame.Unbind(); |
@@ -973,12 +934,6 @@ base::WeakPtr<VrShellGl> VrShellGl::GetWeakPtr() { |
return weak_ptr_factory_.GetWeakPtr(); |
} |
-void VrShellGl::UpdateVSyncParameters(const base::TimeTicks timebase, |
- const base::TimeDelta interval) { |
- vsync_timebase_ = timebase; |
- vsync_interval_ = interval; |
-} |
- |
void VrShellGl::ScheduleNextDrawFrame() { |
base::TimeTicks now = base::TimeTicks::Now(); |
base::TimeTicks target; |
@@ -990,8 +945,21 @@ void VrShellGl::ScheduleNextDrawFrame() { |
int64_t intervals = (target - vsync_timebase_) / vsync_interval_; |
target = vsync_timebase_ + intervals * vsync_interval_; |
} |
- |
task_runner_->PostDelayedTask(FROM_HERE, draw_task_.callback(), target - now); |
+ |
+ if (!client_.is_bound()) |
+ return; |
+ int64_t intervals = (target - vsync_timebase_) / vsync_interval_; |
+ double time = (intervals * vsync_interval_).InSecondsF(); |
+ if (!pending_ack_) { |
+ client_->OnVSync( |
+ GetPose(), time, |
+ base::Bind(&VrShellGl::OnVSyncAck, weak_ptr_factory_.GetWeakPtr())); |
+ pending_ack_ = true; |
+ } else { |
+ pending_vsync_ = true; |
+ pending_time_ = time; |
+ } |
} |
void VrShellGl::ForceExitVr() { |
@@ -999,8 +967,92 @@ void VrShellGl::ForceExitVr() { |
FROM_HERE, base::Bind(&VrShell::ForceExitVr, weak_vr_shell_)); |
} |
+void VrShellGl::OnVRVSyncProviderClientConnected( |
+ device::mojom::VRVSyncProviderClientPtr client) { |
+ client_.Bind(client.PassInterface()); |
+} |
+ |
+void VrShellGl::OnVSyncAck() { |
+ pending_ack_ = false; |
+ if (!pending_vsync_) { |
+ triggered_vsync_from_ack_ = false; |
+ return; |
+ } |
+ pending_vsync_ = false; |
+ if (triggered_vsync_from_ack_) |
+ return; |
+ if (!client_.is_bound()) |
+ return; |
+ |
+ // Allow VSyncAck to trigger a VSync in the case that we missed the previous |
+ // frame to make it less likely multiple frames are missed in a row. However, |
+ // we don't want to get into a state where VSync is only ever triggered from |
+ // VSyncAck so we make sure the next VSync is really from a VSync and not an |
+ // Ack. |
+ client_->OnVSync( |
+ GetPose(), pending_time_, |
+ base::Bind(&VrShellGl::OnVSyncAck, weak_ptr_factory_.GetWeakPtr())); |
+ pending_ack_ = true; |
+ triggered_vsync_from_ack_ = true; |
+} |
+ |
+void VrShellGl::UpdateVSyncInterval(long timebase_nanos, |
+ double interval_seconds) { |
+ vsync_timebase_ = base::TimeTicks(); |
+ vsync_timebase_ += base::TimeDelta::FromMicroseconds(timebase_nanos / 1000); |
+ vsync_interval_ = base::TimeDelta::FromSecondsD(interval_seconds); |
+} |
+ |
void VrShellGl::UpdateScene(std::unique_ptr<base::ListValue> commands) { |
scene_->HandleCommands(std::move(commands), TimeInMicroseconds()); |
} |
+device::mojom::VRPosePtr VrShellGl::GetPose() { |
+  TRACE_EVENT0("input", "VrShellGl::GetPose"); |
+ |
+ device::mojom::VRPosePtr pose = device::mojom::VRPose::New(); |
+ |
+ pose->timestamp = base::Time::Now().ToJsTime(); |
+ |
+ // Increment pose frame counter always, even if it's a faked pose. |
+ pose->poseIndex = ++pose_index_; |
+ |
+ pose->orientation.emplace(4); |
+ |
+ gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow(); |
+ target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos; |
+ |
+ gvr::Mat4f head_mat = |
+ gvr_api_->GetHeadSpaceFromStartSpaceRotation(target_time); |
+ head_mat = gvr_api_->ApplyNeckModel(head_mat, 1.0f); |
+ |
+ // Save the underlying GVR pose for use by rendering. It can't use a |
+ // VRPosePtr since that's a different data type. |
+ webvr_head_pose_[pose_index_ % kPoseRingBufferSize] = head_mat; |
+ |
+ gfx::Transform inv_transform( |
+ head_mat.m[0][0], head_mat.m[0][1], head_mat.m[0][2], head_mat.m[0][3], |
+ head_mat.m[1][0], head_mat.m[1][1], head_mat.m[1][2], head_mat.m[1][3], |
+ head_mat.m[2][0], head_mat.m[2][1], head_mat.m[2][2], head_mat.m[2][3], |
+ head_mat.m[3][0], head_mat.m[3][1], head_mat.m[3][2], head_mat.m[3][3]); |
+ |
+ gfx::Transform transform; |
+ if (inv_transform.GetInverse(&transform)) { |
+ gfx::DecomposedTransform decomposed_transform; |
+ gfx::DecomposeTransform(&decomposed_transform, transform); |
+ |
+ pose->orientation.value()[0] = decomposed_transform.quaternion[0]; |
+ pose->orientation.value()[1] = decomposed_transform.quaternion[1]; |
+ pose->orientation.value()[2] = decomposed_transform.quaternion[2]; |
+ pose->orientation.value()[3] = decomposed_transform.quaternion[3]; |
+ |
+ pose->position.emplace(3); |
+ pose->position.value()[0] = decomposed_transform.translate[0]; |
+ pose->position.value()[1] = decomposed_transform.translate[1]; |
+ pose->position.value()[2] = decomposed_transform.translate[2]; |
+ } |
+ |
+ return pose; |
+} |
+ |
} // namespace vr_shell |