| Index: chrome/browser/android/vr_shell/vr_shell_gl.cc
|
| diff --git a/chrome/browser/android/vr_shell/vr_shell_gl.cc b/chrome/browser/android/vr_shell/vr_shell_gl.cc
|
| index b7e699248d55de307307762a6a2856de73e20b19..ec775194ad3c74b2a479193bd5274e15b47f2a79 100644
|
| --- a/chrome/browser/android/vr_shell/vr_shell_gl.cc
|
| +++ b/chrome/browser/android/vr_shell/vr_shell_gl.cc
|
| @@ -18,6 +18,7 @@
|
| #include "chrome/browser/android/vr_shell/vr_gl_util.h"
|
| #include "chrome/browser/android/vr_shell/vr_math.h"
|
| #include "chrome/browser/android/vr_shell/vr_shell.h"
|
| +#include "chrome/browser/android/vr_shell/vr_shell_command_buffer_gl.h"
|
| #include "chrome/browser/android/vr_shell/vr_shell_delegate.h"
|
| #include "chrome/browser/android/vr_shell/vr_shell_renderer.h"
|
| #include "device/vr/android/gvr/gvr_device.h"
|
| @@ -83,10 +84,6 @@ static constexpr int kViewportListHeadlockedOffset = 2;
|
| // 2-3 frames.
|
| static constexpr unsigned kPoseRingBufferSize = 8;
|
|
|
| -// Magic numbers used to mark valid pose index values encoded in frame
|
| -// data. Must match the magic numbers used in blink's VRDisplay.cpp.
|
| -static constexpr std::array<uint8_t, 2> kWebVrPosePixelMagicNumbers{{42, 142}};
|
| -
|
| float Distance(const gvr::Vec3f& vec1, const gvr::Vec3f& vec2) {
|
| float xdiff = (vec1.x - vec2.x);
|
| float ydiff = (vec1.y - vec2.y);
|
| @@ -235,22 +232,27 @@ void VrShellGl::InitializeGl(gfx::AcceleratedWidget window) {
|
| return;
|
| }
|
|
|
| - unsigned int textures[2];
|
| - glGenTextures(2, textures);
|
| + unsigned int textures[3];
|
| + glGenTextures(3, textures);
|
| ui_texture_id_ = textures[0];
|
| content_texture_id_ = textures[1];
|
| + webvr_texture_id_ = textures[2];
|
| ui_surface_texture_ = gl::SurfaceTexture::Create(ui_texture_id_);
|
| content_surface_texture_ = gl::SurfaceTexture::Create(content_texture_id_);
|
| + webvr_surface_texture_ = gl::SurfaceTexture::Create(webvr_texture_id_);
|
| CreateUiSurface();
|
| CreateContentSurface();
|
| + CreateWebVRSurface();
|
| ui_surface_texture_->SetFrameAvailableCallback(base::Bind(
|
| &VrShellGl::OnUIFrameAvailable, weak_ptr_factory_.GetWeakPtr()));
|
| content_surface_texture_->SetFrameAvailableCallback(base::Bind(
|
| &VrShellGl::OnContentFrameAvailable, weak_ptr_factory_.GetWeakPtr()));
|
| - content_surface_texture_->SetDefaultBufferSize(
|
| - content_tex_physical_size_.width, content_tex_physical_size_.height);
|
| + webvr_surface_texture_->SetFrameAvailableCallback(base::Bind(
|
| + &VrShellGl::OnWebVRFrameAvailable, weak_ptr_factory_.GetWeakPtr()));
|
| ui_surface_texture_->SetDefaultBufferSize(ui_tex_physical_size_.width,
|
| ui_tex_physical_size_.height);
|
| + content_surface_texture_->SetDefaultBufferSize(
|
| + content_tex_physical_size_.width, content_tex_physical_size_.height);
|
| InitializeRenderer();
|
|
|
| vsync_task_.Reset(base::Bind(&VrShellGl::OnVSync, base::Unretained(this)));
|
| @@ -259,6 +261,7 @@ void VrShellGl::InitializeGl(gfx::AcceleratedWidget window) {
|
| ready_to_draw_ = true;
|
| }
|
|
|
| +
|
| void VrShellGl::CreateContentSurface() {
|
| content_surface_ =
|
| base::MakeUnique<gl::ScopedJavaSurface>(content_surface_texture_.get());
|
| @@ -275,51 +278,78 @@ void VrShellGl::CreateUiSurface() {
|
| ui_surface_->j_surface().obj()));
|
| }
|
|
|
| +void VrShellGl::CreateWebVRSurface() {
|
| + VLOG(1) << __FUNCTION__ << ";;; content_tex_physical_size=" <<
|
| + content_tex_physical_size_.width << "x" <<
|
| + content_tex_physical_size_.height;
|
| + VLOG(1) << __FUNCTION__ << ";;; render_size_primary=" <<
|
| + render_size_primary_.width << "x" << render_size_primary_.height;
|
| + // FIXME: Get the correct size. The likely candidates
|
| + // (content_tex_physical_size_ and render_size_primary_, logged above)
|
| + // currently report 0x0 here, so hardcode the Pixel XL resolution for
|
| + // now. This also works on other devices, though it's not the most
|
| + // efficient.
|
| + webvr_surface_texture_->SetDefaultBufferSize(2560, 1440);
|
| +
|
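| + // Create the command buffer GL helper and hand it the WebVR
|
| + // SurfaceTexture; SubmitWebVRFrame uses it to copy submitted frames
|
| + // onto this surface.
|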
| + command_buffer_gl_ = base::MakeUnique<VrShellCommandBufferGl>();
|
| + command_buffer_gl_->CreateContext(webvr_surface_texture_);
|
| +}
|
| +
|
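| + // Handles a frame submitted from WebVR. The mailbox names the
|
| + // renderer's texture; copy it onto the WebVR surface, tell the main
|
| + // thread the transfer is done, and queue the frame index for
|
| + // OnWebVRFrameAvailable.
|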
| +void VrShellGl::SubmitWebVRFrame(int frame_index, const gpu::Mailbox& mailbox) {
|
| + TRACE_EVENT0("gpu", "VrShellGl::SubmitWebVRFrame");
|
| + bool drawn = command_buffer_gl_->CopyFrameToSurface(
|
| + frame_index, mailbox, !pending_frames_.empty());
|
| + // If we get here, we're committed to drawing and swapping buffers,
|
| + // so continue even if an error occurred.
|
| + if (drawn) {
|
| + VLOG(1) << __FUNCTION__ << ";;; add frame=" << frame_index <<
|
| + " to pending_frames, size=" << pending_frames_.size();
|
| + main_thread_task_runner_->PostTask(
|
| + FROM_HERE, base::Bind(&VrShell::OnSubmitWebVRFrameTransferred,
|
| + weak_vr_shell_, frame_index));
|
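| + // Queue the index so OnWebVRFrameAvailable can match the
|
| + // SurfaceTexture's frame-available callback back to this frame.
|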
| + pending_frames_.emplace(frame_index);
|
| + } else {
|
| + VLOG(1) << __FUNCTION__ << ";;; NOT DRAWN, FIXME!";
|
| + }
|
| +}
|
| +
|
| void VrShellGl::OnUIFrameAvailable() {
|
| + VLOG(1) << __FUNCTION__ << ";;; UI UpdateTexImage start";
|
| ui_surface_texture_->UpdateTexImage();
|
| + VLOG(1) << __FUNCTION__ << ";;; UI UpdateTexImage end";
|
| }
|
|
|
| void VrShellGl::OnContentFrameAvailable() {
|
| + VLOG(1) << __FUNCTION__ << ";;; Content UpdateTexImage start";
|
| content_surface_texture_->UpdateTexImage();
|
| received_frame_ = true;
|
| + VLOG(1) << __FUNCTION__ << ";;; Content UpdateTexImage end";
|
| }
|
|
|
| -bool VrShellGl::GetPixelEncodedFrameIndex(uint16_t* frame_index) {
|
| - TRACE_EVENT0("gpu", "VrShellGl::GetPixelEncodedFrameIndex");
|
| - if (!received_frame_) {
|
| - if (last_frame_index_ == (uint16_t)-1)
|
| - return false;
|
| - *frame_index = last_frame_index_;
|
| - return true;
|
| +void VrShellGl::OnWebVRFrameAvailable() {
|
| + TRACE_EVENT0("gpu", "VrShellGl::OnWebVRFrameAvailable");
|
| + VLOG(1) << __FUNCTION__ << ";;; pending count=" << pending_frames_.size();
|
| + // A "while" loop here would be a bad idea. It's legal to call
|
| + // UpdateTexImage repeatedly even if no frames are available, but
|
| + // that does *not* wait for a new frame; it just reuses the most
|
| + // recent one, which would throw off the pending frame count.
|
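| + // The SurfaceTexture may signal a frame before SubmitWebVRFrame has
|
| + // queued its index; count it and retry from OnVSync.
|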
| + if (pending_frames_.empty()) {
|
| + VLOG(1) << __FUNCTION__ << ";;; no pending frames? Please retry! " <<
|
| + "premature_received_frames " << premature_received_frames_ << " => " <<
|
| + (premature_received_frames_ + 1);
|
| + ++premature_received_frames_;
|
| + return;
|
| }
|
| - received_frame_ = false;
|
|
|
| - // Read the pose index encoded in a bottom left pixel as color values.
|
| - // See also third_party/WebKit/Source/modules/vr/VRDisplay.cpp which
|
| - // encodes the pose index, and device/vr/android/gvr/gvr_device.cc
|
| - // which tracks poses. Returns the low byte (0..255) if valid, or -1
|
| - // if not valid due to bad magic number.
|
| - uint8_t pixels[4];
|
| - // Assume we're reading from the framebuffer we just wrote to.
|
| - // That's true currently, we may need to use glReadBuffer(GL_BACK)
|
| - // or equivalent if the rendering setup changes in the future.
|
| - glReadPixels(0, 0, 1, 1, GL_RGBA, GL_UNSIGNED_BYTE, pixels);
|
| -
|
| - // Check for the magic number written by VRDevice.cpp on submit.
|
| - // This helps avoid glitches from garbage data in the render
|
| - // buffer that can appear during initialization or resizing. These
|
| - // often appear as flashes of all-black or all-white pixels.
|
| - if (pixels[1] == kWebVrPosePixelMagicNumbers[0] &&
|
| - pixels[2] == kWebVrPosePixelMagicNumbers[1]) {
|
| - // Pose is good.
|
| - *frame_index = pixels[0];
|
| - last_frame_index_ = pixels[0];
|
| - return true;
|
| - }
|
| - VLOG(1) << "WebVR: reject decoded pose index " << static_cast<int>(pixels[0])
|
| - << ", bad magic number " << static_cast<int>(pixels[1]) << ", "
|
| - << static_cast<int>(pixels[2]);
|
| - return false;
|
| + VLOG(1) << __FUNCTION__ << ";;; WebVR UpdateTexImage start";
|
| + webvr_surface_texture_->UpdateTexImage();
|
| + int frame_index = pending_frames_.front();
|
| + pending_frames_.pop();
|
| + VLOG(1) << __FUNCTION__ << ";;; WebVR UpdateTexImage end, got frame=" <<
|
| + frame_index;
|
| + main_thread_task_runner_->PostTask(
|
| + FROM_HERE, base::Bind(&VrShell::OnSubmitWebVRFrameRendered,
|
| + weak_vr_shell_, frame_index));
|
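| + // UpdateTexImage has latched the new image into webvr_texture_id_, so
|
| + // draw it right away.
|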
| + DrawFrame(frame_index);
|
| }
|
|
|
| void VrShellGl::GvrInit(gvr_context* gvr_api) {
|
| @@ -344,12 +374,6 @@ void VrShellGl::GvrInit(gvr_context* gvr_api) {
|
| }
|
|
|
| void VrShellGl::InitializeRenderer() {
|
| - // While WebVR is going through the compositor path, it shares
|
| - // the same texture ID. This will change once it gets its own
|
| - // surface, but store it separately to avoid future confusion.
|
| - // TODO(klausw,crbug.com/655722): remove this.
|
| - webvr_texture_id_ = content_texture_id_;
|
| -
|
| gvr_api_->InitializeGl();
|
| webvr_head_pose_.assign(kPoseRingBufferSize,
|
| gvr_api_->GetHeadSpaceFromStartSpaceRotation(
|
| @@ -630,7 +654,7 @@ void VrShellGl::SendGesture(InputTarget input_target,
|
| base::Bind(target, weak_vr_shell_, base::Passed(std::move(event))));
|
| }
|
|
|
| -void VrShellGl::DrawFrame() {
|
| +void VrShellGl::DrawFrame(int frame_index) {
|
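| + // frame_index identifies the WebVR frame being drawn, or is -1 when
|
| + // the draw is not associated with a WebVR frame (see OnVSync).
|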
| TRACE_EVENT0("gpu", "VrShellGl::DrawFrame");
|
|
|
| // Reset the viewport list to just the pair of viewports for the
|
| @@ -638,7 +662,9 @@ void VrShellGl::DrawFrame() {
|
| // DrawVrShell if needed.
|
| buffer_viewport_list_->SetToRecommendedBufferViewports();
|
|
|
| + TRACE_EVENT_BEGIN0("gpu", "VrShellGl::AcquireFrame");
|
| gvr::Frame frame = swap_chain_->AcquireFrame();
|
| + TRACE_EVENT_END0("gpu", "VrShellGl::AcquireFrame");
|
| if (!frame.is_valid()) {
|
| return;
|
| }
|
| @@ -647,7 +673,6 @@ void VrShellGl::DrawFrame() {
|
| DrawWebVr();
|
| }
|
|
|
| - uint16_t frame_index;
|
| gvr::Mat4f head_pose;
|
|
|
| // When using async reprojection, we need to know which pose was used in
|
| @@ -657,8 +682,7 @@ void VrShellGl::DrawFrame() {
|
| // distortion rendering since that doesn't need a pose, and reading back
|
| // pixels is an expensive operation. TODO(klausw,crbug.com/655722): stop
|
| // doing this once we have working no-compositor rendering for WebVR.
|
| - if (web_vr_mode_ && gvr_api_->GetAsyncReprojectionEnabled() &&
|
| - GetPixelEncodedFrameIndex(&frame_index)) {
|
| + if (web_vr_mode_ && gvr_api_->GetAsyncReprojectionEnabled()) {
|
| static_assert(!((kPoseRingBufferSize - 1) & kPoseRingBufferSize),
|
| "kPoseRingBufferSize must be a power of 2");
|
| head_pose = webvr_head_pose_[frame_index % kPoseRingBufferSize];
|
| @@ -671,6 +695,7 @@ void VrShellGl::DrawFrame() {
|
| "than half of frame_index_ range.");
|
| while (!pending_bounds_.empty()) {
|
| uint16_t index = pending_bounds_.front().first;
|
| + VLOG(1) << __FUNCTION__ << ";;; new bounds, index=" << index;
|
| // If index is less than the frame_index it's possible we've wrapped, so
|
| // we extend the range and 'un-wrap' to account for this.
|
| if (index < frame_index)
|
| @@ -711,16 +736,25 @@ void VrShellGl::DrawFrame() {
|
| const float screen_tilt = kDesktopScreenTiltDefault * M_PI / 180.0f;
|
| scene_->UpdateTransforms(screen_tilt, TimeInMicroseconds());
|
|
|
| - UpdateController(GetForwardVector(head_pose));
|
| + {
|
| + TRACE_EVENT0("gpu", "VrShellGl::UpdateController");
|
| + UpdateController(GetForwardVector(head_pose));
|
| + }
|
|
|
| - DrawVrShell(head_pose, frame);
|
| + // Drawing VrShell causes GL error 0x501 GL_INVALID_VALUE while in
|
| + // WebVR mode. FIXME.
|
| + if (!web_vr_mode_) DrawVrShell(head_pose, frame);
|
|
|
| - frame.Unbind();
|
| - frame.Submit(*buffer_viewport_list_, head_pose);
|
| + {
|
| + TRACE_EVENT0("gpu", "VrShellGl::Submit");
|
| + frame.Unbind();
|
| + frame.Submit(*buffer_viewport_list_, head_pose);
|
| + }
|
|
|
| // No need to swap buffers for surfaceless rendering.
|
| if (!surfaceless_rendering_) {
|
| // TODO(mthiesse): Support asynchronous SwapBuffers.
|
| + TRACE_EVENT0("gpu", "VrShellGl::SwapBuffers");
|
| surface_->SwapBuffers();
|
| }
|
| }
|
| @@ -999,6 +1033,7 @@ void VrShellGl::DrawWebVr() {
|
|
|
| glViewport(0, 0, render_size_primary_.width, render_size_primary_.height);
|
| vr_shell_renderer_->GetWebVrRenderer()->Draw(webvr_texture_id_);
|
| + VLOG(1) << __FUNCTION__ << ";;; WebVrRenderer done";
|
| }
|
|
|
| void VrShellGl::OnTriggerEvent() {
|
| @@ -1029,6 +1064,11 @@ void VrShellGl::SetWebVrMode(bool enabled) {
|
| void VrShellGl::UpdateWebVRTextureBounds(int16_t frame_index,
|
| const gvr::Rectf& left_bounds,
|
| const gvr::Rectf& right_bounds) {
|
| + VLOG(1) << __FUNCTION__ << ";;; frame_index=" << frame_index <<
|
| + " left=" << left_bounds.left << "," << left_bounds.bottom <<
|
| + "," << left_bounds.right << "," << left_bounds.top <<
|
| + " right=" << right_bounds.left << "," << right_bounds.bottom <<
|
| + "," << right_bounds.right << "," << right_bounds.top;
|
| if (frame_index < 0) {
|
| webvr_left_viewport_->SetSourceUv(left_bounds);
|
| webvr_right_viewport_->SetSourceUv(right_bounds);
|
| @@ -1068,6 +1108,14 @@ base::WeakPtr<VrShellGl> VrShellGl::GetWeakPtr() {
|
| }
|
|
|
| void VrShellGl::OnVSync() {
|
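| + // Drain WebVR frames whose frame-available signal arrived before their
|
| + // index was queued (see OnWebVRFrameAvailable).
|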
| + while (premature_received_frames_ > 0) {
|
| + VLOG(1) << __FUNCTION__ << ";;; Retrying premature received frame " <<
|
| + premature_received_frames_ << " => " <<
|
| + (premature_received_frames_ - 1);
|
| + --premature_received_frames_;
|
| + OnWebVRFrameAvailable();
|
| + }
|
| +
|
| base::TimeTicks now = base::TimeTicks::Now();
|
| base::TimeTicks target;
|
|
|
| @@ -1082,12 +1130,15 @@ void VrShellGl::OnVSync() {
|
|
|
| base::TimeDelta time = intervals * vsync_interval_;
|
| if (!callback_.is_null()) {
|
| + VLOG(1) << __FUNCTION__ << ";;; vsync B, interval=" << vsync_interval_;
|
| SendVSync(time, base::ResetAndReturn(&callback_));
|
| } else {
|
| pending_vsync_ = true;
|
| pending_time_ = time;
|
| }
|
| - DrawFrame();
|
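| + // In WebVR mode, DrawFrame is driven by OnWebVRFrameAvailable rather
|
| + // than by the vsync tick.
|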
| + if (!web_vr_mode_) {
|
| + DrawFrame(-1);
|
| + }
|
| }
|
|
|
| void VrShellGl::OnRequest(device::mojom::VRVSyncProviderRequest request) {
|
| @@ -1107,7 +1158,11 @@ void VrShellGl::GetVSync(const GetVSyncCallback& callback) {
|
| return;
|
| }
|
| pending_vsync_ = false;
|
| + VLOG(1) << __FUNCTION__ << ";;; vsync A, pending time=" << pending_time_;
|
| SendVSync(pending_time_, callback);
|
| +
|
| + // FIXME: A glFinish here triggers flickering?
|
| + // if (web_vr_mode_) glFinish();
|
| }
|
|
|
| void VrShellGl::UpdateVSyncInterval(int64_t timebase_nanos,
|
| @@ -1116,6 +1171,7 @@ void VrShellGl::UpdateVSyncInterval(int64_t timebase_nanos,
|
| vsync_timebase_ += base::TimeDelta::FromMicroseconds(timebase_nanos / 1000);
|
| vsync_interval_ = base::TimeDelta::FromSecondsD(interval_seconds);
|
| vsync_task_.Reset(base::Bind(&VrShellGl::OnVSync, base::Unretained(this)));
|
| + VLOG(1) << __FUNCTION__ << ";;; vsync_interval=" << vsync_interval_;
|
| OnVSync();
|
| }
|
|
|
| @@ -1133,6 +1189,8 @@ void VrShellGl::SendVSync(base::TimeDelta time,
|
| TRACE_EVENT0("input", "VrShellGl::SendVSync");
|
|
|
| uint8_t frame_index = frame_index_++;
|
| + VLOG(1) << __FUNCTION__ << ";;; vsync for frame=" <<
|
| + static_cast<int>(frame_index);
|
|
|
| gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow();
|
| target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos;
|
|
|