Chromium Code Reviews

Side by Side Diff: chrome/browser/android/vr_shell/vr_shell.cc

Issue 2384593002: Encode frame number in pixel data for pose sync (Closed)
Patch Set: dcheng #22, mthiesse #31: rebase, simpler ring buffer initialization, fix type Created 4 years, 2 months ago
Lines added in this patch set are marked with a leading +.
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "chrome/browser/android/vr_shell/vr_shell.h"

#include <thread>

#include "chrome/browser/android/vr_shell/ui_scene.h"
#include "chrome/browser/android/vr_shell/vr_compositor.h"
(...skipping 141 matching lines...)
  float screen_height = kScreenHeightRatio * desktop_height_;
  std::unique_ptr<ContentRectangle> rect(new ContentRectangle());
  rect->id = kBrowserUiElementId;
  rect->size = {screen_width, screen_height, 1.0f};
  rect->translation = kDesktopPositionDefault;
  scene_.AddUiElement(rect);

  desktop_plane_ = scene_.GetUiElementById(kBrowserUiElementId);

  LoadUIContent();
+
+  gvr::Mat4f identity;
+  SetIdentityM(identity);
+  webvr_head_pose_.resize(kPoseRingBufferSize, identity);
}

void VrShell::UpdateCompositorLayers(JNIEnv* env,
                                     const JavaParamRef<jobject>& obj) {
  content_compositor_->SetLayer(content_cvc_);
  ui_compositor_->SetLayer(ui_cvc_);
}

void VrShell::Destroy(JNIEnv* env, const JavaParamRef<jobject>& obj) {
  delete this;
(...skipping 169 matching lines...)
  current_input_target_->ProcessUpdatedGesture(*gesture.get());

  if (original_type == WebInputEvent::GestureTap) {
    gesture->type = WebInputEvent::GestureTap;
    gesture->details.buttons.pos.x = pixel_x;
    gesture->details.buttons.pos.y = pixel_y;
    current_input_target_->ProcessUpdatedGesture(*gesture.get());
  }
}

+void VrShell::SetGvrPoseForWebVr(const gvr::Mat4f& pose, uint32_t pose_num) {
+  webvr_head_pose_[pose_num % kPoseRingBufferSize] = pose;
+}
+
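SetGvrPoseForWebVr() above, together with the webvr_head_pose_ initialization added to the constructor, forms a fixed-size ring buffer keyed by frame number: each pose is written at pose_num % kPoseRingBufferSize, and the same index arithmetic recovers it later as long as fewer than kPoseRingBufferSize frames are in flight. A minimal standalone sketch of that scheme follows; the PoseRingBuffer name, the Mat4 stand-in for gvr::Mat4f, and the buffer size of 8 are illustrative assumptions, not values from this patch.

#include <array>
#include <cstdint>

// Stand-in for gvr::Mat4f so the sketch compiles on its own.
struct Mat4 {
  float m[4][4];
};

// Illustrative size; the patch's kPoseRingBufferSize is defined elsewhere.
constexpr uint32_t kPoseRingBufferSize = 8;

class PoseRingBuffer {
 public:
  // Record the pose that was used to render frame |frame_index|.
  void Put(uint32_t frame_index, const Mat4& pose) {
    poses_[frame_index % kPoseRingBufferSize] = pose;
  }

  // Recover that pose later, e.g. after decoding |frame_index| from a pixel.
  // Valid only while fewer than kPoseRingBufferSize frames are in flight;
  // older slots have already been overwritten by newer frames.
  const Mat4& Get(uint32_t frame_index) const {
    return poses_[frame_index % kPoseRingBufferSize];
  }

 private:
  std::array<Mat4, kPoseRingBufferSize> poses_ = {};
};

The modulo indexing never blocks the producer; if a consumer falls more than kPoseRingBufferSize frames behind, it simply reads a pose from a newer frame instead of the one it asked for.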
+uint32_t GetPixelEncodedPoseIndex() {
+  // Read the pose index encoded in the bottom-left pixel as color values.
+  // See also third_party/WebKit/Source/modules/vr/VRDisplay.cpp, which
+  // encodes the pose index, and device/vr/android/gvr/gvr_device.cc,
+  // which tracks poses.
+  uint8_t pixels[4];
+  // Assume we're reading from the framebuffer we just wrote to.
+  // That's true currently; we may need to use glReadBuffer(GL_BACK)
+  // or equivalent if the rendering setup changes in the future.
+  glReadPixels(0, 0, 1, 1, GL_RGBA, GL_UNSIGNED_BYTE, pixels);
+  return pixels[0] | (pixels[1] << 8) | (pixels[2] << 16);
+}
+
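GetPixelEncodedPoseIndex() above assumes the submitting side wrote the frame number into the bottom-left pixel little-endian across the R, G, and B channels. The actual encoder lives in third_party/WebKit/Source/modules/vr/VRDisplay.cpp as the comment notes; the pair below is only an editorial sketch of the byte layout that the decode expression expects.

#include <cstdint>

// Pack the low 24 bits of |index| into an RGBA pixel (alpha unused).
void EncodePoseIndexToPixel(uint32_t index, uint8_t rgba[4]) {
  rgba[0] = index & 0xFF;          // R carries bits 0-7
  rgba[1] = (index >> 8) & 0xFF;   // G carries bits 8-15
  rgba[2] = (index >> 16) & 0xFF;  // B carries bits 16-23
  rgba[3] = 0xFF;                  // A is ignored by the decoder
}

// Mirror of the expression in GetPixelEncodedPoseIndex() above.
uint32_t DecodePoseIndexFromPixel(const uint8_t rgba[4]) {
  return rgba[0] | (rgba[1] << 8) | (rgba[2] << 16);
}

Note that glReadPixels() reads from whatever framebuffer is currently bound for reading, which is why the comment above stresses that the call must happen while the just-rendered WebVR framebuffer is still bound.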
void VrShell::DrawFrame(JNIEnv* env, const JavaParamRef<jobject>& obj) {
  buffer_viewport_list_->SetToRecommendedBufferViewports();

  gvr::Frame frame = swap_chain_->AcquireFrame();
  gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow();
  target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos;

  gvr::Mat4f head_pose =
      gvr_api_->GetHeadSpaceFromStartSpaceRotation(target_time);

  gvr::Vec3f position = GetTranslation(head_pose);
  if (position.x == 0.0f && position.y == 0.0f && position.z == 0.0f) {
    // This appears to be a 3DOF pose without a neck model. Add one.
    // The head pose has redundant data. Assume we're only using the
    // object_from_reference_matrix and that we're not updating
    // position_external.
    // TODO: Not sure what object_from_reference_matrix is. The new API removed
    // it. For now, removing it seems to work fine.
    gvr_api_->ApplyNeckModel(head_pose, 1.0f);
  }

  // Bind back to the default framebuffer.
  frame.BindBuffer(0);

  if (webvr_mode_) {
    DrawWebVr();
    if (!webvr_secure_origin_) {
      DrawWebVrOverlay(target_time.monotonic_system_time_nanos);
    }
+
+    // When using async reprojection, we need to know which pose was used in
+    // the WebVR app for drawing this frame. Due to unknown amounts of
+    // buffering in the compositor and SurfaceTexture, we read the pose number
+    // from a corner pixel. There's no point in doing this for legacy
+    // distortion rendering since that doesn't need a pose, and reading back
+    // pixels is an expensive operation. TODO(klausw): stop doing this once we
+    // have working no-compositor rendering for WebVR.
+    if (gvr_api_->GetAsyncReprojectionEnabled()) {
+      uint32_t webvr_pose_frame = GetPixelEncodedPoseIndex();
+      head_pose = webvr_head_pose_[webvr_pose_frame % kPoseRingBufferSize];
+    }
  } else {
    DrawVrShell(head_pose);
  }

  frame.Unbind();
  frame.Submit(*buffer_viewport_list_, head_pose);
}

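The 3DOF branch near the top of DrawFrame() applies a neck model when the pose carries no positional data. In general, a neck model approximates eye position by rotating a fixed neck-to-eye offset by the head orientation and subtracting the unrotated offset, so the position is zero when looking straight ahead, which matches the all-zero-position check above. The sketch below only illustrates that idea; it is not GVR's ApplyNeckModel() implementation, and the offset constants are placeholders.

// Minimal neck-model sketch; types and constants are illustrative.
struct Vec3 {
  float x, y, z;
};

// Assumed neck-to-eye offset in meters (up and forward in GL convention).
constexpr Vec3 kNeckToEye = {0.0f, 0.075f, -0.08f};

// Compute an approximate eye position from a 3x3 head rotation matrix.
Vec3 NeckModelPosition(const float r[3][3]) {
  Vec3 rotated = {
      r[0][0] * kNeckToEye.x + r[0][1] * kNeckToEye.y + r[0][2] * kNeckToEye.z,
      r[1][0] * kNeckToEye.x + r[1][1] * kNeckToEye.y + r[1][2] * kNeckToEye.z,
      r[2][0] * kNeckToEye.x + r[2][1] * kNeckToEye.y + r[2][2] * kNeckToEye.z};
  // Subtract the unrotated offset so a forward-facing pose yields zero.
  return {rotated.x - kNeckToEye.x, rotated.y - kNeckToEye.y,
          rotated.z - kNeckToEye.z};
}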
void VrShell::DrawVrShell(const gvr::Mat4f& head_pose) {
  float screen_tilt = desktop_screen_tilt_ * M_PI / 180.0f;
(...skipping 377 matching lines...)
      content::WebContents::FromJavaWebContents(content_web_contents));
  content::ContentViewCore* ui_core = content::ContentViewCore::FromWebContents(
      content::WebContents::FromJavaWebContents(ui_web_contents));
  return reinterpret_cast<intptr_t>(new VrShell(
      env, obj, c_core,
      reinterpret_cast<ui::WindowAndroid*>(content_window_android), ui_core,
      reinterpret_cast<ui::WindowAndroid*>(ui_window_android)));
}

}  // namespace vr_shell
