Chromium Code Reviews

Unified Diff: gpu/ipc/service/gpu_command_buffer_stub.cc

Issue 2107783003: Pass initial size and GPU preference via context attributes (Closed)
Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: gyp fix (created 4 years, 6 months ago)
Index: gpu/ipc/service/gpu_command_buffer_stub.cc
diff --git a/gpu/ipc/service/gpu_command_buffer_stub.cc b/gpu/ipc/service/gpu_command_buffer_stub.cc
index d7a2c370c1cba0154cba65dff901d9714d9ed67b..a3fec2cc52e6b9f7096119869b8035c3bb6e02c5 100644
--- a/gpu/ipc/service/gpu_command_buffer_stub.cc
+++ b/gpu/ipc/service/gpu_command_buffer_stub.cc
@@ -476,7 +476,7 @@ bool GpuCommandBufferStub::Initialize(
   // Virtualize PreferIntegratedGpu contexts by default on OS X to prevent
   // performance regressions when enabling FCM.
   // http://crbug.com/180463
-  if (init_params.gpu_preference == gl::PreferIntegratedGpu)
+  if (init_params.attribs.gpu_preference == gl::PreferIntegratedGpu)
     use_virtualized_gl_context_ = true;
 #endif
 
@@ -506,14 +506,6 @@ bool GpuCommandBufferStub::Initialize(
   use_virtualized_gl_context_ = false;
 #endif
 
-  gfx::Size initial_size = init_params.size;
-  if (offscreen && initial_size.IsEmpty()) {
-    // If we're an offscreen surface with zero width and/or height, set to a
-    // non-zero size so that we have a complete framebuffer for operations like
-    // glClear.
-    initial_size = gfx::Size(1, 1);
-  }
-
   command_buffer_.reset(new CommandBufferService(
       context_group_->transfer_buffer_manager()));
 
@@ -546,7 +538,7 @@ bool GpuCommandBufferStub::Initialize(
     context = gl_share_group->GetSharedContext();
     if (!context.get()) {
      context = gl::init::CreateGLContext(gl_share_group, default_surface,
-                                          init_params.gpu_preference);
+                                          init_params.attribs.gpu_preference);
       if (!context.get()) {
         DLOG(ERROR) << "Failed to create shared context for virtualization.";
         return false;
@@ -563,7 +555,8 @@ bool GpuCommandBufferStub::Initialize(
            gl::GetGLImplementation() == gl::kGLImplementationMockGL);
     context = new GLContextVirtual(
         gl_share_group, context.get(), decoder_->AsWeakPtr());
-    if (!context->Initialize(surface_.get(), init_params.gpu_preference)) {
+    if (!context->Initialize(surface_.get(),
+                             init_params.attribs.gpu_preference)) {
       // The real context created above for the default offscreen surface
       // might not be compatible with this surface.
       context = NULL;
@@ -573,7 +566,7 @@ bool GpuCommandBufferStub::Initialize(
   }
   if (!context.get()) {
     context = gl::init::CreateGLContext(gl_share_group, surface_.get(),
-                                        init_params.gpu_preference);
+                                        init_params.attribs.gpu_preference);
   }
   if (!context.get()) {
     DLOG(ERROR) << "Failed to create context.";
@@ -596,7 +589,7 @@ bool GpuCommandBufferStub::Initialize(
   }
 
   // Initialize the decoder with either the view or pbuffer GLContext.
-  if (!decoder_->Initialize(surface_, context, offscreen, initial_size,
+  if (!decoder_->Initialize(surface_, context, offscreen,
                             gpu::gles2::DisallowedFeatures(),
                             init_params.attribs)) {
     DLOG(ERROR) << "Failed to initialize decoder.";
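
The net effect of the hunks above: GpuCommandBufferStub::Initialize no longer takes the initial size and GPU preference as separate init params. The GPU preference is read from init_params.attribs, and the 1x1 fallback for empty offscreen sizes (deleted in the second hunk) moves to where the attributes are consumed, with the decoder receiving the size through init_params.attribs in the final hunk. Below is a minimal standalone sketch of that pattern. The types Size, ContextCreationAttribs, and EffectiveOffscreenSize are hypothetical stand-ins, not Chromium's real classes, and the field name offscreen_framebuffer_size is an assumption; only attribs.gpu_preference is confirmed by the diff.

// Minimal sketch of the "pass via context attributes" pattern.
// All types here are hypothetical stand-ins for illustration only.
#include <cassert>

namespace sketch {

enum GpuPreference { kPreferIntegratedGpu, kPreferDiscreteGpu };

struct Size {
  int width = 0;
  int height = 0;
  bool IsEmpty() const { return width == 0 || height == 0; }
};

// Stand-in for the attribute struct: after this CL it carries both values
// that used to travel as separate init params. The field name
// offscreen_framebuffer_size is an assumption, not taken from the diff.
struct ContextCreationAttribs {
  Size offscreen_framebuffer_size;
  GpuPreference gpu_preference = kPreferIntegratedGpu;
};

// Stand-in for the consumer side (the decoder in the real code): the 1x1
// clamp deleted from GpuCommandBufferStub::Initialize now lives here, so a
// zero-sized offscreen context still gets a complete framebuffer for
// operations like glClear.
Size EffectiveOffscreenSize(const ContextCreationAttribs& attribs,
                            bool offscreen) {
  Size size = attribs.offscreen_framebuffer_size;
  if (offscreen && size.IsEmpty())
    size = Size{1, 1};
  return size;
}

}  // namespace sketch

int main() {
  sketch::ContextCreationAttribs attribs;  // size left empty on purpose
  sketch::Size size =
      sketch::EffectiveOffscreenSize(attribs, /*offscreen=*/true);
  assert(size.width == 1 && size.height == 1);
  return 0;
}

Centralizing the clamp with the attribute consumer means every client that creates a context through the attributes gets the same non-empty-framebuffer guarantee, rather than each stub duplicating the check.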