Chromium Code Reviews

Index: content/common/gpu/gpu_command_buffer_stub.cc
diff --git a/content/common/gpu/gpu_command_buffer_stub.cc b/content/common/gpu/gpu_command_buffer_stub.cc
index d0ed3d1c42908ca3b99bd7de42dc60870fad4c93..e7900c919e7bfc98aa4257c000dc43f218989700 100644
--- a/content/common/gpu/gpu_command_buffer_stub.cc
+++ b/content/common/gpu/gpu_command_buffer_stub.cc
@@ -13,6 +13,7 @@
 #include "base/json/json_writer.h"
 #include "base/macros.h"
 #include "base/memory/shared_memory.h"
+#include "base/sys_info.h"
 #include "base/time/time.h"
 #include "base/trace_event/trace_event.h"
 #include "build/build_config.h"
@@ -161,11 +162,9 @@ uint64_t GetCommandBufferID(int channel_id, int32_t route_id) {
 }
 
 gfx::GLSurface::Format GetSurfaceFormatFromAttribute(
-    const gpu::gles2::ContextCreationAttribHelper& attrib,
-    bool use_virtualized_gl_context) {
+    const gpu::gles2::ContextCreationAttribHelper& attrib) {
   gfx::GLSurface::Format format = gfx::GLSurface::SURFACE_DEFAULT;  // ARGB8888
-  if (!use_virtualized_gl_context &&
-      attrib.red_size <= 5 &&
+  if (attrib.red_size <= 5 &&
       attrib.green_size <= 6 &&
       attrib.blue_size <= 5 &&
       attrib.alpha_size == 0) {
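
With use_virtualized_gl_context dropped from the signature, the helper now keys purely off the requested channel sizes. For reference, a minimal sketch of how the whole function plausibly reads after this patch; the body inside the braces sits below the visible hunk, so the SURFACE_RGB565 assignment is an assumption.

    gfx::GLSurface::Format GetSurfaceFormatFromAttribute(
        const gpu::gles2::ContextCreationAttribHelper& attrib) {
      gfx::GLSurface::Format format = gfx::GLSurface::SURFACE_DEFAULT;  // ARGB8888
      if (attrib.red_size <= 5 &&
          attrib.green_size <= 6 &&
          attrib.blue_size <= 5 &&
          attrib.alpha_size == 0) {
        // Assumption: requests that fit in 565 with no alpha map to the
        // 16-bit surface format.
        format = gfx::GLSurface::SURFACE_RGB565;
      }
      return format;
    }
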
@@ -250,8 +249,7 @@ GpuCommandBufferStub::GpuCommandBufferStub(
   // only a single context. See crbug.com/510243 for details.
   use_virtualized_gl_context_ |= mailbox_manager->UsesSync();
 
-  surface_format_ = GetSurfaceFormatFromAttribute(attrib_parser,
-                                                  use_virtualized_gl_context_);
+  surface_format_ = GetSurfaceFormatFromAttribute(attrib_parser);
 
   if (offscreen && initial_size_.IsEmpty()) {
     // If we're an offscreen surface with zero width and/or height, set to a
@@ -561,8 +559,9 @@ void GpuCommandBufferStub::OnInitialize(
   }
 
   scoped_refptr<gfx::GLContext> context;
-  if (use_virtualized_gl_context_ && channel_->share_group()) {
-    context = channel_->share_group()->GetSharedContext();
+  gfx::GLShareGroup* share_group = channel_->share_group();
+  if (use_virtualized_gl_context_ && share_group) {
+    context = share_group->GetSharedContext();
     if (!context.get()) {
       context = gfx::GLContext::CreateGLContext(
           channel_->share_group(),
@@ -578,22 +577,17 @@ void GpuCommandBufferStub::OnInitialize(
     // This should be a non-virtual GL context.
     DCHECK(context->GetHandle());
     context = new gpu::GLContextVirtual(
-        channel_->share_group(), context.get(), decoder_->AsWeakPtr());
+        share_group, context.get(), decoder_->AsWeakPtr());
     if (!context->Initialize(surface_.get(), gpu_preference_)) {

no sievers  2016/02/18 20:19:45
Unfortunately Initialize() is not reliable to dete…

Jinsuk Kim  2016/02/19 01:04:40
Got it. I guarded it with #if for it to take effec…

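A sketch of one way the "guarded it with #if" fix could look, assuming it keys off OS_ANDROID from build/build_config.h (already included above); the guard shown here is hypothetical and not part of the hunk below.

      if (!context->Initialize(surface_.get(), gpu_preference_)) {
    #if defined(OS_ANDROID)
        // Hypothetical: on Android only, drop the virtual context and fall
        // through to the direct-context path below.
        context = NULL;
    #else
        DLOG(ERROR) << "Failed to initialize virtual GL context.";
        OnInitializeFailed(reply_message);
        return;
    #endif
      }
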
-      // TODO(sievers): The real context created above for the default
-      // offscreen surface might not be compatible with this surface.
-      // Need to adjust at least GLX to be able to create the initial context
-      // with a config that is compatible with onscreen and offscreen surfaces.
+      // The real context created above for the default offscreen surface
+      // is not compatible with this surface. Do not use a virtualized
+      // context in this case.
       context = NULL;
-
-      DLOG(ERROR) << "Failed to initialize virtual GL context.";
-      OnInitializeFailed(reply_message);
-      return;
     }
   }
   if (!context.get()) {
     context = gfx::GLContext::CreateGLContext(
-        channel_->share_group(), surface_.get(), gpu_preference_);
+        share_group, surface_.get(), gpu_preference_);
   }
   if (!context.get()) {
     DLOG(ERROR) << "Failed to create context.";
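
Pieced together from the hunks above, a sketch of the resulting context-selection flow in OnInitialize(). The arguments of the shared-context CreateGLContext() call and the final failure path are outside the visible hunks and are assumptions here.

      scoped_refptr<gfx::GLContext> context;
      gfx::GLShareGroup* share_group = channel_->share_group();
      if (use_virtualized_gl_context_ && share_group) {
        context = share_group->GetSharedContext();
        if (!context.get()) {
          // Create the real shared context (arguments elided in the hunk).
          context = gfx::GLContext::CreateGLContext(channel_->share_group(), ...);
        }
        // Wrap the real context; if the virtual context cannot be initialized
        // against this surface, drop it and fall through to a direct context.
        context = new gpu::GLContextVirtual(
            share_group, context.get(), decoder_->AsWeakPtr());
        if (!context->Initialize(surface_.get(), gpu_preference_)) {
          context = NULL;
        }
      }
      if (!context.get()) {
        context = gfx::GLContext::CreateGLContext(
            share_group, surface_.get(), gpu_preference_);
      }
      if (!context.get()) {
        DLOG(ERROR) << "Failed to create context.";
        OnInitializeFailed(reply_message);  // assumed, mirroring the removed failure path
        return;
      }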