Index: ui/gl/gl_context_cgl.cc
diff --git a/ui/gl/gl_context_cgl.cc b/ui/gl/gl_context_cgl.cc
index cdd84ba50697a8731802e18bed67ae065a854560..a6af7c459d9d1d66bcee6302dc1d867a0c05b232 100644
--- a/ui/gl/gl_context_cgl.cc
+++ b/ui/gl/gl_context_cgl.cc
@@ -17,47 +17,63 @@
 namespace gfx {
-GLContextCGL::GLContextCGL(GLShareGroup* share_group)
-    : GLContext(share_group),
-      context_(NULL),
-      gpu_preference_(PreferIntegratedGpu),
-      discrete_pixelformat_(NULL) {
-}
-
-bool GLContextCGL::Initialize(GLSurface* compatible_surface,
-                              GpuPreference gpu_preference) {
-  DCHECK(compatible_surface);
-
-  gpu_preference = ui::GpuSwitchingManager::GetInstance()->AdjustGpuPreference(
-      gpu_preference);
-
-  GLContextCGL* share_context = share_group() ?
-      static_cast<GLContextCGL*>(share_group()->GetContext()) : NULL;
+bool g_support_renderer_switching;
+static CGLPixelFormatObj GetPixelFormat() {
+  static CGLPixelFormatObj format;

Ken Russell
2013/03/01 22:00:58
Does this need to be initialized to NULL?

jbauman
2013/03/02 03:52:23
No, static variables are always initialized to 0 (or NULL).

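The rule jbauman cites is C++ zero-initialization: every object with static storage duration is zero-initialized before any other initialization runs, so a static pointer starts out NULL without an explicit initializer. A minimal standalone sketch of the same cached-static pattern GetPixelFormat() uses (names here are illustrative, not from the patch):

    #include <assert.h>

    // Returns a lazily created singleton. The local static pointer is
    // zero-initialized to NULL before main() runs, so no "= NULL" is needed.
    static int* GetCachedValue() {
      static int* cached;  // Implicitly NULL, per zero-initialization.
      if (cached)
        return cached;
      static int value = 42;  // Created once, on the first call.
      cached = &value;
      return cached;
    }

    int main() {
      assert(GetCachedValue() == GetCachedValue());  // Same object every call.
      return 0;
    }
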
+  if (format)
+    return format;
   std::vector<CGLPixelFormatAttribute> attribs;
   // If the system supports dual gpus then allow offline renderers for every
   // context, so that they can all be in the same share group.
-  if (ui::GpuSwitchingManager::GetInstance()->SupportsDualGpus())
+  if (ui::GpuSwitchingManager::GetInstance()->SupportsDualGpus()) {
     attribs.push_back(kCGLPFAAllowOfflineRenderers);
+    g_support_renderer_switching = true;
+  }
   if (GetGLImplementation() == kGLImplementationAppleGL) {
     attribs.push_back(kCGLPFARendererID);
     attribs.push_back((CGLPixelFormatAttribute) kCGLRendererGenericFloatID);
+    g_support_renderer_switching = false;
   }
   attribs.push_back((CGLPixelFormatAttribute) 0);
-  CGLPixelFormatObj format;
-  GLint num_pixel_formats;
+  GLint num_virtual_screens;
   if (CGLChoosePixelFormat(&attribs.front(),
                            &format,
-                           &num_pixel_formats) != kCGLNoError) {
+                           &num_virtual_screens) != kCGLNoError) {
     LOG(ERROR) << "Error choosing pixel format.";
-    return false;
+    return NULL;
   }
   if (!format) {
     LOG(ERROR) << "format == 0.";
-    return NULL;
   }
-  DCHECK_NE(num_pixel_formats, 0);
+  DCHECK_NE(num_virtual_screens, 0);
+  return format;
+}
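
As background on the CGL call at the heart of GetPixelFormat(): the third out-parameter of CGLChoosePixelFormat reports how many virtual screens (renderer/display combinations) the resulting pixel format spans, which is what motivates the rename from num_pixel_formats to num_virtual_screens. A standalone sketch outside Chromium (error handling simplified):

    #include <OpenGL/OpenGL.h>
    #include <stdio.h>

    int main() {
      // Allow offline renderers so the format spans every GPU in the machine.
      CGLPixelFormatAttribute attribs[] = {
          kCGLPFAAllowOfflineRenderers,
          (CGLPixelFormatAttribute)0  // Attribute list terminator.
      };
      CGLPixelFormatObj format = NULL;
      GLint num_virtual_screens = 0;
      if (CGLChoosePixelFormat(attribs, &format, &num_virtual_screens) !=
              kCGLNoError ||
          !format) {
        fprintf(stderr, "Error choosing pixel format.\n");
        return 1;
      }
      // On a dual-GPU machine this typically reports 2.
      printf("Pixel format spans %d virtual screen(s).\n", num_virtual_screens);
      CGLReleasePixelFormat(format);  // Safe here; this format is not cached.
      return 0;
    }
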
+
+GLContextCGL::GLContextCGL(GLShareGroup* share_group)
+    : GLContext(share_group),
+      context_(NULL),
+      gpu_preference_(PreferIntegratedGpu),
+      discrete_pixelformat_(NULL),
+      screen_(-1),
+      renderer_(-1) {
+}
+
+bool GLContextCGL::Initialize(GLSurface* compatible_surface,
+                              GpuPreference gpu_preference) {
+  DCHECK(compatible_surface);
+
+  gpu_preference = ui::GpuSwitchingManager::GetInstance()->AdjustGpuPreference(
+      gpu_preference);
+
+  GLContextCGL* share_context = share_group() ?
+      static_cast<GLContextCGL*>(share_group()->GetContext()) : NULL;
+
+  CGLPixelFormatObj format = GetPixelFormat();
+  if (!format)
+    return false;
   // If using the discrete gpu, create a pixel format requiring it before we
   // create the context.
@@ -79,7 +95,6 @@ bool GLContextCGL::Initialize(GLSurface* compatible_surface,
       share_context ?
           static_cast<CGLContextObj>(share_context->GetHandle()) : NULL,
       reinterpret_cast<CGLContextObj*>(&context_));
-  CGLReleasePixelFormat(format);
   if (res != kCGLNoError) {
     LOG(ERROR) << "Error creating context.";
     Destroy();
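
A note on the deleted CGLReleasePixelFormat(format): the format returned by GetPixelFormat() is now a process-lifetime static that later contexts, and MakeCurrent()'s virtual-screen lookup below, keep reusing, so releasing it here would drop the cache's only reference. The old create-then-release pairing is fine only when the format is one-shot, as in this standalone sketch (assumptions: no share group, default attributes):

    #include <OpenGL/OpenGL.h>

    int main() {
      CGLPixelFormatAttribute attribs[] = {(CGLPixelFormatAttribute)0};
      CGLPixelFormatObj format = NULL;
      GLint npix = 0;
      if (CGLChoosePixelFormat(attribs, &format, &npix) != kCGLNoError ||
          !format)
        return 1;

      CGLContextObj context = NULL;
      CGLError res = CGLCreateContext(format, /*share=*/NULL, &context);
      // A one-shot format can be released once the context is created; a
      // cached format (as in this patch) must stay alive for later callers.
      CGLReleasePixelFormat(format);
      if (res != kCGLNoError)
        return 1;
      CGLDestroyContext(context);
      return 0;
    }
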
@@ -103,6 +118,36 @@ void GLContextCGL::Destroy() {
 bool GLContextCGL::MakeCurrent(GLSurface* surface) {
   DCHECK(context_);
+  int renderer = share_group()->GetRenderer();
+  int screen;
+  CGLGetVirtualScreen(static_cast<CGLContextObj>(context_), &screen);
+
+  if (g_support_renderer_switching &&
+      (screen != screen_ || renderer != renderer_)) {
+    CGLPixelFormatObj format = GetPixelFormat();
+    // Attempt to find a virtual screen that's using the requested renderer,
+    // and switch the context to use that screen.
+    int virtual_screen_count;
+    if (CGLDescribePixelFormat(format, 0, kCGLPFAVirtualScreenCount,
+                               &virtual_screen_count) != kCGLNoError)
+      return false;
+
+    for (int i = 0; i < virtual_screen_count; ++i) {
+      int screen_renderer;
+      if (CGLDescribePixelFormat(format, i, kCGLPFARendererID,
+                                 &screen_renderer) != kCGLNoError)
+        return false;
+
+      screen_renderer &= kCGLRendererIDMatchingMask;
+      if (screen_renderer == renderer) {
+        CGLSetVirtualScreen(static_cast<CGLContextObj>(context_), i);
+        screen_ = i;
+        break;
+      }
+    }
+    renderer_ = renderer;
+  }
+
   if (IsCurrent(surface))
     return true;
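
To summarize the new MakeCurrent() logic for reviewers unfamiliar with CGL's GPU-switching model: a pixel format that allows offline renderers exposes one virtual screen per renderer, CGLDescribePixelFormat() reports each screen's renderer ID, and CGLSetVirtualScreen() retargets an existing context at a different GPU. A standalone sketch of that enumeration, outside Chromium (no share-group bookkeeping):

    #include <OpenGL/CGLRenderers.h>  // kCGLRendererIDMatchingMask.
    #include <OpenGL/OpenGL.h>
    #include <stdio.h>

    int main() {
      CGLPixelFormatAttribute attribs[] = {
          kCGLPFAAllowOfflineRenderers, (CGLPixelFormatAttribute)0};
      CGLPixelFormatObj format = NULL;
      GLint npix = 0;
      if (CGLChoosePixelFormat(attribs, &format, &npix) != kCGLNoError ||
          !format)
        return 1;

      GLint virtual_screen_count = 0;
      if (CGLDescribePixelFormat(format, 0, kCGLPFAVirtualScreenCount,
                                 &virtual_screen_count) != kCGLNoError)
        return 1;

      for (GLint i = 0; i < virtual_screen_count; ++i) {
        GLint renderer_id = 0;
        if (CGLDescribePixelFormat(format, i, kCGLPFARendererID,
                                   &renderer_id) != kCGLNoError)
          return 1;
        // Mask to the matching bits so IDs compare equal across displays,
        // the same normalization the patch applies before comparing.
        printf("virtual screen %d -> renderer 0x%08x\n", (int)i,
               (unsigned)(renderer_id & kCGLRendererIDMatchingMask));
      }
      CGLReleasePixelFormat(format);
      return 0;
    }

A context created from this format could then call CGLSetVirtualScreen with the matching index, which is exactly the switch MakeCurrent() performs above.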