Index: content/common/gpu/client/context_provider_command_buffer.cc |
diff --git a/content/common/gpu/client/context_provider_command_buffer.cc b/content/common/gpu/client/context_provider_command_buffer.cc |
index 03791359c5adfa5e736a0e962a9f2bccf58c7f6b..4851431c29f3904095dfa3aece0c6f68e2f85b5a 100644 |
--- a/content/common/gpu/client/context_provider_command_buffer.cc |
+++ b/content/common/gpu/client/context_provider_command_buffer.cc |
@@ -23,6 +23,9 @@ |
namespace content { |
+ContextProviderCommandBuffer::SharedProviders::SharedProviders() = default; |
+ContextProviderCommandBuffer::SharedProviders::~SharedProviders() = default; |
+ |
class ContextProviderCommandBuffer::LostContextCallbackProxy |
: public WebGraphicsContext3DCommandBufferImpl:: |
WebGraphicsContextLostCallback { |
@@ -45,8 +48,12 @@ class ContextProviderCommandBuffer::LostContextCallbackProxy |
ContextProviderCommandBuffer::ContextProviderCommandBuffer( |
std::unique_ptr<WebGraphicsContext3DCommandBufferImpl> context3d, |
const gpu::SharedMemoryLimits& memory_limits, |
+ ContextProviderCommandBuffer* shared_context_provider, |
CommandBufferContextType type) |
- : context3d_(std::move(context3d)), |
+ : shared_providers_(shared_context_provider |
+ ? shared_context_provider->shared_providers_ |
+ : new SharedProviders), |
+ context3d_(std::move(context3d)), |
memory_limits_(memory_limits), |
context_type_(type), |
debug_name_(CommandBufferContextTypeToString(type)) { |
@@ -59,6 +66,14 @@ ContextProviderCommandBuffer::~ContextProviderCommandBuffer() { |
DCHECK(main_thread_checker_.CalledOnValidThread() || |
context_thread_checker_.CalledOnValidThread()); |
+ { |
+ base::AutoLock hold(shared_providers_->lock); |
+ auto it = std::find(shared_providers_->list.begin(), |
+ shared_providers_->list.end(), this); |
+ if (it != shared_providers_->list.end()) |
+ shared_providers_->list.erase(it); |
+ } |
+ |
// Destroy references to the context3d_ before leaking it. |
// TODO(danakj): Delete this. |
if (context3d_->GetCommandBufferProxy()) |
@@ -91,8 +106,43 @@ bool ContextProviderCommandBuffer::BindToCurrentThread() { |
return true; |
context3d_->SetContextType(context_type_); |
- if (!context3d_->InitializeOnCurrentThread(memory_limits_)) |
- return false; |
+ |
+ ContextProviderCommandBuffer* shared_context_provider = nullptr; |
+ gpu::CommandBufferProxyImpl* shared_command_buffer = nullptr; |
[Review comment — piman, 2016/04/27 22:09:33]
    nit: move both of those inside the AutoLock scope.
[Reply — danakj, 2016/04/28 00:25:58]
    Done. I forget why I moved these out to start with.
+ scoped_refptr<gpu::gles2::ShareGroup> share_group; |
+ |
+ // It's possible to be running BindToCurrentThread on two contexts |
+ // on different threads at the same time, but which will be in the same share |
+ // group. To ensure they end up in the same group, hold the lock on the |
+ // shared_providers_ (which they will share) after querying the group, until |
+ // this context has been added to the list. |
+ { |
+ base::AutoLock hold(shared_providers_->lock); |
+ |
+ if (!shared_providers_->list.empty()) { |
+ shared_context_provider = shared_providers_->list.front(); |
+ shared_command_buffer = |
+ shared_context_provider->context3d_->GetCommandBufferProxy(); |
+ share_group = shared_context_provider->context3d_->GetImplementation() |
+ ->share_group(); |
+ |
+ // If any context in the share group has been lost, then abort and don't |
+ // continue since we need to go back to the caller of the constructor to |
+ // find the correct share group. |
+ // This may happen in between the share group being chosen at the |
+ // constructor, and getting to run this BindToCurrentThread method which |
+ // can be on some other thread. |
+ if (share_group->IsLost()) |
+ return false; |
+ } |
+ |
+ if (!context3d_->InitializeOnCurrentThread( |
+ memory_limits_, shared_command_buffer, std::move(share_group))) |
+ return false; |
+ |
+ shared_providers_->list.push_back(this); |
+ } |
+ |
lost_context_callback_proxy_.reset(new LostContextCallbackProxy(this)); |
if (base::CommandLine::ForCurrentProcess()->HasSwitch( |