Chromium Code Reviews — Index: content/common/gpu/gpu_command_buffer_stub.cc |
| diff --git a/content/common/gpu/gpu_command_buffer_stub.cc b/content/common/gpu/gpu_command_buffer_stub.cc |
| index e14cf986092b9c8817dcd3024e2474890734c368..ecb99048ce18776caa1ef4c8f4f70e0a85b7b8f2 100644 |
| --- a/content/common/gpu/gpu_command_buffer_stub.cc |
| +++ b/content/common/gpu/gpu_command_buffer_stub.cc |
| @@ -421,9 +421,11 @@ void GpuCommandBufferStub::Destroy() { |
| scheduler_.reset(); |
| bool have_context = false; |
| - if (decoder_ && command_buffer_ && |
| - command_buffer_->GetLastState().error != gpu::error::kLostContext) |
| - have_context = decoder_->MakeCurrent(); |
| + if (decoder_) { |
| + // Try to make the context current regardless of whether it was lost, so we |
| + // don't leak resources. |
|
Ken Russell (switch to Gerrit)
2015/04/27 22:35:33
Presumably if the driver has actually lost the context… [reviewer comment truncated in extraction]
no sievers
2015/04/27 23:24:41
But it's a problem if we forced the context to be current… [reply truncated in extraction]
|
| + have_context = decoder_->GetGLContext()->MakeCurrent(surface_.get()); |
| + } |
| FOR_EACH_OBSERVER(DestructionObserver, |
| destruction_observers_, |
| OnWillDestroyStub()); |
| @@ -671,7 +673,7 @@ void GpuCommandBufferStub::OnParseError() { |
| DCHECK(command_buffer_.get()); |
| gpu::CommandBuffer::State state = command_buffer_->GetLastState(); |
| IPC::Message* msg = new GpuCommandBufferMsg_Destroyed( |
| - route_id_, state.context_lost_reason); |
| + route_id_, state.context_lost_reason, state.error); |
| msg->set_unblock(true); |
| Send(msg); |
| @@ -1089,7 +1091,7 @@ void GpuCommandBufferStub::MarkContextLost() { |
| command_buffer_->SetContextLostReason(gpu::error::kUnknown); |
| if (decoder_) |
| - decoder_->LoseContext(GL_UNKNOWN_CONTEXT_RESET_ARB); |
| + decoder_->MarkContextLost(gpu::error::kUnknown); |
| command_buffer_->SetParseError(gpu::error::kLostContext); |
| } |