Chromium Code Reviews| Index: components/display_compositor/buffer_queue.cc |
| diff --git a/components/display_compositor/buffer_queue.cc b/components/display_compositor/buffer_queue.cc |
| index bcdb62c3d21157019b5df48f4ea8c1ded7670a95..bdab3f0567e74812c14f0fef444e979a841a3485 100644 |
| --- a/components/display_compositor/buffer_queue.cc |
| +++ b/components/display_compositor/buffer_queue.cc |
| @@ -14,6 +14,7 @@ |
| #include "gpu/command_buffer/common/gpu_memory_buffer_support.h" |
| #include "third_party/skia/include/core/SkRect.h" |
| #include "third_party/skia/include/core/SkRegion.h" |
| +#include "ui/display/types/display_snapshot.h" |
| #include "ui/gfx/gpu_memory_buffer.h" |
| #include "ui/gfx/skia_util.h" |
| @@ -212,10 +213,11 @@ std::unique_ptr<BufferQueue::AllocatedSurface> BufferQueue::GetNextSurface() { |
| // We don't want to allow anything more than triple buffering. |
| DCHECK_LT(allocated_count_, 4U); |
|
ccameron
2016/08/08 19:29:10
I'd rather that the gfx::BufferFormat be passed in
Daniele Castagna
2016/08/09 00:34:29
Sure, done.
Now the call to ui::DisplaySnapshot::PrimaryFormat() determines the gfx::BufferFormat used for the allocation.
|
| + gfx::BufferFormat format = ui::DisplaySnapshot::PrimaryFormat(); |
| + DCHECK_EQ(static_cast<uint32_t>(GL_RGB), internal_format_); |
| std::unique_ptr<gfx::GpuMemoryBuffer> buffer( |
| gpu_memory_buffer_manager_->AllocateGpuMemoryBuffer( |
| - size_, gpu::DefaultBufferFormatForImageFormat(internal_format_), |
| - gfx::BufferUsage::SCANOUT, surface_handle_)); |
| + size_, format, gfx::BufferUsage::SCANOUT, surface_handle_)); |
| if (!buffer.get()) { |
| gl_->DeleteTextures(1, &texture); |
| DLOG(ERROR) << "Failed to allocate GPU memory buffer"; |