Chromium Code Reviews

Unified Diff: content/common/gpu/client/gpu_channel_host.cc

Issue 19762004: Add multi-process GpuMemoryBuffer framework. (Closed) Base URL: svn://svn.chromium.org/chrome/trunk/src
Patch Set: Created 7 years, 2 months ago
 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "content/common/gpu/client/gpu_channel_host.h"

 #include <algorithm>

 #include "base/bind.h"
 #include "base/debug/trace_event.h"
(...skipping 28 matching lines...)
     int client_id,
     const gpu::GPUInfo& gpu_info,
     const IPC::ChannelHandle& channel_handle) {
   DCHECK(factory->IsMainThread());
   scoped_refptr<GpuChannelHost> host = new GpuChannelHost(
       factory, gpu_host_id, client_id, gpu_info);
   host->Connect(channel_handle);
   return host;
 }

+// static
+bool GpuChannelHost::IsValidGpuMemoryBuffer(
+    gfx::GpuMemoryBufferHandle handle) {
+  switch (handle.type) {
+    case gfx::SHARED_MEMORY_BUFFER:
+      return true;
+    default:
+      return false;
+  }
+}
+
 GpuChannelHost::GpuChannelHost(GpuChannelHostFactory* factory,
                                int gpu_host_id,
                                int client_id,
                                const gpu::GPUInfo& gpu_info)
     : factory_(factory),
       client_id_(client_id),
       gpu_host_id_(gpu_host_id),
       gpu_info_(gpu_info) {
   next_transfer_buffer_id_.GetNext();
+  next_gpu_memory_buffer_id_.GetNext();
 }

 void GpuChannelHost::Connect(const IPC::ChannelHandle& channel_handle) {
   // Open a channel to the GPU process. We pass NULL as the main listener here
   // since we need to filter everything to route it to the right thread.
   scoped_refptr<base::MessageLoopProxy> io_loop = factory_->GetIOLoopProxy();
   channel_.reset(new IPC::SyncChannel(channel_handle,
                                       IPC::Channel::MODE_CLIENT,
                                       NULL,
                                       io_loop.get(),
(...skipping 210 matching lines...)
   if (generate_count > 0)
     Send(new GpuChannelMsg_GenerateMailboxNamesAsync(generate_count));

   return true;
 }

 int32 GpuChannelHost::ReserveTransferBufferId() {
   return next_transfer_buffer_id_.GetNext();
 }

+gfx::GpuMemoryBufferHandle GpuChannelHost::ShareGpuMemoryBufferToGpuProcess(
+    gfx::GpuMemoryBufferHandle source_handle) {
+  switch (source_handle.type) {
+    case gfx::SHARED_MEMORY_BUFFER: {
+      gfx::GpuMemoryBufferHandle handle;
+      handle.type = gfx::SHARED_MEMORY_BUFFER;
+      handle.handle = ShareToGpuProcess(source_handle.handle);
+      return handle;
+    }
+    default:
+      NOTREACHED();
+      return gfx::GpuMemoryBufferHandle();
+  }
+}
+
+int32 GpuChannelHost::ReserveGpuMemoryBufferId() {
+  return next_gpu_memory_buffer_id_.GetNext();
+}
+
 GpuChannelHost::~GpuChannelHost() {
   // channel_ must be destroyed on the main thread.
   if (!factory_->IsMainThread())
     factory_->GetMainLoop()->DeleteSoon(FROM_HERE, channel_.release());
 }


 GpuChannelHost::MessageFilter::MessageFilter()
     : lost_(false),
       requested_mailboxes_(0) {
(...skipping 110 matching lines...)
   AutoLock lock(lock_);
   DCHECK_LE(names.size(), requested_mailboxes_);
   requested_mailboxes_ -= names.size();
   mailbox_name_pool_.insert(mailbox_name_pool_.end(),
                             names.begin(),
                             names.end());
 }


 }  // namespace content
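
For context, a minimal caller-side sketch of the path this patch adds. The helper name ShareBufferWithGpuProcess, the incoming base::SharedMemoryHandle, and how the resulting handle and id are sent onward are assumptions for illustration; only IsValidGpuMemoryBuffer, ShareGpuMemoryBufferToGpuProcess and ReserveGpuMemoryBufferId come from this change.

// Hypothetical helper, not part of this patch: wraps a shared-memory handle,
// validates it, and duplicates it so the GPU process can map the same memory.
int32 ShareBufferWithGpuProcess(GpuChannelHost* channel,
                                base::SharedMemoryHandle shm_handle,
                                gfx::GpuMemoryBufferHandle* gpu_handle) {
  gfx::GpuMemoryBufferHandle source_handle;
  source_handle.type = gfx::SHARED_MEMORY_BUFFER;
  source_handle.handle = shm_handle;

  // Only shared-memory backed buffers are supported by this patch.
  if (!GpuChannelHost::IsValidGpuMemoryBuffer(source_handle))
    return -1;

  // Duplicate the handle for the GPU process and reserve a channel-unique id;
  // the caller would then pass both to the GPU process over IPC.
  *gpu_handle = channel->ShareGpuMemoryBufferToGpuProcess(source_handle);
  return channel->ReserveGpuMemoryBufferId();
}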