OLD | NEW |
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "content/common/gpu/gpu_memory_manager.h" | 5 #include "content/common/gpu/gpu_memory_manager.h" |
6 | 6 |
7 #if defined(ENABLE_GPU) | 7 #if defined(ENABLE_GPU) |
8 | 8 |
9 #include <algorithm> | 9 #include <algorithm> |
10 | 10 |
11 #include "base/bind.h" | 11 #include "base/bind.h" |
| 12 #include "base/command_line.h" |
12 #include "base/debug/trace_event.h" | 13 #include "base/debug/trace_event.h" |
13 #include "base/message_loop.h" | 14 #include "base/message_loop.h" |
| 15 #include "base/string_number_conversions.h" |
14 #include "content/common/gpu/gpu_command_buffer_stub.h" | 16 #include "content/common/gpu/gpu_command_buffer_stub.h" |
15 #include "content/common/gpu/gpu_memory_allocation.h" | 17 #include "content/common/gpu/gpu_memory_allocation.h" |
| 18 #include "gpu/command_buffer/service/gpu_switches.h" |
16 | 19 |
17 namespace { | 20 namespace { |
18 | 21 |
19 const int kDelayedScheduleManageTimeoutMs = 67; | 22 const int kDelayedScheduleManageTimeoutMs = 67; |
20 | 23 |
21 bool IsInSameContextShareGroupAsAnyOf( | 24 bool IsInSameContextShareGroupAsAnyOf( |
22 const GpuCommandBufferStubBase* stub, | 25 const GpuCommandBufferStubBase* stub, |
23 const std::vector<GpuCommandBufferStubBase*>& stubs) { | 26 const std::vector<GpuCommandBufferStubBase*>& stubs) { |
24 for (std::vector<GpuCommandBufferStubBase*>::const_iterator it = | 27 for (std::vector<GpuCommandBufferStubBase*>::const_iterator it = |
25 stubs.begin(); it != stubs.end(); ++it) { | 28 stubs.begin(); it != stubs.end(); ++it) { |
26 if (stub->IsInSameContextShareGroup(**it)) | 29 if (stub->IsInSameContextShareGroup(**it)) |
27 return true; | 30 return true; |
28 } | 31 } |
29 return false; | 32 return false; |
30 } | 33 } |
31 | 34 |
32 #if defined(OS_ANDROID) | |
33 size_t CalculateBonusMemoryAllocationBasedOnSize(gfx::Size size) { | |
34 const int viewportMultiplier = 16; | |
35 const unsigned int componentsPerPixel = 4; // GraphicsContext3D::RGBA | |
36 const unsigned int bytesPerComponent = 1; // sizeof(GC3Dubyte) | |
37 | |
38 if (size.IsEmpty()) | |
39 return 0; | |
40 | |
41 size_t limit = viewportMultiplier * size.width() * size.height() * | |
42 componentsPerPixel * bytesPerComponent; | |
43 if (limit < GpuMemoryManager::kMinimumAllocationForTab) | |
44 limit = GpuMemoryManager::kMinimumAllocationForTab; | |
45 else if (limit > GpuMemoryManager::kMaximumAllocationForTabs) | |
46 limit = GpuMemoryManager::kMaximumAllocationForTabs; | |
47 return limit - GpuMemoryManager::kMinimumAllocationForTab; | |
48 } | |
49 #endif | |
50 | |
51 void AssignMemoryAllocations( | 35 void AssignMemoryAllocations( |
52 GpuMemoryManager::StubMemoryStatMap* stub_memory_stats, | 36 GpuMemoryManager::StubMemoryStatMap* stub_memory_stats, |
53 const std::vector<GpuCommandBufferStubBase*>& stubs, | 37 const std::vector<GpuCommandBufferStubBase*>& stubs, |
54 GpuMemoryAllocation allocation, | 38 GpuMemoryAllocation allocation, |
55 bool visible) { | 39 bool visible) { |
56 for (std::vector<GpuCommandBufferStubBase*>::const_iterator it = | 40 for (std::vector<GpuCommandBufferStubBase*>::const_iterator it = |
57 stubs.begin(); | 41 stubs.begin(); |
58 it != stubs.end(); | 42 it != stubs.end(); |
59 ++it) { | 43 ++it) { |
60 (*it)->SetMemoryAllocation(allocation); | 44 (*it)->SetMemoryAllocation(allocation); |
61 (*stub_memory_stats)[*it].allocation = allocation; | 45 (*stub_memory_stats)[*it].allocation = allocation; |
62 (*stub_memory_stats)[*it].visible = visible; | 46 (*stub_memory_stats)[*it].visible = visible; |
63 } | 47 } |
64 } | 48 } |
65 | 49 |
66 } | 50 } |
67 | 51 |
| 52 size_t GpuMemoryManager::CalculateBonusMemoryAllocationBasedOnSize( |
| 53 gfx::Size size) const { |
| 54 const int kViewportMultiplier = 16; |
| 55 const unsigned int kComponentsPerPixel = 4; // GraphicsContext3D::RGBA |
| 56 const unsigned int kBytesPerComponent = 1; // sizeof(GC3Dubyte) |
| 57 |
| 58 if (size.IsEmpty()) |
| 59 return 0; |
| 60 |
| 61 size_t limit = kViewportMultiplier * size.width() * size.height() * |
| 62 kComponentsPerPixel * kBytesPerComponent; |
| 63 if (limit < GetMinimumTabAllocation()) |
| 64 limit = GetMinimumTabAllocation(); |
| 65 else if (limit > GetAvailableGpuMemory()) |
| 66 limit = GetAvailableGpuMemory(); |
| 67 return limit - GetMinimumTabAllocation(); |
| 68 } |
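For orientation, a minimal standalone sketch of the viewport-based clamp introduced above (not part of the patch; the two limit parameters are hypothetical stand-ins for GetMinimumTabAllocation() and GetAvailableGpuMemory()):

    #include <algorithm>
    #include <cstddef>

    // Standalone sketch of the clamp; all sizes are in bytes.
    size_t BonusForSize(int width, int height,
                        size_t minimum_tab_allocation,
                        size_t available_gpu_memory) {
      const size_t kViewportMultiplier = 16;
      const size_t kBytesPerPixel = 4;  // RGBA, one byte per component
      if (width <= 0 || height <= 0)
        return 0;
      size_t limit = kViewportMultiplier * width * height * kBytesPerPixel;
      // e.g. a 1024x768 surface gives 16 * 1024 * 768 * 4 = 48 MB before clamping.
      limit = std::max(limit, minimum_tab_allocation);
      limit = std::min(limit, available_gpu_memory);
      return limit - minimum_tab_allocation;
    }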
| 69 |
68 GpuMemoryManager::GpuMemoryManager(GpuMemoryManagerClient* client, | 70 GpuMemoryManager::GpuMemoryManager(GpuMemoryManagerClient* client, |
69 size_t max_surfaces_with_frontbuffer_soft_limit) | 71 size_t max_surfaces_with_frontbuffer_soft_limit) |
70 : client_(client), | 72 : client_(client), |
71 manage_immediate_scheduled_(false), | 73 manage_immediate_scheduled_(false), |
72 max_surfaces_with_frontbuffer_soft_limit_( | 74 max_surfaces_with_frontbuffer_soft_limit_( |
73 max_surfaces_with_frontbuffer_soft_limit), | 75 max_surfaces_with_frontbuffer_soft_limit), |
| 76 bytes_available_gpu_memory_(0), |
74 bytes_allocated_current_(0), | 77 bytes_allocated_current_(0), |
75 bytes_allocated_historical_max_(0) { | 78 bytes_allocated_historical_max_(0) { |
| 79 CommandLine* command_line = CommandLine::ForCurrentProcess(); |
| 80 if (command_line->HasSwitch(switches::kForceGpuMemAvailableMb)) { |
| 81 base::StringToSizeT( |
| 82 command_line->GetSwitchValueASCII(switches::kForceGpuMemAvailableMb), |
| 83 &bytes_available_gpu_memory_); |
| 84 bytes_available_gpu_memory_ *= 1024 * 1024; |
| 85 } else { |
| 86 #if defined(OS_ANDROID) |
| 87 bytes_available_gpu_memory_ = 64 * 1024 * 1024; |
| 88 #else |
| 89 bytes_available_gpu_memory_ = 448 * 1024 * 1024; |
| 90 #endif |
| 91 } |
76 } | 92 } |
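The new constructor makes the total budget overridable via switches::kForceGpuMemAvailableMb (interpreted as megabytes) and otherwise falls back to a per-platform default. A minimal sketch of that decision, using a hypothetical helper with plain std:: calls in place of base::CommandLine and base::StringToSizeT:

    #include <cstddef>
    #include <string>

    // Hypothetical stand-in for the constructor logic above: an explicitly
    // forced budget (in MB) wins, otherwise fall back to the per-platform
    // default (64 MB on Android, 448 MB elsewhere).
    size_t ComputeAvailableGpuMemory(const std::string& forced_mb,
                                     bool is_android) {
      if (!forced_mb.empty())
        return std::stoul(forced_mb) * 1024 * 1024;  // e.g. "256" -> 256 MB in bytes
      return (is_android ? 64u : 448u) * 1024 * 1024;
    }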
77 | 93 |
78 GpuMemoryManager::~GpuMemoryManager() { | 94 GpuMemoryManager::~GpuMemoryManager() { |
79 } | 95 } |
80 | 96 |
81 bool GpuMemoryManager::StubWithSurfaceComparator::operator()( | 97 bool GpuMemoryManager::StubWithSurfaceComparator::operator()( |
82 GpuCommandBufferStubBase* lhs, | 98 GpuCommandBufferStubBase* lhs, |
83 GpuCommandBufferStubBase* rhs) { | 99 GpuCommandBufferStubBase* rhs) { |
84 DCHECK(lhs->has_surface_state() && rhs->has_surface_state()); | 100 DCHECK(lhs->has_surface_state() && rhs->has_surface_state()); |
85 const GpuCommandBufferStubBase::SurfaceState& lhs_ss = lhs->surface_state(); | 101 const GpuCommandBufferStubBase::SurfaceState& lhs_ss = lhs->surface_state(); |
(...skipping 19 matching lines...)
105 return; | 121 return; |
106 delayed_manage_callback_.Reset(base::Bind(&GpuMemoryManager::Manage, | 122 delayed_manage_callback_.Reset(base::Bind(&GpuMemoryManager::Manage, |
107 AsWeakPtr())); | 123 AsWeakPtr())); |
108 MessageLoop::current()->PostDelayedTask( | 124 MessageLoop::current()->PostDelayedTask( |
109 FROM_HERE, | 125 FROM_HERE, |
110 delayed_manage_callback_.callback(), | 126 delayed_manage_callback_.callback(), |
111 base::TimeDelta::FromMilliseconds(kDelayedScheduleManageTimeoutMs)); | 127 base::TimeDelta::FromMilliseconds(kDelayedScheduleManageTimeoutMs)); |
112 } | 128 } |
113 } | 129 } |
114 | 130 |
115 size_t GpuMemoryManager::GetAvailableGpuMemory() const { | |
116 // TODO(mmocny): Implement this with real system figures. | |
117 return kMaximumAllocationForTabs; | |
118 } | |
119 | |
120 void GpuMemoryManager::TrackMemoryAllocatedChange(size_t old_size, | 131 void GpuMemoryManager::TrackMemoryAllocatedChange(size_t old_size, |
121 size_t new_size) | 132 size_t new_size) |
122 { | 133 { |
123 if (new_size < old_size) { | 134 if (new_size < old_size) { |
124 size_t delta = old_size - new_size; | 135 size_t delta = old_size - new_size; |
125 DCHECK(bytes_allocated_current_ >= delta); | 136 DCHECK(bytes_allocated_current_ >= delta); |
126 bytes_allocated_current_ -= delta; | 137 bytes_allocated_current_ -= delta; |
127 } | 138 } else { |
128 else { | |
129 size_t delta = new_size - old_size; | 139 size_t delta = new_size - old_size; |
130 bytes_allocated_current_ += delta; | 140 bytes_allocated_current_ += delta; |
131 if (bytes_allocated_current_ > bytes_allocated_historical_max_) { | 141 if (bytes_allocated_current_ > bytes_allocated_historical_max_) { |
132 bytes_allocated_historical_max_ = bytes_allocated_current_; | 142 bytes_allocated_historical_max_ = bytes_allocated_current_; |
133 } | 143 } |
134 } | 144 } |
135 if (new_size != old_size) { | 145 if (new_size != old_size) { |
136 TRACE_COUNTER_ID1("GpuMemoryManager", | 146 TRACE_COUNTER_ID1("GpuMemoryManager", |
137 "GpuMemoryUsage", | 147 "GpuMemoryUsage", |
138 this, | 148 this, |
(...skipping 95 matching lines...)
234 stubs_without_surface_hibernated.push_back(stub); | 244 stubs_without_surface_hibernated.push_back(stub); |
235 } | 245 } |
236 | 246 |
237 size_t bonus_allocation = 0; | 247 size_t bonus_allocation = 0; |
238 #if !defined(OS_ANDROID) | 248 #if !defined(OS_ANDROID) |
239 // Calculate bonus allocation by splitting remainder of global limit equally | 249 // Calculate bonus allocation by splitting remainder of global limit equally |
240 // after giving out the minimum to those that need it. | 250 // after giving out the minimum to those that need it. |
241 size_t num_stubs_need_mem = stubs_with_surface_foreground.size() + | 251 size_t num_stubs_need_mem = stubs_with_surface_foreground.size() + |
242 stubs_without_surface_foreground.size() + | 252 stubs_without_surface_foreground.size() + |
243 stubs_without_surface_background.size(); | 253 stubs_without_surface_background.size(); |
244 size_t base_allocation_size = kMinimumAllocationForTab * num_stubs_need_mem; | 254 size_t base_allocation_size = GetMinimumTabAllocation() * num_stubs_need_mem; |
245 if (base_allocation_size < kMaximumAllocationForTabs && | 255 if (base_allocation_size < GetAvailableGpuMemory() && |
246 !stubs_with_surface_foreground.empty()) | 256 !stubs_with_surface_foreground.empty()) |
247 bonus_allocation = (kMaximumAllocationForTabs - base_allocation_size) / | 257 bonus_allocation = (GetAvailableGpuMemory() - base_allocation_size) / |
248 stubs_with_surface_foreground.size(); | 258 stubs_with_surface_foreground.size(); |
249 #else | 259 #else |
250 // On android, calculate bonus allocation based on surface size. | 260 // On android, calculate bonus allocation based on surface size. |
251 if (!stubs_with_surface_foreground.empty()) | 261 if (!stubs_with_surface_foreground.empty()) |
252 bonus_allocation = CalculateBonusMemoryAllocationBasedOnSize( | 262 bonus_allocation = CalculateBonusMemoryAllocationBasedOnSize( |
253 stubs_with_surface_foreground[0]->GetSurfaceSize()); | 263 stubs_with_surface_foreground[0]->GetSurfaceSize()); |
254 #endif | 264 #endif |
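For the non-Android branch above, a worked example of the split (hypothetical sizes: a 448 MB budget, a 64 MB per-tab minimum, three stubs needing the minimum, two of them foreground with surfaces):

    #include <cstddef>

    // Standalone sketch of the bonus split; all sizes are in bytes and the
    // numbers in the comment below are hypothetical.
    size_t ComputeBonus(size_t available, size_t minimum_per_tab,
                        size_t stubs_needing_minimum, size_t foreground_stubs) {
      size_t base = minimum_per_tab * stubs_needing_minimum;
      if (base >= available || foreground_stubs == 0)
        return 0;
      // e.g. (448 MB - 3 * 64 MB) / 2 = 128 MB of bonus per foreground stub,
      // so each foreground stub is offered 64 MB + 128 MB = 192 MB in total.
      return (available - base) / foreground_stubs;
    }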
255 | 265 |
256 stub_memory_stats_for_last_manage_.clear(); | 266 stub_memory_stats_for_last_manage_.clear(); |
257 | 267 |
258 // Now give out allocations to everyone. | 268 // Now give out allocations to everyone. |
259 AssignMemoryAllocations( | 269 AssignMemoryAllocations( |
260 &stub_memory_stats_for_last_manage_, | 270 &stub_memory_stats_for_last_manage_, |
261 stubs_with_surface_foreground, | 271 stubs_with_surface_foreground, |
262 GpuMemoryAllocation(kMinimumAllocationForTab + bonus_allocation, | 272 GpuMemoryAllocation(GetMinimumTabAllocation() + bonus_allocation, |
263 GpuMemoryAllocation::kHasFrontbuffer | | 273 GpuMemoryAllocation::kHasFrontbuffer | |
264 GpuMemoryAllocation::kHasBackbuffer), | 274 GpuMemoryAllocation::kHasBackbuffer), |
265 true); | 275 true); |
266 | 276 |
267 AssignMemoryAllocations( | 277 AssignMemoryAllocations( |
268 &stub_memory_stats_for_last_manage_, | 278 &stub_memory_stats_for_last_manage_, |
269 stubs_with_surface_background, | 279 stubs_with_surface_background, |
270 GpuMemoryAllocation(0, GpuMemoryAllocation::kHasFrontbuffer), | 280 GpuMemoryAllocation(0, GpuMemoryAllocation::kHasFrontbuffer), |
271 false); | 281 false); |
272 | 282 |
273 AssignMemoryAllocations( | 283 AssignMemoryAllocations( |
274 &stub_memory_stats_for_last_manage_, | 284 &stub_memory_stats_for_last_manage_, |
275 stubs_with_surface_hibernated, | 285 stubs_with_surface_hibernated, |
276 GpuMemoryAllocation(0, GpuMemoryAllocation::kHasNoBuffers), | 286 GpuMemoryAllocation(0, GpuMemoryAllocation::kHasNoBuffers), |
277 false); | 287 false); |
278 | 288 |
279 AssignMemoryAllocations( | 289 AssignMemoryAllocations( |
280 &stub_memory_stats_for_last_manage_, | 290 &stub_memory_stats_for_last_manage_, |
281 stubs_without_surface_foreground, | 291 stubs_without_surface_foreground, |
282 GpuMemoryAllocation(kMinimumAllocationForTab, | 292 GpuMemoryAllocation(GetMinimumTabAllocation(), |
283 GpuMemoryAllocation::kHasNoBuffers), | 293 GpuMemoryAllocation::kHasNoBuffers), |
284 true); | 294 true); |
285 | 295 |
286 AssignMemoryAllocations( | 296 AssignMemoryAllocations( |
287 &stub_memory_stats_for_last_manage_, | 297 &stub_memory_stats_for_last_manage_, |
288 stubs_without_surface_background, | 298 stubs_without_surface_background, |
289 GpuMemoryAllocation(kMinimumAllocationForTab, | 299 GpuMemoryAllocation(GetMinimumTabAllocation(), |
290 GpuMemoryAllocation::kHasNoBuffers), | 300 GpuMemoryAllocation::kHasNoBuffers), |
291 false); | 301 false); |
292 | 302 |
293 AssignMemoryAllocations( | 303 AssignMemoryAllocations( |
294 &stub_memory_stats_for_last_manage_, | 304 &stub_memory_stats_for_last_manage_, |
295 stubs_without_surface_hibernated, | 305 stubs_without_surface_hibernated, |
296 GpuMemoryAllocation(0, GpuMemoryAllocation::kHasNoBuffers), | 306 GpuMemoryAllocation(0, GpuMemoryAllocation::kHasNoBuffers), |
297 false); | 307 false); |
298 | |
299 size_t assigned_allocation_sum = 0; | |
300 for (StubMemoryStatMap::iterator it = | |
301 stub_memory_stats_for_last_manage_.begin(); | |
302 it != stub_memory_stats_for_last_manage_.end(); | |
303 ++it) { | |
304 assigned_allocation_sum += it->second.allocation.gpu_resource_size_in_bytes; | |
305 } | |
306 } | 308 } |
307 | 309 |
308 #endif | 310 #endif |