OLD | NEW |
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "content/common/gpu/gpu_command_buffer_stub.h" | 5 #include "content/common/gpu/gpu_command_buffer_stub.h" |
6 #include "content/common/gpu/gpu_memory_allocation.h" | 6 #include "content/common/gpu/gpu_memory_allocation.h" |
7 #include "content/common/gpu/gpu_memory_manager.h" | 7 #include "content/common/gpu/gpu_memory_manager.h" |
8 | 8 |
9 #include "testing/gtest/include/gtest/gtest.h" | 9 #include "testing/gtest/include/gtest/gtest.h" |
10 | 10 |
(...skipping 96 matching lines...)
107 static int32 GenerateUniqueSurfaceId() { | 107 static int32 GenerateUniqueSurfaceId() { |
108 static int32 surface_id_ = 1; | 108 static int32 surface_id_ = 1; |
109 return surface_id_++; | 109 return surface_id_++; |
110 } | 110 } |
111 | 111 |
112 static bool IsMoreImportant(GpuCommandBufferStubBase* lhs, | 112 static bool IsMoreImportant(GpuCommandBufferStubBase* lhs, |
113 GpuCommandBufferStubBase* rhs) { | 113 GpuCommandBufferStubBase* rhs) { |
114 return GpuMemoryManager::StubWithSurfaceComparator()(lhs, rhs); | 114 return GpuMemoryManager::StubWithSurfaceComparator()(lhs, rhs); |
115 } | 115 } |
116 | 116 |
117 static bool IsAllocationForegroundForSurfaceYes( | 117 bool IsAllocationForegroundForSurfaceYes( |
118 const GpuMemoryAllocation& alloc) { | 118 const GpuMemoryAllocation& alloc) { |
119 return alloc.suggest_have_frontbuffer && | 119 return alloc.suggest_have_frontbuffer && |
120 alloc.suggest_have_backbuffer && | 120 alloc.suggest_have_backbuffer && |
121 alloc.gpu_resource_size_in_bytes >= | 121 alloc.gpu_resource_size_in_bytes >= GetMinimumTabAllocation(); |
122 GpuMemoryManager::kMinimumAllocationForTab; | |
123 } | 122 } |
124 static bool IsAllocationBackgroundForSurfaceYes( | 123 bool IsAllocationBackgroundForSurfaceYes( |
125 const GpuMemoryAllocation& alloc) { | 124 const GpuMemoryAllocation& alloc) { |
126 return alloc.suggest_have_frontbuffer && | 125 return alloc.suggest_have_frontbuffer && |
127 !alloc.suggest_have_backbuffer && | 126 !alloc.suggest_have_backbuffer && |
128 alloc.gpu_resource_size_in_bytes == 0; | 127 alloc.gpu_resource_size_in_bytes == 0; |
129 } | 128 } |
130 static bool IsAllocationHibernatedForSurfaceYes( | 129 bool IsAllocationHibernatedForSurfaceYes( |
131 const GpuMemoryAllocation& alloc) { | 130 const GpuMemoryAllocation& alloc) { |
132 return !alloc.suggest_have_frontbuffer && | 131 return !alloc.suggest_have_frontbuffer && |
133 !alloc.suggest_have_backbuffer && | 132 !alloc.suggest_have_backbuffer && |
134 alloc.gpu_resource_size_in_bytes == 0; | 133 alloc.gpu_resource_size_in_bytes == 0; |
135 } | 134 } |
136 static bool IsAllocationForegroundForSurfaceNo( | 135 bool IsAllocationForegroundForSurfaceNo( |
137 const GpuMemoryAllocation& alloc) { | 136 const GpuMemoryAllocation& alloc) { |
138 return !alloc.suggest_have_frontbuffer && | 137 return !alloc.suggest_have_frontbuffer && |
139 !alloc.suggest_have_backbuffer && | 138 !alloc.suggest_have_backbuffer && |
140 alloc.gpu_resource_size_in_bytes == | 139 alloc.gpu_resource_size_in_bytes == GetMinimumTabAllocation(); |
141 GpuMemoryManager::kMinimumAllocationForTab; | |
142 } | 140 } |
143 static bool IsAllocationBackgroundForSurfaceNo( | 141 bool IsAllocationBackgroundForSurfaceNo( |
144 const GpuMemoryAllocation& alloc) { | 142 const GpuMemoryAllocation& alloc) { |
145 return !alloc.suggest_have_frontbuffer && | 143 return !alloc.suggest_have_frontbuffer && |
146 !alloc.suggest_have_backbuffer && | 144 !alloc.suggest_have_backbuffer && |
147 alloc.gpu_resource_size_in_bytes == | 145 alloc.gpu_resource_size_in_bytes == GetMinimumTabAllocation(); |
148 GpuMemoryManager::kMinimumAllocationForTab; | |
149 } | 146 } |
150 static bool IsAllocationHibernatedForSurfaceNo( | 147 bool IsAllocationHibernatedForSurfaceNo( |
151 const GpuMemoryAllocation& alloc) { | 148 const GpuMemoryAllocation& alloc) { |
152 return !alloc.suggest_have_frontbuffer && | 149 return !alloc.suggest_have_frontbuffer && |
153 !alloc.suggest_have_backbuffer && | 150 !alloc.suggest_have_backbuffer && |
154 alloc.gpu_resource_size_in_bytes == 0; | 151 alloc.gpu_resource_size_in_bytes == 0; |
155 } | 152 } |
156 | 153 |
157 void Manage() { | 154 void Manage() { |
158 memory_manager_.Manage(); | 155 memory_manager_.Manage(); |
159 } | 156 } |
160 | 157 |
| 158 size_t GetAvailableGpuMemory() { |
| 159 return memory_manager_.GetAvailableGpuMemory(); |
| 160 } |
| 161 |
| 162 size_t GetMinimumTabAllocation() { |
| 163 return memory_manager_.GetMinimumTabAllocation(); |
| 164 } |
| 165 |
161 base::TimeTicks older_, newer_, newest_; | 166 base::TimeTicks older_, newer_, newest_; |
162 FakeClient client_; | 167 FakeClient client_; |
163 GpuMemoryManager memory_manager_; | 168 GpuMemoryManager memory_manager_; |
164 }; | 169 }; |
165 | 170 |
166 // Create fake stubs with every combination of {visibility,last_use_time} | 171 // Create fake stubs with every combination of {visibility,last_use_time} |
167 // and make sure they compare correctly. Only compare stubs with surfaces. | 172 // and make sure they compare correctly. Only compare stubs with surfaces. |
168 // Expect {more visible, newer} surfaces to be more important, in that order. | 173 // Expect {more visible, newer} surfaces to be more important, in that order. |
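For orientation, here is a minimal, self-contained sketch of the ordering this comment describes: visibility dominates, and ties are broken by recency of use. The StubInfo type and its fields are stand-ins invented for this sketch; the real comparison is performed by GpuMemoryManager::StubWithSurfaceComparator on GpuCommandBufferStubBase objects.

#include <tuple>

struct StubInfo {
  bool visible;
  long long last_used_ticks;  // larger value means used more recently
};

// Returns true if |lhs| should be treated as more important than |rhs|:
// a visible stub beats a non-visible one, and among stubs with equal
// visibility the more recently used one wins.
bool IsMoreImportantSketch(const StubInfo& lhs, const StubInfo& rhs) {
  // std::tie compares lexicographically: visibility first, then recency.
  return std::tie(lhs.visible, lhs.last_used_ticks) >
         std::tie(rhs.visible, rhs.last_used_ticks);
}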
169 TEST_F(GpuMemoryManagerTest, ComparatorTests) { | 174 TEST_F(GpuMemoryManagerTest, ComparatorTests) { |
170 FakeCommandBufferStub stub_true1(GenerateUniqueSurfaceId(), true, older_), | 175 FakeCommandBufferStub stub_true1(GenerateUniqueSurfaceId(), true, older_), |
(...skipping 303 matching lines...)
474 EXPECT_TRUE(IsAllocationHibernatedForSurfaceYes(stub1.allocation_)); | 479 EXPECT_TRUE(IsAllocationHibernatedForSurfaceYes(stub1.allocation_)); |
475 EXPECT_TRUE(IsAllocationHibernatedForSurfaceYes(stub2.allocation_)); | 480 EXPECT_TRUE(IsAllocationHibernatedForSurfaceYes(stub2.allocation_)); |
476 EXPECT_TRUE(IsAllocationHibernatedForSurfaceNo(stub3.allocation_)); | 481 EXPECT_TRUE(IsAllocationHibernatedForSurfaceNo(stub3.allocation_)); |
477 EXPECT_TRUE(IsAllocationHibernatedForSurfaceNo(stub4.allocation_)); | 482 EXPECT_TRUE(IsAllocationHibernatedForSurfaceNo(stub4.allocation_)); |
478 } | 483 } |
479 | 484 |
480 #if !defined(OS_ANDROID) | 485 #if !defined(OS_ANDROID) |
481 // Test GpuMemoryAllocation memory allocation bonuses: | 486 // Test GpuMemoryAllocation memory allocation bonuses: |
482 // When the number of visible tabs is small, each tab should get a | 487 // When the number of visible tabs is small, each tab should get a |
483 // gpu_resource_size_in_bytes allocation value that is greater than | 488 // gpu_resource_size_in_bytes allocation value that is greater than |
484 // kMinimumAllocationForTab, and when the number of tabs is large, each should | 489 // GetMinimumTabAllocation(), and when the number of tabs is large, each should |
485 // get exactly kMinimumAllocationForTab and not less. | 490 // get exactly GetMinimumTabAllocation() and not less. |
486 TEST_F(GpuMemoryManagerTest, TestForegroundStubsGetBonusAllocation) { | 491 TEST_F(GpuMemoryManagerTest, TestForegroundStubsGetBonusAllocation) { |
487 size_t max_stubs_before_no_bonus = | 492 size_t max_stubs_before_no_bonus = |
488 GpuMemoryManager::kMaximumAllocationForTabs / | 493 GetAvailableGpuMemory() / (GetMinimumTabAllocation() + 1); |
489 (GpuMemoryManager::kMinimumAllocationForTab + 1); | |
490 | 494 |
491 std::vector<FakeCommandBufferStub> stubs; | 495 std::vector<FakeCommandBufferStub> stubs; |
492 for (size_t i = 0; i < max_stubs_before_no_bonus; ++i) { | 496 for (size_t i = 0; i < max_stubs_before_no_bonus; ++i) { |
493 stubs.push_back( | 497 stubs.push_back( |
494 FakeCommandBufferStub(GenerateUniqueSurfaceId(), true, older_)); | 498 FakeCommandBufferStub(GenerateUniqueSurfaceId(), true, older_)); |
495 } | 499 } |
496 for (size_t i = 0; i < stubs.size(); ++i) { | 500 for (size_t i = 0; i < stubs.size(); ++i) { |
497 client_.stubs_.push_back(&stubs[i]); | 501 client_.stubs_.push_back(&stubs[i]); |
498 } | 502 } |
499 | 503 |
500 Manage(); | 504 Manage(); |
501 for (size_t i = 0; i < stubs.size(); ++i) { | 505 for (size_t i = 0; i < stubs.size(); ++i) { |
502 EXPECT_TRUE(IsAllocationForegroundForSurfaceYes(stubs[i].allocation_)); | 506 EXPECT_TRUE(IsAllocationForegroundForSurfaceYes(stubs[i].allocation_)); |
503 EXPECT_GT(stubs[i].allocation_.gpu_resource_size_in_bytes, | 507 EXPECT_GT(stubs[i].allocation_.gpu_resource_size_in_bytes, |
504 static_cast<size_t>(GpuMemoryManager::kMinimumAllocationForTab)); | 508 static_cast<size_t>(GetMinimumTabAllocation())); |
505 } | 509 } |
506 | 510 |
507 FakeCommandBufferStub extra_stub(GenerateUniqueSurfaceId(), true, older_); | 511 FakeCommandBufferStub extra_stub(GenerateUniqueSurfaceId(), true, older_); |
508 client_.stubs_.push_back(&extra_stub); | 512 client_.stubs_.push_back(&extra_stub); |
509 | 513 |
510 Manage(); | 514 Manage(); |
511 for (size_t i = 0; i < stubs.size(); ++i) { | 515 for (size_t i = 0; i < stubs.size(); ++i) { |
512 EXPECT_TRUE(IsAllocationForegroundForSurfaceYes(stubs[i].allocation_)); | 516 EXPECT_TRUE(IsAllocationForegroundForSurfaceYes(stubs[i].allocation_)); |
513 EXPECT_EQ(stubs[i].allocation_.gpu_resource_size_in_bytes, | 517 EXPECT_EQ(stubs[i].allocation_.gpu_resource_size_in_bytes, |
514 GpuMemoryManager::kMinimumAllocationForTab); | 518 GetMinimumTabAllocation()); |
515 } | 519 } |
516 } | 520 } |
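For reference, a sketch of the per-tab policy the test above exercises, under the assumption that visible tabs split the available GPU memory evenly but a tab's share never drops below the minimum tab allocation nor exceeds the total budget. The function name and structure are illustrative only, not the production GpuMemoryManager code.

#include <algorithm>
#include <cstddef>

std::size_t PerVisibleTabAllocationSketch(std::size_t available_gpu_memory,
                                          std::size_t minimum_tab_allocation,
                                          std::size_t visible_tab_count) {
  if (visible_tab_count == 0)
    return 0;
  // Even share of the budget; the "bonus" is whatever exceeds the minimum.
  std::size_t even_share = available_gpu_memory / visible_tab_count;
  return std::min(std::max(even_share, minimum_tab_allocation),
                  available_gpu_memory);
}

Under this assumption a bonus (a share above the minimum) is only possible while the visible-tab count is at most available / (minimum + 1), which is how max_stubs_before_no_bonus is computed in the test above.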
517 #else | 521 #else |
518 // Test GpuMemoryAllocation memory allocation bonuses: | 522 // Test GpuMemoryAllocation memory allocation bonuses: |
519 // When the size of tab contents is small, bonus allocation should be 0. | 523 // When the size of tab contents is small, bonus allocation should be 0. |
520 // As the size of tab contents increases, bonus allocation should increase | 524 // As the size of tab contents increases, bonus allocation should increase |
521 // until finally reaching the maximum allocation limit. | 525 // until finally reaching the maximum allocation limit. |
522 TEST_F(GpuMemoryManagerTest, TestForegroundStubsGetBonusAllocationAndroid) { | 526 TEST_F(GpuMemoryManagerTest, TestForegroundStubsGetBonusAllocationAndroid) { |
523 FakeCommandBufferStub stub(GenerateUniqueSurfaceId(), true, older_); | 527 FakeCommandBufferStub stub(GenerateUniqueSurfaceId(), true, older_); |
524 client_.stubs_.push_back(&stub); | 528 client_.stubs_.push_back(&stub); |
525 | 529 |
526 stub.size_ = gfx::Size(1,1); | 530 stub.size_ = gfx::Size(1,1); |
527 Manage(); | 531 Manage(); |
528 EXPECT_TRUE(IsAllocationForegroundForSurfaceYes(stub.allocation_)); | 532 EXPECT_TRUE(IsAllocationForegroundForSurfaceYes(stub.allocation_)); |
529 EXPECT_EQ(stub.allocation_.gpu_resource_size_in_bytes, | 533 EXPECT_EQ(stub.allocation_.gpu_resource_size_in_bytes, |
530 GpuMemoryManager::kMinimumAllocationForTab); | 534 GetMinimumTabAllocation()); |
531 | 535 |
532 // Keep increasing size, making sure the allocation is always increasing, | 536 // Keep increasing size, making sure the allocation is always increasing, |
533 // until it finally reaches the maximum. | 537 // until it finally reaches the maximum. |
534 while (stub.allocation_.gpu_resource_size_in_bytes < | 538 while (stub.allocation_.gpu_resource_size_in_bytes < |
535 GpuMemoryManager::kMaximumAllocationForTabs) { | 539 GetAvailableGpuMemory()) { |
536 size_t previous_allocation = stub.allocation_.gpu_resource_size_in_bytes; | 540 size_t previous_allocation = stub.allocation_.gpu_resource_size_in_bytes; |
537 | 541 |
538 stub.size_ = stub.size_.Scale(1, 2); | 542 stub.size_ = stub.size_.Scale(1, 2); |
539 | 543 |
540 Manage(); | 544 Manage(); |
541 EXPECT_TRUE(IsAllocationForegroundForSurfaceYes(stub.allocation_)); | 545 EXPECT_TRUE(IsAllocationForegroundForSurfaceYes(stub.allocation_)); |
542 EXPECT_GE(stub.allocation_.gpu_resource_size_in_bytes, | 546 EXPECT_GE(stub.allocation_.gpu_resource_size_in_bytes, |
543 GpuMemoryManager::kMinimumAllocationForTab); | 547 GetMinimumTabAllocation()); |
544 EXPECT_LE(stub.allocation_.gpu_resource_size_in_bytes, | 548 EXPECT_LE(stub.allocation_.gpu_resource_size_in_bytes, |
545 GpuMemoryManager::kMaximumAllocationForTabs); | 549 GetAvailableGpuMemory()); |
546 EXPECT_GE(stub.allocation_.gpu_resource_size_in_bytes, | 550 EXPECT_GE(stub.allocation_.gpu_resource_size_in_bytes, |
547 previous_allocation); | 551 previous_allocation); |
548 } | 552 } |
549 | 553 |
550 // One final size increase to confirm it stays capped at maximum. | 554 // One final size increase to confirm it stays capped at maximum. |
551 stub.size_ = stub.size_.Scale(1, 2); | 555 stub.size_ = stub.size_.Scale(1, 2); |
552 | 556 |
553 Manage(); | 557 Manage(); |
554 EXPECT_TRUE(IsAllocationForegroundForSurfaceYes(stub.allocation_)); | 558 EXPECT_TRUE(IsAllocationForegroundForSurfaceYes(stub.allocation_)); |
555 EXPECT_EQ(stub.allocation_.gpu_resource_size_in_bytes, | 559 EXPECT_EQ(stub.allocation_.gpu_resource_size_in_bytes, |
556 GpuMemoryManager::kMaximumAllocationForTabs); | 560 GetAvailableGpuMemory()); |
557 } | 561 } |
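As a rough model of the Android path above: the allocation is effectively the contents' own memory need clamped into [minimum tab allocation, available GPU memory], so small contents get no bonus and large contents are capped at the maximum. The helper below is a sketch under that assumption; contents_bytes stands in for something like width * height * an assumed bytes-per-pixel factor, and none of these names come from the production code.

#include <algorithm>
#include <cstddef>

std::size_t AndroidTabAllocationSketch(std::size_t contents_bytes,
                                       std::size_t minimum_tab_allocation,
                                       std::size_t available_gpu_memory) {
  // Clamp the contents' need into [minimum, available]: small contents get
  // no bonus, large contents are capped at the total available memory.
  return std::min(std::max(contents_bytes, minimum_tab_allocation),
                  available_gpu_memory);
}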
558 #endif | 562 #endif |
559 | 563 |
560 // Test GpuMemoryAllocation comparison operators: Iterate over all possible | 564 // Test GpuMemoryAllocation comparison operators: Iterate over all possible |
561 // combinations of gpu_resource_size_in_bytes, suggest_have_backbuffer, and | 565 // combinations of gpu_resource_size_in_bytes, suggest_have_backbuffer, and |
562 // suggest_have_frontbuffer, and make sure allocations with equal values test | 566 // suggest_have_frontbuffer, and make sure allocations with equal values test |
563 // equal and non equal values test not equal. | 567 // equal and non equal values test not equal. |
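A minimal sketch of the field-wise equality being verified, using a stand-in struct with the same three fields; the real operators are defined on GpuMemoryAllocation itself.

#include <cstddef>

struct AllocationSketch {
  std::size_t gpu_resource_size_in_bytes;
  bool suggest_have_backbuffer;
  bool suggest_have_frontbuffer;
};

// Two allocations are equal only when all three fields match; != negates ==.
bool operator==(const AllocationSketch& a, const AllocationSketch& b) {
  return a.gpu_resource_size_in_bytes == b.gpu_resource_size_in_bytes &&
         a.suggest_have_backbuffer == b.suggest_have_backbuffer &&
         a.suggest_have_frontbuffer == b.suggest_have_frontbuffer;
}

bool operator!=(const AllocationSketch& a, const AllocationSketch& b) {
  return !(a == b);
}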
564 TEST_F(GpuMemoryManagerTest, GpuMemoryAllocationCompareTests) { | 568 TEST_F(GpuMemoryManagerTest, GpuMemoryAllocationCompareTests) { |
565 std::vector<int> gpu_resource_size_in_bytes_values; | 569 std::vector<int> gpu_resource_size_in_bytes_values; |
566 gpu_resource_size_in_bytes_values.push_back(0); | 570 gpu_resource_size_in_bytes_values.push_back(0); |
(...skipping 99 matching lines...)
666 EXPECT_EQ(stats.size(), 3ul); | 670 EXPECT_EQ(stats.size(), 3ul); |
667 EXPECT_FALSE(stats[&stub1].visible); | 671 EXPECT_FALSE(stats[&stub1].visible); |
668 EXPECT_FALSE(stats[&stub2].visible); | 672 EXPECT_FALSE(stats[&stub2].visible); |
669 EXPECT_TRUE(stats[&stub3].visible); | 673 EXPECT_TRUE(stats[&stub3].visible); |
670 EXPECT_EQ(stub1allocation4, 0ul); | 674 EXPECT_EQ(stub1allocation4, 0ul); |
671 EXPECT_GE(stub2allocation4, 0ul); | 675 EXPECT_GE(stub2allocation4, 0ul); |
672 EXPECT_GT(stub3allocation4, 0ul); | 676 EXPECT_GT(stub3allocation4, 0ul); |
673 if (compositors_get_bonus_allocation) | 677 if (compositors_get_bonus_allocation) |
674 EXPECT_GT(stub3allocation4, stub3allocation3); | 678 EXPECT_GT(stub3allocation4, stub3allocation3); |
675 } | 679 } |