OLD | NEW |
1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "platform/assert.h" | 5 #include "platform/assert.h" |
6 #include "vm/freelist.h" | 6 #include "vm/freelist.h" |
7 #include "vm/unit_test.h" | 7 #include "vm/unit_test.h" |
8 | 8 |
9 namespace dart { | 9 namespace dart { |
10 | 10 |
11 TEST_CASE(FreeList) { | 11 static uword Allocate(FreeList* free_list, intptr_t size, bool is_protected) { |
12 FreeList* free_list = new FreeList(); | 12 uword result = free_list->TryAllocate(size, is_protected); |
13 intptr_t kBlobSize = 1 * MB; | 13 if (result && is_protected) { |
14 intptr_t kSmallObjectSize = 4 * kWordSize; | 14 bool status = VirtualMemory::Protect(reinterpret_cast<void*>(result), |
15 intptr_t kMediumObjectSize = 16 * kWordSize; | 15 size, |
16 intptr_t kLargeObjectSize = 8 * KB; | 16 VirtualMemory::kReadExecute); |
17 uword blob = reinterpret_cast<uword>(malloc(kBlobSize)); | 17 ASSERT(status); |
| 18 } |
| 19 return result; |
| 20 } |
| 21 |
| 22 |
| 23 static void Free(FreeList* free_list, |
| 24 uword address, |
| 25 intptr_t size, |
| 26 bool is_protected) { |
| 27 if (is_protected) { |
| 28 bool status = VirtualMemory::Protect(reinterpret_cast<void*>(address), |
| 29 size, |
| 30 VirtualMemory::kReadWrite); |
| 31 ASSERT(status); |
| 32 } |
| 33 free_list->Free(address, size); |
| 34 if (is_protected) { |
| 35 bool status = VirtualMemory::Protect(reinterpret_cast<void*>(address), |
| 36 size, |
| 37 VirtualMemory::kReadExecute); |
| 38 ASSERT(status); |
| 39 } |
| 40 } |
| 41 |
| 42 |
| 43 static void TestFreeList(VirtualMemory* region, |
| 44 FreeList* free_list, |
| 45 bool is_protected) { |
| 46 const intptr_t kSmallObjectSize = 4 * kWordSize; |
| 47 const intptr_t kMediumObjectSize = 16 * kWordSize; |
| 48 const intptr_t kLargeObjectSize = 8 * KB; |
| 49 uword blob = region->start(); |
18 // Enqueue the large blob as one free block. | 50 // Enqueue the large blob as one free block. |
19 free_list->Free(blob, kBlobSize); | 51 free_list->Free(blob, region->size()); |
| 52 |
| 53 if (is_protected) { |
| 54 // Write protect the whole region. |
| 55 region->Protect(VirtualMemory::kReadExecute); |
| 56 } |
| 57 |
20 // Allocate a small object. Expect it to be positioned as the first element. | 58 // Allocate a small object. Expect it to be positioned as the first element. |
21 uword small_object = free_list->TryAllocate(kSmallObjectSize, false); | 59 uword small_object = Allocate(free_list, kSmallObjectSize, is_protected); |
22 EXPECT_EQ(blob, small_object); | 60 EXPECT_EQ(blob, small_object); |
23 // Freeing and allocating should give us the same memory back. | 61 // Freeing and allocating should give us the same memory back. |
24 free_list->Free(small_object, kSmallObjectSize); | 62 Free(free_list, small_object, kSmallObjectSize, is_protected); |
25 small_object = free_list->TryAllocate(kSmallObjectSize, false); | 63 small_object = Allocate(free_list, kSmallObjectSize, is_protected); |
26 EXPECT_EQ(blob, small_object); | 64 EXPECT_EQ(blob, small_object); |
27 // Split the remainder further with small and medium objects. | 65 // Split the remainder further with small and medium objects. |
28 uword small_object2 = free_list->TryAllocate(kSmallObjectSize, false); | 66 uword small_object2 = Allocate(free_list, kSmallObjectSize, is_protected); |
29 EXPECT_EQ(blob + kSmallObjectSize, small_object2); | 67 EXPECT_EQ(blob + kSmallObjectSize, small_object2); |
30 uword med_object = free_list->TryAllocate(kMediumObjectSize, false); | 68 uword med_object = Allocate(free_list, kMediumObjectSize, is_protected); |
31 EXPECT_EQ(small_object2 + kSmallObjectSize, med_object); | 69 EXPECT_EQ(small_object2 + kSmallObjectSize, med_object); |
32 // Allocate a large object. | 70 // Allocate a large object. |
33 uword large_object = free_list->TryAllocate(kLargeObjectSize, false); | 71 uword large_object = Allocate(free_list, kLargeObjectSize, is_protected); |
34 EXPECT_EQ(med_object + kMediumObjectSize, large_object); | 72 EXPECT_EQ(med_object + kMediumObjectSize, large_object); |
35 // Make sure that small objects can still split the remainder. | 73 // Make sure that small objects can still split the remainder. |
36 uword small_object3 = free_list->TryAllocate(kSmallObjectSize, false); | 74 uword small_object3 = Allocate(free_list, kSmallObjectSize, is_protected); |
37 EXPECT_EQ(large_object + kLargeObjectSize, small_object3); | 75 EXPECT_EQ(large_object + kLargeObjectSize, small_object3); |
38 // Split the large object. | 76 // Split the large object. |
39 free_list->Free(large_object, kLargeObjectSize); | 77 Free(free_list, large_object, kLargeObjectSize, is_protected); |
40 uword small_object4 = free_list->TryAllocate(kSmallObjectSize, false); | 78 uword small_object4 = Allocate(free_list, kSmallObjectSize, is_protected); |
41 EXPECT_EQ(large_object, small_object4); | 79 EXPECT_EQ(large_object, small_object4); |
42 // Get the full remainder of the large object. | 80 // Get the full remainder of the large object. |
43 large_object = | 81 large_object = |
44 free_list->TryAllocate(kLargeObjectSize - kSmallObjectSize, false); | 82 Allocate(free_list, kLargeObjectSize - kSmallObjectSize, is_protected); |
45 EXPECT_EQ(small_object4 + kSmallObjectSize, large_object); | 83 EXPECT_EQ(small_object4 + kSmallObjectSize, large_object); |
46 // Get another large object from the large unallocated remainder. | 84 // Get another large object from the large unallocated remainder. |
47 uword large_object2 = free_list->TryAllocate(kLargeObjectSize, false); | 85 uword large_object2 = Allocate(free_list, kLargeObjectSize, is_protected); |
48 EXPECT_EQ(small_object3 + kSmallObjectSize, large_object2); | 86 EXPECT_EQ(small_object3 + kSmallObjectSize, large_object2); |
| 87 } |
| 88 |
| 89 TEST_CASE(FreeList) { |
| 90 FreeList* free_list = new FreeList(); |
| 91 const intptr_t kBlobSize = 1 * MB; |
| 92 VirtualMemory* region = VirtualMemory::Reserve(kBlobSize); |
| 93 region->Commit(/* is_executable = */ false); |
| 94 |
| 95 TestFreeList(region, free_list, false); |
| 96 |
49 // Delete the memory associated with the test. | 97 // Delete the memory associated with the test. |
50 free(reinterpret_cast<void*>(blob)); | 98 delete region; |
51 delete free_list; | 99 delete free_list; |
52 } | 100 } |
53 | 101 |
| 102 |
| 103 TEST_CASE(FreeListProtected) { |
| 104 FreeList* free_list = new FreeList(); |
| 105 const intptr_t kBlobSize = 1 * MB; |
| 106 VirtualMemory* region = VirtualMemory::Reserve(kBlobSize); |
| 107 region->Commit(/* is_executable = */ false); |
| 108 |
| 109 TestFreeList(region, free_list, true); |
| 110 |
| 111 // Delete the memory associated with the test. |
| 112 delete region; |
| 113 delete free_list; |
| 114 } |
| 115 |
| 116 |
| 117 TEST_CASE(FreeListProtectedTinyObjects) { |
| 118 FreeList* free_list = new FreeList(); |
| 119 const intptr_t kBlobSize = 1 * MB; |
| 120 const intptr_t kObjectSize = 2 * kWordSize; |
| 121 uword* objects = new uword[kBlobSize / kObjectSize]; |
| 122 |
| 123 VirtualMemory* blob = VirtualMemory::ReserveAligned(kBlobSize, 4096); |
| 124 blob->Commit(/* is_executable = */ false); |
| 125 blob->Protect(VirtualMemory::kReadWrite); |
| 126 |
| 127 // Enqueue the large blob as one free block. |
| 128 free_list->Free(blob->start(), blob->size()); |
| 129 |
| 130 // Write protect the whole region. |
| 131 blob->Protect(VirtualMemory::kReadExecute); |
| 132 |
| 133 // Allocate small objects. |
| 134 for (intptr_t i = 0; i < blob->size() / kObjectSize; i++) { |
| 135 objects[i] = Allocate(free_list, |
| 136 kObjectSize, |
| 137 true); // is_protected |
| 138 } |
| 139 |
| 140 // All space is occupied. Expect failed allocation. |
| 141 ASSERT(Allocate(free_list, kObjectSize, true) == 0); |
| 142 |
| 143 // Free all objects again. Make the whole region writable for this. |
| 144 blob->Protect(VirtualMemory::kReadWrite); |
| 145 for (intptr_t i = 0; i < blob->size() / kObjectSize; i++) { |
| 146 free_list->Free(objects[i], kObjectSize); |
| 147 } |
| 148 |
| 149 // Delete the memory associated with the test. |
| 150 delete blob; |
| 151 delete free_list; |
| 152 delete[] objects; |
| 153 } |
| 154 |
| 155 |
| 156 TEST_CASE(FreeListProtectedVariableSizeObjects) { |
| 157 FreeList* free_list = new FreeList(); |
| 158 const intptr_t kBlobSize = 8 * KB; |
| 159 const intptr_t kMinSize = 2 * kWordSize; |
| 160 uword* objects = new uword[kBlobSize / kMinSize]; |
| 161 for (intptr_t i = 0; i < kBlobSize / kMinSize; ++i) { |
| 162 objects[i] = NULL; |
| 163 } |
| 164 |
| 165 VirtualMemory* blob = VirtualMemory::ReserveAligned(kBlobSize, 4096); |
| 166 blob->Commit(/* is_executable = */ false); |
| 167 blob->Protect(VirtualMemory::kReadWrite); |
| 168 |
| 169 // Enqueue the large blob as one free block. |
| 170 free_list->Free(blob->start(), blob->size()); |
| 171 |
| 172 // Write protect the whole region. |
| 173 blob->Protect(VirtualMemory::kReadExecute); |
| 174 |
| 175 // Allocate and free objects so that the free list has more than one element. |
| 176 uword e0 = Allocate(free_list, 1 * KB, true); |
| 177 ASSERT(e0); |
| 178 uword e1 = Allocate(free_list, 3 * KB, true); |
| 179 ASSERT(e1); |
| 180 uword e2 = Allocate(free_list, 2 * KB, true); |
| 181 ASSERT(e2); |
| 182 uword e3 = Allocate(free_list, 2 * KB, true); |
| 183 ASSERT(e3); |
| 184 |
| 185 Free(free_list, e1, 3 * KB, true); |
| 186 Free(free_list, e2, 2 * KB, true); |
| 187 e0 = Allocate(free_list, 3 * KB - 2 * kWordSize, true); |
| 188 ASSERT(e0); |
| 189 |
| 190 // Delete the memory associated with the test. |
| 191 delete blob; |
| 192 delete free_list; |
| 193 delete[] objects; |
| 194 } |
| 195 |
54 } // namespace dart | 196 } // namespace dart |