// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

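// Tests for LocalAllocationBuffer (LAB): a bump-pointer allocation buffer
// carved out of a larger backing store, whose contents stay heap-iterable.
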
// TODO(jochen): Remove this after the setting is turned on globally.
#define V8_IMMINENT_DEPRECATION_WARNINGS

#include <stdlib.h>  // For malloc() and free().
#include <string.h>  // For memset().

#include <utility>
#include <vector>

#include "src/globals.h"
#include "src/heap/heap.h"
#include "src/heap/spaces.h"
#include "src/heap/spaces-inl.h"
#include "test/cctest/cctest.h"


namespace v8 {
namespace internal {

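// Allocates raw memory to serve as the LAB backing store and fills it with a
// recognizable garbage pattern (0xAA).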
static Address AllocateLabBackingStore(intptr_t size_in_bytes) {
  char* base = reinterpret_cast<char*>(malloc(size_in_bytes));
  CHECK(base != nullptr);
  CHECK_EQ(0u, reinterpret_cast<uintptr_t>(base) % sizeof(void*));
  memset(base, 0xAA, size_in_bytes);
  return reinterpret_cast<Address>(base);
}


static void FreeLabBackingStore(Address base) { free(base); }


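// Walks [base, limit) and verifies that it consists solely of filler objects
// whose sizes match |expected_size|, in order.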
static void VerifyIterable(v8::internal::Address base,
                           v8::internal::Address limit,
                           const std::vector<intptr_t>& expected_size) {
  CHECK_LE(reinterpret_cast<intptr_t>(base), reinterpret_cast<intptr_t>(limit));
  HeapObject* object = nullptr;
  size_t counter = 0;
  while (base < limit) {
    object = HeapObject::FromAddress(base);
    CHECK(object->IsFiller());
    CHECK_LT(counter, expected_size.size());
    CHECK_EQ(expected_size[counter], object->Size());
    base += object->Size();
    counter++;
  }
}


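// Tries to allocate |size_in_bytes| from |lab|; on success the new memory is
// turned into a filler object so the backing store remains iterable.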
static bool AllocateFromLab(Heap* heap, LocalAllocationBuffer* lab,
                            intptr_t size_in_bytes,
                            AllocationAlignment alignment = kWordAligned) {
  HeapObject* obj = nullptr;
  AllocationResult result =
      lab->AllocateRawAligned(static_cast<int>(size_in_bytes), alignment);
  if (result.To(&obj)) {
    heap->CreateFillerObjectAt(obj->address(), static_cast<int>(size_in_bytes));
    return true;
  }
  return false;
}


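// An explicitly invalidated LAB must report itself as invalid.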
TEST(InvalidLab) {
  LocalAllocationBuffer lab = LocalAllocationBuffer::InvalidBuffer();
  CHECK(!lab.IsValid());
}


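// A LAB that is never allocated from is closed implicitly when it goes out of
// scope; the whole backing store must then be covered by a single filler.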
TEST(UnusedLabImplicitClose) {
  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();
  heap->root(Heap::kOnePointerFillerMapRootIndex);
  const int kLabSize = 4 * KB;
  Address base = AllocateLabBackingStore(kLabSize);
  Address limit = base + kLabSize;
  intptr_t expected_sizes_raw[1] = {kLabSize};
  std::vector<intptr_t> expected_sizes(expected_sizes_raw,
                                       expected_sizes_raw + 1);
  {
    AllocationResult lab_backing_store(HeapObject::FromAddress(base));
    LocalAllocationBuffer lab =
        LocalAllocationBuffer::FromResult(heap, lab_backing_store, kLabSize);
    CHECK(lab.IsValid());
  }
  VerifyIterable(base, limit, expected_sizes);
  FreeLabBackingStore(base);
}


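// A single allocation leaves two fillers behind: one for the allocated object
// and one for the unused remainder of the buffer.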
TEST(SimpleAllocate) {
  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();
  const int kLabSize = 4 * KB;
  Address base = AllocateLabBackingStore(kLabSize);
  Address limit = base + kLabSize;
  intptr_t sizes_raw[1] = {128};
  intptr_t expected_sizes_raw[2] = {128, kLabSize - 128};
  std::vector<intptr_t> sizes(sizes_raw, sizes_raw + 1);
  std::vector<intptr_t> expected_sizes(expected_sizes_raw,
                                       expected_sizes_raw + 2);
  {
    AllocationResult lab_backing_store(HeapObject::FromAddress(base));
    LocalAllocationBuffer lab =
        LocalAllocationBuffer::FromResult(heap, lab_backing_store, kLabSize);
    CHECK(lab.IsValid());
    intptr_t sum = 0;
    for (auto size : sizes) {
      if (AllocateFromLab(heap, &lab, size)) {
        sum += size;
      }
    }
    // The single 128 byte request must fit into the 4 KB buffer.
    CHECK_EQ(128, sum);
  }
  VerifyIterable(base, limit, expected_sizes);
  FreeLabBackingStore(base);
}


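// Requests more memory in total than the LAB can hold; the allocation that no
// longer fits fails and the remaining 384 bytes are left as a filler.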
TEST(AllocateUntilLabOOM) {
  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();
  const int kLabSize = 2 * KB;
  Address base = AllocateLabBackingStore(kLabSize);
  Address limit = base + kLabSize;
  // The requested sizes sum up to 2176 bytes and hence do not fit into
  // {kLabSize}; the last allocation fails and leaves 384 bytes unused.
  intptr_t sizes_raw[5] = {512, 512, 128, 512, 512};
  intptr_t expected_sizes_raw[5] = {512, 512, 128, 512, 384 /* left over */};
  std::vector<intptr_t> sizes(sizes_raw, sizes_raw + 5);
  std::vector<intptr_t> expected_sizes(expected_sizes_raw,
                                       expected_sizes_raw + 5);
  intptr_t sum = 0;
  {
    AllocationResult lab_backing_store(HeapObject::FromAddress(base));
    LocalAllocationBuffer lab =
        LocalAllocationBuffer::FromResult(heap, lab_backing_store, kLabSize);
    CHECK(lab.IsValid());
    for (auto size : sizes) {
      if (AllocateFromLab(heap, &lab, size)) {
        sum += size;
      }
    }
    CHECK_EQ(kLabSize - sum, 384);
  }
  VerifyIterable(base, limit, expected_sizes);
  FreeLabBackingStore(base);
}


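// Allocations that consume the LAB exactly leave no trailing filler behind.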
TEST(AllocateExactlyUntilLimit) {
  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();
  const int kLabSize = 2 * KB;
  Address base = AllocateLabBackingStore(kLabSize);
  Address limit = base + kLabSize;
  intptr_t sizes_raw[4] = {512, 512, 512, 512};
  intptr_t expected_sizes_raw[5] = {512, 512, 512, 512, 0};
  std::vector<intptr_t> sizes(sizes_raw, sizes_raw + 4);
  std::vector<intptr_t> expected_sizes(expected_sizes_raw,
                                       expected_sizes_raw + 5);
  {
    AllocationResult lab_backing_store(HeapObject::FromAddress(base));
    LocalAllocationBuffer lab =
        LocalAllocationBuffer::FromResult(heap, lab_backing_store, kLabSize);
    CHECK(lab.IsValid());
    intptr_t sum = 0;
    for (auto size : sizes) {
      if (AllocateFromLab(heap, &lab, size)) {
        sum += size;
      } else {
        break;
      }
    }
    CHECK_EQ(kLabSize - sum, 0);
  }
  VerifyIterable(base, limit, expected_sizes);
  FreeLabBackingStore(base);
}


#ifdef V8_HOST_ARCH_32_BIT
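// On 32-bit hosts, double-aligned allocations may require a 4 byte alignment
// filler, which shows up in the expected iteration sizes below.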
TEST(AllocateAligned) {
  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();
  const int kLabSize = 2 * KB;
  Address base = AllocateLabBackingStore(kLabSize);
  Address limit = base + kLabSize;
  std::pair<intptr_t, AllocationAlignment> sizes_raw[2] = {
      std::make_pair(116, kWordAligned), std::make_pair(64, kDoubleAligned)};
  std::vector<std::pair<intptr_t, AllocationAlignment>> sizes(sizes_raw,
                                                              sizes_raw + 2);
  intptr_t expected_sizes_raw[4] = {116, 4, 64, 1864};
  std::vector<intptr_t> expected_sizes(expected_sizes_raw,
                                       expected_sizes_raw + 4);

  {
    AllocationResult lab_backing_store(HeapObject::FromAddress(base));
    LocalAllocationBuffer lab =
        LocalAllocationBuffer::FromResult(heap, lab_backing_store, kLabSize);
    CHECK(lab.IsValid());
    for (auto pair : sizes) {
      if (!AllocateFromLab(heap, &lab, pair.first, pair.second)) {
        break;
      }
    }
  }
  VerifyIterable(base, limit, expected_sizes);
  FreeLabBackingStore(base);
}
#endif  // V8_HOST_ARCH_32_BIT

}  // namespace internal
}  // namespace v8