// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// TODO(jochen): Remove this after the setting is turned on globally.
#define V8_IMMINENT_DEPRECATION_WARNINGS

#include <stdlib.h>
#include <string.h>

#include <vector>

#include "src/globals.h"
#include "src/heap/heap.h"
#include "src/heap/spaces.h"
#include "src/heap/spaces-inl.h"
#include "test/cctest/cctest.h"

namespace v8 {
namespace internal {

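// Allocates a malloc-backed memory region used as the backing store for a
// LocalAllocationBuffer in the tests below. The memory is filled with 0xAA so
// stale contents are easy to spot.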
static Address AllocateLabBackingStore(intptr_t size_in_bytes) {
  char* base = reinterpret_cast<char*>(malloc(size_in_bytes));
  CHECK_EQ(reinterpret_cast<intptr_t>(base) % sizeof(void*), 0);
  memset(base, 0xAA, size_in_bytes);
  return reinterpret_cast<Address>(base);
}


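// Releases a backing store obtained from AllocateLabBackingStore.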
static void FreeLabBackingStore(Address base) { free(base); }


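// Checks that the range [base, limit) is fully covered by filler objects
// whose sizes match {expected_size} in order.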
static void VerifyIterable(v8::internal::Address base,
                           v8::internal::Address limit,
                           std::vector<intptr_t> expected_size) {
  CHECK_LE(reinterpret_cast<intptr_t>(base), reinterpret_cast<intptr_t>(limit));
  HeapObject* object = nullptr;
  size_t counter = 0;
  while (base < limit) {
    object = HeapObject::FromAddress(base);
    CHECK(object->IsFiller());
    CHECK_LT(counter, expected_size.size());
    CHECK_EQ(expected_size[counter], object->Size());
    base += object->Size();
    counter++;
  }
}


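// Attempts to allocate {size_in_bytes} from {lab}; on success the allocated
// memory is turned into a filler object so VerifyIterable can walk it later.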
static bool AllocateFromLab(Heap* heap, LocalAllocationBuffer* lab,
                            intptr_t size_in_bytes,
                            AllocationAlignment alignment = kWordAligned) {
  HeapObject* obj;
  AllocationResult result =
      lab->AllocateRawAligned(static_cast<int>(size_in_bytes), alignment);
  if (result.To(&obj)) {
    heap->CreateFillerObjectAt(obj->address(), static_cast<int>(size_in_bytes));
    return true;
  }
  return false;
}


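// An explicitly invalid buffer must report itself as invalid.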
TEST(InvalidLab) {
  LocalAllocationBuffer lab = LocalAllocationBuffer::InvalidBuffer();
  CHECK(!lab.IsValid());
}


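// A LAB that is never allocated from is implicitly closed when it goes out of
// scope, leaving its whole backing store behind as a single filler.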
TEST(UnusedLabImplicitClose) {
  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();
  heap->root(Heap::kOnePointerFillerMapRootIndex);
  const int kLabSize = 4 * KB;
  Address base = AllocateLabBackingStore(kLabSize);
  Address limit = base + kLabSize;
  intptr_t expected_sizes_raw[1] = {kLabSize};
  std::vector<intptr_t> expected_sizes(expected_sizes_raw,
                                       expected_sizes_raw + 1);
  {
    AllocationResult lab_backing_store(HeapObject::FromAddress(base));
    LocalAllocationBuffer lab =
        LocalAllocationBuffer::FromResult(heap, lab_backing_store, kLabSize);
    CHECK(lab.IsValid());
  }
  VerifyIterable(base, limit, expected_sizes);
  FreeLabBackingStore(base);
}


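// A single small allocation succeeds; closing the LAB fills the rest of the
// backing store.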
TEST(SimpleAllocate) {
  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();
  const int kLabSize = 4 * KB;
  Address base = AllocateLabBackingStore(kLabSize);
  Address limit = base + kLabSize;
  intptr_t sizes_raw[1] = {128};
  intptr_t expected_sizes_raw[2] = {128, kLabSize - 128};
  std::vector<intptr_t> sizes(sizes_raw, sizes_raw + 1);
  std::vector<intptr_t> expected_sizes(expected_sizes_raw,
                                       expected_sizes_raw + 2);
  {
    AllocationResult lab_backing_store(HeapObject::FromAddress(base));
    LocalAllocationBuffer lab =
        LocalAllocationBuffer::FromResult(heap, lab_backing_store, kLabSize);
    CHECK(lab.IsValid());
    intptr_t sum = 0;
    for (auto size : sizes) {
      if (AllocateFromLab(heap, &lab, size)) {
        sum += size;
      }
    }
  }
  VerifyIterable(base, limit, expected_sizes);
  FreeLabBackingStore(base);
}


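// Requests more than the LAB can hold: the allocation that no longer fits
// fails and the leftover tail becomes a filler.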
TEST(AllocateUntilLabOOM) {
  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();
  const int kLabSize = 2 * KB;
  Address base = AllocateLabBackingStore(kLabSize);
  Address limit = base + kLabSize;
  // Not all of the following objects fit into {kLabSize}.
  intptr_t sizes_raw[5] = {512, 512, 128, 512, 512};
  intptr_t expected_sizes_raw[5] = {512, 512, 128, 512, 384 /* left over */};
  std::vector<intptr_t> sizes(sizes_raw, sizes_raw + 5);
  std::vector<intptr_t> expected_sizes(expected_sizes_raw,
                                       expected_sizes_raw + 5);
  intptr_t sum = 0;
  {
    AllocationResult lab_backing_store(HeapObject::FromAddress(base));
    LocalAllocationBuffer lab =
        LocalAllocationBuffer::FromResult(heap, lab_backing_store, kLabSize);
    CHECK(lab.IsValid());
    for (auto size : sizes) {
      if (AllocateFromLab(heap, &lab, size)) {
        sum += size;
      }
    }
    CHECK_EQ(kLabSize - sum, 384);
  }
  VerifyIterable(base, limit, expected_sizes);
  FreeLabBackingStore(base);
}


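// Allocations that exactly exhaust the LAB leave no remainder.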
TEST(AllocateExactlyUntilLimit) {
  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();
  const int kLabSize = 2 * KB;
  Address base = AllocateLabBackingStore(kLabSize);
  Address limit = base + kLabSize;
  intptr_t sizes_raw[4] = {512, 512, 512, 512};
  intptr_t expected_sizes_raw[5] = {512, 512, 512, 512, 0};
  std::vector<intptr_t> sizes(sizes_raw, sizes_raw + 4);
  std::vector<intptr_t> expected_sizes(expected_sizes_raw,
                                       expected_sizes_raw + 5);
  {
    AllocationResult lab_backing_store(HeapObject::FromAddress(base));
    LocalAllocationBuffer lab =
        LocalAllocationBuffer::FromResult(heap, lab_backing_store, kLabSize);
    CHECK(lab.IsValid());
    intptr_t sum = 0;
    for (auto size : sizes) {
      if (AllocateFromLab(heap, &lab, size)) {
        sum += size;
      } else {
        break;
      }
    }
    CHECK_EQ(kLabSize - sum, 0);
  }
  VerifyIterable(base, limit, expected_sizes);
  FreeLabBackingStore(base);
}


#ifdef V8_HOST_ARCH_32_BIT
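// On 32-bit hosts, mixing word- and double-aligned allocations inserts an
// alignment filler between objects (the expected 4-byte entry).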
TEST(AllocateAligned) {
  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();
  const int kLabSize = 2 * KB;
  Address base = AllocateLabBackingStore(kLabSize);
  Address limit = base + kLabSize;
  std::pair<intptr_t, AllocationAlignment> sizes_raw[2] = {
      std::make_pair(116, kWordAligned), std::make_pair(64, kDoubleAligned)};
  std::vector<std::pair<intptr_t, AllocationAlignment>> sizes(sizes_raw,
                                                              sizes_raw + 2);
  intptr_t expected_sizes_raw[4] = {116, 4, 64, 1864};
  std::vector<intptr_t> expected_sizes(expected_sizes_raw,
                                       expected_sizes_raw + 4);

  {
    AllocationResult lab_backing_store(HeapObject::FromAddress(base));
    LocalAllocationBuffer lab =
        LocalAllocationBuffer::FromResult(heap, lab_backing_store, kLabSize);
    CHECK(lab.IsValid());
    for (auto pair : sizes) {
      if (!AllocateFromLab(heap, &lab, pair.first, pair.second)) {
        break;
      }
    }
  }
  VerifyIterable(base, limit, expected_sizes);
  FreeLabBackingStore(base);
}
#endif  // V8_HOST_ARCH_32_BIT

}  // namespace internal
}  // namespace v8