| OLD | NEW |
| 1 // Copyright 2006-2008 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2008 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 73 matching lines...) |
| 84 CHECK(p->OffsetToAddress(Page::kObjectStartOffset) == p->ObjectAreaStart()); | 84 CHECK(p->OffsetToAddress(Page::kObjectStartOffset) == p->ObjectAreaStart()); |
| 85 CHECK(p->OffsetToAddress(Page::kPageSize) == p->ObjectAreaEnd()); | 85 CHECK(p->OffsetToAddress(Page::kPageSize) == p->ObjectAreaEnd()); |
| 86 | 86 |
| 87 // test region marking | 87 // test region marking |
| 88 VerifyRegionMarking(page_start); | 88 VerifyRegionMarking(page_start); |
| 89 | 89 |
| 90 DeleteArray(mem); | 90 DeleteArray(mem); |
| 91 } | 91 } |
| 92 | 92 |
| 93 | 93 |
| 94 namespace v8 { |
| 95 namespace internal { |
| 96 |
| 97 // Temporarily sets a given allocator in an isolate. |
| 98 class TestMemoryAllocatorScope { |
| 99 public: |
| 100 TestMemoryAllocatorScope(Isolate* isolate, MemoryAllocator* allocator) |
| 101 : isolate_(isolate), |
| 102 old_allocator_(isolate->memory_allocator_) { |
| 103 isolate->memory_allocator_ = allocator; |
| 104 } |
| 105 |
| 106 ~TestMemoryAllocatorScope() { |
| 107 isolate_->memory_allocator_ = old_allocator_; |
| 108 } |
| 109 |
| 110 private: |
| 111 Isolate* isolate_; |
| 112 MemoryAllocator* old_allocator_; |
| 113 |
| 114 DISALLOW_COPY_AND_ASSIGN(TestMemoryAllocatorScope); |
| 115 }; |
| 116 |
| 117 } } // namespace v8::internal |
| 118 |
| 119 |
| 94 TEST(MemoryAllocator) { | 120 TEST(MemoryAllocator) { |
| 95 OS::Setup(); | 121 OS::Setup(); |
| 96 Isolate* isolate = Isolate::Current(); | 122 Isolate* isolate = Isolate::Current(); |
| 97 CHECK(HEAP->ConfigureHeapDefault()); | 123 isolate->InitializeLoggingAndCounters(); |
| 98 CHECK(isolate->memory_allocator()->Setup(HEAP->MaxReserved(), | 124 Heap* heap = isolate->heap(); |
| 99 HEAP->MaxExecutableSize())); | 125 CHECK(heap->ConfigureHeapDefault()); |
| 126 MemoryAllocator* memory_allocator = new MemoryAllocator(isolate); |
| 127 CHECK(memory_allocator->Setup(heap->MaxReserved(), |
| 128 heap->MaxExecutableSize())); |
| 129 TestMemoryAllocatorScope test_scope(isolate, memory_allocator); |
| 100 | 130 |
| 101 OldSpace faked_space(HEAP, | 131 OldSpace faked_space(heap, |
| 102 HEAP->MaxReserved(), | 132 heap->MaxReserved(), |
| 103 OLD_POINTER_SPACE, | 133 OLD_POINTER_SPACE, |
| 104 NOT_EXECUTABLE); | 134 NOT_EXECUTABLE); |
| 105 int total_pages = 0; | 135 int total_pages = 0; |
| 106 int requested = MemoryAllocator::kPagesPerChunk; | 136 int requested = MemoryAllocator::kPagesPerChunk; |
| 107 int allocated; | 137 int allocated; |
| 108 // If we request n pages, we should get n or n - 1. | 138 // If we request n pages, we should get n or n - 1. |
| 109 Page* first_page = | 139 Page* first_page = memory_allocator->AllocatePages( |
| 110 isolate->memory_allocator()->AllocatePages( | 140 requested, &allocated, &faked_space); |
| 111 requested, &allocated, &faked_space); | |
| 112 CHECK(first_page->is_valid()); | 141 CHECK(first_page->is_valid()); |
| 113 CHECK(allocated == requested || allocated == requested - 1); | 142 CHECK(allocated == requested || allocated == requested - 1); |
| 114 total_pages += allocated; | 143 total_pages += allocated; |
| 115 | 144 |
| 116 Page* last_page = first_page; | 145 Page* last_page = first_page; |
| 117 for (Page* p = first_page; p->is_valid(); p = p->next_page()) { | 146 for (Page* p = first_page; p->is_valid(); p = p->next_page()) { |
| 118 CHECK(isolate->memory_allocator()->IsPageInSpace(p, &faked_space)); | 147 CHECK(memory_allocator->IsPageInSpace(p, &faked_space)); |
| 119 last_page = p; | 148 last_page = p; |
| 120 } | 149 } |
| 121 | 150 |
| 122 // Again, we should get n or n - 1 pages. | 151 // Again, we should get n or n - 1 pages. |
| 123 Page* others = | 152 Page* others = memory_allocator->AllocatePages( |
| 124 isolate->memory_allocator()->AllocatePages( | 153 requested, &allocated, &faked_space); |
| 125 requested, &allocated, &faked_space); | |
| 126 CHECK(others->is_valid()); | 154 CHECK(others->is_valid()); |
| 127 CHECK(allocated == requested || allocated == requested - 1); | 155 CHECK(allocated == requested || allocated == requested - 1); |
| 128 total_pages += allocated; | 156 total_pages += allocated; |
| 129 | 157 |
| 130 isolate->memory_allocator()->SetNextPage(last_page, others); | 158 memory_allocator->SetNextPage(last_page, others); |
| 131 int page_count = 0; | 159 int page_count = 0; |
| 132 for (Page* p = first_page; p->is_valid(); p = p->next_page()) { | 160 for (Page* p = first_page; p->is_valid(); p = p->next_page()) { |
| 133 CHECK(isolate->memory_allocator()->IsPageInSpace(p, &faked_space)); | 161 CHECK(memory_allocator->IsPageInSpace(p, &faked_space)); |
| 134 page_count++; | 162 page_count++; |
| 135 } | 163 } |
| 136 CHECK(total_pages == page_count); | 164 CHECK(total_pages == page_count); |
| 137 | 165 |
| 138 Page* second_page = first_page->next_page(); | 166 Page* second_page = first_page->next_page(); |
| 139 CHECK(second_page->is_valid()); | 167 CHECK(second_page->is_valid()); |
| 140 | 168 |
| 141 // Freeing pages at the first chunk starting at or after the second page | 169 // Freeing pages at the first chunk starting at or after the second page |
| 142 // should free the entire second chunk. It will return the page it was passed | 170 // should free the entire second chunk. It will return the page it was passed |
| 143 // (since the second page was in the first chunk). | 171 // (since the second page was in the first chunk). |
| 144 Page* free_return = isolate->memory_allocator()->FreePages(second_page); | 172 Page* free_return = memory_allocator->FreePages(second_page); |
| 145 CHECK(free_return == second_page); | 173 CHECK(free_return == second_page); |
| 146 isolate->memory_allocator()->SetNextPage(first_page, free_return); | 174 memory_allocator->SetNextPage(first_page, free_return); |
| 147 | 175 |
| 148 // Freeing pages in the first chunk starting at the first page should free | 176 // Freeing pages in the first chunk starting at the first page should free |
| 149 // the first chunk and return an invalid page. | 177 // the first chunk and return an invalid page. |
| 150 Page* invalid_page = isolate->memory_allocator()->FreePages(first_page); | 178 Page* invalid_page = memory_allocator->FreePages(first_page); |
| 151 CHECK(!invalid_page->is_valid()); | 179 CHECK(!invalid_page->is_valid()); |
| 152 | 180 |
| 153 isolate->memory_allocator()->TearDown(); | 181 memory_allocator->TearDown(); |
| 182 delete memory_allocator; |
| 154 } | 183 } |
| 155 | 184 |
| 156 | 185 |
| 157 TEST(NewSpace) { | 186 TEST(NewSpace) { |
| 158 OS::Setup(); | 187 OS::Setup(); |
| 159 CHECK(HEAP->ConfigureHeapDefault()); | 188 Isolate* isolate = Isolate::Current(); |
| 160 CHECK(Isolate::Current()->memory_allocator()->Setup( | 189 isolate->InitializeLoggingAndCounters(); |
| 161 HEAP->MaxReserved(), HEAP->MaxExecutableSize())); | 190 Heap* heap = isolate->heap(); |
| 191 CHECK(heap->ConfigureHeapDefault()); |
| 192 MemoryAllocator* memory_allocator = new MemoryAllocator(isolate); |
| 193 CHECK(memory_allocator->Setup(heap->MaxReserved(), |
| 194 heap->MaxExecutableSize())); |
| 195 TestMemoryAllocatorScope test_scope(isolate, memory_allocator); |
| 162 | 196 |
| 163 NewSpace new_space(HEAP); | 197 NewSpace new_space(heap); |
| 164 | 198 |
| 165 void* chunk = | 199 void* chunk = |
| 166 Isolate::Current()->memory_allocator()->ReserveInitialChunk( | 200 memory_allocator->ReserveInitialChunk(4 * heap->ReservedSemiSpaceSize()); |
| 167 4 * HEAP->ReservedSemiSpaceSize()); | |
| 168 CHECK(chunk != NULL); | 201 CHECK(chunk != NULL); |
| 169 Address start = RoundUp(static_cast<Address>(chunk), | 202 Address start = RoundUp(static_cast<Address>(chunk), |
| 170 2 * HEAP->ReservedSemiSpaceSize()); | 203 2 * heap->ReservedSemiSpaceSize()); |
| 171 CHECK(new_space.Setup(start, 2 * HEAP->ReservedSemiSpaceSize())); | 204 CHECK(new_space.Setup(start, 2 * heap->ReservedSemiSpaceSize())); |
| 172 CHECK(new_space.HasBeenSetup()); | 205 CHECK(new_space.HasBeenSetup()); |
| 173 | 206 |
| 174 while (new_space.Available() >= Page::kMaxHeapObjectSize) { | 207 while (new_space.Available() >= Page::kMaxHeapObjectSize) { |
| 175 Object* obj = | 208 Object* obj = |
| 176 new_space.AllocateRaw(Page::kMaxHeapObjectSize)->ToObjectUnchecked(); | 209 new_space.AllocateRaw(Page::kMaxHeapObjectSize)->ToObjectUnchecked(); |
| 177 CHECK(new_space.Contains(HeapObject::cast(obj))); | 210 CHECK(new_space.Contains(HeapObject::cast(obj))); |
| 178 } | 211 } |
| 179 | 212 |
| 180 new_space.TearDown(); | 213 new_space.TearDown(); |
| 181 Isolate::Current()->memory_allocator()->TearDown(); | 214 memory_allocator->TearDown(); |
| 215 delete memory_allocator; |
| 182 } | 216 } |
| 183 | 217 |
| 184 | 218 |
| 185 TEST(OldSpace) { | 219 TEST(OldSpace) { |
| 186 OS::Setup(); | 220 OS::Setup(); |
| 187 CHECK(HEAP->ConfigureHeapDefault()); | 221 Isolate* isolate = Isolate::Current(); |
| 188 CHECK(Isolate::Current()->memory_allocator()->Setup( | 222 isolate->InitializeLoggingAndCounters(); |
| 189 HEAP->MaxReserved(), HEAP->MaxExecutableSize())); | 223 Heap* heap = isolate->heap(); |
| 224 CHECK(heap->ConfigureHeapDefault()); |
| 225 MemoryAllocator* memory_allocator = new MemoryAllocator(isolate); |
| 226 CHECK(memory_allocator->Setup(heap->MaxReserved(), |
| 227 heap->MaxExecutableSize())); |
| 228 TestMemoryAllocatorScope test_scope(isolate, memory_allocator); |
| 190 | 229 |
| 191 OldSpace* s = new OldSpace(HEAP, | 230 OldSpace* s = new OldSpace(heap, |
| 192 HEAP->MaxOldGenerationSize(), | 231 heap->MaxOldGenerationSize(), |
| 193 OLD_POINTER_SPACE, | 232 OLD_POINTER_SPACE, |
| 194 NOT_EXECUTABLE); | 233 NOT_EXECUTABLE); |
| 195 CHECK(s != NULL); | 234 CHECK(s != NULL); |
| 196 | 235 |
| 197 void* chunk = | 236 void* chunk = memory_allocator->ReserveInitialChunk( |
| 198 Isolate::Current()->memory_allocator()->ReserveInitialChunk( | 237 4 * heap->ReservedSemiSpaceSize()); |
| 199 4 * HEAP->ReservedSemiSpaceSize()); | |
| 200 CHECK(chunk != NULL); | 238 CHECK(chunk != NULL); |
| 201 Address start = static_cast<Address>(chunk); | 239 Address start = static_cast<Address>(chunk); |
| 202 size_t size = RoundUp(start, 2 * HEAP->ReservedSemiSpaceSize()) - start; | 240 size_t size = RoundUp(start, 2 * heap->ReservedSemiSpaceSize()) - start; |
| 203 | 241 |
| 204 CHECK(s->Setup(start, size)); | 242 CHECK(s->Setup(start, size)); |
| 205 | 243 |
| 206 while (s->Available() > 0) { | 244 while (s->Available() > 0) { |
| 207 s->AllocateRaw(Page::kMaxHeapObjectSize)->ToObjectUnchecked(); | 245 s->AllocateRaw(Page::kMaxHeapObjectSize)->ToObjectUnchecked(); |
| 208 } | 246 } |
| 209 | 247 |
| 210 s->TearDown(); | 248 s->TearDown(); |
| 211 delete s; | 249 delete s; |
| 212 Isolate::Current()->memory_allocator()->TearDown(); | 250 memory_allocator->TearDown(); |
| 251 delete memory_allocator; |
| 213 } | 252 } |
| 214 | 253 |
| 215 | 254 |
| 216 TEST(LargeObjectSpace) { | 255 TEST(LargeObjectSpace) { |
| 217 OS::Setup(); | 256 v8::V8::Initialize(); |
| 218 CHECK(HEAP->Setup(false)); | |
| 219 | 257 |
| 220 LargeObjectSpace* lo = HEAP->lo_space(); | 258 LargeObjectSpace* lo = HEAP->lo_space(); |
| 221 CHECK(lo != NULL); | 259 CHECK(lo != NULL); |
| 222 | 260 |
| 223 Map* faked_map = reinterpret_cast<Map*>(HeapObject::FromAddress(0)); | 261 Map* faked_map = reinterpret_cast<Map*>(HeapObject::FromAddress(0)); |
| 224 int lo_size = Page::kPageSize; | 262 int lo_size = Page::kPageSize; |
| 225 | 263 |
| 226 Object* obj = lo->AllocateRaw(lo_size)->ToObjectUnchecked(); | 264 Object* obj = lo->AllocateRaw(lo_size)->ToObjectUnchecked(); |
| 227 CHECK(obj->IsHeapObject()); | 265 CHECK(obj->IsHeapObject()); |
| 228 | 266 |
| (...skipping 11 matching lines...) |
| 240 { MaybeObject* maybe_obj = lo->AllocateRaw(lo_size); | 278 { MaybeObject* maybe_obj = lo->AllocateRaw(lo_size); |
| 241 if (!maybe_obj->ToObject(&obj)) break; | 279 if (!maybe_obj->ToObject(&obj)) break; |
| 242 } | 280 } |
| 243 HeapObject::cast(obj)->set_map(faked_map); | 281 HeapObject::cast(obj)->set_map(faked_map); |
| 244 CHECK(lo->Available() < available); | 282 CHECK(lo->Available() < available); |
| 245 }; | 283 }; |
| 246 | 284 |
| 247 CHECK(!lo->IsEmpty()); | 285 CHECK(!lo->IsEmpty()); |
| 248 | 286 |
| 249 CHECK(lo->AllocateRaw(lo_size)->IsFailure()); | 287 CHECK(lo->AllocateRaw(lo_size)->IsFailure()); |
| 250 | |
| 251 lo->TearDown(); | |
| 252 delete lo; | |
| 253 | |
| 254 Isolate::Current()->memory_allocator()->TearDown(); | |
| 255 } | 288 } |