OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #ifndef V8_HEAP_SPACES_INL_H_ | 5 #ifndef V8_HEAP_SPACES_INL_H_ |
6 #define V8_HEAP_SPACES_INL_H_ | 6 #define V8_HEAP_SPACES_INL_H_ |
7 | 7 |
8 #include "src/heap/incremental-marking.h" | 8 #include "src/heap/incremental-marking.h" |
9 #include "src/heap/spaces.h" | 9 #include "src/heap/spaces.h" |
10 #include "src/isolate.h" | 10 #include "src/isolate.h" |
(...skipping 239 matching lines...) |
250 // -------------------------------------------------------------------------- | 250 // -------------------------------------------------------------------------- |
251 // PagedSpace | 251 // PagedSpace |
252 | 252 |
253 Page* Page::Initialize(Heap* heap, MemoryChunk* chunk, Executability executable, | 253 Page* Page::Initialize(Heap* heap, MemoryChunk* chunk, Executability executable, |
254 PagedSpace* owner) { | 254 PagedSpace* owner) { |
255 Page* page = reinterpret_cast<Page*>(chunk); | 255 Page* page = reinterpret_cast<Page*>(chunk); |
256 page->mutex_ = new base::Mutex(); | 256 page->mutex_ = new base::Mutex(); |
257 DCHECK(page->area_size() <= kAllocatableMemory); | 257 DCHECK(page->area_size() <= kAllocatableMemory); |
258 DCHECK(chunk->owner() == owner); | 258 DCHECK(chunk->owner() == owner); |
259 owner->IncreaseCapacity(page->area_size()); | 259 owner->IncreaseCapacity(page->area_size()); |
| 260 heap->incremental_marking()->SetOldSpacePageFlags(chunk); |
| 261 |
| 262 // Make sure that categories are initialized before freeing the area. |
| 263 page->InitializeFreeListCategories(); |
260 owner->Free(page->area_start(), page->area_size()); | 264 owner->Free(page->area_start(), page->area_size()); |
261 | 265 |
262 heap->incremental_marking()->SetOldSpacePageFlags(chunk); | 266 return page; |
| 267 } |
263 | 268 |
264 return page; | 269 void Page::InitializeFreeListCategories() { |
| 270 for (int i = kFirstCategory; i < kNumberOfCategories; i++) { |
| 271 categories_[i].Initialize(static_cast<FreeListCategoryType>(i)); |
| 272 } |
265 } | 273 } |
266 | 274 |
267 void MemoryChunk::IncrementLiveBytesFromGC(HeapObject* object, int by) { | 275 void MemoryChunk::IncrementLiveBytesFromGC(HeapObject* object, int by) { |
268 MemoryChunk::FromAddress(object->address())->IncrementLiveBytes(by); | 276 MemoryChunk::FromAddress(object->address())->IncrementLiveBytes(by); |
269 } | 277 } |
270 | 278 |
271 void MemoryChunk::ResetLiveBytes() { | 279 void MemoryChunk::ResetLiveBytes() { |
272 if (FLAG_trace_live_bytes) { | 280 if (FLAG_trace_live_bytes) { |
273 PrintIsolate(heap()->isolate(), "live-bytes: reset page=%p %d->0\n", this, | 281 PrintIsolate(heap()->isolate(), "live-bytes: reset page=%p %d->0\n", this, |
274 live_byte_count_); | 282 live_byte_count_); |
(...skipping 26 matching lines...) |
301 return p->owner() == this; | 309 return p->owner() == this; |
302 } | 310 } |
303 | 311 |
304 bool PagedSpace::Contains(Object* o) { | 312 bool PagedSpace::Contains(Object* o) { |
305 if (!o->IsHeapObject()) return false; | 313 if (!o->IsHeapObject()) return false; |
306 Page* p = Page::FromAddress(HeapObject::cast(o)->address()); | 314 Page* p = Page::FromAddress(HeapObject::cast(o)->address()); |
307 if (!p->is_valid()) return false; | 315 if (!p->is_valid()) return false; |
308 return p->owner() == this; | 316 return p->owner() == this; |
309 } | 317 } |
310 | 318 |
| 319 void PagedSpace::UnlinkFreeListCategories(Page* page) { |
| 320 DCHECK_EQ(this, page->owner()); |
| 321 page->ForAllFreeListCategories([this](FreeListCategory* category) { |
| 322 DCHECK_EQ(free_list(), category->owner()); |
| 323 free_list()->RemoveCategory(category); |
| 324 }); |
| 325 } |
| 326 |
| 327 intptr_t PagedSpace::RelinkFreeListCategories(Page* page) { |
| 328 DCHECK_EQ(this, page->owner()); |
| 329 intptr_t added = 0; |
| 330 page->ForAllFreeListCategories([&added](FreeListCategory* category) { |
| 331 added += category->available(); |
| 332 category->Relink(); |
| 333 }); |
| 334 return added; |
| 335 } |
| 336 |
311 MemoryChunk* MemoryChunk::FromAnyPointerAddress(Heap* heap, Address addr) { | 337 MemoryChunk* MemoryChunk::FromAnyPointerAddress(Heap* heap, Address addr) { |
312 MemoryChunk* chunk = MemoryChunk::FromAddress(addr); | 338 MemoryChunk* chunk = MemoryChunk::FromAddress(addr); |
313 uintptr_t offset = addr - chunk->address(); | 339 uintptr_t offset = addr - chunk->address(); |
314 if (offset < MemoryChunk::kHeaderSize || !chunk->HasPageHeader()) { | 340 if (offset < MemoryChunk::kHeaderSize || !chunk->HasPageHeader()) { |
315 chunk = heap->lo_space()->FindPage(addr); | 341 chunk = heap->lo_space()->FindPage(addr); |
316 } | 342 } |
317 return chunk; | 343 return chunk; |
318 } | 344 } |
319 | 345 |
320 Page* Page::FromAnyPointerAddress(Heap* heap, Address addr) { | 346 Page* Page::FromAnyPointerAddress(Heap* heap, Address addr) { |
321 return static_cast<Page*>(MemoryChunk::FromAnyPointerAddress(heap, addr)); | 347 return static_cast<Page*>(MemoryChunk::FromAnyPointerAddress(heap, addr)); |
322 } | 348 } |
323 | 349 |
| 350 void Page::MarkNeverAllocateForTesting() { |
| 351 DCHECK(!IsFlagSet(NEVER_ALLOCATE_ON_PAGE)); |
| 352 SetFlag(NEVER_ALLOCATE_ON_PAGE); |
| 353 reinterpret_cast<PagedSpace*>(owner())->free_list()->EvictFreeListItems(this); |
| 354 } |
| 355 |
| 356 void Page::MarkEvacuationCandidate() { |
| 357 DCHECK(!IsFlagSet(NEVER_EVACUATE)); |
| 358 DCHECK_NULL(old_to_old_slots_); |
| 359 DCHECK_NULL(typed_old_to_old_slots_); |
| 360 SetFlag(EVACUATION_CANDIDATE); |
| 361 reinterpret_cast<PagedSpace*>(owner())->free_list()->EvictFreeListItems(this); |
| 362 } |
| 363 |
| 364 void Page::ClearEvacuationCandidate() { |
| 365 DCHECK_NULL(old_to_old_slots_); |
| 366 DCHECK_NULL(typed_old_to_old_slots_); |
| 367 ClearFlag(EVACUATION_CANDIDATE); |
| 368 InitializeFreeListCategories(); |
| 369 } |
| 370 |
324 MemoryChunkIterator::MemoryChunkIterator(Heap* heap, Mode mode) | 371 MemoryChunkIterator::MemoryChunkIterator(Heap* heap, Mode mode) |
325 : state_(kOldSpaceState), | 372 : state_(kOldSpaceState), |
326 mode_(mode), | 373 mode_(mode), |
327 old_iterator_(heap->old_space()), | 374 old_iterator_(heap->old_space()), |
328 code_iterator_(heap->code_space()), | 375 code_iterator_(heap->code_space()), |
329 map_iterator_(heap->map_space()), | 376 map_iterator_(heap->map_space()), |
330 lo_iterator_(heap->lo_space()) {} | 377 lo_iterator_(heap->lo_space()) {} |
331 | 378 |
332 MemoryChunk* MemoryChunkIterator::next() { | 379 MemoryChunk* MemoryChunkIterator::next() { |
333 switch (state_) { | 380 switch (state_) { |
(...skipping 28 matching lines...) Expand all Loading... |
362 } | 409 } |
363 case kFinishedState: | 410 case kFinishedState: |
364 return nullptr; | 411 return nullptr; |
365 default: | 412 default: |
366 break; | 413 break; |
367 } | 414 } |
368 UNREACHABLE(); | 415 UNREACHABLE(); |
369 return nullptr; | 416 return nullptr; |
370 } | 417 } |
371 | 418 |
372 | |
373 void Page::set_next_page(Page* page) { | 419 void Page::set_next_page(Page* page) { |
374 DCHECK(page->owner() == owner()); | 420 DCHECK(page->owner() == owner()); |
375 set_next_chunk(page); | 421 set_next_chunk(page); |
376 } | 422 } |
377 | 423 |
378 | |
379 void Page::set_prev_page(Page* page) { | 424 void Page::set_prev_page(Page* page) { |
380 DCHECK(page->owner() == owner()); | 425 DCHECK(page->owner() == owner()); |
381 set_prev_chunk(page); | 426 set_prev_chunk(page); |
382 } | 427 } |
383 | 428 |
| 429 Page* FreeListCategory::page() { |
| 430 return Page::FromAddress(reinterpret_cast<Address>(this)); |
| 431 } |
| 432 |
| 433 FreeList* FreeListCategory::owner() { |
| 434 return reinterpret_cast<PagedSpace*>( |
| 435 Page::FromAddress(reinterpret_cast<Address>(this))->owner()) |
| 436 ->free_list(); |
| 437 } |
| 438 |
| 439 bool FreeListCategory::is_linked() { |
| 440 return prev_ != nullptr || next_ != nullptr || owner()->top(type_) == this; |
| 441 } |
384 | 442 |
385 // Try linear allocation in the page of alloc_info's allocation top. Does | 443 // Try linear allocation in the page of alloc_info's allocation top. Does |
386 // not contain slow case logic (e.g. move to the next page or try free list | 444 // not contain slow case logic (e.g. move to the next page or try free list |
387 // allocation) so it can be used by all the allocation functions and for all | 445 // allocation) so it can be used by all the allocation functions and for all |
388 // the paged spaces. | 446 // the paged spaces. |
389 HeapObject* PagedSpace::AllocateLinearly(int size_in_bytes) { | 447 HeapObject* PagedSpace::AllocateLinearly(int size_in_bytes) { |
390 Address current_top = allocation_info_.top(); | 448 Address current_top = allocation_info_.top(); |
391 Address new_top = current_top + size_in_bytes; | 449 Address new_top = current_top + size_in_bytes; |
392 if (new_top > allocation_info_.limit()) return NULL; | 450 if (new_top > allocation_info_.limit()) return NULL; |
393 | 451 |
(...skipping 231 matching lines...) |
625 other->allocation_info_.Reset(nullptr, nullptr); | 683 other->allocation_info_.Reset(nullptr, nullptr); |
626 return true; | 684 return true; |
627 } | 685 } |
628 return false; | 686 return false; |
629 } | 687 } |
630 | 688 |
631 } // namespace internal | 689 } // namespace internal |
632 } // namespace v8 | 690 } // namespace v8 |
633 | 691 |
634 #endif // V8_HEAP_SPACES_INL_H_ | 692 #endif // V8_HEAP_SPACES_INL_H_ |