OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #ifndef V8_HEAP_SPACES_INL_H_ | 5 #ifndef V8_HEAP_SPACES_INL_H_ |
6 #define V8_HEAP_SPACES_INL_H_ | 6 #define V8_HEAP_SPACES_INL_H_ |
7 | 7 |
8 #include "src/heap/incremental-marking.h" | 8 #include "src/heap/incremental-marking.h" |
9 #include "src/heap/spaces.h" | 9 #include "src/heap/spaces.h" |
10 #include "src/isolate.h" | 10 #include "src/isolate.h" |
(...skipping 244 matching lines...)
255 // -------------------------------------------------------------------------- | 255 // -------------------------------------------------------------------------- |
256 // PagedSpace | 256 // PagedSpace |
257 | 257 |
258 Page* Page::Initialize(Heap* heap, MemoryChunk* chunk, Executability executable, | 258 Page* Page::Initialize(Heap* heap, MemoryChunk* chunk, Executability executable, |
259 PagedSpace* owner) { | 259 PagedSpace* owner) { |
260 Page* page = reinterpret_cast<Page*>(chunk); | 260 Page* page = reinterpret_cast<Page*>(chunk); |
261 page->mutex_ = new base::Mutex(); | 261 page->mutex_ = new base::Mutex(); |
262 DCHECK(page->area_size() <= kAllocatableMemory); | 262 DCHECK(page->area_size() <= kAllocatableMemory); |
263 DCHECK(chunk->owner() == owner); | 263 DCHECK(chunk->owner() == owner); |
264 owner->IncreaseCapacity(page->area_size()); | 264 owner->IncreaseCapacity(page->area_size()); |
| 265 heap->incremental_marking()->SetOldSpacePageFlags(chunk); |
| 266 |
| 267 // Make sure that categories are initialized before freeing the area. |
| 268 page->InitializeFreeListCategories(); |
265 owner->Free(page->area_start(), page->area_size()); | 269 owner->Free(page->area_start(), page->area_size()); |
266 | 270 |
267 heap->incremental_marking()->SetOldSpacePageFlags(chunk); | 271 return page; |
| 272 } |
268 | 273 |
269 return page; | 274 void Page::InitializeFreeListCategories() { |
| 275 for (int i = kFirstCategory; i < kNumberOfCategories; i++) { |
| 276 categories_[i].Initialize(static_cast<FreeListCategoryType>(i)); |
| 277 } |
270 } | 278 } |
271 | 279 |
272 void MemoryChunk::IncrementLiveBytesFromGC(HeapObject* object, int by) { | 280 void MemoryChunk::IncrementLiveBytesFromGC(HeapObject* object, int by) { |
273 MemoryChunk::FromAddress(object->address())->IncrementLiveBytes(by); | 281 MemoryChunk::FromAddress(object->address())->IncrementLiveBytes(by); |
274 } | 282 } |
275 | 283 |
276 void MemoryChunk::ResetLiveBytes() { | 284 void MemoryChunk::ResetLiveBytes() { |
277 if (FLAG_trace_live_bytes) { | 285 if (FLAG_trace_live_bytes) { |
278 PrintIsolate(heap()->isolate(), "live-bytes: reset page=%p %d->0\n", this, | 286 PrintIsolate(heap()->isolate(), "live-bytes: reset page=%p %d->0\n", this, |
279 live_byte_count_); | 287 live_byte_count_); |
(...skipping 27 matching lines...)
307 return p->owner() == this; | 315 return p->owner() == this; |
308 } | 316 } |
309 | 317 |
310 bool PagedSpace::Contains(Object* o) { | 318 bool PagedSpace::Contains(Object* o) { |
311 if (!o->IsHeapObject()) return false; | 319 if (!o->IsHeapObject()) return false; |
312 Page* p = Page::FromAddress(HeapObject::cast(o)->address()); | 320 Page* p = Page::FromAddress(HeapObject::cast(o)->address()); |
313 if (!p->is_valid()) return false; | 321 if (!p->is_valid()) return false; |
314 return p->owner() == this; | 322 return p->owner() == this; |
315 } | 323 } |
316 | 324 |
| 325 void PagedSpace::UnlinkFreeListCategories(Page* page) { |
| 326 DCHECK_EQ(this, page->owner()); |
| 327 page->ForAllFreeListCategories([this](FreeListCategory* category) { |
| 328 DCHECK_EQ(free_list(), category->owner()); |
| 329 free_list()->RemoveCategory(category); |
| 330 }); |
| 331 } |
| 332 |
| 333 intptr_t PagedSpace::RelinkFreeListCategories(Page* page) { |
| 334 DCHECK_EQ(this, page->owner()); |
| 335 intptr_t added = 0; |
| 336 page->ForAllFreeListCategories([&added](FreeListCategory* category) { |
| 337 added += category->available(); |
| 338 category->Relink(); |
| 339 }); |
| 340 return added; |
| 341 } |
| 342 |
317 MemoryChunk* MemoryChunk::FromAnyPointerAddress(Heap* heap, Address addr) { | 343 MemoryChunk* MemoryChunk::FromAnyPointerAddress(Heap* heap, Address addr) { |
318 MemoryChunk* chunk = MemoryChunk::FromAddress(addr); | 344 MemoryChunk* chunk = MemoryChunk::FromAddress(addr); |
319 uintptr_t offset = addr - chunk->address(); | 345 uintptr_t offset = addr - chunk->address(); |
320 if (offset < MemoryChunk::kHeaderSize || !chunk->HasPageHeader()) { | 346 if (offset < MemoryChunk::kHeaderSize || !chunk->HasPageHeader()) { |
321 chunk = heap->lo_space()->FindPage(addr); | 347 chunk = heap->lo_space()->FindPage(addr); |
322 } | 348 } |
323 return chunk; | 349 return chunk; |
324 } | 350 } |
325 | 351 |
326 Page* Page::FromAnyPointerAddress(Heap* heap, Address addr) { | 352 Page* Page::FromAnyPointerAddress(Heap* heap, Address addr) { |
327 return static_cast<Page*>(MemoryChunk::FromAnyPointerAddress(heap, addr)); | 353 return static_cast<Page*>(MemoryChunk::FromAnyPointerAddress(heap, addr)); |
328 } | 354 } |
329 | 355 |
| 356 void Page::MarkNeverAllocateForTesting() { |
| 357 DCHECK(!IsFlagSet(NEVER_ALLOCATE_ON_PAGE)); |
| 358 SetFlag(NEVER_ALLOCATE_ON_PAGE); |
| 359 reinterpret_cast<PagedSpace*>(owner())->free_list()->EvictFreeListItems(this); |
| 360 } |
| 361 |
| 362 void Page::MarkEvacuationCandidate() { |
| 363 DCHECK(!IsFlagSet(NEVER_EVACUATE)); |
| 364 DCHECK_NULL(old_to_old_slots_); |
| 365 DCHECK_NULL(typed_old_to_old_slots_); |
| 366 SetFlag(EVACUATION_CANDIDATE); |
| 367 reinterpret_cast<PagedSpace*>(owner())->free_list()->EvictFreeListItems(this); |
| 368 } |
| 369 |
| 370 void Page::ClearEvacuationCandidate() { |
| 371 DCHECK_NULL(old_to_old_slots_); |
| 372 DCHECK_NULL(typed_old_to_old_slots_); |
| 373 ClearFlag(EVACUATION_CANDIDATE); |
| 374 InitializeFreeListCategories(); |
| 375 } |
| 376 |
330 MemoryChunkIterator::MemoryChunkIterator(Heap* heap, Mode mode) | 377 MemoryChunkIterator::MemoryChunkIterator(Heap* heap, Mode mode) |
331 : state_(kOldSpaceState), | 378 : state_(kOldSpaceState), |
332 mode_(mode), | 379 mode_(mode), |
333 old_iterator_(heap->old_space()), | 380 old_iterator_(heap->old_space()), |
334 code_iterator_(heap->code_space()), | 381 code_iterator_(heap->code_space()), |
335 map_iterator_(heap->map_space()), | 382 map_iterator_(heap->map_space()), |
336 lo_iterator_(heap->lo_space()) {} | 383 lo_iterator_(heap->lo_space()) {} |
337 | 384 |
338 MemoryChunk* MemoryChunkIterator::next() { | 385 MemoryChunk* MemoryChunkIterator::next() { |
339 switch (state_) { | 386 switch (state_) { |
(...skipping 28 matching lines...)
368 } | 415 } |
369 case kFinishedState: | 416 case kFinishedState: |
370 return nullptr; | 417 return nullptr; |
371 default: | 418 default: |
372 break; | 419 break; |
373 } | 420 } |
374 UNREACHABLE(); | 421 UNREACHABLE(); |
375 return nullptr; | 422 return nullptr; |
376 } | 423 } |
377 | 424 |
378 | |
379 void Page::set_next_page(Page* page) { | 425 void Page::set_next_page(Page* page) { |
380 DCHECK(page->owner() == owner()); | 426 DCHECK(page->owner() == owner()); |
381 set_next_chunk(page); | 427 set_next_chunk(page); |
382 } | 428 } |
383 | 429 |
384 | |
385 void Page::set_prev_page(Page* page) { | 430 void Page::set_prev_page(Page* page) { |
386 DCHECK(page->owner() == owner()); | 431 DCHECK(page->owner() == owner()); |
387 set_prev_chunk(page); | 432 set_prev_chunk(page); |
388 } | 433 } |
389 | 434 |
| 435 Page* FreeListCategory::page() { |
| 436 return Page::FromAddress(reinterpret_cast<Address>(this)); |
| 437 } |
| 438 |
| 439 FreeList* FreeListCategory::owner() { |
| 440 return reinterpret_cast<PagedSpace*>( |
| 441 Page::FromAddress(reinterpret_cast<Address>(this))->owner()) |
| 442 ->free_list(); |
| 443 } |
| 444 |
| 445 bool FreeListCategory::is_linked() { |
| 446 return prev_ != nullptr || next_ != nullptr || owner()->top(type_) == this; |
| 447 } |
390 | 448 |
391 // Try linear allocation in the page of alloc_info's allocation top. Does | 449 // Try linear allocation in the page of alloc_info's allocation top. Does |
392 // not contain slow case logic (e.g. move to the next page or try free list | 450 // not contain slow case logic (e.g. move to the next page or try free list |
393 // allocation) so it can be used by all the allocation functions and for all | 451 // allocation) so it can be used by all the allocation functions and for all |
394 // the paged spaces. | 452 // the paged spaces. |
395 HeapObject* PagedSpace::AllocateLinearly(int size_in_bytes) { | 453 HeapObject* PagedSpace::AllocateLinearly(int size_in_bytes) { |
396 Address current_top = allocation_info_.top(); | 454 Address current_top = allocation_info_.top(); |
397 Address new_top = current_top + size_in_bytes; | 455 Address new_top = current_top + size_in_bytes; |
398 if (new_top > allocation_info_.limit()) return NULL; | 456 if (new_top > allocation_info_.limit()) return NULL; |
399 | 457 |
(...skipping 231 matching lines...)
631 other->allocation_info_.Reset(nullptr, nullptr); | 689 other->allocation_info_.Reset(nullptr, nullptr); |
632 return true; | 690 return true; |
633 } | 691 } |
634 return false; | 692 return false; |
635 } | 693 } |
636 | 694 |
637 } // namespace internal | 695 } // namespace internal |
638 } // namespace v8 | 696 } // namespace v8 |
639 | 697 |
640 #endif // V8_HEAP_SPACES_INL_H_ | 698 #endif // V8_HEAP_SPACES_INL_H_ |
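
Note: the UnlinkFreeListCategories / RelinkFreeListCategories additions above call a ForAllFreeListCategories helper on Page whose declaration lives in spaces.h and is not part of this diff. Below is a minimal sketch of such a helper, assuming it simply walks the same per-page categories_ array that the new InitializeFreeListCategories iterates; the actual signature and placement may differ.

// Sketch only -- the real helper is declared in spaces.h, not in this diff.
// Applies |callback| to every free list category embedded in this page.
template <typename Callback>
void Page::ForAllFreeListCategories(Callback callback) {
  for (int i = kFirstCategory; i < kNumberOfCategories; i++) {
    callback(&categories_[i]);
  }
}

With a helper of this shape, RelinkFreeListCategories can accumulate category->available() across all categories and UnlinkFreeListCategories can remove each category from the owning free list without exposing the categories_ array directly.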