| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #ifndef V8_HEAP_SPACES_INL_H_ | 5 #ifndef V8_HEAP_SPACES_INL_H_ |
| 6 #define V8_HEAP_SPACES_INL_H_ | 6 #define V8_HEAP_SPACES_INL_H_ |
| 7 | 7 |
| 8 #include "src/heap/incremental-marking.h" | 8 #include "src/heap/incremental-marking.h" |
| 9 #include "src/heap/spaces.h" | 9 #include "src/heap/spaces.h" |
| 10 #include "src/isolate.h" | 10 #include "src/isolate.h" |
| 11 #include "src/msan.h" | 11 #include "src/msan.h" |
| 12 #include "src/profiler/heap-profiler.h" | 12 #include "src/profiler/heap-profiler.h" |
| 13 #include "src/v8memory.h" | 13 #include "src/v8memory.h" |
| 14 | 14 |
| 15 namespace v8 { | 15 namespace v8 { |
| 16 namespace internal { | 16 namespace internal { |
| 17 | 17 |
| 18 template <class PAGE_TYPE> |
| 19 PageIteratorImpl<PAGE_TYPE>& PageIteratorImpl<PAGE_TYPE>::operator++() { |
| 20 p_ = p_->next_page(); |
| 21 return *this; |
| 22 } |
| 23 |
| 24 template <class PAGE_TYPE> |
| 25 PageIteratorImpl<PAGE_TYPE> PageIteratorImpl<PAGE_TYPE>::operator++(int) { |
| 26 PageIteratorImpl<PAGE_TYPE> tmp(*this); |
| 27 operator++(); |
| 28 return tmp; |
| 29 } |
| 30 |
| 31 NewSpacePageRange::NewSpacePageRange(Address start, Address limit) |
| 32 : start_(start), limit_(limit) { |
| 33 SemiSpace::AssertValidRange(start, limit); |
| 34 } |
| 18 | 35 |
| 19 // ----------------------------------------------------------------------------- | 36 // ----------------------------------------------------------------------------- |
| 20 // Bitmap | 37 // Bitmap |
| 21 | 38 |
| 22 void Bitmap::Clear(MemoryChunk* chunk) { | 39 void Bitmap::Clear(MemoryChunk* chunk) { |
| 23 Bitmap* bitmap = chunk->markbits(); | 40 Bitmap* bitmap = chunk->markbits(); |
| 24 for (int i = 0; i < bitmap->CellsCount(); i++) bitmap->cells()[i] = 0; | 41 for (int i = 0; i < bitmap->CellsCount(); i++) bitmap->cells()[i] = 0; |
| 25 chunk->ResetLiveBytes(); | 42 chunk->ResetLiveBytes(); |
| 26 } | 43 } |
| 27 | 44 |
| 28 void Bitmap::SetAllBits(MemoryChunk* chunk) { | 45 void Bitmap::SetAllBits(MemoryChunk* chunk) { |
| 29 Bitmap* bitmap = chunk->markbits(); | 46 Bitmap* bitmap = chunk->markbits(); |
| 30 for (int i = 0; i < bitmap->CellsCount(); i++) | 47 for (int i = 0; i < bitmap->CellsCount(); i++) |
| 31 bitmap->cells()[i] = 0xffffffff; | 48 bitmap->cells()[i] = 0xffffffff; |
| 32 } | 49 } |
| 33 | 50 |
| 34 // ----------------------------------------------------------------------------- | |
| 35 // PageIterator | |
| 36 | |
| 37 PageIterator::PageIterator(PagedSpace* space) | |
| 38 : space_(space), | |
| 39 prev_page_(&space->anchor_), | |
| 40 next_page_(prev_page_->next_page()) {} | |
| 41 | |
| 42 | |
| 43 bool PageIterator::has_next() { return next_page_ != &space_->anchor_; } | |
| 44 | |
| 45 | |
| 46 Page* PageIterator::next() { | |
| 47 DCHECK(has_next()); | |
| 48 prev_page_ = next_page_; | |
| 49 next_page_ = next_page_->next_page(); | |
| 50 return prev_page_; | |
| 51 } | |
| 52 | |
| 53 | 51 |
| 54 // ----------------------------------------------------------------------------- | 52 // ----------------------------------------------------------------------------- |
| 55 // SemiSpaceIterator | 53 // SemiSpaceIterator |
| 56 | 54 |
| 57 HeapObject* SemiSpaceIterator::Next() { | 55 HeapObject* SemiSpaceIterator::Next() { |
| 58 while (current_ != limit_) { | 56 while (current_ != limit_) { |
| 59 if (Page::IsAlignedToPageSize(current_)) { | 57 if (Page::IsAlignedToPageSize(current_)) { |
| 60 Page* page = Page::FromAllocationAreaAddress(current_); | 58 Page* page = Page::FromAllocationAreaAddress(current_); |
| 61 page = page->next_page(); | 59 page = page->next_page(); |
| 62 DCHECK(!page->is_anchor()); | 60 DCHECK(!page->is_anchor()); |
| 63 current_ = page->area_start(); | 61 current_ = page->area_start(); |
| 64 if (current_ == limit_) return nullptr; | 62 if (current_ == limit_) return nullptr; |
| 65 } | 63 } |
| 66 HeapObject* object = HeapObject::FromAddress(current_); | 64 HeapObject* object = HeapObject::FromAddress(current_); |
| 67 current_ += object->Size(); | 65 current_ += object->Size(); |
| 68 if (!object->IsFiller()) { | 66 if (!object->IsFiller()) { |
| 69 return object; | 67 return object; |
| 70 } | 68 } |
| 71 } | 69 } |
| 72 return nullptr; | 70 return nullptr; |
| 73 } | 71 } |
| 74 | 72 |
| 75 | 73 |
| 76 HeapObject* SemiSpaceIterator::next_object() { return Next(); } | 74 HeapObject* SemiSpaceIterator::next_object() { return Next(); } |
| 77 | 75 |
| 78 | 76 |
| 79 // ----------------------------------------------------------------------------- | 77 // ----------------------------------------------------------------------------- |
| 80 // NewSpacePageIterator | |
| 81 | |
| 82 NewSpacePageIterator::NewSpacePageIterator(NewSpace* space) | |
| 83 : prev_page_(Page::FromAddress(space->ToSpaceStart())->prev_page()), | |
| 84 next_page_(Page::FromAddress(space->ToSpaceStart())), | |
| 85 last_page_(Page::FromAllocationAreaAddress(space->ToSpaceEnd())) {} | |
| 86 | |
| 87 NewSpacePageIterator::NewSpacePageIterator(SemiSpace* space) | |
| 88 : prev_page_(space->anchor()), | |
| 89 next_page_(prev_page_->next_page()), | |
| 90 last_page_(prev_page_->prev_page()) {} | |
| 91 | |
| 92 NewSpacePageIterator::NewSpacePageIterator(Address start, Address limit) | |
| 93 : prev_page_(Page::FromAddress(start)->prev_page()), | |
| 94 next_page_(Page::FromAddress(start)), | |
| 95 last_page_(Page::FromAllocationAreaAddress(limit)) { | |
| 96 SemiSpace::AssertValidRange(start, limit); | |
| 97 } | |
| 98 | |
| 99 | |
| 100 bool NewSpacePageIterator::has_next() { return prev_page_ != last_page_; } | |
| 101 | |
| 102 Page* NewSpacePageIterator::next() { | |
| 103 DCHECK(has_next()); | |
| 104 prev_page_ = next_page_; | |
| 105 next_page_ = next_page_->next_page(); | |
| 106 return prev_page_; | |
| 107 } | |
| 108 | |
| 109 | |
| 110 // ----------------------------------------------------------------------------- | |
| 111 // HeapObjectIterator | 78 // HeapObjectIterator |
| 112 | 79 |
| 113 HeapObject* HeapObjectIterator::Next() { | 80 HeapObject* HeapObjectIterator::Next() { |
| 114 do { | 81 do { |
| 115 HeapObject* next_obj = FromCurrentPage(); | 82 HeapObject* next_obj = FromCurrentPage(); |
| 116 if (next_obj != NULL) return next_obj; | 83 if (next_obj != NULL) return next_obj; |
| 117 } while (AdvanceToNextPage()); | 84 } while (AdvanceToNextPage()); |
| 118 return NULL; | 85 return NULL; |
| 119 } | 86 } |
| 120 | 87 |
| (...skipping 24 matching lines...) |
| 145 } else { | 112 } else { |
| 146 DCHECK_OBJECT_SIZE(obj_size); | 113 DCHECK_OBJECT_SIZE(obj_size); |
| 147 } | 114 } |
| 148 return obj; | 115 return obj; |
| 149 } | 116 } |
| 150 } | 117 } |
| 151 return NULL; | 118 return NULL; |
| 152 } | 119 } |
| 153 | 120 |
| 154 // ----------------------------------------------------------------------------- | 121 // ----------------------------------------------------------------------------- |
| 155 // LargePageIterator | |
| 156 | |
| 157 LargePageIterator::LargePageIterator(LargeObjectSpace* space) | |
| 158 : next_page_(space->first_page()) {} | |
| 159 | |
| 160 LargePage* LargePageIterator::next() { | |
| 161 LargePage* result = next_page_; | |
| 162 if (next_page_ != nullptr) { | |
| 163 next_page_ = next_page_->next_page(); | |
| 164 } | |
| 165 return result; | |
| 166 } | |
| 167 | |
| 168 // ----------------------------------------------------------------------------- | |
| 169 // MemoryAllocator | 122 // MemoryAllocator |
| 170 | 123 |
| 171 #ifdef ENABLE_HEAP_PROTECTION | 124 #ifdef ENABLE_HEAP_PROTECTION |
| 172 | 125 |
| 173 void MemoryAllocator::Protect(Address start, size_t size) { | 126 void MemoryAllocator::Protect(Address start, size_t size) { |
| 174 base::OS::Protect(start, size); | 127 base::OS::Protect(start, size); |
| 175 } | 128 } |
| 176 | 129 |
| 177 | 130 |
| 178 void MemoryAllocator::Unprotect(Address start, size_t size, | 131 void MemoryAllocator::Unprotect(Address start, size_t size, |
| (...skipping 23 matching lines...) |
| 202 return id_ == kToSpace | 155 return id_ == kToSpace |
| 203 ? MemoryChunk::FromAddress(o->address())->InToSpace() | 156 ? MemoryChunk::FromAddress(o->address())->InToSpace() |
| 204 : MemoryChunk::FromAddress(o->address())->InFromSpace(); | 157 : MemoryChunk::FromAddress(o->address())->InFromSpace(); |
| 205 } | 158 } |
| 206 | 159 |
| 207 bool SemiSpace::Contains(Object* o) { | 160 bool SemiSpace::Contains(Object* o) { |
| 208 return o->IsHeapObject() && Contains(HeapObject::cast(o)); | 161 return o->IsHeapObject() && Contains(HeapObject::cast(o)); |
| 209 } | 162 } |
| 210 | 163 |
| 211 bool SemiSpace::ContainsSlow(Address a) { | 164 bool SemiSpace::ContainsSlow(Address a) { |
| 212 NewSpacePageIterator it(this); | 165 for (Page* p : *this) { |
| 213 while (it.has_next()) { | 166 if (p == MemoryChunk::FromAddress(a)) return true; |
| 214 if (it.next() == MemoryChunk::FromAddress(a)) return true; | |
| 215 } | 167 } |
| 216 return false; | 168 return false; |
| 217 } | 169 } |
| 218 | 170 |
| 219 // -------------------------------------------------------------------------- | 171 // -------------------------------------------------------------------------- |
| 220 // NewSpace | 172 // NewSpace |
| 221 | 173 |
| 222 bool NewSpace::Contains(HeapObject* o) { | 174 bool NewSpace::Contains(HeapObject* o) { |
| 223 return MemoryChunk::FromAddress(o->address())->InNewSpace(); | 175 return MemoryChunk::FromAddress(o->address())->InNewSpace(); |
| 224 } | 176 } |
| (...skipping 174 matching lines...) |
| 399 void Page::ClearEvacuationCandidate() { | 351 void Page::ClearEvacuationCandidate() { |
| 400 if (!IsFlagSet(COMPACTION_WAS_ABORTED)) { | 352 if (!IsFlagSet(COMPACTION_WAS_ABORTED)) { |
| 401 DCHECK_NULL(old_to_old_slots_); | 353 DCHECK_NULL(old_to_old_slots_); |
| 402 DCHECK_NULL(typed_old_to_old_slots_); | 354 DCHECK_NULL(typed_old_to_old_slots_); |
| 403 } | 355 } |
| 404 ClearFlag(EVACUATION_CANDIDATE); | 356 ClearFlag(EVACUATION_CANDIDATE); |
| 405 InitializeFreeListCategories(); | 357 InitializeFreeListCategories(); |
| 406 } | 358 } |
| 407 | 359 |
| 408 MemoryChunkIterator::MemoryChunkIterator(Heap* heap) | 360 MemoryChunkIterator::MemoryChunkIterator(Heap* heap) |
| 409 : state_(kOldSpaceState), | 361 : heap_(heap), |
| 410 old_iterator_(heap->old_space()), | 362 state_(kOldSpaceState), |
| 411 code_iterator_(heap->code_space()), | 363 old_iterator_(heap->old_space()->begin()), |
| 412 map_iterator_(heap->map_space()), | 364 code_iterator_(heap->code_space()->begin()), |
| 413 lo_iterator_(heap->lo_space()) {} | 365 map_iterator_(heap->map_space()->begin()), |
| 366 lo_iterator_(heap->lo_space()->begin()) {} |
| 414 | 367 |
| 415 MemoryChunk* MemoryChunkIterator::next() { | 368 MemoryChunk* MemoryChunkIterator::next() { |
| 416 switch (state_) { | 369 switch (state_) { |
| 417 case kOldSpaceState: { | 370 case kOldSpaceState: { |
| 418 if (old_iterator_.has_next()) { | 371 if (old_iterator_ != heap_->old_space()->end()) return *(old_iterator_++); |
| 419 return old_iterator_.next(); | |
| 420 } | |
| 421 state_ = kMapState; | 372 state_ = kMapState; |
| 422 // Fall through. | 373 // Fall through. |
| 423 } | 374 } |
| 424 case kMapState: { | 375 case kMapState: { |
| 425 if (map_iterator_.has_next()) { | 376 if (map_iterator_ != heap_->map_space()->end()) return *(map_iterator_++); |
| 426 return map_iterator_.next(); | |
| 427 } | |
| 428 state_ = kCodeState; | 377 state_ = kCodeState; |
| 429 // Fall through. | 378 // Fall through. |
| 430 } | 379 } |
| 431 case kCodeState: { | 380 case kCodeState: { |
| 432 if (code_iterator_.has_next()) { | 381 if (code_iterator_ != heap_->code_space()->end()) |
| 433 return code_iterator_.next(); | 382 return *(code_iterator_++); |
| 434 } | |
| 435 state_ = kLargeObjectState; | 383 state_ = kLargeObjectState; |
| 436 // Fall through. | 384 // Fall through. |
| 437 } | 385 } |
| 438 case kLargeObjectState: { | 386 case kLargeObjectState: { |
| 439 MemoryChunk* answer = lo_iterator_.next(); | 387 if (lo_iterator_ != heap_->lo_space()->end()) return *(lo_iterator_++); |
| 440 if (answer != nullptr) { | |
| 441 return answer; | |
| 442 } | |
| 443 state_ = kFinishedState; | 388 state_ = kFinishedState; |
| 444 // Fall through; | 389 // Fall through; |
| 445 } | 390 } |
| 446 case kFinishedState: | 391 case kFinishedState: |
| 447 return nullptr; | 392 return nullptr; |
| 448 default: | 393 default: |
| 449 break; | 394 break; |
| 450 } | 395 } |
| 451 UNREACHABLE(); | 396 UNREACHABLE(); |
| 452 return nullptr; | 397 return nullptr; |
| (...skipping 261 matching lines...) |
| 714 other->allocation_info_.Reset(nullptr, nullptr); | 659 other->allocation_info_.Reset(nullptr, nullptr); |
| 715 return true; | 660 return true; |
| 716 } | 661 } |
| 717 return false; | 662 return false; |
| 718 } | 663 } |
| 719 | 664 |
| 720 } // namespace internal | 665 } // namespace internal |
| 721 } // namespace v8 | 666 } // namespace v8 |
| 722 | 667 |
| 723 #endif // V8_HEAP_SPACES_INL_H_ | 668 #endif // V8_HEAP_SPACES_INL_H_ |
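
A minimal usage sketch (not part of the diff above) of the range-based page iteration this change introduces, replacing the removed PageIterator/NewSpacePageIterator has_next()/next() protocol. It assumes PagedSpace exposes the begin()/end() pair returning PageIteratorImpl<Page>, as used by SemiSpace::ContainsSlow and MemoryChunkIterator in the new code; the helper name CountPages is hypothetical.

// Count the pages of a paged space by walking its page range.
size_t CountPages(PagedSpace* space) {
  size_t pages = 0;
  // Each step advances via PageIteratorImpl::operator++, i.e. p_->next_page().
  for (Page* p : *space) {
    (void)p;  // the page itself is not needed here
    ++pages;
  }
  return pages;
}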