| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/heap/mark-compact.h" | 5 #include "src/heap/mark-compact.h" |
| 6 | 6 |
| 7 #include "src/base/atomicops.h" | 7 #include "src/base/atomicops.h" |
| 8 #include "src/base/bits.h" | 8 #include "src/base/bits.h" |
| 9 #include "src/base/sys-info.h" | 9 #include "src/base/sys-info.h" |
| 10 #include "src/code-stubs.h" | 10 #include "src/code-stubs.h" |
| (...skipping 19 matching lines...) |
| 30 | 30 |
| 31 namespace v8 { | 31 namespace v8 { |
| 32 namespace internal { | 32 namespace internal { |
| 33 | 33 |
| 34 | 34 |
| 35 const char* Marking::kWhiteBitPattern = "00"; | 35 const char* Marking::kWhiteBitPattern = "00"; |
| 36 const char* Marking::kBlackBitPattern = "11"; | 36 const char* Marking::kBlackBitPattern = "11"; |
| 37 const char* Marking::kGreyBitPattern = "10"; | 37 const char* Marking::kGreyBitPattern = "10"; |
| 38 const char* Marking::kImpossibleBitPattern = "01"; | 38 const char* Marking::kImpossibleBitPattern = "01"; |
| 39 | 39 |
| 40 | 40 // The following has to hold in order for {ObjectMarking::MarkBitFrom} to not |
| 41 // The following has to hold in order for {Marking::MarkBitFrom} to not produce | 41 // produce invalid {kImpossibleBitPattern} in the marking bitmap by overlapping. |
| 42 // invalid {kImpossibleBitPattern} in the marking bitmap by overlapping. | |
| 43 STATIC_ASSERT(Heap::kMinObjectSizeInWords >= 2); | 42 STATIC_ASSERT(Heap::kMinObjectSizeInWords >= 2); |
| 44 | 43 |
| 45 | 44 |
| 46 // ------------------------------------------------------------------------- | 45 // ------------------------------------------------------------------------- |
| 47 // MarkCompactCollector | 46 // MarkCompactCollector |
| 48 | 47 |
| 49 MarkCompactCollector::MarkCompactCollector(Heap* heap) | 48 MarkCompactCollector::MarkCompactCollector(Heap* heap) |
| 50 : // NOLINT | 49 : // NOLINT |
| 51 heap_(heap), | 50 heap_(heap), |
| 52 page_parallel_job_semaphore_(0), | 51 page_parallel_job_semaphore_(0), |
| (...skipping 49 matching lines...) |
| 102 | 101 |
| 103 | 102 |
| 104 static void VerifyMarking(Heap* heap, Address bottom, Address top) { | 103 static void VerifyMarking(Heap* heap, Address bottom, Address top) { |
| 105 VerifyMarkingVisitor visitor(heap); | 104 VerifyMarkingVisitor visitor(heap); |
| 106 HeapObject* object; | 105 HeapObject* object; |
| 107 Address next_object_must_be_here_or_later = bottom; | 106 Address next_object_must_be_here_or_later = bottom; |
| 108 | 107 |
| 109 for (Address current = bottom; current < top; current += kPointerSize) { | 108 for (Address current = bottom; current < top; current += kPointerSize) { |
| 110 object = HeapObject::FromAddress(current); | 109 object = HeapObject::FromAddress(current); |
| 111 if (MarkCompactCollector::IsMarked(object)) { | 110 if (MarkCompactCollector::IsMarked(object)) { |
| 112 CHECK(Marking::IsBlack(Marking::MarkBitFrom(object))); | 111 CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(object))); |
| 113 CHECK(current >= next_object_must_be_here_or_later); | 112 CHECK(current >= next_object_must_be_here_or_later); |
| 114 object->Iterate(&visitor); | 113 object->Iterate(&visitor); |
| 115 next_object_must_be_here_or_later = current + object->Size(); | 114 next_object_must_be_here_or_later = current + object->Size(); |
| 116 // The next word for sure belongs to the current object, jump over it. | 115 // The next word for sure belongs to the current object, jump over it. |
| 117 current += kPointerSize; | 116 current += kPointerSize; |
| 118 } | 117 } |
| 119 } | 118 } |
| 120 } | 119 } |
| 121 | 120 |
| 122 static void VerifyMarkingBlackPage(Heap* heap, Page* page) { | 121 static void VerifyMarkingBlackPage(Heap* heap, Page* page) { |
| 123 CHECK(page->IsFlagSet(Page::BLACK_PAGE)); | 122 CHECK(page->IsFlagSet(Page::BLACK_PAGE)); |
| 124 VerifyMarkingVisitor visitor(heap); | 123 VerifyMarkingVisitor visitor(heap); |
| 125 HeapObjectIterator it(page); | 124 HeapObjectIterator it(page); |
| 126 for (HeapObject* object = it.Next(); object != NULL; object = it.Next()) { | 125 for (HeapObject* object = it.Next(); object != NULL; object = it.Next()) { |
| 127 CHECK(Marking::IsBlack(Marking::MarkBitFrom(object))); | 126 CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(object))); |
| 128 object->Iterate(&visitor); | 127 object->Iterate(&visitor); |
| 129 } | 128 } |
| 130 } | 129 } |
| 131 | 130 |
| 132 static void VerifyMarking(NewSpace* space) { | 131 static void VerifyMarking(NewSpace* space) { |
| 133 Address end = space->top(); | 132 Address end = space->top(); |
| 134 // The bottom position is at the start of its page. Allows us to use | 133 // The bottom position is at the start of its page. Allows us to use |
| 135 // page->area_start() as start of range on all pages. | 134 // page->area_start() as start of range on all pages. |
| 136 CHECK_EQ(space->bottom(), Page::FromAddress(space->bottom())->area_start()); | 135 CHECK_EQ(space->bottom(), Page::FromAddress(space->bottom())->area_start()); |
| 137 | 136 |
| (...skipping 232 matching lines...) |
| 370 | 369 |
| 371 | 370 |
| 372 void MarkCompactCollector::VerifyMarkbitsAreClean() { | 371 void MarkCompactCollector::VerifyMarkbitsAreClean() { |
| 373 VerifyMarkbitsAreClean(heap_->old_space()); | 372 VerifyMarkbitsAreClean(heap_->old_space()); |
| 374 VerifyMarkbitsAreClean(heap_->code_space()); | 373 VerifyMarkbitsAreClean(heap_->code_space()); |
| 375 VerifyMarkbitsAreClean(heap_->map_space()); | 374 VerifyMarkbitsAreClean(heap_->map_space()); |
| 376 VerifyMarkbitsAreClean(heap_->new_space()); | 375 VerifyMarkbitsAreClean(heap_->new_space()); |
| 377 | 376 |
| 378 LargeObjectIterator it(heap_->lo_space()); | 377 LargeObjectIterator it(heap_->lo_space()); |
| 379 for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) { | 378 for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) { |
| 380 MarkBit mark_bit = Marking::MarkBitFrom(obj); | 379 MarkBit mark_bit = ObjectMarking::MarkBitFrom(obj); |
| 381 CHECK(Marking::IsWhite(mark_bit)); | 380 CHECK(Marking::IsWhite(mark_bit)); |
| 382 CHECK_EQ(0, Page::FromAddress(obj->address())->LiveBytes()); | 381 CHECK_EQ(0, Page::FromAddress(obj->address())->LiveBytes()); |
| 383 } | 382 } |
| 384 } | 383 } |
| 385 | 384 |
| 386 | 385 |
| 387 void MarkCompactCollector::VerifyWeakEmbeddedObjectsInCode() { | 386 void MarkCompactCollector::VerifyWeakEmbeddedObjectsInCode() { |
| 388 HeapObjectIterator code_iterator(heap()->code_space()); | 387 HeapObjectIterator code_iterator(heap()->code_space()); |
| 389 for (HeapObject* obj = code_iterator.Next(); obj != NULL; | 388 for (HeapObject* obj = code_iterator.Next(); obj != NULL; |
| 390 obj = code_iterator.Next()) { | 389 obj = code_iterator.Next()) { |
| (...skipping 10 matching lines...) |
| 401 for (HeapObject* obj = iterator.Next(); obj != NULL; obj = iterator.Next()) { | 400 for (HeapObject* obj = iterator.Next(); obj != NULL; obj = iterator.Next()) { |
| 402 Map* map = Map::cast(obj); | 401 Map* map = Map::cast(obj); |
| 403 map->VerifyOmittedMapChecks(); | 402 map->VerifyOmittedMapChecks(); |
| 404 } | 403 } |
| 405 } | 404 } |
| 406 #endif // VERIFY_HEAP | 405 #endif // VERIFY_HEAP |
| 407 | 406 |
| 408 | 407 |
| 409 static void ClearMarkbitsInPagedSpace(PagedSpace* space) { | 408 static void ClearMarkbitsInPagedSpace(PagedSpace* space) { |
| 410 for (Page* p : *space) { | 409 for (Page* p : *space) { |
| 411 Bitmap::Clear(p); | 410 p->ClearLiveness(); |
| 412 if (p->IsFlagSet(Page::BLACK_PAGE)) { | 411 if (p->IsFlagSet(Page::BLACK_PAGE)) { |
| 413 p->ClearFlag(Page::BLACK_PAGE); | 412 p->ClearFlag(Page::BLACK_PAGE); |
| 414 } | 413 } |
| 415 } | 414 } |
| 416 } | 415 } |
| 417 | 416 |
| 418 | 417 |
| 419 static void ClearMarkbitsInNewSpace(NewSpace* space) { | 418 static void ClearMarkbitsInNewSpace(NewSpace* space) { |
| 420 for (Page* page : *space) { | 419 for (Page* page : *space) { |
| 421 Bitmap::Clear(page); | 420 page->ClearLiveness(); |
| 422 } | 421 } |
| 423 } | 422 } |
| 424 | 423 |
| 425 | 424 |
| 426 void MarkCompactCollector::ClearMarkbits() { | 425 void MarkCompactCollector::ClearMarkbits() { |
| 427 ClearMarkbitsInPagedSpace(heap_->code_space()); | 426 ClearMarkbitsInPagedSpace(heap_->code_space()); |
| 428 ClearMarkbitsInPagedSpace(heap_->map_space()); | 427 ClearMarkbitsInPagedSpace(heap_->map_space()); |
| 429 ClearMarkbitsInPagedSpace(heap_->old_space()); | 428 ClearMarkbitsInPagedSpace(heap_->old_space()); |
| 430 ClearMarkbitsInNewSpace(heap_->new_space()); | 429 ClearMarkbitsInNewSpace(heap_->new_space()); |
| 431 | 430 |
| 432 LargeObjectIterator it(heap_->lo_space()); | 431 LargeObjectIterator it(heap_->lo_space()); |
| 433 for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) { | 432 for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) { |
| 434 Marking::MarkWhite(Marking::MarkBitFrom(obj)); | 433 Marking::MarkWhite(ObjectMarking::MarkBitFrom(obj)); |
| 435 MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address()); | 434 MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address()); |
| 436 chunk->ResetProgressBar(); | 435 chunk->ResetProgressBar(); |
| 437 chunk->ResetLiveBytes(); | 436 chunk->ResetLiveBytes(); |
| 438 if (chunk->IsFlagSet(Page::BLACK_PAGE)) { | 437 if (chunk->IsFlagSet(Page::BLACK_PAGE)) { |
| 439 chunk->ClearFlag(Page::BLACK_PAGE); | 438 chunk->ClearFlag(Page::BLACK_PAGE); |
| 440 } | 439 } |
| 441 } | 440 } |
| 442 } | 441 } |
| 443 | 442 |
| 444 class MarkCompactCollector::Sweeper::SweeperTask : public v8::Task { | 443 class MarkCompactCollector::Sweeper::SweeperTask : public v8::Task { |
| (...skipping 132 matching lines...) |
| 577 } | 576 } |
| 578 | 577 |
| 579 bool MarkCompactCollector::Sweeper::IsSweepingCompleted() { | 578 bool MarkCompactCollector::Sweeper::IsSweepingCompleted() { |
| 580 while (pending_sweeper_tasks_semaphore_.WaitFor( | 579 while (pending_sweeper_tasks_semaphore_.WaitFor( |
| 581 base::TimeDelta::FromSeconds(0))) { | 580 base::TimeDelta::FromSeconds(0))) { |
| 582 num_sweeping_tasks_.Increment(-1); | 581 num_sweeping_tasks_.Increment(-1); |
| 583 } | 582 } |
| 584 return num_sweeping_tasks_.Value() == 0; | 583 return num_sweeping_tasks_.Value() == 0; |
| 585 } | 584 } |
| 586 | 585 |
| 587 void Marking::TransferMark(Heap* heap, Address old_start, Address new_start) { | |
| 588 // This is only used when resizing an object. | |
| 589 DCHECK(MemoryChunk::FromAddress(old_start) == | |
| 590 MemoryChunk::FromAddress(new_start)); | |
| 591 | |
| 592 if (!heap->incremental_marking()->IsMarking() || | |
| 593 Page::FromAddress(old_start)->IsFlagSet(Page::BLACK_PAGE)) | |
| 594 return; | |
| 595 | |
| 596 // If the mark doesn't move, we don't check the color of the object. | |
| 597 // It doesn't matter whether the object is black, since it hasn't changed | |
| 598 // size, so the adjustment to the live data count will be zero anyway. | |
| 599 if (old_start == new_start) return; | |
| 600 | |
| 601 MarkBit new_mark_bit = MarkBitFrom(new_start); | |
| 602 MarkBit old_mark_bit = MarkBitFrom(old_start); | |
| 603 | |
| 604 #ifdef DEBUG | |
| 605 ObjectColor old_color = Color(old_mark_bit); | |
| 606 #endif | |
| 607 | |
| 608 if (Marking::IsBlack(old_mark_bit)) { | |
| 609 Marking::BlackToWhite(old_mark_bit); | |
| 610 Marking::MarkBlack(new_mark_bit); | |
| 611 return; | |
| 612 } else if (Marking::IsGrey(old_mark_bit)) { | |
| 613 Marking::GreyToWhite(old_mark_bit); | |
| 614 heap->incremental_marking()->WhiteToGreyAndPush( | |
| 615 HeapObject::FromAddress(new_start), new_mark_bit); | |
| 616 heap->incremental_marking()->RestartIfNotMarking(); | |
| 617 } | |
| 618 | |
| 619 #ifdef DEBUG | |
| 620 ObjectColor new_color = Color(new_mark_bit); | |
| 621 DCHECK(new_color == old_color); | |
| 622 #endif | |
| 623 } | |
| 624 | |
| 625 | |
| 626 const char* AllocationSpaceName(AllocationSpace space) { | 586 const char* AllocationSpaceName(AllocationSpace space) { |
| 627 switch (space) { | 587 switch (space) { |
| 628 case NEW_SPACE: | 588 case NEW_SPACE: |
| 629 return "NEW_SPACE"; | 589 return "NEW_SPACE"; |
| 630 case OLD_SPACE: | 590 case OLD_SPACE: |
| 631 return "OLD_SPACE"; | 591 return "OLD_SPACE"; |
| 632 case CODE_SPACE: | 592 case CODE_SPACE: |
| 633 return "CODE_SPACE"; | 593 return "CODE_SPACE"; |
| 634 case MAP_SPACE: | 594 case MAP_SPACE: |
| 635 return "MAP_SPACE"; | 595 return "MAP_SPACE"; |
| (...skipping 329 matching lines...) |
| 965 | 925 |
| 966 JSFunction* candidate = jsfunction_candidates_head_; | 926 JSFunction* candidate = jsfunction_candidates_head_; |
| 967 JSFunction* next_candidate; | 927 JSFunction* next_candidate; |
| 968 while (candidate != NULL) { | 928 while (candidate != NULL) { |
| 969 next_candidate = GetNextCandidate(candidate); | 929 next_candidate = GetNextCandidate(candidate); |
| 970 ClearNextCandidate(candidate, undefined); | 930 ClearNextCandidate(candidate, undefined); |
| 971 | 931 |
| 972 SharedFunctionInfo* shared = candidate->shared(); | 932 SharedFunctionInfo* shared = candidate->shared(); |
| 973 | 933 |
| 974 Code* code = shared->code(); | 934 Code* code = shared->code(); |
| 975 MarkBit code_mark = Marking::MarkBitFrom(code); | 935 MarkBit code_mark = ObjectMarking::MarkBitFrom(code); |
| 976 if (Marking::IsWhite(code_mark)) { | 936 if (Marking::IsWhite(code_mark)) { |
| 977 if (FLAG_trace_code_flushing && shared->is_compiled()) { | 937 if (FLAG_trace_code_flushing && shared->is_compiled()) { |
| 978 PrintF("[code-flushing clears: "); | 938 PrintF("[code-flushing clears: "); |
| 979 shared->ShortPrint(); | 939 shared->ShortPrint(); |
| 980 PrintF(" - age: %d]\n", code->GetAge()); | 940 PrintF(" - age: %d]\n", code->GetAge()); |
| 981 } | 941 } |
| 982 // Always flush the optimized code map if there is one. | 942 // Always flush the optimized code map if there is one. |
| 983 if (!shared->OptimizedCodeMapIsCleared()) { | 943 if (!shared->OptimizedCodeMapIsCleared()) { |
| 984 shared->ClearOptimizedCodeMap(); | 944 shared->ClearOptimizedCodeMap(); |
| 985 } | 945 } |
| (...skipping 26 matching lines...) |
| 1012 void CodeFlusher::ProcessSharedFunctionInfoCandidates() { | 972 void CodeFlusher::ProcessSharedFunctionInfoCandidates() { |
| 1013 Code* lazy_compile = isolate_->builtins()->builtin(Builtins::kCompileLazy); | 973 Code* lazy_compile = isolate_->builtins()->builtin(Builtins::kCompileLazy); |
| 1014 | 974 |
| 1015 SharedFunctionInfo* candidate = shared_function_info_candidates_head_; | 975 SharedFunctionInfo* candidate = shared_function_info_candidates_head_; |
| 1016 SharedFunctionInfo* next_candidate; | 976 SharedFunctionInfo* next_candidate; |
| 1017 while (candidate != NULL) { | 977 while (candidate != NULL) { |
| 1018 next_candidate = GetNextCandidate(candidate); | 978 next_candidate = GetNextCandidate(candidate); |
| 1019 ClearNextCandidate(candidate); | 979 ClearNextCandidate(candidate); |
| 1020 | 980 |
| 1021 Code* code = candidate->code(); | 981 Code* code = candidate->code(); |
| 1022 MarkBit code_mark = Marking::MarkBitFrom(code); | 982 MarkBit code_mark = ObjectMarking::MarkBitFrom(code); |
| 1023 if (Marking::IsWhite(code_mark)) { | 983 if (Marking::IsWhite(code_mark)) { |
| 1024 if (FLAG_trace_code_flushing && candidate->is_compiled()) { | 984 if (FLAG_trace_code_flushing && candidate->is_compiled()) { |
| 1025 PrintF("[code-flushing clears: "); | 985 PrintF("[code-flushing clears: "); |
| 1026 candidate->ShortPrint(); | 986 candidate->ShortPrint(); |
| 1027 PrintF(" - age: %d]\n", code->GetAge()); | 987 PrintF(" - age: %d]\n", code->GetAge()); |
| 1028 } | 988 } |
| 1029 // Always flush the optimized code map if there is one. | 989 // Always flush the optimized code map if there is one. |
| 1030 if (!candidate->OptimizedCodeMapIsCleared()) { | 990 if (!candidate->OptimizedCodeMapIsCleared()) { |
| 1031 candidate->ClearOptimizedCodeMap(); | 991 candidate->ClearOptimizedCodeMap(); |
| 1032 } | 992 } |
| (...skipping 116 matching lines...) |
| 1149 // We are close to a stack overflow, so just mark the objects. | 1109 // We are close to a stack overflow, so just mark the objects. |
| 1150 } | 1110 } |
| 1151 MarkCompactCollector* collector = heap->mark_compact_collector(); | 1111 MarkCompactCollector* collector = heap->mark_compact_collector(); |
| 1152 for (Object** p = start; p < end; p++) { | 1112 for (Object** p = start; p < end; p++) { |
| 1153 MarkObjectByPointer(collector, object, p); | 1113 MarkObjectByPointer(collector, object, p); |
| 1154 } | 1114 } |
| 1155 } | 1115 } |
| 1156 | 1116 |
| 1157 // Marks the object black and pushes it on the marking stack. | 1117 // Marks the object black and pushes it on the marking stack. |
| 1158 INLINE(static void MarkObject(Heap* heap, HeapObject* object)) { | 1118 INLINE(static void MarkObject(Heap* heap, HeapObject* object)) { |
| 1159 MarkBit mark = Marking::MarkBitFrom(object); | 1119 MarkBit mark = ObjectMarking::MarkBitFrom(object); |
| 1160 heap->mark_compact_collector()->MarkObject(object, mark); | 1120 heap->mark_compact_collector()->MarkObject(object, mark); |
| 1161 } | 1121 } |
| 1162 | 1122 |
| 1163 // Marks the object black without pushing it on the marking stack. | 1123 // Marks the object black without pushing it on the marking stack. |
| 1164 // Returns true if object needed marking and false otherwise. | 1124 // Returns true if object needed marking and false otherwise. |
| 1165 INLINE(static bool MarkObjectWithoutPush(Heap* heap, HeapObject* object)) { | 1125 INLINE(static bool MarkObjectWithoutPush(Heap* heap, HeapObject* object)) { |
| 1166 MarkBit mark_bit = Marking::MarkBitFrom(object); | 1126 MarkBit mark_bit = ObjectMarking::MarkBitFrom(object); |
| 1167 if (Marking::IsWhite(mark_bit)) { | 1127 if (Marking::IsWhite(mark_bit)) { |
| 1168 heap->mark_compact_collector()->SetMark(object, mark_bit); | 1128 heap->mark_compact_collector()->SetMark(object, mark_bit); |
| 1169 return true; | 1129 return true; |
| 1170 } | 1130 } |
| 1171 return false; | 1131 return false; |
| 1172 } | 1132 } |
| 1173 | 1133 |
| 1174 // Mark object pointed to by p. | 1134 // Mark object pointed to by p. |
| 1175 INLINE(static void MarkObjectByPointer(MarkCompactCollector* collector, | 1135 INLINE(static void MarkObjectByPointer(MarkCompactCollector* collector, |
| 1176 HeapObject* object, Object** p)) { | 1136 HeapObject* object, Object** p)) { |
| 1177 if (!(*p)->IsHeapObject()) return; | 1137 if (!(*p)->IsHeapObject()) return; |
| 1178 HeapObject* target_object = HeapObject::cast(*p); | 1138 HeapObject* target_object = HeapObject::cast(*p); |
| 1179 collector->RecordSlot(object, p, target_object); | 1139 collector->RecordSlot(object, p, target_object); |
| 1180 MarkBit mark = Marking::MarkBitFrom(target_object); | 1140 MarkBit mark = ObjectMarking::MarkBitFrom(target_object); |
| 1181 collector->MarkObject(target_object, mark); | 1141 collector->MarkObject(target_object, mark); |
| 1182 } | 1142 } |
| 1183 | 1143 |
| 1184 | 1144 |
| 1185 // Visit an unmarked object. | 1145 // Visit an unmarked object. |
| 1186 INLINE(static void VisitUnmarkedObject(MarkCompactCollector* collector, | 1146 INLINE(static void VisitUnmarkedObject(MarkCompactCollector* collector, |
| 1187 HeapObject* obj)) { | 1147 HeapObject* obj)) { |
| 1188 #ifdef DEBUG | 1148 #ifdef DEBUG |
| 1189 DCHECK(collector->heap()->Contains(obj)); | 1149 DCHECK(collector->heap()->Contains(obj)); |
| 1190 DCHECK(!collector->heap()->mark_compact_collector()->IsMarked(obj)); | 1150 DCHECK(!collector->heap()->mark_compact_collector()->IsMarked(obj)); |
| 1191 #endif | 1151 #endif |
| 1192 Map* map = obj->map(); | 1152 Map* map = obj->map(); |
| 1193 Heap* heap = obj->GetHeap(); | 1153 Heap* heap = obj->GetHeap(); |
| 1194 MarkBit mark = Marking::MarkBitFrom(obj); | 1154 MarkBit mark = ObjectMarking::MarkBitFrom(obj); |
| 1195 heap->mark_compact_collector()->SetMark(obj, mark); | 1155 heap->mark_compact_collector()->SetMark(obj, mark); |
| 1196 // Mark the map pointer and the body. | 1156 // Mark the map pointer and the body. |
| 1197 MarkBit map_mark = Marking::MarkBitFrom(map); | 1157 MarkBit map_mark = ObjectMarking::MarkBitFrom(map); |
| 1198 heap->mark_compact_collector()->MarkObject(map, map_mark); | 1158 heap->mark_compact_collector()->MarkObject(map, map_mark); |
| 1199 IterateBody(map, obj); | 1159 IterateBody(map, obj); |
| 1200 } | 1160 } |
| 1201 | 1161 |
| 1202 // Visit all unmarked objects pointed to by [start, end). | 1162 // Visit all unmarked objects pointed to by [start, end). |
| 1203 // Returns false if the operation fails (lack of stack space). | 1163 // Returns false if the operation fails (lack of stack space). |
| 1204 INLINE(static bool VisitUnmarkedObjects(Heap* heap, HeapObject* object, | 1164 INLINE(static bool VisitUnmarkedObjects(Heap* heap, HeapObject* object, |
| 1205 Object** start, Object** end)) { | 1165 Object** start, Object** end)) { |
| 1206 // Return false if we are close to the stack limit. | 1166 // Return false if we are close to the stack limit. |
| 1207 StackLimitCheck check(heap->isolate()); | 1167 StackLimitCheck check(heap->isolate()); |
| 1208 if (check.HasOverflowed()) return false; | 1168 if (check.HasOverflowed()) return false; |
| 1209 | 1169 |
| 1210 MarkCompactCollector* collector = heap->mark_compact_collector(); | 1170 MarkCompactCollector* collector = heap->mark_compact_collector(); |
| 1211 // Visit the unmarked objects. | 1171 // Visit the unmarked objects. |
| 1212 for (Object** p = start; p < end; p++) { | 1172 for (Object** p = start; p < end; p++) { |
| 1213 Object* o = *p; | 1173 Object* o = *p; |
| 1214 if (!o->IsHeapObject()) continue; | 1174 if (!o->IsHeapObject()) continue; |
| 1215 collector->RecordSlot(object, p, o); | 1175 collector->RecordSlot(object, p, o); |
| 1216 HeapObject* obj = HeapObject::cast(o); | 1176 HeapObject* obj = HeapObject::cast(o); |
| 1217 MarkBit mark = Marking::MarkBitFrom(obj); | 1177 MarkBit mark = ObjectMarking::MarkBitFrom(obj); |
| 1218 if (Marking::IsBlackOrGrey(mark)) continue; | 1178 if (Marking::IsBlackOrGrey(mark)) continue; |
| 1219 VisitUnmarkedObject(collector, obj); | 1179 VisitUnmarkedObject(collector, obj); |
| 1220 } | 1180 } |
| 1221 return true; | 1181 return true; |
| 1222 } | 1182 } |
| 1223 | 1183 |
| 1224 private: | 1184 private: |
| 1225 // Code flushing support. | 1185 // Code flushing support. |
| 1226 | 1186 |
| 1227 static const int kRegExpCodeThreshold = 5; | 1187 static const int kRegExpCodeThreshold = 5; |
| (...skipping 13 matching lines...) |
| 1241 if (!code->IsSmi() && | 1201 if (!code->IsSmi() && |
| 1242 HeapObject::cast(code)->map()->instance_type() == CODE_TYPE) { | 1202 HeapObject::cast(code)->map()->instance_type() == CODE_TYPE) { |
| 1243 // Save a copy that can be reinstated if we need the code again. | 1203 // Save a copy that can be reinstated if we need the code again. |
| 1244 re->SetDataAt(JSRegExp::saved_code_index(is_one_byte), code); | 1204 re->SetDataAt(JSRegExp::saved_code_index(is_one_byte), code); |
| 1245 | 1205 |
| 1246 // Saving a copy might create a pointer into compaction candidate | 1206 // Saving a copy might create a pointer into compaction candidate |
| 1247 // that was not observed by marker. This might happen if JSRegExp data | 1207 // that was not observed by marker. This might happen if JSRegExp data |
| 1248 // was marked through the compilation cache before marker reached JSRegExp | 1208 // was marked through the compilation cache before marker reached JSRegExp |
| 1249 // object. | 1209 // object. |
| 1250 FixedArray* data = FixedArray::cast(re->data()); | 1210 FixedArray* data = FixedArray::cast(re->data()); |
| 1251 if (Marking::IsBlackOrGrey(Marking::MarkBitFrom(data))) { | 1211 if (Marking::IsBlackOrGrey(ObjectMarking::MarkBitFrom(data))) { |
| 1252 Object** slot = | 1212 Object** slot = |
| 1253 data->data_start() + JSRegExp::saved_code_index(is_one_byte); | 1213 data->data_start() + JSRegExp::saved_code_index(is_one_byte); |
| 1254 heap->mark_compact_collector()->RecordSlot(data, slot, code); | 1214 heap->mark_compact_collector()->RecordSlot(data, slot, code); |
| 1255 } | 1215 } |
| 1256 | 1216 |
| 1257 // Set a number in the 0-255 range to guarantee no smi overflow. | 1217 // Set a number in the 0-255 range to guarantee no smi overflow. |
| 1258 re->SetDataAt(JSRegExp::code_index(is_one_byte), | 1218 re->SetDataAt(JSRegExp::code_index(is_one_byte), |
| 1259 Smi::FromInt(heap->ms_count() & 0xff)); | 1219 Smi::FromInt(heap->ms_count() & 0xff)); |
| 1260 } else if (code->IsSmi()) { | 1220 } else if (code->IsSmi()) { |
| 1261 int value = Smi::cast(code)->value(); | 1221 int value = Smi::cast(code)->value(); |
| (...skipping 67 matching lines...) |
| 1329 : collector_(collector) {} | 1289 : collector_(collector) {} |
| 1330 | 1290 |
| 1331 void VisitPointers(Object** start, Object** end) override { | 1291 void VisitPointers(Object** start, Object** end) override { |
| 1332 for (Object** p = start; p < end; p++) VisitPointer(p); | 1292 for (Object** p = start; p < end; p++) VisitPointer(p); |
| 1333 } | 1293 } |
| 1334 | 1294 |
| 1335 void VisitPointer(Object** slot) override { | 1295 void VisitPointer(Object** slot) override { |
| 1336 Object* obj = *slot; | 1296 Object* obj = *slot; |
| 1337 if (obj->IsSharedFunctionInfo()) { | 1297 if (obj->IsSharedFunctionInfo()) { |
| 1338 SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(obj); | 1298 SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(obj); |
| 1339 MarkBit shared_mark = Marking::MarkBitFrom(shared); | 1299 MarkBit shared_mark = ObjectMarking::MarkBitFrom(shared); |
| 1340 MarkBit code_mark = Marking::MarkBitFrom(shared->code()); | 1300 MarkBit code_mark = ObjectMarking::MarkBitFrom(shared->code()); |
| 1341 collector_->MarkObject(shared->code(), code_mark); | 1301 collector_->MarkObject(shared->code(), code_mark); |
| 1342 collector_->MarkObject(shared, shared_mark); | 1302 collector_->MarkObject(shared, shared_mark); |
| 1343 } | 1303 } |
| 1344 } | 1304 } |
| 1345 | 1305 |
| 1346 private: | 1306 private: |
| 1347 MarkCompactCollector* collector_; | 1307 MarkCompactCollector* collector_; |
| 1348 }; | 1308 }; |
| 1349 | 1309 |
| 1350 | 1310 |
| 1351 void MarkCompactCollector::PrepareThreadForCodeFlushing(Isolate* isolate, | 1311 void MarkCompactCollector::PrepareThreadForCodeFlushing(Isolate* isolate, |
| 1352 ThreadLocalTop* top) { | 1312 ThreadLocalTop* top) { |
| 1353 for (StackFrameIterator it(isolate, top); !it.done(); it.Advance()) { | 1313 for (StackFrameIterator it(isolate, top); !it.done(); it.Advance()) { |
| 1354 // Note: for the frame that has a pending lazy deoptimization | 1314 // Note: for the frame that has a pending lazy deoptimization |
| 1355 // StackFrame::unchecked_code will return a non-optimized code object for | 1315 // StackFrame::unchecked_code will return a non-optimized code object for |
| 1356 // the outermost function and StackFrame::LookupCode will return | 1316 // the outermost function and StackFrame::LookupCode will return |
| 1357 // actual optimized code object. | 1317 // actual optimized code object. |
| 1358 StackFrame* frame = it.frame(); | 1318 StackFrame* frame = it.frame(); |
| 1359 Code* code = frame->unchecked_code(); | 1319 Code* code = frame->unchecked_code(); |
| 1360 MarkBit code_mark = Marking::MarkBitFrom(code); | 1320 MarkBit code_mark = ObjectMarking::MarkBitFrom(code); |
| 1361 MarkObject(code, code_mark); | 1321 MarkObject(code, code_mark); |
| 1362 if (frame->is_optimized()) { | 1322 if (frame->is_optimized()) { |
| 1363 Code* optimized_code = frame->LookupCode(); | 1323 Code* optimized_code = frame->LookupCode(); |
| 1364 MarkBit optimized_code_mark = Marking::MarkBitFrom(optimized_code); | 1324 MarkBit optimized_code_mark = ObjectMarking::MarkBitFrom(optimized_code); |
| 1365 MarkObject(optimized_code, optimized_code_mark); | 1325 MarkObject(optimized_code, optimized_code_mark); |
| 1366 } | 1326 } |
| 1367 } | 1327 } |
| 1368 } | 1328 } |
| 1369 | 1329 |
| 1370 | 1330 |
| 1371 void MarkCompactCollector::PrepareForCodeFlushing() { | 1331 void MarkCompactCollector::PrepareForCodeFlushing() { |
| 1372 // If code flushing is disabled, there is no need to prepare for it. | 1332 // If code flushing is disabled, there is no need to prepare for it. |
| 1373 if (!is_code_flushing_enabled()) return; | 1333 if (!is_code_flushing_enabled()) return; |
| 1374 | 1334 |
| (...skipping 31 matching lines...) |
| 1406 // Skip the weak next code link in a code object, which is visited in | 1366 // Skip the weak next code link in a code object, which is visited in |
| 1407 // ProcessTopOptimizedFrame. | 1367 // ProcessTopOptimizedFrame. |
| 1408 void VisitNextCodeLink(Object** p) override {} | 1368 void VisitNextCodeLink(Object** p) override {} |
| 1409 | 1369 |
| 1410 private: | 1370 private: |
| 1411 void MarkObjectByPointer(Object** p) { | 1371 void MarkObjectByPointer(Object** p) { |
| 1412 if (!(*p)->IsHeapObject()) return; | 1372 if (!(*p)->IsHeapObject()) return; |
| 1413 | 1373 |
| 1414 HeapObject* object = HeapObject::cast(*p); | 1374 HeapObject* object = HeapObject::cast(*p); |
| 1415 | 1375 |
| 1416 MarkBit mark_bit = Marking::MarkBitFrom(object); | 1376 MarkBit mark_bit = ObjectMarking::MarkBitFrom(object); |
| 1417 if (Marking::IsBlackOrGrey(mark_bit)) return; | 1377 if (Marking::IsBlackOrGrey(mark_bit)) return; |
| 1418 | 1378 |
| 1419 Map* map = object->map(); | 1379 Map* map = object->map(); |
| 1420 // Mark the object. | 1380 // Mark the object. |
| 1421 collector_->SetMark(object, mark_bit); | 1381 collector_->SetMark(object, mark_bit); |
| 1422 | 1382 |
| 1423 // Mark the map pointer and body, and push them on the marking stack. | 1383 // Mark the map pointer and body, and push them on the marking stack. |
| 1424 MarkBit map_mark = Marking::MarkBitFrom(map); | 1384 MarkBit map_mark = ObjectMarking::MarkBitFrom(map); |
| 1425 collector_->MarkObject(map, map_mark); | 1385 collector_->MarkObject(map, map_mark); |
| 1426 MarkCompactMarkingVisitor::IterateBody(map, object); | 1386 MarkCompactMarkingVisitor::IterateBody(map, object); |
| 1427 | 1387 |
| 1428 // Mark all the objects reachable from the map and body. May leave | 1388 // Mark all the objects reachable from the map and body. May leave |
| 1429 // overflowed objects in the heap. | 1389 // overflowed objects in the heap. |
| 1430 collector_->EmptyMarkingDeque(); | 1390 collector_->EmptyMarkingDeque(); |
| 1431 } | 1391 } |
| 1432 | 1392 |
| 1433 MarkCompactCollector* collector_; | 1393 MarkCompactCollector* collector_; |
| 1434 }; | 1394 }; |
| 1435 | 1395 |
| 1436 | 1396 |
| 1437 // Helper class for pruning the string table. | 1397 // Helper class for pruning the string table. |
| 1438 template <bool finalize_external_strings, bool record_slots> | 1398 template <bool finalize_external_strings, bool record_slots> |
| 1439 class StringTableCleaner : public ObjectVisitor { | 1399 class StringTableCleaner : public ObjectVisitor { |
| 1440 public: | 1400 public: |
| 1441 StringTableCleaner(Heap* heap, HeapObject* table) | 1401 StringTableCleaner(Heap* heap, HeapObject* table) |
| 1442 : heap_(heap), pointers_removed_(0), table_(table) { | 1402 : heap_(heap), pointers_removed_(0), table_(table) { |
| 1443 DCHECK(!record_slots || table != nullptr); | 1403 DCHECK(!record_slots || table != nullptr); |
| 1444 } | 1404 } |
| 1445 | 1405 |
| 1446 void VisitPointers(Object** start, Object** end) override { | 1406 void VisitPointers(Object** start, Object** end) override { |
| 1447 // Visit all HeapObject pointers in [start, end). | 1407 // Visit all HeapObject pointers in [start, end). |
| 1448 MarkCompactCollector* collector = heap_->mark_compact_collector(); | 1408 MarkCompactCollector* collector = heap_->mark_compact_collector(); |
| 1449 for (Object** p = start; p < end; p++) { | 1409 for (Object** p = start; p < end; p++) { |
| 1450 Object* o = *p; | 1410 Object* o = *p; |
| 1451 if (o->IsHeapObject()) { | 1411 if (o->IsHeapObject()) { |
| 1452 if (Marking::IsWhite(Marking::MarkBitFrom(HeapObject::cast(o)))) { | 1412 if (Marking::IsWhite(ObjectMarking::MarkBitFrom(HeapObject::cast(o)))) { |
| 1453 if (finalize_external_strings) { | 1413 if (finalize_external_strings) { |
| 1454 DCHECK(o->IsExternalString()); | 1414 DCHECK(o->IsExternalString()); |
| 1455 heap_->FinalizeExternalString(String::cast(*p)); | 1415 heap_->FinalizeExternalString(String::cast(*p)); |
| 1456 } else { | 1416 } else { |
| 1457 pointers_removed_++; | 1417 pointers_removed_++; |
| 1458 } | 1418 } |
| 1459 // Set the entry to the_hole_value (as deleted). | 1419 // Set the entry to the_hole_value (as deleted). |
| 1460 *p = heap_->the_hole_value(); | 1420 *p = heap_->the_hole_value(); |
| 1461 } else if (record_slots) { | 1421 } else if (record_slots) { |
| 1462 // StringTable contains only old space strings. | 1422 // StringTable contains only old space strings. |
| (...skipping 16 matching lines...) |
| 1479 }; | 1439 }; |
| 1480 | 1440 |
| 1481 typedef StringTableCleaner<false, true> InternalizedStringTableCleaner; | 1441 typedef StringTableCleaner<false, true> InternalizedStringTableCleaner; |
| 1482 typedef StringTableCleaner<true, false> ExternalStringTableCleaner; | 1442 typedef StringTableCleaner<true, false> ExternalStringTableCleaner; |
| 1483 | 1443 |
| 1484 // Implementation of WeakObjectRetainer for mark compact GCs. All marked objects | 1444 // Implementation of WeakObjectRetainer for mark compact GCs. All marked objects |
| 1485 // are retained. | 1445 // are retained. |
| 1486 class MarkCompactWeakObjectRetainer : public WeakObjectRetainer { | 1446 class MarkCompactWeakObjectRetainer : public WeakObjectRetainer { |
| 1487 public: | 1447 public: |
| 1488 virtual Object* RetainAs(Object* object) { | 1448 virtual Object* RetainAs(Object* object) { |
| 1489 MarkBit mark_bit = Marking::MarkBitFrom(HeapObject::cast(object)); | 1449 MarkBit mark_bit = ObjectMarking::MarkBitFrom(HeapObject::cast(object)); |
| 1490 DCHECK(!Marking::IsGrey(mark_bit)); | 1450 DCHECK(!Marking::IsGrey(mark_bit)); |
| 1491 if (Marking::IsBlack(mark_bit)) { | 1451 if (Marking::IsBlack(mark_bit)) { |
| 1492 return object; | 1452 return object; |
| 1493 } else if (object->IsAllocationSite() && | 1453 } else if (object->IsAllocationSite() && |
| 1494 !(AllocationSite::cast(object)->IsZombie())) { | 1454 !(AllocationSite::cast(object)->IsZombie())) { |
| 1495 // "dead" AllocationSites need to live long enough for a traversal of new | 1455 // "dead" AllocationSites need to live long enough for a traversal of new |
| 1496 // space. These sites get a one-time reprieve. | 1456 // space. These sites get a one-time reprieve. |
| 1497 AllocationSite* site = AllocationSite::cast(object); | 1457 AllocationSite* site = AllocationSite::cast(object); |
| 1498 site->MarkZombie(); | 1458 site->MarkZombie(); |
| 1499 site->GetHeap()->mark_compact_collector()->MarkAllocationSite(site); | 1459 site->GetHeap()->mark_compact_collector()->MarkAllocationSite(site); |
| 1500 return object; | 1460 return object; |
| 1501 } else { | 1461 } else { |
| 1502 return NULL; | 1462 return NULL; |
| 1503 } | 1463 } |
| 1504 } | 1464 } |
| 1505 }; | 1465 }; |
| 1506 | 1466 |
| 1507 | 1467 |
| 1508 // Fill the marking stack with overflowed objects returned by the given | 1468 // Fill the marking stack with overflowed objects returned by the given |
| 1509 // iterator. Stop when the marking stack is filled or the end of the space | 1469 // iterator. Stop when the marking stack is filled or the end of the space |
| 1510 // is reached, whichever comes first. | 1470 // is reached, whichever comes first. |
| 1511 template <class T> | 1471 template <class T> |
| 1512 void MarkCompactCollector::DiscoverGreyObjectsWithIterator(T* it) { | 1472 void MarkCompactCollector::DiscoverGreyObjectsWithIterator(T* it) { |
| 1513 // The caller should ensure that the marking stack is initially not full, | 1473 // The caller should ensure that the marking stack is initially not full, |
| 1514 // so that we don't waste effort pointlessly scanning for objects. | 1474 // so that we don't waste effort pointlessly scanning for objects. |
| 1515 DCHECK(!marking_deque()->IsFull()); | 1475 DCHECK(!marking_deque()->IsFull()); |
| 1516 | 1476 |
| 1517 Map* filler_map = heap()->one_pointer_filler_map(); | 1477 Map* filler_map = heap()->one_pointer_filler_map(); |
| 1518 for (HeapObject* object = it->Next(); object != NULL; object = it->Next()) { | 1478 for (HeapObject* object = it->Next(); object != NULL; object = it->Next()) { |
| 1519 MarkBit markbit = Marking::MarkBitFrom(object); | 1479 MarkBit markbit = ObjectMarking::MarkBitFrom(object); |
| 1520 if ((object->map() != filler_map) && Marking::IsGrey(markbit)) { | 1480 if ((object->map() != filler_map) && Marking::IsGrey(markbit)) { |
| 1521 Marking::GreyToBlack(markbit); | 1481 Marking::GreyToBlack(markbit); |
| 1522 PushBlack(object); | 1482 PushBlack(object); |
| 1523 if (marking_deque()->IsFull()) return; | 1483 if (marking_deque()->IsFull()) return; |
| 1524 } | 1484 } |
| 1525 } | 1485 } |
| 1526 } | 1486 } |
| 1527 | 1487 |
| 1528 void MarkCompactCollector::DiscoverGreyObjectsOnPage(MemoryChunk* p) { | 1488 void MarkCompactCollector::DiscoverGreyObjectsOnPage(MemoryChunk* p) { |
| 1529 DCHECK(!marking_deque()->IsFull()); | 1489 DCHECK(!marking_deque()->IsFull()); |
| 1530 LiveObjectIterator<kGreyObjects> it(p); | 1490 LiveObjectIterator<kGreyObjects> it(p); |
| 1531 HeapObject* object = NULL; | 1491 HeapObject* object = NULL; |
| 1532 while ((object = it.Next()) != NULL) { | 1492 while ((object = it.Next()) != NULL) { |
| 1533 MarkBit markbit = Marking::MarkBitFrom(object); | 1493 MarkBit markbit = ObjectMarking::MarkBitFrom(object); |
| 1534 DCHECK(Marking::IsGrey(markbit)); | 1494 DCHECK(Marking::IsGrey(markbit)); |
| 1535 Marking::GreyToBlack(markbit); | 1495 Marking::GreyToBlack(markbit); |
| 1536 PushBlack(object); | 1496 PushBlack(object); |
| 1537 if (marking_deque()->IsFull()) return; | 1497 if (marking_deque()->IsFull()) return; |
| 1538 } | 1498 } |
| 1539 } | 1499 } |
| 1540 | 1500 |
| 1541 class RecordMigratedSlotVisitor final : public ObjectVisitor { | 1501 class RecordMigratedSlotVisitor final : public ObjectVisitor { |
| 1542 public: | 1502 public: |
| 1543 explicit RecordMigratedSlotVisitor(MarkCompactCollector* collector) | 1503 explicit RecordMigratedSlotVisitor(MarkCompactCollector* collector) |
| (...skipping 425 matching lines...) |
| 1969 DiscoverGreyObjectsOnPage(page); | 1929 DiscoverGreyObjectsOnPage(page); |
| 1970 if (marking_deque()->IsFull()) return; | 1930 if (marking_deque()->IsFull()) return; |
| 1971 } | 1931 } |
| 1972 } | 1932 } |
| 1973 | 1933 |
| 1974 | 1934 |
| 1975 bool MarkCompactCollector::IsUnmarkedHeapObject(Object** p) { | 1935 bool MarkCompactCollector::IsUnmarkedHeapObject(Object** p) { |
| 1976 Object* o = *p; | 1936 Object* o = *p; |
| 1977 if (!o->IsHeapObject()) return false; | 1937 if (!o->IsHeapObject()) return false; |
| 1978 HeapObject* heap_object = HeapObject::cast(o); | 1938 HeapObject* heap_object = HeapObject::cast(o); |
| 1979 MarkBit mark = Marking::MarkBitFrom(heap_object); | 1939 MarkBit mark = ObjectMarking::MarkBitFrom(heap_object); |
| 1980 return Marking::IsWhite(mark); | 1940 return Marking::IsWhite(mark); |
| 1981 } | 1941 } |
| 1982 | 1942 |
| 1983 | 1943 |
| 1984 bool MarkCompactCollector::IsUnmarkedHeapObjectWithHeap(Heap* heap, | 1944 bool MarkCompactCollector::IsUnmarkedHeapObjectWithHeap(Heap* heap, |
| 1985 Object** p) { | 1945 Object** p) { |
| 1986 Object* o = *p; | 1946 Object* o = *p; |
| 1987 DCHECK(o->IsHeapObject()); | 1947 DCHECK(o->IsHeapObject()); |
| 1988 HeapObject* heap_object = HeapObject::cast(o); | 1948 HeapObject* heap_object = HeapObject::cast(o); |
| 1989 MarkBit mark = Marking::MarkBitFrom(heap_object); | 1949 MarkBit mark = ObjectMarking::MarkBitFrom(heap_object); |
| 1990 return Marking::IsWhite(mark); | 1950 return Marking::IsWhite(mark); |
| 1991 } | 1951 } |
| 1992 | 1952 |
| 1993 | 1953 |
| 1994 void MarkCompactCollector::MarkStringTable(RootMarkingVisitor* visitor) { | 1954 void MarkCompactCollector::MarkStringTable(RootMarkingVisitor* visitor) { |
| 1995 StringTable* string_table = heap()->string_table(); | 1955 StringTable* string_table = heap()->string_table(); |
| 1996 // Mark the string table itself. | 1956 // Mark the string table itself. |
| 1997 MarkBit string_table_mark = Marking::MarkBitFrom(string_table); | 1957 MarkBit string_table_mark = ObjectMarking::MarkBitFrom(string_table); |
| 1998 if (Marking::IsWhite(string_table_mark)) { | 1958 if (Marking::IsWhite(string_table_mark)) { |
| 1999 // String table could have already been marked by visiting the handles list. | 1959 // String table could have already been marked by visiting the handles list. |
| 2000 SetMark(string_table, string_table_mark); | 1960 SetMark(string_table, string_table_mark); |
| 2001 } | 1961 } |
| 2002 // Explicitly mark the prefix. | 1962 // Explicitly mark the prefix. |
| 2003 string_table->IteratePrefix(visitor); | 1963 string_table->IteratePrefix(visitor); |
| 2004 ProcessMarkingDeque(); | 1964 ProcessMarkingDeque(); |
| 2005 } | 1965 } |
| 2006 | 1966 |
| 2007 | 1967 |
| 2008 void MarkCompactCollector::MarkAllocationSite(AllocationSite* site) { | 1968 void MarkCompactCollector::MarkAllocationSite(AllocationSite* site) { |
| 2009 MarkBit mark_bit = Marking::MarkBitFrom(site); | 1969 MarkBit mark_bit = ObjectMarking::MarkBitFrom(site); |
| 2010 SetMark(site, mark_bit); | 1970 SetMark(site, mark_bit); |
| 2011 } | 1971 } |
| 2012 | 1972 |
| 2013 | 1973 |
| 2014 void MarkCompactCollector::MarkRoots(RootMarkingVisitor* visitor) { | 1974 void MarkCompactCollector::MarkRoots(RootMarkingVisitor* visitor) { |
| 2015 // Mark the heap roots including global variables, stack variables, | 1975 // Mark the heap roots including global variables, stack variables, |
| 2016 // etc., and all objects reachable from them. | 1976 // etc., and all objects reachable from them. |
| 2017 heap()->IterateStrongRoots(visitor, VISIT_ONLY_STRONG); | 1977 heap()->IterateStrongRoots(visitor, VISIT_ONLY_STRONG); |
| 2018 | 1978 |
| 2019 // Handle the string table specially. | 1979 // Handle the string table specially. |
| (...skipping 46 matching lines...) |
| 2066 Map* filler_map = heap_->one_pointer_filler_map(); | 2026 Map* filler_map = heap_->one_pointer_filler_map(); |
| 2067 while (!marking_deque_.IsEmpty()) { | 2027 while (!marking_deque_.IsEmpty()) { |
| 2068 HeapObject* object = marking_deque_.Pop(); | 2028 HeapObject* object = marking_deque_.Pop(); |
| 2069 // Explicitly skip one word fillers. Incremental markbit patterns are | 2029 // Explicitly skip one word fillers. Incremental markbit patterns are |
| 2070 // correct only for objects that occupy at least two words. | 2030 // correct only for objects that occupy at least two words. |
| 2071 Map* map = object->map(); | 2031 Map* map = object->map(); |
| 2072 if (map == filler_map) continue; | 2032 if (map == filler_map) continue; |
| 2073 | 2033 |
| 2074 DCHECK(object->IsHeapObject()); | 2034 DCHECK(object->IsHeapObject()); |
| 2075 DCHECK(heap()->Contains(object)); | 2035 DCHECK(heap()->Contains(object)); |
| 2076 DCHECK(!Marking::IsWhite(Marking::MarkBitFrom(object))); | 2036 DCHECK(!Marking::IsWhite(ObjectMarking::MarkBitFrom(object))); |
| 2077 | 2037 |
| 2078 MarkBit map_mark = Marking::MarkBitFrom(map); | 2038 MarkBit map_mark = ObjectMarking::MarkBitFrom(map); |
| 2079 MarkObject(map, map_mark); | 2039 MarkObject(map, map_mark); |
| 2080 | 2040 |
| 2081 MarkCompactMarkingVisitor::IterateBody(map, object); | 2041 MarkCompactMarkingVisitor::IterateBody(map, object); |
| 2082 } | 2042 } |
| 2083 } | 2043 } |
| 2084 | 2044 |
| 2085 | 2045 |
| 2086 // Sweep the heap for overflowed objects, clear their overflow bits, and | 2046 // Sweep the heap for overflowed objects, clear their overflow bits, and |
| 2087 // push them on the marking stack. Stop early if the marking stack fills | 2047 // push them on the marking stack. Stop early if the marking stack fills |
| 2088 // before sweeping completes. If sweeping completes, there are no remaining | 2048 // before sweeping completes. If sweeping completes, there are no remaining |
| (...skipping 182 matching lines...) |
| 2271 wrappers_to_trace_.push_back(std::pair<void*, void*>( | 2231 wrappers_to_trace_.push_back(std::pair<void*, void*>( |
| 2272 reinterpret_cast<void*>(js_object->GetInternalField(0)), | 2232 reinterpret_cast<void*>(js_object->GetInternalField(0)), |
| 2273 reinterpret_cast<void*>(js_object->GetInternalField(1)))); | 2233 reinterpret_cast<void*>(js_object->GetInternalField(1)))); |
| 2274 } | 2234 } |
| 2275 } | 2235 } |
| 2276 | 2236 |
| 2277 void MarkCompactCollector::RegisterExternallyReferencedObject(Object** object) { | 2237 void MarkCompactCollector::RegisterExternallyReferencedObject(Object** object) { |
| 2278 DCHECK(in_use()); | 2238 DCHECK(in_use()); |
| 2279 HeapObject* heap_object = HeapObject::cast(*object); | 2239 HeapObject* heap_object = HeapObject::cast(*object); |
| 2280 DCHECK(heap_->Contains(heap_object)); | 2240 DCHECK(heap_->Contains(heap_object)); |
| 2281 MarkBit mark_bit = Marking::MarkBitFrom(heap_object); | 2241 MarkBit mark_bit = ObjectMarking::MarkBitFrom(heap_object); |
| 2282 MarkObject(heap_object, mark_bit); | 2242 MarkObject(heap_object, mark_bit); |
| 2283 } | 2243 } |
| 2284 | 2244 |
| 2285 void MarkCompactCollector::MarkLiveObjects() { | 2245 void MarkCompactCollector::MarkLiveObjects() { |
| 2286 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK); | 2246 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK); |
| 2287 double start_time = 0.0; | 2247 double start_time = 0.0; |
| 2288 if (FLAG_print_cumulative_gc_stat) { | 2248 if (FLAG_print_cumulative_gc_stat) { |
| 2289 start_time = heap_->MonotonicallyIncreasingTimeInMs(); | 2249 start_time = heap_->MonotonicallyIncreasingTimeInMs(); |
| 2290 } | 2250 } |
| 2291 // The recursive GC marker detects when it is nearing stack overflow, | 2251 // The recursive GC marker detects when it is nearing stack overflow, |
| (...skipping 222 matching lines...) |
| 2514 } | 2474 } |
| 2515 | 2475 |
| 2516 | 2476 |
| 2517 void MarkCompactCollector::ClearSimpleMapTransitions( | 2477 void MarkCompactCollector::ClearSimpleMapTransitions( |
| 2518 Object* non_live_map_list) { | 2478 Object* non_live_map_list) { |
| 2519 Object* the_hole_value = heap()->the_hole_value(); | 2479 Object* the_hole_value = heap()->the_hole_value(); |
| 2520 Object* weak_cell_obj = non_live_map_list; | 2480 Object* weak_cell_obj = non_live_map_list; |
| 2521 while (weak_cell_obj != Smi::FromInt(0)) { | 2481 while (weak_cell_obj != Smi::FromInt(0)) { |
| 2522 WeakCell* weak_cell = WeakCell::cast(weak_cell_obj); | 2482 WeakCell* weak_cell = WeakCell::cast(weak_cell_obj); |
| 2523 Map* map = Map::cast(weak_cell->value()); | 2483 Map* map = Map::cast(weak_cell->value()); |
| 2524 DCHECK(Marking::IsWhite(Marking::MarkBitFrom(map))); | 2484 DCHECK(Marking::IsWhite(ObjectMarking::MarkBitFrom(map))); |
| 2525 Object* potential_parent = map->constructor_or_backpointer(); | 2485 Object* potential_parent = map->constructor_or_backpointer(); |
| 2526 if (potential_parent->IsMap()) { | 2486 if (potential_parent->IsMap()) { |
| 2527 Map* parent = Map::cast(potential_parent); | 2487 Map* parent = Map::cast(potential_parent); |
| 2528 if (Marking::IsBlackOrGrey(Marking::MarkBitFrom(parent)) && | 2488 if (Marking::IsBlackOrGrey(ObjectMarking::MarkBitFrom(parent)) && |
| 2529 parent->raw_transitions() == weak_cell) { | 2489 parent->raw_transitions() == weak_cell) { |
| 2530 ClearSimpleMapTransition(parent, map); | 2490 ClearSimpleMapTransition(parent, map); |
| 2531 } | 2491 } |
| 2532 } | 2492 } |
| 2533 weak_cell->clear(); | 2493 weak_cell->clear(); |
| 2534 weak_cell_obj = weak_cell->next(); | 2494 weak_cell_obj = weak_cell->next(); |
| 2535 weak_cell->clear_next(the_hole_value); | 2495 weak_cell->clear_next(the_hole_value); |
| 2536 } | 2496 } |
| 2537 } | 2497 } |
| 2538 | 2498 |
| (...skipping 19 matching lines...) |
| 2558 HeapObject* undefined = heap()->undefined_value(); | 2518 HeapObject* undefined = heap()->undefined_value(); |
| 2559 Object* obj = heap()->encountered_transition_arrays(); | 2519 Object* obj = heap()->encountered_transition_arrays(); |
| 2560 while (obj != Smi::FromInt(0)) { | 2520 while (obj != Smi::FromInt(0)) { |
| 2561 TransitionArray* array = TransitionArray::cast(obj); | 2521 TransitionArray* array = TransitionArray::cast(obj); |
| 2562 int num_transitions = array->number_of_entries(); | 2522 int num_transitions = array->number_of_entries(); |
| 2563 DCHECK_EQ(TransitionArray::NumberOfTransitions(array), num_transitions); | 2523 DCHECK_EQ(TransitionArray::NumberOfTransitions(array), num_transitions); |
| 2564 if (num_transitions > 0) { | 2524 if (num_transitions > 0) { |
| 2565 Map* map = array->GetTarget(0); | 2525 Map* map = array->GetTarget(0); |
| 2566 Map* parent = Map::cast(map->constructor_or_backpointer()); | 2526 Map* parent = Map::cast(map->constructor_or_backpointer()); |
| 2567 bool parent_is_alive = | 2527 bool parent_is_alive = |
| 2568 Marking::IsBlackOrGrey(Marking::MarkBitFrom(parent)); | 2528 Marking::IsBlackOrGrey(ObjectMarking::MarkBitFrom(parent)); |
| 2569 DescriptorArray* descriptors = | 2529 DescriptorArray* descriptors = |
| 2570 parent_is_alive ? parent->instance_descriptors() : nullptr; | 2530 parent_is_alive ? parent->instance_descriptors() : nullptr; |
| 2571 bool descriptors_owner_died = | 2531 bool descriptors_owner_died = |
| 2572 CompactTransitionArray(parent, array, descriptors); | 2532 CompactTransitionArray(parent, array, descriptors); |
| 2573 if (descriptors_owner_died) { | 2533 if (descriptors_owner_died) { |
| 2574 TrimDescriptorArray(parent, descriptors); | 2534 TrimDescriptorArray(parent, descriptors); |
| 2575 } | 2535 } |
| 2576 } | 2536 } |
| 2577 obj = array->next_link(); | 2537 obj = array->next_link(); |
| 2578 array->set_next_link(undefined, SKIP_WRITE_BARRIER); | 2538 array->set_next_link(undefined, SKIP_WRITE_BARRIER); |
| 2579 } | 2539 } |
| 2580 heap()->set_encountered_transition_arrays(Smi::FromInt(0)); | 2540 heap()->set_encountered_transition_arrays(Smi::FromInt(0)); |
| 2581 } | 2541 } |
| 2582 | 2542 |
| 2583 | 2543 |
| 2584 bool MarkCompactCollector::CompactTransitionArray( | 2544 bool MarkCompactCollector::CompactTransitionArray( |
| 2585 Map* map, TransitionArray* transitions, DescriptorArray* descriptors) { | 2545 Map* map, TransitionArray* transitions, DescriptorArray* descriptors) { |
| 2586 int num_transitions = transitions->number_of_entries(); | 2546 int num_transitions = transitions->number_of_entries(); |
| 2587 bool descriptors_owner_died = false; | 2547 bool descriptors_owner_died = false; |
| 2588 int transition_index = 0; | 2548 int transition_index = 0; |
| 2589 // Compact all live transitions to the left. | 2549 // Compact all live transitions to the left. |
| 2590 for (int i = 0; i < num_transitions; ++i) { | 2550 for (int i = 0; i < num_transitions; ++i) { |
| 2591 Map* target = transitions->GetTarget(i); | 2551 Map* target = transitions->GetTarget(i); |
| 2592 DCHECK_EQ(target->constructor_or_backpointer(), map); | 2552 DCHECK_EQ(target->constructor_or_backpointer(), map); |
| 2593 if (Marking::IsWhite(Marking::MarkBitFrom(target))) { | 2553 if (Marking::IsWhite(ObjectMarking::MarkBitFrom(target))) { |
| 2594 if (descriptors != nullptr && | 2554 if (descriptors != nullptr && |
| 2595 target->instance_descriptors() == descriptors) { | 2555 target->instance_descriptors() == descriptors) { |
| 2596 descriptors_owner_died = true; | 2556 descriptors_owner_died = true; |
| 2597 } | 2557 } |
| 2598 } else { | 2558 } else { |
| 2599 if (i != transition_index) { | 2559 if (i != transition_index) { |
| 2600 Name* key = transitions->GetKey(i); | 2560 Name* key = transitions->GetKey(i); |
| 2601 transitions->SetKey(transition_index, key); | 2561 transitions->SetKey(transition_index, key); |
| 2602 Object** key_slot = transitions->GetKeySlot(transition_index); | 2562 Object** key_slot = transitions->GetKeySlot(transition_index); |
| 2603 RecordSlot(transitions, key_slot, key); | 2563 RecordSlot(transitions, key_slot, key); |
| (...skipping 156 matching lines...) |
| 2760 // Cells for new-space objects embedded in optimized code are wrapped in | 2720 // Cells for new-space objects embedded in optimized code are wrapped in |
| 2761 // WeakCell and put into Heap::weak_object_to_code_table. | 2721 // WeakCell and put into Heap::weak_object_to_code_table. |
| 2762 // Such cells do not have any strong references but we want to keep them | 2722 // Such cells do not have any strong references but we want to keep them |
| 2763 // alive as long as the cell value is alive. | 2723 // alive as long as the cell value is alive. |
| 2764 // TODO(ulan): remove this once we remove Heap::weak_object_to_code_table. | 2724 // TODO(ulan): remove this once we remove Heap::weak_object_to_code_table. |
| 2765 if (value->IsCell()) { | 2725 if (value->IsCell()) { |
| 2766 Object* cell_value = Cell::cast(value)->value(); | 2726 Object* cell_value = Cell::cast(value)->value(); |
| 2767 if (cell_value->IsHeapObject() && | 2727 if (cell_value->IsHeapObject() && |
| 2768 MarkCompactCollector::IsMarked(HeapObject::cast(cell_value))) { | 2728 MarkCompactCollector::IsMarked(HeapObject::cast(cell_value))) { |
| 2769 // Resurrect the cell. | 2729 // Resurrect the cell. |
| 2770 MarkBit mark = Marking::MarkBitFrom(value); | 2730 MarkBit mark = ObjectMarking::MarkBitFrom(value); |
| 2771 SetMark(value, mark); | 2731 SetMark(value, mark); |
| 2772 Object** slot = HeapObject::RawField(value, Cell::kValueOffset); | 2732 Object** slot = HeapObject::RawField(value, Cell::kValueOffset); |
| 2773 RecordSlot(value, slot, *slot); | 2733 RecordSlot(value, slot, *slot); |
| 2774 slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset); | 2734 slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset); |
| 2775 RecordSlot(weak_cell, slot, *slot); | 2735 RecordSlot(weak_cell, slot, *slot); |
| 2776 clear_value = false; | 2736 clear_value = false; |
| 2777 } | 2737 } |
| 2778 } | 2738 } |
| 2779 if (value->IsMap()) { | 2739 if (value->IsMap()) { |
| 2780 // The map is non-live. | 2740 // The map is non-live. |
| (...skipping 214 matching lines...) |
| 2995 // Find the last live object in the cell. | 2955 // Find the last live object in the cell. |
| 2996 unsigned int leading_zeros = | 2956 unsigned int leading_zeros = |
| 2997 base::bits::CountLeadingZeros32(current_cell & slot_mask); | 2957 base::bits::CountLeadingZeros32(current_cell & slot_mask); |
| 2998 CHECK(leading_zeros != Bitmap::kBitsPerCell); | 2958 CHECK(leading_zeros != Bitmap::kBitsPerCell); |
| 2999 int offset = static_cast<int>(Bitmap::kBitIndexMask - leading_zeros) - 1; | 2959 int offset = static_cast<int>(Bitmap::kBitIndexMask - leading_zeros) - 1; |
| 3000 | 2960 |
| 3001 base_address += (cell_index - base_address_cell_index) * | 2961 base_address += (cell_index - base_address_cell_index) * |
| 3002 Bitmap::kBitsPerCell * kPointerSize; | 2962 Bitmap::kBitsPerCell * kPointerSize; |
| 3003 Address address = base_address + offset * kPointerSize; | 2963 Address address = base_address + offset * kPointerSize; |
| 3004 HeapObject* object = HeapObject::FromAddress(address); | 2964 HeapObject* object = HeapObject::FromAddress(address); |
| 3005 CHECK(Marking::IsBlack(Marking::MarkBitFrom(object))); | 2965 CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(object))); |
| 3006 CHECK(object->address() < reinterpret_cast<Address>(slot)); | 2966 CHECK(object->address() < reinterpret_cast<Address>(slot)); |
| 3007 if ((object->address() + kPointerSize) <= slot && | 2967 if ((object->address() + kPointerSize) <= slot && |
| 3008 (object->address() + object->Size()) > slot) { | 2968 (object->address() + object->Size()) > slot) { |
| 3009 // If the slot is within the last found object in the cell, the slot is | 2969 // If the slot is within the last found object in the cell, the slot is |
| 3010 // in a live object. | 2970 // in a live object. |
| 3011 // Slots pointing to the first word of an object are invalid and removed. | 2971 // Slots pointing to the first word of an object are invalid and removed. |
| 3012 // This can happen when we move the object header while left trimming. | 2972 // This can happen when we move the object header while left trimming. |
| 3013 return true; | 2973 return true; |
| 3014 } | 2974 } |
| 3015 return false; | 2975 return false; |
| (...skipping 391 matching lines...) |
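The check above masks the 32-bit mark-bitmap cell covering the slot, finds the highest set bit at or below the slot's bit index, converts that back into an object address, and reports the slot as valid only if it falls inside that object but not on its first word. Here is a self-contained sketch of the same bit trick using std::countl_zero; it assumes one bit per object start and sidesteps the paired-mark-bit adjustment the original makes, so the names and the simplification are mine, not V8's.

```cpp
#include <bit>
#include <cstdint>

constexpr int kBitsPerCell = 32;

// One bitmap cell; bit i is set when an object starts at word i of the span
// the cell covers. word_size_of(i) returns that object's size in words.
bool SlotInLiveObject(uint32_t cell, int slot_word, int (*word_size_of)(int)) {
  // Keep only the bits at or below the slot, then take the highest one left.
  uint32_t mask = (slot_word == kBitsPerCell - 1)
                      ? ~0u
                      : ((1u << (slot_word + 1)) - 1u);
  uint32_t below = cell & mask;
  if (below == 0) return false;  // no object starts at or before the slot here
  int object_word = (kBitsPerCell - 1) - std::countl_zero(below);
  int size_in_words = word_size_of(object_word);
  // A slot on the object's first word is treated as invalid, matching the
  // left-trimming comment in the original code.
  return slot_word > object_word && slot_word < object_word + size_in_words;
}
```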
| 3407 skip_list->Clear(); | 3367 skip_list->Clear(); |
| 3408 } | 3368 } |
| 3409 | 3369 |
| 3410 intptr_t freed_bytes = 0; | 3370 intptr_t freed_bytes = 0; |
| 3411 intptr_t max_freed_bytes = 0; | 3371 intptr_t max_freed_bytes = 0; |
| 3412 int curr_region = -1; | 3372 int curr_region = -1; |
| 3413 | 3373 |
| 3414 LiveObjectIterator<kBlackObjects> it(p); | 3374 LiveObjectIterator<kBlackObjects> it(p); |
| 3415 HeapObject* object = NULL; | 3375 HeapObject* object = NULL; |
| 3416 while ((object = it.Next()) != NULL) { | 3376 while ((object = it.Next()) != NULL) { |
| 3417 DCHECK(Marking::IsBlack(Marking::MarkBitFrom(object))); | 3377 DCHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(object))); |
| 3418 Address free_end = object->address(); | 3378 Address free_end = object->address(); |
| 3419 if (free_end != free_start) { | 3379 if (free_end != free_start) { |
| 3420 int size = static_cast<int>(free_end - free_start); | 3380 int size = static_cast<int>(free_end - free_start); |
| 3421 if (free_space_mode == ZAP_FREE_SPACE) { | 3381 if (free_space_mode == ZAP_FREE_SPACE) { |
| 3422 memset(free_start, 0xcc, size); | 3382 memset(free_start, 0xcc, size); |
| 3423 } | 3383 } |
| 3424 if (free_list_mode == REBUILD_FREE_LIST) { | 3384 if (free_list_mode == REBUILD_FREE_LIST) { |
| 3425 freed_bytes = reinterpret_cast<PagedSpace*>(space)->UnaccountedFree( | 3385 freed_bytes = reinterpret_cast<PagedSpace*>(space)->UnaccountedFree( |
| 3426 free_start, size); | 3386 free_start, size); |
| 3427 max_freed_bytes = Max(freed_bytes, max_freed_bytes); | 3387 max_freed_bytes = Max(freed_bytes, max_freed_bytes); |
| (...skipping 10 matching lines...) |
| 3438 SkipList::RegionNumber(free_end + size - kPointerSize); | 3398 SkipList::RegionNumber(free_end + size - kPointerSize); |
| 3439 if (new_region_start != curr_region || new_region_end != curr_region) { | 3399 if (new_region_start != curr_region || new_region_end != curr_region) { |
| 3440 skip_list->AddObject(free_end, size); | 3400 skip_list->AddObject(free_end, size); |
| 3441 curr_region = new_region_end; | 3401 curr_region = new_region_end; |
| 3442 } | 3402 } |
| 3443 } | 3403 } |
| 3444 free_start = free_end + size; | 3404 free_start = free_end + size; |
| 3445 } | 3405 } |
| 3446 | 3406 |
| 3447 // Clear the mark bits of that page and reset live bytes count. | 3407 // Clear the mark bits of that page and reset live bytes count. |
| 3448 Bitmap::Clear(p); | 3408 p->ClearLiveness(); |
| 3449 | 3409 |
| 3450 if (free_start != p->area_end()) { | 3410 if (free_start != p->area_end()) { |
| 3451 int size = static_cast<int>(p->area_end() - free_start); | 3411 int size = static_cast<int>(p->area_end() - free_start); |
| 3452 if (free_space_mode == ZAP_FREE_SPACE) { | 3412 if (free_space_mode == ZAP_FREE_SPACE) { |
| 3453 memset(free_start, 0xcc, size); | 3413 memset(free_start, 0xcc, size); |
| 3454 } | 3414 } |
| 3455 if (free_list_mode == REBUILD_FREE_LIST) { | 3415 if (free_list_mode == REBUILD_FREE_LIST) { |
| 3456 freed_bytes = reinterpret_cast<PagedSpace*>(space)->UnaccountedFree( | 3416 freed_bytes = reinterpret_cast<PagedSpace*>(space)->UnaccountedFree( |
| 3457 free_start, size); | 3417 free_start, size); |
| 3458 max_freed_bytes = Max(freed_bytes, max_freed_bytes); | 3418 max_freed_bytes = Max(freed_bytes, max_freed_bytes); |
| (...skipping 12 matching lines...) |
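The sweep loop above walks the black objects on a page in address order; every gap between free_start and the next live object is optionally zapped with 0xcc and handed back to the free list, the largest such gap is tracked for the return value, and the tail between the last object and area_end() gets the same treatment. The sketch below reproduces that accounting over a flat byte buffer; all names are hypothetical and the free-list insertion is elided.

```cpp
#include <algorithm>
#include <cstddef>
#include <cstring>
#include <vector>

struct LiveObject { size_t offset; size_t size; };  // sorted by offset

// Sweep a "page": zap every dead gap with 0xcc and return the largest gap,
// mirroring the freed_bytes / max_freed_bytes bookkeeping above.
size_t SweepPage(std::vector<unsigned char>& page,
                 const std::vector<LiveObject>& live, bool zap_free_space) {
  size_t free_start = 0;
  size_t max_freed = 0;
  auto release = [&](size_t start, size_t end) {
    size_t size = end - start;
    if (size == 0) return;
    if (zap_free_space) std::memset(page.data() + start, 0xcc, size);
    max_freed = std::max(max_freed, size);  // free-list add elided
  };
  for (const LiveObject& object : live) {
    release(free_start, object.offset);  // gap before the live object
    free_start = object.offset + object.size;
  }
  release(free_start, page.size());  // tail after the last live object
  return max_freed;
}
```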
| 3471 Address start = code->instruction_start(); | 3431 Address start = code->instruction_start(); |
| 3472 Address end = code->address() + code->Size(); | 3432 Address end = code->address() + code->Size(); |
| 3473 | 3433 |
| 3474 RememberedSet<OLD_TO_NEW>::RemoveRangeTyped(page, start, end); | 3434 RememberedSet<OLD_TO_NEW>::RemoveRangeTyped(page, start, end); |
| 3475 | 3435 |
| 3476 if (heap_->incremental_marking()->IsCompacting() && | 3436 if (heap_->incremental_marking()->IsCompacting() && |
| 3477 !ShouldSkipEvacuationSlotRecording(code)) { | 3437 !ShouldSkipEvacuationSlotRecording(code)) { |
| 3478 DCHECK(compacting_); | 3438 DCHECK(compacting_); |
| 3479 | 3439 |
| 3481 // If the object is white then no slots were recorded on it yet. | 3441 // If the object is white then no slots were recorded on it yet. |
| 3481 MarkBit mark_bit = Marking::MarkBitFrom(code); | 3441 MarkBit mark_bit = ObjectMarking::MarkBitFrom(code); |
| 3482 if (Marking::IsWhite(mark_bit)) return; | 3442 if (Marking::IsWhite(mark_bit)) return; |
| 3483 | 3443 |
| 3484 // Ignore all slots that might have been recorded in the body of the | 3444 // Ignore all slots that might have been recorded in the body of the |
| 3485 // deoptimized code object. Assumption: no slots will be recorded for | 3445 // deoptimized code object. Assumption: no slots will be recorded for |
| 3486 // this object after invalidating it. | 3446 // this object after invalidating it. |
| 3487 RememberedSet<OLD_TO_OLD>::RemoveRangeTyped(page, start, end); | 3447 RememberedSet<OLD_TO_OLD>::RemoveRangeTyped(page, start, end); |
| 3488 } | 3448 } |
| 3489 } | 3449 } |
| 3490 | 3450 |
| 3491 | 3451 |
| 3492 // Return true if the given code is deoptimized or will be deoptimized. | 3452 // Return true if the given code is deoptimized or will be deoptimized. |
| 3493 bool MarkCompactCollector::WillBeDeoptimized(Code* code) { | 3453 bool MarkCompactCollector::WillBeDeoptimized(Code* code) { |
| 3494 return code->is_optimized_code() && code->marked_for_deoptimization(); | 3454 return code->is_optimized_code() && code->marked_for_deoptimization(); |
| 3495 } | 3455 } |
| 3496 | 3456 |
| 3497 | 3457 |
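InvalidateCode above removes every slot recorded inside the deoptimized code object's body from the typed remembered sets, and skips the work entirely when the code object is still white, since nothing has been recorded for it yet. A small sketch of the underlying "remove a half-open address range from a slot set" step over a sorted container, independent of V8's RememberedSet type:

```cpp
#include <cstdint>
#include <set>

using Address = uintptr_t;

// Drop all recorded slots in [start, end), the way RemoveRangeTyped is used
// above for a deoptimized code object's instruction area.
void RemoveRange(std::set<Address>& recorded_slots, Address start,
                 Address end) {
  recorded_slots.erase(recorded_slots.lower_bound(start),
                       recorded_slots.lower_bound(end));
}
```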
| 3498 #ifdef VERIFY_HEAP | 3458 #ifdef VERIFY_HEAP |
| 3499 static void VerifyAllBlackObjects(MemoryChunk* page) { | 3459 static void VerifyAllBlackObjects(MemoryChunk* page) { |
| 3500 LiveObjectIterator<kAllLiveObjects> it(page); | 3460 LiveObjectIterator<kAllLiveObjects> it(page); |
| 3501 HeapObject* object = NULL; | 3461 HeapObject* object = NULL; |
| 3502 while ((object = it.Next()) != NULL) { | 3462 while ((object = it.Next()) != NULL) { |
| 3503 CHECK(Marking::IsBlack(Marking::MarkBitFrom(object))); | 3463 CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(object))); |
| 3504 } | 3464 } |
| 3505 } | 3465 } |
| 3506 #endif // VERIFY_HEAP | 3466 #endif // VERIFY_HEAP |
| 3507 | 3467 |
| 3508 template <class Visitor> | 3468 template <class Visitor> |
| 3509 bool MarkCompactCollector::VisitLiveObjects(MemoryChunk* page, Visitor* visitor, | 3469 bool MarkCompactCollector::VisitLiveObjects(MemoryChunk* page, Visitor* visitor, |
| 3510 IterationMode mode) { | 3470 IterationMode mode) { |
| 3511 #ifdef VERIFY_HEAP | 3471 #ifdef VERIFY_HEAP |
| 3512 VerifyAllBlackObjects(page); | 3472 VerifyAllBlackObjects(page); |
| 3513 #endif // VERIFY_HEAP | 3473 #endif // VERIFY_HEAP |
| 3514 | 3474 |
| 3515 LiveObjectIterator<kBlackObjects> it(page); | 3475 LiveObjectIterator<kBlackObjects> it(page); |
| 3516 HeapObject* object = nullptr; | 3476 HeapObject* object = nullptr; |
| 3517 while ((object = it.Next()) != nullptr) { | 3477 while ((object = it.Next()) != nullptr) { |
| 3518 DCHECK(Marking::IsBlack(Marking::MarkBitFrom(object))); | 3478 DCHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(object))); |
| 3519 if (!visitor->Visit(object)) { | 3479 if (!visitor->Visit(object)) { |
| 3520 if (mode == kClearMarkbits) { | 3480 if (mode == kClearMarkbits) { |
| 3521 page->markbits()->ClearRange( | 3481 page->markbits()->ClearRange( |
| 3522 page->AddressToMarkbitIndex(page->area_start()), | 3482 page->AddressToMarkbitIndex(page->area_start()), |
| 3523 page->AddressToMarkbitIndex(object->address())); | 3483 page->AddressToMarkbitIndex(object->address())); |
| 3524 if (page->old_to_new_slots() != nullptr) { | 3484 if (page->old_to_new_slots() != nullptr) { |
| 3525 page->old_to_new_slots()->RemoveRange( | 3485 page->old_to_new_slots()->RemoveRange( |
| 3526 0, static_cast<int>(object->address() - page->address())); | 3486 0, static_cast<int>(object->address() - page->address())); |
| 3527 } | 3487 } |
| 3528 if (page->typed_old_to_new_slots() != nullptr) { | 3488 if (page->typed_old_to_new_slots() != nullptr) { |
| 3529 RememberedSet<OLD_TO_NEW>::RemoveRangeTyped(page, page->address(), | 3489 RememberedSet<OLD_TO_NEW>::RemoveRangeTyped(page, page->address(), |
| 3530 object->address()); | 3490 object->address()); |
| 3531 } | 3491 } |
| 3532 RecomputeLiveBytes(page); | 3492 RecomputeLiveBytes(page); |
| 3533 } | 3493 } |
| 3534 return false; | 3494 return false; |
| 3535 } | 3495 } |
| 3536 } | 3496 } |
| 3537 if (mode == kClearMarkbits) { | 3497 if (mode == kClearMarkbits) { |
| 3538 Bitmap::Clear(page); | 3498 page->ClearLiveness(); |
| 3539 } | 3499 } |
| 3540 return true; | 3500 return true; |
| 3541 } | 3501 } |
| 3542 | 3502 |
| 3543 | 3503 |
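When a visitor aborts mid-page, VisitLiveObjects above rolls back only the prefix it already consumed: mark bits from area_start up to the failing object are cleared, old-to-new slots in that prefix are dropped, and live bytes are recomputed; a fully visited page simply has its liveness cleared. The following is a small sketch of the prefix clear on a bitmap stored as 32-bit cells, with the helper name and layout assumed for illustration.

```cpp
#include <cstdint>
#include <vector>

// Clear bits [0, end_index) of a bitmap stored as 32-bit cells, the shape of
// markbits()->ClearRange(area_start_index, failing_object_index) above.
void ClearBitRange(std::vector<uint32_t>& bitmap, size_t end_index) {
  size_t full_cells = end_index / 32;
  for (size_t i = 0; i < full_cells; ++i) bitmap[i] = 0;
  size_t rest = end_index % 32;
  if (rest != 0) bitmap[full_cells] &= ~((1u << rest) - 1u);
}
```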
| 3544 void MarkCompactCollector::RecomputeLiveBytes(MemoryChunk* page) { | 3504 void MarkCompactCollector::RecomputeLiveBytes(MemoryChunk* page) { |
| 3545 LiveObjectIterator<kBlackObjects> it(page); | 3505 LiveObjectIterator<kBlackObjects> it(page); |
| 3546 int new_live_size = 0; | 3506 int new_live_size = 0; |
| 3547 HeapObject* object = nullptr; | 3507 HeapObject* object = nullptr; |
| 3548 while ((object = it.Next()) != nullptr) { | 3508 while ((object = it.Next()) != nullptr) { |
| 3549 new_live_size += object->Size(); | 3509 new_live_size += object->Size(); |
| 3550 } | 3510 } |
| 3551 page->SetLiveBytes(new_live_size); | 3511 page->SetLiveBytes(new_live_size); |
| 3552 } | 3512 } |
| 3553 | 3513 |
| 3554 | 3514 |
| 3555 void MarkCompactCollector::VisitLiveObjectsBody(Page* page, | 3515 void MarkCompactCollector::VisitLiveObjectsBody(Page* page, |
| 3556 ObjectVisitor* visitor) { | 3516 ObjectVisitor* visitor) { |
| 3557 #ifdef VERIFY_HEAP | 3517 #ifdef VERIFY_HEAP |
| 3558 VerifyAllBlackObjects(page); | 3518 VerifyAllBlackObjects(page); |
| 3559 #endif // VERIFY_HEAP | 3519 #endif // VERIFY_HEAP |
| 3560 | 3520 |
| 3561 LiveObjectIterator<kBlackObjects> it(page); | 3521 LiveObjectIterator<kBlackObjects> it(page); |
| 3562 HeapObject* object = NULL; | 3522 HeapObject* object = NULL; |
| 3563 while ((object = it.Next()) != NULL) { | 3523 while ((object = it.Next()) != NULL) { |
| 3564 DCHECK(Marking::IsBlack(Marking::MarkBitFrom(object))); | 3524 DCHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(object))); |
| 3565 Map* map = object->synchronized_map(); | 3525 Map* map = object->synchronized_map(); |
| 3566 int size = object->SizeFromMap(map); | 3526 int size = object->SizeFromMap(map); |
| 3567 object->IterateBody(map->instance_type(), size, visitor); | 3527 object->IterateBody(map->instance_type(), size, visitor); |
| 3568 } | 3528 } |
| 3569 } | 3529 } |
| 3570 | 3530 |
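VisitLiveObjectsBody reads each object's map through synchronized_map(), i.e. with a synchronizing load, which matters when another thread may be publishing or rewriting objects concurrently. The standalone sketch below illustrates the general release/acquire publication pattern such a load supports; it is an illustration of the memory-ordering idea only, not a claim about V8's object layout or about why this particular call site needs it.

```cpp
#include <atomic>
#include <cstddef>

struct Map { size_t instance_size; };

struct HeapObjectModel {
  std::atomic<const Map*> map{nullptr};  // stored last when publishing
  // ... body fields are initialized before the map store ...
};

// Writer: initialize the body, then publish by storing the map with release.
void Publish(HeapObjectModel& object, const Map* map) {
  object.map.store(map, std::memory_order_release);
}

// Reader (a sweeper or visitor): an acquire load of the map guarantees that
// everything written before the release store is visible before the size is
// computed from it.
size_t SizeOf(const HeapObjectModel& object) {
  const Map* map = object.map.load(std::memory_order_acquire);
  return map ? map->instance_size : 0;
}
```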
| 3571 void MarkCompactCollector::Sweeper::AddSweptPageSafe(PagedSpace* space, | 3531 void MarkCompactCollector::Sweeper::AddSweptPageSafe(PagedSpace* space, |
| 3572 Page* page) { | 3532 Page* page) { |
| 3573 base::LockGuard<base::Mutex> guard(&mutex_); | 3533 base::LockGuard<base::Mutex> guard(&mutex_); |
| 3574 swept_list_[space->identity()].Add(page); | 3534 swept_list_[space->identity()].Add(page); |
| (...skipping 133 matching lines...) |
| 3708 // just freed free space object. | 3668 // just freed free space object. |
| 3709 if (heap->InToSpace(*slot)) { | 3669 if (heap->InToSpace(*slot)) { |
| 3710 return KEEP_SLOT; | 3670 return KEEP_SLOT; |
| 3711 } | 3671 } |
| 3712 } else if (heap->InToSpace(*slot)) { | 3672 } else if (heap->InToSpace(*slot)) { |
| 3713 // Slots can point to "to" space if the page has been moved, or if the | 3673 // Slots can point to "to" space if the page has been moved, or if the |
| 3714 // slot has been recorded multiple times in the remembered set. Since | 3674 // slot has been recorded multiple times in the remembered set. Since |
| 3715 // there is no forwarding information present we need to check the | 3675 // there is no forwarding information present we need to check the |
| 3716 // markbits to determine liveness. | 3676 // markbits to determine liveness. |
| 3717 if (Marking::IsBlack( | 3677 if (Marking::IsBlack( |
| 3718 Marking::MarkBitFrom(reinterpret_cast<HeapObject*>(*slot)))) | 3678 ObjectMarking::MarkBitFrom(reinterpret_cast<HeapObject*>(*slot)))) |
| 3719 return KEEP_SLOT; | 3679 return KEEP_SLOT; |
| 3720 } else { | 3680 } else { |
| 3721 DCHECK(!heap->InNewSpace(*slot)); | 3681 DCHECK(!heap->InNewSpace(*slot)); |
| 3722 } | 3682 } |
| 3723 return REMOVE_SLOT; | 3683 return REMOVE_SLOT; |
| 3724 } | 3684 } |
| 3725 }; | 3685 }; |
| 3726 | 3686 |
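The slot updater above decides per old-to-new slot whether it stays in the remembered set: a slot that still points into to-space is kept (falling back to the mark bits when no forwarding information is available), and anything that no longer points into new space is removed. A generic sketch of that keep-or-remove filtering, with the liveness test passed in as a predicate and nothing V8-specific assumed:

```cpp
#include <cstdint>
#include <functional>
#include <vector>

enum SlotCallbackResult { KEEP_SLOT, REMOVE_SLOT };

// Filter a remembered set in place: the callback decides per slot whether it
// survives, mirroring the KEEP_SLOT / REMOVE_SLOT protocol above.
void FilterSlots(std::vector<uintptr_t>& slots,
                 const std::function<SlotCallbackResult(uintptr_t)>& check) {
  size_t out = 0;
  for (uintptr_t slot : slots) {
    if (check(slot) == KEEP_SLOT) slots[out++] = slot;
  }
  slots.resize(out);
}
```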
| 3727 int NumberOfPointerUpdateTasks(int pages) { | 3687 int NumberOfPointerUpdateTasks(int pages) { |
| 3728 if (!FLAG_parallel_pointer_update) return 1; | 3688 if (!FLAG_parallel_pointer_update) return 1; |
| (...skipping 225 matching lines...) |
| 3954 | 3914 |
| 3955 if (p->IsEvacuationCandidate()) { | 3915 if (p->IsEvacuationCandidate()) { |
| 3956 // Will be processed in EvacuateNewSpaceAndCandidates. | 3916 // Will be processed in EvacuateNewSpaceAndCandidates. |
| 3957 DCHECK(evacuation_candidates_.length() > 0); | 3917 DCHECK(evacuation_candidates_.length() > 0); |
| 3958 continue; | 3918 continue; |
| 3959 } | 3919 } |
| 3960 | 3920 |
| 3961 // We can not sweep black pages, since all mark bits are set for these | 3921 // We can not sweep black pages, since all mark bits are set for these |
| 3962 // pages. | 3922 // pages. |
| 3963 if (p->IsFlagSet(Page::BLACK_PAGE)) { | 3923 if (p->IsFlagSet(Page::BLACK_PAGE)) { |
| 3964 Bitmap::Clear(p); | 3924 p->ClearLiveness(); |
| 3965 p->concurrent_sweeping_state().SetValue(Page::kSweepingDone); | 3925 p->concurrent_sweeping_state().SetValue(Page::kSweepingDone); |
| 3966 p->ClearFlag(Page::BLACK_PAGE); | 3926 p->ClearFlag(Page::BLACK_PAGE); |
| 3967 // Area above the high watermark is free. | 3927 // Area above the high watermark is free. |
| 3968 Address free_start = p->HighWaterMark(); | 3928 Address free_start = p->HighWaterMark(); |
| 3969 // Check if the space top was in this page, which means that the | 3929 // Check if the space top was in this page, which means that the |
| 3970 // high watermark is not up-to-date. | 3930 // high watermark is not up-to-date. |
| 3971 if (free_start < space_top && space_top <= p->area_end()) { | 3931 if (free_start < space_top && space_top <= p->area_end()) { |
| 3972 free_start = space_top; | 3932 free_start = space_top; |
| 3973 } | 3933 } |
| 3974 int size = static_cast<int>(p->area_end() - free_start); | 3934 int size = static_cast<int>(p->area_end() - free_start); |
| (...skipping 98 matching lines...) |
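Black pages cannot be swept because every mark bit on them is set, so the branch above drops the liveness information and the BLACK_PAGE flag and then frees whatever lies above the high watermark, substituting the space's allocation top when that top still sits inside this page and the watermark is therefore stale. A small sketch of that free-start computation, with the page bounds passed in explicitly as an assumption of the example:

```cpp
#include <cstdint>

using Address = uintptr_t;

// Pick where the free tail of a black page starts: normally the recorded high
// watermark, but if the space's current allocation top lies inside this page
// the watermark is not up to date and the top is used instead, as above.
Address FreeStartOfBlackPage(Address high_water_mark, Address space_top,
                             Address area_end) {
  Address free_start = high_water_mark;
  if (free_start < space_top && space_top <= area_end) free_start = space_top;
  return free_start;
}

// The freed size is then area_end - free_start, handed to the free list.
```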
| 4073 } | 4033 } |
| 4074 } | 4034 } |
| 4075 | 4035 |
| 4076 | 4036 |
| 4077 void MarkCompactCollector::RecordCodeTargetPatch(Address pc, Code* target) { | 4037 void MarkCompactCollector::RecordCodeTargetPatch(Address pc, Code* target) { |
| 4078 DCHECK(heap()->gc_state() == Heap::MARK_COMPACT); | 4038 DCHECK(heap()->gc_state() == Heap::MARK_COMPACT); |
| 4079 if (is_compacting()) { | 4039 if (is_compacting()) { |
| 4080 Code* host = | 4040 Code* host = |
| 4081 isolate()->inner_pointer_to_code_cache()->GcSafeFindCodeForInnerPointer( | 4041 isolate()->inner_pointer_to_code_cache()->GcSafeFindCodeForInnerPointer( |
| 4082 pc); | 4042 pc); |
| 4083 MarkBit mark_bit = Marking::MarkBitFrom(host); | 4043 MarkBit mark_bit = ObjectMarking::MarkBitFrom(host); |
| 4084 if (Marking::IsBlack(mark_bit)) { | 4044 if (Marking::IsBlack(mark_bit)) { |
| 4085 RelocInfo rinfo(isolate(), pc, RelocInfo::CODE_TARGET, 0, host); | 4045 RelocInfo rinfo(isolate(), pc, RelocInfo::CODE_TARGET, 0, host); |
| 4086 // The target is always in old space, we don't have to record the slot in | 4046 // The target is always in old space, we don't have to record the slot in |
| 4087 // the old-to-new remembered set. | 4047 // the old-to-new remembered set. |
| 4088 DCHECK(!heap()->InNewSpace(target)); | 4048 DCHECK(!heap()->InNewSpace(target)); |
| 4089 RecordRelocSlot(host, &rinfo, target); | 4049 RecordRelocSlot(host, &rinfo, target); |
| 4090 } | 4050 } |
| 4091 } | 4051 } |
| 4092 } | 4052 } |
| 4093 | 4053 |
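RecordCodeTargetPatch above looks up the host Code object for the patched pc and records the new code-target relocation slot only when compaction is active and the host is already black, the same white-object fast path used in InvalidateCode. A tiny sketch of that guard, with the marking query reduced to a boolean and the recording step passed in as a callback, both of which are hypothetical:

```cpp
#include <cstdint>

using Address = uintptr_t;

struct CodeStub { bool is_black; };  // hypothetical stand-in for a Code object

// Record a relocation slot only when the host already has recorded slots,
// i.e. only when it is black, matching the guard in RecordCodeTargetPatch.
template <typename RecordFn>
void MaybeRecordCodeTarget(const CodeStub& host, Address pc, Address target,
                           RecordFn record) {
  if (!host.is_black) return;  // white host: no slots recorded yet
  record(pc, target);
}
```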
| 4094 } // namespace internal | 4054 } // namespace internal |
| 4095 } // namespace v8 | 4055 } // namespace v8 |
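Two mechanical renames run through the whole file: mark-bit lookups move from Marking::MarkBitFrom to ObjectMarking::MarkBitFrom, and the per-page Bitmap::Clear(p) calls become p->ClearLiveness(). Judging only from the old call sites and the adjacent comment about clearing mark bits and resetting the live bytes count, ClearLiveness presumably bundles both steps on the page itself; the member below is an assumed sketch of such a body, not V8's actual implementation.

```cpp
#include <algorithm>
#include <cstdint>
#include <vector>

// A toy page model; only the liveness-reset responsibility is sketched.
struct PageModel {
  std::vector<uint32_t> mark_bitmap;  // one cell per 32 mark bits
  intptr_t live_bytes = 0;

  // Assumed shape of ClearLiveness(): wipe the marking bitmap and reset the
  // live-byte counter, which is what the old Bitmap::Clear(p) call sites and
  // their comments suggest the callers needed.
  void ClearLiveness() {
    std::fill(mark_bitmap.begin(), mark_bitmap.end(), 0u);
    live_bytes = 0;
  }
};
```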