OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/heap/mark-compact.h" | 5 #include "src/heap/mark-compact.h" |
6 | 6 |
7 #include "src/base/atomicops.h" | 7 #include "src/base/atomicops.h" |
8 #include "src/base/bits.h" | 8 #include "src/base/bits.h" |
9 #include "src/base/sys-info.h" | 9 #include "src/base/sys-info.h" |
10 #include "src/code-stubs.h" | 10 #include "src/code-stubs.h" |
(...skipping 19 matching lines...) | |
30 | 30 |
31 namespace v8 { | 31 namespace v8 { |
32 namespace internal { | 32 namespace internal { |
33 | 33 |
34 | 34 |
35 const char* Marking::kWhiteBitPattern = "00"; | 35 const char* Marking::kWhiteBitPattern = "00"; |
36 const char* Marking::kBlackBitPattern = "11"; | 36 const char* Marking::kBlackBitPattern = "11"; |
37 const char* Marking::kGreyBitPattern = "10"; | 37 const char* Marking::kGreyBitPattern = "10"; |
38 const char* Marking::kImpossibleBitPattern = "01"; | 38 const char* Marking::kImpossibleBitPattern = "01"; |
39 | 39 |
40 | 40 // The following has to hold in order for {ObjectMarking::MarkBitFrom} to not |
41 // The following has to hold in order for {Marking::MarkBitFrom} to not produce | 41 // produce |
Michael Lippautz 2016/07/12 14:22:31: nit: formatting off
Hannes Payer (out of office) 2016/07/12 14:44:26: Done.
42 // invalid {kImpossibleBitPattern} in the marking bitmap by overlapping. | 42 // invalid {kImpossibleBitPattern} in the marking bitmap by overlapping. |
43 STATIC_ASSERT(Heap::kMinObjectSizeInWords >= 2); | 43 STATIC_ASSERT(Heap::kMinObjectSizeInWords >= 2); |
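The assertion above is what keeps the two-bit colour encoding (white "00", grey "10", black "11") from ever producing the impossible "01" pattern across object boundaries: each heap word owns one bit in the marking bitmap, and an object's colour lives in the bits of its first two words. A small standalone sketch, assuming that layout, illustrates why objects must be at least two words; none of the names below are V8 declarations.

```cpp
// Standalone illustration (not V8 code): one mark bit per heap word, an
// object's colour is the bit pair at its first two words. With objects of
// >= 2 words the pairs of distinct objects never overlap, so marking can
// never leave the impossible "01" pattern behind.
#include <bitset>
#include <cassert>
#include <cstddef>
#include <iostream>

constexpr std::size_t kWordsPerPage = 64;

struct MarkBitmapSketch {
  std::bitset<kWordsPerPage> bits;

  // word_index is the object's first word; size_in_words must be >= 2 so
  // that both colour bits fall inside the object itself.
  void MarkBlack(std::size_t word_index, std::size_t size_in_words) {
    assert(size_in_words >= 2);
    bits.set(word_index);      // first bit of the pair
    bits.set(word_index + 1);  // second bit -> "11" = black
  }

  void MarkGrey(std::size_t word_index, std::size_t size_in_words) {
    assert(size_in_words >= 2);
    bits.set(word_index);        // "10" = grey
    bits.reset(word_index + 1);
  }

  const char* Colour(std::size_t word_index) const {
    bool b0 = bits[word_index], b1 = bits[word_index + 1];
    if (!b0 && !b1) return "white (00)";
    if (b0 && b1) return "black (11)";
    if (b0 && !b1) return "grey (10)";
    return "impossible (01)";
  }
};

int main() {
  MarkBitmapSketch bitmap;
  // Two adjacent 2-word objects at words 0 and 2: their colour-bit pairs
  // are disjoint, so marking one cannot corrupt the other.
  bitmap.MarkBlack(/*word_index=*/0, /*size_in_words=*/2);
  bitmap.MarkGrey(/*word_index=*/2, /*size_in_words=*/2);
  std::cout << bitmap.Colour(0) << "\n";  // black (11)
  std::cout << bitmap.Colour(2) << "\n";  // grey (10)
}
```

With one-word objects, the second colour bit of one object would coincide with the first colour bit of its neighbour, so marking either object could leave a "01" pair in the bitmap.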
44 | 44 |
45 | 45 |
46 // ------------------------------------------------------------------------- | 46 // ------------------------------------------------------------------------- |
47 // MarkCompactCollector | 47 // MarkCompactCollector |
48 | 48 |
49 MarkCompactCollector::MarkCompactCollector(Heap* heap) | 49 MarkCompactCollector::MarkCompactCollector(Heap* heap) |
50 : // NOLINT | 50 : // NOLINT |
51 heap_(heap), | 51 heap_(heap), |
(...skipping 50 matching lines...) | |
102 | 102 |
103 | 103 |
104 static void VerifyMarking(Heap* heap, Address bottom, Address top) { | 104 static void VerifyMarking(Heap* heap, Address bottom, Address top) { |
105 VerifyMarkingVisitor visitor(heap); | 105 VerifyMarkingVisitor visitor(heap); |
106 HeapObject* object; | 106 HeapObject* object; |
107 Address next_object_must_be_here_or_later = bottom; | 107 Address next_object_must_be_here_or_later = bottom; |
108 | 108 |
109 for (Address current = bottom; current < top; current += kPointerSize) { | 109 for (Address current = bottom; current < top; current += kPointerSize) { |
110 object = HeapObject::FromAddress(current); | 110 object = HeapObject::FromAddress(current); |
111 if (MarkCompactCollector::IsMarked(object)) { | 111 if (MarkCompactCollector::IsMarked(object)) { |
112 CHECK(Marking::IsBlack(Marking::MarkBitFrom(object))); | 112 CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(object))); |
113 CHECK(current >= next_object_must_be_here_or_later); | 113 CHECK(current >= next_object_must_be_here_or_later); |
114 object->Iterate(&visitor); | 114 object->Iterate(&visitor); |
115 next_object_must_be_here_or_later = current + object->Size(); | 115 next_object_must_be_here_or_later = current + object->Size(); |
116 // The next word for sure belongs to the current object, jump over it. | 116 // The next word for sure belongs to the current object, jump over it. |
117 current += kPointerSize; | 117 current += kPointerSize; |
118 } | 118 } |
119 } | 119 } |
120 } | 120 } |
121 | 121 |
122 static void VerifyMarkingBlackPage(Heap* heap, Page* page) { | 122 static void VerifyMarkingBlackPage(Heap* heap, Page* page) { |
123 CHECK(page->IsFlagSet(Page::BLACK_PAGE)); | 123 CHECK(page->IsFlagSet(Page::BLACK_PAGE)); |
124 VerifyMarkingVisitor visitor(heap); | 124 VerifyMarkingVisitor visitor(heap); |
125 HeapObjectIterator it(page); | 125 HeapObjectIterator it(page); |
126 for (HeapObject* object = it.Next(); object != NULL; object = it.Next()) { | 126 for (HeapObject* object = it.Next(); object != NULL; object = it.Next()) { |
127 CHECK(Marking::IsBlack(Marking::MarkBitFrom(object))); | 127 CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(object))); |
128 object->Iterate(&visitor); | 128 object->Iterate(&visitor); |
129 } | 129 } |
130 } | 130 } |
131 | 131 |
132 static void VerifyMarking(NewSpace* space) { | 132 static void VerifyMarking(NewSpace* space) { |
133 Address end = space->top(); | 133 Address end = space->top(); |
134 // The bottom position is at the start of its page. Allows us to use | 134 // The bottom position is at the start of its page. Allows us to use |
135 // page->area_start() as start of range on all pages. | 135 // page->area_start() as start of range on all pages. |
136 CHECK_EQ(space->bottom(), Page::FromAddress(space->bottom())->area_start()); | 136 CHECK_EQ(space->bottom(), Page::FromAddress(space->bottom())->area_start()); |
137 | 137 |
(...skipping 232 matching lines...) | |
370 | 370 |
371 | 371 |
372 void MarkCompactCollector::VerifyMarkbitsAreClean() { | 372 void MarkCompactCollector::VerifyMarkbitsAreClean() { |
373 VerifyMarkbitsAreClean(heap_->old_space()); | 373 VerifyMarkbitsAreClean(heap_->old_space()); |
374 VerifyMarkbitsAreClean(heap_->code_space()); | 374 VerifyMarkbitsAreClean(heap_->code_space()); |
375 VerifyMarkbitsAreClean(heap_->map_space()); | 375 VerifyMarkbitsAreClean(heap_->map_space()); |
376 VerifyMarkbitsAreClean(heap_->new_space()); | 376 VerifyMarkbitsAreClean(heap_->new_space()); |
377 | 377 |
378 LargeObjectIterator it(heap_->lo_space()); | 378 LargeObjectIterator it(heap_->lo_space()); |
379 for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) { | 379 for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) { |
380 MarkBit mark_bit = Marking::MarkBitFrom(obj); | 380 MarkBit mark_bit = ObjectMarking::MarkBitFrom(obj); |
381 CHECK(Marking::IsWhite(mark_bit)); | 381 CHECK(Marking::IsWhite(mark_bit)); |
382 CHECK_EQ(0, Page::FromAddress(obj->address())->LiveBytes()); | 382 CHECK_EQ(0, Page::FromAddress(obj->address())->LiveBytes()); |
383 } | 383 } |
384 } | 384 } |
385 | 385 |
386 | 386 |
387 void MarkCompactCollector::VerifyWeakEmbeddedObjectsInCode() { | 387 void MarkCompactCollector::VerifyWeakEmbeddedObjectsInCode() { |
388 HeapObjectIterator code_iterator(heap()->code_space()); | 388 HeapObjectIterator code_iterator(heap()->code_space()); |
389 for (HeapObject* obj = code_iterator.Next(); obj != NULL; | 389 for (HeapObject* obj = code_iterator.Next(); obj != NULL; |
390 obj = code_iterator.Next()) { | 390 obj = code_iterator.Next()) { |
(...skipping 10 matching lines...) | |
401 for (HeapObject* obj = iterator.Next(); obj != NULL; obj = iterator.Next()) { | 401 for (HeapObject* obj = iterator.Next(); obj != NULL; obj = iterator.Next()) { |
402 Map* map = Map::cast(obj); | 402 Map* map = Map::cast(obj); |
403 map->VerifyOmittedMapChecks(); | 403 map->VerifyOmittedMapChecks(); |
404 } | 404 } |
405 } | 405 } |
406 #endif // VERIFY_HEAP | 406 #endif // VERIFY_HEAP |
407 | 407 |
408 | 408 |
409 static void ClearMarkbitsInPagedSpace(PagedSpace* space) { | 409 static void ClearMarkbitsInPagedSpace(PagedSpace* space) { |
410 for (Page* p : *space) { | 410 for (Page* p : *space) { |
411 Bitmap::Clear(p); | 411 p->ClearLiveness(); |
412 if (p->IsFlagSet(Page::BLACK_PAGE)) { | 412 if (p->IsFlagSet(Page::BLACK_PAGE)) { |
413 p->ClearFlag(Page::BLACK_PAGE); | 413 p->ClearFlag(Page::BLACK_PAGE); |
414 } | 414 } |
415 } | 415 } |
416 } | 416 } |
417 | 417 |
418 | 418 |
419 static void ClearMarkbitsInNewSpace(NewSpace* space) { | 419 static void ClearMarkbitsInNewSpace(NewSpace* space) { |
420 for (Page* page : *space) { | 420 for (Page* page : *space) { |
421 Bitmap::Clear(page); | 421 page->ClearLiveness(); |
422 } | 422 } |
423 } | 423 } |
424 | 424 |
425 | 425 |
426 void MarkCompactCollector::ClearMarkbits() { | 426 void MarkCompactCollector::ClearMarkbits() { |
427 ClearMarkbitsInPagedSpace(heap_->code_space()); | 427 ClearMarkbitsInPagedSpace(heap_->code_space()); |
428 ClearMarkbitsInPagedSpace(heap_->map_space()); | 428 ClearMarkbitsInPagedSpace(heap_->map_space()); |
429 ClearMarkbitsInPagedSpace(heap_->old_space()); | 429 ClearMarkbitsInPagedSpace(heap_->old_space()); |
430 ClearMarkbitsInNewSpace(heap_->new_space()); | 430 ClearMarkbitsInNewSpace(heap_->new_space()); |
431 | 431 |
432 LargeObjectIterator it(heap_->lo_space()); | 432 LargeObjectIterator it(heap_->lo_space()); |
433 for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) { | 433 for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) { |
434 Marking::MarkWhite(Marking::MarkBitFrom(obj)); | 434 Marking::MarkWhite(ObjectMarking::MarkBitFrom(obj)); |
435 MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address()); | 435 MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address()); |
436 chunk->ResetProgressBar(); | 436 chunk->ResetProgressBar(); |
437 chunk->ResetLiveBytes(); | 437 chunk->ResetLiveBytes(); |
438 if (chunk->IsFlagSet(Page::BLACK_PAGE)) { | 438 if (chunk->IsFlagSet(Page::BLACK_PAGE)) { |
439 chunk->ClearFlag(Page::BLACK_PAGE); | 439 chunk->ClearFlag(Page::BLACK_PAGE); |
440 } | 440 } |
441 } | 441 } |
442 } | 442 } |
443 | 443 |
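The patch replaces `Bitmap::Clear(page)` with `page->ClearLiveness()` in the paged- and new-space loops above, which reads as folding the bitmap clear and the live-byte bookkeeping into one page-level helper; the large-object loop still performs its steps explicitly. A hedged sketch of that shape, assuming the helper clears the mark bitmap and resets the live-byte counter (names are illustrative, not V8's declarations):

```cpp
#include <bitset>
#include <cstddef>

class PageSketch {
 public:
  // Clears everything the collector derives liveness from on this page.
  void ClearLiveness() {
    marking_bitmap_.reset();  // counterpart of the old Bitmap::Clear(page)
    live_bytes_ = 0;          // counterpart of ResetLiveBytes()
  }

 private:
  std::bitset<4096> marking_bitmap_;  // one bit per heap word; size illustrative
  std::size_t live_bytes_ = 0;
};
```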
444 class MarkCompactCollector::Sweeper::SweeperTask : public v8::Task { | 444 class MarkCompactCollector::Sweeper::SweeperTask : public v8::Task { |
(...skipping 132 matching lines...) | |
577 } | 577 } |
578 | 578 |
579 bool MarkCompactCollector::Sweeper::IsSweepingCompleted() { | 579 bool MarkCompactCollector::Sweeper::IsSweepingCompleted() { |
580 while (pending_sweeper_tasks_semaphore_.WaitFor( | 580 while (pending_sweeper_tasks_semaphore_.WaitFor( |
581 base::TimeDelta::FromSeconds(0))) { | 581 base::TimeDelta::FromSeconds(0))) { |
582 num_sweeping_tasks_.Increment(-1); | 582 num_sweeping_tasks_.Increment(-1); |
583 } | 583 } |
584 return num_sweeping_tasks_.Value() == 0; | 584 return num_sweeping_tasks_.Value() == 0; |
585 } | 585 } |
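`IsSweepingCompleted` above drains the task-completion semaphore with a zero timeout, decrementing the outstanding-task counter once per consumed signal, and reports completion only when that counter reaches zero. A minimal sketch of the same pattern using standard C++20 primitives rather than V8's base library (the class and method names here are illustrative only):

```cpp
#include <atomic>
#include <semaphore>

class SweeperSketch {
 public:
  void NotifyTaskStarted() { num_sweeping_tasks_.fetch_add(1); }

  // Called by a sweeper task when it finishes its work.
  void NotifyTaskDone() { pending_tasks_.release(); }

  // Non-blocking: consume any completion signals posted so far, then ask
  // whether every started task has been accounted for.
  bool IsSweepingCompleted() {
    while (pending_tasks_.try_acquire()) {
      num_sweeping_tasks_.fetch_sub(1);
    }
    return num_sweeping_tasks_.load() == 0;
  }

 private:
  std::counting_semaphore<> pending_tasks_{0};
  std::atomic<int> num_sweeping_tasks_{0};
};
```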
586 | 586 |
587 void Marking::TransferMark(Heap* heap, Address old_start, Address new_start) { | |
588 // This is only used when resizing an object. | |
589 DCHECK(MemoryChunk::FromAddress(old_start) == | |
590 MemoryChunk::FromAddress(new_start)); | |
591 | |
592 if (!heap->incremental_marking()->IsMarking() || | |
593 Page::FromAddress(old_start)->IsFlagSet(Page::BLACK_PAGE)) | |
594 return; | |
595 | |
596 // If the mark doesn't move, we don't check the color of the object. | |
597 // It doesn't matter whether the object is black, since it hasn't changed | |
598 // size, so the adjustment to the live data count will be zero anyway. | |
599 if (old_start == new_start) return; | |
600 | |
601 MarkBit new_mark_bit = MarkBitFrom(new_start); | |
602 MarkBit old_mark_bit = MarkBitFrom(old_start); | |
603 | |
604 #ifdef DEBUG | |
605 ObjectColor old_color = Color(old_mark_bit); | |
606 #endif | |
607 | |
608 if (Marking::IsBlack(old_mark_bit)) { | |
609 Marking::BlackToWhite(old_mark_bit); | |
610 Marking::MarkBlack(new_mark_bit); | |
611 return; | |
612 } else if (Marking::IsGrey(old_mark_bit)) { | |
613 Marking::GreyToWhite(old_mark_bit); | |
614 heap->incremental_marking()->WhiteToGreyAndPush( | |
615 HeapObject::FromAddress(new_start), new_mark_bit); | |
616 heap->incremental_marking()->RestartIfNotMarking(); | |
617 } | |
618 | |
619 #ifdef DEBUG | |
620 ObjectColor new_color = Color(new_mark_bit); | |
621 DCHECK(new_color == old_color); | |
622 #endif | |
623 } | |
624 | |
625 | |
626 const char* AllocationSpaceName(AllocationSpace space) { | 587 const char* AllocationSpaceName(AllocationSpace space) { |
627 switch (space) { | 588 switch (space) { |
628 case NEW_SPACE: | 589 case NEW_SPACE: |
629 return "NEW_SPACE"; | 590 return "NEW_SPACE"; |
630 case OLD_SPACE: | 591 case OLD_SPACE: |
631 return "OLD_SPACE"; | 592 return "OLD_SPACE"; |
632 case CODE_SPACE: | 593 case CODE_SPACE: |
633 return "CODE_SPACE"; | 594 return "CODE_SPACE"; |
634 case MAP_SPACE: | 595 case MAP_SPACE: |
635 return "MAP_SPACE"; | 596 return "MAP_SPACE"; |
(...skipping 329 matching lines...) | |
965 | 926 |
966 JSFunction* candidate = jsfunction_candidates_head_; | 927 JSFunction* candidate = jsfunction_candidates_head_; |
967 JSFunction* next_candidate; | 928 JSFunction* next_candidate; |
968 while (candidate != NULL) { | 929 while (candidate != NULL) { |
969 next_candidate = GetNextCandidate(candidate); | 930 next_candidate = GetNextCandidate(candidate); |
970 ClearNextCandidate(candidate, undefined); | 931 ClearNextCandidate(candidate, undefined); |
971 | 932 |
972 SharedFunctionInfo* shared = candidate->shared(); | 933 SharedFunctionInfo* shared = candidate->shared(); |
973 | 934 |
974 Code* code = shared->code(); | 935 Code* code = shared->code(); |
975 MarkBit code_mark = Marking::MarkBitFrom(code); | 936 MarkBit code_mark = ObjectMarking::MarkBitFrom(code); |
976 if (Marking::IsWhite(code_mark)) { | 937 if (Marking::IsWhite(code_mark)) { |
977 if (FLAG_trace_code_flushing && shared->is_compiled()) { | 938 if (FLAG_trace_code_flushing && shared->is_compiled()) { |
978 PrintF("[code-flushing clears: "); | 939 PrintF("[code-flushing clears: "); |
979 shared->ShortPrint(); | 940 shared->ShortPrint(); |
980 PrintF(" - age: %d]\n", code->GetAge()); | 941 PrintF(" - age: %d]\n", code->GetAge()); |
981 } | 942 } |
982 // Always flush the optimized code map if there is one. | 943 // Always flush the optimized code map if there is one. |
983 if (!shared->OptimizedCodeMapIsCleared()) { | 944 if (!shared->OptimizedCodeMapIsCleared()) { |
984 shared->ClearOptimizedCodeMap(); | 945 shared->ClearOptimizedCodeMap(); |
985 } | 946 } |
(...skipping 26 matching lines...) | |
1012 void CodeFlusher::ProcessSharedFunctionInfoCandidates() { | 973 void CodeFlusher::ProcessSharedFunctionInfoCandidates() { |
1013 Code* lazy_compile = isolate_->builtins()->builtin(Builtins::kCompileLazy); | 974 Code* lazy_compile = isolate_->builtins()->builtin(Builtins::kCompileLazy); |
1014 | 975 |
1015 SharedFunctionInfo* candidate = shared_function_info_candidates_head_; | 976 SharedFunctionInfo* candidate = shared_function_info_candidates_head_; |
1016 SharedFunctionInfo* next_candidate; | 977 SharedFunctionInfo* next_candidate; |
1017 while (candidate != NULL) { | 978 while (candidate != NULL) { |
1018 next_candidate = GetNextCandidate(candidate); | 979 next_candidate = GetNextCandidate(candidate); |
1019 ClearNextCandidate(candidate); | 980 ClearNextCandidate(candidate); |
1020 | 981 |
1021 Code* code = candidate->code(); | 982 Code* code = candidate->code(); |
1022 MarkBit code_mark = Marking::MarkBitFrom(code); | 983 MarkBit code_mark = ObjectMarking::MarkBitFrom(code); |
1023 if (Marking::IsWhite(code_mark)) { | 984 if (Marking::IsWhite(code_mark)) { |
1024 if (FLAG_trace_code_flushing && candidate->is_compiled()) { | 985 if (FLAG_trace_code_flushing && candidate->is_compiled()) { |
1025 PrintF("[code-flushing clears: "); | 986 PrintF("[code-flushing clears: "); |
1026 candidate->ShortPrint(); | 987 candidate->ShortPrint(); |
1027 PrintF(" - age: %d]\n", code->GetAge()); | 988 PrintF(" - age: %d]\n", code->GetAge()); |
1028 } | 989 } |
1029 // Always flush the optimized code map if there is one. | 990 // Always flush the optimized code map if there is one. |
1030 if (!candidate->OptimizedCodeMapIsCleared()) { | 991 if (!candidate->OptimizedCodeMapIsCleared()) { |
1031 candidate->ClearOptimizedCodeMap(); | 992 candidate->ClearOptimizedCodeMap(); |
1032 } | 993 } |
(...skipping 116 matching lines...) | |
1149 // We are close to a stack overflow, so just mark the objects. | 1110 // We are close to a stack overflow, so just mark the objects. |
1150 } | 1111 } |
1151 MarkCompactCollector* collector = heap->mark_compact_collector(); | 1112 MarkCompactCollector* collector = heap->mark_compact_collector(); |
1152 for (Object** p = start; p < end; p++) { | 1113 for (Object** p = start; p < end; p++) { |
1153 MarkObjectByPointer(collector, object, p); | 1114 MarkObjectByPointer(collector, object, p); |
1154 } | 1115 } |
1155 } | 1116 } |
1156 | 1117 |
1157 // Marks the object black and pushes it on the marking stack. | 1118 // Marks the object black and pushes it on the marking stack. |
1158 INLINE(static void MarkObject(Heap* heap, HeapObject* object)) { | 1119 INLINE(static void MarkObject(Heap* heap, HeapObject* object)) { |
1159 MarkBit mark = Marking::MarkBitFrom(object); | 1120 MarkBit mark = ObjectMarking::MarkBitFrom(object); |
1160 heap->mark_compact_collector()->MarkObject(object, mark); | 1121 heap->mark_compact_collector()->MarkObject(object, mark); |
1161 } | 1122 } |
1162 | 1123 |
1163 // Marks the object black without pushing it on the marking stack. | 1124 // Marks the object black without pushing it on the marking stack. |
1164 // Returns true if object needed marking and false otherwise. | 1125 // Returns true if object needed marking and false otherwise. |
1165 INLINE(static bool MarkObjectWithoutPush(Heap* heap, HeapObject* object)) { | 1126 INLINE(static bool MarkObjectWithoutPush(Heap* heap, HeapObject* object)) { |
1166 MarkBit mark_bit = Marking::MarkBitFrom(object); | 1127 MarkBit mark_bit = ObjectMarking::MarkBitFrom(object); |
1167 if (Marking::IsWhite(mark_bit)) { | 1128 if (Marking::IsWhite(mark_bit)) { |
1168 heap->mark_compact_collector()->SetMark(object, mark_bit); | 1129 heap->mark_compact_collector()->SetMark(object, mark_bit); |
1169 return true; | 1130 return true; |
1170 } | 1131 } |
1171 return false; | 1132 return false; |
1172 } | 1133 } |
1173 | 1134 |
1174 // Mark object pointed to by p. | 1135 // Mark object pointed to by p. |
1175 INLINE(static void MarkObjectByPointer(MarkCompactCollector* collector, | 1136 INLINE(static void MarkObjectByPointer(MarkCompactCollector* collector, |
1176 HeapObject* object, Object** p)) { | 1137 HeapObject* object, Object** p)) { |
1177 if (!(*p)->IsHeapObject()) return; | 1138 if (!(*p)->IsHeapObject()) return; |
1178 HeapObject* target_object = HeapObject::cast(*p); | 1139 HeapObject* target_object = HeapObject::cast(*p); |
1179 collector->RecordSlot(object, p, target_object); | 1140 collector->RecordSlot(object, p, target_object); |
1180 MarkBit mark = Marking::MarkBitFrom(target_object); | 1141 MarkBit mark = ObjectMarking::MarkBitFrom(target_object); |
1181 collector->MarkObject(target_object, mark); | 1142 collector->MarkObject(target_object, mark); |
1182 } | 1143 } |
1183 | 1144 |
1184 | 1145 |
1185 // Visit an unmarked object. | 1146 // Visit an unmarked object. |
1186 INLINE(static void VisitUnmarkedObject(MarkCompactCollector* collector, | 1147 INLINE(static void VisitUnmarkedObject(MarkCompactCollector* collector, |
1187 HeapObject* obj)) { | 1148 HeapObject* obj)) { |
1188 #ifdef DEBUG | 1149 #ifdef DEBUG |
1189 DCHECK(collector->heap()->Contains(obj)); | 1150 DCHECK(collector->heap()->Contains(obj)); |
1190 DCHECK(!collector->heap()->mark_compact_collector()->IsMarked(obj)); | 1151 DCHECK(!collector->heap()->mark_compact_collector()->IsMarked(obj)); |
1191 #endif | 1152 #endif |
1192 Map* map = obj->map(); | 1153 Map* map = obj->map(); |
1193 Heap* heap = obj->GetHeap(); | 1154 Heap* heap = obj->GetHeap(); |
1194 MarkBit mark = Marking::MarkBitFrom(obj); | 1155 MarkBit mark = ObjectMarking::MarkBitFrom(obj); |
1195 heap->mark_compact_collector()->SetMark(obj, mark); | 1156 heap->mark_compact_collector()->SetMark(obj, mark); |
1196 // Mark the map pointer and the body. | 1157 // Mark the map pointer and the body. |
1197 MarkBit map_mark = Marking::MarkBitFrom(map); | 1158 MarkBit map_mark = ObjectMarking::MarkBitFrom(map); |
1198 heap->mark_compact_collector()->MarkObject(map, map_mark); | 1159 heap->mark_compact_collector()->MarkObject(map, map_mark); |
1199 IterateBody(map, obj); | 1160 IterateBody(map, obj); |
1200 } | 1161 } |
1201 | 1162 |
1202 // Visit all unmarked objects pointed to by [start, end). | 1163 // Visit all unmarked objects pointed to by [start, end). |
1203 // Returns false if the operation fails (lack of stack space). | 1164 // Returns false if the operation fails (lack of stack space). |
1204 INLINE(static bool VisitUnmarkedObjects(Heap* heap, HeapObject* object, | 1165 INLINE(static bool VisitUnmarkedObjects(Heap* heap, HeapObject* object, |
1205 Object** start, Object** end)) { | 1166 Object** start, Object** end)) { |
1206 // Return false if we are close to the stack limit. | 1167 // Return false if we are close to the stack limit. |
1207 StackLimitCheck check(heap->isolate()); | 1168 StackLimitCheck check(heap->isolate()); |
1208 if (check.HasOverflowed()) return false; | 1169 if (check.HasOverflowed()) return false; |
1209 | 1170 |
1210 MarkCompactCollector* collector = heap->mark_compact_collector(); | 1171 MarkCompactCollector* collector = heap->mark_compact_collector(); |
1211 // Visit the unmarked objects. | 1172 // Visit the unmarked objects. |
1212 for (Object** p = start; p < end; p++) { | 1173 for (Object** p = start; p < end; p++) { |
1213 Object* o = *p; | 1174 Object* o = *p; |
1214 if (!o->IsHeapObject()) continue; | 1175 if (!o->IsHeapObject()) continue; |
1215 collector->RecordSlot(object, p, o); | 1176 collector->RecordSlot(object, p, o); |
1216 HeapObject* obj = HeapObject::cast(o); | 1177 HeapObject* obj = HeapObject::cast(o); |
1217 MarkBit mark = Marking::MarkBitFrom(obj); | 1178 MarkBit mark = ObjectMarking::MarkBitFrom(obj); |
1218 if (Marking::IsBlackOrGrey(mark)) continue; | 1179 if (Marking::IsBlackOrGrey(mark)) continue; |
1219 VisitUnmarkedObject(collector, obj); | 1180 VisitUnmarkedObject(collector, obj); |
1220 } | 1181 } |
1221 return true; | 1182 return true; |
1222 } | 1183 } |
1223 | 1184 |
1224 private: | 1185 private: |
1225 // Code flushing support. | 1186 // Code flushing support. |
1226 | 1187 |
1227 static const int kRegExpCodeThreshold = 5; | 1188 static const int kRegExpCodeThreshold = 5; |
(...skipping 13 matching lines...) | |
1241 if (!code->IsSmi() && | 1202 if (!code->IsSmi() && |
1242 HeapObject::cast(code)->map()->instance_type() == CODE_TYPE) { | 1203 HeapObject::cast(code)->map()->instance_type() == CODE_TYPE) { |
1243 // Save a copy that can be reinstated if we need the code again. | 1204 // Save a copy that can be reinstated if we need the code again. |
1244 re->SetDataAt(JSRegExp::saved_code_index(is_one_byte), code); | 1205 re->SetDataAt(JSRegExp::saved_code_index(is_one_byte), code); |
1245 | 1206 |
1246 // Saving a copy might create a pointer into compaction candidate | 1207 // Saving a copy might create a pointer into compaction candidate |
1247 // that was not observed by marker. This might happen if JSRegExp data | 1208 // that was not observed by marker. This might happen if JSRegExp data |
1248 // was marked through the compilation cache before marker reached JSRegExp | 1209 // was marked through the compilation cache before marker reached JSRegExp |
1249 // object. | 1210 // object. |
1250 FixedArray* data = FixedArray::cast(re->data()); | 1211 FixedArray* data = FixedArray::cast(re->data()); |
1251 if (Marking::IsBlackOrGrey(Marking::MarkBitFrom(data))) { | 1212 if (Marking::IsBlackOrGrey(ObjectMarking::MarkBitFrom(data))) { |
1252 Object** slot = | 1213 Object** slot = |
1253 data->data_start() + JSRegExp::saved_code_index(is_one_byte); | 1214 data->data_start() + JSRegExp::saved_code_index(is_one_byte); |
1254 heap->mark_compact_collector()->RecordSlot(data, slot, code); | 1215 heap->mark_compact_collector()->RecordSlot(data, slot, code); |
1255 } | 1216 } |
1256 | 1217 |
1257 // Set a number in the 0-255 range to guarantee no smi overflow. | 1218 // Set a number in the 0-255 range to guarantee no smi overflow. |
1258 re->SetDataAt(JSRegExp::code_index(is_one_byte), | 1219 re->SetDataAt(JSRegExp::code_index(is_one_byte), |
1259 Smi::FromInt(heap->ms_count() & 0xff)); | 1220 Smi::FromInt(heap->ms_count() & 0xff)); |
1260 } else if (code->IsSmi()) { | 1221 } else if (code->IsSmi()) { |
1261 int value = Smi::cast(code)->value(); | 1222 int value = Smi::cast(code)->value(); |
(...skipping 67 matching lines...) | |
1329 : collector_(collector) {} | 1290 : collector_(collector) {} |
1330 | 1291 |
1331 void VisitPointers(Object** start, Object** end) override { | 1292 void VisitPointers(Object** start, Object** end) override { |
1332 for (Object** p = start; p < end; p++) VisitPointer(p); | 1293 for (Object** p = start; p < end; p++) VisitPointer(p); |
1333 } | 1294 } |
1334 | 1295 |
1335 void VisitPointer(Object** slot) override { | 1296 void VisitPointer(Object** slot) override { |
1336 Object* obj = *slot; | 1297 Object* obj = *slot; |
1337 if (obj->IsSharedFunctionInfo()) { | 1298 if (obj->IsSharedFunctionInfo()) { |
1338 SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(obj); | 1299 SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(obj); |
1339 MarkBit shared_mark = Marking::MarkBitFrom(shared); | 1300 MarkBit shared_mark = ObjectMarking::MarkBitFrom(shared); |
1340 MarkBit code_mark = Marking::MarkBitFrom(shared->code()); | 1301 MarkBit code_mark = ObjectMarking::MarkBitFrom(shared->code()); |
1341 collector_->MarkObject(shared->code(), code_mark); | 1302 collector_->MarkObject(shared->code(), code_mark); |
1342 collector_->MarkObject(shared, shared_mark); | 1303 collector_->MarkObject(shared, shared_mark); |
1343 } | 1304 } |
1344 } | 1305 } |
1345 | 1306 |
1346 private: | 1307 private: |
1347 MarkCompactCollector* collector_; | 1308 MarkCompactCollector* collector_; |
1348 }; | 1309 }; |
1349 | 1310 |
1350 | 1311 |
1351 void MarkCompactCollector::PrepareThreadForCodeFlushing(Isolate* isolate, | 1312 void MarkCompactCollector::PrepareThreadForCodeFlushing(Isolate* isolate, |
1352 ThreadLocalTop* top) { | 1313 ThreadLocalTop* top) { |
1353 for (StackFrameIterator it(isolate, top); !it.done(); it.Advance()) { | 1314 for (StackFrameIterator it(isolate, top); !it.done(); it.Advance()) { |
1354 // Note: for the frame that has a pending lazy deoptimization | 1315 // Note: for the frame that has a pending lazy deoptimization |
1355 // StackFrame::unchecked_code will return a non-optimized code object for | 1316 // StackFrame::unchecked_code will return a non-optimized code object for |
1356 // the outermost function and StackFrame::LookupCode will return | 1317 // the outermost function and StackFrame::LookupCode will return |
1357 // actual optimized code object. | 1318 // actual optimized code object. |
1358 StackFrame* frame = it.frame(); | 1319 StackFrame* frame = it.frame(); |
1359 Code* code = frame->unchecked_code(); | 1320 Code* code = frame->unchecked_code(); |
1360 MarkBit code_mark = Marking::MarkBitFrom(code); | 1321 MarkBit code_mark = ObjectMarking::MarkBitFrom(code); |
1361 MarkObject(code, code_mark); | 1322 MarkObject(code, code_mark); |
1362 if (frame->is_optimized()) { | 1323 if (frame->is_optimized()) { |
1363 Code* optimized_code = frame->LookupCode(); | 1324 Code* optimized_code = frame->LookupCode(); |
1364 MarkBit optimized_code_mark = Marking::MarkBitFrom(optimized_code); | 1325 MarkBit optimized_code_mark = ObjectMarking::MarkBitFrom(optimized_code); |
1365 MarkObject(optimized_code, optimized_code_mark); | 1326 MarkObject(optimized_code, optimized_code_mark); |
1366 } | 1327 } |
1367 } | 1328 } |
1368 } | 1329 } |
1369 | 1330 |
1370 | 1331 |
1371 void MarkCompactCollector::PrepareForCodeFlushing() { | 1332 void MarkCompactCollector::PrepareForCodeFlushing() { |
1372 // If code flushing is disabled, there is no need to prepare for it. | 1333 // If code flushing is disabled, there is no need to prepare for it. |
1373 if (!is_code_flushing_enabled()) return; | 1334 if (!is_code_flushing_enabled()) return; |
1374 | 1335 |
(...skipping 31 matching lines...) | |
1406 // Skip the weak next code link in a code object, which is visited in | 1367 // Skip the weak next code link in a code object, which is visited in |
1407 // ProcessTopOptimizedFrame. | 1368 // ProcessTopOptimizedFrame. |
1408 void VisitNextCodeLink(Object** p) override {} | 1369 void VisitNextCodeLink(Object** p) override {} |
1409 | 1370 |
1410 private: | 1371 private: |
1411 void MarkObjectByPointer(Object** p) { | 1372 void MarkObjectByPointer(Object** p) { |
1412 if (!(*p)->IsHeapObject()) return; | 1373 if (!(*p)->IsHeapObject()) return; |
1413 | 1374 |
1414 HeapObject* object = HeapObject::cast(*p); | 1375 HeapObject* object = HeapObject::cast(*p); |
1415 | 1376 |
1416 MarkBit mark_bit = Marking::MarkBitFrom(object); | 1377 MarkBit mark_bit = ObjectMarking::MarkBitFrom(object); |
1417 if (Marking::IsBlackOrGrey(mark_bit)) return; | 1378 if (Marking::IsBlackOrGrey(mark_bit)) return; |
1418 | 1379 |
1419 Map* map = object->map(); | 1380 Map* map = object->map(); |
1420 // Mark the object. | 1381 // Mark the object. |
1421 collector_->SetMark(object, mark_bit); | 1382 collector_->SetMark(object, mark_bit); |
1422 | 1383 |
1423 // Mark the map pointer and body, and push them on the marking stack. | 1384 // Mark the map pointer and body, and push them on the marking stack. |
1424 MarkBit map_mark = Marking::MarkBitFrom(map); | 1385 MarkBit map_mark = ObjectMarking::MarkBitFrom(map); |
1425 collector_->MarkObject(map, map_mark); | 1386 collector_->MarkObject(map, map_mark); |
1426 MarkCompactMarkingVisitor::IterateBody(map, object); | 1387 MarkCompactMarkingVisitor::IterateBody(map, object); |
1427 | 1388 |
1428 // Mark all the objects reachable from the map and body. May leave | 1389 // Mark all the objects reachable from the map and body. May leave |
1429 // overflowed objects in the heap. | 1390 // overflowed objects in the heap. |
1430 collector_->EmptyMarkingDeque(); | 1391 collector_->EmptyMarkingDeque(); |
1431 } | 1392 } |
1432 | 1393 |
1433 MarkCompactCollector* collector_; | 1394 MarkCompactCollector* collector_; |
1434 }; | 1395 }; |
1435 | 1396 |
1436 | 1397 |
1437 // Helper class for pruning the string table. | 1398 // Helper class for pruning the string table. |
1438 template <bool finalize_external_strings, bool record_slots> | 1399 template <bool finalize_external_strings, bool record_slots> |
1439 class StringTableCleaner : public ObjectVisitor { | 1400 class StringTableCleaner : public ObjectVisitor { |
1440 public: | 1401 public: |
1441 StringTableCleaner(Heap* heap, HeapObject* table) | 1402 StringTableCleaner(Heap* heap, HeapObject* table) |
1442 : heap_(heap), pointers_removed_(0), table_(table) { | 1403 : heap_(heap), pointers_removed_(0), table_(table) { |
1443 DCHECK(!record_slots || table != nullptr); | 1404 DCHECK(!record_slots || table != nullptr); |
1444 } | 1405 } |
1445 | 1406 |
1446 void VisitPointers(Object** start, Object** end) override { | 1407 void VisitPointers(Object** start, Object** end) override { |
1447 // Visit all HeapObject pointers in [start, end). | 1408 // Visit all HeapObject pointers in [start, end). |
1448 MarkCompactCollector* collector = heap_->mark_compact_collector(); | 1409 MarkCompactCollector* collector = heap_->mark_compact_collector(); |
1449 for (Object** p = start; p < end; p++) { | 1410 for (Object** p = start; p < end; p++) { |
1450 Object* o = *p; | 1411 Object* o = *p; |
1451 if (o->IsHeapObject()) { | 1412 if (o->IsHeapObject()) { |
1452 if (Marking::IsWhite(Marking::MarkBitFrom(HeapObject::cast(o)))) { | 1413 if (Marking::IsWhite(ObjectMarking::MarkBitFrom(HeapObject::cast(o)))) { |
1453 if (finalize_external_strings) { | 1414 if (finalize_external_strings) { |
1454 DCHECK(o->IsExternalString()); | 1415 DCHECK(o->IsExternalString()); |
1455 heap_->FinalizeExternalString(String::cast(*p)); | 1416 heap_->FinalizeExternalString(String::cast(*p)); |
1456 } else { | 1417 } else { |
1457 pointers_removed_++; | 1418 pointers_removed_++; |
1458 } | 1419 } |
1459 // Set the entry to the_hole_value (as deleted). | 1420 // Set the entry to the_hole_value (as deleted). |
1460 *p = heap_->the_hole_value(); | 1421 *p = heap_->the_hole_value(); |
1461 } else if (record_slots) { | 1422 } else if (record_slots) { |
1462 // StringTable contains only old space strings. | 1423 // StringTable contains only old space strings. |
(...skipping 16 matching lines...) | |
1479 }; | 1440 }; |
1480 | 1441 |
1481 typedef StringTableCleaner<false, true> InternalizedStringTableCleaner; | 1442 typedef StringTableCleaner<false, true> InternalizedStringTableCleaner; |
1482 typedef StringTableCleaner<true, false> ExternalStringTableCleaner; | 1443 typedef StringTableCleaner<true, false> ExternalStringTableCleaner; |
1483 | 1444 |
1484 // Implementation of WeakObjectRetainer for mark compact GCs. All marked objects | 1445 // Implementation of WeakObjectRetainer for mark compact GCs. All marked objects |
1485 // are retained. | 1446 // are retained. |
1486 class MarkCompactWeakObjectRetainer : public WeakObjectRetainer { | 1447 class MarkCompactWeakObjectRetainer : public WeakObjectRetainer { |
1487 public: | 1448 public: |
1488 virtual Object* RetainAs(Object* object) { | 1449 virtual Object* RetainAs(Object* object) { |
1489 MarkBit mark_bit = Marking::MarkBitFrom(HeapObject::cast(object)); | 1450 MarkBit mark_bit = ObjectMarking::MarkBitFrom(HeapObject::cast(object)); |
1490 DCHECK(!Marking::IsGrey(mark_bit)); | 1451 DCHECK(!Marking::IsGrey(mark_bit)); |
1491 if (Marking::IsBlack(mark_bit)) { | 1452 if (Marking::IsBlack(mark_bit)) { |
1492 return object; | 1453 return object; |
1493 } else if (object->IsAllocationSite() && | 1454 } else if (object->IsAllocationSite() && |
1494 !(AllocationSite::cast(object)->IsZombie())) { | 1455 !(AllocationSite::cast(object)->IsZombie())) { |
1495 // "dead" AllocationSites need to live long enough for a traversal of new | 1456 // "dead" AllocationSites need to live long enough for a traversal of new |
1496 // space. These sites get a one-time reprieve. | 1457 // space. These sites get a one-time reprieve. |
1497 AllocationSite* site = AllocationSite::cast(object); | 1458 AllocationSite* site = AllocationSite::cast(object); |
1498 site->MarkZombie(); | 1459 site->MarkZombie(); |
1499 site->GetHeap()->mark_compact_collector()->MarkAllocationSite(site); | 1460 site->GetHeap()->mark_compact_collector()->MarkAllocationSite(site); |
1500 return object; | 1461 return object; |
1501 } else { | 1462 } else { |
1502 return NULL; | 1463 return NULL; |
1503 } | 1464 } |
1504 } | 1465 } |
1505 }; | 1466 }; |
1506 | 1467 |
1507 | 1468 |
1508 // Fill the marking stack with overflowed objects returned by the given | 1469 // Fill the marking stack with overflowed objects returned by the given |
1509 // iterator. Stop when the marking stack is filled or the end of the space | 1470 // iterator. Stop when the marking stack is filled or the end of the space |
1510 // is reached, whichever comes first. | 1471 // is reached, whichever comes first. |
1511 template <class T> | 1472 template <class T> |
1512 void MarkCompactCollector::DiscoverGreyObjectsWithIterator(T* it) { | 1473 void MarkCompactCollector::DiscoverGreyObjectsWithIterator(T* it) { |
1513 // The caller should ensure that the marking stack is initially not full, | 1474 // The caller should ensure that the marking stack is initially not full, |
1514 // so that we don't waste effort pointlessly scanning for objects. | 1475 // so that we don't waste effort pointlessly scanning for objects. |
1515 DCHECK(!marking_deque()->IsFull()); | 1476 DCHECK(!marking_deque()->IsFull()); |
1516 | 1477 |
1517 Map* filler_map = heap()->one_pointer_filler_map(); | 1478 Map* filler_map = heap()->one_pointer_filler_map(); |
1518 for (HeapObject* object = it->Next(); object != NULL; object = it->Next()) { | 1479 for (HeapObject* object = it->Next(); object != NULL; object = it->Next()) { |
1519 MarkBit markbit = Marking::MarkBitFrom(object); | 1480 MarkBit markbit = ObjectMarking::MarkBitFrom(object); |
1520 if ((object->map() != filler_map) && Marking::IsGrey(markbit)) { | 1481 if ((object->map() != filler_map) && Marking::IsGrey(markbit)) { |
1521 Marking::GreyToBlack(markbit); | 1482 Marking::GreyToBlack(markbit); |
1522 PushBlack(object); | 1483 PushBlack(object); |
1523 if (marking_deque()->IsFull()) return; | 1484 if (marking_deque()->IsFull()) return; |
1524 } | 1485 } |
1525 } | 1486 } |
1526 } | 1487 } |
1527 | 1488 |
1528 void MarkCompactCollector::DiscoverGreyObjectsOnPage(MemoryChunk* p) { | 1489 void MarkCompactCollector::DiscoverGreyObjectsOnPage(MemoryChunk* p) { |
1529 DCHECK(!marking_deque()->IsFull()); | 1490 DCHECK(!marking_deque()->IsFull()); |
1530 LiveObjectIterator<kGreyObjects> it(p); | 1491 LiveObjectIterator<kGreyObjects> it(p); |
1531 HeapObject* object = NULL; | 1492 HeapObject* object = NULL; |
1532 while ((object = it.Next()) != NULL) { | 1493 while ((object = it.Next()) != NULL) { |
1533 MarkBit markbit = Marking::MarkBitFrom(object); | 1494 MarkBit markbit = ObjectMarking::MarkBitFrom(object); |
1534 DCHECK(Marking::IsGrey(markbit)); | 1495 DCHECK(Marking::IsGrey(markbit)); |
1535 Marking::GreyToBlack(markbit); | 1496 Marking::GreyToBlack(markbit); |
1536 PushBlack(object); | 1497 PushBlack(object); |
1537 if (marking_deque()->IsFull()) return; | 1498 if (marking_deque()->IsFull()) return; |
1538 } | 1499 } |
1539 } | 1500 } |
1540 | 1501 |
1541 class RecordMigratedSlotVisitor final : public ObjectVisitor { | 1502 class RecordMigratedSlotVisitor final : public ObjectVisitor { |
1542 public: | 1503 public: |
1543 explicit RecordMigratedSlotVisitor(MarkCompactCollector* collector) | 1504 explicit RecordMigratedSlotVisitor(MarkCompactCollector* collector) |
(...skipping 425 matching lines...) | |
1969 DiscoverGreyObjectsOnPage(page); | 1930 DiscoverGreyObjectsOnPage(page); |
1970 if (marking_deque()->IsFull()) return; | 1931 if (marking_deque()->IsFull()) return; |
1971 } | 1932 } |
1972 } | 1933 } |
1973 | 1934 |
1974 | 1935 |
1975 bool MarkCompactCollector::IsUnmarkedHeapObject(Object** p) { | 1936 bool MarkCompactCollector::IsUnmarkedHeapObject(Object** p) { |
1976 Object* o = *p; | 1937 Object* o = *p; |
1977 if (!o->IsHeapObject()) return false; | 1938 if (!o->IsHeapObject()) return false; |
1978 HeapObject* heap_object = HeapObject::cast(o); | 1939 HeapObject* heap_object = HeapObject::cast(o); |
1979 MarkBit mark = Marking::MarkBitFrom(heap_object); | 1940 MarkBit mark = ObjectMarking::MarkBitFrom(heap_object); |
1980 return Marking::IsWhite(mark); | 1941 return Marking::IsWhite(mark); |
1981 } | 1942 } |
1982 | 1943 |
1983 | 1944 |
1984 bool MarkCompactCollector::IsUnmarkedHeapObjectWithHeap(Heap* heap, | 1945 bool MarkCompactCollector::IsUnmarkedHeapObjectWithHeap(Heap* heap, |
1985 Object** p) { | 1946 Object** p) { |
1986 Object* o = *p; | 1947 Object* o = *p; |
1987 DCHECK(o->IsHeapObject()); | 1948 DCHECK(o->IsHeapObject()); |
1988 HeapObject* heap_object = HeapObject::cast(o); | 1949 HeapObject* heap_object = HeapObject::cast(o); |
1989 MarkBit mark = Marking::MarkBitFrom(heap_object); | 1950 MarkBit mark = ObjectMarking::MarkBitFrom(heap_object); |
1990 return Marking::IsWhite(mark); | 1951 return Marking::IsWhite(mark); |
1991 } | 1952 } |
1992 | 1953 |
1993 | 1954 |
1994 void MarkCompactCollector::MarkStringTable(RootMarkingVisitor* visitor) { | 1955 void MarkCompactCollector::MarkStringTable(RootMarkingVisitor* visitor) { |
1995 StringTable* string_table = heap()->string_table(); | 1956 StringTable* string_table = heap()->string_table(); |
1996 // Mark the string table itself. | 1957 // Mark the string table itself. |
1997 MarkBit string_table_mark = Marking::MarkBitFrom(string_table); | 1958 MarkBit string_table_mark = ObjectMarking::MarkBitFrom(string_table); |
1998 if (Marking::IsWhite(string_table_mark)) { | 1959 if (Marking::IsWhite(string_table_mark)) { |
1999 // String table could have already been marked by visiting the handles list. | 1960 // String table could have already been marked by visiting the handles list. |
2000 SetMark(string_table, string_table_mark); | 1961 SetMark(string_table, string_table_mark); |
2001 } | 1962 } |
2002 // Explicitly mark the prefix. | 1963 // Explicitly mark the prefix. |
2003 string_table->IteratePrefix(visitor); | 1964 string_table->IteratePrefix(visitor); |
2004 ProcessMarkingDeque(); | 1965 ProcessMarkingDeque(); |
2005 } | 1966 } |
2006 | 1967 |
2007 | 1968 |
2008 void MarkCompactCollector::MarkAllocationSite(AllocationSite* site) { | 1969 void MarkCompactCollector::MarkAllocationSite(AllocationSite* site) { |
2009 MarkBit mark_bit = Marking::MarkBitFrom(site); | 1970 MarkBit mark_bit = ObjectMarking::MarkBitFrom(site); |
2010 SetMark(site, mark_bit); | 1971 SetMark(site, mark_bit); |
2011 } | 1972 } |
2012 | 1973 |
2013 | 1974 |
2014 void MarkCompactCollector::MarkRoots(RootMarkingVisitor* visitor) { | 1975 void MarkCompactCollector::MarkRoots(RootMarkingVisitor* visitor) { |
2015 // Mark the heap roots including global variables, stack variables, | 1976 // Mark the heap roots including global variables, stack variables, |
2016 // etc., and all objects reachable from them. | 1977 // etc., and all objects reachable from them. |
2017 heap()->IterateStrongRoots(visitor, VISIT_ONLY_STRONG); | 1978 heap()->IterateStrongRoots(visitor, VISIT_ONLY_STRONG); |
2018 | 1979 |
2019 // Handle the string table specially. | 1980 // Handle the string table specially. |
(...skipping 46 matching lines...) | |
2066 Map* filler_map = heap_->one_pointer_filler_map(); | 2027 Map* filler_map = heap_->one_pointer_filler_map(); |
2067 while (!marking_deque_.IsEmpty()) { | 2028 while (!marking_deque_.IsEmpty()) { |
2068 HeapObject* object = marking_deque_.Pop(); | 2029 HeapObject* object = marking_deque_.Pop(); |
2069 // Explicitly skip one word fillers. Incremental markbit patterns are | 2030 // Explicitly skip one word fillers. Incremental markbit patterns are |
2070 // correct only for objects that occupy at least two words. | 2031 // correct only for objects that occupy at least two words. |
2071 Map* map = object->map(); | 2032 Map* map = object->map(); |
2072 if (map == filler_map) continue; | 2033 if (map == filler_map) continue; |
2073 | 2034 |
2074 DCHECK(object->IsHeapObject()); | 2035 DCHECK(object->IsHeapObject()); |
2075 DCHECK(heap()->Contains(object)); | 2036 DCHECK(heap()->Contains(object)); |
2076 DCHECK(!Marking::IsWhite(Marking::MarkBitFrom(object))); | 2037 DCHECK(!Marking::IsWhite(ObjectMarking::MarkBitFrom(object))); |
2077 | 2038 |
2078 MarkBit map_mark = Marking::MarkBitFrom(map); | 2039 MarkBit map_mark = ObjectMarking::MarkBitFrom(map); |
2079 MarkObject(map, map_mark); | 2040 MarkObject(map, map_mark); |
2080 | 2041 |
2081 MarkCompactMarkingVisitor::IterateBody(map, object); | 2042 MarkCompactMarkingVisitor::IterateBody(map, object); |
2082 } | 2043 } |
2083 } | 2044 } |
2084 | 2045 |
2085 | 2046 |
2086 // Sweep the heap for overflowed objects, clear their overflow bits, and | 2047 // Sweep the heap for overflowed objects, clear their overflow bits, and |
2087 // push them on the marking stack. Stop early if the marking stack fills | 2048 // push them on the marking stack. Stop early if the marking stack fills |
2088 // before sweeping completes. If sweeping completes, there are no remaining | 2049 // before sweeping completes. If sweeping completes, there are no remaining |
(...skipping 182 matching lines...) | |
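The comment immediately above describes the deque-overflow recovery path whose body is elided in this view: alternate between re-scanning the heap for grey objects and draining the marking deque until a pass completes without overflowing again. A self-contained, hypothetical sketch of that loop; the data structures and helper names are illustrative and are not the elided V8 code.

```cpp
#include <cstddef>
#include <deque>
#include <vector>

// A bounded work list that flags itself "overflowed" when it cannot take
// more entries, mirroring the marking deque's overflow behaviour.
struct BoundedDeque {
  std::deque<int> items;
  std::size_t capacity = 4;
  bool overflowed = false;

  bool Push(int obj) {
    if (items.size() == capacity) { overflowed = true; return false; }
    items.push_back(obj);
    return true;
  }
};

// Moves "grey" objects from |page| into |deque| until the deque fills up;
// objects that do not fit stay on the page for the next pass.
void DiscoverGreyObjectsOnPage(BoundedDeque* deque, std::vector<int>* page) {
  while (!page->empty() && deque->Push(page->back())) page->pop_back();
}

// Processes everything on the deque; in the real collector this can push
// new grey objects and overflow again, which the outer loop then handles.
void EmptyDeque(BoundedDeque* deque) { deque->items.clear(); }

// Called only once the deque has overflowed: rescan, drain, repeat until a
// full pass finishes without overflowing again.
void ProcessOverflowedObjects(BoundedDeque* deque,
                              std::vector<std::vector<int>>* pages) {
  while (deque->overflowed) {
    deque->overflowed = false;
    for (auto& page : *pages) {
      DiscoverGreyObjectsOnPage(deque, &page);
      if (deque->overflowed) break;  // deque full again; drain and retry
    }
    EmptyDeque(deque);
  }
}
```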
2271 wrappers_to_trace_.push_back(std::pair<void*, void*>( | 2232 wrappers_to_trace_.push_back(std::pair<void*, void*>( |
2272 reinterpret_cast<void*>(js_object->GetInternalField(0)), | 2233 reinterpret_cast<void*>(js_object->GetInternalField(0)), |
2273 reinterpret_cast<void*>(js_object->GetInternalField(1)))); | 2234 reinterpret_cast<void*>(js_object->GetInternalField(1)))); |
2274 } | 2235 } |
2275 } | 2236 } |
2276 | 2237 |
2277 void MarkCompactCollector::RegisterExternallyReferencedObject(Object** object) { | 2238 void MarkCompactCollector::RegisterExternallyReferencedObject(Object** object) { |
2278 DCHECK(in_use()); | 2239 DCHECK(in_use()); |
2279 HeapObject* heap_object = HeapObject::cast(*object); | 2240 HeapObject* heap_object = HeapObject::cast(*object); |
2280 DCHECK(heap_->Contains(heap_object)); | 2241 DCHECK(heap_->Contains(heap_object)); |
2281 MarkBit mark_bit = Marking::MarkBitFrom(heap_object); | 2242 MarkBit mark_bit = ObjectMarking::MarkBitFrom(heap_object); |
2282 MarkObject(heap_object, mark_bit); | 2243 MarkObject(heap_object, mark_bit); |
2283 } | 2244 } |
2284 | 2245 |
2285 void MarkCompactCollector::MarkLiveObjects() { | 2246 void MarkCompactCollector::MarkLiveObjects() { |
2286 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK); | 2247 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK); |
2287 double start_time = 0.0; | 2248 double start_time = 0.0; |
2288 if (FLAG_print_cumulative_gc_stat) { | 2249 if (FLAG_print_cumulative_gc_stat) { |
2289 start_time = heap_->MonotonicallyIncreasingTimeInMs(); | 2250 start_time = heap_->MonotonicallyIncreasingTimeInMs(); |
2290 } | 2251 } |
2291 // The recursive GC marker detects when it is nearing stack overflow, | 2252 // The recursive GC marker detects when it is nearing stack overflow, |
(...skipping 222 matching lines...) | |
2514 } | 2475 } |
2515 | 2476 |
2516 | 2477 |
2517 void MarkCompactCollector::ClearSimpleMapTransitions( | 2478 void MarkCompactCollector::ClearSimpleMapTransitions( |
2518 Object* non_live_map_list) { | 2479 Object* non_live_map_list) { |
2519 Object* the_hole_value = heap()->the_hole_value(); | 2480 Object* the_hole_value = heap()->the_hole_value(); |
2520 Object* weak_cell_obj = non_live_map_list; | 2481 Object* weak_cell_obj = non_live_map_list; |
2521 while (weak_cell_obj != Smi::FromInt(0)) { | 2482 while (weak_cell_obj != Smi::FromInt(0)) { |
2522 WeakCell* weak_cell = WeakCell::cast(weak_cell_obj); | 2483 WeakCell* weak_cell = WeakCell::cast(weak_cell_obj); |
2523 Map* map = Map::cast(weak_cell->value()); | 2484 Map* map = Map::cast(weak_cell->value()); |
2524 DCHECK(Marking::IsWhite(Marking::MarkBitFrom(map))); | 2485 DCHECK(Marking::IsWhite(ObjectMarking::MarkBitFrom(map))); |
2525 Object* potential_parent = map->constructor_or_backpointer(); | 2486 Object* potential_parent = map->constructor_or_backpointer(); |
2526 if (potential_parent->IsMap()) { | 2487 if (potential_parent->IsMap()) { |
2527 Map* parent = Map::cast(potential_parent); | 2488 Map* parent = Map::cast(potential_parent); |
2528 if (Marking::IsBlackOrGrey(Marking::MarkBitFrom(parent)) && | 2489 if (Marking::IsBlackOrGrey(ObjectMarking::MarkBitFrom(parent)) && |
2529 parent->raw_transitions() == weak_cell) { | 2490 parent->raw_transitions() == weak_cell) { |
2530 ClearSimpleMapTransition(parent, map); | 2491 ClearSimpleMapTransition(parent, map); |
2531 } | 2492 } |
2532 } | 2493 } |
2533 weak_cell->clear(); | 2494 weak_cell->clear(); |
2534 weak_cell_obj = weak_cell->next(); | 2495 weak_cell_obj = weak_cell->next(); |
2535 weak_cell->clear_next(the_hole_value); | 2496 weak_cell->clear_next(the_hole_value); |
2536 } | 2497 } |
2537 } | 2498 } |
2538 | 2499 |
(...skipping 19 matching lines...) | |
2558 HeapObject* undefined = heap()->undefined_value(); | 2519 HeapObject* undefined = heap()->undefined_value(); |
2559 Object* obj = heap()->encountered_transition_arrays(); | 2520 Object* obj = heap()->encountered_transition_arrays(); |
2560 while (obj != Smi::FromInt(0)) { | 2521 while (obj != Smi::FromInt(0)) { |
2561 TransitionArray* array = TransitionArray::cast(obj); | 2522 TransitionArray* array = TransitionArray::cast(obj); |
2562 int num_transitions = array->number_of_entries(); | 2523 int num_transitions = array->number_of_entries(); |
2563 DCHECK_EQ(TransitionArray::NumberOfTransitions(array), num_transitions); | 2524 DCHECK_EQ(TransitionArray::NumberOfTransitions(array), num_transitions); |
2564 if (num_transitions > 0) { | 2525 if (num_transitions > 0) { |
2565 Map* map = array->GetTarget(0); | 2526 Map* map = array->GetTarget(0); |
2566 Map* parent = Map::cast(map->constructor_or_backpointer()); | 2527 Map* parent = Map::cast(map->constructor_or_backpointer()); |
2567 bool parent_is_alive = | 2528 bool parent_is_alive = |
2568 Marking::IsBlackOrGrey(Marking::MarkBitFrom(parent)); | 2529 Marking::IsBlackOrGrey(ObjectMarking::MarkBitFrom(parent)); |
2569 DescriptorArray* descriptors = | 2530 DescriptorArray* descriptors = |
2570 parent_is_alive ? parent->instance_descriptors() : nullptr; | 2531 parent_is_alive ? parent->instance_descriptors() : nullptr; |
2571 bool descriptors_owner_died = | 2532 bool descriptors_owner_died = |
2572 CompactTransitionArray(parent, array, descriptors); | 2533 CompactTransitionArray(parent, array, descriptors); |
2573 if (descriptors_owner_died) { | 2534 if (descriptors_owner_died) { |
2574 TrimDescriptorArray(parent, descriptors); | 2535 TrimDescriptorArray(parent, descriptors); |
2575 } | 2536 } |
2576 } | 2537 } |
2577 obj = array->next_link(); | 2538 obj = array->next_link(); |
2578 array->set_next_link(undefined, SKIP_WRITE_BARRIER); | 2539 array->set_next_link(undefined, SKIP_WRITE_BARRIER); |
2579 } | 2540 } |
2580 heap()->set_encountered_transition_arrays(Smi::FromInt(0)); | 2541 heap()->set_encountered_transition_arrays(Smi::FromInt(0)); |
2581 } | 2542 } |
2582 | 2543 |
2583 | 2544 |
2584 bool MarkCompactCollector::CompactTransitionArray( | 2545 bool MarkCompactCollector::CompactTransitionArray( |
2585 Map* map, TransitionArray* transitions, DescriptorArray* descriptors) { | 2546 Map* map, TransitionArray* transitions, DescriptorArray* descriptors) { |
2586 int num_transitions = transitions->number_of_entries(); | 2547 int num_transitions = transitions->number_of_entries(); |
2587 bool descriptors_owner_died = false; | 2548 bool descriptors_owner_died = false; |
2588 int transition_index = 0; | 2549 int transition_index = 0; |
2589 // Compact all live transitions to the left. | 2550 // Compact all live transitions to the left. |
2590 for (int i = 0; i < num_transitions; ++i) { | 2551 for (int i = 0; i < num_transitions; ++i) { |
2591 Map* target = transitions->GetTarget(i); | 2552 Map* target = transitions->GetTarget(i); |
2592 DCHECK_EQ(target->constructor_or_backpointer(), map); | 2553 DCHECK_EQ(target->constructor_or_backpointer(), map); |
2593 if (Marking::IsWhite(Marking::MarkBitFrom(target))) { | 2554 if (Marking::IsWhite(ObjectMarking::MarkBitFrom(target))) { |
2594 if (descriptors != nullptr && | 2555 if (descriptors != nullptr && |
2595 target->instance_descriptors() == descriptors) { | 2556 target->instance_descriptors() == descriptors) { |
2596 descriptors_owner_died = true; | 2557 descriptors_owner_died = true; |
2597 } | 2558 } |
2598 } else { | 2559 } else { |
2599 if (i != transition_index) { | 2560 if (i != transition_index) { |
2600 Name* key = transitions->GetKey(i); | 2561 Name* key = transitions->GetKey(i); |
2601 transitions->SetKey(transition_index, key); | 2562 transitions->SetKey(transition_index, key); |
2602 Object** key_slot = transitions->GetKeySlot(transition_index); | 2563 Object** key_slot = transitions->GetKeySlot(transition_index); |
2603 RecordSlot(transitions, key_slot, key); | 2564 RecordSlot(transitions, key_slot, key); |
(...skipping 156 matching lines...) | |
2760 // Cells for new-space objects embedded in optimized code are wrapped in | 2721 // Cells for new-space objects embedded in optimized code are wrapped in |
2761 // WeakCell and put into Heap::weak_object_to_code_table. | 2722 // WeakCell and put into Heap::weak_object_to_code_table. |
2762 // Such cells do not have any strong references but we want to keep them | 2723 // Such cells do not have any strong references but we want to keep them |
2763 // alive as long as the cell value is alive. | 2724 // alive as long as the cell value is alive. |
2764 // TODO(ulan): remove this once we remove Heap::weak_object_to_code_table. | 2725 // TODO(ulan): remove this once we remove Heap::weak_object_to_code_table. |
2765 if (value->IsCell()) { | 2726 if (value->IsCell()) { |
2766 Object* cell_value = Cell::cast(value)->value(); | 2727 Object* cell_value = Cell::cast(value)->value(); |
2767 if (cell_value->IsHeapObject() && | 2728 if (cell_value->IsHeapObject() && |
2768 MarkCompactCollector::IsMarked(HeapObject::cast(cell_value))) { | 2729 MarkCompactCollector::IsMarked(HeapObject::cast(cell_value))) { |
2769 // Resurrect the cell. | 2730 // Resurrect the cell. |
2770 MarkBit mark = Marking::MarkBitFrom(value); | 2731 MarkBit mark = ObjectMarking::MarkBitFrom(value); |
2771 SetMark(value, mark); | 2732 SetMark(value, mark); |
2772 Object** slot = HeapObject::RawField(value, Cell::kValueOffset); | 2733 Object** slot = HeapObject::RawField(value, Cell::kValueOffset); |
2773 RecordSlot(value, slot, *slot); | 2734 RecordSlot(value, slot, *slot); |
2774 slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset); | 2735 slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset); |
2775 RecordSlot(weak_cell, slot, *slot); | 2736 RecordSlot(weak_cell, slot, *slot); |
2776 clear_value = false; | 2737 clear_value = false; |
2777 } | 2738 } |
2778 } | 2739 } |
2779 if (value->IsMap()) { | 2740 if (value->IsMap()) { |
2780 // The map is non-live. | 2741 // The map is non-live. |
(...skipping 214 matching lines...) | |
2995 // Find the last live object in the cell. | 2956 // Find the last live object in the cell. |
2996 unsigned int leading_zeros = | 2957 unsigned int leading_zeros = |
2997 base::bits::CountLeadingZeros32(current_cell & slot_mask); | 2958 base::bits::CountLeadingZeros32(current_cell & slot_mask); |
2998 CHECK(leading_zeros != Bitmap::kBitsPerCell); | 2959 CHECK(leading_zeros != Bitmap::kBitsPerCell); |
2999 int offset = static_cast<int>(Bitmap::kBitIndexMask - leading_zeros) - 1; | 2960 int offset = static_cast<int>(Bitmap::kBitIndexMask - leading_zeros) - 1; |
3000 | 2961 |
3001 base_address += (cell_index - base_address_cell_index) * | 2962 base_address += (cell_index - base_address_cell_index) * |
3002 Bitmap::kBitsPerCell * kPointerSize; | 2963 Bitmap::kBitsPerCell * kPointerSize; |
3003 Address address = base_address + offset * kPointerSize; | 2964 Address address = base_address + offset * kPointerSize; |
3004 HeapObject* object = HeapObject::FromAddress(address); | 2965 HeapObject* object = HeapObject::FromAddress(address); |
3005 CHECK(Marking::IsBlack(Marking::MarkBitFrom(object))); | 2966 CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(object))); |
3006 CHECK(object->address() < reinterpret_cast<Address>(slot)); | 2967 CHECK(object->address() < reinterpret_cast<Address>(slot)); |
3007 if ((object->address() + kPointerSize) <= slot && | 2968 if ((object->address() + kPointerSize) <= slot && |
3008 (object->address() + object->Size()) > slot) { | 2969 (object->address() + object->Size()) > slot) { |
3009 // If the slot is within the last found object in the cell, the slot is | 2970 // If the slot is within the last found object in the cell, the slot is |
3010 // in a live object. | 2971 // in a live object. |
3011 // Slots pointing to the first word of an object are invalid and removed. | 2972 // Slots pointing to the first word of an object are invalid and removed. |
3012 // This can happen when we move the object header while left trimming. | 2973 // This can happen when we move the object header while left trimming. |
3013 return true; | 2974 return true; |
3014 } | 2975 } |
3015 return false; | 2976 return false; |
(...skipping 391 matching lines...) | |
3407 skip_list->Clear(); | 3368 skip_list->Clear(); |
3408 } | 3369 } |
3409 | 3370 |
3410 intptr_t freed_bytes = 0; | 3371 intptr_t freed_bytes = 0; |
3411 intptr_t max_freed_bytes = 0; | 3372 intptr_t max_freed_bytes = 0; |
3412 int curr_region = -1; | 3373 int curr_region = -1; |
3413 | 3374 |
3414 LiveObjectIterator<kBlackObjects> it(p); | 3375 LiveObjectIterator<kBlackObjects> it(p); |
3415 HeapObject* object = NULL; | 3376 HeapObject* object = NULL; |
3416 while ((object = it.Next()) != NULL) { | 3377 while ((object = it.Next()) != NULL) { |
3417 DCHECK(Marking::IsBlack(Marking::MarkBitFrom(object))); | 3378 DCHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(object))); |
3418 Address free_end = object->address(); | 3379 Address free_end = object->address(); |
3419 if (free_end != free_start) { | 3380 if (free_end != free_start) { |
3420 int size = static_cast<int>(free_end - free_start); | 3381 int size = static_cast<int>(free_end - free_start); |
3421 if (free_space_mode == ZAP_FREE_SPACE) { | 3382 if (free_space_mode == ZAP_FREE_SPACE) { |
3422 memset(free_start, 0xcc, size); | 3383 memset(free_start, 0xcc, size); |
3423 } | 3384 } |
3424 if (free_list_mode == REBUILD_FREE_LIST) { | 3385 if (free_list_mode == REBUILD_FREE_LIST) { |
3425 freed_bytes = reinterpret_cast<PagedSpace*>(space)->UnaccountedFree( | 3386 freed_bytes = reinterpret_cast<PagedSpace*>(space)->UnaccountedFree( |
3426 free_start, size); | 3387 free_start, size); |
3427 max_freed_bytes = Max(freed_bytes, max_freed_bytes); | 3388 max_freed_bytes = Max(freed_bytes, max_freed_bytes); |
(...skipping 10 matching lines...) | |
3438 SkipList::RegionNumber(free_end + size - kPointerSize); | 3399 SkipList::RegionNumber(free_end + size - kPointerSize); |
3439 if (new_region_start != curr_region || new_region_end != curr_region) { | 3400 if (new_region_start != curr_region || new_region_end != curr_region) { |
3440 skip_list->AddObject(free_end, size); | 3401 skip_list->AddObject(free_end, size); |
3441 curr_region = new_region_end; | 3402 curr_region = new_region_end; |
3442 } | 3403 } |
3443 } | 3404 } |
3444 free_start = free_end + size; | 3405 free_start = free_end + size; |
3445 } | 3406 } |
3446 | 3407 |
3447 // Clear the mark bits of that page and reset live bytes count. | 3408 // Clear the mark bits of that page and reset live bytes count. |
3448 Bitmap::Clear(p); | 3409 p->ClearLiveness(); |
3449 | 3410 |
3450 if (free_start != p->area_end()) { | 3411 if (free_start != p->area_end()) { |
3451 int size = static_cast<int>(p->area_end() - free_start); | 3412 int size = static_cast<int>(p->area_end() - free_start); |
3452 if (free_space_mode == ZAP_FREE_SPACE) { | 3413 if (free_space_mode == ZAP_FREE_SPACE) { |
3453 memset(free_start, 0xcc, size); | 3414 memset(free_start, 0xcc, size); |
3454 } | 3415 } |
3455 if (free_list_mode == REBUILD_FREE_LIST) { | 3416 if (free_list_mode == REBUILD_FREE_LIST) { |
3456 freed_bytes = reinterpret_cast<PagedSpace*>(space)->UnaccountedFree( | 3417 freed_bytes = reinterpret_cast<PagedSpace*>(space)->UnaccountedFree( |
3457 free_start, size); | 3418 free_start, size); |
3458 max_freed_bytes = Max(freed_bytes, max_freed_bytes); | 3419 max_freed_bytes = Max(freed_bytes, max_freed_bytes); |
(...skipping 12 matching lines...) | |
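
The sweep above walks the page's black objects in address order and treats each gap between the running free_start and the next live object (plus the tail up to area_end) as free space, optionally zapping it with 0xcc. A simplified sketch of that gap-walking pattern, with a sorted vector of (address, size) pairs standing in for the live-object iterator and the free-list bookkeeping elided (all names are illustrative):

    #include <cstdint>
    #include <cstring>
    #include <utility>
    #include <vector>

    // Walk live objects in address order and treat the gaps between them as
    // free space. live_objects holds (start address, size) pairs sorted by
    // address; area_start/area_end bound the swept page. Returns the largest
    // freed gap, mirroring max_freed_bytes above; free-list adds are elided.
    static size_t SweepGaps(
        uintptr_t area_start, uintptr_t area_end,
        const std::vector<std::pair<uintptr_t, size_t>>& live_objects,
        bool zap_free_space) {
      size_t max_freed = 0;
      uintptr_t free_start = area_start;
      for (const auto& object : live_objects) {
        uintptr_t free_end = object.first;
        if (free_end != free_start) {
          size_t size = free_end - free_start;
          if (zap_free_space)
            std::memset(reinterpret_cast<void*>(free_start), 0xcc, size);
          if (size > max_freed) max_freed = size;
        }
        free_start = free_end + object.second;  // skip over the live object
      }
      if (free_start != area_end) {  // tail gap after the last live object
        size_t size = area_end - free_start;
        if (zap_free_space)
          std::memset(reinterpret_cast<void*>(free_start), 0xcc, size);
        if (size > max_freed) max_freed = size;
      }
      return max_freed;
    }
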
3471 Address start = code->instruction_start(); | 3432 Address start = code->instruction_start(); |
3472 Address end = code->address() + code->Size(); | 3433 Address end = code->address() + code->Size(); |
3473 | 3434 |
3474 RememberedSet<OLD_TO_NEW>::RemoveRangeTyped(page, start, end); | 3435 RememberedSet<OLD_TO_NEW>::RemoveRangeTyped(page, start, end); |
3475 | 3436 |
3476 if (heap_->incremental_marking()->IsCompacting() && | 3437 if (heap_->incremental_marking()->IsCompacting() && |
3477 !ShouldSkipEvacuationSlotRecording(code)) { | 3438 !ShouldSkipEvacuationSlotRecording(code)) { |
3478 DCHECK(compacting_); | 3439 DCHECK(compacting_); |
3479 | 3440 |
3480 // If the object is white then no slots were recorded on it yet. | 3441 // If the object is white then no slots were recorded on it yet. |
3481 MarkBit mark_bit = Marking::MarkBitFrom(code); | 3442 MarkBit mark_bit = ObjectMarking::MarkBitFrom(code); |
3482 if (Marking::IsWhite(mark_bit)) return; | 3443 if (Marking::IsWhite(mark_bit)) return; |
3483 | 3444 |
3484 // Ignore all slots that might have been recorded in the body of the | 3445 // Ignore all slots that might have been recorded in the body of the |
3485 // deoptimized code object. Assumption: no slots will be recorded for | 3446 // deoptimized code object. Assumption: no slots will be recorded for |
3486 // this object after invalidating it. | 3447 // this object after invalidating it. |
3487 RememberedSet<OLD_TO_OLD>::RemoveRangeTyped(page, start, end); | 3448 RememberedSet<OLD_TO_OLD>::RemoveRangeTyped(page, start, end); |
3488 } | 3449 } |
3489 } | 3450 } |
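
The invalidation path above only removes recorded OLD_TO_OLD slots when slots could have been recorded in the first place: the collector must be compacting and the code object must not be white. A hedged sketch of just that gating decision (simplified types, not V8's API; the ShouldSkipEvacuationSlotRecording check is omitted):

    // Sketch of the gating above: recorded slots in a deoptimized code object
    // are only dropped when the collector is compacting and the code object is
    // not white (a white object has had no slots recorded on it yet).
    // Color is a simplified stand-in for V8's marking states.
    enum class Color { kWhite, kGrey, kBlack };

    static bool ShouldDropRecordedSlots(bool compacting, Color code_color) {
      if (!compacting) return false;       // no compaction => no recorded slots
      return code_color != Color::kWhite;  // white => nothing recorded yet
    }
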
3490 | 3451 |
3491 | 3452 |
3492 // Return true if the given code is deoptimized or will be deoptimized. | 3453 // Return true if the given code is deoptimized or will be deoptimized. |
3493 bool MarkCompactCollector::WillBeDeoptimized(Code* code) { | 3454 bool MarkCompactCollector::WillBeDeoptimized(Code* code) { |
3494 return code->is_optimized_code() && code->marked_for_deoptimization(); | 3455 return code->is_optimized_code() && code->marked_for_deoptimization(); |
3495 } | 3456 } |
3496 | 3457 |
3497 | 3458 |
3498 #ifdef VERIFY_HEAP | 3459 #ifdef VERIFY_HEAP |
3499 static void VerifyAllBlackObjects(MemoryChunk* page) { | 3460 static void VerifyAllBlackObjects(MemoryChunk* page) { |
3500 LiveObjectIterator<kAllLiveObjects> it(page); | 3461 LiveObjectIterator<kAllLiveObjects> it(page); |
3501 HeapObject* object = NULL; | 3462 HeapObject* object = NULL; |
3502 while ((object = it.Next()) != NULL) { | 3463 while ((object = it.Next()) != NULL) { |
3503 CHECK(Marking::IsBlack(Marking::MarkBitFrom(object))); | 3464 CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(object))); |
3504 } | 3465 } |
3505 } | 3466 } |
3506 #endif // VERIFY_HEAP | 3467 #endif // VERIFY_HEAP |
3507 | 3468 |
3508 template <class Visitor> | 3469 template <class Visitor> |
3509 bool MarkCompactCollector::VisitLiveObjects(MemoryChunk* page, Visitor* visitor, | 3470 bool MarkCompactCollector::VisitLiveObjects(MemoryChunk* page, Visitor* visitor, |
3510 IterationMode mode) { | 3471 IterationMode mode) { |
3511 #ifdef VERIFY_HEAP | 3472 #ifdef VERIFY_HEAP |
3512 VerifyAllBlackObjects(page); | 3473 VerifyAllBlackObjects(page); |
3513 #endif // VERIFY_HEAP | 3474 #endif // VERIFY_HEAP |
3514 | 3475 |
3515 LiveObjectIterator<kBlackObjects> it(page); | 3476 LiveObjectIterator<kBlackObjects> it(page); |
3516 HeapObject* object = nullptr; | 3477 HeapObject* object = nullptr; |
3517 while ((object = it.Next()) != nullptr) { | 3478 while ((object = it.Next()) != nullptr) { |
3518 DCHECK(Marking::IsBlack(Marking::MarkBitFrom(object))); | 3479 DCHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(object))); |
3519 if (!visitor->Visit(object)) { | 3480 if (!visitor->Visit(object)) { |
3520 if (mode == kClearMarkbits) { | 3481 if (mode == kClearMarkbits) { |
3521 page->markbits()->ClearRange( | 3482 page->markbits()->ClearRange( |
3522 page->AddressToMarkbitIndex(page->area_start()), | 3483 page->AddressToMarkbitIndex(page->area_start()), |
3523 page->AddressToMarkbitIndex(object->address())); | 3484 page->AddressToMarkbitIndex(object->address())); |
3524 if (page->old_to_new_slots() != nullptr) { | 3485 if (page->old_to_new_slots() != nullptr) { |
3525 page->old_to_new_slots()->RemoveRange( | 3486 page->old_to_new_slots()->RemoveRange( |
3526 0, static_cast<int>(object->address() - page->address())); | 3487 0, static_cast<int>(object->address() - page->address())); |
3527 } | 3488 } |
3528 if (page->typed_old_to_new_slots() != nullptr) { | 3489 if (page->typed_old_to_new_slots() != nullptr) { |
3529 RememberedSet<OLD_TO_NEW>::RemoveRangeTyped(page, page->address(), | 3490 RememberedSet<OLD_TO_NEW>::RemoveRangeTyped(page, page->address(), |
3530 object->address()); | 3491 object->address()); |
3531 } | 3492 } |
3532 RecomputeLiveBytes(page); | 3493 RecomputeLiveBytes(page); |
3533 } | 3494 } |
3534 return false; | 3495 return false; |
3535 } | 3496 } |
3536 } | 3497 } |
3537 if (mode == kClearMarkbits) { | 3498 if (mode == kClearMarkbits) { |
3538 Bitmap::Clear(page); | 3499 page->ClearLiveness(); |
3539 } | 3500 } |
3540 return true; | 3501 return true; |
3541 } | 3502 } |
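
When a visitor rejects an object, the code above rolls back only the already-visited prefix of the page: mark bits and old-to-new slots from the page start up to the failing object are cleared via AddressToMarkbitIndex and ClearRange, and live bytes are recomputed. A toy bitmap illustrating that address-to-bit-index mapping and range clear (sizes and types are simplified stand-ins, not V8's Bitmap class):

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    // Toy mark bitmap with one bit per pointer-sized word, illustrating the
    // AddressToMarkbitIndex/ClearRange pattern used above when a visitor
    // aborts and the already-processed prefix of the page is unmarked again.
    struct ToyMarkBitmap {
      static constexpr size_t kPointerSize = sizeof(void*);
      uintptr_t page_start;
      std::vector<uint32_t> cells;  // 32 mark bits per cell

      size_t AddressToMarkbitIndex(uintptr_t address) const {
        return (address - page_start) / kPointerSize;
      }

      // Clears mark bits in [start_index, end_index).
      void ClearRange(size_t start_index, size_t end_index) {
        for (size_t i = start_index; i < end_index; i++) {
          cells[i / 32] &= ~(uint32_t{1} << (i % 32));
        }
      }
    };
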
3542 | 3503 |
3543 | 3504 |
3544 void MarkCompactCollector::RecomputeLiveBytes(MemoryChunk* page) { | 3505 void MarkCompactCollector::RecomputeLiveBytes(MemoryChunk* page) { |
3545 LiveObjectIterator<kBlackObjects> it(page); | 3506 LiveObjectIterator<kBlackObjects> it(page); |
3546 int new_live_size = 0; | 3507 int new_live_size = 0; |
3547 HeapObject* object = nullptr; | 3508 HeapObject* object = nullptr; |
3548 while ((object = it.Next()) != nullptr) { | 3509 while ((object = it.Next()) != nullptr) { |
3549 new_live_size += object->Size(); | 3510 new_live_size += object->Size(); |
3550 } | 3511 } |
3551 page->SetLiveBytes(new_live_size); | 3512 page->SetLiveBytes(new_live_size); |
3552 } | 3513 } |
3553 | 3514 |
3554 | 3515 |
3555 void MarkCompactCollector::VisitLiveObjectsBody(Page* page, | 3516 void MarkCompactCollector::VisitLiveObjectsBody(Page* page, |
3556 ObjectVisitor* visitor) { | 3517 ObjectVisitor* visitor) { |
3557 #ifdef VERIFY_HEAP | 3518 #ifdef VERIFY_HEAP |
3558 VerifyAllBlackObjects(page); | 3519 VerifyAllBlackObjects(page); |
3559 #endif // VERIFY_HEAP | 3520 #endif // VERIFY_HEAP |
3560 | 3521 |
3561 LiveObjectIterator<kBlackObjects> it(page); | 3522 LiveObjectIterator<kBlackObjects> it(page); |
3562 HeapObject* object = NULL; | 3523 HeapObject* object = NULL; |
3563 while ((object = it.Next()) != NULL) { | 3524 while ((object = it.Next()) != NULL) { |
3564 DCHECK(Marking::IsBlack(Marking::MarkBitFrom(object))); | 3525 DCHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(object))); |
3565 Map* map = object->synchronized_map(); | 3526 Map* map = object->synchronized_map(); |
3566 int size = object->SizeFromMap(map); | 3527 int size = object->SizeFromMap(map); |
3567 object->IterateBody(map->instance_type(), size, visitor); | 3528 object->IterateBody(map->instance_type(), size, visitor); |
3568 } | 3529 } |
3569 } | 3530 } |
3570 | 3531 |
3571 void MarkCompactCollector::Sweeper::AddSweptPageSafe(PagedSpace* space, | 3532 void MarkCompactCollector::Sweeper::AddSweptPageSafe(PagedSpace* space, |
3572 Page* page) { | 3533 Page* page) { |
3573 base::LockGuard<base::Mutex> guard(&mutex_); | 3534 base::LockGuard<base::Mutex> guard(&mutex_); |
3574 swept_list_[space->identity()].Add(page); | 3535 swept_list_[space->identity()].Add(page); |
(...skipping 133 matching lines...) | |
3708 // just freed free space object. | 3669 // just freed free space object. |
3709 if (heap->InToSpace(*slot)) { | 3670 if (heap->InToSpace(*slot)) { |
3710 return KEEP_SLOT; | 3671 return KEEP_SLOT; |
3711 } | 3672 } |
3712 } else if (heap->InToSpace(*slot)) { | 3673 } else if (heap->InToSpace(*slot)) { |
3713 // Slots can point to "to" space if the page has been moved, or if the | 3674 // Slots can point to "to" space if the page has been moved, or if the |
3714 // slot has been recorded multiple times in the remembered set. Since | 3675 // slot has been recorded multiple times in the remembered set. Since |
3716 // there is no forwarding information present, we need to check the | 3677 // there is no forwarding information present, we need to check the |
3716 // markbits to determine liveness. | 3677 // markbits to determine liveness. |
3717 if (Marking::IsBlack( | 3678 if (Marking::IsBlack( |
3718 Marking::MarkBitFrom(reinterpret_cast<HeapObject*>(*slot)))) | 3679 ObjectMarking::MarkBitFrom(reinterpret_cast<HeapObject*>(*slot)))) |
3719 return KEEP_SLOT; | 3680 return KEEP_SLOT; |
3720 } else { | 3681 } else { |
3721 DCHECK(!heap->InNewSpace(*slot)); | 3682 DCHECK(!heap->InNewSpace(*slot)); |
3722 } | 3683 } |
3723 return REMOVE_SLOT; | 3684 return REMOVE_SLOT; |
3724 } | 3685 } |
3725 }; | 3686 }; |
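
For a slot that already points into "to" space, the visitor above has no forwarding information and falls back to the mark bits. A small sketch of the visible cases of the KEEP_SLOT/REMOVE_SLOT decision (the enum and helper are stand-ins, not V8's types; forwarding of from-space targets happens before this point and is not modelled here):

    // Stand-in for the decision above: a target already in "to" space is kept
    // only if its mark bit is black; a target outside new space means the
    // remembered-set entry is stale and gets removed.
    enum SlotAction { KEEP_SLOT, REMOVE_SLOT };

    static SlotAction ClassifyOldToNewSlot(bool target_in_to_space,
                                           bool target_is_black) {
      if (target_in_to_space) {
        // No forwarding information here, so liveness comes from the mark bits.
        return target_is_black ? KEEP_SLOT : REMOVE_SLOT;
      }
      // Target is not in new space: drop the entry.
      return REMOVE_SLOT;
    }
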
3726 | 3687 |
3727 int NumberOfPointerUpdateTasks(int pages) { | 3688 int NumberOfPointerUpdateTasks(int pages) { |
3728 if (!FLAG_parallel_pointer_update) return 1; | 3689 if (!FLAG_parallel_pointer_update) return 1; |
(...skipping 225 matching lines...) | |
3954 | 3915 |
3955 if (p->IsEvacuationCandidate()) { | 3916 if (p->IsEvacuationCandidate()) { |
3956 // Will be processed in EvacuateNewSpaceAndCandidates. | 3917 // Will be processed in EvacuateNewSpaceAndCandidates. |
3957 DCHECK(evacuation_candidates_.length() > 0); | 3918 DCHECK(evacuation_candidates_.length() > 0); |
3958 continue; | 3919 continue; |
3959 } | 3920 } |
3960 | 3921 |
3961 // We cannot sweep black pages, since all mark bits are set for these | 3922 // We cannot sweep black pages, since all mark bits are set for these |
3962 // pages. | 3923 // pages. |
3963 if (p->IsFlagSet(Page::BLACK_PAGE)) { | 3924 if (p->IsFlagSet(Page::BLACK_PAGE)) { |
3964 Bitmap::Clear(p); | 3925 p->ClearLiveness(); |
3965 p->concurrent_sweeping_state().SetValue(Page::kSweepingDone); | 3926 p->concurrent_sweeping_state().SetValue(Page::kSweepingDone); |
3966 p->ClearFlag(Page::BLACK_PAGE); | 3927 p->ClearFlag(Page::BLACK_PAGE); |
3967 // Area above the high watermark is free. | 3928 // Area above the high watermark is free. |
3968 Address free_start = p->HighWaterMark(); | 3929 Address free_start = p->HighWaterMark(); |
3969 // Check if the space top was in this page, which means that the | 3930 // Check if the space top was in this page, which means that the |
3970 // high watermark is not up-to-date. | 3931 // high watermark is not up-to-date. |
3971 if (free_start < space_top && space_top <= p->area_end()) { | 3932 if (free_start < space_top && space_top <= p->area_end()) { |
3972 free_start = space_top; | 3933 free_start = space_top; |
3973 } | 3934 } |
3974 int size = static_cast<int>(p->area_end() - free_start); | 3935 int size = static_cast<int>(p->area_end() - free_start); |
(...skipping 98 matching lines...) | |
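
Black pages cannot be swept by the gap walk above, so their liveness is cleared and only the area above the page's high watermark (or above the space's current allocation top, if it lies on this page) is handed back as free space. A small sketch of that boundary computation, using plain integers for V8's Address type (illustrative, not V8 API):

    #include <cstddef>
    #include <cstdint>

    // Free region at the end of a black page: everything above the high
    // watermark is free, unless the space's allocation top lies on this page
    // above the watermark, in which case the top is the real boundary.
    static size_t FreeAreaOnBlackPage(uintptr_t high_water_mark,
                                      uintptr_t area_end, uintptr_t space_top) {
      uintptr_t free_start = high_water_mark;
      if (free_start < space_top && space_top <= area_end) {
        // The high watermark is stale: allocation continued past it here.
        free_start = space_top;
      }
      return static_cast<size_t>(area_end - free_start);
    }
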
4073 } | 4034 } |
4074 } | 4035 } |
4075 | 4036 |
4076 | 4037 |
4077 void MarkCompactCollector::RecordCodeTargetPatch(Address pc, Code* target) { | 4038 void MarkCompactCollector::RecordCodeTargetPatch(Address pc, Code* target) { |
4078 DCHECK(heap()->gc_state() == Heap::MARK_COMPACT); | 4039 DCHECK(heap()->gc_state() == Heap::MARK_COMPACT); |
4079 if (is_compacting()) { | 4040 if (is_compacting()) { |
4080 Code* host = | 4041 Code* host = |
4081 isolate()->inner_pointer_to_code_cache()->GcSafeFindCodeForInnerPointer( | 4042 isolate()->inner_pointer_to_code_cache()->GcSafeFindCodeForInnerPointer( |
4082 pc); | 4043 pc); |
4083 MarkBit mark_bit = Marking::MarkBitFrom(host); | 4044 MarkBit mark_bit = ObjectMarking::MarkBitFrom(host); |
4084 if (Marking::IsBlack(mark_bit)) { | 4045 if (Marking::IsBlack(mark_bit)) { |
4085 RelocInfo rinfo(isolate(), pc, RelocInfo::CODE_TARGET, 0, host); | 4046 RelocInfo rinfo(isolate(), pc, RelocInfo::CODE_TARGET, 0, host); |
4086 // The target is always in old space, so we don't have to record the slot in | 4047 // The target is always in old space, so we don't have to record the slot in |
4087 // the old-to-new remembered set. | 4048 // the old-to-new remembered set. |
4088 DCHECK(!heap()->InNewSpace(target)); | 4049 DCHECK(!heap()->InNewSpace(target)); |
4089 RecordRelocSlot(host, &rinfo, target); | 4050 RecordRelocSlot(host, &rinfo, target); |
4090 } | 4051 } |
4091 } | 4052 } |
4092 } | 4053 } |
4093 | 4054 |
4094 } // namespace internal | 4055 } // namespace internal |
4095 } // namespace v8 | 4056 } // namespace v8 |