OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/heap/mark-compact.h" | 5 #include "src/heap/mark-compact.h" |
6 | 6 |
7 #include "src/base/atomicops.h" | 7 #include "src/base/atomicops.h" |
8 #include "src/base/bits.h" | 8 #include "src/base/bits.h" |
9 #include "src/base/sys-info.h" | 9 #include "src/base/sys-info.h" |
10 #include "src/code-stubs.h" | 10 #include "src/code-stubs.h" |
(...skipping 86 matching lines...)
97 // One-word fillers at the end of a black area can be grey. | 97 // One-word fillers at the end of a black area can be grey. |
98 if (ObjectMarking::IsBlackOrGrey(object, state) && | 98 if (ObjectMarking::IsBlackOrGrey(object, state) && |
99 object->map() != heap_->one_pointer_filler_map()) { | 99 object->map() != heap_->one_pointer_filler_map()) { |
100 CHECK(ObjectMarking::IsBlack(object, state)); | 100 CHECK(ObjectMarking::IsBlack(object, state)); |
101 CHECK(current >= next_object_must_be_here_or_later); | 101 CHECK(current >= next_object_must_be_here_or_later); |
102 object->Iterate(this); | 102 object->Iterate(this); |
103 next_object_must_be_here_or_later = current + object->Size(); | 103 next_object_must_be_here_or_later = current + object->Size(); |
104 // The object is either part of a black area created by black allocation | 104 // The object is either part of a black area created by black allocation |
105 // or a regular black object. | 105 // or a regular black object. |
106 CHECK( | 106 CHECK( |
107 state.bitmap->AllBitsSetInRange( | 107 state.bitmap()->AllBitsSetInRange( |
108 page.AddressToMarkbitIndex(current), | 108 page.AddressToMarkbitIndex(current), |
109 page.AddressToMarkbitIndex(next_object_must_be_here_or_later)) || | 109 page.AddressToMarkbitIndex(next_object_must_be_here_or_later)) || |
110 state.bitmap->AllBitsClearInRange( | 110 state.bitmap()->AllBitsClearInRange( |
111 page.AddressToMarkbitIndex(current + kPointerSize * 2), | 111 page.AddressToMarkbitIndex(current + kPointerSize * 2), |
112 page.AddressToMarkbitIndex(next_object_must_be_here_or_later))); | 112 page.AddressToMarkbitIndex(next_object_must_be_here_or_later))); |
113 current = next_object_must_be_here_or_later; | 113 current = next_object_must_be_here_or_later; |
114 } else { | 114 } else { |
115 current += kPointerSize; | 115 current += kPointerSize; |
116 } | 116 } |
117 } | 117 } |
118 } | 118 } |
119 | 119 |
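The CHECK in this hunk encodes the black-area invariant: for a black object, either every markbit covering its body is set (the object sits in a black-allocation area), or everything past its first two markbits is clear (a regular black object). A rough standalone model of the two range queries, assuming a simplified Bitmap stand-in (only the AllBitsSetInRange/AllBitsClearInRange names mirror the calls above, not V8's real bitmap layout):

    #include <cassert>
    #include <cstddef>
    #include <vector>

    // Simplified markbit bitmap: one bit per pointer-sized word.
    class Bitmap {
     public:
      explicit Bitmap(size_t bits) : bits_(bits, false) {}
      void Set(size_t i) { bits_[i] = true; }
      // True if every bit in [start, end) is set.
      bool AllBitsSetInRange(size_t start, size_t end) const {
        for (size_t i = start; i < end; i++) {
          if (!bits_[i]) return false;
        }
        return true;
      }
      // True if every bit in [start, end) is clear.
      bool AllBitsClearInRange(size_t start, size_t end) const {
        for (size_t i = start; i < end; i++) {
          if (bits_[i]) return false;
        }
        return true;
      }
     private:
      std::vector<bool> bits_;
    };

    int main() {
      Bitmap bm(16);
      bm.Set(0);
      bm.Set(1);  // a regular black object: two leading bits set, rest clear
      assert(bm.AllBitsClearInRange(2, 16));
      assert(!bm.AllBitsSetInRange(0, 16));
      return 0;
    }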
120 void MarkingVerifier::VerifyMarking(NewSpace* space) { | 120 void MarkingVerifier::VerifyMarking(NewSpace* space) { |
(...skipping 31 matching lines...)
152 LargeObjectIterator it(heap_->lo_space()); | 152 LargeObjectIterator it(heap_->lo_space()); |
153 for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) { | 153 for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) { |
154 if (ObjectMarking::IsBlackOrGrey(obj, marking_state(obj))) { | 154 if (ObjectMarking::IsBlackOrGrey(obj, marking_state(obj))) { |
155 obj->Iterate(this); | 155 obj->Iterate(this); |
156 } | 156 } |
157 } | 157 } |
158 } | 158 } |
159 | 159 |
160 protected: | 160 protected: |
161 MarkingState marking_state(MemoryChunk* chunk) override { | 161 MarkingState marking_state(MemoryChunk* chunk) override { |
162 return MarkingState::FromPageInternal(chunk); | 162 return MarkingState::Internal(chunk); |
163 } | 163 } |
164 | 164 |
165 MarkingState marking_state(HeapObject* object) { | 165 MarkingState marking_state(HeapObject* object) { |
166 return marking_state(Page::FromAddress(object->address())); | 166 return MarkingState::Internal(object); |
167 } | 167 } |
168 | 168 |
169 void VisitPointers(Object** start, Object** end) override { | 169 void VisitPointers(Object** start, Object** end) override { |
170 for (Object** current = start; current < end; current++) { | 170 for (Object** current = start; current < end; current++) { |
171 if ((*current)->IsHeapObject()) { | 171 if ((*current)->IsHeapObject()) { |
172 HeapObject* object = HeapObject::cast(*current); | 172 HeapObject* object = HeapObject::cast(*current); |
173 CHECK(ObjectMarking::IsBlackOrGrey(object, marking_state(object))); | 173 CHECK(ObjectMarking::IsBlackOrGrey(object, marking_state(object))); |
174 } | 174 } |
175 } | 175 } |
176 } | 176 } |
(...skipping 13 matching lines...)
190 ObjectVisitor::VisitCell(rinfo); | 190 ObjectVisitor::VisitCell(rinfo); |
191 } | 191 } |
192 } | 192 } |
193 }; | 193 }; |
194 | 194 |
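Most of this patch threads an explicit MarkingState through ObjectMarking and the verifiers instead of reaching for page->markbits()/LiveBytes() directly, so the full collector (MarkingState::Internal) and the minor collector (MarkingState::External) can keep separate markbits and live-byte counters for the same chunk. A minimal sketch of that shape, assuming a hypothetical side table for the external state (the Chunk/ExternalData layout and g_external are illustrative, not V8's actual storage):

    #include <cstdint>
    #include <unordered_map>

    struct Bitmap {};  // opaque stand-in

    struct Chunk {
      Bitmap markbits;           // on-chunk state used by the full GC
      intptr_t live_bytes = 0;
    };

    struct ExternalData {        // side state used by the minor GC
      Bitmap markbits;
      intptr_t live_bytes = 0;
    };
    static std::unordered_map<Chunk*, ExternalData> g_external;

    // Value type bundling "which bitmap / which live-bytes counter", so
    // shared code never hard-codes chunk->markbits or chunk->live_bytes.
    class MarkingState {
     public:
      static MarkingState Internal(Chunk* c) {
        return MarkingState(&c->markbits, &c->live_bytes);
      }
      static MarkingState External(Chunk* c) {
        ExternalData& d = g_external[c];
        return MarkingState(&d.markbits, &d.live_bytes);
      }
      Bitmap* bitmap() const { return bitmap_; }
      intptr_t live_bytes() const { return *live_bytes_; }
     private:
      MarkingState(Bitmap* b, intptr_t* lb) : bitmap_(b), live_bytes_(lb) {}
      Bitmap* bitmap_;
      intptr_t* live_bytes_;
    };

With this in place the two verifiers differ only in their marking_state() overrides and share the rest of the traversal, and call sites such as VerifyMarkbitsAreClean below read the bitmap and the counter through the same value object.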
195 class YoungGenerationMarkingVerifier : public MarkingVerifier { | 195 class YoungGenerationMarkingVerifier : public MarkingVerifier { |
196 public: | 196 public: |
197 explicit YoungGenerationMarkingVerifier(Heap* heap) : MarkingVerifier(heap) {} | 197 explicit YoungGenerationMarkingVerifier(Heap* heap) : MarkingVerifier(heap) {} |
198 | 198 |
199 MarkingState marking_state(MemoryChunk* chunk) override { | 199 MarkingState marking_state(MemoryChunk* chunk) override { |
200 return MarkingState::FromPageExternal(chunk); | 200 return MarkingState::External(chunk); |
201 } | 201 } |
202 | 202 |
203 MarkingState marking_state(HeapObject* object) { | 203 MarkingState marking_state(HeapObject* object) { |
204 return marking_state(Page::FromAddress(object->address())); | 204 return MarkingState::External(object); |
205 } | 205 } |
206 | 206 |
207 void Run() override { | 207 void Run() override { |
208 VerifyRoots(VISIT_ALL_IN_SCAVENGE); | 208 VerifyRoots(VISIT_ALL_IN_SCAVENGE); |
209 VerifyMarking(heap_->new_space()); | 209 VerifyMarking(heap_->new_space()); |
210 } | 210 } |
211 | 211 |
212 void VisitPointers(Object** start, Object** end) override { | 212 void VisitPointers(Object** start, Object** end) override { |
213 for (Object** current = start; current < end; current++) { | 213 for (Object** current = start; current < end; current++) { |
214 if ((*current)->IsHeapObject()) { | 214 if ((*current)->IsHeapObject()) { |
(...skipping 157 matching lines...)
372 StartSweepSpaces(); | 372 StartSweepSpaces(); |
373 | 373 |
374 EvacuateNewSpaceAndCandidates(); | 374 EvacuateNewSpaceAndCandidates(); |
375 | 375 |
376 Finish(); | 376 Finish(); |
377 } | 377 } |
378 | 378 |
379 #ifdef VERIFY_HEAP | 379 #ifdef VERIFY_HEAP |
380 void MarkCompactCollector::VerifyMarkbitsAreClean(PagedSpace* space) { | 380 void MarkCompactCollector::VerifyMarkbitsAreClean(PagedSpace* space) { |
381 for (Page* p : *space) { | 381 for (Page* p : *space) { |
382 CHECK(p->markbits()->IsClean()); | 382 const MarkingState state = MarkingState::Internal(p); |
383 CHECK_EQ(0, p->LiveBytes()); | 383 CHECK(state.bitmap()->IsClean()); |
| 384 CHECK_EQ(0, state.live_bytes()); |
384 } | 385 } |
385 } | 386 } |
386 | 387 |
387 | 388 |
388 void MarkCompactCollector::VerifyMarkbitsAreClean(NewSpace* space) { | 389 void MarkCompactCollector::VerifyMarkbitsAreClean(NewSpace* space) { |
389 for (Page* p : PageRange(space->bottom(), space->top())) { | 390 for (Page* p : PageRange(space->bottom(), space->top())) { |
390 CHECK(p->markbits()->IsClean()); | 391 const MarkingState state = MarkingState::Internal(p); |
391 CHECK_EQ(0, p->LiveBytes()); | 392 CHECK(state.bitmap()->IsClean()); |
| 393 CHECK_EQ(0, state.live_bytes()); |
392 } | 394 } |
393 } | 395 } |
394 | 396 |
395 | 397 |
396 void MarkCompactCollector::VerifyMarkbitsAreClean() { | 398 void MarkCompactCollector::VerifyMarkbitsAreClean() { |
397 VerifyMarkbitsAreClean(heap_->old_space()); | 399 VerifyMarkbitsAreClean(heap_->old_space()); |
398 VerifyMarkbitsAreClean(heap_->code_space()); | 400 VerifyMarkbitsAreClean(heap_->code_space()); |
399 VerifyMarkbitsAreClean(heap_->map_space()); | 401 VerifyMarkbitsAreClean(heap_->map_space()); |
400 VerifyMarkbitsAreClean(heap_->new_space()); | 402 VerifyMarkbitsAreClean(heap_->new_space()); |
401 | 403 |
402 LargeObjectIterator it(heap_->lo_space()); | 404 LargeObjectIterator it(heap_->lo_space()); |
403 for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) { | 405 for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) { |
404 CHECK(ObjectMarking::IsWhite(obj)); | 406 CHECK(ObjectMarking::IsWhite(obj, MarkingState::Internal(obj))); |
405 CHECK_EQ(0, Page::FromAddress(obj->address())->LiveBytes()); | 407 CHECK_EQ(0, MarkingState::Internal(obj).live_bytes()); |
406 } | 408 } |
407 } | 409 } |
408 | 410 |
409 void MarkCompactCollector::VerifyWeakEmbeddedObjectsInCode() { | 411 void MarkCompactCollector::VerifyWeakEmbeddedObjectsInCode() { |
410 HeapObjectIterator code_iterator(heap()->code_space()); | 412 HeapObjectIterator code_iterator(heap()->code_space()); |
411 for (HeapObject* obj = code_iterator.Next(); obj != NULL; | 413 for (HeapObject* obj = code_iterator.Next(); obj != NULL; |
412 obj = code_iterator.Next()) { | 414 obj = code_iterator.Next()) { |
413 Code* code = Code::cast(obj); | 415 Code* code = Code::cast(obj); |
414 if (!code->is_optimized_code()) continue; | 416 if (!code->is_optimized_code()) continue; |
415 if (WillBeDeoptimized(code)) continue; | 417 if (WillBeDeoptimized(code)) continue; |
416 code->VerifyEmbeddedObjectsDependency(); | 418 code->VerifyEmbeddedObjectsDependency(); |
417 } | 419 } |
418 } | 420 } |
419 | 421 |
420 | 422 |
421 void MarkCompactCollector::VerifyOmittedMapChecks() { | 423 void MarkCompactCollector::VerifyOmittedMapChecks() { |
422 HeapObjectIterator iterator(heap()->map_space()); | 424 HeapObjectIterator iterator(heap()->map_space()); |
423 for (HeapObject* obj = iterator.Next(); obj != NULL; obj = iterator.Next()) { | 425 for (HeapObject* obj = iterator.Next(); obj != NULL; obj = iterator.Next()) { |
424 Map* map = Map::cast(obj); | 426 Map* map = Map::cast(obj); |
425 map->VerifyOmittedMapChecks(); | 427 map->VerifyOmittedMapChecks(); |
426 } | 428 } |
427 } | 429 } |
428 #endif // VERIFY_HEAP | 430 #endif // VERIFY_HEAP |
429 | 431 |
430 | 432 |
431 static void ClearMarkbitsInPagedSpace(PagedSpace* space) { | 433 static void ClearMarkbitsInPagedSpace(PagedSpace* space) { |
432 for (Page* p : *space) { | 434 for (Page* p : *space) { |
433 p->ClearLiveness(); | 435 MarkingState::Internal(p).ClearLiveness(); |
434 } | 436 } |
435 } | 437 } |
436 | 438 |
437 | 439 |
438 static void ClearMarkbitsInNewSpace(NewSpace* space) { | 440 static void ClearMarkbitsInNewSpace(NewSpace* space) { |
439 for (Page* page : *space) { | 441 for (Page* p : *space) { |
440 page->ClearLiveness(); | 442 MarkingState::Internal(p).ClearLiveness(); |
441 } | 443 } |
442 } | 444 } |
443 | 445 |
444 | 446 |
445 void MarkCompactCollector::ClearMarkbits() { | 447 void MarkCompactCollector::ClearMarkbits() { |
446 ClearMarkbitsInPagedSpace(heap_->code_space()); | 448 ClearMarkbitsInPagedSpace(heap_->code_space()); |
447 ClearMarkbitsInPagedSpace(heap_->map_space()); | 449 ClearMarkbitsInPagedSpace(heap_->map_space()); |
448 ClearMarkbitsInPagedSpace(heap_->old_space()); | 450 ClearMarkbitsInPagedSpace(heap_->old_space()); |
449 ClearMarkbitsInNewSpace(heap_->new_space()); | 451 ClearMarkbitsInNewSpace(heap_->new_space()); |
450 heap_->lo_space()->ClearMarkingStateOfLiveObjects(); | 452 heap_->lo_space()->ClearMarkingStateOfLiveObjects(); |
(...skipping 33 matching lines...)
484 base::AtomicNumber<intptr_t>* num_sweeping_tasks_; | 486 base::AtomicNumber<intptr_t>* num_sweeping_tasks_; |
485 AllocationSpace space_to_start_; | 487 AllocationSpace space_to_start_; |
486 | 488 |
487 DISALLOW_COPY_AND_ASSIGN(SweeperTask); | 489 DISALLOW_COPY_AND_ASSIGN(SweeperTask); |
488 }; | 490 }; |
489 | 491 |
490 void MarkCompactCollector::Sweeper::StartSweeping() { | 492 void MarkCompactCollector::Sweeper::StartSweeping() { |
491 sweeping_in_progress_ = true; | 493 sweeping_in_progress_ = true; |
492 ForAllSweepingSpaces([this](AllocationSpace space) { | 494 ForAllSweepingSpaces([this](AllocationSpace space) { |
493 std::sort(sweeping_list_[space].begin(), sweeping_list_[space].end(), | 495 std::sort(sweeping_list_[space].begin(), sweeping_list_[space].end(), |
494 [](Page* a, Page* b) { return a->LiveBytes() < b->LiveBytes(); }); | 496 [](Page* a, Page* b) { |
| 497 return MarkingState::Internal(a).live_bytes() < |
| 498 MarkingState::Internal(b).live_bytes(); |
| 499 }); |
495 }); | 500 }); |
496 } | 501 } |
497 | 502 |
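StartSweeping sorts each space's sweeping list by ascending live bytes, presumably so pages with the least live data (cheapest to sweep, most memory recovered) come first; the diff only reroutes the comparator through MarkingState::Internal. A standalone sketch of the ordering (Page and its live_bytes field are simplified stand-ins):

    #include <algorithm>
    #include <cassert>
    #include <cstdint>
    #include <vector>

    struct Page {
      intptr_t live_bytes = 0;  // stand-in for MarkingState::Internal(p).live_bytes()
    };

    int main() {
      std::vector<Page*> sweeping_list = {new Page{300}, new Page{10}, new Page{120}};
      // Pages with the fewest live bytes are swept first.
      std::sort(sweeping_list.begin(), sweeping_list.end(),
                [](Page* a, Page* b) { return a->live_bytes < b->live_bytes; });
      assert(sweeping_list.front()->live_bytes == 10);
      for (Page* p : sweeping_list) delete p;
      return 0;
    }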
498 void MarkCompactCollector::Sweeper::StartSweeperTasks() { | 503 void MarkCompactCollector::Sweeper::StartSweeperTasks() { |
499 if (FLAG_concurrent_sweeping && sweeping_in_progress_) { | 504 if (FLAG_concurrent_sweeping && sweeping_in_progress_) { |
500 ForAllSweepingSpaces([this](AllocationSpace space) { | 505 ForAllSweepingSpaces([this](AllocationSpace space) { |
501 if (space == NEW_SPACE) return; | 506 if (space == NEW_SPACE) return; |
502 num_sweeping_tasks_.Increment(1); | 507 num_sweeping_tasks_.Increment(1); |
503 semaphore_counter_++; | 508 semaphore_counter_++; |
504 V8::GetCurrentPlatform()->CallOnBackgroundThread( | 509 V8::GetCurrentPlatform()->CallOnBackgroundThread( |
(...skipping 433 matching lines...)
938 | 943 |
939 JSFunction* candidate = jsfunction_candidates_head_; | 944 JSFunction* candidate = jsfunction_candidates_head_; |
940 JSFunction* next_candidate; | 945 JSFunction* next_candidate; |
941 while (candidate != NULL) { | 946 while (candidate != NULL) { |
942 next_candidate = GetNextCandidate(candidate); | 947 next_candidate = GetNextCandidate(candidate); |
943 ClearNextCandidate(candidate, undefined); | 948 ClearNextCandidate(candidate, undefined); |
944 | 949 |
945 SharedFunctionInfo* shared = candidate->shared(); | 950 SharedFunctionInfo* shared = candidate->shared(); |
946 | 951 |
947 Code* code = shared->code(); | 952 Code* code = shared->code(); |
948 if (ObjectMarking::IsWhite(code)) { | 953 if (ObjectMarking::IsWhite(code, MarkingState::Internal(code))) { |
949 if (FLAG_trace_code_flushing && shared->is_compiled()) { | 954 if (FLAG_trace_code_flushing && shared->is_compiled()) { |
950 PrintF("[code-flushing clears: "); | 955 PrintF("[code-flushing clears: "); |
951 shared->ShortPrint(); | 956 shared->ShortPrint(); |
952 PrintF(" - age: %d]\n", code->GetAge()); | 957 PrintF(" - age: %d]\n", code->GetAge()); |
953 } | 958 } |
954 // Always flush the optimized code map if there is one. | 959 // Always flush the optimized code map if there is one. |
955 if (!shared->OptimizedCodeMapIsCleared()) { | 960 if (!shared->OptimizedCodeMapIsCleared()) { |
956 shared->ClearOptimizedCodeMap(); | 961 shared->ClearOptimizedCodeMap(); |
957 } | 962 } |
958 if (shared->HasBytecodeArray()) { | 963 if (shared->HasBytecodeArray()) { |
959 shared->set_code(interpreter_entry_trampoline); | 964 shared->set_code(interpreter_entry_trampoline); |
960 candidate->set_code(interpreter_entry_trampoline); | 965 candidate->set_code(interpreter_entry_trampoline); |
961 } else { | 966 } else { |
962 shared->set_code(lazy_compile); | 967 shared->set_code(lazy_compile); |
963 candidate->set_code(lazy_compile); | 968 candidate->set_code(lazy_compile); |
964 } | 969 } |
965 } else { | 970 } else { |
966 DCHECK(ObjectMarking::IsBlack(code)); | 971 DCHECK(ObjectMarking::IsBlack(code, MarkingState::Internal(code))); |
967 candidate->set_code(code); | 972 candidate->set_code(code); |
968 } | 973 } |
969 | 974 |
970 // We are in the middle of a GC cycle so the write barrier in the code | 975 // We are in the middle of a GC cycle so the write barrier in the code |
971 // setter did not record the slot update and we have to do that manually. | 976 // setter did not record the slot update and we have to do that manually. |
972 Address slot = candidate->address() + JSFunction::kCodeEntryOffset; | 977 Address slot = candidate->address() + JSFunction::kCodeEntryOffset; |
973 Code* target = Code::cast(Code::GetObjectFromEntryAddress(slot)); | 978 Code* target = Code::cast(Code::GetObjectFromEntryAddress(slot)); |
974 isolate_->heap()->mark_compact_collector()->RecordCodeEntrySlot( | 979 isolate_->heap()->mark_compact_collector()->RecordCodeEntrySlot( |
975 candidate, slot, target); | 980 candidate, slot, target); |
976 | 981 |
(...skipping 13 matching lines...)
990 Code* lazy_compile = isolate_->builtins()->builtin(Builtins::kCompileLazy); | 995 Code* lazy_compile = isolate_->builtins()->builtin(Builtins::kCompileLazy); |
991 Code* interpreter_entry_trampoline = | 996 Code* interpreter_entry_trampoline = |
992 isolate_->builtins()->builtin(Builtins::kInterpreterEntryTrampoline); | 997 isolate_->builtins()->builtin(Builtins::kInterpreterEntryTrampoline); |
993 SharedFunctionInfo* candidate = shared_function_info_candidates_head_; | 998 SharedFunctionInfo* candidate = shared_function_info_candidates_head_; |
994 SharedFunctionInfo* next_candidate; | 999 SharedFunctionInfo* next_candidate; |
995 while (candidate != NULL) { | 1000 while (candidate != NULL) { |
996 next_candidate = GetNextCandidate(candidate); | 1001 next_candidate = GetNextCandidate(candidate); |
997 ClearNextCandidate(candidate); | 1002 ClearNextCandidate(candidate); |
998 | 1003 |
999 Code* code = candidate->code(); | 1004 Code* code = candidate->code(); |
1000 if (ObjectMarking::IsWhite(code)) { | 1005 if (ObjectMarking::IsWhite(code, MarkingState::Internal(code))) { |
1001 if (FLAG_trace_code_flushing && candidate->is_compiled()) { | 1006 if (FLAG_trace_code_flushing && candidate->is_compiled()) { |
1002 PrintF("[code-flushing clears: "); | 1007 PrintF("[code-flushing clears: "); |
1003 candidate->ShortPrint(); | 1008 candidate->ShortPrint(); |
1004 PrintF(" - age: %d]\n", code->GetAge()); | 1009 PrintF(" - age: %d]\n", code->GetAge()); |
1005 } | 1010 } |
1006 // Always flush the optimized code map if there is one. | 1011 // Always flush the optimized code map if there is one. |
1007 if (!candidate->OptimizedCodeMapIsCleared()) { | 1012 if (!candidate->OptimizedCodeMapIsCleared()) { |
1008 candidate->ClearOptimizedCodeMap(); | 1013 candidate->ClearOptimizedCodeMap(); |
1009 } | 1014 } |
1010 if (candidate->HasBytecodeArray()) { | 1015 if (candidate->HasBytecodeArray()) { |
(...skipping 114 matching lines...)
1125 if (MarkRecursively(heap, target_object)) return; | 1130 if (MarkRecursively(heap, target_object)) return; |
1126 heap->minor_mark_compact_collector()->MarkObject(target_object); | 1131 heap->minor_mark_compact_collector()->MarkObject(target_object); |
1127 } | 1132 } |
1128 } | 1133 } |
1129 | 1134 |
1130 protected: | 1135 protected: |
1131 inline static bool MarkRecursively(Heap* heap, HeapObject* object) { | 1136 inline static bool MarkRecursively(Heap* heap, HeapObject* object) { |
1132 StackLimitCheck check(heap->isolate()); | 1137 StackLimitCheck check(heap->isolate()); |
1133 if (check.HasOverflowed()) return false; | 1138 if (check.HasOverflowed()) return false; |
1134 | 1139 |
1135 const MarkingState state = | 1140 if (ObjectMarking::IsBlackOrGrey<MarkBit::NON_ATOMIC>( |
1136 MinorMarkCompactCollector::StateForObject(object); | 1141 object, MarkingState::External(object))) |
1137 if (ObjectMarking::IsBlackOrGrey<MarkBit::NON_ATOMIC>(object, state)) | |
1138 return true; | 1142 return true; |
1139 ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>(object, state); | 1143 ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>( |
| 1144 object, MarkingState::External(object)); |
1140 IterateBody(object->map(), object); | 1145 IterateBody(object->map(), object); |
1141 return true; | 1146 return true; |
1142 } | 1147 } |
1143 }; | 1148 }; |
1144 | 1149 |
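MarkRecursively returns false once the machine stack is nearly exhausted, and the caller then falls back to pushing the object on the marking deque instead of recursing deeper. A standalone model of that guard, with an explicit depth budget standing in for V8's StackLimitCheck (the budget value and the Obj type are illustrative assumptions):

    #include <deque>
    #include <vector>

    struct Obj {
      bool marked = false;
      std::vector<Obj*> children;
    };

    static std::deque<Obj*> g_marking_deque;  // fallback work list

    bool MarkRecursively(Obj* o, int budget);

    void MarkObject(Obj* o, int budget) {
      if (MarkRecursively(o, budget)) return;
      g_marking_deque.push_back(o);  // too deep: finish iteratively later
    }

    // Returns false if the depth budget (stand-in for StackLimitCheck) is
    // exhausted, leaving the object for the deque-driven loop.
    bool MarkRecursively(Obj* o, int budget) {
      if (budget == 0) return false;
      if (o->marked) return true;   // already black or grey: nothing to do
      o->marked = true;             // white -> black
      for (Obj* child : o->children) MarkObject(child, budget - 1);
      return true;
    }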
1145 class MarkCompactMarkingVisitor | 1150 class MarkCompactMarkingVisitor |
1146 : public StaticMarkingVisitor<MarkCompactMarkingVisitor> { | 1151 : public StaticMarkingVisitor<MarkCompactMarkingVisitor> { |
1147 public: | 1152 public: |
1148 static void Initialize(); | 1153 static void Initialize(); |
1149 | 1154 |
(...skipping 16 matching lines...)
1166 } | 1171 } |
1167 | 1172 |
1168 // Marks the object black and pushes it on the marking stack. | 1173 // Marks the object black and pushes it on the marking stack. |
1169 INLINE(static void MarkObject(Heap* heap, HeapObject* object)) { | 1174 INLINE(static void MarkObject(Heap* heap, HeapObject* object)) { |
1170 heap->mark_compact_collector()->MarkObject(object); | 1175 heap->mark_compact_collector()->MarkObject(object); |
1171 } | 1176 } |
1172 | 1177 |
1173 // Marks the object black without pushing it on the marking stack. | 1178 // Marks the object black without pushing it on the marking stack. |
1174 // Returns true if object needed marking and false otherwise. | 1179 // Returns true if object needed marking and false otherwise. |
1175 INLINE(static bool MarkObjectWithoutPush(Heap* heap, HeapObject* object)) { | 1180 INLINE(static bool MarkObjectWithoutPush(Heap* heap, HeapObject* object)) { |
1176 if (ObjectMarking::IsWhite(object)) { | 1181 if (ObjectMarking::IsWhite(object, MarkingState::Internal(object))) { |
1177 ObjectMarking::WhiteToBlack(object); | 1182 ObjectMarking::WhiteToBlack(object, MarkingState::Internal(object)); |
1178 return true; | 1183 return true; |
1179 } | 1184 } |
1180 return false; | 1185 return false; |
1181 } | 1186 } |
1182 | 1187 |
1183 // Mark object pointed to by p. | 1188 // Mark object pointed to by p. |
1184 INLINE(static void MarkObjectByPointer(MarkCompactCollector* collector, | 1189 INLINE(static void MarkObjectByPointer(MarkCompactCollector* collector, |
1185 HeapObject* object, Object** p)) { | 1190 HeapObject* object, Object** p)) { |
1186 if (!(*p)->IsHeapObject()) return; | 1191 if (!(*p)->IsHeapObject()) return; |
1187 HeapObject* target_object = HeapObject::cast(*p); | 1192 HeapObject* target_object = HeapObject::cast(*p); |
1188 collector->RecordSlot(object, p, target_object); | 1193 collector->RecordSlot(object, p, target_object); |
1189 collector->MarkObject(target_object); | 1194 collector->MarkObject(target_object); |
1190 } | 1195 } |
1191 | 1196 |
1192 | 1197 |
1193 // Visit an unmarked object. | 1198 // Visit an unmarked object. |
1194 INLINE(static void VisitUnmarkedObject(MarkCompactCollector* collector, | 1199 INLINE(static void VisitUnmarkedObject(MarkCompactCollector* collector, |
1195 HeapObject* obj)) { | 1200 HeapObject* obj)) { |
1196 #ifdef DEBUG | 1201 #ifdef DEBUG |
1197 DCHECK(collector->heap()->Contains(obj)); | 1202 DCHECK(collector->heap()->Contains(obj)); |
1198 DCHECK(ObjectMarking::IsWhite(obj)); | 1203 DCHECK(ObjectMarking::IsWhite(obj, MarkingState::Internal(obj))); |
1199 #endif | 1204 #endif |
1200 Map* map = obj->map(); | 1205 Map* map = obj->map(); |
1201 Heap* heap = obj->GetHeap(); | 1206 Heap* heap = obj->GetHeap(); |
1202 ObjectMarking::WhiteToBlack(obj); | 1207 ObjectMarking::WhiteToBlack(obj, MarkingState::Internal(obj)); |
1203 // Mark the map pointer and the body. | 1208 // Mark the map pointer and the body. |
1204 heap->mark_compact_collector()->MarkObject(map); | 1209 heap->mark_compact_collector()->MarkObject(map); |
1205 IterateBody(map, obj); | 1210 IterateBody(map, obj); |
1206 } | 1211 } |
1207 | 1212 |
1208 // Visit all unmarked objects pointed to by [start, end). | 1213 // Visit all unmarked objects pointed to by [start, end). |
1209 // Returns false if the operation fails (lack of stack space). | 1214 // Returns false if the operation fails (lack of stack space). |
1210 INLINE(static bool VisitUnmarkedObjects(Heap* heap, HeapObject* object, | 1215 INLINE(static bool VisitUnmarkedObjects(Heap* heap, HeapObject* object, |
1211 Object** start, Object** end)) { | 1216 Object** start, Object** end)) { |
1212 // Return false if we are close to the stack limit. | 1217 // Return false if we are close to the stack limit. |
1213 StackLimitCheck check(heap->isolate()); | 1218 StackLimitCheck check(heap->isolate()); |
1214 if (check.HasOverflowed()) return false; | 1219 if (check.HasOverflowed()) return false; |
1215 | 1220 |
1216 MarkCompactCollector* collector = heap->mark_compact_collector(); | 1221 MarkCompactCollector* collector = heap->mark_compact_collector(); |
1217 // Visit the unmarked objects. | 1222 // Visit the unmarked objects. |
1218 for (Object** p = start; p < end; p++) { | 1223 for (Object** p = start; p < end; p++) { |
1219 Object* o = *p; | 1224 Object* o = *p; |
1220 if (!o->IsHeapObject()) continue; | 1225 if (!o->IsHeapObject()) continue; |
1221 collector->RecordSlot(object, p, o); | 1226 collector->RecordSlot(object, p, o); |
1222 HeapObject* obj = HeapObject::cast(o); | 1227 HeapObject* obj = HeapObject::cast(o); |
1223 if (ObjectMarking::IsBlackOrGrey(obj)) continue; | 1228 if (ObjectMarking::IsBlackOrGrey(obj, MarkingState::Internal(obj))) |
| 1229 continue; |
1224 VisitUnmarkedObject(collector, obj); | 1230 VisitUnmarkedObject(collector, obj); |
1225 } | 1231 } |
1226 return true; | 1232 return true; |
1227 } | 1233 } |
1228 | 1234 |
1229 private: | 1235 private: |
1230 // Code flushing support. | 1236 // Code flushing support. |
1231 | 1237 |
1232 static const int kRegExpCodeThreshold = 5; | 1238 static const int kRegExpCodeThreshold = 5; |
1233 | 1239 |
(...skipping 12 matching lines...)
1246 if (!code->IsSmi() && | 1252 if (!code->IsSmi() && |
1247 HeapObject::cast(code)->map()->instance_type() == CODE_TYPE) { | 1253 HeapObject::cast(code)->map()->instance_type() == CODE_TYPE) { |
1248 // Save a copy that can be reinstated if we need the code again. | 1254 // Save a copy that can be reinstated if we need the code again. |
1249 re->SetDataAt(JSRegExp::saved_code_index(is_one_byte), code); | 1255 re->SetDataAt(JSRegExp::saved_code_index(is_one_byte), code); |
1250 | 1256 |
1251 // Saving a copy might create a pointer into a compaction candidate | 1257 // Saving a copy might create a pointer into a compaction candidate |
1252 // that was not observed by the marker. This might happen if the JSRegExp | 1258 // that was not observed by the marker. This might happen if the JSRegExp |
1253 // data was marked through the compilation cache before the marker reached | 1259 // data was marked through the compilation cache before the marker reached |
1254 // the JSRegExp object. | 1260 // the JSRegExp object. |
1255 FixedArray* data = FixedArray::cast(re->data()); | 1261 FixedArray* data = FixedArray::cast(re->data()); |
1256 if (ObjectMarking::IsBlackOrGrey(data)) { | 1262 if (ObjectMarking::IsBlackOrGrey(data, MarkingState::Internal(data))) { |
1257 Object** slot = | 1263 Object** slot = |
1258 data->data_start() + JSRegExp::saved_code_index(is_one_byte); | 1264 data->data_start() + JSRegExp::saved_code_index(is_one_byte); |
1259 heap->mark_compact_collector()->RecordSlot(data, slot, code); | 1265 heap->mark_compact_collector()->RecordSlot(data, slot, code); |
1260 } | 1266 } |
1261 | 1267 |
1262 // Set a number in the 0-255 range to guarantee no smi overflow. | 1268 // Set a number in the 0-255 range to guarantee no smi overflow. |
1263 re->SetDataAt(JSRegExp::code_index(is_one_byte), | 1269 re->SetDataAt(JSRegExp::code_index(is_one_byte), |
1264 Smi::FromInt(heap->ms_count() & 0xff)); | 1270 Smi::FromInt(heap->ms_count() & 0xff)); |
1265 } else if (code->IsSmi()) { | 1271 } else if (code->IsSmi()) { |
1266 int value = Smi::cast(code)->value(); | 1272 int value = Smi::cast(code)->value(); |
(...skipping 137 matching lines...)
1404 | 1410 |
1405 private: | 1411 private: |
1406 void MarkObjectByPointer(Object** p) { | 1412 void MarkObjectByPointer(Object** p) { |
1407 if (!(*p)->IsHeapObject()) return; | 1413 if (!(*p)->IsHeapObject()) return; |
1408 | 1414 |
1409 HeapObject* object = HeapObject::cast(*p); | 1415 HeapObject* object = HeapObject::cast(*p); |
1410 | 1416 |
1411 if (!collector_->heap()->InNewSpace(object)) return; | 1417 if (!collector_->heap()->InNewSpace(object)) return; |
1412 | 1418 |
1413 if (ObjectMarking::IsBlackOrGrey<MarkBit::NON_ATOMIC>( | 1419 if (ObjectMarking::IsBlackOrGrey<MarkBit::NON_ATOMIC>( |
1414 object, StateForObject(object))) | 1420 object, MarkingState::External(object))) |
1415 return; | 1421 return; |
1416 | 1422 |
1417 Map* map = object->map(); | 1423 Map* map = object->map(); |
1418 ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>(object, | 1424 ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>( |
1419 StateForObject(object)); | 1425 object, MarkingState::External(object)); |
1420 StaticYoungGenerationMarkingVisitor::IterateBody(map, object); | 1426 StaticYoungGenerationMarkingVisitor::IterateBody(map, object); |
1421 | 1427 |
1422 collector_->EmptyMarkingDeque(); | 1428 collector_->EmptyMarkingDeque(); |
1423 } | 1429 } |
1424 | 1430 |
1425 MinorMarkCompactCollector* collector_; | 1431 MinorMarkCompactCollector* collector_; |
1426 }; | 1432 }; |
1427 | 1433 |
1428 // Visitor class for marking heap roots. | 1434 // Visitor class for marking heap roots. |
1429 class MarkCompactCollector::RootMarkingVisitor : public ObjectVisitor { | 1435 class MarkCompactCollector::RootMarkingVisitor : public ObjectVisitor { |
(...skipping 10 matching lines...) Expand all Loading... |
1440 // Skip the weak next code link in a code object, which is visited in | 1446 // Skip the weak next code link in a code object, which is visited in |
1441 // ProcessTopOptimizedFrame. | 1447 // ProcessTopOptimizedFrame. |
1442 void VisitNextCodeLink(Object** p) override {} | 1448 void VisitNextCodeLink(Object** p) override {} |
1443 | 1449 |
1444 private: | 1450 private: |
1445 void MarkObjectByPointer(Object** p) { | 1451 void MarkObjectByPointer(Object** p) { |
1446 if (!(*p)->IsHeapObject()) return; | 1452 if (!(*p)->IsHeapObject()) return; |
1447 | 1453 |
1448 HeapObject* object = HeapObject::cast(*p); | 1454 HeapObject* object = HeapObject::cast(*p); |
1449 | 1455 |
1450 if (ObjectMarking::IsBlackOrGrey<MarkBit::NON_ATOMIC>(object)) return; | 1456 if (ObjectMarking::IsBlackOrGrey<MarkBit::NON_ATOMIC>( |
| 1457 object, MarkingState::Internal(object))) |
| 1458 return; |
1451 | 1459 |
1452 Map* map = object->map(); | 1460 Map* map = object->map(); |
1453 // Mark the object. | 1461 // Mark the object. |
1454 ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>(object); | 1462 ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>( |
| 1463 object, MarkingState::Internal(object)); |
1455 | 1464 |
1456 // Mark the map pointer and body, and push them on the marking stack. | 1465 // Mark the map pointer and body, and push them on the marking stack. |
1457 collector_->MarkObject(map); | 1466 collector_->MarkObject(map); |
1458 MarkCompactMarkingVisitor::IterateBody(map, object); | 1467 MarkCompactMarkingVisitor::IterateBody(map, object); |
1459 | 1468 |
1460 // Mark all the objects reachable from the map and body. May leave | 1469 // Mark all the objects reachable from the map and body. May leave |
1461 // overflowed objects in the heap. | 1470 // overflowed objects in the heap. |
1462 collector_->EmptyMarkingDeque(); | 1471 collector_->EmptyMarkingDeque(); |
1463 } | 1472 } |
1464 | 1473 |
1465 MarkCompactCollector* collector_; | 1474 MarkCompactCollector* collector_; |
1466 }; | 1475 }; |
1467 | 1476 |
1468 | 1477 |
1469 // Helper class for pruning the string table. | 1478 // Helper class for pruning the string table. |
1470 template <bool finalize_external_strings, bool record_slots> | 1479 template <bool finalize_external_strings, bool record_slots> |
1471 class StringTableCleaner : public ObjectVisitor { | 1480 class StringTableCleaner : public ObjectVisitor { |
1472 public: | 1481 public: |
1473 StringTableCleaner(Heap* heap, HeapObject* table) | 1482 StringTableCleaner(Heap* heap, HeapObject* table) |
1474 : heap_(heap), pointers_removed_(0), table_(table) { | 1483 : heap_(heap), pointers_removed_(0), table_(table) { |
1475 DCHECK(!record_slots || table != nullptr); | 1484 DCHECK(!record_slots || table != nullptr); |
1476 } | 1485 } |
1477 | 1486 |
1478 void VisitPointers(Object** start, Object** end) override { | 1487 void VisitPointers(Object** start, Object** end) override { |
1479 // Visit all HeapObject pointers in [start, end). | 1488 // Visit all HeapObject pointers in [start, end). |
1480 MarkCompactCollector* collector = heap_->mark_compact_collector(); | 1489 MarkCompactCollector* collector = heap_->mark_compact_collector(); |
1481 for (Object** p = start; p < end; p++) { | 1490 for (Object** p = start; p < end; p++) { |
1482 Object* o = *p; | 1491 Object* o = *p; |
1483 if (o->IsHeapObject()) { | 1492 if (o->IsHeapObject()) { |
1484 if (ObjectMarking::IsWhite(HeapObject::cast(o))) { | 1493 HeapObject* heap_object = HeapObject::cast(o); |
| 1494 if (ObjectMarking::IsWhite(heap_object, |
| 1495 MarkingState::Internal(heap_object))) { |
1485 if (finalize_external_strings) { | 1496 if (finalize_external_strings) { |
1486 if (o->IsExternalString()) { | 1497 if (o->IsExternalString()) { |
1487 heap_->FinalizeExternalString(String::cast(*p)); | 1498 heap_->FinalizeExternalString(String::cast(*p)); |
1488 } else { | 1499 } else { |
1489 // The original external string may have been internalized. | 1500 // The original external string may have been internalized. |
1490 DCHECK(o->IsThinString()); | 1501 DCHECK(o->IsThinString()); |
1491 } | 1502 } |
1492 } else { | 1503 } else { |
1493 pointers_removed_++; | 1504 pointers_removed_++; |
1494 } | 1505 } |
(...skipping 20 matching lines...)
1515 }; | 1526 }; |
1516 | 1527 |
1517 typedef StringTableCleaner<false, true> InternalizedStringTableCleaner; | 1528 typedef StringTableCleaner<false, true> InternalizedStringTableCleaner; |
1518 typedef StringTableCleaner<true, false> ExternalStringTableCleaner; | 1529 typedef StringTableCleaner<true, false> ExternalStringTableCleaner; |
1519 | 1530 |
1520 // Implementation of WeakObjectRetainer for mark compact GCs. All marked objects | 1531 // Implementation of WeakObjectRetainer for mark compact GCs. All marked objects |
1521 // are retained. | 1532 // are retained. |
1522 class MarkCompactWeakObjectRetainer : public WeakObjectRetainer { | 1533 class MarkCompactWeakObjectRetainer : public WeakObjectRetainer { |
1523 public: | 1534 public: |
1524 virtual Object* RetainAs(Object* object) { | 1535 virtual Object* RetainAs(Object* object) { |
1525 DCHECK(!ObjectMarking::IsGrey(HeapObject::cast(object))); | 1536 HeapObject* heap_object = HeapObject::cast(object); |
1526 if (ObjectMarking::IsBlack(HeapObject::cast(object))) { | 1537 DCHECK(!ObjectMarking::IsGrey(heap_object, |
| 1538 MarkingState::Internal(heap_object))); |
| 1539 if (ObjectMarking::IsBlack(heap_object, |
| 1540 MarkingState::Internal(heap_object))) { |
1527 return object; | 1541 return object; |
1528 } else if (object->IsAllocationSite() && | 1542 } else if (object->IsAllocationSite() && |
1529 !(AllocationSite::cast(object)->IsZombie())) { | 1543 !(AllocationSite::cast(object)->IsZombie())) { |
1530 // "dead" AllocationSites need to live long enough for a traversal of new | 1544 // "dead" AllocationSites need to live long enough for a traversal of new |
1531 // space. These sites get a one-time reprieve. | 1545 // space. These sites get a one-time reprieve. |
1532 AllocationSite* site = AllocationSite::cast(object); | 1546 AllocationSite* site = AllocationSite::cast(object); |
1533 site->MarkZombie(); | 1547 site->MarkZombie(); |
1534 ObjectMarking::WhiteToBlack(site); | 1548 ObjectMarking::WhiteToBlack(site, MarkingState::Internal(site)); |
1535 return object; | 1549 return object; |
1536 } else { | 1550 } else { |
1537 return NULL; | 1551 return NULL; |
1538 } | 1552 } |
1539 } | 1553 } |
1540 }; | 1554 }; |
1541 | 1555 |
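MarkCompactWeakObjectRetainer is the predicate used when the GC walks weak lists: marked objects survive, unmarked ones are dropped by returning null, and "dead" AllocationSites get a one-time reprieve as zombies. A standalone model of that contract (the zombie reprieve is reduced to a flag; the slot bookkeeping is omitted):

    struct Obj {
      bool marked = false;
      bool is_allocation_site = false;
      bool zombie = false;
    };

    // Returns the object if it should stay on the weak list, null otherwise.
    Obj* RetainAs(Obj* object) {
      if (object->marked) return object;
      if (object->is_allocation_site && !object->zombie) {
        // "Dead" AllocationSites get a one-time reprieve: mark them zombie
        // and black so a traversal of new space can still observe them.
        object->zombie = true;
        object->marked = true;  // WhiteToBlack
        return object;
      }
      return nullptr;  // drop the dead element from the weak list
    }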
1542 | 1556 |
1543 // Fill the marking stack with overflowed objects returned by the given | 1557 // Fill the marking stack with overflowed objects returned by the given |
1544 // iterator. Stop when the marking stack is filled or the end of the space | 1558 // iterator. Stop when the marking stack is filled or the end of the space |
1545 // is reached, whichever comes first. | 1559 // is reached, whichever comes first. |
1546 template <class T> | 1560 template <class T> |
1547 void MarkCompactCollector::DiscoverGreyObjectsWithIterator(T* it) { | 1561 void MarkCompactCollector::DiscoverGreyObjectsWithIterator(T* it) { |
1548 // The caller should ensure that the marking stack is initially not full, | 1562 // The caller should ensure that the marking stack is initially not full, |
1549 // so that we don't waste effort pointlessly scanning for objects. | 1563 // so that we don't waste effort pointlessly scanning for objects. |
1550 DCHECK(!marking_deque()->IsFull()); | 1564 DCHECK(!marking_deque()->IsFull()); |
1551 | 1565 |
1552 Map* filler_map = heap()->one_pointer_filler_map(); | 1566 Map* filler_map = heap()->one_pointer_filler_map(); |
1553 for (HeapObject* object = it->Next(); object != NULL; object = it->Next()) { | 1567 for (HeapObject* object = it->Next(); object != NULL; object = it->Next()) { |
1554 if ((object->map() != filler_map) && ObjectMarking::IsGrey(object)) { | 1568 if ((object->map() != filler_map) && |
1555 ObjectMarking::GreyToBlack(object); | 1569 ObjectMarking::IsGrey(object, MarkingState::Internal(object))) { |
| 1570 ObjectMarking::GreyToBlack(object, MarkingState::Internal(object)); |
1556 PushBlack(object); | 1571 PushBlack(object); |
1557 if (marking_deque()->IsFull()) return; | 1572 if (marking_deque()->IsFull()) return; |
1558 } | 1573 } |
1559 } | 1574 } |
1560 } | 1575 } |
1561 | 1576 |
1562 void MarkCompactCollector::DiscoverGreyObjectsOnPage(MemoryChunk* p) { | 1577 void MarkCompactCollector::DiscoverGreyObjectsOnPage(MemoryChunk* p) { |
1563 DCHECK(!marking_deque()->IsFull()); | 1578 DCHECK(!marking_deque()->IsFull()); |
1564 LiveObjectIterator<kGreyObjects> it(p, MarkingState::FromPageInternal(p)); | 1579 LiveObjectIterator<kGreyObjects> it(p, MarkingState::Internal(p)); |
1565 HeapObject* object = NULL; | 1580 HeapObject* object = NULL; |
1566 while ((object = it.Next()) != NULL) { | 1581 while ((object = it.Next()) != NULL) { |
1567 DCHECK(ObjectMarking::IsGrey(object)); | 1582 DCHECK(ObjectMarking::IsGrey(object, MarkingState::Internal(object))); |
1568 ObjectMarking::GreyToBlack(object); | 1583 ObjectMarking::GreyToBlack(object, MarkingState::Internal(object)); |
1569 PushBlack(object); | 1584 PushBlack(object); |
1570 if (marking_deque()->IsFull()) return; | 1585 if (marking_deque()->IsFull()) return; |
1571 } | 1586 } |
1572 } | 1587 } |
1573 | 1588 |
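The two DiscoverGreyObjects helpers above implement overflow recovery: when the bounded marking deque fills up, marking continues by rescanning memory for grey objects, turning each one black and re-pushing it until the deque is full again. A standalone sketch of the refill loop (the fixed-capacity deque and the Color enum are simplified stand-ins for V8's marking deque and markbits):

    #include <deque>
    #include <vector>

    enum class Color { kWhite, kGrey, kBlack };

    struct Obj { Color color = Color::kWhite; };

    struct BoundedDeque {
      std::deque<Obj*> d;
      size_t capacity = 4;
      bool IsFull() const { return d.size() >= capacity; }
      void PushBlack(Obj* o) { d.push_back(o); }
    };

    // Rescan a page's objects for grey ones after a deque overflow.
    void DiscoverGreyObjectsOnPage(std::vector<Obj*>& page, BoundedDeque& deque) {
      for (Obj* o : page) {
        if (o->color != Color::kGrey) continue;
        o->color = Color::kBlack;    // GreyToBlack before pushing
        deque.PushBlack(o);
        if (deque.IsFull()) return;  // stop early; another pass will follow
      }
    }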
1574 class RecordMigratedSlotVisitor final : public ObjectVisitor { | 1589 class RecordMigratedSlotVisitor final : public ObjectVisitor { |
1575 public: | 1590 public: |
1576 explicit RecordMigratedSlotVisitor(MarkCompactCollector* collector) | 1591 explicit RecordMigratedSlotVisitor(MarkCompactCollector* collector) |
1577 : collector_(collector) {} | 1592 : collector_(collector) {} |
1578 | 1593 |
(...skipping 432 matching lines...)
2011 for (Page* page : PageRange(space->bottom(), space->top())) { | 2026 for (Page* page : PageRange(space->bottom(), space->top())) { |
2012 DiscoverGreyObjectsOnPage(page); | 2027 DiscoverGreyObjectsOnPage(page); |
2013 if (marking_deque()->IsFull()) return; | 2028 if (marking_deque()->IsFull()) return; |
2014 } | 2029 } |
2015 } | 2030 } |
2016 | 2031 |
2017 | 2032 |
2018 bool MarkCompactCollector::IsUnmarkedHeapObject(Object** p) { | 2033 bool MarkCompactCollector::IsUnmarkedHeapObject(Object** p) { |
2019 Object* o = *p; | 2034 Object* o = *p; |
2020 if (!o->IsHeapObject()) return false; | 2035 if (!o->IsHeapObject()) return false; |
2021 return ObjectMarking::IsWhite(HeapObject::cast(o)); | 2036 return ObjectMarking::IsWhite(HeapObject::cast(o), |
| 2037 MarkingState::Internal(HeapObject::cast(o))); |
2022 } | 2038 } |
2023 | 2039 |
2024 void MarkCompactCollector::MarkStringTable(RootMarkingVisitor* visitor) { | 2040 void MarkCompactCollector::MarkStringTable(RootMarkingVisitor* visitor) { |
2025 StringTable* string_table = heap()->string_table(); | 2041 StringTable* string_table = heap()->string_table(); |
2026 // Mark the string table itself. | 2042 // Mark the string table itself. |
2027 if (ObjectMarking::IsWhite(string_table)) { | 2043 if (ObjectMarking::IsWhite(string_table, |
| 2044 MarkingState::Internal(string_table))) { |
2028 // String table could have already been marked by visiting the handles list. | 2045 // String table could have already been marked by visiting the handles list. |
2029 ObjectMarking::WhiteToBlack(string_table); | 2046 ObjectMarking::WhiteToBlack(string_table, |
| 2047 MarkingState::Internal(string_table)); |
2030 } | 2048 } |
2031 // Explicitly mark the prefix. | 2049 // Explicitly mark the prefix. |
2032 string_table->IteratePrefix(visitor); | 2050 string_table->IteratePrefix(visitor); |
2033 ProcessMarkingDeque(); | 2051 ProcessMarkingDeque(); |
2034 } | 2052 } |
2035 | 2053 |
2036 void MarkCompactCollector::MarkRoots(RootMarkingVisitor* visitor) { | 2054 void MarkCompactCollector::MarkRoots(RootMarkingVisitor* visitor) { |
2037 // Mark the heap roots including global variables, stack variables, | 2055 // Mark the heap roots including global variables, stack variables, |
2038 // etc., and all objects reachable from them. | 2056 // etc., and all objects reachable from them. |
2039 heap()->IterateStrongRoots(visitor, VISIT_ONLY_STRONG); | 2057 heap()->IterateStrongRoots(visitor, VISIT_ONLY_STRONG); |
(...skipping 12 matching lines...)
2052 // Before: the marking stack contains zero or more heap object pointers. | 2070 // Before: the marking stack contains zero or more heap object pointers. |
2053 // After: the marking stack is empty, and all objects reachable from the | 2071 // After: the marking stack is empty, and all objects reachable from the |
2054 // marking stack have been marked, or are overflowed in the heap. | 2072 // marking stack have been marked, or are overflowed in the heap. |
2055 void MarkCompactCollector::EmptyMarkingDeque() { | 2073 void MarkCompactCollector::EmptyMarkingDeque() { |
2056 while (!marking_deque()->IsEmpty()) { | 2074 while (!marking_deque()->IsEmpty()) { |
2057 HeapObject* object = marking_deque()->Pop(); | 2075 HeapObject* object = marking_deque()->Pop(); |
2058 | 2076 |
2059 DCHECK(!object->IsFiller()); | 2077 DCHECK(!object->IsFiller()); |
2060 DCHECK(object->IsHeapObject()); | 2078 DCHECK(object->IsHeapObject()); |
2061 DCHECK(heap()->Contains(object)); | 2079 DCHECK(heap()->Contains(object)); |
2062 DCHECK(!(ObjectMarking::IsWhite<MarkBit::NON_ATOMIC>(object))); | 2080 DCHECK(!(ObjectMarking::IsWhite<MarkBit::NON_ATOMIC>( |
| 2081 object, MarkingState::Internal(object)))); |
2063 | 2082 |
2064 Map* map = object->map(); | 2083 Map* map = object->map(); |
2065 MarkObject(map); | 2084 MarkObject(map); |
2066 MarkCompactMarkingVisitor::IterateBody(map, object); | 2085 MarkCompactMarkingVisitor::IterateBody(map, object); |
2067 } | 2086 } |
2068 } | 2087 } |
2069 | 2088 |
2070 | 2089 |
2071 // Sweep the heap for overflowed objects, clear their overflow bits, and | 2090 // Sweep the heap for overflowed objects, clear their overflow bits, and |
2072 // push them on the marking stack. Stop early if the marking stack fills | 2091 // push them on the marking stack. Stop early if the marking stack fills |
(...skipping 170 matching lines...)
2243 ObjectStatsVisitor(Heap* heap, ObjectStats* live_stats, | 2262 ObjectStatsVisitor(Heap* heap, ObjectStats* live_stats, |
2244 ObjectStats* dead_stats) | 2263 ObjectStats* dead_stats) |
2245 : live_collector_(heap, live_stats), dead_collector_(heap, dead_stats) { | 2264 : live_collector_(heap, live_stats), dead_collector_(heap, dead_stats) { |
2246 DCHECK_NOT_NULL(live_stats); | 2265 DCHECK_NOT_NULL(live_stats); |
2247 DCHECK_NOT_NULL(dead_stats); | 2266 DCHECK_NOT_NULL(dead_stats); |
2248 // Global objects are roots and thus recorded as live. | 2267 // Global objects are roots and thus recorded as live. |
2249 live_collector_.CollectGlobalStatistics(); | 2268 live_collector_.CollectGlobalStatistics(); |
2250 } | 2269 } |
2251 | 2270 |
2252 bool Visit(HeapObject* obj) override { | 2271 bool Visit(HeapObject* obj) override { |
2253 if (ObjectMarking::IsBlack(obj)) { | 2272 if (ObjectMarking::IsBlack(obj, MarkingState::Internal(obj))) { |
2254 live_collector_.CollectStatistics(obj); | 2273 live_collector_.CollectStatistics(obj); |
2255 } else { | 2274 } else { |
2256 DCHECK(!ObjectMarking::IsGrey(obj)); | 2275 DCHECK(!ObjectMarking::IsGrey(obj, MarkingState::Internal(obj))); |
2257 dead_collector_.CollectStatistics(obj); | 2276 dead_collector_.CollectStatistics(obj); |
2258 } | 2277 } |
2259 return true; | 2278 return true; |
2260 } | 2279 } |
2261 | 2280 |
2262 private: | 2281 private: |
2263 ObjectStatsCollector live_collector_; | 2282 ObjectStatsCollector live_collector_; |
2264 ObjectStatsCollector dead_collector_; | 2283 ObjectStatsCollector dead_collector_; |
2265 }; | 2284 }; |
2266 | 2285 |
(...skipping 35 matching lines...)
2302 } | 2321 } |
2303 | 2322 |
2304 SlotCallbackResult MinorMarkCompactCollector::CheckAndMarkObject( | 2323 SlotCallbackResult MinorMarkCompactCollector::CheckAndMarkObject( |
2305 Heap* heap, Address slot_address) { | 2324 Heap* heap, Address slot_address) { |
2306 Object* object = *reinterpret_cast<Object**>(slot_address); | 2325 Object* object = *reinterpret_cast<Object**>(slot_address); |
2307 if (heap->InNewSpace(object)) { | 2326 if (heap->InNewSpace(object)) { |
2308 // Marking happens before flipping the young generation, so the object | 2327 // Marking happens before flipping the young generation, so the object |
2309 // has to be in ToSpace. | 2328 // has to be in ToSpace. |
2310 DCHECK(heap->InToSpace(object)); | 2329 DCHECK(heap->InToSpace(object)); |
2311 HeapObject* heap_object = reinterpret_cast<HeapObject*>(object); | 2330 HeapObject* heap_object = reinterpret_cast<HeapObject*>(object); |
2312 const MarkingState state = | 2331 const MarkingState state = MarkingState::External(heap_object); |
2313 MinorMarkCompactCollector::StateForObject(heap_object); | |
2314 if (ObjectMarking::IsBlackOrGrey<MarkBit::NON_ATOMIC>(heap_object, state)) { | 2332 if (ObjectMarking::IsBlackOrGrey<MarkBit::NON_ATOMIC>(heap_object, state)) { |
2315 return KEEP_SLOT; | 2333 return KEEP_SLOT; |
2316 } | 2334 } |
2317 ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>(heap_object, state); | 2335 ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>(heap_object, state); |
2318 StaticYoungGenerationMarkingVisitor::IterateBody(heap_object->map(), | 2336 StaticYoungGenerationMarkingVisitor::IterateBody(heap_object->map(), |
2319 heap_object); | 2337 heap_object); |
2320 return KEEP_SLOT; | 2338 return KEEP_SLOT; |
2321 } | 2339 } |
2322 return REMOVE_SLOT; | 2340 return REMOVE_SLOT; |
2323 } | 2341 } |
2324 | 2342 |
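CheckAndMarkObject is a remembered-set slot callback: it keeps a slot whose target still lives in the young generation (marking the target if needed) and asks for removal otherwise. A standalone model of the KEEP_SLOT/REMOVE_SLOT semantics (the Obj type and the in_new_space test are simplified assumptions; body iteration is collapsed into the marked flag):

    #include <cassert>

    enum SlotCallbackResult { KEEP_SLOT, REMOVE_SLOT };

    struct Obj {
      bool in_new_space = false;
      bool marked = false;
    };

    // Visit one slot of the remembered set.
    SlotCallbackResult CheckAndMarkObject(Obj** slot) {
      Obj* target = *slot;
      if (target == nullptr || !target->in_new_space) {
        return REMOVE_SLOT;  // slot no longer interesting for young-gen GC
      }
      if (!target->marked) target->marked = true;  // WhiteToBlack (+ iterate body)
      return KEEP_SLOT;      // keep: still points into the young generation
    }

    int main() {
      Obj young{true, false};
      Obj* slot = &young;
      assert(CheckAndMarkObject(&slot) == KEEP_SLOT);
      Obj old_obj{false, false};
      slot = &old_obj;
      assert(CheckAndMarkObject(&slot) == REMOVE_SLOT);
      return 0;
    }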
2325 static bool IsUnmarkedObject(Heap* heap, Object** p) { | 2343 static bool IsUnmarkedObject(Heap* heap, Object** p) { |
2326 DCHECK_IMPLIES(heap->InNewSpace(*p), heap->InToSpace(*p)); | 2344 DCHECK_IMPLIES(heap->InNewSpace(*p), heap->InToSpace(*p)); |
2327 return heap->InNewSpace(*p) && !ObjectMarking::IsBlack(HeapObject::cast(*p)); | 2345 return heap->InNewSpace(*p) && |
| 2346 !ObjectMarking::IsBlack(HeapObject::cast(*p), |
| 2347 MarkingState::Internal(HeapObject::cast(*p))); |
2328 } | 2348 } |
2329 | 2349 |
2330 void MinorMarkCompactCollector::MarkLiveObjects() { | 2350 void MinorMarkCompactCollector::MarkLiveObjects() { |
2331 TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_MARK); | 2351 TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_MARK); |
2332 | 2352 |
2333 PostponeInterruptsScope postpone(isolate()); | 2353 PostponeInterruptsScope postpone(isolate()); |
2334 | 2354 |
2335 StaticYoungGenerationMarkingVisitor::Initialize(heap()); | 2355 StaticYoungGenerationMarkingVisitor::Initialize(heap()); |
2336 RootMarkingVisitor root_visitor(this); | 2356 RootMarkingVisitor root_visitor(this); |
2337 | 2357 |
(...skipping 53 matching lines...)
2391 | 2411 |
2392 void MinorMarkCompactCollector::EmptyMarkingDeque() { | 2412 void MinorMarkCompactCollector::EmptyMarkingDeque() { |
2393 while (!marking_deque()->IsEmpty()) { | 2413 while (!marking_deque()->IsEmpty()) { |
2394 HeapObject* object = marking_deque()->Pop(); | 2414 HeapObject* object = marking_deque()->Pop(); |
2395 | 2415 |
2396 DCHECK(!object->IsFiller()); | 2416 DCHECK(!object->IsFiller()); |
2397 DCHECK(object->IsHeapObject()); | 2417 DCHECK(object->IsHeapObject()); |
2398 DCHECK(heap()->Contains(object)); | 2418 DCHECK(heap()->Contains(object)); |
2399 | 2419 |
2400 DCHECK(!(ObjectMarking::IsWhite<MarkBit::NON_ATOMIC>( | 2420 DCHECK(!(ObjectMarking::IsWhite<MarkBit::NON_ATOMIC>( |
2401 object, StateForObject(object)))); | 2421 object, MarkingState::External(object)))); |
2402 | 2422 |
2403 Map* map = object->map(); | 2423 Map* map = object->map(); |
2404 DCHECK((ObjectMarking::IsBlack<MarkBit::NON_ATOMIC>( | 2424 DCHECK((ObjectMarking::IsBlack<MarkBit::NON_ATOMIC>( |
2405 object, StateForObject(object)))); | 2425 object, MarkingState::External(object)))); |
2406 StaticYoungGenerationMarkingVisitor::IterateBody(map, object); | 2426 StaticYoungGenerationMarkingVisitor::IterateBody(map, object); |
2407 } | 2427 } |
2408 } | 2428 } |
2409 | 2429 |
2410 void MinorMarkCompactCollector::CollectGarbage() { | 2430 void MinorMarkCompactCollector::CollectGarbage() { |
2411 MarkLiveObjects(); | 2431 MarkLiveObjects(); |
2412 | 2432 |
2413 #ifdef VERIFY_HEAP | 2433 #ifdef VERIFY_HEAP |
2414 if (FLAG_verify_heap) { | 2434 if (FLAG_verify_heap) { |
2415 YoungGenerationMarkingVerifier verifier(heap()); | 2435 YoungGenerationMarkingVerifier verifier(heap()); |
(...skipping 203 matching lines...)
2619 } | 2639 } |
2620 | 2640 |
2621 | 2641 |
2622 void MarkCompactCollector::ClearSimpleMapTransitions( | 2642 void MarkCompactCollector::ClearSimpleMapTransitions( |
2623 Object* non_live_map_list) { | 2643 Object* non_live_map_list) { |
2624 Object* the_hole_value = heap()->the_hole_value(); | 2644 Object* the_hole_value = heap()->the_hole_value(); |
2625 Object* weak_cell_obj = non_live_map_list; | 2645 Object* weak_cell_obj = non_live_map_list; |
2626 while (weak_cell_obj != Smi::kZero) { | 2646 while (weak_cell_obj != Smi::kZero) { |
2627 WeakCell* weak_cell = WeakCell::cast(weak_cell_obj); | 2647 WeakCell* weak_cell = WeakCell::cast(weak_cell_obj); |
2628 Map* map = Map::cast(weak_cell->value()); | 2648 Map* map = Map::cast(weak_cell->value()); |
2629 DCHECK(ObjectMarking::IsWhite(map)); | 2649 DCHECK(ObjectMarking::IsWhite(map, MarkingState::Internal(map))); |
2630 Object* potential_parent = map->constructor_or_backpointer(); | 2650 Object* potential_parent = map->constructor_or_backpointer(); |
2631 if (potential_parent->IsMap()) { | 2651 if (potential_parent->IsMap()) { |
2632 Map* parent = Map::cast(potential_parent); | 2652 Map* parent = Map::cast(potential_parent); |
2633 if (ObjectMarking::IsBlackOrGrey(parent) && | 2653 if (ObjectMarking::IsBlackOrGrey(parent, |
| 2654 MarkingState::Internal(parent)) && |
2634 parent->raw_transitions() == weak_cell) { | 2655 parent->raw_transitions() == weak_cell) { |
2635 ClearSimpleMapTransition(parent, map); | 2656 ClearSimpleMapTransition(parent, map); |
2636 } | 2657 } |
2637 } | 2658 } |
2638 weak_cell->clear(); | 2659 weak_cell->clear(); |
2639 weak_cell_obj = weak_cell->next(); | 2660 weak_cell_obj = weak_cell->next(); |
2640 weak_cell->clear_next(the_hole_value); | 2661 weak_cell->clear_next(the_hole_value); |
2641 } | 2662 } |
2642 } | 2663 } |
2643 | 2664 |
(...skipping 18 matching lines...)
2662 void MarkCompactCollector::ClearFullMapTransitions() { | 2683 void MarkCompactCollector::ClearFullMapTransitions() { |
2663 HeapObject* undefined = heap()->undefined_value(); | 2684 HeapObject* undefined = heap()->undefined_value(); |
2664 Object* obj = heap()->encountered_transition_arrays(); | 2685 Object* obj = heap()->encountered_transition_arrays(); |
2665 while (obj != Smi::kZero) { | 2686 while (obj != Smi::kZero) { |
2666 TransitionArray* array = TransitionArray::cast(obj); | 2687 TransitionArray* array = TransitionArray::cast(obj); |
2667 int num_transitions = array->number_of_entries(); | 2688 int num_transitions = array->number_of_entries(); |
2668 DCHECK_EQ(TransitionArray::NumberOfTransitions(array), num_transitions); | 2689 DCHECK_EQ(TransitionArray::NumberOfTransitions(array), num_transitions); |
2669 if (num_transitions > 0) { | 2690 if (num_transitions > 0) { |
2670 Map* map = array->GetTarget(0); | 2691 Map* map = array->GetTarget(0); |
2671 Map* parent = Map::cast(map->constructor_or_backpointer()); | 2692 Map* parent = Map::cast(map->constructor_or_backpointer()); |
2672 bool parent_is_alive = ObjectMarking::IsBlackOrGrey(parent); | 2693 bool parent_is_alive = |
| 2694 ObjectMarking::IsBlackOrGrey(parent, MarkingState::Internal(parent)); |
2673 DescriptorArray* descriptors = | 2695 DescriptorArray* descriptors = |
2674 parent_is_alive ? parent->instance_descriptors() : nullptr; | 2696 parent_is_alive ? parent->instance_descriptors() : nullptr; |
2675 bool descriptors_owner_died = | 2697 bool descriptors_owner_died = |
2676 CompactTransitionArray(parent, array, descriptors); | 2698 CompactTransitionArray(parent, array, descriptors); |
2677 if (descriptors_owner_died) { | 2699 if (descriptors_owner_died) { |
2678 TrimDescriptorArray(parent, descriptors); | 2700 TrimDescriptorArray(parent, descriptors); |
2679 } | 2701 } |
2680 } | 2702 } |
2681 obj = array->next_link(); | 2703 obj = array->next_link(); |
2682 array->set_next_link(undefined, SKIP_WRITE_BARRIER); | 2704 array->set_next_link(undefined, SKIP_WRITE_BARRIER); |
2683 } | 2705 } |
2684 heap()->set_encountered_transition_arrays(Smi::kZero); | 2706 heap()->set_encountered_transition_arrays(Smi::kZero); |
2685 } | 2707 } |
2686 | 2708 |
2687 | 2709 |
2688 bool MarkCompactCollector::CompactTransitionArray( | 2710 bool MarkCompactCollector::CompactTransitionArray( |
2689 Map* map, TransitionArray* transitions, DescriptorArray* descriptors) { | 2711 Map* map, TransitionArray* transitions, DescriptorArray* descriptors) { |
2690 int num_transitions = transitions->number_of_entries(); | 2712 int num_transitions = transitions->number_of_entries(); |
2691 bool descriptors_owner_died = false; | 2713 bool descriptors_owner_died = false; |
2692 int transition_index = 0; | 2714 int transition_index = 0; |
2693 // Compact all live transitions to the left. | 2715 // Compact all live transitions to the left. |
2694 for (int i = 0; i < num_transitions; ++i) { | 2716 for (int i = 0; i < num_transitions; ++i) { |
2695 Map* target = transitions->GetTarget(i); | 2717 Map* target = transitions->GetTarget(i); |
2696 DCHECK_EQ(target->constructor_or_backpointer(), map); | 2718 DCHECK_EQ(target->constructor_or_backpointer(), map); |
2697 if (ObjectMarking::IsWhite(target)) { | 2719 if (ObjectMarking::IsWhite(target, MarkingState::Internal(target))) { |
2698 if (descriptors != nullptr && | 2720 if (descriptors != nullptr && |
2699 target->instance_descriptors() == descriptors) { | 2721 target->instance_descriptors() == descriptors) { |
2700 descriptors_owner_died = true; | 2722 descriptors_owner_died = true; |
2701 } | 2723 } |
2702 } else { | 2724 } else { |
2703 if (i != transition_index) { | 2725 if (i != transition_index) { |
2704 Name* key = transitions->GetKey(i); | 2726 Name* key = transitions->GetKey(i); |
2705 transitions->SetKey(transition_index, key); | 2727 transitions->SetKey(transition_index, key); |
2706 Object** key_slot = transitions->GetKeySlot(transition_index); | 2728 Object** key_slot = transitions->GetKeySlot(transition_index); |
2707 RecordSlot(transitions, key_slot, key); | 2729 RecordSlot(transitions, key_slot, key); |
(...skipping 71 matching lines...)
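CompactTransitionArray (partially shown above) slides live transitions left over dead ones with a transition_index write cursor, re-recording each moved key slot for the compaction phase. A standalone sketch of the slide-left pattern (stand-in Map type; slot recording omitted):

    #include <vector>

    struct Map { bool live = false; };

    // Compact live targets to the left; returns the new entry count.
    int CompactTransitions(std::vector<Map*>& targets) {
      int transition_index = 0;
      for (size_t i = 0; i < targets.size(); ++i) {
        if (!targets[i]->live) continue;        // dead target: drop the entry
        targets[transition_index++] = targets[i];
      }
      return transition_index;  // entries beyond this are stale
    }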
2779 FixedArray* enum_indices_cache = descriptors->GetEnumIndicesCache(); | 2801 FixedArray* enum_indices_cache = descriptors->GetEnumIndicesCache(); |
2780 heap_->RightTrimFixedArray(enum_indices_cache, to_trim); | 2802 heap_->RightTrimFixedArray(enum_indices_cache, to_trim); |
2781 } | 2803 } |
2782 | 2804 |
2783 | 2805 |
2784 void MarkCompactCollector::ProcessWeakCollections() { | 2806 void MarkCompactCollector::ProcessWeakCollections() { |
2785 Object* weak_collection_obj = heap()->encountered_weak_collections(); | 2807 Object* weak_collection_obj = heap()->encountered_weak_collections(); |
2786 while (weak_collection_obj != Smi::kZero) { | 2808 while (weak_collection_obj != Smi::kZero) { |
2787 JSWeakCollection* weak_collection = | 2809 JSWeakCollection* weak_collection = |
2788 reinterpret_cast<JSWeakCollection*>(weak_collection_obj); | 2810 reinterpret_cast<JSWeakCollection*>(weak_collection_obj); |
2789 DCHECK(ObjectMarking::IsBlackOrGrey(weak_collection)); | 2811 DCHECK(ObjectMarking::IsBlackOrGrey( |
| 2812 weak_collection, MarkingState::Internal(weak_collection))); |
2790 if (weak_collection->table()->IsHashTable()) { | 2813 if (weak_collection->table()->IsHashTable()) { |
2791 ObjectHashTable* table = ObjectHashTable::cast(weak_collection->table()); | 2814 ObjectHashTable* table = ObjectHashTable::cast(weak_collection->table()); |
2792 for (int i = 0; i < table->Capacity(); i++) { | 2815 for (int i = 0; i < table->Capacity(); i++) { |
2793 if (ObjectMarking::IsBlackOrGrey(HeapObject::cast(table->KeyAt(i)))) { | 2816 HeapObject* heap_object = HeapObject::cast(table->KeyAt(i)); |
| 2817 if (ObjectMarking::IsBlackOrGrey(heap_object, |
| 2818 MarkingState::Internal(heap_object))) { |
2794 Object** key_slot = | 2819 Object** key_slot = |
2795 table->RawFieldOfElementAt(ObjectHashTable::EntryToIndex(i)); | 2820 table->RawFieldOfElementAt(ObjectHashTable::EntryToIndex(i)); |
2796 RecordSlot(table, key_slot, *key_slot); | 2821 RecordSlot(table, key_slot, *key_slot); |
2797 Object** value_slot = | 2822 Object** value_slot = |
2798 table->RawFieldOfElementAt(ObjectHashTable::EntryToValueIndex(i)); | 2823 table->RawFieldOfElementAt(ObjectHashTable::EntryToValueIndex(i)); |
2799 MarkCompactMarkingVisitor::MarkObjectByPointer(this, table, | 2824 MarkCompactMarkingVisitor::MarkObjectByPointer(this, table, |
2800 value_slot); | 2825 value_slot); |
2801 } | 2826 } |
2802 } | 2827 } |
2803 } | 2828 } |
2804 weak_collection_obj = weak_collection->next(); | 2829 weak_collection_obj = weak_collection->next(); |
2805 } | 2830 } |
2806 } | 2831 } |
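ProcessWeakCollections applies the ephemeron rule: a weak-table value is kept alive only through the liveness of its key. A single-pass sketch over a hypothetical flat entry array, with is_marked and mark supplied by the caller; the collector reaches a fixpoint by re-running this step whenever marking discovers new live keys:

    struct Entry { void* key; void* value; };

    template <typename IsMarked, typename Mark>
    void ProcessEphemerons(Entry* entries, int capacity, IsMarked is_marked,
                           Mark mark) {
      for (int i = 0; i < capacity; i++) {
        // A marked key promotes its value; an unmarked key promotes nothing,
        // so the value may still die in this cycle.
        if (is_marked(entries[i].key)) mark(entries[i].value);
      }
    }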
2807 | 2832 |
2808 | 2833 |
2809 void MarkCompactCollector::ClearWeakCollections() { | 2834 void MarkCompactCollector::ClearWeakCollections() { |
2810 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_CLEAR_WEAK_COLLECTIONS); | 2835 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_CLEAR_WEAK_COLLECTIONS); |
2811 Object* weak_collection_obj = heap()->encountered_weak_collections(); | 2836 Object* weak_collection_obj = heap()->encountered_weak_collections(); |
2812 while (weak_collection_obj != Smi::kZero) { | 2837 while (weak_collection_obj != Smi::kZero) { |
2813 JSWeakCollection* weak_collection = | 2838 JSWeakCollection* weak_collection = |
2814 reinterpret_cast<JSWeakCollection*>(weak_collection_obj); | 2839 reinterpret_cast<JSWeakCollection*>(weak_collection_obj); |
2815 DCHECK(ObjectMarking::IsBlackOrGrey(weak_collection)); | 2840 DCHECK(ObjectMarking::IsBlackOrGrey( |
| 2841 weak_collection, MarkingState::Internal(weak_collection))); |
2816 if (weak_collection->table()->IsHashTable()) { | 2842 if (weak_collection->table()->IsHashTable()) { |
2817 ObjectHashTable* table = ObjectHashTable::cast(weak_collection->table()); | 2843 ObjectHashTable* table = ObjectHashTable::cast(weak_collection->table()); |
2818 for (int i = 0; i < table->Capacity(); i++) { | 2844 for (int i = 0; i < table->Capacity(); i++) { |
2819 HeapObject* key = HeapObject::cast(table->KeyAt(i)); | 2845 HeapObject* key = HeapObject::cast(table->KeyAt(i)); |
2820 if (!ObjectMarking::IsBlackOrGrey(key)) { | 2846 if (!ObjectMarking::IsBlackOrGrey(key, MarkingState::Internal(key))) { |
2821 table->RemoveEntry(i); | 2847 table->RemoveEntry(i); |
2822 } | 2848 } |
2823 } | 2849 } |
2824 } | 2850 } |
2825 weak_collection_obj = weak_collection->next(); | 2851 weak_collection_obj = weak_collection->next(); |
2826 weak_collection->set_next(heap()->undefined_value()); | 2852 weak_collection->set_next(heap()->undefined_value()); |
2827 } | 2853 } |
2828 heap()->set_encountered_weak_collections(Smi::kZero); | 2854 heap()->set_encountered_weak_collections(Smi::kZero); |
2829 } | 2855 } |
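ClearWeakCollections is the complementary pass, run once marking has settled: entries whose key did not survive are deleted outright. A terse sketch reusing the hypothetical Entry shape from the previous sketch:

    template <typename IsMarked, typename RemoveEntry>
    void ClearDeadEntries(Entry* entries, int capacity, IsMarked is_marked,
                          RemoveEntry remove_entry) {
      for (int i = 0; i < capacity; i++) {
        if (!is_marked(entries[i].key)) remove_entry(i);  // like table->RemoveEntry(i)
      }
    }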
2830 | 2856 |
(...skipping 20 matching lines...) |
2851 DependentCode::cast(heap->empty_fixed_array()); | 2877 DependentCode::cast(heap->empty_fixed_array()); |
2852 Object* non_live_map_head = Smi::kZero; | 2878 Object* non_live_map_head = Smi::kZero; |
2853 while (weak_cell_obj != Smi::kZero) { | 2879 while (weak_cell_obj != Smi::kZero) { |
2854 WeakCell* weak_cell = reinterpret_cast<WeakCell*>(weak_cell_obj); | 2880 WeakCell* weak_cell = reinterpret_cast<WeakCell*>(weak_cell_obj); |
2855 Object* next_weak_cell = weak_cell->next(); | 2881 Object* next_weak_cell = weak_cell->next(); |
2856 bool clear_value = true; | 2882 bool clear_value = true; |
2857 bool clear_next = true; | 2883 bool clear_next = true; |
2858 // We do not insert cleared weak cells into the list, so the value | 2884 // We do not insert cleared weak cells into the list, so the value |
2859 // cannot be a Smi here. | 2885 // cannot be a Smi here. |
2860 HeapObject* value = HeapObject::cast(weak_cell->value()); | 2886 HeapObject* value = HeapObject::cast(weak_cell->value()); |
2861 if (!ObjectMarking::IsBlackOrGrey(value)) { | 2887 if (!ObjectMarking::IsBlackOrGrey(value, MarkingState::Internal(value))) { |
2862 // Cells for new-space objects embedded in optimized code are wrapped in | 2888 // Cells for new-space objects embedded in optimized code are wrapped in |
2863 // WeakCell and put into Heap::weak_object_to_code_table. | 2889 // WeakCell and put into Heap::weak_object_to_code_table. |
2864 // Such cells do not have any strong references but we want to keep them | 2890 // Such cells do not have any strong references but we want to keep them |
2865 // alive as long as the cell value is alive. | 2891 // alive as long as the cell value is alive. |
2866 // TODO(ulan): remove this once we remove Heap::weak_object_to_code_table. | 2892 // TODO(ulan): remove this once we remove Heap::weak_object_to_code_table. |
2867 if (value->IsCell()) { | 2893 if (value->IsCell()) { |
2868 Object* cell_value = Cell::cast(value)->value(); | 2894 Object* cell_value = Cell::cast(value)->value(); |
2869 if (cell_value->IsHeapObject() && | 2895 if (cell_value->IsHeapObject() && |
2870 ObjectMarking::IsBlackOrGrey(HeapObject::cast(cell_value))) { | 2896 ObjectMarking::IsBlackOrGrey( |
| 2897 HeapObject::cast(cell_value), |
| 2898 MarkingState::Internal(HeapObject::cast(cell_value)))) { |
2871 // Resurrect the cell. | 2899 // Resurrect the cell. |
2872 ObjectMarking::WhiteToBlack(value); | 2900 ObjectMarking::WhiteToBlack(value, MarkingState::Internal(value)); |
2873 Object** slot = HeapObject::RawField(value, Cell::kValueOffset); | 2901 Object** slot = HeapObject::RawField(value, Cell::kValueOffset); |
2874 RecordSlot(value, slot, *slot); | 2902 RecordSlot(value, slot, *slot); |
2875 slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset); | 2903 slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset); |
2876 RecordSlot(weak_cell, slot, *slot); | 2904 RecordSlot(weak_cell, slot, *slot); |
2877 clear_value = false; | 2905 clear_value = false; |
2878 } | 2906 } |
2879 } | 2907 } |
2880 if (value->IsMap()) { | 2908 if (value->IsMap()) { |
2881 // The map is non-live. | 2909 // The map is non-live. |
2882 Map* map = Map::cast(value); | 2910 Map* map = Map::cast(value); |
(...skipping 273 matching lines...) |
3156 public: | 3184 public: |
3157 FullEvacuator(Heap* heap, RecordMigratedSlotVisitor* record_visitor) | 3185 FullEvacuator(Heap* heap, RecordMigratedSlotVisitor* record_visitor) |
3158 : Evacuator(heap, record_visitor) {} | 3186 : Evacuator(heap, record_visitor) {} |
3159 | 3187 |
3160 bool EvacuatePage(Page* page, const MarkingState& state) override; | 3188 bool EvacuatePage(Page* page, const MarkingState& state) override; |
3161 }; | 3189 }; |
3162 | 3190 |
3163 bool FullEvacuator::EvacuatePage(Page* page, const MarkingState& state) { | 3191 bool FullEvacuator::EvacuatePage(Page* page, const MarkingState& state) { |
3164 bool success = false; | 3192 bool success = false; |
3165 DCHECK(page->SweepingDone()); | 3193 DCHECK(page->SweepingDone()); |
3166 intptr_t saved_live_bytes = *state.live_bytes; | 3194 intptr_t saved_live_bytes = state.live_bytes(); |
3167 double evacuation_time = 0.0; | 3195 double evacuation_time = 0.0; |
3168 { | 3196 { |
3169 AlwaysAllocateScope always_allocate(heap()->isolate()); | 3197 AlwaysAllocateScope always_allocate(heap()->isolate()); |
3170 TimedScope timed_scope(&evacuation_time); | 3198 TimedScope timed_scope(&evacuation_time); |
3171 LiveObjectVisitor object_visitor; | 3199 LiveObjectVisitor object_visitor; |
3172 switch (ComputeEvacuationMode(page)) { | 3200 switch (ComputeEvacuationMode(page)) { |
3173 case kObjectsNewToOld: | 3201 case kObjectsNewToOld: |
3174 success = | 3202 success = |
3175 object_visitor.VisitBlackObjects(page, state, &new_space_visitor_, | 3203 object_visitor.VisitBlackObjects(page, state, &new_space_visitor_, |
3176 LiveObjectVisitor::kClearMarkbits); | 3204 LiveObjectVisitor::kClearMarkbits); |
3177 DCHECK(success); | 3205 DCHECK(success); |
3178 ArrayBufferTracker::ProcessBuffers( | 3206 ArrayBufferTracker::ProcessBuffers( |
3179 page, ArrayBufferTracker::kUpdateForwardedRemoveOthers); | 3207 page, ArrayBufferTracker::kUpdateForwardedRemoveOthers); |
3180 break; | 3208 break; |
3181 case kPageNewToOld: | 3209 case kPageNewToOld: |
3182 success = object_visitor.VisitBlackObjects( | 3210 success = object_visitor.VisitBlackObjects( |
3183 page, state, &new_to_old_page_visitor_, | 3211 page, state, &new_to_old_page_visitor_, |
3184 LiveObjectVisitor::kKeepMarking); | 3212 LiveObjectVisitor::kKeepMarking); |
3185 DCHECK(success); | 3213 DCHECK(success); |
3186 new_to_old_page_visitor_.account_moved_bytes(page->LiveBytes()); | 3214 new_to_old_page_visitor_.account_moved_bytes( |
| 3215 MarkingState::Internal(page).live_bytes()); |
3187 // ArrayBufferTracker will be updated during sweeping. | 3216 // ArrayBufferTracker will be updated during sweeping. |
3188 break; | 3217 break; |
3189 case kPageNewToNew: | 3218 case kPageNewToNew: |
3190 success = object_visitor.VisitBlackObjects( | 3219 success = object_visitor.VisitBlackObjects( |
3191 page, state, &new_to_new_page_visitor_, | 3220 page, state, &new_to_new_page_visitor_, |
3192 LiveObjectVisitor::kKeepMarking); | 3221 LiveObjectVisitor::kKeepMarking); |
3193 DCHECK(success); | 3222 DCHECK(success); |
3194 new_to_new_page_visitor_.account_moved_bytes(page->LiveBytes()); | 3223 new_to_new_page_visitor_.account_moved_bytes( |
| 3224 MarkingState::Internal(page).live_bytes()); |
3195 // ArrayBufferTracker will be updated during sweeping. | 3225 // ArrayBufferTracker will be updated during sweeping. |
3196 break; | 3226 break; |
3197 case kObjectsOldToOld: | 3227 case kObjectsOldToOld: |
3198 success = | 3228 success = |
3199 object_visitor.VisitBlackObjects(page, state, &old_space_visitor_, | 3229 object_visitor.VisitBlackObjects(page, state, &old_space_visitor_, |
3200 LiveObjectVisitor::kClearMarkbits); | 3230 LiveObjectVisitor::kClearMarkbits); |
3201 if (!success) { | 3231 if (!success) { |
3202 // Aborted compaction page. We have to record slots here, since we | 3232 // Aborted compaction page. We have to record slots here, since we |
3203 // might not have recorded them in the first place. | 3233 // might not have recorded them in the first place. |
3204 // Note: We mark the page as aborted here to be able to record slots | 3234 // Note: We mark the page as aborted here to be able to record slots |
(...skipping 63 matching lines...) |
3268 class EvacuationJobTraits { | 3298 class EvacuationJobTraits { |
3269 public: | 3299 public: |
3270 typedef int* PerPageData; // Pointer to number of aborted pages. | 3300 typedef int* PerPageData; // Pointer to number of aborted pages. |
3271 typedef Evacuator* PerTaskData; | 3301 typedef Evacuator* PerTaskData; |
3272 | 3302 |
3273 static const bool NeedSequentialFinalization = true; | 3303 static const bool NeedSequentialFinalization = true; |
3274 | 3304 |
3275 static bool ProcessPageInParallel(Heap* heap, PerTaskData evacuator, | 3305 static bool ProcessPageInParallel(Heap* heap, PerTaskData evacuator, |
3276 MemoryChunk* chunk, PerPageData) { | 3306 MemoryChunk* chunk, PerPageData) { |
3277 return evacuator->EvacuatePage(reinterpret_cast<Page*>(chunk), | 3307 return evacuator->EvacuatePage(reinterpret_cast<Page*>(chunk), |
3278 MarkingState::FromPageInternal(chunk)); | 3308 MarkingState::Internal(chunk)); |
3279 } | 3309 } |
3280 | 3310 |
3281 static void FinalizePageSequentially(Heap* heap, MemoryChunk* chunk, | 3311 static void FinalizePageSequentially(Heap* heap, MemoryChunk* chunk, |
3282 bool success, PerPageData data) { | 3312 bool success, PerPageData data) { |
3283 Page* p = static_cast<Page*>(chunk); | 3313 Page* p = static_cast<Page*>(chunk); |
3284 switch (Evacuator::ComputeEvacuationMode(p)) { | 3314 switch (Evacuator::ComputeEvacuationMode(p)) { |
3285 case Evacuator::kPageNewToOld: | 3315 case Evacuator::kPageNewToOld: |
3286 break; | 3316 break; |
3287 case Evacuator::kPageNewToNew: | 3317 case Evacuator::kPageNewToNew: |
3288 DCHECK(success); | 3318 DCHECK(success); |
(...skipping 22 matching lines...) |
3311 }; | 3341 }; |
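The traits above split evacuation into a thread-safe parallel phase (ProcessPageInParallel) and an ordered sequential phase (FinalizePageSequentially) that alone may touch shared collector state such as the aborted-pages counter. A minimal sketch of that contract, assuming a hypothetical PageWork type:

    #include <cstddef>
    #include <vector>

    struct PageWork {
      bool Process();               // may run on any worker thread
      void Finalize(bool success);  // runs later, on one thread, in order
    };

    void RunEvacuationJob(std::vector<PageWork>* work) {
      std::vector<char> results(work->size());  // char sidesteps vector<bool> races
      // Imagine this loop distributed over worker threads; each iteration
      // touches only its own page and its own result slot.
      for (std::size_t i = 0; i < work->size(); ++i)
        results[i] = (*work)[i].Process();
      // Sequential finalization may safely update shared bookkeeping.
      for (std::size_t i = 0; i < work->size(); ++i)
        (*work)[i].Finalize(results[i] != 0);
    }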
3312 | 3342 |
3313 void MarkCompactCollector::EvacuatePagesInParallel() { | 3343 void MarkCompactCollector::EvacuatePagesInParallel() { |
3314 PageParallelJob<EvacuationJobTraits> job( | 3344 PageParallelJob<EvacuationJobTraits> job( |
3315 heap_, heap_->isolate()->cancelable_task_manager(), | 3345 heap_, heap_->isolate()->cancelable_task_manager(), |
3316 &page_parallel_job_semaphore_); | 3346 &page_parallel_job_semaphore_); |
3317 | 3347 |
3318 int abandoned_pages = 0; | 3348 int abandoned_pages = 0; |
3319 intptr_t live_bytes = 0; | 3349 intptr_t live_bytes = 0; |
3320 for (Page* page : old_space_evacuation_pages_) { | 3350 for (Page* page : old_space_evacuation_pages_) { |
3321 live_bytes += page->LiveBytes(); | 3351 live_bytes += MarkingState::Internal(page).live_bytes(); |
3322 job.AddPage(page, &abandoned_pages); | 3352 job.AddPage(page, &abandoned_pages); |
3323 } | 3353 } |
3324 | 3354 |
3325 const bool reduce_memory = heap()->ShouldReduceMemory(); | 3355 const bool reduce_memory = heap()->ShouldReduceMemory(); |
3326 const Address age_mark = heap()->new_space()->age_mark(); | 3356 const Address age_mark = heap()->new_space()->age_mark(); |
3327 for (Page* page : new_space_evacuation_pages_) { | 3357 for (Page* page : new_space_evacuation_pages_) { |
3328 live_bytes += page->LiveBytes(); | 3358 intptr_t live_bytes_on_page = MarkingState::Internal(page).live_bytes(); |
| 3359 live_bytes += live_bytes_on_page; |
3329 if (!reduce_memory && !page->NeverEvacuate() && | 3360 if (!reduce_memory && !page->NeverEvacuate() && |
3330 (page->LiveBytes() > Evacuator::PageEvacuationThreshold()) && | 3361 (live_bytes_on_page > Evacuator::PageEvacuationThreshold()) && |
3331 !page->Contains(age_mark) && | 3362 !page->Contains(age_mark) && |
3332 heap()->CanExpandOldGeneration(page->LiveBytes())) { | 3363 heap()->CanExpandOldGeneration(live_bytes_on_page)) { |
3333 if (page->IsFlagSet(MemoryChunk::NEW_SPACE_BELOW_AGE_MARK)) { | 3364 if (page->IsFlagSet(MemoryChunk::NEW_SPACE_BELOW_AGE_MARK)) { |
3334 EvacuateNewSpacePageVisitor<NEW_TO_OLD>::Move(page); | 3365 EvacuateNewSpacePageVisitor<NEW_TO_OLD>::Move(page); |
3335 } else { | 3366 } else { |
3336 EvacuateNewSpacePageVisitor<NEW_TO_NEW>::Move(page); | 3367 EvacuateNewSpacePageVisitor<NEW_TO_NEW>::Move(page); |
3337 } | 3368 } |
3338 } | 3369 } |
3339 | 3370 |
3340 job.AddPage(page, &abandoned_pages); | 3371 job.AddPage(page, &abandoned_pages); |
3341 } | 3372 } |
3342 DCHECK_GE(job.NumberOfPages(), 1); | 3373 DCHECK_GE(job.NumberOfPages(), 1); |
(...skipping 95 matching lines...) |
3438 space->identity() == CODE_SPACE && p->skip_list() != nullptr; | 3469 space->identity() == CODE_SPACE && p->skip_list() != nullptr; |
3439 SkipList* skip_list = p->skip_list(); | 3470 SkipList* skip_list = p->skip_list(); |
3440 if (rebuild_skip_list) { | 3471 if (rebuild_skip_list) { |
3441 skip_list->Clear(); | 3472 skip_list->Clear(); |
3442 } | 3473 } |
3443 | 3474 |
3444 intptr_t freed_bytes = 0; | 3475 intptr_t freed_bytes = 0; |
3445 intptr_t max_freed_bytes = 0; | 3476 intptr_t max_freed_bytes = 0; |
3446 int curr_region = -1; | 3477 int curr_region = -1; |
3447 | 3478 |
3448 LiveObjectIterator<kBlackObjects> it(p, MarkingState::FromPageInternal(p)); | 3479 LiveObjectIterator<kBlackObjects> it(p, MarkingState::Internal(p)); |
3449 HeapObject* object = NULL; | 3480 HeapObject* object = NULL; |
3450 | 3481 |
3451 while ((object = it.Next()) != NULL) { | 3482 while ((object = it.Next()) != NULL) { |
3452 DCHECK(ObjectMarking::IsBlack(object)); | 3483 DCHECK(ObjectMarking::IsBlack(object, MarkingState::Internal(object))); |
3453 Address free_end = object->address(); | 3484 Address free_end = object->address(); |
3454 if (free_end != free_start) { | 3485 if (free_end != free_start) { |
3455 CHECK_GT(free_end, free_start); | 3486 CHECK_GT(free_end, free_start); |
3456 size_t size = static_cast<size_t>(free_end - free_start); | 3487 size_t size = static_cast<size_t>(free_end - free_start); |
3457 if (free_space_mode == ZAP_FREE_SPACE) { | 3488 if (free_space_mode == ZAP_FREE_SPACE) { |
3458 memset(free_start, 0xcc, size); | 3489 memset(free_start, 0xcc, size); |
3459 } | 3490 } |
3460 if (free_list_mode == REBUILD_FREE_LIST) { | 3491 if (free_list_mode == REBUILD_FREE_LIST) { |
3461 freed_bytes = reinterpret_cast<PagedSpace*>(space)->UnaccountedFree( | 3492 freed_bytes = reinterpret_cast<PagedSpace*>(space)->UnaccountedFree( |
3462 free_start, size); | 3493 free_start, size); |
(...skipping 50 matching lines...) |
3513 static_cast<uint32_t>(p->area_end() - p->address()))); | 3544 static_cast<uint32_t>(p->area_end() - p->address()))); |
3514 } | 3545 } |
3515 } | 3546 } |
3516 | 3547 |
3517 // Clear invalid typed slots after collecting all free ranges. | 3548 // Clear invalid typed slots after collecting all free ranges. |
3518 if (slots_clearing_mode == CLEAR_TYPED_SLOTS) { | 3549 if (slots_clearing_mode == CLEAR_TYPED_SLOTS) { |
3519 p->typed_old_to_new_slots()->RemoveInvaldSlots(free_ranges); | 3550 p->typed_old_to_new_slots()->RemoveInvaldSlots(free_ranges); |
3520 } | 3551 } |
3521 | 3552 |
3522 // Clear the mark bits of that page and reset live bytes count. | 3553 // Clear the mark bits of that page and reset live bytes count. |
3523 p->ClearLiveness(); | 3554 MarkingState::Internal(p).ClearLiveness(); |
3524 | 3555 |
3525 p->concurrent_sweeping_state().SetValue(Page::kSweepingDone); | 3556 p->concurrent_sweeping_state().SetValue(Page::kSweepingDone); |
3526 if (free_list_mode == IGNORE_FREE_LIST) return 0; | 3557 if (free_list_mode == IGNORE_FREE_LIST) return 0; |
3527 return static_cast<int>(FreeList::GuaranteedAllocatable(max_freed_bytes)); | 3558 return static_cast<int>(FreeList::GuaranteedAllocatable(max_freed_bytes)); |
3528 } | 3559 } |
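The core of RawSweep is the gap walk: live (black) objects are visited in address order, and every span between the end of one object and the start of the next is handed back as free memory (zapped, free-listed, or ignored, depending on the mode flags). A sketch over a precomputed, sorted list of live ranges, with Free() as an assumed callback:

    #include <cstddef>
    #include <utility>
    #include <vector>

    void Free(char* start, std::size_t size);  // assumed: return a gap to the free list

    void SweepGaps(char* area_start, char* area_end,
                   const std::vector<std::pair<char*, std::size_t>>& live) {
      char* free_start = area_start;
      for (const auto& object : live) {  // (address, size) pairs, address-sorted
        if (object.first > free_start)
          Free(free_start, static_cast<std::size_t>(object.first - free_start));
        free_start = object.first + object.second;
      }
      if (area_end > free_start)  // trailing gap after the last live object
        Free(free_start, static_cast<std::size_t>(area_end - free_start));
    }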
3529 | 3560 |
3530 void MarkCompactCollector::InvalidateCode(Code* code) { | 3561 void MarkCompactCollector::InvalidateCode(Code* code) { |
3531 Page* page = Page::FromAddress(code->address()); | 3562 Page* page = Page::FromAddress(code->address()); |
3532 Address start = code->instruction_start(); | 3563 Address start = code->instruction_start(); |
3533 Address end = code->address() + code->Size(); | 3564 Address end = code->address() + code->Size(); |
3534 | 3565 |
3535 RememberedSet<OLD_TO_NEW>::RemoveRangeTyped(page, start, end); | 3566 RememberedSet<OLD_TO_NEW>::RemoveRangeTyped(page, start, end); |
3536 | 3567 |
3537 if (heap_->incremental_marking()->IsCompacting() && | 3568 if (heap_->incremental_marking()->IsCompacting() && |
3538 !ShouldSkipEvacuationSlotRecording(code)) { | 3569 !ShouldSkipEvacuationSlotRecording(code)) { |
3539 DCHECK(compacting_); | 3570 DCHECK(compacting_); |
3540 | 3571 |
3541 // If the object is white, then no slots were recorded on it yet. | 3572 // If the object is white, then no slots were recorded on it yet. |
3542 if (ObjectMarking::IsWhite(code)) return; | 3573 if (ObjectMarking::IsWhite(code, MarkingState::Internal(code))) return; |
3543 | 3574 |
3544 // Ignore all slots that might have been recorded in the body of the | 3575 // Ignore all slots that might have been recorded in the body of the |
3545 // deoptimized code object. Assumption: no slots will be recorded for | 3576 // deoptimized code object. Assumption: no slots will be recorded for |
3546 // this object after invalidating it. | 3577 // this object after invalidating it. |
3547 RememberedSet<OLD_TO_OLD>::RemoveRangeTyped(page, start, end); | 3578 RememberedSet<OLD_TO_OLD>::RemoveRangeTyped(page, start, end); |
3548 } | 3579 } |
3549 } | 3580 } |
3550 | 3581 |
3551 | 3582 |
3552 // Return true if the given code is deoptimized or will be deoptimized. | 3583 // Return true if the given code is deoptimized or will be deoptimized. |
3553 bool MarkCompactCollector::WillBeDeoptimized(Code* code) { | 3584 bool MarkCompactCollector::WillBeDeoptimized(Code* code) { |
3554 return code->is_optimized_code() && code->marked_for_deoptimization(); | 3585 return code->is_optimized_code() && code->marked_for_deoptimization(); |
3555 } | 3586 } |
3556 | 3587 |
3557 void MarkCompactCollector::RecordLiveSlotsOnPage(Page* page) { | 3588 void MarkCompactCollector::RecordLiveSlotsOnPage(Page* page) { |
3558 EvacuateRecordOnlyVisitor visitor(heap()); | 3589 EvacuateRecordOnlyVisitor visitor(heap()); |
3559 LiveObjectVisitor object_visitor; | 3590 LiveObjectVisitor object_visitor; |
3560 object_visitor.VisitBlackObjects(page, MarkingState::FromPageInternal(page), | 3591 object_visitor.VisitBlackObjects(page, MarkingState::Internal(page), &visitor, |
3561 &visitor, LiveObjectVisitor::kKeepMarking); | 3592 LiveObjectVisitor::kKeepMarking); |
3562 } | 3593 } |
3563 | 3594 |
3564 template <class Visitor> | 3595 template <class Visitor> |
3565 bool LiveObjectVisitor::VisitBlackObjects(MemoryChunk* chunk, | 3596 bool LiveObjectVisitor::VisitBlackObjects(MemoryChunk* chunk, |
3566 const MarkingState& state, | 3597 const MarkingState& state, |
3567 Visitor* visitor, | 3598 Visitor* visitor, |
3568 IterationMode iteration_mode) { | 3599 IterationMode iteration_mode) { |
3569 LiveObjectIterator<kBlackObjects> it(chunk, state); | 3600 LiveObjectIterator<kBlackObjects> it(chunk, state); |
3570 HeapObject* object = nullptr; | 3601 HeapObject* object = nullptr; |
3571 while ((object = it.Next()) != nullptr) { | 3602 while ((object = it.Next()) != nullptr) { |
3572 DCHECK(ObjectMarking::IsBlack(object, state)); | 3603 DCHECK(ObjectMarking::IsBlack(object, state)); |
3573 if (!visitor->Visit(object)) { | 3604 if (!visitor->Visit(object)) { |
3574 if (iteration_mode == kClearMarkbits) { | 3605 if (iteration_mode == kClearMarkbits) { |
3575 state.bitmap->ClearRange( | 3606 state.bitmap()->ClearRange( |
3576 chunk->AddressToMarkbitIndex(chunk->area_start()), | 3607 chunk->AddressToMarkbitIndex(chunk->area_start()), |
3577 chunk->AddressToMarkbitIndex(object->address())); | 3608 chunk->AddressToMarkbitIndex(object->address())); |
3578 if (chunk->old_to_new_slots() != nullptr) { | 3609 if (chunk->old_to_new_slots() != nullptr) { |
3579 chunk->old_to_new_slots()->RemoveRange( | 3610 chunk->old_to_new_slots()->RemoveRange( |
3580 0, static_cast<int>(object->address() - chunk->address()), | 3611 0, static_cast<int>(object->address() - chunk->address()), |
3581 SlotSet::PREFREE_EMPTY_BUCKETS); | 3612 SlotSet::PREFREE_EMPTY_BUCKETS); |
3582 } | 3613 } |
3583 if (chunk->typed_old_to_new_slots() != nullptr) { | 3614 if (chunk->typed_old_to_new_slots() != nullptr) { |
3584 RememberedSet<OLD_TO_NEW>::RemoveRangeTyped(chunk, chunk->address(), | 3615 RememberedSet<OLD_TO_NEW>::RemoveRangeTyped(chunk, chunk->address(), |
3585 object->address()); | 3616 object->address()); |
(...skipping 180 matching lines...) |
3766 // Unfortunately, we do not know about the slot. It could be in a | 3797 // Unfortunately, we do not know about the slot. It could be in a |
3767 // just-freed free space object. | 3798 // just-freed free space object. |
3768 if (heap->InToSpace(slot->Value())) { | 3799 if (heap->InToSpace(slot->Value())) { |
3769 return KEEP_SLOT; | 3800 return KEEP_SLOT; |
3770 } | 3801 } |
3771 } else if (heap->InToSpace(slot_reference)) { | 3802 } else if (heap->InToSpace(slot_reference)) { |
3772 // Slots can point to "to" space if the page has been moved, or if the | 3803 // Slots can point to "to" space if the page has been moved, or if the |
3773 // slot has been recorded multiple times in the remembered set. Since | 3804 // slot has been recorded multiple times in the remembered set. Since |
3774 // there is no forwarding information present, we need to check the | 3805 // there is no forwarding information present, we need to check the |
3775 // markbits to determine liveness. | 3806 // markbits to determine liveness. |
3776 if (ObjectMarking::IsBlack(reinterpret_cast<HeapObject*>(slot_reference))) | 3807 HeapObject* heap_object = reinterpret_cast<HeapObject*>(slot_reference); |
| 3808 if (ObjectMarking::IsBlack(heap_object, |
| 3809 MarkingState::Internal(heap_object))) |
3777 return KEEP_SLOT; | 3810 return KEEP_SLOT; |
3778 } else { | 3811 } else { |
3779 DCHECK(!heap->InNewSpace(slot_reference)); | 3812 DCHECK(!heap->InNewSpace(slot_reference)); |
3780 } | 3813 } |
3781 return REMOVE_SLOT; | 3814 return REMOVE_SLOT; |
3782 } | 3815 } |
3783 }; | 3816 }; |
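The visitor above boils down to one per-slot decision: keep a remembered slot only if its target survives, either because the target was evacuated (the slot is rewritten to the forwarded copy) or because the markbits show it live in place. A sketch with hypothetical forwarding and markbit helpers:

    enum SlotCallbackResult { KEEP_SLOT, REMOVE_SLOT };

    bool IsForwarded(void* object);         // assumed: map-word forwarding check
    void* ForwardingAddress(void* object);  // assumed: location of the moved copy
    bool IsMarkedBlack(void* object);       // assumed: markbit lookup

    SlotCallbackResult CheckAndUpdateSlot(void** slot) {
      void* target = *slot;
      if (IsForwarded(target)) {
        *slot = ForwardingAddress(target);  // repoint at the surviving copy
        return KEEP_SLOT;
      }
      return IsMarkedBlack(target) ? KEEP_SLOT : REMOVE_SLOT;
    }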
3784 | 3817 |
3785 int NumberOfPointerUpdateTasks(int pages) { | 3818 int NumberOfPointerUpdateTasks(int pages) { |
3786 if (!FLAG_parallel_pointer_update) return 1; | 3819 if (!FLAG_parallel_pointer_update) return 1; |
(...skipping 45 matching lines...) |
3832 Map* map = object->map(); | 3865 Map* map = object->map(); |
3833 int size = object->SizeFromMap(map); | 3866 int size = object->SizeFromMap(map); |
3834 object->IterateBody(map->instance_type(), size, visitor); | 3867 object->IterateBody(map->instance_type(), size, visitor); |
3835 cur += size; | 3868 cur += size; |
3836 } | 3869 } |
3837 } | 3870 } |
3838 | 3871 |
3839 static void ProcessPageInParallelVisitLive(Heap* heap, PerTaskData visitor, | 3872 static void ProcessPageInParallelVisitLive(Heap* heap, PerTaskData visitor, |
3840 MemoryChunk* chunk, | 3873 MemoryChunk* chunk, |
3841 PerPageData limits) { | 3874 PerPageData limits) { |
3842 LiveObjectIterator<kBlackObjects> it(chunk, | 3875 LiveObjectIterator<kBlackObjects> it(chunk, MarkingState::Internal(chunk)); |
3843 MarkingState::FromPageInternal(chunk)); | |
3844 HeapObject* object = NULL; | 3876 HeapObject* object = NULL; |
3845 while ((object = it.Next()) != NULL) { | 3877 while ((object = it.Next()) != NULL) { |
3846 Map* map = object->map(); | 3878 Map* map = object->map(); |
3847 int size = object->SizeFromMap(map); | 3879 int size = object->SizeFromMap(map); |
3848 object->IterateBody(map->instance_type(), size, visitor); | 3880 object->IterateBody(map->instance_type(), size, visitor); |
3849 } | 3881 } |
3850 } | 3882 } |
3851 }; | 3883 }; |
3852 | 3884 |
3853 void UpdateToSpacePointersInParallel(Heap* heap, base::Semaphore* semaphore) { | 3885 void UpdateToSpacePointersInParallel(Heap* heap, base::Semaphore* semaphore) { |
(...skipping 43 matching lines...) |
3897 EvacuationWeakObjectRetainer evacuation_object_retainer; | 3929 EvacuationWeakObjectRetainer evacuation_object_retainer; |
3898 heap()->ProcessWeakListRoots(&evacuation_object_retainer); | 3930 heap()->ProcessWeakListRoots(&evacuation_object_retainer); |
3899 } | 3931 } |
3900 } | 3932 } |
3901 | 3933 |
3902 | 3934 |
3903 void MarkCompactCollector::ReleaseEvacuationCandidates() { | 3935 void MarkCompactCollector::ReleaseEvacuationCandidates() { |
3904 for (Page* p : old_space_evacuation_pages_) { | 3936 for (Page* p : old_space_evacuation_pages_) { |
3905 if (!p->IsEvacuationCandidate()) continue; | 3937 if (!p->IsEvacuationCandidate()) continue; |
3906 PagedSpace* space = static_cast<PagedSpace*>(p->owner()); | 3938 PagedSpace* space = static_cast<PagedSpace*>(p->owner()); |
3907 p->ResetLiveBytes(); | 3939 MarkingState::Internal(p).SetLiveBytes(0); |
3908 CHECK(p->SweepingDone()); | 3940 CHECK(p->SweepingDone()); |
3909 space->ReleasePage(p); | 3941 space->ReleasePage(p); |
3910 } | 3942 } |
3911 old_space_evacuation_pages_.Rewind(0); | 3943 old_space_evacuation_pages_.Rewind(0); |
3912 compacting_ = false; | 3944 compacting_ = false; |
3913 heap()->memory_allocator()->unmapper()->FreeQueuedChunks(); | 3945 heap()->memory_allocator()->unmapper()->FreeQueuedChunks(); |
3914 } | 3946 } |
3915 | 3947 |
3916 int MarkCompactCollector::Sweeper::ParallelSweepSpace(AllocationSpace identity, | 3948 int MarkCompactCollector::Sweeper::ParallelSweepSpace(AllocationSpace identity, |
3917 int required_freed_bytes, | 3949 int required_freed_bytes, |
(...skipping 50 matching lines...) |
3968 | 4000 |
3969 void MarkCompactCollector::Sweeper::AddPage(AllocationSpace space, Page* page) { | 4001 void MarkCompactCollector::Sweeper::AddPage(AllocationSpace space, Page* page) { |
3970 DCHECK(!FLAG_concurrent_sweeping || !AreSweeperTasksRunning()); | 4002 DCHECK(!FLAG_concurrent_sweeping || !AreSweeperTasksRunning()); |
3971 PrepareToBeSweptPage(space, page); | 4003 PrepareToBeSweptPage(space, page); |
3972 sweeping_list_[space].push_back(page); | 4004 sweeping_list_[space].push_back(page); |
3973 } | 4005 } |
3974 | 4006 |
3975 void MarkCompactCollector::Sweeper::PrepareToBeSweptPage(AllocationSpace space, | 4007 void MarkCompactCollector::Sweeper::PrepareToBeSweptPage(AllocationSpace space, |
3976 Page* page) { | 4008 Page* page) { |
3977 page->concurrent_sweeping_state().SetValue(Page::kSweepingPending); | 4009 page->concurrent_sweeping_state().SetValue(Page::kSweepingPending); |
3978 DCHECK_GE(page->area_size(), static_cast<size_t>(page->LiveBytes())); | 4010 DCHECK_GE(page->area_size(), |
3979 size_t to_sweep = page->area_size() - page->LiveBytes(); | 4011 static_cast<size_t>(MarkingState::Internal(page).live_bytes())); |
| 4012 size_t to_sweep = |
| 4013 page->area_size() - MarkingState::Internal(page).live_bytes(); |
3980 if (space != NEW_SPACE) | 4014 if (space != NEW_SPACE) |
3981 heap_->paged_space(space)->accounting_stats_.ShrinkSpace(to_sweep); | 4015 heap_->paged_space(space)->accounting_stats_.ShrinkSpace(to_sweep); |
3982 } | 4016 } |
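PrepareToBeSweptPage books the reclaimable bytes out of the space up front: to_sweep is everything in the usable page area that marking did not record as live. A worked example under assumed figures:

    #include <cstddef>

    std::size_t ToSweepBytes() {
      // Assumed figures: 512 KB of usable area, 96 KB of live (black) data.
      std::size_t area_size = 512 * 1024;  // page->area_size()
      std::size_t live_bytes = 96 * 1024;  // live bytes recorded by marking
      return area_size - live_bytes;       // 425984 bytes == 416 KB to sweep
    }

    // ShrinkSpace() then removes these bytes from the space's accounting until
    // the sweeper actually returns them to the free list.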
3983 | 4017 |
3984 Page* MarkCompactCollector::Sweeper::GetSweepingPageSafe( | 4018 Page* MarkCompactCollector::Sweeper::GetSweepingPageSafe( |
3985 AllocationSpace space) { | 4019 AllocationSpace space) { |
3986 base::LockGuard<base::Mutex> guard(&mutex_); | 4020 base::LockGuard<base::Mutex> guard(&mutex_); |
3987 Page* page = nullptr; | 4021 Page* page = nullptr; |
3988 if (!sweeping_list_[space].empty()) { | 4022 if (!sweeping_list_[space].empty()) { |
3989 page = sweeping_list_[space].front(); | 4023 page = sweeping_list_[space].front(); |
(...skipping 31 matching lines...) |
4021 // (in the free list) dropped again. Since we only use the flag for | 4055 // (in the free list) dropped again. Since we only use the flag for |
4022 // testing, this is fine. | 4056 // testing, this is fine. |
4023 p->concurrent_sweeping_state().SetValue(Page::kSweepingInProgress); | 4057 p->concurrent_sweeping_state().SetValue(Page::kSweepingInProgress); |
4024 Sweeper::RawSweep(p, Sweeper::IGNORE_FREE_LIST, | 4058 Sweeper::RawSweep(p, Sweeper::IGNORE_FREE_LIST, |
4025 Heap::ShouldZapGarbage() ? Sweeper::ZAP_FREE_SPACE | 4059 Heap::ShouldZapGarbage() ? Sweeper::ZAP_FREE_SPACE |
4026 : Sweeper::IGNORE_FREE_SPACE); | 4060 : Sweeper::IGNORE_FREE_SPACE); |
4027 continue; | 4061 continue; |
4028 } | 4062 } |
4029 | 4063 |
4030 // One unused page is kept; all further unused pages are released before being swept. | 4064 // One unused page is kept; all further unused pages are released before being swept. |
4031 if (p->LiveBytes() == 0) { | 4065 if (MarkingState::Internal(p).live_bytes() == 0) { |
4032 if (unused_page_present) { | 4066 if (unused_page_present) { |
4033 if (FLAG_gc_verbose) { | 4067 if (FLAG_gc_verbose) { |
4034 PrintIsolate(isolate(), "sweeping: released page: %p", | 4068 PrintIsolate(isolate(), "sweeping: released page: %p", |
4035 static_cast<void*>(p)); | 4069 static_cast<void*>(p)); |
4036 } | 4070 } |
4037 ArrayBufferTracker::FreeAll(p); | 4071 ArrayBufferTracker::FreeAll(p); |
4038 space->ReleasePage(p); | 4072 space->ReleasePage(p); |
4039 continue; | 4073 continue; |
4040 } | 4074 } |
4041 unused_page_present = true; | 4075 unused_page_present = true; |
(...skipping 59 matching lines...) |
4101 } | 4135 } |
4102 } | 4136 } |
4103 | 4137 |
4104 | 4138 |
4105 void MarkCompactCollector::RecordCodeTargetPatch(Address pc, Code* target) { | 4139 void MarkCompactCollector::RecordCodeTargetPatch(Address pc, Code* target) { |
4106 DCHECK(heap()->gc_state() == Heap::MARK_COMPACT); | 4140 DCHECK(heap()->gc_state() == Heap::MARK_COMPACT); |
4107 if (is_compacting()) { | 4141 if (is_compacting()) { |
4108 Code* host = | 4142 Code* host = |
4109 isolate()->inner_pointer_to_code_cache()->GcSafeFindCodeForInnerPointer( | 4143 isolate()->inner_pointer_to_code_cache()->GcSafeFindCodeForInnerPointer( |
4110 pc); | 4144 pc); |
4111 if (ObjectMarking::IsBlack(host)) { | 4145 if (ObjectMarking::IsBlack(host, MarkingState::Internal(host))) { |
4112 RelocInfo rinfo(pc, RelocInfo::CODE_TARGET, 0, host); | 4146 RelocInfo rinfo(pc, RelocInfo::CODE_TARGET, 0, host); |
4113 // The target is always in old space, we don't have to record the slot in | 4147 // The target is always in old space, we don't have to record the slot in |
4114 // the old-to-new remembered set. | 4148 // the old-to-new remembered set. |
4115 DCHECK(!heap()->InNewSpace(target)); | 4149 DCHECK(!heap()->InNewSpace(target)); |
4116 RecordRelocSlot(host, &rinfo, target); | 4150 RecordRelocSlot(host, &rinfo, target); |
4117 } | 4151 } |
4118 } | 4152 } |
4119 } | 4153 } |
4120 | 4154 |
4121 } // namespace internal | 4155 } // namespace internal |
4122 } // namespace v8 | 4156 } // namespace v8 |