| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/heap/mark-compact.h" | 5 #include "src/heap/mark-compact.h" |
| 6 | 6 |
| 7 #include "src/base/atomicops.h" | 7 #include "src/base/atomicops.h" |
| 8 #include "src/base/bits.h" | 8 #include "src/base/bits.h" |
| 9 #include "src/base/sys-info.h" | 9 #include "src/base/sys-info.h" |
| 10 #include "src/code-stubs.h" | 10 #include "src/code-stubs.h" |
| (...skipping 89 matching lines...) |
| 100 | 100 |
| 101 static void VerifyMarking(Heap* heap, Address bottom, Address top) { | 101 static void VerifyMarking(Heap* heap, Address bottom, Address top) { |
| 102 VerifyMarkingVisitor visitor(heap); | 102 VerifyMarkingVisitor visitor(heap); |
| 103 HeapObject* object; | 103 HeapObject* object; |
| 104 Address next_object_must_be_here_or_later = bottom; | 104 Address next_object_must_be_here_or_later = bottom; |
| 105 for (Address current = bottom; current < top;) { | 105 for (Address current = bottom; current < top;) { |
| 106 object = HeapObject::FromAddress(current); | 106 object = HeapObject::FromAddress(current); |
| 107 // One word fillers at the end of a black area can be grey. | 107 // One word fillers at the end of a black area can be grey. |
| 108 if (MarkCompactCollector::IsMarked(object) && | 108 if (MarkCompactCollector::IsMarked(object) && |
| 109 object->map() != heap->one_pointer_filler_map()) { | 109 object->map() != heap->one_pointer_filler_map()) { |
| 110 CHECK(ObjectMarking::IsBlack(object)); | 110 CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(object))); |
| 111 CHECK(current >= next_object_must_be_here_or_later); | 111 CHECK(current >= next_object_must_be_here_or_later); |
| 112 object->Iterate(&visitor); | 112 object->Iterate(&visitor); |
| 113 next_object_must_be_here_or_later = current + object->Size(); | 113 next_object_must_be_here_or_later = current + object->Size(); |
| 114 // The object is either part of a black area of black allocation or a | 114 // The object is either part of a black area of black allocation or a |
| 115 // regular black object | 115 // regular black object |
| 116 Page* page = Page::FromAddress(current); | 116 Page* page = Page::FromAddress(current); |
| 117 CHECK( | 117 CHECK( |
| 118 page->markbits()->AllBitsSetInRange( | 118 page->markbits()->AllBitsSetInRange( |
| 119 page->AddressToMarkbitIndex(current), | 119 page->AddressToMarkbitIndex(current), |
| 120 page->AddressToMarkbitIndex(next_object_must_be_here_or_later)) || | 120 page->AddressToMarkbitIndex(next_object_must_be_here_or_later)) || |
| (...skipping 220 matching lines...) |
| 341 | 341 |
| 342 | 342 |
| 343 void MarkCompactCollector::VerifyMarkbitsAreClean() { | 343 void MarkCompactCollector::VerifyMarkbitsAreClean() { |
| 344 VerifyMarkbitsAreClean(heap_->old_space()); | 344 VerifyMarkbitsAreClean(heap_->old_space()); |
| 345 VerifyMarkbitsAreClean(heap_->code_space()); | 345 VerifyMarkbitsAreClean(heap_->code_space()); |
| 346 VerifyMarkbitsAreClean(heap_->map_space()); | 346 VerifyMarkbitsAreClean(heap_->map_space()); |
| 347 VerifyMarkbitsAreClean(heap_->new_space()); | 347 VerifyMarkbitsAreClean(heap_->new_space()); |
| 348 | 348 |
| 349 LargeObjectIterator it(heap_->lo_space()); | 349 LargeObjectIterator it(heap_->lo_space()); |
| 350 for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) { | 350 for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) { |
| 351 CHECK(ObjectMarking::IsWhite(obj)); | 351 MarkBit mark_bit = ObjectMarking::MarkBitFrom(obj); |
| | 352 CHECK(Marking::IsWhite(mark_bit)); |
| 352 CHECK_EQ(0, Page::FromAddress(obj->address())->LiveBytes()); | 353 CHECK_EQ(0, Page::FromAddress(obj->address())->LiveBytes()); |
| 353 } | 354 } |
| 354 } | 355 } |
| 355 | 356 |
| 356 void MarkCompactCollector::VerifyWeakEmbeddedObjectsInCode() { | 357 void MarkCompactCollector::VerifyWeakEmbeddedObjectsInCode() { |
| 357 HeapObjectIterator code_iterator(heap()->code_space()); | 358 HeapObjectIterator code_iterator(heap()->code_space()); |
| 358 for (HeapObject* obj = code_iterator.Next(); obj != NULL; | 359 for (HeapObject* obj = code_iterator.Next(); obj != NULL; |
| 359 obj = code_iterator.Next()) { | 360 obj = code_iterator.Next()) { |
| 360 Code* code = Code::cast(obj); | 361 Code* code = Code::cast(obj); |
| 361 if (!code->is_optimized_code()) continue; | 362 if (!code->is_optimized_code()) continue; |
| (...skipping 28 matching lines...) |
| 390 | 391 |
| 391 | 392 |
| 392 void MarkCompactCollector::ClearMarkbits() { | 393 void MarkCompactCollector::ClearMarkbits() { |
| 393 ClearMarkbitsInPagedSpace(heap_->code_space()); | 394 ClearMarkbitsInPagedSpace(heap_->code_space()); |
| 394 ClearMarkbitsInPagedSpace(heap_->map_space()); | 395 ClearMarkbitsInPagedSpace(heap_->map_space()); |
| 395 ClearMarkbitsInPagedSpace(heap_->old_space()); | 396 ClearMarkbitsInPagedSpace(heap_->old_space()); |
| 396 ClearMarkbitsInNewSpace(heap_->new_space()); | 397 ClearMarkbitsInNewSpace(heap_->new_space()); |
| 397 | 398 |
| 398 LargeObjectIterator it(heap_->lo_space()); | 399 LargeObjectIterator it(heap_->lo_space()); |
| 399 for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) { | 400 for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) { |
| 400 ObjectMarking::ClearMarkBit(obj); | 401 Marking::MarkWhite(ObjectMarking::MarkBitFrom(obj)); |
| 401 MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address()); | 402 MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address()); |
| 402 chunk->ResetProgressBar(); | 403 chunk->ResetProgressBar(); |
| 403 chunk->ResetLiveBytes(); | 404 chunk->ResetLiveBytes(); |
| 404 } | 405 } |
| 405 } | 406 } |
| 406 | 407 |
| 407 class MarkCompactCollector::Sweeper::SweeperTask : public v8::Task { | 408 class MarkCompactCollector::Sweeper::SweeperTask : public v8::Task { |
| 408 public: | 409 public: |
| 409 SweeperTask(Sweeper* sweeper, base::Semaphore* pending_sweeper_tasks, | 410 SweeperTask(Sweeper* sweeper, base::Semaphore* pending_sweeper_tasks, |
| 410 AllocationSpace space_to_start) | 411 AllocationSpace space_to_start) |
| (...skipping 490 matching lines...) |
| 901 | 902 |
| 902 JSFunction* candidate = jsfunction_candidates_head_; | 903 JSFunction* candidate = jsfunction_candidates_head_; |
| 903 JSFunction* next_candidate; | 904 JSFunction* next_candidate; |
| 904 while (candidate != NULL) { | 905 while (candidate != NULL) { |
| 905 next_candidate = GetNextCandidate(candidate); | 906 next_candidate = GetNextCandidate(candidate); |
| 906 ClearNextCandidate(candidate, undefined); | 907 ClearNextCandidate(candidate, undefined); |
| 907 | 908 |
| 908 SharedFunctionInfo* shared = candidate->shared(); | 909 SharedFunctionInfo* shared = candidate->shared(); |
| 909 | 910 |
| 910 Code* code = shared->code(); | 911 Code* code = shared->code(); |
| 911 if (ObjectMarking::IsWhite(code)) { | 912 MarkBit code_mark = ObjectMarking::MarkBitFrom(code); |
| | 913 if (Marking::IsWhite(code_mark)) { |
| 912 if (FLAG_trace_code_flushing && shared->is_compiled()) { | 914 if (FLAG_trace_code_flushing && shared->is_compiled()) { |
| 913 PrintF("[code-flushing clears: "); | 915 PrintF("[code-flushing clears: "); |
| 914 shared->ShortPrint(); | 916 shared->ShortPrint(); |
| 915 PrintF(" - age: %d]\n", code->GetAge()); | 917 PrintF(" - age: %d]\n", code->GetAge()); |
| 916 } | 918 } |
| 917 // Always flush the optimized code map if there is one. | 919 // Always flush the optimized code map if there is one. |
| 918 if (!shared->OptimizedCodeMapIsCleared()) { | 920 if (!shared->OptimizedCodeMapIsCleared()) { |
| 919 shared->ClearOptimizedCodeMap(); | 921 shared->ClearOptimizedCodeMap(); |
| 920 } | 922 } |
| 921 if (shared->HasBytecodeArray()) { | 923 if (shared->HasBytecodeArray()) { |
| 922 shared->set_code(interpreter_entry_trampoline); | 924 shared->set_code(interpreter_entry_trampoline); |
| 923 candidate->set_code(interpreter_entry_trampoline); | 925 candidate->set_code(interpreter_entry_trampoline); |
| 924 } else { | 926 } else { |
| 925 shared->set_code(lazy_compile); | 927 shared->set_code(lazy_compile); |
| 926 candidate->set_code(lazy_compile); | 928 candidate->set_code(lazy_compile); |
| 927 } | 929 } |
| 928 } else { | 930 } else { |
| 929 DCHECK(ObjectMarking::IsBlack(code)); | 931 DCHECK(Marking::IsBlack(code_mark)); |
| 930 candidate->set_code(code); | 932 candidate->set_code(code); |
| 931 } | 933 } |
| 932 | 934 |
| 933 // We are in the middle of a GC cycle so the write barrier in the code | 935 // We are in the middle of a GC cycle so the write barrier in the code |
| 934 // setter did not record the slot update and we have to do that manually. | 936 // setter did not record the slot update and we have to do that manually. |
| 935 Address slot = candidate->address() + JSFunction::kCodeEntryOffset; | 937 Address slot = candidate->address() + JSFunction::kCodeEntryOffset; |
| 936 Code* target = Code::cast(Code::GetObjectFromEntryAddress(slot)); | 938 Code* target = Code::cast(Code::GetObjectFromEntryAddress(slot)); |
| 937 isolate_->heap()->mark_compact_collector()->RecordCodeEntrySlot( | 939 isolate_->heap()->mark_compact_collector()->RecordCodeEntrySlot( |
| 938 candidate, slot, target); | 940 candidate, slot, target); |
| 939 | 941 |
| (...skipping 13 matching lines...) |
| 953 Code* lazy_compile = isolate_->builtins()->builtin(Builtins::kCompileLazy); | 955 Code* lazy_compile = isolate_->builtins()->builtin(Builtins::kCompileLazy); |
| 954 Code* interpreter_entry_trampoline = | 956 Code* interpreter_entry_trampoline = |
| 955 isolate_->builtins()->builtin(Builtins::kInterpreterEntryTrampoline); | 957 isolate_->builtins()->builtin(Builtins::kInterpreterEntryTrampoline); |
| 956 SharedFunctionInfo* candidate = shared_function_info_candidates_head_; | 958 SharedFunctionInfo* candidate = shared_function_info_candidates_head_; |
| 957 SharedFunctionInfo* next_candidate; | 959 SharedFunctionInfo* next_candidate; |
| 958 while (candidate != NULL) { | 960 while (candidate != NULL) { |
| 959 next_candidate = GetNextCandidate(candidate); | 961 next_candidate = GetNextCandidate(candidate); |
| 960 ClearNextCandidate(candidate); | 962 ClearNextCandidate(candidate); |
| 961 | 963 |
| 962 Code* code = candidate->code(); | 964 Code* code = candidate->code(); |
| 963 if (ObjectMarking::IsWhite(code)) { | 965 MarkBit code_mark = ObjectMarking::MarkBitFrom(code); |
| | 966 if (Marking::IsWhite(code_mark)) { |
| 964 if (FLAG_trace_code_flushing && candidate->is_compiled()) { | 967 if (FLAG_trace_code_flushing && candidate->is_compiled()) { |
| 965 PrintF("[code-flushing clears: "); | 968 PrintF("[code-flushing clears: "); |
| 966 candidate->ShortPrint(); | 969 candidate->ShortPrint(); |
| 967 PrintF(" - age: %d]\n", code->GetAge()); | 970 PrintF(" - age: %d]\n", code->GetAge()); |
| 968 } | 971 } |
| 969 // Always flush the optimized code map if there is one. | 972 // Always flush the optimized code map if there is one. |
| 970 if (!candidate->OptimizedCodeMapIsCleared()) { | 973 if (!candidate->OptimizedCodeMapIsCleared()) { |
| 971 candidate->ClearOptimizedCodeMap(); | 974 candidate->ClearOptimizedCodeMap(); |
| 972 } | 975 } |
| 973 if (candidate->HasBytecodeArray()) { | 976 if (candidate->HasBytecodeArray()) { |
| (...skipping 119 matching lines...) |
| 1093 inline static void PushOnMarkingDeque(Heap* heap, Object* obj) { | 1096 inline static void PushOnMarkingDeque(Heap* heap, Object* obj) { |
| 1094 HeapObject* object = HeapObject::cast(obj); | 1097 HeapObject* object = HeapObject::cast(obj); |
| 1095 MarkBit mark_bit = ObjectMarking::MarkBitFrom(object); | 1098 MarkBit mark_bit = ObjectMarking::MarkBitFrom(object); |
| 1096 heap->mark_compact_collector()->MarkObject(object, mark_bit); | 1099 heap->mark_compact_collector()->MarkObject(object, mark_bit); |
| 1097 } | 1100 } |
| 1098 | 1101 |
| 1099 inline static bool MarkRecursively(Heap* heap, HeapObject* object) { | 1102 inline static bool MarkRecursively(Heap* heap, HeapObject* object) { |
| 1100 StackLimitCheck check(heap->isolate()); | 1103 StackLimitCheck check(heap->isolate()); |
| 1101 if (check.HasOverflowed()) return false; | 1104 if (check.HasOverflowed()) return false; |
| 1102 | 1105 |
| 1103 if (ObjectMarking::IsBlackOrGrey(object)) return true; | 1106 MarkBit mark = ObjectMarking::MarkBitFrom(object); |
| 1104 heap->mark_compact_collector()->SetMark(object); | 1107 if (Marking::IsBlackOrGrey(mark)) return true; |
| | 1108 heap->mark_compact_collector()->SetMark(object, mark); |
| 1105 IterateBody(object->map(), object); | 1109 IterateBody(object->map(), object); |
| 1106 return true; | 1110 return true; |
| 1107 } | 1111 } |
| 1108 }; | 1112 }; |
| 1109 | 1113 |
| 1110 class MarkCompactMarkingVisitor | 1114 class MarkCompactMarkingVisitor |
| 1111 : public StaticMarkingVisitor<MarkCompactMarkingVisitor> { | 1115 : public StaticMarkingVisitor<MarkCompactMarkingVisitor> { |
| 1112 public: | 1116 public: |
| 1113 static void Initialize(); | 1117 static void Initialize(); |
| 1114 | 1118 |
| (...skipping 17 matching lines...) |
| 1132 | 1136 |
| 1133 // Marks the object black and pushes it on the marking stack. | 1137 // Marks the object black and pushes it on the marking stack. |
| 1134 INLINE(static void MarkObject(Heap* heap, HeapObject* object)) { | 1138 INLINE(static void MarkObject(Heap* heap, HeapObject* object)) { |
| 1135 MarkBit mark = ObjectMarking::MarkBitFrom(object); | 1139 MarkBit mark = ObjectMarking::MarkBitFrom(object); |
| 1136 heap->mark_compact_collector()->MarkObject(object, mark); | 1140 heap->mark_compact_collector()->MarkObject(object, mark); |
| 1137 } | 1141 } |
| 1138 | 1142 |
| 1139 // Marks the object black without pushing it on the marking stack. | 1143 // Marks the object black without pushing it on the marking stack. |
| 1140 // Returns true if object needed marking and false otherwise. | 1144 // Returns true if object needed marking and false otherwise. |
| 1141 INLINE(static bool MarkObjectWithoutPush(Heap* heap, HeapObject* object)) { | 1145 INLINE(static bool MarkObjectWithoutPush(Heap* heap, HeapObject* object)) { |
| 1142 if (ObjectMarking::IsWhite(object)) { | 1146 MarkBit mark_bit = ObjectMarking::MarkBitFrom(object); |
| 1143 heap->mark_compact_collector()->SetMark(object); | 1147 if (Marking::IsWhite(mark_bit)) { |
| | 1148 heap->mark_compact_collector()->SetMark(object, mark_bit); |
| 1144 return true; | 1149 return true; |
| 1145 } | 1150 } |
| 1146 return false; | 1151 return false; |
| 1147 } | 1152 } |
| 1148 | 1153 |
| 1149 // Mark object pointed to by p. | 1154 // Mark object pointed to by p. |
| 1150 INLINE(static void MarkObjectByPointer(MarkCompactCollector* collector, | 1155 INLINE(static void MarkObjectByPointer(MarkCompactCollector* collector, |
| 1151 HeapObject* object, Object** p)) { | 1156 HeapObject* object, Object** p)) { |
| 1152 if (!(*p)->IsHeapObject()) return; | 1157 if (!(*p)->IsHeapObject()) return; |
| 1153 HeapObject* target_object = HeapObject::cast(*p); | 1158 HeapObject* target_object = HeapObject::cast(*p); |
| 1154 collector->RecordSlot(object, p, target_object); | 1159 collector->RecordSlot(object, p, target_object); |
| 1155 MarkBit mark = ObjectMarking::MarkBitFrom(target_object); | 1160 MarkBit mark = ObjectMarking::MarkBitFrom(target_object); |
| 1156 collector->MarkObject(target_object, mark); | 1161 collector->MarkObject(target_object, mark); |
| 1157 } | 1162 } |
| 1158 | 1163 |
| 1159 | 1164 |
| 1160 // Visit an unmarked object. | 1165 // Visit an unmarked object. |
| 1161 INLINE(static void VisitUnmarkedObject(MarkCompactCollector* collector, | 1166 INLINE(static void VisitUnmarkedObject(MarkCompactCollector* collector, |
| 1162 HeapObject* obj)) { | 1167 HeapObject* obj)) { |
| 1163 #ifdef DEBUG | 1168 #ifdef DEBUG |
| 1164 DCHECK(collector->heap()->Contains(obj)); | 1169 DCHECK(collector->heap()->Contains(obj)); |
| 1165 DCHECK(!collector->heap()->mark_compact_collector()->IsMarked(obj)); | 1170 DCHECK(!collector->heap()->mark_compact_collector()->IsMarked(obj)); |
| 1166 #endif | 1171 #endif |
| 1167 Map* map = obj->map(); | 1172 Map* map = obj->map(); |
| 1168 Heap* heap = obj->GetHeap(); | 1173 Heap* heap = obj->GetHeap(); |
| 1169 heap->mark_compact_collector()->SetMark(obj); | 1174 MarkBit mark = ObjectMarking::MarkBitFrom(obj); |
| | 1175 heap->mark_compact_collector()->SetMark(obj, mark); |
| 1170 // Mark the map pointer and the body. | 1176 // Mark the map pointer and the body. |
| 1171 MarkBit map_mark = ObjectMarking::MarkBitFrom(map); | 1177 MarkBit map_mark = ObjectMarking::MarkBitFrom(map); |
| 1172 heap->mark_compact_collector()->MarkObject(map, map_mark); | 1178 heap->mark_compact_collector()->MarkObject(map, map_mark); |
| 1173 IterateBody(map, obj); | 1179 IterateBody(map, obj); |
| 1174 } | 1180 } |
| 1175 | 1181 |
| 1176 // Visit all unmarked objects pointed to by [start, end). | 1182 // Visit all unmarked objects pointed to by [start, end). |
| 1177 // Returns false if the operation fails (lack of stack space). | 1183 // Returns false if the operation fails (lack of stack space). |
| 1178 INLINE(static bool VisitUnmarkedObjects(Heap* heap, HeapObject* object, | 1184 INLINE(static bool VisitUnmarkedObjects(Heap* heap, HeapObject* object, |
| 1179 Object** start, Object** end)) { | 1185 Object** start, Object** end)) { |
| 1180 // Return false if we are close to the stack limit. | 1186 // Return false if we are close to the stack limit. |
| 1181 StackLimitCheck check(heap->isolate()); | 1187 StackLimitCheck check(heap->isolate()); |
| 1182 if (check.HasOverflowed()) return false; | 1188 if (check.HasOverflowed()) return false; |
| 1183 | 1189 |
| 1184 MarkCompactCollector* collector = heap->mark_compact_collector(); | 1190 MarkCompactCollector* collector = heap->mark_compact_collector(); |
| 1185 // Visit the unmarked objects. | 1191 // Visit the unmarked objects. |
| 1186 for (Object** p = start; p < end; p++) { | 1192 for (Object** p = start; p < end; p++) { |
| 1187 Object* o = *p; | 1193 Object* o = *p; |
| 1188 if (!o->IsHeapObject()) continue; | 1194 if (!o->IsHeapObject()) continue; |
| 1189 collector->RecordSlot(object, p, o); | 1195 collector->RecordSlot(object, p, o); |
| 1190 HeapObject* obj = HeapObject::cast(o); | 1196 HeapObject* obj = HeapObject::cast(o); |
| 1191 if (ObjectMarking::IsBlackOrGrey(obj)) continue; | 1197 MarkBit mark = ObjectMarking::MarkBitFrom(obj); |
| | 1198 if (Marking::IsBlackOrGrey(mark)) continue; |
| 1192 VisitUnmarkedObject(collector, obj); | 1199 VisitUnmarkedObject(collector, obj); |
| 1193 } | 1200 } |
| 1194 return true; | 1201 return true; |
| 1195 } | 1202 } |
| 1196 | 1203 |
| 1197 private: | 1204 private: |
| 1198 // Code flushing support. | 1205 // Code flushing support. |
| 1199 | 1206 |
| 1200 static const int kRegExpCodeThreshold = 5; | 1207 static const int kRegExpCodeThreshold = 5; |
| 1201 | 1208 |
| (...skipping 12 matching lines...) |
| 1214 if (!code->IsSmi() && | 1221 if (!code->IsSmi() && |
| 1215 HeapObject::cast(code)->map()->instance_type() == CODE_TYPE) { | 1222 HeapObject::cast(code)->map()->instance_type() == CODE_TYPE) { |
| 1216 // Save a copy that can be reinstated if we need the code again. | 1223 // Save a copy that can be reinstated if we need the code again. |
| 1217 re->SetDataAt(JSRegExp::saved_code_index(is_one_byte), code); | 1224 re->SetDataAt(JSRegExp::saved_code_index(is_one_byte), code); |
| 1218 | 1225 |
| 1219 // Saving a copy might create a pointer into compaction candidate | 1226 // Saving a copy might create a pointer into compaction candidate |
| 1220 // that was not observed by marker. This might happen if JSRegExp data | 1227 // that was not observed by marker. This might happen if JSRegExp data |
| 1221 // was marked through the compilation cache before marker reached JSRegExp | 1228 // was marked through the compilation cache before marker reached JSRegExp |
| 1222 // object. | 1229 // object. |
| 1223 FixedArray* data = FixedArray::cast(re->data()); | 1230 FixedArray* data = FixedArray::cast(re->data()); |
| 1224 if (ObjectMarking::IsBlackOrGrey(data)) { | 1231 if (Marking::IsBlackOrGrey(ObjectMarking::MarkBitFrom(data))) { |
| 1225 Object** slot = | 1232 Object** slot = |
| 1226 data->data_start() + JSRegExp::saved_code_index(is_one_byte); | 1233 data->data_start() + JSRegExp::saved_code_index(is_one_byte); |
| 1227 heap->mark_compact_collector()->RecordSlot(data, slot, code); | 1234 heap->mark_compact_collector()->RecordSlot(data, slot, code); |
| 1228 } | 1235 } |
| 1229 | 1236 |
| 1230 // Set a number in the 0-255 range to guarantee no smi overflow. | 1237 // Set a number in the 0-255 range to guarantee no smi overflow. |
| 1231 re->SetDataAt(JSRegExp::code_index(is_one_byte), | 1238 re->SetDataAt(JSRegExp::code_index(is_one_byte), |
| 1232 Smi::FromInt(heap->ms_count() & 0xff)); | 1239 Smi::FromInt(heap->ms_count() & 0xff)); |
| 1233 } else if (code->IsSmi()) { | 1240 } else if (code->IsSmi()) { |
| 1234 int value = Smi::cast(code)->value(); | 1241 int value = Smi::cast(code)->value(); |
| (...skipping 145 matching lines...) |
| 1380 private: | 1387 private: |
| 1381 void MarkObjectByPointer(Object** p) { | 1388 void MarkObjectByPointer(Object** p) { |
| 1382 if (!(*p)->IsHeapObject()) return; | 1389 if (!(*p)->IsHeapObject()) return; |
| 1383 | 1390 |
| 1384 HeapObject* object = HeapObject::cast(*p); | 1391 HeapObject* object = HeapObject::cast(*p); |
| 1385 | 1392 |
| 1386 if (mode == MarkCompactMode::YOUNG_GENERATION && | 1393 if (mode == MarkCompactMode::YOUNG_GENERATION && |
| 1387 !collector_->heap()->InNewSpace(object)) | 1394 !collector_->heap()->InNewSpace(object)) |
| 1388 return; | 1395 return; |
| 1389 | 1396 |
| 1390 if (ObjectMarking::IsBlackOrGrey(object)) return; | 1397 MarkBit mark_bit = ObjectMarking::MarkBitFrom(object); |
| | 1398 if (Marking::IsBlackOrGrey(mark_bit)) return; |
| 1391 | 1399 |
| 1392 Map* map = object->map(); | 1400 Map* map = object->map(); |
| 1393 // Mark the object. | 1401 // Mark the object. |
| 1394 collector_->SetMark(object); | 1402 collector_->SetMark(object, mark_bit); |
| 1395 | 1403 |
| 1396 switch (mode) { | 1404 switch (mode) { |
| 1397 case MarkCompactMode::FULL: { | 1405 case MarkCompactMode::FULL: { |
| 1398 // Mark the map pointer and body, and push them on the marking stack. | 1406 // Mark the map pointer and body, and push them on the marking stack. |
| 1399 MarkBit map_mark = ObjectMarking::MarkBitFrom(map); | 1407 MarkBit map_mark = ObjectMarking::MarkBitFrom(map); |
| 1400 collector_->MarkObject(map, map_mark); | 1408 collector_->MarkObject(map, map_mark); |
| 1401 MarkCompactMarkingVisitor::IterateBody(map, object); | 1409 MarkCompactMarkingVisitor::IterateBody(map, object); |
| 1402 } break; | 1410 } break; |
| 1403 case MarkCompactMode::YOUNG_GENERATION: | 1411 case MarkCompactMode::YOUNG_GENERATION: |
| 1404 StaticYoungGenerationMarkingVisitor::IterateBody(map, object); | 1412 StaticYoungGenerationMarkingVisitor::IterateBody(map, object); |
| (...skipping 17 matching lines...) |
| 1422 : heap_(heap), pointers_removed_(0), table_(table) { | 1430 : heap_(heap), pointers_removed_(0), table_(table) { |
| 1423 DCHECK(!record_slots || table != nullptr); | 1431 DCHECK(!record_slots || table != nullptr); |
| 1424 } | 1432 } |
| 1425 | 1433 |
| 1426 void VisitPointers(Object** start, Object** end) override { | 1434 void VisitPointers(Object** start, Object** end) override { |
| 1427 // Visit all HeapObject pointers in [start, end). | 1435 // Visit all HeapObject pointers in [start, end). |
| 1428 MarkCompactCollector* collector = heap_->mark_compact_collector(); | 1436 MarkCompactCollector* collector = heap_->mark_compact_collector(); |
| 1429 for (Object** p = start; p < end; p++) { | 1437 for (Object** p = start; p < end; p++) { |
| 1430 Object* o = *p; | 1438 Object* o = *p; |
| 1431 if (o->IsHeapObject()) { | 1439 if (o->IsHeapObject()) { |
| 1432 if (ObjectMarking::IsWhite(HeapObject::cast(o))) { | 1440 if (Marking::IsWhite(ObjectMarking::MarkBitFrom(HeapObject::cast(o)))) { |
| 1433 if (finalize_external_strings) { | 1441 if (finalize_external_strings) { |
| 1434 if (o->IsExternalString()) { | 1442 if (o->IsExternalString()) { |
| 1435 heap_->FinalizeExternalString(String::cast(*p)); | 1443 heap_->FinalizeExternalString(String::cast(*p)); |
| 1436 } else { | 1444 } else { |
| 1437 // The original external string may have been internalized. | 1445 // The original external string may have been internalized. |
| 1438 DCHECK(o->IsThinString()); | 1446 DCHECK(o->IsThinString()); |
| 1439 } | 1447 } |
| 1440 } else { | 1448 } else { |
| 1441 pointers_removed_++; | 1449 pointers_removed_++; |
| 1442 } | 1450 } |
| (...skipping 20 matching lines...) |
| 1463 }; | 1471 }; |
| 1464 | 1472 |
| 1465 typedef StringTableCleaner<false, true> InternalizedStringTableCleaner; | 1473 typedef StringTableCleaner<false, true> InternalizedStringTableCleaner; |
| 1466 typedef StringTableCleaner<true, false> ExternalStringTableCleaner; | 1474 typedef StringTableCleaner<true, false> ExternalStringTableCleaner; |
| 1467 | 1475 |
| 1468 // Implementation of WeakObjectRetainer for mark compact GCs. All marked objects | 1476 // Implementation of WeakObjectRetainer for mark compact GCs. All marked objects |
| 1469 // are retained. | 1477 // are retained. |
| 1470 class MarkCompactWeakObjectRetainer : public WeakObjectRetainer { | 1478 class MarkCompactWeakObjectRetainer : public WeakObjectRetainer { |
| 1471 public: | 1479 public: |
| 1472 virtual Object* RetainAs(Object* object) { | 1480 virtual Object* RetainAs(Object* object) { |
| 1473 DCHECK(!ObjectMarking::IsGrey(HeapObject::cast(object))); | 1481 MarkBit mark_bit = ObjectMarking::MarkBitFrom(HeapObject::cast(object)); |
| 1474 if (ObjectMarking::IsBlack(HeapObject::cast(object))) { | 1482 DCHECK(!Marking::IsGrey(mark_bit)); |
| | 1483 if (Marking::IsBlack(mark_bit)) { |
| 1475 return object; | 1484 return object; |
| 1476 } else if (object->IsAllocationSite() && | 1485 } else if (object->IsAllocationSite() && |
| 1477 !(AllocationSite::cast(object)->IsZombie())) { | 1486 !(AllocationSite::cast(object)->IsZombie())) { |
| 1478 // "dead" AllocationSites need to live long enough for a traversal of new | 1487 // "dead" AllocationSites need to live long enough for a traversal of new |
| 1479 // space. These sites get a one-time reprieve. | 1488 // space. These sites get a one-time reprieve. |
| 1480 AllocationSite* site = AllocationSite::cast(object); | 1489 AllocationSite* site = AllocationSite::cast(object); |
| 1481 site->MarkZombie(); | 1490 site->MarkZombie(); |
| 1482 site->GetHeap()->mark_compact_collector()->MarkAllocationSite(site); | 1491 site->GetHeap()->mark_compact_collector()->MarkAllocationSite(site); |
| 1483 return object; | 1492 return object; |
| 1484 } else { | 1493 } else { |
| 1485 return NULL; | 1494 return NULL; |
| 1486 } | 1495 } |
| 1487 } | 1496 } |
| 1488 }; | 1497 }; |
| 1489 | 1498 |
| 1490 | 1499 |
| 1491 // Fill the marking stack with overflowed objects returned by the given | 1500 // Fill the marking stack with overflowed objects returned by the given |
| 1492 // iterator. Stop when the marking stack is filled or the end of the space | 1501 // iterator. Stop when the marking stack is filled or the end of the space |
| 1493 // is reached, whichever comes first. | 1502 // is reached, whichever comes first. |
| 1494 template <class T> | 1503 template <class T> |
| 1495 void MarkCompactCollector::DiscoverGreyObjectsWithIterator(T* it) { | 1504 void MarkCompactCollector::DiscoverGreyObjectsWithIterator(T* it) { |
| 1496 // The caller should ensure that the marking stack is initially not full, | 1505 // The caller should ensure that the marking stack is initially not full, |
| 1497 // so that we don't waste effort pointlessly scanning for objects. | 1506 // so that we don't waste effort pointlessly scanning for objects. |
| 1498 DCHECK(!marking_deque()->IsFull()); | 1507 DCHECK(!marking_deque()->IsFull()); |
| 1499 | 1508 |
| 1500 Map* filler_map = heap()->one_pointer_filler_map(); | 1509 Map* filler_map = heap()->one_pointer_filler_map(); |
| 1501 for (HeapObject* object = it->Next(); object != NULL; object = it->Next()) { | 1510 for (HeapObject* object = it->Next(); object != NULL; object = it->Next()) { |
| 1502 if ((object->map() != filler_map) && ObjectMarking::IsGrey(object)) { | 1511 MarkBit markbit = ObjectMarking::MarkBitFrom(object); |
| 1503 ObjectMarking::GreyToBlack(object); | 1512 if ((object->map() != filler_map) && Marking::IsGrey(markbit)) { |
| | 1513 Marking::GreyToBlack(markbit); |
| 1504 PushBlack(object); | 1514 PushBlack(object); |
| 1505 if (marking_deque()->IsFull()) return; | 1515 if (marking_deque()->IsFull()) return; |
| 1506 } | 1516 } |
| 1507 } | 1517 } |
| 1508 } | 1518 } |
| 1509 | 1519 |
| 1510 void MarkCompactCollector::DiscoverGreyObjectsOnPage(MemoryChunk* p) { | 1520 void MarkCompactCollector::DiscoverGreyObjectsOnPage(MemoryChunk* p) { |
| 1511 DCHECK(!marking_deque()->IsFull()); | 1521 DCHECK(!marking_deque()->IsFull()); |
| 1512 LiveObjectIterator<kGreyObjects> it(p); | 1522 LiveObjectIterator<kGreyObjects> it(p); |
| 1513 HeapObject* object = NULL; | 1523 HeapObject* object = NULL; |
| 1514 while ((object = it.Next()) != NULL) { | 1524 while ((object = it.Next()) != NULL) { |
| 1515 DCHECK(ObjectMarking::IsGrey(object)); | 1525 MarkBit markbit = ObjectMarking::MarkBitFrom(object); |
| 1516 ObjectMarking::GreyToBlack(object); | 1526 DCHECK(Marking::IsGrey(markbit)); |
| | 1527 Marking::GreyToBlack(markbit); |
| 1517 PushBlack(object); | 1528 PushBlack(object); |
| 1518 if (marking_deque()->IsFull()) return; | 1529 if (marking_deque()->IsFull()) return; |
| 1519 } | 1530 } |
| 1520 } | 1531 } |
| 1521 | 1532 |
| 1522 class RecordMigratedSlotVisitor final : public ObjectVisitor { | 1533 class RecordMigratedSlotVisitor final : public ObjectVisitor { |
| 1523 public: | 1534 public: |
| 1524 explicit RecordMigratedSlotVisitor(MarkCompactCollector* collector) | 1535 explicit RecordMigratedSlotVisitor(MarkCompactCollector* collector) |
| 1525 : collector_(collector) {} | 1536 : collector_(collector) {} |
| 1526 | 1537 |
| (...skipping 429 matching lines...) |
| 1956 for (Page* page : PageRange(space->bottom(), space->top())) { | 1967 for (Page* page : PageRange(space->bottom(), space->top())) { |
| 1957 DiscoverGreyObjectsOnPage(page); | 1968 DiscoverGreyObjectsOnPage(page); |
| 1958 if (marking_deque()->IsFull()) return; | 1969 if (marking_deque()->IsFull()) return; |
| 1959 } | 1970 } |
| 1960 } | 1971 } |
| 1961 | 1972 |
| 1962 | 1973 |
| 1963 bool MarkCompactCollector::IsUnmarkedHeapObject(Object** p) { | 1974 bool MarkCompactCollector::IsUnmarkedHeapObject(Object** p) { |
| 1964 Object* o = *p; | 1975 Object* o = *p; |
| 1965 if (!o->IsHeapObject()) return false; | 1976 if (!o->IsHeapObject()) return false; |
| 1966 return ObjectMarking::IsWhite(HeapObject::cast(o)); | 1977 HeapObject* heap_object = HeapObject::cast(o); |
| | 1978 MarkBit mark = ObjectMarking::MarkBitFrom(heap_object); |
| | 1979 return Marking::IsWhite(mark); |
| 1967 } | 1980 } |
| 1968 | 1981 |
| 1969 | 1982 |
| 1970 bool MarkCompactCollector::IsUnmarkedHeapObjectWithHeap(Heap* heap, | 1983 bool MarkCompactCollector::IsUnmarkedHeapObjectWithHeap(Heap* heap, |
| 1971 Object** p) { | 1984 Object** p) { |
| 1972 Object* o = *p; | 1985 Object* o = *p; |
| 1973 DCHECK(o->IsHeapObject()); | 1986 DCHECK(o->IsHeapObject()); |
| 1974 return ObjectMarking::IsWhite(HeapObject::cast(o)); | 1987 HeapObject* heap_object = HeapObject::cast(o); |
| | 1988 MarkBit mark = ObjectMarking::MarkBitFrom(heap_object); |
| | 1989 return Marking::IsWhite(mark); |
| 1975 } | 1990 } |
| 1976 | 1991 |
| 1977 void MarkCompactCollector::MarkStringTable( | 1992 void MarkCompactCollector::MarkStringTable( |
| 1978 RootMarkingVisitor<MarkCompactMode::FULL>* visitor) { | 1993 RootMarkingVisitor<MarkCompactMode::FULL>* visitor) { |
| 1979 StringTable* string_table = heap()->string_table(); | 1994 StringTable* string_table = heap()->string_table(); |
| 1980 // Mark the string table itself. | 1995 // Mark the string table itself. |
| 1981 if (ObjectMarking::IsWhite(string_table)) { | 1996 MarkBit string_table_mark = ObjectMarking::MarkBitFrom(string_table); |
| | 1997 if (Marking::IsWhite(string_table_mark)) { |
| 1982 // String table could have already been marked by visiting the handles list. | 1998 // String table could have already been marked by visiting the handles list. |
| 1983 SetMark(string_table); | 1999 SetMark(string_table, string_table_mark); |
| 1984 } | 2000 } |
| 1985 // Explicitly mark the prefix. | 2001 // Explicitly mark the prefix. |
| 1986 string_table->IteratePrefix(visitor); | 2002 string_table->IteratePrefix(visitor); |
| 1987 ProcessMarkingDeque<MarkCompactMode::FULL>(); | 2003 ProcessMarkingDeque<MarkCompactMode::FULL>(); |
| 1988 } | 2004 } |
| 1989 | 2005 |
| 1990 | 2006 |
| 1991 void MarkCompactCollector::MarkAllocationSite(AllocationSite* site) { | 2007 void MarkCompactCollector::MarkAllocationSite(AllocationSite* site) { |
| 1992 SetMark(site); | 2008 MarkBit mark_bit = ObjectMarking::MarkBitFrom(site); |
| | 2009 SetMark(site, mark_bit); |
| 1993 } | 2010 } |
| 1994 | 2011 |
| 1995 void MarkCompactCollector::MarkRoots( | 2012 void MarkCompactCollector::MarkRoots( |
| 1996 RootMarkingVisitor<MarkCompactMode::FULL>* visitor) { | 2013 RootMarkingVisitor<MarkCompactMode::FULL>* visitor) { |
| 1997 // Mark the heap roots including global variables, stack variables, | 2014 // Mark the heap roots including global variables, stack variables, |
| 1998 // etc., and all objects reachable from them. | 2015 // etc., and all objects reachable from them. |
| 1999 heap()->IterateStrongRoots(visitor, VISIT_ONLY_STRONG); | 2016 heap()->IterateStrongRoots(visitor, VISIT_ONLY_STRONG); |
| 2000 | 2017 |
| 2001 // Handle the string table specially. | 2018 // Handle the string table specially. |
| 2002 MarkStringTable(visitor); | 2019 MarkStringTable(visitor); |
| (...skipping 42 matching lines...) |
| 2045 // After: the marking stack is empty, and all objects reachable from the | 2062 // After: the marking stack is empty, and all objects reachable from the |
| 2046 // marking stack have been marked, or are overflowed in the heap. | 2063 // marking stack have been marked, or are overflowed in the heap. |
| 2047 template <MarkCompactMode mode> | 2064 template <MarkCompactMode mode> |
| 2048 void MarkCompactCollector::EmptyMarkingDeque() { | 2065 void MarkCompactCollector::EmptyMarkingDeque() { |
| 2049 while (!marking_deque()->IsEmpty()) { | 2066 while (!marking_deque()->IsEmpty()) { |
| 2050 HeapObject* object = marking_deque()->Pop(); | 2067 HeapObject* object = marking_deque()->Pop(); |
| 2051 | 2068 |
| 2052 DCHECK(!object->IsFiller()); | 2069 DCHECK(!object->IsFiller()); |
| 2053 DCHECK(object->IsHeapObject()); | 2070 DCHECK(object->IsHeapObject()); |
| 2054 DCHECK(heap()->Contains(object)); | 2071 DCHECK(heap()->Contains(object)); |
| 2055 DCHECK(!ObjectMarking::IsWhite(object)); | 2072 DCHECK(!Marking::IsWhite(ObjectMarking::MarkBitFrom(object))); |
| 2056 | 2073 |
| 2057 Map* map = object->map(); | 2074 Map* map = object->map(); |
| 2058 switch (mode) { | 2075 switch (mode) { |
| 2059 case MarkCompactMode::FULL: { | 2076 case MarkCompactMode::FULL: { |
| 2060 MarkBit map_mark = ObjectMarking::MarkBitFrom(map); | 2077 MarkBit map_mark = ObjectMarking::MarkBitFrom(map); |
| 2061 MarkObject(map, map_mark); | 2078 MarkObject(map, map_mark); |
| 2062 MarkCompactMarkingVisitor::IterateBody(map, object); | 2079 MarkCompactMarkingVisitor::IterateBody(map, object); |
| 2063 } break; | 2080 } break; |
| 2064 case MarkCompactMode::YOUNG_GENERATION: { | 2081 case MarkCompactMode::YOUNG_GENERATION: { |
| 2065 DCHECK(ObjectMarking::IsBlack(object)); | 2082 DCHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(object))); |
| 2066 StaticYoungGenerationMarkingVisitor::IterateBody(map, object); | 2083 StaticYoungGenerationMarkingVisitor::IterateBody(map, object); |
| 2067 } break; | 2084 } break; |
| 2068 } | 2085 } |
| 2069 } | 2086 } |
| 2070 } | 2087 } |
| 2071 | 2088 |
| 2072 | 2089 |
| 2073 // Sweep the heap for overflowed objects, clear their overflow bits, and | 2090 // Sweep the heap for overflowed objects, clear their overflow bits, and |
| 2074 // push them on the marking stack. Stop early if the marking stack fills | 2091 // push them on the marking stack. Stop early if the marking stack fills |
| 2075 // before sweeping completes. If sweeping completes, there are no remaining | 2092 // before sweeping completes. If sweeping completes, there are no remaining |
| (...skipping 180 matching lines...) |
| 2256 ObjectStatsVisitor(Heap* heap, ObjectStats* live_stats, | 2273 ObjectStatsVisitor(Heap* heap, ObjectStats* live_stats, |
| 2257 ObjectStats* dead_stats) | 2274 ObjectStats* dead_stats) |
| 2258 : live_collector_(heap, live_stats), dead_collector_(heap, dead_stats) { | 2275 : live_collector_(heap, live_stats), dead_collector_(heap, dead_stats) { |
| 2259 DCHECK_NOT_NULL(live_stats); | 2276 DCHECK_NOT_NULL(live_stats); |
| 2260 DCHECK_NOT_NULL(dead_stats); | 2277 DCHECK_NOT_NULL(dead_stats); |
| 2261 // Global objects are roots and thus recorded as live. | 2278 // Global objects are roots and thus recorded as live. |
| 2262 live_collector_.CollectGlobalStatistics(); | 2279 live_collector_.CollectGlobalStatistics(); |
| 2263 } | 2280 } |
| 2264 | 2281 |
| 2265 bool Visit(HeapObject* obj) override { | 2282 bool Visit(HeapObject* obj) override { |
| 2266 if (ObjectMarking::IsBlack(obj)) { | 2283 if (Marking::IsBlack(ObjectMarking::MarkBitFrom(obj))) { |
| 2267 live_collector_.CollectStatistics(obj); | 2284 live_collector_.CollectStatistics(obj); |
| 2268 } else { | 2285 } else { |
| 2269 DCHECK(!ObjectMarking::IsGrey(obj)); | 2286 DCHECK(!Marking::IsGrey(ObjectMarking::MarkBitFrom(obj))); |
| 2270 dead_collector_.CollectStatistics(obj); | 2287 dead_collector_.CollectStatistics(obj); |
| 2271 } | 2288 } |
| 2272 return true; | 2289 return true; |
| 2273 } | 2290 } |
| 2274 | 2291 |
| 2275 private: | 2292 private: |
| 2276 ObjectStatsCollector live_collector_; | 2293 ObjectStatsCollector live_collector_; |
| 2277 ObjectStatsCollector dead_collector_; | 2294 ObjectStatsCollector dead_collector_; |
| 2278 }; | 2295 }; |
| 2279 | 2296 |
| (...skipping 35 matching lines...) |
| 2315 } | 2332 } |
| 2316 | 2333 |
| 2317 SlotCallbackResult MarkCompactCollector::CheckAndMarkObject( | 2334 SlotCallbackResult MarkCompactCollector::CheckAndMarkObject( |
| 2318 Heap* heap, Address slot_address) { | 2335 Heap* heap, Address slot_address) { |
| 2319 Object* object = *reinterpret_cast<Object**>(slot_address); | 2336 Object* object = *reinterpret_cast<Object**>(slot_address); |
| 2320 if (heap->InNewSpace(object)) { | 2337 if (heap->InNewSpace(object)) { |
| 2321 // Marking happens before flipping the young generation, so the object | 2338 // Marking happens before flipping the young generation, so the object |
| 2322 // has to be in ToSpace. | 2339 // has to be in ToSpace. |
| 2323 DCHECK(heap->InToSpace(object)); | 2340 DCHECK(heap->InToSpace(object)); |
| 2324 HeapObject* heap_object = reinterpret_cast<HeapObject*>(object); | 2341 HeapObject* heap_object = reinterpret_cast<HeapObject*>(object); |
| 2325 if (ObjectMarking::IsBlackOrGrey(heap_object)) { | 2342 MarkBit mark_bit = ObjectMarking::MarkBitFrom(heap_object); |
| | 2343 if (Marking::IsBlackOrGrey(mark_bit)) { |
| 2326 return KEEP_SLOT; | 2344 return KEEP_SLOT; |
| 2327 } | 2345 } |
| 2328 heap->mark_compact_collector()->SetMark(heap_object); | 2346 heap->mark_compact_collector()->SetMark(heap_object, mark_bit); |
| 2329 StaticYoungGenerationMarkingVisitor::IterateBody(heap_object->map(), | 2347 StaticYoungGenerationMarkingVisitor::IterateBody(heap_object->map(), |
| 2330 heap_object); | 2348 heap_object); |
| 2331 return KEEP_SLOT; | 2349 return KEEP_SLOT; |
| 2332 } | 2350 } |
| 2333 return REMOVE_SLOT; | 2351 return REMOVE_SLOT; |
| 2334 } | 2352 } |
| 2335 | 2353 |
| 2336 static bool IsUnmarkedObject(Heap* heap, Object** p) { | 2354 static bool IsUnmarkedObject(Heap* heap, Object** p) { |
| 2337 DCHECK_IMPLIES(heap->InNewSpace(*p), heap->InToSpace(*p)); | 2355 DCHECK_IMPLIES(heap->InNewSpace(*p), heap->InToSpace(*p)); |
| 2338 return heap->InNewSpace(*p) && !ObjectMarking::IsBlack(HeapObject::cast(*p)); | 2356 return heap->InNewSpace(*p) && |
| | 2357 !Marking::IsBlack(ObjectMarking::MarkBitFrom(HeapObject::cast(*p))); |
| 2339 } | 2358 } |
| 2340 | 2359 |
| 2341 void MarkCompactCollector::MarkLiveObjectsInYoungGeneration() { | 2360 void MarkCompactCollector::MarkLiveObjectsInYoungGeneration() { |
| 2342 TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_MARK); | 2361 TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_MARK); |
| 2343 | 2362 |
| 2344 PostponeInterruptsScope postpone(isolate()); | 2363 PostponeInterruptsScope postpone(isolate()); |
| 2345 | 2364 |
| 2346 StaticYoungGenerationMarkingVisitor::Initialize(heap()); | 2365 StaticYoungGenerationMarkingVisitor::Initialize(heap()); |
| 2347 RootMarkingVisitor<MarkCompactMode::YOUNG_GENERATION> root_visitor(heap()); | 2366 RootMarkingVisitor<MarkCompactMode::YOUNG_GENERATION> root_visitor(heap()); |
| 2348 | 2367 |
| (...skipping 261 matching lines...) |
| 2610 } | 2629 } |
| 2611 | 2630 |
| 2612 | 2631 |
| 2613 void MarkCompactCollector::ClearSimpleMapTransitions( | 2632 void MarkCompactCollector::ClearSimpleMapTransitions( |
| 2614 Object* non_live_map_list) { | 2633 Object* non_live_map_list) { |
| 2615 Object* the_hole_value = heap()->the_hole_value(); | 2634 Object* the_hole_value = heap()->the_hole_value(); |
| 2616 Object* weak_cell_obj = non_live_map_list; | 2635 Object* weak_cell_obj = non_live_map_list; |
| 2617 while (weak_cell_obj != Smi::kZero) { | 2636 while (weak_cell_obj != Smi::kZero) { |
| 2618 WeakCell* weak_cell = WeakCell::cast(weak_cell_obj); | 2637 WeakCell* weak_cell = WeakCell::cast(weak_cell_obj); |
| 2619 Map* map = Map::cast(weak_cell->value()); | 2638 Map* map = Map::cast(weak_cell->value()); |
| 2620 DCHECK(ObjectMarking::IsWhite(map)); | 2639 DCHECK(Marking::IsWhite(ObjectMarking::MarkBitFrom(map))); |
| 2621 Object* potential_parent = map->constructor_or_backpointer(); | 2640 Object* potential_parent = map->constructor_or_backpointer(); |
| 2622 if (potential_parent->IsMap()) { | 2641 if (potential_parent->IsMap()) { |
| 2623 Map* parent = Map::cast(potential_parent); | 2642 Map* parent = Map::cast(potential_parent); |
| 2624 if (ObjectMarking::IsBlackOrGrey(parent) && | 2643 if (Marking::IsBlackOrGrey(ObjectMarking::MarkBitFrom(parent)) && |
| 2625 parent->raw_transitions() == weak_cell) { | 2644 parent->raw_transitions() == weak_cell) { |
| 2626 ClearSimpleMapTransition(parent, map); | 2645 ClearSimpleMapTransition(parent, map); |
| 2627 } | 2646 } |
| 2628 } | 2647 } |
| 2629 weak_cell->clear(); | 2648 weak_cell->clear(); |
| 2630 weak_cell_obj = weak_cell->next(); | 2649 weak_cell_obj = weak_cell->next(); |
| 2631 weak_cell->clear_next(the_hole_value); | 2650 weak_cell->clear_next(the_hole_value); |
| 2632 } | 2651 } |
| 2633 } | 2652 } |
| 2634 | 2653 |
| (...skipping 18 matching lines...) |
| 2653 void MarkCompactCollector::ClearFullMapTransitions() { | 2672 void MarkCompactCollector::ClearFullMapTransitions() { |
| 2654 HeapObject* undefined = heap()->undefined_value(); | 2673 HeapObject* undefined = heap()->undefined_value(); |
| 2655 Object* obj = heap()->encountered_transition_arrays(); | 2674 Object* obj = heap()->encountered_transition_arrays(); |
| 2656 while (obj != Smi::kZero) { | 2675 while (obj != Smi::kZero) { |
| 2657 TransitionArray* array = TransitionArray::cast(obj); | 2676 TransitionArray* array = TransitionArray::cast(obj); |
| 2658 int num_transitions = array->number_of_entries(); | 2677 int num_transitions = array->number_of_entries(); |
| 2659 DCHECK_EQ(TransitionArray::NumberOfTransitions(array), num_transitions); | 2678 DCHECK_EQ(TransitionArray::NumberOfTransitions(array), num_transitions); |
| 2660 if (num_transitions > 0) { | 2679 if (num_transitions > 0) { |
| 2661 Map* map = array->GetTarget(0); | 2680 Map* map = array->GetTarget(0); |
| 2662 Map* parent = Map::cast(map->constructor_or_backpointer()); | 2681 Map* parent = Map::cast(map->constructor_or_backpointer()); |
| 2663 bool parent_is_alive = ObjectMarking::IsBlackOrGrey(parent); | 2682 bool parent_is_alive = |
| | 2683 Marking::IsBlackOrGrey(ObjectMarking::MarkBitFrom(parent)); |
| 2664 DescriptorArray* descriptors = | 2684 DescriptorArray* descriptors = |
| 2665 parent_is_alive ? parent->instance_descriptors() : nullptr; | 2685 parent_is_alive ? parent->instance_descriptors() : nullptr; |
| 2666 bool descriptors_owner_died = | 2686 bool descriptors_owner_died = |
| 2667 CompactTransitionArray(parent, array, descriptors); | 2687 CompactTransitionArray(parent, array, descriptors); |
| 2668 if (descriptors_owner_died) { | 2688 if (descriptors_owner_died) { |
| 2669 TrimDescriptorArray(parent, descriptors); | 2689 TrimDescriptorArray(parent, descriptors); |
| 2670 } | 2690 } |
| 2671 } | 2691 } |
| 2672 obj = array->next_link(); | 2692 obj = array->next_link(); |
| 2673 array->set_next_link(undefined, SKIP_WRITE_BARRIER); | 2693 array->set_next_link(undefined, SKIP_WRITE_BARRIER); |
| 2674 } | 2694 } |
| 2675 heap()->set_encountered_transition_arrays(Smi::kZero); | 2695 heap()->set_encountered_transition_arrays(Smi::kZero); |
| 2676 } | 2696 } |
| 2677 | 2697 |
| 2678 | 2698 |
| 2679 bool MarkCompactCollector::CompactTransitionArray( | 2699 bool MarkCompactCollector::CompactTransitionArray( |
| 2680 Map* map, TransitionArray* transitions, DescriptorArray* descriptors) { | 2700 Map* map, TransitionArray* transitions, DescriptorArray* descriptors) { |
| 2681 int num_transitions = transitions->number_of_entries(); | 2701 int num_transitions = transitions->number_of_entries(); |
| 2682 bool descriptors_owner_died = false; | 2702 bool descriptors_owner_died = false; |
| 2683 int transition_index = 0; | 2703 int transition_index = 0; |
| 2684 // Compact all live transitions to the left. | 2704 // Compact all live transitions to the left. |
| 2685 for (int i = 0; i < num_transitions; ++i) { | 2705 for (int i = 0; i < num_transitions; ++i) { |
| 2686 Map* target = transitions->GetTarget(i); | 2706 Map* target = transitions->GetTarget(i); |
| 2687 DCHECK_EQ(target->constructor_or_backpointer(), map); | 2707 DCHECK_EQ(target->constructor_or_backpointer(), map); |
| 2688 if (ObjectMarking::IsWhite(target)) { | 2708 if (Marking::IsWhite(ObjectMarking::MarkBitFrom(target))) { |
| 2689 if (descriptors != nullptr && | 2709 if (descriptors != nullptr && |
| 2690 target->instance_descriptors() == descriptors) { | 2710 target->instance_descriptors() == descriptors) { |
| 2691 descriptors_owner_died = true; | 2711 descriptors_owner_died = true; |
| 2692 } | 2712 } |
| 2693 } else { | 2713 } else { |
| 2694 if (i != transition_index) { | 2714 if (i != transition_index) { |
| 2695 Name* key = transitions->GetKey(i); | 2715 Name* key = transitions->GetKey(i); |
| 2696 transitions->SetKey(transition_index, key); | 2716 transitions->SetKey(transition_index, key); |
| 2697 Object** key_slot = transitions->GetKeySlot(transition_index); | 2717 Object** key_slot = transitions->GetKeySlot(transition_index); |
| 2698 RecordSlot(transitions, key_slot, key); | 2718 RecordSlot(transitions, key_slot, key); |
| (...skipping 154 matching lines...) |
| 2853 // Cells for new-space objects embedded in optimized code are wrapped in | 2873 // Cells for new-space objects embedded in optimized code are wrapped in |
| 2854 // WeakCell and put into Heap::weak_object_to_code_table. | 2874 // WeakCell and put into Heap::weak_object_to_code_table. |
| 2855 // Such cells do not have any strong references but we want to keep them | 2875 // Such cells do not have any strong references but we want to keep them |
| 2856 // alive as long as the cell value is alive. | 2876 // alive as long as the cell value is alive. |
| 2857 // TODO(ulan): remove this once we remove Heap::weak_object_to_code_table. | 2877 // TODO(ulan): remove this once we remove Heap::weak_object_to_code_table. |
| 2858 if (value->IsCell()) { | 2878 if (value->IsCell()) { |
| 2859 Object* cell_value = Cell::cast(value)->value(); | 2879 Object* cell_value = Cell::cast(value)->value(); |
| 2860 if (cell_value->IsHeapObject() && | 2880 if (cell_value->IsHeapObject() && |
| 2861 MarkCompactCollector::IsMarked(HeapObject::cast(cell_value))) { | 2881 MarkCompactCollector::IsMarked(HeapObject::cast(cell_value))) { |
| 2862 // Resurrect the cell. | 2882 // Resurrect the cell. |
| 2863 SetMark(value); | 2883 MarkBit mark = ObjectMarking::MarkBitFrom(value); |
| | 2884 SetMark(value, mark); |
| 2864 Object** slot = HeapObject::RawField(value, Cell::kValueOffset); | 2885 Object** slot = HeapObject::RawField(value, Cell::kValueOffset); |
| 2865 RecordSlot(value, slot, *slot); | 2886 RecordSlot(value, slot, *slot); |
| 2866 slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset); | 2887 slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset); |
| 2867 RecordSlot(weak_cell, slot, *slot); | 2888 RecordSlot(weak_cell, slot, *slot); |
| 2868 clear_value = false; | 2889 clear_value = false; |
| 2869 } | 2890 } |
| 2870 } | 2891 } |
| 2871 if (value->IsMap()) { | 2892 if (value->IsMap()) { |
| 2872 // The map is non-live. | 2893 // The map is non-live. |
| 2873 Map* map = Map::cast(value); | 2894 Map* map = Map::cast(value); |
| (...skipping 521 matching lines...) |
| 3395 } | 3416 } |
| 3396 | 3417 |
| 3397 intptr_t freed_bytes = 0; | 3418 intptr_t freed_bytes = 0; |
| 3398 intptr_t max_freed_bytes = 0; | 3419 intptr_t max_freed_bytes = 0; |
| 3399 int curr_region = -1; | 3420 int curr_region = -1; |
| 3400 | 3421 |
| 3401 LiveObjectIterator<kBlackObjects> it(p); | 3422 LiveObjectIterator<kBlackObjects> it(p); |
| 3402 HeapObject* object = NULL; | 3423 HeapObject* object = NULL; |
| 3403 | 3424 |
| 3404 while ((object = it.Next()) != NULL) { | 3425 while ((object = it.Next()) != NULL) { |
| 3405 DCHECK(ObjectMarking::IsBlack(object)); | 3426 DCHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(object))); |
| 3406 Address free_end = object->address(); | 3427 Address free_end = object->address(); |
| 3407 if (free_end != free_start) { | 3428 if (free_end != free_start) { |
| 3408 CHECK_GT(free_end, free_start); | 3429 CHECK_GT(free_end, free_start); |
| 3409 size_t size = static_cast<size_t>(free_end - free_start); | 3430 size_t size = static_cast<size_t>(free_end - free_start); |
| 3410 if (free_space_mode == ZAP_FREE_SPACE) { | 3431 if (free_space_mode == ZAP_FREE_SPACE) { |
| 3411 memset(free_start, 0xcc, size); | 3432 memset(free_start, 0xcc, size); |
| 3412 } | 3433 } |
| 3413 if (free_list_mode == REBUILD_FREE_LIST) { | 3434 if (free_list_mode == REBUILD_FREE_LIST) { |
| 3414 freed_bytes = reinterpret_cast<PagedSpace*>(space)->UnaccountedFree( | 3435 freed_bytes = reinterpret_cast<PagedSpace*>(space)->UnaccountedFree( |
| 3415 free_start, size); | 3436 free_start, size); |
| (...skipping 69 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3485 Address start = code->instruction_start(); | 3506 Address start = code->instruction_start(); |
| 3486 Address end = code->address() + code->Size(); | 3507 Address end = code->address() + code->Size(); |
| 3487 | 3508 |
| 3488 RememberedSet<OLD_TO_NEW>::RemoveRangeTyped(page, start, end); | 3509 RememberedSet<OLD_TO_NEW>::RemoveRangeTyped(page, start, end); |
| 3489 | 3510 |
| 3490 if (heap_->incremental_marking()->IsCompacting() && | 3511 if (heap_->incremental_marking()->IsCompacting() && |
| 3491 !ShouldSkipEvacuationSlotRecording(code)) { | 3512 !ShouldSkipEvacuationSlotRecording(code)) { |
| 3492 DCHECK(compacting_); | 3513 DCHECK(compacting_); |
| 3493 | 3514 |
| 3494 // If the object is white then no slots were recorded on it yet. | 3516 // If the object is white then no slots were recorded on it yet. |
| 3495 if (ObjectMarking::IsWhite(code)) return; | 3516 MarkBit mark_bit = ObjectMarking::MarkBitFrom(code); |
| | 3517 if (Marking::IsWhite(mark_bit)) return; |
| 3496 | 3518 |
| 3497 // Ignore all slots that might have been recorded in the body of the | 3519 // Ignore all slots that might have been recorded in the body of the |
| 3498 // deoptimized code object. Assumption: no slots will be recorded for | 3520 // deoptimized code object. Assumption: no slots will be recorded for |
| 3499 // this object after invalidating it. | 3521 // this object after invalidating it. |
| 3500 RememberedSet<OLD_TO_OLD>::RemoveRangeTyped(page, start, end); | 3522 RememberedSet<OLD_TO_OLD>::RemoveRangeTyped(page, start, end); |
| 3501 } | 3523 } |
| 3502 } | 3524 } |
| 3503 | 3525 |
| 3504 | 3526 |
| 3505 // Return true if the given code is deoptimized or will be deoptimized. | 3527 // Return true if the given code is deoptimized or will be deoptimized. |
| 3506 bool MarkCompactCollector::WillBeDeoptimized(Code* code) { | 3528 bool MarkCompactCollector::WillBeDeoptimized(Code* code) { |
| 3507 return code->is_optimized_code() && code->marked_for_deoptimization(); | 3529 return code->is_optimized_code() && code->marked_for_deoptimization(); |
| 3508 } | 3530 } |
| 3509 | 3531 |
| 3510 | 3532 |
| 3511 #ifdef VERIFY_HEAP | 3533 #ifdef VERIFY_HEAP |
| 3512 static void VerifyAllBlackObjects(MemoryChunk* page) { | 3534 static void VerifyAllBlackObjects(MemoryChunk* page) { |
| 3513 LiveObjectIterator<kAllLiveObjects> it(page); | 3535 LiveObjectIterator<kAllLiveObjects> it(page); |
| 3514 HeapObject* object = NULL; | 3536 HeapObject* object = NULL; |
| 3515 while ((object = it.Next()) != NULL) { | 3537 while ((object = it.Next()) != NULL) { |
| 3516 CHECK(ObjectMarking::IsBlack(object)); | 3538 CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(object))); |
| 3517 } | 3539 } |
| 3518 } | 3540 } |
| 3519 #endif // VERIFY_HEAP | 3541 #endif // VERIFY_HEAP |
| 3520 | 3542 |
| 3521 template <class Visitor> | 3543 template <class Visitor> |
| 3522 bool MarkCompactCollector::VisitLiveObjects(MemoryChunk* page, Visitor* visitor, | 3544 bool MarkCompactCollector::VisitLiveObjects(MemoryChunk* page, Visitor* visitor, |
| 3523 IterationMode mode) { | 3545 IterationMode mode) { |
| 3524 #ifdef VERIFY_HEAP | 3546 #ifdef VERIFY_HEAP |
| 3525 VerifyAllBlackObjects(page); | 3547 VerifyAllBlackObjects(page); |
| 3526 #endif // VERIFY_HEAP | 3548 #endif // VERIFY_HEAP |
| 3527 | 3549 |
| 3528 LiveObjectIterator<kBlackObjects> it(page); | 3550 LiveObjectIterator<kBlackObjects> it(page); |
| 3529 HeapObject* object = nullptr; | 3551 HeapObject* object = nullptr; |
| 3530 while ((object = it.Next()) != nullptr) { | 3552 while ((object = it.Next()) != nullptr) { |
| 3531 DCHECK(ObjectMarking::IsBlack(object)); | 3553 DCHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(object))); |
| 3532 if (!visitor->Visit(object)) { | 3554 if (!visitor->Visit(object)) { |
| 3533 if (mode == kClearMarkbits) { | 3555 if (mode == kClearMarkbits) { |
| 3534 page->markbits()->ClearRange( | 3556 page->markbits()->ClearRange( |
| 3535 page->AddressToMarkbitIndex(page->area_start()), | 3557 page->AddressToMarkbitIndex(page->area_start()), |
| 3536 page->AddressToMarkbitIndex(object->address())); | 3558 page->AddressToMarkbitIndex(object->address())); |
| 3537 if (page->old_to_new_slots() != nullptr) { | 3559 if (page->old_to_new_slots() != nullptr) { |
| 3538 page->old_to_new_slots()->RemoveRange( | 3560 page->old_to_new_slots()->RemoveRange( |
| 3539 0, static_cast<int>(object->address() - page->address()), | 3561 0, static_cast<int>(object->address() - page->address()), |
| 3540 SlotSet::PREFREE_EMPTY_BUCKETS); | 3562 SlotSet::PREFREE_EMPTY_BUCKETS); |
| 3541 } | 3563 } |
| (...skipping 176 matching lines...) |
| 3718 // Unfortunately, we do not know about the slot. It could be in a | 3740 // Unfortunately, we do not know about the slot. It could be in a |
| 3719 // just freed free space object. | 3741 // just freed free space object. |
| 3720 if (heap->InToSpace(slot->Value())) { | 3742 if (heap->InToSpace(slot->Value())) { |
| 3721 return KEEP_SLOT; | 3743 return KEEP_SLOT; |
| 3722 } | 3744 } |
| 3723 } else if (heap->InToSpace(slot_reference)) { | 3745 } else if (heap->InToSpace(slot_reference)) { |
| 3724 // Slots can point to "to" space if the page has been moved, or if the | 3746 // Slots can point to "to" space if the page has been moved, or if the |
| 3725 // slot has been recorded multiple times in the remembered set. Since | 3747 // slot has been recorded multiple times in the remembered set. Since |
| 3726 // there is no forwarding information present we need to check the | 3748 // there is no forwarding information present we need to check the |
| 3727 // markbits to determine liveness. | 3749 // markbits to determine liveness. |
| 3728 if (ObjectMarking::IsBlack(reinterpret_cast<HeapObject*>(slot_reference))) | 3750 if (Marking::IsBlack(ObjectMarking::MarkBitFrom( |
| | 3751 reinterpret_cast<HeapObject*>(slot_reference)))) |
| 3729 return KEEP_SLOT; | 3752 return KEEP_SLOT; |
| 3730 } else { | 3753 } else { |
| 3731 DCHECK(!heap->InNewSpace(slot_reference)); | 3754 DCHECK(!heap->InNewSpace(slot_reference)); |
| 3732 } | 3755 } |
| 3733 return REMOVE_SLOT; | 3756 return REMOVE_SLOT; |
| 3734 } | 3757 } |
| 3735 }; | 3758 }; |
| 3736 | 3759 |
| 3737 int NumberOfPointerUpdateTasks(int pages) { | 3760 int NumberOfPointerUpdateTasks(int pages) { |
| 3738 if (!FLAG_parallel_pointer_update) return 1; | 3761 if (!FLAG_parallel_pointer_update) return 1; |
| (...skipping 313 matching lines...) |
| 4052 } | 4075 } |
| 4053 } | 4076 } |
| 4054 | 4077 |
| 4055 | 4078 |
| 4056 void MarkCompactCollector::RecordCodeTargetPatch(Address pc, Code* target) { | 4079 void MarkCompactCollector::RecordCodeTargetPatch(Address pc, Code* target) { |
| 4057 DCHECK(heap()->gc_state() == Heap::MARK_COMPACT); | 4080 DCHECK(heap()->gc_state() == Heap::MARK_COMPACT); |
| 4058 if (is_compacting()) { | 4081 if (is_compacting()) { |
| 4059 Code* host = | 4082 Code* host = |
| 4060 isolate()->inner_pointer_to_code_cache()->GcSafeFindCodeForInnerPointer( | 4083 isolate()->inner_pointer_to_code_cache()->GcSafeFindCodeForInnerPointer( |
| 4061 pc); | 4084 pc); |
| 4062 if (ObjectMarking::IsBlack(host)) { | 4085 MarkBit mark_bit = ObjectMarking::MarkBitFrom(host); |
| | 4086 if (Marking::IsBlack(mark_bit)) { |
| 4063 RelocInfo rinfo(isolate(), pc, RelocInfo::CODE_TARGET, 0, host); | 4087 RelocInfo rinfo(isolate(), pc, RelocInfo::CODE_TARGET, 0, host); |
| 4064 // The target is always in old space, we don't have to record the slot in | 4088 // The target is always in old space, we don't have to record the slot in |
| 4065 // the old-to-new remembered set. | 4089 // the old-to-new remembered set. |
| 4066 DCHECK(!heap()->InNewSpace(target)); | 4090 DCHECK(!heap()->InNewSpace(target)); |
| 4067 RecordRelocSlot(host, &rinfo, target); | 4091 RecordRelocSlot(host, &rinfo, target); |
| 4068 } | 4092 } |
| 4069 } | 4093 } |
| 4070 } | 4094 } |
| 4071 | 4095 |
| 4072 } // namespace internal | 4096 } // namespace internal |
| 4073 } // namespace v8 | 4097 } // namespace v8 |
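
The change repeated throughout this diff is mechanical: the OLD side queries and updates mark bits through object-taking wrappers (ObjectMarking::IsWhite(obj), ObjectMarking::IsBlack(object), ObjectMarking::GreyToBlack(object), SetMark(object)), while the NEW side fetches the MarkBit explicitly with ObjectMarking::MarkBitFrom(object) and hands it to the bit-level Marking helpers (Marking::IsWhite(mark_bit), Marking::GreyToBlack(markbit)), with SetMark regaining a MarkBit parameter. The following is a minimal, self-contained sketch of the two call styles; the types are toy stand-ins for illustration, not V8's real MarkBit/Marking/ObjectMarking declarations.

// Illustrative sketch only -- toy stand-ins for V8's MarkBit, Marking and
// ObjectMarking, showing the call-style difference visible in this diff.
#include <cassert>
#include <cstdint>

struct HeapObject {
  uint8_t color = 0;  // toy encoding: 0 = white, 1 = grey, 2 = black
};

// Stand-in for MarkBit: a handle onto one object's mark bits.
struct MarkBit {
  HeapObject* object;
};

// Bit-level helpers, the form used on the NEW side of this diff.
struct Marking {
  static bool IsWhite(MarkBit m) { return m.object->color == 0; }
  static bool IsGrey(MarkBit m) { return m.object->color == 1; }
  static bool IsBlack(MarkBit m) { return m.object->color == 2; }
  static bool IsBlackOrGrey(MarkBit m) { return m.object->color != 0; }
  static void MarkWhite(MarkBit m) { m.object->color = 0; }
  static void GreyToBlack(MarkBit m) { m.object->color = 2; }
};

// Object-level layer. The OLD side also used object-taking wrappers such as
// ObjectMarking::IsWhite(object); the NEW side fetches the MarkBit itself.
struct ObjectMarking {
  static MarkBit MarkBitFrom(HeapObject* object) { return MarkBit{object}; }
  static bool IsWhite(HeapObject* object) {  // OLD-style convenience wrapper
    return Marking::IsWhite(MarkBitFrom(object));
  }
};

int main() {
  HeapObject obj;
  assert(ObjectMarking::IsWhite(&obj));                 // OLD call style
  MarkBit mark_bit = ObjectMarking::MarkBitFrom(&obj);  // NEW call style:
  assert(Marking::IsWhite(mark_bit));                   // explicit MarkBit
  Marking::GreyToBlack(mark_bit);  // toy version simply sets black
  assert(Marking::IsBlack(mark_bit));
}

In the diff itself the transformation is purely at call sites: every predicate keeps its meaning, and the only difference is whether the MarkBit lookup is spelled out (and passed along to SetMark) or hidden inside an ObjectMarking wrapper.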