Chromium Code Reviews

Side by Side Diff: src/heap/mark-compact.cc

Issue 2644523002: [heap] Provide ObjectMarking with marking transitions (Closed)
Patch Set: Fix markbit clearing for LO (created 3 years, 11 months ago)
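For orientation: throughout this diff the old two-step pattern of fetching a mark bit via ObjectMarking::MarkBitFrom(obj) and then querying or mutating it through Marking is replaced by helpers that take the HeapObject directly (ObjectMarking::IsWhite/IsGrey/IsBlack/IsBlackOrGrey, ObjectMarking::GreyToBlack, ObjectMarking::ClearMarkBit), and SetMark(object, mark_bit) loses its mark-bit argument. The actual declarations live in src/heap/mark-compact.h and mark-compact-inl.h, which are not part of this file. The standalone toy model below is only a sketch of the colour states and the transition-style interface the new call sites assume; WhiteToGrey is added purely for the demo and does not appear in this diff.

// Toy model, not V8's implementation: the colour states and the
// ObjectMarking-style helpers the updated call sites in this diff rely on.
// In V8 the colour lives in mark bits on the object's page; a plain enum
// stands in for that here.
#include <cassert>

enum class Color { kWhite, kGrey, kBlack };

struct HeapObject {
  Color color = Color::kWhite;  // stand-in for the page-side mark bits
};

namespace ObjectMarking {
// Predicates now take the object itself instead of a MarkBit.
inline bool IsWhite(const HeapObject* obj) { return obj->color == Color::kWhite; }
inline bool IsGrey(const HeapObject* obj) { return obj->color == Color::kGrey; }
inline bool IsBlack(const HeapObject* obj) { return obj->color == Color::kBlack; }
inline bool IsBlackOrGrey(const HeapObject* obj) { return !IsWhite(obj); }

// Transitions that appear in this diff.
inline void GreyToBlack(HeapObject* obj) {
  assert(IsGrey(obj));
  obj->color = Color::kBlack;
}
inline void ClearMarkBit(HeapObject* obj) { obj->color = Color::kWhite; }

// Not shown in this diff; assumed here only so the demo can reach grey.
inline void WhiteToGrey(HeapObject* obj) {
  assert(IsWhite(obj));
  obj->color = Color::kGrey;
}
}  // namespace ObjectMarking

int main() {
  HeapObject obj;
  assert(ObjectMarking::IsWhite(&obj));
  ObjectMarking::WhiteToGrey(&obj);
  ObjectMarking::GreyToBlack(&obj);   // cf. DiscoverGreyObjectsOnPage below
  assert(ObjectMarking::IsBlackOrGrey(&obj));
  ObjectMarking::ClearMarkBit(&obj);  // cf. ClearMarkbits for LO space below
  assert(ObjectMarking::IsWhite(&obj));
  return 0;
}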
OLD | NEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/heap/mark-compact.h" 5 #include "src/heap/mark-compact.h"
6 6
7 #include "src/base/atomicops.h" 7 #include "src/base/atomicops.h"
8 #include "src/base/bits.h" 8 #include "src/base/bits.h"
9 #include "src/base/sys-info.h" 9 #include "src/base/sys-info.h"
10 #include "src/code-stubs.h" 10 #include "src/code-stubs.h"
(...skipping 89 matching lines...)
100 100
101 static void VerifyMarking(Heap* heap, Address bottom, Address top) { 101 static void VerifyMarking(Heap* heap, Address bottom, Address top) {
102 VerifyMarkingVisitor visitor(heap); 102 VerifyMarkingVisitor visitor(heap);
103 HeapObject* object; 103 HeapObject* object;
104 Address next_object_must_be_here_or_later = bottom; 104 Address next_object_must_be_here_or_later = bottom;
105 for (Address current = bottom; current < top;) { 105 for (Address current = bottom; current < top;) {
106 object = HeapObject::FromAddress(current); 106 object = HeapObject::FromAddress(current);
107 // One word fillers at the end of a black area can be grey. 107 // One word fillers at the end of a black area can be grey.
108 if (MarkCompactCollector::IsMarked(object) && 108 if (MarkCompactCollector::IsMarked(object) &&
109 object->map() != heap->one_pointer_filler_map()) { 109 object->map() != heap->one_pointer_filler_map()) {
110 CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(object))); 110 CHECK(ObjectMarking::IsBlack(object));
111 CHECK(current >= next_object_must_be_here_or_later); 111 CHECK(current >= next_object_must_be_here_or_later);
112 object->Iterate(&visitor); 112 object->Iterate(&visitor);
113 next_object_must_be_here_or_later = current + object->Size(); 113 next_object_must_be_here_or_later = current + object->Size();
114 // The object is either part of a black area of black allocation or a 114 // The object is either part of a black area of black allocation or a
115 // regular black object 115 // regular black object
116 Page* page = Page::FromAddress(current); 116 Page* page = Page::FromAddress(current);
117 CHECK( 117 CHECK(
118 page->markbits()->AllBitsSetInRange( 118 page->markbits()->AllBitsSetInRange(
119 page->AddressToMarkbitIndex(current), 119 page->AddressToMarkbitIndex(current),
120 page->AddressToMarkbitIndex(next_object_must_be_here_or_later)) || 120 page->AddressToMarkbitIndex(next_object_must_be_here_or_later)) ||
(...skipping 220 matching lines...)
341 341
342 342
343 void MarkCompactCollector::VerifyMarkbitsAreClean() { 343 void MarkCompactCollector::VerifyMarkbitsAreClean() {
344 VerifyMarkbitsAreClean(heap_->old_space()); 344 VerifyMarkbitsAreClean(heap_->old_space());
345 VerifyMarkbitsAreClean(heap_->code_space()); 345 VerifyMarkbitsAreClean(heap_->code_space());
346 VerifyMarkbitsAreClean(heap_->map_space()); 346 VerifyMarkbitsAreClean(heap_->map_space());
347 VerifyMarkbitsAreClean(heap_->new_space()); 347 VerifyMarkbitsAreClean(heap_->new_space());
348 348
349 LargeObjectIterator it(heap_->lo_space()); 349 LargeObjectIterator it(heap_->lo_space());
350 for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) { 350 for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) {
351 MarkBit mark_bit = ObjectMarking::MarkBitFrom(obj); 351 CHECK(ObjectMarking::IsWhite(obj));
352 CHECK(Marking::IsWhite(mark_bit));
353 CHECK_EQ(0, Page::FromAddress(obj->address())->LiveBytes()); 352 CHECK_EQ(0, Page::FromAddress(obj->address())->LiveBytes());
354 } 353 }
355 } 354 }
356 355
357 void MarkCompactCollector::VerifyWeakEmbeddedObjectsInCode() { 356 void MarkCompactCollector::VerifyWeakEmbeddedObjectsInCode() {
358 HeapObjectIterator code_iterator(heap()->code_space()); 357 HeapObjectIterator code_iterator(heap()->code_space());
359 for (HeapObject* obj = code_iterator.Next(); obj != NULL; 358 for (HeapObject* obj = code_iterator.Next(); obj != NULL;
360 obj = code_iterator.Next()) { 359 obj = code_iterator.Next()) {
361 Code* code = Code::cast(obj); 360 Code* code = Code::cast(obj);
362 if (!code->is_optimized_code()) continue; 361 if (!code->is_optimized_code()) continue;
(...skipping 28 matching lines...)
391 390
392 391
393 void MarkCompactCollector::ClearMarkbits() { 392 void MarkCompactCollector::ClearMarkbits() {
394 ClearMarkbitsInPagedSpace(heap_->code_space()); 393 ClearMarkbitsInPagedSpace(heap_->code_space());
395 ClearMarkbitsInPagedSpace(heap_->map_space()); 394 ClearMarkbitsInPagedSpace(heap_->map_space());
396 ClearMarkbitsInPagedSpace(heap_->old_space()); 395 ClearMarkbitsInPagedSpace(heap_->old_space());
397 ClearMarkbitsInNewSpace(heap_->new_space()); 396 ClearMarkbitsInNewSpace(heap_->new_space());
398 397
399 LargeObjectIterator it(heap_->lo_space()); 398 LargeObjectIterator it(heap_->lo_space());
400 for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) { 399 for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) {
401 Marking::MarkWhite(ObjectMarking::MarkBitFrom(obj)); 400 ObjectMarking::ClearMarkBit(obj);
402 MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address()); 401 MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
403 chunk->ResetProgressBar(); 402 chunk->ResetProgressBar();
404 chunk->ResetLiveBytes(); 403 chunk->ResetLiveBytes();
405 } 404 }
406 } 405 }
407 406
408 class MarkCompactCollector::Sweeper::SweeperTask : public v8::Task { 407 class MarkCompactCollector::Sweeper::SweeperTask : public v8::Task {
409 public: 408 public:
410 SweeperTask(Sweeper* sweeper, base::Semaphore* pending_sweeper_tasks, 409 SweeperTask(Sweeper* sweeper, base::Semaphore* pending_sweeper_tasks,
411 AllocationSpace space_to_start) 410 AllocationSpace space_to_start)
(...skipping 490 matching lines...)
902 901
903 JSFunction* candidate = jsfunction_candidates_head_; 902 JSFunction* candidate = jsfunction_candidates_head_;
904 JSFunction* next_candidate; 903 JSFunction* next_candidate;
905 while (candidate != NULL) { 904 while (candidate != NULL) {
906 next_candidate = GetNextCandidate(candidate); 905 next_candidate = GetNextCandidate(candidate);
907 ClearNextCandidate(candidate, undefined); 906 ClearNextCandidate(candidate, undefined);
908 907
909 SharedFunctionInfo* shared = candidate->shared(); 908 SharedFunctionInfo* shared = candidate->shared();
910 909
911 Code* code = shared->code(); 910 Code* code = shared->code();
912 MarkBit code_mark = ObjectMarking::MarkBitFrom(code); 911 if (ObjectMarking::IsWhite(code)) {
913 if (Marking::IsWhite(code_mark)) {
914 if (FLAG_trace_code_flushing && shared->is_compiled()) { 912 if (FLAG_trace_code_flushing && shared->is_compiled()) {
915 PrintF("[code-flushing clears: "); 913 PrintF("[code-flushing clears: ");
916 shared->ShortPrint(); 914 shared->ShortPrint();
917 PrintF(" - age: %d]\n", code->GetAge()); 915 PrintF(" - age: %d]\n", code->GetAge());
918 } 916 }
919 // Always flush the optimized code map if there is one. 917 // Always flush the optimized code map if there is one.
920 if (!shared->OptimizedCodeMapIsCleared()) { 918 if (!shared->OptimizedCodeMapIsCleared()) {
921 shared->ClearOptimizedCodeMap(); 919 shared->ClearOptimizedCodeMap();
922 } 920 }
923 if (shared->HasBytecodeArray()) { 921 if (shared->HasBytecodeArray()) {
924 shared->set_code(interpreter_entry_trampoline); 922 shared->set_code(interpreter_entry_trampoline);
925 candidate->set_code(interpreter_entry_trampoline); 923 candidate->set_code(interpreter_entry_trampoline);
926 } else { 924 } else {
927 shared->set_code(lazy_compile); 925 shared->set_code(lazy_compile);
928 candidate->set_code(lazy_compile); 926 candidate->set_code(lazy_compile);
929 } 927 }
930 } else { 928 } else {
931 DCHECK(Marking::IsBlack(code_mark)); 929 DCHECK(ObjectMarking::IsBlack(code));
932 candidate->set_code(code); 930 candidate->set_code(code);
933 } 931 }
934 932
935 // We are in the middle of a GC cycle so the write barrier in the code 933 // We are in the middle of a GC cycle so the write barrier in the code
936 // setter did not record the slot update and we have to do that manually. 934 // setter did not record the slot update and we have to do that manually.
937 Address slot = candidate->address() + JSFunction::kCodeEntryOffset; 935 Address slot = candidate->address() + JSFunction::kCodeEntryOffset;
938 Code* target = Code::cast(Code::GetObjectFromEntryAddress(slot)); 936 Code* target = Code::cast(Code::GetObjectFromEntryAddress(slot));
939 isolate_->heap()->mark_compact_collector()->RecordCodeEntrySlot( 937 isolate_->heap()->mark_compact_collector()->RecordCodeEntrySlot(
940 candidate, slot, target); 938 candidate, slot, target);
941 939
(...skipping 13 matching lines...)
955 Code* lazy_compile = isolate_->builtins()->builtin(Builtins::kCompileLazy); 953 Code* lazy_compile = isolate_->builtins()->builtin(Builtins::kCompileLazy);
956 Code* interpreter_entry_trampoline = 954 Code* interpreter_entry_trampoline =
957 isolate_->builtins()->builtin(Builtins::kInterpreterEntryTrampoline); 955 isolate_->builtins()->builtin(Builtins::kInterpreterEntryTrampoline);
958 SharedFunctionInfo* candidate = shared_function_info_candidates_head_; 956 SharedFunctionInfo* candidate = shared_function_info_candidates_head_;
959 SharedFunctionInfo* next_candidate; 957 SharedFunctionInfo* next_candidate;
960 while (candidate != NULL) { 958 while (candidate != NULL) {
961 next_candidate = GetNextCandidate(candidate); 959 next_candidate = GetNextCandidate(candidate);
962 ClearNextCandidate(candidate); 960 ClearNextCandidate(candidate);
963 961
964 Code* code = candidate->code(); 962 Code* code = candidate->code();
965 MarkBit code_mark = ObjectMarking::MarkBitFrom(code); 963 if (ObjectMarking::IsWhite(code)) {
966 if (Marking::IsWhite(code_mark)) {
967 if (FLAG_trace_code_flushing && candidate->is_compiled()) { 964 if (FLAG_trace_code_flushing && candidate->is_compiled()) {
968 PrintF("[code-flushing clears: "); 965 PrintF("[code-flushing clears: ");
969 candidate->ShortPrint(); 966 candidate->ShortPrint();
970 PrintF(" - age: %d]\n", code->GetAge()); 967 PrintF(" - age: %d]\n", code->GetAge());
971 } 968 }
972 // Always flush the optimized code map if there is one. 969 // Always flush the optimized code map if there is one.
973 if (!candidate->OptimizedCodeMapIsCleared()) { 970 if (!candidate->OptimizedCodeMapIsCleared()) {
974 candidate->ClearOptimizedCodeMap(); 971 candidate->ClearOptimizedCodeMap();
975 } 972 }
976 if (candidate->HasBytecodeArray()) { 973 if (candidate->HasBytecodeArray()) {
(...skipping 119 matching lines...)
1096 inline static void PushOnMarkingDeque(Heap* heap, Object* obj) { 1093 inline static void PushOnMarkingDeque(Heap* heap, Object* obj) {
1097 HeapObject* object = HeapObject::cast(obj); 1094 HeapObject* object = HeapObject::cast(obj);
1098 MarkBit mark_bit = ObjectMarking::MarkBitFrom(object); 1095 MarkBit mark_bit = ObjectMarking::MarkBitFrom(object);
1099 heap->mark_compact_collector()->MarkObject(object, mark_bit); 1096 heap->mark_compact_collector()->MarkObject(object, mark_bit);
1100 } 1097 }
1101 1098
1102 inline static bool MarkRecursively(Heap* heap, HeapObject* object) { 1099 inline static bool MarkRecursively(Heap* heap, HeapObject* object) {
1103 StackLimitCheck check(heap->isolate()); 1100 StackLimitCheck check(heap->isolate());
1104 if (check.HasOverflowed()) return false; 1101 if (check.HasOverflowed()) return false;
1105 1102
1106 MarkBit mark = ObjectMarking::MarkBitFrom(object); 1103 if (ObjectMarking::IsBlackOrGrey(object)) return true;
1107 if (Marking::IsBlackOrGrey(mark)) return true; 1104 heap->mark_compact_collector()->SetMark(object);
1108 heap->mark_compact_collector()->SetMark(object, mark);
1109 IterateBody(object->map(), object); 1105 IterateBody(object->map(), object);
1110 return true; 1106 return true;
1111 } 1107 }
1112 }; 1108 };
1113 1109
1114 class MarkCompactMarkingVisitor 1110 class MarkCompactMarkingVisitor
1115 : public StaticMarkingVisitor<MarkCompactMarkingVisitor> { 1111 : public StaticMarkingVisitor<MarkCompactMarkingVisitor> {
1116 public: 1112 public:
1117 static void Initialize(); 1113 static void Initialize();
1118 1114
(...skipping 17 matching lines...)
1136 1132
1137 // Marks the object black and pushes it on the marking stack. 1133 // Marks the object black and pushes it on the marking stack.
1138 INLINE(static void MarkObject(Heap* heap, HeapObject* object)) { 1134 INLINE(static void MarkObject(Heap* heap, HeapObject* object)) {
1139 MarkBit mark = ObjectMarking::MarkBitFrom(object); 1135 MarkBit mark = ObjectMarking::MarkBitFrom(object);
1140 heap->mark_compact_collector()->MarkObject(object, mark); 1136 heap->mark_compact_collector()->MarkObject(object, mark);
1141 } 1137 }
1142 1138
1143 // Marks the object black without pushing it on the marking stack. 1139 // Marks the object black without pushing it on the marking stack.
1144 // Returns true if object needed marking and false otherwise. 1140 // Returns true if object needed marking and false otherwise.
1145 INLINE(static bool MarkObjectWithoutPush(Heap* heap, HeapObject* object)) { 1141 INLINE(static bool MarkObjectWithoutPush(Heap* heap, HeapObject* object)) {
1146 MarkBit mark_bit = ObjectMarking::MarkBitFrom(object); 1142 if (ObjectMarking::IsWhite(object)) {
1147 if (Marking::IsWhite(mark_bit)) { 1143 heap->mark_compact_collector()->SetMark(object);
1148 heap->mark_compact_collector()->SetMark(object, mark_bit);
1149 return true; 1144 return true;
1150 } 1145 }
1151 return false; 1146 return false;
1152 } 1147 }
1153 1148
1154 // Mark object pointed to by p. 1149 // Mark object pointed to by p.
1155 INLINE(static void MarkObjectByPointer(MarkCompactCollector* collector, 1150 INLINE(static void MarkObjectByPointer(MarkCompactCollector* collector,
1156 HeapObject* object, Object** p)) { 1151 HeapObject* object, Object** p)) {
1157 if (!(*p)->IsHeapObject()) return; 1152 if (!(*p)->IsHeapObject()) return;
1158 HeapObject* target_object = HeapObject::cast(*p); 1153 HeapObject* target_object = HeapObject::cast(*p);
1159 collector->RecordSlot(object, p, target_object); 1154 collector->RecordSlot(object, p, target_object);
1160 MarkBit mark = ObjectMarking::MarkBitFrom(target_object); 1155 MarkBit mark = ObjectMarking::MarkBitFrom(target_object);
1161 collector->MarkObject(target_object, mark); 1156 collector->MarkObject(target_object, mark);
1162 } 1157 }
1163 1158
1164 1159
1165 // Visit an unmarked object. 1160 // Visit an unmarked object.
1166 INLINE(static void VisitUnmarkedObject(MarkCompactCollector* collector, 1161 INLINE(static void VisitUnmarkedObject(MarkCompactCollector* collector,
1167 HeapObject* obj)) { 1162 HeapObject* obj)) {
1168 #ifdef DEBUG 1163 #ifdef DEBUG
1169 DCHECK(collector->heap()->Contains(obj)); 1164 DCHECK(collector->heap()->Contains(obj));
1170 DCHECK(!collector->heap()->mark_compact_collector()->IsMarked(obj)); 1165 DCHECK(!collector->heap()->mark_compact_collector()->IsMarked(obj));
1171 #endif 1166 #endif
1172 Map* map = obj->map(); 1167 Map* map = obj->map();
1173 Heap* heap = obj->GetHeap(); 1168 Heap* heap = obj->GetHeap();
1174 MarkBit mark = ObjectMarking::MarkBitFrom(obj); 1169 heap->mark_compact_collector()->SetMark(obj);
1175 heap->mark_compact_collector()->SetMark(obj, mark);
1176 // Mark the map pointer and the body. 1170 // Mark the map pointer and the body.
1177 MarkBit map_mark = ObjectMarking::MarkBitFrom(map); 1171 MarkBit map_mark = ObjectMarking::MarkBitFrom(map);
1178 heap->mark_compact_collector()->MarkObject(map, map_mark); 1172 heap->mark_compact_collector()->MarkObject(map, map_mark);
1179 IterateBody(map, obj); 1173 IterateBody(map, obj);
1180 } 1174 }
1181 1175
1182 // Visit all unmarked objects pointed to by [start, end). 1176 // Visit all unmarked objects pointed to by [start, end).
1183 // Returns false if the operation fails (lack of stack space). 1177 // Returns false if the operation fails (lack of stack space).
1184 INLINE(static bool VisitUnmarkedObjects(Heap* heap, HeapObject* object, 1178 INLINE(static bool VisitUnmarkedObjects(Heap* heap, HeapObject* object,
1185 Object** start, Object** end)) { 1179 Object** start, Object** end)) {
1186 // Return false is we are close to the stack limit. 1180 // Return false is we are close to the stack limit.
1187 StackLimitCheck check(heap->isolate()); 1181 StackLimitCheck check(heap->isolate());
1188 if (check.HasOverflowed()) return false; 1182 if (check.HasOverflowed()) return false;
1189 1183
1190 MarkCompactCollector* collector = heap->mark_compact_collector(); 1184 MarkCompactCollector* collector = heap->mark_compact_collector();
1191 // Visit the unmarked objects. 1185 // Visit the unmarked objects.
1192 for (Object** p = start; p < end; p++) { 1186 for (Object** p = start; p < end; p++) {
1193 Object* o = *p; 1187 Object* o = *p;
1194 if (!o->IsHeapObject()) continue; 1188 if (!o->IsHeapObject()) continue;
1195 collector->RecordSlot(object, p, o); 1189 collector->RecordSlot(object, p, o);
1196 HeapObject* obj = HeapObject::cast(o); 1190 HeapObject* obj = HeapObject::cast(o);
1197 MarkBit mark = ObjectMarking::MarkBitFrom(obj); 1191 if (ObjectMarking::IsBlackOrGrey(obj)) continue;
1198 if (Marking::IsBlackOrGrey(mark)) continue;
1199 VisitUnmarkedObject(collector, obj); 1192 VisitUnmarkedObject(collector, obj);
1200 } 1193 }
1201 return true; 1194 return true;
1202 } 1195 }
1203 1196
1204 private: 1197 private:
1205 // Code flushing support. 1198 // Code flushing support.
1206 1199
1207 static const int kRegExpCodeThreshold = 5; 1200 static const int kRegExpCodeThreshold = 5;
1208 1201
(...skipping 12 matching lines...)
1221 if (!code->IsSmi() && 1214 if (!code->IsSmi() &&
1222 HeapObject::cast(code)->map()->instance_type() == CODE_TYPE) { 1215 HeapObject::cast(code)->map()->instance_type() == CODE_TYPE) {
1223 // Save a copy that can be reinstated if we need the code again. 1216 // Save a copy that can be reinstated if we need the code again.
1224 re->SetDataAt(JSRegExp::saved_code_index(is_one_byte), code); 1217 re->SetDataAt(JSRegExp::saved_code_index(is_one_byte), code);
1225 1218
1226 // Saving a copy might create a pointer into compaction candidate 1219 // Saving a copy might create a pointer into compaction candidate
1227 // that was not observed by marker. This might happen if JSRegExp data 1220 // that was not observed by marker. This might happen if JSRegExp data
1228 // was marked through the compilation cache before marker reached JSRegExp 1221 // was marked through the compilation cache before marker reached JSRegExp
1229 // object. 1222 // object.
1230 FixedArray* data = FixedArray::cast(re->data()); 1223 FixedArray* data = FixedArray::cast(re->data());
1231 if (Marking::IsBlackOrGrey(ObjectMarking::MarkBitFrom(data))) { 1224 if (ObjectMarking::IsBlackOrGrey(data)) {
1232 Object** slot = 1225 Object** slot =
1233 data->data_start() + JSRegExp::saved_code_index(is_one_byte); 1226 data->data_start() + JSRegExp::saved_code_index(is_one_byte);
1234 heap->mark_compact_collector()->RecordSlot(data, slot, code); 1227 heap->mark_compact_collector()->RecordSlot(data, slot, code);
1235 } 1228 }
1236 1229
1237 // Set a number in the 0-255 range to guarantee no smi overflow. 1230 // Set a number in the 0-255 range to guarantee no smi overflow.
1238 re->SetDataAt(JSRegExp::code_index(is_one_byte), 1231 re->SetDataAt(JSRegExp::code_index(is_one_byte),
1239 Smi::FromInt(heap->ms_count() & 0xff)); 1232 Smi::FromInt(heap->ms_count() & 0xff));
1240 } else if (code->IsSmi()) { 1233 } else if (code->IsSmi()) {
1241 int value = Smi::cast(code)->value(); 1234 int value = Smi::cast(code)->value();
(...skipping 145 matching lines...)
1387 private: 1380 private:
1388 void MarkObjectByPointer(Object** p) { 1381 void MarkObjectByPointer(Object** p) {
1389 if (!(*p)->IsHeapObject()) return; 1382 if (!(*p)->IsHeapObject()) return;
1390 1383
1391 HeapObject* object = HeapObject::cast(*p); 1384 HeapObject* object = HeapObject::cast(*p);
1392 1385
1393 if (mode == MarkCompactMode::YOUNG_GENERATION && 1386 if (mode == MarkCompactMode::YOUNG_GENERATION &&
1394 !collector_->heap()->InNewSpace(object)) 1387 !collector_->heap()->InNewSpace(object))
1395 return; 1388 return;
1396 1389
1397 MarkBit mark_bit = ObjectMarking::MarkBitFrom(object); 1390 if (ObjectMarking::IsBlackOrGrey(object)) return;
1398 if (Marking::IsBlackOrGrey(mark_bit)) return;
1399 1391
1400 Map* map = object->map(); 1392 Map* map = object->map();
1401 // Mark the object. 1393 // Mark the object.
1402 collector_->SetMark(object, mark_bit); 1394 collector_->SetMark(object);
1403 1395
1404 switch (mode) { 1396 switch (mode) {
1405 case MarkCompactMode::FULL: { 1397 case MarkCompactMode::FULL: {
1406 // Mark the map pointer and body, and push them on the marking stack. 1398 // Mark the map pointer and body, and push them on the marking stack.
1407 MarkBit map_mark = ObjectMarking::MarkBitFrom(map); 1399 MarkBit map_mark = ObjectMarking::MarkBitFrom(map);
1408 collector_->MarkObject(map, map_mark); 1400 collector_->MarkObject(map, map_mark);
1409 MarkCompactMarkingVisitor::IterateBody(map, object); 1401 MarkCompactMarkingVisitor::IterateBody(map, object);
1410 } break; 1402 } break;
1411 case MarkCompactMode::YOUNG_GENERATION: 1403 case MarkCompactMode::YOUNG_GENERATION:
1412 StaticYoungGenerationMarkingVisitor::IterateBody(map, object); 1404 StaticYoungGenerationMarkingVisitor::IterateBody(map, object);
(...skipping 17 matching lines...)
1430 : heap_(heap), pointers_removed_(0), table_(table) { 1422 : heap_(heap), pointers_removed_(0), table_(table) {
1431 DCHECK(!record_slots || table != nullptr); 1423 DCHECK(!record_slots || table != nullptr);
1432 } 1424 }
1433 1425
1434 void VisitPointers(Object** start, Object** end) override { 1426 void VisitPointers(Object** start, Object** end) override {
1435 // Visit all HeapObject pointers in [start, end). 1427 // Visit all HeapObject pointers in [start, end).
1436 MarkCompactCollector* collector = heap_->mark_compact_collector(); 1428 MarkCompactCollector* collector = heap_->mark_compact_collector();
1437 for (Object** p = start; p < end; p++) { 1429 for (Object** p = start; p < end; p++) {
1438 Object* o = *p; 1430 Object* o = *p;
1439 if (o->IsHeapObject()) { 1431 if (o->IsHeapObject()) {
1440 if (Marking::IsWhite(ObjectMarking::MarkBitFrom(HeapObject::cast(o)))) { 1432 if (ObjectMarking::IsWhite(HeapObject::cast(o))) {
1441 if (finalize_external_strings) { 1433 if (finalize_external_strings) {
1442 DCHECK(o->IsExternalString()); 1434 DCHECK(o->IsExternalString());
1443 heap_->FinalizeExternalString(String::cast(*p)); 1435 heap_->FinalizeExternalString(String::cast(*p));
1444 } else { 1436 } else {
1445 pointers_removed_++; 1437 pointers_removed_++;
1446 } 1438 }
1447 // Set the entry to the_hole_value (as deleted). 1439 // Set the entry to the_hole_value (as deleted).
1448 *p = heap_->the_hole_value(); 1440 *p = heap_->the_hole_value();
1449 } else if (record_slots) { 1441 } else if (record_slots) {
1450 // StringTable contains only old space strings. 1442 // StringTable contains only old space strings.
(...skipping 16 matching lines...)
1467 }; 1459 };
1468 1460
1469 typedef StringTableCleaner<false, true> InternalizedStringTableCleaner; 1461 typedef StringTableCleaner<false, true> InternalizedStringTableCleaner;
1470 typedef StringTableCleaner<true, false> ExternalStringTableCleaner; 1462 typedef StringTableCleaner<true, false> ExternalStringTableCleaner;
1471 1463
1472 // Implementation of WeakObjectRetainer for mark compact GCs. All marked objects 1464 // Implementation of WeakObjectRetainer for mark compact GCs. All marked objects
1473 // are retained. 1465 // are retained.
1474 class MarkCompactWeakObjectRetainer : public WeakObjectRetainer { 1466 class MarkCompactWeakObjectRetainer : public WeakObjectRetainer {
1475 public: 1467 public:
1476 virtual Object* RetainAs(Object* object) { 1468 virtual Object* RetainAs(Object* object) {
1477 MarkBit mark_bit = ObjectMarking::MarkBitFrom(HeapObject::cast(object)); 1469 DCHECK(!ObjectMarking::IsGrey(HeapObject::cast(object)));
1478 DCHECK(!Marking::IsGrey(mark_bit)); 1470 if (ObjectMarking::IsBlack(HeapObject::cast(object))) {
1479 if (Marking::IsBlack(mark_bit)) {
1480 return object; 1471 return object;
1481 } else if (object->IsAllocationSite() && 1472 } else if (object->IsAllocationSite() &&
1482 !(AllocationSite::cast(object)->IsZombie())) { 1473 !(AllocationSite::cast(object)->IsZombie())) {
1483 // "dead" AllocationSites need to live long enough for a traversal of new 1474 // "dead" AllocationSites need to live long enough for a traversal of new
1484 // space. These sites get a one-time reprieve. 1475 // space. These sites get a one-time reprieve.
1485 AllocationSite* site = AllocationSite::cast(object); 1476 AllocationSite* site = AllocationSite::cast(object);
1486 site->MarkZombie(); 1477 site->MarkZombie();
1487 site->GetHeap()->mark_compact_collector()->MarkAllocationSite(site); 1478 site->GetHeap()->mark_compact_collector()->MarkAllocationSite(site);
1488 return object; 1479 return object;
1489 } else { 1480 } else {
1490 return NULL; 1481 return NULL;
1491 } 1482 }
1492 } 1483 }
1493 }; 1484 };
1494 1485
1495 1486
1496 // Fill the marking stack with overflowed objects returned by the given 1487 // Fill the marking stack with overflowed objects returned by the given
1497 // iterator. Stop when the marking stack is filled or the end of the space 1488 // iterator. Stop when the marking stack is filled or the end of the space
1498 // is reached, whichever comes first. 1489 // is reached, whichever comes first.
1499 template <class T> 1490 template <class T>
1500 void MarkCompactCollector::DiscoverGreyObjectsWithIterator(T* it) { 1491 void MarkCompactCollector::DiscoverGreyObjectsWithIterator(T* it) {
1501 // The caller should ensure that the marking stack is initially not full, 1492 // The caller should ensure that the marking stack is initially not full,
1502 // so that we don't waste effort pointlessly scanning for objects. 1493 // so that we don't waste effort pointlessly scanning for objects.
1503 DCHECK(!marking_deque()->IsFull()); 1494 DCHECK(!marking_deque()->IsFull());
1504 1495
1505 Map* filler_map = heap()->one_pointer_filler_map(); 1496 Map* filler_map = heap()->one_pointer_filler_map();
1506 for (HeapObject* object = it->Next(); object != NULL; object = it->Next()) { 1497 for (HeapObject* object = it->Next(); object != NULL; object = it->Next()) {
1507 MarkBit markbit = ObjectMarking::MarkBitFrom(object); 1498 if ((object->map() != filler_map) && ObjectMarking::IsGrey(object)) {
1508 if ((object->map() != filler_map) && Marking::IsGrey(markbit)) { 1499 ObjectMarking::GreyToBlack(object);
1509 Marking::GreyToBlack(markbit);
1510 PushBlack(object); 1500 PushBlack(object);
1511 if (marking_deque()->IsFull()) return; 1501 if (marking_deque()->IsFull()) return;
1512 } 1502 }
1513 } 1503 }
1514 } 1504 }
1515 1505
1516 void MarkCompactCollector::DiscoverGreyObjectsOnPage(MemoryChunk* p) { 1506 void MarkCompactCollector::DiscoverGreyObjectsOnPage(MemoryChunk* p) {
1517 DCHECK(!marking_deque()->IsFull()); 1507 DCHECK(!marking_deque()->IsFull());
1518 LiveObjectIterator<kGreyObjects> it(p); 1508 LiveObjectIterator<kGreyObjects> it(p);
1519 HeapObject* object = NULL; 1509 HeapObject* object = NULL;
1520 while ((object = it.Next()) != NULL) { 1510 while ((object = it.Next()) != NULL) {
1521 MarkBit markbit = ObjectMarking::MarkBitFrom(object); 1511 DCHECK(ObjectMarking::IsGrey(object));
1522 DCHECK(Marking::IsGrey(markbit)); 1512 ObjectMarking::GreyToBlack(object);
1523 Marking::GreyToBlack(markbit);
1524 PushBlack(object); 1513 PushBlack(object);
1525 if (marking_deque()->IsFull()) return; 1514 if (marking_deque()->IsFull()) return;
1526 } 1515 }
1527 } 1516 }
1528 1517
1529 class RecordMigratedSlotVisitor final : public ObjectVisitor { 1518 class RecordMigratedSlotVisitor final : public ObjectVisitor {
1530 public: 1519 public:
1531 explicit RecordMigratedSlotVisitor(MarkCompactCollector* collector) 1520 explicit RecordMigratedSlotVisitor(MarkCompactCollector* collector)
1532 : collector_(collector) {} 1521 : collector_(collector) {}
1533 1522
(...skipping 429 matching lines...)
1963 for (Page* page : PageRange(space->bottom(), space->top())) { 1952 for (Page* page : PageRange(space->bottom(), space->top())) {
1964 DiscoverGreyObjectsOnPage(page); 1953 DiscoverGreyObjectsOnPage(page);
1965 if (marking_deque()->IsFull()) return; 1954 if (marking_deque()->IsFull()) return;
1966 } 1955 }
1967 } 1956 }
1968 1957
1969 1958
1970 bool MarkCompactCollector::IsUnmarkedHeapObject(Object** p) { 1959 bool MarkCompactCollector::IsUnmarkedHeapObject(Object** p) {
1971 Object* o = *p; 1960 Object* o = *p;
1972 if (!o->IsHeapObject()) return false; 1961 if (!o->IsHeapObject()) return false;
1973 HeapObject* heap_object = HeapObject::cast(o); 1962 return ObjectMarking::IsWhite(HeapObject::cast(o));
1974 MarkBit mark = ObjectMarking::MarkBitFrom(heap_object);
1975 return Marking::IsWhite(mark);
1976 } 1963 }
1977 1964
1978 1965
1979 bool MarkCompactCollector::IsUnmarkedHeapObjectWithHeap(Heap* heap, 1966 bool MarkCompactCollector::IsUnmarkedHeapObjectWithHeap(Heap* heap,
1980 Object** p) { 1967 Object** p) {
1981 Object* o = *p; 1968 Object* o = *p;
1982 DCHECK(o->IsHeapObject()); 1969 DCHECK(o->IsHeapObject());
1983 HeapObject* heap_object = HeapObject::cast(o); 1970 return ObjectMarking::IsWhite(HeapObject::cast(o));
1984 MarkBit mark = ObjectMarking::MarkBitFrom(heap_object);
1985 return Marking::IsWhite(mark);
1986 } 1971 }
1987 1972
1988 void MarkCompactCollector::MarkStringTable( 1973 void MarkCompactCollector::MarkStringTable(
1989 RootMarkingVisitor<MarkCompactMode::FULL>* visitor) { 1974 RootMarkingVisitor<MarkCompactMode::FULL>* visitor) {
1990 StringTable* string_table = heap()->string_table(); 1975 StringTable* string_table = heap()->string_table();
1991 // Mark the string table itself. 1976 // Mark the string table itself.
1992 MarkBit string_table_mark = ObjectMarking::MarkBitFrom(string_table); 1977 if (ObjectMarking::IsWhite(string_table)) {
1993 if (Marking::IsWhite(string_table_mark)) {
1994 // String table could have already been marked by visiting the handles list. 1978 // String table could have already been marked by visiting the handles list.
1995 SetMark(string_table, string_table_mark); 1979 SetMark(string_table);
1996 } 1980 }
1997 // Explicitly mark the prefix. 1981 // Explicitly mark the prefix.
1998 string_table->IteratePrefix(visitor); 1982 string_table->IteratePrefix(visitor);
1999 ProcessMarkingDeque<MarkCompactMode::FULL>(); 1983 ProcessMarkingDeque<MarkCompactMode::FULL>();
2000 } 1984 }
2001 1985
2002 1986
2003 void MarkCompactCollector::MarkAllocationSite(AllocationSite* site) { 1987 void MarkCompactCollector::MarkAllocationSite(AllocationSite* site) {
2004 MarkBit mark_bit = ObjectMarking::MarkBitFrom(site); 1988 SetMark(site);
Hannes Payer (out of office), 2017/01/19 14:41:51: We may want to unify the marking functions a bit.
Michael Lippautz, 2017/01/19 18:50:12: Acknowledged.
2005 SetMark(site, mark_bit);
2006 } 1989 }
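On the review note above about unifying the marking functions: purely as an illustration, and not code from this patch nor the way mark-compact.cc actually sequences colours, a single entry point could fold the white check, the colour transition, and the deque push into one call, so callers like MarkAllocationSite would not need to think about mark bits at all. A minimal self-contained sketch of that idea, reusing the same toy colour model as the sketch near the top of this page:

// Illustration only: a hypothetical unified marking entry point.
#include <cassert>
#include <vector>

enum class Color { kWhite, kGrey, kBlack };
struct HeapObject { Color color = Color::kWhite; };

class Collector {
 public:
  // Marks a white object grey and queues it; returns false if the object
  // was already grey or black, so callers need no separate colour check.
  bool MarkAndPush(HeapObject* obj) {
    if (obj->color != Color::kWhite) return false;
    obj->color = Color::kGrey;
    marking_deque_.push_back(obj);
    return true;
  }

  // Drains the queue, blackening each object; body visiting is omitted.
  void ProcessMarkingDeque() {
    while (!marking_deque_.empty()) {
      HeapObject* obj = marking_deque_.back();
      marking_deque_.pop_back();
      obj->color = Color::kBlack;
    }
  }

 private:
  std::vector<HeapObject*> marking_deque_;
};

int main() {
  Collector collector;
  HeapObject a;
  assert(collector.MarkAndPush(&a));
  assert(!collector.MarkAndPush(&a));  // second call is a no-op
  collector.ProcessMarkingDeque();
  assert(a.color == Color::kBlack);
  return 0;
}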
2007 1990
2008 void MarkCompactCollector::MarkRoots( 1991 void MarkCompactCollector::MarkRoots(
2009 RootMarkingVisitor<MarkCompactMode::FULL>* visitor) { 1992 RootMarkingVisitor<MarkCompactMode::FULL>* visitor) {
2010 // Mark the heap roots including global variables, stack variables, 1993 // Mark the heap roots including global variables, stack variables,
2011 // etc., and all objects reachable from them. 1994 // etc., and all objects reachable from them.
2012 heap()->IterateStrongRoots(visitor, VISIT_ONLY_STRONG); 1995 heap()->IterateStrongRoots(visitor, VISIT_ONLY_STRONG);
2013 1996
2014 // Handle the string table specially. 1997 // Handle the string table specially.
2015 MarkStringTable(visitor); 1998 MarkStringTable(visitor);
(...skipping 42 matching lines...)
2058 // After: the marking stack is empty, and all objects reachable from the 2041 // After: the marking stack is empty, and all objects reachable from the
2059 // marking stack have been marked, or are overflowed in the heap. 2042 // marking stack have been marked, or are overflowed in the heap.
2060 template <MarkCompactMode mode> 2043 template <MarkCompactMode mode>
2061 void MarkCompactCollector::EmptyMarkingDeque() { 2044 void MarkCompactCollector::EmptyMarkingDeque() {
2062 while (!marking_deque()->IsEmpty()) { 2045 while (!marking_deque()->IsEmpty()) {
2063 HeapObject* object = marking_deque()->Pop(); 2046 HeapObject* object = marking_deque()->Pop();
2064 2047
2065 DCHECK(!object->IsFiller()); 2048 DCHECK(!object->IsFiller());
2066 DCHECK(object->IsHeapObject()); 2049 DCHECK(object->IsHeapObject());
2067 DCHECK(heap()->Contains(object)); 2050 DCHECK(heap()->Contains(object));
2068 DCHECK(!Marking::IsWhite(ObjectMarking::MarkBitFrom(object))); 2051 DCHECK(!ObjectMarking::IsWhite(object));
2069 2052
2070 Map* map = object->map(); 2053 Map* map = object->map();
2071 switch (mode) { 2054 switch (mode) {
2072 case MarkCompactMode::FULL: { 2055 case MarkCompactMode::FULL: {
2073 MarkBit map_mark = ObjectMarking::MarkBitFrom(map); 2056 MarkBit map_mark = ObjectMarking::MarkBitFrom(map);
2074 MarkObject(map, map_mark); 2057 MarkObject(map, map_mark);
2075 MarkCompactMarkingVisitor::IterateBody(map, object); 2058 MarkCompactMarkingVisitor::IterateBody(map, object);
2076 } break; 2059 } break;
2077 case MarkCompactMode::YOUNG_GENERATION: { 2060 case MarkCompactMode::YOUNG_GENERATION: {
2078 DCHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(object))); 2061 DCHECK(ObjectMarking::IsBlack(object));
2079 StaticYoungGenerationMarkingVisitor::IterateBody(map, object); 2062 StaticYoungGenerationMarkingVisitor::IterateBody(map, object);
2080 } break; 2063 } break;
2081 } 2064 }
2082 } 2065 }
2083 } 2066 }
2084 2067
2085 2068
2086 // Sweep the heap for overflowed objects, clear their overflow bits, and 2069 // Sweep the heap for overflowed objects, clear their overflow bits, and
2087 // push them on the marking stack. Stop early if the marking stack fills 2070 // push them on the marking stack. Stop early if the marking stack fills
2088 // before sweeping completes. If sweeping completes, there are no remaining 2071 // before sweeping completes. If sweeping completes, there are no remaining
(...skipping 180 matching lines...)
2269 ObjectStatsVisitor(Heap* heap, ObjectStats* live_stats, 2252 ObjectStatsVisitor(Heap* heap, ObjectStats* live_stats,
2270 ObjectStats* dead_stats) 2253 ObjectStats* dead_stats)
2271 : live_collector_(heap, live_stats), dead_collector_(heap, dead_stats) { 2254 : live_collector_(heap, live_stats), dead_collector_(heap, dead_stats) {
2272 DCHECK_NOT_NULL(live_stats); 2255 DCHECK_NOT_NULL(live_stats);
2273 DCHECK_NOT_NULL(dead_stats); 2256 DCHECK_NOT_NULL(dead_stats);
2274 // Global objects are roots and thus recorded as live. 2257 // Global objects are roots and thus recorded as live.
2275 live_collector_.CollectGlobalStatistics(); 2258 live_collector_.CollectGlobalStatistics();
2276 } 2259 }
2277 2260
2278 bool Visit(HeapObject* obj) override { 2261 bool Visit(HeapObject* obj) override {
2279 if (Marking::IsBlack(ObjectMarking::MarkBitFrom(obj))) { 2262 if (ObjectMarking::IsBlack(obj)) {
2280 live_collector_.CollectStatistics(obj); 2263 live_collector_.CollectStatistics(obj);
2281 } else { 2264 } else {
2282 DCHECK(!Marking::IsGrey(ObjectMarking::MarkBitFrom(obj))); 2265 DCHECK(!ObjectMarking::IsGrey(obj));
2283 dead_collector_.CollectStatistics(obj); 2266 dead_collector_.CollectStatistics(obj);
2284 } 2267 }
2285 return true; 2268 return true;
2286 } 2269 }
2287 2270
2288 private: 2271 private:
2289 ObjectStatsCollector live_collector_; 2272 ObjectStatsCollector live_collector_;
2290 ObjectStatsCollector dead_collector_; 2273 ObjectStatsCollector dead_collector_;
2291 }; 2274 };
2292 2275
(...skipping 35 matching lines...)
2328 } 2311 }
2329 2312
2330 SlotCallbackResult MarkCompactCollector::CheckAndMarkObject( 2313 SlotCallbackResult MarkCompactCollector::CheckAndMarkObject(
2331 Heap* heap, Address slot_address) { 2314 Heap* heap, Address slot_address) {
2332 Object* object = *reinterpret_cast<Object**>(slot_address); 2315 Object* object = *reinterpret_cast<Object**>(slot_address);
2333 if (heap->InNewSpace(object)) { 2316 if (heap->InNewSpace(object)) {
2334 // Marking happens before flipping the young generation, so the object 2317 // Marking happens before flipping the young generation, so the object
2335 // has to be in ToSpace. 2318 // has to be in ToSpace.
2336 DCHECK(heap->InToSpace(object)); 2319 DCHECK(heap->InToSpace(object));
2337 HeapObject* heap_object = reinterpret_cast<HeapObject*>(object); 2320 HeapObject* heap_object = reinterpret_cast<HeapObject*>(object);
2338 MarkBit mark_bit = ObjectMarking::MarkBitFrom(heap_object); 2321 if (ObjectMarking::IsBlackOrGrey(heap_object)) {
2339 if (Marking::IsBlackOrGrey(mark_bit)) {
2340 return KEEP_SLOT; 2322 return KEEP_SLOT;
2341 } 2323 }
2342 heap->mark_compact_collector()->SetMark(heap_object, mark_bit); 2324 heap->mark_compact_collector()->SetMark(heap_object);
2343 StaticYoungGenerationMarkingVisitor::IterateBody(heap_object->map(), 2325 StaticYoungGenerationMarkingVisitor::IterateBody(heap_object->map(),
2344 heap_object); 2326 heap_object);
2345 return KEEP_SLOT; 2327 return KEEP_SLOT;
2346 } 2328 }
2347 return REMOVE_SLOT; 2329 return REMOVE_SLOT;
2348 } 2330 }
2349 2331
2350 static bool IsUnmarkedObject(Heap* heap, Object** p) { 2332 static bool IsUnmarkedObject(Heap* heap, Object** p) {
2351 DCHECK_IMPLIES(heap->InNewSpace(*p), heap->InToSpace(*p)); 2333 DCHECK_IMPLIES(heap->InNewSpace(*p), heap->InToSpace(*p));
2352 return heap->InNewSpace(*p) && 2334 return heap->InNewSpace(*p) && !ObjectMarking::IsBlack(HeapObject::cast(*p));
2353 !Marking::IsBlack(ObjectMarking::MarkBitFrom(HeapObject::cast(*p)));
2354 } 2335 }
2355 2336
2356 void MarkCompactCollector::MarkLiveObjectsInYoungGeneration() { 2337 void MarkCompactCollector::MarkLiveObjectsInYoungGeneration() {
2357 TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_MARK); 2338 TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_MARK);
2358 2339
2359 PostponeInterruptsScope postpone(isolate()); 2340 PostponeInterruptsScope postpone(isolate());
2360 2341
2361 StaticYoungGenerationMarkingVisitor::Initialize(heap()); 2342 StaticYoungGenerationMarkingVisitor::Initialize(heap());
2362 RootMarkingVisitor<MarkCompactMode::YOUNG_GENERATION> root_visitor(heap()); 2343 RootMarkingVisitor<MarkCompactMode::YOUNG_GENERATION> root_visitor(heap());
2363 2344
(...skipping 261 matching lines...)
2625 } 2606 }
2626 2607
2627 2608
2628 void MarkCompactCollector::ClearSimpleMapTransitions( 2609 void MarkCompactCollector::ClearSimpleMapTransitions(
2629 Object* non_live_map_list) { 2610 Object* non_live_map_list) {
2630 Object* the_hole_value = heap()->the_hole_value(); 2611 Object* the_hole_value = heap()->the_hole_value();
2631 Object* weak_cell_obj = non_live_map_list; 2612 Object* weak_cell_obj = non_live_map_list;
2632 while (weak_cell_obj != Smi::kZero) { 2613 while (weak_cell_obj != Smi::kZero) {
2633 WeakCell* weak_cell = WeakCell::cast(weak_cell_obj); 2614 WeakCell* weak_cell = WeakCell::cast(weak_cell_obj);
2634 Map* map = Map::cast(weak_cell->value()); 2615 Map* map = Map::cast(weak_cell->value());
2635 DCHECK(Marking::IsWhite(ObjectMarking::MarkBitFrom(map))); 2616 DCHECK(ObjectMarking::IsWhite(map));
2636 Object* potential_parent = map->constructor_or_backpointer(); 2617 Object* potential_parent = map->constructor_or_backpointer();
2637 if (potential_parent->IsMap()) { 2618 if (potential_parent->IsMap()) {
2638 Map* parent = Map::cast(potential_parent); 2619 Map* parent = Map::cast(potential_parent);
2639 if (Marking::IsBlackOrGrey(ObjectMarking::MarkBitFrom(parent)) && 2620 if (ObjectMarking::IsBlackOrGrey(parent) &&
2640 parent->raw_transitions() == weak_cell) { 2621 parent->raw_transitions() == weak_cell) {
2641 ClearSimpleMapTransition(parent, map); 2622 ClearSimpleMapTransition(parent, map);
2642 } 2623 }
2643 } 2624 }
2644 weak_cell->clear(); 2625 weak_cell->clear();
2645 weak_cell_obj = weak_cell->next(); 2626 weak_cell_obj = weak_cell->next();
2646 weak_cell->clear_next(the_hole_value); 2627 weak_cell->clear_next(the_hole_value);
2647 } 2628 }
2648 } 2629 }
2649 2630
(...skipping 18 matching lines...)
2668 void MarkCompactCollector::ClearFullMapTransitions() { 2649 void MarkCompactCollector::ClearFullMapTransitions() {
2669 HeapObject* undefined = heap()->undefined_value(); 2650 HeapObject* undefined = heap()->undefined_value();
2670 Object* obj = heap()->encountered_transition_arrays(); 2651 Object* obj = heap()->encountered_transition_arrays();
2671 while (obj != Smi::kZero) { 2652 while (obj != Smi::kZero) {
2672 TransitionArray* array = TransitionArray::cast(obj); 2653 TransitionArray* array = TransitionArray::cast(obj);
2673 int num_transitions = array->number_of_entries(); 2654 int num_transitions = array->number_of_entries();
2674 DCHECK_EQ(TransitionArray::NumberOfTransitions(array), num_transitions); 2655 DCHECK_EQ(TransitionArray::NumberOfTransitions(array), num_transitions);
2675 if (num_transitions > 0) { 2656 if (num_transitions > 0) {
2676 Map* map = array->GetTarget(0); 2657 Map* map = array->GetTarget(0);
2677 Map* parent = Map::cast(map->constructor_or_backpointer()); 2658 Map* parent = Map::cast(map->constructor_or_backpointer());
2678 bool parent_is_alive = 2659 bool parent_is_alive = ObjectMarking::IsBlackOrGrey(parent);
2679 Marking::IsBlackOrGrey(ObjectMarking::MarkBitFrom(parent));
2680 DescriptorArray* descriptors = 2660 DescriptorArray* descriptors =
2681 parent_is_alive ? parent->instance_descriptors() : nullptr; 2661 parent_is_alive ? parent->instance_descriptors() : nullptr;
2682 bool descriptors_owner_died = 2662 bool descriptors_owner_died =
2683 CompactTransitionArray(parent, array, descriptors); 2663 CompactTransitionArray(parent, array, descriptors);
2684 if (descriptors_owner_died) { 2664 if (descriptors_owner_died) {
2685 TrimDescriptorArray(parent, descriptors); 2665 TrimDescriptorArray(parent, descriptors);
2686 } 2666 }
2687 } 2667 }
2688 obj = array->next_link(); 2668 obj = array->next_link();
2689 array->set_next_link(undefined, SKIP_WRITE_BARRIER); 2669 array->set_next_link(undefined, SKIP_WRITE_BARRIER);
2690 } 2670 }
2691 heap()->set_encountered_transition_arrays(Smi::kZero); 2671 heap()->set_encountered_transition_arrays(Smi::kZero);
2692 } 2672 }
2693 2673
2694 2674
2695 bool MarkCompactCollector::CompactTransitionArray( 2675 bool MarkCompactCollector::CompactTransitionArray(
2696 Map* map, TransitionArray* transitions, DescriptorArray* descriptors) { 2676 Map* map, TransitionArray* transitions, DescriptorArray* descriptors) {
2697 int num_transitions = transitions->number_of_entries(); 2677 int num_transitions = transitions->number_of_entries();
2698 bool descriptors_owner_died = false; 2678 bool descriptors_owner_died = false;
2699 int transition_index = 0; 2679 int transition_index = 0;
2700 // Compact all live transitions to the left. 2680 // Compact all live transitions to the left.
2701 for (int i = 0; i < num_transitions; ++i) { 2681 for (int i = 0; i < num_transitions; ++i) {
2702 Map* target = transitions->GetTarget(i); 2682 Map* target = transitions->GetTarget(i);
2703 DCHECK_EQ(target->constructor_or_backpointer(), map); 2683 DCHECK_EQ(target->constructor_or_backpointer(), map);
2704 if (Marking::IsWhite(ObjectMarking::MarkBitFrom(target))) { 2684 if (ObjectMarking::IsWhite(target)) {
2705 if (descriptors != nullptr && 2685 if (descriptors != nullptr &&
2706 target->instance_descriptors() == descriptors) { 2686 target->instance_descriptors() == descriptors) {
2707 descriptors_owner_died = true; 2687 descriptors_owner_died = true;
2708 } 2688 }
2709 } else { 2689 } else {
2710 if (i != transition_index) { 2690 if (i != transition_index) {
2711 Name* key = transitions->GetKey(i); 2691 Name* key = transitions->GetKey(i);
2712 transitions->SetKey(transition_index, key); 2692 transitions->SetKey(transition_index, key);
2713 Object** key_slot = transitions->GetKeySlot(transition_index); 2693 Object** key_slot = transitions->GetKeySlot(transition_index);
2714 RecordSlot(transitions, key_slot, key); 2694 RecordSlot(transitions, key_slot, key);
(...skipping 154 matching lines...)
2869 // Cells for new-space objects embedded in optimized code are wrapped in 2849 // Cells for new-space objects embedded in optimized code are wrapped in
2870 // WeakCell and put into Heap::weak_object_to_code_table. 2850 // WeakCell and put into Heap::weak_object_to_code_table.
2871 // Such cells do not have any strong references but we want to keep them 2851 // Such cells do not have any strong references but we want to keep them
2872 // alive as long as the cell value is alive. 2852 // alive as long as the cell value is alive.
2873 // TODO(ulan): remove this once we remove Heap::weak_object_to_code_table. 2853 // TODO(ulan): remove this once we remove Heap::weak_object_to_code_table.
2874 if (value->IsCell()) { 2854 if (value->IsCell()) {
2875 Object* cell_value = Cell::cast(value)->value(); 2855 Object* cell_value = Cell::cast(value)->value();
2876 if (cell_value->IsHeapObject() && 2856 if (cell_value->IsHeapObject() &&
2877 MarkCompactCollector::IsMarked(HeapObject::cast(cell_value))) { 2857 MarkCompactCollector::IsMarked(HeapObject::cast(cell_value))) {
2878 // Resurrect the cell. 2858 // Resurrect the cell.
2879 MarkBit mark = ObjectMarking::MarkBitFrom(value); 2859 SetMark(value);
2880 SetMark(value, mark);
2881 Object** slot = HeapObject::RawField(value, Cell::kValueOffset); 2860 Object** slot = HeapObject::RawField(value, Cell::kValueOffset);
2882 RecordSlot(value, slot, *slot); 2861 RecordSlot(value, slot, *slot);
2883 slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset); 2862 slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset);
2884 RecordSlot(weak_cell, slot, *slot); 2863 RecordSlot(weak_cell, slot, *slot);
2885 clear_value = false; 2864 clear_value = false;
2886 } 2865 }
2887 } 2866 }
2888 if (value->IsMap()) { 2867 if (value->IsMap()) {
2889 // The map is non-live. 2868 // The map is non-live.
2890 Map* map = Map::cast(value); 2869 Map* map = Map::cast(value);
(...skipping 521 matching lines...)
3412 } 3391 }
3413 3392
3414 intptr_t freed_bytes = 0; 3393 intptr_t freed_bytes = 0;
3415 intptr_t max_freed_bytes = 0; 3394 intptr_t max_freed_bytes = 0;
3416 int curr_region = -1; 3395 int curr_region = -1;
3417 3396
3418 LiveObjectIterator<kBlackObjects> it(p); 3397 LiveObjectIterator<kBlackObjects> it(p);
3419 HeapObject* object = NULL; 3398 HeapObject* object = NULL;
3420 3399
3421 while ((object = it.Next()) != NULL) { 3400 while ((object = it.Next()) != NULL) {
3422 DCHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(object))); 3401 DCHECK(ObjectMarking::IsBlack(object));
3423 Address free_end = object->address(); 3402 Address free_end = object->address();
3424 if (free_end != free_start) { 3403 if (free_end != free_start) {
3425 CHECK_GT(free_end, free_start); 3404 CHECK_GT(free_end, free_start);
3426 size_t size = static_cast<size_t>(free_end - free_start); 3405 size_t size = static_cast<size_t>(free_end - free_start);
3427 if (free_space_mode == ZAP_FREE_SPACE) { 3406 if (free_space_mode == ZAP_FREE_SPACE) {
3428 memset(free_start, 0xcc, size); 3407 memset(free_start, 0xcc, size);
3429 } 3408 }
3430 if (free_list_mode == REBUILD_FREE_LIST) { 3409 if (free_list_mode == REBUILD_FREE_LIST) {
3431 freed_bytes = reinterpret_cast<PagedSpace*>(space)->UnaccountedFree( 3410 freed_bytes = reinterpret_cast<PagedSpace*>(space)->UnaccountedFree(
3432 free_start, size); 3411 free_start, size);
(...skipping 69 matching lines...)
3502 Address start = code->instruction_start(); 3481 Address start = code->instruction_start();
3503 Address end = code->address() + code->Size(); 3482 Address end = code->address() + code->Size();
3504 3483
3505 RememberedSet<OLD_TO_NEW>::RemoveRangeTyped(page, start, end); 3484 RememberedSet<OLD_TO_NEW>::RemoveRangeTyped(page, start, end);
3506 3485
3507 if (heap_->incremental_marking()->IsCompacting() && 3486 if (heap_->incremental_marking()->IsCompacting() &&
3508 !ShouldSkipEvacuationSlotRecording(code)) { 3487 !ShouldSkipEvacuationSlotRecording(code)) {
3509 DCHECK(compacting_); 3488 DCHECK(compacting_);
3510 3489
3511 // If the object is white than no slots were recorded on it yet. 3490 // If the object is white than no slots were recorded on it yet.
3512 MarkBit mark_bit = ObjectMarking::MarkBitFrom(code); 3491 if (ObjectMarking::IsWhite(code)) return;
3513 if (Marking::IsWhite(mark_bit)) return;
3514 3492
3515 // Ignore all slots that might have been recorded in the body of the 3493 // Ignore all slots that might have been recorded in the body of the
3516 // deoptimized code object. Assumption: no slots will be recorded for 3494 // deoptimized code object. Assumption: no slots will be recorded for
3517 // this object after invalidating it. 3495 // this object after invalidating it.
3518 RememberedSet<OLD_TO_OLD>::RemoveRangeTyped(page, start, end); 3496 RememberedSet<OLD_TO_OLD>::RemoveRangeTyped(page, start, end);
3519 } 3497 }
3520 } 3498 }
3521 3499
3522 3500
3523 // Return true if the given code is deoptimized or will be deoptimized. 3501 // Return true if the given code is deoptimized or will be deoptimized.
3524 bool MarkCompactCollector::WillBeDeoptimized(Code* code) { 3502 bool MarkCompactCollector::WillBeDeoptimized(Code* code) {
3525 return code->is_optimized_code() && code->marked_for_deoptimization(); 3503 return code->is_optimized_code() && code->marked_for_deoptimization();
3526 } 3504 }
3527 3505
3528 3506
3529 #ifdef VERIFY_HEAP 3507 #ifdef VERIFY_HEAP
3530 static void VerifyAllBlackObjects(MemoryChunk* page) { 3508 static void VerifyAllBlackObjects(MemoryChunk* page) {
3531 LiveObjectIterator<kAllLiveObjects> it(page); 3509 LiveObjectIterator<kAllLiveObjects> it(page);
3532 HeapObject* object = NULL; 3510 HeapObject* object = NULL;
3533 while ((object = it.Next()) != NULL) { 3511 while ((object = it.Next()) != NULL) {
3534 CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(object))); 3512 CHECK(ObjectMarking::IsBlack(object));
3535 } 3513 }
3536 } 3514 }
3537 #endif // VERIFY_HEAP 3515 #endif // VERIFY_HEAP
3538 3516
3539 template <class Visitor> 3517 template <class Visitor>
3540 bool MarkCompactCollector::VisitLiveObjects(MemoryChunk* page, Visitor* visitor, 3518 bool MarkCompactCollector::VisitLiveObjects(MemoryChunk* page, Visitor* visitor,
3541 IterationMode mode) { 3519 IterationMode mode) {
3542 #ifdef VERIFY_HEAP 3520 #ifdef VERIFY_HEAP
3543 VerifyAllBlackObjects(page); 3521 VerifyAllBlackObjects(page);
3544 #endif // VERIFY_HEAP 3522 #endif // VERIFY_HEAP
3545 3523
3546 LiveObjectIterator<kBlackObjects> it(page); 3524 LiveObjectIterator<kBlackObjects> it(page);
3547 HeapObject* object = nullptr; 3525 HeapObject* object = nullptr;
3548 while ((object = it.Next()) != nullptr) { 3526 while ((object = it.Next()) != nullptr) {
3549 DCHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(object))); 3527 DCHECK(ObjectMarking::IsBlack(object));
3550 if (!visitor->Visit(object)) { 3528 if (!visitor->Visit(object)) {
3551 if (mode == kClearMarkbits) { 3529 if (mode == kClearMarkbits) {
3552 page->markbits()->ClearRange( 3530 page->markbits()->ClearRange(
3553 page->AddressToMarkbitIndex(page->area_start()), 3531 page->AddressToMarkbitIndex(page->area_start()),
3554 page->AddressToMarkbitIndex(object->address())); 3532 page->AddressToMarkbitIndex(object->address()));
3555 if (page->old_to_new_slots() != nullptr) { 3533 if (page->old_to_new_slots() != nullptr) {
3556 page->old_to_new_slots()->RemoveRange( 3534 page->old_to_new_slots()->RemoveRange(
3557 0, static_cast<int>(object->address() - page->address()), 3535 0, static_cast<int>(object->address() - page->address()),
3558 SlotSet::PREFREE_EMPTY_BUCKETS); 3536 SlotSet::PREFREE_EMPTY_BUCKETS);
3559 } 3537 }
(...skipping 176 matching lines...)
3736 // Unfortunately, we do not know about the slot. It could be in a 3714 // Unfortunately, we do not know about the slot. It could be in a
3737 // just freed free space object. 3715 // just freed free space object.
3738 if (heap->InToSpace(slot->Value())) { 3716 if (heap->InToSpace(slot->Value())) {
3739 return KEEP_SLOT; 3717 return KEEP_SLOT;
3740 } 3718 }
3741 } else if (heap->InToSpace(slot_reference)) { 3719 } else if (heap->InToSpace(slot_reference)) {
3742 // Slots can point to "to" space if the page has been moved, or if the 3720 // Slots can point to "to" space if the page has been moved, or if the
3743 // slot has been recorded multiple times in the remembered set. Since 3721 // slot has been recorded multiple times in the remembered set. Since
3744 // there is no forwarding information present we need to check the 3722 // there is no forwarding information present we need to check the
3745 // markbits to determine liveness. 3723 // markbits to determine liveness.
3746 if (Marking::IsBlack(ObjectMarking::MarkBitFrom( 3724 if (ObjectMarking::IsBlack(reinterpret_cast<HeapObject*>(slot_reference)))
3747 reinterpret_cast<HeapObject*>(slot_reference))))
3748 return KEEP_SLOT; 3725 return KEEP_SLOT;
3749 } else { 3726 } else {
3750 DCHECK(!heap->InNewSpace(slot_reference)); 3727 DCHECK(!heap->InNewSpace(slot_reference));
3751 } 3728 }
3752 return REMOVE_SLOT; 3729 return REMOVE_SLOT;
3753 } 3730 }
3754 }; 3731 };
3755 3732
3756 int NumberOfPointerUpdateTasks(int pages) { 3733 int NumberOfPointerUpdateTasks(int pages) {
3757 if (!FLAG_parallel_pointer_update) return 1; 3734 if (!FLAG_parallel_pointer_update) return 1;
(...skipping 313 matching lines...)
4071 } 4048 }
4072 } 4049 }
4073 4050
4074 4051
4075 void MarkCompactCollector::RecordCodeTargetPatch(Address pc, Code* target) { 4052 void MarkCompactCollector::RecordCodeTargetPatch(Address pc, Code* target) {
4076 DCHECK(heap()->gc_state() == Heap::MARK_COMPACT); 4053 DCHECK(heap()->gc_state() == Heap::MARK_COMPACT);
4077 if (is_compacting()) { 4054 if (is_compacting()) {
4078 Code* host = 4055 Code* host =
4079 isolate()->inner_pointer_to_code_cache()->GcSafeFindCodeForInnerPointer( 4056 isolate()->inner_pointer_to_code_cache()->GcSafeFindCodeForInnerPointer(
4080 pc); 4057 pc);
4081 MarkBit mark_bit = ObjectMarking::MarkBitFrom(host); 4058 if (ObjectMarking::IsBlack(host)) {
4082 if (Marking::IsBlack(mark_bit)) {
4083 RelocInfo rinfo(isolate(), pc, RelocInfo::CODE_TARGET, 0, host); 4059 RelocInfo rinfo(isolate(), pc, RelocInfo::CODE_TARGET, 0, host);
4084 // The target is always in old space, we don't have to record the slot in 4060 // The target is always in old space, we don't have to record the slot in
4085 // the old-to-new remembered set. 4061 // the old-to-new remembered set.
4086 DCHECK(!heap()->InNewSpace(target)); 4062 DCHECK(!heap()->InNewSpace(target));
4087 RecordRelocSlot(host, &rinfo, target); 4063 RecordRelocSlot(host, &rinfo, target);
4088 } 4064 }
4089 } 4065 }
4090 } 4066 }
4091 4067
4092 } // namespace internal 4068 } // namespace internal
4093 } // namespace v8 4069 } // namespace v8