OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/heap/mark-compact.h" | 5 #include "src/heap/mark-compact.h" |
6 | 6 |
7 #include "src/base/atomicops.h" | 7 #include "src/base/atomicops.h" |
8 #include "src/base/bits.h" | 8 #include "src/base/bits.h" |
9 #include "src/base/sys-info.h" | 9 #include "src/base/sys-info.h" |
10 #include "src/code-stubs.h" | 10 #include "src/code-stubs.h" |
(...skipping 304 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
315 } | 315 } |
316 #endif | 316 #endif |
317 | 317 |
318 StartSweepSpaces(); | 318 StartSweepSpaces(); |
319 | 319 |
320 EvacuateNewSpaceAndCandidates(); | 320 EvacuateNewSpaceAndCandidates(); |
321 | 321 |
322 Finish(); | 322 Finish(); |
323 } | 323 } |
324 | 324 |
325 | |
326 #ifdef VERIFY_HEAP | 325 #ifdef VERIFY_HEAP |
327 void MarkCompactCollector::VerifyMarkbitsAreClean(PagedSpace* space) { | 326 void MarkCompactCollector::VerifyMarkbitsAreClean(PagedSpace* space) { |
328 for (Page* p : *space) { | 327 for (Page* p : *space) { |
329 CHECK(p->markbits()->IsClean()); | 328 CHECK(p->markbits()->IsClean()); |
330 CHECK_EQ(0, p->LiveBytes()); | 329 CHECK_EQ(0, p->LiveBytes()); |
331 } | 330 } |
332 } | 331 } |
333 | 332 |
334 | 333 |
335 void MarkCompactCollector::VerifyMarkbitsAreClean(NewSpace* space) { | 334 void MarkCompactCollector::VerifyMarkbitsAreClean(NewSpace* space) { |
(...skipping 11 matching lines...) Expand all Loading... | |
347 VerifyMarkbitsAreClean(heap_->new_space()); | 346 VerifyMarkbitsAreClean(heap_->new_space()); |
348 | 347 |
349 LargeObjectIterator it(heap_->lo_space()); | 348 LargeObjectIterator it(heap_->lo_space()); |
350 for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) { | 349 for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) { |
351 MarkBit mark_bit = ObjectMarking::MarkBitFrom(obj); | 350 MarkBit mark_bit = ObjectMarking::MarkBitFrom(obj); |
352 CHECK(Marking::IsWhite(mark_bit)); | 351 CHECK(Marking::IsWhite(mark_bit)); |
353 CHECK_EQ(0, Page::FromAddress(obj->address())->LiveBytes()); | 352 CHECK_EQ(0, Page::FromAddress(obj->address())->LiveBytes()); |
354 } | 353 } |
355 } | 354 } |
356 | 355 |
356 void MarkCompactCollector::MarkYoungGenerationForVerification() { | |
357 MarkLiveObjectsInYoungGeneration(); | |
358 } | |
359 | |
360 std::vector<HeapObject*> MarkCompactCollector::GetObjectsInToSpace() { | |
361 std::vector<HeapObject*> objects; | |
362 const Address top = heap()->new_space()->top(); | |
363 const Address space_start = heap()->new_space()->bottom(); | |
364 const Address space_end = heap()->new_space()->top(); | |
365 for (Page* page : NewSpacePageRange(space_start, space_end)) { | |
366 HeapObject* object = HeapObject::FromAddress(page->area_start()); | |
367 while (object->address() < page->area_end() && | |
368 (!page->ContainsLimit(top) || (object->address() < top))) { | |
369 if (!object->IsFiller()) { | |
370 objects.push_back(object); | |
371 } | |
372 object = HeapObject::FromAddress(object->address() + object->Size()); | |
373 } | |
374 } | |
375 return objects; | |
376 } | |
377 | |
378 void MarkCompactCollector::VerifyYoungGenerationMarkbitsUsingForwardingPointers( | |
379 const std::vector<HeapObject*>& objects) { | |
380 for (HeapObject* object : objects) { | |
381 const MapWord map_word = object->map_word(); | |
382 const bool is_black = Marking::IsBlack(ObjectMarking::MarkBitFrom(object)); | |
383 if (map_word.IsForwardingAddress()) { | |
384 CHECK(is_black); | |
385 } else { | |
386 CHECK(!is_black); | |
387 } | |
388 } | |
389 } | |
357 | 390 |
358 void MarkCompactCollector::VerifyWeakEmbeddedObjectsInCode() { | 391 void MarkCompactCollector::VerifyWeakEmbeddedObjectsInCode() { |
359 HeapObjectIterator code_iterator(heap()->code_space()); | 392 HeapObjectIterator code_iterator(heap()->code_space()); |
360 for (HeapObject* obj = code_iterator.Next(); obj != NULL; | 393 for (HeapObject* obj = code_iterator.Next(); obj != NULL; |
361 obj = code_iterator.Next()) { | 394 obj = code_iterator.Next()) { |
362 Code* code = Code::cast(obj); | 395 Code* code = Code::cast(obj); |
363 if (!code->is_optimized_code()) continue; | 396 if (!code->is_optimized_code()) continue; |
364 if (WillBeDeoptimized(code)) continue; | 397 if (WillBeDeoptimized(code)) continue; |
365 code->VerifyEmbeddedObjectsDependency(); | 398 code->VerifyEmbeddedObjectsDependency(); |
366 } | 399 } |
(...skipping 709 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
1076 JSFunction* candidate = jsfunction_candidates_head_; | 1109 JSFunction* candidate = jsfunction_candidates_head_; |
1077 while (candidate != NULL) { | 1110 while (candidate != NULL) { |
1078 if (heap->InFromSpace(candidate)) { | 1111 if (heap->InFromSpace(candidate)) { |
1079 v->VisitPointer(reinterpret_cast<Object**>(slot)); | 1112 v->VisitPointer(reinterpret_cast<Object**>(slot)); |
1080 } | 1113 } |
1081 candidate = GetNextCandidate(*slot); | 1114 candidate = GetNextCandidate(*slot); |
1082 slot = GetNextCandidateSlot(*slot); | 1115 slot = GetNextCandidateSlot(*slot); |
1083 } | 1116 } |
1084 } | 1117 } |
1085 | 1118 |
1119 class StaticYoungGenerationMarkingVisitor | |
1120 : public StaticNewSpaceVisitor<StaticYoungGenerationMarkingVisitor> { | |
1121 public: | |
1122 static void Initialize(Heap* heap) { | |
1123 StaticNewSpaceVisitor<StaticYoungGenerationMarkingVisitor>::Initialize(); | |
1124 } | |
1125 | |
1126 inline static void VisitPointer(Heap* heap, HeapObject* object, Object** p) { | |
1127 Object* target = *p; | |
1128 if (heap->InNewSpace(target)) { | |
1129 if (MarkRecursively(heap, HeapObject::cast(target))) return; | |
1130 PushOnMarkingDeque(heap, target); | |
1131 } | |
1132 } | |
1133 | |
1134 protected: | |
1135 inline static void PushOnMarkingDeque(Heap* heap, Object* obj) { | |
1136 HeapObject* object = HeapObject::cast(obj); | |
1137 MarkBit mark_bit = ObjectMarking::MarkBitFrom(object); | |
1138 heap->mark_compact_collector()->MarkObject(object, mark_bit); | |
1139 } | |
1140 | |
1141 inline static bool MarkRecursively(Heap* heap, HeapObject* object) { | |
1142 StackLimitCheck check(heap->isolate()); | |
1143 if (check.HasOverflowed()) return false; | |
1144 | |
1145 MarkBit mark = ObjectMarking::MarkBitFrom(object); | |
1146 if (Marking::IsBlackOrGrey(mark)) return true; | |
1147 heap->mark_compact_collector()->SetMark(object, mark); | |
1148 IterateBody(object->map(), object); | |
1149 return true; | |
1150 } | |
1151 }; | |
1086 | 1152 |
1087 class MarkCompactMarkingVisitor | 1153 class MarkCompactMarkingVisitor |
1088 : public StaticMarkingVisitor<MarkCompactMarkingVisitor> { | 1154 : public StaticMarkingVisitor<MarkCompactMarkingVisitor> { |
1089 public: | 1155 public: |
1090 static void Initialize(); | 1156 static void Initialize(); |
1091 | 1157 |
1092 INLINE(static void VisitPointer(Heap* heap, HeapObject* object, Object** p)) { | 1158 INLINE(static void VisitPointer(Heap* heap, HeapObject* object, Object** p)) { |
1093 MarkObjectByPointer(heap->mark_compact_collector(), object, p); | 1159 MarkObjectByPointer(heap->mark_compact_collector(), object, p); |
1094 } | 1160 } |
1095 | 1161 |
(...skipping 233 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
1329 // Iterate the archived stacks in all threads to check if | 1395 // Iterate the archived stacks in all threads to check if |
1330 // the code is referenced. | 1396 // the code is referenced. |
1331 CodeMarkingVisitor code_marking_visitor(this); | 1397 CodeMarkingVisitor code_marking_visitor(this); |
1332 heap()->isolate()->thread_manager()->IterateArchivedThreads( | 1398 heap()->isolate()->thread_manager()->IterateArchivedThreads( |
1333 &code_marking_visitor); | 1399 &code_marking_visitor); |
1334 | 1400 |
1335 SharedFunctionInfoMarkingVisitor visitor(this); | 1401 SharedFunctionInfoMarkingVisitor visitor(this); |
1336 heap()->isolate()->compilation_cache()->IterateFunctions(&visitor); | 1402 heap()->isolate()->compilation_cache()->IterateFunctions(&visitor); |
1337 heap()->isolate()->handle_scope_implementer()->Iterate(&visitor); | 1403 heap()->isolate()->handle_scope_implementer()->Iterate(&visitor); |
1338 | 1404 |
1339 ProcessMarkingDeque(); | 1405 ProcessMarkingDeque<MarkCompactMode::FULL>(); |
1340 } | 1406 } |
1341 | 1407 |
1342 | 1408 |
1343 // Visitor class for marking heap roots. | 1409 // Visitor class for marking heap roots. |
1410 template <MarkCompactMode mode> | |
1344 class RootMarkingVisitor : public ObjectVisitor { | 1411 class RootMarkingVisitor : public ObjectVisitor { |
1345 public: | 1412 public: |
1346 explicit RootMarkingVisitor(Heap* heap) | 1413 explicit RootMarkingVisitor(Heap* heap) |
1347 : collector_(heap->mark_compact_collector()) {} | 1414 : collector_(heap->mark_compact_collector()) {} |
1348 | 1415 |
1349 void VisitPointer(Object** p) override { MarkObjectByPointer(p); } | 1416 void VisitPointer(Object** p) override { MarkObjectByPointer(p); } |
1350 | 1417 |
1351 void VisitPointers(Object** start, Object** end) override { | 1418 void VisitPointers(Object** start, Object** end) override { |
1352 for (Object** p = start; p < end; p++) MarkObjectByPointer(p); | 1419 for (Object** p = start; p < end; p++) MarkObjectByPointer(p); |
1353 } | 1420 } |
1354 | 1421 |
1355 // Skip the weak next code link in a code object, which is visited in | 1422 // Skip the weak next code link in a code object, which is visited in |
1356 // ProcessTopOptimizedFrame. | 1423 // ProcessTopOptimizedFrame. |
1357 void VisitNextCodeLink(Object** p) override {} | 1424 void VisitNextCodeLink(Object** p) override {} |
1358 | 1425 |
1359 private: | 1426 private: |
1360 void MarkObjectByPointer(Object** p) { | 1427 void MarkObjectByPointer(Object** p) { |
1361 if (!(*p)->IsHeapObject()) return; | 1428 if (!(*p)->IsHeapObject()) return; |
1362 | 1429 |
1363 HeapObject* object = HeapObject::cast(*p); | 1430 HeapObject* object = HeapObject::cast(*p); |
1364 | 1431 |
1432 if (mode == MarkCompactMode::YOUNG_GENERATION && | |
1433 !collector_->heap()->InNewSpace(object)) | |
1434 return; | |
1435 | |
1365 MarkBit mark_bit = ObjectMarking::MarkBitFrom(object); | 1436 MarkBit mark_bit = ObjectMarking::MarkBitFrom(object); |
1366 if (Marking::IsBlackOrGrey(mark_bit)) return; | 1437 if (Marking::IsBlackOrGrey(mark_bit)) return; |
1367 | 1438 |
1368 Map* map = object->map(); | 1439 Map* map = object->map(); |
1369 // Mark the object. | 1440 // Mark the object. |
1370 collector_->SetMark(object, mark_bit); | 1441 collector_->SetMark(object, mark_bit); |
1371 | 1442 |
1372 // Mark the map pointer and body, and push them on the marking stack. | 1443 switch (mode) { |
1373 MarkBit map_mark = ObjectMarking::MarkBitFrom(map); | 1444 case MarkCompactMode::FULL: { |
1374 collector_->MarkObject(map, map_mark); | 1445 // Mark the map pointer and body, and push them on the marking stack. |
1375 MarkCompactMarkingVisitor::IterateBody(map, object); | 1446 MarkBit map_mark = ObjectMarking::MarkBitFrom(map); |
1447 collector_->MarkObject(map, map_mark); | |
1448 MarkCompactMarkingVisitor::IterateBody(map, object); | |
1449 } break; | |
1450 case MarkCompactMode::YOUNG_GENERATION: | |
1451 StaticYoungGenerationMarkingVisitor::IterateBody(map, object); | |
1452 break; | |
1453 } | |
1376 | 1454 |
1377 // Mark all the objects reachable from the map and body. May leave | 1455 // Mark all the objects reachable from the map and body. May leave |
1378 // overflowed objects in the heap. | 1456 // overflowed objects in the heap. |
1379 collector_->EmptyMarkingDeque(); | 1457 collector_->EmptyMarkingDeque<mode>(); |
1380 } | 1458 } |
1381 | 1459 |
1382 MarkCompactCollector* collector_; | 1460 MarkCompactCollector* collector_; |
1383 }; | 1461 }; |
1384 | 1462 |
1385 | 1463 |
1386 // Helper class for pruning the string table. | 1464 // Helper class for pruning the string table. |
1387 template <bool finalize_external_strings, bool record_slots> | 1465 template <bool finalize_external_strings, bool record_slots> |
1388 class StringTableCleaner : public ObjectVisitor { | 1466 class StringTableCleaner : public ObjectVisitor { |
1389 public: | 1467 public: |
(...skipping 549 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
1939 | 2017 |
1940 bool MarkCompactCollector::IsUnmarkedHeapObjectWithHeap(Heap* heap, | 2018 bool MarkCompactCollector::IsUnmarkedHeapObjectWithHeap(Heap* heap, |
1941 Object** p) { | 2019 Object** p) { |
1942 Object* o = *p; | 2020 Object* o = *p; |
1943 DCHECK(o->IsHeapObject()); | 2021 DCHECK(o->IsHeapObject()); |
1944 HeapObject* heap_object = HeapObject::cast(o); | 2022 HeapObject* heap_object = HeapObject::cast(o); |
1945 MarkBit mark = ObjectMarking::MarkBitFrom(heap_object); | 2023 MarkBit mark = ObjectMarking::MarkBitFrom(heap_object); |
1946 return Marking::IsWhite(mark); | 2024 return Marking::IsWhite(mark); |
1947 } | 2025 } |
1948 | 2026 |
1949 | 2027 void MarkCompactCollector::MarkStringTable( |
1950 void MarkCompactCollector::MarkStringTable(RootMarkingVisitor* visitor) { | 2028 RootMarkingVisitor<MarkCompactMode::FULL>* visitor) { |
1951 StringTable* string_table = heap()->string_table(); | 2029 StringTable* string_table = heap()->string_table(); |
1952 // Mark the string table itself. | 2030 // Mark the string table itself. |
1953 MarkBit string_table_mark = ObjectMarking::MarkBitFrom(string_table); | 2031 MarkBit string_table_mark = ObjectMarking::MarkBitFrom(string_table); |
1954 if (Marking::IsWhite(string_table_mark)) { | 2032 if (Marking::IsWhite(string_table_mark)) { |
1955 // String table could have already been marked by visiting the handles list. | 2033 // String table could have already been marked by visiting the handles list. |
1956 SetMark(string_table, string_table_mark); | 2034 SetMark(string_table, string_table_mark); |
1957 } | 2035 } |
1958 // Explicitly mark the prefix. | 2036 // Explicitly mark the prefix. |
1959 string_table->IteratePrefix(visitor); | 2037 string_table->IteratePrefix(visitor); |
1960 ProcessMarkingDeque(); | 2038 ProcessMarkingDeque<MarkCompactMode::FULL>(); |
1961 } | 2039 } |
1962 | 2040 |
1963 | 2041 |
1964 void MarkCompactCollector::MarkAllocationSite(AllocationSite* site) { | 2042 void MarkCompactCollector::MarkAllocationSite(AllocationSite* site) { |
1965 MarkBit mark_bit = ObjectMarking::MarkBitFrom(site); | 2043 MarkBit mark_bit = ObjectMarking::MarkBitFrom(site); |
1966 SetMark(site, mark_bit); | 2044 SetMark(site, mark_bit); |
1967 } | 2045 } |
1968 | 2046 |
1969 | 2047 void MarkCompactCollector::MarkRoots( |
1970 void MarkCompactCollector::MarkRoots(RootMarkingVisitor* visitor) { | 2048 RootMarkingVisitor<MarkCompactMode::FULL>* visitor) { |
1971 // Mark the heap roots including global variables, stack variables, | 2049 // Mark the heap roots including global variables, stack variables, |
1972 // etc., and all objects reachable from them. | 2050 // etc., and all objects reachable from them. |
1973 heap()->IterateStrongRoots(visitor, VISIT_ONLY_STRONG); | 2051 heap()->IterateStrongRoots(visitor, VISIT_ONLY_STRONG); |
1974 | 2052 |
1975 // Handle the string table specially. | 2053 // Handle the string table specially. |
1976 MarkStringTable(visitor); | 2054 MarkStringTable(visitor); |
1977 | 2055 |
1978 // There may be overflowed objects in the heap. Visit them now. | 2056 // There may be overflowed objects in the heap. Visit them now. |
1979 while (marking_deque()->overflowed()) { | 2057 while (marking_deque()->overflowed()) { |
1980 RefillMarkingDeque(); | 2058 RefillMarkingDeque<MarkCompactMode::FULL>(); |
1981 EmptyMarkingDeque(); | 2059 EmptyMarkingDeque<MarkCompactMode::FULL>(); |
1982 } | 2060 } |
1983 } | 2061 } |
1984 | 2062 |
1985 | 2063 |
1986 void MarkCompactCollector::MarkImplicitRefGroups( | 2064 void MarkCompactCollector::MarkImplicitRefGroups( |
1987 MarkObjectFunction mark_object) { | 2065 MarkObjectFunction mark_object) { |
1988 List<ImplicitRefGroup*>* ref_groups = | 2066 List<ImplicitRefGroup*>* ref_groups = |
1989 isolate()->global_handles()->implicit_ref_groups(); | 2067 isolate()->global_handles()->implicit_ref_groups(); |
1990 | 2068 |
1991 int last = 0; | 2069 int last = 0; |
(...skipping 19 matching lines...) Expand all Loading... | |
2011 delete entry; | 2089 delete entry; |
2012 } | 2090 } |
2013 ref_groups->Rewind(last); | 2091 ref_groups->Rewind(last); |
2014 } | 2092 } |
2015 | 2093 |
2016 | 2094 |
2017 // Mark all objects reachable from the objects on the marking stack. | 2095 // Mark all objects reachable from the objects on the marking stack. |
2018 // Before: the marking stack contains zero or more heap object pointers. | 2096 // Before: the marking stack contains zero or more heap object pointers. |
2019 // After: the marking stack is empty, and all objects reachable from the | 2097 // After: the marking stack is empty, and all objects reachable from the |
2020 // marking stack have been marked, or are overflowed in the heap. | 2098 // marking stack have been marked, or are overflowed in the heap. |
2099 template <MarkCompactMode mode> | |
2021 void MarkCompactCollector::EmptyMarkingDeque() { | 2100 void MarkCompactCollector::EmptyMarkingDeque() { |
2022 while (!marking_deque()->IsEmpty()) { | 2101 while (!marking_deque()->IsEmpty()) { |
2023 HeapObject* object = marking_deque()->Pop(); | 2102 HeapObject* object = marking_deque()->Pop(); |
2024 | 2103 |
2025 DCHECK(!object->IsFiller()); | 2104 DCHECK(!object->IsFiller()); |
2026 DCHECK(object->IsHeapObject()); | 2105 DCHECK(object->IsHeapObject()); |
2027 DCHECK(heap()->Contains(object)); | 2106 DCHECK(heap()->Contains(object)); |
2028 DCHECK(!Marking::IsWhite(ObjectMarking::MarkBitFrom(object))); | 2107 DCHECK(!Marking::IsWhite(ObjectMarking::MarkBitFrom(object))); |
2029 | 2108 |
2030 Map* map = object->map(); | 2109 Map* map = object->map(); |
2031 MarkBit map_mark = ObjectMarking::MarkBitFrom(map); | 2110 switch (mode) { |
2032 MarkObject(map, map_mark); | 2111 case MarkCompactMode::FULL: { |
2033 | 2112 MarkBit map_mark = ObjectMarking::MarkBitFrom(map); |
2034 MarkCompactMarkingVisitor::IterateBody(map, object); | 2113 MarkObject(map, map_mark); |
2114 MarkCompactMarkingVisitor::IterateBody(map, object); | |
2115 } break; | |
2116 case MarkCompactMode::YOUNG_GENERATION: { | |
2117 DCHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(object))); | |
2118 StaticYoungGenerationMarkingVisitor::IterateBody(map, object); | |
2119 } break; | |
2120 } | |
2035 } | 2121 } |
2036 } | 2122 } |
2037 | 2123 |
2038 | 2124 |
2039 // Sweep the heap for overflowed objects, clear their overflow bits, and | 2125 // Sweep the heap for overflowed objects, clear their overflow bits, and |
2040 // push them on the marking stack. Stop early if the marking stack fills | 2126 // push them on the marking stack. Stop early if the marking stack fills |
2041 // before sweeping completes. If sweeping completes, there are no remaining | 2127 // before sweeping completes. If sweeping completes, there are no remaining |
2042 // overflowed objects in the heap so the overflow flag on the markings stack | 2128 // overflowed objects in the heap so the overflow flag on the markings stack |
2043 // is cleared. | 2129 // is cleared. |
2130 template <MarkCompactMode mode> | |
2044 void MarkCompactCollector::RefillMarkingDeque() { | 2131 void MarkCompactCollector::RefillMarkingDeque() { |
2045 isolate()->CountUsage(v8::Isolate::UseCounterFeature::kMarkDequeOverflow); | 2132 isolate()->CountUsage(v8::Isolate::UseCounterFeature::kMarkDequeOverflow); |
2046 DCHECK(marking_deque()->overflowed()); | 2133 DCHECK(marking_deque()->overflowed()); |
2047 | 2134 |
2048 DiscoverGreyObjectsInNewSpace(); | 2135 DiscoverGreyObjectsInNewSpace(); |
2049 if (marking_deque()->IsFull()) return; | 2136 if (marking_deque()->IsFull()) return; |
2050 | 2137 |
2051 DiscoverGreyObjectsInSpace(heap()->old_space()); | 2138 if (mode == MarkCompactMode::FULL) { |
2052 if (marking_deque()->IsFull()) return; | 2139 DiscoverGreyObjectsInSpace(heap()->old_space()); |
2053 | 2140 if (marking_deque()->IsFull()) return; |
2054 DiscoverGreyObjectsInSpace(heap()->code_space()); | 2141 DiscoverGreyObjectsInSpace(heap()->code_space()); |
2055 if (marking_deque()->IsFull()) return; | 2142 if (marking_deque()->IsFull()) return; |
2056 | 2143 DiscoverGreyObjectsInSpace(heap()->map_space()); |
2057 DiscoverGreyObjectsInSpace(heap()->map_space()); | 2144 if (marking_deque()->IsFull()) return; |
2058 if (marking_deque()->IsFull()) return; | 2145 LargeObjectIterator lo_it(heap()->lo_space()); |
2059 | 2146 DiscoverGreyObjectsWithIterator(&lo_it); |
2060 LargeObjectIterator lo_it(heap()->lo_space()); | 2147 if (marking_deque()->IsFull()) return; |
2061 DiscoverGreyObjectsWithIterator(&lo_it); | 2148 } |
2062 if (marking_deque()->IsFull()) return; | |
2063 | 2149 |
2064 marking_deque()->ClearOverflowed(); | 2150 marking_deque()->ClearOverflowed(); |
2065 } | 2151 } |
2066 | 2152 |
2067 | 2153 |
2068 // Mark all objects reachable (transitively) from objects on the marking | 2154 // Mark all objects reachable (transitively) from objects on the marking |
2069 // stack. Before: the marking stack contains zero or more heap object | 2155 // stack. Before: the marking stack contains zero or more heap object |
2070 // pointers. After: the marking stack is empty and there are no overflowed | 2156 // pointers. After: the marking stack is empty and there are no overflowed |
2071 // objects in the heap. | 2157 // objects in the heap. |
2158 template <MarkCompactMode mode> | |
2072 void MarkCompactCollector::ProcessMarkingDeque() { | 2159 void MarkCompactCollector::ProcessMarkingDeque() { |
2073 EmptyMarkingDeque(); | 2160 EmptyMarkingDeque<mode>(); |
2074 while (marking_deque()->overflowed()) { | 2161 while (marking_deque()->overflowed()) { |
2075 RefillMarkingDeque(); | 2162 RefillMarkingDeque<mode>(); |
2076 EmptyMarkingDeque(); | 2163 EmptyMarkingDeque<mode>(); |
2077 } | 2164 } |
2165 DCHECK(marking_deque()->IsEmpty()); | |
2078 } | 2166 } |
2079 | 2167 |
2080 // Mark all objects reachable (transitively) from objects on the marking | 2168 // Mark all objects reachable (transitively) from objects on the marking |
2081 // stack including references only considered in the atomic marking pause. | 2169 // stack including references only considered in the atomic marking pause. |
2082 void MarkCompactCollector::ProcessEphemeralMarking( | 2170 void MarkCompactCollector::ProcessEphemeralMarking( |
2083 ObjectVisitor* visitor, bool only_process_harmony_weak_collections) { | 2171 ObjectVisitor* visitor, bool only_process_harmony_weak_collections) { |
2084 DCHECK(marking_deque()->IsEmpty() && !marking_deque()->overflowed()); | 2172 DCHECK(marking_deque()->IsEmpty() && !marking_deque()->overflowed()); |
2085 bool work_to_do = true; | 2173 bool work_to_do = true; |
2086 while (work_to_do) { | 2174 while (work_to_do) { |
2087 if (heap_->UsingEmbedderHeapTracer()) { | 2175 if (heap_->UsingEmbedderHeapTracer()) { |
2088 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_WRAPPER_TRACING); | 2176 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_WRAPPER_TRACING); |
2089 heap_->RegisterWrappersWithEmbedderHeapTracer(); | 2177 heap_->RegisterWrappersWithEmbedderHeapTracer(); |
2090 heap_->embedder_heap_tracer()->AdvanceTracing( | 2178 heap_->embedder_heap_tracer()->AdvanceTracing( |
2091 0, EmbedderHeapTracer::AdvanceTracingActions( | 2179 0, EmbedderHeapTracer::AdvanceTracingActions( |
2092 EmbedderHeapTracer::ForceCompletionAction::FORCE_COMPLETION)); | 2180 EmbedderHeapTracer::ForceCompletionAction::FORCE_COMPLETION)); |
2093 } | 2181 } |
2094 if (!only_process_harmony_weak_collections) { | 2182 if (!only_process_harmony_weak_collections) { |
2095 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_OBJECT_GROUPING); | 2183 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_OBJECT_GROUPING); |
2096 isolate()->global_handles()->IterateObjectGroups( | 2184 isolate()->global_handles()->IterateObjectGroups( |
2097 visitor, &IsUnmarkedHeapObjectWithHeap); | 2185 visitor, &IsUnmarkedHeapObjectWithHeap); |
2098 MarkImplicitRefGroups(&MarkCompactMarkingVisitor::MarkObject); | 2186 MarkImplicitRefGroups(&MarkCompactMarkingVisitor::MarkObject); |
2099 } | 2187 } |
2100 ProcessWeakCollections(); | 2188 ProcessWeakCollections(); |
2101 work_to_do = !marking_deque()->IsEmpty(); | 2189 work_to_do = !marking_deque()->IsEmpty(); |
2102 ProcessMarkingDeque(); | 2190 ProcessMarkingDeque<MarkCompactMode::FULL>(); |
2103 } | 2191 } |
2104 } | 2192 } |
2105 | 2193 |
2106 void MarkCompactCollector::ProcessTopOptimizedFrame(ObjectVisitor* visitor) { | 2194 void MarkCompactCollector::ProcessTopOptimizedFrame(ObjectVisitor* visitor) { |
2107 for (StackFrameIterator it(isolate(), isolate()->thread_local_top()); | 2195 for (StackFrameIterator it(isolate(), isolate()->thread_local_top()); |
2108 !it.done(); it.Advance()) { | 2196 !it.done(); it.Advance()) { |
2109 if (it.frame()->type() == StackFrame::JAVA_SCRIPT) { | 2197 if (it.frame()->type() == StackFrame::JAVA_SCRIPT) { |
2110 return; | 2198 return; |
2111 } | 2199 } |
2112 if (it.frame()->type() == StackFrame::OPTIMIZED) { | 2200 if (it.frame()->type() == StackFrame::OPTIMIZED) { |
2113 Code* code = it.frame()->LookupCode(); | 2201 Code* code = it.frame()->LookupCode(); |
2114 if (!code->CanDeoptAt(it.frame()->pc())) { | 2202 if (!code->CanDeoptAt(it.frame()->pc())) { |
2115 Code::BodyDescriptor::IterateBody(code, visitor); | 2203 Code::BodyDescriptor::IterateBody(code, visitor); |
2116 } | 2204 } |
2117 ProcessMarkingDeque(); | 2205 ProcessMarkingDeque<MarkCompactMode::FULL>(); |
2118 return; | 2206 return; |
2119 } | 2207 } |
2120 } | 2208 } |
2121 } | 2209 } |
2122 | 2210 |
2123 void MarkingDeque::SetUp() { | 2211 void MarkingDeque::SetUp() { |
2124 backing_store_ = new base::VirtualMemory(kMaxSize); | 2212 backing_store_ = new base::VirtualMemory(kMaxSize); |
2125 backing_store_committed_size_ = 0; | 2213 backing_store_committed_size_ = 0; |
2126 if (backing_store_ == nullptr) { | 2214 if (backing_store_ == nullptr) { |
2127 V8::FatalProcessOutOfMemory("MarkingDeque::SetUp"); | 2215 V8::FatalProcessOutOfMemory("MarkingDeque::SetUp"); |
(...skipping 17 matching lines...) Expand all Loading... | |
2145 size_t size = FLAG_force_marking_deque_overflows | 2233 size_t size = FLAG_force_marking_deque_overflows |
2146 ? 64 * kPointerSize | 2234 ? 64 * kPointerSize |
2147 : backing_store_committed_size_; | 2235 : backing_store_committed_size_; |
2148 DCHECK( | 2236 DCHECK( |
2149 base::bits::IsPowerOfTwo32(static_cast<uint32_t>(size / kPointerSize))); | 2237 base::bits::IsPowerOfTwo32(static_cast<uint32_t>(size / kPointerSize))); |
2150 mask_ = static_cast<int>((size / kPointerSize) - 1); | 2238 mask_ = static_cast<int>((size / kPointerSize) - 1); |
2151 top_ = bottom_ = 0; | 2239 top_ = bottom_ = 0; |
2152 overflowed_ = false; | 2240 overflowed_ = false; |
2153 } | 2241 } |
2154 | 2242 |
2155 void MarkingDeque::StopUsing() { | 2243 void MarkingDeque::StopUsing(bool free_immediately) { |
2156 base::LockGuard<base::Mutex> guard(&mutex_); | 2244 base::LockGuard<base::Mutex> guard(&mutex_); |
2245 if (!in_use_) return; | |
2246 | |
2157 DCHECK(IsEmpty()); | 2247 DCHECK(IsEmpty()); |
2158 DCHECK(!overflowed_); | 2248 DCHECK(!overflowed_); |
2159 top_ = bottom_ = mask_ = 0; | 2249 top_ = bottom_ = mask_ = 0; |
2160 in_use_ = false; | 2250 in_use_ = false; |
2161 if (FLAG_concurrent_sweeping) { | 2251 if (FLAG_concurrent_sweeping && !free_immediately) { |
2162 StartUncommitTask(); | 2252 StartUncommitTask(); |
2163 } else { | 2253 } else { |
2164 Uncommit(); | 2254 Uncommit(); |
2165 } | 2255 } |
2166 } | 2256 } |
2167 | 2257 |
2168 void MarkingDeque::Clear() { | 2258 void MarkingDeque::Clear() { |
2169 DCHECK(in_use_); | 2259 DCHECK(in_use_); |
2170 top_ = bottom_ = 0; | 2260 top_ = bottom_ = 0; |
2171 overflowed_ = false; | 2261 overflowed_ = false; |
2172 } | 2262 } |
2173 | 2263 |
2174 void MarkingDeque::Uncommit() { | 2264 void MarkingDeque::Uncommit() { |
2175 DCHECK(!in_use_); | 2265 DCHECK(!in_use_); |
2266 if (backing_store_committed_size_ == 0) return; | |
ulan
2016/11/15 19:57:21
How can this happen?
Michael Lippautz
2016/11/16 08:59:37
I added a comment: Basically, two calls for StopUs
| |
2176 bool success = backing_store_->Uncommit(backing_store_->address(), | 2267 bool success = backing_store_->Uncommit(backing_store_->address(), |
2177 backing_store_committed_size_); | 2268 backing_store_committed_size_); |
2178 backing_store_committed_size_ = 0; | 2269 backing_store_committed_size_ = 0; |
2179 CHECK(success); | 2270 CHECK(success); |
2180 } | 2271 } |
2181 | 2272 |
2182 void MarkingDeque::EnsureCommitted() { | 2273 void MarkingDeque::EnsureCommitted() { |
2183 DCHECK(in_use_); | 2274 DCHECK(in_use_); |
2184 if (backing_store_committed_size_ > 0) return; | 2275 if (backing_store_committed_size_ > 0) return; |
2185 | 2276 |
(...skipping 74 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
2260 } | 2351 } |
2261 if (FLAG_trace_gc_object_stats) { | 2352 if (FLAG_trace_gc_object_stats) { |
2262 heap()->live_object_stats_->PrintJSON("live"); | 2353 heap()->live_object_stats_->PrintJSON("live"); |
2263 heap()->dead_object_stats_->PrintJSON("dead"); | 2354 heap()->dead_object_stats_->PrintJSON("dead"); |
2264 } | 2355 } |
2265 heap()->live_object_stats_->CheckpointObjectStats(); | 2356 heap()->live_object_stats_->CheckpointObjectStats(); |
2266 heap()->dead_object_stats_->ClearObjectStats(); | 2357 heap()->dead_object_stats_->ClearObjectStats(); |
2267 } | 2358 } |
2268 } | 2359 } |
2269 | 2360 |
2361 SlotCallbackResult MarkCompactCollector::CheckAndMarkObject( | |
2362 Heap* heap, Address slot_address) { | |
2363 Object* object = *reinterpret_cast<Object**>(slot_address); | |
2364 // Marking happens before flipping the young generation, hence check for | |
2365 // ToSpace. | |
ulan
2016/11/15 19:57:21
DCHECK(object_in_new_space implies object_in_to_sp
Michael Lippautz
2016/11/16 08:59:37
Used InNewSpace with a DCHECK for ToSpace.
| |
2366 if (heap->InToSpace(object)) { | |
2367 HeapObject* heap_object = reinterpret_cast<HeapObject*>(object); | |
2368 MarkBit mark_bit = ObjectMarking::MarkBitFrom(heap_object); | |
2369 if (Marking::IsBlackOrGrey(mark_bit)) { | |
2370 return KEEP_SLOT; | |
2371 } | |
2372 heap->mark_compact_collector()->SetMark(heap_object, mark_bit); | |
2373 StaticYoungGenerationMarkingVisitor::IterateBody(heap_object->map(), | |
2374 heap_object); | |
2375 return KEEP_SLOT; | |
2376 } | |
2377 return REMOVE_SLOT; | |
2378 } | |
2379 | |
2380 static bool IsUnmarkedObject(Heap* heap, Object** p) { | |
2381 return heap->InNewSpace(*p) && | |
ulan
2016/11/15 19:57:21
For consistency let's use either InNewSpace or InT
Michael Lippautz
2016/11/16 08:59:37
Done (added dcheck).
| |
2382 !Marking::IsBlack(ObjectMarking::MarkBitFrom(HeapObject::cast(*p))); | |
2383 } | |
2384 | |
// Marking phase of the young-generation (minor) mark-compact collector.
// Marks everything in the young generation reachable from: strong roots,
// old-to-new remembered-set slots, encountered weak collections, code-flush
// candidates, and non-phantom weak global handles.  The phases below are
// order-dependent; each one drains the marking deque before the next starts.
void MarkCompactCollector::MarkLiveObjectsInYoungGeneration() {
  TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_MARK);

  // JS interrupts interfere with the C stack limit check used while
  // marking, hence postpone them for the duration of this phase.
  PostponeInterruptsScope postpone(isolate());

  StaticYoungGenerationMarkingVisitor::Initialize(heap());
  RootMarkingVisitor<MarkCompactMode::YOUNG_GENERATION> root_visitor(heap());

  marking_deque()->StartUsing();

  // Classify weak global handles up front: handles whose objects were not
  // modified since the last GC get special weakness treatment below.
  isolate()->global_handles()->IdentifyWeakUnmodifiedObjects(
      &Heap::IsUnmodifiedHeapObject);

  {
    TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_MARK_ROOTS);
    heap()->IterateRoots(&root_visitor, VISIT_ALL_IN_SCAVENGE);
    ProcessMarkingDeque<MarkCompactMode::YOUNG_GENERATION>();
  }

  {
    // Old-to-new slots act as additional roots for a young-generation GC:
    // both untyped and typed (embedded-in-code) slots are visited.
    TRACE_GC(heap()->tracer(),
             GCTracer::Scope::MINOR_MC_MARK_OLD_TO_NEW_POINTERS);
    RememberedSet<OLD_TO_NEW>::Iterate(heap(), [this](Address addr) {
      return CheckAndMarkObject(heap(), addr);
    });
    RememberedSet<OLD_TO_NEW>::IterateTyped(
        heap(), [this](SlotType type, Address host_addr, Address addr) {
          return UpdateTypedSlotHelper::UpdateTypedSlot(
              isolate(), type, addr, [this](Object** addr) {
                return CheckAndMarkObject(heap(),
                                          reinterpret_cast<Address>(addr));
              });
        });
    ProcessMarkingDeque<MarkCompactMode::YOUNG_GENERATION>();
  }

  {
    TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_MARK_WEAK);
    heap()->VisitEncounteredWeakCollections(&root_visitor);
    ProcessMarkingDeque<MarkCompactMode::YOUNG_GENERATION>();
  }

  if (is_code_flushing_enabled()) {
    // Code-flush candidates referenced from from-space must be kept alive.
    TRACE_GC(heap()->tracer(),
             GCTracer::Scope::MINOR_MC_MARK_CODE_FLUSH_CANDIDATES);
    code_flusher()->IteratePointersToFromSpace(&root_visitor);
    ProcessMarkingDeque<MarkCompactMode::YOUNG_GENERATION>();
  }

  {
    TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_MARK_GLOBAL_HANDLES);
    // Unmodified weak handles whose targets are still unmarked (dead) are
    // flagged as pending; the remaining weak roots are then marked live.
    isolate()->global_handles()->MarkNewSpaceWeakUnmodifiedObjectsPending(
        &IsUnmarkedObject);
    isolate()
        ->global_handles()
        ->IterateNewSpaceWeakUnmodifiedRoots<
            GlobalHandles::DONT_HANDLE_PHANTOM_NODES>(&root_visitor);
    ProcessMarkingDeque<MarkCompactMode::YOUNG_GENERATION>();
  }

  marking_deque()->StopUsing(true);
}
2447 | |
2270 void MarkCompactCollector::MarkLiveObjects() { | 2448 void MarkCompactCollector::MarkLiveObjects() { |
2271 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK); | 2449 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK); |
2272 // The recursive GC marker detects when it is nearing stack overflow, | 2450 // The recursive GC marker detects when it is nearing stack overflow, |
2273 // and switches to a different marking system. JS interrupts interfere | 2451 // and switches to a different marking system. JS interrupts interfere |
2274 // with the C stack limit check. | 2452 // with the C stack limit check. |
2275 PostponeInterruptsScope postpone(isolate()); | 2453 PostponeInterruptsScope postpone(isolate()); |
2276 | 2454 |
2277 { | 2455 { |
2278 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_FINISH_INCREMENTAL); | 2456 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_FINISH_INCREMENTAL); |
2279 IncrementalMarking* incremental_marking = heap_->incremental_marking(); | 2457 IncrementalMarking* incremental_marking = heap_->incremental_marking(); |
2280 if (was_marked_incrementally_) { | 2458 if (was_marked_incrementally_) { |
2281 incremental_marking->Finalize(); | 2459 incremental_marking->Finalize(); |
2282 } else { | 2460 } else { |
2283 CHECK(incremental_marking->IsStopped()); | 2461 CHECK(incremental_marking->IsStopped()); |
2284 } | 2462 } |
2285 } | 2463 } |
2286 | 2464 |
2287 #ifdef DEBUG | 2465 #ifdef DEBUG |
2288 DCHECK(state_ == PREPARE_GC); | 2466 DCHECK(state_ == PREPARE_GC); |
2289 state_ = MARK_LIVE_OBJECTS; | 2467 state_ = MARK_LIVE_OBJECTS; |
2290 #endif | 2468 #endif |
2291 | 2469 |
2292 marking_deque()->StartUsing(); | 2470 marking_deque()->StartUsing(); |
2293 | 2471 |
2294 { | 2472 { |
2295 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_PREPARE_CODE_FLUSH); | 2473 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_PREPARE_CODE_FLUSH); |
2296 PrepareForCodeFlushing(); | 2474 PrepareForCodeFlushing(); |
2297 } | 2475 } |
2298 | 2476 |
2299 RootMarkingVisitor root_visitor(heap()); | 2477 RootMarkingVisitor<MarkCompactMode::FULL> root_visitor(heap()); |
2300 | 2478 |
2301 { | 2479 { |
2302 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_ROOTS); | 2480 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_ROOTS); |
2303 MarkRoots(&root_visitor); | 2481 MarkRoots(&root_visitor); |
2304 ProcessTopOptimizedFrame(&root_visitor); | 2482 ProcessTopOptimizedFrame(&root_visitor); |
2305 } | 2483 } |
2306 | 2484 |
2307 { | 2485 { |
2308 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_WEAK_CLOSURE); | 2486 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_WEAK_CLOSURE); |
2309 | 2487 |
(...skipping 11 matching lines...) Expand all Loading... | |
2321 // immediately reclaimed. Instead, we have to mark them as pending and mark | 2499 // immediately reclaimed. Instead, we have to mark them as pending and mark |
2322 // objects reachable from them. | 2500 // objects reachable from them. |
2323 // | 2501 // |
2324 // First we identify nonlive weak handles and mark them as pending | 2502 // First we identify nonlive weak handles and mark them as pending |
2325 // destruction. | 2503 // destruction. |
2326 { | 2504 { |
2327 TRACE_GC(heap()->tracer(), | 2505 TRACE_GC(heap()->tracer(), |
2328 GCTracer::Scope::MC_MARK_WEAK_CLOSURE_WEAK_HANDLES); | 2506 GCTracer::Scope::MC_MARK_WEAK_CLOSURE_WEAK_HANDLES); |
2329 heap()->isolate()->global_handles()->IdentifyWeakHandles( | 2507 heap()->isolate()->global_handles()->IdentifyWeakHandles( |
2330 &IsUnmarkedHeapObject); | 2508 &IsUnmarkedHeapObject); |
2331 ProcessMarkingDeque(); | 2509 ProcessMarkingDeque<MarkCompactMode::FULL>(); |
2332 } | 2510 } |
2333 // Then we mark the objects. | 2511 // Then we mark the objects. |
2334 | 2512 |
2335 { | 2513 { |
2336 TRACE_GC(heap()->tracer(), | 2514 TRACE_GC(heap()->tracer(), |
2337 GCTracer::Scope::MC_MARK_WEAK_CLOSURE_WEAK_ROOTS); | 2515 GCTracer::Scope::MC_MARK_WEAK_CLOSURE_WEAK_ROOTS); |
2338 heap()->isolate()->global_handles()->IterateWeakRoots(&root_visitor); | 2516 heap()->isolate()->global_handles()->IterateWeakRoots(&root_visitor); |
2339 ProcessMarkingDeque(); | 2517 ProcessMarkingDeque<MarkCompactMode::FULL>(); |
2340 } | 2518 } |
2341 | 2519 |
2342 // Repeat Harmony weak maps marking to mark unmarked objects reachable from | 2520 // Repeat Harmony weak maps marking to mark unmarked objects reachable from |
2343 // the weak roots we just marked as pending destruction. | 2521 // the weak roots we just marked as pending destruction. |
2344 // | 2522 // |
2345 // We only process harmony collections, as all object groups have been fully | 2523 // We only process harmony collections, as all object groups have been fully |
2346 // processed and no weakly reachable node can discover new objects groups. | 2524 // processed and no weakly reachable node can discover new objects groups. |
2347 { | 2525 { |
2348 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_WEAK_CLOSURE_HARMONY); | 2526 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_WEAK_CLOSURE_HARMONY); |
2349 ProcessEphemeralMarking(&root_visitor, true); | 2527 ProcessEphemeralMarking(&root_visitor, true); |
(...skipping 1585 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
3935 // The target is always in old space, we don't have to record the slot in | 4113 // The target is always in old space, we don't have to record the slot in |
3936 // the old-to-new remembered set. | 4114 // the old-to-new remembered set. |
3937 DCHECK(!heap()->InNewSpace(target)); | 4115 DCHECK(!heap()->InNewSpace(target)); |
3938 RecordRelocSlot(host, &rinfo, target); | 4116 RecordRelocSlot(host, &rinfo, target); |
3939 } | 4117 } |
3940 } | 4118 } |
3941 } | 4119 } |
3942 | 4120 |
3943 } // namespace internal | 4121 } // namespace internal |
3944 } // namespace v8 | 4122 } // namespace v8 |
OLD | NEW |