Chromium Code Reviews

Side by Side Diff: src/heap/mark-compact.cc

Issue 2498583002: [heap] Minor MC: Add marking (Closed)
Patch Set: Fix (created 4 years, 1 month ago)
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/heap/mark-compact.h" 5 #include "src/heap/mark-compact.h"
6 6
7 #include "src/base/atomicops.h" 7 #include "src/base/atomicops.h"
8 #include "src/base/bits.h" 8 #include "src/base/bits.h"
9 #include "src/base/sys-info.h" 9 #include "src/base/sys-info.h"
10 #include "src/code-stubs.h" 10 #include "src/code-stubs.h"
(...skipping 304 matching lines...)
315 } 315 }
316 #endif 316 #endif
317 317
318 StartSweepSpaces(); 318 StartSweepSpaces();
319 319
320 EvacuateNewSpaceAndCandidates(); 320 EvacuateNewSpaceAndCandidates();
321 321
322 Finish(); 322 Finish();
323 } 323 }
324 324
325
326 #ifdef VERIFY_HEAP 325 #ifdef VERIFY_HEAP
327 void MarkCompactCollector::VerifyMarkbitsAreClean(PagedSpace* space) { 326 void MarkCompactCollector::VerifyMarkbitsAreClean(PagedSpace* space) {
328 for (Page* p : *space) { 327 for (Page* p : *space) {
329 CHECK(p->markbits()->IsClean()); 328 CHECK(p->markbits()->IsClean());
330 CHECK_EQ(0, p->LiveBytes()); 329 CHECK_EQ(0, p->LiveBytes());
331 } 330 }
332 } 331 }
333 332
334 333
335 void MarkCompactCollector::VerifyMarkbitsAreClean(NewSpace* space) { 334 void MarkCompactCollector::VerifyMarkbitsAreClean(NewSpace* space) {
(...skipping 11 matching lines...)
347 VerifyMarkbitsAreClean(heap_->new_space()); 346 VerifyMarkbitsAreClean(heap_->new_space());
348 347
349 LargeObjectIterator it(heap_->lo_space()); 348 LargeObjectIterator it(heap_->lo_space());
350 for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) { 349 for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) {
351 MarkBit mark_bit = ObjectMarking::MarkBitFrom(obj); 350 MarkBit mark_bit = ObjectMarking::MarkBitFrom(obj);
352 CHECK(Marking::IsWhite(mark_bit)); 351 CHECK(Marking::IsWhite(mark_bit));
353 CHECK_EQ(0, Page::FromAddress(obj->address())->LiveBytes()); 352 CHECK_EQ(0, Page::FromAddress(obj->address())->LiveBytes());
354 } 353 }
355 } 354 }
356 355
356 MarkCompactCollector::YoungGenerationMarkingVerificationState
357 MarkCompactCollector::MarkYoungGenerationForVerification() {
358 YoungGenerationMarkingVerificationState state;
359 state = GetObjectsInToSpace();
360 GlobalHandles::NewSpaceNodeStates new_space_node_states =
361 isolate()->global_handles()->GetNewSpaceNodeStates();
362 MarkLiveObjectsInYoungGeneration();
363 isolate()->global_handles()->RestoreNewSpaceNodeStates(
364 new_space_node_states);
365 return state;
366 }
367
368 std::vector<HeapObject*> MarkCompactCollector::GetObjectsInToSpace() {
369 std::vector<HeapObject*> objects;
370 const Address top = heap()->new_space()->top();
371 const Address space_start = heap()->new_space()->bottom();
372 const Address space_end = heap()->new_space()->top();
373 for (Page* page : NewSpacePageRange(space_start, space_end)) {
374 HeapObject* object = HeapObject::FromAddress(page->area_start());
375 while (object->address() < page->area_end() &&
376 (!page->ContainsLimit(top) || (object->address() < top))) {
377 if (!object->IsFiller()) {
378 objects.push_back(object);
379 }
380 object = HeapObject::FromAddress(object->address() + object->Size());
381 }
382 }
383 return objects;
384 }
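
GetObjectsInToSpace above walks each to-space page linearly: it starts at the page's area_start, advances by each object's size, skips fillers, and stops at the allocation top on the page that contains it. A minimal sketch of that bump-pointer walk, using simplified stand-in types rather than V8's real HeapObject API:

#include <cstddef>
#include <vector>

// Hypothetical, simplified object header: byte size plus a filler flag.
struct Header {
  std::size_t size;
  bool is_filler;
};

// Walk a bump-allocated region [start, top) object by object, collecting
// everything that is not a free-space filler.
std::vector<const Header*> CollectObjects(const char* start, const char* top) {
  std::vector<const Header*> objects;
  const char* cursor = start;
  while (cursor < top) {
    const Header* object = reinterpret_cast<const Header*>(cursor);
    if (!object->is_filler) objects.push_back(object);
    cursor += object->size;  // the next object begins right after this one
  }
  return objects;
}
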
385
386 void MarkCompactCollector::VerifyYoungGenerationMarkbitsUsingForwardingPointers(
387 const YoungGenerationMarkingVerificationState& state) {
388 for (HeapObject* object : state) {
389 const MapWord map_word = object->map_word();
390 const bool is_black = Marking::IsBlack(ObjectMarking::MarkBitFrom(object));
391 if (map_word.IsForwardingAddress()) {
392 CHECK(is_black);
393 } else {
394 CHECK(!is_black);
395 }
396 }
397
398 for (Page* p : NewSpacePageRange(heap()->new_space()->FromSpaceStart(),
399 heap()->new_space()->FromSpaceEnd())) {
400 p->ClearLiveness();
401 }
402 }
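
VerifyYoungGenerationMarkbitsUsingForwardingPointers checks a simple invariant: after marking and evacuation, an object whose map word holds a forwarding address must be black, and an object without one must not be. A self-contained sketch of the check, with hypothetical stand-ins for the map word and mark bit:

#include <cassert>
#include <vector>

// Hypothetical per-object record after evacuation.
struct SurvivorRecord {
  bool has_forwarding_address;  // the evacuator considered it live
  bool is_black;                // the minor marker considered it live
};

// Marked if and only if evacuated; a mismatch means the marker and the
// evacuator disagreed about liveness.
void VerifyMarkbitsMatchForwarding(const std::vector<SurvivorRecord>& objects) {
  for (const SurvivorRecord& o : objects) {
    assert(o.has_forwarding_address == o.is_black);
  }
}
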
357 403
358 void MarkCompactCollector::VerifyWeakEmbeddedObjectsInCode() { 404 void MarkCompactCollector::VerifyWeakEmbeddedObjectsInCode() {
359 HeapObjectIterator code_iterator(heap()->code_space()); 405 HeapObjectIterator code_iterator(heap()->code_space());
360 for (HeapObject* obj = code_iterator.Next(); obj != NULL; 406 for (HeapObject* obj = code_iterator.Next(); obj != NULL;
361 obj = code_iterator.Next()) { 407 obj = code_iterator.Next()) {
362 Code* code = Code::cast(obj); 408 Code* code = Code::cast(obj);
363 if (!code->is_optimized_code()) continue; 409 if (!code->is_optimized_code()) continue;
364 if (WillBeDeoptimized(code)) continue; 410 if (WillBeDeoptimized(code)) continue;
365 code->VerifyEmbeddedObjectsDependency(); 411 code->VerifyEmbeddedObjectsDependency();
366 } 412 }
(...skipping 709 matching lines...)
1076 JSFunction* candidate = jsfunction_candidates_head_; 1122 JSFunction* candidate = jsfunction_candidates_head_;
1077 while (candidate != NULL) { 1123 while (candidate != NULL) {
1078 if (heap->InFromSpace(candidate)) { 1124 if (heap->InFromSpace(candidate)) {
1079 v->VisitPointer(reinterpret_cast<Object**>(slot)); 1125 v->VisitPointer(reinterpret_cast<Object**>(slot));
1080 } 1126 }
1081 candidate = GetNextCandidate(*slot); 1127 candidate = GetNextCandidate(*slot);
1082 slot = GetNextCandidateSlot(*slot); 1128 slot = GetNextCandidateSlot(*slot);
1083 } 1129 }
1084 } 1130 }
1085 1131
1132 class StaticYoungGenerationMarkingVisitor
1133 : public StaticNewSpaceVisitor<StaticYoungGenerationMarkingVisitor> {
1134 public:
1135 static void Initialize(Heap* heap) {
1136 StaticNewSpaceVisitor<StaticYoungGenerationMarkingVisitor>::Initialize();
1137 }
1138
1139 inline static void VisitPointer(Heap* heap, HeapObject* object, Object** p) {
1140 Object* target = *p;
1141 if (heap->InNewSpace(target)) {
1142 if (MarkRecursively(heap, HeapObject::cast(target))) return;
1143 PushOnMarkingDeque(heap, target);
1144 }
1145 }
1146
1147 protected:
1148 inline static void PushOnMarkingDeque(Heap* heap, Object* obj) {
1149 HeapObject* object = HeapObject::cast(obj);
1150 MarkBit mark_bit = ObjectMarking::MarkBitFrom(object);
1151 heap->mark_compact_collector()->MarkObject(object, mark_bit);
1152 }
1153
1154 inline static bool MarkRecursively(Heap* heap, HeapObject* object) {
1155 StackLimitCheck check(heap->isolate());
1156 if (check.HasOverflowed()) return false;
1157
1158 MarkBit mark = ObjectMarking::MarkBitFrom(object);
1159 if (Marking::IsBlackOrGrey(mark)) return true;
1160 heap->mark_compact_collector()->SetMark(object, mark);
1161 IterateBody(object->map(), object);
1162 return true;
1163 }
1164 };
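
StaticYoungGenerationMarkingVisitor prefers plain C++ recursion (MarkRecursively) and only falls back to the marking deque when StackLimitCheck reports the C stack is nearly exhausted. A sketch of that recursion-with-fallback pattern, with a hypothetical depth counter standing in for the real stack check:

#include <deque>
#include <vector>

struct Node {
  bool marked = false;
  std::vector<Node*> children;
};

std::deque<Node*> g_worklist;  // fallback marking deque

// Stand-in for V8's StackLimitCheck.
bool StackNearlyExhausted(int depth) { return depth > 4096; }

// Returns false when the object could not be handled recursively; the
// caller then queues it on the worklist for later draining.
bool MarkRecursively(Node* n, int depth = 0) {
  if (StackNearlyExhausted(depth)) return false;
  if (n->marked) return true;
  n->marked = true;
  for (Node* child : n->children) {
    if (!MarkRecursively(child, depth + 1)) g_worklist.push_back(child);
  }
  return true;
}
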
1086 1165
1087 class MarkCompactMarkingVisitor 1166 class MarkCompactMarkingVisitor
1088 : public StaticMarkingVisitor<MarkCompactMarkingVisitor> { 1167 : public StaticMarkingVisitor<MarkCompactMarkingVisitor> {
1089 public: 1168 public:
1090 static void Initialize(); 1169 static void Initialize();
1091 1170
1092 INLINE(static void VisitPointer(Heap* heap, HeapObject* object, Object** p)) { 1171 INLINE(static void VisitPointer(Heap* heap, HeapObject* object, Object** p)) {
1093 MarkObjectByPointer(heap->mark_compact_collector(), object, p); 1172 MarkObjectByPointer(heap->mark_compact_collector(), object, p);
1094 } 1173 }
1095 1174
(...skipping 233 matching lines...)
1329 // Iterate the archived stacks in all threads to check if 1408 // Iterate the archived stacks in all threads to check if
1330 // the code is referenced. 1409 // the code is referenced.
1331 CodeMarkingVisitor code_marking_visitor(this); 1410 CodeMarkingVisitor code_marking_visitor(this);
1332 heap()->isolate()->thread_manager()->IterateArchivedThreads( 1411 heap()->isolate()->thread_manager()->IterateArchivedThreads(
1333 &code_marking_visitor); 1412 &code_marking_visitor);
1334 1413
1335 SharedFunctionInfoMarkingVisitor visitor(this); 1414 SharedFunctionInfoMarkingVisitor visitor(this);
1336 heap()->isolate()->compilation_cache()->IterateFunctions(&visitor); 1415 heap()->isolate()->compilation_cache()->IterateFunctions(&visitor);
1337 heap()->isolate()->handle_scope_implementer()->Iterate(&visitor); 1416 heap()->isolate()->handle_scope_implementer()->Iterate(&visitor);
1338 1417
1339 ProcessMarkingDeque(); 1418 ProcessMarkingDeque<MarkCompactMode::FULL>();
1340 } 1419 }
1341 1420
1342 1421
1343 // Visitor class for marking heap roots. 1422 // Visitor class for marking heap roots.
1423 template <MarkCompactMode mode>
1344 class RootMarkingVisitor : public ObjectVisitor { 1424 class RootMarkingVisitor : public ObjectVisitor {
1345 public: 1425 public:
1346 explicit RootMarkingVisitor(Heap* heap) 1426 explicit RootMarkingVisitor(Heap* heap)
1347 : collector_(heap->mark_compact_collector()) {} 1427 : collector_(heap->mark_compact_collector()) {}
1348 1428
1349 void VisitPointer(Object** p) override { MarkObjectByPointer(p); } 1429 void VisitPointer(Object** p) override { MarkObjectByPointer(p); }
1350 1430
1351 void VisitPointers(Object** start, Object** end) override { 1431 void VisitPointers(Object** start, Object** end) override {
1352 for (Object** p = start; p < end; p++) MarkObjectByPointer(p); 1432 for (Object** p = start; p < end; p++) MarkObjectByPointer(p);
1353 } 1433 }
1354 1434
1355 // Skip the weak next code link in a code object, which is visited in 1435 // Skip the weak next code link in a code object, which is visited in
1356 // ProcessTopOptimizedFrame. 1436 // ProcessTopOptimizedFrame.
1357 void VisitNextCodeLink(Object** p) override {} 1437 void VisitNextCodeLink(Object** p) override {}
1358 1438
1359 private: 1439 private:
1360 void MarkObjectByPointer(Object** p) { 1440 void MarkObjectByPointer(Object** p) {
1361 if (!(*p)->IsHeapObject()) return; 1441 if (!(*p)->IsHeapObject()) return;
1362 1442
1363 HeapObject* object = HeapObject::cast(*p); 1443 HeapObject* object = HeapObject::cast(*p);
1364 1444
1445 if (mode == MarkCompactMode::YOUNG_GENERATION &&
1446 !collector_->heap()->InNewSpace(object))
1447 return;
1448
1365 MarkBit mark_bit = ObjectMarking::MarkBitFrom(object); 1449 MarkBit mark_bit = ObjectMarking::MarkBitFrom(object);
1366 if (Marking::IsBlackOrGrey(mark_bit)) return; 1450 if (Marking::IsBlackOrGrey(mark_bit)) return;
1367 1451
1368 Map* map = object->map(); 1452 Map* map = object->map();
1369 // Mark the object. 1453 // Mark the object.
1370 collector_->SetMark(object, mark_bit); 1454 collector_->SetMark(object, mark_bit);
1371 1455
1372 // Mark the map pointer and body, and push them on the marking stack. 1456 switch (mode) {
1373 MarkBit map_mark = ObjectMarking::MarkBitFrom(map); 1457 case MarkCompactMode::FULL: {
1374 collector_->MarkObject(map, map_mark); 1458 // Mark the map pointer and body, and push them on the marking stack.
1375 MarkCompactMarkingVisitor::IterateBody(map, object); 1459 MarkBit map_mark = ObjectMarking::MarkBitFrom(map);
1460 collector_->MarkObject(map, map_mark);
1461 MarkCompactMarkingVisitor::IterateBody(map, object);
1462 } break;
1463 case MarkCompactMode::YOUNG_GENERATION:
1464 StaticYoungGenerationMarkingVisitor::IterateBody(map, object);
1465 break;
1466 }
1376 1467
1377 // Mark all the objects reachable from the map and body. May leave 1468 // Mark all the objects reachable from the map and body. May leave
1378 // overflowed objects in the heap. 1469 // overflowed objects in the heap.
1379 collector_->EmptyMarkingDeque(); 1470 collector_->EmptyMarkingDeque<mode>();
1380 } 1471 }
1381 1472
1382 MarkCompactCollector* collector_; 1473 MarkCompactCollector* collector_;
1383 }; 1474 };
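
Templating RootMarkingVisitor on MarkCompactMode moves the FULL versus YOUNG_GENERATION decision to compile time: the young-generation instantiation ignores pointers that lead out of new space and never marks maps. A runnable sketch of the compile-time dispatch, with hypothetical minimal types:

#include <cstdio>

enum class Mode { FULL, YOUNG_GENERATION };

struct Obj {
  bool in_new_space;
  bool marked;
};

template <Mode mode>
void MarkObjectByPointer(Obj* o) {
  // The young-generation collector only follows pointers into new space.
  if (mode == Mode::YOUNG_GENERATION && !o->in_new_space) return;
  if (o->marked) return;
  o->marked = true;
  // A full collection would also mark the map and iterate the body here.
}

int main() {
  Obj old_obj{false, false};
  MarkObjectByPointer<Mode::YOUNG_GENERATION>(&old_obj);
  std::printf("%d\n", old_obj.marked);  // 0: filtered out by the young-gen mode
  MarkObjectByPointer<Mode::FULL>(&old_obj);
  std::printf("%d\n", old_obj.marked);  // 1: the full mode marks it
  return 0;
}
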
1384 1475
1385 1476
1386 // Helper class for pruning the string table. 1477 // Helper class for pruning the string table.
1387 template <bool finalize_external_strings, bool record_slots> 1478 template <bool finalize_external_strings, bool record_slots>
1388 class StringTableCleaner : public ObjectVisitor { 1479 class StringTableCleaner : public ObjectVisitor {
1389 public: 1480 public:
(...skipping 549 matching lines...)
1939 2030
1940 bool MarkCompactCollector::IsUnmarkedHeapObjectWithHeap(Heap* heap, 2031 bool MarkCompactCollector::IsUnmarkedHeapObjectWithHeap(Heap* heap,
1941 Object** p) { 2032 Object** p) {
1942 Object* o = *p; 2033 Object* o = *p;
1943 DCHECK(o->IsHeapObject()); 2034 DCHECK(o->IsHeapObject());
1944 HeapObject* heap_object = HeapObject::cast(o); 2035 HeapObject* heap_object = HeapObject::cast(o);
1945 MarkBit mark = ObjectMarking::MarkBitFrom(heap_object); 2036 MarkBit mark = ObjectMarking::MarkBitFrom(heap_object);
1946 return Marking::IsWhite(mark); 2037 return Marking::IsWhite(mark);
1947 } 2038 }
1948 2039
1949 2040 void MarkCompactCollector::MarkStringTable(
1950 void MarkCompactCollector::MarkStringTable(RootMarkingVisitor* visitor) { 2041 RootMarkingVisitor<MarkCompactMode::FULL>* visitor) {
1951 StringTable* string_table = heap()->string_table(); 2042 StringTable* string_table = heap()->string_table();
1952 // Mark the string table itself. 2043 // Mark the string table itself.
1953 MarkBit string_table_mark = ObjectMarking::MarkBitFrom(string_table); 2044 MarkBit string_table_mark = ObjectMarking::MarkBitFrom(string_table);
1954 if (Marking::IsWhite(string_table_mark)) { 2045 if (Marking::IsWhite(string_table_mark)) {
1955 // String table could have already been marked by visiting the handles list. 2046 // String table could have already been marked by visiting the handles list.
1956 SetMark(string_table, string_table_mark); 2047 SetMark(string_table, string_table_mark);
1957 } 2048 }
1958 // Explicitly mark the prefix. 2049 // Explicitly mark the prefix.
1959 string_table->IteratePrefix(visitor); 2050 string_table->IteratePrefix(visitor);
1960 ProcessMarkingDeque(); 2051 ProcessMarkingDeque<MarkCompactMode::FULL>();
1961 } 2052 }
1962 2053
1963 2054
1964 void MarkCompactCollector::MarkAllocationSite(AllocationSite* site) { 2055 void MarkCompactCollector::MarkAllocationSite(AllocationSite* site) {
1965 MarkBit mark_bit = ObjectMarking::MarkBitFrom(site); 2056 MarkBit mark_bit = ObjectMarking::MarkBitFrom(site);
1966 SetMark(site, mark_bit); 2057 SetMark(site, mark_bit);
1967 } 2058 }
1968 2059
1969 2060 void MarkCompactCollector::MarkRoots(
1970 void MarkCompactCollector::MarkRoots(RootMarkingVisitor* visitor) { 2061 RootMarkingVisitor<MarkCompactMode::FULL>* visitor) {
1971 // Mark the heap roots including global variables, stack variables, 2062 // Mark the heap roots including global variables, stack variables,
1972 // etc., and all objects reachable from them. 2063 // etc., and all objects reachable from them.
1973 heap()->IterateStrongRoots(visitor, VISIT_ONLY_STRONG); 2064 heap()->IterateStrongRoots(visitor, VISIT_ONLY_STRONG);
1974 2065
1975 // Handle the string table specially. 2066 // Handle the string table specially.
1976 MarkStringTable(visitor); 2067 MarkStringTable(visitor);
1977 2068
1978 // There may be overflowed objects in the heap. Visit them now. 2069 // There may be overflowed objects in the heap. Visit them now.
1979 while (marking_deque()->overflowed()) { 2070 while (marking_deque()->overflowed()) {
1980 RefillMarkingDeque(); 2071 RefillMarkingDeque<MarkCompactMode::FULL>();
1981 EmptyMarkingDeque(); 2072 EmptyMarkingDeque<MarkCompactMode::FULL>();
1982 } 2073 }
1983 } 2074 }
1984 2075
1985 2076
1986 void MarkCompactCollector::MarkImplicitRefGroups( 2077 void MarkCompactCollector::MarkImplicitRefGroups(
1987 MarkObjectFunction mark_object) { 2078 MarkObjectFunction mark_object) {
1988 List<ImplicitRefGroup*>* ref_groups = 2079 List<ImplicitRefGroup*>* ref_groups =
1989 isolate()->global_handles()->implicit_ref_groups(); 2080 isolate()->global_handles()->implicit_ref_groups();
1990 2081
1991 int last = 0; 2082 int last = 0;
(...skipping 19 matching lines...)
2011 delete entry; 2102 delete entry;
2012 } 2103 }
2013 ref_groups->Rewind(last); 2104 ref_groups->Rewind(last);
2014 } 2105 }
2015 2106
2016 2107
2017 // Mark all objects reachable from the objects on the marking stack. 2108 // Mark all objects reachable from the objects on the marking stack.
2018 // Before: the marking stack contains zero or more heap object pointers. 2109 // Before: the marking stack contains zero or more heap object pointers.
2019 // After: the marking stack is empty, and all objects reachable from the 2110 // After: the marking stack is empty, and all objects reachable from the
2020 // marking stack have been marked, or are overflowed in the heap. 2111 // marking stack have been marked, or are overflowed in the heap.
2112 template <MarkCompactMode mode>
2021 void MarkCompactCollector::EmptyMarkingDeque() { 2113 void MarkCompactCollector::EmptyMarkingDeque() {
2022 while (!marking_deque()->IsEmpty()) { 2114 while (!marking_deque()->IsEmpty()) {
2023 HeapObject* object = marking_deque()->Pop(); 2115 HeapObject* object = marking_deque()->Pop();
2024 2116
2025 DCHECK(!object->IsFiller()); 2117 DCHECK(!object->IsFiller());
2026 DCHECK(object->IsHeapObject()); 2118 DCHECK(object->IsHeapObject());
2027 DCHECK(heap()->Contains(object)); 2119 DCHECK(heap()->Contains(object));
2028 DCHECK(!Marking::IsWhite(ObjectMarking::MarkBitFrom(object))); 2120 DCHECK(!Marking::IsWhite(ObjectMarking::MarkBitFrom(object)));
2029 2121
2030 Map* map = object->map(); 2122 Map* map = object->map();
2031 MarkBit map_mark = ObjectMarking::MarkBitFrom(map); 2123 switch (mode) {
2032 MarkObject(map, map_mark); 2124 case MarkCompactMode::FULL: {
2033 2125 MarkBit map_mark = ObjectMarking::MarkBitFrom(map);
2034 MarkCompactMarkingVisitor::IterateBody(map, object); 2126 MarkObject(map, map_mark);
2127 MarkCompactMarkingVisitor::IterateBody(map, object);
2128 } break;
2129 case MarkCompactMode::YOUNG_GENERATION: {
2130 DCHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(object)));
2131 StaticYoungGenerationMarkingVisitor::IterateBody(map, object);
2132 } break;
2133 }
2035 } 2134 }
2036 } 2135 }
2037 2136
2038 2137
2039 // Sweep the heap for overflowed objects, clear their overflow bits, and 2138 // Sweep the heap for overflowed objects, clear their overflow bits, and
2040 // push them on the marking stack. Stop early if the marking stack fills 2139 // push them on the marking stack. Stop early if the marking stack fills
2041 // before sweeping completes. If sweeping completes, there are no remaining 2140 // before sweeping completes. If sweeping completes, there are no remaining
2042 // overflowed objects in the heap so the overflow flag on the marking stack 2141 // overflowed objects in the heap so the overflow flag on the marking stack
2043 // is cleared. 2142 // is cleared.
2143 template <MarkCompactMode mode>
2044 void MarkCompactCollector::RefillMarkingDeque() { 2144 void MarkCompactCollector::RefillMarkingDeque() {
2045 isolate()->CountUsage(v8::Isolate::UseCounterFeature::kMarkDequeOverflow); 2145 isolate()->CountUsage(v8::Isolate::UseCounterFeature::kMarkDequeOverflow);
2046 DCHECK(marking_deque()->overflowed()); 2146 DCHECK(marking_deque()->overflowed());
2047 2147
2048 DiscoverGreyObjectsInNewSpace(); 2148 DiscoverGreyObjectsInNewSpace();
2049 if (marking_deque()->IsFull()) return; 2149 if (marking_deque()->IsFull()) return;
2050 2150
2051 DiscoverGreyObjectsInSpace(heap()->old_space()); 2151 if (mode == MarkCompactMode::FULL) {
2052 if (marking_deque()->IsFull()) return; 2152 DiscoverGreyObjectsInSpace(heap()->old_space());
2053 2153 if (marking_deque()->IsFull()) return;
2054 DiscoverGreyObjectsInSpace(heap()->code_space()); 2154 DiscoverGreyObjectsInSpace(heap()->code_space());
2055 if (marking_deque()->IsFull()) return; 2155 if (marking_deque()->IsFull()) return;
2056 2156 DiscoverGreyObjectsInSpace(heap()->map_space());
2057 DiscoverGreyObjectsInSpace(heap()->map_space()); 2157 if (marking_deque()->IsFull()) return;
2058 if (marking_deque()->IsFull()) return; 2158 LargeObjectIterator lo_it(heap()->lo_space());
2059 2159 DiscoverGreyObjectsWithIterator(&lo_it);
2060 LargeObjectIterator lo_it(heap()->lo_space()); 2160 if (marking_deque()->IsFull()) return;
2061 DiscoverGreyObjectsWithIterator(&lo_it); 2161 }
2062 if (marking_deque()->IsFull()) return;
2063 2162
2064 marking_deque()->ClearOverflowed(); 2163 marking_deque()->ClearOverflowed();
2065 } 2164 }
2066 2165
2067 2166
2068 // Mark all objects reachable (transitively) from objects on the marking 2167 // Mark all objects reachable (transitively) from objects on the marking
2069 // stack. Before: the marking stack contains zero or more heap object 2168 // stack. Before: the marking stack contains zero or more heap object
2070 // pointers. After: the marking stack is empty and there are no overflowed 2169 // pointers. After: the marking stack is empty and there are no overflowed
2071 // objects in the heap. 2170 // objects in the heap.
2171 template <MarkCompactMode mode>
2072 void MarkCompactCollector::ProcessMarkingDeque() { 2172 void MarkCompactCollector::ProcessMarkingDeque() {
2073 EmptyMarkingDeque(); 2173 EmptyMarkingDeque<mode>();
2074 while (marking_deque()->overflowed()) { 2174 while (marking_deque()->overflowed()) {
2075 RefillMarkingDeque(); 2175 RefillMarkingDeque<mode>();
2076 EmptyMarkingDeque(); 2176 EmptyMarkingDeque<mode>();
2077 } 2177 }
2178 DCHECK(marking_deque()->IsEmpty());
2078 } 2179 }
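
ProcessMarkingDeque is a fixpoint loop over a bounded worklist: if a push does not fit, the deque records an overflow and the object stays grey in the heap (marked live, body not yet visited), and RefillMarkingDeque later rescans the spaces to rediscover those objects. A compact sketch of the scheme, assuming a toy fixed-capacity deque and a flat vector standing in for the heap scan:

#include <cassert>
#include <cstddef>
#include <vector>

struct Item {
  bool grey = false;   // discovered, body not yet visited
  bool black = false;  // fully processed
};

struct BoundedDeque {
  std::vector<Item*> slots;
  std::size_t capacity = 4;
  bool overflowed = false;
  void Push(Item* i) {
    if (slots.size() == capacity) {
      overflowed = true;  // drop the entry; the object stays grey in the heap
      return;
    }
    slots.push_back(i);
  }
};

// Drain until empty, refilling from the "heap" while the overflow flag is set.
void ProcessToFixpoint(BoundedDeque* deque, std::vector<Item>& heap) {
  do {
    while (!deque->slots.empty()) {
      Item* i = deque->slots.back();
      deque->slots.pop_back();
      i->grey = false;
      i->black = true;  // visiting the body would push children here
    }
    if (!deque->overflowed) break;
    deque->overflowed = false;
    for (Item& i : heap) {  // rescan: rediscover the grey objects left behind
      if (i.grey) deque->Push(&i);
      if (deque->overflowed) break;
    }
  } while (true);
}

int main() {
  BoundedDeque d;
  std::vector<Item> heap(10);
  for (Item& i : heap) {
    i.grey = true;
    d.Push(&i);  // overflows once the four slots are full
  }
  ProcessToFixpoint(&d, heap);
  for (const Item& i : heap) assert(i.black);
  return 0;
}
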
2079 2180
2080 // Mark all objects reachable (transitively) from objects on the marking 2181 // Mark all objects reachable (transitively) from objects on the marking
2081 // stack including references only considered in the atomic marking pause. 2182 // stack including references only considered in the atomic marking pause.
2082 void MarkCompactCollector::ProcessEphemeralMarking( 2183 void MarkCompactCollector::ProcessEphemeralMarking(
2083 ObjectVisitor* visitor, bool only_process_harmony_weak_collections) { 2184 ObjectVisitor* visitor, bool only_process_harmony_weak_collections) {
2084 DCHECK(marking_deque()->IsEmpty() && !marking_deque()->overflowed()); 2185 DCHECK(marking_deque()->IsEmpty() && !marking_deque()->overflowed());
2085 bool work_to_do = true; 2186 bool work_to_do = true;
2086 while (work_to_do) { 2187 while (work_to_do) {
2087 if (heap_->UsingEmbedderHeapTracer()) { 2188 if (heap_->UsingEmbedderHeapTracer()) {
2088 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_WRAPPER_TRACING); 2189 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_WRAPPER_TRACING);
2089 heap_->RegisterWrappersWithEmbedderHeapTracer(); 2190 heap_->RegisterWrappersWithEmbedderHeapTracer();
2090 heap_->embedder_heap_tracer()->AdvanceTracing( 2191 heap_->embedder_heap_tracer()->AdvanceTracing(
2091 0, EmbedderHeapTracer::AdvanceTracingActions( 2192 0, EmbedderHeapTracer::AdvanceTracingActions(
2092 EmbedderHeapTracer::ForceCompletionAction::FORCE_COMPLETION)); 2193 EmbedderHeapTracer::ForceCompletionAction::FORCE_COMPLETION));
2093 } 2194 }
2094 if (!only_process_harmony_weak_collections) { 2195 if (!only_process_harmony_weak_collections) {
2095 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_OBJECT_GROUPING); 2196 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_OBJECT_GROUPING);
2096 isolate()->global_handles()->IterateObjectGroups( 2197 isolate()->global_handles()->IterateObjectGroups(
2097 visitor, &IsUnmarkedHeapObjectWithHeap); 2198 visitor, &IsUnmarkedHeapObjectWithHeap);
2098 MarkImplicitRefGroups(&MarkCompactMarkingVisitor::MarkObject); 2199 MarkImplicitRefGroups(&MarkCompactMarkingVisitor::MarkObject);
2099 } 2200 }
2100 ProcessWeakCollections(); 2201 ProcessWeakCollections();
2101 work_to_do = !marking_deque()->IsEmpty(); 2202 work_to_do = !marking_deque()->IsEmpty();
2102 ProcessMarkingDeque(); 2203 ProcessMarkingDeque<MarkCompactMode::FULL>();
2103 } 2204 }
2104 } 2205 }
2105 2206
2106 void MarkCompactCollector::ProcessTopOptimizedFrame(ObjectVisitor* visitor) { 2207 void MarkCompactCollector::ProcessTopOptimizedFrame(ObjectVisitor* visitor) {
2107 for (StackFrameIterator it(isolate(), isolate()->thread_local_top()); 2208 for (StackFrameIterator it(isolate(), isolate()->thread_local_top());
2108 !it.done(); it.Advance()) { 2209 !it.done(); it.Advance()) {
2109 if (it.frame()->type() == StackFrame::JAVA_SCRIPT) { 2210 if (it.frame()->type() == StackFrame::JAVA_SCRIPT) {
2110 return; 2211 return;
2111 } 2212 }
2112 if (it.frame()->type() == StackFrame::OPTIMIZED) { 2213 if (it.frame()->type() == StackFrame::OPTIMIZED) {
2113 Code* code = it.frame()->LookupCode(); 2214 Code* code = it.frame()->LookupCode();
2114 if (!code->CanDeoptAt(it.frame()->pc())) { 2215 if (!code->CanDeoptAt(it.frame()->pc())) {
2115 Code::BodyDescriptor::IterateBody(code, visitor); 2216 Code::BodyDescriptor::IterateBody(code, visitor);
2116 } 2217 }
2117 ProcessMarkingDeque(); 2218 ProcessMarkingDeque<MarkCompactMode::FULL>();
2118 return; 2219 return;
2119 } 2220 }
2120 } 2221 }
2121 } 2222 }
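
ProcessTopOptimizedFrame only looks at frames above the first ordinary JavaScript frame: a topmost optimized frame may be unable to deoptimize at its current pc, so its embedded pointers have to be treated as strong roots, while frames at or below the first JS frame are covered by the regular deoptimization machinery. A sketch of the scan, with a hypothetical top-down frame list:

#include <vector>

enum class FrameType { JAVA_SCRIPT, OPTIMIZED, STUB };

struct Frame {
  FrameType type;
  bool can_deopt_at_pc;  // stand-in for code->CanDeoptAt(pc)
  bool visited = false;
};

// Walk frames from the top of the stack downward; stop at the first JS frame.
void ProcessTopOptimizedFrame(std::vector<Frame>& frames_top_down) {
  for (Frame& f : frames_top_down) {
    if (f.type == FrameType::JAVA_SCRIPT) return;
    if (f.type == FrameType::OPTIMIZED) {
      if (!f.can_deopt_at_pc) f.visited = true;  // visit its code body as a root
      return;  // only the topmost optimized frame gets this treatment
    }
  }
}
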
2122 2223
2123 void MarkingDeque::SetUp() { 2224 void MarkingDeque::SetUp() {
2124 backing_store_ = new base::VirtualMemory(kMaxSize); 2225 backing_store_ = new base::VirtualMemory(kMaxSize);
2125 backing_store_committed_size_ = 0; 2226 backing_store_committed_size_ = 0;
2126 if (backing_store_ == nullptr) { 2227 if (backing_store_ == nullptr) {
2127 V8::FatalProcessOutOfMemory("MarkingDeque::SetUp"); 2228 V8::FatalProcessOutOfMemory("MarkingDeque::SetUp");
(...skipping 17 matching lines...)
2145 size_t size = FLAG_force_marking_deque_overflows 2246 size_t size = FLAG_force_marking_deque_overflows
2146 ? 64 * kPointerSize 2247 ? 64 * kPointerSize
2147 : backing_store_committed_size_; 2248 : backing_store_committed_size_;
2148 DCHECK( 2249 DCHECK(
2149 base::bits::IsPowerOfTwo32(static_cast<uint32_t>(size / kPointerSize))); 2250 base::bits::IsPowerOfTwo32(static_cast<uint32_t>(size / kPointerSize)));
2150 mask_ = static_cast<int>((size / kPointerSize) - 1); 2251 mask_ = static_cast<int>((size / kPointerSize) - 1);
2151 top_ = bottom_ = 0; 2252 top_ = bottom_ = 0;
2152 overflowed_ = false; 2253 overflowed_ = false;
2153 } 2254 }
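
The deque is sized to a power of two (the DCHECK above enforces it), so mask_ = size / kPointerSize - 1 turns ring-buffer wraparound into a single bitwise AND. A small demonstration of why the mask trick requires a power-of-two capacity:

#include <cassert>
#include <cstdint>

int main() {
  const uint32_t slots = 64;        // a power of two, as the DCHECK requires
  const uint32_t mask = slots - 1;  // 0b111111

  uint32_t top = 0;
  for (int i = 0; i < 200; i++) {
    top = (top + 1) & mask;         // wraps at 64 without a division
  }
  assert(top == 200 % slots);       // matches the modulo form exactly
  return 0;
}
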
2154 2255
2155 void MarkingDeque::StopUsing() { 2256 void MarkingDeque::StopUsing(UncommitMode mode) {
Hannes Payer (out of office) 2016/11/18 07:33:02 Based on the recent latency numbers of uncommit, w
Michael Lippautz 2016/11/18 09:46:19 Yes, obviously. Let's leave this for later though
Hannes Payer (out of office) 2016/11/18 10:07:11 As discussed offline. I would trust the concurrent
Michael Lippautz 2016/11/18 11:33:32 Done.
2156 base::LockGuard<base::Mutex> guard(&mutex_); 2257 base::LockGuard<base::Mutex> guard(&mutex_);
2258 if (!in_use_) return;
2157 DCHECK(IsEmpty()); 2259 DCHECK(IsEmpty());
2158 DCHECK(!overflowed_); 2260 DCHECK(!overflowed_);
2159 top_ = bottom_ = mask_ = 0; 2261 top_ = bottom_ = mask_ = 0;
2160 in_use_ = false; 2262 in_use_ = false;
2161 if (FLAG_concurrent_sweeping) { 2263 if (FLAG_concurrent_sweeping && (mode == CONCURRENT)) {
2162 StartUncommitTask(); 2264 StartUncommitTask();
2163 } else { 2265 } else {
2164 Uncommit(); 2266 Uncommit();
2165 } 2267 }
2166 } 2268 }
2167 2269
2168 void MarkingDeque::Clear() { 2270 void MarkingDeque::Clear() {
2169 DCHECK(in_use_); 2271 DCHECK(in_use_);
2170 top_ = bottom_ = 0; 2272 top_ = bottom_ = 0;
2171 overflowed_ = false; 2273 overflowed_ = false;
2172 } 2274 }
2173 2275
2174 void MarkingDeque::Uncommit() { 2276 void MarkingDeque::Uncommit() {
2175 DCHECK(!in_use_); 2277 DCHECK(!in_use_);
2278 // A call forcing immediate uncommitting could have already freed the backing
2279 // store, as indicated by a committed size of zero.
2280 if (backing_store_committed_size_ == 0) return;
2176 bool success = backing_store_->Uncommit(backing_store_->address(), 2281 bool success = backing_store_->Uncommit(backing_store_->address(),
2177 backing_store_committed_size_); 2282 backing_store_committed_size_);
2178 backing_store_committed_size_ = 0; 2283 backing_store_committed_size_ = 0;
2179 CHECK(success); 2284 CHECK(success);
2180 } 2285 }
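
With the new UncommitMode, Uncommit can be reached twice for the same backing store: once by a forced synchronous call and once by a concurrent task scheduled earlier. The early return on a zero committed size makes whichever call runs second a no-op. A sketch of the guard, with a hypothetical decommit hook in place of base::VirtualMemory:

#include <cstddef>

struct BackingStore {
  void* address = nullptr;
  std::size_t committed = 0;
};

// Stand-in for the OS-level decommit call.
bool DecommitPages(void*, std::size_t) { return true; }

// Idempotent: the second caller sees committed == 0 and returns immediately.
void Uncommit(BackingStore* store) {
  if (store->committed == 0) return;  // already released by the other path
  bool success = DecommitPages(store->address, store->committed);
  store->committed = 0;
  (void)success;  // the real code CHECKs the result
}
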
2181 2286
2182 void MarkingDeque::EnsureCommitted() { 2287 void MarkingDeque::EnsureCommitted() {
2183 DCHECK(in_use_); 2288 DCHECK(in_use_);
2184 if (backing_store_committed_size_ > 0) return; 2289 if (backing_store_committed_size_ > 0) return;
2185 2290
(...skipping 74 matching lines...)
2260 } 2365 }
2261 if (FLAG_trace_gc_object_stats) { 2366 if (FLAG_trace_gc_object_stats) {
2262 heap()->live_object_stats_->PrintJSON("live"); 2367 heap()->live_object_stats_->PrintJSON("live");
2263 heap()->dead_object_stats_->PrintJSON("dead"); 2368 heap()->dead_object_stats_->PrintJSON("dead");
2264 } 2369 }
2265 heap()->live_object_stats_->CheckpointObjectStats(); 2370 heap()->live_object_stats_->CheckpointObjectStats();
2266 heap()->dead_object_stats_->ClearObjectStats(); 2371 heap()->dead_object_stats_->ClearObjectStats();
2267 } 2372 }
2268 } 2373 }
2269 2374
2375 SlotCallbackResult MarkCompactCollector::CheckAndMarkObject(
2376 Heap* heap, Address slot_address) {
2377 Object* object = *reinterpret_cast<Object**>(slot_address);
2378 if (heap->InNewSpace(object)) {
2379 // Marking happens before flipping the young generation, so the object
2380 // has to be in ToSpace.
2381 DCHECK(heap->InToSpace(object));
2382 HeapObject* heap_object = reinterpret_cast<HeapObject*>(object);
2383 MarkBit mark_bit = ObjectMarking::MarkBitFrom(heap_object);
2384 if (Marking::IsBlackOrGrey(mark_bit)) {
2385 return KEEP_SLOT;
2386 }
2387 heap->mark_compact_collector()->SetMark(heap_object, mark_bit);
2388 StaticYoungGenerationMarkingVisitor::IterateBody(heap_object->map(),
2389 heap_object);
2390 return KEEP_SLOT;
2391 }
2392 return REMOVE_SLOT;
2393 }
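
CheckAndMarkObject doubles as a remembered-set filter: while iterating OLD_TO_NEW, a slot whose target is still in new space is marked and kept, and anything else is dropped, so the set is pruned in the same pass that drives marking. A sketch of that callback shape over a hypothetical slot list:

#include <algorithm>
#include <vector>

enum SlotCallbackResult { KEEP_SLOT, REMOVE_SLOT };

struct Target {
  bool in_new_space;
  bool marked = false;
};

SlotCallbackResult CheckAndMark(Target* t) {
  if (!t->in_new_space) return REMOVE_SLOT;  // target left the young generation
  t->marked = true;                          // mark it and keep the slot
  return KEEP_SLOT;
}

// Prune the slot list in place while visiting it, the way
// RememberedSet::Iterate consumes the callback's result.
void IterateAndPrune(std::vector<Target*>& slots) {
  slots.erase(std::remove_if(slots.begin(), slots.end(),
                             [](Target* t) {
                               return CheckAndMark(t) == REMOVE_SLOT;
                             }),
              slots.end());
}
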
2394
2395 static bool IsUnmarkedObject(Heap* heap, Object** p) {
2396 DCHECK_IMPLIES(heap->InNewSpace(*p), heap->InToSpace(*p));
2397 return heap->InNewSpace(*p) &&
2398 !Marking::IsBlack(ObjectMarking::MarkBitFrom(HeapObject::cast(*p)));
2399 }
2400
2401 void MarkCompactCollector::MarkLiveObjectsInYoungGeneration() {
2402 TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_MARK);
2403
2404 PostponeInterruptsScope postpone(isolate());
2405
2406 StaticYoungGenerationMarkingVisitor::Initialize(heap());
2407 RootMarkingVisitor<MarkCompactMode::YOUNG_GENERATION> root_visitor(heap());
2408
2409 marking_deque()->StartUsing();
2410
2411 isolate()->global_handles()->IdentifyWeakUnmodifiedObjects(
2412 &Heap::IsUnmodifiedHeapObject);
2413
2414 {
2415 TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_MARK_ROOTS);
2416 heap()->IterateRoots(&root_visitor, VISIT_ALL_IN_SCAVENGE);
2417 ProcessMarkingDeque<MarkCompactMode::YOUNG_GENERATION>();
2418 }
2419
2420 {
2421 TRACE_GC(heap()->tracer(),
2422 GCTracer::Scope::MINOR_MC_MARK_OLD_TO_NEW_POINTERS);
2423 RememberedSet<OLD_TO_NEW>::Iterate(heap(), [this](Address addr) {
2424 return CheckAndMarkObject(heap(), addr);
2425 });
2426 RememberedSet<OLD_TO_NEW>::IterateTyped(
2427 heap(), [this](SlotType type, Address host_addr, Address addr) {
2428 return UpdateTypedSlotHelper::UpdateTypedSlot(
2429 isolate(), type, addr, [this](Object** addr) {
2430 return CheckAndMarkObject(heap(),
2431 reinterpret_cast<Address>(addr));
2432 });
2433 });
2434 ProcessMarkingDeque<MarkCompactMode::YOUNG_GENERATION>();
2435 }
2436
2437 {
2438 TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_MARK_WEAK);
2439 heap()->VisitEncounteredWeakCollections(&root_visitor);
2440 ProcessMarkingDeque<MarkCompactMode::YOUNG_GENERATION>();
2441 }
2442
2443 if (is_code_flushing_enabled()) {
2444 TRACE_GC(heap()->tracer(),
2445 GCTracer::Scope::MINOR_MC_MARK_CODE_FLUSH_CANDIDATES);
2446 code_flusher()->IteratePointersToFromSpace(&root_visitor);
2447 ProcessMarkingDeque<MarkCompactMode::YOUNG_GENERATION>();
2448 }
2449
2450 {
2451 TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_MARK_GLOBAL_HANDLES);
2452 isolate()->global_handles()->MarkNewSpaceWeakUnmodifiedObjectsPending(
2453 &IsUnmarkedObject);
2454 isolate()
2455 ->global_handles()
2456 ->IterateNewSpaceWeakUnmodifiedRoots<
2457 GlobalHandles::DONT_HANDLE_PHANTOM_NODES>(&root_visitor);
2458 ProcessMarkingDeque<MarkCompactMode::YOUNG_GENERATION>();
2459 }
2460
2461 marking_deque()->StopUsing(MarkingDeque::SEQUENTIAL);
2462 }
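
Read top to bottom, MarkLiveObjectsInYoungGeneration runs a fixed sequence of phases, draining the young-generation marking deque after each one. An illustrative outline with stub bodies (the comments name the tracer scopes from the patch; these function names are hypothetical):

// Stubs only; the real work happens inside MarkLiveObjectsInYoungGeneration.
void MarkRootsYoung() {}                  // MINOR_MC_MARK_ROOTS
void MarkOldToNewSlots() {}               // MINOR_MC_MARK_OLD_TO_NEW_POINTERS
void MarkEncounteredWeakCollections() {}  // MINOR_MC_MARK_WEAK
void MarkCodeFlushCandidates() {}         // MINOR_MC_MARK_CODE_FLUSH_CANDIDATES
void MarkGlobalHandles() {}               // MINOR_MC_MARK_GLOBAL_HANDLES
void DrainDeque() {}                      // ProcessMarkingDeque<YOUNG_GENERATION>

int main() {
  MarkRootsYoung();
  DrainDeque();
  MarkOldToNewSlots();
  DrainDeque();
  MarkEncounteredWeakCollections();
  DrainDeque();
  MarkCodeFlushCandidates();  // only when code flushing is enabled
  DrainDeque();
  MarkGlobalHandles();
  DrainDeque();
  return 0;
}
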
2463
2270 void MarkCompactCollector::MarkLiveObjects() { 2464 void MarkCompactCollector::MarkLiveObjects() {
2271 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK); 2465 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK);
2272 // The recursive GC marker detects when it is nearing stack overflow, 2466 // The recursive GC marker detects when it is nearing stack overflow,
2273 // and switches to a different marking system. JS interrupts interfere 2467 // and switches to a different marking system. JS interrupts interfere
2274 // with the C stack limit check. 2468 // with the C stack limit check.
2275 PostponeInterruptsScope postpone(isolate()); 2469 PostponeInterruptsScope postpone(isolate());
2276 2470
2277 { 2471 {
2278 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_FINISH_INCREMENTAL); 2472 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_FINISH_INCREMENTAL);
2279 IncrementalMarking* incremental_marking = heap_->incremental_marking(); 2473 IncrementalMarking* incremental_marking = heap_->incremental_marking();
2280 if (was_marked_incrementally_) { 2474 if (was_marked_incrementally_) {
2281 incremental_marking->Finalize(); 2475 incremental_marking->Finalize();
2282 } else { 2476 } else {
2283 CHECK(incremental_marking->IsStopped()); 2477 CHECK(incremental_marking->IsStopped());
2284 } 2478 }
2285 } 2479 }
2286 2480
2287 #ifdef DEBUG 2481 #ifdef DEBUG
2288 DCHECK(state_ == PREPARE_GC); 2482 DCHECK(state_ == PREPARE_GC);
2289 state_ = MARK_LIVE_OBJECTS; 2483 state_ = MARK_LIVE_OBJECTS;
2290 #endif 2484 #endif
2291 2485
2292 marking_deque()->StartUsing(); 2486 marking_deque()->StartUsing();
2293 2487
2294 { 2488 {
2295 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_PREPARE_CODE_FLUSH); 2489 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_PREPARE_CODE_FLUSH);
2296 PrepareForCodeFlushing(); 2490 PrepareForCodeFlushing();
2297 } 2491 }
2298 2492
2299 RootMarkingVisitor root_visitor(heap()); 2493 RootMarkingVisitor<MarkCompactMode::FULL> root_visitor(heap());
2300 2494
2301 { 2495 {
2302 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_ROOTS); 2496 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_ROOTS);
2303 MarkRoots(&root_visitor); 2497 MarkRoots(&root_visitor);
2304 ProcessTopOptimizedFrame(&root_visitor); 2498 ProcessTopOptimizedFrame(&root_visitor);
2305 } 2499 }
2306 2500
2307 { 2501 {
2308 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_WEAK_CLOSURE); 2502 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_WEAK_CLOSURE);
2309 2503
(...skipping 11 matching lines...) Expand all
2321 // immediately reclaimed. Instead, we have to mark them as pending and mark 2515 // immediately reclaimed. Instead, we have to mark them as pending and mark
2322 // objects reachable from them. 2516 // objects reachable from them.
2323 // 2517 //
2324 // First we identify nonlive weak handles and mark them as pending 2518 // First we identify nonlive weak handles and mark them as pending
2325 // destruction. 2519 // destruction.
2326 { 2520 {
2327 TRACE_GC(heap()->tracer(), 2521 TRACE_GC(heap()->tracer(),
2328 GCTracer::Scope::MC_MARK_WEAK_CLOSURE_WEAK_HANDLES); 2522 GCTracer::Scope::MC_MARK_WEAK_CLOSURE_WEAK_HANDLES);
2329 heap()->isolate()->global_handles()->IdentifyWeakHandles( 2523 heap()->isolate()->global_handles()->IdentifyWeakHandles(
2330 &IsUnmarkedHeapObject); 2524 &IsUnmarkedHeapObject);
2331 ProcessMarkingDeque(); 2525 ProcessMarkingDeque<MarkCompactMode::FULL>();
2332 } 2526 }
2333 // Then we mark the objects. 2527 // Then we mark the objects.
2334 2528
2335 { 2529 {
2336 TRACE_GC(heap()->tracer(), 2530 TRACE_GC(heap()->tracer(),
2337 GCTracer::Scope::MC_MARK_WEAK_CLOSURE_WEAK_ROOTS); 2531 GCTracer::Scope::MC_MARK_WEAK_CLOSURE_WEAK_ROOTS);
2338 heap()->isolate()->global_handles()->IterateWeakRoots(&root_visitor); 2532 heap()->isolate()->global_handles()->IterateWeakRoots(&root_visitor);
2339 ProcessMarkingDeque(); 2533 ProcessMarkingDeque<MarkCompactMode::FULL>();
2340 } 2534 }
2341 2535
2342 // Repeat Harmony weak maps marking to mark unmarked objects reachable from 2536 // Repeat Harmony weak maps marking to mark unmarked objects reachable from
2343 // the weak roots we just marked as pending destruction. 2537 // the weak roots we just marked as pending destruction.
2344 // 2538 //
2345 // We only process harmony collections, as all object groups have been fully 2539 // We only process harmony collections, as all object groups have been fully
2346 // processed and no weakly reachable node can discover new object groups. 2540 // processed and no weakly reachable node can discover new object groups.
2347 { 2541 {
2348 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_WEAK_CLOSURE_HARMONY); 2542 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_WEAK_CLOSURE_HARMONY);
2349 ProcessEphemeralMarking(&root_visitor, true); 2543 ProcessEphemeralMarking(&root_visitor, true);
(...skipping 1585 matching lines...)
3935 // The target is always in old space, we don't have to record the slot in 4129 // The target is always in old space, we don't have to record the slot in
3936 // the old-to-new remembered set. 4130 // the old-to-new remembered set.
3937 DCHECK(!heap()->InNewSpace(target)); 4131 DCHECK(!heap()->InNewSpace(target));
3938 RecordRelocSlot(host, &rinfo, target); 4132 RecordRelocSlot(host, &rinfo, target);
3939 } 4133 }
3940 } 4134 }
3941 } 4135 }
3942 4136
3943 } // namespace internal 4137 } // namespace internal
3944 } // namespace v8 4138 } // namespace v8