OLD | NEW |
---|---|
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 53 matching lines...)
64 live_bytes_(0), | 64 live_bytes_(0), |
65 #endif | 65 #endif |
66 heap_(NULL), | 66 heap_(NULL), |
67 code_flusher_(NULL) { } | 67 code_flusher_(NULL) { } |
68 | 68 |
69 | 69 |
70 void MarkCompactCollector::CollectGarbage() { | 70 void MarkCompactCollector::CollectGarbage() { |
71 // Make sure that Prepare() has been called. The individual steps below will | 71 // Make sure that Prepare() has been called. The individual steps below will |
72 // update the state as they proceed. | 72 // update the state as they proceed. |
73 ASSERT(state_ == PREPARE_GC); | 73 ASSERT(state_ == PREPARE_GC); |
74 ASSERT(heap()->encountered_weak_maps() == Smi::FromInt(0)); | |
74 | 75 |
75 // Prepare has selected whether to compact the old generation or not. | 76 // Prepare has selected whether to compact the old generation or not. |
76 // Tell the tracer. | 77 // Tell the tracer. |
77 if (IsCompacting()) tracer_->set_is_compacting(); | 78 if (IsCompacting()) tracer_->set_is_compacting(); |
78 | 79 |
79 MarkLiveObjects(); | 80 MarkLiveObjects(); |
80 | 81 |
81 if (FLAG_collect_maps) ClearNonLiveTransitions(); | 82 if (FLAG_collect_maps) ClearNonLiveTransitions(); |
82 | 83 |
84 ClearWeakMaps(); | |
85 | |
83 SweepLargeObjectSpace(); | 86 SweepLargeObjectSpace(); |
84 | 87 |
85 if (IsCompacting()) { | 88 if (IsCompacting()) { |
86 GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_COMPACT); | 89 GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_COMPACT); |
87 EncodeForwardingAddresses(); | 90 EncodeForwardingAddresses(); |
88 | 91 |
89 heap()->MarkMapPointersAsEncoded(true); | 92 heap()->MarkMapPointersAsEncoded(true); |
90 UpdatePointers(); | 93 UpdatePointers(); |
91 heap()->MarkMapPointersAsEncoded(false); | 94 heap()->MarkMapPointersAsEncoded(false); |
92 heap()->isolate()->pc_to_code_cache()->Flush(); | 95 heap()->isolate()->pc_to_code_cache()->Flush(); |
(...skipping 307 matching lines...)
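Taken together, the CollectGarbage hunk above pins the new weak map support to two fixed points in a collection: the list of encountered weak maps must be empty when marking starts (the Smi::FromInt(0) sentinel asserted on line 74), and ClearWeakMaps runs after marking and transition clearing but before anything is swept. A condensed, runnable model of that ordering; all names are stand-ins for the real phases, not V8's API:

#include <cassert>

// Each phase asserts the one before it has completed, mirroring the
// ordering established in CollectGarbage above.
enum Phase { IDLE, MARKED, TRANSITIONS_CLEARED, WEAK_MAPS_CLEARED, SWEPT };
Phase phase = IDLE;

void MarkLiveObjects()         { assert(phase == IDLE); phase = MARKED; }
void ClearNonLiveTransitions() { assert(phase == MARKED); phase = TRANSITIONS_CLEARED; }
void ClearWeakMaps()           { assert(phase == TRANSITIONS_CLEARED); phase = WEAK_MAPS_CLEARED; }
void SweepLargeObjectSpace()   { assert(phase == WEAK_MAPS_CLEARED); phase = SWEPT; }

int main() {
  MarkLiveObjects();            // weak maps are enqueued during this phase
  ClearNonLiveTransitions();
  ClearWeakMaps();              // new in this patch: runs before any sweeping
  SweepLargeObjectSpace();
}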
400 | 403 |
401 table_.Register(kVisitGlobalContext, | 404 table_.Register(kVisitGlobalContext, |
402 &FixedBodyVisitor<StaticMarkingVisitor, | 405 &FixedBodyVisitor<StaticMarkingVisitor, |
403 Context::MarkCompactBodyDescriptor, | 406 Context::MarkCompactBodyDescriptor, |
404 void>::Visit); | 407 void>::Visit); |
405 | 408 |
406 table_.Register(kVisitByteArray, &DataObjectVisitor::Visit); | 409 table_.Register(kVisitByteArray, &DataObjectVisitor::Visit); |
407 table_.Register(kVisitSeqAsciiString, &DataObjectVisitor::Visit); | 410 table_.Register(kVisitSeqAsciiString, &DataObjectVisitor::Visit); |
408 table_.Register(kVisitSeqTwoByteString, &DataObjectVisitor::Visit); | 411 table_.Register(kVisitSeqTwoByteString, &DataObjectVisitor::Visit); |
409 | 412 |
413 table_.Register(kVisitJSWeakMap, &VisitJSWeakMap); | |
414 | |
410 table_.Register(kVisitOddball, | 415 table_.Register(kVisitOddball, |
411 &FixedBodyVisitor<StaticMarkingVisitor, | 416 &FixedBodyVisitor<StaticMarkingVisitor, |
412 Oddball::BodyDescriptor, | 417 Oddball::BodyDescriptor, |
413 void>::Visit); | 418 void>::Visit); |
414 table_.Register(kVisitMap, | 419 table_.Register(kVisitMap, |
415 &FixedBodyVisitor<StaticMarkingVisitor, | 420 &FixedBodyVisitor<StaticMarkingVisitor, |
416 Map::BodyDescriptor, | 421 Map::BodyDescriptor, |
417 void>::Visit); | 422 void>::Visit); |
418 | 423 |
419 table_.Register(kVisitCode, &VisitCode); | 424 table_.Register(kVisitCode, &VisitCode); |
(...skipping 129 matching lines...)
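The Register calls above populate StaticMarkingVisitor's dispatch table, mapping each visitor id to a handler; the patch adds a dedicated kVisitJSWeakMap entry so weak maps no longer go through the generic JS object visitor. A toy version of that dispatch scheme, with illustrative names rather than V8's real types:

#include <cstdio>

// A static table of callbacks indexed by visitor id, like table_ above.
enum VisitorId { kVisitDataObject, kVisitJSWeakMap, kVisitorIdCount };
using Callback = void (*)(const char*);

Callback table_[kVisitorIdCount];

void Register(VisitorId id, Callback cb) { table_[id] = cb; }
void VisitData(const char* what) { std::printf("data object: %s\n", what); }
void VisitWeakMap(const char* what) { std::printf("weak map: %s\n", what); }

int main() {
  Register(kVisitDataObject, &VisitData);
  Register(kVisitJSWeakMap, &VisitWeakMap);  // mirrors the new table entry
  table_[kVisitJSWeakMap]("a JSWeakMap instance");  // dispatch by id
}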
549 }; | 554 }; |
550 | 555 |
551 typedef FlexibleBodyVisitor<StaticMarkingVisitor, | 556 typedef FlexibleBodyVisitor<StaticMarkingVisitor, |
552 JSObject::BodyDescriptor, | 557 JSObject::BodyDescriptor, |
553 void> JSObjectVisitor; | 558 void> JSObjectVisitor; |
554 | 559 |
555 typedef FlexibleBodyVisitor<StaticMarkingVisitor, | 560 typedef FlexibleBodyVisitor<StaticMarkingVisitor, |
556 StructBodyDescriptor, | 561 StructBodyDescriptor, |
557 void> StructObjectVisitor; | 562 void> StructObjectVisitor; |
558 | 563 |
564 static void VisitJSWeakMap(Map* map, HeapObject* object) { | |
565 Heap* heap = map->heap(); | |
566 JSWeakMap* weak_map = reinterpret_cast<JSWeakMap*>(object); | |
567 | |
568 // Enqueue weak map in linked list of encountered weak maps. | |
569 ASSERT(weak_map->next() == Smi::FromInt(0)); | |
570 weak_map->set_next(heap->encountered_weak_maps()); | |
571 heap->set_encountered_weak_maps(weak_map); | |
572 | |
573 // Skip visiting the backing hash table containing the mappings. | |
574 int object_size = JSWeakMap::BodyDescriptor::SizeOf(map, object); | |
575 BodyVisitorBase<StaticMarkingVisitor>::IteratePointers( | |
576 map->heap(), | |
577 object, | |
578 JSWeakMap::BodyDescriptor::kStartOffset, | |
579 JSWeakMap::kTableOffset); | |
580 BodyVisitorBase<StaticMarkingVisitor>::IteratePointers( | |
581 map->heap(), | |
582 object, | |
583 JSWeakMap::kTableOffset + kPointerSize, | |
584 object_size); | |
585 | |
586 // Mark the backing hash table without pushing it on the marking stack. | |
587 ASSERT(!weak_map->unchecked_table()->IsMarked()); | |
588 heap->mark_compact_collector()->SetMark(weak_map->unchecked_table()); | |
Vyacheslav Egorov (Chromium), 2011/08/02 16:32:00:
What about marking unchecked_table()->map() or at
Michael Starzinger, 2011/08/03 08:47:05:
Done.
| |
589 } | |
590 | |
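VisitJSWeakMap above does two things: it prepends the weak map to an intrusive list rooted at heap->encountered_weak_maps(), threaded through each map's next field with Smi::FromInt(0) as the empty-list sentinel, and it iterates the body in two ranges that deliberately skip the kTableOffset slot, so the backing table is marked but its contents are never traced strongly. A minimal standalone model of the intrusive-list half; WeakMapNode and g_encountered are invented for the sketch, not V8 classes:

#include <cassert>
#include <cstdio>

// Stand-in for JSWeakMap; V8 threads the list through the object itself.
struct WeakMapNode {
  const char* name;
  WeakMapNode* next;  // nullptr plays the role of Smi::FromInt(0)
};

WeakMapNode* g_encountered = nullptr;  // heap->encountered_weak_maps()

// Mirrors the enqueue in VisitJSWeakMap: push onto the intrusive list.
void VisitWeakMap(WeakMapNode* map) {
  assert(map->next == nullptr);  // each map is enqueued at most once
  map->next = g_encountered;
  g_encountered = map;
}

int main() {
  WeakMapNode a{"a", nullptr}, b{"b", nullptr};
  VisitWeakMap(&a);
  VisitWeakMap(&b);
  // Traversal sees reverse discovery order: b, then a.
  for (WeakMapNode* m = g_encountered; m != nullptr; m = m->next)
    std::printf("%s\n", m->name);
}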
559 static void VisitCode(Map* map, HeapObject* object) { | 591 static void VisitCode(Map* map, HeapObject* object) { |
560 reinterpret_cast<Code*>(object)->CodeIterateBody<StaticMarkingVisitor>( | 592 reinterpret_cast<Code*>(object)->CodeIterateBody<StaticMarkingVisitor>( |
561 map->heap()); | 593 map->heap()); |
562 } | 594 } |
563 | 595 |
564 // Code flushing support. | 596 // Code flushing support. |
565 | 597 |
566 // How many collections a newly compiled code object will survive before being | 598 // How many collections a newly compiled code object will survive before being |
567 // flushed. | 599 // flushed. |
568 static const int kCodeAgeThreshold = 5; | 600 static const int kCodeAgeThreshold = 5; |
(...skipping 793 matching lines...)
1362 ref_groups->Rewind(last); | 1394 ref_groups->Rewind(last); |
1363 } | 1395 } |
1364 | 1396 |
1365 | 1397 |
1366 // Mark all objects reachable from the objects on the marking stack. | 1398 // Mark all objects reachable from the objects on the marking stack. |
1367 // Before: the marking stack contains zero or more heap object pointers. | 1399 // Before: the marking stack contains zero or more heap object pointers. |
1368 // After: the marking stack is empty, and all objects reachable from the | 1400 // After: the marking stack is empty, and all objects reachable from the |
1369 // marking stack have been marked, or are overflowed in the heap. | 1401 // marking stack have been marked, or are overflowed in the heap. |
1370 void MarkCompactCollector::EmptyMarkingStack() { | 1402 void MarkCompactCollector::EmptyMarkingStack() { |
1371 while (!marking_stack_.is_empty()) { | 1403 while (!marking_stack_.is_empty()) { |
1372 HeapObject* object = marking_stack_.Pop(); | 1404 while (!marking_stack_.is_empty()) { |
1373 ASSERT(object->IsHeapObject()); | 1405 HeapObject* object = marking_stack_.Pop(); |
1374 ASSERT(heap()->Contains(object)); | 1406 ASSERT(object->IsHeapObject()); |
1375 ASSERT(object->IsMarked()); | 1407 ASSERT(heap()->Contains(object)); |
1376 ASSERT(!object->IsOverflowed()); | 1408 ASSERT(object->IsMarked()); |
1409 ASSERT(!object->IsOverflowed()); | |
1377 | 1410 |
1378 // Because the object is marked, we have to recover the original map | 1411 // Because the object is marked, we have to recover the original map |
1379 // pointer and use it to mark the object's body. | 1412 // pointer and use it to mark the object's body. |
1380 MapWord map_word = object->map_word(); | 1413 MapWord map_word = object->map_word(); |
1381 map_word.ClearMark(); | 1414 map_word.ClearMark(); |
1382 Map* map = map_word.ToMap(); | 1415 Map* map = map_word.ToMap(); |
1383 MarkObject(map); | 1416 MarkObject(map); |
1384 | 1417 |
1385 StaticMarkingVisitor::IterateBody(map, object); | 1418 StaticMarkingVisitor::IterateBody(map, object); |
1419 } | |
1420 | |
1421 // Process encountered weak maps, mark objects only reachable by those | |
1422 // weak maps and repeat until fix-point is reached. | |
1423 ProcessWeakMaps(); | |
1386 } | 1424 } |
1387 } | 1425 } |
1388 | 1426 |
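The new outer loop makes EmptyMarkingStack an ephemeron fixpoint: draining the stack can mark objects that are weak map keys, ProcessWeakMaps then marks the values of newly marked keys, and that can push fresh work, so both steps repeat until the stack stays empty. A self-contained sketch of that control flow on a toy heap, with ProcessWeakMaps inlined as the ephemeron pass; the types are invented, not V8 code:

#include <map>
#include <set>
#include <utility>
#include <vector>

using Obj = int;
std::map<Obj, std::vector<Obj>> edges;        // strong references
std::vector<std::pair<Obj, Obj>> ephemerons;  // value live only if key live
std::set<Obj> marked;
std::vector<Obj> marking_stack;

void Mark(Obj o) {
  if (marked.insert(o).second) marking_stack.push_back(o);
}

// Mirrors the patched EmptyMarkingStack: drain the stack, run the weak map
// pass, and repeat until neither step makes progress.
void EmptyMarkingStack() {
  while (!marking_stack.empty()) {
    while (!marking_stack.empty()) {
      Obj o = marking_stack.back();
      marking_stack.pop_back();
      for (Obj child : edges[o]) Mark(child);  // trace the object's body
    }
    for (const auto& e : ephemerons)           // ProcessWeakMaps()
      if (marked.count(e.first) && !marked.count(e.second)) Mark(e.second);
  }
}

int main() {
  // Key 2 is reachable from root 1, so value 3 must be marked through the
  // ephemeron; key 4 is dead, so value 5 must stay unmarked.
  edges[1] = {2};
  ephemerons = {{2, 3}, {4, 5}};
  Mark(1);
  EmptyMarkingStack();
  return (marked.count(3) == 1 && marked.count(5) == 0) ? 0 : 1;
}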
1389 | 1427 |
1390 // Sweep the heap for overflowed objects, clear their overflow bits, and | 1428 // Sweep the heap for overflowed objects, clear their overflow bits, and |
1391 // push them on the marking stack. Stop early if the marking stack fills | 1429 // push them on the marking stack. Stop early if the marking stack fills |
1392 // before sweeping completes. If sweeping completes, there are no remaining | 1430 // before sweeping completes. If sweeping completes, there are no remaining |
1393 // overflowed objects in the heap so the overflow flag on the marking stack | 1431 // overflowed objects in the heap so the overflow flag on the marking stack |
1394 // is cleared. | 1432 // is cleared. |
1395 void MarkCompactCollector::RefillMarkingStack() { | 1433 void MarkCompactCollector::RefillMarkingStack() { |
(...skipping 332 matching lines...)
1728 on_dead_path = false; | 1766 on_dead_path = false; |
1729 current->ClearNonLiveTransitions(heap(), real_prototype); | 1767 current->ClearNonLiveTransitions(heap(), real_prototype); |
1730 } | 1768 } |
1731 *HeapObject::RawField(current, Map::kPrototypeOffset) = | 1769 *HeapObject::RawField(current, Map::kPrototypeOffset) = |
1732 real_prototype; | 1770 real_prototype; |
1733 current = reinterpret_cast<Map*>(next); | 1771 current = reinterpret_cast<Map*>(next); |
1734 } | 1772 } |
1735 } | 1773 } |
1736 } | 1774 } |
1737 | 1775 |
1776 | |
1777 void MarkCompactCollector::ProcessWeakMaps() { | |
1778 Object* weak_map_obj = heap()->encountered_weak_maps(); | |
1779 while (weak_map_obj != Smi::FromInt(0)) { | |
1780 ASSERT(HeapObject::cast(weak_map_obj)->IsMarked()); | |
1781 JSWeakMap* weak_map = reinterpret_cast<JSWeakMap*>(weak_map_obj); | |
1782 ObjectHashTable* table = weak_map->unchecked_table(); | |
1783 for (int i = 0; i < table->Capacity(); i++) { | |
1784 if (HeapObject::cast(table->KeyAt(i))->IsMarked()) { | |
1785 Object* value = table->ValueAt(i); | |
1786 StaticMarkingVisitor::MarkObjectByPointer(heap(), &value); | |
Vyacheslav Egorov (Chromium), 2011/08/02 16:32:00:
MarkObjectByPointer might modify the value (short
Michael Starzinger, 2011/08/03 08:47:05:
Done.
| |
1787 } | |
1788 } | |
1789 weak_map_obj = weak_map->next(); | |
1790 } | |
1791 } | |
1792 | |
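The review thread above flags a subtlety worth spelling out: MarkObjectByPointer may replace the object behind the pointer it is handed (V8's marker can shortcut cons strings, for example), and in the hunk above it receives the address of a stack-local copy of the table value, so such a replacement would never be written back into the table. A generic copy-versus-slot illustration; VisitAndMaybeReplace is a hypothetical stand-in, not a V8 function:

#include <cassert>

// A visitor that may *replace* the object behind the pointer it is given.
void VisitAndMaybeReplace(int** slot) {
  static int flat = 42;  // stand-in for, e.g., a flattened string
  *slot = &flat;
}

int main() {
  int old_obj = 7;
  int* table_slot = &old_obj;         // the slot inside the hash table

  int* copy = table_slot;             // what the hunk above does
  VisitAndMaybeReplace(&copy);        // replacement lands in the local copy
  assert(*table_slot == 7);           // the table still points at old_obj

  VisitAndMaybeReplace(&table_slot);  // visiting the slot itself instead
  assert(*table_slot == 42);          // lets the table observe the new object
}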
1793 | |
1794 void MarkCompactCollector::ClearWeakMaps() { | |
1795 Object* weak_map_obj = heap()->encountered_weak_maps(); | |
1796 while (weak_map_obj != Smi::FromInt(0)) { | |
1797 ASSERT(HeapObject::cast(weak_map_obj)->IsMarked()); | |
1798 JSWeakMap* weak_map = reinterpret_cast<JSWeakMap*>(weak_map_obj); | |
1799 ObjectHashTable* table = weak_map->unchecked_table(); | |
1800 for (int i = 0; i < table->Capacity(); i++) { | |
1801 if (!HeapObject::cast(table->KeyAt(i))->IsMarked()) { | |
1802 table->RemoveEntry(i, heap()); | |
1803 } | |
1804 } | |
1805 weak_map_obj = weak_map->next(); | |
1806 weak_map->set_next(Smi::FromInt(0)); | |
1807 } | |
1808 heap()->set_encountered_weak_maps(Smi::FromInt(0)); | |
1809 } | |
1810 | |
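ClearWeakMaps runs only after marking has reached its fixpoint, so a key that is still unmarked is provably unreachable and its entry can be removed outright; the loop also unlinks each map and resets the list head, restoring the sentinel that CollectGarbage asserts at the start of the next cycle. The observable effect on a single table, modeled with standard containers rather than V8's ObjectHashTable:

#include <cassert>
#include <map>
#include <set>

using Obj = int;

// Drop every entry whose key did not survive marking (cf. RemoveEntry).
void ClearDeadEntries(std::map<Obj, Obj>& table, const std::set<Obj>& marked) {
  for (auto it = table.begin(); it != table.end();) {
    if (marked.count(it->first) == 0) {
      it = table.erase(it);
    } else {
      ++it;
    }
  }
}

int main() {
  std::map<Obj, Obj> table = {{2, 3}, {4, 5}};
  std::set<Obj> marked = {2, 3};  // key 4 did not survive marking
  ClearDeadEntries(table, marked);
  assert(table.size() == 1 && table.count(2) == 1);
}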
1738 // ------------------------------------------------------------------------- | 1811 // ------------------------------------------------------------------------- |
1739 // Phase 2: Encode forwarding addresses. | 1812 // Phase 2: Encode forwarding addresses. |
1740 // When compacting, forwarding addresses for objects in old space and map | 1813 // When compacting, forwarding addresses for objects in old space and map |
1741 // space are encoded in their map pointer word (along with an encoding of | 1814 // space are encoded in their map pointer word (along with an encoding of |
1742 // their map pointers). | 1815 // their map pointers). |
1743 // | 1816 // |
1744 // The exact encoding is described in the comments for class MapWord in | 1817 // The exact encoding is described in the comments for class MapWord in |
1745 // objects.h. | 1818 // objects.h. |
1746 // | 1819 // |
1747 // An address range [start, end) can have both live and non-live objects. | 1820 // An address range [start, end) can have both live and non-live objects. |
(...skipping 1523 matching lines...)
3271 } | 3344 } |
3272 | 3345 |
3273 | 3346 |
3274 void MarkCompactCollector::Initialize() { | 3347 void MarkCompactCollector::Initialize() { |
3275 StaticPointersToNewGenUpdatingVisitor::Initialize(); | 3348 StaticPointersToNewGenUpdatingVisitor::Initialize(); |
3276 StaticMarkingVisitor::Initialize(); | 3349 StaticMarkingVisitor::Initialize(); |
3277 } | 3350 } |
3278 | 3351 |
3279 | 3352 |
3280 } } // namespace v8::internal | 3353 } } // namespace v8::internal |