OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 46 matching lines...)
57 live_young_objects_size_(0), | 57 live_young_objects_size_(0), |
58 live_old_pointer_objects_size_(0), | 58 live_old_pointer_objects_size_(0), |
59 live_old_data_objects_size_(0), | 59 live_old_data_objects_size_(0), |
60 live_code_objects_size_(0), | 60 live_code_objects_size_(0), |
61 live_map_objects_size_(0), | 61 live_map_objects_size_(0), |
62 live_cell_objects_size_(0), | 62 live_cell_objects_size_(0), |
63 live_lo_objects_size_(0), | 63 live_lo_objects_size_(0), |
64 live_bytes_(0), | 64 live_bytes_(0), |
65 #endif | 65 #endif |
66 heap_(NULL), | 66 heap_(NULL), |
67 code_flusher_(NULL) { } | 67 code_flusher_(NULL), |
| 68 encountered_weak_maps_(NULL) { } |
68 | 69 |
69 | 70 |
70 void MarkCompactCollector::CollectGarbage() { | 71 void MarkCompactCollector::CollectGarbage() { |
71 // Make sure that Prepare() has been called. The individual steps below will | 72 // Make sure that Prepare() has been called. The individual steps below will |
72 // update the state as they proceed. | 73 // update the state as they proceed. |
73 ASSERT(state_ == PREPARE_GC); | 74 ASSERT(state_ == PREPARE_GC); |
| 75 ASSERT(encountered_weak_maps_ == Smi::FromInt(0)); |
74 | 76 |
75 // Prepare has selected whether to compact the old generation or not. | 77 // Prepare has selected whether to compact the old generation or not. |
76 // Tell the tracer. | 78 // Tell the tracer. |
77 if (IsCompacting()) tracer_->set_is_compacting(); | 79 if (IsCompacting()) tracer_->set_is_compacting(); |
78 | 80 |
79 MarkLiveObjects(); | 81 MarkLiveObjects(); |
80 | 82 |
81 if (FLAG_collect_maps) ClearNonLiveTransitions(); | 83 if (FLAG_collect_maps) ClearNonLiveTransitions(); |
82 | 84 |
| 85 ClearWeakMaps(); |
| 86 |
83 SweepLargeObjectSpace(); | 87 SweepLargeObjectSpace(); |
84 | 88 |
85 if (IsCompacting()) { | 89 if (IsCompacting()) { |
86 GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_COMPACT); | 90 GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_COMPACT); |
87 EncodeForwardingAddresses(); | 91 EncodeForwardingAddresses(); |
88 | 92 |
89 heap()->MarkMapPointersAsEncoded(true); | 93 heap()->MarkMapPointersAsEncoded(true); |
90 UpdatePointers(); | 94 UpdatePointers(); |
91 heap()->MarkMapPointersAsEncoded(false); | 95 heap()->MarkMapPointersAsEncoded(false); |
92 heap()->isolate()->pc_to_code_cache()->Flush(); | 96 heap()->isolate()->pc_to_code_cache()->Flush(); |
(...skipping 307 matching lines...)
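A note on the new encountered_weak_maps_ field initialized above: during marking the collector threads an intrusive singly-linked list through the weak maps it discovers, using Smi::FromInt(0) as a sentinel that doubles as the "not currently enqueued" marker (hence the ASSERT in CollectGarbage). Below is a minimal, self-contained sketch of that pattern; WeakMapStub and CollectorStub are hypothetical illustrative types, not V8's.

#include <cassert>
#include <cstdio>

struct WeakMapStub {
  const char* name;
  WeakMapStub* next;  // threads the encountered-weak-maps list
};

// Plays the role of Smi::FromInt(0): a distinguished "empty / not
// enqueued" value rather than NULL.
static WeakMapStub sentinel = {"<sentinel>", 0};

struct CollectorStub {
  WeakMapStub* encountered;  // stands in for encountered_weak_maps_
  CollectorStub() : encountered(&sentinel) {}

  void Enqueue(WeakMapStub* weak_map) {
    assert(weak_map->next == &sentinel);  // must not already be on the list
    weak_map->next = encountered;
    encountered = weak_map;
  }

  void Drain() {
    WeakMapStub* current = encountered;
    while (current != &sentinel) {
      WeakMapStub* next = current->next;
      std::printf("visiting %s\n", current->name);
      current->next = &sentinel;  // restore "not enqueued" for the next GC
      current = next;
    }
    encountered = &sentinel;
  }
};

int main() {
  CollectorStub collector;
  WeakMapStub a = {"a", &sentinel};
  WeakMapStub b = {"b", &sentinel};
  collector.Enqueue(&a);
  collector.Enqueue(&b);
  collector.Drain();  // prints "visiting b" then "visiting a" (LIFO)
  return 0;
}

Using a sentinel instead of NULL lets the assertions distinguish "never enqueued" from a field that was merely cleared.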
400 | 404 |
401 table_.Register(kVisitGlobalContext, | 405 table_.Register(kVisitGlobalContext, |
402 &FixedBodyVisitor<StaticMarkingVisitor, | 406 &FixedBodyVisitor<StaticMarkingVisitor, |
403 Context::MarkCompactBodyDescriptor, | 407 Context::MarkCompactBodyDescriptor, |
404 void>::Visit); | 408 void>::Visit); |
405 | 409 |
406 table_.Register(kVisitByteArray, &DataObjectVisitor::Visit); | 410 table_.Register(kVisitByteArray, &DataObjectVisitor::Visit); |
407 table_.Register(kVisitSeqAsciiString, &DataObjectVisitor::Visit); | 411 table_.Register(kVisitSeqAsciiString, &DataObjectVisitor::Visit); |
408 table_.Register(kVisitSeqTwoByteString, &DataObjectVisitor::Visit); | 412 table_.Register(kVisitSeqTwoByteString, &DataObjectVisitor::Visit); |
409 | 413 |
| 414 table_.Register(kVisitJSWeakMap, &VisitJSWeakMap); |
| 415 |
410 table_.Register(kVisitOddball, | 416 table_.Register(kVisitOddball, |
411 &FixedBodyVisitor<StaticMarkingVisitor, | 417 &FixedBodyVisitor<StaticMarkingVisitor, |
412 Oddball::BodyDescriptor, | 418 Oddball::BodyDescriptor, |
413 void>::Visit); | 419 void>::Visit); |
414 table_.Register(kVisitMap, | 420 table_.Register(kVisitMap, |
415 &FixedBodyVisitor<StaticMarkingVisitor, | 421 &FixedBodyVisitor<StaticMarkingVisitor, |
416 Map::BodyDescriptor, | 422 Map::BodyDescriptor, |
417 void>::Visit); | 423 void>::Visit); |
418 | 424 |
419 table_.Register(kVisitCode, &VisitCode); | 425 table_.Register(kVisitCode, &VisitCode); |
(...skipping 129 matching lines...)
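The table_.Register calls in this hunk populate a static dispatch table keyed by visitor id; the new kVisitJSWeakMap entry routes weak maps to the specialized VisitJSWeakMap below instead of the generic body visitor. A minimal sketch of the pattern, with hypothetical ids and types (Object and VisitorId stand in for V8's HeapObject and its visitor enum):

#include <cstdio>

struct Object {};  // opaque stand-in for HeapObject

enum VisitorId { kVisitByteArray, kVisitJSWeakMap, kVisitCode, kVisitorIdCount };

typedef void (*Callback)(Object* object);

static Callback table_[kVisitorIdCount];

static void Register(VisitorId id, Callback callback) { table_[id] = callback; }

static void VisitDataObject(Object* object) { std::printf("data object\n"); }
static void VisitJSWeakMap(Object* object) { std::printf("weak map\n"); }

int main() {
  Register(kVisitByteArray, &VisitDataObject);
  Register(kVisitJSWeakMap, &VisitJSWeakMap);
  Object o;
  table_[kVisitJSWeakMap](&o);  // one indexed call dispatches by visitor id
  return 0;
}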
549 }; | 555 }; |
550 | 556 |
551 typedef FlexibleBodyVisitor<StaticMarkingVisitor, | 557 typedef FlexibleBodyVisitor<StaticMarkingVisitor, |
552 JSObject::BodyDescriptor, | 558 JSObject::BodyDescriptor, |
553 void> JSObjectVisitor; | 559 void> JSObjectVisitor; |
554 | 560 |
555 typedef FlexibleBodyVisitor<StaticMarkingVisitor, | 561 typedef FlexibleBodyVisitor<StaticMarkingVisitor, |
556 StructBodyDescriptor, | 562 StructBodyDescriptor, |
557 void> StructObjectVisitor; | 563 void> StructObjectVisitor; |
558 | 564 |
| 565 static void VisitJSWeakMap(Map* map, HeapObject* object) { |
| 566 MarkCompactCollector* collector = map->heap()->mark_compact_collector(); |
| 567 JSWeakMap* weak_map = reinterpret_cast<JSWeakMap*>(object); |
| 568 |
| 569 // Enqueue weak map in linked list of encountered weak maps. |
| 570 ASSERT(weak_map->next() == Smi::FromInt(0)); |
| 571 weak_map->set_next(collector->encountered_weak_maps()); |
| 572 collector->set_encountered_weak_maps(weak_map); |
| 573 |
| 574 // Skip visiting the backing hash table containing the mappings. |
| 575 int object_size = JSWeakMap::BodyDescriptor::SizeOf(map, object); |
| 576 BodyVisitorBase<StaticMarkingVisitor>::IteratePointers( |
| 577 map->heap(), |
| 578 object, |
| 579 JSWeakMap::BodyDescriptor::kStartOffset, |
| 580 JSWeakMap::kTableOffset); |
| 581 BodyVisitorBase<StaticMarkingVisitor>::IteratePointers( |
| 582 map->heap(), |
| 583 object, |
| 584 JSWeakMap::kTableOffset + kPointerSize, |
| 585 object_size); |
| 586 |
| 587 // Mark the backing hash table without pushing it on the marking stack. |
| 588 ASSERT(!weak_map->unchecked_table()->IsMarked()); |
| 589 ASSERT(weak_map->unchecked_table()->map()->IsMarked()); |
| 590 collector->SetMark(weak_map->unchecked_table()); |
| 591 } |
| 592 |
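VisitJSWeakMap above deliberately iterates the object body in two ranges, [kStartOffset, kTableOffset) and [kTableOffset + kPointerSize, object_size), so the slot holding the backing hash table is never treated as a strong pointer; the table itself is then marked directly so it survives without its contents being traced. A minimal sketch of the split-iteration trick over a hypothetical flat object layout (VisitBodySkippingTable and IteratePointers here are illustrative, not V8 API):

#include <cstddef>
#include <cstdio>

typedef void (*SlotVisitor)(void** slot);

// Visits every pointer-sized slot in [begin, end) of the object's body.
static void IteratePointers(char* object, size_t begin, size_t end,
                            SlotVisitor visit) {
  for (size_t offset = begin; offset < end; offset += sizeof(void*)) {
    visit(reinterpret_cast<void**>(object + offset));
  }
}

// Visits the whole body except the single slot at table_offset, mirroring
// the two IteratePointers calls in VisitJSWeakMap.
static void VisitBodySkippingTable(char* object, size_t start_offset,
                                   size_t table_offset, size_t object_size,
                                   SlotVisitor visit) {
  IteratePointers(object, start_offset, table_offset, visit);
  IteratePointers(object, table_offset + sizeof(void*), object_size, visit);
}

static void PrintSlot(void** slot) { std::printf("slot %p\n", (void*)slot); }

int main() {
  char body[8 * sizeof(void*)] = {0};
  // Pretend the table pointer lives in slot 3: it is skipped below.
  VisitBodySkippingTable(body, 0, 3 * sizeof(void*), sizeof(body), &PrintSlot);
  return 0;
}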
559 static void VisitCode(Map* map, HeapObject* object) { | 593 static void VisitCode(Map* map, HeapObject* object) { |
560 reinterpret_cast<Code*>(object)->CodeIterateBody<StaticMarkingVisitor>( | 594 reinterpret_cast<Code*>(object)->CodeIterateBody<StaticMarkingVisitor>( |
561 map->heap()); | 595 map->heap()); |
562 } | 596 } |
563 | 597 |
564 // Code flushing support. | 598 // Code flushing support. |
565 | 599 |
566 // How many collections a newly compiled code object will survive before being | 600 // How many collections a newly compiled code object will survive before being |
567 // flushed. | 601 // flushed. |
568 static const int kCodeAgeThreshold = 5; | 602 static const int kCodeAgeThreshold = 5; |
(...skipping 793 matching lines...)
1362 ref_groups->Rewind(last); | 1396 ref_groups->Rewind(last); |
1363 } | 1397 } |
1364 | 1398 |
1365 | 1399 |
1366 // Mark all objects reachable from the objects on the marking stack. | 1400 // Mark all objects reachable from the objects on the marking stack. |
1367 // Before: the marking stack contains zero or more heap object pointers. | 1401 // Before: the marking stack contains zero or more heap object pointers. |
1368 // After: the marking stack is empty, and all objects reachable from the | 1402 // After: the marking stack is empty, and all objects reachable from the |
1369 // marking stack have been marked, or are overflowed in the heap. | 1403 // marking stack have been marked, or are overflowed in the heap. |
1370 void MarkCompactCollector::EmptyMarkingStack() { | 1404 void MarkCompactCollector::EmptyMarkingStack() { |
1371 while (!marking_stack_.is_empty()) { | 1405 while (!marking_stack_.is_empty()) { |
1372 HeapObject* object = marking_stack_.Pop(); | 1406 while (!marking_stack_.is_empty()) { |
1373 ASSERT(object->IsHeapObject()); | 1407 HeapObject* object = marking_stack_.Pop(); |
1374 ASSERT(heap()->Contains(object)); | 1408 ASSERT(object->IsHeapObject()); |
1375 ASSERT(object->IsMarked()); | 1409 ASSERT(heap()->Contains(object)); |
1376 ASSERT(!object->IsOverflowed()); | 1410 ASSERT(object->IsMarked()); |
| 1411 ASSERT(!object->IsOverflowed()); |
1377 | 1412 |
1378 // Because the object is marked, we have to recover the original map | 1413 // Because the object is marked, we have to recover the original map |
1379 // pointer and use it to mark the object's body. | 1414 // pointer and use it to mark the object's body. |
1380 MapWord map_word = object->map_word(); | 1415 MapWord map_word = object->map_word(); |
1381 map_word.ClearMark(); | 1416 map_word.ClearMark(); |
1382 Map* map = map_word.ToMap(); | 1417 Map* map = map_word.ToMap(); |
1383 MarkObject(map); | 1418 MarkObject(map); |
1384 | 1419 |
1385 StaticMarkingVisitor::IterateBody(map, object); | 1420 StaticMarkingVisitor::IterateBody(map, object); |
| 1421 } |
| 1422 |
| 1423 // Process encountered weak maps, mark objects that are only reachable |
| 1424 // by those weak maps, and repeat until a fix-point is reached. |
| 1425 ProcessWeakMaps(); |
1386 } | 1426 } |
1387 } | 1427 } |
1388 | 1428 |
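The restructured EmptyMarkingStack is what makes the ephemeron semantics work: draining the stack can mark new weak-map keys, ProcessWeakMaps then marks the corresponding values, and those values may in turn be keys (or reach keys) of other weak maps, so the outer loop repeats until a fix-point. A minimal sketch of that loop over a toy object model (ObjectId and Ephemeron pairs are illustrative, not V8's representation):

#include <cstdio>
#include <set>
#include <utility>
#include <vector>

typedef int ObjectId;
// Ephemeron semantics: the value stays alive only while the key is alive.
typedef std::pair<ObjectId, ObjectId> Ephemeron;

std::set<ObjectId> Mark(const std::vector<ObjectId>& roots,
                        const std::vector<Ephemeron>& ephemerons) {
  std::set<ObjectId> marked;
  std::vector<ObjectId> stack(roots);
  while (!stack.empty()) {
    // Inner loop: drain the marking stack (the inner while above).
    while (!stack.empty()) {
      marked.insert(stack.back());
      stack.pop_back();
    }
    // ProcessWeakMaps' role: values whose keys just became live are new
    // work, so the outer loop runs again until nothing gets pushed.
    for (size_t i = 0; i < ephemerons.size(); ++i) {
      if (marked.count(ephemerons[i].first) != 0 &&
          marked.count(ephemerons[i].second) == 0) {
        stack.push_back(ephemerons[i].second);
      }
    }
  }
  return marked;
}

int main() {
  std::vector<ObjectId> roots;
  roots.push_back(1);
  std::vector<Ephemeron> ephemerons;
  ephemerons.push_back(Ephemeron(1, 2));  // 2 is live because key 1 is live
  ephemerons.push_back(Ephemeron(2, 3));  // chain 1->2->3 needs a second pass
  std::printf("marked %d objects\n", (int)Mark(roots, ephemerons).size());  // 3
  return 0;
}

The chained ephemerons in main are exactly the case a single pass over the weak maps would miss.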
1389 | 1429 |
1390 // Sweep the heap for overflowed objects, clear their overflow bits, and | 1430 // Sweep the heap for overflowed objects, clear their overflow bits, and |
1391 // push them on the marking stack. Stop early if the marking stack fills | 1431 // push them on the marking stack. Stop early if the marking stack fills |
1392 // before sweeping completes. If sweeping completes, there are no remaining | 1432 // before sweeping completes. If sweeping completes, there are no remaining |
1393 // overflowed objects in the heap so the overflow flag on the marking stack | 1433 // overflowed objects in the heap so the overflow flag on the marking stack |
1394 // is cleared. | 1434 // is cleared. |
1395 void MarkCompactCollector::RefillMarkingStack() { | 1435 void MarkCompactCollector::RefillMarkingStack() { |
(...skipping 332 matching lines...)
1728 on_dead_path = false; | 1768 on_dead_path = false; |
1729 current->ClearNonLiveTransitions(heap(), real_prototype); | 1769 current->ClearNonLiveTransitions(heap(), real_prototype); |
1730 } | 1770 } |
1731 *HeapObject::RawField(current, Map::kPrototypeOffset) = | 1771 *HeapObject::RawField(current, Map::kPrototypeOffset) = |
1732 real_prototype; | 1772 real_prototype; |
1733 current = reinterpret_cast<Map*>(next); | 1773 current = reinterpret_cast<Map*>(next); |
1734 } | 1774 } |
1735 } | 1775 } |
1736 } | 1776 } |
1737 | 1777 |
| 1778 |
| 1779 void MarkCompactCollector::ProcessWeakMaps() { |
| 1780 Object* weak_map_obj = encountered_weak_maps(); |
| 1781 while (weak_map_obj != Smi::FromInt(0)) { |
| 1782 ASSERT(HeapObject::cast(weak_map_obj)->IsMarked()); |
| 1783 JSWeakMap* weak_map = reinterpret_cast<JSWeakMap*>(weak_map_obj); |
| 1784 ObjectHashTable* table = weak_map->unchecked_table(); |
| 1785 for (int i = 0; i < table->Capacity(); i++) { |
| 1786 if (HeapObject::cast(table->KeyAt(i))->IsMarked()) { |
| 1787 Object* value = table->get(table->EntryToValueIndex(i)); |
| 1788 StaticMarkingVisitor::MarkObjectByPointer(heap(), &value); |
| 1789 table->set_unchecked(heap(), |
| 1790 table->EntryToValueIndex(i), |
| 1791 value, |
| 1792 UPDATE_WRITE_BARRIER); |
| 1793 } |
| 1794 } |
| 1795 weak_map_obj = weak_map->next(); |
| 1796 } |
| 1797 } |
| 1798 |
| 1799 |
| 1800 void MarkCompactCollector::ClearWeakMaps() { |
| 1801 Object* weak_map_obj = encountered_weak_maps(); |
| 1802 while (weak_map_obj != Smi::FromInt(0)) { |
| 1803 ASSERT(HeapObject::cast(weak_map_obj)->IsMarked()); |
| 1804 JSWeakMap* weak_map = reinterpret_cast<JSWeakMap*>(weak_map_obj); |
| 1805 ObjectHashTable* table = weak_map->unchecked_table(); |
| 1806 for (int i = 0; i < table->Capacity(); i++) { |
| 1807 if (!HeapObject::cast(table->KeyAt(i))->IsMarked()) { |
| 1808 table->RemoveEntry(i, heap()); |
| 1809 } |
| 1810 } |
| 1811 weak_map_obj = weak_map->next(); |
| 1812 weak_map->set_next(Smi::FromInt(0)); |
| 1813 } |
| 1814 set_encountered_weak_maps(Smi::FromInt(0)); |
| 1815 } |
| 1816 |
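ClearWeakMaps runs after marking has reached its fix-point, so liveness is final: any entry whose key is unmarked can never be observed again and is removed, and each weak map's next field is reset to the sentinel for the next GC. A minimal sketch of that sweep over a toy table (std::map stands in for V8's ObjectHashTable):

#include <cstdio>
#include <map>
#include <set>

typedef int ObjectId;

// Drops every mapping whose key did not survive marking; live-keyed
// entries are left alone because ProcessWeakMaps already marked their
// values during the fix-point phase.
void ClearDeadEntries(std::map<ObjectId, ObjectId>* table,
                      const std::set<ObjectId>& marked) {
  std::map<ObjectId, ObjectId>::iterator it = table->begin();
  while (it != table->end()) {
    if (marked.count(it->first) == 0) {
      table->erase(it++);  // key is dead: remove the entry
    } else {
      ++it;
    }
  }
}

int main() {
  std::map<ObjectId, ObjectId> table;
  table[1] = 10;
  table[2] = 20;
  std::set<ObjectId> marked;
  marked.insert(1);  // only key 1 survived marking
  ClearDeadEntries(&table, marked);
  std::printf("%d entries remain\n", (int)table.size());  // 1
  return 0;
}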
1738 // ------------------------------------------------------------------------- | 1817 // ------------------------------------------------------------------------- |
1739 // Phase 2: Encode forwarding addresses. | 1818 // Phase 2: Encode forwarding addresses. |
1740 // When compacting, forwarding addresses for objects in old space and map | 1819 // When compacting, forwarding addresses for objects in old space and map |
1741 // space are encoded in their map pointer word (along with an encoding of | 1820 // space are encoded in their map pointer word (along with an encoding of |
1742 // their map pointers). | 1821 // their map pointers). |
1743 // | 1822 // |
1744 // The exact encoding is described in the comments for class MapWord in | 1823 // The exact encoding is described in the comments for class MapWord in |
1745 // objects.h. | 1824 // objects.h. |
1746 // | 1825 // |
1747 // An address range [start, end) can have both live and non-live objects. | 1826 // An address range [start, end) can have both live and non-live objects. |
(...skipping 1523 matching lines...)
3271 } | 3350 } |
3272 | 3351 |
3273 | 3352 |
3274 void MarkCompactCollector::Initialize() { | 3353 void MarkCompactCollector::Initialize() { |
3275 StaticPointersToNewGenUpdatingVisitor::Initialize(); | 3354 StaticPointersToNewGenUpdatingVisitor::Initialize(); |
3276 StaticMarkingVisitor::Initialize(); | 3355 StaticMarkingVisitor::Initialize(); |
3277 } | 3356 } |
3278 | 3357 |
3279 | 3358 |
3280 } } // namespace v8::internal | 3359 } } // namespace v8::internal |