Chromium Code Reviews
Unified Diff: src/mark-compact.cc

Issue 11035053: Rollback trunk to bleeding_edge revision 12525 (Closed)
Base URL: https://v8.googlecode.com/svn/trunk
Patch Set: Created 8 years, 2 months ago
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided

(...skipping 50 matching lines...)

#endif
      sweep_precisely_(false),
      reduce_memory_footprint_(false),
      abort_incremental_marking_(false),
      compacting_(false),
      was_marked_incrementally_(false),
      tracer_(NULL),
      migration_slots_buffer_(NULL),
      heap_(NULL),
      code_flusher_(NULL),
-      encountered_weak_maps_(NULL) { }
+      encountered_weak_maps_(NULL),
+      marker_(this, this) { }


#ifdef DEBUG
class VerifyMarkingVisitor: public ObjectVisitor {
 public:
  void VisitPointers(Object** start, Object** end) {
    for (Object** current = start; current < end; current++) {
      if ((*current)->IsHeapObject()) {
        HeapObject* object = HeapObject::cast(*current);
        ASSERT(HEAP->mark_compact_collector()->IsMarked(object));

(...skipping 964 matching lines...)

   public:
    static inline void Visit(Map* map, HeapObject* obj);
  };

  static void Initialize();

  INLINE(static void VisitPointer(Heap* heap, Object** p)) {
    MarkObjectByPointer(heap->mark_compact_collector(), p, p);
  }

-  INLINE(static void VisitPointers(Heap* heap,
-                                   Object** anchor,
-                                   Object** start,
-                                   Object** end)) {
+  INLINE(static void VisitPointers(Heap* heap, Object** start, Object** end)) {
    // Mark all objects pointed to in [start, end).
    const int kMinRangeForMarkingRecursion = 64;
    if (end - start >= kMinRangeForMarkingRecursion) {
-      if (VisitUnmarkedObjects(heap, anchor, start, end)) return;
+      if (VisitUnmarkedObjects(heap, start, end)) return;
      // We are close to a stack overflow, so just mark the objects.
    }
    MarkCompactCollector* collector = heap->mark_compact_collector();
    for (Object** p = start; p < end; p++) {
-      MarkObjectByPointer(collector, anchor, p);
+      MarkObjectByPointer(collector, start, p);
    }
  }

-  static void VisitHugeFixedArray(Heap* heap, FixedArray* array, int length);
-
-  // The deque is contiguous and we use new space, it is therefore contained in
-  // one page minus the header. It also has a size that is a power of two so
-  // it is half the size of a page. We want to scan a number of array entries
-  // that is less than the number of entries in the deque, so we divide by 2
-  // once more.
-  static const int kScanningChunk = Page::kPageSize / 4 / kPointerSize;
-
-  INLINE(static void VisitFixedArray(Map* map, HeapObject* object)) {
-    FixedArray* array = FixedArray::cast(object);
-    int length = array->length();
-    Heap* heap = map->GetHeap();
-
-    if (length < kScanningChunk ||
-        MemoryChunk::FromAddress(array->address())->owner()->identity() !=
-            LO_SPACE) {
-      Object** start_slot = array->data_start();
-      VisitPointers(heap, start_slot, start_slot, start_slot + length);
-    } else {
-      VisitHugeFixedArray(heap, array, length);
-    }
-  }
-
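A note on the constant removed above: the divisor chain in kScanningChunk is easy to misread. Here is a standalone sanity check of the arithmetic, assuming a 1 MB Page::kPageSize and 8-byte pointers (both values are assumptions about a typical 64-bit V8 build of this era, not taken from this patch):

// Toy check of the kScanningChunk arithmetic. kAssumedPageSize and
// kAssumedPointerSize are assumptions, not values from this patch.
#include <cstdio>

int main() {
  const int kAssumedPageSize = 1 << 20;  // assumed Page::kPageSize (1 MB)
  const int kAssumedPointerSize = 8;     // assumed 64-bit pointer width
  // The deque fills one new-space page minus its header, rounded down to a
  // power of two: kPageSize/2 bytes, i.e. kPageSize/2/kPointerSize entries.
  // Scanning half that many slots per step keeps one step's pushes below
  // the deque's capacity, hence the extra division by 2.
  const int kScanningChunk = kAssumedPageSize / 4 / kAssumedPointerSize;
  std::printf("kScanningChunk = %d slots per step\n", kScanningChunk);
  return 0;  // prints 32768 under these assumptions
}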
-  // Marks the object black and pushes it on the marking stack.
  INLINE(static void MarkObject(Heap* heap, HeapObject* object)) {
    MarkBit mark = Marking::MarkBitFrom(object);
    heap->mark_compact_collector()->MarkObject(object, mark);
  }

-  // Marks the object black without pushing it on the marking stack.
-  // Returns true if object needed marking and false otherwise.
-  INLINE(static bool MarkObjectWithoutPush(Heap* heap, HeapObject* object)) {
-    MarkBit mark_bit = Marking::MarkBitFrom(object);
-    if (!mark_bit.Get()) {
-      heap->mark_compact_collector()->SetMark(object, mark_bit);
-      return true;
-    }
-    return false;
-  }
-
  // Mark object pointed to by p.
  INLINE(static void MarkObjectByPointer(MarkCompactCollector* collector,
                                         Object** anchor_slot,
                                         Object** p)) {
    if (!(*p)->IsHeapObject()) return;
    HeapObject* object = ShortCircuitConsString(p);
    collector->RecordSlot(anchor_slot, p, object);
    MarkBit mark = Marking::MarkBitFrom(object);
    collector->MarkObject(object, mark);
  }


  // Visit an unmarked object.
  INLINE(static void VisitUnmarkedObject(MarkCompactCollector* collector,
                                         HeapObject* obj)) {
#ifdef DEBUG
    ASSERT(Isolate::Current()->heap()->Contains(obj));
    ASSERT(!HEAP->mark_compact_collector()->IsMarked(obj));
#endif
    Map* map = obj->map();
    Heap* heap = obj->GetHeap();
    MarkBit mark = Marking::MarkBitFrom(obj);
    heap->mark_compact_collector()->SetMark(obj, mark);
    // Mark the map pointer and the body.
    MarkBit map_mark = Marking::MarkBitFrom(map);
    heap->mark_compact_collector()->MarkObject(map, map_mark);
    IterateBody(map, obj);
  }

-  // Visit all unmarked objects pointed to by [start_slot, end_slot).
+  // Visit all unmarked objects pointed to by [start, end).
  // Returns false if the operation fails (lack of stack space).
-  static inline bool VisitUnmarkedObjects(Heap* heap,
-                                          Object** anchor_slot,
-                                          Object** start_slot,
-                                          Object** end_slot) {
+  static inline bool VisitUnmarkedObjects(Heap* heap,
+                                          Object** start,
+                                          Object** end) {
    // Return false is we are close to the stack limit.
    StackLimitCheck check(heap->isolate());
    if (check.HasOverflowed()) return false;

    MarkCompactCollector* collector = heap->mark_compact_collector();
    // Visit the unmarked objects.
-    for (Object** p = start_slot; p < end_slot; p++) {
+    for (Object** p = start; p < end; p++) {
      Object* o = *p;
      if (!o->IsHeapObject()) continue;
-      collector->RecordSlot(anchor_slot, p, o);
+      collector->RecordSlot(start, p, o);
      HeapObject* obj = HeapObject::cast(o);
      MarkBit mark = Marking::MarkBitFrom(obj);
      if (mark.Get()) continue;
      VisitUnmarkedObject(collector, obj);
    }
    return true;
  }
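The pattern above is a two-tier traversal: for ranges of at least kMinRangeForMarkingRecursion slots, VisitPointers tries depth-first recursion through VisitUnmarkedObjects, which refuses (returns false) when StackLimitCheck reports the C++ stack is nearly exhausted; marking then falls back to pushing objects on the deque. A self-contained toy model of that recursion-with-fallback shape follows; a depth budget stands in for StackLimitCheck, and none of these names come from the patch:

// Toy model (not V8 code): recurse while a depth budget allows, otherwise
// defer to an explicit worklist, mirroring the marking deque fallback.
#include <cstdio>
#include <vector>

struct Node {
  bool marked = false;
  std::vector<Node*> children;
};

std::vector<Node*> worklist;  // plays the role of the marking deque

void MarkRecursive(Node* n, int budget) {
  if (n->marked) return;
  n->marked = true;
  for (Node* c : n->children) {
    if (budget == 0) {
      worklist.push_back(c);  // "close to a stack overflow, so just mark"
    } else {
      MarkRecursive(c, budget - 1);
    }
  }
}

int main() {
  Node leaf, mid, root;
  mid.children.push_back(&leaf);
  root.children.push_back(&mid);
  MarkRecursive(&root, /*budget=*/1);  // leaf is deferred to the worklist
  while (!worklist.empty()) {
    Node* n = worklist.back();
    worklist.pop_back();
    MarkRecursive(n, 1);  // drain iteratively; no deep C++ stack needed
  }
  std::printf("leaf marked: %d\n", leaf.marked);  // prints 1
  return 0;
}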

  static void VisitJSWeakMap(Map* map, HeapObject* object) {
    MarkCompactCollector* collector = map->GetHeap()->mark_compact_collector();

(...skipping 300 matching lines...)

        reinterpret_cast<JSFunction*>(object),
        false);
  }


  static inline void VisitJSFunctionFields(Map* map,
                                           JSFunction* object,
                                           bool flush_code_candidate) {
    Heap* heap = map->GetHeap();

-    Object** start_slot =
-        HeapObject::RawField(object, JSFunction::kPropertiesOffset);
-    Object** end_slot =
-        HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
-    VisitPointers(heap, start_slot, start_slot, end_slot);
+    VisitPointers(heap,
+                  HeapObject::RawField(object, JSFunction::kPropertiesOffset),
+                  HeapObject::RawField(object, JSFunction::kCodeEntryOffset));

    if (!flush_code_candidate) {
      VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset);
    } else {
      // Don't visit code object.

      // Visit shared function info to avoid double checking of its
      // flushability.
      SharedFunctionInfo* shared_info = object->unchecked_shared();
      MarkBit shared_info_mark = Marking::MarkBitFrom(shared_info);
      if (!shared_info_mark.Get()) {
        Map* shared_info_map = shared_info->map();
        MarkBit shared_info_map_mark =
            Marking::MarkBitFrom(shared_info_map);
        heap->mark_compact_collector()->SetMark(shared_info, shared_info_mark);
        heap->mark_compact_collector()->MarkObject(shared_info_map,
                                                   shared_info_map_mark);
        VisitSharedFunctionInfoAndFlushCodeGeneric(shared_info_map,
                                                   shared_info,
                                                   true);
      }
    }

-    start_slot =
-        HeapObject::RawField(object,
-                             JSFunction::kCodeEntryOffset + kPointerSize);
-    end_slot =
-        HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
-    VisitPointers(heap, start_slot, start_slot, end_slot);
+    VisitPointers(
+        heap,
+        HeapObject::RawField(object,
+                             JSFunction::kCodeEntryOffset + kPointerSize),
+        HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset));
  }


  static void VisitSharedFunctionInfoFields(Heap* heap,
                                            HeapObject* object,
                                            bool flush_code_candidate) {
    VisitPointer(heap,
                 HeapObject::RawField(object, SharedFunctionInfo::kNameOffset));

    if (!flush_code_candidate) {
      VisitPointer(heap,
                   HeapObject::RawField(object,
                                        SharedFunctionInfo::kCodeOffset));
    }

-    Object** start_slot =
-        HeapObject::RawField(object,
-                             SharedFunctionInfo::kOptimizedCodeMapOffset);
-    Object** end_slot =
-        HeapObject::RawField(object, SharedFunctionInfo::kSize);
-
-    VisitPointers(heap, start_slot, start_slot, end_slot);
+    VisitPointers(
+        heap,
+        HeapObject::RawField(object,
+                             SharedFunctionInfo::kOptimizedCodeMapOffset),
+        HeapObject::RawField(object, SharedFunctionInfo::kSize));
  }

  static VisitorDispatchTable<Callback> non_count_table_;
};


-void MarkCompactMarkingVisitor::VisitHugeFixedArray(Heap* heap,
-                                                    FixedArray* array,
-                                                    int length) {
-  MemoryChunk* chunk = MemoryChunk::FromAddress(array->address());
-
-  ASSERT(chunk->owner()->identity() == LO_SPACE);
-
-  Object** start_slot = array->data_start();
-  int from =
-      chunk->IsPartiallyScanned() ? chunk->PartiallyScannedProgress() : 0;
-  int to = Min(from + kScanningChunk, length);
-  VisitPointers(heap, start_slot, start_slot + from, start_slot + to);
-
-  if (to == length) {
-    chunk->SetCompletelyScanned();
-  } else {
-    chunk->SetPartiallyScannedProgress(to);
-  }
-}
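The removed helper scans a huge array one kScanningChunk slice per call and parks its progress on the array's MemoryChunk. The chunk protocol it relies on, as inferred from the calls above (the accessor semantics are read off this usage, not quoted from mark-compact.h):

// Inferred MemoryChunk partial-scan protocol (names from the code above):
//   IsPartiallyScanned()           - a scan position is parked on this chunk
//   PartiallyScannedProgress()     - index of the first unscanned slot
//   SetPartiallyScannedProgress(n) - park at slot n, marking the chunk partial
//   SetCompletelyScanned()         - clear the parked state; the chunk is done
// Each call therefore scans [from, min(from + kScanningChunk, length)) and
// either finishes the array or re-parks it for a later marking step.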

void MarkCompactMarkingVisitor::ObjectStatsCountFixedArray(
    FixedArrayBase* fixed_array,
    FixedArraySubInstanceType fast_type,
    FixedArraySubInstanceType dictionary_type) {
  Heap* heap = fixed_array->map()->GetHeap();
  if (fixed_array->map() != heap->fixed_cow_array_map() &&
      fixed_array->map() != heap->fixed_double_array_map() &&
      fixed_array != heap->empty_fixed_array()) {
    if (fixed_array->IsDictionary()) {
      heap->RecordObjectStats(FIXED_ARRAY_TYPE,

(...skipping 121 matching lines...)


  table_.Register(kVisitSharedFunctionInfo,
                  &VisitSharedFunctionInfoAndFlushCode);

  table_.Register(kVisitJSFunction,
                  &VisitJSFunctionAndFlushCode);

  table_.Register(kVisitJSRegExp,
                  &VisitRegExpAndFlushCode);

-  table_.Register(kVisitFixedArray,
-                  &VisitFixedArray);
-
  if (FLAG_track_gc_object_stats) {
    // Copy the visitor table to make call-through possible.
    non_count_table_.CopyFrom(&table_);
#define VISITOR_ID_COUNT_FUNCTION(id) \
    table_.Register(kVisit##id, ObjectStatsTracker<kVisit##id>::Visit);
    VISITOR_ID_LIST(VISITOR_ID_COUNT_FUNCTION)
#undef VISITOR_ID_COUNT_FUNCTION
  }
}


VisitorDispatchTable<MarkCompactMarkingVisitor::Callback>
    MarkCompactMarkingVisitor::non_count_table_;


class MarkingVisitor : public ObjectVisitor {
 public:
  explicit MarkingVisitor(Heap* heap) : heap_(heap) { }

  void VisitPointer(Object** p) {
    MarkCompactMarkingVisitor::VisitPointer(heap_, p);
  }

-  void VisitPointers(Object** start_slot, Object** end_slot) {
-    MarkCompactMarkingVisitor::VisitPointers(
-        heap_, start_slot, start_slot, end_slot);
-  }
+  void VisitPointers(Object** start, Object** end) {
+    MarkCompactMarkingVisitor::VisitPointers(heap_, start, end);
+  }

 private:
  Heap* heap_;
};


class CodeMarkingVisitor : public ThreadVisitor {
 public:
  explicit CodeMarkingVisitor(MarkCompactCollector* collector)
      : collector_(collector) {}

  void VisitThread(Isolate* isolate, ThreadLocalTop* top) {
    collector_->PrepareThreadForCodeFlushing(isolate, top);
  }

 private:
  MarkCompactCollector* collector_;
};


class SharedFunctionInfoMarkingVisitor : public ObjectVisitor {
 public:
  explicit SharedFunctionInfoMarkingVisitor(MarkCompactCollector* collector)
      : collector_(collector) {}

-  void VisitPointers(Object** start_slot, Object** end_slot) {
-    for (Object** p = start_slot; p < end_slot; p++) VisitPointer(p);
+  void VisitPointers(Object** start, Object** end) {
+    for (Object** p = start; p < end; p++) VisitPointer(p);
  }

  void VisitPointer(Object** slot) {
    Object* obj = *slot;
    if (obj->IsSharedFunctionInfo()) {
      SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(obj);
      MarkBit shared_mark = Marking::MarkBitFrom(shared);
      MarkBit code_mark = Marking::MarkBitFrom(shared->code());
      collector_->MarkObject(shared->code(), code_mark);
      collector_->MarkObject(shared, shared_mark);

(...skipping 90 matching lines...)

// Visitor class for marking heap roots.
class RootMarkingVisitor : public ObjectVisitor {
 public:
  explicit RootMarkingVisitor(Heap* heap)
      : collector_(heap->mark_compact_collector()) { }

  void VisitPointer(Object** p) {
    MarkObjectByPointer(p);
  }

-  void VisitPointers(Object** start_slot, Object** end_slot) {
-    for (Object** p = start_slot; p < end_slot; p++) MarkObjectByPointer(p);
+  void VisitPointers(Object** start, Object** end) {
+    for (Object** p = start; p < end; p++) MarkObjectByPointer(p);
  }

 private:
  void MarkObjectByPointer(Object** p) {
    if (!(*p)->IsHeapObject()) return;

    // Replace flat cons strings in place.
    HeapObject* object = ShortCircuitConsString(p);
    MarkBit mark_bit = Marking::MarkBitFrom(object);
    if (mark_bit.Get()) return;

(...skipping 15 matching lines...)

  MarkCompactCollector* collector_;
};


// Helper class for pruning the symbol table.
class SymbolTableCleaner : public ObjectVisitor {
 public:
  explicit SymbolTableCleaner(Heap* heap)
      : heap_(heap), pointers_removed_(0) { }

-  virtual void VisitPointers(Object** start_slot, Object** end_slot) {
-    // Visit all HeapObject pointers in [start_slot, end_slot).
-    for (Object** p = start_slot; p < end_slot; p++) {
+  virtual void VisitPointers(Object** start, Object** end) {
+    // Visit all HeapObject pointers in [start, end).
+    for (Object** p = start; p < end; p++) {
      Object* o = *p;
      if (o->IsHeapObject() &&
          !Marking::MarkBitFrom(HeapObject::cast(o)).Get()) {
        // Check if the symbol being pruned is an external symbol. We need to
        // delete the associated external data as this symbol is going away.

        // Since no objects have yet been moved we can safely access the map of
        // the object.
        if (o->IsExternalString()) {
          heap_->FinalizeExternalString(String::cast(*p));

(...skipping 22 matching lines...)

  virtual Object* RetainAs(Object* object) {
    if (Marking::MarkBitFrom(HeapObject::cast(object)).Get()) {
      return object;
    } else {
      return NULL;
    }
  }
};


+void MarkCompactCollector::ProcessNewlyMarkedObject(HeapObject* object) {
+  ASSERT(IsMarked(object));
+  ASSERT(HEAP->Contains(object));
+  if (object->IsMap()) {
+    Map* map = Map::cast(object);
+    heap_->ClearCacheOnMap(map);
+
+    // When map collection is enabled we have to mark through map's transitions
+    // in a special way to make transition links weak. Only maps for subclasses
+    // of JSReceiver can have transitions.
+    STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
+    if (FLAG_collect_maps && map->instance_type() >= FIRST_JS_RECEIVER_TYPE) {
+      marker_.MarkMapContents(map);
+    } else {
+      marking_deque_.PushBlack(map);
+    }
+  } else {
+    marking_deque_.PushBlack(object);
+  }
+}
+
+
+// Force instantiation of template instances.
+template void Marker<IncrementalMarking>::MarkMapContents(Map* map);
+template void Marker<MarkCompactCollector>::MarkMapContents(Map* map);
+
+
+template <class T>
+void Marker<T>::MarkMapContents(Map* map) {
+  // Make sure that the back pointer stored either in the map itself or inside
+  // its transitions array is marked. Treat pointers in the transitions array as
+  // weak and also mark that array to prevent visiting it later.
+  base_marker()->MarkObjectAndPush(HeapObject::cast(map->GetBackPointer()));
+
+  Object** transitions_slot =
+      HeapObject::RawField(map, Map::kTransitionsOrBackPointerOffset);
+  Object* transitions = *transitions_slot;
+  if (transitions->IsTransitionArray()) {
+    MarkTransitionArray(reinterpret_cast<TransitionArray*>(transitions));
+  } else {
+    // Already marked by marking map->GetBackPointer().
+    ASSERT(transitions->IsMap() || transitions->IsUndefined());
+  }
+
+  // Mark the Object* fields of the Map. Since the transitions array has been
+  // marked already, it is fine that one of these fields contains a pointer
+  // to it.
+  Object** start_slot =
+      HeapObject::RawField(map, Map::kPointerFieldsBeginOffset);
+  Object** end_slot = HeapObject::RawField(map, Map::kPointerFieldsEndOffset);
+  for (Object** slot = start_slot; slot < end_slot; slot++) {
+    Object* obj = *slot;
+    if (!obj->NonFailureIsHeapObject()) continue;
+    mark_compact_collector()->RecordSlot(start_slot, slot, obj);
+    base_marker()->MarkObjectAndPush(reinterpret_cast<HeapObject*>(obj));
+  }
+}
+
+
+template <class T>
+void Marker<T>::MarkTransitionArray(TransitionArray* transitions) {
+  if (!base_marker()->MarkObjectWithoutPush(transitions)) return;
+  Object** transitions_start = transitions->data_start();
+
+  // We don't have to record the descriptors_pointer slot since the cell space
+  // is not compacted.
+  JSGlobalPropertyCell* descriptors_cell = transitions->descriptors_pointer();
+  base_marker()->MarkObjectAndPush(descriptors_cell);
+
+  if (transitions->HasPrototypeTransitions()) {
+    // Mark prototype transitions array but don't push it into marking stack.
+    // This will make references from it weak. We will clean dead prototype
+    // transitions in ClearNonLiveTransitions.
+    Object** proto_trans_slot = transitions->GetPrototypeTransitionsSlot();
+    HeapObject* prototype_transitions = HeapObject::cast(*proto_trans_slot);
+    base_marker()->MarkObjectWithoutPush(prototype_transitions);
+    mark_compact_collector()->RecordSlot(
+        transitions_start, proto_trans_slot, prototype_transitions);
+  }
+
+  for (int i = 0; i < transitions->number_of_transitions(); ++i) {
+    Object** key_slot = transitions->GetKeySlot(i);
+    Object* key = *key_slot;
+    if (key->IsHeapObject()) {
+      base_marker()->MarkObjectAndPush(HeapObject::cast(key));
+      mark_compact_collector()->RecordSlot(transitions_start, key_slot, key);
+    }
+  }
+}
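The load-bearing idiom in the two template functions above is the split between the two base-marker calls. A condensed summary, with semantics inferred from the usage here rather than from the Marker<T> declaration:

// Inferred Marker<T> base-marker semantics, as used above:
//   MarkObjectAndPush(obj)     - mark black AND push on the marking stack: a
//                                strong edge; obj's contents get traced too.
//   MarkObjectWithoutPush(obj) - mark black only: obj itself survives, but
//                                nothing is traced through it, so targets
//                                reachable only via this edge stay unmarked.
// Transition keys are pushed (strong), while the transition array and the
// prototype transitions are only marked, which is what makes those links
// weak; ClearNonLiveTransitions later prunes the entries whose targets died.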


// Fill the marking stack with overflowed objects returned by the given
// iterator. Stop when the marking stack is filled or the end of the space
// is reached, whichever comes first.
template<class T>
static void DiscoverGreyObjectsWithIterator(Heap* heap,
                                            MarkingDeque* marking_deque,
                                            T* it) {
  // The caller should ensure that the marking stack is initially not full,
  // so that we don't waste effort pointlessly scanning for objects.
  ASSERT(!marking_deque->IsFull());

(...skipping 218 matching lines...)

      ASSERT(object->IsHeapObject());
      ASSERT(heap()->Contains(object));
      ASSERT(Marking::IsBlack(Marking::MarkBitFrom(object)));

      Map* map = object->map();
      MarkBit map_mark = Marking::MarkBitFrom(map);
      MarkObject(map, map_mark);

      MarkCompactMarkingVisitor::IterateBody(map, object);
    }
-    ProcessLargePostponedArrays(heap(), &marking_deque_);

    // Process encountered weak maps, mark objects only reachable by those
    // weak maps and repeat until fix-point is reached.
    ProcessWeakMaps();
  }
}


-void MarkCompactCollector::ProcessLargePostponedArrays(Heap* heap,
-                                                       MarkingDeque* deque) {
-  ASSERT(deque->IsEmpty());
-  LargeObjectIterator it(heap->lo_space());
-  for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) {
-    if (!obj->IsFixedArray()) continue;
-    MemoryChunk* p = MemoryChunk::FromAddress(obj->address());
-    if (p->IsPartiallyScanned()) {
-      deque->PushBlack(obj);
-    }
-  }
-}
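Together with VisitHugeFixedArray (also removed in this patch), this formed a simple fix-point loop: drain the deque, with each huge array contributing one slice per pop, then re-queue every large array whose chunk is still partially scanned, and repeat until nothing is parked. A self-contained toy model of that loop follows; plain STL, no V8 types, with an int standing in for the chunk's parked progress:

// Toy model (not V8 code) of the postponed-large-array fix-point loop.
#include <algorithm>
#include <cstdio>
#include <deque>
#include <vector>

struct BigArray {
  int length;
  int progress;  // stands in for MemoryChunk::PartiallyScannedProgress()
};

int main() {
  const int kScanningChunk = 4;  // tiny stand-in for the real slice size
  std::vector<BigArray> lo_space = {{10, 0}, {6, 0}};
  std::deque<BigArray*> marking_deque = {&lo_space[0], &lo_space[1]};

  while (!marking_deque.empty()) {
    // EmptyMarkingDeque(): each pop scans at most one slice of an array.
    while (!marking_deque.empty()) {
      BigArray* a = marking_deque.front();
      marking_deque.pop_front();
      int to = std::min(a->progress + kScanningChunk, a->length);
      std::printf("scan slots [%d, %d)\n", a->progress, to);
      a->progress = to;
    }
    // ProcessLargePostponedArrays(): the deque is empty here, matching the
    // ASSERT above; re-queue every array that is only partially scanned.
    for (BigArray& a : lo_space) {
      if (a.progress < a.length) marking_deque.push_back(&a);
    }
  }
  return 0;
}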


// Sweep the heap for overflowed objects, clear their overflow bits, and
// push them on the marking stack. Stop early if the marking stack fills
// before sweeping completes. If sweeping completes, there are no remaining
// overflowed objects in the heap so the overflow flag on the markings stack
// is cleared.
void MarkCompactCollector::RefillMarkingDeque() {
-  if (FLAG_trace_gc) {
-    PrintPID("Marking queue overflowed\n");
-  }
  ASSERT(marking_deque_.overflowed());

  SemiSpaceIterator new_it(heap()->new_space());
  DiscoverGreyObjectsWithIterator(heap(), &marking_deque_, &new_it);
  if (marking_deque_.IsFull()) return;

  DiscoverGreyObjectsInSpace(heap(),
                             &marking_deque_,
                             heap()->old_pointer_space());
  if (marking_deque_.IsFull()) return;

(...skipping 470 matching lines...)

// Visitor for updating pointers from live objects in old spaces to new space.
// It does not expect to encounter pointers to dead objects.
class PointersUpdatingVisitor: public ObjectVisitor {
 public:
  explicit PointersUpdatingVisitor(Heap* heap) : heap_(heap) { }

  void VisitPointer(Object** p) {
    UpdatePointer(p);
  }

-  void VisitPointers(Object** start_slot, Object** end_slot) {
-    for (Object** p = start_slot; p < end_slot; p++) UpdatePointer(p);
+  void VisitPointers(Object** start, Object** end) {
+    for (Object** p = start; p < end; p++) UpdatePointer(p);
  }

  void VisitEmbeddedPointer(RelocInfo* rinfo) {
    ASSERT(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
    Object* target = rinfo->target_object();
    VisitPointer(&target);
    rinfo->set_target_object(target);
  }

  void VisitCodeTarget(RelocInfo* rinfo) {

(...skipping 1418 matching lines...)

  while (buffer != NULL) {
    SlotsBuffer* next_buffer = buffer->next();
    DeallocateBuffer(buffer);
    buffer = next_buffer;
  }
  *buffer_address = NULL;
}


} }  // namespace v8::internal