| OLD | NEW |
| 1 // Copyright 2006-2008 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2008 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 17 matching lines...) |
| 28 #include "v8.h" | 28 #include "v8.h" |
| 29 | 29 |
| 30 #include "execution.h" | 30 #include "execution.h" |
| 31 #include "global-handles.h" | 31 #include "global-handles.h" |
| 32 #include "ic-inl.h" | 32 #include "ic-inl.h" |
| 33 #include "mark-compact.h" | 33 #include "mark-compact.h" |
| 34 #include "stub-cache.h" | 34 #include "stub-cache.h" |
| 35 | 35 |
| 36 namespace v8 { namespace internal { | 36 namespace v8 { namespace internal { |
| 37 | 37 |
| 38 #ifdef DEBUG | 38 // ------------------------------------------------------------------------- |
| 39 // The verification code used between phases of the m-c collector does not | |
| 40 // currently work. | |
| 41 // | |
| 42 // TODO(1240833): Fix the heap verification code and turn this into a real | |
| 43 // flag. | |
| 44 static const bool FLAG_verify_global_gc = false; | |
| 45 #endif // DEBUG | |
| 46 | |
| 47 // ---------------------------------------------------------------------------- | |
| 48 // MarkCompactCollector | 39 // MarkCompactCollector |
| 49 | 40 |
| 50 bool MarkCompactCollector::compacting_collection_ = false; | 41 bool MarkCompactCollector::compacting_collection_ = false; |
| 51 | 42 |
| 52 int MarkCompactCollector::previous_marked_count_ = 0; | 43 int MarkCompactCollector::previous_marked_count_ = 0; |
| 53 GCTracer* MarkCompactCollector::tracer_ = NULL; | 44 GCTracer* MarkCompactCollector::tracer_ = NULL; |
| 54 | 45 |
| 55 | 46 |
| 56 #ifdef DEBUG | 47 #ifdef DEBUG |
| 57 MarkCompactCollector::CollectorState MarkCompactCollector::state_ = IDLE; | 48 MarkCompactCollector::CollectorState MarkCompactCollector::state_ = IDLE; |
| (...skipping 112 matching lines...) |
| 170 state_ = IDLE; | 161 state_ = IDLE; |
| 171 #endif | 162 #endif |
| 172 // The stub cache is not traversed during GC; clear the cache to | 163 // The stub cache is not traversed during GC; clear the cache to |
| 173 // force lazy re-initialization of it. This must be done after the | 164 // force lazy re-initialization of it. This must be done after the |
| 174 // GC, because it relies on the new address of certain old space | 165 // GC, because it relies on the new address of certain old space |
| 175 // objects (empty string, illegal builtin). | 166 // objects (empty string, illegal builtin). |
| 176 StubCache::Clear(); | 167 StubCache::Clear(); |
| 177 } | 168 } |
| 178 | 169 |
| 179 | 170 |
| 180 // ---------------------------------------------------------------------------- | 171 // ------------------------------------------------------------------------- |
| 181 // Phase 1: tracing and marking live objects. | 172 // Phase 1: tracing and marking live objects. |
| 182 // before: all objects are in normal state. | 173 // before: all objects are in normal state. |
| 183 // after: a live object's map pointer is marked as '00'. | 174 // after: a live object's map pointer is marked as '00'. |
| 184 | 175 |
| 185 // Marking all live objects in the heap as part of mark-sweep or mark-compact | 176 // Marking all live objects in the heap as part of mark-sweep or mark-compact |
| 186 // collection. Before marking, all objects are in their normal state. After | 177 // collection. Before marking, all objects are in their normal state. After |
| 187 // marking, live objects' map pointers are marked indicating that the object | 178 // marking, live objects' map pointers are marked indicating that the object |
| 188 // has been found reachable. | 179 // has been found reachable. |
| 189 // | 180 // |
| 190 // The marking algorithm is a (mostly) depth-first (because of possible stack | 181 // The marking algorithm is a (mostly) depth-first (because of possible stack |
| (...skipping 531 matching lines...) |
| 722 | 713 |
| 723 // Prune the symbol table removing all symbols only pointed to by the | 714 // Prune the symbol table removing all symbols only pointed to by the |
| 724 // symbol table. Cannot use SymbolTable::cast here because the symbol | 715 // symbol table. Cannot use SymbolTable::cast here because the symbol |
| 725 // table is marked. | 716 // table is marked. |
| 726 SymbolTable* symbol_table = | 717 SymbolTable* symbol_table = |
| 727 reinterpret_cast<SymbolTable*>(Heap::symbol_table()); | 718 reinterpret_cast<SymbolTable*>(Heap::symbol_table()); |
| 728 SymbolTableCleaner v; | 719 SymbolTableCleaner v; |
| 729 symbol_table->IterateElements(&v); | 720 symbol_table->IterateElements(&v); |
| 730 symbol_table->ElementsRemoved(v.PointersRemoved()); | 721 symbol_table->ElementsRemoved(v.PointersRemoved()); |
| 731 | 722 |
| 732 #ifdef DEBUG | |
| 733 if (FLAG_verify_global_gc) VerifyHeapAfterMarkingPhase(); | |
| 734 #endif | |
| 735 | |
| 736 // Remove object groups after marking phase. | 723 // Remove object groups after marking phase. |
| 737 GlobalHandles::RemoveObjectGroups(); | 724 GlobalHandles::RemoveObjectGroups(); |
| 738 } | 725 } |
| 739 | 726 |
| 740 | 727 |
| 741 static int CountMarkedCallback(HeapObject* obj) { | 728 static int CountMarkedCallback(HeapObject* obj) { |
| 742 MapWord map_word = obj->map_word(); | 729 MapWord map_word = obj->map_word(); |
| 743 map_word.ClearMark(); | 730 map_word.ClearMark(); |
| 744 return obj->SizeFromMap(map_word.ToMap()); | 731 return obj->SizeFromMap(map_word.ToMap()); |
| 745 } | 732 } |
| (...skipping 12 matching lines...) |
| 758 } else if (Heap::old_data_space()->Contains(obj)) { | 745 } else if (Heap::old_data_space()->Contains(obj)) { |
| 759 live_old_data_objects_++; | 746 live_old_data_objects_++; |
| 760 } else if (Heap::code_space()->Contains(obj)) { | 747 } else if (Heap::code_space()->Contains(obj)) { |
| 761 live_code_objects_++; | 748 live_code_objects_++; |
| 762 } else if (Heap::lo_space()->Contains(obj)) { | 749 } else if (Heap::lo_space()->Contains(obj)) { |
| 763 live_lo_objects_++; | 750 live_lo_objects_++; |
| 764 } else { | 751 } else { |
| 765 UNREACHABLE(); | 752 UNREACHABLE(); |
| 766 } | 753 } |
| 767 } | 754 } |
| 768 | |
| 769 | |
| 770 void MarkCompactCollector::VerifyHeapAfterMarkingPhase() { | |
| 771 Heap::new_space()->Verify(); | |
| 772 Heap::old_pointer_space()->Verify(); | |
| 773 Heap::old_data_space()->Verify(); | |
| 774 Heap::code_space()->Verify(); | |
| 775 Heap::map_space()->Verify(); | |
| 776 | |
| 777 int live_objects; | |
| 778 | |
| 779 #define CHECK_LIVE_OBJECTS(it, expected) \ | |
| 780 live_objects = 0; \ | |
| 781 while (it.has_next()) { \ | |
| 782 HeapObject* obj = HeapObject::cast(it.next()); \ | |
| 783 if (obj->IsMarked()) live_objects++; \ | |
| 784 } \ | |
| 785 ASSERT(live_objects == expected); | |
| 786 | |
| 787 SemiSpaceIterator new_it(Heap::new_space(), &CountMarkedCallback); | |
| 788 CHECK_LIVE_OBJECTS(new_it, live_young_objects_); | |
| 789 | |
| 790 HeapObjectIterator old_pointer_it(Heap::old_pointer_space(), | |
| 791 &CountMarkedCallback); | |
| 792 CHECK_LIVE_OBJECTS(old_pointer_it, live_old_pointer_objects_); | |
| 793 | |
| 794 HeapObjectIterator old_data_it(Heap::old_data_space(), &CountMarkedCallback); | |
| 795 CHECK_LIVE_OBJECTS(old_data_it, live_old_data_objects_); | |
| 796 | |
| 797 HeapObjectIterator code_it(Heap::code_space(), &CountMarkedCallback); | |
| 798 CHECK_LIVE_OBJECTS(code_it, live_code_objects_); | |
| 799 | |
| 800 HeapObjectIterator map_it(Heap::map_space(), &CountMarkedCallback); | |
| 801 CHECK_LIVE_OBJECTS(map_it, live_map_objects_); | |
| 802 | |
| 803 LargeObjectIterator lo_it(Heap::lo_space(), &CountMarkedCallback); | |
| 804 CHECK_LIVE_OBJECTS(lo_it, live_lo_objects_); | |
| 805 | |
| 806 #undef CHECK_LIVE_OBJECTS | |
| 807 } | |
| 808 #endif // DEBUG | 755 #endif // DEBUG |
| 809 | 756 |
| 810 | 757 |
| 811 void MarkCompactCollector::SweepLargeObjectSpace() { | 758 void MarkCompactCollector::SweepLargeObjectSpace() { |
| 812 #ifdef DEBUG | 759 #ifdef DEBUG |
| 813 ASSERT(state_ == MARK_LIVE_OBJECTS); | 760 ASSERT(state_ == MARK_LIVE_OBJECTS); |
| 814 state_ = | 761 state_ = |
| 815 compacting_collection_ ? ENCODE_FORWARDING_ADDRESSES : SWEEP_SPACES; | 762 compacting_collection_ ? ENCODE_FORWARDING_ADDRESSES : SWEEP_SPACES; |
| 816 #endif | 763 #endif |
| 817 // Deallocate unmarked objects and clear marked bits for marked objects. | 764 // Deallocate unmarked objects and clear marked bits for marked objects. |
| (...skipping 500 matching lines...) |
| 1318 while (it.has_next()) { | 1265 while (it.has_next()) { |
| 1319 Page* p = it.next(); | 1266 Page* p = it.next(); |
| 1320 total += IterateLiveObjectsInRange(p->ObjectAreaStart(), | 1267 total += IterateLiveObjectsInRange(p->ObjectAreaStart(), |
| 1321 p->AllocationTop(), | 1268 p->AllocationTop(), |
| 1322 size_f); | 1269 size_f); |
| 1323 } | 1270 } |
| 1324 return total; | 1271 return total; |
| 1325 } | 1272 } |
| 1326 | 1273 |
| 1327 | 1274 |
| 1328 #ifdef DEBUG | 1275 // ------------------------------------------------------------------------- |
| 1329 static int VerifyMapObject(HeapObject* obj) { | |
| 1330 InstanceType type = reinterpret_cast<Map*>(obj)->instance_type(); | |
| 1331 ASSERT(FIRST_TYPE <= type && type <= LAST_TYPE); | |
| 1332 return Map::kSize; | |
| 1333 } | |
| 1334 | |
| 1335 | |
| 1336 void MarkCompactCollector::VerifyHeapAfterEncodingForwardingAddresses() { | |
| 1337 AllSpaces spaces; | |
| 1338 while (Space* space = spaces.next()) space->Verify(); | |
| 1339 | |
| 1340 ASSERT(state_ == ENCODE_FORWARDING_ADDRESSES); | |
| 1341 int live_maps = IterateLiveObjects(Heap::map_space(), &VerifyMapObject); | |
| 1342 ASSERT(live_maps == live_map_objects_); | |
| 1343 | |
| 1344 // Verify page headers in paged spaces. | |
| 1345 PagedSpaces paged_spaces; | |
| 1346 while (PagedSpace* space = paged_spaces.next()) VerifyPageHeaders(space); | |
| 1347 } | |
| 1348 | |
| 1349 | |
| 1350 void MarkCompactCollector::VerifyPageHeaders(PagedSpace* space) { | |
| 1351 PageIterator mc_it(space, PageIterator::PAGES_USED_BY_MC); | |
| 1352 while (mc_it.has_next()) { | |
| 1353 Page* p = mc_it.next(); | |
| 1354 Address mc_alloc_top = p->mc_relocation_top; | |
| 1355 ASSERT(p->ObjectAreaStart() <= mc_alloc_top && | |
| 1356 mc_alloc_top <= p->ObjectAreaEnd()); | |
| 1357 } | |
| 1358 | |
| 1359 int page_count = 0; | |
| 1360 PageIterator it(space, PageIterator::PAGES_IN_USE); | |
| 1361 while (it.has_next()) { | |
| 1362 Page* p = it.next(); | |
| 1363 ASSERT(p->mc_page_index == page_count); | |
| 1364 page_count++; | |
| 1365 | |
| 1366 // first_forwarded could be 'deadbeef' if no live objects in this page | |
| 1367 Address first_forwarded = p->mc_first_forwarded; | |
| 1368 ASSERT(first_forwarded == kZapValue || | |
| 1369 space->Contains(first_forwarded)); | |
| 1370 } | |
| 1371 } | |
| 1372 #endif | |
| 1373 | |
| 1374 | |
| 1375 // ---------------------------------------------------------------------------- | |
| 1376 // Phase 3: Update pointers | 1276 // Phase 3: Update pointers |
| 1377 | 1277 |
| 1378 // Helper class for updating pointers in HeapObjects. | 1278 // Helper class for updating pointers in HeapObjects. |
| 1379 class UpdatingVisitor: public ObjectVisitor { | 1279 class UpdatingVisitor: public ObjectVisitor { |
| 1380 public: | 1280 public: |
| 1381 void VisitPointer(Object** p) { | 1281 void VisitPointer(Object** p) { |
| 1382 UpdatePointer(p); | 1282 UpdatePointer(p); |
| 1383 } | 1283 } |
| 1384 | 1284 |
| 1385 void VisitPointers(Object** start, Object** end) { | 1285 void VisitPointers(Object** start, Object** end) { |
| (...skipping 101 matching lines...) |
| 1487 USE(live_data_olds); | 1387 USE(live_data_olds); |
| 1488 USE(live_codes); | 1388 USE(live_codes); |
| 1489 USE(live_news); | 1389 USE(live_news); |
| 1490 | 1390 |
| 1491 #ifdef DEBUG | 1391 #ifdef DEBUG |
| 1492 ASSERT(live_maps == live_map_objects_); | 1392 ASSERT(live_maps == live_map_objects_); |
| 1493 ASSERT(live_data_olds == live_old_data_objects_); | 1393 ASSERT(live_data_olds == live_old_data_objects_); |
| 1494 ASSERT(live_pointer_olds == live_old_pointer_objects_); | 1394 ASSERT(live_pointer_olds == live_old_pointer_objects_); |
| 1495 ASSERT(live_codes == live_code_objects_); | 1395 ASSERT(live_codes == live_code_objects_); |
| 1496 ASSERT(live_news == live_young_objects_); | 1396 ASSERT(live_news == live_young_objects_); |
| 1497 | |
| 1498 if (FLAG_verify_global_gc) VerifyHeapAfterUpdatingPointers(); | |
| 1499 #endif | 1397 #endif |
| 1500 } | 1398 } |
| 1501 | 1399 |
| 1502 | 1400 |
| 1503 int MarkCompactCollector::UpdatePointersInNewObject(HeapObject* obj) { | 1401 int MarkCompactCollector::UpdatePointersInNewObject(HeapObject* obj) { |
| 1504 // Keep old map pointers | 1402 // Keep old map pointers |
| 1505 Map* old_map = obj->map(); | 1403 Map* old_map = obj->map(); |
| 1506 ASSERT(old_map->IsHeapObject()); | 1404 ASSERT(old_map->IsHeapObject()); |
| 1507 | 1405 |
| 1508 Address forwarded = GetForwardingAddressInOldSpace(old_map); | 1406 Address forwarded = GetForwardingAddressInOldSpace(old_map); |
| (...skipping 85 matching lines...) |
| 1594 offset -= (mc_top_offset - forwarded_offset); | 1492 offset -= (mc_top_offset - forwarded_offset); |
| 1595 offset += Page::kObjectStartOffset; | 1493 offset += Page::kObjectStartOffset; |
| 1596 | 1494 |
| 1597 ASSERT_PAGE_OFFSET(offset); | 1495 ASSERT_PAGE_OFFSET(offset); |
| 1598 ASSERT(next_page->OffsetToAddress(offset) < next_page->mc_relocation_top); | 1496 ASSERT(next_page->OffsetToAddress(offset) < next_page->mc_relocation_top); |
| 1599 | 1497 |
| 1600 return next_page->OffsetToAddress(offset); | 1498 return next_page->OffsetToAddress(offset); |
| 1601 } | 1499 } |
| 1602 | 1500 |
| 1603 | 1501 |
| 1604 #ifdef DEBUG | 1502 // ------------------------------------------------------------------------- |
| 1605 void MarkCompactCollector::VerifyHeapAfterUpdatingPointers() { | |
| 1606 ASSERT(state_ == UPDATE_POINTERS); | |
| 1607 | |
| 1608 AllSpaces spaces; | |
| 1609 while (Space* space = spaces.next()) space->Verify(); | |
| 1610 PagedSpaces paged_spaces; | |
| 1611 while (PagedSpace* space = paged_spaces.next()) VerifyPageHeaders(space); | |
| 1612 } | |
| 1613 #endif | |
| 1614 | |
| 1615 | |
| 1616 // ---------------------------------------------------------------------------- | |
| 1617 // Phase 4: Relocate objects | 1503 // Phase 4: Relocate objects |
| 1618 | 1504 |
| 1619 void MarkCompactCollector::RelocateObjects() { | 1505 void MarkCompactCollector::RelocateObjects() { |
| 1620 #ifdef DEBUG | 1506 #ifdef DEBUG |
| 1621 ASSERT(state_ == UPDATE_POINTERS); | 1507 ASSERT(state_ == UPDATE_POINTERS); |
| 1622 state_ = RELOCATE_OBJECTS; | 1508 state_ = RELOCATE_OBJECTS; |
| 1623 #endif | 1509 #endif |
| 1624 // Relocates objects, always relocate map objects first. Relocating | 1510 // Relocates objects, always relocate map objects first. Relocating |
| 1625 // objects in other space relies on map objects to get object size. | 1511 // objects in other space relies on map objects to get object size. |
| 1626 int live_maps = IterateLiveObjects(Heap::map_space(), &RelocateMapObject); | 1512 int live_maps = IterateLiveObjects(Heap::map_space(), &RelocateMapObject); |
| (...skipping 30 matching lines...) |
| 1657 Heap::new_space()->set_age_mark(mark); | 1543 Heap::new_space()->set_age_mark(mark); |
| 1658 | 1544 |
| 1659 Heap::new_space()->MCCommitRelocationInfo(); | 1545 Heap::new_space()->MCCommitRelocationInfo(); |
| 1660 #ifdef DEBUG | 1546 #ifdef DEBUG |
| 1661 // It is safe to write to the remembered sets as remembered sets on a | 1547 // It is safe to write to the remembered sets as remembered sets on a |
| 1662 // page-by-page basis after committing the m-c forwarding pointer. | 1548 // page-by-page basis after committing the m-c forwarding pointer. |
| 1663 Page::set_rset_state(Page::IN_USE); | 1549 Page::set_rset_state(Page::IN_USE); |
| 1664 #endif | 1550 #endif |
| 1665 PagedSpaces spaces; | 1551 PagedSpaces spaces; |
| 1666 while (PagedSpace* space = spaces.next()) space->MCCommitRelocationInfo(); | 1552 while (PagedSpace* space = spaces.next()) space->MCCommitRelocationInfo(); |
| 1667 | |
| 1668 #ifdef DEBUG | |
| 1669 if (FLAG_verify_global_gc) VerifyHeapAfterRelocatingObjects(); | |
| 1670 #endif | |
| 1671 } | 1553 } |
| 1672 | 1554 |
| 1673 | 1555 |
| 1674 int MarkCompactCollector::ConvertCodeICTargetToAddress(HeapObject* obj) { | 1556 int MarkCompactCollector::ConvertCodeICTargetToAddress(HeapObject* obj) { |
| 1675 if (obj->IsCode()) { | 1557 if (obj->IsCode()) { |
| 1676 Code::cast(obj)->ConvertICTargetsFromObjectToAddress(); | 1558 Code::cast(obj)->ConvertICTargetsFromObjectToAddress(); |
| 1677 } | 1559 } |
| 1678 return obj->Size(); | 1560 return obj->Size(); |
| 1679 } | 1561 } |
| 1680 | 1562 |
| (...skipping 116 matching lines...) |
| 1797 // may also update inline cache target. | 1679 // may also update inline cache target. |
| 1798 Code::cast(copied_to)->Relocate(new_addr - old_addr); | 1680 Code::cast(copied_to)->Relocate(new_addr - old_addr); |
| 1800 // Notify the logger that compiled code has moved. | 1682 // Notify the logger that compiled code has moved. |
| 1800 LOG(CodeMoveEvent(old_addr, new_addr)); | 1682 LOG(CodeMoveEvent(old_addr, new_addr)); |
| 1801 } | 1683 } |
| 1802 | 1684 |
| 1803 return obj_size; | 1685 return obj_size; |
| 1804 } | 1686 } |
| 1805 | 1687 |
| 1806 | 1688 |
| 1807 #ifdef DEBUG | |
| 1808 class VerifyCopyingVisitor: public ObjectVisitor { | |
| 1809 public: | |
| 1810 void VisitPointers(Object** start, Object** end) { | |
| 1811 for (Object** p = start; p < end; p++) { | |
| 1812 MarkCompactCollector::VerifyCopyingObjects(p); | |
| 1813 } | |
| 1814 } | |
| 1815 }; | |
| 1816 | |
| 1817 #endif | |
| 1818 | |
| 1819 int MarkCompactCollector::RelocateNewObject(HeapObject* obj) { | 1689 int MarkCompactCollector::RelocateNewObject(HeapObject* obj) { |
| 1820 int obj_size = obj->Size(); | 1690 int obj_size = obj->Size(); |
| 1821 | 1691 |
| 1822 // Get forwarding address | 1692 // Get forwarding address |
| 1823 Address old_addr = obj->address(); | 1693 Address old_addr = obj->address(); |
| 1824 int offset = Heap::new_space()->ToSpaceOffsetForAddress(old_addr); | 1694 int offset = Heap::new_space()->ToSpaceOffsetForAddress(old_addr); |
| 1825 | 1695 |
| 1826 Address new_addr = | 1696 Address new_addr = |
| 1827 Memory::Address_at(Heap::new_space()->FromSpaceLow() + offset); | 1697 Memory::Address_at(Heap::new_space()->FromSpaceLow() + offset); |
| 1828 | 1698 |
| 1829 if (Heap::new_space()->FromSpaceContains(new_addr)) { | 1699 if (Heap::new_space()->FromSpaceContains(new_addr)) { |
| 1830 ASSERT(Heap::new_space()->FromSpaceOffsetForAddress(new_addr) <= | 1700 ASSERT(Heap::new_space()->FromSpaceOffsetForAddress(new_addr) <= |
| 1831 Heap::new_space()->ToSpaceOffsetForAddress(old_addr)); | 1701 Heap::new_space()->ToSpaceOffsetForAddress(old_addr)); |
| 1832 } else { | 1702 } else { |
| 1833 OldSpace* target_space = Heap::TargetSpace(obj); | 1703 OldSpace* target_space = Heap::TargetSpace(obj); |
| 1834 ASSERT(target_space == Heap::old_pointer_space() || | 1704 ASSERT(target_space == Heap::old_pointer_space() || |
| 1835 target_space == Heap::old_data_space()); | 1705 target_space == Heap::old_data_space()); |
| 1836 target_space->MCAdjustRelocationEnd(new_addr, obj_size); | 1706 target_space->MCAdjustRelocationEnd(new_addr, obj_size); |
| 1837 } | 1707 } |
| 1838 | 1708 |
| 1839 // New and old addresses cannot overlap. | 1709 // New and old addresses cannot overlap. |
| 1840 memcpy(reinterpret_cast<void*>(new_addr), | 1710 memcpy(reinterpret_cast<void*>(new_addr), |
| 1841 reinterpret_cast<void*>(old_addr), | 1711 reinterpret_cast<void*>(old_addr), |
| 1842 obj_size); | 1712 obj_size); |
| 1843 | 1713 |
| 1844 #ifdef DEBUG | 1714 #ifdef DEBUG |
| 1845 if (FLAG_gc_verbose) { | 1715 if (FLAG_gc_verbose) { |
| 1846 PrintF("relocate %p -> %p\n", old_addr, new_addr); | 1716 PrintF("relocate %p -> %p\n", old_addr, new_addr); |
| 1847 } | 1717 } |
| 1848 if (FLAG_verify_global_gc) { | |
| 1849 VerifyCopyingVisitor v; | |
| 1850 HeapObject* copied_to = HeapObject::FromAddress(new_addr); | |
| 1851 copied_to->Iterate(&v); | |
| 1852 } | |
| 1853 #endif | 1718 #endif |
| 1854 | 1719 |
| 1855 return obj_size; | 1720 return obj_size; |
| 1856 } | 1721 } |
| 1857 | 1722 |
| 1858 | 1723 |
| 1859 #ifdef DEBUG | 1724 // ------------------------------------------------------------------------- |
| 1860 void MarkCompactCollector::VerifyHeapAfterRelocatingObjects() { | |
| 1861 ASSERT(state_ == RELOCATE_OBJECTS); | |
| 1862 | |
| 1863 Heap::new_space()->Verify(); | |
| 1864 PagedSpaces spaces; | |
| 1865 while (PagedSpace* space = spaces.next()) { | |
| 1866 space->Verify(); | |
| 1867 PageIterator it(space, PageIterator::PAGES_IN_USE); | |
| 1868 while (it.has_next()) { | |
| 1869 Page* p = it.next(); | |
| 1870 ASSERT_PAGE_OFFSET(p->Offset(p->AllocationTop())); | |
| 1871 } | |
| 1872 } | |
| 1873 } | |
| 1874 #endif | |
| 1875 | |
| 1876 | |
| 1877 #ifdef DEBUG | |
| 1878 void MarkCompactCollector::VerifyCopyingObjects(Object** p) { | |
| 1879 if (!(*p)->IsHeapObject()) return; | |
| 1880 ASSERT(!Heap::InToSpace(*p)); | |
| 1881 } | |
| 1882 #endif // DEBUG | |
| 1883 | |
| 1884 | |
| 1885 // ----------------------------------------------------------------------------- | |
| 1886 // Phase 5: rebuild remembered sets | 1725 // Phase 5: rebuild remembered sets |
| 1887 | 1726 |
| 1888 void MarkCompactCollector::RebuildRSets() { | 1727 void MarkCompactCollector::RebuildRSets() { |
| 1889 #ifdef DEBUG | 1728 #ifdef DEBUG |
| 1890 ASSERT(state_ == RELOCATE_OBJECTS); | 1729 ASSERT(state_ == RELOCATE_OBJECTS); |
| 1891 state_ = REBUILD_RSETS; | 1730 state_ = REBUILD_RSETS; |
| 1892 #endif | 1731 #endif |
| 1893 Heap::RebuildRSets(); | 1732 Heap::RebuildRSets(); |
| 1894 } | 1733 } |
| 1895 | 1734 |
| 1896 } } // namespace v8::internal | 1735 } } // namespace v8::internal |