| OLD | NEW |
| 1 // Copyright 2009 the V8 project authors. All rights reserved. | 1 // Copyright 2009 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 583 matching lines...) |
| 594 // nodes) we want to verify them as well. | 594 // nodes) we want to verify them as well. |
| 595 object->Iterate(&v); | 595 object->Iterate(&v); |
| 596 } | 596 } |
| 597 } | 597 } |
| 598 | 598 |
| 599 HeapObjectIterator data_it(Heap::old_data_space()); | 599 HeapObjectIterator data_it(Heap::old_data_space()); |
| 600 while (data_it.has_next()) data_it.next()->Iterate(&v); | 600 while (data_it.has_next()) data_it.next()->Iterate(&v); |
| 601 } | 601 } |
| 602 #endif | 602 #endif |
| 603 | 603 |
| 604 |
| 604 void Heap::Scavenge() { | 605 void Heap::Scavenge() { |
| 605 #ifdef DEBUG | 606 #ifdef DEBUG |
| 606 if (FLAG_enable_slow_asserts) VerifyNonPointerSpacePointers(); | 607 if (FLAG_enable_slow_asserts) VerifyNonPointerSpacePointers(); |
| 607 #endif | 608 #endif |
| 608 | 609 |
| 609 gc_state_ = SCAVENGE; | 610 gc_state_ = SCAVENGE; |
| 610 | 611 |
| 611 // Implements Cheney's copying algorithm | 612 // Implements Cheney's copying algorithm |
| 612 LOG(ResourceEvent("scavenge", "begin")); | 613 LOG(ResourceEvent("scavenge", "begin")); |
| 613 | 614 |
| (...skipping 46 matching lines...) |
| 660 GlobalHandles::IterateWeakRoots(&scavenge_visitor); | 661 GlobalHandles::IterateWeakRoots(&scavenge_visitor); |
| 661 | 662 |
| 662 #ifdef V8_HOST_ARCH_64_BIT | 663 #ifdef V8_HOST_ARCH_64_BIT |
| 663 // TODO(X64): Make this go away again. We currently disable RSets for | 664 // TODO(X64): Make this go away again. We currently disable RSets for |
| 664 // 64-bit-mode. | 665 // 64-bit-mode. |
| 665 HeapObjectIterator old_pointer_iterator(old_pointer_space_); | 666 HeapObjectIterator old_pointer_iterator(old_pointer_space_); |
| 666 while (old_pointer_iterator.has_next()) { | 667 while (old_pointer_iterator.has_next()) { |
| 667 HeapObject* heap_object = old_pointer_iterator.next(); | 668 HeapObject* heap_object = old_pointer_iterator.next(); |
| 668 heap_object->Iterate(&scavenge_visitor); | 669 heap_object->Iterate(&scavenge_visitor); |
| 669 } | 670 } |
| 670 | |
| 671 HeapObjectIterator cell_iterator(cell_space_); | |
| 672 while (cell_iterator.has_next()) { | |
| 673 cell_iterator.next()->Iterate(&scavenge_visitor); | |
| 674 } | |
| 675 | |
| 676 HeapObjectIterator map_iterator(map_space_); | 671 HeapObjectIterator map_iterator(map_space_); |
| 677 while (map_iterator.has_next()) { | 672 while (map_iterator.has_next()) { |
| 678 HeapObject* heap_object = map_iterator.next(); | 673 HeapObject* heap_object = map_iterator.next(); |
| 679 heap_object->Iterate(&scavenge_visitor); | 674 heap_object->Iterate(&scavenge_visitor); |
| 680 } | 675 } |
| 681 LargeObjectIterator lo_iterator(lo_space_); | 676 LargeObjectIterator lo_iterator(lo_space_); |
| 682 while (lo_iterator.has_next()) { | 677 while (lo_iterator.has_next()) { |
| 683 HeapObject* heap_object = lo_iterator.next(); | 678 HeapObject* heap_object = lo_iterator.next(); |
| 684 if (heap_object->IsFixedArray()) { | 679 if (heap_object->IsFixedArray()) { |
| 685 heap_object->Iterate(&scavenge_visitor); | 680 heap_object->Iterate(&scavenge_visitor); |
| 686 } | 681 } |
| 687 } | 682 } |
| 688 #else // !defined(V8_HOST_ARCH_64_BIT) | 683 #else // !defined(V8_HOST_ARCH_64_BIT) |
| 689 // Copy objects reachable from the old generation. By definition, | 684 // Copy objects reachable from the old generation. By definition, |
| 690 // there are no intergenerational pointers in code or data spaces. | 685 // there are no intergenerational pointers in code or data spaces. |
| 691 IterateRSet(old_pointer_space_, &ScavengePointer); | 686 IterateRSet(old_pointer_space_, &ScavengePointer); |
| 692 IterateRSet(cell_space_, &ScavengePointer); | |
| 693 IterateRSet(map_space_, &ScavengePointer); | 687 IterateRSet(map_space_, &ScavengePointer); |
| 694 lo_space_->IterateRSet(&ScavengePointer); | 688 lo_space_->IterateRSet(&ScavengePointer); |
| 695 #endif | 689 #endif |
| 696 | 690 |
| 691 // Copy objects reachable from cells by scavenging cell values directly. |
| 692 HeapObjectIterator cell_iterator(cell_space_); |
| 693 while (cell_iterator.has_next()) { |
| 694 HeapObject* cell = cell_iterator.next(); |
| 695 if (cell->IsJSGlobalPropertyCell()) { |
| 696 Address value_address = |
| 697 reinterpret_cast<Address>(cell) + |
| 698 (JSGlobalPropertyCell::kValueOffset - kHeapObjectTag); |
| 699 scavenge_visitor.VisitPointer(reinterpret_cast<Object**>(value_address)); |
| 700 } |
| 701 } |
| 702 |
| 697 do { | 703 do { |
| 698 ASSERT(new_space_front <= new_space_.top()); | 704 ASSERT(new_space_front <= new_space_.top()); |
| 699 | 705 |
| 700 // The addresses new_space_front and new_space_.top() define a | 706 // The addresses new_space_front and new_space_.top() define a |
| 701 // queue of unprocessed copied objects. Process them until the | 707 // queue of unprocessed copied objects. Process them until the |
| 702 // queue is empty. | 708 // queue is empty. |
| 703 while (new_space_front < new_space_.top()) { | 709 while (new_space_front < new_space_.top()) { |
| 704 HeapObject* object = HeapObject::FromAddress(new_space_front); | 710 HeapObject* object = HeapObject::FromAddress(new_space_front); |
| 705 object->Iterate(&scavenge_visitor); | 711 object->Iterate(&scavenge_visitor); |
| 706 new_space_front += object->Size(); | 712 new_space_front += object->Size(); |
| (...skipping 118 matching lines...) |
| 825 // pointers. | 831 // pointers. |
| 826 UpdateRSetVisitor v; | 832 UpdateRSetVisitor v; |
| 827 obj->Iterate(&v); | 833 obj->Iterate(&v); |
| 828 } | 834 } |
| 829 #endif // V8_HOST_ARCH_64_BIT | 835 #endif // V8_HOST_ARCH_64_BIT |
| 830 return obj->Size(); | 836 return obj->Size(); |
| 831 } | 837 } |
| 832 | 838 |
| 833 | 839 |
| 834 void Heap::RebuildRSets() { | 840 void Heap::RebuildRSets() { |
| 835 // By definition, we do not care about remembered set bits in code or data | 841 // By definition, we do not care about remembered set bits in code, |
| 836 // spaces. | 842 // data, or cell spaces. |
| 837 map_space_->ClearRSet(); | 843 map_space_->ClearRSet(); |
| 838 RebuildRSets(map_space_); | 844 RebuildRSets(map_space_); |
| 839 | 845 |
| 840 old_pointer_space_->ClearRSet(); | 846 old_pointer_space_->ClearRSet(); |
| 841 RebuildRSets(old_pointer_space_); | 847 RebuildRSets(old_pointer_space_); |
| 842 | 848 |
| 843 cell_space_->ClearRSet(); | |
| 844 RebuildRSets(cell_space_); | |
| 845 | |
| 846 Heap::lo_space_->ClearRSet(); | 849 Heap::lo_space_->ClearRSet(); |
| 847 RebuildRSets(lo_space_); | 850 RebuildRSets(lo_space_); |
| 848 } | 851 } |
| 849 | 852 |
| 850 | 853 |
| 851 void Heap::RebuildRSets(PagedSpace* space) { | 854 void Heap::RebuildRSets(PagedSpace* space) { |
| 852 HeapObjectIterator it(space); | 855 HeapObjectIterator it(space); |
| 853 while (it.has_next()) Heap::UpdateRSet(it.next()); | 856 while (it.has_next()) Heap::UpdateRSet(it.next()); |
| 854 } | 857 } |
| 855 | 858 |
| (...skipping 2001 matching lines...) |
| 2857 object_address += kPointerSize * kBitsPerInt; | 2860 object_address += kPointerSize * kBitsPerInt; |
| 2858 } | 2861 } |
| 2859 rset_address += kIntSize; | 2862 rset_address += kIntSize; |
| 2860 } | 2863 } |
| 2861 return set_bits_count; | 2864 return set_bits_count; |
| 2862 } | 2865 } |
| 2863 | 2866 |
| 2864 | 2867 |
| 2865 void Heap::IterateRSet(PagedSpace* space, ObjectSlotCallback copy_object_func) { | 2868 void Heap::IterateRSet(PagedSpace* space, ObjectSlotCallback copy_object_func) { |
| 2866 ASSERT(Page::is_rset_in_use()); | 2869 ASSERT(Page::is_rset_in_use()); |
| 2867 ASSERT(space == old_pointer_space_ || | 2870 ASSERT(space == old_pointer_space_ || space == map_space_); |
| 2868 space == cell_space_ || | |
| 2869 space == map_space_); | |
| 2870 | 2871 |
| 2871 static void* paged_rset_histogram = StatsTable::CreateHistogram( | 2872 static void* paged_rset_histogram = StatsTable::CreateHistogram( |
| 2872 "V8.RSetPaged", | 2873 "V8.RSetPaged", |
| 2873 0, | 2874 0, |
| 2874 Page::kObjectAreaSize / kPointerSize, | 2875 Page::kObjectAreaSize / kPointerSize, |
| 2875 30); | 2876 30); |
| 2876 | 2877 |
| 2877 PageIterator it(space, PageIterator::PAGES_IN_USE); | 2878 PageIterator it(space, PageIterator::PAGES_IN_USE); |
| 2878 while (it.has_next()) { | 2879 while (it.has_next()) { |
| 2879 Page* page = it.next(); | 2880 Page* page = it.next(); |
| (...skipping 801 matching lines...) |
| 3681 #ifdef DEBUG | 3682 #ifdef DEBUG |
| 3682 bool Heap::GarbageCollectionGreedyCheck() { | 3683 bool Heap::GarbageCollectionGreedyCheck() { |
| 3683 ASSERT(FLAG_gc_greedy); | 3684 ASSERT(FLAG_gc_greedy); |
| 3684 if (Bootstrapper::IsActive()) return true; | 3685 if (Bootstrapper::IsActive()) return true; |
| 3685 if (disallow_allocation_failure()) return true; | 3686 if (disallow_allocation_failure()) return true; |
| 3686 return CollectGarbage(0, NEW_SPACE); | 3687 return CollectGarbage(0, NEW_SPACE); |
| 3687 } | 3688 } |
| 3688 #endif | 3689 #endif |
| 3689 | 3690 |
| 3690 } } // namespace v8::internal | 3691 } } // namespace v8::internal |
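Note on the new cell-scavenging hunk (new lines 691-701): cell space no longer participates in remembered sets, so the scavenger visits each JSGlobalPropertyCell's value slot directly. The only subtle part is the address arithmetic. V8 heap object pointers carry a low tag bit (kHeapObjectTag == 1), so the raw address of a field at offset kValueOffset is tagged_pointer + (kValueOffset - kHeapObjectTag). Below is a minimal standalone sketch of that arithmetic; the Cell layout and kValueOffset here are hypothetical stand-ins, not V8's real JSGlobalPropertyCell.

    // Standalone sketch, not V8 source: shows why untagging and the
    // field offset fold into a single addition.
    #include <cassert>
    #include <cstdint>
    #include <cstdio>

    typedef uint8_t* Address;

    const int kHeapObjectTag = 1;          // low bit set on heap object pointers
    const int kPointerSize = sizeof(void*);

    // Hypothetical two-word cell: a header word, then the value slot.
    struct Cell {
      void* header;
      void* value;
    };
    const int kValueOffset = kPointerSize; // offset of 'value' inside the object

    int main() {
      Cell cell = { 0, 0 };

      // A "tagged" pointer: the raw address with the tag bit set.
      Address tagged = reinterpret_cast<Address>(&cell) + kHeapObjectTag;

      // Untag and offset in one step, as the scavenge loop above does.
      Address value_address = tagged + (kValueOffset - kHeapObjectTag);
      void** slot = reinterpret_cast<void**>(value_address);

      assert(slot == &cell.value);         // lands exactly on the value slot
      std::printf("value slot at %p\n", static_cast<void*>(slot));
      return 0;
    }

This mirrors the RebuildRSets and IterateRSet changes later in the diff: once cell space stops using remembered sets, intergenerational pointers in cells must be found some other way, and visiting each cell's single value slot directly is cheap because cells are fixed-size.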