OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/heap/heap.h" | 5 #include "src/heap/heap.h" |
6 | 6 |
7 #include "src/accessors.h" | 7 #include "src/accessors.h" |
8 #include "src/api.h" | 8 #include "src/api.h" |
9 #include "src/assembler-inl.h" | 9 #include "src/assembler-inl.h" |
10 #include "src/ast/context-slot-cache.h" | 10 #include "src/ast/context-slot-cache.h" |
(...skipping 1697 matching lines...)
1708 // objects lie between a 'front' mark and a 'rear' mark that is | 1708 // objects lie between a 'front' mark and a 'rear' mark that is |
1709 // updated as a side effect of promoting an object. | 1709 // updated as a side effect of promoting an object. |
1710 // | 1710 // |
1711 // There is guaranteed to be enough room at the top of the to space | 1711 // There is guaranteed to be enough room at the top of the to space |
1712 // for the addresses of promoted objects: every object promoted | 1712 // for the addresses of promoted objects: every object promoted |
1713 // frees up its size in bytes from the top of the new space, and | 1713 // frees up its size in bytes from the top of the new space, and |
1714 // objects are at least one pointer in size. | 1714 // objects are at least one pointer in size. |
1715 Address new_space_front = new_space_->ToSpaceStart(); | 1715 Address new_space_front = new_space_->ToSpaceStart(); |
1716 promotion_queue_.Initialize(); | 1716 promotion_queue_.Initialize(); |
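[Annotation] The capacity claim in the comment above can be checked with a little arithmetic. A minimal sketch, assuming only the stated invariant (every promoted object is at least one pointer in size); the names here are illustrative, not V8 internals:

```cpp
#include <cassert>
#include <cstddef>

constexpr std::size_t kPointerSize = sizeof(void*);

// Hypothetical bookkeeping check: promoting `count` objects totalling
// `bytes` frees `bytes` at the top of to-space, and recording their
// addresses there costs `count * kPointerSize`.
bool PromotedAddressesFit(std::size_t bytes, std::size_t count) {
  // Every object is at least one pointer in size...
  assert(bytes >= count * kPointerSize);
  // ...so the space freed by promotion always covers the address entries.
  return count * kPointerSize <= bytes;
}
```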
1717 | 1717 |
1718 ScavengeVisitor scavenge_visitor(this); | 1718 RootScavengeVisitor root_scavenge_visitor(this); |
1719 | 1719 |
1720 isolate()->global_handles()->IdentifyWeakUnmodifiedObjects( | 1720 isolate()->global_handles()->IdentifyWeakUnmodifiedObjects( |
1721 &IsUnmodifiedHeapObject); | 1721 &IsUnmodifiedHeapObject); |
1722 | 1722 |
1723 { | 1723 { |
1724 // Copy roots. | 1724 // Copy roots. |
1725 TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_ROOTS); | 1725 TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_ROOTS); |
1726 IterateRoots(&scavenge_visitor, VISIT_ALL_IN_SCAVENGE); | 1726 IterateRoots(&root_scavenge_visitor, VISIT_ALL_IN_SCAVENGE); |
1727 } | 1727 } |
1728 | 1728 |
1729 { | 1729 { |
1730 // Copy objects reachable from the old generation. | 1730 // Copy objects reachable from the old generation. |
1731 TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_OLD_TO_NEW_POINTERS); | 1731 TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_OLD_TO_NEW_POINTERS); |
1732 RememberedSet<OLD_TO_NEW>::Iterate( | 1732 RememberedSet<OLD_TO_NEW>::Iterate( |
1733 this, SYNCHRONIZED, [this](Address addr) { | 1733 this, SYNCHRONIZED, [this](Address addr) { |
1734 return Scavenger::CheckAndScavengeObject(this, addr); | 1734 return Scavenger::CheckAndScavengeObject(this, addr); |
1735 }); | 1735 }); |
1736 | 1736 |
1737 RememberedSet<OLD_TO_NEW>::IterateTyped( | 1737 RememberedSet<OLD_TO_NEW>::IterateTyped( |
1738 this, SYNCHRONIZED, | 1738 this, SYNCHRONIZED, |
1739 [this](SlotType type, Address host_addr, Address addr) { | 1739 [this](SlotType type, Address host_addr, Address addr) { |
1740 return UpdateTypedSlotHelper::UpdateTypedSlot( | 1740 return UpdateTypedSlotHelper::UpdateTypedSlot( |
1741 isolate(), type, addr, [this](Object** addr) { | 1741 isolate(), type, addr, [this](Object** addr) { |
1742 // We expect that objects referenced by code are long living. | 1742 // We expect that objects referenced by code are long living. |
1743 // If we do not force promotion, then we need to clear | 1743 // If we do not force promotion, then we need to clear |
1744 // old_to_new slots in dead code objects after mark-compact. | 1744 // old_to_new slots in dead code objects after mark-compact. |
1745 return Scavenger::CheckAndScavengeObject( | 1745 return Scavenger::CheckAndScavengeObject( |
1746 this, reinterpret_cast<Address>(addr)); | 1746 this, reinterpret_cast<Address>(addr)); |
1747 }); | 1747 }); |
1748 }); | 1748 }); |
1749 } | 1749 } |
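[Annotation] For readers unfamiliar with the remembered-set contract these lambdas satisfy: the callback's return value reports whether the slot should stay in the OLD_TO_NEW set. A conceptual sketch of that contract, with `in_new_space` and `scavenge` as assumed stand-ins for the real heap queries (this is not the actual `Scavenger::CheckAndScavengeObject`):

```cpp
enum SlotCallbackResult { KEEP_SLOT, REMOVE_SLOT };

// `in_new_space(p)` and `scavenge(p)` are assumed callbacks: a young-object
// predicate and a copy-or-promote step returning the new address.
template <typename InNewSpace, typename Scavenge>
SlotCallbackResult CheckAndScavengeSlot(void** slot, InNewSpace in_new_space,
                                        Scavenge scavenge) {
  void* target = *slot;
  if (!in_new_space(target)) return REMOVE_SLOT;  // nothing young behind it
  *slot = scavenge(target);                       // update the host's slot
  // Keep the record only while the target stays young; once promoted, the
  // old-to-new entry is no longer needed.
  return in_new_space(*slot) ? KEEP_SLOT : REMOVE_SLOT;
}
```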
1750 | 1750 |
1751 { | 1751 { |
1752 TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_WEAK); | 1752 TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_WEAK); |
1753 // Copy objects reachable from the encountered weak collections list. | 1753 IterateEncounteredWeakCollections(&root_scavenge_visitor); |
1754 scavenge_visitor.VisitPointer(&encountered_weak_collections_); | |
1755 } | 1754 } |
1756 | 1755 |
1757 { | 1756 { |
1758 // Copy objects reachable from the code flushing candidates list. | 1757 // Copy objects reachable from the code flushing candidates list. |
1759 TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_CODE_FLUSH_CANDIDATES); | 1758 TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_CODE_FLUSH_CANDIDATES); |
1760 MarkCompactCollector* collector = mark_compact_collector(); | 1759 MarkCompactCollector* collector = mark_compact_collector(); |
1761 if (collector->is_code_flushing_enabled()) { | 1760 if (collector->is_code_flushing_enabled()) { |
1762 collector->code_flusher()->IteratePointersToFromSpace(&scavenge_visitor); | 1761 collector->code_flusher()->IteratePointersToFromSpace( |
| 1762 &root_scavenge_visitor); |
1763 } | 1763 } |
1764 } | 1764 } |
1765 | 1765 |
1766 { | 1766 { |
1767 TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_SEMISPACE); | 1767 TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_SEMISPACE); |
1768 new_space_front = DoScavenge(&scavenge_visitor, new_space_front); | 1768 new_space_front = DoScavenge(new_space_front); |
1769 } | 1769 } |
1770 | 1770 |
1771 isolate()->global_handles()->MarkNewSpaceWeakUnmodifiedObjectsPending( | 1771 isolate()->global_handles()->MarkNewSpaceWeakUnmodifiedObjectsPending( |
1772 &IsUnscavengedHeapObject); | 1772 &IsUnscavengedHeapObject); |
1773 | 1773 |
1774 isolate() | 1774 isolate() |
1775 ->global_handles() | 1775 ->global_handles() |
1776 ->IterateNewSpaceWeakUnmodifiedRoots< | 1776 ->IterateNewSpaceWeakUnmodifiedRoots< |
1777 GlobalHandles::HANDLE_PHANTOM_NODES_VISIT_OTHERS>(&scavenge_visitor); | 1777 GlobalHandles::HANDLE_PHANTOM_NODES_VISIT_OTHERS>( |
1778 new_space_front = DoScavenge(&scavenge_visitor, new_space_front); | 1778 &root_scavenge_visitor); |
| 1779 new_space_front = DoScavenge(new_space_front); |
1779 | 1780 |
1780 UpdateNewSpaceReferencesInExternalStringTable( | 1781 UpdateNewSpaceReferencesInExternalStringTable( |
1781 &UpdateNewSpaceReferenceInExternalStringTableEntry); | 1782 &UpdateNewSpaceReferenceInExternalStringTableEntry); |
1782 | 1783 |
1783 promotion_queue_.Destroy(); | 1784 promotion_queue_.Destroy(); |
1784 | 1785 |
1785 incremental_marking()->UpdateMarkingDequeAfterScavenge(); | 1786 incremental_marking()->UpdateMarkingDequeAfterScavenge(); |
1786 | 1787 |
1787 ScavengeWeakObjectRetainer weak_object_retainer(this); | 1788 ScavengeWeakObjectRetainer weak_object_retainer(this); |
1788 ProcessYoungWeakReferences(&weak_object_retainer); | 1789 ProcessYoungWeakReferences(&weak_object_retainer); |
(...skipping 167 matching lines...)
1956 old_generation_survival_rate); | 1957 old_generation_survival_rate); |
1957 } | 1958 } |
1958 } | 1959 } |
1959 } | 1960 } |
1960 | 1961 |
1961 | 1962 |
1962 void Heap::VisitExternalResources(v8::ExternalResourceVisitor* visitor) { | 1963 void Heap::VisitExternalResources(v8::ExternalResourceVisitor* visitor) { |
1963 DisallowHeapAllocation no_allocation; | 1964 DisallowHeapAllocation no_allocation; |
1964 // All external strings are listed in the external string table. | 1965 // All external strings are listed in the external string table. |
1965 | 1966 |
1966 class ExternalStringTableVisitorAdapter : public ObjectVisitor { | 1967 class ExternalStringTableVisitorAdapter : public RootVisitor { |
1967 public: | 1968 public: |
1968 explicit ExternalStringTableVisitorAdapter( | 1969 explicit ExternalStringTableVisitorAdapter( |
1969 v8::ExternalResourceVisitor* visitor) | 1970 v8::ExternalResourceVisitor* visitor) |
1970 : visitor_(visitor) {} | 1971 : visitor_(visitor) {} |
1971 virtual void VisitPointers(Object** start, Object** end) { | 1972 virtual void VisitRootPointers(Root root, Object** start, Object** end) { |
1972 for (Object** p = start; p < end; p++) { | 1973 for (Object** p = start; p < end; p++) { |
1973 DCHECK((*p)->IsExternalString()); | 1974 DCHECK((*p)->IsExternalString()); |
1974 visitor_->VisitExternalString( | 1975 visitor_->VisitExternalString( |
1975 Utils::ToLocal(Handle<String>(String::cast(*p)))); | 1976 Utils::ToLocal(Handle<String>(String::cast(*p)))); |
1976 } | 1977 } |
1977 } | 1978 } |
1978 | 1979 |
1979 private: | 1980 private: |
1980 v8::ExternalResourceVisitor* visitor_; | 1981 v8::ExternalResourceVisitor* visitor_; |
1981 } external_string_table_visitor(visitor); | 1982 } external_string_table_visitor(visitor); |
1982 | 1983 |
1983 external_string_table_.IterateAll(&external_string_table_visitor); | 1984 external_string_table_.IterateAll(&external_string_table_visitor); |
1984 } | 1985 } |
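[Annotation] The recurring change in this CL is the split of root iteration out of `ObjectVisitor` into a dedicated `RootVisitor`, whose callbacks carry a `Root` tag naming the category of root being visited. A sketch of the assumed shape of that interface, showing only the methods exercised in this diff (the real declaration lives elsewhere in the tree):

```cpp
class RootVisitor {
 public:
  virtual ~RootVisitor() {}

  // Visit a contiguous run of root slots, tagged with their origin
  // (Root::kStringTable, Root::kStrongRootList, ...).
  virtual void VisitRootPointers(Root root, Object** start, Object** end) = 0;

  // Single-slot convenience, assumed to forward to the range version.
  virtual void VisitRootPointer(Root root, Object** p) {
    VisitRootPointers(root, p, p + 1);
  }

  // Checkpoint used by the serializer/deserializer; a no-op elsewhere.
  virtual void Synchronize(VisitorSynchronization::SyncTag tag) {}
};
```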
1985 | 1986 |
1986 Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor, | 1987 Address Heap::DoScavenge(Address new_space_front) { |
1987 Address new_space_front) { | |
1988 do { | 1988 do { |
1989 SemiSpace::AssertValidRange(new_space_front, new_space_->top()); | 1989 SemiSpace::AssertValidRange(new_space_front, new_space_->top()); |
1990 // The addresses new_space_front and new_space_.top() define a | 1990 // The addresses new_space_front and new_space_.top() define a |
1991 // queue of unprocessed copied objects. Process them until the | 1991 // queue of unprocessed copied objects. Process them until the |
1992 // queue is empty. | 1992 // queue is empty. |
1993 while (new_space_front != new_space_->top()) { | 1993 while (new_space_front != new_space_->top()) { |
1994 if (!Page::IsAlignedToPageSize(new_space_front)) { | 1994 if (!Page::IsAlignedToPageSize(new_space_front)) { |
1995 HeapObject* object = HeapObject::FromAddress(new_space_front); | 1995 HeapObject* object = HeapObject::FromAddress(new_space_front); |
1996 new_space_front += | 1996 new_space_front += |
1997 StaticScavengeVisitor::IterateBody(object->map(), object); | 1997 StaticScavengeVisitor::IterateBody(object->map(), object); |
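[Annotation] The front/top pair in `DoScavenge` is a classic Cheney queue: evacuated-but-unscanned objects sit between the two addresses, so no separate worklist is needed. A toy, runnable illustration using indices instead of addresses (it assumes a tree-shaped object graph, so forwarding pointers are not modeled):

```cpp
#include <cstddef>
#include <vector>

struct Node { int left = -1, right = -1; };  // child indices, -1 = none

// Copy the tree rooted at `root` in breadth-first order. `front` plays the
// role of new_space_front; to_space.size() plays the role of top().
std::vector<int> Scavenge(const std::vector<Node>& from_space, int root) {
  std::vector<int> to_space{root};
  std::size_t front = 0;             // everything before front is scanned
  while (front < to_space.size()) {  // queue of unprocessed copies
    const Node& n = from_space[to_space[front++]];
    if (n.left != -1) to_space.push_back(n.left);    // "evacuate" children,
    if (n.right != -1) to_space.push_back(n.right);  // bumping the top
  }
  return to_space;
}
```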
(...skipping 2762 matching lines...)
4760 #undef INTERNALIZED_STRING | 4760 #undef INTERNALIZED_STRING |
4761 #define STRING_TYPE(NAME, size, name, Name) case Heap::k##Name##MapRootIndex: | 4761 #define STRING_TYPE(NAME, size, name, Name) case Heap::k##Name##MapRootIndex: |
4762 STRING_TYPE_LIST(STRING_TYPE) | 4762 STRING_TYPE_LIST(STRING_TYPE) |
4763 #undef STRING_TYPE | 4763 #undef STRING_TYPE |
4764 return true; | 4764 return true; |
4765 default: | 4765 default: |
4766 return false; | 4766 return false; |
4767 } | 4767 } |
4768 } | 4768 } |
4769 | 4769 |
4770 | |
4771 #ifdef VERIFY_HEAP | 4770 #ifdef VERIFY_HEAP |
4772 void Heap::Verify() { | 4771 void Heap::Verify() { |
4773 CHECK(HasBeenSetUp()); | 4772 CHECK(HasBeenSetUp()); |
4774 HandleScope scope(isolate()); | 4773 HandleScope scope(isolate()); |
4775 | 4774 |
4776 // We have to wait here for the sweeper threads to have an iterable heap. | 4775 // We have to wait here for the sweeper threads to have an iterable heap. |
4777 mark_compact_collector()->EnsureSweepingCompleted(); | 4776 mark_compact_collector()->EnsureSweepingCompleted(); |
4778 | 4777 |
4779 VerifyPointersVisitor visitor; | 4778 VerifyPointersVisitor visitor; |
4780 IterateRoots(&visitor, VISIT_ONLY_STRONG); | 4779 IterateRoots(&visitor, VISIT_ONLY_STRONG); |
(...skipping 111 matching lines...)
4892 // TODO(hpayer): Implement a special promotion visitor that incorporates | 4891 // TODO(hpayer): Implement a special promotion visitor that incorporates |
4893 // regular visiting and IteratePromotedObjectPointers. | 4892 // regular visiting and IteratePromotedObjectPointers. |
4894 if (!was_marked_black) { | 4893 if (!was_marked_black) { |
4895 if (incremental_marking()->black_allocation()) { | 4894 if (incremental_marking()->black_allocation()) { |
4896 IncrementalMarking::MarkGrey(this, target->map()); | 4895 IncrementalMarking::MarkGrey(this, target->map()); |
4897 incremental_marking()->IterateBlackObject(target); | 4896 incremental_marking()->IterateBlackObject(target); |
4898 } | 4897 } |
4899 } | 4898 } |
4900 } | 4899 } |
4901 | 4900 |
4902 | 4901 void Heap::IterateRoots(RootVisitor* v, VisitMode mode) { |
4903 void Heap::IterateRoots(ObjectVisitor* v, VisitMode mode) { | |
4904 IterateStrongRoots(v, mode); | 4902 IterateStrongRoots(v, mode); |
4905 IterateWeakRoots(v, mode); | 4903 IterateWeakRoots(v, mode); |
4906 } | 4904 } |
4907 | 4905 |
4908 | 4906 void Heap::IterateWeakRoots(RootVisitor* v, VisitMode mode) { |
4909 void Heap::IterateWeakRoots(ObjectVisitor* v, VisitMode mode) { | 4907 v->VisitRootPointer(Root::kStringTable, reinterpret_cast<Object**>( |
4910 v->VisitPointer(reinterpret_cast<Object**>(&roots_[kStringTableRootIndex])); | 4908 &roots_[kStringTableRootIndex])); |
4911 v->Synchronize(VisitorSynchronization::kStringTable); | 4909 v->Synchronize(VisitorSynchronization::kStringTable); |
4912 if (mode != VISIT_ALL_IN_SCAVENGE && mode != VISIT_ALL_IN_SWEEP_NEWSPACE) { | 4910 if (mode != VISIT_ALL_IN_SCAVENGE && mode != VISIT_ALL_IN_SWEEP_NEWSPACE) { |
4913 // Scavenge collections have special processing for this. | 4911 // Scavenge collections have special processing for this. |
4914 external_string_table_.IterateAll(v); | 4912 external_string_table_.IterateAll(v); |
4915 } | 4913 } |
4916 v->Synchronize(VisitorSynchronization::kExternalStringsTable); | 4914 v->Synchronize(VisitorSynchronization::kExternalStringsTable); |
4917 } | 4915 } |
4918 | 4916 |
4919 | 4917 void Heap::IterateSmiRoots(RootVisitor* v) { |
4920 void Heap::IterateSmiRoots(ObjectVisitor* v) { | |
4921 // Acquire execution access since we are going to read stack limit values. | 4918 // Acquire execution access since we are going to read stack limit values. |
4922 ExecutionAccess access(isolate()); | 4919 ExecutionAccess access(isolate()); |
4923 v->VisitPointers(&roots_[kSmiRootsStart], &roots_[kRootListLength]); | 4920 v->VisitRootPointers(Root::kSmiRootList, &roots_[kSmiRootsStart], |
| 4921 &roots_[kRootListLength]); |
4924 v->Synchronize(VisitorSynchronization::kSmiRootList); | 4922 v->Synchronize(VisitorSynchronization::kSmiRootList); |
4925 } | 4923 } |
4926 | 4924 |
| 4925 void Heap::IterateEncounteredWeakCollections(RootVisitor* visitor) { |
| 4926 visitor->VisitRootPointer(Root::kWeakCollections, |
| 4927 &encountered_weak_collections_); |
| 4928 } |
| 4929 |
4927 // We cannot avoid stale handles to left-trimmed objects, but can only make | 4930 // We cannot avoid stale handles to left-trimmed objects, but can only make |
4928 // sure all handles still needed are updated. Filter out a stale pointer | 4931 // sure all handles still needed are updated. Filter out a stale pointer |
4929 // and clear the slot to allow post processing of handles (needed because | 4932 // and clear the slot to allow post processing of handles (needed because |
4930 // the sweeper might actually free the underlying page). | 4933 // the sweeper might actually free the underlying page). |
4931 class FixStaleLeftTrimmedHandlesVisitor : public ObjectVisitor { | 4934 class FixStaleLeftTrimmedHandlesVisitor : public RootVisitor { |
4932 public: | 4935 public: |
4933 explicit FixStaleLeftTrimmedHandlesVisitor(Heap* heap) : heap_(heap) { | 4936 explicit FixStaleLeftTrimmedHandlesVisitor(Heap* heap) : heap_(heap) { |
4934 USE(heap_); | 4937 USE(heap_); |
4935 } | 4938 } |
4936 | 4939 |
4937 void VisitPointer(Object** p) override { FixHandle(p); } | 4940 void VisitRootPointer(Root root, Object** p) override { FixHandle(p); } |
4938 | 4941 |
4939 void VisitPointers(Object** start, Object** end) override { | 4942 void VisitRootPointers(Root root, Object** start, Object** end) override { |
4940 for (Object** p = start; p < end; p++) FixHandle(p); | 4943 for (Object** p = start; p < end; p++) FixHandle(p); |
4941 } | 4944 } |
4942 | 4945 |
4943 private: | 4946 private: |
4944 inline void FixHandle(Object** p) { | 4947 inline void FixHandle(Object** p) { |
4945 HeapObject* current = reinterpret_cast<HeapObject*>(*p); | 4948 HeapObject* current = reinterpret_cast<HeapObject*>(*p); |
4946 if (!current->IsHeapObject()) return; | 4949 if (!current->IsHeapObject()) return; |
4947 const MapWord map_word = current->map_word(); | 4950 const MapWord map_word = current->map_word(); |
4948 if (!map_word.IsForwardingAddress() && current->IsFiller()) { | 4951 if (!map_word.IsForwardingAddress() && current->IsFiller()) { |
4949 #ifdef DEBUG | 4952 #ifdef DEBUG |
(...skipping 11 matching lines...)
4961 } | 4964 } |
4962 DCHECK(current->IsFixedArrayBase()); | 4965 DCHECK(current->IsFixedArrayBase()); |
4963 #endif // DEBUG | 4966 #endif // DEBUG |
4964 *p = nullptr; | 4967 *p = nullptr; |
4965 } | 4968 } |
4966 } | 4969 } |
4967 | 4970 |
4968 Heap* heap_; | 4971 Heap* heap_; |
4969 }; | 4972 }; |
4970 | 4973 |
4971 void Heap::IterateStrongRoots(ObjectVisitor* v, VisitMode mode) { | 4974 void Heap::IterateStrongRoots(RootVisitor* v, VisitMode mode) { |
4972 v->VisitPointers(&roots_[0], &roots_[kStrongRootListLength]); | 4975 v->VisitRootPointers(Root::kStrongRootList, &roots_[0], |
| 4976 &roots_[kStrongRootListLength]); |
4973 v->Synchronize(VisitorSynchronization::kStrongRootList); | 4977 v->Synchronize(VisitorSynchronization::kStrongRootList); |
4974 // The serializer/deserializer iterates the root list twice, first to pick | 4978 // The serializer/deserializer iterates the root list twice, first to pick |
4975 // off immortal immovable roots to make sure they end up on the first page, | 4979 // off immortal immovable roots to make sure they end up on the first page, |
4976 // and then again for the rest. | 4980 // and then again for the rest. |
4977 if (mode == VISIT_ONLY_STRONG_ROOT_LIST) return; | 4981 if (mode == VISIT_ONLY_STRONG_ROOT_LIST) return; |
4978 | 4982 |
4979 isolate_->bootstrapper()->Iterate(v); | 4983 isolate_->bootstrapper()->Iterate(v); |
4980 v->Synchronize(VisitorSynchronization::kBootstrapper); | 4984 v->Synchronize(VisitorSynchronization::kBootstrapper); |
4981 isolate_->Iterate(v); | 4985 isolate_->Iterate(v); |
4982 v->Synchronize(VisitorSynchronization::kTop); | 4986 v->Synchronize(VisitorSynchronization::kTop); |
(...skipping 49 matching lines...)
5032 isolate_->eternal_handles()->IterateAllRoots(v); | 5036 isolate_->eternal_handles()->IterateAllRoots(v); |
5033 } | 5037 } |
5034 v->Synchronize(VisitorSynchronization::kEternalHandles); | 5038 v->Synchronize(VisitorSynchronization::kEternalHandles); |
5035 | 5039 |
5036 // Iterate over pointers being held by inactive threads. | 5040 // Iterate over pointers being held by inactive threads. |
5037 isolate_->thread_manager()->Iterate(v); | 5041 isolate_->thread_manager()->Iterate(v); |
5038 v->Synchronize(VisitorSynchronization::kThreadManager); | 5042 v->Synchronize(VisitorSynchronization::kThreadManager); |
5039 | 5043 |
5040 // Iterate over other strong roots (currently only identity maps). | 5044 // Iterate over other strong roots (currently only identity maps). |
5041 for (StrongRootsList* list = strong_roots_list_; list; list = list->next) { | 5045 for (StrongRootsList* list = strong_roots_list_; list; list = list->next) { |
5042 v->VisitPointers(list->start, list->end); | 5046 v->VisitRootPointers(Root::kStrongRoots, list->start, list->end); |
5043 } | 5047 } |
5044 v->Synchronize(VisitorSynchronization::kStrongRoots); | 5048 v->Synchronize(VisitorSynchronization::kStrongRoots); |
5045 | 5049 |
5046 // Iterate over the partial snapshot cache unless serializing. | 5050 // Iterate over the partial snapshot cache unless serializing. |
5047 if (mode != VISIT_ONLY_STRONG_FOR_SERIALIZATION) { | 5051 if (mode != VISIT_ONLY_STRONG_FOR_SERIALIZATION) { |
5048 SerializerDeserializer::Iterate(isolate_, v); | 5052 SerializerDeserializer::Iterate(isolate_, v); |
5049 } | 5053 } |
5050 // We don't do a v->Synchronize call here, because in debug mode that will | 5054 // We don't do a v->Synchronize call here, because in debug mode that will |
5051 // output a flag to the snapshot. However at this point the serializer and | 5055 // output a flag to the snapshot. However at this point the serializer and |
5052 // deserializer are deliberately a little unsynchronized (see above) so the | 5056 // deserializer are deliberately a little unsynchronized (see above) so the |
(...skipping 872 matching lines...)
5925 } | 5929 } |
5926 if (new_length != length) retained_maps->SetLength(new_length); | 5930 if (new_length != length) retained_maps->SetLength(new_length); |
5927 } | 5931 } |
5928 | 5932 |
5929 void Heap::FatalProcessOutOfMemory(const char* location, bool is_heap_oom) { | 5933 void Heap::FatalProcessOutOfMemory(const char* location, bool is_heap_oom) { |
5930 v8::internal::V8::FatalProcessOutOfMemory(location, is_heap_oom); | 5934 v8::internal::V8::FatalProcessOutOfMemory(location, is_heap_oom); |
5931 } | 5935 } |
5932 | 5936 |
5933 #ifdef DEBUG | 5937 #ifdef DEBUG |
5934 | 5938 |
5935 class PrintHandleVisitor : public ObjectVisitor { | 5939 class PrintHandleVisitor : public RootVisitor { |
5936 public: | 5940 public: |
5937 void VisitPointers(Object** start, Object** end) override { | 5941 void VisitRootPointers(Root root, Object** start, Object** end) override { |
5938 for (Object** p = start; p < end; p++) | 5942 for (Object** p = start; p < end; p++) |
5939 PrintF(" handle %p to %p\n", reinterpret_cast<void*>(p), | 5943 PrintF(" handle %p to %p\n", reinterpret_cast<void*>(p), |
5940 reinterpret_cast<void*>(*p)); | 5944 reinterpret_cast<void*>(*p)); |
5941 } | 5945 } |
5942 }; | 5946 }; |
5943 | 5947 |
5944 | 5948 |
5945 void Heap::PrintHandles() { | 5949 void Heap::PrintHandles() { |
5946 PrintF("Handles:\n"); | 5950 PrintF("Handles:\n"); |
5947 PrintHandleVisitor v; | 5951 PrintHandleVisitor v; |
5948 isolate_->handle_scope_implementer()->Iterate(&v); | 5952 isolate_->handle_scope_implementer()->Iterate(&v); |
5949 } | 5953 } |
5950 | 5954 |
5951 #endif | 5955 #endif |
5952 | 5956 |
5953 class CheckHandleCountVisitor : public ObjectVisitor { | 5957 class CheckHandleCountVisitor : public RootVisitor { |
5954 public: | 5958 public: |
5955 CheckHandleCountVisitor() : handle_count_(0) {} | 5959 CheckHandleCountVisitor() : handle_count_(0) {} |
5956 ~CheckHandleCountVisitor() override { | 5960 ~CheckHandleCountVisitor() override { |
5957 CHECK(handle_count_ < HandleScope::kCheckHandleThreshold); | 5961 CHECK(handle_count_ < HandleScope::kCheckHandleThreshold); |
5958 } | 5962 } |
5959 void VisitPointers(Object** start, Object** end) override { | 5963 void VisitRootPointers(Root root, Object** start, Object** end) override { |
5960 handle_count_ += end - start; | 5964 handle_count_ += end - start; |
5961 } | 5965 } |
5962 | 5966 |
5963 private: | 5967 private: |
5964 ptrdiff_t handle_count_; | 5968 ptrdiff_t handle_count_; |
5965 }; | 5969 }; |
5966 | 5970 |
5967 | 5971 |
5968 void Heap::CheckHandleCount() { | 5972 void Heap::CheckHandleCount() { |
5969 CheckHandleCountVisitor v; | 5973 CheckHandleCountVisitor v; |
(...skipping 135 matching lines...)
6105 ~UnreachableObjectsFilter() { | 6109 ~UnreachableObjectsFilter() { |
6106 heap_->mark_compact_collector()->ClearMarkbits(); | 6110 heap_->mark_compact_collector()->ClearMarkbits(); |
6107 } | 6111 } |
6108 | 6112 |
6109 bool SkipObject(HeapObject* object) { | 6113 bool SkipObject(HeapObject* object) { |
6110 if (object->IsFiller()) return true; | 6114 if (object->IsFiller()) return true; |
6111 return ObjectMarking::IsWhite(object, MarkingState::Internal(object)); | 6115 return ObjectMarking::IsWhite(object, MarkingState::Internal(object)); |
6112 } | 6116 } |
6113 | 6117 |
6114 private: | 6118 private: |
6115 class MarkingVisitor : public ObjectVisitor { | 6119 class MarkingVisitor : public ObjectVisitor, public RootVisitor { |
6116 public: | 6120 public: |
6117 MarkingVisitor() : marking_stack_(10) {} | 6121 MarkingVisitor() : marking_stack_(10) {} |
6118 | 6122 |
6119 void VisitPointers(Object** start, Object** end) override { | 6123 void VisitPointers(Object** start, Object** end) override { |
| 6124 MarkPointers(start, end); |
| 6125 } |
| 6126 |
| 6127 void VisitRootPointers(Root root, Object** start, Object** end) override { |
| 6128 MarkPointers(start, end); |
| 6129 } |
| 6130 |
| 6131 void TransitiveClosure() { |
| 6132 while (!marking_stack_.is_empty()) { |
| 6133 HeapObject* obj = marking_stack_.RemoveLast(); |
| 6134 obj->Iterate(this); |
| 6135 } |
| 6136 } |
| 6137 |
| 6138 private: |
| 6139 void MarkPointers(Object** start, Object** end) { |
6120 for (Object** p = start; p < end; p++) { | 6140 for (Object** p = start; p < end; p++) { |
6121 if (!(*p)->IsHeapObject()) continue; | 6141 if (!(*p)->IsHeapObject()) continue; |
6122 HeapObject* obj = HeapObject::cast(*p); | 6142 HeapObject* obj = HeapObject::cast(*p); |
6123 // Use Marking instead of ObjectMarking to avoid adjusting live bytes | 6143 // Use Marking instead of ObjectMarking to avoid adjusting live bytes |
6124 // counter. | 6144 // counter. |
6125 MarkBit mark_bit = | 6145 MarkBit mark_bit = |
6126 ObjectMarking::MarkBitFrom(obj, MarkingState::Internal(obj)); | 6146 ObjectMarking::MarkBitFrom(obj, MarkingState::Internal(obj)); |
6127 if (Marking::IsWhite(mark_bit)) { | 6147 if (Marking::IsWhite(mark_bit)) { |
6128 Marking::WhiteToBlack(mark_bit); | 6148 Marking::WhiteToBlack(mark_bit); |
6129 marking_stack_.Add(obj); | 6149 marking_stack_.Add(obj); |
6130 } | 6150 } |
6131 } | 6151 } |
6132 } | 6152 } |
6133 | |
6134 void TransitiveClosure() { | |
6135 while (!marking_stack_.is_empty()) { | |
6136 HeapObject* obj = marking_stack_.RemoveLast(); | |
6137 obj->Iterate(this); | |
6138 } | |
6139 } | |
6140 | |
6141 private: | |
6142 List<HeapObject*> marking_stack_; | 6153 List<HeapObject*> marking_stack_; |
6143 }; | 6154 }; |
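[Annotation] The reworked MarkingVisitor above shows the idiom this CL settles on for visitors that must walk both roots and object bodies: inherit both interfaces and funnel their callbacks into one shared private helper. A stripped-down, hypothetical example of the same shape (using the `Object`, `Root`, `ObjectVisitor`, and `RootVisitor` types from this file):

```cpp
// Hypothetical visitor counting every slot it sees, whether it arrived
// via root iteration or via an object body walk.
class SlotCountingVisitor : public ObjectVisitor, public RootVisitor {
 public:
  void VisitPointers(Object** start, Object** end) override {
    CountSlots(start, end);
  }
  void VisitRootPointers(Root root, Object** start, Object** end) override {
    CountSlots(start, end);
  }
  size_t count() const { return count_; }

 private:
  void CountSlots(Object** start, Object** end) { count_ += end - start; }
  size_t count_ = 0;
};
```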
6144 | 6155 |
6145 void MarkReachableObjects() { | 6156 void MarkReachableObjects() { |
6146 MarkingVisitor visitor; | 6157 MarkingVisitor visitor; |
6147 heap_->IterateRoots(&visitor, VISIT_ALL); | 6158 heap_->IterateRoots(&visitor, VISIT_ALL); |
6148 visitor.TransitiveClosure(); | 6159 visitor.TransitiveClosure(); |
6149 } | 6160 } |
6150 | 6161 |
6151 Heap* heap_; | 6162 Heap* heap_; |
(...skipping 265 matching lines...)
6417 case LO_SPACE: | 6428 case LO_SPACE: |
6418 return "LO_SPACE"; | 6429 return "LO_SPACE"; |
6419 default: | 6430 default: |
6420 UNREACHABLE(); | 6431 UNREACHABLE(); |
6421 } | 6432 } |
6422 return NULL; | 6433 return NULL; |
6423 } | 6434 } |
6424 | 6435 |
6425 } // namespace internal | 6436 } // namespace internal |
6426 } // namespace v8 | 6437 } // namespace v8 |