| OLD | NEW | 
|---|---|
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. | 
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be | 
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. | 
| 4 | 4 | 
| 5 #include "src/heap/heap.h" | 5 #include "src/heap/heap.h" | 
| 6 | 6 | 
| 7 #include "src/accessors.h" | 7 #include "src/accessors.h" | 
| 8 #include "src/api.h" | 8 #include "src/api.h" | 
| 9 #include "src/assembler-inl.h" | 9 #include "src/assembler-inl.h" | 
| 10 #include "src/ast/context-slot-cache.h" | 10 #include "src/ast/context-slot-cache.h" | 
| (...skipping 1119 matching lines...) |
| 1130   Object** dst_objects = array->data_start() + dst_index; | 1130   Object** dst_objects = array->data_start() + dst_index; | 
| 1131   MemMove(dst_objects, array->data_start() + src_index, len * kPointerSize); | 1131   MemMove(dst_objects, array->data_start() + src_index, len * kPointerSize); | 
| 1132   FIXED_ARRAY_ELEMENTS_WRITE_BARRIER(this, array, dst_index, len); | 1132   FIXED_ARRAY_ELEMENTS_WRITE_BARRIER(this, array, dst_index, len); | 
| 1133 } | 1133 } | 
| 1134 | 1134 | 
| 1135 | 1135 | 
| 1136 #ifdef VERIFY_HEAP | 1136 #ifdef VERIFY_HEAP | 
| 1137 // Helper class for verifying the string table. | 1137 // Helper class for verifying the string table. | 
| 1138 class StringTableVerifier : public ObjectVisitor { | 1138 class StringTableVerifier : public ObjectVisitor { | 
| 1139  public: | 1139  public: | 
| 1140   void VisitPointers(Object** start, Object** end) override { | 1140   void VisitPointers(HeapObject* host, Object** start, Object** end) override { | 
| 1141     // Visit all HeapObject pointers in [start, end). | 1141     // Visit all HeapObject pointers in [start, end). | 
| 1142     for (Object** p = start; p < end; p++) { | 1142     for (Object** p = start; p < end; p++) { | 
| 1143       if ((*p)->IsHeapObject()) { | 1143       if ((*p)->IsHeapObject()) { | 
| 1144         HeapObject* object = HeapObject::cast(*p); | 1144         HeapObject* object = HeapObject::cast(*p); | 
| 1145         Isolate* isolate = object->GetIsolate(); | 1145         Isolate* isolate = object->GetIsolate(); | 
| 1146         // Check that the string is actually internalized. | 1146         // Check that the string is actually internalized. | 
| 1147         CHECK(object->IsTheHole(isolate) || object->IsUndefined(isolate) || | 1147         CHECK(object->IsTheHole(isolate) || object->IsUndefined(isolate) || | 
| 1148               object->IsInternalizedString()); | 1148               object->IsInternalizedString()); | 
| 1149       } | 1149       } | 
| 1150     } | 1150     } | 
| (...skipping 600 matching lines...) |
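The signature change above is the heart of this patch: `ObjectVisitor::VisitPointers` now takes the host object whose slot range is being visited, so visitors get that context for free instead of reconstructing it. A minimal, standalone sketch of the pattern (hypothetical types, not V8's real headers):

```cpp
#include <cstdio>
#include <vector>

struct HeapObject {
  const char* name;
  std::vector<HeapObject*> slots;  // outgoing pointers
};

class ObjectVisitor {
 public:
  virtual ~ObjectVisitor() = default;
  // The host is the object containing the slot range [start, end).
  virtual void VisitPointers(HeapObject* host, HeapObject** start,
                             HeapObject** end) = 0;
};

class PrintingVisitor : public ObjectVisitor {
 public:
  void VisitPointers(HeapObject* host, HeapObject** start,
                     HeapObject** end) override {
    for (HeapObject** p = start; p < end; p++) {
      if (*p != nullptr) std::printf("%s -> %s\n", host->name, (*p)->name);
    }
  }
};

int main() {
  HeapObject a{"a", {}}, b{"b", {}};
  HeapObject table{"table", {&a, &b}};
  PrintingVisitor v;
  // The caller supplies the host together with its slot range.
  v.VisitPointers(&table, table.slots.data(),
                  table.slots.data() + table.slots.size());
}
```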
| 1751   { | 1751   { | 
| 1752     TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_WEAK); | 1752     TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_WEAK); | 
| 1753     IterateEncounteredWeakCollections(&root_scavenge_visitor); | 1753     IterateEncounteredWeakCollections(&root_scavenge_visitor); | 
| 1754   } | 1754   } | 
| 1755 | 1755 | 
| 1756   { | 1756   { | 
| 1757     // Copy objects reachable from the code flushing candidates list. | 1757     // Copy objects reachable from the code flushing candidates list. | 
| 1758     TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_CODE_FLUSH_CANDIDATES); | 1758     TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_CODE_FLUSH_CANDIDATES); | 
| 1759     MarkCompactCollector* collector = mark_compact_collector(); | 1759     MarkCompactCollector* collector = mark_compact_collector(); | 
| 1760     if (collector->is_code_flushing_enabled()) { | 1760     if (collector->is_code_flushing_enabled()) { | 
| 1761       collector->code_flusher()->IteratePointersToFromSpace( | 1761       collector->code_flusher()->VisitListHeads(&root_scavenge_visitor); | 
| 1762           &root_scavenge_visitor); | 1762       collector->code_flusher() | 
|  | 1763           ->IteratePointersToFromSpace<StaticScavengeVisitor>(); | 
| 1763     } | 1764     } | 
| 1764   } | 1765   } | 
| 1765 | 1766 | 
| 1766   { | 1767   { | 
| 1767     TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_SEMISPACE); | 1768     TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_SEMISPACE); | 
| 1768     new_space_front = DoScavenge(new_space_front); | 1769     new_space_front = DoScavenge(new_space_front); | 
| 1769   } | 1770   } | 
| 1770 | 1771 | 
| 1771   isolate()->global_handles()->MarkNewSpaceWeakUnmodifiedObjectsPending( | 1772   isolate()->global_handles()->MarkNewSpaceWeakUnmodifiedObjectsPending( | 
| 1772       &IsUnscavengedHeapObject); | 1773       &IsUnscavengedHeapObject); | 
| (...skipping 3036 matching lines...) |
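The hunk above splits the code flusher's scavenge step in two: the list heads are walked with the virtual `root_scavenge_visitor`, while the bulk per-slot work goes through a visitor supplied as a template parameter (`StaticScavengeVisitor`). A standalone sketch of that shape, with hypothetical names, assuming the template parameter exists so the hot path gets static dispatch:

```cpp
#include <cstdio>
#include <utility>
#include <vector>

struct Slot { int value; };

class RootVisitor {
 public:
  virtual ~RootVisitor() = default;
  virtual void VisitRootPointer(Slot* slot) = 0;
};

class CandidateList {
 public:
  explicit CandidateList(std::vector<Slot> slots) : slots_(std::move(slots)) {}

  // Virtual dispatch: fine for the handful of list heads.
  void VisitListHeads(RootVisitor* v) {
    if (!slots_.empty()) v->VisitRootPointer(&slots_.front());
  }

  // Static dispatch: StaticVisitor::VisitSlot resolves at compile time,
  // so the per-slot call can be inlined.
  template <typename StaticVisitor>
  void IteratePointersToFromSpace() {
    for (Slot& s : slots_) StaticVisitor::VisitSlot(&s);
  }

 private:
  std::vector<Slot> slots_;
};

struct StaticScavengeVisitor {
  static void VisitSlot(Slot* s) { std::printf("scavenge %d\n", s->value); }
};

struct PrintingRootVisitor : RootVisitor {
  void VisitRootPointer(Slot* s) override { std::printf("root %d\n", s->value); }
};

int main() {
  CandidateList list({{1}, {2}, {3}});
  PrintingRootVisitor roots;
  list.VisitListHeads(&roots);
  list.IteratePointersToFromSpace<StaticScavengeVisitor>();
}
```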
| 4809     } | 4810     } | 
| 4810   } | 4811   } | 
| 4811 } | 4812 } | 
| 4812 | 4813 | 
| 4813 class IterateAndScavengePromotedObjectsVisitor final : public ObjectVisitor { | 4814 class IterateAndScavengePromotedObjectsVisitor final : public ObjectVisitor { | 
| 4814  public: | 4815  public: | 
| 4815   IterateAndScavengePromotedObjectsVisitor(Heap* heap, HeapObject* target, | 4816   IterateAndScavengePromotedObjectsVisitor(Heap* heap, HeapObject* target, | 
| 4816                                            bool record_slots) | 4817                                            bool record_slots) | 
| 4817       : heap_(heap), target_(target), record_slots_(record_slots) {} | 4818       : heap_(heap), target_(target), record_slots_(record_slots) {} | 
| 4818 | 4819 | 
| 4819   inline void VisitPointers(Object** start, Object** end) override { | 4820   inline void VisitPointers(HeapObject* host, Object** start, | 
|  | 4821                             Object** end) override { | 
|  | 4822     DCHECK_EQ(host, target_); | 
| 4820     Address slot_address = reinterpret_cast<Address>(start); | 4823     Address slot_address = reinterpret_cast<Address>(start); | 
| 4821     Page* page = Page::FromAddress(slot_address); | 4824     Page* page = Page::FromAddress(slot_address); | 
| 4822 | 4825 | 
| 4823     while (slot_address < reinterpret_cast<Address>(end)) { | 4826     while (slot_address < reinterpret_cast<Address>(end)) { | 
| 4824       Object** slot = reinterpret_cast<Object**>(slot_address); | 4827       Object** slot = reinterpret_cast<Object**>(slot_address); | 
| 4825       Object* target = *slot; | 4828       Object* target = *slot; | 
| 4826 | 4829 | 
| 4827       if (target->IsHeapObject()) { | 4830       if (target->IsHeapObject()) { | 
| 4828         if (heap_->InFromSpace(target)) { | 4831         if (heap_->InFromSpace(target)) { | 
| 4829           Scavenger::ScavengeObject(reinterpret_cast<HeapObject**>(slot), | 4832           Scavenger::ScavengeObject(reinterpret_cast<HeapObject**>(slot), | 
| (...skipping 10 matching lines...) |
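Around the lines skipped above, the visitor's per-slot logic has two arms: a target still in from-space is scavenged, while an old-space target sitting on an evacuation candidate has its slot recorded (the `RecordSlot` call below) so the compactor can update it later. A compact standalone sketch of that decision, with stand-in types rather than V8's page-flag checks:

```cpp
#include <cstdio>

enum class Space { kFromSpace, kOldSpace };

struct Obj {
  Space space;
  bool on_evacuation_candidate;
};

// Mirrors the two arms of the visitor: scavenge from-space targets,
// record slots to old-space evacuation candidates (if recording is on).
void VisitSlot(Obj** slot, bool record_slots) {
  Obj* target = *slot;
  if (target == nullptr) return;
  if (target->space == Space::kFromSpace) {
    std::printf("scavenge target through slot %p\n", (void*)slot);
  } else if (record_slots && target->on_evacuation_candidate) {
    std::printf("record slot %p for compaction\n", (void*)slot);
  }
}

int main() {
  Obj young{Space::kFromSpace, false};
  Obj evacuee{Space::kOldSpace, true};
  Obj* slots[] = {&young, &evacuee};
  for (Obj*& s : slots) VisitSlot(&s, /*record_slots=*/true);
}
```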
| 4840                    MarkCompactCollector::IsOnEvacuationCandidate( | 4843                    MarkCompactCollector::IsOnEvacuationCandidate( | 
| 4841                        HeapObject::cast(target))) { | 4844                        HeapObject::cast(target))) { | 
| 4842           heap_->mark_compact_collector()->RecordSlot(target_, slot, target); | 4845           heap_->mark_compact_collector()->RecordSlot(target_, slot, target); | 
| 4843         } | 4846         } | 
| 4844       } | 4847       } | 
| 4845 | 4848 | 
| 4846       slot_address += kPointerSize; | 4849       slot_address += kPointerSize; | 
| 4847     } | 4850     } | 
| 4848   } | 4851   } | 
| 4849 | 4852 | 
| 4850   inline void VisitCodeEntry(Address code_entry_slot) override { | 4853   inline void VisitCodeEntry(JSFunction* host, | 
|  | 4854                              Address code_entry_slot) override { | 
| 4851     // Black allocation requires us to process objects referenced by | 4855     // Black allocation requires us to process objects referenced by | 
| 4852     // promoted objects. | 4856     // promoted objects. | 
| 4853     if (heap_->incremental_marking()->black_allocation()) { | 4857     if (heap_->incremental_marking()->black_allocation()) { | 
| 4854       Code* code = Code::cast(Code::GetObjectFromEntryAddress(code_entry_slot)); | 4858       Code* code = Code::cast(Code::GetObjectFromEntryAddress(code_entry_slot)); | 
| 4855       IncrementalMarking::MarkGrey(heap_, code); | 4859       IncrementalMarking::MarkGrey(heap_, code); | 
| 4856     } | 4860     } | 
| 4857   } | 4861   } | 
| 4858 | 4862 | 
| 4859  private: | 4863  private: | 
| 4860   Heap* heap_; | 4864   Heap* heap_; | 
| 4861   HeapObject* target_; | 4865   HeapObject* target_; | 
| 4862   bool record_slots_; | 4866   bool record_slots_; | 
| 4863 }; | 4867 }; | 
| 4864 | 4868 | 
| 4865 void Heap::IterateAndScavengePromotedObject(HeapObject* target, int size, | 4869 void Heap::IterateAndScavengePromotedObject(HeapObject* target, int size, | 
| 4866                                             bool was_marked_black) { | 4870                                             bool was_marked_black) { | 
| 4867   // We are not collecting slots on new space objects during mutation, | 4871   // We are not collecting slots on new space objects during mutation, | 
| 4868   // thus we have to scan for pointers to evacuation candidates when we | 4872   // thus we have to scan for pointers to evacuation candidates when we | 
| 4869   // promote objects. But we should not record any slots in non-black | 4873   // promote objects. But we should not record any slots in non-black | 
| 4870   // objects. Grey objects' slots would be rescanned. | 4874   // objects. Grey objects' slots would be rescanned. | 
| 4871   // A white object might not survive until the end of the collection; | 4875   // A white object might not survive until the end of the collection; | 
| 4872   // it would be a violation of the invariant to record its slots. | 4876   // it would be a violation of the invariant to record its slots. | 
| 4873   bool record_slots = false; | 4877   bool record_slots = false; | 
| 4874   if (incremental_marking()->IsCompacting()) { | 4878   if (incremental_marking()->IsCompacting()) { | 
| 4875     record_slots = | 4879     record_slots = | 
| 4876         ObjectMarking::IsBlack(target, MarkingState::Internal(target)); | 4880         ObjectMarking::IsBlack(target, MarkingState::Internal(target)); | 
| 4877   } | 4881   } | 
| 4878 | 4882 | 
|  | 4883   // TODO(ulan): remove the target; the visitor now gets the host object | 
|  | 4884   // in each visit method. | 
| 4879   IterateAndScavengePromotedObjectsVisitor visitor(this, target, record_slots); | 4885   IterateAndScavengePromotedObjectsVisitor visitor(this, target, record_slots); | 
| 4880   if (target->IsJSFunction()) { | 4886   if (target->IsJSFunction()) { | 
| 4881     // JSFunctions reachable through kNextFunctionLinkOffset are weak. Slots for | 4887     // JSFunctions reachable through kNextFunctionLinkOffset are weak. Slots for | 
| 4882     // these links are recorded during processing of weak lists. | 4888     // these links are recorded during processing of weak lists. | 
| 4883     JSFunction::BodyDescriptorWeakCode::IterateBody(target, size, &visitor); | 4889     JSFunction::BodyDescriptorWeakCode::IterateBody(target, size, &visitor); | 
| 4884   } else { | 4890   } else { | 
| 4885     target->IterateBody(target->map()->instance_type(), size, &visitor); | 4891     target->IterateBody(target->map()->instance_type(), size, &visitor); | 
| 4886   } | 4892   } | 
| 4887 | 4893 | 
| 4888   // When black allocation is on, we have to visit not already marked black | 4894   // When black allocation is on, we have to visit not already marked black | 
| (...skipping 1224 matching lines...) |
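The `record_slots` comment above encodes the tri-color invariant: slots are recorded only for black promoted objects, because grey objects will be rescanned anyway and a white object may not survive the collection at all. A tiny sketch of just that decision, using a hypothetical `Color` enum in place of V8's mark bits:

```cpp
#include <cstdio>

enum class Color { kWhite, kGrey, kBlack };

// Mirrors: record_slots = compacting && ObjectMarking::IsBlack(target).
bool ShouldRecordSlots(bool compacting, Color target_color) {
  return compacting && target_color == Color::kBlack;
}

int main() {
  std::printf("black: %d\n", ShouldRecordSlots(true, Color::kBlack));  // 1
  std::printf("grey:  %d\n", ShouldRecordSlots(true, Color::kGrey));   // 0
  std::printf("white: %d\n", ShouldRecordSlots(true, Color::kWhite));  // 0
}
```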
| 6113   bool SkipObject(HeapObject* object) { | 6119   bool SkipObject(HeapObject* object) { | 
| 6114     if (object->IsFiller()) return true; | 6120     if (object->IsFiller()) return true; | 
| 6115     return ObjectMarking::IsWhite(object, MarkingState::Internal(object)); | 6121     return ObjectMarking::IsWhite(object, MarkingState::Internal(object)); | 
| 6116   } | 6122   } | 
| 6117 | 6123 | 
| 6118  private: | 6124  private: | 
| 6119   class MarkingVisitor : public ObjectVisitor, public RootVisitor { | 6125   class MarkingVisitor : public ObjectVisitor, public RootVisitor { | 
| 6120    public: | 6126    public: | 
| 6121     MarkingVisitor() : marking_stack_(10) {} | 6127     MarkingVisitor() : marking_stack_(10) {} | 
| 6122 | 6128 | 
| 6123     void VisitPointers(Object** start, Object** end) override { | 6129     void VisitPointers(HeapObject* host, Object** start, | 
|  | 6130                        Object** end) override { | 
| 6124       MarkPointers(start, end); | 6131       MarkPointers(start, end); | 
| 6125     } | 6132     } | 
| 6126 | 6133 | 
| 6127     void VisitRootPointers(Root root, Object** start, Object** end) override { | 6134     void VisitRootPointers(Root root, Object** start, Object** end) override { | 
| 6128       MarkPointers(start, end); | 6135       MarkPointers(start, end); | 
| 6129     } | 6136     } | 
| 6130 | 6137 | 
| 6131     void TransitiveClosure() { | 6138     void TransitiveClosure() { | 
| 6132       while (!marking_stack_.is_empty()) { | 6139       while (!marking_stack_.is_empty()) { | 
| 6133         HeapObject* obj = marking_stack_.RemoveLast(); | 6140         HeapObject* obj = marking_stack_.RemoveLast(); | 
| (...skipping 294 matching lines...) |
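`MarkingVisitor::TransitiveClosure` above drains an explicit marking stack until no reachable object is left unvisited. A self-contained sketch of the same worklist pattern, with a hypothetical `Node` type standing in for heap objects:

```cpp
#include <cstdio>
#include <utility>
#include <vector>

struct Node {
  const char* name;
  bool marked = false;
  std::vector<Node*> edges;
};

// Push roots, then pop-and-scan until the stack is empty; each node is
// marked exactly once before being pushed, so the loop terminates.
void TransitiveClosure(std::vector<Node*> roots) {
  std::vector<Node*> marking_stack = std::move(roots);
  for (Node* r : marking_stack) r->marked = true;
  while (!marking_stack.empty()) {
    Node* obj = marking_stack.back();
    marking_stack.pop_back();
    std::printf("visit %s\n", obj->name);
    for (Node* succ : obj->edges) {
      if (!succ->marked) {
        succ->marked = true;
        marking_stack.push_back(succ);
      }
    }
  }
}

int main() {
  Node c{"c"};
  Node b{"b", false, {&c}};
  Node a{"a", false, {&b, &c}};
  TransitiveClosure({&a});
}
```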
| 6428     case LO_SPACE: | 6435     case LO_SPACE: | 
| 6429       return "LO_SPACE"; | 6436       return "LO_SPACE"; | 
| 6430     default: | 6437     default: | 
| 6431       UNREACHABLE(); | 6438       UNREACHABLE(); | 
| 6432   } | 6439   } | 
| 6433   return NULL; | 6440   return NULL; | 
| 6434 } | 6441 } | 
| 6435 | 6442 | 
| 6436 }  // namespace internal | 6443 }  // namespace internal | 
| 6437 }  // namespace v8 | 6444 }  // namespace v8 | 