| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 61 matching lines...) |
| 72 tracer_(NULL), | 72 tracer_(NULL), |
| 73 migration_slots_buffer_(NULL), | 73 migration_slots_buffer_(NULL), |
| 74 heap_(NULL), | 74 heap_(NULL), |
| 75 code_flusher_(NULL), | 75 code_flusher_(NULL), |
| 76 encountered_weak_collections_(NULL), | 76 encountered_weak_collections_(NULL), |
| 77 have_code_to_deoptimize_(false) { } | 77 have_code_to_deoptimize_(false) { } |
| 78 | 78 |
| 79 #ifdef VERIFY_HEAP | 79 #ifdef VERIFY_HEAP |
| 80 class VerifyMarkingVisitor: public ObjectVisitor { | 80 class VerifyMarkingVisitor: public ObjectVisitor { |
| 81 public: | 81 public: |
| | 82 explicit VerifyMarkingVisitor(Heap* heap) : heap_(heap) {} |
| | 83 |
| 82 void VisitPointers(Object** start, Object** end) { | 84 void VisitPointers(Object** start, Object** end) { |
| 83 for (Object** current = start; current < end; current++) { | 85 for (Object** current = start; current < end; current++) { |
| 84 if ((*current)->IsHeapObject()) { | 86 if ((*current)->IsHeapObject()) { |
| 85 HeapObject* object = HeapObject::cast(*current); | 87 HeapObject* object = HeapObject::cast(*current); |
| 86 CHECK(HEAP->mark_compact_collector()->IsMarked(object)); | 88 CHECK(heap_->mark_compact_collector()->IsMarked(object)); |
| 87 } | 89 } |
| 88 } | 90 } |
| 89 } | 91 } |
| 90 | 92 |
| 91 void VisitEmbeddedPointer(RelocInfo* rinfo) { | 93 void VisitEmbeddedPointer(RelocInfo* rinfo) { |
| 92 ASSERT(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT); | 94 ASSERT(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT); |
| 93 if (!FLAG_weak_embedded_maps_in_optimized_code || !FLAG_collect_maps || | 95 if (!FLAG_weak_embedded_maps_in_optimized_code || !FLAG_collect_maps || |
| 94 rinfo->host()->kind() != Code::OPTIMIZED_FUNCTION || | 96 rinfo->host()->kind() != Code::OPTIMIZED_FUNCTION || |
| 95 !rinfo->target_object()->IsMap() || | 97 !rinfo->target_object()->IsMap() || |
| 96 !Map::cast(rinfo->target_object())->CanTransition()) { | 98 !Map::cast(rinfo->target_object())->CanTransition()) { |
| 97 VisitPointer(rinfo->target_object_address()); | 99 VisitPointer(rinfo->target_object_address()); |
| 98 } | 100 } |
| 99 } | 101 } |
| | 102 |
| | 103 private: |
| | 104 Heap* heap_; |
| 100 }; | 105 }; |
| 101 | 106 |
| 102 | 107 |
| 103 static void VerifyMarking(Address bottom, Address top) { | 108 static void VerifyMarking(Heap* heap, Address bottom, Address top) { |
| 104 VerifyMarkingVisitor visitor; | 109 VerifyMarkingVisitor visitor(heap); |
| 105 HeapObject* object; | 110 HeapObject* object; |
| 106 Address next_object_must_be_here_or_later = bottom; | 111 Address next_object_must_be_here_or_later = bottom; |
| 107 | 112 |
| 108 for (Address current = bottom; | 113 for (Address current = bottom; |
| 109 current < top; | 114 current < top; |
| 110 current += kPointerSize) { | 115 current += kPointerSize) { |
| 111 object = HeapObject::FromAddress(current); | 116 object = HeapObject::FromAddress(current); |
| 112 if (MarkCompactCollector::IsMarked(object)) { | 117 if (MarkCompactCollector::IsMarked(object)) { |
| 113 CHECK(current >= next_object_must_be_here_or_later); | 118 CHECK(current >= next_object_must_be_here_or_later); |
| 114 object->Iterate(&visitor); | 119 object->Iterate(&visitor); |
| 115 next_object_must_be_here_or_later = current + object->Size(); | 120 next_object_must_be_here_or_later = current + object->Size(); |
| 116 } | 121 } |
| 117 } | 122 } |
| 118 } | 123 } |
| 119 | 124 |
| 120 | 125 |
| 121 static void VerifyMarking(NewSpace* space) { | 126 static void VerifyMarking(NewSpace* space) { |
| 122 Address end = space->top(); | 127 Address end = space->top(); |
| 123 NewSpacePageIterator it(space->bottom(), end); | 128 NewSpacePageIterator it(space->bottom(), end); |
| 124 // The bottom position is at the start of its page. Allows us to use | 129 // The bottom position is at the start of its page. Allows us to use |
| 125 // page->area_start() as start of range on all pages. | 130 // page->area_start() as start of range on all pages. |
| 126 CHECK_EQ(space->bottom(), | 131 CHECK_EQ(space->bottom(), |
| 127 NewSpacePage::FromAddress(space->bottom())->area_start()); | 132 NewSpacePage::FromAddress(space->bottom())->area_start()); |
| 128 while (it.has_next()) { | 133 while (it.has_next()) { |
| 129 NewSpacePage* page = it.next(); | 134 NewSpacePage* page = it.next(); |
| 130 Address limit = it.has_next() ? page->area_end() : end; | 135 Address limit = it.has_next() ? page->area_end() : end; |
| 131 CHECK(limit == end || !page->Contains(end)); | 136 CHECK(limit == end || !page->Contains(end)); |
| 132 VerifyMarking(page->area_start(), limit); | 137 VerifyMarking(space->heap(), page->area_start(), limit); |
| 133 } | 138 } |
| 134 } | 139 } |
| 135 | 140 |
| 136 | 141 |
| 137 static void VerifyMarking(PagedSpace* space) { | 142 static void VerifyMarking(PagedSpace* space) { |
| 138 PageIterator it(space); | 143 PageIterator it(space); |
| 139 | 144 |
| 140 while (it.has_next()) { | 145 while (it.has_next()) { |
| 141 Page* p = it.next(); | 146 Page* p = it.next(); |
| 142 VerifyMarking(p->area_start(), p->area_end()); | 147 VerifyMarking(space->heap(), p->area_start(), p->area_end()); |
| 143 } | 148 } |
| 144 } | 149 } |
| 145 | 150 |
| 146 | 151 |
| 147 static void VerifyMarking(Heap* heap) { | 152 static void VerifyMarking(Heap* heap) { |
| 148 VerifyMarking(heap->old_pointer_space()); | 153 VerifyMarking(heap->old_pointer_space()); |
| 149 VerifyMarking(heap->old_data_space()); | 154 VerifyMarking(heap->old_data_space()); |
| 150 VerifyMarking(heap->code_space()); | 155 VerifyMarking(heap->code_space()); |
| 151 VerifyMarking(heap->cell_space()); | 156 VerifyMarking(heap->cell_space()); |
| 152 VerifyMarking(heap->property_cell_space()); | 157 VerifyMarking(heap->property_cell_space()); |
| 153 VerifyMarking(heap->map_space()); | 158 VerifyMarking(heap->map_space()); |
| 154 VerifyMarking(heap->new_space()); | 159 VerifyMarking(heap->new_space()); |
| 155 | 160 |
| 156 VerifyMarkingVisitor visitor; | 161 VerifyMarkingVisitor visitor(heap); |
| 157 | 162 |
| 158 LargeObjectIterator it(heap->lo_space()); | 163 LargeObjectIterator it(heap->lo_space()); |
| 159 for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) { | 164 for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) { |
| 160 if (MarkCompactCollector::IsMarked(obj)) { | 165 if (MarkCompactCollector::IsMarked(obj)) { |
| 161 obj->Iterate(&visitor); | 166 obj->Iterate(&visitor); |
| 162 } | 167 } |
| 163 } | 168 } |
| 164 | 169 |
| 165 heap->IterateStrongRoots(&visitor, VISIT_ONLY_STRONG); | 170 heap->IterateStrongRoots(&visitor, VISIT_ONLY_STRONG); |
| 166 } | 171 } |
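
The hunk above threads the heap explicitly through marking verification: VerifyMarkingVisitor now keeps the Heap* handed to its constructor, and the VerifyMarking helpers forward space->heap(), instead of resolving the heap through the HEAP macro's current-isolate lookup. A minimal sketch of that shape, using made-up names (SimpleHeap, VerifyingVisitor) rather than V8's real classes:

```cpp
// Illustrative sketch only -- hypothetical types, not V8 code. It shows the
// pattern of the change above: the verifying visitor checks against a heap
// that is injected at construction, not fetched from a per-thread global.
#include <cassert>
#include <set>

class SimpleHeap {
 public:
  void Mark(const void* obj) { marked_.insert(obj); }
  bool IsMarked(const void* obj) const { return marked_.count(obj) != 0; }
 private:
  std::set<const void*> marked_;
};

class VerifyingVisitor {
 public:
  explicit VerifyingVisitor(SimpleHeap* heap) : heap_(heap) {}
  // Every pointer the visitor sees must already be marked in *this* heap.
  void VisitPointer(const void* obj) { assert(heap_->IsMarked(obj)); }
 private:
  SimpleHeap* heap_;  // injected dependency, no global lookup
};

int main() {
  SimpleHeap heap;       // each isolate would own its own heap
  int object = 0;
  heap.Mark(&object);
  VerifyingVisitor visitor(&heap);  // heap passed in, as in the patch above
  visitor.VisitPointer(&object);
  return 0;
}
```

With the handle injected, two isolates can run verification at the same time without agreeing on a single per-thread global.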
| (...skipping 1235 matching lines...) |
| 1402 MarkBit mark = Marking::MarkBitFrom(object); | 1407 MarkBit mark = Marking::MarkBitFrom(object); |
| 1403 collector->MarkObject(object, mark); | 1408 collector->MarkObject(object, mark); |
| 1404 } | 1409 } |
| 1405 | 1410 |
| 1406 | 1411 |
| 1407 // Visit an unmarked object. | 1412 // Visit an unmarked object. |
| 1408 INLINE(static void VisitUnmarkedObject(MarkCompactCollector* collector, | 1413 INLINE(static void VisitUnmarkedObject(MarkCompactCollector* collector, |
| 1409 HeapObject* obj)) { | 1414 HeapObject* obj)) { |
| 1410 #ifdef DEBUG | 1415 #ifdef DEBUG |
| 1411 ASSERT(collector->heap()->Contains(obj)); | 1416 ASSERT(collector->heap()->Contains(obj)); |
| 1412 ASSERT(!HEAP->mark_compact_collector()->IsMarked(obj)); | 1417 ASSERT(!collector->heap()->mark_compact_collector()->IsMarked(obj)); |
| 1413 #endif | 1418 #endif |
| 1414 Map* map = obj->map(); | 1419 Map* map = obj->map(); |
| 1415 Heap* heap = obj->GetHeap(); | 1420 Heap* heap = obj->GetHeap(); |
| 1416 MarkBit mark = Marking::MarkBitFrom(obj); | 1421 MarkBit mark = Marking::MarkBitFrom(obj); |
| 1417 heap->mark_compact_collector()->SetMark(obj, mark); | 1422 heap->mark_compact_collector()->SetMark(obj, mark); |
| 1418 // Mark the map pointer and the body. | 1423 // Mark the map pointer and the body. |
| 1419 MarkBit map_mark = Marking::MarkBitFrom(map); | 1424 MarkBit map_mark = Marking::MarkBitFrom(map); |
| 1420 heap->mark_compact_collector()->MarkObject(map, map_mark); | 1425 heap->mark_compact_collector()->MarkObject(map, map_mark); |
| 1421 IterateBody(map, obj); | 1426 IterateBody(map, obj); |
| 1422 } | 1427 } |
| (...skipping 353 matching lines...) |
| 1776 MarkObject(code, code_mark); | 1781 MarkObject(code, code_mark); |
| 1777 if (frame->is_optimized()) { | 1782 if (frame->is_optimized()) { |
| 1778 MarkCompactMarkingVisitor::MarkInlinedFunctionsCode(heap(), | 1783 MarkCompactMarkingVisitor::MarkInlinedFunctionsCode(heap(), |
| 1779 frame->LookupCode()); | 1784 frame->LookupCode()); |
| 1780 } | 1785 } |
| 1781 } | 1786 } |
| 1782 } | 1787 } |
| 1783 | 1788 |
| 1784 | 1789 |
| 1785 void MarkCompactCollector::PrepareForCodeFlushing() { | 1790 void MarkCompactCollector::PrepareForCodeFlushing() { |
| 1786 ASSERT(heap() == Isolate::Current()->heap()); | |
| 1787 | |
| 1788 // Enable code flushing for non-incremental cycles. | 1791 // Enable code flushing for non-incremental cycles. |
| 1789 if (FLAG_flush_code && !FLAG_flush_code_incrementally) { | 1792 if (FLAG_flush_code && !FLAG_flush_code_incrementally) { |
| 1790 EnableCodeFlushing(!was_marked_incrementally_); | 1793 EnableCodeFlushing(!was_marked_incrementally_); |
| 1791 } | 1794 } |
| 1792 | 1795 |
| 1793 // If code flushing is disabled, there is no need to prepare for it. | 1796 // If code flushing is disabled, there is no need to prepare for it. |
| 1794 if (!is_code_flushing_enabled()) return; | 1797 if (!is_code_flushing_enabled()) return; |
| 1795 | 1798 |
| 1796 // Ensure that empty descriptor array is marked. Method MarkDescriptorArray | 1799 // Ensure that empty descriptor array is marked. Method MarkDescriptorArray |
| 1797 // relies on it being marked before any other descriptor array. | 1800 // relies on it being marked before any other descriptor array. |
| (...skipping 1241 matching lines...) |
| 3039 MapWord map_word = heap_object->map_word(); | 3042 MapWord map_word = heap_object->map_word(); |
| 3040 if (map_word.IsForwardingAddress()) { | 3043 if (map_word.IsForwardingAddress()) { |
| 3041 return map_word.ToForwardingAddress(); | 3044 return map_word.ToForwardingAddress(); |
| 3042 } | 3045 } |
| 3043 } | 3046 } |
| 3044 return object; | 3047 return object; |
| 3045 } | 3048 } |
| 3046 }; | 3049 }; |
| 3047 | 3050 |
| 3048 | 3051 |
| 3049 static inline void UpdateSlot(ObjectVisitor* v, | 3052 static inline void UpdateSlot(Isolate* isolate, |
| | 3053 ObjectVisitor* v, |
| 3050 SlotsBuffer::SlotType slot_type, | 3054 SlotsBuffer::SlotType slot_type, |
| 3051 Address addr) { | 3055 Address addr) { |
| 3052 switch (slot_type) { | 3056 switch (slot_type) { |
| 3053 case SlotsBuffer::CODE_TARGET_SLOT: { | 3057 case SlotsBuffer::CODE_TARGET_SLOT: { |
| 3054 RelocInfo rinfo(addr, RelocInfo::CODE_TARGET, 0, NULL); | 3058 RelocInfo rinfo(addr, RelocInfo::CODE_TARGET, 0, NULL); |
| 3055 rinfo.Visit(v); | 3059 rinfo.Visit(isolate, v); |
| 3056 break; | 3060 break; |
| 3057 } | 3061 } |
| 3058 case SlotsBuffer::CODE_ENTRY_SLOT: { | 3062 case SlotsBuffer::CODE_ENTRY_SLOT: { |
| 3059 v->VisitCodeEntry(addr); | 3063 v->VisitCodeEntry(addr); |
| 3060 break; | 3064 break; |
| 3061 } | 3065 } |
| 3062 case SlotsBuffer::RELOCATED_CODE_OBJECT: { | 3066 case SlotsBuffer::RELOCATED_CODE_OBJECT: { |
| 3063 HeapObject* obj = HeapObject::FromAddress(addr); | 3067 HeapObject* obj = HeapObject::FromAddress(addr); |
| 3064 Code::cast(obj)->CodeIterateBody(v); | 3068 Code::cast(obj)->CodeIterateBody(v); |
| 3065 break; | 3069 break; |
| 3066 } | 3070 } |
| 3067 case SlotsBuffer::DEBUG_TARGET_SLOT: { | 3071 case SlotsBuffer::DEBUG_TARGET_SLOT: { |
| 3068 RelocInfo rinfo(addr, RelocInfo::DEBUG_BREAK_SLOT, 0, NULL); | 3072 RelocInfo rinfo(addr, RelocInfo::DEBUG_BREAK_SLOT, 0, NULL); |
| 3069 if (rinfo.IsPatchedDebugBreakSlotSequence()) rinfo.Visit(v); | 3073 if (rinfo.IsPatchedDebugBreakSlotSequence()) rinfo.Visit(isolate, v); |
| 3070 break; | 3074 break; |
| 3071 } | 3075 } |
| 3072 case SlotsBuffer::JS_RETURN_SLOT: { | 3076 case SlotsBuffer::JS_RETURN_SLOT: { |
| 3073 RelocInfo rinfo(addr, RelocInfo::JS_RETURN, 0, NULL); | 3077 RelocInfo rinfo(addr, RelocInfo::JS_RETURN, 0, NULL); |
| 3074 if (rinfo.IsPatchedReturnSequence()) rinfo.Visit(v); | 3078 if (rinfo.IsPatchedReturnSequence()) rinfo.Visit(isolate, v); |
| 3075 break; | 3079 break; |
| 3076 } | 3080 } |
| 3077 case SlotsBuffer::EMBEDDED_OBJECT_SLOT: { | 3081 case SlotsBuffer::EMBEDDED_OBJECT_SLOT: { |
| 3078 RelocInfo rinfo(addr, RelocInfo::EMBEDDED_OBJECT, 0, NULL); | 3082 RelocInfo rinfo(addr, RelocInfo::EMBEDDED_OBJECT, 0, NULL); |
| 3079 rinfo.Visit(v); | 3083 rinfo.Visit(isolate, v); |
| 3080 break; | 3084 break; |
| 3081 } | 3085 } |
| 3082 default: | 3086 default: |
| 3083 UNREACHABLE(); | 3087 UNREACHABLE(); |
| 3084 break; | 3088 break; |
| 3085 } | 3089 } |
| 3086 } | 3090 } |
| 3087 | 3091 |
| 3088 | 3092 |
| 3089 enum SweepingMode { | 3093 enum SweepingMode { |
| (...skipping 1166 matching lines...) |
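
In the dispatcher above, UpdateSlot gains an Isolate* parameter and forwards it to rinfo.Visit(isolate, v); the SlotsBuffer callers below supply it as heap->isolate(). The sketch below mirrors only that calling convention, with invented types (MiniIsolate, SlotKind, SlotVisitor) rather than the real RelocInfo API:

```cpp
// Hedged sketch with made-up names, not V8 code. The isolate travels as an
// explicit argument through the typed-slot switch and is forwarded to each
// visit call, instead of being re-derived by the callee from a global.
#include <cstdio>

struct MiniIsolate {
  const char* name;
};

enum class SlotKind { kCodeTarget, kEmbeddedObject };

struct SlotVisitor {
  void VisitSlot(MiniIsolate* isolate, SlotKind kind, void* addr) {
    std::printf("[%s] updating slot kind=%d at %p\n",
                isolate->name, static_cast<int>(kind), addr);
  }
};

static void UpdateSlot(MiniIsolate* isolate, SlotVisitor* v,
                       SlotKind kind, void* addr) {
  switch (kind) {
    case SlotKind::kCodeTarget:
      v->VisitSlot(isolate, kind, addr);  // context forwarded, not looked up
      break;
    case SlotKind::kEmbeddedObject:
      v->VisitSlot(isolate, kind, addr);  // same idea for other slot kinds
      break;
  }
}

int main() {
  MiniIsolate isolate{"isolate-0"};
  SlotVisitor visitor;
  int slot = 0;
  UpdateSlot(&isolate, &visitor, SlotKind::kCodeTarget, &slot);
  return 0;
}
```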
| 4256 void SlotsBuffer::UpdateSlots(Heap* heap) { | 4260 void SlotsBuffer::UpdateSlots(Heap* heap) { |
| 4257 PointersUpdatingVisitor v(heap); | 4261 PointersUpdatingVisitor v(heap); |
| 4258 | 4262 |
| 4259 for (int slot_idx = 0; slot_idx < idx_; ++slot_idx) { | 4263 for (int slot_idx = 0; slot_idx < idx_; ++slot_idx) { |
| 4260 ObjectSlot slot = slots_[slot_idx]; | 4264 ObjectSlot slot = slots_[slot_idx]; |
| 4261 if (!IsTypedSlot(slot)) { | 4265 if (!IsTypedSlot(slot)) { |
| 4262 PointersUpdatingVisitor::UpdateSlot(heap, slot); | 4266 PointersUpdatingVisitor::UpdateSlot(heap, slot); |
| 4263 } else { | 4267 } else { |
| 4264 ++slot_idx; | 4268 ++slot_idx; |
| 4265 ASSERT(slot_idx < idx_); | 4269 ASSERT(slot_idx < idx_); |
| 4266 UpdateSlot(&v, | 4270 UpdateSlot(heap->isolate(), |
| | 4271 &v, |
| 4267 DecodeSlotType(slot), | 4272 DecodeSlotType(slot), |
| 4268 reinterpret_cast<Address>(slots_[slot_idx])); | 4273 reinterpret_cast<Address>(slots_[slot_idx])); |
| 4269 } | 4274 } |
| 4270 } | 4275 } |
| 4271 } | 4276 } |
| 4272 | 4277 |
| 4273 | 4278 |
| 4274 void SlotsBuffer::UpdateSlotsWithFilter(Heap* heap) { | 4279 void SlotsBuffer::UpdateSlotsWithFilter(Heap* heap) { |
| 4275 PointersUpdatingVisitor v(heap); | 4280 PointersUpdatingVisitor v(heap); |
| 4276 | 4281 |
| 4277 for (int slot_idx = 0; slot_idx < idx_; ++slot_idx) { | 4282 for (int slot_idx = 0; slot_idx < idx_; ++slot_idx) { |
| 4278 ObjectSlot slot = slots_[slot_idx]; | 4283 ObjectSlot slot = slots_[slot_idx]; |
| 4279 if (!IsTypedSlot(slot)) { | 4284 if (!IsTypedSlot(slot)) { |
| 4280 if (!IsOnInvalidatedCodeObject(reinterpret_cast<Address>(slot))) { | 4285 if (!IsOnInvalidatedCodeObject(reinterpret_cast<Address>(slot))) { |
| 4281 PointersUpdatingVisitor::UpdateSlot(heap, slot); | 4286 PointersUpdatingVisitor::UpdateSlot(heap, slot); |
| 4282 } | 4287 } |
| 4283 } else { | 4288 } else { |
| 4284 ++slot_idx; | 4289 ++slot_idx; |
| 4285 ASSERT(slot_idx < idx_); | 4290 ASSERT(slot_idx < idx_); |
| 4286 Address pc = reinterpret_cast<Address>(slots_[slot_idx]); | 4291 Address pc = reinterpret_cast<Address>(slots_[slot_idx]); |
| 4287 if (!IsOnInvalidatedCodeObject(pc)) { | 4292 if (!IsOnInvalidatedCodeObject(pc)) { |
| 4288 UpdateSlot(&v, | 4293 UpdateSlot(heap->isolate(), |
| | 4294 &v, |
| 4289 DecodeSlotType(slot), | 4295 DecodeSlotType(slot), |
| 4290 reinterpret_cast<Address>(slots_[slot_idx])); | 4296 reinterpret_cast<Address>(slots_[slot_idx])); |
| 4291 } | 4297 } |
| 4292 } | 4298 } |
| 4293 } | 4299 } |
| 4294 } | 4300 } |
| 4295 | 4301 |
| 4296 | 4302 |
| 4297 SlotsBuffer* SlotsBufferAllocator::AllocateBuffer(SlotsBuffer* next_buffer) { | 4303 SlotsBuffer* SlotsBufferAllocator::AllocateBuffer(SlotsBuffer* next_buffer) { |
| 4298 return new SlotsBuffer(next_buffer); | 4304 return new SlotsBuffer(next_buffer); |
| (...skipping 10 matching lines...) |
| 4309 while (buffer != NULL) { | 4315 while (buffer != NULL) { |
| 4310 SlotsBuffer* next_buffer = buffer->next(); | 4316 SlotsBuffer* next_buffer = buffer->next(); |
| 4311 DeallocateBuffer(buffer); | 4317 DeallocateBuffer(buffer); |
| 4312 buffer = next_buffer; | 4318 buffer = next_buffer; |
| 4313 } | 4319 } |
| 4314 *buffer_address = NULL; | 4320 *buffer_address = NULL; |
| 4315 } | 4321 } |
| 4316 | 4322 |
| 4317 | 4323 |
| 4318 } } // namespace v8::internal | 4324 } } // namespace v8::internal |
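
On the caller side, SlotsBuffer::UpdateSlots and UpdateSlotsWithFilter already receive a Heap*, so the isolate is derived from it (heap->isolate()) rather than from thread-local state. A hedged sketch of that caller-side shape, again with hypothetical names (PendingSlots, MiniHeap, MiniIsolate):

```cpp
// Illustrative only -- these types are invented for the sketch. The slot
// container derives the isolate from the heap it was given and hands it to
// the per-slot update routine, matching the calling pattern in the patch.
#include <cstdio>
#include <vector>

struct MiniIsolate { const char* name; };

class MiniHeap {
 public:
  explicit MiniHeap(MiniIsolate* isolate) : isolate_(isolate) {}
  MiniIsolate* isolate() const { return isolate_; }
 private:
  MiniIsolate* isolate_;
};

static void UpdateSlot(MiniIsolate* isolate, void** slot) {
  std::printf("[%s] slot %p updated\n", isolate->name,
              static_cast<void*>(slot));
}

struct PendingSlots {
  std::vector<void*> slots;
  void UpdateSlots(MiniHeap* heap) {
    // The isolate comes from the heap the caller already passed in.
    MiniIsolate* isolate = heap->isolate();
    for (void*& slot : slots) UpdateSlot(isolate, &slot);
  }
};

int main() {
  MiniIsolate isolate{"isolate-0"};
  MiniHeap heap(&isolate);
  int a = 0, b = 0;
  PendingSlots buffer;
  buffer.slots = {&a, &b};
  buffer.UpdateSlots(&heap);
  return 0;
}
```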