OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #include "src/base/atomicops.h" | 7 #include "src/base/atomicops.h" |
8 #include "src/base/bits.h" | 8 #include "src/base/bits.h" |
9 #include "src/code-stubs.h" | 9 #include "src/code-stubs.h" |
10 #include "src/compilation-cache.h" | 10 #include "src/compilation-cache.h" |
(...skipping 269 matching lines...) | |
280 heap()->old_data_space()->EvictEvacuationCandidatesFromFreeLists(); | 280 heap()->old_data_space()->EvictEvacuationCandidatesFromFreeLists(); |
281 heap()->code_space()->EvictEvacuationCandidatesFromFreeLists(); | 281 heap()->code_space()->EvictEvacuationCandidatesFromFreeLists(); |
282 | 282 |
283 compacting_ = evacuation_candidates_.length() > 0; | 283 compacting_ = evacuation_candidates_.length() > 0; |
284 } | 284 } |
285 | 285 |
286 return compacting_; | 286 return compacting_; |
287 } | 287 } |
288 | 288 |
289 | 289 |
290 void MarkCompactCollector::ClearInvalidSlotsBufferEntries(PagedSpace* space) { | |
291 PageIterator it(space); | |
292 while (it.has_next()) { | |
293 Page* p = it.next(); | |
294 SlotsBuffer::RemoveInvalidSlots(heap_, p->slots_buffer()); | |
295 } | |
296 } | |
297 | |
298 | |
299 void MarkCompactCollector::ClearInvalidStoreAndSlotsBufferEntries() { | |
300 heap_->store_buffer()->ClearInvalidStoreBufferEntries(); | |
301 | |
302 ClearInvalidSlotsBufferEntries(heap_->old_pointer_space()); | |
303 ClearInvalidSlotsBufferEntries(heap_->old_data_space()); | |
304 ClearInvalidSlotsBufferEntries(heap_->code_space()); | |
305 ClearInvalidSlotsBufferEntries(heap_->cell_space()); | |
306 ClearInvalidSlotsBufferEntries(heap_->map_space()); | |
307 | |
308 LargeObjectIterator it(heap_->lo_space()); | |
309 for (HeapObject* object = it.Next(); object != NULL; object = it.Next()) { | |
310 MemoryChunk* chunk = MemoryChunk::FromAddress(object->address()); | |
311 SlotsBuffer::RemoveInvalidSlots(heap_, chunk->slots_buffer()); | |
312 } | |
313 } | |
314 | |
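Note on the shape of the new code above: ClearInvalidStoreAndSlotsBufferEntries runs one identical per-chunk scrub over five paged spaces and then, because the large-object space keeps no page list, walks its objects and takes each one's MemoryChunk directly. A standalone sketch of that traversal shape, using toy types and a hypothetical ForEachChunk helper that is not part of this CL:

  // Toy model of the traversal above: the same per-chunk action runs over
  // the paged spaces and over one chunk per large object. All names here
  // are illustrative, not V8 API.
  #include <cstdio>
  #include <functional>
  #include <vector>

  struct Chunk { const char* name; };
  struct PagedSpace { std::vector<Chunk> pages; };
  struct LargeObjectSpace { std::vector<Chunk> chunks; };  // one per object

  void ForEachChunk(const std::vector<PagedSpace*>& paged,
                    LargeObjectSpace* lo,
                    const std::function<void(Chunk&)>& action) {
    for (PagedSpace* space : paged) {
      for (Chunk& p : space->pages) action(p);
    }
    for (Chunk& c : lo->chunks) action(c);
  }

  int main() {
    PagedSpace old_pointer{{{"old-pointer/p0"}, {"old-pointer/p1"}}};
    PagedSpace code{{{"code/p0"}}};
    LargeObjectSpace lo{{{"lo/obj0"}}};
    ForEachChunk({&old_pointer, &code}, &lo,
                 [](Chunk& c) { std::printf("scrub slots of %s\n", c.name); });
    return 0;
  }

The CL itself spells the spaces out at both call sites (here and in the VERIFY_HEAP mirror below); the sketch only makes the repeated pattern explicit.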
315 | |
316 #ifdef VERIFY_HEAP | |
317 static void VerifyValidSlotsBufferEntries(Heap* heap, PagedSpace* space) { | |
318 PageIterator it(space); | |
319 while (it.has_next()) { | |
320 Page* p = it.next(); | |
321 SlotsBuffer::VerifySlots(heap, p->slots_buffer()); | |
322 } | |
323 } | |
324 | |
325 | |
326 static void VerifyValidStoreAndSlotsBufferEntries(Heap* heap) { | |
327 heap->store_buffer()->VerifyValidStoreBufferEntries(); | |
328 | |
329 VerifyValidSlotsBufferEntries(heap, heap->old_pointer_space()); | |
330 VerifyValidSlotsBufferEntries(heap, heap->old_data_space()); | |
331 VerifyValidSlotsBufferEntries(heap, heap->code_space()); | |
332 VerifyValidSlotsBufferEntries(heap, heap->cell_space()); | |
333 VerifyValidSlotsBufferEntries(heap, heap->map_space()); | |
334 | |
335 LargeObjectIterator it(heap->lo_space()); | |
336 for (HeapObject* object = it.Next(); object != NULL; object = it.Next()) { | |
337 MemoryChunk* chunk = MemoryChunk::FromAddress(object->address()); | |
338 SlotsBuffer::VerifySlots(heap, chunk->slots_buffer()); | |
339 } | |
340 } | |
341 #endif | |
342 | |
343 | |
290 void MarkCompactCollector::CollectGarbage() { | 344 void MarkCompactCollector::CollectGarbage() { |
291 // Make sure that Prepare() has been called. The individual steps below will | 345 // Make sure that Prepare() has been called. The individual steps below will |
292 // update the state as they proceed. | 346 // update the state as they proceed. |
293 DCHECK(state_ == PREPARE_GC); | 347 DCHECK(state_ == PREPARE_GC); |
294 | 348 |
295 MarkLiveObjects(); | 349 MarkLiveObjects(); |
296 DCHECK(heap_->incremental_marking()->IsStopped()); | 350 DCHECK(heap_->incremental_marking()->IsStopped()); |
297 | 351 |
298 // ClearNonLiveReferences can deoptimize code in dependent code arrays. | 352 // ClearNonLiveReferences can deoptimize code in dependent code arrays. |
299 // Process weak cells before so that weak cells in dependent code | 353 // Process weak cells before so that weak cells in dependent code |
300 // arrays are cleared or contain only live code objects. | 354 // arrays are cleared or contain only live code objects. |
301 ProcessAndClearWeakCells(); | 355 ProcessAndClearWeakCells(); |
302 | 356 |
303 if (FLAG_collect_maps) ClearNonLiveReferences(); | 357 if (FLAG_collect_maps) ClearNonLiveReferences(); |
304 | 358 |
305 ClearWeakCollections(); | 359 ClearWeakCollections(); |
306 | 360 |
307 heap_->set_encountered_weak_cells(Smi::FromInt(0)); | 361 heap_->set_encountered_weak_cells(Smi::FromInt(0)); |
308 | 362 |
309 #ifdef VERIFY_HEAP | 363 #ifdef VERIFY_HEAP |
310 if (FLAG_verify_heap) { | 364 if (FLAG_verify_heap) { |
311 VerifyMarking(heap_); | 365 VerifyMarking(heap_); |
312 } | 366 } |
313 #endif | 367 #endif |
314 | 368 |
315 heap_->store_buffer()->ClearInvalidStoreBufferEntries(); | 369 ClearInvalidStoreAndSlotsBufferEntries(); |
316 | 370 |
317 #ifdef VERIFY_HEAP | 371 #ifdef VERIFY_HEAP |
318 if (FLAG_verify_heap) { | 372 if (FLAG_verify_heap) { |
319 heap_->store_buffer()->VerifyValidStoreBufferEntries(); | 373 VerifyValidStoreAndSlotsBufferEntries(heap_); |
320 } | 374 } |
321 #endif | 375 #endif |
322 | 376 |
323 SweepSpaces(); | 377 SweepSpaces(); |
324 | 378 |
325 #ifdef VERIFY_HEAP | 379 #ifdef VERIFY_HEAP |
326 VerifyWeakEmbeddedObjectsInCode(); | 380 VerifyWeakEmbeddedObjectsInCode(); |
327 if (FLAG_collect_maps && FLAG_omit_map_checks_for_leaf_maps) { | 381 if (FLAG_collect_maps && FLAG_omit_map_checks_for_leaf_maps) { |
328 VerifyOmittedMapChecks(); | 382 VerifyOmittedMapChecks(); |
329 } | 383 } |
(...skipping 377 matching lines...) | |
707 | 761 |
708 intptr_t estimated_release = 0; | 762 intptr_t estimated_release = 0; |
709 | 763 |
710 Candidate candidates[kMaxMaxEvacuationCandidates]; | 764 Candidate candidates[kMaxMaxEvacuationCandidates]; |
711 | 765 |
712 max_evacuation_candidates = | 766 max_evacuation_candidates = |
713 Min(kMaxMaxEvacuationCandidates, max_evacuation_candidates); | 767 Min(kMaxMaxEvacuationCandidates, max_evacuation_candidates); |
714 | 768 |
715 int count = 0; | 769 int count = 0; |
716 int fragmentation = 0; | 770 int fragmentation = 0; |
771 int page_number = 0; | |
717 Candidate* least = NULL; | 772 Candidate* least = NULL; |
718 | 773 |
719 PageIterator it(space); | 774 PageIterator it(space); |
720 while (it.has_next()) { | 775 while (it.has_next()) { |
721 Page* p = it.next(); | 776 Page* p = it.next(); |
722 if (p->NeverEvacuate()) continue; | 777 if (p->NeverEvacuate()) continue; |
723 | 778 |
724 // Invariant: Evacuation candidates are just created when marking is | 779 // Invariant: Evacuation candidates are just created when marking is |
725 // started. At the end of a GC all evacuation candidates are cleared and | 780 // started. At the end of a GC all evacuation candidates are cleared and |
726 // their slot buffers are released. | 781 // their slot buffers are released. |
727 CHECK(!p->IsEvacuationCandidate()); | 782 CHECK(!p->IsEvacuationCandidate()); |
728 CHECK(p->slots_buffer() == NULL); | 783 CHECK(p->slots_buffer() == NULL); |
729 | 784 |
730 if (FLAG_stress_compaction) { | 785 if (FLAG_stress_compaction) { |
731 unsigned int counter = space->heap()->ms_count(); | 786 if (FLAG_manual_evacuation_candidates_selection) { |
732 uintptr_t page_number = reinterpret_cast<uintptr_t>(p) >> kPageSizeBits; | 787 if (p->IsFlagSet(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING)) { |
733 if ((counter & 1) == (page_number & 1)) fragmentation = 1; | 788 p->ClearFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING); |
789 fragmentation = 1; | |
790 } | |
791 } else { | |
792 unsigned int counter = space->heap()->ms_count(); | |
793 if ((counter & 1) == (page_number & 1)) fragmentation = 1; | |
794 page_number++; | |
795 } | |
734 } else if (mode == REDUCE_MEMORY_FOOTPRINT) { | 796 } else if (mode == REDUCE_MEMORY_FOOTPRINT) { |
735 // Don't try to release too many pages. | 797 // Don't try to release too many pages. |
736 if (estimated_release >= over_reserved) { | 798 if (estimated_release >= over_reserved) { |
737 continue; | 799 continue; |
738 } | 800 } |
739 | 801 |
740 intptr_t free_bytes = 0; | 802 intptr_t free_bytes = 0; |
741 | 803 |
742 if (!p->WasSwept()) { | 804 if (!p->WasSwept()) { |
743 free_bytes = (p->area_size() - p->LiveBytes()); | 805 free_bytes = (p->area_size() - p->LiveBytes()); |
(...skipping 2275 matching lines...) | |
3019 if (allocation.To(&target)) { | 3081 if (allocation.To(&target)) { |
3020 MigrateObject(target, object, object_size, target_space->identity()); | 3082 MigrateObject(target, object, object_size, target_space->identity()); |
3021 heap()->IncrementPromotedObjectsSize(object_size); | 3083 heap()->IncrementPromotedObjectsSize(object_size); |
3022 return true; | 3084 return true; |
3023 } | 3085 } |
3024 | 3086 |
3025 return false; | 3087 return false; |
3026 } | 3088 } |
3027 | 3089 |
3028 | 3090 |
3029 bool MarkCompactCollector::IsSlotInBlackObject(Page* p, Address slot) { | 3091 bool MarkCompactCollector::IsSlotInBlackObject(Page* p, Address slot, |
3092 HeapObject** out_object) { | |
3030 // This function does not support large objects right now. | 3093 // This function does not support large objects right now. |
3031 Space* owner = p->owner(); | 3094 Space* owner = p->owner(); |
3032 if (owner == heap_->lo_space() || owner == NULL) return true; | 3095 if (owner == heap_->lo_space() || owner == NULL) { |
3096 *out_object = NULL; | |
3097 return true; | |
3098 } | |
3033 | 3099 |
3034 uint32_t mark_bit_index = p->AddressToMarkbitIndex(slot); | 3100 uint32_t mark_bit_index = p->AddressToMarkbitIndex(slot); |
3035 unsigned int start_index = mark_bit_index >> Bitmap::kBitsPerCellLog2; | 3101 unsigned int start_index = mark_bit_index >> Bitmap::kBitsPerCellLog2; |
3036 MarkBit::CellType index_in_cell = 1U | 3102 MarkBit::CellType index_in_cell = 1U |
3037 << (mark_bit_index & Bitmap::kBitIndexMask); | 3103 << (mark_bit_index & Bitmap::kBitIndexMask); |
3038 MarkBit::CellType* cells = p->markbits()->cells(); | 3104 MarkBit::CellType* cells = p->markbits()->cells(); |
3039 Address cell_base = p->area_start(); | 3105 Address cell_base = p->area_start(); |
3040 unsigned int cell_base_start_index = Bitmap::IndexToCell( | 3106 unsigned int cell_base_start_index = Bitmap::IndexToCell( |
3041 Bitmap::CellAlignIndex(p->AddressToMarkbitIndex(cell_base))); | 3107 Bitmap::CellAlignIndex(p->AddressToMarkbitIndex(cell_base))); |
3042 | 3108 |
(...skipping 33 matching lines...) | |
3076 unsigned int offset = Bitmap::kBitIndexMask - leading_zeros; | 3142 unsigned int offset = Bitmap::kBitIndexMask - leading_zeros; |
3077 | 3143 |
3078 cell_base += (start_index - cell_base_start_index) * 32 * kPointerSize; | 3144 cell_base += (start_index - cell_base_start_index) * 32 * kPointerSize; |
3079 Address address = cell_base + offset * kPointerSize; | 3145 Address address = cell_base + offset * kPointerSize; |
3080 HeapObject* object = HeapObject::FromAddress(address); | 3146 HeapObject* object = HeapObject::FromAddress(address); |
3081 DCHECK(object->address() < reinterpret_cast<Address>(slot)); | 3147 DCHECK(object->address() < reinterpret_cast<Address>(slot)); |
3082 if (object->address() <= slot && | 3148 if (object->address() <= slot && |
3083 (object->address() + object->Size()) > slot) { | 3149 (object->address() + object->Size()) > slot) { |
3084 // If the slot is within the last found object in the cell, the slot is | 3150 // If the slot is within the last found object in the cell, the slot is |
3085 // in a live object. | 3151 // in a live object. |
3152 *out_object = object; | |
3086 return true; | 3153 return true; |
3087 } | 3154 } |
3088 return false; | 3155 return false; |
3089 } | 3156 } |
3090 | 3157 |
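The cell arithmetic in IsSlotInBlackObject above is easiest to check with concrete numbers: one mark bit covers one pointer-sized word and 32 bits form a cell, so a cell spans 32 * kPointerSize bytes, which is where the "* 32 * kPointerSize" rebasing of cell_base comes from. A standalone sketch of both directions of the mapping, with illustrative names rather than V8 API, assuming a 64-bit target and a page area that starts at mark-bit 0 (the real code rebases via cell_base_start_index):

  #include <cassert>
  #include <cstdint>

  constexpr int kPointerSize = 8;   // bytes per word (assumption: x64)
  constexpr int kBitsPerCell = 32;  // MarkBit::CellType is a 32-bit cell
  constexpr int kBitsPerCellLog2 = 5;

  // Map a slot address to its mark-bit index relative to the area start.
  uint32_t AddressToMarkbitIndex(uintptr_t area_start, uintptr_t addr) {
    return static_cast<uint32_t>((addr - area_start) / kPointerSize);
  }

  // Invert the mapping: the word address covered by bit `offset` of cell
  // `cell_index`, mirroring
  //   cell_base += (start_index - cell_base_start_index) * 32 * kPointerSize;
  //   Address address = cell_base + offset * kPointerSize;
  uintptr_t MarkbitToAddress(uintptr_t area_start, uint32_t cell_index,
                             uint32_t offset) {
    return area_start + (cell_index * kBitsPerCell + offset) * kPointerSize;
  }

  int main() {
    uintptr_t area_start = 0x10000;
    uintptr_t slot = area_start + 41 * kPointerSize;            // 41 words in
    uint32_t index = AddressToMarkbitIndex(area_start, slot);   // == 41
    uint32_t cell = index >> kBitsPerCellLog2;                  // cell 1
    uint32_t offset = index & (kBitsPerCell - 1);               // bit 9
    assert(cell == 1 && offset == 9);
    assert(MarkbitToAddress(area_start, cell, offset) == slot);
    return 0;
  }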
3091 | 3158 |
3092 bool MarkCompactCollector::IsSlotInBlackObjectSlow(Page* p, Address slot) { | 3159 bool MarkCompactCollector::IsSlotInBlackObjectSlow(Page* p, Address slot) { |
3093 // This function does not support large objects right now. | 3160 // This function does not support large objects right now. |
3094 Space* owner = p->owner(); | 3161 Space* owner = p->owner(); |
3095 if (owner == heap_->lo_space() || owner == NULL) return true; | 3162 if (owner == heap_->lo_space() || owner == NULL) return true; |
(...skipping 21 matching lines...) | |
3117 } | 3184 } |
3118 | 3185 |
3119 offset++; | 3186 offset++; |
3120 current_cell >>= 1; | 3187 current_cell >>= 1; |
3121 } | 3188 } |
3122 } | 3189 } |
3123 return false; | 3190 return false; |
3124 } | 3191 } |
3125 | 3192 |
3126 | 3193 |
3127 bool MarkCompactCollector::IsSlotInLiveObject(HeapObject** address, | 3194 bool MarkCompactCollector::IsSlotInLiveObject(Address slot) { |
3128 HeapObject* object) { | 3195 HeapObject* object = NULL; |
3129 // If the target object is not black, the source slot must be part | 3196 // The target object is black but we don't know if the source slot is black. |
3130 // of a non-black (dead) object. | 3197 // The source object could have died and the slot could be part of a free |
3131 if (!Marking::IsBlack(Marking::MarkBitFrom(object))) { | 3198 // space. Find out based on mark bits if the slot is part of a live object. |
3199 if (!IsSlotInBlackObject(Page::FromAddress(slot), slot, &object)) { | |
3132 return false; | 3200 return false; |
3133 } | 3201 } |
3134 | 3202 |
3135 // The target object is black but we don't know if the source slot is black. | 3203 #if V8_DOUBLE_FIELDS_UNBOXING |
3136 // The source object could have died and the slot could be part of a free | 3204 // |object| is NULL only when the slot belongs to large object space. |
3137 // space. Find out based on mark bits if the slot is part of a live object. | 3205 DCHECK(object != NULL || |
3138 if (!IsSlotInBlackObject( | 3206 Page::FromAnyPointerAddress(heap_, slot)->owner() == |
3139 Page::FromAddress(reinterpret_cast<Address>(address)), | 3207 heap_->lo_space()); |
3140 reinterpret_cast<Address>(address))) { | 3208 // We don't need to check large objects' layout descriptor since it can't |
3141 return false; | 3209 // contain in-object fields anyway. |
3210 if (object != NULL) { | |
3211 // Filter out slots that happen to point to unboxed double fields. | |
3212 LayoutDescriptorHelper helper(object->map()); | |
3213 bool has_only_tagged_fields = helper.all_fields_tagged(); | |
3214 if (!has_only_tagged_fields && | |
3215 !helper.IsTagged(static_cast<int>(slot - object->address()))) { | |
3216 return false; | |
3217 } | |
3142 } | 3218 } |
3219 #endif | |
3143 | 3220 |
3144 return true; | 3221 return true; |
3145 } | 3222 } |
3146 | 3223 |
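The V8_DOUBLE_FIELDS_UNBOXING branch above exists because an object may store raw doubles directly in its fields; a recorded slot whose offset lands on such an untagged field holds a bit pattern that can masquerade as a heap pointer and must not be treated as one. A toy model of the layout-descriptor query (illustrative names; the real LayoutDescriptorHelper also handles out-of-object descriptors):

  #include <cassert>
  #include <cstdint>

  constexpr int kPointerSize = 8;  // assumption: 64-bit target

  struct LayoutDescriptor {
    uint64_t tagged_mask;  // bit i set => in-object field i is tagged
    bool all_fields_tagged() const { return tagged_mask == ~uint64_t{0}; }
    bool IsTagged(int byte_offset) const {
      int field = byte_offset / kPointerSize;
      return ((tagged_mask >> field) & 1) != 0;
    }
  };

  int main() {
    // Field 0 tagged, field 1 an unboxed double, field 2 tagged.
    LayoutDescriptor desc{~uint64_t{0} & ~(uint64_t{1} << 1)};
    assert(!desc.all_fields_tagged());
    assert(desc.IsTagged(0 * kPointerSize));
    assert(!desc.IsTagged(1 * kPointerSize));  // a slot here is filtered out
    assert(desc.IsTagged(2 * kPointerSize));
    return 0;
  }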
3147 | 3224 |
3148 void MarkCompactCollector::VerifyIsSlotInLiveObject(HeapObject** address, | 3225 void MarkCompactCollector::VerifyIsSlotInLiveObject(Address slot, |
3149 HeapObject* object) { | 3226 HeapObject* object) { |
3150 // The target object has to be black. | 3227 // The target object has to be black. |
3151 CHECK(Marking::IsBlack(Marking::MarkBitFrom(object))); | 3228 CHECK(Marking::IsBlack(Marking::MarkBitFrom(object))); |
3152 | 3229 |
3153 // The target object is black but we don't know if the source slot is black. | 3230 // The target object is black but we don't know if the source slot is black. |
3154 // The source object could have died and the slot could be part of a free | 3231 // The source object could have died and the slot could be part of a free |
3155 // space. Use the mark bit iterator to find out about liveness of the slot. | 3232 // space. Use the mark bit iterator to find out about liveness of the slot. |
3156 CHECK(IsSlotInBlackObjectSlow( | 3233 CHECK(IsSlotInBlackObjectSlow(Page::FromAddress(slot), slot)); |
3157 Page::FromAddress(reinterpret_cast<Address>(address)), | |
3158 reinterpret_cast<Address>(address))); | |
3159 } | 3234 } |
3160 | 3235 |
3161 | 3236 |
3162 void MarkCompactCollector::EvacuateNewSpace() { | 3237 void MarkCompactCollector::EvacuateNewSpace() { |
3163 // There are soft limits in the allocation code, designed to trigger a mark | 3238 // There are soft limits in the allocation code, designed to trigger a mark |
3164 // sweep collection by failing allocations. But since we are already in | 3239 // sweep collection by failing allocations. But since we are already in |
3165 // a mark-sweep allocation, there is no sense in trying to trigger one. | 3240 // a mark-sweep allocation, there is no sense in trying to trigger one. |
3166 AlwaysAllocateScope scope(isolate()); | 3241 AlwaysAllocateScope scope(isolate()); |
3167 | 3242 |
3168 NewSpace* new_space = heap()->new_space(); | 3243 NewSpace* new_space = heap()->new_space(); |
(...skipping 1286 matching lines...) | |
4455 buffer = allocator->AllocateBuffer(buffer); | 4530 buffer = allocator->AllocateBuffer(buffer); |
4456 *buffer_address = buffer; | 4531 *buffer_address = buffer; |
4457 } | 4532 } |
4458 DCHECK(buffer->HasSpaceForTypedSlot()); | 4533 DCHECK(buffer->HasSpaceForTypedSlot()); |
4459 buffer->Add(reinterpret_cast<ObjectSlot>(type)); | 4534 buffer->Add(reinterpret_cast<ObjectSlot>(type)); |
4460 buffer->Add(reinterpret_cast<ObjectSlot>(addr)); | 4535 buffer->Add(reinterpret_cast<ObjectSlot>(addr)); |
4461 return true; | 4536 return true; |
4462 } | 4537 } |
4463 | 4538 |
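For context on the buffer encoding used by AddTo above: a typed slot occupies two consecutive entries, the slot type first and the address second. That pairing is why the scan loops just below (RemoveInvalidSlots, VerifySlots) advance slot_idx one extra step whenever they meet a typed entry. A toy model of consuming the pairing; the names and the small-value test for type tags are assumptions for illustration:

  #include <cassert>
  #include <cstddef>
  #include <cstdint>
  #include <vector>

  using Entry = uintptr_t;
  constexpr Entry kNumberOfSlotTypes = 8;  // assumption: small enum range

  // Real addresses are far above the enum range, so a magnitude test tells
  // a type tag apart from an address entry.
  bool IsTypedSlot(Entry e) { return e < kNumberOfSlotTypes; }

  // Count logical slots: an untyped entry is one slot, and a typed entry
  // plus the address that follows it is also one slot.
  size_t CountLogicalSlots(const std::vector<Entry>& buf) {
    size_t count = 0;
    for (size_t i = 0; i < buf.size(); ++i) {
      ++count;
      if (IsTypedSlot(buf[i])) ++i;  // consume the paired address entry too
    }
    return count;
  }

  int main() {
    std::vector<Entry> buf;
    buf.push_back(0x10008);  // untyped slot: a bare address
    buf.push_back(2);        // typed slot: type tag...
    buf.push_back(0x20010);  // ...and its paired address
    assert(CountLogicalSlots(buf) == 2);
    return 0;
  }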
4464 | 4539 |
4540 static Object* g_smi_slot = NULL; | |
4541 | |
4542 | |
4543 void SlotsBuffer::RemoveInvalidSlots(Heap* heap, SlotsBuffer* buffer) { | |
4544 DCHECK_EQ(Smi::FromInt(0), g_smi_slot); | |
Hannes Payer (out of office), 2015/03/24 14:47:05:
SMI 0 will indicate an embedded object slot. Why d...
Igor Sheludko, 2015/03/24 15:10:25:
We delete entries by replacing them with address o...
| |
4545 | |
4546 // Remove entries by replacing them with a dummy slot containing a smi. | |
4547 const ObjectSlot kRemovedEntry = &g_smi_slot; | |
4548 | |
4549 while (buffer != NULL) { | |
4550 SlotsBuffer::ObjectSlot* slots = buffer->slots_; | |
4551 intptr_t slots_count = buffer->idx_; | |
4552 | |
4553 for (int slot_idx = 0; slot_idx < slots_count; ++slot_idx) { | |
4554 ObjectSlot slot = slots[slot_idx]; | |
4555 if (!IsTypedSlot(slot)) { | |
4556 Object* object = *slot; | |
4557 if (object->IsHeapObject()) { | |
4558 if (heap->InNewSpace(object) || | |
4559 !heap->mark_compact_collector()->IsSlotInLiveObject( | |
4560 reinterpret_cast<Address>(slot))) { | |
4561 slots[slot_idx] = kRemovedEntry; | |
4562 } | |
4563 } | |
4564 } else { | |
4565 ++slot_idx; | |
4566 DCHECK(slot_idx < slots_count); | |
4567 } | |
4568 } | |
4569 buffer = buffer->next(); | |
4570 } | |
4571 } | |
4572 | |
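Per the review exchange above: removed entries are not compacted out of the buffer but redirected to a static cell that always reads as the smi zero. The later pointer-update pass loads *slot and ignores anything that is not a heap object, so a redirected entry becomes a harmless no-op; the DCHECK_EQ at the top of both functions guards that the cell still holds smi zero. A minimal sketch under a toy tagging scheme (low bit set marks heap objects, which matches the spirit but not the detail of V8's tagging):

  #include <cassert>
  #include <cstdint>

  using Object = uintptr_t;  // toy tagged value
  using ObjectSlot = Object*;

  // Toy tagging: low bit set marks a heap object, low bit clear marks a smi.
  bool IsHeapObject(Object o) { return (o & 1) != 0; }

  static Object g_smi_slot = 0;  // always reads as the smi zero

  void UpdateSlot(ObjectSlot slot) {
    Object target = *slot;
    if (!IsHeapObject(target)) return;  // removed entries take this exit
    // ...a real update pass would relocate `target` and store it back...
  }

  int main() {
    Object live = 0x10001;  // a tagged heap object
    ObjectSlot slots[] = {&live, &g_smi_slot /* a removed entry */};
    for (ObjectSlot s : slots) UpdateSlot(s);
    assert(g_smi_slot == 0);  // the dummy cell is never written through
    return 0;
  }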
4573 | |
4574 void SlotsBuffer::VerifySlots(Heap* heap, SlotsBuffer* buffer) { | |
4575 DCHECK_EQ(Smi::FromInt(0), g_smi_slot); | |
4576 | |
4577 while (buffer != NULL) { | |
4578 SlotsBuffer::ObjectSlot* slots = buffer->slots_; | |
4579 intptr_t slots_count = buffer->idx_; | |
4580 | |
4581 for (int slot_idx = 0; slot_idx < slots_count; ++slot_idx) { | |
4582 ObjectSlot slot = slots[slot_idx]; | |
4583 if (!IsTypedSlot(slot)) { | |
4584 Object* object = *slot; | |
4585 if (object->IsHeapObject()) { | |
4586 CHECK(!heap->InNewSpace(object)); | |
4587 CHECK(heap->mark_compact_collector()->IsSlotInLiveObject( | |
4588 reinterpret_cast<Address>(slot))); | |
4589 } | |
4590 } else { | |
4591 ++slot_idx; | |
4592 DCHECK(slot_idx < slots_count); | |
4593 } | |
4594 } | |
4595 buffer = buffer->next(); | |
4596 } | |
4597 } | |
4598 | |
4599 | |
4465 static inline SlotsBuffer::SlotType SlotTypeForRMode(RelocInfo::Mode rmode) { | 4600 static inline SlotsBuffer::SlotType SlotTypeForRMode(RelocInfo::Mode rmode) { |
4466 if (RelocInfo::IsCodeTarget(rmode)) { | 4601 if (RelocInfo::IsCodeTarget(rmode)) { |
4467 return SlotsBuffer::CODE_TARGET_SLOT; | 4602 return SlotsBuffer::CODE_TARGET_SLOT; |
4468 } else if (RelocInfo::IsEmbeddedObject(rmode)) { | 4603 } else if (RelocInfo::IsEmbeddedObject(rmode)) { |
4469 return SlotsBuffer::EMBEDDED_OBJECT_SLOT; | 4604 return SlotsBuffer::EMBEDDED_OBJECT_SLOT; |
4470 } else if (RelocInfo::IsDebugBreakSlot(rmode)) { | 4605 } else if (RelocInfo::IsDebugBreakSlot(rmode)) { |
4471 return SlotsBuffer::DEBUG_TARGET_SLOT; | 4606 return SlotsBuffer::DEBUG_TARGET_SLOT; |
4472 } else if (RelocInfo::IsJSReturn(rmode)) { | 4607 } else if (RelocInfo::IsJSReturn(rmode)) { |
4473 return SlotsBuffer::JS_RETURN_SLOT; | 4608 return SlotsBuffer::JS_RETURN_SLOT; |
4474 } | 4609 } |
(...skipping 121 matching lines...) | |
4596 SlotsBuffer* buffer = *buffer_address; | 4731 SlotsBuffer* buffer = *buffer_address; |
4597 while (buffer != NULL) { | 4732 while (buffer != NULL) { |
4598 SlotsBuffer* next_buffer = buffer->next(); | 4733 SlotsBuffer* next_buffer = buffer->next(); |
4599 DeallocateBuffer(buffer); | 4734 DeallocateBuffer(buffer); |
4600 buffer = next_buffer; | 4735 buffer = next_buffer; |
4601 } | 4736 } |
4602 *buffer_address = NULL; | 4737 *buffer_address = NULL; |
4603 } | 4738 } |
4604 } | 4739 } |
4605 } // namespace v8::internal | 4740 } // namespace v8::internal |