OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #include "src/base/atomicops.h" | 7 #include "src/base/atomicops.h" |
8 #include "src/base/bits.h" | 8 #include "src/base/bits.h" |
9 #include "src/code-stubs.h" | 9 #include "src/code-stubs.h" |
10 #include "src/compilation-cache.h" | 10 #include "src/compilation-cache.h" |
(...skipping 269 matching lines...)
280 heap()->old_data_space()->EvictEvacuationCandidatesFromFreeLists(); | 280 heap()->old_data_space()->EvictEvacuationCandidatesFromFreeLists(); |
281 heap()->code_space()->EvictEvacuationCandidatesFromFreeLists(); | 281 heap()->code_space()->EvictEvacuationCandidatesFromFreeLists(); |
282 | 282 |
283 compacting_ = evacuation_candidates_.length() > 0; | 283 compacting_ = evacuation_candidates_.length() > 0; |
284 } | 284 } |
285 | 285 |
286 return compacting_; | 286 return compacting_; |
287 } | 287 } |
288 | 288 |
289 | 289 |
290 void MarkCompactCollector::ClearInvalidSlotsBufferEntries(PagedSpace* space) { | |
291 PageIterator it(space); | |
292 while (it.has_next()) { | |
293 Page* p = it.next(); | |
294 SlotsBuffer::RemoveInvalidSlots(heap_, p->slots_buffer()); | |
295 } | |
296 } | |
297 | |
298 | |
299 void MarkCompactCollector::ClearInvalidStoreAndSlotsBufferEntries() { | |
300 heap_->store_buffer()->ClearInvalidStoreBufferEntries(); | |
301 | |
302 ClearInvalidSlotsBufferEntries(heap_->old_pointer_space()); | |
303 ClearInvalidSlotsBufferEntries(heap_->old_data_space()); | |
304 ClearInvalidSlotsBufferEntries(heap_->code_space()); | |
305 ClearInvalidSlotsBufferEntries(heap_->cell_space()); | |
306 ClearInvalidSlotsBufferEntries(heap_->map_space()); | |
307 | |
308 LargeObjectIterator it(heap_->lo_space()); | |
309 for (HeapObject* object = it.Next(); object != NULL; object = it.Next()) { | |
310 MemoryChunk* chunk = MemoryChunk::FromAddress(object->address()); | |
311 SlotsBuffer::RemoveInvalidSlots(heap_, chunk->slots_buffer()); | |
312 } | |
313 } | |
314 | |
315 | |
316 #ifdef VERIFY_HEAP | |
317 static void VerifyValidSlotsBufferEntries(Heap* heap, PagedSpace* space) { | |
318 PageIterator it(space); | |
319 while (it.has_next()) { | |
320 Page* p = it.next(); | |
321 SlotsBuffer::VerifySlots(heap, p->slots_buffer()); | |
322 } | |
323 } | |
324 | |
325 | |
326 static void VerifyValidStoreAndSlotsBufferEntries(Heap* heap) { | |
327 heap->store_buffer()->VerifyValidStoreBufferEntries(); | |
328 | |
329 VerifyValidSlotsBufferEntries(heap, heap->old_pointer_space()); | |
330 VerifyValidSlotsBufferEntries(heap, heap->old_data_space()); | |
331 VerifyValidSlotsBufferEntries(heap, heap->code_space()); | |
332 VerifyValidSlotsBufferEntries(heap, heap->cell_space()); | |
333 VerifyValidSlotsBufferEntries(heap, heap->map_space()); | |
334 | |
335 LargeObjectIterator it(heap->lo_space()); | |
336 for (HeapObject* object = it.Next(); object != NULL; object = it.Next()) { | |
337 MemoryChunk* chunk = MemoryChunk::FromAddress(object->address()); | |
338 SlotsBuffer::VerifySlots(heap, chunk->slots_buffer()); | |
339 } | |
340 } | |
341 #endif | |
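The removed helpers above (ClearInvalidSlotsBufferEntries / ClearInvalidStoreAndSlotsBufferEntries and their VERIFY_HEAP twins) all repeat one traversal shape: visit every page of each paged space, then every chunk of the large-object space, and apply a per-slots-buffer operation. Below is a minimal sketch of that shared shape; ChunkLike, PagedSpaceLike, and ForEachSlotsBuffer are hypothetical stand-ins, not V8 API.

    // Sketch only: factoring the traversal the four removed helpers share.
    // All names here are stand-ins, not V8's real classes or iterators.
    #include <functional>
    #include <vector>

    struct ChunkLike {
      void* slots_buffer;  // per-chunk slots buffer; may be NULL
    };

    struct PagedSpaceLike {
      std::vector<ChunkLike*> pages;
    };

    // Apply |visit| to the slots buffer of every page in every paged space,
    // then to every large-object chunk, mirroring the order above.
    static void ForEachSlotsBuffer(const std::vector<PagedSpaceLike*>& spaces,
                                   const std::vector<ChunkLike*>& large_chunks,
                                   const std::function<void(void*)>& visit) {
      for (size_t i = 0; i < spaces.size(); ++i) {
        for (size_t j = 0; j < spaces[i]->pages.size(); ++j) {
          visit(spaces[i]->pages[j]->slots_buffer);
        }
      }
      for (size_t k = 0; k < large_chunks.size(); ++k) {
        visit(large_chunks[k]->slots_buffer);
      }
    }

With a helper like this, the clearing and verifying variants would differ only in the callback passed in (RemoveInvalidSlots vs. VerifySlots).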
342 | |
343 | |
344 void MarkCompactCollector::CollectGarbage() { | 290 void MarkCompactCollector::CollectGarbage() { |
345 // Make sure that Prepare() has been called. The individual steps below will | 291 // Make sure that Prepare() has been called. The individual steps below will |
346 // update the state as they proceed. | 292 // update the state as they proceed. |
347 DCHECK(state_ == PREPARE_GC); | 293 DCHECK(state_ == PREPARE_GC); |
348 | 294 |
349 MarkLiveObjects(); | 295 MarkLiveObjects(); |
350 DCHECK(heap_->incremental_marking()->IsStopped()); | 296 DCHECK(heap_->incremental_marking()->IsStopped()); |
351 | 297 |
352 // ClearNonLiveReferences can deoptimize code in dependent code arrays. | 298 // ClearNonLiveReferences can deoptimize code in dependent code arrays. |
353 // Process weak cells first so that weak cells in dependent code | 299 // Process weak cells first so that weak cells in dependent code |
354 // arrays are cleared or contain only live code objects. | 300 // arrays are cleared or contain only live code objects. |
355 ProcessAndClearWeakCells(); | 301 ProcessAndClearWeakCells(); |
356 | 302 |
357 if (FLAG_collect_maps) ClearNonLiveReferences(); | 303 if (FLAG_collect_maps) ClearNonLiveReferences(); |
358 | 304 |
359 ClearWeakCollections(); | 305 ClearWeakCollections(); |
360 | 306 |
361 heap_->set_encountered_weak_cells(Smi::FromInt(0)); | 307 heap_->set_encountered_weak_cells(Smi::FromInt(0)); |
362 | 308 |
363 #ifdef VERIFY_HEAP | 309 #ifdef VERIFY_HEAP |
364 if (FLAG_verify_heap) { | 310 if (FLAG_verify_heap) { |
365 VerifyMarking(heap_); | 311 VerifyMarking(heap_); |
366 } | 312 } |
367 #endif | 313 #endif |
368 | 314 |
369 ClearInvalidStoreAndSlotsBufferEntries(); | 315 heap_->store_buffer()->ClearInvalidStoreBufferEntries(); |
370 | 316 |
371 #ifdef VERIFY_HEAP | 317 #ifdef VERIFY_HEAP |
372 if (FLAG_verify_heap) { | 318 if (FLAG_verify_heap) { |
373 VerifyValidStoreAndSlotsBufferEntries(heap_); | 319 heap_->store_buffer()->VerifyValidStoreBufferEntries(); |
374 } | 320 } |
375 #endif | 321 #endif |
376 | 322 |
377 SweepSpaces(); | 323 SweepSpaces(); |
378 | 324 |
379 #ifdef VERIFY_HEAP | 325 #ifdef VERIFY_HEAP |
380 VerifyWeakEmbeddedObjectsInCode(); | 326 VerifyWeakEmbeddedObjectsInCode(); |
381 if (FLAG_collect_maps && FLAG_omit_map_checks_for_leaf_maps) { | 327 if (FLAG_collect_maps && FLAG_omit_map_checks_for_leaf_maps) { |
382 VerifyOmittedMapChecks(); | 328 VerifyOmittedMapChecks(); |
383 } | 329 } |
(...skipping 386 matching lines...)
770 if (FLAG_trace_fragmentation && | 716 if (FLAG_trace_fragmentation && |
771 max_evacuation_candidates >= kMaxMaxEvacuationCandidates) { | 717 max_evacuation_candidates >= kMaxMaxEvacuationCandidates) { |
772 PrintF("Hit max page compaction limit of %d pages\n", | 718 PrintF("Hit max page compaction limit of %d pages\n", |
773 kMaxMaxEvacuationCandidates); | 719 kMaxMaxEvacuationCandidates); |
774 } | 720 } |
775 max_evacuation_candidates = | 721 max_evacuation_candidates = |
776 Min(kMaxMaxEvacuationCandidates, max_evacuation_candidates); | 722 Min(kMaxMaxEvacuationCandidates, max_evacuation_candidates); |
777 | 723 |
778 int count = 0; | 724 int count = 0; |
779 int fragmentation = 0; | 725 int fragmentation = 0; |
780 int page_number = 0; | |
781 Candidate* least = NULL; | 726 Candidate* least = NULL; |
782 | 727 |
783 PageIterator it(space); | 728 PageIterator it(space); |
784 while (it.has_next()) { | 729 while (it.has_next()) { |
785 Page* p = it.next(); | 730 Page* p = it.next(); |
786 if (p->NeverEvacuate()) continue; | 731 if (p->NeverEvacuate()) continue; |
787 | 732 |
788 // Invariant: Evacuation candidates are only created when marking is | 733 // Invariant: Evacuation candidates are only created when marking is |
789 // started. At the end of a GC all evacuation candidates are cleared and | 734 // started. At the end of a GC all evacuation candidates are cleared and |
790 // their slot buffers are released. | 735 // their slot buffers are released. |
791 CHECK(!p->IsEvacuationCandidate()); | 736 CHECK(!p->IsEvacuationCandidate()); |
792 CHECK(p->slots_buffer() == NULL); | 737 CHECK(p->slots_buffer() == NULL); |
793 | 738 |
794 if (FLAG_stress_compaction) { | 739 if (FLAG_stress_compaction) { |
795 if (FLAG_manual_evacuation_candidates_selection) { | 740 unsigned int counter = space->heap()->ms_count(); |
796 if (p->IsFlagSet(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING)) { | 741 uintptr_t page_number = reinterpret_cast<uintptr_t>(p) >> kPageSizeBits; |
797 p->ClearFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING); | 742 if ((counter & 1) == (page_number & 1)) fragmentation = 1; |
798 fragmentation = 1; | |
799 } | |
800 } else { | |
801 unsigned int counter = space->heap()->ms_count(); | |
802 if ((counter & 1) == (page_number & 1)) fragmentation = 1; | |
803 page_number++; | |
804 } | |
805 } else if (mode == REDUCE_MEMORY_FOOTPRINT && !FLAG_always_compact) { | 743 } else if (mode == REDUCE_MEMORY_FOOTPRINT && !FLAG_always_compact) { |
806 // Don't try to release too many pages. | 744 // Don't try to release too many pages. |
807 if (estimated_release >= over_reserved) { | 745 if (estimated_release >= over_reserved) { |
808 continue; | 746 continue; |
809 } | 747 } |
810 | 748 |
811 intptr_t free_bytes = 0; | 749 intptr_t free_bytes = 0; |
812 | 750 |
813 if (!p->WasSwept()) { | 751 if (!p->WasSwept()) { |
814 free_bytes = (p->area_size() - p->LiveBytes()); | 752 free_bytes = (p->area_size() - p->LiveBytes()); |
(...skipping 2276 matching lines...)
3091 if (allocation.To(&target)) { | 3029 if (allocation.To(&target)) { |
3092 MigrateObject(target, object, object_size, target_space->identity()); | 3030 MigrateObject(target, object, object_size, target_space->identity()); |
3093 heap()->IncrementPromotedObjectsSize(object_size); | 3031 heap()->IncrementPromotedObjectsSize(object_size); |
3094 return true; | 3032 return true; |
3095 } | 3033 } |
3096 | 3034 |
3097 return false; | 3035 return false; |
3098 } | 3036 } |
3099 | 3037 |
3100 | 3038 |
3101 bool MarkCompactCollector::IsSlotInBlackObject(Page* p, Address slot, | 3039 bool MarkCompactCollector::IsSlotInBlackObject(Page* p, Address slot) { |
3102 HeapObject** out_object) { | |
3103 // This function does not support large objects right now. | 3040 // This function does not support large objects right now. |
3104 Space* owner = p->owner(); | 3041 Space* owner = p->owner(); |
3105 if (owner == heap_->lo_space() || owner == NULL) { | 3042 if (owner == heap_->lo_space() || owner == NULL) return true; |
3106 *out_object = NULL; | |
3107 return true; | |
3108 } | |
3109 | 3043 |
3110 uint32_t mark_bit_index = p->AddressToMarkbitIndex(slot); | 3044 uint32_t mark_bit_index = p->AddressToMarkbitIndex(slot); |
3111 unsigned int start_index = mark_bit_index >> Bitmap::kBitsPerCellLog2; | 3045 unsigned int start_index = mark_bit_index >> Bitmap::kBitsPerCellLog2; |
3112 MarkBit::CellType index_in_cell = 1U | 3046 MarkBit::CellType index_in_cell = 1U |
3113 << (mark_bit_index & Bitmap::kBitIndexMask); | 3047 << (mark_bit_index & Bitmap::kBitIndexMask); |
3114 MarkBit::CellType* cells = p->markbits()->cells(); | 3048 MarkBit::CellType* cells = p->markbits()->cells(); |
3115 Address cell_base = p->area_start(); | 3049 Address cell_base = p->area_start(); |
3116 unsigned int cell_base_start_index = Bitmap::IndexToCell( | 3050 unsigned int cell_base_start_index = Bitmap::IndexToCell( |
3117 Bitmap::CellAlignIndex(p->AddressToMarkbitIndex(cell_base))); | 3051 Bitmap::CellAlignIndex(p->AddressToMarkbitIndex(cell_base))); |
3118 | 3052 |
(...skipping 33 matching lines...)
3152 unsigned int offset = Bitmap::kBitIndexMask - leading_zeros; | 3086 unsigned int offset = Bitmap::kBitIndexMask - leading_zeros; |
3153 | 3087 |
3154 cell_base += (start_index - cell_base_start_index) * 32 * kPointerSize; | 3088 cell_base += (start_index - cell_base_start_index) * 32 * kPointerSize; |
3155 Address address = cell_base + offset * kPointerSize; | 3089 Address address = cell_base + offset * kPointerSize; |
3156 HeapObject* object = HeapObject::FromAddress(address); | 3090 HeapObject* object = HeapObject::FromAddress(address); |
3157 DCHECK(object->address() < reinterpret_cast<Address>(slot)); | 3091 DCHECK(object->address() < reinterpret_cast<Address>(slot)); |
3158 if (object->address() <= slot && | 3092 if (object->address() <= slot && |
3159 (object->address() + object->Size()) > slot) { | 3093 (object->address() + object->Size()) > slot) { |
3160 // If the slot is within the last found object in the cell, the slot is | 3094 // If the slot is within the last found object in the cell, the slot is |
3161 // in a live object. | 3095 // in a live object. |
3162 *out_object = object; | |
3163 return true; | 3096 return true; |
3164 } | 3097 } |
3165 return false; | 3098 return false; |
3166 } | 3099 } |
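The bit arithmetic in IsSlotInBlackObject is easier to follow with concrete numbers. Here is a self-contained sketch of the same index math under assumed constants (one mark bit per 8-byte word, 32 bits per bitmap cell); kWordSize and the sample addresses are illustrative choices, not values taken from V8's headers.

    // Illustrative only: the address -> (cell index, bit mask) mapping used
    // by the marking-bitmap lookups above, with assumed constants.
    #include <cstdint>
    #include <cstdio>

    static const uintptr_t kWordSize = 8;        // one mark bit per word
    static const unsigned kBitsPerCellLog2 = 5;  // 32 bits per cell
    static const unsigned kBitIndexMask = (1u << kBitsPerCellLog2) - 1;

    int main() {
      uintptr_t area_start = 0x10000;  // hypothetical page area start
      uintptr_t slot = 0x10408;        // hypothetical slot address

      // Forward direction, as in AddressToMarkbitIndex and the shifts above.
      uintptr_t mark_bit_index = (slot - area_start) / kWordSize;        // 129
      uintptr_t cell_index = mark_bit_index >> kBitsPerCellLog2;         // 4
      uint32_t index_in_cell = 1u << (mark_bit_index & kBitIndexMask);   // bit 1

      // Reverse direction, as in the cell_base computation above: each cell
      // covers 32 words (32 * kPointerSize bytes) of the page area.
      uintptr_t cell_base = area_start + cell_index * 32 * kWordSize;  // 0x10400

      std::printf("cell %lu, mask %#x, cell_base %#lx\n",
                  (unsigned long)cell_index, (unsigned)index_in_cell,
                  (unsigned long)cell_base);
      return 0;
    }

For slot 0x10408 on a page whose usable area starts at 0x10000, the slot is word 129 of the area, so its mark bit lives in cell 4 at bit 1, and cell 4's first covered word starts at 0x10400, which is the same relationship the "cell_base += (start_index - cell_base_start_index) * 32 * kPointerSize" step relies on.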
3167 | 3100 |
3168 | 3101 |
3169 bool MarkCompactCollector::IsSlotInBlackObjectSlow(Page* p, Address slot) { | 3102 bool MarkCompactCollector::IsSlotInBlackObjectSlow(Page* p, Address slot) { |
3170 // This function does not support large objects right now. | 3103 // This function does not support large objects right now. |
3171 Space* owner = p->owner(); | 3104 Space* owner = p->owner(); |
3172 if (owner == heap_->lo_space() || owner == NULL) return true; | 3105 if (owner == heap_->lo_space() || owner == NULL) return true; |
(...skipping 21 matching lines...)
3194 } | 3127 } |
3195 | 3128 |
3196 offset++; | 3129 offset++; |
3197 current_cell >>= 1; | 3130 current_cell >>= 1; |
3198 } | 3131 } |
3199 } | 3132 } |
3200 return false; | 3133 return false; |
3201 } | 3134 } |
3202 | 3135 |
3203 | 3136 |
3204 bool MarkCompactCollector::IsSlotInLiveObject(Address slot) { | 3137 bool MarkCompactCollector::IsSlotInLiveObject(HeapObject** address, |
3205 HeapObject* object = NULL; | 3138 HeapObject* object) { |
| 3139 // If the target object is not black, the source slot must be part |
| 3140 // of a non-black (dead) object. |
| 3141 if (!Marking::IsBlack(Marking::MarkBitFrom(object))) { |
| 3142 return false; |
| 3143 } |
| 3144 |
3206 // The target object is black but we don't know if the source slot is black. | 3145 // The target object is black but we don't know if the source slot is black. |
3207 // The source object could have died and the slot could be part of a free | 3146 // The source object could have died and the slot could be part of a free |
3208 // space. Find out based on mark bits if the slot is part of a live object. | 3147 // space. Find out based on mark bits if the slot is part of a live object. |
3209 if (!IsSlotInBlackObject(Page::FromAddress(slot), slot, &object)) { | 3148 if (!IsSlotInBlackObject( |
| 3149 Page::FromAddress(reinterpret_cast<Address>(address)), |
| 3150 reinterpret_cast<Address>(address))) { |
3210 return false; | 3151 return false; |
3211 } | 3152 } |
3212 | 3153 |
3213 #if V8_DOUBLE_FIELDS_UNBOXING | |
3214 // |object| is NULL only when the slot belongs to large object space. | |
3215 DCHECK(object != NULL || | |
3216 Page::FromAnyPointerAddress(heap_, slot)->owner() == | |
3217 heap_->lo_space()); | |
3218 // We don't need to check large objects' layout descriptor since it can't | |
3219 // contain in-object fields anyway. | |
3220 if (object != NULL) { | |
3221 // Filter out slots that happen to point to unboxed double fields. |
3222 LayoutDescriptorHelper helper(object->map()); | |
3223 bool has_only_tagged_fields = helper.all_fields_tagged(); | |
3224 if (!has_only_tagged_fields && | |
3225 !helper.IsTagged(static_cast<int>(slot - object->address()))) { | |
3226 return false; | |
3227 } | |
3228 } | |
3229 #endif | |
3230 | |
3231 return true; | 3154 return true; |
3232 } | 3155 } |
3233 | 3156 |
3234 | 3157 |
3235 void MarkCompactCollector::VerifyIsSlotInLiveObject(Address slot, | 3158 void MarkCompactCollector::VerifyIsSlotInLiveObject(HeapObject** address, |
3236 HeapObject* object) { | 3159 HeapObject* object) { |
3237 // The target object has to be black. | 3160 // The target object has to be black. |
3238 CHECK(Marking::IsBlack(Marking::MarkBitFrom(object))); | 3161 CHECK(Marking::IsBlack(Marking::MarkBitFrom(object))); |
3239 | 3162 |
3240 // The target object is black but we don't know if the source slot is black. | 3163 // The target object is black but we don't know if the source slot is black. |
3241 // The source object could have died and the slot could be part of a free | 3164 // The source object could have died and the slot could be part of a free |
3242 // space. Use the mark bit iterator to find out about liveness of the slot. | 3165 // space. Use the mark bit iterator to find out about liveness of the slot. |
3243 CHECK(IsSlotInBlackObjectSlow(Page::FromAddress(slot), slot)); | 3166 CHECK(IsSlotInBlackObjectSlow( |
| 3167 Page::FromAddress(reinterpret_cast<Address>(address)), |
| 3168 reinterpret_cast<Address>(address))); |
3244 } | 3169 } |
3245 | 3170 |
3246 | 3171 |
3247 void MarkCompactCollector::EvacuateNewSpace() { | 3172 void MarkCompactCollector::EvacuateNewSpace() { |
3248 // There are soft limits in the allocation code, designed to trigger a mark | 3173 // There are soft limits in the allocation code, designed to trigger a mark |
3249 // sweep collection by failing allocations. But since we are already in | 3174 // sweep collection by failing allocations. But since we are already in |
3250 // a mark-sweep allocation, there is no sense in trying to trigger one. | 3175 // a mark-sweep allocation, there is no sense in trying to trigger one. |
3251 AlwaysAllocateScope scope(isolate()); | 3176 AlwaysAllocateScope scope(isolate()); |
3252 | 3177 |
3253 NewSpace* new_space = heap()->new_space(); | 3178 NewSpace* new_space = heap()->new_space(); |
(...skipping 1298 matching lines...)
4552 buffer = allocator->AllocateBuffer(buffer); | 4477 buffer = allocator->AllocateBuffer(buffer); |
4553 *buffer_address = buffer; | 4478 *buffer_address = buffer; |
4554 } | 4479 } |
4555 DCHECK(buffer->HasSpaceForTypedSlot()); | 4480 DCHECK(buffer->HasSpaceForTypedSlot()); |
4556 buffer->Add(reinterpret_cast<ObjectSlot>(type)); | 4481 buffer->Add(reinterpret_cast<ObjectSlot>(type)); |
4557 buffer->Add(reinterpret_cast<ObjectSlot>(addr)); | 4482 buffer->Add(reinterpret_cast<ObjectSlot>(addr)); |
4558 return true; | 4483 return true; |
4559 } | 4484 } |
4560 | 4485 |
4561 | 4486 |
4562 static Object* g_smi_slot = NULL; | |
4563 | |
4564 | |
4565 void SlotsBuffer::RemoveInvalidSlots(Heap* heap, SlotsBuffer* buffer) { | |
4566 DCHECK_EQ(Smi::FromInt(0), g_smi_slot); | |
4567 | |
4568 // Remove entries by replacing them with a dummy slot containing a smi. | |
4569 const ObjectSlot kRemovedEntry = &g_smi_slot; | |
4570 | |
4571 while (buffer != NULL) { | |
4572 SlotsBuffer::ObjectSlot* slots = buffer->slots_; | |
4573 intptr_t slots_count = buffer->idx_; | |
4574 | |
4575 for (int slot_idx = 0; slot_idx < slots_count; ++slot_idx) { | |
4576 ObjectSlot slot = slots[slot_idx]; | |
4577 if (!IsTypedSlot(slot)) { | |
4578 Object* object = *slot; | |
4579 if (object->IsHeapObject()) { | |
4580 if (heap->InNewSpace(object) || | |
4581 !heap->mark_compact_collector()->IsSlotInLiveObject( | |
4582 reinterpret_cast<Address>(slot))) { | |
4583 slots[slot_idx] = kRemovedEntry; | |
4584 } | |
4585 } | |
4586 } else { | |
4587 ++slot_idx; | |
4588 DCHECK(slot_idx < slots_count); | |
4589 } | |
4590 } | |
4591 buffer = buffer->next(); | |
4592 } | |
4593 } | |
4594 | |
4595 | |
4596 void SlotsBuffer::VerifySlots(Heap* heap, SlotsBuffer* buffer) { | |
4597 DCHECK_EQ(Smi::FromInt(0), g_smi_slot); | |
4598 | |
4599 while (buffer != NULL) { | |
4600 SlotsBuffer::ObjectSlot* slots = buffer->slots_; | |
4601 intptr_t slots_count = buffer->idx_; | |
4602 | |
4603 for (int slot_idx = 0; slot_idx < slots_count; ++slot_idx) { | |
4604 ObjectSlot slot = slots[slot_idx]; | |
4605 if (!IsTypedSlot(slot)) { | |
4606 Object* object = *slot; | |
4607 if (object->IsHeapObject()) { | |
4608 CHECK(!heap->InNewSpace(object)); | |
4609 CHECK(heap->mark_compact_collector()->IsSlotInLiveObject( | |
4610 reinterpret_cast<Address>(slot))); | |
4611 } | |
4612 } else { | |
4613 ++slot_idx; | |
4614 DCHECK(slot_idx < slots_count); | |
4615 } | |
4616 } | |
4617 buffer = buffer->next(); | |
4618 } | |
4619 } | |
4620 | |
4621 | |
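One detail worth noting in the two removed scans above (and in SlotsBuffer::Add near the top of this hunk): a typed slot occupies two consecutive buffer entries, a type marker followed by the raw address, which is why each loop advances slot_idx an extra step when IsTypedSlot() matches. Below is a minimal sketch of that pairing; the odd-value tag scheme is a hypothetical stand-in for V8's real IsTypedSlot() check.

    // Sketch of the two-entry "typed slot" encoding: a typed record is
    // stored as (type marker, address), so scans consume two entries at
    // a time. The tagging below is an assumed simplification.
    #include <cstdint>
    #include <cstdio>
    #include <vector>

    typedef uintptr_t Entry;

    // Hypothetical: odd values mark types (real object slots are aligned,
    // hence even). V8 instead checks for small enum values.
    static bool IsTypedEntry(Entry e) { return (e & 1) != 0; }

    static void ScanBuffer(const std::vector<Entry>& entries) {
      for (size_t i = 0; i < entries.size(); ++i) {
        if (IsTypedEntry(entries[i])) {
          // Typed record: entries[i] is the type, entries[i + 1] the address.
          std::printf("typed slot: type=%lu addr=%#lx\n",
                      (unsigned long)entries[i], (unsigned long)entries[i + 1]);
          ++i;  // skip the paired address entry, as in the loops above
        } else {
          std::printf("object slot: %#lx\n", (unsigned long)entries[i]);
        }
      }
    }

    int main() {
      std::vector<Entry> buf;
      buf.push_back(0x2000);  // plain object slot
      buf.push_back(0x3);     // type marker (odd) ...
      buf.push_back(0x2008);  // ... paired with its target address
      ScanBuffer(buf);
      return 0;
    }

Skipping the paired address entry keeps the scan from misinterpreting a raw code address as an ordinary object slot.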
4622 static inline SlotsBuffer::SlotType SlotTypeForRMode(RelocInfo::Mode rmode) { | 4487 static inline SlotsBuffer::SlotType SlotTypeForRMode(RelocInfo::Mode rmode) { |
4623 if (RelocInfo::IsCodeTarget(rmode)) { | 4488 if (RelocInfo::IsCodeTarget(rmode)) { |
4624 return SlotsBuffer::CODE_TARGET_SLOT; | 4489 return SlotsBuffer::CODE_TARGET_SLOT; |
4625 } else if (RelocInfo::IsEmbeddedObject(rmode)) { | 4490 } else if (RelocInfo::IsEmbeddedObject(rmode)) { |
4626 return SlotsBuffer::EMBEDDED_OBJECT_SLOT; | 4491 return SlotsBuffer::EMBEDDED_OBJECT_SLOT; |
4627 } else if (RelocInfo::IsDebugBreakSlot(rmode)) { | 4492 } else if (RelocInfo::IsDebugBreakSlot(rmode)) { |
4628 return SlotsBuffer::DEBUG_TARGET_SLOT; | 4493 return SlotsBuffer::DEBUG_TARGET_SLOT; |
4629 } else if (RelocInfo::IsJSReturn(rmode)) { | 4494 } else if (RelocInfo::IsJSReturn(rmode)) { |
4630 return SlotsBuffer::JS_RETURN_SLOT; | 4495 return SlotsBuffer::JS_RETURN_SLOT; |
4631 } | 4496 } |
(...skipping 145 matching lines...)
4777 SlotsBuffer* buffer = *buffer_address; | 4642 SlotsBuffer* buffer = *buffer_address; |
4778 while (buffer != NULL) { | 4643 while (buffer != NULL) { |
4779 SlotsBuffer* next_buffer = buffer->next(); | 4644 SlotsBuffer* next_buffer = buffer->next(); |
4780 DeallocateBuffer(buffer); | 4645 DeallocateBuffer(buffer); |
4781 buffer = next_buffer; | 4646 buffer = next_buffer; |
4782 } | 4647 } |
4783 *buffer_address = NULL; | 4648 *buffer_address = NULL; |
4784 } | 4649 } |
4785 } | 4650 } |
4786 } // namespace v8::internal | 4651 } // namespace v8::internal |