OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #include "src/base/atomicops.h" | 7 #include "src/base/atomicops.h" |
8 #include "src/base/bits.h" | 8 #include "src/base/bits.h" |
9 #include "src/code-stubs.h" | 9 #include "src/code-stubs.h" |
10 #include "src/compilation-cache.h" | 10 #include "src/compilation-cache.h" |
(...skipping 296 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
307 ClearWeakCollections(); | 307 ClearWeakCollections(); |
308 | 308 |
309 heap_->set_encountered_weak_cells(Smi::FromInt(0)); | 309 heap_->set_encountered_weak_cells(Smi::FromInt(0)); |
310 | 310 |
311 #ifdef VERIFY_HEAP | 311 #ifdef VERIFY_HEAP |
312 if (FLAG_verify_heap) { | 312 if (FLAG_verify_heap) { |
313 VerifyMarking(heap_); | 313 VerifyMarking(heap_); |
314 } | 314 } |
315 #endif | 315 #endif |
316 | 316 |
| 317 heap_->store_buffer()->ClearInvalidStoreBufferEntries(); |
| 318 |
| 319 #ifdef VERIFY_HEAP |
| 320 if (FLAG_verify_heap) { |
| 321 heap_->store_buffer()->VerifyValidStoreBufferEntries(); |
| 322 } |
| 323 #endif |
| 324 |
317 SweepSpaces(); | 325 SweepSpaces(); |
318 | 326 |
319 #ifdef VERIFY_HEAP | 327 #ifdef VERIFY_HEAP |
320 VerifyWeakEmbeddedObjectsInCode(); | 328 VerifyWeakEmbeddedObjectsInCode(); |
321 if (FLAG_collect_maps && FLAG_omit_map_checks_for_leaf_maps) { | 329 if (FLAG_collect_maps && FLAG_omit_map_checks_for_leaf_maps) { |
322 VerifyOmittedMapChecks(); | 330 VerifyOmittedMapChecks(); |
323 } | 331 } |
324 #endif | 332 #endif |
325 | 333 |
326 Finish(); | 334 Finish(); |
(...skipping 2714 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3041 if (allocation.To(&target)) { | 3049 if (allocation.To(&target)) { |
3042 MigrateObject(target, object, object_size, target_space->identity()); | 3050 MigrateObject(target, object, object_size, target_space->identity()); |
3043 heap()->IncrementPromotedObjectsSize(object_size); | 3051 heap()->IncrementPromotedObjectsSize(object_size); |
3044 return true; | 3052 return true; |
3045 } | 3053 } |
3046 | 3054 |
3047 return false; | 3055 return false; |
3048 } | 3056 } |
3049 | 3057 |
3050 | 3058 |
// Returns true if |slot| lies inside a black (live) object on page |p|.
// Strategy: find the closest mark bit at an address preceding the slot by
// scanning the page's mark bitmap backwards, reconstruct that object's start
// address from the bit position, and test whether the slot falls inside
// [start, start + size).  Used to reject stale recorded slots that point
// into memory freed during the current collection.
bool MarkCompactCollector::IsSlotInBlackObject(Page* p, Address slot) {
  // This function does not support large objects right now.
  // NOTE(review): a NULL owner is taken to mean "not a regular paged space",
  // and the slot is conservatively treated as live — confirm against callers.
  if (p->owner() == NULL) return true;

  // Bitmap position of the slot: |start_index| is the 32-bit cell holding
  // the slot's mark bit, |index_in_cell| is that bit as a single-bit mask.
  uint32_t mark_bit_index = p->AddressToMarkbitIndex(slot);
  unsigned int start_index = mark_bit_index >> Bitmap::kBitsPerCellLog2;
  MarkBit::CellType index_in_cell = 1U
                                    << (mark_bit_index & Bitmap::kBitIndexMask);
  MarkBit::CellType* cells = p->markbits()->cells();
  Address cell_base = p->area_start();
  // Cell index corresponding to the first usable address of the page.
  unsigned int cell_base_start_index = Bitmap::IndexToCell(
      Bitmap::CellAlignIndex(p->AddressToMarkbitIndex(cell_base)));

  // First check if the object is in the current cell.
  // |slot_mask| will select the bits of the chosen cell that may hold the
  // start of the object containing the slot.
  MarkBit::CellType slot_mask;
  // The OR trick: CountTrailingZeros32(cell | index_in_cell) is the position
  // of the first mark bit at-or-before... strictly, the minimum of the
  // cell's first set bit and the slot's bit.  If the cell's own first set
  // bit comes after that minimum, no object starts at or before the slot
  // within this cell.
  if ((cells[start_index] == 0) ||
      (base::bits::CountTrailingZeros32(cells[start_index]) >
       base::bits::CountTrailingZeros32(cells[start_index] | index_in_cell))) {
    // If we are already in the first cell, there is no live object.
    if (start_index == cell_base_start_index) return false;

    // If not, find a cell in a preceding cell slot that has a mark bit set.
    do {
      start_index--;
    } while (start_index > cell_base_start_index && cells[start_index] == 0);

    // The slot must be in a dead object if there are no preceding cells that
    // have mark bits set.
    if (cells[start_index] == 0) {
      return false;
    }

    // The object is in a preceding cell. Set the mask to find any object.
    slot_mask = 0xffffffff;
  } else {
    // We are interested in object mark bits right before the slot.
    // Note this excludes the slot's own bit; the code assumes a recorded
    // slot never coincides with an object start (see the DCHECK below).
    slot_mask = index_in_cell - 1;
  }

  MarkBit::CellType current_cell = cells[start_index];
  DCHECK(current_cell != 0);

  // Find the last live object in the cell.
  // The highest set bit under the mask is the closest object start that
  // precedes the slot.
  unsigned int leading_zeros =
      base::bits::CountLeadingZeros32(current_cell & slot_mask);
  DCHECK(leading_zeros != 32);
  unsigned int offset = Bitmap::kBitIndexMask - leading_zeros;

  // Translate (cell index, bit offset) back into a heap address: each cell
  // covers 32 pointer-size words.
  cell_base += (start_index - cell_base_start_index) * 32 * kPointerSize;
  Address address = cell_base + offset * kPointerSize;
  HeapObject* object = HeapObject::FromAddress(address);
  DCHECK(object->address() < reinterpret_cast<Address>(slot));
  if (object->address() <= slot &&
      (object->address() + object->Size()) > slot) {
    // If the slot is within the last found object in the cell, the slot is
    // in a live object.
    return true;
  }
  return false;
}
| 3119 |
| 3120 |
| 3121 bool MarkCompactCollector::IsSlotInBlackObjectSlow(Page* p, Address slot) { |
| 3122 // This function does not support large objects right now. |
| 3123 if (p->owner() == NULL) return true; |
| 3124 |
| 3125 for (MarkBitCellIterator it(p); !it.Done(); it.Advance()) { |
| 3126 Address cell_base = it.CurrentCellBase(); |
| 3127 MarkBit::CellType* cell = it.CurrentCell(); |
| 3128 |
| 3129 MarkBit::CellType current_cell = *cell; |
| 3130 if (current_cell == 0) continue; |
| 3131 |
| 3132 int offset = 0; |
| 3133 while (current_cell != 0) { |
| 3134 int trailing_zeros = base::bits::CountTrailingZeros32(current_cell); |
| 3135 current_cell >>= trailing_zeros; |
| 3136 offset += trailing_zeros; |
| 3137 Address address = cell_base + offset * kPointerSize; |
| 3138 |
| 3139 HeapObject* object = HeapObject::FromAddress(address); |
| 3140 int size = object->Size(); |
| 3141 |
| 3142 if (object->address() > slot) return false; |
| 3143 if (object->address() <= slot && slot < (object->address() + size)) { |
| 3144 return true; |
| 3145 } |
| 3146 |
| 3147 offset++; |
| 3148 current_cell >>= 1; |
| 3149 } |
| 3150 } |
| 3151 return false; |
| 3152 } |
| 3153 |
| 3154 |
| 3155 bool MarkCompactCollector::IsSlotInLiveObject(HeapObject** address, |
| 3156 HeapObject* object) { |
| 3157 // If the target object is not black, the source slot must be part |
| 3158 // of a non-black (dead) object. |
| 3159 if (!Marking::IsBlack(Marking::MarkBitFrom(object))) { |
| 3160 return false; |
| 3161 } |
| 3162 |
| 3163 // The target object is black but we don't know if the source slot is black. |
| 3164 // The source object could have died and the slot could be part of a free |
| 3165 // space. Find out based on mark bits if the slot is part of a live object. |
| 3166 if (!IsSlotInBlackObject( |
| 3167 Page::FromAddress(reinterpret_cast<Address>(address)), |
| 3168 reinterpret_cast<Address>(address))) { |
| 3169 return false; |
| 3170 } |
| 3171 |
| 3172 return true; |
| 3173 } |
| 3174 |
| 3175 |
| 3176 void MarkCompactCollector::VerifyIsSlotInLiveObject(HeapObject** address, |
| 3177 HeapObject* object) { |
| 3178 // The target object has to be black. |
| 3179 CHECK(Marking::IsBlack(Marking::MarkBitFrom(object))); |
| 3180 |
| 3181 // The target object is black but we don't know if the source slot is black. |
| 3182 // The source object could have died and the slot could be part of a free |
| 3183 // space. Use the mark bit iterator to find out about liveness of the slot. |
| 3184 CHECK(IsSlotInBlackObjectSlow( |
| 3185 Page::FromAddress(reinterpret_cast<Address>(address)), |
| 3186 reinterpret_cast<Address>(address))); |
| 3187 } |
| 3188 |
| 3189 |
3051 void MarkCompactCollector::EvacuateNewSpace() { | 3190 void MarkCompactCollector::EvacuateNewSpace() { |
3052 // There are soft limits in the allocation code, designed trigger a mark | 3191 // There are soft limits in the allocation code, designed trigger a mark |
3053 // sweep collection by failing allocations. But since we are already in | 3192 // sweep collection by failing allocations. But since we are already in |
3054 // a mark-sweep allocation, there is no sense in trying to trigger one. | 3193 // a mark-sweep allocation, there is no sense in trying to trigger one. |
3055 AlwaysAllocateScope scope(isolate()); | 3194 AlwaysAllocateScope scope(isolate()); |
3056 | 3195 |
3057 NewSpace* new_space = heap()->new_space(); | 3196 NewSpace* new_space = heap()->new_space(); |
3058 | 3197 |
3059 // Store allocation range before flipping semispaces. | 3198 // Store allocation range before flipping semispaces. |
3060 Address from_bottom = new_space->bottom(); | 3199 Address from_bottom = new_space->bottom(); |
(...skipping 1435 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4496 SlotsBuffer* buffer = *buffer_address; | 4635 SlotsBuffer* buffer = *buffer_address; |
4497 while (buffer != NULL) { | 4636 while (buffer != NULL) { |
4498 SlotsBuffer* next_buffer = buffer->next(); | 4637 SlotsBuffer* next_buffer = buffer->next(); |
4499 DeallocateBuffer(buffer); | 4638 DeallocateBuffer(buffer); |
4500 buffer = next_buffer; | 4639 buffer = next_buffer; |
4501 } | 4640 } |
4502 *buffer_address = NULL; | 4641 *buffer_address = NULL; |
4503 } | 4642 } |
4504 } | 4643 } |
4505 } // namespace v8::internal | 4644 } // namespace v8::internal |
OLD | NEW |