Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(358)

Side by Side Diff: src/heap/mark-compact.cc

Issue 1043703003: Reland^2 "Filter invalid slots out from the SlotsBuffer after marking." (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 5 years, 8 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « src/heap/mark-compact.h ('k') | src/heap/store-buffer.cc » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/v8.h" 5 #include "src/v8.h"
6 6
7 #include "src/base/atomicops.h" 7 #include "src/base/atomicops.h"
8 #include "src/base/bits.h" 8 #include "src/base/bits.h"
9 #include "src/code-stubs.h" 9 #include "src/code-stubs.h"
10 #include "src/compilation-cache.h" 10 #include "src/compilation-cache.h"
(...skipping 269 matching lines...) Expand 10 before | Expand all | Expand 10 after
280 heap()->old_data_space()->EvictEvacuationCandidatesFromFreeLists(); 280 heap()->old_data_space()->EvictEvacuationCandidatesFromFreeLists();
281 heap()->code_space()->EvictEvacuationCandidatesFromFreeLists(); 281 heap()->code_space()->EvictEvacuationCandidatesFromFreeLists();
282 282
283 compacting_ = evacuation_candidates_.length() > 0; 283 compacting_ = evacuation_candidates_.length() > 0;
284 } 284 }
285 285
286 return compacting_; 286 return compacting_;
287 } 287 }
288 288
289 289
290 void MarkCompactCollector::ClearInvalidSlotsBufferEntries(PagedSpace* space) {
291 PageIterator it(space);
292 while (it.has_next()) {
293 Page* p = it.next();
294 SlotsBuffer::RemoveInvalidSlots(heap_, p->slots_buffer());
295 }
296 }
297
298
299 void MarkCompactCollector::ClearInvalidStoreAndSlotsBufferEntries() {
300 heap_->store_buffer()->ClearInvalidStoreBufferEntries();
301
302 ClearInvalidSlotsBufferEntries(heap_->old_pointer_space());
303 ClearInvalidSlotsBufferEntries(heap_->old_data_space());
304 ClearInvalidSlotsBufferEntries(heap_->code_space());
305 ClearInvalidSlotsBufferEntries(heap_->cell_space());
306 ClearInvalidSlotsBufferEntries(heap_->map_space());
307
308 LargeObjectIterator it(heap_->lo_space());
309 for (HeapObject* object = it.Next(); object != NULL; object = it.Next()) {
310 MemoryChunk* chunk = MemoryChunk::FromAddress(object->address());
311 SlotsBuffer::RemoveInvalidSlots(heap_, chunk->slots_buffer());
312 }
313 }
314
315
#ifdef VERIFY_HEAP
// Checks that every remaining entry in each page's slots buffer of |space|
// still refers to a valid (live, old-space) slot.
static void VerifyValidSlotsBufferEntries(Heap* heap, PagedSpace* space) {
  for (PageIterator pages(space); pages.has_next();) {
    SlotsBuffer::VerifySlots(heap, pages.next()->slots_buffer());
  }
}


// Verifies the store buffer plus the slots buffers of all paged spaces and
// of every large object's chunk.
static void VerifyValidStoreAndSlotsBufferEntries(Heap* heap) {
  heap->store_buffer()->VerifyValidStoreBufferEntries();

  VerifyValidSlotsBufferEntries(heap, heap->old_pointer_space());
  VerifyValidSlotsBufferEntries(heap, heap->old_data_space());
  VerifyValidSlotsBufferEntries(heap, heap->code_space());
  VerifyValidSlotsBufferEntries(heap, heap->cell_space());
  VerifyValidSlotsBufferEntries(heap, heap->map_space());

  LargeObjectIterator large_objects(heap->lo_space());
  HeapObject* large_object;
  while ((large_object = large_objects.Next()) != NULL) {
    MemoryChunk* chunk = MemoryChunk::FromAddress(large_object->address());
    SlotsBuffer::VerifySlots(heap, chunk->slots_buffer());
  }
}
#endif
342
343
290 void MarkCompactCollector::CollectGarbage() { 344 void MarkCompactCollector::CollectGarbage() {
291 // Make sure that Prepare() has been called. The individual steps below will 345 // Make sure that Prepare() has been called. The individual steps below will
292 // update the state as they proceed. 346 // update the state as they proceed.
293 DCHECK(state_ == PREPARE_GC); 347 DCHECK(state_ == PREPARE_GC);
294 348
295 MarkLiveObjects(); 349 MarkLiveObjects();
296 DCHECK(heap_->incremental_marking()->IsStopped()); 350 DCHECK(heap_->incremental_marking()->IsStopped());
297 351
298 // ClearNonLiveReferences can deoptimize code in dependent code arrays. 352 // ClearNonLiveReferences can deoptimize code in dependent code arrays.
299 // Process weak cells before so that weak cells in dependent code 353 // Process weak cells before so that weak cells in dependent code
300 // arrays are cleared or contain only live code objects. 354 // arrays are cleared or contain only live code objects.
301 ProcessAndClearWeakCells(); 355 ProcessAndClearWeakCells();
302 356
303 if (FLAG_collect_maps) ClearNonLiveReferences(); 357 if (FLAG_collect_maps) ClearNonLiveReferences();
304 358
305 ClearWeakCollections(); 359 ClearWeakCollections();
306 360
307 heap_->set_encountered_weak_cells(Smi::FromInt(0)); 361 heap_->set_encountered_weak_cells(Smi::FromInt(0));
308 362
309 #ifdef VERIFY_HEAP 363 #ifdef VERIFY_HEAP
310 if (FLAG_verify_heap) { 364 if (FLAG_verify_heap) {
311 VerifyMarking(heap_); 365 VerifyMarking(heap_);
312 } 366 }
313 #endif 367 #endif
314 368
315 heap_->store_buffer()->ClearInvalidStoreBufferEntries(); 369 ClearInvalidStoreAndSlotsBufferEntries();
316 370
317 #ifdef VERIFY_HEAP 371 #ifdef VERIFY_HEAP
318 if (FLAG_verify_heap) { 372 if (FLAG_verify_heap) {
319 heap_->store_buffer()->VerifyValidStoreBufferEntries(); 373 VerifyValidStoreAndSlotsBufferEntries(heap_);
320 } 374 }
321 #endif 375 #endif
322 376
323 SweepSpaces(); 377 SweepSpaces();
324 378
325 #ifdef VERIFY_HEAP 379 #ifdef VERIFY_HEAP
326 VerifyWeakEmbeddedObjectsInCode(); 380 VerifyWeakEmbeddedObjectsInCode();
327 if (FLAG_collect_maps && FLAG_omit_map_checks_for_leaf_maps) { 381 if (FLAG_collect_maps && FLAG_omit_map_checks_for_leaf_maps) {
328 VerifyOmittedMapChecks(); 382 VerifyOmittedMapChecks();
329 } 383 }
(...skipping 2718 matching lines...) Expand 10 before | Expand all | Expand 10 after
3048 if (allocation.To(&target)) { 3102 if (allocation.To(&target)) {
3049 MigrateObject(target, object, object_size, target_space->identity()); 3103 MigrateObject(target, object, object_size, target_space->identity());
3050 heap()->IncrementPromotedObjectsSize(object_size); 3104 heap()->IncrementPromotedObjectsSize(object_size);
3051 return true; 3105 return true;
3052 } 3106 }
3053 3107
3054 return false; 3108 return false;
3055 } 3109 }
3056 3110
3057 3111
3058 bool MarkCompactCollector::IsSlotInBlackObject(Page* p, Address slot) { 3112 bool MarkCompactCollector::IsSlotInBlackObject(Page* p, Address slot,
3113 HeapObject** out_object) {
3059 // This function does not support large objects right now. 3114 // This function does not support large objects right now.
3060 Space* owner = p->owner(); 3115 Space* owner = p->owner();
3061 if (owner == heap_->lo_space() || owner == NULL) return true; 3116 if (owner == heap_->lo_space() || owner == NULL) {
3117 *out_object = NULL;
3118 return true;
3119 }
3062 3120
3063 uint32_t mark_bit_index = p->AddressToMarkbitIndex(slot); 3121 uint32_t mark_bit_index = p->AddressToMarkbitIndex(slot);
3064 unsigned int start_index = mark_bit_index >> Bitmap::kBitsPerCellLog2; 3122 unsigned int start_index = mark_bit_index >> Bitmap::kBitsPerCellLog2;
3065 MarkBit::CellType index_in_cell = 1U 3123 MarkBit::CellType index_in_cell = 1U
3066 << (mark_bit_index & Bitmap::kBitIndexMask); 3124 << (mark_bit_index & Bitmap::kBitIndexMask);
3067 MarkBit::CellType* cells = p->markbits()->cells(); 3125 MarkBit::CellType* cells = p->markbits()->cells();
3068 Address cell_base = p->area_start(); 3126 Address cell_base = p->area_start();
3069 unsigned int cell_base_start_index = Bitmap::IndexToCell( 3127 unsigned int cell_base_start_index = Bitmap::IndexToCell(
3070 Bitmap::CellAlignIndex(p->AddressToMarkbitIndex(cell_base))); 3128 Bitmap::CellAlignIndex(p->AddressToMarkbitIndex(cell_base)));
3071 3129
(...skipping 33 matching lines...) Expand 10 before | Expand all | Expand 10 after
3105 unsigned int offset = Bitmap::kBitIndexMask - leading_zeros; 3163 unsigned int offset = Bitmap::kBitIndexMask - leading_zeros;
3106 3164
3107 cell_base += (start_index - cell_base_start_index) * 32 * kPointerSize; 3165 cell_base += (start_index - cell_base_start_index) * 32 * kPointerSize;
3108 Address address = cell_base + offset * kPointerSize; 3166 Address address = cell_base + offset * kPointerSize;
3109 HeapObject* object = HeapObject::FromAddress(address); 3167 HeapObject* object = HeapObject::FromAddress(address);
3110 DCHECK(object->address() < reinterpret_cast<Address>(slot)); 3168 DCHECK(object->address() < reinterpret_cast<Address>(slot));
3111 if (object->address() <= slot && 3169 if (object->address() <= slot &&
3112 (object->address() + object->Size()) > slot) { 3170 (object->address() + object->Size()) > slot) {
3113 // If the slot is within the last found object in the cell, the slot is 3171 // If the slot is within the last found object in the cell, the slot is
3114 // in a live object. 3172 // in a live object.
3173 *out_object = object;
3115 return true; 3174 return true;
3116 } 3175 }
3117 return false; 3176 return false;
3118 } 3177 }
3119 3178
3120 3179
3121 bool MarkCompactCollector::IsSlotInBlackObjectSlow(Page* p, Address slot) { 3180 bool MarkCompactCollector::IsSlotInBlackObjectSlow(Page* p, Address slot) {
3122 // This function does not support large objects right now. 3181 // This function does not support large objects right now.
3123 Space* owner = p->owner(); 3182 Space* owner = p->owner();
3124 if (owner == heap_->lo_space() || owner == NULL) return true; 3183 if (owner == heap_->lo_space() || owner == NULL) return true;
(...skipping 21 matching lines...) Expand all
3146 } 3205 }
3147 3206
3148 offset++; 3207 offset++;
3149 current_cell >>= 1; 3208 current_cell >>= 1;
3150 } 3209 }
3151 } 3210 }
3152 return false; 3211 return false;
3153 } 3212 }
3154 3213
3155 3214
3156 bool MarkCompactCollector::IsSlotInLiveObject(HeapObject** address, 3215 bool MarkCompactCollector::IsSlotInLiveObject(Address slot) {
3157 HeapObject* object) { 3216 HeapObject* object = NULL;
3158 // If the target object is not black, the source slot must be part 3217 // The target object is black but we don't know if the source slot is black.
3159 // of a non-black (dead) object. 3218 // The source object could have died and the slot could be part of a free
3160 if (!Marking::IsBlack(Marking::MarkBitFrom(object))) { 3219 // space. Find out based on mark bits if the slot is part of a live object.
3220 if (!IsSlotInBlackObject(Page::FromAddress(slot), slot, &object)) {
3161 return false; 3221 return false;
3162 } 3222 }
3163 3223
3164 // The target object is black but we don't know if the source slot is black. 3224 #if V8_DOUBLE_FIELDS_UNBOXING
3165 // The source object could have died and the slot could be part of a free 3225 // |object| is NULL only when the slot belongs to large object space.
3166 // space. Find out based on mark bits if the slot is part of a live object. 3226 DCHECK(object != NULL ||
3167 if (!IsSlotInBlackObject( 3227 Page::FromAnyPointerAddress(heap_, slot)->owner() ==
3168 Page::FromAddress(reinterpret_cast<Address>(address)), 3228 heap_->lo_space());
3169 reinterpret_cast<Address>(address))) { 3229 // We don't need to check large objects' layout descriptor since it can't
3170 return false; 3230 // contain in-object fields anyway.
3231 if (object != NULL) {
3232 // Filter out slots that happens to point to unboxed double fields.
3233 LayoutDescriptorHelper helper(object->map());
3234 bool has_only_tagged_fields = helper.all_fields_tagged();
3235 if (!has_only_tagged_fields &&
3236 !helper.IsTagged(static_cast<int>(slot - object->address()))) {
3237 return false;
3238 }
3171 } 3239 }
3240 #endif
3172 3241
3173 return true; 3242 return true;
3174 } 3243 }
3175 3244
3176 3245
3177 void MarkCompactCollector::VerifyIsSlotInLiveObject(HeapObject** address, 3246 void MarkCompactCollector::VerifyIsSlotInLiveObject(Address slot,
3178 HeapObject* object) { 3247 HeapObject* object) {
3179 // The target object has to be black. 3248 // The target object has to be black.
3180 CHECK(Marking::IsBlack(Marking::MarkBitFrom(object))); 3249 CHECK(Marking::IsBlack(Marking::MarkBitFrom(object)));
3181 3250
3182 // The target object is black but we don't know if the source slot is black. 3251 // The target object is black but we don't know if the source slot is black.
3183 // The source object could have died and the slot could be part of a free 3252 // The source object could have died and the slot could be part of a free
3184 // space. Use the mark bit iterator to find out about liveness of the slot. 3253 // space. Use the mark bit iterator to find out about liveness of the slot.
3185 CHECK(IsSlotInBlackObjectSlow( 3254 CHECK(IsSlotInBlackObjectSlow(Page::FromAddress(slot), slot));
3186 Page::FromAddress(reinterpret_cast<Address>(address)),
3187 reinterpret_cast<Address>(address)));
3188 } 3255 }
3189 3256
3190 3257
3191 void MarkCompactCollector::EvacuateNewSpace() { 3258 void MarkCompactCollector::EvacuateNewSpace() {
3192 // There are soft limits in the allocation code, designed trigger a mark 3259 // There are soft limits in the allocation code, designed trigger a mark
3193 // sweep collection by failing allocations. But since we are already in 3260 // sweep collection by failing allocations. But since we are already in
3194 // a mark-sweep allocation, there is no sense in trying to trigger one. 3261 // a mark-sweep allocation, there is no sense in trying to trigger one.
3195 AlwaysAllocateScope scope(isolate()); 3262 AlwaysAllocateScope scope(isolate());
3196 3263
3197 NewSpace* new_space = heap()->new_space(); 3264 NewSpace* new_space = heap()->new_space();
(...skipping 1298 matching lines...) Expand 10 before | Expand all | Expand 10 after
4496 buffer = allocator->AllocateBuffer(buffer); 4563 buffer = allocator->AllocateBuffer(buffer);
4497 *buffer_address = buffer; 4564 *buffer_address = buffer;
4498 } 4565 }
4499 DCHECK(buffer->HasSpaceForTypedSlot()); 4566 DCHECK(buffer->HasSpaceForTypedSlot());
4500 buffer->Add(reinterpret_cast<ObjectSlot>(type)); 4567 buffer->Add(reinterpret_cast<ObjectSlot>(type));
4501 buffer->Add(reinterpret_cast<ObjectSlot>(addr)); 4568 buffer->Add(reinterpret_cast<ObjectSlot>(addr));
4502 return true; 4569 return true;
4503 } 4570 }
4504 4571
4505 4572
4573 void SlotsBuffer::RemoveInvalidSlots(Heap* heap, SlotsBuffer* buffer) {
4574 // Remove entries by replacing them with an old-space slot containing a smi
4575 // that is located in an unmovable page.
4576 const ObjectSlot kRemovedEntry = HeapObject::RawField(
4577 heap->empty_fixed_array(), FixedArrayBase::kLengthOffset);
4578 DCHECK(Page::FromAddress(reinterpret_cast<Address>(kRemovedEntry))
4579 ->NeverEvacuate());
4580
4581 while (buffer != NULL) {
4582 SlotsBuffer::ObjectSlot* slots = buffer->slots_;
4583 intptr_t slots_count = buffer->idx_;
4584
4585 for (int slot_idx = 0; slot_idx < slots_count; ++slot_idx) {
4586 ObjectSlot slot = slots[slot_idx];
4587 if (!IsTypedSlot(slot)) {
4588 Object* object = *slot;
4589 if (object->IsHeapObject()) {
4590 if (heap->InNewSpace(object) ||
4591 !heap->mark_compact_collector()->IsSlotInLiveObject(
4592 reinterpret_cast<Address>(slot))) {
4593 slots[slot_idx] = kRemovedEntry;
4594 }
4595 }
4596 } else {
4597 ++slot_idx;
4598 DCHECK(slot_idx < slots_count);
4599 }
4600 }
4601 buffer = buffer->next();
4602 }
4603 }
4604
4605
4606 void SlotsBuffer::VerifySlots(Heap* heap, SlotsBuffer* buffer) {
4607 while (buffer != NULL) {
4608 SlotsBuffer::ObjectSlot* slots = buffer->slots_;
4609 intptr_t slots_count = buffer->idx_;
4610
4611 for (int slot_idx = 0; slot_idx < slots_count; ++slot_idx) {
4612 ObjectSlot slot = slots[slot_idx];
4613 if (!IsTypedSlot(slot)) {
4614 Object* object = *slot;
4615 if (object->IsHeapObject()) {
4616 CHECK(!heap->InNewSpace(object));
4617 CHECK(heap->mark_compact_collector()->IsSlotInLiveObject(
4618 reinterpret_cast<Address>(slot)));
4619 }
4620 } else {
4621 ++slot_idx;
4622 DCHECK(slot_idx < slots_count);
4623 }
4624 }
4625 buffer = buffer->next();
4626 }
4627 }
4628
4629
4506 static inline SlotsBuffer::SlotType SlotTypeForRMode(RelocInfo::Mode rmode) { 4630 static inline SlotsBuffer::SlotType SlotTypeForRMode(RelocInfo::Mode rmode) {
4507 if (RelocInfo::IsCodeTarget(rmode)) { 4631 if (RelocInfo::IsCodeTarget(rmode)) {
4508 return SlotsBuffer::CODE_TARGET_SLOT; 4632 return SlotsBuffer::CODE_TARGET_SLOT;
4509 } else if (RelocInfo::IsEmbeddedObject(rmode)) { 4633 } else if (RelocInfo::IsEmbeddedObject(rmode)) {
4510 return SlotsBuffer::EMBEDDED_OBJECT_SLOT; 4634 return SlotsBuffer::EMBEDDED_OBJECT_SLOT;
4511 } else if (RelocInfo::IsDebugBreakSlot(rmode)) { 4635 } else if (RelocInfo::IsDebugBreakSlot(rmode)) {
4512 return SlotsBuffer::DEBUG_TARGET_SLOT; 4636 return SlotsBuffer::DEBUG_TARGET_SLOT;
4513 } else if (RelocInfo::IsJSReturn(rmode)) { 4637 } else if (RelocInfo::IsJSReturn(rmode)) {
4514 return SlotsBuffer::JS_RETURN_SLOT; 4638 return SlotsBuffer::JS_RETURN_SLOT;
4515 } 4639 }
(...skipping 121 matching lines...) Expand 10 before | Expand all | Expand 10 after
4637 SlotsBuffer* buffer = *buffer_address; 4761 SlotsBuffer* buffer = *buffer_address;
4638 while (buffer != NULL) { 4762 while (buffer != NULL) {
4639 SlotsBuffer* next_buffer = buffer->next(); 4763 SlotsBuffer* next_buffer = buffer->next();
4640 DeallocateBuffer(buffer); 4764 DeallocateBuffer(buffer);
4641 buffer = next_buffer; 4765 buffer = next_buffer;
4642 } 4766 }
4643 *buffer_address = NULL; 4767 *buffer_address = NULL;
4644 } 4768 }
4645 } 4769 }
4646 } // namespace v8::internal 4770 } // namespace v8::internal
OLDNEW
« no previous file with comments | « src/heap/mark-compact.h ('k') | src/heap/store-buffer.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698