| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/v8.h" | 5 #include "src/v8.h" |
| 6 | 6 |
| 7 #include "src/base/atomicops.h" | 7 #include "src/base/atomicops.h" |
| 8 #include "src/code-stubs.h" | 8 #include "src/code-stubs.h" |
| 9 #include "src/compilation-cache.h" | 9 #include "src/compilation-cache.h" |
| 10 #include "src/cpu-profiler.h" | 10 #include "src/cpu-profiler.h" |
| (...skipping 30 matching lines...) |
| 41 #endif | 41 #endif |
| 42 sweep_precisely_(false), | 42 sweep_precisely_(false), |
| 43 reduce_memory_footprint_(false), | 43 reduce_memory_footprint_(false), |
| 44 abort_incremental_marking_(false), | 44 abort_incremental_marking_(false), |
| 45 marking_parity_(ODD_MARKING_PARITY), | 45 marking_parity_(ODD_MARKING_PARITY), |
| 46 compacting_(false), | 46 compacting_(false), |
| 47 was_marked_incrementally_(false), | 47 was_marked_incrementally_(false), |
| 48 sweeping_pending_(false), | 48 sweeping_pending_(false), |
| 49 pending_sweeper_jobs_semaphore_(0), | 49 pending_sweeper_jobs_semaphore_(0), |
| 50 sequential_sweeping_(false), | 50 sequential_sweeping_(false), |
| 51 tracer_(NULL), | |
| 52 migration_slots_buffer_(NULL), | 51 migration_slots_buffer_(NULL), |
| 53 heap_(heap), | 52 heap_(heap), |
| 54 code_flusher_(NULL), | 53 code_flusher_(NULL), |
| 55 have_code_to_deoptimize_(false) { } | 54 have_code_to_deoptimize_(false) { } |
| 56 | 55 |
| 57 #ifdef VERIFY_HEAP | 56 #ifdef VERIFY_HEAP |
| 58 class VerifyMarkingVisitor: public ObjectVisitor { | 57 class VerifyMarkingVisitor: public ObjectVisitor { |
| 59 public: | 58 public: |
| 60 explicit VerifyMarkingVisitor(Heap* heap) : heap_(heap) {} | 59 explicit VerifyMarkingVisitor(Heap* heap) : heap_(heap) {} |
| 61 | 60 |
| (...skipping 371 matching lines...) |
| 433 #endif | 432 #endif |
| 434 | 433 |
| 435 Finish(); | 434 Finish(); |
| 436 | 435 |
| 437 if (marking_parity_ == EVEN_MARKING_PARITY) { | 436 if (marking_parity_ == EVEN_MARKING_PARITY) { |
| 438 marking_parity_ = ODD_MARKING_PARITY; | 437 marking_parity_ = ODD_MARKING_PARITY; |
| 439 } else { | 438 } else { |
| 440 ASSERT(marking_parity_ == ODD_MARKING_PARITY); | 439 ASSERT(marking_parity_ == ODD_MARKING_PARITY); |
| 441 marking_parity_ = EVEN_MARKING_PARITY; | 440 marking_parity_ = EVEN_MARKING_PARITY; |
| 442 } | 441 } |
| 443 | |
| 444 tracer_ = NULL; | |
| 445 } | 442 } |
| 446 | 443 |
| 447 | 444 |
| 448 #ifdef VERIFY_HEAP | 445 #ifdef VERIFY_HEAP |
| 449 void MarkCompactCollector::VerifyMarkbitsAreClean(PagedSpace* space) { | 446 void MarkCompactCollector::VerifyMarkbitsAreClean(PagedSpace* space) { |
| 450 PageIterator it(space); | 447 PageIterator it(space); |
| 451 | 448 |
| 452 while (it.has_next()) { | 449 while (it.has_next()) { |
| 453 Page* p = it.next(); | 450 Page* p = it.next(); |
| 454 CHECK(p->markbits()->IsClean()); | 451 CHECK(p->markbits()->IsClean()); |
| (...skipping 483 matching lines...) |
| 938 p->ClearFlag(MemoryChunk::RESCAN_ON_EVACUATION); | 935 p->ClearFlag(MemoryChunk::RESCAN_ON_EVACUATION); |
| 939 } | 936 } |
| 940 compacting_ = false; | 937 compacting_ = false; |
| 941 evacuation_candidates_.Rewind(0); | 938 evacuation_candidates_.Rewind(0); |
| 942 invalidated_code_.Rewind(0); | 939 invalidated_code_.Rewind(0); |
| 943 } | 940 } |
| 944 ASSERT_EQ(0, evacuation_candidates_.length()); | 941 ASSERT_EQ(0, evacuation_candidates_.length()); |
| 945 } | 942 } |
| 946 | 943 |
| 947 | 944 |
| 948 void MarkCompactCollector::Prepare(GCTracer* tracer) { | 945 void MarkCompactCollector::Prepare() { |
| 949 was_marked_incrementally_ = heap()->incremental_marking()->IsMarking(); | 946 was_marked_incrementally_ = heap()->incremental_marking()->IsMarking(); |
| 950 | 947 |
| 951 // Rather than passing the tracer around we stash it in a static member | |
| 952 // variable. | |
| 953 tracer_ = tracer; | |
| 954 | |
| 955 #ifdef DEBUG | 948 #ifdef DEBUG |
| 956 ASSERT(state_ == IDLE); | 949 ASSERT(state_ == IDLE); |
| 957 state_ = PREPARE_GC; | 950 state_ = PREPARE_GC; |
| 958 #endif | 951 #endif |
| 959 | 952 |
| 960 ASSERT(!FLAG_never_compact || !FLAG_always_compact); | 953 ASSERT(!FLAG_never_compact || !FLAG_always_compact); |
| 961 | 954 |
| 962 if (IsConcurrentSweepingInProgress()) { | 955 if (IsConcurrentSweepingInProgress()) { |
| 963 // Instead of waiting we could also abort the sweeper threads here. | 956 // Instead of waiting we could also abort the sweeper threads here. |
| 964 WaitUntilSweepingCompleted(); | 957 WaitUntilSweepingCompleted(); |
| (...skipping 1318 matching lines...) |
| 2283 code->CodeIterateBody(visitor); | 2276 code->CodeIterateBody(visitor); |
| 2284 } | 2277 } |
| 2285 ProcessMarkingDeque(); | 2278 ProcessMarkingDeque(); |
| 2286 return; | 2279 return; |
| 2287 } | 2280 } |
| 2288 } | 2281 } |
| 2289 } | 2282 } |
| 2290 | 2283 |
| 2291 | 2284 |
| 2292 void MarkCompactCollector::MarkLiveObjects() { | 2285 void MarkCompactCollector::MarkLiveObjects() { |
| 2293 GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_MARK); | 2286 GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_MARK); |
| 2294 // The recursive GC marker detects when it is nearing stack overflow, | 2287 // The recursive GC marker detects when it is nearing stack overflow, |
| 2295 // and switches to a different marking system. JS interrupts interfere | 2288 // and switches to a different marking system. JS interrupts interfere |
| 2296 // with the C stack limit check. | 2289 // with the C stack limit check. |
| 2297 PostponeInterruptsScope postpone(isolate()); | 2290 PostponeInterruptsScope postpone(isolate()); |
| 2298 | 2291 |
| 2299 bool incremental_marking_overflowed = false; | 2292 bool incremental_marking_overflowed = false; |
| 2300 IncrementalMarking* incremental_marking = heap_->incremental_marking(); | 2293 IncrementalMarking* incremental_marking = heap_->incremental_marking(); |
| 2301 if (was_marked_incrementally_) { | 2294 if (was_marked_incrementally_) { |
| 2302 // Finalize the incremental marking and check whether we had an overflow. | 2295 // Finalize the incremental marking and check whether we had an overflow. |
| 2303 // Both markers use grey color to mark overflowed objects so | 2296 // Both markers use grey color to mark overflowed objects so |
| (...skipping 430 matching lines...) |
| 2734 entries, g, starts.at(g), starts.at(g + 1), new_number_of_entries); | 2727 entries, g, starts.at(g), starts.at(g + 1), new_number_of_entries); |
| 2735 new_number_of_entries += survived; | 2728 new_number_of_entries += survived; |
| 2736 } | 2729 } |
| 2737 for (int i = new_number_of_entries; i < number_of_entries; i++) { | 2730 for (int i = new_number_of_entries; i < number_of_entries; i++) { |
| 2738 entries->clear_at(i); | 2731 entries->clear_at(i); |
| 2739 } | 2732 } |
| 2740 } | 2733 } |
| 2741 | 2734 |
| 2742 | 2735 |
| 2743 void MarkCompactCollector::ProcessWeakCollections() { | 2736 void MarkCompactCollector::ProcessWeakCollections() { |
| 2744 GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_WEAKCOLLECTION_PROCESS); | 2737 GCTracer::Scope gc_scope(heap()->tracer(), |
| | 2738 GCTracer::Scope::MC_WEAKCOLLECTION_PROCESS); |
| 2745 Object* weak_collection_obj = heap()->encountered_weak_collections(); | 2739 Object* weak_collection_obj = heap()->encountered_weak_collections(); |
| 2746 while (weak_collection_obj != Smi::FromInt(0)) { | 2740 while (weak_collection_obj != Smi::FromInt(0)) { |
| 2747 JSWeakCollection* weak_collection = | 2741 JSWeakCollection* weak_collection = |
| 2748 reinterpret_cast<JSWeakCollection*>(weak_collection_obj); | 2742 reinterpret_cast<JSWeakCollection*>(weak_collection_obj); |
| 2749 ASSERT(MarkCompactCollector::IsMarked(weak_collection)); | 2743 ASSERT(MarkCompactCollector::IsMarked(weak_collection)); |
| 2750 if (weak_collection->table()->IsHashTable()) { | 2744 if (weak_collection->table()->IsHashTable()) { |
| 2751 ObjectHashTable* table = ObjectHashTable::cast(weak_collection->table()); | 2745 ObjectHashTable* table = ObjectHashTable::cast(weak_collection->table()); |
| 2752 Object** anchor = reinterpret_cast<Object**>(table->address()); | 2746 Object** anchor = reinterpret_cast<Object**>(table->address()); |
| 2753 for (int i = 0; i < table->Capacity(); i++) { | 2747 for (int i = 0; i < table->Capacity(); i++) { |
| 2754 if (MarkCompactCollector::IsMarked(HeapObject::cast(table->KeyAt(i)))) { | 2748 if (MarkCompactCollector::IsMarked(HeapObject::cast(table->KeyAt(i)))) { |
| 2755 Object** key_slot = | 2749 Object** key_slot = |
| 2756 table->RawFieldOfElementAt(ObjectHashTable::EntryToIndex(i)); | 2750 table->RawFieldOfElementAt(ObjectHashTable::EntryToIndex(i)); |
| 2757 RecordSlot(anchor, key_slot, *key_slot); | 2751 RecordSlot(anchor, key_slot, *key_slot); |
| 2758 Object** value_slot = | 2752 Object** value_slot = |
| 2759 table->RawFieldOfElementAt(ObjectHashTable::EntryToValueIndex(i)); | 2753 table->RawFieldOfElementAt(ObjectHashTable::EntryToValueIndex(i)); |
| 2760 MarkCompactMarkingVisitor::MarkObjectByPointer( | 2754 MarkCompactMarkingVisitor::MarkObjectByPointer( |
| 2761 this, anchor, value_slot); | 2755 this, anchor, value_slot); |
| 2762 } | 2756 } |
| 2763 } | 2757 } |
| 2764 } | 2758 } |
| 2765 weak_collection_obj = weak_collection->next(); | 2759 weak_collection_obj = weak_collection->next(); |
| 2766 } | 2760 } |
| 2767 } | 2761 } |
| 2768 | 2762 |
| 2769 | 2763 |
| 2770 void MarkCompactCollector::ClearWeakCollections() { | 2764 void MarkCompactCollector::ClearWeakCollections() { |
| 2771 GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_WEAKCOLLECTION_CLEAR); | 2765 GCTracer::Scope gc_scope(heap()->tracer(), |
| | 2766 GCTracer::Scope::MC_WEAKCOLLECTION_CLEAR); |
| 2772 Object* weak_collection_obj = heap()->encountered_weak_collections(); | 2767 Object* weak_collection_obj = heap()->encountered_weak_collections(); |
| 2773 while (weak_collection_obj != Smi::FromInt(0)) { | 2768 while (weak_collection_obj != Smi::FromInt(0)) { |
| 2774 JSWeakCollection* weak_collection = | 2769 JSWeakCollection* weak_collection = |
| 2775 reinterpret_cast<JSWeakCollection*>(weak_collection_obj); | 2770 reinterpret_cast<JSWeakCollection*>(weak_collection_obj); |
| 2776 ASSERT(MarkCompactCollector::IsMarked(weak_collection)); | 2771 ASSERT(MarkCompactCollector::IsMarked(weak_collection)); |
| 2777 if (weak_collection->table()->IsHashTable()) { | 2772 if (weak_collection->table()->IsHashTable()) { |
| 2778 ObjectHashTable* table = ObjectHashTable::cast(weak_collection->table()); | 2773 ObjectHashTable* table = ObjectHashTable::cast(weak_collection->table()); |
| 2779 for (int i = 0; i < table->Capacity(); i++) { | 2774 for (int i = 0; i < table->Capacity(); i++) { |
| 2780 HeapObject* key = HeapObject::cast(table->KeyAt(i)); | 2775 HeapObject* key = HeapObject::cast(table->KeyAt(i)); |
| 2781 if (!MarkCompactCollector::IsMarked(key)) { | 2776 if (!MarkCompactCollector::IsMarked(key)) { |
| (...skipping 653 matching lines...) |
| 3435 } | 3430 } |
| 3436 } | 3431 } |
| 3437 invalidated_code_.Rewind(0); | 3432 invalidated_code_.Rewind(0); |
| 3438 } | 3433 } |
| 3439 | 3434 |
| 3440 | 3435 |
| 3441 void MarkCompactCollector::EvacuateNewSpaceAndCandidates() { | 3436 void MarkCompactCollector::EvacuateNewSpaceAndCandidates() { |
| 3442 Heap::RelocationLock relocation_lock(heap()); | 3437 Heap::RelocationLock relocation_lock(heap()); |
| 3443 | 3438 |
| 3444 bool code_slots_filtering_required; | 3439 bool code_slots_filtering_required; |
| 3445 { GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_SWEEP_NEWSPACE); | 3440 { GCTracer::Scope gc_scope(heap()->tracer(), |
| | 3441 GCTracer::Scope::MC_SWEEP_NEWSPACE); |
| 3446 code_slots_filtering_required = MarkInvalidatedCode(); | 3442 code_slots_filtering_required = MarkInvalidatedCode(); |
| 3447 EvacuateNewSpace(); | 3443 EvacuateNewSpace(); |
| 3448 } | 3444 } |
| 3449 | 3445 |
| 3450 { GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_EVACUATE_PAGES); | 3446 { GCTracer::Scope gc_scope(heap()->tracer(), |
| | 3447 GCTracer::Scope::MC_EVACUATE_PAGES); |
| 3451 EvacuatePages(); | 3448 EvacuatePages(); |
| 3452 } | 3449 } |
| 3453 | 3450 |
| 3454 // Second pass: find pointers to new space and update them. | 3451 // Second pass: find pointers to new space and update them. |
| 3455 PointersUpdatingVisitor updating_visitor(heap()); | 3452 PointersUpdatingVisitor updating_visitor(heap()); |
| 3456 | 3453 |
| 3457 { GCTracer::Scope gc_scope(tracer_, | 3454 { GCTracer::Scope gc_scope(heap()->tracer(), |
| 3458 GCTracer::Scope::MC_UPDATE_NEW_TO_NEW_POINTERS); | 3455 GCTracer::Scope::MC_UPDATE_NEW_TO_NEW_POINTERS); |
| 3459 // Update pointers in to space. | 3456 // Update pointers in to space. |
| 3460 SemiSpaceIterator to_it(heap()->new_space()->bottom(), | 3457 SemiSpaceIterator to_it(heap()->new_space()->bottom(), |
| 3461 heap()->new_space()->top()); | 3458 heap()->new_space()->top()); |
| 3462 for (HeapObject* object = to_it.Next(); | 3459 for (HeapObject* object = to_it.Next(); |
| 3463 object != NULL; | 3460 object != NULL; |
| 3464 object = to_it.Next()) { | 3461 object = to_it.Next()) { |
| 3465 Map* map = object->map(); | 3462 Map* map = object->map(); |
| 3466 object->IterateBody(map->instance_type(), | 3463 object->IterateBody(map->instance_type(), |
| 3467 object->SizeFromMap(map), | 3464 object->SizeFromMap(map), |
| 3468 &updating_visitor); | 3465 &updating_visitor); |
| 3469 } | 3466 } |
| 3470 } | 3467 } |
| 3471 | 3468 |
| 3472 { GCTracer::Scope gc_scope(tracer_, | 3469 { GCTracer::Scope gc_scope(heap()->tracer(), |
| 3473 GCTracer::Scope::MC_UPDATE_ROOT_TO_NEW_POINTERS); | 3470 GCTracer::Scope::MC_UPDATE_ROOT_TO_NEW_POINTERS); |
| 3474 // Update roots. | 3471 // Update roots. |
| 3475 heap_->IterateRoots(&updating_visitor, VISIT_ALL_IN_SWEEP_NEWSPACE); | 3472 heap_->IterateRoots(&updating_visitor, VISIT_ALL_IN_SWEEP_NEWSPACE); |
| 3476 } | 3473 } |
| 3477 | 3474 |
| 3478 { GCTracer::Scope gc_scope(tracer_, | 3475 { GCTracer::Scope gc_scope(heap()->tracer(), |
| 3479 GCTracer::Scope::MC_UPDATE_OLD_TO_NEW_POINTERS); | 3476 GCTracer::Scope::MC_UPDATE_OLD_TO_NEW_POINTERS); |
| 3480 StoreBufferRebuildScope scope(heap_, | 3477 StoreBufferRebuildScope scope(heap_, |
| 3481 heap_->store_buffer(), | 3478 heap_->store_buffer(), |
| 3482 &Heap::ScavengeStoreBufferCallback); | 3479 &Heap::ScavengeStoreBufferCallback); |
| 3483 heap_->store_buffer()->IteratePointersToNewSpaceAndClearMaps( | 3480 heap_->store_buffer()->IteratePointersToNewSpaceAndClearMaps( |
| 3484 &UpdatePointer); | 3481 &UpdatePointer); |
| 3485 } | 3482 } |
| 3486 | 3483 |
| 3487 { GCTracer::Scope gc_scope(tracer_, | 3484 { GCTracer::Scope gc_scope(heap()->tracer(), |
| 3488 GCTracer::Scope::MC_UPDATE_POINTERS_TO_EVACUATED); | 3485 GCTracer::Scope::MC_UPDATE_POINTERS_TO_EVACUATED); |
| 3489 SlotsBuffer::UpdateSlotsRecordedIn(heap_, | 3486 SlotsBuffer::UpdateSlotsRecordedIn(heap_, |
| 3490 migration_slots_buffer_, | 3487 migration_slots_buffer_, |
| 3491 code_slots_filtering_required); | 3488 code_slots_filtering_required); |
| 3492 if (FLAG_trace_fragmentation) { | 3489 if (FLAG_trace_fragmentation) { |
| 3493 PrintF(" migration slots buffer: %d\n", | 3490 PrintF(" migration slots buffer: %d\n", |
| 3494 SlotsBuffer::SizeOfChain(migration_slots_buffer_)); | 3491 SlotsBuffer::SizeOfChain(migration_slots_buffer_)); |
| 3495 } | 3492 } |
| 3496 | 3493 |
| 3497 if (compacting_ && was_marked_incrementally_) { | 3494 if (compacting_ && was_marked_incrementally_) { |
| 3498 // It's difficult to filter out slots recorded for large objects. | 3495 // It's difficult to filter out slots recorded for large objects. |
| 3499 LargeObjectIterator it(heap_->lo_space()); | 3496 LargeObjectIterator it(heap_->lo_space()); |
| 3500 for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) { | 3497 for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) { |
| 3501 // LargeObjectSpace is not swept yet thus we have to skip | 3498 // LargeObjectSpace is not swept yet thus we have to skip |
| 3502 // dead objects explicitly. | 3499 // dead objects explicitly. |
| 3503 if (!IsMarked(obj)) continue; | 3500 if (!IsMarked(obj)) continue; |
| 3504 | 3501 |
| 3505 Page* p = Page::FromAddress(obj->address()); | 3502 Page* p = Page::FromAddress(obj->address()); |
| 3506 if (p->IsFlagSet(Page::RESCAN_ON_EVACUATION)) { | 3503 if (p->IsFlagSet(Page::RESCAN_ON_EVACUATION)) { |
| 3507 obj->Iterate(&updating_visitor); | 3504 obj->Iterate(&updating_visitor); |
| 3508 p->ClearFlag(Page::RESCAN_ON_EVACUATION); | 3505 p->ClearFlag(Page::RESCAN_ON_EVACUATION); |
| 3509 } | 3506 } |
| 3510 } | 3507 } |
| 3511 } | 3508 } |
| 3512 } | 3509 } |
| 3513 | 3510 |
| 3514 int npages = evacuation_candidates_.length(); | 3511 int npages = evacuation_candidates_.length(); |
| 3515 { GCTracer::Scope gc_scope( | 3512 { GCTracer::Scope gc_scope( |
| 3516 tracer_, GCTracer::Scope::MC_UPDATE_POINTERS_BETWEEN_EVACUATED); | 3513 heap()->tracer(), GCTracer::Scope::MC_UPDATE_POINTERS_BETWEEN_EVACUATED); |
| 3517 for (int i = 0; i < npages; i++) { | 3514 for (int i = 0; i < npages; i++) { |
| 3518 Page* p = evacuation_candidates_[i]; | 3515 Page* p = evacuation_candidates_[i]; |
| 3519 ASSERT(p->IsEvacuationCandidate() || | 3516 ASSERT(p->IsEvacuationCandidate() || |
| 3520 p->IsFlagSet(Page::RESCAN_ON_EVACUATION)); | 3517 p->IsFlagSet(Page::RESCAN_ON_EVACUATION)); |
| 3521 | 3518 |
| 3522 if (p->IsEvacuationCandidate()) { | 3519 if (p->IsEvacuationCandidate()) { |
| 3523 SlotsBuffer::UpdateSlotsRecordedIn(heap_, | 3520 SlotsBuffer::UpdateSlotsRecordedIn(heap_, |
| 3524 p->slots_buffer(), | 3521 p->slots_buffer(), |
| 3525 code_slots_filtering_required); | 3522 code_slots_filtering_required); |
| 3526 if (FLAG_trace_fragmentation) { | 3523 if (FLAG_trace_fragmentation) { |
| (...skipping 39 matching lines...) |
| 3566 } | 3563 } |
| 3567 break; | 3564 break; |
| 3568 default: | 3565 default: |
| 3569 UNREACHABLE(); | 3566 UNREACHABLE(); |
| 3570 break; | 3567 break; |
| 3571 } | 3568 } |
| 3572 } | 3569 } |
| 3573 } | 3570 } |
| 3574 } | 3571 } |
| 3575 | 3572 |
| 3576 GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_UPDATE_MISC_POINTERS); | 3573 GCTracer::Scope gc_scope(heap()->tracer(), |
| | 3574 GCTracer::Scope::MC_UPDATE_MISC_POINTERS); |
| 3577 | 3575 |
| 3578 // Update pointers from cells. | 3576 // Update pointers from cells. |
| 3579 HeapObjectIterator cell_iterator(heap_->cell_space()); | 3577 HeapObjectIterator cell_iterator(heap_->cell_space()); |
| 3580 for (HeapObject* cell = cell_iterator.Next(); | 3578 for (HeapObject* cell = cell_iterator.Next(); |
| 3581 cell != NULL; | 3579 cell != NULL; |
| 3582 cell = cell_iterator.Next()) { | 3580 cell = cell_iterator.Next()) { |
| 3583 if (cell->IsCell()) { | 3581 if (cell->IsCell()) { |
| 3584 Cell::BodyDescriptor::IterateBody(cell, &updating_visitor); | 3582 Cell::BodyDescriptor::IterateBody(cell, &updating_visitor); |
| 3585 } | 3583 } |
| 3586 } | 3584 } |
| (...skipping 614 matching lines...) |
| 4201 AllocationSpaceName(space->identity()), | 4199 AllocationSpaceName(space->identity()), |
| 4202 pages_swept); | 4200 pages_swept); |
| 4203 } | 4201 } |
| 4204 | 4202 |
| 4205 // Give pages that are queued to be freed back to the OS. | 4203 // Give pages that are queued to be freed back to the OS. |
| 4206 heap()->FreeQueuedChunks(); | 4204 heap()->FreeQueuedChunks(); |
| 4207 } | 4205 } |
| 4208 | 4206 |
| 4209 | 4207 |
| 4210 void MarkCompactCollector::SweepSpaces() { | 4208 void MarkCompactCollector::SweepSpaces() { |
| 4211 GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_SWEEP); | 4209 GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_SWEEP); |
| 4212 #ifdef DEBUG | 4210 #ifdef DEBUG |
| 4213 state_ = SWEEP_SPACES; | 4211 state_ = SWEEP_SPACES; |
| 4214 #endif | 4212 #endif |
| 4215 SweeperType how_to_sweep = CONSERVATIVE; | 4213 SweeperType how_to_sweep = CONSERVATIVE; |
| 4216 if (AreSweeperThreadsActivated()) { | 4214 if (AreSweeperThreadsActivated()) { |
| 4217 if (FLAG_parallel_sweeping) how_to_sweep = PARALLEL_CONSERVATIVE; | 4215 if (FLAG_parallel_sweeping) how_to_sweep = PARALLEL_CONSERVATIVE; |
| 4218 if (FLAG_concurrent_sweeping) how_to_sweep = CONCURRENT_CONSERVATIVE; | 4216 if (FLAG_concurrent_sweeping) how_to_sweep = CONCURRENT_CONSERVATIVE; |
| 4219 } | 4217 } |
| 4220 if (sweep_precisely_) how_to_sweep = PRECISE; | 4218 if (sweep_precisely_) how_to_sweep = PRECISE; |
| 4221 | 4219 |
| 4222 MoveEvacuationCandidatesToEndOfPagesList(); | 4220 MoveEvacuationCandidatesToEndOfPagesList(); |
| 4223 | 4221 |
| 4224 // Noncompacting collections simply sweep the spaces to clear the mark | 4222 // Noncompacting collections simply sweep the spaces to clear the mark |
| 4225 // bits and free the nonlive blocks (for old and map spaces). We sweep | 4223 // bits and free the nonlive blocks (for old and map spaces). We sweep |
| 4226 // the map space last because freeing non-live maps overwrites them and | 4224 // the map space last because freeing non-live maps overwrites them and |
| 4227 // the other spaces rely on possibly non-live maps to get the sizes for | 4225 // the other spaces rely on possibly non-live maps to get the sizes for |
| 4228 // non-live objects. | 4226 // non-live objects. |
| 4229 { GCTracer::Scope sweep_scope(tracer_, GCTracer::Scope::MC_SWEEP_OLDSPACE); | 4227 { GCTracer::Scope sweep_scope(heap()->tracer(), |
| | 4228 GCTracer::Scope::MC_SWEEP_OLDSPACE); |
| 4230 { SequentialSweepingScope scope(this); | 4229 { SequentialSweepingScope scope(this); |
| 4231 SweepSpace(heap()->old_pointer_space(), how_to_sweep); | 4230 SweepSpace(heap()->old_pointer_space(), how_to_sweep); |
| 4232 SweepSpace(heap()->old_data_space(), how_to_sweep); | 4231 SweepSpace(heap()->old_data_space(), how_to_sweep); |
| 4233 } | 4232 } |
| 4234 | 4233 |
| 4235 if (how_to_sweep == PARALLEL_CONSERVATIVE || | 4234 if (how_to_sweep == PARALLEL_CONSERVATIVE || |
| 4236 how_to_sweep == CONCURRENT_CONSERVATIVE) { | 4235 how_to_sweep == CONCURRENT_CONSERVATIVE) { |
| 4237 StartSweeperThreads(); | 4236 StartSweeperThreads(); |
| 4238 } | 4237 } |
| 4239 | 4238 |
| 4240 if (how_to_sweep == PARALLEL_CONSERVATIVE) { | 4239 if (how_to_sweep == PARALLEL_CONSERVATIVE) { |
| 4241 WaitUntilSweepingCompleted(); | 4240 WaitUntilSweepingCompleted(); |
| 4242 } | 4241 } |
| 4243 } | 4242 } |
| 4244 RemoveDeadInvalidatedCode(); | 4243 RemoveDeadInvalidatedCode(); |
| 4245 | 4244 |
| 4246 { GCTracer::Scope sweep_scope(tracer_, GCTracer::Scope::MC_SWEEP_CODE); | 4245 { GCTracer::Scope sweep_scope(heap()->tracer(), |
| | 4246 GCTracer::Scope::MC_SWEEP_CODE); |
| 4247 SweepSpace(heap()->code_space(), PRECISE); | 4247 SweepSpace(heap()->code_space(), PRECISE); |
| 4248 } | 4248 } |
| 4249 | 4249 |
| 4250 { GCTracer::Scope sweep_scope(tracer_, GCTracer::Scope::MC_SWEEP_CELL); | 4250 { GCTracer::Scope sweep_scope(heap()->tracer(), |
| | 4251 GCTracer::Scope::MC_SWEEP_CELL); |
| 4251 SweepSpace(heap()->cell_space(), PRECISE); | 4252 SweepSpace(heap()->cell_space(), PRECISE); |
| 4252 SweepSpace(heap()->property_cell_space(), PRECISE); | 4253 SweepSpace(heap()->property_cell_space(), PRECISE); |
| 4253 } | 4254 } |
| 4254 | 4255 |
| 4255 EvacuateNewSpaceAndCandidates(); | 4256 EvacuateNewSpaceAndCandidates(); |
| 4256 | 4257 |
| 4257 // ClearNonLiveTransitions depends on precise sweeping of map space to | 4258 // ClearNonLiveTransitions depends on precise sweeping of map space to |
| 4258 // detect whether unmarked map became dead in this collection or in one | 4259 // detect whether unmarked map became dead in this collection or in one |
| 4259 // of the previous ones. | 4260 // of the previous ones. |
| 4260 { GCTracer::Scope sweep_scope(tracer_, GCTracer::Scope::MC_SWEEP_MAP); | 4261 { GCTracer::Scope sweep_scope(heap()->tracer(), |
| | 4262 GCTracer::Scope::MC_SWEEP_MAP); |
| 4261 SweepSpace(heap()->map_space(), PRECISE); | 4263 SweepSpace(heap()->map_space(), PRECISE); |
| 4262 } | 4264 } |
| 4263 | 4265 |
| 4264 // Deallocate unmarked objects and clear marked bits for marked objects. | 4266 // Deallocate unmarked objects and clear marked bits for marked objects. |
| 4265 heap_->lo_space()->FreeUnmarkedObjects(); | 4267 heap_->lo_space()->FreeUnmarkedObjects(); |
| 4266 | 4268 |
| 4267 // Deallocate evacuated candidate pages. | 4269 // Deallocate evacuated candidate pages. |
| 4268 ReleaseEvacuationCandidates(); | 4270 ReleaseEvacuationCandidates(); |
| 4269 } | 4271 } |
| 4270 | 4272 |
| (...skipping 230 matching lines...) |
| 4501 while (buffer != NULL) { | 4503 while (buffer != NULL) { |
| 4502 SlotsBuffer* next_buffer = buffer->next(); | 4504 SlotsBuffer* next_buffer = buffer->next(); |
| 4503 DeallocateBuffer(buffer); | 4505 DeallocateBuffer(buffer); |
| 4504 buffer = next_buffer; | 4506 buffer = next_buffer; |
| 4505 } | 4507 } |
| 4506 *buffer_address = NULL; | 4508 *buffer_address = NULL; |
| 4507 } | 4509 } |
| 4508 | 4510 |
| 4509 | 4511 |
| 4510 } } // namespace v8::internal | 4512 } } // namespace v8::internal |
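
For reference, the shape of the refactoring in isolation, as a minimal compilable sketch. The types below are simplified stand-ins, not the real V8 declarations: the `GCTracer*` member that `Prepare()` used to stash (and that `CollectGarbage()` reset to NULL) is dropped, and each `GCTracer::Scope` RAII timer fetches the tracer from the heap on demand instead.

#include <cstdio>

// Stand-in for V8's GCTracer: only the RAII Scope timer pattern is shown.
class GCTracer {
 public:
  class Scope {
   public:
    enum ScopeId { MC_MARK, MC_SWEEP };
    Scope(GCTracer* tracer, ScopeId id) : tracer_(tracer), id_(id) {}
    // In the real tracer the destructor records the phase's elapsed time.
    ~Scope() { std::printf("phase %d traced\n", static_cast<int>(id_)); }
   private:
    GCTracer* tracer_;
    ScopeId id_;
  };
};

// Stand-in for Heap: after the patch the tracer is fetched from the heap.
class Heap {
 public:
  GCTracer* tracer() { return &tracer_; }
 private:
  GCTracer tracer_;
};

class MarkCompactCollector {
 public:
  explicit MarkCompactCollector(Heap* heap) : heap_(heap) {}
  // Prepare() no longer takes a GCTracer*; nothing is stashed in a member.
  void Prepare() {}
  void MarkLiveObjects() {
    // Mirrors the new call shape in the diff: heap()->tracer() at the
    // point of use, instead of the deleted tracer_ member.
    GCTracer::Scope gc_scope(heap_->tracer(), GCTracer::Scope::MC_MARK);
    // ... marking work runs here, timed by the scope ...
  }
 private:
  Heap* heap_;
  // Note: no "GCTracer* tracer_" member, so no tracer_ = NULL reset either.
};

int main() {
  Heap heap;
  MarkCompactCollector collector(&heap);
  collector.Prepare();
  collector.MarkLiveObjects();
  return 0;
}

The design point of the CL follows directly: since the heap owns the tracer for its whole lifetime, threading a `GCTracer*` through `Prepare()` and caching it in the collector only created a second, temporarily-valid copy of that pointer; reading it through `heap()->tracer()` removes the stale-pointer window and the NULL reset in `CollectGarbage()`.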