Chromium Code Reviews

Unified Diff: src/mark-compact.cc

Issue 403543002: Make GCTracer persistent. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Remove destructor and initialize last GC timestamp. Created 6 years, 5 months ago
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/v8.h"

 #include "src/base/atomicops.h"
 #include "src/code-stubs.h"
 #include "src/compilation-cache.h"
 #include "src/cpu-profiler.h"
(...skipping 30 matching lines...)
 #endif
       sweep_precisely_(false),
       reduce_memory_footprint_(false),
       abort_incremental_marking_(false),
       marking_parity_(ODD_MARKING_PARITY),
       compacting_(false),
       was_marked_incrementally_(false),
       sweeping_in_progress_(false),
       pending_sweeper_jobs_semaphore_(0),
       sequential_sweeping_(false),
-      tracer_(NULL),
       migration_slots_buffer_(NULL),
       heap_(heap),
       code_flusher_(NULL),
       have_code_to_deoptimize_(false) { }

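The deleted initializer is the collector's cached tracer_ pointer; with this change the tracer is owned by the heap for its whole lifetime and reached through heap()->tracer(). A minimal sketch of that ownership shape, with simplified names and none of the real GCTracer internals:

  // Sketch only: the heap owns one persistent tracer instead of each
  // collection stashing a short-lived pointer on the collector.
  class GCTracer { /* accumulates per-phase timings across collections */ };

  class Heap {
   public:
    GCTracer* tracer() { return &tracer_; }  // accessor the new code calls
   private:
    GCTracer tracer_;  // constructed once, lives as long as the heap
  };
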
 #ifdef VERIFY_HEAP
 class VerifyMarkingVisitor: public ObjectVisitor {
  public:
   explicit VerifyMarkingVisitor(Heap* heap) : heap_(heap) {}

(...skipping 371 matching lines...)
 #endif

   Finish();

   if (marking_parity_ == EVEN_MARKING_PARITY) {
     marking_parity_ = ODD_MARKING_PARITY;
   } else {
     ASSERT(marking_parity_ == ODD_MARKING_PARITY);
     marking_parity_ = EVEN_MARKING_PARITY;
   }
-
-  tracer_ = NULL;
 }


 #ifdef VERIFY_HEAP
 void MarkCompactCollector::VerifyMarkbitsAreClean(PagedSpace* space) {
   PageIterator it(space);

   while (it.has_next()) {
     Page* p = it.next();
     CHECK(p->markbits()->IsClean());
(...skipping 488 matching lines...)
       p->ClearFlag(MemoryChunk::RESCAN_ON_EVACUATION);
     }
     compacting_ = false;
     evacuation_candidates_.Rewind(0);
     invalidated_code_.Rewind(0);
   }
   ASSERT_EQ(0, evacuation_candidates_.length());
 }


-void MarkCompactCollector::Prepare(GCTracer* tracer) {
+void MarkCompactCollector::Prepare() {
   was_marked_incrementally_ = heap()->incremental_marking()->IsMarking();

-  // Rather than passing the tracer around we stash it in a static member
-  // variable.
-  tracer_ = tracer;
-
 #ifdef DEBUG
   ASSERT(state_ == IDLE);
   state_ = PREPARE_GC;
 #endif

   ASSERT(!FLAG_never_compact || !FLAG_always_compact);

   if (sweeping_in_progress()) {
     // Instead of waiting we could also abort the sweeper threads here.
     EnsureSweepingCompleted();
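Prepare() loses its GCTracer* parameter, and with it the removed comment's pattern of stashing a per-collection tracer in a member. Because the old tracer was a stack object owned by the caller, the cached pointer also had to be cleared at the end of every GC (the tracer_ = NULL removed from Finish above). A hedged sketch of the new shape, reusing the simplified Heap from the earlier sketch; this collector class is illustrative, not the real one:

  // Sketch only: no tracer parameter, no cached pointer, no end-of-GC reset.
  class MarkCompactCollectorSketch {
   public:
    explicit MarkCompactCollectorSketch(Heap* heap) : heap_(heap) {}
    void Prepare() {
      // was: tracer_ = tracer;  // per-collection pointer, cleared in Finish()
      GCTracer* tracer = heap_->tracer();  // fetched on demand instead
      (void)tracer;  // a real phase would hand this to a GCTracer scope
    }
   private:
    Heap* heap_;  // the only long-lived pointer the collector needs
  };
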
(...skipping 1318 matching lines...)
         code->CodeIterateBody(visitor);
       }
       ProcessMarkingDeque();
       return;
     }
   }
 }


 void MarkCompactCollector::MarkLiveObjects() {
-  GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_MARK);
+  GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_MARK);
   // The recursive GC marker detects when it is nearing stack overflow,
   // and switches to a different marking system. JS interrupts interfere
   // with the C stack limit check.
   PostponeInterruptsScope postpone(isolate());

   bool incremental_marking_overflowed = false;
   IncrementalMarking* incremental_marking = heap_->incremental_marking();
   if (was_marked_incrementally_) {
     // Finalize the incremental marking and check whether we had an overflow.
     // Both markers use grey color to mark overflowed objects so
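Every GCTracer::Scope in this file now takes heap()->tracer() instead of the cached tracer_. The Scope itself is an RAII phase timer: it records a start time on construction and charges the elapsed time to a phase bucket on destruction. A self-contained sketch of that pattern under simplified assumptions (the real GCTracer::Scope is not part of this diff):

  #include <chrono>

  // Sketch of an RAII phase timer in the style of GCTracer::Scope.
  class Tracer {
   public:
    enum ScopeId { MC_MARK, MC_SWEEP, MC_SWEEP_OLDSPACE, NUMBER_OF_SCOPES };

    class Scope {
     public:
      Scope(Tracer* tracer, ScopeId id)
          : tracer_(tracer), id_(id),
            start_(std::chrono::steady_clock::now()) {}
      ~Scope() {
        // Charge the elapsed milliseconds to this phase's bucket.
        std::chrono::duration<double, std::milli> elapsed =
            std::chrono::steady_clock::now() - start_;
        tracer_->scopes_[id_] += elapsed.count();
      }
     private:
      Tracer* tracer_;
      ScopeId id_;
      std::chrono::steady_clock::time_point start_;
    };

    double scopes_[NUMBER_OF_SCOPES] = {};  // cumulative ms per phase
  };

Because the tracer is now persistent, these buckets can accumulate across collections instead of being reset with each stack-allocated tracer.
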
(...skipping 430 matching lines...)
         entries, g, starts.at(g), starts.at(g + 1), new_number_of_entries);
     new_number_of_entries += survived;
   }
   for (int i = new_number_of_entries; i < number_of_entries; i++) {
     entries->clear_at(i);
   }
 }


 void MarkCompactCollector::ProcessWeakCollections() {
-  GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_WEAKCOLLECTION_PROCESS);
+  GCTracer::Scope gc_scope(heap()->tracer(),
+                           GCTracer::Scope::MC_WEAKCOLLECTION_PROCESS);
   Object* weak_collection_obj = heap()->encountered_weak_collections();
   while (weak_collection_obj != Smi::FromInt(0)) {
     JSWeakCollection* weak_collection =
         reinterpret_cast<JSWeakCollection*>(weak_collection_obj);
     ASSERT(MarkCompactCollector::IsMarked(weak_collection));
     if (weak_collection->table()->IsHashTable()) {
       ObjectHashTable* table = ObjectHashTable::cast(weak_collection->table());
       Object** anchor = reinterpret_cast<Object**>(table->address());
       for (int i = 0; i < table->Capacity(); i++) {
         if (MarkCompactCollector::IsMarked(HeapObject::cast(table->KeyAt(i)))) {
           Object** key_slot =
               table->RawFieldOfElementAt(ObjectHashTable::EntryToIndex(i));
           RecordSlot(anchor, key_slot, *key_slot);
           Object** value_slot =
               table->RawFieldOfElementAt(ObjectHashTable::EntryToValueIndex(i));
           MarkCompactMarkingVisitor::MarkObjectByPointer(
               this, anchor, value_slot);
         }
       }
     }
     weak_collection_obj = weak_collection->next();
   }
 }


 void MarkCompactCollector::ClearWeakCollections() {
-  GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_WEAKCOLLECTION_CLEAR);
+  GCTracer::Scope gc_scope(heap()->tracer(),
+                           GCTracer::Scope::MC_WEAKCOLLECTION_CLEAR);
   Object* weak_collection_obj = heap()->encountered_weak_collections();
   while (weak_collection_obj != Smi::FromInt(0)) {
     JSWeakCollection* weak_collection =
         reinterpret_cast<JSWeakCollection*>(weak_collection_obj);
     ASSERT(MarkCompactCollector::IsMarked(weak_collection));
     if (weak_collection->table()->IsHashTable()) {
       ObjectHashTable* table = ObjectHashTable::cast(weak_collection->table());
       for (int i = 0; i < table->Capacity(); i++) {
         HeapObject* key = HeapObject::cast(table->KeyAt(i));
         if (!MarkCompactCollector::IsMarked(key)) {
(...skipping 653 matching lines...)
     }
   }
   invalidated_code_.Rewind(0);
 }


 void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
   Heap::RelocationLock relocation_lock(heap());

   bool code_slots_filtering_required;
-  { GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_SWEEP_NEWSPACE);
+  { GCTracer::Scope gc_scope(heap()->tracer(),
+                             GCTracer::Scope::MC_SWEEP_NEWSPACE);
     code_slots_filtering_required = MarkInvalidatedCode();
     EvacuateNewSpace();
   }

-  { GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_EVACUATE_PAGES);
+  { GCTracer::Scope gc_scope(heap()->tracer(),
+                             GCTracer::Scope::MC_EVACUATE_PAGES);
     EvacuatePages();
   }

   // Second pass: find pointers to new space and update them.
   PointersUpdatingVisitor updating_visitor(heap());

-  { GCTracer::Scope gc_scope(tracer_,
+  { GCTracer::Scope gc_scope(heap()->tracer(),
                              GCTracer::Scope::MC_UPDATE_NEW_TO_NEW_POINTERS);
     // Update pointers in to space.
     SemiSpaceIterator to_it(heap()->new_space()->bottom(),
                             heap()->new_space()->top());
     for (HeapObject* object = to_it.Next();
          object != NULL;
          object = to_it.Next()) {
       Map* map = object->map();
       object->IterateBody(map->instance_type(),
                           object->SizeFromMap(map),
                           &updating_visitor);
     }
   }

-  { GCTracer::Scope gc_scope(tracer_,
+  { GCTracer::Scope gc_scope(heap()->tracer(),
                              GCTracer::Scope::MC_UPDATE_ROOT_TO_NEW_POINTERS);
     // Update roots.
     heap_->IterateRoots(&updating_visitor, VISIT_ALL_IN_SWEEP_NEWSPACE);
   }

-  { GCTracer::Scope gc_scope(tracer_,
+  { GCTracer::Scope gc_scope(heap()->tracer(),
                              GCTracer::Scope::MC_UPDATE_OLD_TO_NEW_POINTERS);
     StoreBufferRebuildScope scope(heap_,
                                   heap_->store_buffer(),
                                   &Heap::ScavengeStoreBufferCallback);
     heap_->store_buffer()->IteratePointersToNewSpaceAndClearMaps(
         &UpdatePointer);
   }

-  { GCTracer::Scope gc_scope(tracer_,
+  { GCTracer::Scope gc_scope(heap()->tracer(),
                              GCTracer::Scope::MC_UPDATE_POINTERS_TO_EVACUATED);
     SlotsBuffer::UpdateSlotsRecordedIn(heap_,
                                        migration_slots_buffer_,
                                        code_slots_filtering_required);
     if (FLAG_trace_fragmentation) {
       PrintF("  migration slots buffer: %d\n",
              SlotsBuffer::SizeOfChain(migration_slots_buffer_));
     }

     if (compacting_ && was_marked_incrementally_) {
       // It's difficult to filter out slots recorded for large objects.
       LargeObjectIterator it(heap_->lo_space());
       for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) {
         // LargeObjectSpace is not swept yet thus we have to skip
         // dead objects explicitly.
         if (!IsMarked(obj)) continue;

         Page* p = Page::FromAddress(obj->address());
         if (p->IsFlagSet(Page::RESCAN_ON_EVACUATION)) {
           obj->Iterate(&updating_visitor);
           p->ClearFlag(Page::RESCAN_ON_EVACUATION);
         }
       }
     }
   }

   int npages = evacuation_candidates_.length();
   { GCTracer::Scope gc_scope(
-        tracer_, GCTracer::Scope::MC_UPDATE_POINTERS_BETWEEN_EVACUATED);
+        heap()->tracer(), GCTracer::Scope::MC_UPDATE_POINTERS_BETWEEN_EVACUATED);
     for (int i = 0; i < npages; i++) {
       Page* p = evacuation_candidates_[i];
       ASSERT(p->IsEvacuationCandidate() ||
              p->IsFlagSet(Page::RESCAN_ON_EVACUATION));

       if (p->IsEvacuationCandidate()) {
         SlotsBuffer::UpdateSlotsRecordedIn(heap_,
                                            p->slots_buffer(),
                                            code_slots_filtering_required);
         if (FLAG_trace_fragmentation) {
(...skipping 39 matching lines...)
             }
             break;
           default:
             UNREACHABLE();
             break;
         }
       }
     }
   }

-  GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_UPDATE_MISC_POINTERS);
+  GCTracer::Scope gc_scope(heap()->tracer(),
+                           GCTracer::Scope::MC_UPDATE_MISC_POINTERS);

   // Update pointers from cells.
   HeapObjectIterator cell_iterator(heap_->cell_space());
   for (HeapObject* cell = cell_iterator.Next();
        cell != NULL;
        cell = cell_iterator.Next()) {
     if (cell->IsCell()) {
       Cell::BodyDescriptor::IterateBody(cell, &updating_visitor);
     }
   }
(...skipping 605 matching lines...)
            AllocationSpaceName(space->identity()),
            pages_swept);
   }

   // Give pages that are queued to be freed back to the OS.
   heap()->FreeQueuedChunks();
 }


 void MarkCompactCollector::SweepSpaces() {
-  GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_SWEEP);
+  GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_SWEEP);
 #ifdef DEBUG
   state_ = SWEEP_SPACES;
 #endif
   SweeperType how_to_sweep = CONCURRENT_CONSERVATIVE;
   if (FLAG_parallel_sweeping) how_to_sweep = PARALLEL_CONSERVATIVE;
   if (FLAG_concurrent_sweeping) how_to_sweep = CONCURRENT_CONSERVATIVE;

   if (sweep_precisely_) how_to_sweep = PRECISE;

   MoveEvacuationCandidatesToEndOfPagesList();

   // Noncompacting collections simply sweep the spaces to clear the mark
   // bits and free the nonlive blocks (for old and map spaces). We sweep
   // the map space last because freeing non-live maps overwrites them and
   // the other spaces rely on possibly non-live maps to get the sizes for
   // non-live objects.
-  { GCTracer::Scope sweep_scope(tracer_, GCTracer::Scope::MC_SWEEP_OLDSPACE);
+  { GCTracer::Scope sweep_scope(heap()->tracer(),
+                                GCTracer::Scope::MC_SWEEP_OLDSPACE);
     { SequentialSweepingScope scope(this);
       SweepSpace(heap()->old_pointer_space(), how_to_sweep);
       SweepSpace(heap()->old_data_space(), how_to_sweep);
     }

     if (how_to_sweep == PARALLEL_CONSERVATIVE ||
         how_to_sweep == CONCURRENT_CONSERVATIVE) {
       StartSweeperThreads();
     }

     if (how_to_sweep == PARALLEL_CONSERVATIVE) {
       EnsureSweepingCompleted();
     }
   }
   RemoveDeadInvalidatedCode();

-  { GCTracer::Scope sweep_scope(tracer_, GCTracer::Scope::MC_SWEEP_CODE);
+  { GCTracer::Scope sweep_scope(heap()->tracer(),
+                                GCTracer::Scope::MC_SWEEP_CODE);
     SweepSpace(heap()->code_space(), PRECISE);
   }

-  { GCTracer::Scope sweep_scope(tracer_, GCTracer::Scope::MC_SWEEP_CELL);
+  { GCTracer::Scope sweep_scope(heap()->tracer(),
+                                GCTracer::Scope::MC_SWEEP_CELL);
     SweepSpace(heap()->cell_space(), PRECISE);
     SweepSpace(heap()->property_cell_space(), PRECISE);
   }

   EvacuateNewSpaceAndCandidates();

   // ClearNonLiveTransitions depends on precise sweeping of map space to
   // detect whether unmarked map became dead in this collection or in one
   // of the previous ones.
-  { GCTracer::Scope sweep_scope(tracer_, GCTracer::Scope::MC_SWEEP_MAP);
+  { GCTracer::Scope sweep_scope(heap()->tracer(),
+                                GCTracer::Scope::MC_SWEEP_MAP);
     SweepSpace(heap()->map_space(), PRECISE);
   }

   // Deallocate unmarked objects and clear marked bits for marked objects.
   heap_->lo_space()->FreeUnmarkedObjects();

   // Deallocate evacuated candidate pages.
   ReleaseEvacuationCandidates();
 }

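SweepSpaces() shows the typical nesting: an outer MC_SWEEP scope wraps the whole phase while block-scoped sub-phase scopes time individual spaces. A usage sketch with the simplified Tracer defined earlier (illustrative names only, not the real phase structure):

  // Sketch only: nested RAII scopes charge time to both the whole phase
  // and its sub-phases; the persistent tracer keeps the totals across GCs.
  void SweepSpacesSketch(Tracer* tracer) {
    Tracer::Scope gc_scope(tracer, Tracer::MC_SWEEP);  // whole sweep phase
    {
      Tracer::Scope sweep_scope(tracer, Tracer::MC_SWEEP_OLDSPACE);
      // ... sweep the old spaces ...
    }  // sub-phase time recorded by sweep_scope's destructor
  }    // whole-phase time recorded by gc_scope's destructor
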
(...skipping 230 matching lines...)
   while (buffer != NULL) {
     SlotsBuffer* next_buffer = buffer->next();
     DeallocateBuffer(buffer);
     buffer = next_buffer;
   }
   *buffer_address = NULL;
 }


 } }  // namespace v8::internal
