| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 49 matching lines...) |
| 60 #ifdef DEBUG | 60 #ifdef DEBUG |
| 61 state_(IDLE), | 61 state_(IDLE), |
| 62 #endif | 62 #endif |
| 63 sweep_precisely_(false), | 63 sweep_precisely_(false), |
| 64 reduce_memory_footprint_(false), | 64 reduce_memory_footprint_(false), |
| 65 abort_incremental_marking_(false), | 65 abort_incremental_marking_(false), |
| 66 marking_parity_(ODD_MARKING_PARITY), | 66 marking_parity_(ODD_MARKING_PARITY), |
| 67 compacting_(false), | 67 compacting_(false), |
| 68 was_marked_incrementally_(false), | 68 was_marked_incrementally_(false), |
| 69 sweeping_pending_(false), | 69 sweeping_pending_(false), |
| 70 pending_sweeper_jobs_semaphore_(0), | |
| 71 sequential_sweeping_(false), | 70 sequential_sweeping_(false), |
| 72 tracer_(NULL), | 71 tracer_(NULL), |
| 73 migration_slots_buffer_(NULL), | 72 migration_slots_buffer_(NULL), |
| 74 heap_(heap), | 73 heap_(heap), |
| 75 code_flusher_(NULL), | 74 code_flusher_(NULL), |
| 76 encountered_weak_collections_(NULL), | 75 encountered_weak_collections_(NULL), |
| 77 have_code_to_deoptimize_(false) { } | 76 have_code_to_deoptimize_(false) { } |
| 78 | 77 |
| 79 #ifdef VERIFY_HEAP | 78 #ifdef VERIFY_HEAP |
| 80 class VerifyMarkingVisitor: public ObjectVisitor { | 79 class VerifyMarkingVisitor: public ObjectVisitor { |
| 81 public: | 80 public: |
| 82 explicit VerifyMarkingVisitor(Heap* heap) : heap_(heap) {} | 81 explicit VerifyMarkingVisitor(Heap* heap) : heap_(heap) {} |
| 83 | 82 |
| 84 void VisitPointers(Object** start, Object** end) { | 83 void VisitPointers(Object** start, Object** end) { |
| 85 for (Object** current = start; current < end; current++) { | 84 for (Object** current = start; current < end; current++) { |
| 86 if ((*current)->IsHeapObject()) { | 85 if ((*current)->IsHeapObject()) { |
| 87 HeapObject* object = HeapObject::cast(*current); | 86 HeapObject* object = HeapObject::cast(*current); |
| 88 CHECK(heap_->mark_compact_collector()->IsMarked(object)); | 87 CHECK(heap_->mark_compact_collector()->IsMarked(object)); |
| 89 } | 88 } |
| 90 } | 89 } |
| 91 } | 90 } |
| 92 | 91 |
| 93 void VisitEmbeddedPointer(RelocInfo* rinfo) { | 92 void VisitEmbeddedPointer(RelocInfo* rinfo) { |
| 94 ASSERT(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT); | 93 ASSERT(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT); |
| 95 if (!rinfo->host()->IsWeakObject(rinfo->target_object())) { | 94 if (!Code::IsWeakEmbeddedObject(rinfo->host()->kind(), |
| 95 rinfo->target_object())) { |
| 96 Object* p = rinfo->target_object(); | 96 Object* p = rinfo->target_object(); |
| 97 VisitPointer(&p); | 97 VisitPointer(&p); |
| 98 } | 98 } |
| 99 } | 99 } |
| 100 | 100 |
| 101 void VisitCell(RelocInfo* rinfo) { | 101 void VisitCell(RelocInfo* rinfo) { |
| 102 Code* code = rinfo->host(); | 102 Code* code = rinfo->host(); |
| 103 ASSERT(rinfo->rmode() == RelocInfo::CELL); | 103 ASSERT(rinfo->rmode() == RelocInfo::CELL); |
| 104 if (!code->IsWeakObject(rinfo->target_cell())) { | 104 if (!Code::IsWeakEmbeddedObject(code->kind(), rinfo->target_cell())) { |
| 105 ObjectVisitor::VisitCell(rinfo); | 105 ObjectVisitor::VisitCell(rinfo); |
| 106 } | 106 } |
| 107 } | 107 } |
| 108 | 108 |
| 109 private: | 109 private: |
| 110 Heap* heap_; | 110 Heap* heap_; |
| 111 }; | 111 }; |
| 112 | 112 |
| 113 | 113 |
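
The verifier above follows V8's pointer-visitor shape: the heap walker hands VisitPointers() contiguous [start, end) slices of slots, and the visitor applies a per-slot check. A minimal self-contained sketch of that shape, with stand-in types (Object, VerifyVisitor, is_marked) that are illustrative rather than V8's:

    #include <cassert>
    #include <cstddef>
    #include <functional>

    struct Object { bool is_heap_object; };

    struct ObjectVisitor {
      virtual ~ObjectVisitor() {}
      // Called with [start, end) ranges of slots that may hold heap objects.
      virtual void VisitPointers(Object** start, Object** end) = 0;
      void VisitPointer(Object** p) { VisitPointers(p, p + 1); }
    };

    class VerifyVisitor : public ObjectVisitor {
     public:
      explicit VerifyVisitor(std::function<bool(Object*)> is_marked)
          : is_marked_(is_marked) {}

      virtual void VisitPointers(Object** start, Object** end) {
        for (Object** current = start; current < end; current++) {
          if (*current != NULL && (*current)->is_heap_object) {
            // Stands in for CHECK(heap_->mark_compact_collector()->IsMarked(...)).
            assert(is_marked_(*current));
          }
        }
      }

     private:
      std::function<bool(Object*)> is_marked_;
    };
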
| 114 static void VerifyMarking(Heap* heap, Address bottom, Address top) { | 114 static void VerifyMarking(Heap* heap, Address bottom, Address top) { |
| (...skipping 447 matching lines...) |
| 562 for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) { | 562 for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) { |
| 563 MarkBit mark_bit = Marking::MarkBitFrom(obj); | 563 MarkBit mark_bit = Marking::MarkBitFrom(obj); |
| 564 mark_bit.Clear(); | 564 mark_bit.Clear(); |
| 565 mark_bit.Next().Clear(); | 565 mark_bit.Next().Clear(); |
| 566 Page::FromAddress(obj->address())->ResetProgressBar(); | 566 Page::FromAddress(obj->address())->ResetProgressBar(); |
| 567 Page::FromAddress(obj->address())->ResetLiveBytes(); | 567 Page::FromAddress(obj->address())->ResetLiveBytes(); |
| 568 } | 568 } |
| 569 } | 569 } |
| 570 | 570 |
| 571 | 571 |
| 572 class MarkCompactCollector::SweeperTask : public v8::Task { | |
| 573 public: | |
| 574 SweeperTask(Heap* heap, PagedSpace* space) | |
| 575 : heap_(heap), space_(space) {} | |
| 576 | |
| 577 virtual ~SweeperTask() {} | |
| 578 | |
| 579 private: | |
| 580 // v8::Task overrides. | |
| 581 virtual void Run() V8_OVERRIDE { | |
| 582 heap_->mark_compact_collector()->SweepInParallel(space_); | |
| 583 heap_->mark_compact_collector()->pending_sweeper_jobs_semaphore_.Signal(); | |
| 584 } | |
| 585 | |
| 586 Heap* heap_; | |
| 587 PagedSpace* space_; | |
| 588 | |
| 589 DISALLOW_COPY_AND_ASSIGN(SweeperTask); | |
| 590 }; | |
| 591 | |
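
The deleted SweeperTask is the worker half of a post-then-signal pattern: StartSweeperThreads() posts one task per old space, each task's Run() sweeps its space and signals pending_sweeper_jobs_semaphore_, and the main thread later waits once per posted task (the "Wait twice for both jobs" in WaitUntilSweepingCompleted below). A self-contained sketch of that pattern, with std::thread and C++20 std::counting_semaphore as hypothetical stand-ins for v8::Platform background threads:

    #include <iostream>
    #include <semaphore>  // C++20
    #include <thread>

    // Mirrors pending_sweeper_jobs_semaphore_(0): no signals at startup.
    std::counting_semaphore<2> pending_jobs(0);

    void run_sweep(const char* space) {
      // ... sweep one paged space here ...
      std::cout << "swept " << space << "\n";
      pending_jobs.release();  // SweeperTask::Run() ends with Signal().
    }

    int main() {
      // StartSweeperThreads() posted one background task per space.
      std::thread data(run_sweep, "old_data_space");
      std::thread pointer(run_sweep, "old_pointer_space");
      // WaitUntilSweepingCompleted(): "Wait twice for both jobs".
      pending_jobs.acquire();
      pending_jobs.acquire();
      data.join();
      pointer.join();
      return 0;
    }
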
| 592 | |
| 593 void MarkCompactCollector::StartSweeperThreads() { | 572 void MarkCompactCollector::StartSweeperThreads() { |
| 594 // TODO(hpayer): This check is just used for debugging purposes and | 573 // TODO(hpayer): This check is just used for debugging purposes and |
| 595 // should be removed or turned into an assert after investigating the | 574 // should be removed or turned into an assert after investigating the |
| 596 // crash in concurrent sweeping. | 575 // crash in concurrent sweeping. |
| 597 CHECK(free_list_old_pointer_space_.get()->IsEmpty()); | 576 CHECK(free_list_old_pointer_space_.get()->IsEmpty()); |
| 598 CHECK(free_list_old_data_space_.get()->IsEmpty()); | 577 CHECK(free_list_old_data_space_.get()->IsEmpty()); |
| 599 sweeping_pending_ = true; | 578 sweeping_pending_ = true; |
| 600 for (int i = 0; i < isolate()->num_sweeper_threads(); i++) { | 579 for (int i = 0; i < isolate()->num_sweeper_threads(); i++) { |
| 601 isolate()->sweeper_threads()[i]->StartSweeping(); | 580 isolate()->sweeper_threads()[i]->StartSweeping(); |
| 602 } | 581 } |
| 603 if (FLAG_job_based_sweeping) { | |
| 604 V8::GetCurrentPlatform()->CallOnBackgroundThread( | |
| 605 new SweeperTask(heap(), heap()->old_data_space()), | |
| 606 v8::Platform::kShortRunningTask); | |
| 607 V8::GetCurrentPlatform()->CallOnBackgroundThread( | |
| 608 new SweeperTask(heap(), heap()->old_pointer_space()), | |
| 609 v8::Platform::kShortRunningTask); | |
| 610 } | |
| 611 } | 582 } |
| 612 | 583 |
| 613 | 584 |
| 614 void MarkCompactCollector::WaitUntilSweepingCompleted() { | 585 void MarkCompactCollector::WaitUntilSweepingCompleted() { |
| 615 ASSERT(sweeping_pending_ == true); | 586 ASSERT(sweeping_pending_ == true); |
| 616 for (int i = 0; i < isolate()->num_sweeper_threads(); i++) { | 587 for (int i = 0; i < isolate()->num_sweeper_threads(); i++) { |
| 617 isolate()->sweeper_threads()[i]->WaitForSweeperThread(); | 588 isolate()->sweeper_threads()[i]->WaitForSweeperThread(); |
| 618 } | 589 } |
| 619 if (FLAG_job_based_sweeping) { | |
| 620 // Wait twice for both jobs. | |
| 621 pending_sweeper_jobs_semaphore_.Wait(); | |
| 622 pending_sweeper_jobs_semaphore_.Wait(); | |
| 623 } | |
| 624 ParallelSweepSpacesComplete(); | |
| 625 sweeping_pending_ = false; | 590 sweeping_pending_ = false; |
| 626 RefillFreeLists(heap()->paged_space(OLD_DATA_SPACE)); | 591 RefillFreeLists(heap()->paged_space(OLD_DATA_SPACE)); |
| 627 RefillFreeLists(heap()->paged_space(OLD_POINTER_SPACE)); | 592 RefillFreeLists(heap()->paged_space(OLD_POINTER_SPACE)); |
| 628 heap()->paged_space(OLD_DATA_SPACE)->ResetUnsweptFreeBytes(); | 593 heap()->paged_space(OLD_DATA_SPACE)->ResetUnsweptFreeBytes(); |
| 629 heap()->paged_space(OLD_POINTER_SPACE)->ResetUnsweptFreeBytes(); | 594 heap()->paged_space(OLD_POINTER_SPACE)->ResetUnsweptFreeBytes(); |
| 630 } | 595 } |
| 631 | 596 |
| 632 | 597 |
| 633 intptr_t MarkCompactCollector::RefillFreeLists(PagedSpace* space) { | 598 intptr_t MarkCompactCollector::RefillFreeLists(PagedSpace* space) { |
| 634 FreeList* free_list; | 599 FreeList* free_list; |
| 635 | 600 |
| 636 if (space == heap()->old_pointer_space()) { | 601 if (space == heap()->old_pointer_space()) { |
| 637 free_list = free_list_old_pointer_space_.get(); | 602 free_list = free_list_old_pointer_space_.get(); |
| 638 } else if (space == heap()->old_data_space()) { | 603 } else if (space == heap()->old_data_space()) { |
| 639 free_list = free_list_old_data_space_.get(); | 604 free_list = free_list_old_data_space_.get(); |
| 640 } else { | 605 } else { |
| 641 // Any PagedSpace might invoke RefillFreeLists, so we need to make sure | 606 // Any PagedSpace might invoke RefillFreeLists, so we need to make sure |
| 642 // to only refill them for old data and pointer spaces. | 607 // to only refill them for old data and pointer spaces. |
| 643 return 0; | 608 return 0; |
| 644 } | 609 } |
| 645 | 610 |
| 646 intptr_t freed_bytes = space->free_list()->Concatenate(free_list); | 611 intptr_t freed_bytes = space->free_list()->Concatenate(free_list); |
| 647 space->AddToAccountingStats(freed_bytes); | 612 space->AddToAccountingStats(freed_bytes); |
| 648 space->DecrementUnsweptFreeBytes(freed_bytes); | 613 space->DecrementUnsweptFreeBytes(freed_bytes); |
| 649 return freed_bytes; | 614 return freed_bytes; |
| 650 } | 615 } |
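
RefillFreeLists() splices a sweeper-private free list into the space's main one and routes the returned byte count into the space's accounting. A toy model of that splice, assuming a FreeList that only tracks chunk sizes (not V8's class):

    #include <cstdint>
    #include <iostream>
    #include <list>
    #include <numeric>

    struct FreeList {
      std::list<intptr_t> chunks;  // byte sizes of free chunks

      // Move every chunk from |other| onto this list and return the bytes
      // moved, like FreeList::Concatenate feeding AddToAccountingStats().
      intptr_t Concatenate(FreeList* other) {
        intptr_t bytes = std::accumulate(other->chunks.begin(),
                                         other->chunks.end(), intptr_t(0));
        chunks.splice(chunks.end(), other->chunks);  // |other| is left empty
        return bytes;
      }
    };

    int main() {
      FreeList space_list, sweeper_private;
      sweeper_private.chunks.push_back(128);
      sweeper_private.chunks.push_back(64);
      std::cout << "refilled " << space_list.Concatenate(&sweeper_private)
                << " bytes\n";  // prints: refilled 192 bytes
      return 0;
    }
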
| 651 | 616 |
| 652 | 617 |
| 653 bool MarkCompactCollector::AreSweeperThreadsActivated() { | 618 bool MarkCompactCollector::AreSweeperThreadsActivated() { |
| 654 return isolate()->sweeper_threads() != NULL || FLAG_job_based_sweeping; | 619 return isolate()->sweeper_threads() != NULL; |
| 655 } | 620 } |
| 656 | 621 |
| 657 | 622 |
| 658 bool MarkCompactCollector::IsConcurrentSweepingInProgress() { | 623 bool MarkCompactCollector::IsConcurrentSweepingInProgress() { |
| 659 return sweeping_pending_; | 624 return sweeping_pending_; |
| 660 } | 625 } |
| 661 | 626 |
| 662 | 627 |
| 663 bool Marking::TransferMark(Address old_start, Address new_start) { | 628 bool Marking::TransferMark(Address old_start, Address new_start) { |
| 664 // This is only used when resizing an object. | 629 // This is only used when resizing an object. |
| (...skipping 2748 matching lines...) |
| 3413 | 3378 |
| 3414 void MarkCompactCollector::EvacuateNewSpaceAndCandidates() { | 3379 void MarkCompactCollector::EvacuateNewSpaceAndCandidates() { |
| 3415 Heap::RelocationLock relocation_lock(heap()); | 3380 Heap::RelocationLock relocation_lock(heap()); |
| 3416 | 3381 |
| 3417 bool code_slots_filtering_required; | 3382 bool code_slots_filtering_required; |
| 3418 { GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_SWEEP_NEWSPACE); | 3383 { GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_SWEEP_NEWSPACE); |
| 3419 code_slots_filtering_required = MarkInvalidatedCode(); | 3384 code_slots_filtering_required = MarkInvalidatedCode(); |
| 3420 EvacuateNewSpace(); | 3385 EvacuateNewSpace(); |
| 3421 } | 3386 } |
| 3422 | 3387 |
| 3388 // We have to traverse our allocation sites scratchpad, which contains raw |
| 3389 // pointers, before we move objects. During new space evacuation we |
| 3390 // gathered pretenuring statistics; the allocation sites we found may not |
| 3391 // be valid after compacting old space. |
| 3392 heap()->ProcessPretenuringFeedback(); |
| 3393 |
| 3394 |
| 3423 { GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_EVACUATE_PAGES); | 3395 { GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_EVACUATE_PAGES); |
| 3424 EvacuatePages(); | 3396 EvacuatePages(); |
| 3425 } | 3397 } |
| 3426 | 3398 |
| 3427 // Second pass: find pointers to new space and update them. | 3399 // Second pass: find pointers to new space and update them. |
| 3428 PointersUpdatingVisitor updating_visitor(heap()); | 3400 PointersUpdatingVisitor updating_visitor(heap()); |
| 3429 | 3401 |
| 3430 { GCTracer::Scope gc_scope(tracer_, | 3402 { GCTracer::Scope gc_scope(tracer_, |
| 3431 GCTracer::Scope::MC_UPDATE_NEW_TO_NEW_POINTERS); | 3403 GCTracer::Scope::MC_UPDATE_NEW_TO_NEW_POINTERS); |
| 3432 // Update pointers in to-space. | 3404 // Update pointers in to-space. |
| (...skipping 506 matching lines...) |
| 3939 Page* p) { | 3911 Page* p) { |
| 3940 // TODO(hpayer): This check is just used for debugging purposes and | 3912 // TODO(hpayer): This check is just used for debugging purposes and |
| 3941 // should be removed or turned into an assert after investigating the | 3913 // should be removed or turned into an assert after investigating the |
| 3942 // crash in concurrent sweeping. | 3914 // crash in concurrent sweeping. |
| 3943 CHECK(!p->IsEvacuationCandidate() && !p->WasSwept()); | 3915 CHECK(!p->IsEvacuationCandidate() && !p->WasSwept()); |
| 3944 ASSERT((mode == MarkCompactCollector::SWEEP_IN_PARALLEL && | 3916 ASSERT((mode == MarkCompactCollector::SWEEP_IN_PARALLEL && |
| 3945 free_list != NULL) || | 3917 free_list != NULL) || |
| 3946 (mode == MarkCompactCollector::SWEEP_SEQUENTIALLY && | 3918 (mode == MarkCompactCollector::SWEEP_SEQUENTIALLY && |
| 3947 free_list == NULL)); | 3919 free_list == NULL)); |
| 3948 | 3920 |
| 3949 // When parallel sweeping is active, the page will be marked after | 3921 p->MarkSweptConservatively(); |
| 3950 // sweeping by the main thread. | |
| 3951 if (mode != MarkCompactCollector::SWEEP_IN_PARALLEL) { | |
| 3952 p->MarkSweptConservatively(); | |
| 3953 } | |
| 3954 | 3922 |
| 3955 intptr_t freed_bytes = 0; | 3923 intptr_t freed_bytes = 0; |
| 3956 size_t size = 0; | 3924 size_t size = 0; |
| 3957 | 3925 |
| 3958 // Skip over all the dead objects at the start of the page and mark them free. | 3926 // Skip over all the dead objects at the start of the page and mark them free. |
| 3959 Address cell_base = 0; | 3927 Address cell_base = 0; |
| 3960 MarkBit::CellType* cell = NULL; | 3928 MarkBit::CellType* cell = NULL; |
| 3961 MarkBitCellIterator it(p); | 3929 MarkBitCellIterator it(p); |
| 3962 for (; !it.Done(); it.Advance()) { | 3930 for (; !it.Done(); it.Advance()) { |
| 3963 cell_base = it.CurrentCellBase(); | 3931 cell_base = it.CurrentCellBase(); |
| (...skipping 91 matching lines...) |
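
The ASSERT above works because SweepConservatively is templated on the sweep mode, so the parallel/sequential split is resolved per instantiation at compile time (this CL instantiates SweepConservatively<SWEEP_SEQUENTIALLY> further down). A reduced sketch of that dispatch; FreeList and the function body are placeholders:

    #include <cassert>
    #include <cstddef>

    enum SweepMode { SWEEP_SEQUENTIALLY, SWEEP_IN_PARALLEL };

    struct FreeList {};

    // Parallel sweeps fill a thread-private free list; sequential sweeps
    // pass NULL and update the space directly. Each instantiation checks
    // its own precondition, as in the ASSERT above.
    template <SweepMode mode>
    size_t SweepPage(FreeList* free_list) {
      assert((mode == SWEEP_IN_PARALLEL) == (free_list != NULL));
      size_t freed_bytes = 0;
      // ... walk the page's mark bits, accumulating freed_bytes ...
      return freed_bytes;
    }

    int main() {
      FreeList private_list;
      SweepPage<SWEEP_IN_PARALLEL>(&private_list);
      SweepPage<SWEEP_SEQUENTIALLY>(NULL);
      return 0;
    }
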
| 4055 PageIterator it(space); | 4023 PageIterator it(space); |
| 4056 | 4024 |
| 4057 int pages_swept = 0; | 4025 int pages_swept = 0; |
| 4058 bool lazy_sweeping_active = false; | 4026 bool lazy_sweeping_active = false; |
| 4059 bool unused_page_present = false; | 4027 bool unused_page_present = false; |
| 4060 bool parallel_sweeping_active = false; | 4028 bool parallel_sweeping_active = false; |
| 4061 | 4029 |
| 4062 while (it.has_next()) { | 4030 while (it.has_next()) { |
| 4063 Page* p = it.next(); | 4031 Page* p = it.next(); |
| 4064 | 4032 |
| 4065 ASSERT(p->parallel_sweeping() == MemoryChunk::PARALLEL_SWEEPING_DONE); | 4033 ASSERT(p->parallel_sweeping() == 0); |
| 4066 ASSERT(!p->IsEvacuationCandidate()); | 4034 ASSERT(!p->IsEvacuationCandidate()); |
| 4067 | 4035 |
| 4068 // Clear sweeping flags indicating that marking bits are still intact. | 4036 // Clear sweeping flags indicating that marking bits are still intact. |
| 4069 p->ClearSweptPrecisely(); | 4037 p->ClearSweptPrecisely(); |
| 4070 p->ClearSweptConservatively(); | 4038 p->ClearSweptConservatively(); |
| 4071 | 4039 |
| 4072 if (p->IsFlagSet(Page::RESCAN_ON_EVACUATION)) { | 4040 if (p->IsFlagSet(Page::RESCAN_ON_EVACUATION)) { |
| 4073 // Will be processed in EvacuateNewSpaceAndCandidates. | 4041 // Will be processed in EvacuateNewSpaceAndCandidates. |
| 4074 ASSERT(evacuation_candidates_.length() > 0); | 4042 ASSERT(evacuation_candidates_.length() > 0); |
| 4075 continue; | 4043 continue; |
| (...skipping 52 matching lines...) |
| 4128 reinterpret_cast<intptr_t>(p)); | 4096 reinterpret_cast<intptr_t>(p)); |
| 4129 } | 4097 } |
| 4130 SweepConservatively<SWEEP_SEQUENTIALLY>(space, NULL, p); | 4098 SweepConservatively<SWEEP_SEQUENTIALLY>(space, NULL, p); |
| 4131 pages_swept++; | 4099 pages_swept++; |
| 4132 parallel_sweeping_active = true; | 4100 parallel_sweeping_active = true; |
| 4133 } else { | 4101 } else { |
| 4134 if (FLAG_gc_verbose) { | 4102 if (FLAG_gc_verbose) { |
| 4135 PrintF("Sweeping 0x%" V8PRIxPTR " conservatively in parallel.\n", | 4103 PrintF("Sweeping 0x%" V8PRIxPTR " conservatively in parallel.\n", |
| 4136 reinterpret_cast<intptr_t>(p)); | 4104 reinterpret_cast<intptr_t>(p)); |
| 4137 } | 4105 } |
| 4138 p->set_parallel_sweeping(MemoryChunk::PARALLEL_SWEEPING_PENDING); | 4106 p->set_parallel_sweeping(1); |
| 4139 space->IncreaseUnsweptFreeBytes(p); | 4107 space->IncreaseUnsweptFreeBytes(p); |
| 4140 } | 4108 } |
| 4141 break; | 4109 break; |
| 4142 } | 4110 } |
| 4143 case PRECISE: { | 4111 case PRECISE: { |
| 4144 if (FLAG_gc_verbose) { | 4112 if (FLAG_gc_verbose) { |
| 4145 PrintF("Sweeping 0x%" V8PRIxPTR " precisely.\n", | 4113 PrintF("Sweeping 0x%" V8PRIxPTR " precisely.\n", |
| 4146 reinterpret_cast<intptr_t>(p)); | 4114 reinterpret_cast<intptr_t>(p)); |
| 4147 } | 4115 } |
| 4148 if (space->identity() == CODE_SPACE) { | 4116 if (space->identity() == CODE_SPACE) { |
| (...skipping 21 matching lines...) |
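
The OLD side of this hunk uses the named page states (MemoryChunk::PARALLEL_SWEEPING_PENDING, _IN_PROGRESS, _DONE) that the revert collapses back to raw 0/1 flags. A sketch of the state machine those names describe, using std::atomic; the CAS-based claim is an assumption about how a sweeper thread would pick up a page, not code from this CL:

    #include <atomic>

    enum ParallelSweepingState {
      PARALLEL_SWEEPING_DONE = 0,        // swept, or never queued
      PARALLEL_SWEEPING_PENDING = 1,     // queued by the main thread
      PARALLEL_SWEEPING_IN_PROGRESS = 2  // claimed by a sweeper thread
    };

    struct PageState {
      std::atomic<int> parallel_sweeping;

      PageState() : parallel_sweeping(PARALLEL_SWEEPING_DONE) {}

      // A sweeper thread claims a page only while it is still PENDING,
      // so two threads can never sweep the same page.
      bool TryClaim() {
        int expected = PARALLEL_SWEEPING_PENDING;
        return parallel_sweeping.compare_exchange_strong(
            expected, PARALLEL_SWEEPING_IN_PROGRESS);
      }

      void MarkDone() { parallel_sweeping.store(PARALLEL_SWEEPING_DONE); }
    };
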
| 4170 } | 4138 } |
| 4171 | 4139 |
| 4172 | 4140 |
| 4173 void MarkCompactCollector::SweepSpaces() { | 4141 void MarkCompactCollector::SweepSpaces() { |
| 4174 GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_SWEEP); | 4142 GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_SWEEP); |
| 4175 #ifdef DEBUG | 4143 #ifdef DEBUG |
| 4176 state_ = SWEEP_SPACES; | 4144 state_ = SWEEP_SPACES; |
| 4177 #endif | 4145 #endif |
| 4178 SweeperType how_to_sweep = | 4146 SweeperType how_to_sweep = |
| 4179 FLAG_lazy_sweeping ? LAZY_CONSERVATIVE : CONSERVATIVE; | 4147 FLAG_lazy_sweeping ? LAZY_CONSERVATIVE : CONSERVATIVE; |
| 4180 if (AreSweeperThreadsActivated()) { | 4148 if (isolate()->num_sweeper_threads() > 0) { |
| 4181 if (FLAG_parallel_sweeping) how_to_sweep = PARALLEL_CONSERVATIVE; | 4149 if (FLAG_parallel_sweeping) how_to_sweep = PARALLEL_CONSERVATIVE; |
| 4182 if (FLAG_concurrent_sweeping) how_to_sweep = CONCURRENT_CONSERVATIVE; | 4150 if (FLAG_concurrent_sweeping) how_to_sweep = CONCURRENT_CONSERVATIVE; |
| 4183 } | 4151 } |
| 4184 if (sweep_precisely_) how_to_sweep = PRECISE; | 4152 if (sweep_precisely_) how_to_sweep = PRECISE; |
| 4185 | 4153 |
| 4186 // Unlink evacuation candidates before sweeper threads access the list of | 4154 // Unlink evacuation candidates before sweeper threads access the list of |
| 4187 // pages to avoid a race condition. | 4155 // pages to avoid a race condition. |
| 4188 UnlinkEvacuationCandidates(); | 4156 UnlinkEvacuationCandidates(); |
| 4189 | 4157 |
| 4190 // Noncompacting collections simply sweep the spaces to clear the mark | 4158 // Noncompacting collections simply sweep the spaces to clear the mark |
| (...skipping 29 matching lines...) |
| 4220 SweepSpace(heap()->map_space(), PRECISE); | 4188 SweepSpace(heap()->map_space(), PRECISE); |
| 4221 | 4189 |
| 4222 // Deallocate unmarked objects and clear marked bits for marked objects. | 4190 // Deallocate unmarked objects and clear marked bits for marked objects. |
| 4223 heap_->lo_space()->FreeUnmarkedObjects(); | 4191 heap_->lo_space()->FreeUnmarkedObjects(); |
| 4224 | 4192 |
| 4225 // Deallocate evacuated candidate pages. | 4193 // Deallocate evacuated candidate pages. |
| 4226 ReleaseEvacuationCandidates(); | 4194 ReleaseEvacuationCandidates(); |
| 4227 } | 4195 } |
| 4228 | 4196 |
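
SweepSpaces() picks its sweeper via a precedence chain: FLAG_lazy_sweeping selects lazy vs plain conservative, live sweeper threads upgrade that to parallel or concurrent (concurrent wins when both flags are set), and sweep_precisely_ overrides everything. The same logic restated as a standalone function; the enum and flag names follow the CL, the function itself is illustrative:

    enum SweeperType {
      CONSERVATIVE,
      LAZY_CONSERVATIVE,
      PARALLEL_CONSERVATIVE,
      CONCURRENT_CONSERVATIVE,
      PRECISE
    };

    SweeperType ChooseSweeper(bool lazy_sweeping, int num_sweeper_threads,
                              bool parallel_sweeping, bool concurrent_sweeping,
                              bool sweep_precisely) {
      SweeperType how_to_sweep = lazy_sweeping ? LAZY_CONSERVATIVE : CONSERVATIVE;
      if (num_sweeper_threads > 0) {
        if (parallel_sweeping) how_to_sweep = PARALLEL_CONSERVATIVE;
        if (concurrent_sweeping) how_to_sweep = CONCURRENT_CONSERVATIVE;
      }
      if (sweep_precisely) how_to_sweep = PRECISE;
      return how_to_sweep;
    }
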
| 4229 | 4197 |
| 4230 void MarkCompactCollector::ParallelSweepSpaceComplete(PagedSpace* space) { | |
| 4231 PageIterator it(space); | |
| 4232 while (it.has_next()) { | |
| 4233 Page* p = it.next(); | |
| 4234 if (p->parallel_sweeping() == MemoryChunk::PARALLEL_SWEEPING_IN_PROGRESS) { | |
| 4235 p->set_parallel_sweeping(MemoryChunk::PARALLEL_SWEEPING_DONE); | |
| 4236 p->MarkSweptConservatively(); | |
| 4237 } | |
| 4238 } | |
| 4239 } | |
| 4240 | |
| 4241 | |
| 4242 void MarkCompactCollector::ParallelSweepSpacesComplete() { | |
| 4243 ParallelSweepSpaceComplete(heap()->old_pointer_space()); | |
| 4244 ParallelSweepSpaceComplete(heap()->old_data_space()); | |
| 4245 } | |
| 4246 | |
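
The deleted ParallelSweepSpace(s)Complete pair is the main-thread half of the page state machine: after the sweeper jobs are waited on, it walks each old space and finalizes any page a sweeper left IN_PROGRESS. A sketch of that pass, reusing the hypothetical PageState type from the state-machine example earlier; the vector stands in for V8's PageIterator:

    #include <cstddef>
    #include <vector>

    // For each page a sweeper thread claimed but did not finalize, mark it
    // DONE (this is where p->MarkSweptConservatively() would also run).
    void ParallelSweepComplete(std::vector<PageState*>& pages) {
      for (size_t i = 0; i < pages.size(); i++) {
        if (pages[i]->parallel_sweeping.load() == PARALLEL_SWEEPING_IN_PROGRESS) {
          pages[i]->MarkDone();
        }
      }
    }
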
| 4247 | |
| 4248 void MarkCompactCollector::EnableCodeFlushing(bool enable) { | 4198 void MarkCompactCollector::EnableCodeFlushing(bool enable) { |
| 4249 #ifdef ENABLE_DEBUGGER_SUPPORT | 4199 #ifdef ENABLE_DEBUGGER_SUPPORT |
| 4250 if (isolate()->debug()->IsLoaded() || | 4200 if (isolate()->debug()->IsLoaded() || |
| 4251 isolate()->debug()->has_break_points()) { | 4201 isolate()->debug()->has_break_points()) { |
| 4252 enable = false; | 4202 enable = false; |
| 4253 } | 4203 } |
| 4254 #endif | 4204 #endif |
| 4255 | 4205 |
| 4256 if (enable) { | 4206 if (enable) { |
| 4257 if (code_flusher_ != NULL) return; | 4207 if (code_flusher_ != NULL) return; |
| (...skipping 188 matching lines...) |
| 4446 while (buffer != NULL) { | 4396 while (buffer != NULL) { |
| 4447 SlotsBuffer* next_buffer = buffer->next(); | 4397 SlotsBuffer* next_buffer = buffer->next(); |
| 4448 DeallocateBuffer(buffer); | 4398 DeallocateBuffer(buffer); |
| 4449 buffer = next_buffer; | 4399 buffer = next_buffer; |
| 4450 } | 4400 } |
| 4451 *buffer_address = NULL; | 4401 *buffer_address = NULL; |
| 4452 } | 4402 } |
| 4453 | 4403 |
| 4454 | 4404 |
| 4455 } } // namespace v8::internal | 4405 } } // namespace v8::internal |