OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 2759 matching lines...) |
2770 if (rinfo.IsPatchedReturnSequence()) rinfo.Visit(v); | 2770 if (rinfo.IsPatchedReturnSequence()) rinfo.Visit(v); |
2771 break; | 2771 break; |
2772 } | 2772 } |
2773 default: | 2773 default: |
2774 UNREACHABLE(); | 2774 UNREACHABLE(); |
2775 break; | 2775 break; |
2776 } | 2776 } |
2777 } | 2777 } |
2778 | 2778 |
2779 | 2779 |
2780 static inline void UpdateSlotsInRange(Object** start, Object** end) { | |
2781 for (Object** slot = start; | |
2782 slot < end; | |
2783 slot++) { | |
2784 Object* obj = *slot; | |
2785 if (obj->IsHeapObject() && | |
2786 MarkCompactCollector::IsOnEvacuationCandidate(obj)) { | |
2787 MapWord map_word = HeapObject::cast(obj)->map_word(); | |
2788 if (map_word.IsForwardingAddress()) { | |
2789 *slot = map_word.ToForwardingAddress(); | |
2790 ASSERT(!MarkCompactCollector::IsOnEvacuationCandidate(*slot)); | |
2791 } | |
2792 } | |
2793 } | |
2794 } | |
2795 | |
2796 | |
2797 enum SweepingMode { | 2780 enum SweepingMode { |
2798 SWEEP_ONLY, | 2781 SWEEP_ONLY, |
2799 SWEEP_AND_VISIT_LIVE_OBJECTS | 2782 SWEEP_AND_VISIT_LIVE_OBJECTS |
2800 }; | 2783 }; |
2801 | 2784 |
2802 | 2785 |
2803 enum SkipListRebuildingMode { | 2786 enum SkipListRebuildingMode { |
2804 REBUILD_SKIP_LIST, | 2787 REBUILD_SKIP_LIST, |
2805 IGNORE_SKIP_LIST | 2788 IGNORE_SKIP_LIST |
2806 }; | 2789 }; |
(...skipping 346 matching lines...) |
3153 space->Free(p->ObjectAreaStart(), Page::kObjectAreaSize); | 3136 space->Free(p->ObjectAreaStart(), Page::kObjectAreaSize); |
3154 p->set_scan_on_scavenge(false); | 3137 p->set_scan_on_scavenge(false); |
3155 slots_buffer_allocator_.DeallocateChain(p->slots_buffer_address()); | 3138 slots_buffer_allocator_.DeallocateChain(p->slots_buffer_address()); |
3156 p->ClearEvacuationCandidate(); | 3139 p->ClearEvacuationCandidate(); |
3157 } | 3140 } |
3158 evacuation_candidates_.Rewind(0); | 3141 evacuation_candidates_.Rewind(0); |
3159 compacting_ = false; | 3142 compacting_ = false; |
3160 } | 3143 } |
3161 | 3144 |
3162 | 3145 |
3163 INLINE(static uint32_t SweepFree(PagedSpace* space, | |
3164 Page* p, | |
3165 uint32_t free_start, | |
3166 uint32_t region_end, | |
3167 uint32_t* cells)); | |
3168 | |
3169 | |
3170 static uint32_t SweepFree(PagedSpace* space, | |
3171 Page* p, | |
3172 uint32_t free_start, | |
3173 uint32_t region_end, | |
3174 uint32_t* cells) { | |
3175 uint32_t free_cell_index = Bitmap::IndexToCell(free_start); | |
3176 ASSERT(cells[free_cell_index] == 0); | |
3177 while (free_cell_index < region_end && cells[free_cell_index] == 0) { | |
3178 free_cell_index++; | |
3179 } | |
3180 | |
3181 if (free_cell_index >= region_end) { | |
3182 return free_cell_index; | |
3183 } | |
3184 | |
3185 uint32_t free_end = Bitmap::CellToIndex(free_cell_index); | |
3186 space->FreeOrUnmapPage(p, | |
3187 p->MarkbitIndexToAddress(free_start), | |
3188 (free_end - free_start) << kPointerSizeLog2); | |
3189 | |
3190 return free_cell_index; | |
3191 } | |
3192 | |
3193 | |
3194 INLINE(static uint32_t NextCandidate(uint32_t cell_index, | |
3195 uint32_t last_cell_index, | |
3196 uint32_t* cells)); | |
3197 | |
3198 | |
3199 static uint32_t NextCandidate(uint32_t cell_index, | |
3200 uint32_t last_cell_index, | |
3201 uint32_t* cells) { | |
3202 do { | |
3203 cell_index++; | |
3204 } while (cell_index < last_cell_index && cells[cell_index] != 0); | |
3205 return cell_index; | |
3206 } | |
3207 | |
3208 | |
3209 static const int kStartTableEntriesPerLine = 5; | 3146 static const int kStartTableEntriesPerLine = 5; |
3210 static const int kStartTableLines = 171; | 3147 static const int kStartTableLines = 171; |
3211 static const int kStartTableInvalidLine = 127; | 3148 static const int kStartTableInvalidLine = 127; |
3212 static const int kStartTableUnusedEntry = 126; | 3149 static const int kStartTableUnusedEntry = 126; |
3213 | 3150 |
3214 #define _ kStartTableUnusedEntry | 3151 #define _ kStartTableUnusedEntry |
3215 #define X kStartTableInvalidLine | 3152 #define X kStartTableInvalidLine |
3216 // Mark-bit to object start offset table. | 3153 // Mark-bit to object start offset table. |
3217 // | 3154 // |
3218 // The line is indexed by the mark bits in a byte. The first number on | 3155 // The line is indexed by the mark bits in a byte. The first number on |
(...skipping 363 matching lines...) |
3582 space->set_was_swept_conservatively(sweeper == CONSERVATIVE || | 3519 space->set_was_swept_conservatively(sweeper == CONSERVATIVE || |
3583 sweeper == LAZY_CONSERVATIVE); | 3520 sweeper == LAZY_CONSERVATIVE); |
3584 | 3521 |
3585 space->ClearStats(); | 3522 space->ClearStats(); |
3586 | 3523 |
3587 PageIterator it(space); | 3524 PageIterator it(space); |
3588 | 3525 |
3589 intptr_t freed_bytes = 0; | 3526 intptr_t freed_bytes = 0; |
3590 intptr_t newspace_size = space->heap()->new_space()->Size(); | 3527 intptr_t newspace_size = space->heap()->new_space()->Size(); |
3591 bool lazy_sweeping_active = false; | 3528 bool lazy_sweeping_active = false; |
| 3529 bool unused_page_present = false; |
3592 | 3530 |
3593 while (it.has_next()) { | 3531 while (it.has_next()) { |
3594 Page* p = it.next(); | 3532 Page* p = it.next(); |
3595 | 3533 |
3596 // Clear sweeping flags indicating that marking bits are still intact. | 3534 // Clear sweeping flags indicating that marking bits are still intact. |
3597 p->ClearSweptPrecisely(); | 3535 p->ClearSweptPrecisely(); |
3598 p->ClearSweptConservatively(); | 3536 p->ClearSweptConservatively(); |
3599 | 3537 |
3600 if (p->IsEvacuationCandidate()) { | 3538 if (p->IsEvacuationCandidate()) { |
3601 ASSERT(evacuation_candidates_.length() > 0); | 3539 ASSERT(evacuation_candidates_.length() > 0); |
3602 continue; | 3540 continue; |
3603 } | 3541 } |
3604 | 3542 |
3605 if (p->IsFlagSet(Page::RESCAN_ON_EVACUATION)) { | 3543 if (p->IsFlagSet(Page::RESCAN_ON_EVACUATION)) { |
3606 // Will be processed in EvacuateNewSpaceAndCandidates. | 3544 // Will be processed in EvacuateNewSpaceAndCandidates. |
3607 continue; | 3545 continue; |
3608 } | 3546 } |
3609 | 3547 |
3610 if (lazy_sweeping_active) { | 3548 if (lazy_sweeping_active) { |
3611 if (FLAG_gc_verbose) { | 3549 if (FLAG_gc_verbose) { |
3612 PrintF("Sweeping 0x%" V8PRIxPTR " lazily postponed.\n", | 3550 PrintF("Sweeping 0x%" V8PRIxPTR " lazily postponed.\n", |
3613 reinterpret_cast<intptr_t>(p)); | 3551 reinterpret_cast<intptr_t>(p)); |
3614 } | 3552 } |
3615 continue; | 3553 continue; |
3616 } | 3554 } |
3617 | 3555 |
| 3556 // One unused page is kept, all further are released before sweeping them. |
| 3557 if (p->LiveBytes() == 0) { |
| 3558 if (unused_page_present) { |
| 3559 if (FLAG_gc_verbose) { |
| 3560 PrintF("Sweeping 0x%" V8PRIxPTR " released page.\n", |
| 3561 reinterpret_cast<intptr_t>(p)); |
| 3562 } |
| 3563 space->ReleasePage(p); |
| 3564 continue; |
| 3565 } |
| 3566 unused_page_present = true; |
| 3567 } |
| 3568 |
3618 if (FLAG_gc_verbose) { | 3569 if (FLAG_gc_verbose) { |
3619 PrintF("Sweeping 0x%" V8PRIxPTR " with sweeper %d.\n", | 3570 PrintF("Sweeping 0x%" V8PRIxPTR " with sweeper %d.\n", |
3620 reinterpret_cast<intptr_t>(p), | 3571 reinterpret_cast<intptr_t>(p), |
3621 sweeper); | 3572 sweeper); |
3622 } | 3573 } |
3623 | 3574 |
3624 switch (sweeper) { | 3575 switch (sweeper) { |
3625 case CONSERVATIVE: { | 3576 case CONSERVATIVE: { |
3626 SweepConservatively(space, p); | 3577 SweepConservatively(space, p); |
3627 break; | 3578 break; |
3628 } | 3579 } |
3629 case LAZY_CONSERVATIVE: { | 3580 case LAZY_CONSERVATIVE: { |
3630 freed_bytes += SweepConservatively(space, p); | 3581 freed_bytes += SweepConservatively(space, p); |
3631 if (freed_bytes >= newspace_size && p != space->LastPage()) { | 3582 if (freed_bytes >= newspace_size && p != space->LastPage()) { |
3632 space->SetPagesToSweep(p->next_page(), space->LastPage()); | 3583 space->SetPagesToSweep(p->next_page(), space->anchor()); |
3633 lazy_sweeping_active = true; | 3584 lazy_sweeping_active = true; |
3634 } | 3585 } |
3635 break; | 3586 break; |
3636 } | 3587 } |
3637 case PRECISE: { | 3588 case PRECISE: { |
3638 if (space->identity() == CODE_SPACE) { | 3589 if (space->identity() == CODE_SPACE) { |
3639 SweepPrecisely<SWEEP_ONLY, REBUILD_SKIP_LIST>(space, p, NULL); | 3590 SweepPrecisely<SWEEP_ONLY, REBUILD_SKIP_LIST>(space, p, NULL); |
3640 } else { | 3591 } else { |
3641 SweepPrecisely<SWEEP_ONLY, IGNORE_SKIP_LIST>(space, p, NULL); | 3592 SweepPrecisely<SWEEP_ONLY, IGNORE_SKIP_LIST>(space, p, NULL); |
3642 } | 3593 } |
3643 break; | 3594 break; |
3644 } | 3595 } |
3645 default: { | 3596 default: { |
3646 UNREACHABLE(); | 3597 UNREACHABLE(); |
3647 } | 3598 } |
3648 } | 3599 } |
3649 } | 3600 } |
| 3601 |
| 3602 // Give pages that are queued to be freed back to the OS. |
| 3603 heap()->FreeQueuedChunks(); |
3650 } | 3604 } |
3651 | 3605 |
3652 | 3606 |
3653 void MarkCompactCollector::SweepSpaces() { | 3607 void MarkCompactCollector::SweepSpaces() { |
3654 GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_SWEEP); | 3608 GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_SWEEP); |
3655 #ifdef DEBUG | 3609 #ifdef DEBUG |
3656 state_ = SWEEP_SPACES; | 3610 state_ = SWEEP_SPACES; |
3657 #endif | 3611 #endif |
3658 SweeperType how_to_sweep = | 3612 SweeperType how_to_sweep = |
3659 FLAG_lazy_sweeping ? LAZY_CONSERVATIVE : CONSERVATIVE; | 3613 FLAG_lazy_sweeping ? LAZY_CONSERVATIVE : CONSERVATIVE; |
(...skipping 194 matching lines...) |
3854 while (buffer != NULL) { | 3808 while (buffer != NULL) { |
3855 SlotsBuffer* next_buffer = buffer->next(); | 3809 SlotsBuffer* next_buffer = buffer->next(); |
3856 DeallocateBuffer(buffer); | 3810 DeallocateBuffer(buffer); |
3857 buffer = next_buffer; | 3811 buffer = next_buffer; |
3858 } | 3812 } |
3859 *buffer_address = NULL; | 3813 *buffer_address = NULL; |
3860 } | 3814 } |
3861 | 3815 |
3862 | 3816 |
3863 } } // namespace v8::internal | 3817 } } // namespace v8::internal |