Chromium Code Reviews| Index: src/mark-compact.cc |
| diff --git a/src/mark-compact.cc b/src/mark-compact.cc |
| index 4a29c21df74c4c60ed4afaef491f45d42bd6bb17..05178f3a61798210b19872a87ae77d9115100eea 100644 |
| --- a/src/mark-compact.cc |
| +++ b/src/mark-compact.cc |
| @@ -110,7 +110,8 @@ void MarkCompactCollector::CollectGarbage() { |
| // Check that swept all marked objects and |
| // null out the GC tracer. |
| - ASSERT(tracer_->marked_count() == 0); |
| + // TODO(gc) does not work with conservative sweeping. |
| + // ASSERT(tracer_->marked_count() == 0); |
| tracer_ = NULL; |
| } |
| @@ -135,6 +136,23 @@ static void VerifyMarkbitsAreClean() { |
| #endif |
| +static void ClearMarkbits(PagedSpace* space) { |
| + PageIterator it(space, PageIterator::PAGES_IN_USE); |
| + |
| + while (it.has_next()) { |
| + Page* p = it.next(); |
| + p->markbits()->Clear(); |
| + } |
| +} |
| + |
| +static void ClearMarkbits() { |
| + // We are sweeping code and map spaces precisely so clearing is not required. |
|
Erik Corry
2011/01/19 13:46:48
presisely -> precisely,
Vyacheslav Egorov (Chromium)
2011/01/20 16:40:21
Done.
|
| + ClearMarkbits(Heap::old_pointer_space()); |
| + ClearMarkbits(Heap::old_data_space()); |
| + ClearMarkbits(Heap::cell_space()); |
| +} |
| + |
| + |
| void MarkCompactCollector::Prepare(GCTracer* tracer) { |
| FLAG_flush_code = false; |
| FLAG_always_compact = false; |
| @@ -171,6 +189,8 @@ void MarkCompactCollector::Prepare(GCTracer* tracer) { |
| Marking::ClearRange(new_space_bottom, |
| static_cast<int>(new_space_top - new_space_bottom)); |
| + ClearMarkbits(); |
| + |
| #ifdef DEBUG |
| VerifyMarkbitsAreClean(); |
| @@ -1718,7 +1738,164 @@ void MarkCompactCollector::SweepNewSpace(NewSpace* space) { |
| } |
| -void MarkCompactCollector::SweepSpace(PagedSpace* space) { |
| +INLINE(static uint32_t SweepFree(PagedSpace* space, |
| + Page* p, |
| + uint32_t free_start, |
| + uint32_t region_end, |
| + uint32_t* cells)); |
| + |
| + |
| +static uint32_t SweepFree(PagedSpace* space, |
| + Page* p, |
| + uint32_t free_start, |
| + uint32_t region_end, |
| + uint32_t* cells) { |
| + uint32_t free_cell = Page::MarkbitsBitmap::Index2Cell(free_start); |
| + ASSERT(cells[free_cell] == 0); |
| + while (free_cell < region_end && cells[free_cell] == 0) { |
| + free_cell++; |
| + } |
| + |
| + if (free_cell >= region_end) { |
| + return free_cell; |
| + } |
| + |
| + uint32_t free_end = Page::MarkbitsBitmap::Cell2Index(free_cell); |
| + space->DeallocateBlock(p->Markbit2Address(free_start), |
| + (free_end - free_start) << kPointerSizeLog2, |
| + true); |
| + |
| + return free_cell; |
| +} |
| + |
| + |
| +INLINE(static uint32_t NextCandidate(uint32_t cell, |
| + uint32_t last_cell, |
| + uint32_t* cells)); |
| + |
| + |
| +static uint32_t NextCandidate(uint32_t cell, |
| + uint32_t last_cell, |
| + uint32_t* cells) { |
| + do { |
| + cell++; |
| + } while (cell < last_cell && cells[cell] != 0); |
| + return cell; |
| +} |
| + |
| + |
| +INLINE(static int SizeOfPreviousObject(Page* p, |
| + uint32_t cell, |
| + uint32_t* cells)); |
| + |
| + |
| +static int SizeOfPreviousObject(Page* p, |
| + uint32_t cell, |
| + uint32_t* cells) { |
| + ASSERT(cells[cell] == 0); |
| + if (cells[cell - 1] == 0) return 0; |
| + |
| + int leading_zeros = __builtin_clz(cells[cell - 1]) + 1; |

Erik Corry
2011/01/19 13:46:48
Variable should be called leading_zeros
Vyacheslav Egorov (Chromium)
2011/01/20 16:40:21
Done.
|
| + Address addr = |
| + p->Markbit2Address(Page::MarkbitsBitmap::Cell2Index(cell) - leading_zeros); |
| + HeapObject* obj = HeapObject::FromAddress(addr); |
| + ASSERT(obj->map()->IsMap()); |
| + return (obj->Size() >> kPointerSizeLog2) - leading_zeros; |
| +} |
| + |
| + |
| +static void SweepConservatively(PagedSpace* space, |
| + Page* p, |
| + Address* last_free_start) { |
| + typedef Page::MarkbitsBitmap::CellType CellType; |
| + Page::MarkbitsBitmap* markbits = p->markbits(); |
| + CellType* cells = markbits->cells(); |
| + |
| + uint32_t last_cell = |
| + Page::MarkbitsBitmap::Index2Cell( |
| + Page::MarkbitsBitmap::CellAlignIndex( |
| + p->Address2Markbit(p->AllocationTop()))); |
| + |
| + uint32_t cell = Page::kFirstUsedCell; |
|
Erik Corry
2011/01/19 13:46:48
See above.
|
| + uint32_t polluted_cell = Page::kFirstUsedCell; |

Erik Corry
2011/01/19 13:46:48
poluted -> polluted
Vyacheslav Egorov (Chromium)
2011/01/20 16:40:21
Done.
|
| + if (cells[cell] == 0) { |
| + polluted_cell = SweepFree(space, |
| + p, |
| + p->Address2Markbit(p->ObjectAreaStart()), |
| + last_cell, |
| + cells); |
| + |
| + if (polluted_cell >= last_cell) { |
| + // All cells are free. |
| + *last_free_start = p->ObjectAreaStart(); |
| + return; |
| + } |
| + } |
| + |
| + p->ClearFlag(Page::IS_CONTINIOUS); |
| + |
| + ASSERT(cells[polluted_cell] != 0); |
| + for (cell = NextCandidate(polluted_cell, last_cell, cells); |
| + cell < last_cell; |
| + cell = NextCandidate(polluted_cell, last_cell, cells)) { |
| + ASSERT(cells[cell] == 0); |
| + |
| + int size = SizeOfPreviousObject(p, cell, cells); |
| + if (size <= 0) { |
| + polluted_cell = SweepFree(space, |
| + p, |
| + Page::MarkbitsBitmap::Cell2Index(cell), |
| + last_cell, |
| + cells); |
| + if (polluted_cell >= last_cell) { |
| + // This free region is the last on the page. |
| + *last_free_start = p->Markbit2Address( |
| + Page::MarkbitsBitmap::Cell2Index(cell)); |
| + return; |
| + } |
| + } else { |
| + // Skip cells covered by this object. |
| + polluted_cell = cell + |
| + Page::MarkbitsBitmap::Index2Cell(size - 1); |
| + } |
| + } |
| +} |
| + |
| + |
| +static void SweepPrecisely(PagedSpace* space, |
| + Page* p, |
| + Address* last_free_start) { |
|
Erik Corry
2011/01/19 13:46:48
Lint?
Vyacheslav Egorov (Chromium)
2011/01/20 16:40:21
Done.
|
| + bool is_previous_alive = true; |
| + Address free_start = NULL; |
| + HeapObject* object; |
| + |
| + for (Address current = p->ObjectAreaStart(); |
| + current < p->AllocationTop(); |
| + current += object->Size()) { |
| + object = HeapObject::FromAddress(current); |
| + if (Marking::IsMarked(object)) { |
| + Marking::ClearMark(object); |
| + MarkCompactCollector::tracer()->decrement_marked_count(); |
| + |
| + if (!is_previous_alive) { // Transition from free to live. |
| + space->DeallocateBlock(free_start, |
| + static_cast<int>(current - free_start), |
| + true); |
| + is_previous_alive = true; |
| + } |
| + } else { |
| + MarkCompactCollector::ReportDeleteIfNeeded(object); |
| + if (is_previous_alive) { // Transition from live to free. |
| + free_start = current; |
| + is_previous_alive = false; |
| + } |
| + } |
| + } |
| + |
| + if (!is_previous_alive) *last_free_start = free_start; |
| +} |
| + |
| + |
| +void MarkCompactCollector::SweepSpace(PagedSpace* space, |
| + SweeperType sweeper) { |
| PageIterator it(space, PageIterator::PAGES_IN_USE); |
| // During sweeping of paged space we are trying to find longest sequences |
| @@ -1746,37 +1923,20 @@ void MarkCompactCollector::SweepSpace(PagedSpace* space) { |
| while (it.has_next()) { |
| Page* p = it.next(); |
| - bool is_previous_alive = true; |
| - Address free_start = NULL; |
| - HeapObject* object; |
| - |
| - for (Address current = p->ObjectAreaStart(); |
| - current < p->AllocationTop(); |
| - current += object->Size()) { |
| - object = HeapObject::FromAddress(current); |
| - if (Marking::IsMarked(object)) { |
| - Marking::ClearMark(object); |
| - MarkCompactCollector::tracer()->decrement_marked_count(); |
| - |
| - if (!is_previous_alive) { // Transition from free to live. |
| - space->DeallocateBlock(free_start, |
| - static_cast<int>(current - free_start), |
| - true); |
| - is_previous_alive = true; |
| - } |
| - } else { |
| - MarkCompactCollector::ReportDeleteIfNeeded(object); |
| - if (is_previous_alive) { // Transition from live to free. |
| - free_start = current; |
| - is_previous_alive = false; |
| - } |
| - } |
| - // The object is now unmarked for the call to Size() at the top of the |
| - // loop. |
| + Address free_start = p->AllocationTop(); |
| + |
| + if (sweeper == CONSERVATIVE) { |
| + SweepConservatively(space, p, &free_start); |
| + p->set_linearity_boundary(free_start); |
| + } else { |
| + ASSERT(sweeper == PRECISE); |
| + SweepPrecisely(space, p, &free_start); |
| } |
| - bool page_is_empty = (p->ObjectAreaStart() == p->AllocationTop()) |
| - || (!is_previous_alive && free_start == p->ObjectAreaStart()); |
| + bool page_is_empty = (p->ObjectAreaStart() == free_start); |
| + bool is_previous_alive = (free_start == p->AllocationTop()); |
| + |
| + ASSERT(free_start <= p->AllocationTop()); |
| if (page_is_empty) { |
| // This page is empty. Check whether we are in the middle of |
| @@ -1830,7 +1990,7 @@ void MarkCompactCollector::SweepSpace(PagedSpace* space) { |
| // Last used pages in space are empty. We can move allocation top backwards |
| // to the beginning of first empty page. |
| ASSERT(prev == space->AllocationTopPage()); |
| - |
| + space->FreePages(prec_first_empty_page, prev); |
| new_allocation_top = first_empty_page->ObjectAreaStart(); |
| } |
| @@ -1869,21 +2029,26 @@ void MarkCompactCollector::SweepSpaces() { |
| // the map space last because freeing non-live maps overwrites them and |
| // the other spaces rely on possibly non-live maps to get the sizes for |
| // non-live objects. |
| - SweepSpace(Heap::old_pointer_space()); |
| - SweepSpace(Heap::old_data_space()); |
| - SweepSpace(Heap::code_space()); |
| - SweepSpace(Heap::cell_space()); |
| + SweepSpace(Heap::old_pointer_space(), CONSERVATIVE); |
| + SweepSpace(Heap::old_data_space(), CONSERVATIVE); |
| + SweepSpace(Heap::code_space(), PRECISE); |
| + // TODO(gc): implement specialized sweeper for cell space. |
| + SweepSpace(Heap::cell_space(), CONSERVATIVE); |
| { GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_SWEEP_NEWSPACE); |
| SweepNewSpace(Heap::new_space()); |
| } |
| - SweepSpace(Heap::map_space()); |
| + // TODO(gc): ClearNonLiveTransitions depends on precise sweeping of |
| + // map space to detect whether unmarked map became dead in this |
| + // collection or in one of the previous ones. |
|
Erik Corry
2011/01/19 13:46:48
on -> or
Vyacheslav Egorov (Chromium)
2011/01/20 16:40:21
Done.
|
| + // TODO(gc): Implement specialized sweeper for map space. |
|
Erik Corry
2011/01/19 13:46:48
Missing TODO?
Vyacheslav Egorov (Chromium)
2011/01/20 16:40:21
Done.
|
| + SweepSpace(Heap::map_space(), PRECISE); |
| Heap::IterateDirtyRegions(Heap::map_space(), |
| &Heap::IteratePointersInDirtyMapsRegion, |
| &UpdatePointerToNewGen, |
| Heap::WATERMARK_SHOULD_BE_VALID); |
| - ASSERT(live_map_objects_size_ == Heap::map_space()->Size()); |
| + ASSERT(live_map_objects_size_ <= Heap::map_space()->Size()); |
| } |
| @@ -1941,6 +2106,10 @@ void MarkCompactCollector::ReportDeleteIfNeeded(HeapObject* obj) { |
| if (obj->IsCode()) { |
| PROFILE(CodeDeleteEvent(obj->address())); |
| } else if (obj->IsJSFunction()) { |
| + // TODO(gc): we are sweeping old pointer space conservatively thus |
| + // we can't notify attached profiler about death of functions. |
| + // Consider disabling conservative sweeping when profiler |
| + // is enabled. |
| PROFILE(FunctionDeleteEvent(obj->address())); |
| } |
| #endif |