Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 145 matching lines...) | |
| 156 | 156 |
| 157 void MarkCompactCollector::CollectGarbage() { | 157 void MarkCompactCollector::CollectGarbage() { |
| 158 // Make sure that Prepare() has been called. The individual steps below will | 158 // Make sure that Prepare() has been called. The individual steps below will |
| 159 // update the state as they proceed. | 159 // update the state as they proceed. |
| 160 ASSERT(state_ == PREPARE_GC); | 160 ASSERT(state_ == PREPARE_GC); |
| 161 | 161 |
| 162 // Prepare has selected whether to compact the old generation or not. | 162 // Prepare has selected whether to compact the old generation or not. |
| 163 // Tell the tracer. | 163 // Tell the tracer. |
| 164 if (IsCompacting()) tracer_->set_is_compacting(); | 164 if (IsCompacting()) tracer_->set_is_compacting(); |
| 165 | 165 |
| 166 if (heap_->incremental_marking()->IsStopped()) { | 166 if (!heap_->incremental_marking()->IsMarking()) { |
| | 167 heap_->incremental_marking()->Abort(); |
| 167 MarkLiveObjects(); | 168 MarkLiveObjects(); |
| 168 } else { | 169 } else { |
| 169 { | 170 { |
| 170 GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_MARK); | 171 GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_MARK); |
| 171 heap_->incremental_marking()->Finalize(); | 172 heap_->incremental_marking()->Finalize(); |
| 172 ASSERT(heap_->incremental_marking()->IsStopped()); | |
| 173 } | 173 } |
| 174 MarkLiveObjects(); | 174 MarkLiveObjects(); |
| 175 } | 175 } |
| | 176 ASSERT(heap_->incremental_marking()->IsStopped()); |
| 176 | 177 |
| 177 if (FLAG_collect_maps) ClearNonLiveTransitions(); | 178 if (FLAG_collect_maps) ClearNonLiveTransitions(); |
| 178 | 179 |
| 179 #ifdef DEBUG | 180 #ifdef DEBUG |
| 180 VerifyMarking(); | 181 VerifyMarking(); |
| 181 #endif | 182 #endif |
| 182 | 183 |
| 183 SweepSpaces(); | 184 SweepSpaces(); |
| 184 | 185 |
| 185 // TODO(gc) ISOLATES | 186 // TODO(gc) ISOLATES |
| (...skipping 2194 matching lines...) | |
| 2380 } | 2381 } |
| 2381 | 2382 |
| 2382 | 2383 |
| 2383 // Sweeps a space conservatively. After this has been done the larger free | 2384 // Sweeps a space conservatively. After this has been done the larger free |
| 2384 // spaces have been put on the free list and the smaller ones have been | 2385 // spaces have been put on the free list and the smaller ones have been |
| 2385 // ignored and left untouched. A free space is always either ignored or put | 2386 // ignored and left untouched. A free space is always either ignored or put |
| 2386 // on the free list, never split up into two parts. This is important | 2387 // on the free list, never split up into two parts. This is important |
| 2387 // because it means that any FreeSpace maps left actually describe a region of | 2388 // because it means that any FreeSpace maps left actually describe a region of |
| 2388 // memory that can be ignored when scanning. Dead objects other than free | 2389 // memory that can be ignored when scanning. Dead objects other than free |
| 2389 // spaces will not contain the free space map. | 2390 // spaces will not contain the free space map. |
| 2390 static void SweepConservatively(PagedSpace* space, | 2391 int MarkCompactCollector::SweepConservatively(PagedSpace* space, Page* p) { |
| 2391 Page* p) { | 2392 int freed_bytes = 0; |
| | 2393 |
| 2392 MarkBit::CellType* cells = p->markbits()->cells(); | 2394 MarkBit::CellType* cells = p->markbits()->cells(); |
| 2393 | 2395 |
| 2394 p->SetFlag(MemoryChunk::WAS_SWEPT_CONSERVATIVELY); | 2396 p->SetFlag(MemoryChunk::WAS_SWEPT_CONSERVATIVELY); |
| 2395 | 2397 |
| 2396 // This is the start of the 32 word block that we are currently looking at. | 2398 // This is the start of the 32 word block that we are currently looking at. |
| 2397 Address block_address = p->ObjectAreaStart(); | 2399 Address block_address = p->ObjectAreaStart(); |
| 2398 | 2400 |
| 2399 int last_cell_index = | 2401 int last_cell_index = |
| 2400 Page::MarkbitsBitmap::IndexToCell( | 2402 Page::MarkbitsBitmap::IndexToCell( |
| 2401 Page::MarkbitsBitmap::CellAlignIndex( | 2403 Page::MarkbitsBitmap::CellAlignIndex( |
| 2402 p->AddressToMarkbitIndex(p->ObjectAreaEnd()))); | 2404 p->AddressToMarkbitIndex(p->ObjectAreaEnd()))); |
| 2403 | 2405 |
| 2404 int cell_index = Page::kFirstUsedCell; | 2406 int cell_index = Page::kFirstUsedCell; |
| 2405 | 2407 |
| 2406 // Skip over all the dead objects at the start of the page and mark them free. | 2408 // Skip over all the dead objects at the start of the page and mark them free. |
| 2407 for (cell_index = Page::kFirstUsedCell; | 2409 for (cell_index = Page::kFirstUsedCell; |
| 2408 cell_index < last_cell_index; | 2410 cell_index < last_cell_index; |
| 2409 cell_index++, block_address += 32 * kPointerSize) { | 2411 cell_index++, block_address += 32 * kPointerSize) { |
| 2410 if (cells[cell_index] != 0) break; | 2412 if (cells[cell_index] != 0) break; |
| 2411 } | 2413 } |
| 2412 int size = block_address - p->ObjectAreaStart(); | 2414 int size = block_address - p->ObjectAreaStart(); |
| 2413 if (cell_index == last_cell_index) { | 2415 if (cell_index == last_cell_index) { |
| 2414 space->Free(p->ObjectAreaStart(), size); | 2416 freed_bytes += space->Free(p->ObjectAreaStart(), size); |
| 2415 return; | 2417 return freed_bytes; |
| 2416 } | 2418 } |
| 2417 // Grow the size of the start-of-page free space a little to get up to the | 2419 // Grow the size of the start-of-page free space a little to get up to the |
| 2418 // first live object. | 2420 // first live object. |
| 2419 Address free_end = StartOfLiveObject(block_address, cells[cell_index]); | 2421 Address free_end = StartOfLiveObject(block_address, cells[cell_index]); |
| 2420 // Free the first free space. | 2422 // Free the first free space. |
| 2421 size = free_end - p->ObjectAreaStart(); | 2423 size = free_end - p->ObjectAreaStart(); |
| 2422 space->Free(p->ObjectAreaStart(), size); | 2424 freed_bytes += space->Free(p->ObjectAreaStart(), size); |
| 2423 // The start of the current free area is represented in undigested form by | 2425 // The start of the current free area is represented in undigested form by |
| 2424 // the address of the last 32-word section that contained a live object and | 2426 // the address of the last 32-word section that contained a live object and |
| 2425 // the marking bitmap for that cell, which describes where the live object | 2427 // the marking bitmap for that cell, which describes where the live object |
| 2426 // started. Unless we find a large free space in the bitmap we will not | 2428 // started. Unless we find a large free space in the bitmap we will not |
| 2427 // digest this pair into a real address. We start the iteration here at the | 2429 // digest this pair into a real address. We start the iteration here at the |
| 2428 // first word in the marking bit map that indicates a live object. | 2430 // first word in the marking bit map that indicates a live object. |
| 2429 Address free_start = block_address; | 2431 Address free_start = block_address; |
| 2430 uint32_t free_start_cell = cells[cell_index]; | 2432 uint32_t free_start_cell = cells[cell_index]; |
| 2431 | 2433 |
| 2432 for ( ; | 2434 for ( ; |
| 2433 cell_index < last_cell_index; | 2435 cell_index < last_cell_index; |
| 2434 cell_index++, block_address += 32 * kPointerSize) { | 2436 cell_index++, block_address += 32 * kPointerSize) { |
| 2435 ASSERT((unsigned)cell_index == | 2437 ASSERT((unsigned)cell_index == |
| 2436 Page::MarkbitsBitmap::IndexToCell( | 2438 Page::MarkbitsBitmap::IndexToCell( |
| 2437 Page::MarkbitsBitmap::CellAlignIndex( | 2439 Page::MarkbitsBitmap::CellAlignIndex( |
| 2438 p->AddressToMarkbitIndex(block_address)))); | 2440 p->AddressToMarkbitIndex(block_address)))); |
| 2439 uint32_t cell = cells[cell_index]; | 2441 uint32_t cell = cells[cell_index]; |
| 2440 if (cell != 0) { | 2442 if (cell != 0) { |
| 2441 // We have a live object. Check approximately whether it is more than 32 | 2443 // We have a live object. Check approximately whether it is more than 32 |
| 2442 // words since the last live object. | 2444 // words since the last live object. |
| 2443 if (block_address - free_start > 32 * kPointerSize) { | 2445 if (block_address - free_start > 32 * kPointerSize) { |
| 2444 free_start = DigestFreeStart(free_start, free_start_cell); | 2446 free_start = DigestFreeStart(free_start, free_start_cell); |
| 2445 if (block_address - free_start > 32 * kPointerSize) { | 2447 if (block_address - free_start > 32 * kPointerSize) { |
| 2446 // Now that we know the exact start of the free space it still looks | 2448 // Now that we know the exact start of the free space it still looks |
| 2447 // like we have a large enough free space to be worth bothering with. | 2449 // like we have a large enough free space to be worth bothering with. |
| 2448 // so now we need to find the start of the first live object at the | 2450 // so now we need to find the start of the first live object at the |
| 2449 // end of the free space. | 2451 // end of the free space. |
| 2450 free_end = StartOfLiveObject(block_address, cell); | 2452 free_end = StartOfLiveObject(block_address, cell); |
| 2451 space->Free(free_start, free_end - free_start); | 2453 freed_bytes += space->Free(free_start, free_end - free_start); |
| 2452 } | 2454 } |
| 2453 } | 2455 } |
| 2454 // Update our undigested record of where the current free area started. | 2456 // Update our undigested record of where the current free area started. |
| 2455 free_start = block_address; | 2457 free_start = block_address; |
| 2456 free_start_cell = cell; | 2458 free_start_cell = cell; |
| 2457 } | 2459 } |
| 2458 } | 2460 } |
| 2459 | 2461 |
| 2460 // Handle the free space at the end of the page. | 2462 // Handle the free space at the end of the page. |
| 2461 if (block_address - free_start > 32 * kPointerSize) { | 2463 if (block_address - free_start > 32 * kPointerSize) { |
| 2462 free_start = DigestFreeStart(free_start, free_start_cell); | 2464 free_start = DigestFreeStart(free_start, free_start_cell); |
| 2463 space->Free(free_start, block_address - free_start); | 2465 freed_bytes += space->Free(free_start, block_address - free_start); |
| 2464 } | 2466 } |
| | 2467 |
| | 2468 return freed_bytes; |
| 2465 } | 2469 } |
| 2466 | 2470 |
| 2467 | 2471 |
| 2468 // Sweep a space precisely. After this has been done the space can | 2472 // Sweep a space precisely. After this has been done the space can |
| 2469 // be iterated precisely, hitting only the live objects. Code space | 2473 // be iterated precisely, hitting only the live objects. Code space |
| 2470 // is always swept precisely because we want to be able to iterate | 2474 // is always swept precisely because we want to be able to iterate |
| 2471 // over it. Map space is swept precisely, because it is not compacted. | 2475 // over it. Map space is swept precisely, because it is not compacted. |
| 2472 static void SweepPrecisely(PagedSpace* space, | 2476 static void SweepPrecisely(PagedSpace* space, |
| 2473 Page* p) { | 2477 Page* p) { |
| 2474 MarkBit::CellType* cells = p->markbits()->cells(); | 2478 MarkBit::CellType* cells = p->markbits()->cells(); |
| (...skipping 30 matching lines...) | |
| 2505 } | 2509 } |
| 2506 } | 2510 } |
| 2507 if (free_start != p->ObjectAreaEnd()) { | 2511 if (free_start != p->ObjectAreaEnd()) { |
| 2508 space->Free(free_start, p->ObjectAreaEnd() - free_start); | 2512 space->Free(free_start, p->ObjectAreaEnd() - free_start); |
| 2509 } | 2513 } |
| 2510 } | 2514 } |
| 2511 | 2515 |
| 2512 | 2516 |
| 2513 void MarkCompactCollector::SweepSpace(PagedSpace* space, | 2517 void MarkCompactCollector::SweepSpace(PagedSpace* space, |
| 2514 SweeperType sweeper) { | 2518 SweeperType sweeper) { |
| 2515 space->set_was_swept_conservatively(sweeper == CONSERVATIVE); | 2519 space->set_was_swept_conservatively(sweeper == CONSERVATIVE || |
| | 2520 sweeper == LAZY_CONSERVATIVE); |
| 2516 | 2521 |
| 2517 space->ClearStats(); | 2522 space->ClearStats(); |
| 2518 | 2523 |
| 2519 PageIterator it(space); | 2524 PageIterator it(space); |
| 2520 | 2525 |
| 2521 // During sweeping of paged space we are trying to find longest sequences | 2526 int freed_bytes = 0; |
| 2522 // of pages without live objects and free them (instead of putting them on | 2527 int newspace_size = space->heap()->new_space()->Size(); |
| 2523 // the free list). | |
| 2524 | |
| 2525 // Page preceding current. | |
| 2526 Page* prev = Page::FromAddress(NULL); | |
| 2527 | 2528 |
| 2528 while (it.has_next()) { | 2529 while (it.has_next()) { |
| 2529 Page* p = it.next(); | 2530 Page* p = it.next(); |
| 2530 space->IncreaseCapacity(p->ObjectAreaEnd() - p->ObjectAreaStart()); | |
| 2531 | 2531 |
| 2532 if (sweeper == CONSERVATIVE) { | 2532 switch (sweeper) { |
| 2533 SweepConservatively(space, p); | 2533 case CONSERVATIVE: |
| 2534 } else { | 2534 SweepConservatively(space, p); |
| 2535 ASSERT(sweeper == PRECISE); | 2535 break; |
| 2536 SweepPrecisely(space, p); | 2536 case LAZY_CONSERVATIVE: |
| | 2537 freed_bytes += SweepConservatively(space, p); |
| | 2538 if (freed_bytes >= newspace_size && p != space->LastPage()) { |
| | 2539 space->SetPagesToSweep(p->next_page(), space->LastPage()); |
| | 2540 return; |

Erik Corry (2011/05/09 21:20:11): I think we will want to tune this heuristic. If w

| | 2541 } |
| | 2542 break; |
| | 2543 case PRECISE: |
| | 2544 SweepPrecisely(space, p); |
| | 2545 break; |
| | 2546 default: |
| | 2547 UNREACHABLE(); |
| 2537 } | 2548 } |
| 2538 prev = p; | |
| 2539 } | 2549 } |
| | 2550 |
| 2540 // TODO(gc): set up allocation top and limit using the free list. | 2551 // TODO(gc): set up allocation top and limit using the free list. |
| 2541 } | 2552 } |
| 2542 | 2553 |
| 2543 | 2554 |
| 2544 void MarkCompactCollector::SweepSpaces() { | 2555 void MarkCompactCollector::SweepSpaces() { |
| 2545 GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_SWEEP); | 2556 GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_SWEEP); |
| 2546 #ifdef DEBUG | 2557 #ifdef DEBUG |
| 2547 state_ = SWEEP_SPACES; | 2558 state_ = SWEEP_SPACES; |
| 2548 #endif | 2559 #endif |
| 2549 | 2560 |
| 2550 ASSERT(!IsCompacting()); | 2561 ASSERT(!IsCompacting()); |
| 2551 SweeperType how_to_sweep = CONSERVATIVE; | 2562 SweeperType how_to_sweep = |
| | 2563 FLAG_lazy_sweeping ? LAZY_CONSERVATIVE : CONSERVATIVE; |
| 2552 if (sweep_precisely_) how_to_sweep = PRECISE; | 2564 if (sweep_precisely_) how_to_sweep = PRECISE; |
| 2553 // Noncompacting collections simply sweep the spaces to clear the mark | 2565 // Noncompacting collections simply sweep the spaces to clear the mark |
| 2554 // bits and free the nonlive blocks (for old and map spaces). We sweep | 2566 // bits and free the nonlive blocks (for old and map spaces). We sweep |
| 2555 // the map space last because freeing non-live maps overwrites them and | 2567 // the map space last because freeing non-live maps overwrites them and |
| 2556 // the other spaces rely on possibly non-live maps to get the sizes for | 2568 // the other spaces rely on possibly non-live maps to get the sizes for |
| 2557 // non-live objects. | 2569 // non-live objects. |
| 2558 SweepSpace(HEAP->old_pointer_space(), how_to_sweep); | 2570 SweepSpace(HEAP->old_pointer_space(), how_to_sweep); |
| 2559 SweepSpace(HEAP->old_data_space(), how_to_sweep); | 2571 SweepSpace(HEAP->old_data_space(), how_to_sweep); |
| 2560 SweepSpace(HEAP->code_space(), PRECISE); | 2572 SweepSpace(HEAP->code_space(), PRECISE); |
| 2561 // TODO(gc): implement specialized sweeper for cell space. | 2573 // TODO(gc): implement specialized sweeper for cell space. |
| (...skipping 93 matching lines...) | |
| 2655 } | 2667 } |
| 2656 | 2668 |
| 2657 | 2669 |
| 2658 void MarkCompactCollector::Initialize() { | 2670 void MarkCompactCollector::Initialize() { |
| 2659 StaticPointersToNewGenUpdatingVisitor::Initialize(); | 2671 StaticPointersToNewGenUpdatingVisitor::Initialize(); |
| 2660 StaticMarkingVisitor::Initialize(); | 2672 StaticMarkingVisitor::Initialize(); |
| 2661 } | 2673 } |
| 2662 | 2674 |
| 2663 | 2675 |
| 2664 } } // namespace v8::internal | 2676 } } // namespace v8::internal |
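
The block comment above `SweepConservatively` states the invariant the sweeper relies on: free runs of at least 32 words are handed to the free list whole, smaller gaps are ignored, and a free region is never split. Below is a minimal sketch of that cell-by-cell bitmap walk, not the real implementation: `kBlockWords`, `FreeRegion` and `SweepCellsConservatively` are hypothetical stand-ins, and the real code additionally refines run boundaries inside a cell with `DigestFreeStart` and `StartOfLiveObject`.

```cpp
#include <cstddef>
#include <cstdint>

constexpr size_t kPointerSize = sizeof(void*);
constexpr size_t kBlockWords = 32;                         // words covered by one bitmap cell
constexpr size_t kBlockBytes = kBlockWords * kPointerSize;

// Hypothetical callback that would put [start, start + size) on a free list.
using FreeRegion = void (*)(uintptr_t start, size_t size);

// Walks the mark bitmap one 32-bit cell (= 32 words) at a time and frees only
// gaps of at least one full block; smaller gaps are left untouched, so a free
// region is never split. Returns the number of bytes freed. cells[i] holds the
// mark bits for the block starting at area_start + i * kBlockBytes.
size_t SweepCellsConservatively(const uint32_t* cells, size_t cell_count,
                                uintptr_t area_start, FreeRegion free_region) {
  size_t freed = 0;
  uintptr_t free_start = area_start;       // start of the current dead run
  for (size_t i = 0; i < cell_count; ++i) {
    uintptr_t block = area_start + i * kBlockBytes;
    if (cells[i] == 0) continue;           // block is fully dead, keep extending the run
    if (block - free_start >= kBlockBytes) {
      free_region(free_start, block - free_start);  // run is big enough to bother with
      freed += block - free_start;
    }
    free_start = block + kBlockBytes;      // conservatively treat the whole live block as used
  }
  uintptr_t area_end = area_start + cell_count * kBlockBytes;
  if (area_end - free_start >= kBlockBytes) {        // trailing dead run at the end of the page
    free_region(free_start, area_end - free_start);
    freed += area_end - free_start;
  }
  return freed;
}
```

Returning the freed byte count, as this CL changes `SweepConservatively` to do, is what lets the caller implement the lazy cutoff sketched next.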
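
The `LAZY_CONSERVATIVE` branch added to `SweepSpace` stops eager sweeping once the bytes returned to the free list reach the current new-space size, and records the remaining pages for later lazy sweeping via `SetPagesToSweep`. Here is a simplified, self-contained sketch of that policy, assuming toy `SimplePage`/`SimpleSpace` types in place of V8's `Page` and `PagedSpace` (all names here are hypothetical; only the stopping rule mirrors the CL):

```cpp
#include <cstddef>
#include <vector>

// Toy stand-ins for V8's Page and PagedSpace; names and fields are hypothetical.
struct SimplePage {
  size_t dead_bytes = 0;     // bytes a conservative sweep of this page would recover
  bool swept = false;
};

struct SimpleSpace {
  std::vector<SimplePage> pages;
  size_t first_unswept = 0;  // index of the first page left for lazy sweeping
};

// Placeholder for the per-page sweep; mirrors SweepConservatively() now
// returning the number of bytes it freed.
static size_t SweepPageConservatively(SimplePage* p) {
  p->swept = true;
  return p->dead_bytes;
}

// Eager phase of LAZY_CONSERVATIVE sweeping: sweep pages in order and stop as
// soon as the freed bytes can absorb roughly one new-space worth of promoted
// allocation, leaving the remaining pages to be swept lazily on demand.
static void LazyConservativeSweep(SimpleSpace* space, size_t newspace_size) {
  size_t freed_bytes = 0;
  for (size_t i = 0; i < space->pages.size(); ++i) {
    freed_bytes += SweepPageConservatively(&space->pages[i]);
    if (freed_bytes >= newspace_size && i + 1 < space->pages.size()) {
      space->first_unswept = i + 1;  // where lazy sweeping resumes later
      return;
    }
  }
  space->first_unswept = space->pages.size();  // everything was swept eagerly
}
```

The `freed_bytes >= newspace_size` cutoff is the heuristic Erik Corry's inline comment suggests tuning: scaling the threshold, for example to a small multiple of the new-space size, trades eager sweeping work at GC time against how often lazy sweeping has to run during allocation.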