Index: src/mark-compact.cc
===================================================================
--- src/mark-compact.cc	(revision 4449)
+++ src/mark-compact.cc	(working copy)
@@ -1055,15 +1055,22 @@
   PageIterator it(space, PageIterator::PAGES_IN_USE);
   while (it.has_next()) {
     Page* p = it.next();
-    // The offset of each live object in the page from the first live object
-    // in the page.
-    int offset = 0;
-    EncodeForwardingAddressesInRange<Alloc,
-                                     EncodeForwardingAddressInPagedSpace,
-                                     ProcessNonLive>(
-        p->ObjectAreaStart(),
-        p->AllocationTop(),
-        &offset);
+
+    if (p->WasInUseBeforeMC()) {
+      // The offset of each live object in the page from the first live object
+      // in the page.
+      int offset = 0;
+      EncodeForwardingAddressesInRange<Alloc,
+                                       EncodeForwardingAddressInPagedSpace,
+                                       ProcessNonLive>(
+          p->ObjectAreaStart(),
+          p->AllocationTop(),
+          &offset);
+    } else {
+      // Mark the whole unused page as a free region.
+      EncodeFreeRegion(p->ObjectAreaStart(),
+                       p->AllocationTop() - p->ObjectAreaStart());
+    }
   }
 }
 
@@ -1277,6 +1284,23 @@
 
 static void SweepSpace(PagedSpace* space, DeallocateFunction dealloc) {
   PageIterator it(space, PageIterator::PAGES_IN_USE);
+
+  // During sweeping of a paged space we try to find the longest sequences
+  // of pages without live objects and free them (instead of putting them on
+  // the free list).
+  Page* prev = NULL;  // Page preceding current.
+  Page* first_empty_page = NULL;  // First empty page in a sequence.
+  Page* prec_first_empty_page = NULL;  // Page preceding first empty page.
+
+  // If the last used page of the space ends with a sequence of dead objects
+  // we can adjust the allocation top instead of putting this free area into
+  // the free list. Thus during sweeping we keep track of such areas
+  // and defer their deallocation until the sweeping of the next page
+  // is done: if one of the next pages contains live objects we have
+  // to put such an area into the free list.
+  Address last_free_start = NULL;
+  int last_free_size = 0;
+
   while (it.has_next()) {
     Page* p = it.next();
 
@@ -1291,8 +1315,9 @@
       if (object->IsMarked()) {
         object->ClearMark();
         MarkCompactCollector::tracer()->decrement_marked_count();
+
         if (!is_previous_alive) {  // Transition from free to live.
-          dealloc(free_start, static_cast<int>(current - free_start));
+          dealloc(free_start, static_cast<int>(current - free_start), true);
           is_previous_alive = true;
         }
       } else {
@@ -1306,39 +1331,112 @@
       // loop.
     }
 
-    // If the last region was not live we need to deallocate from
-    // free_start to the allocation top in the page.
-    if (!is_previous_alive) {
-      int free_size = static_cast<int>(p->AllocationTop() - free_start);
-      if (free_size > 0) {
-        dealloc(free_start, free_size);
+    bool page_is_empty = (p->ObjectAreaStart() == p->AllocationTop())
+        || (!is_previous_alive && free_start == p->ObjectAreaStart());
+
+    if (page_is_empty) {
+      // This page is empty. Check whether we are in the middle of
+      // a sequence of empty pages and start one if not.
+      if (first_empty_page == NULL) {
+        first_empty_page = p;
+        prec_first_empty_page = prev;
       }
+
+      if (!is_previous_alive) {
+        // There are dead objects on this page. Update space accounting stats
+        // without putting anything into the free list.
+        int size_in_bytes = static_cast<int>(p->AllocationTop() - free_start);
+        if (size_in_bytes > 0) {
+          dealloc(free_start, size_in_bytes, false);
+        }
+      }
+    } else {
+      // This page is not empty. The sequence of empty pages ended on the
+      // previous one.
+      if (first_empty_page != NULL) {
+        space->FreePages(prec_first_empty_page, prev);
+        prec_first_empty_page = first_empty_page = NULL;
+      }
+
+      // If there is a free ending area on one of the previous pages we have
+      // to deallocate that area and put it on the free list.
+      if (last_free_size > 0) {
+        dealloc(last_free_start, last_free_size, true);
+        last_free_start = NULL;
+        last_free_size  = 0;
+      }
+
+      // If the last region of this page was not live we remember it.
+      if (!is_previous_alive) {
+        ASSERT(last_free_size == 0);
+        last_free_size = static_cast<int>(p->AllocationTop() - free_start);
+        last_free_start = free_start;
+      }
     }
+
+    prev = p;
   }
+
+  // We reached the end of the space. See if we need to adjust allocation top.
+  Address new_allocation_top = NULL;
+
+  if (first_empty_page != NULL) {
+    // The last used pages in the space are empty. We can move the allocation
+    // top backwards to the beginning of the first empty page.
+    ASSERT(prev == space->AllocationTopPage());
+
+    new_allocation_top = first_empty_page->ObjectAreaStart();
+  }
+
+  if (last_free_size > 0) {
+    // There was a free ending area on the previous page.
+    // Deallocate it without putting it into the free list and move allocation
+    // top to the beginning of this free area.
+    dealloc(last_free_start, last_free_size, false);
+    new_allocation_top = last_free_start;
+  }
+
+  if (new_allocation_top != NULL) {
+    Page* new_allocation_top_page = Page::FromAllocationTop(new_allocation_top);
+
+    ASSERT(((first_empty_page == NULL) &&
+            (new_allocation_top_page == space->AllocationTopPage())) ||
+           ((first_empty_page != NULL) && (last_free_size > 0) &&
+            (new_allocation_top_page == prec_first_empty_page)) ||
+           ((first_empty_page != NULL) && (last_free_size == 0) &&
+            (new_allocation_top_page == first_empty_page)));
+
+    space->SetTop(new_allocation_top,
+                  new_allocation_top_page->ObjectAreaEnd());
+  }
 }
 
 
 void MarkCompactCollector::DeallocateOldPointerBlock(Address start,
-                                                     int size_in_bytes) {
+                                                     int size_in_bytes,
+                                                     bool add_to_freelist) {
   Heap::ClearRSetRange(start, size_in_bytes);
-  Heap::old_pointer_space()->Free(start, size_in_bytes);
+  Heap::old_pointer_space()->Free(start, size_in_bytes, add_to_freelist);
 }
 
 
 void MarkCompactCollector::DeallocateOldDataBlock(Address start,
-                                                  int size_in_bytes) {
-  Heap::old_data_space()->Free(start, size_in_bytes);
+                                                  int size_in_bytes,
+                                                  bool add_to_freelist) {
+  Heap::old_data_space()->Free(start, size_in_bytes, add_to_freelist);
 }
 
 
 void MarkCompactCollector::DeallocateCodeBlock(Address start,
-                                               int size_in_bytes) {
-  Heap::code_space()->Free(start, size_in_bytes);
+                                               int size_in_bytes,
+                                               bool add_to_freelist) {
+  Heap::code_space()->Free(start, size_in_bytes, add_to_freelist);
 }
 
 
 void MarkCompactCollector::DeallocateMapBlock(Address start,
-                                              int size_in_bytes) {
+                                              int size_in_bytes,
+                                              bool add_to_freelist) {
   // Objects in map space are assumed to have size Map::kSize and a
   // valid map in their first word.  Thus, we break the free block up into
   // chunks and free them separately.
@@ -1346,13 +1444,14 @@
   Heap::ClearRSetRange(start, size_in_bytes);
   Address end = start + size_in_bytes;
   for (Address a = start; a < end; a += Map::kSize) {
-    Heap::map_space()->Free(a);
+    Heap::map_space()->Free(a, add_to_freelist);
   }
 }
 
 
 void MarkCompactCollector::DeallocateCellBlock(Address start,
-                                               int size_in_bytes) {
+                                               int size_in_bytes,
+                                               bool add_to_freelist) {
   // Free-list elements in cell space are assumed to have a fixed size.
   // We break the free block into chunks and add them to the free list
   // individually.
@@ -1361,7 +1460,7 @@
   Heap::ClearRSetRange(start, size_in_bytes);
   Address end = start + size_in_bytes;
   for (Address a = start; a < end; a += size) {
-    Heap::cell_space()->Free(a);
+    Heap::cell_space()->Free(a, add_to_freelist);
   }
 }
 
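A minimal, self-contained sketch of the bookkeeping idea behind the new SweepSpace loop follows. It is not V8 code: the Page struct below is a two-field stand-in, the byte counts are invented, and each page's dead bytes are modelled as a single trailing area. It only shows how one pass can batch whole runs of empty pages for release (the FreePages path above) and defer the trailing dead area of the most recent live page, so that if nothing live follows it the area is reclaimed by moving the allocation top back instead of growing the free list.

  #include <cstdio>
  #include <vector>

  // Simplified stand-in for a page in a paged space (not the V8 Page class).
  struct Page {
    int area_size;   // Bytes between object area start and allocation top.
    int live_bytes;  // Bytes occupied by marked objects; 0 means empty page.
    bool IsEmpty() const { return live_bytes == 0; }
  };

  int main() {
    // Pages in allocation order; the last three are entirely dead.
    std::vector<Page> pages = {{8192, 8192}, {8192, 512}, {8192, 4096},
                               {8192, 0},    {8192, 0},   {8192, 0}};

    int first_empty = -1;         // First page of the current empty run.
    int freed_pages = 0;          // Whole pages released (FreePages analogue).
    int free_list_bytes = 0;      // Dead bytes that must go on the free list.
    int deferred_tail_bytes = 0;  // Trailing dead area whose fate is open.

    for (size_t i = 0; i < pages.size(); i++) {
      const Page& p = pages[i];
      if (p.IsEmpty()) {
        if (first_empty < 0) first_empty = static_cast<int>(i);  // Start a run.
      } else {
        if (first_empty >= 0) {  // An empty run ended before this live page.
          freed_pages += static_cast<int>(i) - first_empty;
          first_empty = -1;
        }
        // A tail deferred on an earlier page now has live data after it,
        // so it has to be put on the free list after all.
        free_list_bytes += deferred_tail_bytes;
        // Defer this page's dead tail; the top may later move back to its start.
        deferred_tail_bytes = p.area_size - p.live_bytes;
      }
    }

    // Whatever is still pending at the end of the space is reclaimed by
    // moving the allocation top backwards rather than via the free list.
    if (first_empty >= 0) {
      freed_pages += static_cast<int>(pages.size()) - first_empty;
    }
    std::printf("freed pages: %d, free-list bytes: %d, top moved back: %d\n",
                freed_pages, free_list_bytes, deferred_tail_bytes);
    return 0;
  }

With the sample layout above the sketch reports three pages freed wholesale, 7680 bytes put on the free list (the dead tail of a page that is still followed by live data), and the allocation top moved back by 4096 bytes.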