| OLD | NEW | 
|---|---|
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. | 
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be | 
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. | 
| 4 | 4 | 
| 5 #include "src/v8.h" | 5 #include "src/v8.h" | 
| 6 | 6 | 
| 7 #include "src/base/atomicops.h" | 7 #include "src/base/atomicops.h" | 
| 8 #include "src/code-stubs.h" | 8 #include "src/code-stubs.h" | 
| 9 #include "src/compilation-cache.h" | 9 #include "src/compilation-cache.h" | 
| 10 #include "src/cpu-profiler.h" | 10 #include "src/cpu-profiler.h" | 
| (...skipping 1957 matching lines...) | |
| 1968       if (marking_deque->IsFull()) return; | 1968       if (marking_deque->IsFull()) return; | 
| 1969       offset += 2; | 1969       offset += 2; | 
| 1970       grey_objects >>= 2; | 1970       grey_objects >>= 2; | 
| 1971     } | 1971     } | 
| 1972 | 1972 | 
| 1973     grey_objects >>= (Bitmap::kBitsPerCell - 1); | 1973     grey_objects >>= (Bitmap::kBitsPerCell - 1); | 
| 1974   } | 1974   } | 
| 1975 } | 1975 } | 
| 1976 | 1976 | 
| 1977 | 1977 | 
| 1978 int MarkCompactCollector::DiscoverAndPromoteBlackObjectsOnPage( | 1978 int MarkCompactCollector::DiscoverAndEvacuateBlackObjectsOnPage( | 
| 1979     NewSpace* new_space, | 1979     NewSpace* new_space, | 
| 1980     NewSpacePage* p) { | 1980     NewSpacePage* p) { | 
| 1981   ASSERT(strcmp(Marking::kWhiteBitPattern, "00") == 0); | 1981   ASSERT(strcmp(Marking::kWhiteBitPattern, "00") == 0); | 
| 1982   ASSERT(strcmp(Marking::kBlackBitPattern, "10") == 0); | 1982   ASSERT(strcmp(Marking::kBlackBitPattern, "10") == 0); | 
| 1983   ASSERT(strcmp(Marking::kGreyBitPattern, "11") == 0); | 1983   ASSERT(strcmp(Marking::kGreyBitPattern, "11") == 0); | 
| 1984   ASSERT(strcmp(Marking::kImpossibleBitPattern, "01") == 0); | 1984   ASSERT(strcmp(Marking::kImpossibleBitPattern, "01") == 0); | 
| 1985 | 1985 | 
| 1986   MarkBit::CellType* cells = p->markbits()->cells(); | 1986   MarkBit::CellType* cells = p->markbits()->cells(); | 
| 1987   int survivors_size = 0; | 1987   int survivors_size = 0; | 
| 1988 | 1988 | 
| (...skipping 12 matching lines...) | |
| 2001       Address address = cell_base + offset * kPointerSize; | 2001       Address address = cell_base + offset * kPointerSize; | 
| 2002       HeapObject* object = HeapObject::FromAddress(address); | 2002       HeapObject* object = HeapObject::FromAddress(address); | 
| 2003 | 2003 | 
| 2004       int size = object->Size(); | 2004       int size = object->Size(); | 
| 2005       survivors_size += size; | 2005       survivors_size += size; | 
| 2006 | 2006 | 
| 2007       Heap::UpdateAllocationSiteFeedback(object, Heap::RECORD_SCRATCHPAD_SLOT); | 2007       Heap::UpdateAllocationSiteFeedback(object, Heap::RECORD_SCRATCHPAD_SLOT); | 
| 2008 | 2008 | 
| 2009       offset++; | 2009       offset++; | 
| 2010       current_cell >>= 1; | 2010       current_cell >>= 1; | 
| 2011       // Aggressively promote young survivors to the old space. | 2011 | 
| 2012       if (TryPromoteObject(object, size)) { | 2012       // TODO(hpayer): Refactor EvacuateObject and call this function instead. | 
|  | 2013       if (heap()->ShouldBePromoted(object->address(), size) && | 
|  | 2014           TryPromoteObject(object, size)) { | 
| 2013         continue; | 2015         continue; | 
| 2014       } | 2016       } | 
| 2015 | 2017 | 
| 2016       // Promotion failed. Just migrate object to another semispace. | 2018       // Promotion failed. Just migrate object to another semispace. | 
| 2017       AllocationResult allocation = new_space->AllocateRaw(size); | 2019       AllocationResult allocation = new_space->AllocateRaw(size); | 
| 2018       if (allocation.IsRetry()) { | 2020       if (allocation.IsRetry()) { | 
| 2019         if (!new_space->AddFreshPage()) { | 2021         if (!new_space->AddFreshPage()) { | 
| 2020           // Shouldn't happen. We are sweeping linearly, and to-space | 2022           // Shouldn't happen. We are sweeping linearly, and to-space | 
| 2021           // has the same number of pages as from-space, so there is | 2023           // has the same number of pages as from-space, so there is | 
| 2022           // always room. | 2024           // always room. | 
| (...skipping 1022 matching lines...) | |
| 3045 | 3047 | 
| 3046   int survivors_size = 0; | 3048   int survivors_size = 0; | 
| 3047 | 3049 | 
| 3048   // First pass: traverse all objects in inactive semispace, remove marks, | 3050   // First pass: traverse all objects in inactive semispace, remove marks, | 
| 3049   // migrate live objects and write forwarding addresses.  This stage puts | 3051   // migrate live objects and write forwarding addresses.  This stage puts | 
| 3050   // new entries in the store buffer and may cause some pages to be marked | 3052   // new entries in the store buffer and may cause some pages to be marked | 
| 3051   // scan-on-scavenge. | 3053   // scan-on-scavenge. | 
| 3052   NewSpacePageIterator it(from_bottom, from_top); | 3054   NewSpacePageIterator it(from_bottom, from_top); | 
| 3053   while (it.has_next()) { | 3055   while (it.has_next()) { | 
| 3054     NewSpacePage* p = it.next(); | 3056     NewSpacePage* p = it.next(); | 
| 3055     survivors_size += DiscoverAndPromoteBlackObjectsOnPage(new_space, p); | 3057     survivors_size += DiscoverAndEvacuateBlackObjectsOnPage(new_space, p); | 
| 3056   } | 3058   } | 
| 3057 | 3059 | 
| 3058   heap_->IncrementYoungSurvivorsCounter(survivors_size); | 3060   heap_->IncrementYoungSurvivorsCounter(survivors_size); | 
| 3059   new_space->set_age_mark(new_space->top()); | 3061   new_space->set_age_mark(new_space->top()); | 
| 3060 } | 3062 } | 
| 3061 | 3063 | 
| 3062 | 3064 | 
| 3063 void MarkCompactCollector::EvacuateLiveObjectsFromPage(Page* p) { | 3065 void MarkCompactCollector::EvacuateLiveObjectsFromPage(Page* p) { | 
| 3064   AlwaysAllocateScope always_allocate(isolate()); | 3066   AlwaysAllocateScope always_allocate(isolate()); | 
| 3065   PagedSpace* space = static_cast<PagedSpace*>(p->owner()); | 3067   PagedSpace* space = static_cast<PagedSpace*>(p->owner()); | 
| (...skipping 1399 matching lines...) | |
| 4465   while (buffer != NULL) { | 4467   while (buffer != NULL) { | 
| 4466     SlotsBuffer* next_buffer = buffer->next(); | 4468     SlotsBuffer* next_buffer = buffer->next(); | 
| 4467     DeallocateBuffer(buffer); | 4469     DeallocateBuffer(buffer); | 
| 4468     buffer = next_buffer; | 4470     buffer = next_buffer; | 
| 4469   } | 4471   } | 
| 4470   *buffer_address = NULL; | 4472   *buffer_address = NULL; | 
| 4471 } | 4473 } | 
| 4472 | 4474 | 
| 4473 | 4475 | 
| 4474 } }  // namespace v8::internal | 4476 } }  // namespace v8::internal | 
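
As an aside on the first hunk: the ASSERTs at the top of DiscoverAndEvacuateBlackObjectsOnPage document the two-bit mark encoding (white = 00, black = 10, grey = 11, 01 impossible), the usual reading being that the first bit is the one at the object's start and the second is the mark bit that follows it. The sketch below only illustrates that encoding with a plausible "both bits set" test for picking grey entries out of a 32-bit cell; it is not V8's Bitmap code, and the cell type and helper names are invented here.

```cpp
// Illustration (not V8 code) of the two-bit mark encoding asserted in the
// patch: for an object whose first mark bit is at position i, the pair
// (bit i, bit i+1) reads 00 = white, 10 = black, 11 = grey, 01 = impossible.
// Grey entries are therefore positions where a bit and its successor are both
// set, which an AND with a shifted copy of the cell exposes in one step.
#include <cstdint>
#include <cstdio>

// Returns a mask whose bit i is set when bits i and i+1 of |cell| are both
// set, i.e. when the object whose mark starts at position i is grey.
uint32_t GreyMask(uint32_t cell) {
  return cell & (cell >> 1);
}

int main() {
  uint32_t cell = 0;
  cell |= 0x1u << 0;                  // position 0: "10" -> black
  cell |= (0x1u << 3) | (0x1u << 4);  // position 3: "11" -> grey
  // position 6 left as "00"          -> white
  cell |= (0x1u << 8) | (0x1u << 9);  // position 8: "11" -> grey

  uint32_t grey = GreyMask(cell);
  for (int i = 0; i < 32; i++) {
    if (grey & (0x1u << i)) std::printf("grey object at mark-bit %d\n", i);
  }
  return 0;  // expected output: mark-bit 3 and mark-bit 8
}
```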
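
The substantive change sits in the second hunk: the old unconditional "aggressively promote young survivors" path becomes promotion only when heap()->ShouldBePromoted agrees and TryPromoteObject succeeds, with a copy into the other semispace as the fallback (plus a TODO to fold this into EvacuateObject). The snippet below is a self-contained sketch of that control flow, not actual V8 code; the fake object type, the address-based age-mark model, and the helper names are all stand-ins.

```cpp
// Minimal sketch of the per-object evacuation decision introduced by this CL.
// Everything here is a simplified stand-in; only the control flow mirrors the
// patched loop in DiscoverAndEvacuateBlackObjectsOnPage.
#include <cstdint>
#include <iostream>

struct FakeObject {
  uintptr_t address;  // pretend new-space address
  int size;           // object size in bytes (unused by the toy heuristic)
};

// Stand-in for Heap::ShouldBePromoted: in this toy model an object is a
// promotion candidate when it lies below a recorded age mark, i.e. it was
// already alive at the previous scavenge.
bool ShouldBePromoted(uintptr_t age_mark, const FakeObject& o) {
  return o.address < age_mark;
}

// Stand-in for TryPromoteObject: promotion can still fail, e.g. when the old
// space has no room; the caller then falls back to a semispace copy.
bool TryPromoteObject(const FakeObject& /*o*/, bool old_space_has_room) {
  return old_space_has_room;
}

// Per-object decision, mirroring the patched loop: promote only if the
// heuristic agrees and promotion succeeds, otherwise keep the object in
// new space by copying it to the other semispace.
const char* EvacuateSurvivor(const FakeObject& o, uintptr_t age_mark,
                             bool old_space_has_room) {
  if (ShouldBePromoted(age_mark, o) && TryPromoteObject(o, old_space_has_room)) {
    return "promoted to old space";
  }
  return "copied within new space";
}

int main() {
  const uintptr_t age_mark = 0x1000;
  std::cout << EvacuateSurvivor({0x0800, 32}, age_mark, true) << "\n"    // old enough: promoted
            << EvacuateSurvivor({0x2000, 32}, age_mark, true) << "\n"    // young: copied
            << EvacuateSurvivor({0x0800, 32}, age_mark, false) << "\n";  // promotion failed: copied
}
```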