Index: src/heap/mark-compact.cc
diff --git a/src/heap/mark-compact.cc b/src/heap/mark-compact.cc
index 7b3c2a8c6eb75140aa1783353d3ae8b945a35c17..bcfd401fd335e54eac09c55fb494594ebb6c990c 100644
--- a/src/heap/mark-compact.cc
+++ b/src/heap/mark-compact.cc
@@ -349,11 +349,17 @@ void MarkCompactCollector::CollectGarbage() {
   DCHECK(heap_->incremental_marking()->IsStopped());
-  // ClearNonLiveReferences can deoptimize code in dependent code arrays.
-  // Process weak cells before so that weak cells in dependent code
-  // arrays are cleared or contain only live code objects.
+  // This should be done before processing weak cells because it checks
Hannes Payer (out of office)
2015/11/24 09:35:36
Let's add here a special category: ProcessWeakRefe
ulan
2015/11/24 12:00:08
Done.

+  // mark bits of maps in weak cells.
+  DependentCode* dependent_code_list = DependentCodeListFromNonLiveMaps();
+
+  // Process weak cells before MarkCodeForDeoptimization and
+  // ClearNonLiveReferences so that weak cells in dependent code arrays are
+  // cleared or contain only live code objects.
   ProcessAndClearWeakCells();
+  MarkDependentCodeListForDeoptimization(dependent_code_list);
+
   ClearNonLiveReferences();
   ClearWeakCollections();
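
A note on the reordering above: DependentCodeListFromNonLiveMaps inspects the mark bit of the map held by each weak cell, and ProcessAndClearWeakCells clears exactly the cells whose maps are unmarked, so the list must be collected first. Below is a minimal stand-alone sketch of that constraint; Map, WeakCell and the two phase functions here are simplified stand-ins for illustration, not the real V8 classes.

    // Minimal stand-alone model of the ordering constraint (toy types).
    #include <cassert>
    #include <vector>

    struct Map {
      bool marked = false;              // mark bit after the marking phase
      int dependent_code_entries = 0;   // how much dependent code the map carries
    };

    struct WeakCell {
      Map* value = nullptr;             // cleared (nulled) once the map is dead
    };

    // Phase 1: snapshot the dependent code of dead maps while the weak cells
    // still point at them.
    int CollectDependentCodeOfDeadMaps(const std::vector<WeakCell>& cells) {
      int collected = 0;
      for (const WeakCell& cell : cells) {
        if (cell.value != nullptr && !cell.value->marked) {
          collected += cell.value->dependent_code_entries;
        }
      }
      return collected;
    }

    // Phase 2: clear weak cells that reference dead maps.
    void ProcessAndClearWeakCells(std::vector<WeakCell>& cells) {
      for (WeakCell& cell : cells) {
        if (cell.value != nullptr && !cell.value->marked) cell.value = nullptr;
      }
    }

    int main() {
      Map live{true, 1};
      Map dead{false, 2};
      std::vector<WeakCell> cells = {{&live}, {&dead}};
      // Collecting before the cells are cleared sees the dead map's code...
      assert(CollectDependentCodeOfDeadMaps(cells) == 2);
      ProcessAndClearWeakCells(cells);
      // ...collecting afterwards would see nothing, which is what the new
      // ordering in CollectGarbage() avoids.
      assert(CollectDependentCodeOfDeadMaps(cells) == 0);
    }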
@@ -1821,18 +1827,35 @@ void MarkCompactCollector::ProcessTopOptimizedFrame(ObjectVisitor* visitor) {
 }
-void MarkCompactCollector::RetainMaps() {
-  if (heap()->ShouldReduceMemory() || heap()->ShouldAbortIncrementalMarking() ||
-      FLAG_retain_maps_for_n_gc == 0) {
-    // Do not retain dead maps if flag disables it or there is
-    // - memory pressure (reduce_memory_footprint_),
-    // - GC is requested by tests or dev-tools (abort_incremental_marking_).
-    return;
+bool ShouldRetainMap(Map* map, int age) {
+  if (age == 0) {
+    // The map has aged. Do not retain this map.
+    return false;
   }
+  Object* constructor = map->GetConstructor();
+  if (!constructor->IsHeapObject() ||
+      Marking::IsWhite(Marking::MarkBitFrom(HeapObject::cast(constructor)))) {
+    // The constructor is dead, no new objects with this map can
+    // be created. Do not retain this map.
+    return false;
+  }
+  return true;
+}
+
+
+void MarkCompactCollector::RetainMaps() {
+  // Do not retain dead maps if the flag disables it, or if there is
+  // - memory pressure (reduce_memory_footprint_), or
+  // - a GC requested by tests or dev-tools (abort_incremental_marking_).
+  bool map_retaining_is_disabled = heap()->ShouldReduceMemory() ||
+                                   heap()->ShouldAbortIncrementalMarking() ||
+                                   FLAG_retain_maps_for_n_gc == 0;
   ArrayList* retained_maps = heap()->retained_maps();
   int length = retained_maps->Length();
   int new_length = 0;
+  // This loop compacts the array by removing cleared weak cells. It also
+  // ages dead maps and retains them where appropriate.
   for (int i = 0; i < length; i += 2) {
     DCHECK(retained_maps->Get(i)->IsWeakCell());
     WeakCell* cell = WeakCell::cast(retained_maps->Get(i));
@@ -1841,20 +1864,13 @@ void MarkCompactCollector::RetainMaps() {
     int new_age;
     Map* map = Map::cast(cell->value());
     MarkBit map_mark = Marking::MarkBitFrom(map);
-    if (Marking::IsWhite(map_mark)) {
-      if (age == 0) {
-        // The map has aged. Do not retain this map.
-        continue;
-      }
-      Object* constructor = map->GetConstructor();
-      if (!constructor->IsHeapObject() || Marking::IsWhite(Marking::MarkBitFrom(
-                                              HeapObject::cast(constructor)))) {
-        // The constructor is dead, no new objects with this map can
-        // be created. Do not retain this map.
-        continue;
+    // Age the map and retain it if necessary.
+    if (!map_retaining_is_disabled && Marking::IsWhite(map_mark)) {
+      if (ShouldRetainMap(map, age)) {
+        MarkObject(map, map_mark);
       }
       Object* prototype = map->prototype();
-      if (prototype->IsHeapObject() &&
+      if (age > 0 && prototype->IsHeapObject() &&
           Marking::IsWhite(Marking::MarkBitFrom(HeapObject::cast(prototype)))) {
         // The prototype is not marked, age the map.
+        new_age = age - 1;
@@ -1863,10 +1879,10 @@ void MarkCompactCollector::RetainMaps() {
         // transition tree alive, not JSObjects. Do not age the map.
         new_age = age;
       }
-      MarkObject(map, map_mark);
     } else {
       new_age = FLAG_retain_maps_for_n_gc;
     }
+    // Compact the array and update the age.
     if (i != new_length) {
       retained_maps->Set(new_length, cell);
       Object** slot = retained_maps->Slot(new_length);
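
The retention decision that the hunks above factor out into ShouldRetainMap can be summarized with a small stand-alone model. This is a sketch under simplifying assumptions: the map_retaining_is_disabled path is ignored and the constructor/prototype liveness checks are reduced to plain booleans; the types below are toys, not V8's Map or MarkBit.

    // Stand-alone model of the retention and aging decision (toy types).
    #include <cassert>

    constexpr int kRetainMapsForNGC = 2;  // stands in for FLAG_retain_maps_for_n_gc

    struct Map {
      bool marked = false;              // mark bit of the map after marking
      bool constructor_marked = false;  // liveness of the map's constructor
      bool prototype_marked = false;    // liveness of the map's prototype
    };

    bool ShouldRetainMap(const Map& map, int age) {
      if (age == 0) return false;       // the map has aged out
      return map.constructor_marked;    // dead constructor: no new objects possible
    }

    // Returns the new age; as a side effect, re-marks a retained dead map.
    int AgeMap(Map& map, int age) {
      if (map.marked) return kRetainMapsForNGC;          // live map: reset the age
      if (ShouldRetainMap(map, age)) map.marked = true;   // keep the dead map alive
      // A marked (live) prototype suggests JSObjects may still use the map, so do
      // not age it; otherwise only the retained-maps array keeps it, so it ages.
      return (age > 0 && !map.prototype_marked) ? age - 1 : age;
    }

    int main() {
      Map dead_live_ctor{false, true, false};
      assert(AgeMap(dead_live_ctor, 2) == 1);   // retained, but aged
      assert(dead_live_ctor.marked);            // and marked live again

      Map dead_dead_ctor{false, false, false};
      assert(AgeMap(dead_dead_ctor, 2) == 1);   // ages, but is not retained
      assert(!dead_dead_ctor.marked);

      Map live{true, true, true};
      assert(AgeMap(live, 1) == kRetainMapsForNGC);  // age reset for live maps
    }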
@@ -1886,6 +1902,31 @@ void MarkCompactCollector::RetainMaps() {
 }
+DependentCode* MarkCompactCollector::DependentCodeListFromNonLiveMaps() {
Hannes Payer (out of office)
2015/11/24 09:35:37
Timer scope.
ulan
2015/11/24 12:00:08
Done.

+  ArrayList* retained_maps = heap()->retained_maps();
+  int length = retained_maps->Length();
+  DependentCode* head = DependentCode::cast(heap()->empty_fixed_array());
+  for (int i = 0; i < length; i += 2) {
+    DCHECK(retained_maps->Get(i)->IsWeakCell());
+    WeakCell* cell = WeakCell::cast(retained_maps->Get(i));
+    DCHECK(!cell->cleared());
+    Map* map = Map::cast(cell->value());
+    MarkBit map_mark = Marking::MarkBitFrom(map);
+    if (Marking::IsWhite(map_mark)) {
+      DependentCode* candidate = map->dependent_code();
+      // We rely on the fact that the weak code group comes first.
+      STATIC_ASSERT(DependentCode::kWeakCodeGroup == 0);
+      if (candidate->length() > 0 &&
+          candidate->group() == DependentCode::kWeakCodeGroup) {
+        candidate->set_next_link(head);
+        head = candidate;
+      }
+    }
+  }
+  return head;
+}
+
+
 void MarkCompactCollector::EnsureMarkingDequeIsReserved() {
   DCHECK(!marking_deque_.in_use());
   if (marking_deque_memory_ == NULL) {
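
DependentCodeListFromNonLiveMaps chains the weak-code arrays of dead maps into an intrusive singly linked list by head insertion, with the permanently empty array as the terminator. A toy sketch of just that list-building step follows; CodeArray and PushIfNonEmpty are hypothetical stand-ins, and the group()/kWeakCodeGroup check from the real code is omitted.

    // Toy sketch of building the intrusive list (stand-in types, not V8 API).
    #include <cassert>
    #include <vector>

    struct CodeArray {
      std::vector<int> entries;          // stands in for the weak code group
      CodeArray* next_link = nullptr;    // intrusive link, like set_next_link()
      int length() const { return static_cast<int>(entries.size()); }
    };

    // Head-insert a dead map's code array; empty arrays are skipped because
    // they contribute nothing to deoptimization later.
    CodeArray* PushIfNonEmpty(CodeArray* head, CodeArray* candidate) {
      if (candidate->length() == 0) return head;
      candidate->next_link = head;       // O(1) insertion at the head
      return candidate;
    }

    int main() {
      CodeArray sentinel;   // plays the role of heap()->empty_fixed_array()
      CodeArray a{{1, 2}};
      CodeArray b{{3}};
      CodeArray* head = &sentinel;
      head = PushIfNonEmpty(head, &a);
      head = PushIfNonEmpty(head, &b);
      // The most recently pushed array is the head; the sentinel ends the list.
      assert(head == &b);
      assert(b.next_link == &a);
      assert(a.next_link == &sentinel);
    }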
@@ -2209,10 +2250,6 @@ void MarkCompactCollector::ClearNonLiveReferences() {
       ClearNonLivePrototypeTransitions(map);
     } else {
       ClearNonLiveMapTransitions(map);
-      have_code_to_deoptimize_ |=
-          map->dependent_code()->MarkCodeForDeoptimization(
-              isolate(), DependentCode::kWeakCodeGroup);
-      map->set_dependent_code(DependentCode::cast(heap()->empty_fixed_array()));
     }
   }
@@ -2237,6 +2274,18 @@ void MarkCompactCollector::ClearNonLiveReferences() {
 }
+void MarkCompactCollector::MarkDependentCodeListForDeoptimization(
Hannes Payer (out of office)
2015/11/24 09:35:36
Timer scope.
ulan
2015/11/24 12:00:08
Done.

+    DependentCode* list_head) {
+  Isolate* isolate = this->isolate();
+  DependentCode* current = list_head;
+  while (current->length() > 0) {
+    have_code_to_deoptimize_ |= current->MarkCodeForDeoptimization(
+        isolate, DependentCode::kWeakCodeGroup);
+    current = current->next_link();
+  }
+}
+
+
 void MarkCompactCollector::ClearNonLivePrototypeTransitions(Map* map) {
   FixedArray* prototype_transitions =
       TransitionArray::GetPrototypeTransitions(map);
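
To close the loop, here is a matching toy sketch of the walk that MarkDependentCodeListForDeoptimization performs: traversal stops at the empty sentinel (length() == 0) rather than at a null pointer, and the have_code_to_deoptimize flag is OR-ed across every array on the list. As before, CodeArray is a stand-in type, not V8's DependentCode.

    // Toy sketch of walking the sentinel-terminated list (stand-in types).
    #include <cassert>
    #include <vector>

    struct CodeArray {
      std::vector<int> entries;
      CodeArray* next_link = nullptr;
      int length() const { return static_cast<int>(entries.size()); }
      bool MarkForDeoptimization() { return !entries.empty(); }  // toy behaviour
    };

    bool MarkListForDeoptimization(CodeArray* list_head) {
      bool have_code_to_deoptimize = false;
      // The empty sentinel has length() == 0, so it terminates the traversal.
      for (CodeArray* cur = list_head; cur->length() > 0; cur = cur->next_link) {
        have_code_to_deoptimize |= cur->MarkForDeoptimization();
      }
      return have_code_to_deoptimize;
    }

    int main() {
      CodeArray sentinel;                 // the empty terminator
      CodeArray a{{1}, &sentinel};
      CodeArray b{{2, 3}, &a};
      assert(MarkListForDeoptimization(&b));          // two arrays, then sentinel
      assert(!MarkListForDeoptimization(&sentinel));  // empty list: nothing to do
    }

Terminating on the empty array keeps the empty-list case and the end-of-list case identical, which is why the real code can initialize the head to heap()->empty_fixed_array() and never test for null.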