| Index: src/heap/mark-compact.cc
|
| diff --git a/src/heap/mark-compact.cc b/src/heap/mark-compact.cc
|
| index fa127db6129c0f1598eb2140f3d1fdb294ff4e90..2ff7fb37f26b64e9f6f553bdf1a8e94648418c6f 100644
|
| --- a/src/heap/mark-compact.cc
|
| +++ b/src/heap/mark-compact.cc
|
| @@ -301,9 +301,13 @@ void MarkCompactCollector::CollectGarbage() {
|
| MarkLiveObjects();
|
| DCHECK(heap_->incremental_marking()->IsStopped());
|
|
|
| + // ClearNonLiveReferences can deoptimize code in dependent code arrays.
|
| + // Process weak cells beforehand so that weak cells in dependent code
|
| + // arrays are cleared or contain only live code objects.
|
| + ProcessAndClearWeakCells();
|
| +
|
| if (FLAG_collect_maps) ClearNonLiveReferences();
|
|
|
| - ProcessAndClearWeakCells();
|
|
|
| ClearWeakCollections();
|
|
|
| @@ -1959,7 +1963,7 @@ void MarkCompactCollector::MarkRoots(RootMarkingVisitor* visitor) {
|
| // Handle the string table specially.
|
| MarkStringTable(visitor);
|
|
|
| - MarkWeakObjectToCodeTable();
|
| + MarkWeakObjectToCodeTable(visitor);
|
|
|
| // There may be overflowed objects in the heap. Visit them now.
|
| while (marking_deque_.overflowed()) {
|
| @@ -2001,12 +2005,21 @@ void MarkCompactCollector::MarkImplicitRefGroups() {
|
| }
|
|
|
|
|
| -void MarkCompactCollector::MarkWeakObjectToCodeTable() {
|
| +void MarkCompactCollector::MarkWeakObjectToCodeTable(ObjectVisitor* visitor) {
|
| HeapObject* weak_object_to_code_table =
|
| HeapObject::cast(heap()->weak_object_to_code_table());
|
| if (!IsMarked(weak_object_to_code_table)) {
|
| MarkBit mark = Marking::MarkBitFrom(weak_object_to_code_table);
|
| SetMark(weak_object_to_code_table, mark);
|
| + if (weak_object_to_code_table->IsHashTable()) {
|
| + // Values of the table are strong references to dependent code. Mark them.
|
| + WeakHashTable* table = WeakHashTable::cast(weak_object_to_code_table);
|
| + uint32_t capacity = table->Capacity();
|
| + for (uint32_t i = 0; i < capacity; i++) {
|
| + uint32_t value_index = table->EntryToValueIndex(i);
|
| + visitor->VisitPointer(table->RawFieldOfElementAt(value_index));
|
| + }
|
| + }
|
| }
|
| }
|
|
|
| @@ -2313,34 +2326,14 @@ void MarkCompactCollector::ClearNonLiveReferences() {
|
| ClearNonLivePrototypeTransitions(map);
|
| ClearNonLiveMapTransitions(map, map_mark);
|
|
|
| - if (map_mark.Get()) {
|
| - ClearNonLiveDependentCode(map->dependent_code());
|
| - } else {
|
| - ClearDependentCode(map->dependent_code());
|
| + if (!map_mark.Get()) {
|
| + have_code_to_deoptimize_ |=
|
| + map->dependent_code()->MarkCodeForDeoptimization(
|
| + isolate(), DependentCode::kWeakCodeGroup);
|
| map->set_dependent_code(DependentCode::cast(heap()->empty_fixed_array()));
|
| }
|
| }
|
|
|
| - // Iterate over property cell space, removing dependent code that is not
|
| - // otherwise kept alive by strong references.
|
| - HeapObjectIterator cell_iterator(heap_->property_cell_space());
|
| - for (HeapObject* cell = cell_iterator.Next(); cell != NULL;
|
| - cell = cell_iterator.Next()) {
|
| - if (IsMarked(cell)) {
|
| - ClearNonLiveDependentCode(PropertyCell::cast(cell)->dependent_code());
|
| - }
|
| - }
|
| -
|
| - // Iterate over allocation sites, removing dependent code that is not
|
| - // otherwise kept alive by strong references.
|
| - Object* undefined = heap()->undefined_value();
|
| - for (Object* site = heap()->allocation_sites_list(); site != undefined;
|
| - site = AllocationSite::cast(site)->weak_next()) {
|
| - if (IsMarked(site)) {
|
| - ClearNonLiveDependentCode(AllocationSite::cast(site)->dependent_code());
|
| - }
|
| - }
|
| -
|
| if (heap_->weak_object_to_code_table()->IsHashTable()) {
|
| WeakHashTable* table =
|
| WeakHashTable::cast(heap_->weak_object_to_code_table());
|
| @@ -2367,9 +2360,10 @@ void MarkCompactCollector::ClearNonLiveReferences() {
|
| MarkBit mark = Marking::MarkBitFrom(obj);
|
| SetMark(obj, mark);
|
| }
|
| - ClearNonLiveDependentCode(DependentCode::cast(value));
|
| } else {
|
| - ClearDependentCode(DependentCode::cast(value));
|
| + have_code_to_deoptimize_ |=
|
| + DependentCode::cast(value)->MarkCodeForDeoptimization(
|
| + isolate(), DependentCode::kWeakCodeGroup);
|
| table->set(key_index, heap_->the_hole_value());
|
| table->set(value_index, heap_->the_hole_value());
|
| table->ElementRemoved();
|
| @@ -2545,70 +2539,6 @@ void MarkCompactCollector::TrimEnumCache(Map* map,
|
| }
|
|
|
|
|
| -void MarkCompactCollector::ClearDependentCode(DependentCode* entries) {
|
| - DisallowHeapAllocation no_allocation;
|
| - DependentCode::GroupStartIndexes starts(entries);
|
| - int number_of_entries = starts.number_of_entries();
|
| - if (number_of_entries == 0) return;
|
| - int g = DependentCode::kWeakCodeGroup;
|
| - for (int i = starts.at(g); i < starts.at(g + 1); i++) {
|
| - // If the entry is compilation info then the map must be alive,
|
| - // and ClearDependentCode shouldn't be called.
|
| - DCHECK(entries->is_code_at(i));
|
| - Code* code = entries->code_at(i);
|
| - if (IsMarked(code) && !code->marked_for_deoptimization()) {
|
| - DependentCode::SetMarkedForDeoptimization(
|
| - code, static_cast<DependentCode::DependencyGroup>(g));
|
| - code->InvalidateEmbeddedObjects();
|
| - have_code_to_deoptimize_ = true;
|
| - }
|
| - }
|
| - for (int i = 0; i < number_of_entries; i++) {
|
| - entries->clear_at(i);
|
| - }
|
| -}
|
| -
|
| -
|
| -int MarkCompactCollector::ClearNonLiveDependentCodeInGroup(
|
| - DependentCode* entries, int group, int start, int end, int new_start) {
|
| - int survived = 0;
|
| - for (int i = start; i < end; i++) {
|
| - Object* obj = entries->object_at(i);
|
| - DCHECK(obj->IsCode() || IsMarked(obj));
|
| - if (IsMarked(obj) &&
|
| - (!obj->IsCode() || !WillBeDeoptimized(Code::cast(obj)))) {
|
| - if (new_start + survived != i) {
|
| - entries->set_object_at(new_start + survived, obj);
|
| - }
|
| - Object** slot = entries->slot_at(new_start + survived);
|
| - RecordSlot(slot, slot, obj);
|
| - survived++;
|
| - }
|
| - }
|
| - entries->set_number_of_entries(
|
| - static_cast<DependentCode::DependencyGroup>(group), survived);
|
| - return survived;
|
| -}
|
| -
|
| -
|
| -void MarkCompactCollector::ClearNonLiveDependentCode(DependentCode* entries) {
|
| - DisallowHeapAllocation no_allocation;
|
| - DependentCode::GroupStartIndexes starts(entries);
|
| - int number_of_entries = starts.number_of_entries();
|
| - if (number_of_entries == 0) return;
|
| - int new_number_of_entries = 0;
|
| - // Go through all groups, remove dead codes and compact.
|
| - for (int g = 0; g < DependentCode::kGroupCount; g++) {
|
| - int survived = ClearNonLiveDependentCodeInGroup(
|
| - entries, g, starts.at(g), starts.at(g + 1), new_number_of_entries);
|
| - new_number_of_entries += survived;
|
| - }
|
| - for (int i = new_number_of_entries; i < number_of_entries; i++) {
|
| - entries->clear_at(i);
|
| - }
|
| -}
|
| -
|
| -
|
| void MarkCompactCollector::ProcessWeakCollections() {
|
| GCTracer::Scope gc_scope(heap()->tracer(),
|
| GCTracer::Scope::MC_WEAKCOLLECTION_PROCESS);
|
|
|