Index: src/mark-compact.cc
diff --git a/src/mark-compact.cc b/src/mark-compact.cc
index f04a8bcb9a64105a496b8e4926cc833d06b7bc3a..6972c1e0644facec630dc8a403b9793f18f806f5 100644
--- a/src/mark-compact.cc
+++ b/src/mark-compact.cc
@@ -449,7 +449,7 @@ void MarkCompactCollector::CollectGarbage() {
 
 #ifdef VERIFY_HEAP
   if (heap()->weak_embedded_objects_verification_enabled()) {
-    VerifyWeakEmbeddedObjectsInOptimizedCode();
+    VerifyWeakEmbeddedObjectsInCode();
   }
   if (FLAG_collect_maps && FLAG_omit_map_checks_for_leaf_maps) {
     VerifyOmittedMapChecks();
@@ -510,13 +510,13 @@ void MarkCompactCollector::VerifyMarkbitsAreClean() {
 }
 
 
-void MarkCompactCollector::VerifyWeakEmbeddedObjectsInOptimizedCode() {
+void MarkCompactCollector::VerifyWeakEmbeddedObjectsInCode() {
   HeapObjectIterator code_iterator(heap()->code_space());
   for (HeapObject* obj = code_iterator.Next();
        obj != NULL;
        obj = code_iterator.Next()) {
     Code* code = Code::cast(obj);
-    if (code->kind() != Code::OPTIMIZED_FUNCTION) continue;
+    if (!code->is_optimized_code() && !code->is_weak_stub()) continue;
     if (WillBeDeoptimized(code)) continue;
     code->VerifyEmbeddedObjectsDependency();
   }
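
Note: the two hunks above widen the verifier from optimized code only (Code::OPTIMIZED_FUNCTION) to any code object that embeds objects weakly, which now includes weak IC stubs. A minimal sketch of the widened filter, using a stand-in Code record rather than V8's Code class; all names here are hypothetical:

    #include <cstdio>
    #include <vector>

    // Stand-in for V8's Code class; only the flags the filter reads.
    struct Code {
      bool is_optimized_code;
      bool is_weak_stub;
      bool will_be_deoptimized;
    };

    // Counts the code objects the verifier would actually visit.
    static int CountVerified(const std::vector<Code>& code_space) {
      int verified = 0;
      for (const Code& code : code_space) {
        // Mirrors: if (!code->is_optimized_code() && !code->is_weak_stub()) continue;
        if (!code.is_optimized_code && !code.is_weak_stub) continue;
        if (code.will_be_deoptimized) continue;
        verified++;  // stands in for code->VerifyEmbeddedObjectsDependency()
      }
      return verified;
    }

    int main() {
      std::vector<Code> code_space = {{true, false, false},   // optimized, live: visited
                                      {false, true, false},   // weak stub: now visited too
                                      {false, false, false},  // other code: skipped
                                      {true, false, true}};   // doomed: skipped
      printf("%d\n", CountVerified(code_space));  // prints 2
      return 0;
    }
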
@@ -2583,7 +2583,7 @@ void MarkCompactCollector::ClearNonLiveReferences() {
     if (map_mark.Get()) {
       ClearNonLiveDependentCode(map->dependent_code());
     } else {
-      ClearAndDeoptimizeDependentCode(map->dependent_code());
+      ClearDependentCode(map->dependent_code());
       map->set_dependent_code(DependentCode::cast(heap()->empty_fixed_array()));
     }
   }
@@ -2638,7 +2638,7 @@ void MarkCompactCollector::ClearNonLiveReferences() {
         }
         ClearNonLiveDependentCode(DependentCode::cast(value));
       } else {
-        ClearAndDeoptimizeDependentCode(DependentCode::cast(value));
+        ClearDependentCode(DependentCode::cast(value));
         table->set(key_index, heap_->the_hole_value());
         table->set(value_index, heap_->the_hole_value());
       }
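
Note: both renamed call sites follow the same rule: a live map keeps its dependent code array and merely has dead entries pruned, while a dead map invalidates everything that depended on it. The new name drops "Deoptimize" because weak IC stubs in the array are invalidated in place rather than deoptimized. A minimal sketch of the dispatch, with stand-in types for Map and DependentCode; none of these names are V8's own:

    struct DependentCode;  // opaque stand-in for V8's DependentCode array
    struct Map { DependentCode* dependent_code; };

    static void ClearNonLiveDependentCode(DependentCode*) { /* prune dead entries */ }
    static void ClearDependentCode(DependentCode*) { /* invalidate everything */ }

    // Mirrors the branch shape of both hunks above.
    static void ProcessMapDependents(Map* map, bool map_is_live) {
      if (map_is_live) {
        // Live map: keep the array, dropping only entries whose code died.
        ClearNonLiveDependentCode(map->dependent_code);
      } else {
        // Dead map: deoptimize dependent optimized code, invalidate weak IC
        // stubs, then detach the array (V8 swaps in the empty fixed array).
        ClearDependentCode(map->dependent_code);
        map->dependent_code = nullptr;
      }
    }
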
@@ -2708,56 +2708,102 @@ void MarkCompactCollector::ClearNonLiveMapTransitions(Map* map,
 }
 
 
-void MarkCompactCollector::ClearAndDeoptimizeDependentCode(
+void MarkCompactCollector::ClearDependentICList(Object* head) {
+  Object* current = head;
+  Object* undefined = heap()->undefined_value();
+  while (current != undefined) {
+    Code* code = Code::cast(current);
+    if (IsMarked(code)) {
+      ASSERT(code->is_weak_stub());
+      IC::InvalidateMaps(code);
+    }
+    current = code->next_code_link();
+    code->set_next_code_link(undefined);
+  }
+}
+
+
+void MarkCompactCollector::ClearDependentCode(
     DependentCode* entries) {
   DisallowHeapAllocation no_allocation;
   DependentCode::GroupStartIndexes starts(entries);
   int number_of_entries = starts.number_of_entries();
   if (number_of_entries == 0) return;
-  for (int i = 0; i < number_of_entries; i++) {
+  int g = DependentCode::kWeakICGroup;
+  if (starts.at(g) != starts.at(g + 1)) {
+    int i = starts.at(g);
+    ASSERT(i + 1 == starts.at(g + 1));
+    Object* head = entries->object_at(i);
+    ClearDependentICList(head);
+  }
+  g = DependentCode::kWeakCodeGroup;
+  for (int i = starts.at(g); i < starts.at(g + 1); i++) {
     // If the entry is compilation info then the map must be alive,
-    // and ClearAndDeoptimizeDependentCode shouldn't be called.
+    // and ClearDependentCode shouldn't be called.
     ASSERT(entries->is_code_at(i));
     Code* code = entries->code_at(i);
-
     if (IsMarked(code) && !code->marked_for_deoptimization()) {
       code->set_marked_for_deoptimization(true);
       code->InvalidateEmbeddedObjects();
       have_code_to_deoptimize_ = true;
     }
+  }
+  for (int i = 0; i < number_of_entries; i++) {
     entries->clear_at(i);
   }
 }
 
 
-void MarkCompactCollector::ClearNonLiveDependentCode(DependentCode* entries) {
-  DisallowHeapAllocation no_allocation;
-  DependentCode::GroupStartIndexes starts(entries);
-  int number_of_entries = starts.number_of_entries();
-  if (number_of_entries == 0) return;
-  int new_number_of_entries = 0;
-  // Go through all groups, remove dead codes and compact.
-  for (int g = 0; g < DependentCode::kGroupCount; g++) {
-    int group_number_of_entries = 0;
-    for (int i = starts.at(g); i < starts.at(g + 1); i++) {
+int MarkCompactCollector::ClearNonLiveDependentCodeInGroup(
+    DependentCode* entries, int group, int start, int end, int new_start) {
+  int survived = 0;
+  if (group == DependentCode::kWeakICGroup) {
+    // Dependent weak IC stubs form a linked list and only the head is stored
+    // in the dependent code array.
+    if (start != end) {
+      ASSERT(start + 1 == end);
+      Object* old_head = entries->object_at(start);
+      MarkCompactWeakObjectRetainer retainer;
+      Object* head = VisitWeakList<Code>(heap(), old_head, &retainer, true);
+      entries->set_object_at(new_start, head);
+      Object** slot = entries->slot_at(new_start);
+      RecordSlot(slot, slot, head);
+      // We do not compact this group even if the head is undefined,
+      // more dependent ICs are likely to be added later.
+      survived = 1;
+    }
+  } else {
+    for (int i = start; i < end; i++) {
       Object* obj = entries->object_at(i);
       ASSERT(obj->IsCode() || IsMarked(obj));
       if (IsMarked(obj) &&
           (!obj->IsCode() || !WillBeDeoptimized(Code::cast(obj)))) {
-        if (new_number_of_entries + group_number_of_entries != i) {
-          entries->set_object_at(
-              new_number_of_entries + group_number_of_entries, obj);
+        if (new_start + survived != i) {
+          entries->set_object_at(new_start + survived, obj);
         }
-        Object** slot = entries->slot_at(new_number_of_entries +
-                                         group_number_of_entries);
+        Object** slot = entries->slot_at(new_start + survived);
         RecordSlot(slot, slot, obj);
-        group_number_of_entries++;
+        survived++;
       }
     }
-    entries->set_number_of_entries(
-        static_cast<DependentCode::DependencyGroup>(g),
-        group_number_of_entries);
-    new_number_of_entries += group_number_of_entries;
+  }
+  entries->set_number_of_entries(
+      static_cast<DependentCode::DependencyGroup>(group), survived);
+  return survived;
+}
+
+
+void MarkCompactCollector::ClearNonLiveDependentCode(DependentCode* entries) {
+  DisallowHeapAllocation no_allocation;
+  DependentCode::GroupStartIndexes starts(entries);
+  int number_of_entries = starts.number_of_entries();
+  if (number_of_entries == 0) return;
+  int new_number_of_entries = 0;
+  // Go through all groups, remove dead codes and compact.
+  for (int g = 0; g < DependentCode::kGroupCount; g++) {
+    int survived = ClearNonLiveDependentCodeInGroup(
+        entries, g, starts.at(g), starts.at(g + 1), new_number_of_entries);
+    new_number_of_entries += survived;
   }
   for (int i = new_number_of_entries; i < number_of_entries; i++) {
     entries->clear_at(i);
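
Note: ClearNonLiveDependentCode is split so the per-group pruning lives in ClearNonLiveDependentCodeInGroup. Each dependency group owns a contiguous slice [start, end) of the entries array, survivors slide left to new_start so the groups stay packed, and the weak IC group is special-cased because its single slot holds the head of a linked list of stubs rather than individual code entries. A self-contained sketch of the sliding compaction over plain integers; all names here are hypothetical, and V8's slot recording and weak-list visiting are elided:

    #include <cstdio>
    #include <vector>

    // Compact survivors of the group slice [start, end) down to new_start,
    // the way ClearNonLiveDependentCodeInGroup does for non-weak-IC groups.
    // Entries are ints; negative values play the role of dead objects.
    static int CompactGroup(std::vector<int>& entries, int start, int end,
                            int new_start) {
      int survived = 0;
      for (int i = start; i < end; i++) {
        if (entries[i] >= 0) {  // plays "IsMarked && !WillBeDeoptimized"
          if (new_start + survived != i) entries[new_start + survived] = entries[i];
          survived++;
        }
      }
      return survived;
    }

    int main() {
      // Two groups: slices [0, 3) and [3, 6); dead entries marked -1.
      std::vector<int> entries = {10, -1, 12, -1, 14, 15};
      std::vector<int> starts = {0, 3, 6};
      int new_number_of_entries = 0;
      for (size_t g = 0; g + 1 < starts.size(); g++) {
        new_number_of_entries += CompactGroup(entries, starts[g], starts[g + 1],
                                              new_number_of_entries);
      }
      // Clear the tail, as ClearNonLiveDependentCode does with clear_at().
      for (size_t i = new_number_of_entries; i < entries.size(); i++) entries[i] = 0;
      for (int e : entries) printf("%d ", e);  // prints: 10 12 14 15 0 0
      return 0;
    }
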
@@ -3409,7 +3455,7 @@ void MarkCompactCollector::InvalidateCode(Code* code) {
 
 
 // Return true if the given code is deoptimized or will be deoptimized.
 bool MarkCompactCollector::WillBeDeoptimized(Code* code) {
-  return code->marked_for_deoptimization();
+  return code->is_optimized_code() && code->marked_for_deoptimization();
 }
 
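
Note: the extra is_optimized_code() guard matters because weak IC stubs now reach WillBeDeoptimized through ClearNonLiveDependentCodeInGroup, and marked_for_deoptimization() is only meaningful for optimized code, so a stub must never be reported as doomed. Relatedly, ClearDependentICList above unthreads the stub list hanging off each next_code_link. A standalone sketch of that unthreading, with a plain pointer list and a null terminator standing in for the undefined value; the Stub type and its fields are hypothetical:

    #include <cstdio>

    // Stand-in for a weak IC stub on the dependent-code list.
    struct Stub {
      bool marked;       // plays IsMarked: survived the marking phase?
      bool invalidated;  // plays IC::InvalidateMaps()
      Stub* next;        // plays next_code_link()
    };

    static void ClearDependentICList(Stub* head) {
      Stub* current = head;
      while (current != nullptr) {
        if (current->marked) current->invalidated = true;  // invalidate live stubs
        Stub* next = current->next;
        current->next = nullptr;  // unthread: detach every node from the list
        current = next;
      }
    }

    int main() {
      Stub c = {false, false, nullptr};  // dead stub: left alone
      Stub b = {true, false, &c};
      Stub a = {true, false, &b};
      ClearDependentICList(&a);
      printf("%d %d %d\n", a.invalidated, b.invalidated, c.invalidated);  // 1 1 0
      return 0;
    }
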
|