Chromium Code Reviews | Index: src/heap/mark-compact.cc |
| diff --git a/src/heap/mark-compact.cc b/src/heap/mark-compact.cc |
| index 1c82da0d02e289dddd3a96f3ccc0485e643a9883..c7f933996e2b2a1daee1810d0018978eaf1a297a 100644 |
| --- a/src/heap/mark-compact.cc |
| +++ b/src/heap/mark-compact.cc |
| @@ -2147,13 +2147,6 @@ void MarkCompactCollector::AfterMarking() { |
| code_flusher_->ProcessCandidates(); |
| } |
| - // Process and clear all optimized code maps. |
| - if (!FLAG_flush_optimized_code_cache) { |
| - GCTracer::Scope gc_scope(heap()->tracer(), |
| - GCTracer::Scope::MC_MARK_OPTIMIZED_CODE_MAPS); |
| - ProcessAndClearOptimizedCodeMaps(); |
| - } |
| - |
| if (FLAG_track_gc_object_stats) { |
| if (FLAG_trace_gc_object_stats) { |
| heap()->object_stats_->TraceObjectStats(); |
| @@ -2163,72 +2156,6 @@ void MarkCompactCollector::AfterMarking() { |
| } |
| -void MarkCompactCollector::ProcessAndClearOptimizedCodeMaps() { |
|
Michael Starzinger (2015/11/27 10:06:39): Woot, I love it!
mvstanton (2015/12/01 11:28:15): Acknowledged.
|
| - SharedFunctionInfo::Iterator iterator(isolate()); |
| - while (SharedFunctionInfo* shared = iterator.Next()) { |
| - if (shared->OptimizedCodeMapIsCleared()) continue; |
| - |
| - // Process context-dependent entries in the optimized code map. |
| - FixedArray* code_map = shared->optimized_code_map(); |
| - int new_length = SharedFunctionInfo::kEntriesStart; |
| - int old_length = code_map->length(); |
| - for (int i = SharedFunctionInfo::kEntriesStart; i < old_length; |
| - i += SharedFunctionInfo::kEntryLength) { |
| - // Each entry contains [ context, code, literals, ast-id ] as fields. |
| - STATIC_ASSERT(SharedFunctionInfo::kEntryLength == 4); |
| - Context* context = |
| - Context::cast(code_map->get(i + SharedFunctionInfo::kContextOffset)); |
| - HeapObject* code = HeapObject::cast( |
| - code_map->get(i + SharedFunctionInfo::kCachedCodeOffset)); |
| - FixedArray* literals = FixedArray::cast( |
| - code_map->get(i + SharedFunctionInfo::kLiteralsOffset)); |
| - Smi* ast_id = |
| - Smi::cast(code_map->get(i + SharedFunctionInfo::kOsrAstIdOffset)); |
| - if (Marking::IsWhite(Marking::MarkBitFrom(context))) continue; |
| - DCHECK(Marking::IsBlack(Marking::MarkBitFrom(context))); |
| - if (Marking::IsWhite(Marking::MarkBitFrom(code))) continue; |
| - DCHECK(Marking::IsBlack(Marking::MarkBitFrom(code))); |
| - if (Marking::IsWhite(Marking::MarkBitFrom(literals))) continue; |
| - DCHECK(Marking::IsBlack(Marking::MarkBitFrom(literals))); |
| - // Move every slot in the entry and record slots when needed. |
| - code_map->set(new_length + SharedFunctionInfo::kCachedCodeOffset, code); |
| - code_map->set(new_length + SharedFunctionInfo::kContextOffset, context); |
| - code_map->set(new_length + SharedFunctionInfo::kLiteralsOffset, literals); |
| - code_map->set(new_length + SharedFunctionInfo::kOsrAstIdOffset, ast_id); |
| - Object** code_slot = code_map->RawFieldOfElementAt( |
| - new_length + SharedFunctionInfo::kCachedCodeOffset); |
| - RecordSlot(code_map, code_slot, *code_slot); |
| - Object** context_slot = code_map->RawFieldOfElementAt( |
| - new_length + SharedFunctionInfo::kContextOffset); |
| - RecordSlot(code_map, context_slot, *context_slot); |
| - Object** literals_slot = code_map->RawFieldOfElementAt( |
| - new_length + SharedFunctionInfo::kLiteralsOffset); |
| - RecordSlot(code_map, literals_slot, *literals_slot); |
| - new_length += SharedFunctionInfo::kEntryLength; |
| - } |
| - |
| - // Process context-independent entry in the optimized code map. |
| - Object* shared_object = code_map->get(SharedFunctionInfo::kSharedCodeIndex); |
| - if (shared_object->IsCode()) { |
| - Code* shared_code = Code::cast(shared_object); |
| - if (Marking::IsWhite(Marking::MarkBitFrom(shared_code))) { |
| - code_map->set_undefined(SharedFunctionInfo::kSharedCodeIndex); |
| - } else { |
| - DCHECK(Marking::IsBlack(Marking::MarkBitFrom(shared_code))); |
| - Object** slot = |
| - code_map->RawFieldOfElementAt(SharedFunctionInfo::kSharedCodeIndex); |
| - RecordSlot(code_map, slot, *slot); |
| - } |
| - } |
| - |
| - // Trim the optimized code map if entries have been removed. |
| - if (new_length < old_length) { |
| - shared->TrimOptimizedCodeMap(old_length - new_length); |
| - } |
| - } |
| -} |
| - |
| - |
| void MarkCompactCollector::ProcessWeakReferences() { |
| // This should be done before processing weak cells because it checks |
| // mark bits of maps in weak cells. |