OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/heap/mark-compact.h" | 5 #include "src/heap/mark-compact.h" |
6 | 6 |
7 #include "src/base/atomicops.h" | 7 #include "src/base/atomicops.h" |
8 #include "src/base/bits.h" | 8 #include "src/base/bits.h" |
9 #include "src/base/sys-info.h" | 9 #include "src/base/sys-info.h" |
10 #include "src/code-stubs.h" | 10 #include "src/code-stubs.h" |
(...skipping 2115 matching lines...)
2126 heap()->isolate()->global_handles()->RemoveImplicitRefGroups(); | 2126 heap()->isolate()->global_handles()->RemoveImplicitRefGroups(); |
2127 } | 2127 } |
2128 | 2128 |
2129 // Flush code from collected candidates. | 2129 // Flush code from collected candidates. |
2130 if (is_code_flushing_enabled()) { | 2130 if (is_code_flushing_enabled()) { |
2131 GCTracer::Scope gc_scope(heap()->tracer(), | 2131 GCTracer::Scope gc_scope(heap()->tracer(), |
2132 GCTracer::Scope::MC_MARK_CODE_FLUSH); | 2132 GCTracer::Scope::MC_MARK_CODE_FLUSH); |
2133 code_flusher_->ProcessCandidates(); | 2133 code_flusher_->ProcessCandidates(); |
2134 } | 2134 } |
2135 | 2135 |
| 2136 // Process and clear all optimized code maps. |
| 2137 if (!FLAG_flush_optimized_code_cache) { |
| 2138 GCTracer::Scope gc_scope(heap()->tracer(), |
| 2139 GCTracer::Scope::MC_MARK_OPTIMIZED_CODE_MAPS); |
| 2140 ProcessAndClearOptimizedCodeMaps(); |
| 2141 } |
| 2142 |
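[Inline note on the hunk above] The new phase follows the same shape as the code-flushing block: the work is wrapped in a GCTracer::Scope so its time is attributed to the MC_MARK_OPTIMIZED_CODE_MAPS bucket, and it is skipped when FLAG_flush_optimized_code_cache is set, presumably because in that mode the optimized code maps are flushed wholesale rather than pruned here. Below is a minimal, self-contained sketch of the RAII scoped-timer pattern that GCTracer::Scope follows; ScopedPhaseTimer and its output format are illustrative stand-ins, not the actual V8 tracer API.

#include <chrono>
#include <cstdio>

// Minimal sketch (not V8 code) of the RAII scoped-timer pattern that
// GCTracer::Scope follows: constructing the object starts the clock,
// and its destructor attributes the elapsed time to the named phase.
class ScopedPhaseTimer {
  using Clock = std::chrono::steady_clock;

 public:
  explicit ScopedPhaseTimer(const char* phase_name)
      : phase_name_(phase_name), start_(Clock::now()) {}
  ~ScopedPhaseTimer() {
    long long us = std::chrono::duration_cast<std::chrono::microseconds>(
                       Clock::now() - start_)
                       .count();
    std::printf("%s: %lld us\n", phase_name_, us);
  }

 private:
  const char* phase_name_;
  Clock::time_point start_;
};

int main() {
  {
    ScopedPhaseTimer scope("MC_MARK_OPTIMIZED_CODE_MAPS");
    // ... the phase's work would run here ...
  }  // destructor fires here and reports how long the phase took
  return 0;
}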
2136 if (FLAG_track_gc_object_stats) { | 2143 if (FLAG_track_gc_object_stats) { |
2137 if (FLAG_trace_gc_object_stats) { | 2144 if (FLAG_trace_gc_object_stats) { |
2138 heap()->object_stats_->TraceObjectStats(); | 2145 heap()->object_stats_->TraceObjectStats(); |
2139 } | 2146 } |
2140 heap()->object_stats_->CheckpointObjectStats(); | 2147 heap()->object_stats_->CheckpointObjectStats(); |
2141 } | 2148 } |
2142 } | 2149 } |
2143 | 2150 |
2144 | 2151 |
| 2152 void MarkCompactCollector::ProcessAndClearOptimizedCodeMaps() { |
| 2153 SharedFunctionInfo::Iterator iterator(isolate()); |
| 2154 while (SharedFunctionInfo* shared = iterator.Next()) { |
| 2155 if (shared->OptimizedCodeMapIsCleared()) continue; |
| 2156 |
| 2157 // Process context-dependent entries in the optimized code map. |
| 2158 FixedArray* code_map = shared->optimized_code_map(); |
| 2159 int new_length = SharedFunctionInfo::kEntriesStart; |
| 2160 int old_length = code_map->length(); |
| 2161 for (int i = SharedFunctionInfo::kEntriesStart; i < old_length; |
| 2162 i += SharedFunctionInfo::kEntryLength) { |
| 2163 // Each entry contains [ context, code, literals, ast-id ] as fields. |
| 2164 STATIC_ASSERT(SharedFunctionInfo::kEntryLength == 4); |
| 2165 Context* context = |
| 2166 Context::cast(code_map->get(i + SharedFunctionInfo::kContextOffset)); |
| 2167 HeapObject* code = HeapObject::cast( |
| 2168 code_map->get(i + SharedFunctionInfo::kCachedCodeOffset)); |
| 2169 FixedArray* literals = FixedArray::cast( |
| 2170 code_map->get(i + SharedFunctionInfo::kLiteralsOffset)); |
| 2171 Smi* ast_id = |
| 2172 Smi::cast(code_map->get(i + SharedFunctionInfo::kOsrAstIdOffset)); |
| 2173 if (Marking::IsWhite(Marking::MarkBitFrom(context))) continue; |
| 2174 DCHECK(Marking::IsBlack(Marking::MarkBitFrom(context))); |
| 2175 if (Marking::IsWhite(Marking::MarkBitFrom(code))) continue; |
| 2176 DCHECK(Marking::IsBlack(Marking::MarkBitFrom(code))); |
| 2177 if (Marking::IsWhite(Marking::MarkBitFrom(literals))) continue; |
| 2178 DCHECK(Marking::IsBlack(Marking::MarkBitFrom(literals))); |
| 2179 // Move every slot in the entry and record slots when needed. |
| 2180 code_map->set(new_length + SharedFunctionInfo::kCachedCodeOffset, code); |
| 2181 code_map->set(new_length + SharedFunctionInfo::kContextOffset, context); |
| 2182 code_map->set(new_length + SharedFunctionInfo::kLiteralsOffset, literals); |
| 2183 code_map->set(new_length + SharedFunctionInfo::kOsrAstIdOffset, ast_id); |
| 2184 Object** code_slot = code_map->RawFieldOfElementAt( |
| 2185 new_length + SharedFunctionInfo::kCachedCodeOffset); |
| 2186 RecordSlot(code_map, code_slot, *code_slot); |
| 2187 Object** context_slot = code_map->RawFieldOfElementAt( |
| 2188 new_length + SharedFunctionInfo::kContextOffset); |
| 2189 RecordSlot(code_map, context_slot, *context_slot); |
| 2190 Object** literals_slot = code_map->RawFieldOfElementAt( |
| 2191 new_length + SharedFunctionInfo::kLiteralsOffset); |
| 2192 RecordSlot(code_map, literals_slot, *literals_slot); |
| 2193 new_length += SharedFunctionInfo::kEntryLength; |
| 2194 } |
| 2195 |
| 2196 // Process context-independent entry in the optimized code map. |
| 2197 Object* shared_object = code_map->get(SharedFunctionInfo::kSharedCodeIndex); |
| 2198 if (shared_object->IsCode()) { |
| 2199 Code* shared_code = Code::cast(shared_object); |
| 2200 if (Marking::IsWhite(Marking::MarkBitFrom(shared_code))) { |
| 2201 code_map->set_undefined(SharedFunctionInfo::kSharedCodeIndex); |
| 2202 } else { |
| 2203 DCHECK(Marking::IsBlack(Marking::MarkBitFrom(shared_code))); |
| 2204 Object** slot = |
| 2205 code_map->RawFieldOfElementAt(SharedFunctionInfo::kSharedCodeIndex); |
| 2206 RecordSlot(code_map, slot, *slot); |
| 2207 } |
| 2208 } |
| 2209 |
| 2210 // Trim the optimized code map if entries have been removed. |
| 2211 if (new_length < old_length) { |
| 2212 shared->TrimOptimizedCodeMap(old_length - new_length); |
| 2213 } |
| 2214 } |
| 2215 } |
| 2216 |
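[Inline note on the new function above] The optimized code map is a flat FixedArray with a small header (kEntriesStart slots) followed by fixed-size entries of kEntryLength == 4 slots, [context, code, literals, ast-id]. The loop keeps an entry only if its context, code, and literals are still marked (the ast-id is a Smi and needs no check), copies survivors down over dead entries, re-records the moved slots for pointer updating, and finally trims the array by the number of removed slots. The sketch below isolates just that fixed-stride compact-and-trim pattern, with std::vector standing in for FixedArray and a caller-supplied liveness predicate standing in for the mark-bit checks; slot recording is omitted and all names are illustrative, not V8 API.

#include <cstddef>
#include <vector>

// Illustrative sketch (not V8 code): compact a flat array holding
// fixed-stride entries after a small header, keeping only entries whose
// slots are all "live", then trim the tail. Mirrors the shape of
// ProcessAndClearOptimizedCodeMaps, with std::vector standing in for
// FixedArray and `is_live` standing in for the mark-bit checks.
template <typename T, typename Pred>
size_t CompactEntries(std::vector<T>* array, size_t header_size,
                      size_t entry_length, Pred is_live) {
  size_t new_length = header_size;
  const size_t old_length = array->size();
  for (size_t i = header_size; i + entry_length <= old_length;
       i += entry_length) {
    // In this sketch an entry survives only if every one of its slots is live.
    bool survives = true;
    for (size_t j = 0; j < entry_length; ++j) {
      if (!is_live((*array)[i + j])) { survives = false; break; }
    }
    if (!survives) continue;
    // Move the surviving entry down to its compacted position.
    for (size_t j = 0; j < entry_length; ++j) {
      (*array)[new_length + j] = (*array)[i + j];
    }
    new_length += entry_length;
  }
  // Trim the array if entries were removed (the real code calls
  // SharedFunctionInfo::TrimOptimizedCodeMap with the removed slot count).
  if (new_length < old_length) array->resize(new_length);
  return old_length - new_length;  // number of slots removed
}

int main() {
  // One header slot, two-slot entries; negative values mark "dead" slots.
  std::vector<int> map = {99, 1, 2, -3, 4, 5, 6};
  CompactEntries(&map, 1, 2, [](int v) { return v >= 0; });
  // map is now {99, 1, 2, 5, 6}: the entry containing -3 was dropped.
  return 0;
}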
| 2217 |
2145 void MarkCompactCollector::ProcessWeakReferences() { | 2218 void MarkCompactCollector::ProcessWeakReferences() { |
2146 // This should be done before processing weak cells because it checks | 2219 // This should be done before processing weak cells because it checks |
2147 // mark bits of maps in weak cells. | 2220 // mark bits of maps in weak cells. |
2148 DependentCode* dependent_code_list = DependentCodeListFromNonLiveMaps(); | 2221 DependentCode* dependent_code_list = DependentCodeListFromNonLiveMaps(); |
2149 | 2222 |
2150 // Process weak cells before MarkCodeForDeoptimization and | 2223 // Process weak cells before MarkCodeForDeoptimization and |
2151 // ClearNonLiveReferences so that weak cells in dependent code arrays are | 2224 // ClearNonLiveReferences so that weak cells in dependent code arrays are |
2152 // cleared or contain only live code objects. | 2225 // cleared or contain only live code objects. |
2153 ProcessAndClearWeakCells(); | 2226 ProcessAndClearWeakCells(); |
2154 | 2227 |
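[Inline note on the ordering comments above] The dependent code list for non-live maps is collected first because it reads map mark bits through weak cells that have not been cleared yet; weak cells are then processed before deoptimization marking so that dependent code arrays hold either cleared cells or only live code objects. A generic sketch of the weak-cell clearing pattern follows; WeakCell and HeapObjectStub are simplified stand-ins under assumed types, not the actual V8 implementation.

#include <vector>

// Simplified sketch (not V8 code): a "weak cell" holds a reference that must
// be cleared when its referent did not survive marking. This is the general
// pattern behind ProcessAndClearWeakCells.
struct HeapObjectStub {
  bool marked;  // stands in for the mark bit set during the marking phase
};

struct WeakCell {
  HeapObjectStub* value;
  bool cleared() const { return value == nullptr; }
};

void ProcessAndClearWeakCellsSketch(std::vector<WeakCell>* cells) {
  for (WeakCell& cell : *cells) {
    if (cell.cleared()) continue;
    if (!cell.value->marked) {
      // Referent is dead after marking: drop the weak reference.
      cell.value = nullptr;
    }
    // Otherwise the referent survived; the real collector would also record
    // the slot so the pointer can be updated during compaction.
  }
}

int main() {
  HeapObjectStub live{true};
  HeapObjectStub dead{false};
  std::vector<WeakCell> cells = {{&live}, {&dead}};
  ProcessAndClearWeakCellsSketch(&cells);
  // cells[0] still references `live`; cells[1] has been cleared.
  return 0;
}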
(...skipping 1910 matching lines...)
4065 MarkBit mark_bit = Marking::MarkBitFrom(host); | 4138 MarkBit mark_bit = Marking::MarkBitFrom(host); |
4066 if (Marking::IsBlack(mark_bit)) { | 4139 if (Marking::IsBlack(mark_bit)) { |
4067 RelocInfo rinfo(isolate(), pc, RelocInfo::CODE_TARGET, 0, host); | 4140 RelocInfo rinfo(isolate(), pc, RelocInfo::CODE_TARGET, 0, host); |
4068 RecordRelocSlot(&rinfo, target); | 4141 RecordRelocSlot(&rinfo, target); |
4069 } | 4142 } |
4070 } | 4143 } |
4071 } | 4144 } |
4072 | 4145 |
4073 } // namespace internal | 4146 } // namespace internal |
4074 } // namespace v8 | 4147 } // namespace v8 |