| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/heap/mark-compact.h" | 5 #include "src/heap/mark-compact.h" |
| 6 | 6 |
| 7 #include "src/base/atomicops.h" | 7 #include "src/base/atomicops.h" |
| 8 #include "src/base/bits.h" | 8 #include "src/base/bits.h" |
| 9 #include "src/base/sys-info.h" | 9 #include "src/base/sys-info.h" |
| 10 #include "src/code-stubs.h" | 10 #include "src/code-stubs.h" |
| (...skipping 966 matching lines...) |
| 977 isolate_->heap()->mark_compact_collector()->RecordSlot(candidate, code_slot, | 977 isolate_->heap()->mark_compact_collector()->RecordSlot(candidate, code_slot, |
| 978 *code_slot); | 978 *code_slot); |
| 979 | 979 |
| 980 candidate = next_candidate; | 980 candidate = next_candidate; |
| 981 } | 981 } |
| 982 | 982 |
| 983 shared_function_info_candidates_head_ = NULL; | 983 shared_function_info_candidates_head_ = NULL; |
| 984 } | 984 } |
| 985 | 985 |
| 986 | 986 |
| 987 void CodeFlusher::ProcessOptimizedCodeMaps() { | |
| 988 STATIC_ASSERT(SharedFunctionInfo::kEntryLength == 4); | |
| 989 | |
| 990 SharedFunctionInfo* holder = optimized_code_map_holder_head_; | |
| 991 SharedFunctionInfo* next_holder; | |
| 992 | |
| 993 while (holder != NULL) { | |
| 994 next_holder = GetNextCodeMap(holder); | |
| 995 ClearNextCodeMap(holder); | |
| 996 | |
| 997 // Process context-dependent entries in the optimized code map. | |
| 998 FixedArray* code_map = FixedArray::cast(holder->optimized_code_map()); | |
| 999 int new_length = SharedFunctionInfo::kEntriesStart; | |
| 1000 int old_length = code_map->length(); | |
| 1001 for (int i = SharedFunctionInfo::kEntriesStart; i < old_length; | |
| 1002 i += SharedFunctionInfo::kEntryLength) { | |
| 1003 // Each entry contains [ context, code, literals, ast-id ] as fields. | |
| 1004 STATIC_ASSERT(SharedFunctionInfo::kEntryLength == 4); | |
| 1005 Context* context = | |
| 1006 Context::cast(code_map->get(i + SharedFunctionInfo::kContextOffset)); | |
| 1007 HeapObject* code = HeapObject::cast( | |
| 1008 code_map->get(i + SharedFunctionInfo::kCachedCodeOffset)); | |
| 1009 FixedArray* literals = FixedArray::cast( | |
| 1010 code_map->get(i + SharedFunctionInfo::kLiteralsOffset)); | |
| 1011 Smi* ast_id = | |
| 1012 Smi::cast(code_map->get(i + SharedFunctionInfo::kOsrAstIdOffset)); | |
| 1013 if (Marking::IsWhite(Marking::MarkBitFrom(context))) continue; | |
| 1014 DCHECK(Marking::IsBlack(Marking::MarkBitFrom(context))); | |
| 1015 if (Marking::IsWhite(Marking::MarkBitFrom(code))) continue; | |
| 1016 DCHECK(Marking::IsBlack(Marking::MarkBitFrom(code))); | |
| 1017 if (Marking::IsWhite(Marking::MarkBitFrom(literals))) continue; | |
| 1018 DCHECK(Marking::IsBlack(Marking::MarkBitFrom(literals))); | |
| 1019 // Move every slot in the entry and record slots when needed. | |
| 1020 code_map->set(new_length + SharedFunctionInfo::kCachedCodeOffset, code); | |
| 1021 code_map->set(new_length + SharedFunctionInfo::kContextOffset, context); | |
| 1022 code_map->set(new_length + SharedFunctionInfo::kLiteralsOffset, literals); | |
| 1023 code_map->set(new_length + SharedFunctionInfo::kOsrAstIdOffset, ast_id); | |
| 1024 Object** code_slot = code_map->RawFieldOfElementAt( | |
| 1025 new_length + SharedFunctionInfo::kCachedCodeOffset); | |
| 1026 isolate_->heap()->mark_compact_collector()->RecordSlot( | |
| 1027 code_map, code_slot, *code_slot); | |
| 1028 Object** context_slot = code_map->RawFieldOfElementAt( | |
| 1029 new_length + SharedFunctionInfo::kContextOffset); | |
| 1030 isolate_->heap()->mark_compact_collector()->RecordSlot( | |
| 1031 code_map, context_slot, *context_slot); | |
| 1032 Object** literals_slot = code_map->RawFieldOfElementAt( | |
| 1033 new_length + SharedFunctionInfo::kLiteralsOffset); | |
| 1034 isolate_->heap()->mark_compact_collector()->RecordSlot( | |
| 1035 code_map, literals_slot, *literals_slot); | |
| 1036 new_length += SharedFunctionInfo::kEntryLength; | |
| 1037 } | |
| 1038 | |
| 1039 // Process context-independent entry in the optimized code map. | |
| 1040 Object* shared_object = code_map->get(SharedFunctionInfo::kSharedCodeIndex); | |
| 1041 if (shared_object->IsCode()) { | |
| 1042 Code* shared_code = Code::cast(shared_object); | |
| 1043 if (Marking::IsWhite(Marking::MarkBitFrom(shared_code))) { | |
| 1044 code_map->set_undefined(SharedFunctionInfo::kSharedCodeIndex); | |
| 1045 } else { | |
| 1046 DCHECK(Marking::IsBlack(Marking::MarkBitFrom(shared_code))); | |
| 1047 Object** slot = | |
| 1048 code_map->RawFieldOfElementAt(SharedFunctionInfo::kSharedCodeIndex); | |
| 1049 isolate_->heap()->mark_compact_collector()->RecordSlot(code_map, slot, | |
| 1050 *slot); | |
| 1051 } | |
| 1052 } | |
| 1053 | |
| 1054 // Trim the optimized code map if entries have been removed. | |
| 1055 if (new_length < old_length) { | |
| 1056 holder->TrimOptimizedCodeMap(old_length - new_length); | |
| 1057 } | |
| 1058 | |
| 1059 holder = next_holder; | |
| 1060 } | |
| 1061 | |
| 1062 optimized_code_map_holder_head_ = NULL; | |
| 1063 } | |
| 1064 | |
| 1065 | |
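[Note: the function removed above compacts the optimized code map in place: each surviving [context, code, literals, ast-id] quadruple is shifted down over dead entries, the moved slots are re-recorded with the collector, and the dead tail is trimmed. A minimal standalone sketch of that compaction pattern follows; kEntryLength, the liveness predicate, and the vector are stand-ins, not V8's actual FixedArray API.]

    #include <functional>
    #include <vector>

    constexpr int kEntryLength = 4;  // [context, code, literals, ast-id]

    // Keep only the entries the predicate marks live, shifting them down
    // over dead ones, then trim the tail (the analogue of
    // TrimOptimizedCodeMap). Returns the new length.
    int CompactEntries(std::vector<void*>* map,
                       const std::function<bool(int)>& is_live) {
      int new_length = 0;
      const int old_length = static_cast<int>(map->size());
      for (int i = 0; i < old_length; i += kEntryLength) {
        if (!is_live(i)) continue;  // dead (unmarked) entry: drop it
        for (int f = 0; f < kEntryLength; ++f) {
          (*map)[new_length + f] = (*map)[i + f];  // shift the entry down
        }
        new_length += kEntryLength;
      }
      map->resize(new_length);
      return new_length;
    }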
| 1066 void CodeFlusher::EvictCandidate(SharedFunctionInfo* shared_info) { | 987 void CodeFlusher::EvictCandidate(SharedFunctionInfo* shared_info) { |
| 1067 // Make sure previous flushing decisions are revisited. | 988 // Make sure previous flushing decisions are revisited. |
| 1068 isolate_->heap()->incremental_marking()->RecordWrites(shared_info); | 989 isolate_->heap()->incremental_marking()->RecordWrites(shared_info); |
| 1069 | 990 |
| 1070 if (FLAG_trace_code_flushing) { | 991 if (FLAG_trace_code_flushing) { |
| 1071 PrintF("[code-flushing abandons function-info: "); | 992 PrintF("[code-flushing abandons function-info: "); |
| 1072 shared_info->ShortPrint(); | 993 shared_info->ShortPrint(); |
| 1073 PrintF("]\n"); | 994 PrintF("]\n"); |
| 1074 } | 995 } |
| 1075 | 996 |
| (...skipping 50 matching lines...) |
| 1126 ClearNextCandidate(function, undefined); | 1047 ClearNextCandidate(function, undefined); |
| 1127 break; | 1048 break; |
| 1128 } | 1049 } |
| 1129 | 1050 |
| 1130 candidate = next_candidate; | 1051 candidate = next_candidate; |
| 1131 } | 1052 } |
| 1132 } | 1053 } |
| 1133 } | 1054 } |
| 1134 | 1055 |
| 1135 | 1056 |
| 1136 void CodeFlusher::EvictOptimizedCodeMap(SharedFunctionInfo* code_map_holder) { | |
| 1137 FixedArray* code_map = | |
| 1138 FixedArray::cast(code_map_holder->optimized_code_map()); | |
| 1139 DCHECK(!code_map->get(SharedFunctionInfo::kNextMapIndex)->IsUndefined()); | |
| 1140 | |
| 1141 // Make sure previous flushing decisions are revisited. | |
| 1142 isolate_->heap()->incremental_marking()->RecordWrites(code_map); | |
| 1143 isolate_->heap()->incremental_marking()->RecordWrites(code_map_holder); | |
| 1144 | |
| 1145 if (FLAG_trace_code_flushing) { | |
| 1146 PrintF("[code-flushing abandons code-map: "); | |
| 1147 code_map_holder->ShortPrint(); | |
| 1148 PrintF("]\n"); | |
| 1149 } | |
| 1150 | |
| 1151 SharedFunctionInfo* holder = optimized_code_map_holder_head_; | |
| 1152 SharedFunctionInfo* next_holder; | |
| 1153 if (holder == code_map_holder) { | |
| 1154 next_holder = GetNextCodeMap(code_map_holder); | |
| 1155 optimized_code_map_holder_head_ = next_holder; | |
| 1156 ClearNextCodeMap(code_map_holder); | |
| 1157 } else { | |
| 1158 while (holder != NULL) { | |
| 1159 next_holder = GetNextCodeMap(holder); | |
| 1160 | |
| 1161 if (next_holder == code_map_holder) { | |
| 1162 next_holder = GetNextCodeMap(code_map_holder); | |
| 1163 SetNextCodeMap(holder, next_holder); | |
| 1164 ClearNextCodeMap(code_map_holder); | |
| 1165 break; | |
| 1166 } | |
| 1167 | |
| 1168 holder = next_holder; | |
| 1169 } | |
| 1170 } | |
| 1171 } | |
| 1172 | |
| 1173 | |
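[Note: EvictOptimizedCodeMap, removed above, unlinks one holder from a singly linked list threaded through the SharedFunctionInfo objects themselves: the head pointer is special-cased, otherwise the list is walked to the target's predecessor. A generic sketch of that unlink pattern, with Node standing in for the holder/next-code-map links:]

    struct Node {
      Node* next = nullptr;
    };

    // Splice |target| out of the list rooted at |*head|.
    void Unlink(Node** head, Node* target) {
      if (*head == target) {         // target is the head: advance the head
        *head = target->next;
      } else {
        for (Node* n = *head; n != nullptr; n = n->next) {
          if (n->next == target) {   // n is target's predecessor
            n->next = target->next;  // splice target out
            break;
          }
        }
      }
      target->next = nullptr;        // analogue of ClearNextCodeMap
    }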
| 1174 void CodeFlusher::EvictJSFunctionCandidates() { | 1057 void CodeFlusher::EvictJSFunctionCandidates() { |
| 1175 JSFunction* candidate = jsfunction_candidates_head_; | 1058 JSFunction* candidate = jsfunction_candidates_head_; |
| 1176 JSFunction* next_candidate; | 1059 JSFunction* next_candidate; |
| 1177 while (candidate != NULL) { | 1060 while (candidate != NULL) { |
| 1178 next_candidate = GetNextCandidate(candidate); | 1061 next_candidate = GetNextCandidate(candidate); |
| 1179 EvictCandidate(candidate); | 1062 EvictCandidate(candidate); |
| 1180 candidate = next_candidate; | 1063 candidate = next_candidate; |
| 1181 } | 1064 } |
| 1182 DCHECK(jsfunction_candidates_head_ == NULL); | 1065 DCHECK(jsfunction_candidates_head_ == NULL); |
| 1183 } | 1066 } |
| 1184 | 1067 |
| 1185 | 1068 |
| 1186 void CodeFlusher::EvictSharedFunctionInfoCandidates() { | 1069 void CodeFlusher::EvictSharedFunctionInfoCandidates() { |
| 1187 SharedFunctionInfo* candidate = shared_function_info_candidates_head_; | 1070 SharedFunctionInfo* candidate = shared_function_info_candidates_head_; |
| 1188 SharedFunctionInfo* next_candidate; | 1071 SharedFunctionInfo* next_candidate; |
| 1189 while (candidate != NULL) { | 1072 while (candidate != NULL) { |
| 1190 next_candidate = GetNextCandidate(candidate); | 1073 next_candidate = GetNextCandidate(candidate); |
| 1191 EvictCandidate(candidate); | 1074 EvictCandidate(candidate); |
| 1192 candidate = next_candidate; | 1075 candidate = next_candidate; |
| 1193 } | 1076 } |
| 1194 DCHECK(shared_function_info_candidates_head_ == NULL); | 1077 DCHECK(shared_function_info_candidates_head_ == NULL); |
| 1195 } | 1078 } |
| 1196 | 1079 |
| 1197 | 1080 |
| 1198 void CodeFlusher::EvictOptimizedCodeMaps() { | |
| 1199 SharedFunctionInfo* holder = optimized_code_map_holder_head_; | |
| 1200 SharedFunctionInfo* next_holder; | |
| 1201 while (holder != NULL) { | |
| 1202 next_holder = GetNextCodeMap(holder); | |
| 1203 EvictOptimizedCodeMap(holder); | |
| 1204 holder = next_holder; | |
| 1205 } | |
| 1206 DCHECK(optimized_code_map_holder_head_ == NULL); | |
| 1207 } | |
| 1208 | |
| 1209 | |
| 1210 void CodeFlusher::IteratePointersToFromSpace(ObjectVisitor* v) { | 1081 void CodeFlusher::IteratePointersToFromSpace(ObjectVisitor* v) { |
| 1211 Heap* heap = isolate_->heap(); | 1082 Heap* heap = isolate_->heap(); |
| 1212 | 1083 |
| 1213 JSFunction** slot = &jsfunction_candidates_head_; | 1084 JSFunction** slot = &jsfunction_candidates_head_; |
| 1214 JSFunction* candidate = jsfunction_candidates_head_; | 1085 JSFunction* candidate = jsfunction_candidates_head_; |
| 1215 while (candidate != NULL) { | 1086 while (candidate != NULL) { |
| 1216 if (heap->InFromSpace(candidate)) { | 1087 if (heap->InFromSpace(candidate)) { |
| 1217 v->VisitPointer(reinterpret_cast<Object**>(slot)); | 1088 v->VisitPointer(reinterpret_cast<Object**>(slot)); |
| 1218 } | 1089 } |
| 1219 candidate = GetNextCandidate(*slot); | 1090 candidate = GetNextCandidate(*slot); |
| (...skipping 1008 matching lines...) |
| 2228 heap()->isolate()->global_handles()->RemoveImplicitRefGroups(); | 2099 heap()->isolate()->global_handles()->RemoveImplicitRefGroups(); |
| 2229 } | 2100 } |
| 2230 | 2101 |
| 2231 // Flush code from collected candidates. | 2102 // Flush code from collected candidates. |
| 2232 if (is_code_flushing_enabled()) { | 2103 if (is_code_flushing_enabled()) { |
| 2233 GCTracer::Scope gc_scope(heap()->tracer(), | 2104 GCTracer::Scope gc_scope(heap()->tracer(), |
| 2234 GCTracer::Scope::MC_MARK_CODE_FLUSH); | 2105 GCTracer::Scope::MC_MARK_CODE_FLUSH); |
| 2235 code_flusher_->ProcessCandidates(); | 2106 code_flusher_->ProcessCandidates(); |
| 2236 } | 2107 } |
| 2237 | 2108 |
| 2109 // Process and clear all optimized code maps. |
| 2110 if (!FLAG_flush_optimized_code_cache) { |
| 2111 GCTracer::Scope gc_scope(heap()->tracer(), |
| 2112 GCTracer::Scope::MC_MARK_OPTIMIZED_CODE_MAPS); |
| 2113 ProcessAndClearOptimizedCodeMaps(); |
| 2114 } |
| 2115 |
| 2238 if (FLAG_track_gc_object_stats) { | 2116 if (FLAG_track_gc_object_stats) { |
| 2239 if (FLAG_trace_gc_object_stats) { | 2117 if (FLAG_trace_gc_object_stats) { |
| 2240 heap()->object_stats_->TraceObjectStats(); | 2118 heap()->object_stats_->TraceObjectStats(); |
| 2241 } | 2119 } |
| 2242 heap()->object_stats_->CheckpointObjectStats(); | 2120 heap()->object_stats_->CheckpointObjectStats(); |
| 2243 } | 2121 } |
| 2244 } | 2122 } |
| 2245 | 2123 |
| 2246 | 2124 |
| 2125 void MarkCompactCollector::ProcessAndClearOptimizedCodeMaps() { |
| 2126 SharedFunctionInfo::Iterator iterator(isolate()); |
| 2127 while (SharedFunctionInfo* shared = iterator.Next()) { |
| 2128 if (shared->optimized_code_map()->IsSmi()) continue; |
| 2129 |
| 2130 // Process context-dependent entries in the optimized code map. |
| 2131 FixedArray* code_map = FixedArray::cast(shared->optimized_code_map()); |
| 2132 int new_length = SharedFunctionInfo::kEntriesStart; |
| 2133 int old_length = code_map->length(); |
| 2134 for (int i = SharedFunctionInfo::kEntriesStart; i < old_length; |
| 2135 i += SharedFunctionInfo::kEntryLength) { |
| 2136 // Each entry contains [ context, code, literals, ast-id ] as fields. |
| 2137 STATIC_ASSERT(SharedFunctionInfo::kEntryLength == 4); |
| 2138 Context* context = |
| 2139 Context::cast(code_map->get(i + SharedFunctionInfo::kContextOffset)); |
| 2140 HeapObject* code = HeapObject::cast( |
| 2141 code_map->get(i + SharedFunctionInfo::kCachedCodeOffset)); |
| 2142 FixedArray* literals = FixedArray::cast( |
| 2143 code_map->get(i + SharedFunctionInfo::kLiteralsOffset)); |
| 2144 Smi* ast_id = |
| 2145 Smi::cast(code_map->get(i + SharedFunctionInfo::kOsrAstIdOffset)); |
| 2146 if (Marking::IsWhite(Marking::MarkBitFrom(context))) continue; |
| 2147 DCHECK(Marking::IsBlack(Marking::MarkBitFrom(context))); |
| 2148 if (Marking::IsWhite(Marking::MarkBitFrom(code))) continue; |
| 2149 DCHECK(Marking::IsBlack(Marking::MarkBitFrom(code))); |
| 2150 if (Marking::IsWhite(Marking::MarkBitFrom(literals))) continue; |
| 2151 DCHECK(Marking::IsBlack(Marking::MarkBitFrom(literals))); |
| 2152 // Move every slot in the entry and record slots when needed. |
| 2153 code_map->set(new_length + SharedFunctionInfo::kCachedCodeOffset, code); |
| 2154 code_map->set(new_length + SharedFunctionInfo::kContextOffset, context); |
| 2155 code_map->set(new_length + SharedFunctionInfo::kLiteralsOffset, literals); |
| 2156 code_map->set(new_length + SharedFunctionInfo::kOsrAstIdOffset, ast_id); |
| 2157 Object** code_slot = code_map->RawFieldOfElementAt( |
| 2158 new_length + SharedFunctionInfo::kCachedCodeOffset); |
| 2159 RecordSlot(code_map, code_slot, *code_slot); |
| 2160 Object** context_slot = code_map->RawFieldOfElementAt( |
| 2161 new_length + SharedFunctionInfo::kContextOffset); |
| 2162 RecordSlot(code_map, context_slot, *context_slot); |
| 2163 Object** literals_slot = code_map->RawFieldOfElementAt( |
| 2164 new_length + SharedFunctionInfo::kLiteralsOffset); |
| 2165 RecordSlot(code_map, literals_slot, *literals_slot); |
| 2166 new_length += SharedFunctionInfo::kEntryLength; |
| 2167 } |
| 2168 |
| 2169 // Process context-independent entry in the optimized code map. |
| 2170 Object* shared_object = code_map->get(SharedFunctionInfo::kSharedCodeIndex); |
| 2171 if (shared_object->IsCode()) { |
| 2172 Code* shared_code = Code::cast(shared_object); |
| 2173 if (Marking::IsWhite(Marking::MarkBitFrom(shared_code))) { |
| 2174 code_map->set_undefined(SharedFunctionInfo::kSharedCodeIndex); |
| 2175 } else { |
| 2176 DCHECK(Marking::IsBlack(Marking::MarkBitFrom(shared_code))); |
| 2177 Object** slot = |
| 2178 code_map->RawFieldOfElementAt(SharedFunctionInfo::kSharedCodeIndex); |
| 2179 RecordSlot(code_map, slot, *slot); |
| 2180 } |
| 2181 } |
| 2182 |
| 2183 // Trim the optimized code map if entries have been removed. |
| 2184 if (new_length < old_length) { |
| 2185 shared->TrimOptimizedCodeMap(old_length - new_length); |
| 2186 } |
| 2187 } |
| 2188 } |
| 2189 |
| 2190 |
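[Note: the new ProcessAndClearOptimizedCodeMaps performs the same per-entry compaction as the deleted CodeFlusher method, but finds its work by walking every SharedFunctionInfo with SharedFunctionInfo::Iterator instead of consuming an intrusive holder list, which is why the next-code-map link and the eviction bookkeeping above can go away. A sketch of the relocated control flow, with simplified stand-in types (a null map plays the role of the Smi sentinel checked at line 2128):]

    #include <vector>

    struct CodeMap { /* flat [context, code, literals, ast-id] entries */ };
    struct SharedInfo {
      CodeMap* code_map = nullptr;  // null == "no optimized code map"
    };

    void CompactCodeMap(CodeMap* map) {
      // Compact live entries in place, as sketched earlier.
    }

    // Visit every SharedFunctionInfo once per full GC; no per-object list
    // links or eviction hooks are needed, at the cost of a full heap walk.
    void ProcessAllCodeMaps(const std::vector<SharedInfo*>& shared_infos) {
      for (SharedInfo* shared : shared_infos) {
        if (shared->code_map == nullptr) continue;  // Smi sentinel case
        CompactCodeMap(shared->code_map);
      }
    }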
| 2247 void MarkCompactCollector::ClearNonLiveReferences() { | 2191 void MarkCompactCollector::ClearNonLiveReferences() { |
| 2248 GCTracer::Scope gc_scope(heap()->tracer(), | 2192 GCTracer::Scope gc_scope(heap()->tracer(), |
| 2249 GCTracer::Scope::MC_NONLIVEREFERENCES); | 2193 GCTracer::Scope::MC_NONLIVEREFERENCES); |
| 2250 // Iterate over the map space, setting map transitions that go from | 2194 // Iterate over the map space, setting map transitions that go from |
| 2251 // a marked map to an unmarked map to null transitions. This action | 2195 // a marked map to an unmarked map to null transitions. This action |
| 2252 // is carried out only on maps of JSObjects and related subtypes. | 2196 // is carried out only on maps of JSObjects and related subtypes. |
| 2253 HeapObjectIterator map_iterator(heap()->map_space()); | 2197 HeapObjectIterator map_iterator(heap()->map_space()); |
| 2254 for (HeapObject* obj = map_iterator.Next(); obj != NULL; | 2198 for (HeapObject* obj = map_iterator.Next(); obj != NULL; |
| 2255 obj = map_iterator.Next()) { | 2199 obj = map_iterator.Next()) { |
| 2256 Map* map = Map::cast(obj); | 2200 Map* map = Map::cast(obj); |
| (...skipping 2374 matching lines...) |
| 4631 MarkBit mark_bit = Marking::MarkBitFrom(host); | 4575 MarkBit mark_bit = Marking::MarkBitFrom(host); |
| 4632 if (Marking::IsBlack(mark_bit)) { | 4576 if (Marking::IsBlack(mark_bit)) { |
| 4633 RelocInfo rinfo(pc, RelocInfo::CODE_TARGET, 0, host); | 4577 RelocInfo rinfo(pc, RelocInfo::CODE_TARGET, 0, host); |
| 4634 RecordRelocSlot(&rinfo, target); | 4578 RecordRelocSlot(&rinfo, target); |
| 4635 } | 4579 } |
| 4636 } | 4580 } |
| 4637 } | 4581 } |
| 4638 | 4582 |
| 4639 } // namespace internal | 4583 } // namespace internal |
| 4640 } // namespace v8 | 4584 } // namespace v8 |