OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 1036 matching lines...)
1047 isolate_->heap()->mark_compact_collector()-> | 1047 isolate_->heap()->mark_compact_collector()-> |
1048 RecordSlot(code_slot, code_slot, *code_slot); | 1048 RecordSlot(code_slot, code_slot, *code_slot); |
1049 | 1049 |
1050 candidate = next_candidate; | 1050 candidate = next_candidate; |
1051 } | 1051 } |
1052 | 1052 |
1053 shared_function_info_candidates_head_ = NULL; | 1053 shared_function_info_candidates_head_ = NULL; |
1054 } | 1054 } |
1055 | 1055 |
1056 | 1056 |
| 1057 void CodeFlusher::ProcessOptimizedCodeMaps() { |
| 1058 static const int kEntriesStart = SharedFunctionInfo::kEntriesStart; |
| 1059 static const int kEntryLength = SharedFunctionInfo::kEntryLength; |
| 1060 STATIC_ASSERT(kEntryLength == 3); |
| 1061 |
| 1062 SharedFunctionInfo* holder = optimized_code_map_holder_head_; |
| 1063 SharedFunctionInfo* next_holder; |
| 1064 while (holder != NULL) { |
| 1065 next_holder = GetNextCodeMap(holder); |
| 1066 ClearNextCodeMap(holder); |
| 1067 |
| 1068 FixedArray* code_map = FixedArray::cast(holder->optimized_code_map()); |
| 1069 int new_length = kEntriesStart; |
| 1070 int old_length = code_map->length(); |
| 1071 for (int i = kEntriesStart; i < old_length; i += kEntryLength) { |
| 1072 Code* code = Code::cast(code_map->get(i + 1)); |
| 1073 MarkBit code_mark = Marking::MarkBitFrom(code); |
| 1074 if (!code_mark.Get()) { |
| 1075 continue; |
| 1076 } |
| 1077 |
| 1078 // Update and record the context slot in the optimized code map. |
| 1079 Object** context_slot = HeapObject::RawField(code_map, |
| 1080 FixedArray::OffsetOfElementAt(new_length)); |
| 1081 code_map->set(new_length++, code_map->get(i)); |
| 1082 ASSERT(Marking::IsBlack( |
| 1083 Marking::MarkBitFrom(HeapObject::cast(*context_slot)))); |
| 1084 isolate_->heap()->mark_compact_collector()-> |
| 1085 RecordSlot(context_slot, context_slot, *context_slot); |
| 1086 |
| 1087 // Update and record the code slot in the optimized code map. |
| 1088 Object** code_slot = HeapObject::RawField(code_map, |
| 1089 FixedArray::OffsetOfElementAt(new_length)); |
| 1090 code_map->set(new_length++, code_map->get(i + 1)); |
| 1091 ASSERT(Marking::IsBlack( |
| 1092 Marking::MarkBitFrom(HeapObject::cast(*code_slot)))); |
| 1093 isolate_->heap()->mark_compact_collector()-> |
| 1094 RecordSlot(code_slot, code_slot, *code_slot); |
| 1095 |
| 1096 // Update and record the literals slot in the optimized code map. |
| 1097 Object** literals_slot = HeapObject::RawField(code_map, |
| 1098 FixedArray::OffsetOfElementAt(new_length)); |
| 1099 code_map->set(new_length++, code_map->get(i + 2)); |
| 1100 ASSERT(Marking::IsBlack( |
| 1101 Marking::MarkBitFrom(HeapObject::cast(*literals_slot)))); |
| 1102 isolate_->heap()->mark_compact_collector()-> |
| 1103 RecordSlot(literals_slot, literals_slot, *literals_slot); |
| 1104 } |
| 1105 |
| 1106 // Trim the optimized code map if entries have been removed. |
| 1107 if (new_length < old_length) { |
| 1108 holder->TrimOptimizedCodeMap(old_length - new_length); |
| 1109 } |
| 1110 |
| 1111 holder = next_holder; |
| 1112 } |
| 1113 |
| 1114 optimized_code_map_holder_head_ = NULL; |
| 1115 } |
| 1116 |
| 1117 |
1057 void CodeFlusher::EvictCandidate(SharedFunctionInfo* shared_info) { | 1118 void CodeFlusher::EvictCandidate(SharedFunctionInfo* shared_info) { |
1058 // Make sure previous flushing decisions are revisited. | 1119 // Make sure previous flushing decisions are revisited. |
1059 isolate_->heap()->incremental_marking()->RecordWrites(shared_info); | 1120 isolate_->heap()->incremental_marking()->RecordWrites(shared_info); |
1060 | 1121 |
1061 SharedFunctionInfo* candidate = shared_function_info_candidates_head_; | 1122 SharedFunctionInfo* candidate = shared_function_info_candidates_head_; |
1062 SharedFunctionInfo* next_candidate; | 1123 SharedFunctionInfo* next_candidate; |
1063 if (candidate == shared_info) { | 1124 if (candidate == shared_info) { |
1064 next_candidate = GetNextCandidate(shared_info); | 1125 next_candidate = GetNextCandidate(shared_info); |
1065 shared_function_info_candidates_head_ = next_candidate; | 1126 shared_function_info_candidates_head_ = next_candidate; |
1066 ClearNextCandidate(shared_info); | 1127 ClearNextCandidate(shared_info); |
(...skipping 38 matching lines...)
1105 ClearNextCandidate(function, undefined); | 1166 ClearNextCandidate(function, undefined); |
1106 break; | 1167 break; |
1107 } | 1168 } |
1108 | 1169 |
1109 candidate = next_candidate; | 1170 candidate = next_candidate; |
1110 } | 1171 } |
1111 } | 1172 } |
1112 } | 1173 } |
1113 | 1174 |
1114 | 1175 |
| 1176 void CodeFlusher::EvictOptimizedCodeMap(SharedFunctionInfo* code_map_holder) { |
| 1177 ASSERT(!FixedArray::cast(code_map_holder->optimized_code_map())-> |
| 1178 get(SharedFunctionInfo::kNextMapIndex)->IsUndefined()); |
| 1179 |
| 1180 // Make sure previous flushing decisions are revisited. |
| 1181 isolate_->heap()->incremental_marking()->RecordWrites(code_map_holder); |
| 1182 |
| 1183 SharedFunctionInfo* holder = optimized_code_map_holder_head_; |
| 1184 SharedFunctionInfo* next_holder; |
| 1185 if (holder == code_map_holder) { |
| 1186 next_holder = GetNextCodeMap(code_map_holder); |
| 1187 optimized_code_map_holder_head_ = next_holder; |
| 1188 ClearNextCodeMap(code_map_holder); |
| 1189 } else { |
| 1190 while (holder != NULL) { |
| 1191 next_holder = GetNextCodeMap(holder); |
| 1192 |
| 1193 if (next_holder == code_map_holder) { |
| 1194 next_holder = GetNextCodeMap(code_map_holder); |
| 1195 SetNextCodeMap(holder, next_holder); |
| 1196 ClearNextCodeMap(code_map_holder); |
| 1197 break; |
| 1198 } |
| 1199 |
| 1200 holder = next_holder; |
| 1201 } |
| 1202 } |
| 1203 } |
| 1204 |
| 1205 |
1115 void CodeFlusher::EvictJSFunctionCandidates() { | 1206 void CodeFlusher::EvictJSFunctionCandidates() { |
1116 JSFunction* candidate = jsfunction_candidates_head_; | 1207 JSFunction* candidate = jsfunction_candidates_head_; |
1117 JSFunction* next_candidate; | 1208 JSFunction* next_candidate; |
1118 while (candidate != NULL) { | 1209 while (candidate != NULL) { |
1119 next_candidate = GetNextCandidate(candidate); | 1210 next_candidate = GetNextCandidate(candidate); |
1120 EvictCandidate(candidate); | 1211 EvictCandidate(candidate); |
1121 candidate = next_candidate; | 1212 candidate = next_candidate; |
1122 } | 1213 } |
1123 ASSERT(jsfunction_candidates_head_ == NULL); | 1214 ASSERT(jsfunction_candidates_head_ == NULL); |
1124 } | 1215 } |
1125 | 1216 |
1126 | 1217 |
1127 void CodeFlusher::EvictSharedFunctionInfoCandidates() { | 1218 void CodeFlusher::EvictSharedFunctionInfoCandidates() { |
1128 SharedFunctionInfo* candidate = shared_function_info_candidates_head_; | 1219 SharedFunctionInfo* candidate = shared_function_info_candidates_head_; |
1129 SharedFunctionInfo* next_candidate; | 1220 SharedFunctionInfo* next_candidate; |
1130 while (candidate != NULL) { | 1221 while (candidate != NULL) { |
1131 next_candidate = GetNextCandidate(candidate); | 1222 next_candidate = GetNextCandidate(candidate); |
1132 EvictCandidate(candidate); | 1223 EvictCandidate(candidate); |
1133 candidate = next_candidate; | 1224 candidate = next_candidate; |
1134 } | 1225 } |
1135 ASSERT(shared_function_info_candidates_head_ == NULL); | 1226 ASSERT(shared_function_info_candidates_head_ == NULL); |
1136 } | 1227 } |
1137 | 1228 |
1138 | 1229 |
| 1230 void CodeFlusher::EvictOptimizedCodeMaps() { |
| 1231 SharedFunctionInfo* holder = optimized_code_map_holder_head_; |
| 1232 SharedFunctionInfo* next_holder; |
| 1233 while (holder != NULL) { |
| 1234 next_holder = GetNextCodeMap(holder); |
| 1235 EvictOptimizedCodeMap(holder); |
| 1236 holder = next_holder; |
| 1237 } |
| 1238 ASSERT(optimized_code_map_holder_head_ == NULL); |
| 1239 } |
| 1240 |
| 1241 |
1139 void CodeFlusher::IteratePointersToFromSpace(ObjectVisitor* v) { | 1242 void CodeFlusher::IteratePointersToFromSpace(ObjectVisitor* v) { |
1140 Heap* heap = isolate_->heap(); | 1243 Heap* heap = isolate_->heap(); |
1141 | 1244 |
1142 JSFunction** slot = &jsfunction_candidates_head_; | 1245 JSFunction** slot = &jsfunction_candidates_head_; |
1143 JSFunction* candidate = jsfunction_candidates_head_; | 1246 JSFunction* candidate = jsfunction_candidates_head_; |
1144 while (candidate != NULL) { | 1247 while (candidate != NULL) { |
1145 if (heap->InFromSpace(candidate)) { | 1248 if (heap->InFromSpace(candidate)) { |
1146 v->VisitPointer(reinterpret_cast<Object**>(slot)); | 1249 v->VisitPointer(reinterpret_cast<Object**>(slot)); |
1147 } | 1250 } |
1148 candidate = GetNextCandidate(*slot); | 1251 candidate = GetNextCandidate(*slot); |
(...skipping 2799 matching lines...)
3948 | 4051 |
3949 if (enable) { | 4052 if (enable) { |
3950 if (code_flusher_ != NULL) return; | 4053 if (code_flusher_ != NULL) return; |
3951 code_flusher_ = new CodeFlusher(isolate()); | 4054 code_flusher_ = new CodeFlusher(isolate()); |
3952 } else { | 4055 } else { |
3953 if (code_flusher_ == NULL) return; | 4056 if (code_flusher_ == NULL) return; |
3954 code_flusher_->EvictAllCandidates(); | 4057 code_flusher_->EvictAllCandidates(); |
3955 delete code_flusher_; | 4058 delete code_flusher_; |
3956 code_flusher_ = NULL; | 4059 code_flusher_ = NULL; |
3957 } | 4060 } |
| 4061 |
| 4062 if (FLAG_trace_code_flushing) { |
| 4063 PrintF("[code-flushing is now %s]\n", enable ? "on" : "off"); |
| 4064 } |
3958 } | 4065 } |
3959 | 4066 |
3960 | 4067 |
3961 // TODO(1466) ReportDeleteIfNeeded is not called currently. | 4068 // TODO(1466) ReportDeleteIfNeeded is not called currently. |
3962 // Our profiling tools do not expect intersections between | 4069 // Our profiling tools do not expect intersections between |
3963 // code objects. We should either reenable it or change our tools. | 4070 // code objects. We should either reenable it or change our tools. |
3964 void MarkCompactCollector::ReportDeleteIfNeeded(HeapObject* obj, | 4071 void MarkCompactCollector::ReportDeleteIfNeeded(HeapObject* obj, |
3965 Isolate* isolate) { | 4072 Isolate* isolate) { |
3966 #ifdef ENABLE_GDB_JIT_INTERFACE | 4073 #ifdef ENABLE_GDB_JIT_INTERFACE |
3967 if (obj->IsCode()) { | 4074 if (obj->IsCode()) { |
(...skipping 165 matching lines...)
4133 while (buffer != NULL) { | 4240 while (buffer != NULL) { |
4134 SlotsBuffer* next_buffer = buffer->next(); | 4241 SlotsBuffer* next_buffer = buffer->next(); |
4135 DeallocateBuffer(buffer); | 4242 DeallocateBuffer(buffer); |
4136 buffer = next_buffer; | 4243 buffer = next_buffer; |
4137 } | 4244 } |
4138 *buffer_address = NULL; | 4245 *buffer_address = NULL; |
4139 } | 4246 } |
4140 | 4247 |
4141 | 4248 |
4142 } } // namespace v8::internal | 4249 } } // namespace v8::internal |
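Note on the new pass: ProcessOptimizedCodeMaps() above walks each holder's optimized code map, keeps only the (context, code, literals) triples whose Code object is still marked, compacts the survivors toward the front of the FixedArray, and finally calls TrimOptimizedCodeMap() to drop the dead tail. The following is a minimal sketch of that compact-then-trim pattern, not V8 code; the Entry struct and CompactCodeMap() helper are purely illustrative.

    #include <cassert>
    #include <cstddef>
    #include <vector>

    // Illustrative stand-in for one optimized-code-map entry
    // (context, code, literals) plus a "is the code still marked?" bit.
    struct Entry {
      int context;
      int code;
      int literals;
      bool code_is_marked;
    };

    // Keep only entries whose code is still marked, moving survivors to the
    // front, then trim the dead tail (the analogue of TrimOptimizedCodeMap).
    std::size_t CompactCodeMap(std::vector<Entry>* code_map) {
      std::size_t new_length = 0;
      for (std::size_t i = 0; i < code_map->size(); ++i) {
        if (!(*code_map)[i].code_is_marked) continue;  // flushed entry, drop it
        (*code_map)[new_length++] = (*code_map)[i];    // shift survivor left
      }
      code_map->resize(new_length);                    // trim removed entries
      return new_length;
    }

    int main() {
      std::vector<Entry> code_map = {
          {1, 11, 111, true}, {2, 22, 222, false}, {3, 33, 333, true}};
      assert(CompactCodeMap(&code_map) == 2);
      assert(code_map[1].code == 33);
      return 0;
    }

Because new_length never runs ahead of i, the copy is safe to do in place, which is why the real pass can rewrite the code map without allocating.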