OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 1036 matching lines...)
1047 isolate_->heap()->mark_compact_collector()-> | 1047 isolate_->heap()->mark_compact_collector()-> |
1048 RecordSlot(code_slot, code_slot, *code_slot); | 1048 RecordSlot(code_slot, code_slot, *code_slot); |
1049 | 1049 |
1050 candidate = next_candidate; | 1050 candidate = next_candidate; |
1051 } | 1051 } |
1052 | 1052 |
1053 shared_function_info_candidates_head_ = NULL; | 1053 shared_function_info_candidates_head_ = NULL; |
1054 } | 1054 } |
1055 | 1055 |
1056 | 1056 |
| 1057 void CodeFlusher::ProcessOptimizedCodeMaps() { |
| 1058 static const int kEntriesStart = SharedFunctionInfo::kEntriesStart; |
| 1059 static const int kEntryLength = SharedFunctionInfo::kEntryLength; |
| 1060 static const int kContextOffset = 0; |
| 1061 static const int kCodeOffset = 1; |
| 1062 static const int kLiteralsOffset = 2; |
| 1063 STATIC_ASSERT(kEntryLength == 3); |
| 1064 |
| 1065 SharedFunctionInfo* holder = optimized_code_map_holder_head_; |
| 1066 SharedFunctionInfo* next_holder; |
| 1067 while (holder != NULL) { |
| 1068 next_holder = GetNextCodeMap(holder); |
| 1069 ClearNextCodeMap(holder); |
| 1070 |
| 1071 FixedArray* code_map = FixedArray::cast(holder->optimized_code_map()); |
| 1072 int new_length = kEntriesStart; |
| 1073 int old_length = code_map->length(); |
| 1074 for (int i = kEntriesStart; i < old_length; i += kEntryLength) { |
| 1075 Code* code = Code::cast(code_map->get(i + kCodeOffset)); |
| 1076 MarkBit code_mark = Marking::MarkBitFrom(code); |
| 1077 if (!code_mark.Get()) { |
| 1078 continue; |
| 1079 } |
| 1080 |
| 1081 // Update and record the context slot in the optimized code map. |
| 1082 Object** context_slot = HeapObject::RawField(code_map, |
| 1083 FixedArray::OffsetOfElementAt(new_length)); |
| 1084 code_map->set(new_length++, code_map->get(i + kContextOffset)); |
| 1085 ASSERT(Marking::IsBlack( |
| 1086 Marking::MarkBitFrom(HeapObject::cast(*context_slot)))); |
| 1087 isolate_->heap()->mark_compact_collector()-> |
| 1088 RecordSlot(context_slot, context_slot, *context_slot); |
| 1089 |
| 1090 // Update and record the code slot in the optimized code map. |
| 1091 Object** code_slot = HeapObject::RawField(code_map, |
| 1092 FixedArray::OffsetOfElementAt(new_length)); |
| 1093 code_map->set(new_length++, code_map->get(i + kCodeOffset)); |
| 1094 ASSERT(Marking::IsBlack( |
| 1095 Marking::MarkBitFrom(HeapObject::cast(*code_slot)))); |
| 1096 isolate_->heap()->mark_compact_collector()-> |
| 1097 RecordSlot(code_slot, code_slot, *code_slot); |
| 1098 |
| 1099 // Update and record the literals slot in the optimized code map. |
| 1100 Object** literals_slot = HeapObject::RawField(code_map, |
| 1101 FixedArray::OffsetOfElementAt(new_length)); |
| 1102 code_map->set(new_length++, code_map->get(i + kLiteralsOffset)); |
| 1103 ASSERT(Marking::IsBlack( |
| 1104 Marking::MarkBitFrom(HeapObject::cast(*literals_slot)))); |
| 1105 isolate_->heap()->mark_compact_collector()-> |
| 1106 RecordSlot(literals_slot, literals_slot, *literals_slot); |
| 1107 } |
| 1108 |
| 1109 // Trim the optimized code map if entries have been removed. |
| 1110 if (new_length < old_length) { |
| 1111 holder->TrimOptimizedCodeMap(old_length - new_length); |
| 1112 } |
| 1113 |
| 1114 holder = next_holder; |
| 1115 } |
| 1116 |
| 1117 optimized_code_map_holder_head_ = NULL; |
| 1118 } |
| 1119 |
| 1120 |
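The new ProcessOptimizedCodeMaps pass above keeps only those entries of each holder's optimized code map whose code object is still marked, compacting the surviving [context, code, literals] triplets toward the front and trimming the tail. A minimal standalone sketch of that in-place compaction idea, using plain C++ containers rather than V8's FixedArray (CompactEntries and keep are illustrative names, not V8 API):

// Standalone sketch, not V8 code: compacts a flat array of fixed-size
// entries in place, keeping only entries for which `keep` returns true,
// and returns the new logical length so the caller can trim the tail.
#include <cstddef>
#include <functional>
#include <vector>

size_t CompactEntries(std::vector<void*>* map,
                      size_t entries_start,
                      size_t entry_length,
                      const std::function<bool(size_t)>& keep) {
  size_t new_length = entries_start;
  for (size_t i = entries_start; i < map->size(); i += entry_length) {
    if (!keep(i)) continue;                  // Dead entry: skip, do not copy.
    for (size_t j = 0; j < entry_length; j++) {
      (*map)[new_length++] = (*map)[i + j];  // Shift survivor toward the front.
    }
  }
  return new_length;  // Caller trims map->size() - new_length slots.
}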
1057 void CodeFlusher::EvictCandidate(SharedFunctionInfo* shared_info) { | 1121 void CodeFlusher::EvictCandidate(SharedFunctionInfo* shared_info) { |
1058 // Make sure previous flushing decisions are revisited. | 1122 // Make sure previous flushing decisions are revisited. |
1059 isolate_->heap()->incremental_marking()->RecordWrites(shared_info); | 1123 isolate_->heap()->incremental_marking()->RecordWrites(shared_info); |
1060 | 1124 |
1061 SharedFunctionInfo* candidate = shared_function_info_candidates_head_; | 1125 SharedFunctionInfo* candidate = shared_function_info_candidates_head_; |
1062 SharedFunctionInfo* next_candidate; | 1126 SharedFunctionInfo* next_candidate; |
1063 if (candidate == shared_info) { | 1127 if (candidate == shared_info) { |
1064 next_candidate = GetNextCandidate(shared_info); | 1128 next_candidate = GetNextCandidate(shared_info); |
1065 shared_function_info_candidates_head_ = next_candidate; | 1129 shared_function_info_candidates_head_ = next_candidate; |
1066 ClearNextCandidate(shared_info); | 1130 ClearNextCandidate(shared_info); |
(...skipping 38 matching lines...)
1105 ClearNextCandidate(function, undefined); | 1169 ClearNextCandidate(function, undefined); |
1106 break; | 1170 break; |
1107 } | 1171 } |
1108 | 1172 |
1109 candidate = next_candidate; | 1173 candidate = next_candidate; |
1110 } | 1174 } |
1111 } | 1175 } |
1112 } | 1176 } |
1113 | 1177 |
1114 | 1178 |
| 1179 void CodeFlusher::EvictOptimizedCodeMap(SharedFunctionInfo* code_map_holder) { |
| 1180 ASSERT(!FixedArray::cast(code_map_holder->optimized_code_map())-> |
| 1181 get(SharedFunctionInfo::kNextMapIndex)->IsUndefined()); |
| 1182 |
| 1183 // Make sure previous flushing decisions are revisited. |
| 1184 isolate_->heap()->incremental_marking()->RecordWrites(code_map_holder); |
| 1185 |
| 1186 SharedFunctionInfo* holder = optimized_code_map_holder_head_; |
| 1187 SharedFunctionInfo* next_holder; |
| 1188 if (holder == code_map_holder) { |
| 1189 next_holder = GetNextCodeMap(code_map_holder); |
| 1190 optimized_code_map_holder_head_ = next_holder; |
| 1191 ClearNextCodeMap(code_map_holder); |
| 1192 } else { |
| 1193 while (holder != NULL) { |
| 1194 next_holder = GetNextCodeMap(holder); |
| 1195 |
| 1196 if (next_holder == code_map_holder) { |
| 1197 next_holder = GetNextCodeMap(code_map_holder); |
| 1198 SetNextCodeMap(holder, next_holder); |
| 1199 ClearNextCodeMap(code_map_holder); |
| 1200 break; |
| 1201 } |
| 1202 |
| 1203 holder = next_holder; |
| 1204 } |
| 1205 } |
| 1206 } |
| 1207 |
| 1208 |
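EvictOptimizedCodeMap above unlinks a single holder from the flusher's singly-linked list of code-map holders, which is threaded through the SharedFunctionInfo objects themselves via the next-map slot. The same unlink shape on a plain C++ list without a dummy head node, so the head case and the interior case are handled separately as in the code above (Node and Unlink are illustrative names, not V8 types):

// Standalone sketch of the unlink pattern, assuming a list with no dummy head.
struct Node { Node* next; };

void Unlink(Node** head, Node* target) {
  if (*head == target) {
    *head = target->next;        // Target is the head: advance the head pointer.
    target->next = nullptr;
  } else {
    for (Node* n = *head; n != nullptr; n = n->next) {
      if (n->next == target) {
        n->next = target->next;  // Splice target out of the chain.
        target->next = nullptr;
        break;
      }
    }
  }
}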
1115 void CodeFlusher::EvictJSFunctionCandidates() { | 1209 void CodeFlusher::EvictJSFunctionCandidates() { |
1116 JSFunction* candidate = jsfunction_candidates_head_; | 1210 JSFunction* candidate = jsfunction_candidates_head_; |
1117 JSFunction* next_candidate; | 1211 JSFunction* next_candidate; |
1118 while (candidate != NULL) { | 1212 while (candidate != NULL) { |
1119 next_candidate = GetNextCandidate(candidate); | 1213 next_candidate = GetNextCandidate(candidate); |
1120 EvictCandidate(candidate); | 1214 EvictCandidate(candidate); |
1121 candidate = next_candidate; | 1215 candidate = next_candidate; |
1122 } | 1216 } |
1123 ASSERT(jsfunction_candidates_head_ == NULL); | 1217 ASSERT(jsfunction_candidates_head_ == NULL); |
1124 } | 1218 } |
1125 | 1219 |
1126 | 1220 |
1127 void CodeFlusher::EvictSharedFunctionInfoCandidates() { | 1221 void CodeFlusher::EvictSharedFunctionInfoCandidates() { |
1128 SharedFunctionInfo* candidate = shared_function_info_candidates_head_; | 1222 SharedFunctionInfo* candidate = shared_function_info_candidates_head_; |
1129 SharedFunctionInfo* next_candidate; | 1223 SharedFunctionInfo* next_candidate; |
1130 while (candidate != NULL) { | 1224 while (candidate != NULL) { |
1131 next_candidate = GetNextCandidate(candidate); | 1225 next_candidate = GetNextCandidate(candidate); |
1132 EvictCandidate(candidate); | 1226 EvictCandidate(candidate); |
1133 candidate = next_candidate; | 1227 candidate = next_candidate; |
1134 } | 1228 } |
1135 ASSERT(shared_function_info_candidates_head_ == NULL); | 1229 ASSERT(shared_function_info_candidates_head_ == NULL); |
1136 } | 1230 } |
1137 | 1231 |
1138 | 1232 |
| 1233 void CodeFlusher::EvictOptimizedCodeMaps() { |
| 1234 SharedFunctionInfo* holder = optimized_code_map_holder_head_; |
| 1235 SharedFunctionInfo* next_holder; |
| 1236 while (holder != NULL) { |
| 1237 next_holder = GetNextCodeMap(holder); |
| 1238 EvictOptimizedCodeMap(holder); |
| 1239 holder = next_holder; |
| 1240 } |
| 1241 ASSERT(optimized_code_map_holder_head_ == NULL); |
| 1242 } |
| 1243 |
| 1244 |
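EvictOptimizedCodeMaps drains that holder list by reading each holder's successor before evicting it, since eviction clears the link. A compact sketch of that save-the-next-pointer-first loop, again with illustrative plain C++ types rather than V8's:

// Sketch of the drain loop: the successor is read before the current node is
// evicted, because eviction clears the node's next link.
struct Holder { Holder* next; };

void EvictAll(Holder** head, void (*evict)(Holder**, Holder*)) {
  Holder* h = *head;
  while (h != nullptr) {
    Holder* next = h->next;  // Save the link first; evict() will clear it.
    evict(head, h);
    h = next;
  }
}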
1139 void CodeFlusher::IteratePointersToFromSpace(ObjectVisitor* v) { | 1245 void CodeFlusher::IteratePointersToFromSpace(ObjectVisitor* v) { |
1140 Heap* heap = isolate_->heap(); | 1246 Heap* heap = isolate_->heap(); |
1141 | 1247 |
1142 JSFunction** slot = &jsfunction_candidates_head_; | 1248 JSFunction** slot = &jsfunction_candidates_head_; |
1143 JSFunction* candidate = jsfunction_candidates_head_; | 1249 JSFunction* candidate = jsfunction_candidates_head_; |
1144 while (candidate != NULL) { | 1250 while (candidate != NULL) { |
1145 if (heap->InFromSpace(candidate)) { | 1251 if (heap->InFromSpace(candidate)) { |
1146 v->VisitPointer(reinterpret_cast<Object**>(slot)); | 1252 v->VisitPointer(reinterpret_cast<Object**>(slot)); |
1147 } | 1253 } |
1148 candidate = GetNextCandidate(*slot); | 1254 candidate = GetNextCandidate(*slot); |
(...skipping 2799 matching lines...)
3948 | 4054 |
3949 if (enable) { | 4055 if (enable) { |
3950 if (code_flusher_ != NULL) return; | 4056 if (code_flusher_ != NULL) return; |
3951 code_flusher_ = new CodeFlusher(isolate()); | 4057 code_flusher_ = new CodeFlusher(isolate()); |
3952 } else { | 4058 } else { |
3953 if (code_flusher_ == NULL) return; | 4059 if (code_flusher_ == NULL) return; |
3954 code_flusher_->EvictAllCandidates(); | 4060 code_flusher_->EvictAllCandidates(); |
3955 delete code_flusher_; | 4061 delete code_flusher_; |
3956 code_flusher_ = NULL; | 4062 code_flusher_ = NULL; |
3957 } | 4063 } |
| 4064 |
| 4065 if (FLAG_trace_code_flushing) { |
| 4066 PrintF("[code-flushing is now %s]\n", enable ? "on" : "off"); |
| 4067 } |
3958 } | 4068 } |
3959 | 4069 |
3960 | 4070 |
3961 // TODO(1466) ReportDeleteIfNeeded is not called currently. | 4071 // TODO(1466) ReportDeleteIfNeeded is not called currently. |
3962 // Our profiling tools do not expect intersections between | 4072 // Our profiling tools do not expect intersections between |
3963 // code objects. We should either reenable it or change our tools. | 4073 // code objects. We should either reenable it or change our tools. |
3964 void MarkCompactCollector::ReportDeleteIfNeeded(HeapObject* obj, | 4074 void MarkCompactCollector::ReportDeleteIfNeeded(HeapObject* obj, |
3965 Isolate* isolate) { | 4075 Isolate* isolate) { |
3966 #ifdef ENABLE_GDB_JIT_INTERFACE | 4076 #ifdef ENABLE_GDB_JIT_INTERFACE |
3967 if (obj->IsCode()) { | 4077 if (obj->IsCode()) { |
(...skipping 165 matching lines...)
4133 while (buffer != NULL) { | 4243 while (buffer != NULL) { |
4134 SlotsBuffer* next_buffer = buffer->next(); | 4244 SlotsBuffer* next_buffer = buffer->next(); |
4135 DeallocateBuffer(buffer); | 4245 DeallocateBuffer(buffer); |
4136 buffer = next_buffer; | 4246 buffer = next_buffer; |
4137 } | 4247 } |
4138 *buffer_address = NULL; | 4248 *buffer_address = NULL; |
4139 } | 4249 } |
4140 | 4250 |
4141 | 4251 |
4142 } } // namespace v8::internal | 4252 } } // namespace v8::internal |