Chromium Code Reviews

Unified Diff: src/heap.cc

Issue 3066044: Generalize virtually dispatched scavenger to virtually dispatched specialized visitors. (Closed)
Patch Set: cleanup Created 10 years, 4 months ago
 // Copyright 2009 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 19 matching lines...)
 #include "accessors.h"
 #include "api.h"
 #include "bootstrapper.h"
 #include "codegen-inl.h"
 #include "compilation-cache.h"
 #include "debug.h"
 #include "heap-profiler.h"
 #include "global-handles.h"
 #include "mark-compact.h"
 #include "natives.h"
+#include "objects-visiting.h"
 #include "scanner.h"
 #include "scopeinfo.h"
 #include "snapshot.h"
 #include "v8threads.h"
 #if V8_TARGET_ARCH_ARM && !V8_INTERPRETED_REGEXP
 #include "regexp-macro-assembler.h"
 #include "arm/regexp-macro-assembler-arm.h"
 #endif


(...skipping 975 matching lines...)
       // String got promoted. Move it to the old string list.
       ExternalStringTable::AddOldString(target);
     }
   }

   ASSERT(last <= end);
   ExternalStringTable::ShrinkNewStrings(static_cast<int>(last - start));
 }


+class NewSpaceScavenger : public StaticNewSpaceVisitor<NewSpaceScavenger> {
+ public:
+  static inline void VisitPointer(Object** p) {
+    Object* object = *p;
+    if (!Heap::InNewSpace(object)) return;
+    Heap::ScavengeObject(reinterpret_cast<HeapObject**>(p),
+                         reinterpret_cast<HeapObject*>(object));
+  }
+};
+
+
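
The class added above is the core idiom of this CL: StaticNewSpaceVisitor is a template base that calls back into its derived class at compile time, so the per-slot callback can be inlined instead of dispatched through ObjectVisitor's vtable. A minimal standalone sketch of that pattern (names here are illustrative, not the real V8 declarations):

#include <cstdio>

// Static-visitor sketch: the base template resolves VisitPointer on the
// derived class at compile time, so nothing is virtual.
template <typename Derived>
struct StaticVisitorSketch {
  static void VisitPointers(int** start, int** end) {
    for (int** p = start; p < end; ++p) Derived::VisitPointer(p);
  }
};

struct CountingVisitor : StaticVisitorSketch<CountingVisitor> {
  static inline void VisitPointer(int** p) {
    if (*p != NULL) ++visited;  // stand-in for the Heap::InNewSpace check
  }
  static int visited;
};
int CountingVisitor::visited = 0;

int main() {
  int a = 1, b = 2;
  int* slots[] = {&a, NULL, &b};
  CountingVisitor::VisitPointers(slots, slots + 3);
  std::printf("visited %d non-null slots\n", CountingVisitor::visited);
  return 0;
}
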
 Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
                          Address new_space_front) {
   do {
     ASSERT(new_space_front <= new_space_.top());

     // The addresses new_space_front and new_space_.top() define a
     // queue of unprocessed copied objects. Process them until the
     // queue is empty.
     while (new_space_front < new_space_.top()) {
       HeapObject* object = HeapObject::FromAddress(new_space_front);
-      Map* map = object->map();
-      int size = object->SizeFromMap(map);
-      object->IterateBody(map->instance_type(), size, scavenge_visitor);
-      new_space_front += size;
+      new_space_front += NewSpaceScavenger::IterateBody(object->map(), object);
     }

     // Promote and process all the to-be-promoted objects.
     while (!promotion_queue.is_empty()) {
       HeapObject* target;
       int size;
       promotion_queue.remove(&target, &size);

       // Promoted object might be already partially visited
       // during dirty regions iteration. Thus we search specifically
       // for pointers to from semispace instead of looking for pointers
       // to new space.
       ASSERT(!target->IsMap());
       IterateAndMarkPointersToFromSpace(target->address(),
                                         target->address() + size,
                                         &ScavengePointer);
     }

     // Take another spin if there are now unswept objects in new space
     // (there are currently no more unswept promoted objects).
   } while (new_space_front < new_space_.top());

   return new_space_front;
 }


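
DoScavenge above is a Cheney-style scan: the region between new_space_front and new_space_.top() acts as an implicit work queue, because every object a visitor evacuates is allocated at top and thereby appended to the queue. A toy model of that two-pointer loop, using plain integers instead of heap objects:

#include <cstdio>
#include <vector>

int main() {
  std::vector<int> to_space;         // plays the role of the to-semispace
  to_space.push_back(10);            // roots already evacuated
  to_space.push_back(11);
  size_t front = 0;                  // plays the role of new_space_front
  while (front < to_space.size()) {  // size() plays new_space_.top()
    int object = to_space[front++];
    // Visiting an object may evacuate its children, growing the queue.
    if (object < 12) to_space.push_back(object + 2);
  }
  std::printf("processed %zu objects\n", to_space.size());
  return 0;
}
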
+class ScavengingVisitor : public StaticVisitorBase {
+ public:
+  static void Initialize() {
+    table_.Register(kVisitSeqAsciiString, &EvacuateSeqAsciiString);
+    table_.Register(kVisitSeqTwoByteString, &EvacuateSeqTwoByteString);
+    table_.Register(kVisitShortcutCandidate, &EvacuateShortcutCandidate);
+    table_.Register(kVisitByteArray, &EvacuateByteArray);
+    table_.Register(kVisitFixedArray, &EvacuateFixedArray);
+
+    typedef ObjectEvacuationStrategy<POINTER_OBJECT> PointerObject;
+
+    table_.Register(kVisitConsString,
+                    &ObjectEvacuationStrategy<POINTER_OBJECT>::
+                        VisitSpecialized<ConsString::kSize>);
+
+    table_.Register(kVisitSharedFunctionInfo,
+                    &ObjectEvacuationStrategy<POINTER_OBJECT>::
+                        VisitSpecialized<SharedFunctionInfo::kSize>);
+
+    table_.RegisterSpecializations<ObjectEvacuationStrategy<DATA_OBJECT>,
+                                   kVisitDataObject,
+                                   kVisitDataObjectGeneric>();
+
+    table_.RegisterSpecializations<ObjectEvacuationStrategy<POINTER_OBJECT>,
+                                   kVisitJSObject,
+                                   kVisitJSObjectGeneric>();
+
+    table_.RegisterSpecializations<ObjectEvacuationStrategy<POINTER_OBJECT>,
+                                   kVisitStruct,
+                                   kVisitStructGeneric>();
+  }
+
+
+  static inline void Scavenge(Map* map, HeapObject** slot, HeapObject* obj) {
+    table_.GetVisitor(map)(map, slot, obj);
+  }
+
+
+ private:
+  enum ObjectContents { DATA_OBJECT, POINTER_OBJECT };
+  enum SizeRestriction { SMALL, UNKNOWN_SIZE };
+
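
Initialize() above fills a flat table of function pointers keyed by visitor id, and Scavenge() becomes a single indexed call: table_.GetVisitor(map) reads the id cached on the map and returns the registered callback. A condensed sketch of that mechanism (ids and callbacks are hypothetical stand-ins):

#include <cstdio>

enum VisitorIdSketch { kVisitData, kVisitPointer, kVisitorIdCount };
struct ObjSketch { VisitorIdSketch id; };  // stands in for Map::visitor_id()

typedef void (*Callback)(ObjSketch* obj);
static Callback table[kVisitorIdCount];

static void EvacuateData(ObjSketch*)    { std::puts("copy raw bytes"); }
static void EvacuatePointer(ObjSketch*) { std::puts("copy and queue body"); }

static void Initialize() {
  table[kVisitData] = &EvacuateData;       // the table_.Register(...) step
  table[kVisitPointer] = &EvacuatePointer;
}

int main() {
  Initialize();
  ObjSketch obj = {kVisitPointer};
  table[obj.id](&obj);  // the table_.GetVisitor(map)(map, slot, obj) pattern
  return 0;
}
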
 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
   static void RecordCopiedObject(HeapObject* obj) {
     bool should_record = false;
 #ifdef DEBUG
     should_record = FLAG_heap_stats;
 #endif
 #ifdef ENABLE_LOGGING_AND_PROFILING
     should_record = should_record || FLAG_log_gc;
 #endif
     if (should_record) {
       if (Heap::new_space()->Contains(obj)) {
         Heap::new_space()->RecordAllocation(obj);
       } else {
         Heap::new_space()->RecordPromotion(obj);
       }
     }
   }
 #endif  // defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)

   // Helper function used by CopyObject to copy a source object to an
   // allocated target object and update the forwarding pointer in the source
   // object. Returns the target object.
-  inline static HeapObject* MigrateObject(HeapObject* source,
-                                          HeapObject* target,
-                                          int size) {
+  INLINE(static HeapObject* MigrateObject(HeapObject* source,
+                                          HeapObject* target,
+                                          int size)) {
     // Copy the content of source to target.
     Heap::CopyBlock(target->address(), source->address(), size);

     // Set the forwarding address.
     source->set_map_word(MapWord::FromForwardingAddress(target));

 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
     // Update NewSpace stats if necessary.
     RecordCopiedObject(target);
 #endif
     HEAP_PROFILE(ObjectMoveEvent(source->address(), target->address()));

     return target;
   }


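
MigrateObject encodes the usual copying-collector handshake: copy the payload, then overwrite the from-space copy's map word with a forwarding address, so a later visit of another slot pointing at the same object just follows the pointer instead of copying twice. A self-contained sketch of that protocol (FakeObject is illustrative, not a V8 type):

#include <cassert>
#include <cstring>

struct FakeObject {
  FakeObject* forward;  // stands in for the map word
  int payload;
};

FakeObject* Migrate(FakeObject* source, FakeObject* target) {
  std::memcpy(target, source, sizeof(FakeObject));  // copy the contents
  source->forward = target;                         // set forwarding address
  return target;
}

int main() {
  FakeObject from = {0, 42};
  FakeObject to = {0, 0};
  FakeObject* moved = Migrate(&from, &to);
  assert(from.forward == moved);  // later visitors follow this pointer
  assert(moved->payload == 42);
  return 0;
}
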
-enum ObjectContents { DATA_OBJECT, POINTER_OBJECT };
-enum SizeRestriction { SMALL, UNKNOWN_SIZE };
-
-
   template<ObjectContents object_contents, SizeRestriction size_restriction>
   static inline void EvacuateObject(Map* map,
                                     HeapObject** slot,
                                     HeapObject* object,
                                     int object_size) {
     ASSERT((size_restriction != SMALL) ||
            (object_size <= Page::kMaxHeapObjectSize));
     ASSERT(object->Size() == object_size);

     if (Heap::ShouldBePromoted(object->address(), object_size)) {
       Object* result;

       if ((size_restriction != SMALL) &&
           (object_size > Page::kMaxHeapObjectSize)) {
         result = Heap::lo_space()->AllocateRawFixedArray(object_size);
       } else {
         if (object_contents == DATA_OBJECT) {
           result = Heap::old_data_space()->AllocateRaw(object_size);
         } else {
           result = Heap::old_pointer_space()->AllocateRaw(object_size);
         }
       }

       if (!result->IsFailure()) {
         HeapObject* target = HeapObject::cast(result);
         *slot = MigrateObject(object, target, object_size);

         if (object_contents == POINTER_OBJECT) {
           promotion_queue.insert(target, object_size);
         }

         Heap::tracer()->increment_promoted_objects_size(object_size);
         return;
       }
     }
     Object* result = Heap::new_space()->AllocateRaw(object_size);
     ASSERT(!result->IsFailure());
     *slot = MigrateObject(object, HeapObject::cast(result), object_size);
     return;
   }


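
EvacuateObject takes its DATA_OBJECT/POINTER_OBJECT and SMALL/UNKNOWN_SIZE flags as template parameters rather than runtime arguments, so each instantiation is compiled with the branches on those flags folded away. A small demonstration of the idiom:

#include <cstdio>

enum Contents { DATA, POINTERS };

template <Contents contents>
void Evacuate(int size) {
  // `contents` is a compile-time constant here, so each instantiation
  // keeps exactly one of these branches.
  if (contents == DATA) {
    std::printf("data-space allocation, %d bytes\n", size);
  } else {
    std::printf("pointer-space allocation, %d bytes\n", size);
  }
}

int main() {
  Evacuate<DATA>(16);      // instantiation with only the first branch
  Evacuate<POINTERS>(32);  // instantiation with only the second
  return 0;
}
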
-template<int object_size_in_words, ObjectContents object_contents>
-static inline void EvacuateObjectOfFixedSize(Map* map,
-                                             HeapObject** slot,
-                                             HeapObject* object) {
-  const int object_size = object_size_in_words << kPointerSizeLog2;
-  EvacuateObject<object_contents, SMALL>(map, slot, object, object_size);
-}
-
-
-template<ObjectContents object_contents>
-static inline void EvacuateObjectOfFixedSize(Map* map,
-                                             HeapObject** slot,
-                                             HeapObject* object) {
-  int object_size = map->instance_size();
-  EvacuateObject<object_contents, SMALL>(map, slot, object, object_size);
-}
-
-
   static inline void EvacuateFixedArray(Map* map,
                                         HeapObject** slot,
                                         HeapObject* object) {
-  int object_size = FixedArray::cast(object)->FixedArraySize();
-  EvacuateObject<POINTER_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size);
+    int object_size = FixedArray::BodyDescriptor::SizeOf(map, object);
+    EvacuateObject<POINTER_OBJECT, UNKNOWN_SIZE>(map,
+                                                 slot,
+                                                 object,
+                                                 object_size);
   }


   static inline void EvacuateByteArray(Map* map,
                                        HeapObject** slot,
                                        HeapObject* object) {
-  int object_size = ByteArray::cast(object)->ByteArraySize();
+    int object_size = reinterpret_cast<ByteArray*>(object)->ByteArraySize();
     EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size);
   }


-static Scavenger GetScavengerForSize(int object_size,
-                                     ObjectContents object_contents) {
-  ASSERT(IsAligned(object_size, kPointerSize));
-  ASSERT(object_size < Page::kMaxHeapObjectSize);
-
-  switch (object_size >> kPointerSizeLog2) {
-#define CASE(n)                                             \
-    case n:                                                 \
-      if (object_contents == DATA_OBJECT) {                 \
-        return static_cast<Scavenger>(                      \
-            &EvacuateObjectOfFixedSize<n, DATA_OBJECT>);    \
-      } else {                                              \
-        return static_cast<Scavenger>(                      \
-            &EvacuateObjectOfFixedSize<n, POINTER_OBJECT>); \
-      }
-
-    CASE(1);
-    CASE(2);
-    CASE(3);
-    CASE(4);
-    CASE(5);
-    CASE(6);
-    CASE(7);
-    CASE(8);
-    CASE(9);
-    CASE(10);
-    CASE(11);
-    CASE(12);
-    CASE(13);
-    CASE(14);
-    CASE(15);
-    CASE(16);
-    default:
-      if (object_contents == DATA_OBJECT) {
-        return static_cast<Scavenger>(&EvacuateObjectOfFixedSize<DATA_OBJECT>);
-      } else {
-        return static_cast<Scavenger>(
-            &EvacuateObjectOfFixedSize<POINTER_OBJECT>);
-      }
-
-#undef CASE
-  }
-}
-
-
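
The deleted GetScavengerForSize hand-rolled this size dispatch with a CASE macro; in the new scheme the same effect comes from table_.RegisterSpecializations, which fills a run of visitor ids with size-specialized instantiations plus a generic fallback. A guess at the shape of that registration, using template recursion (simplified, hypothetical signatures, not the real VisitorDispatchTable API):

#include <cstdio>

typedef void (*Callback)(int size_in_words);
static Callback table[8];

template <int kWords>
static void VisitSpecialized(int) {
  std::printf("specialized %d-word copy\n", kWords);
}
static void VisitGeneric(int words) {
  std::printf("generic %d-word copy\n", words);
}

// Recursively register one specialized entry per size slot; slot 0
// keeps the generic fallback for sizes the table does not cover.
template <int kWords>
struct Registrar {
  static void Fill() {
    table[kWords] = &VisitSpecialized<kWords>;
    Registrar<kWords - 1>::Fill();
  }
};
template <>
struct Registrar<0> {
  static void Fill() { table[0] = &VisitGeneric; }
};

int main() {
  Registrar<7>::Fill();
  table[3](3);   // dispatches to VisitSpecialized<3>
  table[0](19);  // oversized objects take the generic path
  return 0;
}
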
   static inline void EvacuateSeqAsciiString(Map* map,
                                             HeapObject** slot,
                                             HeapObject* object) {
     int object_size = SeqAsciiString::cast(object)->
         SeqAsciiStringSize(map->instance_type());
     EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size);
   }


   static inline void EvacuateSeqTwoByteString(Map* map,
                                               HeapObject** slot,
                                               HeapObject* object) {
     int object_size = SeqTwoByteString::cast(object)->
         SeqTwoByteStringSize(map->instance_type());
     EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size);
   }


   static inline bool IsShortcutCandidate(int type) {
     return ((type & kShortcutTypeMask) == kShortcutTypeTag);
   }

   static inline void EvacuateShortcutCandidate(Map* map,
                                                HeapObject** slot,
                                                HeapObject* object) {
     ASSERT(IsShortcutCandidate(map->instance_type()));

     if (ConsString::cast(object)->unchecked_second() == Heap::empty_string()) {
       HeapObject* first =
           HeapObject::cast(ConsString::cast(object)->unchecked_first());

       *slot = first;

       if (!Heap::InNewSpace(first)) {
         object->set_map_word(MapWord::FromForwardingAddress(first));
         return;
       }

       MapWord first_word = first->map_word();
       if (first_word.IsForwardingAddress()) {
         HeapObject* target = first_word.ToForwardingAddress();

         *slot = target;
         object->set_map_word(MapWord::FromForwardingAddress(target));
         return;
       }

-      first->map()->Scavenge(slot, first);
+      Scavenge(first->map(), slot, first);
       object->set_map_word(MapWord::FromForwardingAddress(*slot));
       return;
     }

     int object_size = ConsString::kSize;
     EvacuateObject<POINTER_OBJECT, SMALL>(map, slot, object, object_size);
   }

+  template<ObjectContents object_contents>
+  class ObjectEvacuationStrategy {
+   public:
+    template<int object_size>
+    static inline void VisitSpecialized(Map* map,
+                                        HeapObject** slot,
+                                        HeapObject* object) {
+      EvacuateObject<object_contents, SMALL>(map, slot, object, object_size);
+    }
+
+    static inline void Visit(Map* map,
+                             HeapObject** slot,
+                             HeapObject* object) {
+      int object_size = map->instance_size();
+      EvacuateObject<object_contents, SMALL>(map, slot, object, object_size);
+    }
+  };
+
+  typedef void (*Callback)(Map* map, HeapObject** slot, HeapObject* object);
+
+  static VisitorDispatchTable<Callback> table_;
+};
+
+
+VisitorDispatchTable<ScavengingVisitor::Callback> ScavengingVisitor::table_;

-Scavenger Heap::GetScavenger(int instance_type, int instance_size) {
-  if (instance_type < FIRST_NONSTRING_TYPE) {
-    switch (instance_type & kStringRepresentationMask) {
-      case kSeqStringTag:
-        if ((instance_type & kStringEncodingMask) == kAsciiStringTag) {
-          return &EvacuateSeqAsciiString;
-        } else {
-          return &EvacuateSeqTwoByteString;
-        }
-
-      case kConsStringTag:
-        if (IsShortcutCandidate(instance_type)) {
-          return &EvacuateShortcutCandidate;
-        } else {
-          ASSERT(instance_size == ConsString::kSize);
-          return GetScavengerForSize(ConsString::kSize, POINTER_OBJECT);
-        }
-
-      case kExternalStringTag:
-        ASSERT(instance_size == ExternalString::kSize);
-        return GetScavengerForSize(ExternalString::kSize, DATA_OBJECT);
-    }
-    UNREACHABLE();
-  }
-
-  switch (instance_type) {
-    case BYTE_ARRAY_TYPE:
-      return reinterpret_cast<Scavenger>(&EvacuateByteArray);
-
-    case FIXED_ARRAY_TYPE:
-      return reinterpret_cast<Scavenger>(&EvacuateFixedArray);
-
-    case JS_OBJECT_TYPE:
-    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
-    case JS_VALUE_TYPE:
-    case JS_ARRAY_TYPE:
-    case JS_REGEXP_TYPE:
-    case JS_FUNCTION_TYPE:
-    case JS_GLOBAL_PROXY_TYPE:
-    case JS_GLOBAL_OBJECT_TYPE:
-    case JS_BUILTINS_OBJECT_TYPE:
-      return GetScavengerForSize(instance_size, POINTER_OBJECT);
-
-    case ODDBALL_TYPE:
-      return NULL;
-
-    case PROXY_TYPE:
-      return GetScavengerForSize(Proxy::kSize, DATA_OBJECT);
-
-    case MAP_TYPE:
-      return NULL;
-
-    case CODE_TYPE:
-      return NULL;
-
-    case JS_GLOBAL_PROPERTY_CELL_TYPE:
-      return NULL;
-
-    case HEAP_NUMBER_TYPE:
-    case FILLER_TYPE:
-    case PIXEL_ARRAY_TYPE:
-    case EXTERNAL_BYTE_ARRAY_TYPE:
-    case EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE:
-    case EXTERNAL_SHORT_ARRAY_TYPE:
-    case EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE:
-    case EXTERNAL_INT_ARRAY_TYPE:
-    case EXTERNAL_UNSIGNED_INT_ARRAY_TYPE:
-    case EXTERNAL_FLOAT_ARRAY_TYPE:
-      return GetScavengerForSize(instance_size, DATA_OBJECT);
-
-    case SHARED_FUNCTION_INFO_TYPE:
-      return GetScavengerForSize(SharedFunctionInfo::kAlignedSize,
-                                 POINTER_OBJECT);
-
-#define MAKE_STRUCT_CASE(NAME, Name, name) \
-    case NAME##_TYPE:
-      STRUCT_LIST(MAKE_STRUCT_CASE)
-#undef MAKE_STRUCT_CASE
-      return GetScavengerForSize(instance_size, POINTER_OBJECT);
-    default:
-      UNREACHABLE();
-      return NULL;
-  }
-}
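
The payoff of deleting Heap::GetScavenger is visible here: a map no longer stores a scavenger-specific function pointer. Instead it stores a small visitor id (via set_visitor_id below), and each GC phase keeps its own table indexed by that id, so the scavenger and the mark-compact collector can share one classification. A sketch of the idea (enum values, names, and messages are illustrative only):

#include <cstdio>

enum IdSketch { kDataObj, kPointerObj, kIdCount };
typedef void (*Callback)();

static void ScavengeDataObj() { std::puts("scavenge: copy bytes"); }
static void ScavengePtrObj()  { std::puts("scavenge: copy + queue body"); }
static void MarkDataObj()     { std::puts("mark: nothing to trace"); }
static void MarkPtrObj()      { std::puts("mark: trace body"); }

// One classification, two phase-specific tables.
static Callback scavenge_table[kIdCount] = { &ScavengeDataObj, &ScavengePtrObj };
static Callback mark_table[kIdCount]     = { &MarkDataObj, &MarkPtrObj };

int main() {
  IdSketch id = kPointerObj;  // as cached on a map by set_visitor_id()
  scavenge_table[id]();       // used during scavenges...
  mark_table[id]();           // ...and reusable by mark-compact
  return 0;
}
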


 void Heap::ScavengeObjectSlow(HeapObject** p, HeapObject* object) {
   ASSERT(InFromSpace(object));
   MapWord first_word = object->map_word();
   ASSERT(!first_word.IsForwardingAddress());
   Map* map = first_word.ToMap();
-  map->Scavenge(p, object);
+  ScavengingVisitor::Scavenge(map, p, object);
 }


 void Heap::ScavengePointer(HeapObject** p) {
   ScavengeObject(p, *p);
 }


 Object* Heap::AllocatePartialMap(InstanceType instance_type,
                                  int instance_size) {
   Object* result = AllocateRawMap();
   if (result->IsFailure()) return result;

   // Map::cast cannot be used due to uninitialized map field.
   reinterpret_cast<Map*>(result)->set_map(raw_unchecked_meta_map());
   reinterpret_cast<Map*>(result)->set_instance_type(instance_type);
   reinterpret_cast<Map*>(result)->set_instance_size(instance_size);
-  reinterpret_cast<Map*>(result)->
-      set_scavenger(GetScavenger(instance_type, instance_size));
+  reinterpret_cast<Map*>(result)->
+      set_visitor_id(
+          StaticVisitorBase::GetVisitorId(instance_type, instance_size));
   reinterpret_cast<Map*>(result)->set_inobject_properties(0);
   reinterpret_cast<Map*>(result)->set_pre_allocated_property_fields(0);
   reinterpret_cast<Map*>(result)->set_unused_property_fields(0);
   reinterpret_cast<Map*>(result)->set_bit_field(0);
   reinterpret_cast<Map*>(result)->set_bit_field2(0);
   return result;
 }


 Object* Heap::AllocateMap(InstanceType instance_type, int instance_size) {
   Object* result = AllocateRawMap();
   if (result->IsFailure()) return result;

   Map* map = reinterpret_cast<Map*>(result);
   map->set_map(meta_map());
   map->set_instance_type(instance_type);
-  map->set_scavenger(GetScavenger(instance_type, instance_size));
+  map->set_visitor_id(
+      StaticVisitorBase::GetVisitorId(instance_type, instance_size));
   map->set_prototype(null_value());
   map->set_constructor(null_value());
   map->set_instance_size(instance_size);
   map->set_inobject_properties(0);
   map->set_pre_allocated_property_fields(0);
   map->set_instance_descriptors(empty_descriptor_array());
   map->set_code_cache(empty_fixed_array());
   map->set_unused_property_fields(0);
   map->set_bit_field(0);
   map->set_bit_field2((1 << Map::kIsExtensible) | (1 << Map::kHasFastElements));
(...skipping 2729 matching lines...)
   // call Heap::TearDown() to release allocated memory.
   //
   // If the heap is not yet configured (e.g., through the API), configure it.
   // Configuration is based on the flags new-space-size (really the semispace
   // size) and old-space-size if set, or the initial values of semispace_size_
   // and old_generation_size_ otherwise.
   if (!heap_configured) {
     if (!ConfigureHeapDefault()) return false;
   }

+  ScavengingVisitor::Initialize();
+  NewSpaceScavenger::Initialize();
+  MarkCompactCollector::Initialize();
+
   // Setup memory allocator and reserve a chunk of memory for new
   // space. The chunk is double the size of the requested reserved
   // new space size to ensure that we can find a pair of semispaces that
   // are contiguous and aligned to their size.
   if (!MemoryAllocator::Setup(MaxReserved())) return false;
   void* chunk =
       MemoryAllocator::ReserveInitialChunk(4 * reserved_semispace_size_);
   if (chunk == NULL) return false;

   // Align the pair of semispaces to their size, which must be a power
(...skipping 664 matching lines...)
       PrintF(MarkCompactCollector::HasCompacted() ? "mc" : "ms");
       break;
     default:
       UNREACHABLE();
   }
   PrintF(" ");

   PrintF("external=%d ", static_cast<int>(scopes_[Scope::EXTERNAL]));
   PrintF("mark=%d ", static_cast<int>(scopes_[Scope::MC_MARK]));
   PrintF("sweep=%d ", static_cast<int>(scopes_[Scope::MC_SWEEP]));
+  PrintF("sweepns=%d ", static_cast<int>(scopes_[Scope::MC_SWEEP_NEWSPACE]));
   PrintF("compact=%d ", static_cast<int>(scopes_[Scope::MC_COMPACT]));
   PrintF("flushcode=%d ", static_cast<int>(scopes_[Scope::MC_FLUSH_CODE]));

   PrintF("total_size_before=%d ", start_size_);
   PrintF("total_size_after=%d ", Heap::SizeOfObjects());
   PrintF("holes_size_before=%d ", in_free_list_or_wasted_before_gc_);
   PrintF("holes_size_after=%d ", CountTotalHolesSize());

   PrintF("allocated=%d ", allocated_since_last_gc_);
   PrintF("promoted=%d ", promoted_objects_size_);
(...skipping 131 matching lines...)
 void ExternalStringTable::TearDown() {
   new_space_strings_.Free();
   old_space_strings_.Free();
 }


 List<Object*> ExternalStringTable::new_space_strings_;
 List<Object*> ExternalStringTable::old_space_strings_;

 } }  // namespace v8::internal