Chromium Code Reviews

Unified Diff: src/heap.cc

Issue 3301008: [Isolates] Add heap pointer to all maps and use map->heap() more. (Closed)
Patch Set: "even more", created 10 years, 3 months ago
 // Copyright 2009 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 70 matching lines...)
       code_range_size_(0),
 #endif
       // Variables set based on semispace_size_ and old_generation_size_ in
       // ConfigureHeap (survived_since_last_expansion_, external_allocation_limit_)
       // Will be 4 * reserved_semispace_size_ to ensure that young
       // generation can be aligned to its size.
       survived_since_last_expansion_(0),
       always_allocate_scope_depth_(0),
       linear_allocation_scope_depth_(0),
       contexts_disposed_(0),
+      new_space_(this),
       old_pointer_space_(NULL),
       old_data_space_(NULL),
       code_space_(NULL),
       map_space_(NULL),
       cell_space_(NULL),
       lo_space_(NULL),
       gc_state_(NOT_IN_GC),
       mc_count_(0),
       ms_count_(0),
       gc_count_(0),
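
The new_space_(this) initializer above is the first instance of the pattern this patch applies throughout: each space is told at construction time which Heap owns it, so code that only has a space (or, via map->heap(), a map) can find the right heap without consulting a global. A minimal sketch of the idea; the field and accessor names here are assumptions, the real declarations live in src/spaces.h:

    // Sketch only: a space that remembers its owning heap.
    class NewSpace {
     public:
      explicit NewSpace(Heap* heap) : heap_(heap) {}
      Heap* heap() const { return heap_; }  // back-pointer for isolate-aware code
     private:
      Heap* heap_;  // set once at construction
    };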
(...skipping 718 matching lines...)
 
   void VisitPointers(Object** start, Object** end) {
     // Copy all HeapObject pointers in [start, end)
     for (Object** p = start; p < end; p++) ScavengePointer(p);
   }
 
  private:
   void ScavengePointer(Object** p) {
     Object* object = *p;
     if (!HEAP->InNewSpace(object)) return;
-    HEAP->ScavengeObject(reinterpret_cast<HeapObject**>(p),
+    Heap::ScavengeObject(reinterpret_cast<HeapObject**>(p),
                          reinterpret_cast<HeapObject*>(object));
   }
 };
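
Both this visitor and NewSpaceScavenger below now call ScavengeObject as Heap::ScavengeObject rather than HEAP->ScavengeObject, which suggests it is a static member at this point in the series, while the InNewSpace guard still goes through an instance. HEAP itself is presumably a convenience macro from the isolates branch, along these lines (an assumption, shown only to make the diff readable; the real definition is not in this file):

    // Assumed shape of the macro:
    #define HEAP (v8::internal::Isolate::Current()->heap())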
 
 
 #ifdef DEBUG
 // Visitor class to verify pointers in code or data space do not point into
 // new space.
 class VerifyNonPointerSpacePointersVisitor: public ObjectVisitor {
  public:
(...skipping 93 matching lines...)
   promotion_queue_.Initialize(new_space_.ToSpaceHigh());
 
   is_safe_to_read_maps_ = false;
   ScavengeVisitor scavenge_visitor;
   // Copy roots.
   IterateRoots(&scavenge_visitor, VISIT_ALL_IN_SCAVENGE);
 
   // Copy objects reachable from the old generation. By definition,
   // there are no intergenerational pointers in code or data spaces.
   IterateDirtyRegions(old_pointer_space_,
-                      &IteratePointersInDirtyRegion,
+                      &Heap::IteratePointersInDirtyRegion,
                       &ScavengePointer,
                       WATERMARK_CAN_BE_INVALID);
 
   IterateDirtyRegions(map_space_,
                       &IteratePointersInDirtyMapsRegion,
                       &ScavengePointer,
                       WATERMARK_CAN_BE_INVALID);
 
   lo_space_->IterateDirtyRegions(&ScavengePointer);
 
(...skipping 76 matching lines...)
   ASSERT(last <= end);
   external_string_table_.ShrinkNewStrings(static_cast<int>(last - start));
 }
 
 
 class NewSpaceScavenger : public StaticNewSpaceVisitor<NewSpaceScavenger> {
  public:
   static inline void VisitPointer(Object** p) {
     Object* object = *p;
     if (!HEAP->InNewSpace(object)) return;
-    HEAP->ScavengeObject(reinterpret_cast<HeapObject**>(p),
+    Heap::ScavengeObject(reinterpret_cast<HeapObject**>(p),
                          reinterpret_cast<HeapObject*>(object));
   }
 };
 
 
 Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
                          Address new_space_front) {
   do {
     ASSERT(new_space_front <= new_space_.top());
 
(...skipping 69 matching lines...)
   static inline void Scavenge(Map* map, HeapObject** slot, HeapObject* obj) {
     table_.GetVisitor(map)(map, slot, obj);
   }
 
 
  private:
   enum ObjectContents { DATA_OBJECT, POINTER_OBJECT };
   enum SizeRestriction { SMALL, UNKNOWN_SIZE };
 
 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
-  static void RecordCopiedObject(HeapObject* obj) {
+  static void RecordCopiedObject(Heap* heap, HeapObject* obj) {
     bool should_record = false;
 #ifdef DEBUG
     should_record = FLAG_heap_stats;
 #endif
 #ifdef ENABLE_LOGGING_AND_PROFILING
     should_record = should_record || FLAG_log_gc;
 #endif
     if (should_record) {
-      if (HEAP->new_space()->Contains(obj)) {
-        HEAP->new_space()->RecordAllocation(obj);
+      if (heap->new_space()->Contains(obj)) {
+        heap->new_space()->RecordAllocation(obj);
       } else {
-        HEAP->new_space()->RecordPromotion(obj);
+        heap->new_space()->RecordPromotion(obj);
       }
     }
   }
 #endif  // defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
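
RecordCopiedObject now receives the heap explicitly instead of reaching for the HEAP macro. If Isolate::Current() is a thread-local lookup, as the isolates design implies, resolving the heap once at the boundary and threading it through is the cheaper shape for hot GC paths. An illustrative comparison (DoSomething is a made-up stand-in, not a V8 API):

    // TLS lookup on every iteration:
    void HotLoopBefore(Object** start, Object** end) {
      for (Object** p = start; p < end; p++) HEAP->DoSomething(*p);
    }

    // Heap resolved once by the caller and passed down:
    void HotLoopAfter(Heap* heap, Object** start, Object** end) {
      for (Object** p = start; p < end; p++) heap->DoSomething(*p);
    }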
 
   // Helper function used by CopyObject to copy a source object to an
   // allocated target object and update the forwarding pointer in the source
   // object. Returns the target object.
-  INLINE(static HeapObject* MigrateObject(HeapObject* source,
+  INLINE(static HeapObject* MigrateObject(Heap* heap,
+                                          HeapObject* source,
                                           HeapObject* target,
                                           int size)) {
     // Copy the content of source to target.
-    HEAP->CopyBlock(target->address(), source->address(), size);
+    heap->CopyBlock(target->address(), source->address(), size);
 
     // Set the forwarding address.
     source->set_map_word(MapWord::FromForwardingAddress(target));
 
 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
     // Update NewSpace stats if necessary.
-    RecordCopiedObject(target);
+    RecordCopiedObject(heap, target);
 #endif
-    HEAP_PROFILE(ObjectMoveEvent(source->address(), target->address()));
+    HEAP_PROFILE(heap, ObjectMoveEvent(source->address(), target->address()));
 
     return target;
   }
 
 
   template<ObjectContents object_contents, SizeRestriction size_restriction>
   static inline void EvacuateObject(Map* map,
                                     HeapObject** slot,
                                     HeapObject* object,
                                     int object_size) {
     ASSERT((size_restriction != SMALL) ||
            (object_size <= Page::kMaxHeapObjectSize));
     ASSERT(object->Size() == object_size);
 
-    if (HEAP->ShouldBePromoted(object->address(), object_size)) {
+    if (map->heap()->ShouldBePromoted(object->address(), object_size)) {
Vitaly Repeshko 2010/09/10 12:35:25:
  We clearly need "heap" as a local variable here.
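
A sketch of the refactor the reviewer is asking for, hoisting the repeated map->heap() lookups into a local (illustrative only; elided parts marked "..."):

    static inline void EvacuateObject(Map* map, HeapObject** slot,
                                      HeapObject* object, int object_size) {
      Heap* heap = map->heap();  // looked up once, reused below
      if (heap->ShouldBePromoted(object->address(), object_size)) {
        ...
      }
      Object* result = heap->new_space()->AllocateRaw(object_size);
      ...
    }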
       Object* result;
 
       if ((size_restriction != SMALL) &&
           (object_size > Page::kMaxHeapObjectSize)) {
-        result = HEAP->lo_space()->AllocateRawFixedArray(object_size);
+        result = map->heap()->lo_space()->AllocateRawFixedArray(object_size);
       } else {
         if (object_contents == DATA_OBJECT) {
-          result = HEAP->old_data_space()->AllocateRaw(object_size);
+          result = map->heap()->old_data_space()->AllocateRaw(object_size);
         } else {
-          result = HEAP->old_pointer_space()->AllocateRaw(object_size);
+          result = map->heap()->old_pointer_space()->AllocateRaw(object_size);
         }
       }
 
       if (!result->IsFailure()) {
         HeapObject* target = HeapObject::cast(result);
-        *slot = MigrateObject(object, target, object_size);
+        *slot = MigrateObject(map->heap(), object, target, object_size);
 
         if (object_contents == POINTER_OBJECT) {
-          HEAP->promotion_queue()->insert(target, object_size);
+          map->heap()->promotion_queue()->insert(target, object_size);
         }
 
-        HEAP->tracer()->increment_promoted_objects_size(object_size);
+        map->heap()->tracer()->increment_promoted_objects_size(object_size);
         return;
       }
     }
-    Object* result = HEAP->new_space()->AllocateRaw(object_size);
+    Object* result = map->heap()->new_space()->AllocateRaw(object_size);
     ASSERT(!result->IsFailure());
-    *slot = MigrateObject(object, HeapObject::cast(result), object_size);
+    *slot = MigrateObject(map->heap(), object, HeapObject::cast(result), object_size);
Vitaly Repeshko 2010/09/10 12:35:25:
  nit: Long line.
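
One way to resolve the nit while keeping the call intact, wrapped under 80 columns (sketch):

    *slot = MigrateObject(map->heap(), object,
                          HeapObject::cast(result), object_size);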
     return;
   }
 
 
   static inline void EvacuateFixedArray(Map* map,
                                         HeapObject** slot,
                                         HeapObject* object) {
     int object_size = FixedArray::BodyDescriptor::SizeOf(map, object);
     EvacuateObject<POINTER_OBJECT, UNKNOWN_SIZE>(map,
                                                  slot,
(...skipping 30 matching lines...)
 
   static inline bool IsShortcutCandidate(int type) {
     return ((type & kShortcutTypeMask) == kShortcutTypeTag);
   }
 
   static inline void EvacuateShortcutCandidate(Map* map,
                                                HeapObject** slot,
                                                HeapObject* object) {
     ASSERT(IsShortcutCandidate(map->instance_type()));
 
-    if (ConsString::cast(object)->unchecked_second() == HEAP->empty_string()) {
+    if (ConsString::cast(object)->unchecked_second() ==
+        map->heap()->empty_string()) {
       HeapObject* first =
           HeapObject::cast(ConsString::cast(object)->unchecked_first());
 
       *slot = first;
 
-      if (!HEAP->InNewSpace(first)) {
+      if (!map->heap()->InNewSpace(first)) {
         object->set_map_word(MapWord::FromForwardingAddress(first));
         return;
       }
 
       MapWord first_word = first->map_word();
       if (first_word.IsForwardingAddress()) {
         HeapObject* target = first_word.ToForwardingAddress();
 
         *slot = target;
         object->set_map_word(MapWord::FromForwardingAddress(target));
(...skipping 30 matching lines...)
   typedef void (*Callback)(Map* map, HeapObject** slot, HeapObject* object);
 
   static VisitorDispatchTable<Callback> table_;
 };
 
 
 VisitorDispatchTable<ScavengingVisitor::Callback> ScavengingVisitor::table_;
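
For context, ScavengingVisitor::Scavenge dispatches through this table using an id cached on each map, rather than switching on instance type per object. A toy standalone version of the table idea (all names invented for illustration, not V8's actual declarations):

    #include <cstddef>

    // Callbacks indexed by a small per-map visitor id: dispatch is one
    // array load plus an indirect call.
    template <typename Callback, size_t kSize>
    class DispatchTable {
     public:
      void Register(size_t visitor_id, Callback cb) { table_[visitor_id] = cb; }
      Callback GetVisitor(size_t visitor_id) const { return table_[visitor_id]; }
     private:
      Callback table_[kSize];
    };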
 
 
 void Heap::ScavengeObjectSlow(HeapObject** p, HeapObject* object) {
-  ASSERT(InFromSpace(object));
+  ASSERT(HEAP->InFromSpace(object));
   MapWord first_word = object->map_word();
   ASSERT(!first_word.IsForwardingAddress());
   Map* map = first_word.ToMap();
   ScavengingVisitor::Scavenge(map, p, object);
 }
 
 
-void Heap::ScavengePointer(HeapObject** p) {
-  HEAP->ScavengeObject(p, *p);
-}
-
-
 Object* Heap::AllocatePartialMap(InstanceType instance_type,
                                  int instance_size) {
   Object* result = AllocateRawMap();
   if (result->IsFailure()) return result;
 
   // Map::cast cannot be used due to uninitialized map field.
   reinterpret_cast<Map*>(result)->set_map(raw_unchecked_meta_map());
   reinterpret_cast<Map*>(result)->set_heap(this);
   reinterpret_cast<Map*>(result)->set_instance_type(instance_type);
   reinterpret_cast<Map*>(result)->set_instance_size(instance_size);
   reinterpret_cast<Map*>(result)->set_visitor_id(
       StaticVisitorBase::GetVisitorId(instance_type, instance_size));
   reinterpret_cast<Map*>(result)->set_inobject_properties(0);
   reinterpret_cast<Map*>(result)->set_pre_allocated_property_fields(0);
   reinterpret_cast<Map*>(result)->set_unused_property_fields(0);
   reinterpret_cast<Map*>(result)->set_bit_field(0);
   reinterpret_cast<Map*>(result)->set_bit_field2(0);
   return result;
 }
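
The reinterpret_cast chain above exists because, during bootstrapping, the object's map field is not yet in a state a checked cast can verify. Map::cast presumably asserts on the receiver, roughly like this (a sketch of an assumption; the real checked cast lives elsewhere in the codebase):

    // Hypothetical shape of the checked cast this code must avoid:
    static Map* cast(Object* object) {
      ASSERT(object->IsMap());  // would read the not-yet-initialized map field
      return reinterpret_cast<Map*>(object);
    }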
 
 
 Object* Heap::AllocateMap(InstanceType instance_type, int instance_size) {
   Object* result = AllocateRawMap();
   if (result->IsFailure()) return result;
 
   Map* map = reinterpret_cast<Map*>(result);
   map->set_map(meta_map());
   map->set_heap(this);
   map->set_instance_type(instance_type);
-  reinterpret_cast<Map*>(result)->set_heap(this);
   map->set_visitor_id(
       StaticVisitorBase::GetVisitorId(instance_type, instance_size));
   map->set_prototype(null_value());
   map->set_constructor(null_value());
   map->set_instance_size(instance_size);
   map->set_inobject_properties(0);
   map->set_pre_allocated_property_fields(0);
   map->set_instance_descriptors(empty_descriptor_array());
   map->set_code_cache(empty_fixed_array());
   map->set_unused_property_fields(0);
(...skipping 2246 matching lines...)
   ASSERT(reinterpret_cast<Object*>(kFromSpaceZapValue)->IsHeapObject());
   for (Address a = new_space_.FromSpaceLow();
        a < new_space_.FromSpaceHigh();
        a += kPointerSize) {
     Memory::Address_at(a) = kFromSpaceZapValue;
   }
 }
 #endif  // DEBUG
 
 
-bool Heap::IteratePointersInDirtyRegion(Address start,
+bool Heap::IteratePointersInDirtyRegion(Heap* heap,
+                                        Address start,
                                         Address end,
                                         ObjectSlotCallback copy_object_func) {
   Address slot_address = start;
   bool pointers_to_new_space_found = false;
 
   while (slot_address < end) {
     Object** slot = reinterpret_cast<Object**>(slot_address);
-    if (HEAP->InNewSpace(*slot)) {
+    if (heap->InNewSpace(*slot)) {
       ASSERT((*slot)->IsHeapObject());
       copy_object_func(reinterpret_cast<HeapObject**>(slot));
-      if (HEAP->InNewSpace(*slot)) {
+      if (heap->InNewSpace(*slot)) {
         ASSERT((*slot)->IsHeapObject());
         pointers_to_new_space_found = true;
       }
     }
     slot_address += kPointerSize;
   }
   return pointers_to_new_space_found;
 }
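
Since this function is installed as a dirty-region callback (see the IterateDirtyRegions calls elsewhere in this file), the added Heap* parameter implies the callback typedef in heap.h gained the same leading argument. Roughly, as an assumption about the header not shown here:

    // Assumed shape of the callback type after this patch:
    typedef bool (*DirtyRegionCallback)(Heap* heap,
                                        Address start,
                                        Address end,
                                        ObjectSlotCallback copy_object_func);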
(...skipping 20 matching lines...)
   Address map_address = start;
   bool pointers_to_new_space_found = false;
 
   while (map_address < end) {
     ASSERT(!HEAP->InNewSpace(Memory::Object_at(map_address)));
     ASSERT(Memory::Object_at(map_address)->IsMap());
 
     Address pointer_fields_start = map_address + Map::kPointerFieldsBeginOffset;
     Address pointer_fields_end = map_address + Map::kPointerFieldsEndOffset;
 
-    if (HEAP->IteratePointersInDirtyRegion(pointer_fields_start,
+    if (Heap::IteratePointersInDirtyRegion(HEAP,
+                                           pointer_fields_start,
                                            pointer_fields_end,
                                            copy_object_func)) {

Vitaly Repeshko 2010/09/10 12:35:25:
  Heap lookup can be moved out of the while loop.
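
A sketch of the hoist the reviewer suggests: resolve the heap once before the loop and reuse it (illustrative only; elided parts marked "..."):

    static bool IteratePointersInDirtyMaps(Address start,
                                           Address end,
                                           ObjectSlotCallback copy_object_func) {
      Heap* heap = HEAP;  // single isolate lookup, outside the loop
      Address map_address = start;
      bool pointers_to_new_space_found = false;
      while (map_address < end) {
        ...
        if (Heap::IteratePointersInDirtyRegion(heap,
                                               pointer_fields_start,
                                               pointer_fields_end,
                                               copy_object_func)) {
          pointers_to_new_space_found = true;
        }
        map_address += Map::kSize;
      }
      return pointers_to_new_space_found;
    }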
       pointers_to_new_space_found = true;
     }
 
     map_address += Map::kSize;
   }
 
   return pointers_to_new_space_found;
 }
 
 
 bool Heap::IteratePointersInDirtyMapsRegion(
+    Heap* heap,
     Address start,
     Address end,
     ObjectSlotCallback copy_object_func) {
   Address map_aligned_start = MapStartAlign(start);
   Address map_aligned_end = MapEndAlign(end);
 
   bool contains_pointers_to_new_space = false;
 
   if (map_aligned_start != start) {
     Address prev_map = map_aligned_start - Map::kSize;
     ASSERT(Memory::Object_at(prev_map)->IsMap());
 
     Address pointer_fields_start =
         Max(start, prev_map + Map::kPointerFieldsBeginOffset);
 
     Address pointer_fields_end =
         Min(prev_map + Map::kPointerFieldsEndOffset, end);
 
     contains_pointers_to_new_space =
-        IteratePointersInDirtyRegion(pointer_fields_start,
+        IteratePointersInDirtyRegion(heap,
+                                     pointer_fields_start,
                                      pointer_fields_end,
                                      copy_object_func)
         || contains_pointers_to_new_space;
   }
 
   contains_pointers_to_new_space =
       IteratePointersInDirtyMaps(map_aligned_start,
                                  map_aligned_end,
                                  copy_object_func)
       || contains_pointers_to_new_space;
 
   if (map_aligned_end != end) {
     ASSERT(Memory::Object_at(map_aligned_end)->IsMap());
 
     Address pointer_fields_start =
         map_aligned_end + Map::kPointerFieldsBeginOffset;
 
     Address pointer_fields_end =
         Min(end, map_aligned_end + Map::kPointerFieldsEndOffset);
 
     contains_pointers_to_new_space =
-        IteratePointersInDirtyRegion(pointer_fields_start,
+        IteratePointersInDirtyRegion(heap,
+                                     pointer_fields_start,
                                      pointer_fields_end,
                                      copy_object_func)
         || contains_pointers_to_new_space;
   }
 
   return contains_pointers_to_new_space;
 }
 
 
 void Heap::IterateAndMarkPointersToFromSpace(Address start,
(...skipping 41 matching lines...)
   // area_start by Page::kRegionSize.
   Address second_region =
       reinterpret_cast<Address>(
           reinterpret_cast<intptr_t>(area_start + Page::kRegionSize) &
           ~Page::kRegionAlignmentMask);
 
   // Next region might be beyond area_end.
   Address region_end = Min(second_region, area_end);
 
   if (marks & mask) {
-    if (visit_dirty_region(region_start, region_end, copy_object_func)) {
+    if (visit_dirty_region(this, region_start, region_end, copy_object_func)) {
       newmarks |= mask;
     }
   }
   mask <<= 1;
 
   // Iterate subsequent regions which fully lay inside [area_start, area_end[.
   region_start = region_end;
   region_end = region_start + Page::kRegionSize;
 
   while (region_end <= area_end) {
     if (marks & mask) {
-      if (visit_dirty_region(region_start, region_end, copy_object_func)) {
+      if (visit_dirty_region(this,
+                             region_start,
+                             region_end,
+                             copy_object_func)) {
         newmarks |= mask;
       }
     }
 
     region_start = region_end;
     region_end = region_start + Page::kRegionSize;
 
     mask <<= 1;
   }
 
   if (region_start != area_end) {
     // A small piece of area left uniterated because area_end does not coincide
     // with region end. Check whether region covering last part of area is
     // dirty.
     if (marks & mask) {
-      if (visit_dirty_region(region_start, area_end, copy_object_func)) {
+      if (visit_dirty_region(this, region_start, area_end, copy_object_func)) {
         newmarks |= mask;
       }
     }
   }
 
   return newmarks;
 }
(...skipping 429 matching lines...)
   // Align the pair of semispaces to their size, which must be a power
   // of 2.
   Address new_space_start =
       RoundUp(reinterpret_cast<byte*>(chunk), 2 * reserved_semispace_size_);
   if (!new_space_.Setup(new_space_start, 2 * reserved_semispace_size_)) {
     return false;
   }
 
   // Initialize old pointer space.
   old_pointer_space_ =
-      new OldSpace(max_old_generation_size_, OLD_POINTER_SPACE, NOT_EXECUTABLE);
+      new OldSpace(this,
+                   max_old_generation_size_,
+                   OLD_POINTER_SPACE,
+                   NOT_EXECUTABLE);
   if (old_pointer_space_ == NULL) return false;
   if (!old_pointer_space_->Setup(NULL, 0)) return false;
 
   // Initialize old data space.
   old_data_space_ =
-      new OldSpace(max_old_generation_size_, OLD_DATA_SPACE, NOT_EXECUTABLE);
+      new OldSpace(this,
+                   max_old_generation_size_,
+                   OLD_DATA_SPACE,
+                   NOT_EXECUTABLE);
   if (old_data_space_ == NULL) return false;
   if (!old_data_space_->Setup(NULL, 0)) return false;
 
   // Initialize the code space, set its maximum capacity to the old
   // generation size. It needs executable memory.
   // On 64-bit platform(s), we put all code objects in a 2 GB range of
   // virtual address space, so that they can call each other with near calls.
   if (code_range_size_ > 0) {
     if (!isolate_->code_range()->Setup(code_range_size_)) {
       return false;
     }
   }
 
   code_space_ =
-      new OldSpace(max_old_generation_size_, CODE_SPACE, EXECUTABLE);
+      new OldSpace(this, max_old_generation_size_, CODE_SPACE, EXECUTABLE);
   if (code_space_ == NULL) return false;
   if (!code_space_->Setup(NULL, 0)) return false;
 
   // Initialize map space.
-  map_space_ = new MapSpace(FLAG_use_big_map_space
+  map_space_ = new MapSpace(this, FLAG_use_big_map_space
       ? max_old_generation_size_
       : MapSpace::kMaxMapPageIndex * Page::kPageSize,
       FLAG_max_map_space_pages,
       MAP_SPACE);
   if (map_space_ == NULL) return false;
   if (!map_space_->Setup(NULL, 0)) return false;
 
   // Initialize global property cell space.
-  cell_space_ = new CellSpace(max_old_generation_size_, CELL_SPACE);
+  cell_space_ = new CellSpace(this, max_old_generation_size_, CELL_SPACE);
   if (cell_space_ == NULL) return false;
   if (!cell_space_->Setup(NULL, 0)) return false;
 
   // The large object code space may contain code or data. We set the memory
   // to be non-executable here for safety, but this means we need to enable it
   // explicitly when allocating large code objects.
-  lo_space_ = new LargeObjectSpace(LO_SPACE);
+  lo_space_ = new LargeObjectSpace(this, LO_SPACE);
   if (lo_space_ == NULL) return false;
   if (!lo_space_->Setup()) return false;
 
   if (create_heap_objects) {
     // Create initial maps.
     if (!CreateInitialMaps()) return false;
     if (!CreateApiObjects()) return false;
 
     // Create initial objects
     if (!CreateInitialObjects()) return false;
(...skipping 608 matching lines...)
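Every space in Setup above is now constructed with "this", which suggests the common Space base class stores the owning heap and exposes it to the map->heap() chain used earlier in the scavenger. A plausible shape, as a sketch only (the real declarations live in src/spaces.h):

    class Space {
     public:
      Space(Heap* heap, AllocationSpace id, Executability executable)
          : heap_(heap), id_(id), executable_(executable) {}
      Heap* heap() const { return heap_; }  // owner back-pointer
     private:
      Heap* heap_;
      AllocationSpace id_;
      Executability executable_;
    };
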
 }
 
 
 void ExternalStringTable::TearDown() {
   new_space_strings_.Free();
   old_space_strings_.Free();
 }
 
 
 } }  // namespace v8::internal
