Chromium Code Reviews

Unified Diff: src/heap/mark-compact.cc

Issue 1259613006: Change RecordSlot interface. Make it more robust by replacing anchor slot with actual object. (Closed)
Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 5 years, 4 months ago
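
The core of the change is the slot-recording interface. A minimal before/after sketch, inferred from the call sites in this patch (parameter names are illustrative; the actual declarations live in src/heap/mark-compact.h):

    // Before: a second slot of the holding object served as an anchor, and
    // the collector derived the holding page from that anchor's raw address.
    void RecordSlot(Object** anchor_slot, Object** slot, Object* object);
    void RecordCodeEntrySlot(Address slot, Code* target);

    // After: callers pass the HeapObject that actually contains the slot.
    void RecordSlot(HeapObject* object, Object** slot, Object* target);
    void RecordCodeEntrySlot(HeapObject* object, Address slot, Code* target);

Accordingly, every RecordSlot(slot, slot, *slot) pattern below becomes RecordSlot(host, slot, *slot), where host is the object whose field is being recorded.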
// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#include "src/base/atomicops.h"
#include "src/base/bits.h"
#include "src/code-stubs.h"
#include "src/compilation-cache.h"
(...skipping 886 matching lines...)
      candidate->set_code(lazy_compile);
    } else {
      DCHECK(Marking::IsBlack(code_mark));
      candidate->set_code(code);
    }

    // We are in the middle of a GC cycle so the write barrier in the code
    // setter did not record the slot update and we have to do that manually.
    Address slot = candidate->address() + JSFunction::kCodeEntryOffset;
    Code* target = Code::cast(Code::GetObjectFromEntryAddress(slot));
-    isolate_->heap()->mark_compact_collector()->RecordCodeEntrySlot(slot,
-                                                                    target);
+    isolate_->heap()->mark_compact_collector()->RecordCodeEntrySlot(
+        candidate, slot, target);

    Object** shared_code_slot =
        HeapObject::RawField(shared, SharedFunctionInfo::kCodeOffset);
    isolate_->heap()->mark_compact_collector()->RecordSlot(
-        shared_code_slot, shared_code_slot, *shared_code_slot);
+        shared, shared_code_slot, *shared_code_slot);

    candidate = next_candidate;
  }

  jsfunction_candidates_head_ = NULL;
}


void CodeFlusher::ProcessSharedFunctionInfoCandidates() {
  Code* lazy_compile = isolate_->builtins()->builtin(Builtins::kCompileLazy);
(...skipping 14 matching lines...)
      }
      // Always flush the optimized code map if there is one.
      if (!candidate->optimized_code_map()->IsSmi()) {
        candidate->ClearOptimizedCodeMap();
      }
      candidate->set_code(lazy_compile);
    }

    Object** code_slot =
        HeapObject::RawField(candidate, SharedFunctionInfo::kCodeOffset);
-    isolate_->heap()->mark_compact_collector()->RecordSlot(code_slot, code_slot,
+    isolate_->heap()->mark_compact_collector()->RecordSlot(candidate, code_slot,
                                                           *code_slot);

    candidate = next_candidate;
  }

  shared_function_info_candidates_head_ = NULL;
}


void CodeFlusher::ProcessOptimizedCodeMaps() {
(...skipping 29 matching lines...)
      if (Marking::IsWhite(Marking::MarkBitFrom(literals))) continue;
      DCHECK(Marking::IsBlack(Marking::MarkBitFrom(literals)));
      // Move every slot in the entry and record slots when needed.
      code_map->set(new_length + SharedFunctionInfo::kCachedCodeOffset, code);
      code_map->set(new_length + SharedFunctionInfo::kContextOffset, context);
      code_map->set(new_length + SharedFunctionInfo::kLiteralsOffset, literals);
      code_map->set(new_length + SharedFunctionInfo::kOsrAstIdOffset, ast_id);
      Object** code_slot = code_map->RawFieldOfElementAt(
          new_length + SharedFunctionInfo::kCachedCodeOffset);
      isolate_->heap()->mark_compact_collector()->RecordSlot(
-          code_slot, code_slot, *code_slot);
+          code_map, code_slot, *code_slot);
      Object** context_slot = code_map->RawFieldOfElementAt(
          new_length + SharedFunctionInfo::kContextOffset);
      isolate_->heap()->mark_compact_collector()->RecordSlot(
-          context_slot, context_slot, *context_slot);
+          code_map, context_slot, *context_slot);
      Object** literals_slot = code_map->RawFieldOfElementAt(
          new_length + SharedFunctionInfo::kLiteralsOffset);
      isolate_->heap()->mark_compact_collector()->RecordSlot(
-          literals_slot, literals_slot, *literals_slot);
+          code_map, literals_slot, *literals_slot);
      new_length += SharedFunctionInfo::kEntryLength;
    }

    // Process context-independent entry in the optimized code map.
    Object* shared_object = code_map->get(SharedFunctionInfo::kSharedCodeIndex);
    if (shared_object->IsCode()) {
      Code* shared_code = Code::cast(shared_object);
      if (Marking::IsWhite(Marking::MarkBitFrom(shared_code))) {
        code_map->set_undefined(SharedFunctionInfo::kSharedCodeIndex);
      } else {
        DCHECK(Marking::IsBlack(Marking::MarkBitFrom(shared_code)));
        Object** slot =
            code_map->RawFieldOfElementAt(SharedFunctionInfo::kSharedCodeIndex);
-        isolate_->heap()->mark_compact_collector()->RecordSlot(slot, slot,
+        isolate_->heap()->mark_compact_collector()->RecordSlot(code_map, slot,
                                                               *slot);
      }
    }

    // Trim the optimized code map if entries have been removed.
    if (new_length < old_length) {
      holder->TrimOptimizedCodeMap(old_length - new_length);
    }

    holder = next_holder;
(...skipping 216 matching lines...)
      FixedArraySubInstanceType dictionary_type);

  template <MarkCompactMarkingVisitor::VisitorId id>
  class ObjectStatsTracker {
   public:
    static inline void Visit(Map* map, HeapObject* obj);
  };

  static void Initialize();

-  INLINE(static void VisitPointer(Heap* heap, Object** p)) {
-    MarkObjectByPointer(heap->mark_compact_collector(), p, p);
+  INLINE(static void VisitPointer(Heap* heap, HeapObject* object, Object** p)) {
+    MarkObjectByPointer(heap->mark_compact_collector(), object, p);
  }

-  INLINE(static void VisitPointers(Heap* heap, Object** start, Object** end)) {
+  INLINE(static void VisitPointers(Heap* heap, HeapObject* object,
+                                   Object** start, Object** end)) {
    // Mark all objects pointed to in [start, end).
    const int kMinRangeForMarkingRecursion = 64;
    if (end - start >= kMinRangeForMarkingRecursion) {
-      if (VisitUnmarkedObjects(heap, start, end)) return;
+      if (VisitUnmarkedObjects(heap, object, start, end)) return;
      // We are close to a stack overflow, so just mark the objects.
    }
    MarkCompactCollector* collector = heap->mark_compact_collector();
    for (Object** p = start; p < end; p++) {
-      MarkObjectByPointer(collector, start, p);
+      MarkObjectByPointer(collector, object, p);
    }
  }

  // Marks the object black and pushes it on the marking stack.
  INLINE(static void MarkObject(Heap* heap, HeapObject* object)) {
    MarkBit mark = Marking::MarkBitFrom(object);
    heap->mark_compact_collector()->MarkObject(object, mark);
  }

  // Marks the object black without pushing it on the marking stack.
  // Returns true if object needed marking and false otherwise.
  INLINE(static bool MarkObjectWithoutPush(Heap* heap, HeapObject* object)) {
    MarkBit mark_bit = Marking::MarkBitFrom(object);
    if (Marking::IsWhite(mark_bit)) {
      heap->mark_compact_collector()->SetMark(object, mark_bit);
      return true;
    }
    return false;
  }

  // Mark object pointed to by p.
  INLINE(static void MarkObjectByPointer(MarkCompactCollector* collector,
-                                         Object** anchor_slot, Object** p)) {
+                                         HeapObject* object, Object** p)) {
    if (!(*p)->IsHeapObject()) return;
-    HeapObject* object = ShortCircuitConsString(p);
-    collector->RecordSlot(anchor_slot, p, object);
-    MarkBit mark = Marking::MarkBitFrom(object);
-    collector->MarkObject(object, mark);
+    HeapObject* target_object = ShortCircuitConsString(p);
+    collector->RecordSlot(object, p, target_object);
+    MarkBit mark = Marking::MarkBitFrom(target_object);
+    collector->MarkObject(target_object, mark);
  }


  // Visit an unmarked object.
  INLINE(static void VisitUnmarkedObject(MarkCompactCollector* collector,
                                         HeapObject* obj)) {
#ifdef DEBUG
    DCHECK(collector->heap()->Contains(obj));
    DCHECK(!collector->heap()->mark_compact_collector()->IsMarked(obj));
#endif
    Map* map = obj->map();
    Heap* heap = obj->GetHeap();
    MarkBit mark = Marking::MarkBitFrom(obj);
    heap->mark_compact_collector()->SetMark(obj, mark);
    // Mark the map pointer and the body.
    MarkBit map_mark = Marking::MarkBitFrom(map);
    heap->mark_compact_collector()->MarkObject(map, map_mark);
    IterateBody(map, obj);
  }

  // Visit all unmarked objects pointed to by [start, end).
  // Returns false if the operation fails (lack of stack space).
-  INLINE(static bool VisitUnmarkedObjects(Heap* heap, Object** start,
-                                          Object** end)) {
+  INLINE(static bool VisitUnmarkedObjects(Heap* heap, HeapObject* object,
+                                          Object** start, Object** end)) {
    // Return false if we are close to the stack limit.
    StackLimitCheck check(heap->isolate());
    if (check.HasOverflowed()) return false;

    MarkCompactCollector* collector = heap->mark_compact_collector();
    // Visit the unmarked objects.
    for (Object** p = start; p < end; p++) {
      Object* o = *p;
      if (!o->IsHeapObject()) continue;
-      collector->RecordSlot(start, p, o);
+      collector->RecordSlot(object, p, o);
      HeapObject* obj = HeapObject::cast(o);
      MarkBit mark = Marking::MarkBitFrom(obj);
      if (Marking::IsBlackOrGrey(mark)) continue;
      VisitUnmarkedObject(collector, obj);
    }
    return true;
  }

 private:
  template <int id>
(...skipping 20 matching lines...)
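For orientation on the visitor hunks above: every visitor now threads the host object down to VisitPointers, so each slot is recorded against the object that actually contains it rather than against the first slot of the visited range. A hypothetical caller under the new interface (a sketch only; VisitBodySketch and its field arithmetic are assumptions, not part of this patch):

    // Hypothetical usage sketch: pass the host object through so that
    // RecordSlot sees the real holder of every visited slot.
    static void VisitBodySketch(Heap* heap, HeapObject* host) {
      Object** start = HeapObject::RawField(host, HeapObject::kHeaderSize);
      Object** end = HeapObject::RawField(host, host->Size());
      MarkCompactMarkingVisitor::VisitPointers(heap, host, start, end);
    }

Under the old interface the same call would have passed start as the anchor slot, which identified the holding page correctly only as long as start really pointed into the host object.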
    // Save a copy that can be reinstated if we need the code again.
    re->SetDataAt(JSRegExp::saved_code_index(is_one_byte), code);

    // Saving a copy might create a pointer into compaction candidate
    // that was not observed by marker. This might happen if JSRegExp data
    // was marked through the compilation cache before marker reached JSRegExp
    // object.
    FixedArray* data = FixedArray::cast(re->data());
    Object** slot =
        data->data_start() + JSRegExp::saved_code_index(is_one_byte);
-    heap->mark_compact_collector()->RecordSlot(slot, slot, code);
+    heap->mark_compact_collector()->RecordSlot(data, slot, code);

    // Set a number in the 0-255 range to guarantee no smi overflow.
    re->SetDataAt(JSRegExp::code_index(is_one_byte),
                  Smi::FromInt(heap->sweep_generation() & 0xff));
  } else if (code->IsSmi()) {
    int value = Smi::cast(code)->value();
    // The regexp has not been compiled yet or there was a compilation error.
    if (value == JSRegExp::kUninitializedValue ||
        value == JSRegExp::kCompilationErrorValue) {
      return;
(...skipping 761 matching lines...)
        // transition tree alive, not JSObjects. Do not age the map.
        new_age = age;
      }
      MarkObject(map, map_mark);
    } else {
      new_age = FLAG_retain_maps_for_n_gc;
    }
    if (i != new_length) {
      retained_maps->Set(new_length, cell);
      Object** slot = retained_maps->Slot(new_length);
-      RecordSlot(slot, slot, cell);
+      RecordSlot(retained_maps, slot, cell);
      retained_maps->Set(new_length + 1, Smi::FromInt(new_age));
    } else if (new_age != age) {
      retained_maps->Set(new_length + 1, Smi::FromInt(new_age));
    }
    new_length += 2;
  }
  Object* undefined = heap()->undefined_value();
  for (int i = new_length; i < length; i++) {
    retained_maps->Clear(i, undefined);
  }
(...skipping 249 matching lines...)

  const int header = TransitionArray::kProtoTransitionHeaderSize;
  int new_number_of_transitions = 0;
  for (int i = 0; i < number_of_transitions; i++) {
    Object* cell = prototype_transitions->get(header + i);
    if (!WeakCell::cast(cell)->cleared()) {
      if (new_number_of_transitions != i) {
        prototype_transitions->set(header + new_number_of_transitions, cell);
        Object** slot = prototype_transitions->RawFieldOfElementAt(
            header + new_number_of_transitions);
-        RecordSlot(slot, slot, cell);
+        RecordSlot(prototype_transitions, slot, cell);
      }
      new_number_of_transitions++;
    }
  }

  if (new_number_of_transitions != number_of_transitions) {
    TransitionArray::SetNumberOfPrototypeTransitions(prototype_transitions,
                                                     new_number_of_transitions);
  }

(...skipping 62 matching lines...)
      if (target->instance_descriptors() == descriptors) {
        descriptors_owner_died = true;
      }
    } else {
      if (i != transition_index) {
        DCHECK(TransitionArray::IsFullTransitionArray(transitions));
        TransitionArray* t = TransitionArray::cast(transitions);
        Name* key = t->GetKey(i);
        t->SetKey(transition_index, key);
        Object** key_slot = t->GetKeySlot(transition_index);
-        RecordSlot(key_slot, key_slot, key);
+        RecordSlot(t, key_slot, key);
        // Target slots do not need to be recorded since maps are not compacted.
        t->SetTarget(transition_index, t->GetTarget(i));
      }
      transition_index++;
    }
  }

  // If there are no transitions to be cleared, return.
  // TODO(verwaest) Should be an assert, otherwise back pointers are not
  // properly cleared.
(...skipping 75 matching lines...)
void MarkCompactCollector::ProcessWeakCollections() {
  GCTracer::Scope gc_scope(heap()->tracer(),
                           GCTracer::Scope::MC_WEAKCOLLECTION_PROCESS);
  Object* weak_collection_obj = heap()->encountered_weak_collections();
  while (weak_collection_obj != Smi::FromInt(0)) {
    JSWeakCollection* weak_collection =
        reinterpret_cast<JSWeakCollection*>(weak_collection_obj);
    DCHECK(MarkCompactCollector::IsMarked(weak_collection));
    if (weak_collection->table()->IsHashTable()) {
      ObjectHashTable* table = ObjectHashTable::cast(weak_collection->table());
-      Object** anchor = reinterpret_cast<Object**>(table->address());
      for (int i = 0; i < table->Capacity(); i++) {
        if (MarkCompactCollector::IsMarked(HeapObject::cast(table->KeyAt(i)))) {
          Object** key_slot =
              table->RawFieldOfElementAt(ObjectHashTable::EntryToIndex(i));
-          RecordSlot(anchor, key_slot, *key_slot);
+          RecordSlot(table, key_slot, *key_slot);
          Object** value_slot =
              table->RawFieldOfElementAt(ObjectHashTable::EntryToValueIndex(i));
-          MarkCompactMarkingVisitor::MarkObjectByPointer(this, anchor,
+          MarkCompactMarkingVisitor::MarkObjectByPointer(this, table,
                                                         value_slot);
        }
      }
    }
    weak_collection_obj = weak_collection->next();
  }
}


void MarkCompactCollector::ClearWeakCollections() {
(...skipping 48 matching lines...)
      // alive as long as the cell value is alive.
      // TODO(ulan): remove this once we remove Heap::weak_object_to_code_table.
      if (value->IsCell()) {
        Object* cell_value = Cell::cast(value)->value();
        if (cell_value->IsHeapObject() &&
            MarkCompactCollector::IsMarked(HeapObject::cast(cell_value))) {
          // Resurrect the cell.
          MarkBit mark = Marking::MarkBitFrom(value);
          SetMark(value, mark);
          Object** slot = HeapObject::RawField(value, Cell::kValueOffset);
-          RecordSlot(slot, slot, *slot);
+          RecordSlot(value, slot, *slot);
          slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset);
-          RecordSlot(slot, slot, *slot);
+          RecordSlot(weak_cell, slot, *slot);
        } else {
          weak_cell->clear();
        }
      } else {
        weak_cell->clear();
      }
    } else {
      Object** slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset);
-      RecordSlot(slot, slot, *slot);
+      RecordSlot(weak_cell, slot, *slot);
    }
    weak_cell_obj = weak_cell->next();
    weak_cell->clear_next(heap());
  }
  heap()->set_encountered_weak_cells(Smi::FromInt(0));
}


void MarkCompactCollector::AbortWeakCells() {
  Object* weak_cell_obj = heap()->encountered_weak_cells();
(...skipping 2002 matching lines...)
  page->SetFlag(Page::POPULAR_PAGE);

  // We were not collecting slots on this page that point
  // to other evacuation candidates thus we have to
  // rescan the page after evacuation to discover and update all
  // pointers to evacuated objects.
  page->SetFlag(Page::RESCAN_ON_EVACUATION);
}


-void MarkCompactCollector::RecordCodeEntrySlot(Address slot, Code* target) {
+void MarkCompactCollector::RecordCodeEntrySlot(HeapObject* object, Address slot,
+                                               Code* target) {
  Page* target_page = Page::FromAddress(reinterpret_cast<Address>(target));
  if (target_page->IsEvacuationCandidate() &&
-      !ShouldSkipEvacuationSlotRecording(reinterpret_cast<Object**>(slot))) {
+      !ShouldSkipEvacuationSlotRecording(object)) {
    if (!SlotsBuffer::AddTo(&slots_buffer_allocator_,
                            target_page->slots_buffer_address(),
                            SlotsBuffer::CODE_ENTRY_SLOT, slot,
                            SlotsBuffer::FAIL_ON_OVERFLOW)) {
      EvictPopularEvacuationCandidate(target_page);
    }
  }
}


(...skipping 71 matching lines...)
  SlotsBuffer* buffer = *buffer_address;
  while (buffer != NULL) {
    SlotsBuffer* next_buffer = buffer->next();
    DeallocateBuffer(buffer);
    buffer = next_buffer;
  }
  *buffer_address = NULL;
}
}  // namespace internal
}  // namespace v8
