Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 243 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 254 void StaticMarkingVisitor<StaticVisitor>::VisitMap( | 254 void StaticMarkingVisitor<StaticVisitor>::VisitMap( |
| 255 Map* map, HeapObject* object) { | 255 Map* map, HeapObject* object) { |
| 256 Heap* heap = map->GetHeap(); | 256 Heap* heap = map->GetHeap(); |
| 257 Map* map_object = Map::cast(object); | 257 Map* map_object = Map::cast(object); |
| 258 | 258 |
| 259 // Clears the cache of ICs related to this map. | 259 // Clears the cache of ICs related to this map. |
| 260 if (FLAG_cleanup_code_caches_at_gc) { | 260 if (FLAG_cleanup_code_caches_at_gc) { |
| 261 map_object->ClearCodeCache(heap); | 261 map_object->ClearCodeCache(heap); |
| 262 } | 262 } |
| 263 | 263 |
| 264 // When map collection is enabled we have to mark through map's | 264 // When map collection is enabled we have to mark through map's transitions |
| 265 // transitions and back pointers in a special way to make these links | 265 // and back pointers in a special way to make these links weak. |
| 266 // weak. Only maps for subclasses of JSReceiver can have transitions. | 266 if (FLAG_collect_maps && map_object->CanTransition()) { |
| 267 STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE); | |
| 268 if (FLAG_collect_maps && | |
| 269 map_object->instance_type() >= FIRST_JS_RECEIVER_TYPE) { | |
| 270 MarkMapContents(heap, map_object); | 267 MarkMapContents(heap, map_object); |
| 271 } else { | 268 } else { |
| 272 StaticVisitor::VisitPointers(heap, | 269 StaticVisitor::VisitPointers(heap, |
| 273 HeapObject::RawField(object, Map::kPointerFieldsBeginOffset), | 270 HeapObject::RawField(object, Map::kPointerFieldsBeginOffset), |
| 274 HeapObject::RawField(object, Map::kPointerFieldsEndOffset)); | 271 HeapObject::RawField(object, Map::kPointerFieldsEndOffset)); |
| 275 } | 272 } |
| 276 } | 273 } |
| 277 | 274 |
| 278 | 275 |
| 279 template<typename StaticVisitor> | 276 template<typename StaticVisitor> |
| (...skipping 107 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 387 // array slot, since it will be implicitly recorded when the pointer | 384 // array slot, since it will be implicitly recorded when the pointer |
| 388 // fields of this map are visited. | 385 // fields of this map are visited. |
| 389 TransitionArray* transitions = map->unchecked_transition_array(); | 386 TransitionArray* transitions = map->unchecked_transition_array(); |
| 390 if (transitions->IsTransitionArray()) { | 387 if (transitions->IsTransitionArray()) { |
| 391 MarkTransitionArray(heap, transitions); | 388 MarkTransitionArray(heap, transitions); |
| 392 } else { | 389 } else { |
| 393 // Already marked by marking map->GetBackPointer() above. | 390 // Already marked by marking map->GetBackPointer() above. |
| 394 ASSERT(transitions->IsMap() || transitions->IsUndefined()); | 391 ASSERT(transitions->IsMap() || transitions->IsUndefined()); |
| 395 } | 392 } |
| 396 | 393 |
| 394 // Mark prototype dependent codes array but do not push it onto marking | |
| 395 // stack, this will make references from it weak. We will clean dead | |
| 396 // dead codes when we iterate over maps in ClearNonLiveTransitions. | |
| 397 Object** slot = HeapObject::RawField(reinterpret_cast<HeapObject*>(map), | |
| 398 Map::kDependentCodesOffset); | |
| 399 HeapObject* obj = HeapObject::cast(*slot); | |
| 400 heap->mark_compact_collector()->RecordSlot(slot, slot, obj); | |
| 401 StaticVisitor::MarkObjectWithoutPush(heap, obj); | |
| 402 | |
| 397 // Mark the pointer fields of the Map. Since the transitions array has | 403 // Mark the pointer fields of the Map. Since the transitions array has |
| 398 // been marked already, it is fine that one of these fields contains a | 404 // been marked already, it is fine that one of these fields contains a |
| 399 // pointer to it. | 405 // pointer to it. |
| 400 StaticVisitor::VisitPointers(heap, | 406 StaticVisitor::VisitPointers(heap, |
| 401 HeapObject::RawField(map, Map::kPointerFieldsBeginOffset), | 407 HeapObject::RawField(map, Map::kPointerFieldsBeginOffset), |
| 402 HeapObject::RawField(map, Map::kPointerFieldsEndOffset)); | 408 HeapObject::RawField(map, Map::kPointerFieldsEndOffset)); |
| 403 } | 409 } |
| 404 | 410 |
| 405 | 411 |
| 406 template<typename StaticVisitor> | 412 template<typename StaticVisitor> |
| (...skipping 231 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 638 RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY); | 644 RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY); |
| 639 | 645 |
| 640 // There are two places where we iterate code bodies: here and the | 646 // There are two places where we iterate code bodies: here and the |
| 641 // templated CodeIterateBody (below). They should be kept in sync. | 647 // templated CodeIterateBody (below). They should be kept in sync. |
| 642 IteratePointer(v, kRelocationInfoOffset); | 648 IteratePointer(v, kRelocationInfoOffset); |
| 643 IteratePointer(v, kHandlerTableOffset); | 649 IteratePointer(v, kHandlerTableOffset); |
| 644 IteratePointer(v, kDeoptimizationDataOffset); | 650 IteratePointer(v, kDeoptimizationDataOffset); |
| 645 IteratePointer(v, kTypeFeedbackInfoOffset); | 651 IteratePointer(v, kTypeFeedbackInfoOffset); |
| 646 | 652 |
| 647 RelocIterator it(this, mode_mask); | 653 RelocIterator it(this, mode_mask); |
| 648 for (; !it.done(); it.next()) { | 654 if (kind() == OPTIMIZED_FUNCTION) { |
| 649 it.rinfo()->Visit(v); | 655 // Treat embedded maps in optimized code as weak. |
|
Michael Starzinger
2012/12/14 14:15:26
I think this is the wrong place to skip embedded maps.
ulan
2013/01/17 09:35:10
Done.
| |
| 656 for (; !it.done(); it.next()) { | |
| 657 RelocInfo* info = it.rinfo(); | |
| 658 if (!RelocInfo::IsEmbeddedObject(info->rmode()) || | |
| 659 !info->target_object()->IsMap() || | |
| 660 !Map::cast(info->target_object())->CanTransition()) { | |
| 661 info->Visit(v); | |
| 662 } | |
| 663 } | |
| 664 } else { | |
| 665 for (; !it.done(); it.next()) { | |
| 666 it.rinfo()->Visit(v); | |
| 667 } | |
| 650 } | 668 } |
| 651 } | 669 } |
| 652 | 670 |
| 653 | 671 |
| 654 template<typename StaticVisitor> | 672 template<typename StaticVisitor> |
| 655 void Code::CodeIterateBody(Heap* heap) { | 673 void Code::CodeIterateBody(Heap* heap) { |
| 656 int mode_mask = RelocInfo::kCodeTargetMask | | 674 int mode_mask = RelocInfo::kCodeTargetMask | |
| 657 RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) | | 675 RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) | |
| 658 RelocInfo::ModeMask(RelocInfo::GLOBAL_PROPERTY_CELL) | | 676 RelocInfo::ModeMask(RelocInfo::GLOBAL_PROPERTY_CELL) | |
| 659 RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) | | 677 RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) | |
| (...skipping 10 matching lines...) Expand all Loading... | |
| 670 heap, | 688 heap, |
| 671 reinterpret_cast<Object**>(this->address() + kHandlerTableOffset)); | 689 reinterpret_cast<Object**>(this->address() + kHandlerTableOffset)); |
| 672 StaticVisitor::VisitPointer( | 690 StaticVisitor::VisitPointer( |
| 673 heap, | 691 heap, |
| 674 reinterpret_cast<Object**>(this->address() + kDeoptimizationDataOffset)); | 692 reinterpret_cast<Object**>(this->address() + kDeoptimizationDataOffset)); |
| 675 StaticVisitor::VisitPointer( | 693 StaticVisitor::VisitPointer( |
| 676 heap, | 694 heap, |
| 677 reinterpret_cast<Object**>(this->address() + kTypeFeedbackInfoOffset)); | 695 reinterpret_cast<Object**>(this->address() + kTypeFeedbackInfoOffset)); |
| 678 | 696 |
| 679 RelocIterator it(this, mode_mask); | 697 RelocIterator it(this, mode_mask); |
| 680 for (; !it.done(); it.next()) { | 698 if (kind() == OPTIMIZED_FUNCTION) { |
| 681 it.rinfo()->template Visit<StaticVisitor>(heap); | 699 // Treat embedded maps in optimized code as weak. |
|
Michael Starzinger
2012/12/14 14:15:26
Likewise.
ulan
2013/01/17 09:35:10
Done.
| |
| 700 for (; !it.done(); it.next()) { | |
| 701 RelocInfo* info = it.rinfo(); | |
| 702 if (!RelocInfo::IsEmbeddedObject(info->rmode()) || | |
| 703 !info->target_object()->IsMap() || | |
| 704 !Map::cast(info->target_object())->CanTransition()) { | |
| 705 it.rinfo()->template Visit<StaticVisitor>(heap); | |
| 706 } | |
| 707 } | |
| 708 } else { | |
| 709 for (; !it.done(); it.next()) { | |
| 710 it.rinfo()->template Visit<StaticVisitor>(heap); | |
| 711 } | |
| 682 } | 712 } |
| 683 } | 713 } |
| 684 | 714 |
| 685 | 715 |
| 686 } } // namespace v8::internal | 716 } } // namespace v8::internal |
| 687 | 717 |
| 688 #endif // V8_OBJECTS_VISITING_INL_H_ | 718 #endif // V8_OBJECTS_VISITING_INL_H_ |
| OLD | NEW |