| OLD | NEW |
| 1 // Copyright 2006-2008 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2008 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 60 matching lines...) |
| 71 // Rather than passing the tracer around we stash it in a static member | 71 // Rather than passing the tracer around we stash it in a static member |
| 72 // variable. | 72 // variable. |
| 73 tracer_ = tracer; | 73 tracer_ = tracer; |
| 74 Prepare(); | 74 Prepare(); |
| 75 // Prepare has selected whether to compact the old generation or not. | 75 // Prepare has selected whether to compact the old generation or not. |
| 76 // Tell the tracer. | 76 // Tell the tracer. |
| 77 if (IsCompacting()) tracer_->set_is_compacting(); | 77 if (IsCompacting()) tracer_->set_is_compacting(); |
| 78 | 78 |
| 79 MarkLiveObjects(); | 79 MarkLiveObjects(); |
| 80 | 80 |
| 81 if (FLAG_collect_maps) ClearNonLiveTransitions(); |
| 82 |
| 81 SweepLargeObjectSpace(); | 83 SweepLargeObjectSpace(); |
| 82 | 84 |
| 83 if (compacting_collection_) { | 85 if (compacting_collection_) { |
| 84 EncodeForwardingAddresses(); | 86 EncodeForwardingAddresses(); |
| 85 | 87 |
| 86 UpdatePointers(); | 88 UpdatePointers(); |
| 87 | 89 |
| 88 RelocateObjects(); | 90 RelocateObjects(); |
| 89 | 91 |
| 90 RebuildRSets(); | 92 RebuildRSets(); |
| (...skipping 37 matching lines...) |
| 128 old_gen_used += space->Size(); | 130 old_gen_used += space->Size(); |
| 129 } | 131 } |
| 130 int old_gen_fragmentation = | 132 int old_gen_fragmentation = |
| 131 static_cast<int>((old_gen_recoverable * 100.0) / old_gen_used); | 133 static_cast<int>((old_gen_recoverable * 100.0) / old_gen_used); |
| 132 if (old_gen_fragmentation > kFragmentationLimit) { | 134 if (old_gen_fragmentation > kFragmentationLimit) { |
| 133 compacting_collection_ = true; | 135 compacting_collection_ = true; |
| 134 } | 136 } |
| 135 } | 137 } |
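A worked example of the heuristic above, as a standalone sketch (kFragmentationLimit's actual value is not visible in this hunk, so 10 is assumed purely for illustration): with 8 MB in use across the old paged spaces and 1 MB recoverable, fragmentation comes out at 12, which exceeds the limit and selects a compacting collection.

```cpp
#include <cstdio>

int main() {
  const int MB = 1024 * 1024;
  const int kFragmentationLimit = 10;  // assumed value, for illustration
  int old_gen_recoverable = 1 * MB;    // bytes reclaimable by compacting
  int old_gen_used = 8 * MB;           // bytes in use in the old paged spaces
  int old_gen_fragmentation =
      static_cast<int>((old_gen_recoverable * 100.0) / old_gen_used);
  std::printf("fragmentation = %d%% -> compact: %s\n", old_gen_fragmentation,
              old_gen_fragmentation > kFragmentationLimit ? "yes" : "no");
  return 0;
}
```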
| 136 | 138 |
| 137 if (FLAG_never_compact) compacting_collection_ = false; | 139 if (FLAG_never_compact) compacting_collection_ = false; |
| 140 if (FLAG_collect_maps) CreateBackPointers(); |
| 138 | 141 |
| 139 #ifdef DEBUG | 142 #ifdef DEBUG |
| 140 if (compacting_collection_) { | 143 if (compacting_collection_) { |
| 141 // We will write bookkeeping information to the remembered set area | 144 // We will write bookkeeping information to the remembered set area |
| 142 // starting now. | 145 // starting now. |
| 143 Page::set_rset_state(Page::NOT_IN_USE); | 146 Page::set_rset_state(Page::NOT_IN_USE); |
| 144 } | 147 } |
| 145 #endif | 148 #endif |
| 146 | 149 |
| 147 PagedSpaces spaces; | 150 PagedSpaces spaces; |
| (...skipping 167 matching lines...) |
| 315 | 318 |
| 316 // Retrieves the Code pointer from derived code entry. | 319 // Retrieves the Code pointer from derived code entry. |
| 317 Code* CodeFromDerivedPointer(Address addr) { | 320 Code* CodeFromDerivedPointer(Address addr) { |
| 318 ASSERT(addr != NULL); | 321 ASSERT(addr != NULL); |
| 319 return reinterpret_cast<Code*>( | 322 return reinterpret_cast<Code*>( |
| 320 HeapObject::FromAddress(addr - Code::kHeaderSize)); | 323 HeapObject::FromAddress(addr - Code::kHeaderSize)); |
| 321 } | 324 } |
| 322 | 325 |
| 323 // Visit an unmarked object. | 326 // Visit an unmarked object. |
| 324 void VisitUnmarkedObject(HeapObject* obj) { | 327 void VisitUnmarkedObject(HeapObject* obj) { |
| 328 #ifdef DEBUG |
| 325 ASSERT(Heap::Contains(obj)); | 329 ASSERT(Heap::Contains(obj)); |
| 326 #ifdef DEBUG | |
| 327 MarkCompactCollector::UpdateLiveObjectCount(obj); | 330 MarkCompactCollector::UpdateLiveObjectCount(obj); |
| 331 ASSERT(!obj->IsMarked()); |
| 328 #endif | 332 #endif |
| 329 Map* map = obj->map(); | 333 Map* map = obj->map(); |
| 330 obj->SetMark(); | 334 obj->SetMark(); |
| 331 MarkCompactCollector::tracer()->increment_marked_count(); | 335 MarkCompactCollector::tracer()->increment_marked_count(); |
| 332 // Mark the map pointer and the body. | 336 // Mark the map pointer and the body. |
| 333 MarkCompactCollector::MarkObject(map); | 337 MarkCompactCollector::MarkObject(map); |
| 334 obj->IterateBody(map->instance_type(), obj->SizeFromMap(map), this); | 338 obj->IterateBody(map->instance_type(), obj->SizeFromMap(map), this); |
| 335 } | 339 } |
| 336 | 340 |
| 337 // Visit all unmarked objects pointed to by [start, end). | 341 // Visit all unmarked objects pointed to by [start, end). |
| (...skipping 80 matching lines...) |
| 418 }; | 422 }; |
| 419 | 423 |
| 420 | 424 |
| 421 void MarkCompactCollector::MarkUnmarkedObject(HeapObject* object) { | 425 void MarkCompactCollector::MarkUnmarkedObject(HeapObject* object) { |
| 422 #ifdef DEBUG | 426 #ifdef DEBUG |
| 423 UpdateLiveObjectCount(object); | 427 UpdateLiveObjectCount(object); |
| 424 #endif | 428 #endif |
| 425 ASSERT(!object->IsMarked()); | 429 ASSERT(!object->IsMarked()); |
| 426 if (object->IsJSGlobalObject()) Counters::global_objects.Increment(); | 430 if (object->IsJSGlobalObject()) Counters::global_objects.Increment(); |
| 427 | 431 |
| 428 if (FLAG_cleanup_caches_in_maps_at_gc && object->IsMap()) { | |
| 429 Map::cast(object)->ClearCodeCache(); | |
| 430 } | |
| 431 | |
| 432 object->SetMark(); | |
| 433 tracer_->increment_marked_count(); | 432 tracer_->increment_marked_count(); |
| 434 ASSERT(Heap::Contains(object)); | 433 ASSERT(Heap::Contains(object)); |
| 435 marking_stack.Push(object); | 434 if (object->IsMap()) { |
| 435 Map* map = Map::cast(object); |
| 436 if (FLAG_cleanup_caches_in_maps_at_gc) { |
| 437 map->ClearCodeCache(); |
| 438 } |
| 439 map->SetMark(); |
| 440 if (FLAG_collect_maps && |
| 441 map->instance_type() >= FIRST_JS_OBJECT_TYPE && |
| 442 map->instance_type() <= JS_FUNCTION_TYPE) { |
| 443 MarkMapContents(map); |
| 444 } else { |
| 445 marking_stack.Push(map); |
| 446 } |
| 447 } else { |
| 448 object->SetMark(); |
| 449 marking_stack.Push(object); |
| 450 } |
| 451 } |
| 452 |
| 453 |
| 454 void MarkCompactCollector::MarkMapContents(Map* map) { |
| 455 MarkDescriptorArray(reinterpret_cast<DescriptorArray*>( |
| 456 *HeapObject::RawField(map, Map::kInstanceDescriptorsOffset))); |
| 457 |
| 458 // Mark the Object* fields of the Map. |
| 459 // Since the descriptor array has been marked already, it is fine |
| 460 // that one of these fields contains a pointer to it. |
| 461 MarkingVisitor visitor; // Has no state or contents. |
| 462 visitor.VisitPointers(HeapObject::RawField(map, Map::kPrototypeOffset), |
| 463 HeapObject::RawField(map, Map::kSize)); |
| 464 } |
| 465 |
| 466 |
| 467 void MarkCompactCollector::MarkDescriptorArray( |
| 468 DescriptorArray* descriptors) {
| 469 if (descriptors->IsMarked()) return; |
| 470 // Empty descriptor array is marked as a root before any maps are marked. |
| 471 ASSERT(descriptors != Heap::empty_descriptor_array()); |
| 472 |
| 473 tracer_->increment_marked_count(); |
| 474 #ifdef DEBUG |
| 475 UpdateLiveObjectCount(descriptors); |
| 476 #endif |
| 477 descriptors->SetMark(); |
| 478 |
| 479 FixedArray* contents = reinterpret_cast<FixedArray*>( |
| 480 descriptors->get(DescriptorArray::kContentArrayIndex)); |
| 481 ASSERT(contents->IsHeapObject()); |
| 482 ASSERT(!contents->IsMarked()); |
| 483 ASSERT(contents->IsFixedArray()); |
| 484 ASSERT(contents->length() >= 2); |
| 485 tracer_->increment_marked_count(); |
| 486 #ifdef DEBUG |
| 487 UpdateLiveObjectCount(contents); |
| 488 #endif |
| 489 contents->SetMark(); |
| 490 // Contents contains (value, details) pairs. If the details say |
| 491 // that the type of descriptor is MAP_TRANSITION, CONSTANT_TRANSITION, |
| 492 // or NULL_DESCRIPTOR, we don't mark the value as live. Only for |
| 493 // type MAP_TRANSITION is the value an Object* (a Map*). |
| 494 for (int i = 0; i < contents->length(); i += 2) { |
| 495 // If the pair (value, details) at index i, i+1 is not |
| 496 // a transition or null descriptor, mark the value. |
| 497 PropertyDetails details(Smi::cast(contents->get(i + 1))); |
| 498 if (details.type() < FIRST_PHANTOM_PROPERTY_TYPE) { |
| 499 HeapObject* object = reinterpret_cast<HeapObject*>(contents->get(i)); |
| 500 if (object->IsHeapObject() && !object->IsMarked()) { |
| 501 tracer_->increment_marked_count(); |
| 502 #ifdef DEBUG |
| 503 UpdateLiveObjectCount(object); |
| 504 #endif |
| 505 object->SetMark(); |
| 506 marking_stack.Push(object); |
| 507 } |
| 508 } |
| 509 } |
| 510 // The DescriptorArray descriptors contains a pointer to its contents array, |
| 511 // but the contents array is already marked. |
| 512 marking_stack.Push(descriptors); |
| 513 } |
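To make the pair layout concrete, here is a minimal sketch of the strong/weak split the loop above performs; the enum and types are hypothetical stand-ins, not V8's PropertyDetails API. Entries whose details carry a phantom type (transitions, null descriptors) are left for ClearNonLiveTransitions to deal with, while all other values are treated as strong.

```cpp
#include <cstddef>
#include <vector>

// Hypothetical property types; in V8 the phantom types (transitions and
// null descriptors) sort after the real ones, hence the '<' test above.
enum TypeSketch {
  NORMAL = 0,
  FIELD = 1,
  FIRST_PHANTOM = 2,   // stand-in for FIRST_PHANTOM_PROPERTY_TYPE
  MAP_TRANSITION = 2,
  NULL_DESCRIPTOR = 3
};

// 'contents' alternates (value, details) just like the contents array
// above; only values whose details are non-phantom are collected as strong.
void CollectStrongValues(const std::vector<int>& contents,
                         std::vector<int>* strong) {
  for (std::size_t i = 0; i + 1 < contents.size(); i += 2) {
    TypeSketch type = static_cast<TypeSketch>(contents[i + 1]);
    if (type < FIRST_PHANTOM) strong->push_back(contents[i]);
  }
}
```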
| 514 |
| 515 |
| 516 void MarkCompactCollector::CreateBackPointers() { |
| 517 HeapObjectIterator iterator(Heap::map_space()); |
| 518 while (iterator.has_next()) { |
| 519 Object* next_object = iterator.next(); |
| 520 if (next_object->IsMap()) { // Could also be ByteArray on free list. |
| 521 Map* map = Map::cast(next_object); |
| 522 if (map->instance_type() >= FIRST_JS_OBJECT_TYPE && |
| 523 map->instance_type() <= JS_FUNCTION_TYPE) { |
| 524 map->CreateBackPointers(); |
| 525 } else { |
| 526 ASSERT(map->instance_descriptors() == Heap::empty_descriptor_array()); |
| 527 } |
| 528 } |
| 529 } |
| 436 } | 530 } |
| 437 | 531 |
| 438 | 532 |
| 439 static int OverflowObjectSize(HeapObject* obj) { | 533 static int OverflowObjectSize(HeapObject* obj) { |
| 440 // Recover the normal map pointer, it might be marked as live and | 534 // Recover the normal map pointer, it might be marked as live and |
| 441 // overflowed. | 535 // overflowed. |
| 442 MapWord map_word = obj->map_word(); | 536 MapWord map_word = obj->map_word(); |
| 443 map_word.ClearMark(); | 537 map_word.ClearMark(); |
| 444 map_word.ClearOverflow(); | 538 map_word.ClearOverflow(); |
| 445 return obj->SizeFromMap(map_word.ToMap()); | 539 return obj->SizeFromMap(map_word.ToMap()); |
| (...skipping 222 matching lines...) |
| 668 | 762 |
| 669 #ifdef DEBUG | 763 #ifdef DEBUG |
| 670 if (FLAG_verify_global_gc) VerifyHeapAfterMarkingPhase(); | 764 if (FLAG_verify_global_gc) VerifyHeapAfterMarkingPhase(); |
| 671 #endif | 765 #endif |
| 672 | 766 |
| 673 // Remove object groups after marking phase. | 767 // Remove object groups after marking phase. |
| 674 GlobalHandles::RemoveObjectGroups(); | 768 GlobalHandles::RemoveObjectGroups(); |
| 675 } | 769 } |
| 676 | 770 |
| 677 | 771 |
| 772 static int CountMarkedCallback(HeapObject* obj) { |
| 773 MapWord map_word = obj->map_word(); |
| 774 map_word.ClearMark(); |
| 775 return obj->SizeFromMap(map_word.ToMap()); |
| 776 } |
| 777 |
| 778 |
| 678 #ifdef DEBUG | 779 #ifdef DEBUG |
| 679 void MarkCompactCollector::UpdateLiveObjectCount(HeapObject* obj) { | 780 void MarkCompactCollector::UpdateLiveObjectCount(HeapObject* obj) { |
| 680 live_bytes_ += obj->Size(); | 781 live_bytes_ += obj->Size(); |
| 681 if (Heap::new_space()->Contains(obj)) { | 782 if (Heap::new_space()->Contains(obj)) { |
| 682 live_young_objects_++; | 783 live_young_objects_++; |
| 683 } else if (Heap::map_space()->Contains(obj)) { | 784 } else if (Heap::map_space()->Contains(obj)) { |
| 684 ASSERT(obj->IsMap()); | 785 ASSERT(obj->IsMap()); |
| 685 live_map_objects_++; | 786 live_map_objects_++; |
| 686 } else if (Heap::old_pointer_space()->Contains(obj)) { | 787 } else if (Heap::old_pointer_space()->Contains(obj)) { |
| 687 live_old_pointer_objects_++; | 788 live_old_pointer_objects_++; |
| 688 } else if (Heap::old_data_space()->Contains(obj)) { | 789 } else if (Heap::old_data_space()->Contains(obj)) { |
| 689 live_old_data_objects_++; | 790 live_old_data_objects_++; |
| 690 } else if (Heap::code_space()->Contains(obj)) { | 791 } else if (Heap::code_space()->Contains(obj)) { |
| 691 live_code_objects_++; | 792 live_code_objects_++; |
| 692 } else if (Heap::lo_space()->Contains(obj)) { | 793 } else if (Heap::lo_space()->Contains(obj)) { |
| 693 live_lo_objects_++; | 794 live_lo_objects_++; |
| 694 } else { | 795 } else { |
| 695 UNREACHABLE(); | 796 UNREACHABLE(); |
| 696 } | 797 } |
| 697 } | 798 } |
| 698 | 799 |
| 699 | 800 |
| 700 static int CountMarkedCallback(HeapObject* obj) { | |
| 701 MapWord map_word = obj->map_word(); | |
| 702 map_word.ClearMark(); | |
| 703 return obj->SizeFromMap(map_word.ToMap()); | |
| 704 } | |
| 705 | |
| 706 | |
| 707 void MarkCompactCollector::VerifyHeapAfterMarkingPhase() { | 801 void MarkCompactCollector::VerifyHeapAfterMarkingPhase() { |
| 708 Heap::new_space()->Verify(); | 802 Heap::new_space()->Verify(); |
| 709 Heap::old_pointer_space()->Verify(); | 803 Heap::old_pointer_space()->Verify(); |
| 710 Heap::old_data_space()->Verify(); | 804 Heap::old_data_space()->Verify(); |
| 711 Heap::code_space()->Verify(); | 805 Heap::code_space()->Verify(); |
| 712 Heap::map_space()->Verify(); | 806 Heap::map_space()->Verify(); |
| 713 | 807 |
| 714 int live_objects; | 808 int live_objects; |
| 715 | 809 |
| 716 #define CHECK_LIVE_OBJECTS(it, expected) \ | 810 #define CHECK_LIVE_OBJECTS(it, expected) \ |
| (...skipping 31 matching lines...) |
| 748 void MarkCompactCollector::SweepLargeObjectSpace() { | 842 void MarkCompactCollector::SweepLargeObjectSpace() { |
| 749 #ifdef DEBUG | 843 #ifdef DEBUG |
| 750 ASSERT(state_ == MARK_LIVE_OBJECTS); | 844 ASSERT(state_ == MARK_LIVE_OBJECTS); |
| 751 state_ = | 845 state_ = |
| 752 compacting_collection_ ? ENCODE_FORWARDING_ADDRESSES : SWEEP_SPACES; | 846 compacting_collection_ ? ENCODE_FORWARDING_ADDRESSES : SWEEP_SPACES; |
| 753 #endif | 847 #endif |
| 754 // Deallocate unmarked objects and clear marked bits for marked objects. | 848 // Deallocate unmarked objects and clear marked bits for marked objects. |
| 755 Heap::lo_space()->FreeUnmarkedObjects(); | 849 Heap::lo_space()->FreeUnmarkedObjects(); |
| 756 } | 850 } |
| 757 | 851 |
| 852 // Safe to use during marking phase only. |
| 853 bool MarkCompactCollector::SafeIsMap(HeapObject* object) { |
| 854 MapWord metamap = object->map_word(); |
| 855 metamap.ClearMark(); |
| 856 return metamap.ToMap()->instance_type() == MAP_TYPE; |
| 857 } |
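SafeIsMap is needed because during marking the mark bit lives in the object's map word, so the raw word may not be a dereferenceable Map* until that bit is cleared. A minimal model of the tagging, with made-up names (the real logic lives in V8's MapWord):

```cpp
#include <assert.h>
#include <stdint.h>

// Minimal model of a mark bit stored in the map word; the bit position and
// names are assumptions for illustration, not V8's MapWord API.
const uintptr_t kMarkBitSketch = 1;

uintptr_t SetMarkSketch(uintptr_t map_word) { return map_word | kMarkBitSketch; }
uintptr_t ClearMarkSketch(uintptr_t map_word) { return map_word & ~kMarkBitSketch; }

int main() {
  uintptr_t map_word = 0x1000;                  // a pretend aligned Map* value
  uintptr_t marked = SetMarkSketch(map_word);   // no longer a valid pointer
  assert(marked != map_word);                   // a naive IsMap would misread it
  assert(ClearMarkSketch(marked) == map_word);  // SafeIsMap-style recovery
  return 0;
}
```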
| 858 |
| 859 void MarkCompactCollector::ClearNonLiveTransitions() { |
| 860 HeapObjectIterator map_iterator(Heap::map_space(), &CountMarkedCallback); |
| 861 // Iterate over the map space, setting map transitions that go from |
| 862 // a marked map to an unmarked map to null transitions. At the same time, |
| 863 // set all the prototype fields of maps back to their original value, |
| 864 // dropping the back pointers temporarily stored in the prototype field. |
| 865 // Setting the prototype field requires following the linked list of |
| 866 // back pointers, reversing them all at once. This allows us to find |
| 867 // those maps with map transitions that need to be nulled, and only |
| 868 // scan the descriptor arrays of those maps, not all maps. |
| 869 // All of these actions are carried out only on maps of JSObjects |
| 870 // and related subtypes. |
| 871 while (map_iterator.has_next()) { |
| 872 Map* map = reinterpret_cast<Map*>(map_iterator.next()); |
| 873 if (!map->IsMarked() && map->IsByteArray()) continue; |
| 874 |
| 875 ASSERT(SafeIsMap(map)); |
| 876 // Only JSObject and subtypes have map transitions and back pointers. |
| 877 if (map->instance_type() < FIRST_JS_OBJECT_TYPE) continue; |
| 878 if (map->instance_type() > JS_FUNCTION_TYPE) continue; |
| 879 // Follow the chain of back pointers to find the prototype. |
| 880 Map* current = map; |
| 881 while (SafeIsMap(current)) { |
| 882 current = reinterpret_cast<Map*>(current->prototype()); |
| 883 ASSERT(current->IsHeapObject()); |
| 884 } |
| 885 Object* real_prototype = current; |
| 886 |
| 887 // Follow back pointers, setting them to prototype, |
| 888 // clearing map transitions when necessary. |
| 889 current = map; |
| 890 bool on_dead_path = !current->IsMarked(); |
| 891 Object* next;
| 892 while (SafeIsMap(current)) { |
| 893 next = current->prototype(); |
| 894 // There should never be a dead map above a live map. |
| 895 ASSERT(on_dead_path || current->IsMarked()); |
| 896 |
| 897 // A live map above a dead map indicates a dead transition. |
| 898 // This test will always be false on the first iteration. |
| 899 if (on_dead_path && current->IsMarked()) { |
| 900 on_dead_path = false; |
| 901 current->ClearNonLiveTransitions(real_prototype); |
| 902 } |
| 903 *HeapObject::RawField(current, Map::kPrototypeOffset) = |
| 904 real_prototype; |
| 905 current = reinterpret_cast<Map*>(next); |
| 906 } |
| 907 } |
| 908 } |
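For readers new to this trick, here is a runnable sketch of the first loop's chain walk, using a made-up NodeSketch type rather than V8's Map: CreateBackPointers overwrote each transition target's prototype slot with a pointer back to the map it transitioned from, so following prototype slots climbs the transition tree and finally steps off the root map onto the real prototype object.

```cpp
// 'NodeSketch' is an illustrative stand-in, not a V8 type.
struct NodeSketch {
  bool is_map;            // SafeIsMap() analogue
  NodeSketch* prototype;  // back pointer during GC, real prototype otherwise
};

// Mirrors the first loop above: follow prototype slots until the chain
// leaves the maps; the first non-map reached is the original prototype.
NodeSketch* FindRealPrototype(NodeSketch* map) {
  NodeSketch* current = map;
  while (current->is_map) current = current->prototype;
  return current;
}

int main() {
  NodeSketch proto = { false, 0 };      // the real prototype object
  NodeSketch root  = { true, &proto };  // root map of the transition tree
  NodeSketch child = { true, &root };   // map reached via a transition
  return FindRealPrototype(&child) == &proto ? 0 : 1;
}
```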
| 758 | 909 |
| 759 // ------------------------------------------------------------------------- | 910 // ------------------------------------------------------------------------- |
| 760 // Phase 2: Encode forwarding addresses. | 911 // Phase 2: Encode forwarding addresses. |
| 761 // When compacting, forwarding addresses for objects in old space and map | 912 // When compacting, forwarding addresses for objects in old space and map |
| 762 // space are encoded in their map pointer word (along with an encoding of | 913 // space are encoded in their map pointer word (along with an encoding of |
| 763 // their map pointers). | 914 // their map pointers). |
| 764 // | 915 // |
| 765 // 31 21 20 10 9 0 | 916 // 31 21 20 10 9 0 |
| 766 // +-----------------+------------------+-----------------+ | 917 // +-----------------+------------------+-----------------+ |
| 767 // |forwarding offset|page offset of map|page index of map| | 918 // |forwarding offset|page offset of map|page index of map| |
| (...skipping 999 matching lines...) |
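A minimal sketch of how such a packing could be encoded and decoded, assuming the 10/11/11-bit split implied by the diagram above (bits 0-9 page index of map, bits 10-20 page offset of map, bits 21-31 forwarding offset); the constant and function names are illustrative, not V8's MapWord API.

```cpp
#include <assert.h>
#include <stdint.h>

// Assumed field widths, taken from the bit diagram above.
const uint32_t kPageIndexBits = 10;
const uint32_t kPageOffsetBits = 11;

uint32_t Encode(uint32_t fwd_offset, uint32_t map_offset, uint32_t map_index) {
  return (fwd_offset << (kPageIndexBits + kPageOffsetBits)) |
         (map_offset << kPageIndexBits) | map_index;
}

int main() {
  uint32_t w = Encode(5, 7, 3);
  assert((w & ((1u << kPageIndexBits) - 1)) == 3);                      // index
  assert(((w >> kPageIndexBits) & ((1u << kPageOffsetBits) - 1)) == 7); // offset
  assert((w >> (kPageIndexBits + kPageOffsetBits)) == 5);               // fwd
  return 0;
}
```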
| 1767 | 1918 |
| 1768 void MarkCompactCollector::RebuildRSets() { | 1919 void MarkCompactCollector::RebuildRSets() { |
| 1769 #ifdef DEBUG | 1920 #ifdef DEBUG |
| 1770 ASSERT(state_ == RELOCATE_OBJECTS); | 1921 ASSERT(state_ == RELOCATE_OBJECTS); |
| 1771 state_ = REBUILD_RSETS; | 1922 state_ = REBUILD_RSETS; |
| 1772 #endif | 1923 #endif |
| 1773 Heap::RebuildRSets(); | 1924 Heap::RebuildRSets(); |
| 1774 } | 1925 } |
| 1775 | 1926 |
| 1776 } } // namespace v8::internal | 1927 } } // namespace v8::internal |