Chromium Code Reviews

Unified Diff: src/mark-compact.cc

Issue 8190: Revert changes 601 and 602. TBR (Closed)
Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: Created 12 years, 1 month ago
--- a/src/mark-compact.cc
+++ b/src/mark-compact.cc
@@ -1,10 +1,10 @@
 // Copyright 2006-2008 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
@@ -71,22 +71,20 @@
   // Rather than passing the tracer around we stash it in a static member
   // variable.
   tracer_ = tracer;
   Prepare();
   // Prepare has selected whether to compact the old generation or not.
   // Tell the tracer.
   if (IsCompacting()) tracer_->set_is_compacting();
 
   MarkLiveObjects();
 
-  if (FLAG_collect_maps) ClearNonLiveTransitions();
-
   SweepLargeObjectSpace();
 
   if (compacting_collection_) {
     EncodeForwardingAddresses();
 
     UpdatePointers();
 
     RelocateObjects();
 
     RebuildRSets();
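
For orientation, the context above is the collector's top-level pipeline in CollectGarbage: mark live objects, sweep the large-object space, and then, only when this cycle compacts, run the four compaction phases in order. A minimal standalone sketch of that control flow; every name below is an illustrative stand-in, not the real V8 routine:

#include <cstdio>

static bool compacting_collection = true;  // in V8, Prepare() decides this

static void MarkLiveObjects()           { std::puts("mark live objects"); }
static void SweepLargeObjectSpace()     { std::puts("sweep large object space"); }
static void EncodeForwardingAddresses() { std::puts("encode forwarding addresses"); }
static void UpdatePointers()            { std::puts("update pointers"); }
static void RelocateObjects()           { std::puts("relocate objects"); }
static void RebuildRSets()              { std::puts("rebuild remembered sets"); }

int main() {
  MarkLiveObjects();
  SweepLargeObjectSpace();
  // Phases 2-5 run only for a compacting collection.
  if (compacting_collection) {
    EncodeForwardingAddresses();
    UpdatePointers();
    RelocateObjects();
    RebuildRSets();
  }
  return 0;
}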
@@ -130,21 +128,20 @@
       old_gen_used += space->Size();
     }
     int old_gen_fragmentation =
         static_cast<int>((old_gen_recoverable * 100.0) / old_gen_used);
     if (old_gen_fragmentation > kFragmentationLimit) {
       compacting_collection_ = true;
     }
   }
 
   if (FLAG_never_compact) compacting_collection_ = false;
-  if (FLAG_collect_maps) CreateBackPointers();
 
 #ifdef DEBUG
   if (compacting_collection_) {
     // We will write bookkeeping information to the remembered set area
     // starting now.
     Page::set_rset_state(Page::NOT_IN_USE);
   }
 #endif
 
   PagedSpaces spaces;
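
The unchanged context above is the heuristic that upgrades a normal collection to a compacting one: recoverable old-generation bytes are expressed as an integer percentage of used old-generation bytes and compared against kFragmentationLimit. A worked example of that arithmetic; the limit value is assumed here, since the constant's definition is outside this hunk:

#include <cstdio>

int main() {
  const int kFragmentationLimit = 50;           // assumed threshold, percent
  long old_gen_recoverable = 6 * 1024 * 1024;   // bytes reclaimable as waste/free
  long old_gen_used        = 10 * 1024 * 1024;  // bytes currently in use

  int old_gen_fragmentation =
      static_cast<int>((old_gen_recoverable * 100.0) / old_gen_used);  // 60

  // 60 > 50, so this cycle would become a compacting collection.
  std::printf("fragmentation = %d%%, compacting = %s\n",
              old_gen_fragmentation,
              old_gen_fragmentation > kFragmentationLimit ? "yes" : "no");
  return 0;
}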
@@ -318,27 +315,23 @@
 
   // Retrieves the Code pointer from derived code entry.
   Code* CodeFromDerivedPointer(Address addr) {
     ASSERT(addr != NULL);
     return reinterpret_cast<Code*>(
         HeapObject::FromAddress(addr - Code::kHeaderSize));
   }
 
   // Visit an unmarked object.
   void VisitUnmarkedObject(HeapObject* obj) {
+    ASSERT(Heap::Contains(obj));
 #ifdef DEBUG
-    ASSERT(Heap::Contains(obj));
     MarkCompactCollector::UpdateLiveObjectCount(obj);
-    ASSERT(!obj->IsMarked());
-    // ASSERT(!obj->IsMap());  // Some maps are processed here.
-    // Their map transitions will be followed.  If we do a test
-    // here to treat maps separately, will there be a performance impact?
 #endif
     Map* map = obj->map();
     obj->SetMark();
     MarkCompactCollector::tracer()->increment_marked_count();
     // Mark the map pointer and the body.
     MarkCompactCollector::MarkObject(map);
     obj->IterateBody(map->instance_type(), obj->SizeFromMap(map), this);
   }
 
   // Visit all unmarked objects pointed to by [start, end).
@@ -425,113 +418,28 @@
 };
 
 
 void MarkCompactCollector::MarkUnmarkedObject(HeapObject* object) {
 #ifdef DEBUG
   UpdateLiveObjectCount(object);
 #endif
   ASSERT(!object->IsMarked());
   if (object->IsJSGlobalObject()) Counters::global_objects.Increment();
 
+  if (FLAG_cleanup_caches_in_maps_at_gc && object->IsMap()) {
+    Map::cast(object)->ClearCodeCache();
+  }
+
+  object->SetMark();
   tracer_->increment_marked_count();
   ASSERT(Heap::Contains(object));
-  if (object->IsMap()) {
-    if (FLAG_cleanup_caches_in_maps_at_gc) {
-      Map::cast(object)->ClearCodeCache();
-    }
-    object->SetMark();
-    if (FLAG_collect_maps) {
-      MarkMapContents(reinterpret_cast<Map*>(object));
-    } else {
-      marking_stack.Push(object);
-    }
-  } else {
-    object->SetMark();
-    marking_stack.Push(object);
-  }
-}
-
-
-void MarkCompactCollector::MarkMapContents(Map* map) {
-  MarkDescriptorArray(reinterpret_cast<DescriptorArray*>(
-      HeapObject::RawField(map, Map::kInstanceDescriptorsOffset)));
-
-  // Mark the Object* fields of the Map.
-  // Since the descriptor array has been marked already, it is fine
-  // that one of these fields contains a pointer to it.
-  MarkingVisitor visitor;  // Has no state or contents.
-  visitor.VisitPointers(&HeapObject::RawField(map, Map::kPrototypeOffset),
-                        &HeapObject::RawField(map, Map::kSize));
-}
-
-
-void MarkCompactCollector::MarkDescriptorArray(
-    DescriptorArray* descriptors) {
-  if (descriptors->IsMarked()) return;
-  // Empty descriptor array is marked as a root before any maps are marked.
-  ASSERT(descriptors != Heap::empty_descriptor_array());
-
-  tracer_->increment_marked_count();
-#ifdef DEBUG
-  UpdateLiveObjectCount(descriptors);
-#endif
-  descriptors->SetMark();
-
-  FixedArray* contents = reinterpret_cast<FixedArray*>(
-      descriptors->get(DescriptorArray::kContentArrayIndex));
-  ASSERT(contents->IsHeapObject());
-  ASSERT(!contents->IsMarked());
-  ASSERT(contents->IsFixedArray());
-  ASSERT(contents->length() >= 2);
-  tracer_->increment_marked_count();
-#ifdef DEBUG
-  UpdateLiveObjectCount(contents);
-#endif
-  contents->SetMark();
-  // Contents contains (value, details) pairs.  If the details say
-  // that the type of descriptor is MAP_TRANSITION, CONSTANT_TRANSITION,
-  // or NULL_DESCRIPTOR, we don't mark the value as live.  Only for
-  // type MAP_TRANSITION is the value an Object* (a Map*).
-  for (int i = 0; i < contents->length(); i += 2) {
-    // If the pair (value, details) at index i, i+1 is not
-    // a transition or null descriptor, mark the value.
-    PropertyDetails details(Smi::cast(contents->get(i + 1)));
-    if (details.type() < FIRST_PHANTOM_PROPERTY_TYPE) {
-      HeapObject* object = reinterpret_cast<HeapObject*>(contents->get(i));
-      if (object->IsHeapObject() && !object->IsMarked()) {
-        tracer_->increment_marked_count();
-#ifdef DEBUG
-        UpdateLiveObjectCount(object);
-#endif
-        object->SetMark();
-        marking_stack.Push(object);
-      }
-    }
-  }
-  // The DescriptorArray descriptors contains a pointer to its contents array,
-  // but the contents array is already marked.
-  marking_stack.Push(descriptors);
-}
-
-
-void MarkCompactCollector::CreateBackPointers() {
-  HeapObjectIterator iterator(Heap::map_space());
-  while (iterator.has_next()) {
-    Object* next_object = iterator.next();
-    if (next_object->IsMap()) {  // Could also be ByteArray on free list.
-      Map* map = Map::cast(next_object);
-      if (map->instance_type() >= FIRST_JS_OBJECT_TYPE &&
-          map->instance_type() <= LAST_JS_OBJECT_TYPE) {
-        map->CreateBackPointers();
-      }
-    }
-  }
+  marking_stack.Push(object);
 }
 
 
 static int OverflowObjectSize(HeapObject* obj) {
   // Recover the normal map pointer, it might be marked as live and
   // overflowed.
   MapWord map_word = obj->map_word();
   map_word.ClearMark();
   map_word.ClearOverflow();
   return obj->SizeFromMap(map_word.ToMap());
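
After the revert, MarkUnmarkedObject treats a map like any other heap object: clear its code cache if requested, set the mark bit, count it, and push it on the marking stack so its body is scanned later; the special-cased MarkMapContents/MarkDescriptorArray paths are gone. The transitive closure is then computed by draining that stack. A self-contained sketch of the worklist pattern, using a toy object model rather than V8's:

#include <stack>
#include <vector>

struct Obj {
  bool marked = false;
  std::vector<Obj*> fields;  // outgoing pointers, including the map slot
};

static std::stack<Obj*> marking_stack;

// Mirrors the reverted shape: mark first, defer scanning via the stack.
static void MarkUnmarkedObject(Obj* o) {
  o->marked = true;
  marking_stack.push(o);
}

// Pop until empty; each pop scans one object's fields and marks the
// unmarked targets, which in turn get pushed.
static void ProcessMarkingStack() {
  while (!marking_stack.empty()) {
    Obj* o = marking_stack.top();
    marking_stack.pop();
    for (Obj* f : o->fields)
      if (f != nullptr && !f->marked) MarkUnmarkedObject(f);
  }
}

int main() {
  Obj a, b, c;
  a.fields = {&b, &c};
  b.fields = {&c};
  MarkUnmarkedObject(&a);  // a plays the role of a root
  ProcessMarkingStack();   // transitively marks b and c
  return (b.marked && c.marked) ? 0 : 1;
}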
@@ -760,49 +668,49 @@
 
 #ifdef DEBUG
   if (FLAG_verify_global_gc) VerifyHeapAfterMarkingPhase();
 #endif
 
   // Remove object groups after marking phase.
   GlobalHandles::RemoveObjectGroups();
 }
 
 
-static int CountMarkedCallback(HeapObject* obj) {
-  MapWord map_word = obj->map_word();
-  map_word.ClearMark();
-  return obj->SizeFromMap(map_word.ToMap());
-}
-
-
 #ifdef DEBUG
 void MarkCompactCollector::UpdateLiveObjectCount(HeapObject* obj) {
   live_bytes_ += obj->Size();
   if (Heap::new_space()->Contains(obj)) {
     live_young_objects_++;
   } else if (Heap::map_space()->Contains(obj)) {
     ASSERT(obj->IsMap());
     live_map_objects_++;
   } else if (Heap::old_pointer_space()->Contains(obj)) {
     live_old_pointer_objects_++;
   } else if (Heap::old_data_space()->Contains(obj)) {
     live_old_data_objects_++;
   } else if (Heap::code_space()->Contains(obj)) {
     live_code_objects_++;
   } else if (Heap::lo_space()->Contains(obj)) {
     live_lo_objects_++;
   } else {
     UNREACHABLE();
   }
 }
 
 
+static int CountMarkedCallback(HeapObject* obj) {
+  MapWord map_word = obj->map_word();
+  map_word.ClearMark();
+  return obj->SizeFromMap(map_word.ToMap());
+}
+
+
 void MarkCompactCollector::VerifyHeapAfterMarkingPhase() {
   Heap::new_space()->Verify();
   Heap::old_pointer_space()->Verify();
   Heap::old_data_space()->Verify();
   Heap::code_space()->Verify();
   Heap::map_space()->Verify();
 
   int live_objects;
 
 #define CHECK_LIVE_OBJECTS(it, expected) \
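
CountMarkedCallback moves inside the #ifdef DEBUG block on the new side: once the revert removes ClearNonLiveTransitions, its only remaining caller is the debug-only verification code. The function has to compute an object's size while the object's map word still carries the mark bit, so it clears the bit in a local copy before using the word as a map pointer. A standalone illustration of that map-word trick; the choice of the lowest pointer bit as the mark bit is an assumption for the sketch:

#include <cassert>
#include <cstdint>

constexpr uintptr_t kMarkBit = 1;  // assumed: lowest bit of the map word

static uintptr_t SetMark(uintptr_t map_word)   { return map_word | kMarkBit; }
static uintptr_t ClearMark(uintptr_t map_word) { return map_word & ~kMarkBit; }

int main() {
  alignas(8) static int fake_map = 0;  // stands in for a Map object
  uintptr_t word = reinterpret_cast<uintptr_t>(&fake_map);

  word = SetMark(word);  // the object is now "marked"; the raw pointer is unusable
  // Clearing a local copy recovers the pointer without unmarking the object.
  assert(reinterpret_cast<int*>(ClearMark(word)) == &fake_map);
  return 0;
}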
@@ -840,77 +748,20 @@
 void MarkCompactCollector::SweepLargeObjectSpace() {
 #ifdef DEBUG
   ASSERT(state_ == MARK_LIVE_OBJECTS);
   state_ =
       compacting_collection_ ? ENCODE_FORWARDING_ADDRESSES : SWEEP_SPACES;
 #endif
   // Deallocate unmarked objects and clear marked bits for marked objects.
   Heap::lo_space()->FreeUnmarkedObjects();
 }
 
-// Safe to use during marking phase only.
-bool MarkCompactCollector::SafeIsMap(HeapObject* object) {
-  MapWord metamap = object->map_word();
-  metamap.ClearMark();
-  return metamap.ToMap()->instance_type() == MAP_TYPE;
-}
-
-void MarkCompactCollector::ClearNonLiveTransitions() {
-  HeapObjectIterator map_iterator(Heap::map_space(), &CountMarkedCallback);
-  // Iterate over the map space, setting map transitions that go from
-  // a marked map to an unmarked map to null transitions.  At the same time,
-  // set all the prototype fields of maps back to their original value,
-  // dropping the back pointers temporarily stored in the prototype field.
-  // Setting the prototype field requires following the linked list of
-  // back pointers, reversing them all at once.  This allows us to find
-  // those maps with map transitions that need to be nulled, and only
-  // scan the descriptor arrays of those maps, not all maps.
-  // All of these actions are carried out only on maps of JSObjects
-  // and related subtypes.
-  while (map_iterator.has_next()) {
-    Map* map = reinterpret_cast<Map*>(map_iterator.next());
-    if (!map->IsMarked() && map->IsByteArray()) continue;
-
-    ASSERT(SafeIsMap(map));
-    // Only JSObject and subtypes have map transitions and back pointers.
-    if (map->instance_type() < FIRST_JS_OBJECT_TYPE) continue;
-    if (map->instance_type() > LAST_JS_OBJECT_TYPE) continue;
-    // Follow the chain of back pointers to find the prototype.
-    Map* current = map;
-    while (SafeIsMap(current)) {
-      current = reinterpret_cast<Map*>(current->prototype());
-      ASSERT(current->IsHeapObject());
-    }
-    Object* real_prototype = current;
-
-    // Follow back pointers, setting them to prototype,
-    // clearing map transitions when necessary.
-    current = map;
-    bool on_dead_path = !current->IsMarked();
-    Object* next;
-    while (SafeIsMap(current)) {
-      next = current->prototype();
-      // There should never be a dead map above a live map.
-      ASSERT(on_dead_path || current->IsMarked());
-
-      // A live map above a dead map indicates a dead transition.
-      // This test will always be false on the first iteration.
-      if (on_dead_path && current->IsMarked()) {
-        on_dead_path = false;
-        current->ClearNonLiveTransitions(real_prototype);
-      }
-      HeapObject::RawField(current, Map::kPrototypeOffset) =
-          real_prototype;
-      current = reinterpret_cast<Map*>(next);
-    }
-  }
-}
 
 // -------------------------------------------------------------------------
 // Phase 2: Encode forwarding addresses.
 // When compacting, forwarding addresses for objects in old space and map
 // space are encoded in their map pointer word (along with an encoding of
 // their map pointers).
 //
 //  31                21 20              10 9               0
 // +-----------------+------------------+-----------------+
 // |forwarding offset|page offset of map|page index of map|
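
The comment block above describes how a compacting collection packs three fields into each 32-bit map word: an 11-bit forwarding offset (bits 31-21), an 11-bit page offset of the map (bits 20-10), and a 10-bit page index of the map (bits 9-0). A sketch of packing and unpacking under exactly this field split; it is illustrative only, the real encoding lives in V8's MapWord:

#include <cassert>
#include <cstdint>

static uint32_t Encode(uint32_t fwd_offset, uint32_t map_page_offset,
                       uint32_t map_page_index) {
  assert(fwd_offset < (1u << 11));       // must fit in bits 31-21
  assert(map_page_offset < (1u << 11));  // must fit in bits 20-10
  assert(map_page_index < (1u << 10));   // must fit in bits 9-0
  return (fwd_offset << 21) | (map_page_offset << 10) | map_page_index;
}

static uint32_t ForwardingOffset(uint32_t w) { return w >> 21; }
static uint32_t MapPageOffset(uint32_t w)    { return (w >> 10) & 0x7FF; }
static uint32_t MapPageIndex(uint32_t w)     { return w & 0x3FF; }

int main() {
  uint32_t w = Encode(0x123, 0x456, 0x89);
  assert(ForwardingOffset(w) == 0x123);
  assert(MapPageOffset(w)    == 0x456);
  assert(MapPageIndex(w)     == 0x89);
  return 0;
}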
@@ -1916,10 +1767,10 @@
 
 void MarkCompactCollector::RebuildRSets() {
 #ifdef DEBUG
   ASSERT(state_ == RELOCATE_OBJECTS);
   state_ = REBUILD_RSETS;
 #endif
   Heap::RebuildRSets();
 }
 
 } }  // namespace v8::internal