Chromium Code Reviews

Diff: src/heap.cc

Issue 113462: Fix debug builds. (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: Created 11 years, 7 months ago
 // Copyright 2009 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 519 matching lines...)
  private:
   void ScavengePointer(Object** p) {
     Object* object = *p;
     if (!Heap::InNewSpace(object)) return;
     Heap::ScavengeObject(reinterpret_cast<HeapObject**>(p),
                          reinterpret_cast<HeapObject*>(object));
   }
 };


+// A queue of pointers and maps of to-be-promoted objects during a
+// scavenge collection.
+class PromotionQueue {
+ public:
+  void Initialize(Address start_address) {
+    front_ = rear_ = reinterpret_cast<HeapObject**>(start_address);
+  }
+
+  bool is_empty() { return front_ <= rear_; }
+
+  void insert(HeapObject* object, Map* map) {
+    *(--rear_) = object;
+    *(--rear_) = map;
+    // Assert no overflow into live objects.
+    ASSERT(reinterpret_cast<Address>(rear_) >= Heap::new_space()->top());
+  }
+
+  void remove(HeapObject** object, Map** map) {
+    *object = *(--front_);
+    *map = Map::cast(*(--front_));
+    // Assert no underflow.
+    ASSERT(front_ >= rear_);
+  }
+
+ private:
+  // The front of the queue is higher in memory than the rear.
+  HeapObject** front_;
+  HeapObject** rear_;
+};
+
+
 // Shared state read by the scavenge collector and set by ScavengeObject.
-static Address promoted_rear = NULL;
+static PromotionQueue promotion_queue;


 #ifdef DEBUG
 // Visitor class to verify pointers in code or data space do not point into
 // new space.
 class VerifyNonPointerSpacePointersVisitor: public ObjectVisitor {
  public:
   void VisitPointers(Object** start, Object**end) {
     for (Object** current = start; current < end; current++) {
       if ((*current)->IsHeapObject()) {
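Note: for readers unfamiliar with the layout the new class relies on, here is a minimal, standalone sketch of a queue that grows downward from a fixed high address and stores two machine words per entry, which is the same shape PromotionQueue lays over the top of the to space. This is plain C++ with no V8 types; every name (SimplePromotionQueue, payload, tag) is invented for illustration.

#include <cassert>

// Illustrative only: a downward-growing queue of (payload, tag) word pairs,
// mirroring how PromotionQueue stores (object, map) pairs below the high end
// of the to space.
class SimplePromotionQueue {
 public:
  // 'high_end' points one past the highest usable slot.
  void Initialize(void** high_end) { front_ = rear_ = high_end; }

  bool is_empty() const { return front_ <= rear_; }

  void insert(void* payload, void* tag) {
    *(--rear_) = payload;   // higher address of the pair
    *(--rear_) = tag;       // lower address of the pair
  }

  void remove(void** payload, void** tag) {
    *payload = *(--front_);  // oldest entry is read first (FIFO order)
    *tag = *(--front_);
    assert(front_ >= rear_);
  }

 private:
  void** front_;  // the front is higher in memory than the rear
  void** rear_;
};

int main() {
  void* storage[16];
  SimplePromotionQueue q;
  q.Initialize(storage + 16);  // the queue grows down from the top
  int a = 1, b = 2;
  q.insert(&a, &b);
  void* payload;
  void* tag;
  q.remove(&payload, &tag);
  assert(payload == &a && tag == &b);
  assert(q.is_empty());
  return 0;
}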
(...skipping 65 matching lines...)
   // We treat the top of the to space as a queue of addresses of
   // promoted objects. The addresses of newly promoted and unswept
   // objects lie between a 'front' mark and a 'rear' mark that is
   // updated as a side effect of promoting an object.
   //
   // There is guaranteed to be enough room at the top of the to space
   // for the addresses of promoted objects: every object promoted
   // frees up its size in bytes from the top of the new space, and
   // objects are at least one pointer in size.
   Address new_space_front = new_space_.ToSpaceLow();
-  Address promoted_front = new_space_.ToSpaceHigh();
-  promoted_rear = new_space_.ToSpaceHigh();
+  promotion_queue.Initialize(new_space_.ToSpaceHigh());

   ScavengeVisitor scavenge_visitor;
   // Copy roots.
   IterateRoots(&scavenge_visitor);

   // Copy objects reachable from weak pointers.
   GlobalHandles::IterateWeakRoots(&scavenge_visitor);

   // Copy objects reachable from the old generation. By definition,
   // there are no intergenerational pointers in code or data spaces.
   IterateRSet(old_pointer_space_, &ScavengePointer);
   IterateRSet(map_space_, &ScavengePointer);
   lo_space_->IterateRSet(&ScavengePointer);

   do {
     ASSERT(new_space_front <= new_space_.top());
-    ASSERT(promoted_front >= promoted_rear);

     // The addresses new_space_front and new_space_.top() define a
     // queue of unprocessed copied objects. Process them until the
     // queue is empty.
     while (new_space_front < new_space_.top()) {
       HeapObject* object = HeapObject::FromAddress(new_space_front);
       object->Iterate(&scavenge_visitor);
       new_space_front += object->Size();
     }

-    // The addresses promoted_front and promoted_rear define a queue
-    // of unprocessed addresses of promoted objects. Process them
-    // until the queue is empty.
-    while (promoted_front > promoted_rear) {
-      promoted_front -= kPointerSize;
-      HeapObject* object =
-          HeapObject::cast(Memory::Object_at(promoted_front));
-      object->Iterate(&scavenge_visitor);
-      UpdateRSet(object);
+    // Promote and process all the to-be-promoted objects.
+    while (!promotion_queue.is_empty()) {
+      HeapObject* source;
+      Map* map;
+      promotion_queue.remove(&source, &map);
+      // Copy the from-space object to its new location (given by the
+      // forwarding address) and fix its map.
+      HeapObject* target = source->map_word().ToForwardingAddress();
+      CopyBlock(reinterpret_cast<Object**>(target->address()),
+                reinterpret_cast<Object**>(source->address()),
+                source->SizeFromMap(map));
+      target->set_map(map);
+
+#if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
+      // Update NewSpace stats if necessary.
+      RecordCopiedObject(target);
+#endif
+      // Visit the newly copied object for pointers to new space.
+      target->Iterate(&scavenge_visitor);
+      UpdateRSet(target);
     }

     // Take another spin if there are now unswept objects in new space
     // (there are currently no more unswept promoted objects).
   } while (new_space_front < new_space_.top());

   // Set age mark.
   new_space_.set_age_mark(new_space_.top());

   LOG(ResourceEvent("scavenge", "end"));
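Note: the restructured loop above alternates between two work lists until both are drained: the unswept region of copied objects in the to space, and the promotion queue. Processing items from one list can add items to the other, so the outer loop repeats until nothing new appears. A standalone sketch of just that control structure is shown below; WorkLists, copied, promoted, and Drain are invented stand-ins, not V8 API.

#include <deque>
#include <iostream>

// Illustrative only: the double-drain structure of Heap::Scavenge() after
// this patch.  Processing an item from either list may create work for the
// other list, so the outer loop spins until everything is quiescent.
struct WorkLists {
  std::deque<int> copied;    // stands in for the new_space_front..top() region
  std::deque<int> promoted;  // stands in for promotion_queue
};

void Drain(WorkLists* w) {
  do {
    // Process newly copied objects; visiting one may "promote" more work.
    while (!w->copied.empty()) {
      int item = w->copied.front();
      w->copied.pop_front();
      if (item > 0) w->promoted.push_back(item - 1);
    }
    // Process promoted objects; visiting one may "copy" more objects.
    while (!w->promoted.empty()) {
      int item = w->promoted.front();
      w->promoted.pop_front();
      if (item > 0) w->copied.push_back(item - 1);
    }
    // Take another spin only if promoted processing produced new copies.
  } while (!w->copied.empty());
}

int main() {
  WorkLists w;
  w.copied = {3};
  Drain(&w);
  std::cout << "done: both work lists empty\n";
  return 0;
}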
(...skipping 136 matching lines...)
                                 HeapObject* target,
                                 int size) {
   // Copy the content of source to target.
   CopyBlock(reinterpret_cast<Object**>(target->address()),
             reinterpret_cast<Object**>(source->address()),
             size);

   // Set the forwarding address.
   source->set_map_word(MapWord::FromForwardingAddress(target));

+#if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
   // Update NewSpace stats if necessary.
-#if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
   RecordCopiedObject(target);
 #endif

   return target;
 }


-// Inlined function.
-void Heap::ScavengeObject(HeapObject** p, HeapObject* object) {
-  ASSERT(InFromSpace(object));
-
-  // We use the first word (where the map pointer usually is) of a heap
-  // object to record the forwarding pointer. A forwarding pointer can
-  // point to an old space, the code space, or the to space of the new
-  // generation.
-  MapWord first_word = object->map_word();
-
-  // If the first word is a forwarding address, the object has already been
-  // copied.
-  if (first_word.IsForwardingAddress()) {
-    *p = first_word.ToForwardingAddress();
-    return;
-  }
-
-  // Call the slow part of scavenge object.
-  return ScavengeObjectSlow(p, object);
-}
-
-
 static inline bool IsShortcutCandidate(HeapObject* object, Map* map) {
   STATIC_ASSERT(kNotStringTag != 0 && kSymbolTag != 0);
   ASSERT(object->map() == map);
   InstanceType type = map->instance_type();
   if ((type & kShortcutTypeMask) != kShortcutTypeTag) return false;
   ASSERT(object->IsString() && !object->IsSymbol());
   return ConsString::cast(object)->unchecked_second() == Heap::empty_string();
 }


(...skipping 10 matching lines...)
     // active semispace of the young generation and not already copied.
     if (!InNewSpace(object)) return;
     first_word = object->map_word();
     if (first_word.IsForwardingAddress()) {
       *p = first_word.ToForwardingAddress();
       return;
     }
   }

   int object_size = object->SizeFromMap(first_word.ToMap());
+  // We rely on live objects in new space to be at least two pointers,
+  // so we can store the from-space address and map pointer of promoted
+  // objects in the to space.
+  ASSERT(object_size >= 2 * kPointerSize);
+
   // If the object should be promoted, we try to copy it to old space.
   if (ShouldBePromoted(object->address(), object_size)) {
     OldSpace* target_space = Heap::TargetSpace(object);
     ASSERT(target_space == Heap::old_pointer_space_ ||
            target_space == Heap::old_data_space_);
     Object* result = target_space->AllocateRaw(object_size);
     if (!result->IsFailure()) {
-      *p = MigrateObject(object, HeapObject::cast(result), object_size);
+      HeapObject* target = HeapObject::cast(result);
       if (target_space == Heap::old_pointer_space_) {
-        // Record the object's address at the top of the to space, to allow
-        // it to be swept by the scavenger.
-        promoted_rear -= kPointerSize;
-        Memory::Object_at(promoted_rear) = *p;
+        // Save the from-space object pointer and its map pointer at the
+        // top of the to space to be swept and copied later. Write the
+        // forwarding address over the map word of the from-space
+        // object.
+        promotion_queue.insert(object, first_word.ToMap());
+        object->set_map_word(MapWord::FromForwardingAddress(target));
+
+        // Give the space allocated for the result a proper map by
+        // treating it as a free list node (not linked into the free
+        // list).
+        FreeListNode* node = FreeListNode::FromAddress(target->address());
+        node->set_size(object_size);
+
+        *p = target;
       } else {
+        // Objects promoted to the data space can be copied immediately
+        // and not revisited---we will never sweep that space for
+        // pointers and the copied objects do not contain pointers to
+        // new space objects.
+        *p = MigrateObject(object, target, object_size);
 #ifdef DEBUG
-        // Objects promoted to the data space should not have pointers to
-        // new space.
         VerifyNonPointerSpacePointersVisitor v;
         (*p)->Iterate(&v);
 #endif
       }
       return;
     }
   }

   // The object should remain in new space or the old space allocation failed.
   Object* result = new_space_.AllocateRaw(object_size);
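Note: the new promotion path above defers the actual copy. It records the (object, map) pair in the promotion queue and immediately clobbers the from-space object's map word with the forwarding address, which is exactly why the map must be saved on the side: by the time the queue is drained in Heap::Scavenge(), the object's own first word no longer tells us its size. A condensed, standalone sketch of that sequence is below (the queue itself is elided to a local variable); ToyMap and ToyObject are invented types, not V8's.

#include <cassert>
#include <cstddef>
#include <cstring>
#include <stdint.h>

// Illustrative only: deferred-copy promotion.  The from-space map word is
// overwritten with a forwarding address, so the map (and hence the size) is
// saved separately until the object is actually copied.
struct ToyMap { std::size_t size_in_bytes; };

struct ToyObject {
  uintptr_t first_word;  // map pointer, or forwarding address | 1
  char payload[24];
};

int main() {
  ToyMap map = {sizeof(ToyObject)};
  ToyObject from;
  from.first_word = reinterpret_cast<uintptr_t>(&map);
  std::memset(from.payload, 0xAB, sizeof(from.payload));

  // "Allocate" the target slot in old space, save the map, and record only
  // the forwarding address now; the bytes are copied later.
  ToyObject target_slot;
  ToyMap* saved_map = reinterpret_cast<ToyMap*>(from.first_word);
  from.first_word = reinterpret_cast<uintptr_t>(&target_slot) | 1;

  // ... later, while draining the promotion queue ...
  ToyObject* target = reinterpret_cast<ToyObject*>(
      from.first_word & ~static_cast<uintptr_t>(1));
  std::memcpy(target, &from, saved_map->size_in_bytes);        // copy raw bytes
  target->first_word = reinterpret_cast<uintptr_t>(saved_map);  // fix the map

  assert(target == &target_slot);
  assert(target->payload[0] == static_cast<char>(0xAB));
  return 0;
}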
(...skipping 2486 matching lines...)
 #ifdef DEBUG
 bool Heap::GarbageCollectionGreedyCheck() {
   ASSERT(FLAG_gc_greedy);
   if (Bootstrapper::IsActive()) return true;
   if (disallow_allocation_failure()) return true;
   return CollectGarbage(0, NEW_SPACE);
 }
 #endif

 } }  // namespace v8::internal
