Chromium Code Reviews

Diff: src/heap.cc

Issue 113262: Revert r1900, r1897 and r1895 which are all gc changes. The changes... (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: Created 11 years, 7 months ago
 // Copyright 2009 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 520 matching lines...)
   void ScavengePointer(Object** p) {
     Object* object = *p;
     if (!Heap::InNewSpace(object)) return;
     Heap::ScavengeObject(reinterpret_cast<HeapObject**>(p),
                          reinterpret_cast<HeapObject*>(object));
   }
 };


 // Shared state read by the scavenge collector and set by ScavengeObject.
-static Address promoted_rear = NULL;
+static Address promoted_top = NULL;


 #ifdef DEBUG
 // Visitor class to verify pointers in code or data space do not point into
 // new space.
 class VerifyNonPointerSpacePointersVisitor: public ObjectVisitor {
  public:
   void VisitPointers(Object** start, Object**end) {
     for (Object** current = start; current < end; current++) {
       if ((*current)->IsHeapObject()) {
         ASSERT(!Heap::InNewSpace(HeapObject::cast(*current)));
       }
     }
   }
 };
-
-
-static void VerifyNonPointerSpacePointers() {
-  // Verify that there are no pointers to new space in spaces where we
-  // do not expect them.
-  VerifyNonPointerSpacePointersVisitor v;
-  HeapObjectIterator code_it(Heap::code_space());
-  while (code_it.has_next()) {
-    HeapObject* object = code_it.next();
-    if (object->IsCode()) {
-      Code::cast(object)->ConvertICTargetsFromAddressToObject();
-      object->Iterate(&v);
-      Code::cast(object)->ConvertICTargetsFromObjectToAddress();
-    } else {
-      // If we find non-code objects in code space (e.g., free list
-      // nodes) we want to verify them as well.
-      object->Iterate(&v);
-    }
-  }
-
-  HeapObjectIterator data_it(Heap::old_data_space());
-  while (data_it.has_next()) data_it.next()->Iterate(&v);
-}
 #endif

 void Heap::Scavenge() {
 #ifdef DEBUG
-  if (FLAG_enable_slow_asserts) VerifyNonPointerSpacePointers();
+  if (FLAG_enable_slow_asserts) {
+    VerifyNonPointerSpacePointersVisitor v;
+    HeapObjectIterator it(code_space_);
+    while (it.has_next()) {
+      HeapObject* object = it.next();
+      if (object->IsCode()) {
+        Code::cast(object)->ConvertICTargetsFromAddressToObject();
+      }
+      object->Iterate(&v);
+      if (object->IsCode()) {
+        Code::cast(object)->ConvertICTargetsFromObjectToAddress();
+      }
+    }
+  }
 #endif

   gc_state_ = SCAVENGE;

   // Implements Cheney's copying algorithm
   LOG(ResourceEvent("scavenge", "begin"));

   scavenge_count_++;
   if (new_space_.Capacity() < new_space_.MaximumCapacity() &&
       scavenge_count_ > new_space_growth_limit_) {
     // Double the size of the new space, and double the limit. The next
     // doubling attempt will occur after the current new_space_growth_limit_
     // more collections.
     // TODO(1240712): NewSpace::Double has a return value which is
     // ignored here.
     new_space_.Double();
     new_space_growth_limit_ *= 2;
   }

   // Flip the semispaces. After flipping, to space is empty, from space has
   // live objects.
   new_space_.Flip();
   new_space_.ResetAllocationInfo();

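Note: the flip above is the heart of the Cheney scheme named in the comment: after the flip, the empty to space doubles as the work queue, a scan mark chases the allocation top, and forwarding addresses are left behind in from space. A compact standalone sketch of that idea follows; it uses invented toy types (Obj, ScavengeSlot), not V8's Heap/NewSpace classes, and it omits promotion into old space.

#include <cstddef>
#include <cstdio>
#include <vector>

struct Obj {
  Obj* fields[2];   // outgoing pointers (NULL if unused)
  Obj* forwarding;  // set once the object has been copied
};

const size_t kCapacity = 64;
static Obj from_space[kCapacity];
static Obj to_space[kCapacity];
static size_t to_top = 0;  // allocation top in the to space

static bool InFromSpace(Obj* o) {
  return o >= from_space && o < from_space + kCapacity;
}

// Copy the referenced object (or reuse its forwarding address) and fix the slot.
static void ScavengeSlot(Obj** slot) {
  Obj* o = *slot;
  if (o == NULL || !InFromSpace(o)) return;
  if (o->forwarding == NULL) {
    to_space[to_top] = *o;              // shallow copy; fields fixed when scanned
    o->forwarding = &to_space[to_top];  // leave a forwarding address behind
    to_top++;
  }
  *slot = o->forwarding;
}

// Cheney's algorithm: copy the roots, then let a scan mark chase the
// allocation top; every object between them still has unprocessed fields.
static void Scavenge(std::vector<Obj**>& roots) {
  for (size_t i = 0; i < roots.size(); i++) ScavengeSlot(roots[i]);
  for (size_t scan = 0; scan < to_top; scan++) {
    ScavengeSlot(&to_space[scan].fields[0]);
    ScavengeSlot(&to_space[scan].fields[1]);
  }
}

int main() {
  from_space[0].fields[0] = &from_space[1];  // root object references a second one
  from_space[1].fields[0] = &from_space[0];  // which points back, forming a cycle
  Obj* root = &from_space[0];
  std::vector<Obj**> roots(1, &root);
  Scavenge(roots);
  printf("%d objects survived\n", (int)to_top);  // prints: 2 objects survived
  return 0;
}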
-  // We need to sweep newly copied objects which can be either in the
-  // to space or promoted to the old generation. For to-space
-  // objects, we treat the bottom of the to space as a queue. Newly
-  // copied and unswept objects lie between a 'front' mark and the
-  // allocation pointer.
-  //
-  // Promoted objects can go into various old-generation spaces, and
-  // can be allocated internally in the spaces (from the free list).
-  // We treat the top of the to space as a queue of addresses of
-  // promoted objects. The addresses of newly promoted and unswept
-  // objects lie between a 'front' mark and a 'rear' mark that is
-  // updated as a side effect of promoting an object.
-  //
-  // There is guaranteed to be enough room at the top of the to space
-  // for the addresses of promoted objects: every object promoted
-  // frees up its size in bytes from the top of the new space, and
-  // objects are at least one pointer in size.
-  Address new_space_front = new_space_.ToSpaceLow();
-  Address promoted_front = new_space_.ToSpaceHigh();
-  promoted_rear = new_space_.ToSpaceHigh();
+  // We need to sweep newly copied objects which can be in either the to space
+  // or the old space. For to space objects, we use a mark. Newly copied
+  // objects lie between the mark and the allocation top. For objects
+  // promoted to old space, we write their addresses downward from the top of
+  // the new space. Sweeping newly promoted objects requires an allocation
+  // pointer and a mark. Note that the allocation pointer 'top' actually
+  // moves downward from the high address in the to space.
+  //
+  // There is guaranteed to be enough room at the top of the to space for the
+  // addresses of promoted objects: every object promoted frees up its size in
+  // bytes from the top of the new space, and objects are at least one pointer
+  // in size. Using the new space to record promoted addresses makes the
+  // scavenge collector agnostic to the allocation strategy (eg, linear or
+  // free-list) used in old space.
+  Address new_mark = new_space_.ToSpaceLow();
+  Address promoted_mark = new_space_.ToSpaceHigh();
+  promoted_top = new_space_.ToSpaceHigh();

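Note: the "enough room" claim restored in the comment is an arithmetic invariant: a promoted object of size S leaves S unused bytes in the to space but consumes only kPointerSize <= S bytes for its address record, so the upward allocation pointer and the downward record pointer can never cross. A small self-contained check of that invariant, using made-up sizes and a toy promotion policy rather than V8 code:

#include <cassert>
#include <cstddef>

const size_t kPointerSize = sizeof(void*);
const size_t kToSpaceSize = 1024 * kPointerSize;

int main() {
  size_t alloc_top = 0;              // copied objects grow upward from ToSpaceLow
  size_t record_top = kToSpaceSize;  // address records grow downward from ToSpaceHigh
  size_t live_bytes = kToSpaceSize;  // live from-space data never exceeds to-space size

  // Process live objects of increasing size, promoting every other one.
  size_t i = 0;
  for (size_t size = kPointerSize; size <= live_bytes; size += kPointerSize, i++) {
    live_bytes -= size;
    if (i % 2 == 0) {
      alloc_top += size;             // object copied into the to space
    } else {
      record_top -= kPointerSize;    // only the promoted object's address is stored
    }
    // A promoted object spends at most the kPointerSize <= size bytes it would
    // otherwise have occupied, so the two pointers never meet.
    assert(alloc_top <= record_top);
  }
  return 0;
}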
   ScavengeVisitor scavenge_visitor;
   // Copy roots.
   IterateRoots(&scavenge_visitor);

-  // Copy objects reachable from weak pointers.
-  GlobalHandles::IterateWeakRoots(&scavenge_visitor);
-
-  // Copy objects reachable from the old generation. By definition,
-  // there are no intergenerational pointers in code or data spaces.
+  // Copy objects reachable from the old generation. By definition, there
+  // are no intergenerational pointers in code or data spaces.
   IterateRSet(old_pointer_space_, &ScavengePointer);
   IterateRSet(map_space_, &ScavengePointer);
   lo_space_->IterateRSet(&ScavengePointer);

-  do {
-    ASSERT(new_space_front <= new_space_.top());
-    ASSERT(promoted_front >= promoted_rear);
-
-    // The addresses new_space_front and new_space_.top() define a
-    // queue of unprocessed copied objects. Process them until the
-    // queue is empty.
-    while (new_space_front < new_space_.top()) {
-      HeapObject* object = HeapObject::FromAddress(new_space_front);
-      object->Iterate(&scavenge_visitor);
-      new_space_front += object->Size();
-    }
-
-    // The addresses promoted_front and promoted_rear define a queue
-    // of unprocessed addresses of promoted objects. Process them
-    // until the queue is empty.
-    while (promoted_front > promoted_rear) {
-      promoted_front -= kPointerSize;
-      HeapObject* object =
-          HeapObject::cast(Memory::Object_at(promoted_front));
-      object->Iterate(&scavenge_visitor);
-      UpdateRSet(object);
-    }
-
-    // Take another spin if there are now unswept objects in new space
-    // (there are currently no more unswept promoted objects).
-  } while (new_space_front < new_space_.top());
+  bool has_processed_weak_pointers = false;
+
+  while (true) {
+    ASSERT(new_mark <= new_space_.top());
+    ASSERT(promoted_mark >= promoted_top);
+
+    // Copy objects reachable from newly copied objects.
+    while (new_mark < new_space_.top() || promoted_mark > promoted_top) {
+      // Sweep newly copied objects in the to space. The allocation pointer
+      // can change during sweeping.
+      Address previous_top = new_space_.top();
+      SemiSpaceIterator new_it(new_space(), new_mark);
+      while (new_it.has_next()) {
+        new_it.next()->Iterate(&scavenge_visitor);
+      }
+      new_mark = previous_top;
+
+      // Sweep newly copied objects in the old space. The promotion 'top'
+      // pointer could change during sweeping.
+      previous_top = promoted_top;
+      for (Address current = promoted_mark - kPointerSize;
+           current >= previous_top;
+           current -= kPointerSize) {
+        HeapObject* object = HeapObject::cast(Memory::Object_at(current));
+        object->Iterate(&scavenge_visitor);
+        UpdateRSet(object);
+      }
+      promoted_mark = previous_top;
+    }
+
+    if (has_processed_weak_pointers) break;  // We are done.
+    // Copy objects reachable from weak pointers.
+    GlobalHandles::IterateWeakRoots(&scavenge_visitor);
+    has_processed_weak_pointers = true;
+  }
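Note: the restored loop has a two-phase shape: drain everything reachable from strong roots and remembered sets to a fixed point, visit weak roots exactly once, then drain whatever that revives. A minimal control-flow sketch of that pattern follows; the Node type, Copy/Drain helpers, and the graph are invented stand-ins for the to-space and promoted-address sweeps, not V8 code.

#include <cassert>
#include <cstddef>
#include <vector>

struct Node {
  std::vector<Node*> children;
  bool copied;
  Node() : copied(false) {}
};

static std::vector<Node*> work_list;  // stands in for the unswept copied/promoted queues

static void Copy(Node* n) {
  if (n->copied) return;
  n->copied = true;
  work_list.push_back(n);
}

static void Drain() {
  while (!work_list.empty()) {
    Node* n = work_list.back();
    work_list.pop_back();
    for (size_t i = 0; i < n->children.size(); i++) Copy(n->children[i]);
  }
}

int main() {
  Node strong_root, weak_target, behind_weak;
  weak_target.children.push_back(&behind_weak);  // reachable only via the weak root

  bool has_processed_weak_pointers = false;
  Copy(&strong_root);
  while (true) {
    Drain();                                     // reach a fixed point
    if (has_processed_weak_pointers) break;      // second fixed point: we are done
    Copy(&weak_target);                          // weak roots are visited only once...
    has_processed_weak_pointers = true;          // ...and may add more work
  }
  assert(behind_weak.copied);                    // picked up by the second drain
  return 0;
}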

   // Set age mark.
-  new_space_.set_age_mark(new_space_.top());
+  new_space_.set_age_mark(new_mark);

   LOG(ResourceEvent("scavenge", "end"));

   gc_state_ = NOT_IN_GC;
 }


 void Heap::ClearRSetRange(Address start, int size_in_bytes) {
   uint32_t start_bit;
   Address start_word_address =
(...skipping 200 matching lines...)
   if (ShouldBePromoted(object->address(), object_size)) {
     OldSpace* target_space = Heap::TargetSpace(object);
     ASSERT(target_space == Heap::old_pointer_space_ ||
            target_space == Heap::old_data_space_);
     Object* result = target_space->AllocateRaw(object_size);
     if (!result->IsFailure()) {
       *p = MigrateObject(object, HeapObject::cast(result), object_size);
       if (target_space == Heap::old_pointer_space_) {
         // Record the object's address at the top of the to space, to allow
         // it to be swept by the scavenger.
-        promoted_rear -= kPointerSize;
-        Memory::Object_at(promoted_rear) = *p;
+        promoted_top -= kPointerSize;
+        Memory::Object_at(promoted_top) = *p;
       } else {
 #ifdef DEBUG
         // Objects promoted to the data space should not have pointers to
         // new space.
         VerifyNonPointerSpacePointersVisitor v;
         (*p)->Iterate(&v);
 #endif
       }
       return;
     }
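Note: the two restored lines in the hunk above push the promoted object's new address onto a stack that grows downward from the high end of the to space; the sweep loop in Scavenge() later walks those records back down. A toy illustration of that record-and-consume pairing, with invented names (RecordPromotedObject, to_space_high) and plain arrays instead of V8's spaces:

#include <cassert>
#include <cstddef>

const size_t kWords = 8;
static void* to_space_high[kWords];   // the top words of a toy to space
static size_t promoted_top = kWords;  // index of the most recent record

static void RecordPromotedObject(void* address_in_old_space) {
  assert(promoted_top > 0);           // guaranteed in the real collector (see above)
  promoted_top--;                     // the record stack grows downward
  to_space_high[promoted_top] = address_in_old_space;
}

int main() {
  int promoted_a, promoted_b;         // stand-ins for promoted HeapObjects
  RecordPromotedObject(&promoted_a);
  RecordPromotedObject(&promoted_b);

  // The sweep walks from the mark down to promoted_top, visiting each
  // recorded object exactly once, oldest record first.
  size_t promoted_mark = kWords;
  while (promoted_mark > promoted_top) {
    promoted_mark--;
    assert(to_space_high[promoted_mark] != NULL);
  }
  return 0;
}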
(...skipping 2490 matching lines...)
 #ifdef DEBUG
 bool Heap::GarbageCollectionGreedyCheck() {
   ASSERT(FLAG_gc_greedy);
   if (Bootstrapper::IsActive()) return true;
   if (disallow_allocation_failure()) return true;
   return CollectGarbage(0, NEW_SPACE);
 }
 #endif

 } }  // namespace v8::internal
