Chromium Code Reviews

Side by Side Diff: src/heap.cc

Issue 6639024: Get rid of distinction between below- and above-watermark in page allocation.... (Closed) Base URL: http://v8.googlecode.com/svn/branches/experimental/gc/
Patch Set: '' Created 9 years, 9 months ago
1 // Copyright 2010 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution. 11 // with the distribution.
(...skipping 404 matching lines...)
416 Counters::number_of_symbols.Set(symbol_table()->NumberOfElements()); 416 Counters::number_of_symbols.Set(symbol_table()->NumberOfElements());
417 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING) 417 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
418 ReportStatisticsAfterGC(); 418 ReportStatisticsAfterGC();
419 #endif 419 #endif
420 #ifdef ENABLE_DEBUGGER_SUPPORT 420 #ifdef ENABLE_DEBUGGER_SUPPORT
421 Debug::AfterGarbageCollection(); 421 Debug::AfterGarbageCollection();
422 #endif 422 #endif
423 } 423 }
424 424
425 425
426 void Heap::CollectAllGarbage(bool force_compaction) { 426 void Heap::CollectAllGarbage(int flags) {
427 // Since we are ignoring the return value, the exact choice of space does 427 // Since we are ignoring the return value, the exact choice of space does
428 // not matter, so long as we do not specify NEW_SPACE, which would not 428 // not matter, so long as we do not specify NEW_SPACE, which would not
429 // cause a full GC. 429 // cause a full GC.
430 MarkCompactCollector::SetForceCompaction(force_compaction); 430 MarkCompactCollector::SetFlags(flags);
431 CollectGarbage(OLD_POINTER_SPACE); 431 CollectGarbage(OLD_POINTER_SPACE);
432 MarkCompactCollector::SetForceCompaction(false); 432 MarkCompactCollector::SetFlags(kNoGCFlags);
433 } 433 }
434 434
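Note on the API change above: CollectAllGarbage now takes an int bitmask of GC flags instead of a bool force_compaction. The constants used throughout this patch (kNoGCFlags, kMakeHeapIterableMask, kForceCompactionMask) are declared in src/heap.h, which is not part of this file; the sketch below only illustrates the usual layout such a flag mask takes, with made-up bit values.

// Illustrative sketch only -- the real declarations live in src/heap.h.
enum GCFlag {
  kNoGCFlags            = 0,       // no special behaviour requested
  kMakeHeapIterableMask = 1 << 0,  // sweep precisely so heap iteration is safe
  kForceCompactionMask  = 1 << 1   // compact pages during mark-compact
};

// Callers OR the bits together, e.g. as CollectAllAvailableGarbage() does below:
//   MarkCompactCollector::SetFlags(kMakeHeapIterableMask | kForceCompactionMask);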
435 435
436 void Heap::CollectAllAvailableGarbage() { 436 void Heap::CollectAllAvailableGarbage() {
437 // Since we are ignoring the return value, the exact choice of space does 437 // Since we are ignoring the return value, the exact choice of space does
438 // not matter, so long as we do not specify NEW_SPACE, which would not 438 // not matter, so long as we do not specify NEW_SPACE, which would not
439 // cause a full GC. 439 // cause a full GC.
440 MarkCompactCollector::SetForceCompaction(true); 440 MarkCompactCollector::SetFlags(kMakeHeapIterableMask | kForceCompactionMask);
441 441
442 // Major GC would invoke weak handle callbacks on weakly reachable 442 // Major GC would invoke weak handle callbacks on weakly reachable
443 // handles, but won't collect weakly reachable objects until next 443 // handles, but won't collect weakly reachable objects until next
444 // major GC. Therefore if we collect aggressively and weak handle callback 444 // major GC. Therefore if we collect aggressively and weak handle callback
445 // has been invoked, we rerun major GC to release objects which become 445 // has been invoked, we rerun major GC to release objects which become
446 // garbage. 446 // garbage.
447 // Note: as weak callbacks can execute arbitrary code, we cannot 447 // Note: as weak callbacks can execute arbitrary code, we cannot
448 // hope that eventually there will be no weak callbacks invocations. 448 // hope that eventually there will be no weak callbacks invocations.
449 // Therefore stop recollecting after several attempts. 449 // Therefore stop recollecting after several attempts.
450 const int kMaxNumberOfAttempts = 7; 450 const int kMaxNumberOfAttempts = 7;
451 for (int attempt = 0; attempt < kMaxNumberOfAttempts; attempt++) { 451 for (int attempt = 0; attempt < kMaxNumberOfAttempts; attempt++) {
452 if (!CollectGarbage(OLD_POINTER_SPACE, MARK_COMPACTOR)) { 452 if (!CollectGarbage(OLD_POINTER_SPACE, MARK_COMPACTOR)) {
453 break; 453 break;
454 } 454 }
455 } 455 }
456 MarkCompactCollector::SetForceCompaction(false); 456 MarkCompactCollector::SetFlags(kNoGCFlags);
457 } 457 }
458 458
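The loop above is a bounded retry: a full mark-compact GC is repeated while the previous round invoked weak handle callbacks (which may have released more objects), capped at kMaxNumberOfAttempts because callbacks can run arbitrary code and might never stop producing work. A minimal standalone sketch of the same pattern, with RunFullCollection() as a hypothetical stand-in for CollectGarbage(OLD_POINTER_SPACE, MARK_COMPACTOR):

// Hypothetical helper; returns true if weak callbacks ran during the GC and
// may therefore have turned more objects into garbage.
bool RunFullCollection();

bool CollectUntilQuiescent(int max_attempts) {
  for (int attempt = 0; attempt < max_attempts; attempt++) {
    if (!RunFullCollection()) return true;  // fixed point reached: nothing new freed
  }
  return false;  // gave up; callbacks kept generating work
}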
459 459
460 bool Heap::CollectGarbage(AllocationSpace space, GarbageCollector collector) { 460 bool Heap::CollectGarbage(AllocationSpace space, GarbageCollector collector) {
461 // The VM is in the GC state until exiting this function. 461 // The VM is in the GC state until exiting this function.
462 VMState state(GC); 462 VMState state(GC);
463 463
464 #ifdef DEBUG 464 #ifdef DEBUG
465 // Reset the allocation timeout to the GC interval, but make sure to 465 // Reset the allocation timeout to the GC interval, but make sure to
466 // allow at least a few allocations after a collection. The reason 466 // allow at least a few allocations after a collection. The reason
(...skipping 438 matching lines...)
905 } 905 }
906 } 906 }
907 }; 907 };
908 908
909 909
910 static void VerifyNonPointerSpacePointers() { 910 static void VerifyNonPointerSpacePointers() {
911 // Verify that there are no pointers to new space in spaces where we 911 // Verify that there are no pointers to new space in spaces where we
912 // do not expect them. 912 // do not expect them.
913 VerifyNonPointerSpacePointersVisitor v; 913 VerifyNonPointerSpacePointersVisitor v;
914 HeapObjectIterator code_it(Heap::code_space()); 914 HeapObjectIterator code_it(Heap::code_space());
915 for (HeapObject* object = code_it.next(); 915 for (HeapObject* object = code_it.Next();
916 object != NULL; object = code_it.next()) 916 object != NULL; object = code_it.Next())
917 object->Iterate(&v); 917 object->Iterate(&v);
918 918
919 HeapObjectIterator data_it(Heap::old_data_space()); 919 // The old data space was normally swept conservatively so that the iterator
920 for (HeapObject* object = data_it.next(); 920 // doesn't work, so we normally skip the next bit.
921 object != NULL; object = data_it.next()) 921 if (!Heap::old_data_space()->was_swept_conservatively()) {
922 object->Iterate(&v); 922 HeapObjectIterator data_it(Heap::old_data_space());
923 for (HeapObject* object = data_it.Next();
924 object != NULL; object = data_it.Next())
925 object->Iterate(&v);
926 }
923 } 927 }
924 #endif 928 #endif
925 929
926 930
927 void Heap::CheckNewSpaceExpansionCriteria() { 931 void Heap::CheckNewSpaceExpansionCriteria() {
928 if (new_space_.Capacity() < new_space_.MaximumCapacity() && 932 if (new_space_.Capacity() < new_space_.MaximumCapacity() &&
929 survived_since_last_expansion_ > new_space_.Capacity()) { 933 survived_since_last_expansion_ > new_space_.Capacity()) {
930 // Grow the size of new space if there is room to grow and enough 934 // Grow the size of new space if there is room to grow and enough
931 // data has survived scavenge since the last expansion. 935 // data has survived scavenge since the last expansion.
932 new_space_.Grow(); 936 new_space_.Grow();
933 survived_since_last_expansion_ = 0; 937 survived_since_last_expansion_ = 0;
934 } 938 }
935 } 939 }
936 940
937 941
938 void Heap::Scavenge() { 942 void Heap::Scavenge() {
939 #ifdef DEBUG 943 #ifdef DEBUG
940 if (FLAG_enable_slow_asserts) VerifyNonPointerSpacePointers(); 944 if (FLAG_enable_slow_asserts) VerifyNonPointerSpacePointers();
941 #endif 945 #endif
942 946
943 gc_state_ = SCAVENGE; 947 gc_state_ = SCAVENGE;
944 948
945 Page::FlipMeaningOfInvalidatedWatermarkFlag();
946
947 // We do not update an allocation watermark of the top page during linear
948 // allocation to avoid overhead. So to maintain the watermark invariant
949 // we have to manually cache the watermark and mark the top page as having an
950 // invalid watermark. This guarantees that old space pointer iteration will
951 // use a correct watermark even if a linear allocation happens.
952 old_pointer_space_->FlushTopPageWatermark();
953 map_space_->FlushTopPageWatermark();
954
955 // Implements Cheney's copying algorithm 949 // Implements Cheney's copying algorithm
956 LOG(ResourceEvent("scavenge", "begin")); 950 LOG(ResourceEvent("scavenge", "begin"));
957 951
958 // Clear descriptor cache. 952 // Clear descriptor cache.
959 DescriptorLookupCache::Clear(); 953 DescriptorLookupCache::Clear();
960 954
961 // Used for updating survived_since_last_expansion_ at function end. 955 // Used for updating survived_since_last_expansion_ at function end.
962 intptr_t survived_watermark = PromotedSpaceSize(); 956 intptr_t survived_watermark = PromotedSpaceSize();
963 957
964 CheckNewSpaceExpansionCriteria(); 958 CheckNewSpaceExpansionCriteria();
(...skipping 32 matching lines...)
997 IterateRoots(&scavenge_visitor, VISIT_ALL_IN_SCAVENGE); 991 IterateRoots(&scavenge_visitor, VISIT_ALL_IN_SCAVENGE);
998 992
999 // Copy objects reachable from the old generation. 993 // Copy objects reachable from the old generation.
1000 { 994 {
1001 StoreBufferRebuildScope scope; 995 StoreBufferRebuildScope scope;
1002 StoreBuffer::IteratePointersToNewSpace(&ScavengeObject); 996 StoreBuffer::IteratePointersToNewSpace(&ScavengeObject);
1003 } 997 }
1004 998
1005 // Copy objects reachable from cells by scavenging cell values directly. 999 // Copy objects reachable from cells by scavenging cell values directly.
1006 HeapObjectIterator cell_iterator(cell_space_); 1000 HeapObjectIterator cell_iterator(cell_space_);
1007 for (HeapObject* cell = cell_iterator.next(); 1001 for (HeapObject* cell = cell_iterator.Next();
1008 cell != NULL; cell = cell_iterator.next()) { 1002 cell != NULL; cell = cell_iterator.Next()) {
1009 if (cell->IsJSGlobalPropertyCell()) { 1003 if (cell->IsJSGlobalPropertyCell()) {
1010 Address value_address = 1004 Address value_address =
1011 reinterpret_cast<Address>(cell) + 1005 reinterpret_cast<Address>(cell) +
1012 (JSGlobalPropertyCell::kValueOffset - kHeapObjectTag); 1006 (JSGlobalPropertyCell::kValueOffset - kHeapObjectTag);
1013 scavenge_visitor.VisitPointer(reinterpret_cast<Object**>(value_address)); 1007 scavenge_visitor.VisitPointer(reinterpret_cast<Object**>(value_address));
1014 } 1008 }
1015 } 1009 }
1016 1010
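A short note on the address arithmetic in the cell loop above: cell is a tagged HeapObject pointer, so the raw address of its value field is the tagged pointer plus the field offset minus the tag bit. A spelled-out form of the same computation (kHeapObjectTag is the low tag V8 adds to every heap object pointer):

Address cell_start    = reinterpret_cast<Address>(cell) - kHeapObjectTag;  // untagged object start
Address value_address = cell_start + JSGlobalPropertyCell::kValueOffset;   // address of the value slot
scavenge_visitor.VisitPointer(reinterpret_cast<Object**>(value_address));  // fix up if it points into new space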
1017 // Scavenge object reachable from the global contexts list directly. 1011 // Scavenge object reachable from the global contexts list directly.
1018 scavenge_visitor.VisitPointer(BitCast<Object**>(&global_contexts_list_)); 1012 scavenge_visitor.VisitPointer(BitCast<Object**>(&global_contexts_list_));
(...skipping 646 matching lines...)
1665 } 1659 }
1666 set_undetectable_ascii_string_map(Map::cast(obj)); 1660 set_undetectable_ascii_string_map(Map::cast(obj));
1667 Map::cast(obj)->set_is_undetectable(); 1661 Map::cast(obj)->set_is_undetectable();
1668 1662
1669 { MaybeObject* maybe_obj = 1663 { MaybeObject* maybe_obj =
1670 AllocateMap(BYTE_ARRAY_TYPE, kVariableSizeSentinel); 1664 AllocateMap(BYTE_ARRAY_TYPE, kVariableSizeSentinel);
1671 if (!maybe_obj->ToObject(&obj)) return false; 1665 if (!maybe_obj->ToObject(&obj)) return false;
1672 } 1666 }
1673 set_byte_array_map(Map::cast(obj)); 1667 set_byte_array_map(Map::cast(obj));
1674 1668
1669 { MaybeObject* maybe_obj =
1670 AllocateMap(FREE_SPACE_TYPE, kVariableSizeSentinel);
1671 if (!maybe_obj->ToObject(&obj)) return false;
1672 }
1673 set_free_space_map(Map::cast(obj));
1674
1675 { MaybeObject* maybe_obj = AllocateByteArray(0, TENURED); 1675 { MaybeObject* maybe_obj = AllocateByteArray(0, TENURED);
1676 if (!maybe_obj->ToObject(&obj)) return false; 1676 if (!maybe_obj->ToObject(&obj)) return false;
1677 } 1677 }
1678 set_empty_byte_array(ByteArray::cast(obj)); 1678 set_empty_byte_array(ByteArray::cast(obj));
1679 1679
1680 { MaybeObject* maybe_obj = 1680 { MaybeObject* maybe_obj =
1681 AllocateMap(EXTERNAL_PIXEL_ARRAY_TYPE, ExternalArray::kAlignedSize); 1681 AllocateMap(EXTERNAL_PIXEL_ARRAY_TYPE, ExternalArray::kAlignedSize);
1682 if (!maybe_obj->ToObject(&obj)) return false; 1682 if (!maybe_obj->ToObject(&obj)) return false;
1683 } 1683 }
1684 set_external_pixel_array_map(Map::cast(obj)); 1684 set_external_pixel_array_map(Map::cast(obj));
(...skipping 937 matching lines...)
2622 2622
2623 2623
2624 void Heap::CreateFillerObjectAt(Address addr, int size) { 2624 void Heap::CreateFillerObjectAt(Address addr, int size) {
2625 if (size == 0) return; 2625 if (size == 0) return;
2626 HeapObject* filler = HeapObject::FromAddress(addr); 2626 HeapObject* filler = HeapObject::FromAddress(addr);
2627 if (size == kPointerSize) { 2627 if (size == kPointerSize) {
2628 filler->set_map(one_pointer_filler_map()); 2628 filler->set_map(one_pointer_filler_map());
2629 } else if (size == 2 * kPointerSize) { 2629 } else if (size == 2 * kPointerSize) {
2630 filler->set_map(two_pointer_filler_map()); 2630 filler->set_map(two_pointer_filler_map());
2631 } else { 2631 } else {
2632 filler->set_map(byte_array_map()); 2632 filler->set_map(free_space_map());
2633 ByteArray::cast(filler)->set_length(ByteArray::LengthFor(size)); 2633 FreeSpace::cast(filler)->set_size(size);
2634 } 2634 }
2635 } 2635 }
2636 2636
2637 2637
2638 MaybeObject* Heap::AllocateExternalArray(int length, 2638 MaybeObject* Heap::AllocateExternalArray(int length,
2639 ExternalArrayType array_type, 2639 ExternalArrayType array_type,
2640 void* external_pointer, 2640 void* external_pointer,
2641 PretenureFlag pretenure) { 2641 PretenureFlag pretenure) {
2642 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE; 2642 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
2643 Object* result; 2643 Object* result;
(...skipping 1058 matching lines...)
3702 (size > MaxObjectSizeInPagedSpace()) ? LO_SPACE : OLD_POINTER_SPACE; 3702 (size > MaxObjectSizeInPagedSpace()) ? LO_SPACE : OLD_POINTER_SPACE;
3703 Object* result; 3703 Object* result;
3704 { MaybeObject* maybe_result = Heap::Allocate(map, space); 3704 { MaybeObject* maybe_result = Heap::Allocate(map, space);
3705 if (!maybe_result->ToObject(&result)) return maybe_result; 3705 if (!maybe_result->ToObject(&result)) return maybe_result;
3706 } 3706 }
3707 Struct::cast(result)->InitializeBody(size); 3707 Struct::cast(result)->InitializeBody(size);
3708 return result; 3708 return result;
3709 } 3709 }
3710 3710
3711 3711
3712 void Heap::EnsureHeapIsIterable() {
3713 ASSERT(IsAllocationAllowed());
3714 if (IncrementalMarking::state() != IncrementalMarking::STOPPED ||
3715 old_pointer_space()->was_swept_conservatively() ||
3716 old_data_space()->was_swept_conservatively()) {
3717 CollectAllGarbage(kMakeHeapIterableMask);
3718 }
3719 ASSERT(!old_pointer_space()->was_swept_conservatively());
3720 ASSERT(!old_data_space()->was_swept_conservatively());
3721 }
3722
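EnsureHeapIsIterable above is the consumer side of the new kMakeHeapIterableMask flag: if incremental marking is running or either old space was only conservatively (lazily) swept, it forces a full GC so that every page again contains a well-formed sequence of objects and fillers. A minimal usage sketch; the explicit call is shown only for clarity, since HeapIterator::Init() later in this file already performs it:

Heap::EnsureHeapIsIterable();           // triggers a full GC only if needed
HeapIterator iterator;                  // Init() also calls EnsureHeapIsIterable()
for (HeapObject* obj = iterator.Next(); obj != NULL; obj = iterator.Next()) {
  // obj->map() and obj->Size() are well defined for everything visited here
}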
3723
3712 bool Heap::IdleNotification() { 3724 bool Heap::IdleNotification() {
3713 static const int kIdlesBeforeScavenge = 4; 3725 static const int kIdlesBeforeScavenge = 4;
3714 static const int kIdlesBeforeMarkSweep = 7; 3726 static const int kIdlesBeforeMarkSweep = 7;
3715 static const int kIdlesBeforeMarkCompact = 8; 3727 static const int kIdlesBeforeMarkCompact = 8;
3716 static const int kMaxIdleCount = kIdlesBeforeMarkCompact + 1; 3728 static const int kMaxIdleCount = kIdlesBeforeMarkCompact + 1;
3717 static const unsigned int kGCsBetweenCleanup = 4; 3729 static const unsigned int kGCsBetweenCleanup = 4;
3718 static int number_idle_notifications = 0; 3730 static int number_idle_notifications = 0;
3719 static unsigned int last_gc_count = gc_count_; 3731 static unsigned int last_gc_count = gc_count_;
3720 3732
3721 bool uncommit = true; 3733 bool uncommit = true;
3722 bool finished = false; 3734 bool finished = false;
3723 3735
3724 // Reset the number of idle notifications received when a number of 3736 // Reset the number of idle notifications received when a number of
3725 // GCs have taken place. This allows another round of cleanup based 3737 // GCs have taken place. This allows another round of cleanup based
3726 // on idle notifications if enough work has been carried out to 3738 // on idle notifications if enough work has been carried out to
3727 // provoke a number of garbage collections. 3739 // provoke a number of garbage collections.
3728 if (gc_count_ - last_gc_count < kGCsBetweenCleanup) { 3740 if (gc_count_ - last_gc_count < kGCsBetweenCleanup) {
3729 number_idle_notifications = 3741 number_idle_notifications =
3730 Min(number_idle_notifications + 1, kMaxIdleCount); 3742 Min(number_idle_notifications + 1, kMaxIdleCount);
3731 } else { 3743 } else {
3732 number_idle_notifications = 0; 3744 number_idle_notifications = 0;
3733 last_gc_count = gc_count_; 3745 last_gc_count = gc_count_;
3734 } 3746 }
3735 3747
3736 if (number_idle_notifications == kIdlesBeforeScavenge) { 3748 if (number_idle_notifications == kIdlesBeforeScavenge) {
3737 if (contexts_disposed_ > 0) { 3749 if (contexts_disposed_ > 0) {
3738 HistogramTimerScope scope(&Counters::gc_context); 3750 HistogramTimerScope scope(&Counters::gc_context);
3739 CollectAllGarbage(false); 3751 CollectAllGarbage(kNoGCFlags);
3740 } else { 3752 } else {
3741 CollectGarbage(NEW_SPACE); 3753 CollectGarbage(NEW_SPACE);
3742 } 3754 }
3743 new_space_.Shrink(); 3755 new_space_.Shrink();
3744 last_gc_count = gc_count_; 3756 last_gc_count = gc_count_;
3745 } else if (number_idle_notifications == kIdlesBeforeMarkSweep) { 3757 } else if (number_idle_notifications == kIdlesBeforeMarkSweep) {
3746 // Before doing the mark-sweep collections we clear the 3758 // Before doing the mark-sweep collections we clear the
3747 // compilation cache to avoid hanging on to source code and 3759 // compilation cache to avoid hanging on to source code and
3748 // generated code for cached functions. 3760 // generated code for cached functions.
3749 CompilationCache::Clear(); 3761 CompilationCache::Clear();
3750 3762
3751 CollectAllGarbage(false); 3763 CollectAllGarbage(kNoGCFlags);
3752 new_space_.Shrink(); 3764 new_space_.Shrink();
3753 last_gc_count = gc_count_; 3765 last_gc_count = gc_count_;
3754 3766
3755 } else if (number_idle_notifications == kIdlesBeforeMarkCompact) { 3767 } else if (number_idle_notifications == kIdlesBeforeMarkCompact) {
3756 CollectAllGarbage(true); 3768 CollectAllGarbage(kForceCompactionMask);
3757 new_space_.Shrink(); 3769 new_space_.Shrink();
3758 last_gc_count = gc_count_; 3770 last_gc_count = gc_count_;
3759 finished = true; 3771 finished = true;
3760 3772
3761 } else if (contexts_disposed_ > 0) { 3773 } else if (contexts_disposed_ > 0) {
3762 if (FLAG_expose_gc) { 3774 if (FLAG_expose_gc) {
3763 contexts_disposed_ = 0; 3775 contexts_disposed_ = 0;
3764 } else { 3776 } else {
3765 HistogramTimerScope scope(&Counters::gc_context); 3777 HistogramTimerScope scope(&Counters::gc_context);
3766 CollectAllGarbage(false); 3778 CollectAllGarbage(kNoGCFlags);
3767 last_gc_count = gc_count_; 3779 last_gc_count = gc_count_;
3768 } 3780 }
3769 // If this is the first idle notification, we reset the 3781 // If this is the first idle notification, we reset the
3770 // notification count to avoid letting idle notifications for 3782 // notification count to avoid letting idle notifications for
3771 // context disposal garbage collections start a potentially too 3783 // context disposal garbage collections start a potentially too
3772 // aggressive idle GC cycle. 3784 // aggressive idle GC cycle.
3773 if (number_idle_notifications <= 1) { 3785 if (number_idle_notifications <= 1) {
3774 number_idle_notifications = 0; 3786 number_idle_notifications = 0;
3775 uncommit = false; 3787 uncommit = false;
3776 } 3788 }
(...skipping 123 matching lines...)
3900 3912
3901 #ifdef DEBUG 3913 #ifdef DEBUG
3902 static void DummyScavengePointer(HeapObject** p, HeapObject* o) { 3914 static void DummyScavengePointer(HeapObject** p, HeapObject* o) {
3903 // When we are not in GC the Heap::InNewSpace() predicate 3915 // When we are not in GC the Heap::InNewSpace() predicate
3904 // checks that pointers which satisfy predicate point into 3916 // checks that pointers which satisfy predicate point into
3905 // the active semispace. 3917 // the active semispace.
3906 Heap::InNewSpace(*p); 3918 Heap::InNewSpace(*p);
3907 } 3919 }
3908 3920
3909 3921
3910 static void VerifyPointersUnderWatermark( 3922 static void VerifyPointers(
3911 PagedSpace* space, 3923 PagedSpace* space,
3912 PointerRegionCallback visit_pointer_region) { 3924 PointerRegionCallback visit_pointer_region) {
3913 PageIterator it(space, PageIterator::PAGES_IN_USE); 3925 PageIterator it(space);
3914 3926
3915 while (it.has_next()) { 3927 while (it.has_next()) {
3916 Page* page = it.next(); 3928 Page* page = it.next();
3917 Address start = page->ObjectAreaStart(); 3929 Address start = page->ObjectAreaStart();
3918 Address end = page->AllocationWatermark(); 3930 Address end = page->ObjectAreaEnd();
3919 3931
3920 Heap::IteratePointersToNewSpace(start, 3932 Heap::IteratePointersToNewSpace(start,
3921 end, 3933 end,
3922 &DummyScavengePointer); 3934 &DummyScavengePointer);
3923 } 3935 }
3924 } 3936 }
3925 3937
3926 3938
3927 static void VerifyPointersUnderWatermark(LargeObjectSpace* space) { 3939 static void VerifyPointers(LargeObjectSpace* space) {
3928 LargeObjectIterator it(space); 3940 LargeObjectIterator it(space);
3929 for (HeapObject* object = it.next(); object != NULL; object = it.next()) { 3941 for (HeapObject* object = it.next(); object != NULL; object = it.next()) {
3930 if (object->IsFixedArray()) { 3942 if (object->IsFixedArray()) {
3931 Address slot_address = object->address(); 3943 Address slot_address = object->address();
3932 Address end = object->address() + object->Size(); 3944 Address end = object->address() + object->Size();
3933 3945
3934 while (slot_address < end) { 3946 while (slot_address < end) {
3935 HeapObject** slot = reinterpret_cast<HeapObject**>(slot_address); 3947 HeapObject** slot = reinterpret_cast<HeapObject**>(slot_address);
3936 // When we are not in GC the Heap::InNewSpace() predicate 3948 // When we are not in GC the Heap::InNewSpace() predicate
3937 // checks that pointers which satisfy predicate point into 3949 // checks that pointers which satisfy predicate point into
(...skipping 12 matching lines...)
3950 StoreBuffer::Verify(); 3962 StoreBuffer::Verify();
3951 3963
3952 VerifyPointersVisitor visitor; 3964 VerifyPointersVisitor visitor;
3953 IterateRoots(&visitor, VISIT_ONLY_STRONG); 3965 IterateRoots(&visitor, VISIT_ONLY_STRONG);
3954 3966
3955 new_space_.Verify(); 3967 new_space_.Verify();
3956 3968
3957 old_pointer_space_->Verify(&visitor); 3969 old_pointer_space_->Verify(&visitor);
3958 map_space_->Verify(&visitor); 3970 map_space_->Verify(&visitor);
3959 3971
3960 VerifyPointersUnderWatermark(old_pointer_space_, 3972 VerifyPointers(old_pointer_space_, &IteratePointersToNewSpace);
3961 &IteratePointersToNewSpace); 3973 VerifyPointers(map_space_, &IteratePointersFromMapsToNewSpace);
3962 VerifyPointersUnderWatermark(map_space_, 3974 VerifyPointers(lo_space_);
3963 &IteratePointersFromMapsToNewSpace);
3964 VerifyPointersUnderWatermark(lo_space_);
3965 3975
3966 VerifyPointersVisitor no_dirty_regions_visitor; 3976 VerifyPointersVisitor no_dirty_regions_visitor;
3967 old_data_space_->Verify(&no_dirty_regions_visitor); 3977 old_data_space_->Verify(&no_dirty_regions_visitor);
3968 code_space_->Verify(&no_dirty_regions_visitor); 3978 code_space_->Verify(&no_dirty_regions_visitor);
3969 cell_space_->Verify(&no_dirty_regions_visitor); 3979 cell_space_->Verify(&no_dirty_regions_visitor);
3970 3980
3971 lo_space_->Verify(); 3981 lo_space_->Verify();
3972 } 3982 }
3973 #endif // DEBUG 3983 #endif // DEBUG
3974 3984
(...skipping 175 matching lines...)
4150 ASSERT(StoreBuffer::CellIsInStoreBuffer( 4160 ASSERT(StoreBuffer::CellIsInStoreBuffer(
4151 reinterpret_cast<Address>(slot))); 4161 reinterpret_cast<Address>(slot)));
4152 } 4162 }
4153 } 4163 }
4154 slot_address += kPointerSize; 4164 slot_address += kPointerSize;
4155 } 4165 }
4156 } 4166 }
4157 4167
4158 4168
4159 #ifdef DEBUG 4169 #ifdef DEBUG
4170 typedef bool (*CheckStoreBufferFilter)(Object** addr);
4171
4172
4173 bool IsAMapPointerAddress(Object** addr) {
4174 uintptr_t a = reinterpret_cast<uintptr_t>(addr);
4175 int mod = a % Map::kSize;
4176 return mod >= Map::kPointerFieldsBeginOffset &&
4177 mod < Map::kPointerFieldsEndOffset;
4178 }
4179
4180
4181 bool EverythingsAPointer(Object** addr) {
4182 return true;
4183 }
4184
4185
4160 static void CheckStoreBuffer(Object** current, 4186 static void CheckStoreBuffer(Object** current,
4161 Object** limit, 4187 Object** limit,
4162 Object**** store_buffer_position, 4188 Object**** store_buffer_position,
4163 Object*** store_buffer_top) { 4189 Object*** store_buffer_top,
4190 CheckStoreBufferFilter filter,
4191 Address special_garbage_start,
4192 Address special_garbage_end) {
4164 for ( ; current < limit; current++) { 4193 for ( ; current < limit; current++) {
4165 Object* o = *current; 4194 Object* o = *current;
4166 if (reinterpret_cast<uintptr_t>(o) == kFreeListZapValue) { 4195 Address current_address = reinterpret_cast<Address>(current);
4167 Object*** zap_checker = *store_buffer_position; 4196 // Skip free space.
4168 while (*zap_checker < current) { 4197 if (o == Heap::free_space_map()) {
4169 zap_checker++; 4198 Address current_address = reinterpret_cast<Address>(current);
4170 if (zap_checker >= store_buffer_top) break; 4199 FreeSpace* free_space =
4171 } 4200 FreeSpace::cast(HeapObject::FromAddress(current_address));
4172 if (zap_checker < store_buffer_top) { 4201 int skip = free_space->Size();
4173 // Objects in the free list shouldn't be in the store buffer. 4202 ASSERT(current_address + skip <= reinterpret_cast<Address>(limit));
4174 ASSERT(*zap_checker != current); 4203 ASSERT(skip > 0);
4175 } 4204 current_address += skip - kPointerSize;
4205 current = reinterpret_cast<Object**>(current_address);
4176 continue; 4206 continue;
4177 } 4207 }
4208 // Skip the current linear allocation space between top and limit which is
4209 // unmarked with the free space map, but can contain junk.
4210 if (current_address == special_garbage_start &&
4211 special_garbage_end != special_garbage_start) {
4212 current_address = special_garbage_end - kPointerSize;
4213 current = reinterpret_cast<Object**>(current_address);
4214 continue;
4215 }
4216 if (!(*filter)(current)) continue;
4217 ASSERT(current_address < special_garbage_start ||
4218 current_address >= special_garbage_end);
4219 ASSERT(reinterpret_cast<uintptr_t>(o) != kFreeListZapValue);
4178 // We have to check that the pointer does not point into new space 4220 // We have to check that the pointer does not point into new space
4179 // without trying to cast it to a heap object since the hash field of 4221 // without trying to cast it to a heap object since the hash field of
4180 // a string can contain values like 1 and 3 which are tagged null 4222 // a string can contain values like 1 and 3 which are tagged null
4181 // pointers. 4223 // pointers.
4182 if (!Heap::InNewSpace(o)) continue; 4224 if (!Heap::InNewSpace(o)) continue;
4183 while (**store_buffer_position < current && 4225 while (**store_buffer_position < current &&
4184 *store_buffer_position < store_buffer_top) { 4226 *store_buffer_position < store_buffer_top) {
4185 (*store_buffer_position)++; 4227 (*store_buffer_position)++;
4186 } 4228 }
4187 if (**store_buffer_position != current || 4229 if (**store_buffer_position != current ||
4188 *store_buffer_position == store_buffer_top) { 4230 *store_buffer_position == store_buffer_top) {
4189 Object** obj_start = current; 4231 Object** obj_start = current;
4190 while (!(*obj_start)->IsMap()) obj_start--; 4232 while (!(*obj_start)->IsMap()) obj_start--;
4191 UNREACHABLE(); 4233 UNREACHABLE();
4192 } 4234 }
4193 } 4235 }
4194 } 4236 }
4195 4237
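The rewritten CheckStoreBuffer is parameterized in two ways: a filter callback decides which slot addresses are genuine pointer slots, and a special garbage range excludes the current linear allocation area (between a space's top and limit), which can contain arbitrary junk. For reference, the three call sites later in this file use it as follows (a summary of the existing calls, not new behaviour):

// old pointer space:   filter = &EverythingsAPointer,  garbage = [space->top(), space->limit())
// map space:           filter = &IsAMapPointerAddress, garbage = [space->top(), space->limit())
// large object space:  filter = &EverythingsAPointer,  garbage = none (NULL, NULL)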
4196 4238
4197 // Check that the store buffer contains all intergenerational pointers by 4239 // Check that the store buffer contains all intergenerational pointers by
4198 // scanning a page and ensuring that all pointers to young space are in the 4240 // scanning a page and ensuring that all pointers to young space are in the
4199 // store buffer. 4241 // store buffer.
4200 void Heap::OldPointerSpaceCheckStoreBuffer( 4242 void Heap::OldPointerSpaceCheckStoreBuffer(
4201 ExpectedPageWatermarkState watermark_state) { 4243 ExpectedPageWatermarkState watermark_state) {
4202 OldSpace* space = old_pointer_space(); 4244 OldSpace* space = old_pointer_space();
4203 PageIterator pages(space, PageIterator::PAGES_IN_USE); 4245 PageIterator pages(space);
4204
4205 space->free_list()->Zap();
4206 4246
4207 StoreBuffer::SortUniq(); 4247 StoreBuffer::SortUniq();
4208 4248
4209 while (pages.has_next()) { 4249 while (pages.has_next()) {
4210 Page* page = pages.next(); 4250 Page* page = pages.next();
4211 Object** current = reinterpret_cast<Object**>(page->ObjectAreaStart()); 4251 Object** current = reinterpret_cast<Object**>(page->ObjectAreaStart());
4212 4252
4213 // Do not try to visit pointers beyond page allocation watermark. 4253 Address end = page->ObjectAreaEnd();
4214 // Page can contain garbage pointers there.
4215 Address end;
4216
4217 if (watermark_state == WATERMARK_SHOULD_BE_VALID ||
4218 page->IsWatermarkValid()) {
4219 end = page->AllocationWatermark();
4220 } else {
4221 end = page->CachedAllocationWatermark();
4222 }
4223 4254
4224 Object*** store_buffer_position = StoreBuffer::Start(); 4255 Object*** store_buffer_position = StoreBuffer::Start();
4225 Object*** store_buffer_top = StoreBuffer::Top(); 4256 Object*** store_buffer_top = StoreBuffer::Top();
4226 4257
4227 Object** limit = reinterpret_cast<Object**>(end); 4258 Object** limit = reinterpret_cast<Object**>(end);
4228 CheckStoreBuffer(current, limit, &store_buffer_position, store_buffer_top); 4259 CheckStoreBuffer(current,
4260 limit,
4261 &store_buffer_position,
4262 store_buffer_top,
4263 &EverythingsAPointer,
4264 space->top(),
4265 space->limit());
4229 } 4266 }
4230 } 4267 }
4231 4268
4232 4269
4233 void Heap::MapSpaceCheckStoreBuffer( 4270 void Heap::MapSpaceCheckStoreBuffer(
4234 ExpectedPageWatermarkState watermark_state) { 4271 ExpectedPageWatermarkState watermark_state) {
4235 MapSpace* space = map_space(); 4272 MapSpace* space = map_space();
4236 PageIterator pages(space, PageIterator::PAGES_IN_USE); 4273 PageIterator pages(space);
4237
4238 space->free_list()->Zap();
4239 4274
4240 StoreBuffer::SortUniq(); 4275 StoreBuffer::SortUniq();
4241 4276
4242 while (pages.has_next()) { 4277 while (pages.has_next()) {
4243 Page* page = pages.next(); 4278 Page* page = pages.next();
4279 Object** current = reinterpret_cast<Object**>(page->ObjectAreaStart());
4244 4280
4245 // Do not try to visit pointers beyond page allocation watermark. 4281 Address end = page->ObjectAreaEnd();
4246 // Page can contain garbage pointers there.
4247 Address end;
4248
4249 if (watermark_state == WATERMARK_SHOULD_BE_VALID ||
4250 page->IsWatermarkValid()) {
4251 end = page->AllocationWatermark();
4252 } else {
4253 end = page->CachedAllocationWatermark();
4254 }
4255
4256 Address map_aligned_current = page->ObjectAreaStart();
4257
4258 ASSERT(map_aligned_current == MapStartAlign(map_aligned_current));
4259 ASSERT(end == MapEndAlign(end));
4260 4282
4261 Object*** store_buffer_position = StoreBuffer::Start(); 4283 Object*** store_buffer_position = StoreBuffer::Start();
4262 Object*** store_buffer_top = StoreBuffer::Top(); 4284 Object*** store_buffer_top = StoreBuffer::Top();
4263 4285
4264 for ( ; map_aligned_current < end; map_aligned_current += Map::kSize) { 4286 Object** limit = reinterpret_cast<Object**>(end);
4265 ASSERT(!Heap::InNewSpace(Memory::Object_at(map_aligned_current))); 4287 CheckStoreBuffer(current,
4266 ASSERT(Memory::Object_at(map_aligned_current)->IsMap()); 4288 limit,
4267 4289 &store_buffer_position,
4268 Object** current = reinterpret_cast<Object**>( 4290 store_buffer_top,
4269 map_aligned_current + Map::kPointerFieldsBeginOffset); 4291 &IsAMapPointerAddress,
4270 Object** limit = reinterpret_cast<Object**>( 4292 space->top(),
4271 map_aligned_current + Map::kPointerFieldsEndOffset); 4293 space->limit());
4272
4273 CheckStoreBuffer(current,
4274 limit,
4275 &store_buffer_position,
4276 store_buffer_top);
4277 }
4278 } 4294 }
4279 } 4295 }
4280 4296
4281 4297
4282 void Heap::LargeObjectSpaceCheckStoreBuffer() { 4298 void Heap::LargeObjectSpaceCheckStoreBuffer() {
4283 LargeObjectIterator it(lo_space()); 4299 LargeObjectIterator it(lo_space());
4284 for (HeapObject* object = it.next(); object != NULL; object = it.next()) { 4300 for (HeapObject* object = it.next(); object != NULL; object = it.next()) {
4285 // We only have code, sequential strings, or fixed arrays in large 4301 // We only have code, sequential strings, or fixed arrays in large
4286 // object space, and only fixed arrays can possibly contain pointers to 4302 // object space, and only fixed arrays can possibly contain pointers to
4287 // the young generation. 4303 // the young generation.
4288 if (object->IsFixedArray()) { 4304 if (object->IsFixedArray()) {
4289 Object*** store_buffer_position = StoreBuffer::Start(); 4305 Object*** store_buffer_position = StoreBuffer::Start();
4290 Object*** store_buffer_top = StoreBuffer::Top(); 4306 Object*** store_buffer_top = StoreBuffer::Top();
4291 Object** current = reinterpret_cast<Object**>(object->address()); 4307 Object** current = reinterpret_cast<Object**>(object->address());
4292 Object** limit = 4308 Object** limit =
4293 reinterpret_cast<Object**>(object->address() + object->Size()); 4309 reinterpret_cast<Object**>(object->address() + object->Size());
4294 CheckStoreBuffer(current, 4310 CheckStoreBuffer(current,
4295 limit, 4311 limit,
4296 &store_buffer_position, 4312 &store_buffer_position,
4297 store_buffer_top); 4313 store_buffer_top,
4314 &EverythingsAPointer,
4315 NULL,
4316 NULL);
4298 } 4317 }
4299 } 4318 }
4300 } 4319 }
4301 4320
4302 4321
4303 #endif 4322 #endif
4304 4323
4305 4324
4325 // This function iterates over all the pointers in a paged space in the heap,
4326 // looking for pointers into new space. Within the pages there may be dead
4327 // objects that have not been overwritten by free spaces or fillers because of
4328 // lazy sweeping. These dead objects may not contain pointers to new space.
4329 // The garbage areas that have been swept properly (these will normally be the
4330 // large ones) will be marked with free space and filler map words. In
4331 // addition any area that has never been used at all for object allocation must
4332 // be marked with a free space or filler. Because the free space and filler
4333 // maps do not move we can always recognize these even after a compaction.
4334 // Normal objects like FixedArrays and JSObjects should not contain references
4335 // to these maps. The special garbage section (see comment in spaces.h) is
4336 // skipped since it can contain absolutely anything. Any objects that are
4337 // allocated during iteration may or may not be visited by the iteration, but
4338 // they will not be partially visited.
4306 void Heap::IteratePointers( 4339 void Heap::IteratePointers(
4307 PagedSpace* space, 4340 PagedSpace* space,
4308 PointerRegionCallback visit_pointer_region, 4341 PointerRegionCallback visit_pointer_region,
4309 ObjectSlotCallback copy_object_func, 4342 ObjectSlotCallback copy_object_func,
4310 ExpectedPageWatermarkState expected_page_watermark_state) { 4343 ExpectedPageWatermarkState expected_page_watermark_state) {
4311 4344
4312 PageIterator pages(space, PageIterator::PAGES_IN_USE); 4345 PageIterator pages(space);
4313 4346
4314 while (pages.has_next()) { 4347 while (pages.has_next()) {
4315 Page* page = pages.next(); 4348 Page* page = pages.next();
4316 Address start = page->ObjectAreaStart(); 4349 Address start = page->ObjectAreaStart();
4350 Address limit = page->ObjectAreaEnd();
4317 4351
4318 // Do not try to visit pointers beyond page allocation watermark. 4352 Address end = start;
4319 // Page can contain garbage pointers there.
4320 Address end;
4321 4353
4322 if ((expected_page_watermark_state == WATERMARK_SHOULD_BE_VALID) || 4354 Object* free_space_map = Heap::free_space_map();
4323 page->IsWatermarkValid()) { 4355 Object* two_pointer_filler_map = Heap::two_pointer_filler_map();
4324 end = page->AllocationWatermark(); 4356
4325 } else { 4357 while (end < limit) {
4326 end = page->CachedAllocationWatermark(); 4358 Object* o = *reinterpret_cast<Object**>(end);
4359 // Skip fillers but not things that look like fillers in the special
4360 // garbage section which can contain anything.
4361 if (o == free_space_map ||
4362 o == two_pointer_filler_map ||
4363 end == space->top()) {
4364 if (start != end) {
4365 // After calling this the special garbage section may have moved.
4366 visit_pointer_region(start, end, copy_object_func);
4367 if (end >= space->top() && end < space->limit()) {
4368 end = space->limit();
4369 start = end;
4370 continue;
4371 }
4372 }
4373 if (end == space->top()) {
4374 start = end = space->limit();
4375 } else {
4376 // At this point we are either at the start of a filler or we are at
4377 // the point where the space->top() used to be before the
4378 // visit_pointer_region call above. Either way we can skip the
4379 // object at the current spot: We don't promise to visit objects
4380 // allocated during heap traversal, and if space->top() moved then it
4381 // must be because an object was allocated at this point.
4382 start = end + HeapObject::FromAddress(end)->Size();
4383 end = start;
4384 }
4385 } else {
4386 ASSERT(o != free_space_map);
4387 ASSERT(o != two_pointer_filler_map);
4388 ASSERT(end < space->top() || end >= space->limit());
4389 end += kPointerSize;
4390 }
4327 } 4391 }
4328 4392 ASSERT(end == limit);
4329 ASSERT(space == old_pointer_space_ || 4393 if (start != end) {
4330 (space == map_space_ && 4394 visit_pointer_region(start, end, copy_object_func);
4331 ((page->ObjectAreaStart() - end) % Map::kSize == 0))); 4395 }
4332
4333 visit_pointer_region(start, end, copy_object_func);
4334
4335 // Mark page watermark as invalid to maintain watermark validity invariant.
4336 // See Page::FlipMeaningOfInvalidatedWatermarkFlag() for details.
4337 page->InvalidateWatermark(true);
4338 } 4396 }
4339 } 4397 }
4340 4398
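For readability, here is a compact sketch of just the new per-page scan introduced above, under the assumptions from the preceding comment block: properly swept holes carry free-space or filler map words, and the linear allocation area between space->top() and space->limit() is skipped wholesale. Control flow is simplified (the re-check for an allocation area that moved while a region was being visited is omitted):

Address start = page->ObjectAreaStart();
Address end   = start;
while (end < page->ObjectAreaEnd()) {
  Object* word = *reinterpret_cast<Object**>(end);
  if (word == free_space_map || word == two_pointer_filler_map || end == space->top()) {
    if (start != end) visit_pointer_region(start, end, copy_object_func);  // flush the run so far
    if (end == space->top()) {
      start = end = space->limit();                                // skip the allocation area
    } else {
      start = end = end + HeapObject::FromAddress(end)->Size();    // skip the whole filler
    }
  } else {
    end += kPointerSize;                                           // extend the current run
  }
}
if (start != end) visit_pointer_region(start, end, copy_object_func);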
4341 4399
4342 void Heap::IterateRoots(ObjectVisitor* v, VisitMode mode) { 4400 void Heap::IterateRoots(ObjectVisitor* v, VisitMode mode) {
4343 IterateStrongRoots(v, mode); 4401 IterateStrongRoots(v, mode);
4344 IterateWeakRoots(v, mode); 4402 IterateWeakRoots(v, mode);
4345 } 4403 }
4346 4404
4347 4405
(...skipping 146 matching lines...)
4494 *stats->map_space_capacity = map_space_->Capacity(); 4552 *stats->map_space_capacity = map_space_->Capacity();
4495 *stats->cell_space_size = cell_space_->Size(); 4553 *stats->cell_space_size = cell_space_->Size();
4496 *stats->cell_space_capacity = cell_space_->Capacity(); 4554 *stats->cell_space_capacity = cell_space_->Capacity();
4497 *stats->lo_space_size = lo_space_->Size(); 4555 *stats->lo_space_size = lo_space_->Size();
4498 GlobalHandles::RecordStats(stats); 4556 GlobalHandles::RecordStats(stats);
4499 *stats->memory_allocator_size = MemoryAllocator::Size(); 4557 *stats->memory_allocator_size = MemoryAllocator::Size();
4500 *stats->memory_allocator_capacity = 4558 *stats->memory_allocator_capacity =
4501 MemoryAllocator::Size() + MemoryAllocator::Available(); 4559 MemoryAllocator::Size() + MemoryAllocator::Available();
4502 *stats->os_error = OS::GetLastError(); 4560 *stats->os_error = OS::GetLastError();
4503 if (take_snapshot) { 4561 if (take_snapshot) {
4504 HeapIterator iterator(HeapIterator::kFilterFreeListNodes); 4562 HeapIterator iterator;
4505 for (HeapObject* obj = iterator.next(); 4563 for (HeapObject* obj = iterator.Next();
4506 obj != NULL; 4564 obj != NULL;
4507 obj = iterator.next()) { 4565 obj = iterator.Next()) {
4508 InstanceType type = obj->map()->instance_type(); 4566 InstanceType type = obj->map()->instance_type();
4509 ASSERT(0 <= type && type <= LAST_TYPE); 4567 ASSERT(0 <= type && type <= LAST_TYPE);
4510 stats->objects_per_type[type]++; 4568 stats->objects_per_type[type]++;
4511 stats->size_per_type[type] += obj->Size(); 4569 stats->size_per_type[type] += obj->Size();
4512 } 4570 }
4513 } 4571 }
4514 } 4572 }
4515 4573
4516 4574
4517 intptr_t Heap::PromotedSpaceSize() { 4575 intptr_t Heap::PromotedSpaceSize() {
(...skipping 410 matching lines...)
4928 } 4986 }
4929 4987
4930 4988
4931 class HeapObjectsFilter { 4989 class HeapObjectsFilter {
4932 public: 4990 public:
4933 virtual ~HeapObjectsFilter() {} 4991 virtual ~HeapObjectsFilter() {}
4934 virtual bool SkipObject(HeapObject* object) = 0; 4992 virtual bool SkipObject(HeapObject* object) = 0;
4935 }; 4993 };
4936 4994
4937 4995
4938 class FreeListNodesFilter : public HeapObjectsFilter {
4939 public:
4940 FreeListNodesFilter() {
4941 MarkFreeListNodes();
4942 }
4943
4944 bool SkipObject(HeapObject* object) {
4945 if (IntrusiveMarking::IsMarked(object)) {
4946 IntrusiveMarking::ClearMark(object);
4947 return true;
4948 } else {
4949 return false;
4950 }
4951 }
4952
4953 private:
4954 void MarkFreeListNodes() {
4955 Heap::old_pointer_space()->MarkFreeListNodes();
4956 Heap::old_data_space()->MarkFreeListNodes();
4957 MarkCodeSpaceFreeListNodes();
4958 Heap::map_space()->MarkFreeListNodes();
4959 Heap::cell_space()->MarkFreeListNodes();
4960 }
4961
4962 void MarkCodeSpaceFreeListNodes() {
4963 // For code space, using FreeListNode::IsFreeListNode is OK.
4964 HeapObjectIterator iter(Heap::code_space());
4965 for (HeapObject* obj = iter.next_object();
4966 obj != NULL;
4967 obj = iter.next_object()) {
4968 if (FreeListNode::IsFreeListNode(obj)) {
4969 IntrusiveMarking::SetMark(obj);
4970 }
4971 }
4972 }
4973
4974 AssertNoAllocation no_alloc;
4975 };
4976
4977
4978 class UnreachableObjectsFilter : public HeapObjectsFilter { 4996 class UnreachableObjectsFilter : public HeapObjectsFilter {
4979 public: 4997 public:
4980 UnreachableObjectsFilter() { 4998 UnreachableObjectsFilter() {
4981 MarkUnreachableObjects(); 4999 MarkUnreachableObjects();
4982 } 5000 }
4983 5001
4984 bool SkipObject(HeapObject* object) { 5002 bool SkipObject(HeapObject* object) {
4985 if (IntrusiveMarking::IsMarked(object)) { 5003 if (IntrusiveMarking::IsMarked(object)) {
4986 IntrusiveMarking::ClearMark(object); 5004 IntrusiveMarking::ClearMark(object);
4987 return true; 5005 return true;
(...skipping 24 matching lines...)
5012 HeapObject* obj = list_.RemoveLast(); 5030 HeapObject* obj = list_.RemoveLast();
5013 obj->Iterate(this); 5031 obj->Iterate(this);
5014 } 5032 }
5015 5033
5016 private: 5034 private:
5017 List<HeapObject*> list_; 5035 List<HeapObject*> list_;
5018 }; 5036 };
5019 5037
5020 void MarkUnreachableObjects() { 5038 void MarkUnreachableObjects() {
5021 HeapIterator iterator; 5039 HeapIterator iterator;
5022 for (HeapObject* obj = iterator.next(); 5040 for (HeapObject* obj = iterator.Next();
5023 obj != NULL; 5041 obj != NULL;
5024 obj = iterator.next()) { 5042 obj = iterator.Next()) {
5025 IntrusiveMarking::SetMark(obj); 5043 IntrusiveMarking::SetMark(obj);
5026 } 5044 }
5027 UnmarkingVisitor visitor; 5045 UnmarkingVisitor visitor;
5028 Heap::IterateRoots(&visitor, VISIT_ALL); 5046 Heap::IterateRoots(&visitor, VISIT_ALL);
5029 while (visitor.can_process()) 5047 while (visitor.can_process())
5030 visitor.ProcessNext(); 5048 visitor.ProcessNext();
5031 } 5049 }
5032 5050
5033 AssertNoAllocation no_alloc; 5051 AssertNoAllocation no_alloc;
5034 }; 5052 };
5035 5053
5036 5054
5037 HeapIterator::HeapIterator() 5055 HeapIterator::HeapIterator() {
5038 : filtering_(HeapIterator::kNoFiltering),
5039 filter_(NULL) {
5040 Init();
5041 }
5042
5043
5044 HeapIterator::HeapIterator(HeapIterator::HeapObjectsFiltering filtering)
5045 : filtering_(filtering),
5046 filter_(NULL) {
5047 Init(); 5056 Init();
5048 } 5057 }
5049 5058
5050 5059
5051 HeapIterator::~HeapIterator() { 5060 HeapIterator::~HeapIterator() {
5052 Shutdown(); 5061 Shutdown();
5053 } 5062 }
5054 5063
5055 5064
5056 void HeapIterator::Init() { 5065 void HeapIterator::Init() {
5057 // Start the iteration. 5066 // Start the iteration.
5058 space_iterator_ = filtering_ == kNoFiltering ? new SpaceIterator : 5067 Heap::EnsureHeapIsIterable();
5059 new SpaceIterator(IntrusiveMarking::SizeOfMarkedObject); 5068 space_iterator_ = new SpaceIterator();
5060 switch (filtering_) {
5061 case kFilterFreeListNodes:
5062 filter_ = new FreeListNodesFilter;
5063 break;
5064 case kFilterUnreachable:
5065 filter_ = new UnreachableObjectsFilter;
5066 break;
5067 default:
5068 break;
5069 }
5070 object_iterator_ = space_iterator_->next(); 5069 object_iterator_ = space_iterator_->next();
5071 } 5070 }
5072 5071
5073 5072
5074 void HeapIterator::Shutdown() { 5073 void HeapIterator::Shutdown() {
5075 #ifdef DEBUG
5076 // Assert that in filtering mode we have iterated through all
5077 // objects. Otherwise, heap will be left in an inconsistent state.
5078 if (filtering_ != kNoFiltering) {
5079 ASSERT(object_iterator_ == NULL);
5080 }
5081 #endif
5082 // Make sure the last iterator is deallocated. 5074 // Make sure the last iterator is deallocated.
5083 delete space_iterator_; 5075 delete space_iterator_;
5084 space_iterator_ = NULL; 5076 space_iterator_ = NULL;
5085 object_iterator_ = NULL; 5077 object_iterator_ = NULL;
5086 delete filter_;
5087 filter_ = NULL;
5088 } 5078 }
5089 5079
5090 5080
5091 HeapObject* HeapIterator::next() { 5081 HeapObject* HeapIterator::Next() {
5092 if (filter_ == NULL) return NextObject();
5093
5094 HeapObject* obj = NextObject();
5095 while (obj != NULL && filter_->SkipObject(obj)) obj = NextObject();
5096 return obj;
5097 }
5098
5099
5100 HeapObject* HeapIterator::NextObject() {
5101 // No iterator means we are done. 5082 // No iterator means we are done.
5102 if (object_iterator_ == NULL) return NULL; 5083 if (object_iterator_ == NULL) return NULL;
5103 5084
5104 if (HeapObject* obj = object_iterator_->next_object()) { 5085 if (HeapObject* obj = object_iterator_->next_object()) {
5105 // If the current iterator has more objects we are fine. 5086 // If the current iterator has more objects we are fine.
5106 return obj; 5087 return obj;
5107 } else { 5088 } else {
5108 // Go though the spaces looking for one that has objects. 5089 // Go though the spaces looking for one that has objects.
5109 while (space_iterator_->has_next()) { 5090 while (space_iterator_->has_next()) {
5110 object_iterator_ = space_iterator_->next(); 5091 object_iterator_ = space_iterator_->next();
5111 if (HeapObject* obj = object_iterator_->next_object()) { 5092 if (HeapObject* obj = object_iterator_->next_object()) {
5112 return obj; 5093 return obj;
5113 } 5094 }
5114 } 5095 }
5115 } 5096 }
5116 // Done with the last space. 5097 // Done with the last space.
5117 object_iterator_ = NULL; 5098 object_iterator_ = NULL;
5118 return NULL; 5099 return NULL;
5119 } 5100 }
5120 5101
5121 5102
5122 void HeapIterator::reset() { 5103 void HeapIterator::Reset() {
5123 // Restart the iterator. 5104 // Restart the iterator.
5124 Shutdown(); 5105 Shutdown();
5125 Init(); 5106 Init();
5126 } 5107 }
5127 5108
5128 5109
5129 #if defined(DEBUG) || defined(LIVE_OBJECT_LIST) 5110 #if defined(DEBUG) || defined(LIVE_OBJECT_LIST)
5130 5111
5131 Object* const PathTracer::kAnyGlobalObject = reinterpret_cast<Object*>(NULL); 5112 Object* const PathTracer::kAnyGlobalObject = reinterpret_cast<Object*>(NULL);
5132 5113
(...skipping 182 matching lines...)
5315 } 5296 }
5316 #endif 5297 #endif
5317 5298
5318 5299
5319 static intptr_t CountTotalHolesSize() { 5300 static intptr_t CountTotalHolesSize() {
5320 intptr_t holes_size = 0; 5301 intptr_t holes_size = 0;
5321 OldSpaces spaces; 5302 OldSpaces spaces;
5322 for (OldSpace* space = spaces.next(); 5303 for (OldSpace* space = spaces.next();
5323 space != NULL; 5304 space != NULL;
5324 space = spaces.next()) { 5305 space = spaces.next()) {
5325 holes_size += space->Waste() + space->AvailableFree(); 5306 holes_size += space->Waste() + space->Available();
5326 } 5307 }
5327 return holes_size; 5308 return holes_size;
5328 } 5309 }
5329 5310
5330 5311
5331 GCTracer::GCTracer() 5312 GCTracer::GCTracer()
5332 : start_time_(0.0), 5313 : start_time_(0.0),
5333 start_size_(0), 5314 start_size_(0),
5334 gc_count_(0), 5315 gc_count_(0),
5335 full_gc_count_(0), 5316 full_gc_count_(0),
(...skipping 220 matching lines...)
5556 void ExternalStringTable::TearDown() { 5537 void ExternalStringTable::TearDown() {
5557 new_space_strings_.Free(); 5538 new_space_strings_.Free();
5558 old_space_strings_.Free(); 5539 old_space_strings_.Free();
5559 } 5540 }
5560 5541
5561 5542
5562 List<Object*> ExternalStringTable::new_space_strings_; 5543 List<Object*> ExternalStringTable::new_space_strings_;
5563 List<Object*> ExternalStringTable::old_space_strings_; 5544 List<Object*> ExternalStringTable::old_space_strings_;
5564 5545
5565 } } // namespace v8::internal 5546 } } // namespace v8::internal