OLD | NEW |
---|---|
1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 24 matching lines...) | |
35 #include "debug.h" | 35 #include "debug.h" |
36 #include "heap-profiler.h" | 36 #include "heap-profiler.h" |
37 #include "global-handles.h" | 37 #include "global-handles.h" |
38 #include "mark-compact.h" | 38 #include "mark-compact.h" |
39 #include "natives.h" | 39 #include "natives.h" |
40 #include "objects-visiting.h" | 40 #include "objects-visiting.h" |
41 #include "runtime-profiler.h" | 41 #include "runtime-profiler.h" |
42 #include "scanner-base.h" | 42 #include "scanner-base.h" |
43 #include "scopeinfo.h" | 43 #include "scopeinfo.h" |
44 #include "snapshot.h" | 44 #include "snapshot.h" |
45 #include "store-buffer.h" | |
45 #include "v8threads.h" | 46 #include "v8threads.h" |
46 #include "vm-state-inl.h" | 47 #include "vm-state-inl.h" |
47 #if V8_TARGET_ARCH_ARM && !V8_INTERPRETED_REGEXP | 48 #if V8_TARGET_ARCH_ARM && !V8_INTERPRETED_REGEXP |
48 #include "regexp-macro-assembler.h" | 49 #include "regexp-macro-assembler.h" |
49 #include "arm/regexp-macro-assembler-arm.h" | 50 #include "arm/regexp-macro-assembler-arm.h" |
50 #endif | 51 #endif |
51 | 52 |
52 | 53 |
53 namespace v8 { | 54 namespace v8 { |
54 namespace internal { | 55 namespace internal { |
(...skipping 569 matching lines...) | |
624 | 625 |
625 Object* context = global_contexts_list_; | 626 Object* context = global_contexts_list_; |
626 while (!context->IsUndefined()) { | 627 while (!context->IsUndefined()) { |
627 Context::cast(context)->normalized_map_cache()->Clear(); | 628 Context::cast(context)->normalized_map_cache()->Clear(); |
628 context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK); | 629 context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK); |
629 } | 630 } |
630 } | 631 } |
631 | 632 |
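
The loop above walks an intrusive singly linked list of global contexts, reading the next link out of each context's slots. A minimal sketch of the same traversal pattern, with illustrative stand-in types (the real list terminates in the undefined value rather than nullptr, and the link is read via get(Context::NEXT_CONTEXT_LINK)):

    #include <vector>

    struct ContextSketch {
      ContextSketch* next_context_link;       // Stand-in for NEXT_CONTEXT_LINK.
      std::vector<int> normalized_map_cache;  // Toy cache payload.
    };

    void ClearNormalizedMapCaches(ContextSketch* head) {
      // nullptr stands in for the undefined sentinel used by the real list.
      for (ContextSketch* c = head; c != nullptr; c = c->next_context_link) {
        c->normalized_map_cache.clear();
      }
    }
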
632 | 633 |
633 #ifdef DEBUG | 634 #ifdef DEBUG |
634 | |
635 enum PageWatermarkValidity { | 635 enum PageWatermarkValidity { |
636 ALL_VALID, | 636 ALL_VALID, |
637 ALL_INVALID | 637 ALL_INVALID |
638 }; | 638 }; |
639 | 639 |
640 static void VerifyPageWatermarkValidity(PagedSpace* space, | 640 static void VerifyPageWatermarkValidity(PagedSpace* space, |
641 PageWatermarkValidity validity) { | 641 PageWatermarkValidity validity) { |
642 PageIterator it(space, PageIterator::PAGES_IN_USE); | 642 PageIterator it(space, PageIterator::PAGES_IN_USE); |
Vyacheslav Egorov (Chromium) 2011/02/02 13:15:47: indentation
Erik Corry 2011/02/03 13:21:17: Done.
| |
643 bool expected_value = (validity == ALL_VALID); | 643 bool expected_value = (validity == ALL_VALID); |
644 while (it.has_next()) { | 644 while (it.has_next()) { |
645 Page* page = it.next(); | 645 Page* page = it.next(); |
646 ASSERT(page->IsWatermarkValid() == expected_value); | 646 ASSERT(page->IsWatermarkValid() == expected_value); |
647 } | 647 } |
648 } | 648 } |
649 #endif | 649 #endif |
650 | 650 |
651 | |
651 void Heap::UpdateSurvivalRateTrend(int start_new_space_size) { | 652 void Heap::UpdateSurvivalRateTrend(int start_new_space_size) { |
652 double survival_rate = | 653 double survival_rate = |
653 (static_cast<double>(young_survivors_after_last_gc_) * 100) / | 654 (static_cast<double>(young_survivors_after_last_gc_) * 100) / |
654 start_new_space_size; | 655 start_new_space_size; |
655 | 656 |
656 if (survival_rate > kYoungSurvivalRateThreshold) { | 657 if (survival_rate > kYoungSurvivalRateThreshold) { |
657 high_survival_rate_period_length_++; | 658 high_survival_rate_period_length_++; |
658 } else { | 659 } else { |
659 high_survival_rate_period_length_ = 0; | 660 high_survival_rate_period_length_ = 0; |
660 } | 661 } |
(...skipping 279 matching lines...) | |
940 #endif | 941 #endif |
941 | 942 |
942 gc_state_ = SCAVENGE; | 943 gc_state_ = SCAVENGE; |
943 | 944 |
944 Page::FlipMeaningOfInvalidatedWatermarkFlag(); | 945 Page::FlipMeaningOfInvalidatedWatermarkFlag(); |
945 #ifdef DEBUG | 946 #ifdef DEBUG |
946 VerifyPageWatermarkValidity(old_pointer_space_, ALL_VALID); | 947 VerifyPageWatermarkValidity(old_pointer_space_, ALL_VALID); |
947 VerifyPageWatermarkValidity(map_space_, ALL_VALID); | 948 VerifyPageWatermarkValidity(map_space_, ALL_VALID); |
948 #endif | 949 #endif |
949 | 950 |
951 | |
950 // We do not update an allocation watermark of the top page during linear | 952 // We do not update an allocation watermark of the top page during linear |
951 // allocation to avoid overhead. So to maintain the watermark invariant | 953 // allocation to avoid overhead. So to maintain the watermark invariant |
952 // we have to manually cache the watermark and mark the top page as having an | 954 // we have to manually cache the watermark and mark the top page as having an |
953 // invalid watermark. This guarantees that dirty regions iteration will use a | 955 // invalid watermark. This guarantees that old space pointer iteration will |
954 // correct watermark even if a linear allocation happens. | 956 // use a correct watermark even if a linear allocation happens. |
955 old_pointer_space_->FlushTopPageWatermark(); | 957 old_pointer_space_->FlushTopPageWatermark(); |
956 map_space_->FlushTopPageWatermark(); | 958 map_space_->FlushTopPageWatermark(); |
957 | 959 |
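
To restate the invariant described in the comment above as code: a hedged sketch with simplified fields (the real Page packs the validity flag into header bits rather than a plain bool):

    #include <cstdint>

    struct PageSketch {
      uintptr_t allocation_watermark;  // Lags behind top during linear allocation.
      uintptr_t cached_watermark;      // Snapshot taken when the flag is cleared.
      bool watermark_valid;
    };

    // Mirrors FlushTopPageWatermark: remember where allocation currently
    // stands and mark the live watermark invalid.
    void FlushTopPageWatermark(PageSketch* page, uintptr_t top) {
      page->cached_watermark = top;
      page->watermark_valid = false;
    }

    // Pointer iteration then picks the safe upper bound, as
    // Heap::IteratePointers does further down in this file.
    uintptr_t IterationLimit(const PageSketch* page) {
      return page->watermark_valid ? page->allocation_watermark
                                   : page->cached_watermark;
    }
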
958 // Implements Cheney's copying algorithm | 960 // Implements Cheney's copying algorithm |
959 LOG(ResourceEvent("scavenge", "begin")); | 961 LOG(ResourceEvent("scavenge", "begin")); |
960 | 962 |
961 // Clear descriptor cache. | 963 // Clear descriptor cache. |
962 DescriptorLookupCache::Clear(); | 964 DescriptorLookupCache::Clear(); |
963 | 965 |
964 // Used for updating survived_since_last_expansion_ at function end. | 966 // Used for updating survived_since_last_expansion_ at function end. |
(...skipping 27 matching lines...) | |
992 promotion_queue.Initialize(new_space_.ToSpaceHigh()); | 994 promotion_queue.Initialize(new_space_.ToSpaceHigh()); |
993 | 995 |
994 #ifdef DEBUG | 996 #ifdef DEBUG |
995 StoreBuffer::Clean(); | 997 StoreBuffer::Clean(); |
996 #endif | 998 #endif |
997 | 999 |
998 ScavengeVisitor scavenge_visitor; | 1000 ScavengeVisitor scavenge_visitor; |
999 // Copy roots. | 1001 // Copy roots. |
1000 IterateRoots(&scavenge_visitor, VISIT_ALL_IN_SCAVENGE); | 1002 IterateRoots(&scavenge_visitor, VISIT_ALL_IN_SCAVENGE); |
1001 | 1003 |
1002 // Copy objects reachable from the old generation. By definition, | 1004 // Copy objects reachable from the old generation. |
1003 // there are no intergenerational pointers in code or data spaces. | 1005 { |
1004 IterateDirtyRegions(old_pointer_space_, | 1006 StoreBufferRebuildScope scope; |
1005 &IteratePointersInDirtyRegion, | 1007 StoreBuffer::IteratePointersToNewSpace(&ScavengeObject); |
1006 &ScavengePointer, | 1008 } |
1007 WATERMARK_CAN_BE_INVALID); | |
1008 | |
1009 IterateDirtyRegions(map_space_, | |
1010 &IteratePointersInDirtyMapsRegion, | |
1011 &ScavengePointer, | |
1012 WATERMARK_CAN_BE_INVALID); | |
1013 | |
1014 lo_space_->IterateDirtyRegions(&ScavengePointer); | |
1015 | 1009 |
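
The hunk above is the heart of this patch: instead of IterateDirtyRegions walking card-marked regions of old space, the scavenger now visits only the slot addresses recorded in the store buffer. A toy model of that remembered-set discipline, with invented names (in the real code, StoreBufferRebuildScope manages re-collecting slots that still need remembering):

    #include <vector>

    struct ObjSketch {
      bool in_new_space;
      ObjSketch* forward;  // Forwarding pointer once copied/promoted.
    };
    using SlotSketch = ObjSketch**;

    // Stand-in for ScavengeObject: move the target out of new space at most
    // once, then follow the forwarding pointer.
    void ScavengeSlot(SlotSketch slot) {
      ObjSketch* target = *slot;
      if (target->forward == nullptr) {
        target->forward = new ObjSketch{false, nullptr};  // "Promoted" copy.
      }
      *slot = target->forward;
    }

    // Stand-in for StoreBuffer::IteratePointersToNewSpace under a rebuild
    // scope: visit recorded slots, keep only those still worth remembering.
    void IterateStoreBuffer(std::vector<SlotSketch>* store_buffer) {
      std::vector<SlotSketch> rebuilt;
      for (SlotSketch slot : *store_buffer) {
        if ((*slot)->in_new_space) ScavengeSlot(slot);
        if ((*slot)->in_new_space) rebuilt.push_back(slot);
      }
      store_buffer->swap(rebuilt);
    }
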
1016 // Copy objects reachable from cells by scavenging cell values directly. | 1010 // Copy objects reachable from cells by scavenging cell values directly. |
1017 HeapObjectIterator cell_iterator(cell_space_); | 1011 HeapObjectIterator cell_iterator(cell_space_); |
1018 for (HeapObject* cell = cell_iterator.next(); | 1012 for (HeapObject* cell = cell_iterator.next(); |
1019 cell != NULL; cell = cell_iterator.next()) { | 1013 cell != NULL; cell = cell_iterator.next()) { |
1020 if (cell->IsJSGlobalPropertyCell()) { | 1014 if (cell->IsJSGlobalPropertyCell()) { |
1021 Address value_address = | 1015 Address value_address = |
1022 reinterpret_cast<Address>(cell) + | 1016 reinterpret_cast<Address>(cell) + |
1023 (JSGlobalPropertyCell::kValueOffset - kHeapObjectTag); | 1017 (JSGlobalPropertyCell::kValueOffset - kHeapObjectTag); |
1024 scavenge_visitor.VisitPointer(reinterpret_cast<Object**>(value_address)); | 1018 scavenge_visitor.VisitPointer(reinterpret_cast<Object**>(value_address)); |
(...skipping 177 matching lines...) | |
1202 | 1196 |
1203 // The addresses new_space_front and new_space_.top() define a | 1197 // The addresses new_space_front and new_space_.top() define a |
1204 // queue of unprocessed copied objects. Process them until the | 1198 // queue of unprocessed copied objects. Process them until the |
1205 // queue is empty. | 1199 // queue is empty. |
1206 while (new_space_front < new_space_.top()) { | 1200 while (new_space_front < new_space_.top()) { |
1207 HeapObject* object = HeapObject::FromAddress(new_space_front); | 1201 HeapObject* object = HeapObject::FromAddress(new_space_front); |
1208 new_space_front += NewSpaceScavenger::IterateBody(object->map(), object); | 1202 new_space_front += NewSpaceScavenger::IterateBody(object->map(), object); |
1209 } | 1203 } |
1210 | 1204 |
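
The two-pointer discipline the comment describes is Cheney's classic trick: new_space_front chases new_space_.top(), and every object between them has been copied but not yet had its fields visited. A compact, self-contained illustration (illustrative types, not V8's):

    #include <cstddef>
    #include <vector>

    struct ObjectSketch {
      std::vector<ObjectSketch*> fields;
      ObjectSketch* forward = nullptr;  // Set once this object has been copied.
    };

    std::vector<ObjectSketch*> to_space;  // Plays the role of the to-semispace.

    // Copy an object at most once; afterwards, follow the forwarding pointer.
    ObjectSketch* Copy(ObjectSketch* o) {
      if (o->forward == nullptr) {
        to_space.push_back(new ObjectSketch{o->fields, nullptr});
        o->forward = to_space.back();
      }
      return o->forward;
    }

    // scan chases the allocation frontier (to_space.size() here, the role of
    // new_space_.top() above); objects between the two are the queue of
    // unprocessed copies.
    void CheneyScan(std::vector<ObjectSketch*>& roots) {
      for (ObjectSketch*& r : roots) r = Copy(r);
      for (size_t scan = 0; scan < to_space.size(); ++scan) {
        for (ObjectSketch*& field : to_space[scan]->fields) field = Copy(field);
      }
      // (This toy leaks the originals; a real collector reclaims the whole
      // from-semispace at once.)
    }
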
1211 // Promote and process all the to-be-promoted objects. | 1205 // Promote and process all the to-be-promoted objects. |
1212 while (!promotion_queue.is_empty()) { | 1206 { |
1213 HeapObject* target; | 1207 StoreBufferRebuildScope scope; |
1214 int size; | 1208 while (!promotion_queue.is_empty()) { |
1215 promotion_queue.remove(&target, &size); | 1209 HeapObject* target; |
1210 int size; | |
1211 promotion_queue.remove(&target, &size); | |
1216 | 1212 |
1217 // Promoted object might be already partially visited | 1213 // Promoted object might be already partially visited |
1218 // during dirty regions iteration. Thus we search specifically | 1214 // during old space pointer iteration. Thus we search specifically |
1219 // for pointers to from semispace instead of looking for pointers | 1215 // for pointers to from semispace instead of looking for pointers |
1220 // to new space. | 1216 // to new space. |
1221 ASSERT(!target->IsMap()); | 1217 ASSERT(!target->IsMap()); |
1222 IterateAndMarkPointersToFromSpace(target->address(), | 1218 IterateAndMarkPointersToFromSpace(target->address(), |
1223 target->address() + size, | 1219 target->address() + size, |
1224 &ScavengePointer); | 1220 &ScavengeObject); |
1221 } | |
1225 } | 1222 } |
1226 | 1223 |
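
The promotion queue drained above is a plain FIFO of (object, size) records for objects that were moved straight to old space; each entry is rescanned for pointers that still refer to from-space. A minimal sketch with stand-in types:

    #include <queue>
    #include <utility>

    struct PromotedSketch { char* address; };

    // Drain the queue, rescanning each promoted object's body, in the spirit
    // of the IterateAndMarkPointersToFromSpace call above.
    void ProcessPromotionQueue(
        std::queue<std::pair<PromotedSketch*, int>>* queue,
        void (*scan_region)(char* start, char* end)) {
      while (!queue->empty()) {
        auto [target, size] = queue->front();
        queue->pop();
        scan_region(target->address, target->address + size);
      }
    }
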
1227 // Take another spin if there are now unswept objects in new space | 1224 // Take another spin if there are now unswept objects in new space |
1228 // (there are currently no more unswept promoted objects). | 1225 // (there are currently no more unswept promoted objects). |
1229 } while (new_space_front < new_space_.top()); | 1226 } while (new_space_front < new_space_.top()); |
1230 | 1227 |
1231 return new_space_front; | 1228 return new_space_front; |
1232 } | 1229 } |
1233 | 1230 |
1234 | 1231 |
(...skipping 126 matching lines...) | |
1361 promotion_queue.insert(target, object_size); | 1358 promotion_queue.insert(target, object_size); |
1362 } | 1359 } |
1363 | 1360 |
1364 Heap::tracer()->increment_promoted_objects_size(object_size); | 1361 Heap::tracer()->increment_promoted_objects_size(object_size); |
1365 return; | 1362 return; |
1366 } | 1363 } |
1367 } | 1364 } |
1368 Object* result = | 1365 Object* result = |
1369 Heap::new_space()->AllocateRaw(object_size)->ToObjectUnchecked(); | 1366 Heap::new_space()->AllocateRaw(object_size)->ToObjectUnchecked(); |
1370 *slot = MigrateObject(object, HeapObject::cast(result), object_size); | 1367 *slot = MigrateObject(object, HeapObject::cast(result), object_size); |
1368 if (!Heap::InNewSpace(reinterpret_cast<Address>(slot))) { | |
1369 StoreBuffer::EnterDirectlyIntoStoreBuffer( | |
1370 reinterpret_cast<Address>(slot)); | |
1371 } | |
1371 return; | 1372 return; |
1372 } | 1373 } |
1373 | 1374 |
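
The lines added above close a remembered-set hole: when the copied object stays in new space but the slot referring to it lives outside it, that slot must be recorded immediately. A hedged restatement with a toy predicate and set (the layout constant below is invented purely for illustration):

    #include <cstdint>
    #include <unordered_set>

    // Hypothetical flat layout: everything below the limit is new space.
    constexpr uintptr_t kNewSpaceLimitSketch = 1u << 20;
    inline bool InNewSpaceSketch(uintptr_t addr) {
      return addr < kNewSpaceLimitSketch;
    }

    std::unordered_set<uintptr_t> store_buffer_sketch;

    // Mirrors the added check: an old-generation slot that now holds a
    // new-space pointer goes straight into the store buffer.
    void RecordSlot(uintptr_t slot_address) {
      if (!InNewSpaceSketch(slot_address)) {
        store_buffer_sketch.insert(slot_address);
      }
    }
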
1374 | 1375 |
1375 static inline void EvacuateFixedArray(Map* map, | 1376 static inline void EvacuateFixedArray(Map* map, |
1376 HeapObject** slot, | 1377 HeapObject** slot, |
1377 HeapObject* object) { | 1378 HeapObject* object) { |
1378 int object_size = FixedArray::BodyDescriptor::SizeOf(map, object); | 1379 int object_size = FixedArray::BodyDescriptor::SizeOf(map, object); |
1379 EvacuateObject<POINTER_OBJECT, UNKNOWN_SIZE>(map, | 1380 EvacuateObject<POINTER_OBJECT, UNKNOWN_SIZE>(map, |
1380 slot, | 1381 slot, |
(...skipping 2511 matching lines...) | |
3892 return cell_space_->Contains(addr); | 3893 return cell_space_->Contains(addr); |
3893 case LO_SPACE: | 3894 case LO_SPACE: |
3894 return lo_space_->SlowContains(addr); | 3895 return lo_space_->SlowContains(addr); |
3895 } | 3896 } |
3896 | 3897 |
3897 return false; | 3898 return false; |
3898 } | 3899 } |
3899 | 3900 |
3900 | 3901 |
3901 #ifdef DEBUG | 3902 #ifdef DEBUG |
3902 static void DummyScavengePointer(HeapObject** p) { | 3903 static void VerifyPointersUnderWatermark( |
Vyacheslav Egorov (Chromium) 2011/02/02 13:15:47: Why we have function that does not do anything?
Erik Corry 2011/02/03 13:21:17: Function restored.
| |
3904 PagedSpace* space, | |
3905 PointerRegionCallback visit_pointer_region) { | |
3903 } | 3906 } |
3904 | 3907 |
3905 | 3908 |
3906 static void VerifyPointersUnderWatermark( | |
3907 PagedSpace* space, | |
3908 DirtyRegionCallback visit_dirty_region) { | |
3909 PageIterator it(space, PageIterator::PAGES_IN_USE); | |
3910 | |
3911 while (it.has_next()) { | |
3912 Page* page = it.next(); | |
3913 Address start = page->ObjectAreaStart(); | |
3914 Address end = page->AllocationWatermark(); | |
3915 | |
3916 Heap::IterateDirtyRegions(Page::kAllRegionsDirtyMarks, | |
3917 start, | |
3918 end, | |
3919 visit_dirty_region, | |
3920 &DummyScavengePointer); | |
3921 } | |
3922 } | |
3923 | |
3924 | |
3925 static void VerifyPointersUnderWatermark(LargeObjectSpace* space) { | 3909 static void VerifyPointersUnderWatermark(LargeObjectSpace* space) { |
3926 LargeObjectIterator it(space); | 3910 LargeObjectIterator it(space); |
3927 for (HeapObject* object = it.next(); object != NULL; object = it.next()) { | 3911 for (HeapObject* object = it.next(); object != NULL; object = it.next()) { |
3928 if (object->IsFixedArray()) { | 3912 if (object->IsFixedArray()) { |
3929 Address slot_address = object->address(); | 3913 Address slot_address = object->address(); |
3930 Address end = object->address() + object->Size(); | 3914 Address end = object->address() + object->Size(); |
3931 | 3915 |
3932 while (slot_address < end) { | 3916 while (slot_address < end) { |
3933 HeapObject** slot = reinterpret_cast<HeapObject**>(slot_address); | 3917 HeapObject** slot = reinterpret_cast<HeapObject**>(slot_address); |
3934 // When we are not in GC the Heap::InNewSpace() predicate | 3918 // When we are not in GC the Heap::InNewSpace() predicate |
(...skipping 14 matching lines...) | |
3949 | 3933 |
3950 VerifyPointersVisitor visitor; | 3934 VerifyPointersVisitor visitor; |
3951 IterateRoots(&visitor, VISIT_ONLY_STRONG); | 3935 IterateRoots(&visitor, VISIT_ONLY_STRONG); |
3952 | 3936 |
3953 new_space_.Verify(); | 3937 new_space_.Verify(); |
3954 | 3938 |
3955 old_pointer_space_->Verify(&visitor); | 3939 old_pointer_space_->Verify(&visitor); |
3956 map_space_->Verify(&visitor); | 3940 map_space_->Verify(&visitor); |
3957 | 3941 |
3958 VerifyPointersUnderWatermark(old_pointer_space_, | 3942 VerifyPointersUnderWatermark(old_pointer_space_, |
3959 &IteratePointersInDirtyRegion); | 3943 &IteratePointersToNewSpace); |
3960 VerifyPointersUnderWatermark(map_space_, | 3944 VerifyPointersUnderWatermark(map_space_, |
3961 &IteratePointersInDirtyMapsRegion); | 3945 &IteratePointersFromMapsToNewSpace); |
3962 VerifyPointersUnderWatermark(lo_space_); | 3946 VerifyPointersUnderWatermark(lo_space_); |
3963 | 3947 |
3964 VerifyPageWatermarkValidity(old_pointer_space_, ALL_INVALID); | 3948 VerifyPageWatermarkValidity(old_pointer_space_, ALL_INVALID); |
3965 VerifyPageWatermarkValidity(map_space_, ALL_INVALID); | 3949 VerifyPageWatermarkValidity(map_space_, ALL_INVALID); |
3966 | 3950 |
3967 VerifyPointersVisitor no_dirty_regions_visitor; | 3951 VerifyPointersVisitor no_dirty_regions_visitor; |
3968 old_data_space_->Verify(&no_dirty_regions_visitor); | 3952 old_data_space_->Verify(&no_dirty_regions_visitor); |
3969 code_space_->Verify(&no_dirty_regions_visitor); | 3953 code_space_->Verify(&no_dirty_regions_visitor); |
3970 cell_space_->Verify(&no_dirty_regions_visitor); | 3954 cell_space_->Verify(&no_dirty_regions_visitor); |
3971 | 3955 |
(...skipping 77 matching lines...) | |
4049 ASSERT(reinterpret_cast<Object*>(kFromSpaceZapValue)->IsHeapObject()); | 4033 ASSERT(reinterpret_cast<Object*>(kFromSpaceZapValue)->IsHeapObject()); |
4050 for (Address a = new_space_.FromSpaceLow(); | 4034 for (Address a = new_space_.FromSpaceLow(); |
4051 a < new_space_.FromSpaceHigh(); | 4035 a < new_space_.FromSpaceHigh(); |
4052 a += kPointerSize) { | 4036 a += kPointerSize) { |
4053 Memory::Address_at(a) = kFromSpaceZapValue; | 4037 Memory::Address_at(a) = kFromSpaceZapValue; |
4054 } | 4038 } |
4055 } | 4039 } |
4056 #endif // DEBUG | 4040 #endif // DEBUG |
4057 | 4041 |
4058 | 4042 |
4059 bool Heap::IteratePointersInDirtyRegion(Address start, | 4043 void Heap::IteratePointersToNewSpace(Address start, |
4060 Address end, | 4044 Address end, |
4061 ObjectSlotCallback copy_object_func) { | 4045 ObjectSlotCallback copy_object_func) { |
4062 bool pointers_to_new_space_found = false; | |
4063 | |
4064 for (Address slot_address = start; | 4046 for (Address slot_address = start; |
4065 slot_address < end; | 4047 slot_address < end; |
4066 slot_address += kPointerSize) { | 4048 slot_address += kPointerSize) { |
4067 Object** slot = reinterpret_cast<Object**>(slot_address); | 4049 Object** slot = reinterpret_cast<Object**>(slot_address); |
4068 if (Heap::InNewSpace(*slot)) { | 4050 if (Heap::InNewSpace(*slot)) { |
4069 ASSERT((*slot)->IsHeapObject()); | 4051 HeapObject* object = reinterpret_cast<HeapObject*>(*slot); |
4070 copy_object_func(reinterpret_cast<HeapObject**>(slot)); | 4052 ASSERT(object->IsHeapObject()); |
4071 if (Heap::InNewSpace(*slot)) { | 4053 copy_object_func(reinterpret_cast<HeapObject**>(slot), object); |
4072 ASSERT((*slot)->IsHeapObject()); | |
4073 StoreBuffer::Mark(reinterpret_cast<Address>(slot)); | |
4074 pointers_to_new_space_found = true; | |
4075 } | |
4076 } | 4054 } |
4077 } | 4055 } |
4078 return pointers_to_new_space_found; | |
4079 } | 4056 } |
4080 | 4057 |
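
Note the signature change folded into this hunk: ObjectSlotCallback now receives both the slot and the already-loaded object it held, presumably to spare callees a reload. A sketch of the two shapes assumed here (names are illustrative):

    struct HeapObjectSketch {};

    // Old shape: the callback had to dereference the slot itself.
    typedef void (*OldSlotCallback)(HeapObjectSketch** slot);

    // New shape, as used in the loop above: the loaded value rides along.
    typedef void (*NewSlotCallback)(HeapObjectSketch** slot,
                                    HeapObjectSketch* value);
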
4081 | 4058 |
4082 // Compute start address of the first map following given addr. | 4059 // Compute start address of the first map following given addr. |
4083 static inline Address MapStartAlign(Address addr) { | 4060 static inline Address MapStartAlign(Address addr) { |
4084 Address page = Page::FromAddress(addr)->ObjectAreaStart(); | 4061 Address page = Page::FromAddress(addr)->ObjectAreaStart(); |
4085 return page + (((addr - page) + (Map::kSize - 1)) / Map::kSize * Map::kSize); | 4062 return page + (((addr - page) + (Map::kSize - 1)) / Map::kSize * Map::kSize); |
4086 } | 4063 } |
4087 | 4064 |
4088 | 4065 |
4089 // Compute end address of the first map preceding given addr. | 4066 // Compute end address of the first map preceding given addr. |
4090 static inline Address MapEndAlign(Address addr) { | 4067 static inline Address MapEndAlign(Address addr) { |
4091 Address page = Page::FromAllocationTop(addr)->ObjectAreaStart(); | 4068 Address page = Page::FromAllocationTop(addr)->ObjectAreaStart(); |
4092 return page + ((addr - page) / Map::kSize * Map::kSize); | 4069 return page + ((addr - page) / Map::kSize * Map::kSize); |
4093 } | 4070 } |
4094 | 4071 |
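
The two helpers round an arbitrary address out to map-object boundaries within a page: MapStartAlign rounds up to the next map start, MapEndAlign rounds down to the last map end. A quick self-checking restatement of the arithmetic (Map::kSize is platform dependent; 88 below is just a stand-in):

    #include <cassert>
    #include <cstdint>

    constexpr uintptr_t kMapSize = 88;  // Hypothetical stand-in for Map::kSize.

    // First map boundary at or after addr (mirrors MapStartAlign).
    uintptr_t MapStartAlignSketch(uintptr_t addr, uintptr_t page_start) {
      return page_start +
             ((addr - page_start + kMapSize - 1) / kMapSize) * kMapSize;
    }

    // Last map boundary at or before addr (mirrors MapEndAlign).
    uintptr_t MapEndAlignSketch(uintptr_t addr, uintptr_t page_start) {
      return page_start + ((addr - page_start) / kMapSize) * kMapSize;
    }

    int main() {
      uintptr_t page = 0;
      assert(MapStartAlignSketch(page + 1, page) == page + kMapSize);
      assert(MapStartAlignSketch(page + kMapSize, page) == page + kMapSize);
      assert(MapEndAlignSketch(page + kMapSize + 10, page) == page + kMapSize);
      return 0;
    }
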
4095 | 4072 |
4096 static bool IteratePointersInDirtyMaps(Address start, | 4073 static void IteratePointersToNewSpaceInMaps( |
4097 Address end, | 4074 Address start, |
4098 ObjectSlotCallback copy_object_func) { | 4075 Address end, |
4076 ObjectSlotCallback copy_object_func) { | |
4099 ASSERT(MapStartAlign(start) == start); | 4077 ASSERT(MapStartAlign(start) == start); |
4100 ASSERT(MapEndAlign(end) == end); | 4078 ASSERT(MapEndAlign(end) == end); |
4101 | 4079 |
4102 Address map_address = start; | 4080 Address map_address = start; |
4103 bool pointers_to_new_space_found = false; | |
4104 | 4081 |
4105 while (map_address < end) { | 4082 while (map_address < end) { |
4106 ASSERT(!Heap::InNewSpace(Memory::Object_at(map_address))); | 4083 ASSERT(!Heap::InNewSpace(Memory::Object_at(map_address))); |
4107 ASSERT(Memory::Object_at(map_address)->IsMap()); | 4084 ASSERT(Memory::Object_at(map_address)->IsMap()); |
4108 | 4085 |
4109 Address pointer_fields_start = map_address + Map::kPointerFieldsBeginOffset; | 4086 Address pointer_fields_start = map_address + Map::kPointerFieldsBeginOffset; |
4110 Address pointer_fields_end = map_address + Map::kPointerFieldsEndOffset; | 4087 Address pointer_fields_end = map_address + Map::kPointerFieldsEndOffset; |
4111 | 4088 |
4112 if (Heap::IteratePointersInDirtyRegion(pointer_fields_start, | 4089 Heap::IteratePointersToNewSpace(pointer_fields_start, |
4113 pointer_fields_end, | 4090 pointer_fields_end, |
4114 copy_object_func)) { | 4091 copy_object_func); |
4115 pointers_to_new_space_found = true; | |
4116 } | |
4117 | |
4118 map_address += Map::kSize; | 4092 map_address += Map::kSize; |
4119 } | 4093 } |
4120 | |
4121 return pointers_to_new_space_found; | |
4122 } | 4094 } |
4123 | 4095 |
4124 | 4096 |
4125 bool Heap::IteratePointersInDirtyMapsRegion( | 4097 void Heap::IteratePointersFromMapsToNewSpace( |
4126 Address start, | 4098 Address start, |
4127 Address end, | 4099 Address end, |
4128 ObjectSlotCallback copy_object_func) { | 4100 ObjectSlotCallback copy_object_func) { |
4129 Address map_aligned_start = MapStartAlign(start); | 4101 Address map_aligned_start = MapStartAlign(start); |
4130 Address map_aligned_end = MapEndAlign(end); | 4102 Address map_aligned_end = MapEndAlign(end); |
4131 | 4103 |
4132 bool contains_pointers_to_new_space = false; | |
4133 | |
4134 if (map_aligned_start != start) { | 4104 if (map_aligned_start != start) { |
4135 Address prev_map = map_aligned_start - Map::kSize; | 4105 Address prev_map = map_aligned_start - Map::kSize; |
4136 ASSERT(Memory::Object_at(prev_map)->IsMap()); | 4106 ASSERT(Memory::Object_at(prev_map)->IsMap()); |
4137 | 4107 |
4138 Address pointer_fields_start = | 4108 Address pointer_fields_start = |
4139 Max(start, prev_map + Map::kPointerFieldsBeginOffset); | 4109 Max(start, prev_map + Map::kPointerFieldsBeginOffset); |
4140 | 4110 |
4141 Address pointer_fields_end = | 4111 Address pointer_fields_end = |
4142 Min(prev_map + Map::kPointerFieldsEndOffset, end); | 4112 Min(prev_map + Map::kPointerFieldsEndOffset, end); |
4143 | 4113 |
Vyacheslav Egorov (Chromium) 2011/02/02 13:15:47: If you are not using region marks -> your start an
Erik Corry 2011/02/03 13:21:17: Done.
| |
4144 contains_pointers_to_new_space = | 4114 IteratePointersToNewSpace(pointer_fields_start, |
4145 IteratePointersInDirtyRegion(pointer_fields_start, | 4115 pointer_fields_end, |
4146 pointer_fields_end, | 4116 copy_object_func); |
4147 copy_object_func) | |
4148 || contains_pointers_to_new_space; | |
4149 } | 4117 } |
4150 | 4118 |
4151 contains_pointers_to_new_space = | 4119 IteratePointersToNewSpaceInMaps(map_aligned_start, |
4152 IteratePointersInDirtyMaps(map_aligned_start, | 4120 map_aligned_end, |
4153 map_aligned_end, | 4121 copy_object_func); |
4154 copy_object_func) | |
4155 || contains_pointers_to_new_space; | |
4156 | 4122 |
4157 if (map_aligned_end != end) { | 4123 if (map_aligned_end != end) { |
4158 ASSERT(Memory::Object_at(map_aligned_end)->IsMap()); | 4124 ASSERT(Memory::Object_at(map_aligned_end)->IsMap()); |
4159 | 4125 |
4160 Address pointer_fields_start = | 4126 Address pointer_fields_start = |
4161 map_aligned_end + Map::kPointerFieldsBeginOffset; | 4127 map_aligned_end + Map::kPointerFieldsBeginOffset; |
4162 | 4128 |
4163 Address pointer_fields_end = | 4129 Address pointer_fields_end = |
4164 Min(end, map_aligned_end + Map::kPointerFieldsEndOffset); | 4130 Min(end, map_aligned_end + Map::kPointerFieldsEndOffset); |
4165 | 4131 |
4166 contains_pointers_to_new_space = | 4132 IteratePointersToNewSpace(pointer_fields_start, |
4167 IteratePointersInDirtyRegion(pointer_fields_start, | 4133 pointer_fields_end, |
Vyacheslav Egorov (Chromium) 2011/02/02 13:15:47: indent
Erik Corry 2011/02/03 13:21:17: Done.
| |
4168 pointer_fields_end, | 4134 copy_object_func); |
4169 copy_object_func) | |
4170 || contains_pointers_to_new_space; | |
4171 } | 4135 } |
4172 | |
4173 return contains_pointers_to_new_space; | |
4174 } | 4136 } |
4175 | 4137 |
4176 | 4138 |
4177 void Heap::IterateAndMarkPointersToFromSpace(Address start, | 4139 void Heap::IterateAndMarkPointersToFromSpace(Address start, |
4178 Address end, | 4140 Address end, |
4179 ObjectSlotCallback callback) { | 4141 ObjectSlotCallback callback) { |
4180 Address slot_address = start; | 4142 Address slot_address = start; |
4181 while (slot_address < end) { | 4143 while (slot_address < end) { |
4182 Object** slot = reinterpret_cast<Object**>(slot_address); | 4144 Object** slot = reinterpret_cast<Object**>(slot_address); |
4183 if (Heap::InFromSpace(*slot)) { | 4145 Object* object = *slot; |
4184 ASSERT((*slot)->IsHeapObject()); | 4146 // In normal store buffer operation we use this function to process the |
4185 callback(reinterpret_cast<HeapObject**>(slot)); | 4147 // promotion queue and we never scan an object twice so we will not see |
4148 // pointers that have already been updated to point to to-space. But | |
4149 // in the case of store buffer overflow we scan the entire old space to | |
4150 // find pointers that point to new-space and in that case we may hit | |
4151 // newly promoted objects and fix the pointers before the promotion | |
4152 // queue gets to them. | |
4153 ASSERT(StoreBuffer::store_buffer_mode() != | |
4154 StoreBuffer::kStoreBufferFunctional || | |
4155 !Heap::InToSpace(object)); | |
4156 if (Heap::InFromSpace(object)) { | |
4157 callback(reinterpret_cast<HeapObject**>(slot), HeapObject::cast(object)); | |
4186 if (Heap::InNewSpace(*slot)) { | 4158 if (Heap::InNewSpace(*slot)) { |
4159 ASSERT(Heap::InToSpace(*slot)); | |
4187 ASSERT((*slot)->IsHeapObject()); | 4160 ASSERT((*slot)->IsHeapObject()); |
4188 StoreBuffer::Mark(reinterpret_cast<Address>(slot)); | 4161 StoreBuffer::EnterDirectlyIntoStoreBuffer( |
4162 reinterpret_cast<Address>(slot)); | |
4189 } | 4163 } |
4190 } | 4164 } |
4191 slot_address += kPointerSize; | 4165 slot_address += kPointerSize; |
4192 } | 4166 } |
4193 } | 4167 } |
4194 | 4168 |
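
The enlarged comment distinguishes two regimes: in normal operation only promotion-queue rescans reach this loop, so a slot already pointing at to-space would mean an object was processed twice; after store-buffer overflow the whole old space is rescanned and such slots are expected. A toy version of the per-slot decision, with stand-in predicates passed as callbacks:

    #include <cstdint>

    enum class Where { kOldSpace, kFromSpace, kToSpace };

    // Mirrored from the loop above: only from-space pointers are forwarded,
    // and a slot that ends up pointing into new space is re-entered into the
    // remembered set for the next scavenge.
    void VisitSlot(Where* slot_value, uintptr_t slot_address,
                   void (*forward)(Where*), void (*remember)(uintptr_t)) {
      if (*slot_value == Where::kFromSpace) {
        forward(slot_value);       // ScavengeObject analogue.
        if (*slot_value == Where::kToSpace) {
          remember(slot_address);  // EnterDirectlyIntoStoreBuffer analogue.
        }
      }
    }
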
4195 | 4169 |
4196 uint32_t Heap::IterateDirtyRegions( | |
4197 uint32_t marks, | |
4198 Address area_start, | |
4199 Address area_end, | |
4200 DirtyRegionCallback visit_dirty_region, | |
4201 ObjectSlotCallback copy_object_func) { | |
4202 ASSERT(marks == Page::kAllRegionsDirtyMarks); | |
4203 visit_dirty_region(area_start, area_end, copy_object_func); | |
4204 return Page::kAllRegionsDirtyMarks; | |
4205 } | |
4206 | |
4207 | |
4208 #ifdef DEBUG | 4170 #ifdef DEBUG |
4209 static void CheckStoreBuffer(Object** current, | 4171 static void CheckStoreBuffer(Object** current, |
4210 Object** limit, | 4172 Object** limit, |
4211 Object**** store_buffer_position, | 4173 Object**** store_buffer_position, |
4212 Object*** store_buffer_top) { | 4174 Object*** store_buffer_top) { |
4213 for ( ; current < limit; current++) { | 4175 for ( ; current < limit; current++) { |
4214 Object* o = *current; | 4176 Object* o = *current; |
4215 if (reinterpret_cast<uintptr_t>(o) == kFreeListZapValue) { | 4177 if (reinterpret_cast<uintptr_t>(o) == kFreeListZapValue) { |
4216 Object*** zap_checker = *store_buffer_position; | 4178 Object*** zap_checker = *store_buffer_position; |
4217 while (*zap_checker < current) { | 4179 while (*zap_checker < current) { |
(...skipping 128 matching lines...) | |
4346 &store_buffer_position, | 4308 &store_buffer_position, |
4347 store_buffer_top); | 4309 store_buffer_top); |
4348 } | 4310 } |
4349 } | 4311 } |
4350 } | 4312 } |
4351 | 4313 |
4352 | 4314 |
4353 #endif | 4315 #endif |
4354 | 4316 |
4355 | 4317 |
4356 void Heap::IterateDirtyRegions( | 4318 void Heap::IteratePointers( |
4357 PagedSpace* space, | 4319 PagedSpace* space, |
4358 DirtyRegionCallback visit_dirty_region, | 4320 PointerRegionCallback visit_pointer_region, |
4359 ObjectSlotCallback copy_object_func, | 4321 ObjectSlotCallback copy_object_func, |
4360 ExpectedPageWatermarkState expected_page_watermark_state) { | 4322 ExpectedPageWatermarkState expected_page_watermark_state) { |
4361 | 4323 |
4362 PageIterator pages(space, PageIterator::PAGES_IN_USE); | 4324 PageIterator pages(space, PageIterator::PAGES_IN_USE); |
4363 | 4325 |
4364 while (pages.has_next()) { | 4326 while (pages.has_next()) { |
4365 Page* page = pages.next(); | 4327 Page* page = pages.next(); |
4366 Address start = page->ObjectAreaStart(); | 4328 Address start = page->ObjectAreaStart(); |
4367 | 4329 |
4368 // Do not try to visit pointers beyond page allocation watermark. | 4330 // Do not try to visit pointers beyond page allocation watermark. |
4369 // Page can contain garbage pointers there. | 4331 // Page can contain garbage pointers there. |
4370 Address end; | 4332 Address end; |
4371 | 4333 |
4372 if ((expected_page_watermark_state == WATERMARK_SHOULD_BE_VALID) || | 4334 if ((expected_page_watermark_state == WATERMARK_SHOULD_BE_VALID) || |
4373 page->IsWatermarkValid()) { | 4335 page->IsWatermarkValid()) { |
4374 end = page->AllocationWatermark(); | 4336 end = page->AllocationWatermark(); |
4375 } else { | 4337 } else { |
4376 end = page->CachedAllocationWatermark(); | 4338 end = page->CachedAllocationWatermark(); |
4377 } | 4339 } |
4378 | 4340 |
4379 ASSERT(space == old_pointer_space_ || | 4341 ASSERT(space == old_pointer_space_ || |
4380 (space == map_space_ && | 4342 (space == map_space_ && |
4381 ((page->ObjectAreaStart() - end) % Map::kSize == 0))); | 4343 ((page->ObjectAreaStart() - end) % Map::kSize == 0))); |
4382 | 4344 |
4383 IterateDirtyRegions(Page::kAllRegionsDirtyMarks, | 4345 visit_pointer_region(start, end, copy_object_func); |
4384 start, | |
4385 end, | |
4386 visit_dirty_region, | |
4387 copy_object_func); | |
4388 | 4346 |
4389 // Mark page watermark as invalid to maintain watermark validity invariant. | 4347 // Mark page watermark as invalid to maintain watermark validity invariant. |
4390 // See Page::FlipMeaningOfInvalidatedWatermarkFlag() for details. | 4348 // See Page::FlipMeaningOfInvalidatedWatermarkFlag() for details. |
4391 page->InvalidateWatermark(true); | 4349 page->InvalidateWatermark(true); |
4392 } | 4350 } |
4393 } | 4351 } |
4394 | 4352 |
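
Tying this back to the watermark sketch given earlier: the page walk chooses its end address exactly as IterationLimit did there, then re-invalidates the watermark to maintain the flag-flip invariant. A condensed model of the loop, assuming PageSketch and IterationLimit from that earlier sketch (the page iterator is a plain vector here):

    #include <cstdint>
    #include <vector>

    // Assumes PageSketch and IterationLimit from the earlier sketch.
    void IteratePointersSketch(
        std::vector<PageSketch*>& pages_in_use, uintptr_t object_area_start,
        void (*visit_pointer_region)(uintptr_t start, uintptr_t end)) {
      for (PageSketch* page : pages_in_use) {
        // Never visit past the (possibly cached) allocation watermark;
        // memory beyond it can hold garbage pointers.
        visit_pointer_region(object_area_start, IterationLimit(page));
        // Re-invalidate so Page::FlipMeaningOfInvalidatedWatermarkFlag()
        // keeps working on the next cycle.
        page->watermark_valid = false;
      }
    }
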
4395 | 4353 |
4396 void Heap::IterateRoots(ObjectVisitor* v, VisitMode mode) { | 4354 void Heap::IterateRoots(ObjectVisitor* v, VisitMode mode) { |
4397 IterateStrongRoots(v, mode); | 4355 IterateStrongRoots(v, mode); |
(...skipping 1187 matching lines...) | |
5585 void ExternalStringTable::TearDown() { | 5543 void ExternalStringTable::TearDown() { |
5586 new_space_strings_.Free(); | 5544 new_space_strings_.Free(); |
5587 old_space_strings_.Free(); | 5545 old_space_strings_.Free(); |
5588 } | 5546 } |
5589 | 5547 |
5590 | 5548 |
5591 List<Object*> ExternalStringTable::new_space_strings_; | 5549 List<Object*> ExternalStringTable::new_space_strings_; |
5592 List<Object*> ExternalStringTable::old_space_strings_; | 5550 List<Object*> ExternalStringTable::old_space_strings_; |
5593 | 5551 |
5594 } } // namespace v8::internal | 5552 } } // namespace v8::internal |