| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 713 matching lines...) |
| 724 } | 724 } |
| 725 } | 725 } |
| 726 | 726 |
| 727 | 727 |
| 728 void Heap::MoveElements(FixedArray* array, | 728 void Heap::MoveElements(FixedArray* array, |
| 729 int dst_index, | 729 int dst_index, |
| 730 int src_index, | 730 int src_index, |
| 731 int len) { | 731 int len) { |
| 732 if (len == 0) return; | 732 if (len == 0) return; |
| 733 | 733 |
| 734 ASSERT(array->map() != HEAP->fixed_cow_array_map()); | 734 ASSERT(array->map() != fixed_cow_array_map()); |
| 735 Object** dst_objects = array->data_start() + dst_index; | 735 Object** dst_objects = array->data_start() + dst_index; |
| 736 OS::MemMove(dst_objects, | 736 OS::MemMove(dst_objects, |
| 737 array->data_start() + src_index, | 737 array->data_start() + src_index, |
| 738 len * kPointerSize); | 738 len * kPointerSize); |
| 739 if (!InNewSpace(array)) { | 739 if (!InNewSpace(array)) { |
| 740 for (int i = 0; i < len; i++) { | 740 for (int i = 0; i < len; i++) { |
| 741 // TODO(hpayer): check store buffer for entries | 741 // TODO(hpayer): check store buffer for entries |
| 742 if (InNewSpace(dst_objects[i])) { | 742 if (InNewSpace(dst_objects[i])) { |
| 743 RecordWrite(array->address(), array->OffsetOfElementAt(dst_index + i)); | 743 RecordWrite(array->address(), array->OffsetOfElementAt(dst_index + i)); |
| 744 } | 744 } |
| (...skipping 13 matching lines...) |
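
Note on the MoveElements hunk above: the only change is dropping the process-global HEAP macro in favor of the member accessor, since MoveElements is itself a Heap method and HEAP assumes a single active isolate. A minimal compilable sketch of the pattern, using stand-in types rather than the real v8::internal classes:

    #include <cassert>

    struct Map {};

    class Heap {
     public:
      // Stand-in for the real root accessor; each Heap owns its own roots.
      Map* fixed_cow_array_map() { return &fixed_cow_array_map_; }

      void MoveElements(Map* array_map) {
        // Before: ASSERT(array->map() != HEAP->fixed_cow_array_map());
        // Inside a member, reach the root through `this`, so the check stays
        // correct when several isolates (and thus several heaps) coexist.
        assert(array_map != fixed_cow_array_map());
      }

     private:
      Map fixed_cow_array_map_;
    };

    int main() {
      Heap heap;
      Map plain_map;
      heap.MoveElements(&plain_map);
      return 0;
    }
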
| 758 if ((*p)->IsHeapObject()) { | 758 if ((*p)->IsHeapObject()) { |
| 759 // Check that the string is actually internalized. | 759 // Check that the string is actually internalized. |
| 760 CHECK((*p)->IsTheHole() || (*p)->IsUndefined() || | 760 CHECK((*p)->IsTheHole() || (*p)->IsUndefined() || |
| 761 (*p)->IsInternalizedString()); | 761 (*p)->IsInternalizedString()); |
| 762 } | 762 } |
| 763 } | 763 } |
| 764 } | 764 } |
| 765 }; | 765 }; |
| 766 | 766 |
| 767 | 767 |
| 768 static void VerifyStringTable() { | 768 static void VerifyStringTable(Heap* heap) { |
| 769 StringTableVerifier verifier; | 769 StringTableVerifier verifier; |
| 770 HEAP->string_table()->IterateElements(&verifier); | 770 heap->string_table()->IterateElements(&verifier); |
| 771 } | 771 } |
| 772 #endif // VERIFY_HEAP | 772 #endif // VERIFY_HEAP |
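
The StringTableVerifier hunk applies the same idea to a free function: VerifyStringTable can no longer read HEAP, so the heap is passed in explicitly and the call sites hand over `this`. A compilable sketch with hypothetical stand-in types (StringTable here is a plain vector, not V8's hash table):

    #include <cassert>
    #include <vector>

    struct Object { bool internalized = true; };

    struct StringTableVerifier {
      void VisitPointer(Object* p) { assert(p->internalized); }
    };

    struct StringTable {
      std::vector<Object> elements;
      void IterateElements(StringTableVerifier* v) {
        for (Object& o : elements) v->VisitPointer(&o);
      }
    };

    class Heap {
     public:
      StringTable* string_table() { return &table_; }
     private:
      StringTable table_;
    };

    // Before: VerifyStringTable() took no arguments and read HEAP.
    static void VerifyStringTable(Heap* heap) {
      StringTableVerifier verifier;
      heap->string_table()->IterateElements(&verifier);
    }

    int main() {
      Heap heap;
      VerifyStringTable(&heap);
      return 0;
    }

Both verification points in PerformGarbageCollection below then call VerifyStringTable(this), so the verifier always runs against the heap that is actually collecting.
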
| 773 | 773 |
| 774 | 774 |
| 775 static bool AbortIncrementalMarkingAndCollectGarbage( | 775 static bool AbortIncrementalMarkingAndCollectGarbage( |
| 776 Heap* heap, | 776 Heap* heap, |
| 777 AllocationSpace space, | 777 AllocationSpace space, |
| 778 const char* gc_reason = NULL) { | 778 const char* gc_reason = NULL) { |
| 779 heap->mark_compact_collector()->SetFlags(Heap::kAbortIncrementalMarkingMask); | 779 heap->mark_compact_collector()->SetFlags(Heap::kAbortIncrementalMarkingMask); |
| 780 bool result = heap->CollectGarbage(space, gc_reason); | 780 bool result = heap->CollectGarbage(space, gc_reason); |
| (...skipping 134 matching lines...) |
| 915 bool Heap::PerformGarbageCollection(GarbageCollector collector, | 915 bool Heap::PerformGarbageCollection(GarbageCollector collector, |
| 916 GCTracer* tracer) { | 916 GCTracer* tracer) { |
| 917 bool next_gc_likely_to_collect_more = false; | 917 bool next_gc_likely_to_collect_more = false; |
| 918 | 918 |
| 919 if (collector != SCAVENGER) { | 919 if (collector != SCAVENGER) { |
| 920 PROFILE(isolate_, CodeMovingGCEvent()); | 920 PROFILE(isolate_, CodeMovingGCEvent()); |
| 921 } | 921 } |
| 922 | 922 |
| 923 #ifdef VERIFY_HEAP | 923 #ifdef VERIFY_HEAP |
| 924 if (FLAG_verify_heap) { | 924 if (FLAG_verify_heap) { |
| 925 VerifyStringTable(); | 925 VerifyStringTable(this); |
| 926 } | 926 } |
| 927 #endif | 927 #endif |
| 928 | 928 |
| 929 GCType gc_type = | 929 GCType gc_type = |
| 930 collector == MARK_COMPACTOR ? kGCTypeMarkSweepCompact : kGCTypeScavenge; | 930 collector == MARK_COMPACTOR ? kGCTypeMarkSweepCompact : kGCTypeScavenge; |
| 931 | 931 |
| 932 { | 932 { |
| 933 GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL); | 933 GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL); |
| 934 VMState<EXTERNAL> state(isolate_); | 934 VMState<EXTERNAL> state(isolate_); |
| 935 HandleScope handle_scope(isolate_); | 935 HandleScope handle_scope(isolate_); |
| (...skipping 103 matching lines...) |
| 1039 | 1039 |
| 1040 { | 1040 { |
| 1041 GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL); | 1041 GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL); |
| 1042 VMState<EXTERNAL> state(isolate_); | 1042 VMState<EXTERNAL> state(isolate_); |
| 1043 HandleScope handle_scope(isolate_); | 1043 HandleScope handle_scope(isolate_); |
| 1044 CallGCEpilogueCallbacks(gc_type); | 1044 CallGCEpilogueCallbacks(gc_type); |
| 1045 } | 1045 } |
| 1046 | 1046 |
| 1047 #ifdef VERIFY_HEAP | 1047 #ifdef VERIFY_HEAP |
| 1048 if (FLAG_verify_heap) { | 1048 if (FLAG_verify_heap) { |
| 1049 VerifyStringTable(); | 1049 VerifyStringTable(this); |
| 1050 } | 1050 } |
| 1051 #endif | 1051 #endif |
| 1052 | 1052 |
| 1053 return next_gc_likely_to_collect_more; | 1053 return next_gc_likely_to_collect_more; |
| 1054 } | 1054 } |
| 1055 | 1055 |
| 1056 | 1056 |
| 1057 void Heap::CallGCPrologueCallbacks(GCType gc_type, GCCallbackFlags flags) { | 1057 void Heap::CallGCPrologueCallbacks(GCType gc_type, GCCallbackFlags flags) { |
| 1058 if (gc_type == kGCTypeMarkSweepCompact && global_gc_prologue_callback_) { | 1058 if (gc_type == kGCTypeMarkSweepCompact && global_gc_prologue_callback_) { |
| 1059 global_gc_prologue_callback_(); | 1059 global_gc_prologue_callback_(); |
| (...skipping 87 matching lines...) |
| 1147 | 1147 |
| 1148 Heap* heap_; | 1148 Heap* heap_; |
| 1149 }; | 1149 }; |
| 1150 | 1150 |
| 1151 | 1151 |
| 1152 #ifdef VERIFY_HEAP | 1152 #ifdef VERIFY_HEAP |
| 1153 // Visitor class to verify pointers in code or data space do not point into | 1153 // Visitor class to verify pointers in code or data space do not point into |
| 1154 // new space. | 1154 // new space. |
| 1155 class VerifyNonPointerSpacePointersVisitor: public ObjectVisitor { | 1155 class VerifyNonPointerSpacePointersVisitor: public ObjectVisitor { |
| 1156 public: | 1156 public: |
| 1157 explicit VerifyNonPointerSpacePointersVisitor(Heap* heap) : heap_(heap) {} |
| 1157 void VisitPointers(Object** start, Object**end) { | 1158 void VisitPointers(Object** start, Object**end) { |
| 1158 for (Object** current = start; current < end; current++) { | 1159 for (Object** current = start; current < end; current++) { |
| 1159 if ((*current)->IsHeapObject()) { | 1160 if ((*current)->IsHeapObject()) { |
| 1160 CHECK(!HEAP->InNewSpace(HeapObject::cast(*current))); | 1161 CHECK(!heap_->InNewSpace(HeapObject::cast(*current))); |
| 1161 } | 1162 } |
| 1162 } | 1163 } |
| 1163 } | 1164 } |
| 1165 |
| 1166 private: |
| 1167 Heap* heap_; |
| 1164 }; | 1168 }; |
| 1165 | 1169 |
| 1166 | 1170 |
| 1167 static void VerifyNonPointerSpacePointers() { | 1171 static void VerifyNonPointerSpacePointers(Heap* heap) { |
| 1168 // Verify that there are no pointers to new space in spaces where we | 1172 // Verify that there are no pointers to new space in spaces where we |
| 1169 // do not expect them. | 1173 // do not expect them. |
| 1170 VerifyNonPointerSpacePointersVisitor v; | 1174 VerifyNonPointerSpacePointersVisitor v(heap); |
| 1171 HeapObjectIterator code_it(HEAP->code_space()); | 1175 HeapObjectIterator code_it(heap->code_space()); |
| 1172 for (HeapObject* object = code_it.Next(); | 1176 for (HeapObject* object = code_it.Next(); |
| 1173 object != NULL; object = code_it.Next()) | 1177 object != NULL; object = code_it.Next()) |
| 1174 object->Iterate(&v); | 1178 object->Iterate(&v); |
| 1175 | 1179 |
| 1176 // The old data space was normally swept conservatively so that the iterator | 1180 // The old data space was normally swept conservatively so that the iterator |
| 1177 // doesn't work, so we normally skip the next bit. | 1181 // doesn't work, so we normally skip the next bit. |
| 1178 if (!HEAP->old_data_space()->was_swept_conservatively()) { | 1182 if (!heap->old_data_space()->was_swept_conservatively()) { |
| 1179 HeapObjectIterator data_it(HEAP->old_data_space()); | 1183 HeapObjectIterator data_it(heap->old_data_space()); |
| 1180 for (HeapObject* object = data_it.Next(); | 1184 for (HeapObject* object = data_it.Next(); |
| 1181 object != NULL; object = data_it.Next()) | 1185 object != NULL; object = data_it.Next()) |
| 1182 object->Iterate(&v); | 1186 object->Iterate(&v); |
| 1183 } | 1187 } |
| 1184 } | 1188 } |
| 1185 #endif // VERIFY_HEAP | 1189 #endif // VERIFY_HEAP |
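
The VerifyNonPointerSpacePointers hunks combine both moves: the visitor now carries the heap as an explicit constructor dependency, and the free function takes it as a parameter (Scavenge below passes `this`). A reduced compilable sketch of the injected-visitor shape, again with stand-in types:

    #include <cassert>

    struct HeapObject {};

    class Heap {
     public:
      bool InNewSpace(HeapObject*) const { return false; }  // stand-in predicate
    };

    // The visitor carries the heap it verifies against instead of reading HEAP.
    class VerifyNonPointerSpacePointersVisitor {
     public:
      explicit VerifyNonPointerSpacePointersVisitor(Heap* heap) : heap_(heap) {}
      void VisitPointer(HeapObject* current) {
        assert(!heap_->InNewSpace(current));  // was: CHECK(!HEAP->InNewSpace(...))
      }
     private:
      Heap* heap_;
    };

    static void VerifyNonPointerSpacePointers(Heap* heap) {
      VerifyNonPointerSpacePointersVisitor v(heap);
      HeapObject object;
      v.VisitPointer(&object);  // the real code iterates code and old-data spaces
    }

    int main() {
      Heap heap;
      VerifyNonPointerSpacePointers(&heap);
      return 0;
    }
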
| 1186 | 1190 |
| 1187 | 1191 |
| 1188 void Heap::CheckNewSpaceExpansionCriteria() { | 1192 void Heap::CheckNewSpaceExpansionCriteria() { |
| 1189 if (new_space_.Capacity() < new_space_.MaximumCapacity() && | 1193 if (new_space_.Capacity() < new_space_.MaximumCapacity() && |
| (...skipping 126 matching lines...) |
| 1316 | 1320 |
| 1317 private: | 1321 private: |
| 1318 Heap* heap_; | 1322 Heap* heap_; |
| 1319 }; | 1323 }; |
| 1320 | 1324 |
| 1321 | 1325 |
| 1322 void Heap::Scavenge() { | 1326 void Heap::Scavenge() { |
| 1323 RelocationLock relocation_lock(this); | 1327 RelocationLock relocation_lock(this); |
| 1324 | 1328 |
| 1325 #ifdef VERIFY_HEAP | 1329 #ifdef VERIFY_HEAP |
| 1326 if (FLAG_verify_heap) VerifyNonPointerSpacePointers(); | 1330 if (FLAG_verify_heap) VerifyNonPointerSpacePointers(this); |
| 1327 #endif | 1331 #endif |
| 1328 | 1332 |
| 1329 gc_state_ = SCAVENGE; | 1333 gc_state_ = SCAVENGE; |
| 1330 | 1334 |
| 1331 // Implements Cheney's copying algorithm | 1335 // Implements Cheney's copying algorithm |
| 1332 LOG(isolate_, ResourceEvent("scavenge", "begin")); | 1336 LOG(isolate_, ResourceEvent("scavenge", "begin")); |
| 1333 | 1337 |
| 1334 // Clear descriptor cache. | 1338 // Clear descriptor cache. |
| 1335 isolate_->descriptor_lookup_cache()->Clear(); | 1339 isolate_->descriptor_lookup_cache()->Clear(); |
| 1336 | 1340 |
| (...skipping 1033 matching lines...) |
| 2370 scavenging_visitors_table_.Register( | 2374 scavenging_visitors_table_.Register( |
| 2371 StaticVisitorBase::kVisitShortcutCandidate, | 2375 StaticVisitorBase::kVisitShortcutCandidate, |
| 2372 scavenging_visitors_table_.GetVisitorById( | 2376 scavenging_visitors_table_.GetVisitorById( |
| 2373 StaticVisitorBase::kVisitConsString)); | 2377 StaticVisitorBase::kVisitConsString)); |
| 2374 } | 2378 } |
| 2375 } | 2379 } |
| 2376 } | 2380 } |
| 2377 | 2381 |
| 2378 | 2382 |
| 2379 void Heap::ScavengeObjectSlow(HeapObject** p, HeapObject* object) { | 2383 void Heap::ScavengeObjectSlow(HeapObject** p, HeapObject* object) { |
| 2380 SLOW_ASSERT(HEAP->InFromSpace(object)); | 2384 SLOW_ASSERT(object->GetIsolate()->heap()->InFromSpace(object)); |
| 2381 MapWord first_word = object->map_word(); | 2385 MapWord first_word = object->map_word(); |
| 2382 SLOW_ASSERT(!first_word.IsForwardingAddress()); | 2386 SLOW_ASSERT(!first_word.IsForwardingAddress()); |
| 2383 Map* map = first_word.ToMap(); | 2387 Map* map = first_word.ToMap(); |
| 2384 map->GetHeap()->DoScavengeObject(map, p, object); | 2388 map->GetHeap()->DoScavengeObject(map, p, object); |
| 2385 } | 2389 } |
| 2386 | 2390 |
| 2387 | 2391 |
| 2388 MaybeObject* Heap::AllocatePartialMap(InstanceType instance_type, | 2392 MaybeObject* Heap::AllocatePartialMap(InstanceType instance_type, |
| 2389 int instance_size) { | 2393 int instance_size) { |
| 2390 Object* result; | 2394 Object* result; |
| (...skipping 5443 matching lines...) |
| 7834 return field_offsets_[index + i]; | 7838 return field_offsets_[index + i]; |
| 7835 } | 7839 } |
| 7836 } | 7840 } |
| 7837 return kNotFound; | 7841 return kNotFound; |
| 7838 } | 7842 } |
| 7839 | 7843 |
| 7840 | 7844 |
| 7841 void KeyedLookupCache::Update(Map* map, Name* name, int field_offset) { | 7845 void KeyedLookupCache::Update(Map* map, Name* name, int field_offset) { |
| 7842 if (!name->IsUniqueName()) { | 7846 if (!name->IsUniqueName()) { |
| 7843 String* internalized_string; | 7847 String* internalized_string; |
| 7844 if (!HEAP->InternalizeStringIfExists( | 7848 if (!map->GetIsolate()->heap()->InternalizeStringIfExists( |
| 7845 String::cast(name), &internalized_string)) { | 7849 String::cast(name), &internalized_string)) { |
| 7846 return; | 7850 return; |
| 7847 } | 7851 } |
| 7848 name = internalized_string; | 7852 name = internalized_string; |
| 7849 } | 7853 } |
| 7850 // This cache is cleared only between mark compact passes, so we expect the | 7854 // This cache is cleared only between mark compact passes, so we expect the |
| 7851 // cache to only contain old space names. | 7855 // cache to only contain old space names. |
| 7852 ASSERT(!HEAP->InNewSpace(name)); | 7856 ASSERT(!map->GetIsolate()->heap()->InNewSpace(name)); |
| 7853 | 7857 |
| 7854 int index = (Hash(map, name) & kHashMask); | 7858 int index = (Hash(map, name) & kHashMask); |
| 7855 // After a GC there will be free slots, so we use them in order (this may | 7859 // After a GC there will be free slots, so we use them in order (this may |
| 7856 // help to get the most frequently used one in position 0). | 7860 // help to get the most frequently used one in position 0). |
| 7857 for (int i = 0; i< kEntriesPerBucket; i++) { | 7861 for (int i = 0; i< kEntriesPerBucket; i++) { |
| 7858 Key& key = keys_[index]; | 7862 Key& key = keys_[index]; |
| 7859 Object* free_entry_indicator = NULL; | 7863 Object* free_entry_indicator = NULL; |
| 7860 if (key.map == free_entry_indicator) { | 7864 if (key.map == free_entry_indicator) { |
| 7861 key.map = map; | 7865 key.map = map; |
| 7862 key.name = name; | 7866 key.name = name; |
| (...skipping 234 matching lines...) |
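
The KeyedLookupCache::Update hunk uses the same recovery trick on an argument: the map, itself a heap object, leads to the isolate and from there to the heap, both for internalizing the name and for the old-space assertion. A small compilable sketch of deriving context from arguments instead of globals (all types below are illustrative stand-ins):

    #include <cassert>

    struct Name {};

    class Heap {
     public:
      bool InNewSpace(Name*) const { return false; }  // stand-in predicate
    };

    struct Isolate {
      Heap* heap() { return &heap_; }
      Heap heap_;
    };

    struct Map {
      Isolate* GetIsolate() { return isolate_; }
      Isolate* isolate_;
    };

    // Update has no Heap parameter, so it derives its context from the map
    // argument rather than from the removed HEAP global.
    void Update(Map* map, Name* name) {
      Heap* heap = map->GetIsolate()->heap();
      assert(!heap->InNewSpace(name));
      // ... internalize the name and insert into the per-isolate cache ...
    }

    int main() {
      Isolate isolate;
      Map map;
      map.isolate_ = &isolate;
      Name name;
      Update(&map, &name);
      return 0;
    }
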
| 8097 if (FLAG_concurrent_recompilation) { | 8101 if (FLAG_concurrent_recompilation) { |
| 8098 heap_->relocation_mutex_->Lock(); | 8102 heap_->relocation_mutex_->Lock(); |
| 8099 #ifdef DEBUG | 8103 #ifdef DEBUG |
| 8100 heap_->relocation_mutex_locked_by_optimizer_thread_ = | 8104 heap_->relocation_mutex_locked_by_optimizer_thread_ = |
| 8101 heap_->isolate()->optimizing_compiler_thread()->IsOptimizerThread(); | 8105 heap_->isolate()->optimizing_compiler_thread()->IsOptimizerThread(); |
| 8102 #endif // DEBUG | 8106 #endif // DEBUG |
| 8103 } | 8107 } |
| 8104 } | 8108 } |
| 8105 | 8109 |
| 8106 } } // namespace v8::internal | 8110 } } // namespace v8::internal |