| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 20 matching lines...) |
| 31 #include "api.h" | 31 #include "api.h" |
| 32 #include "bootstrapper.h" | 32 #include "bootstrapper.h" |
| 33 #include "codegen.h" | 33 #include "codegen.h" |
| 34 #include "compilation-cache.h" | 34 #include "compilation-cache.h" |
| 35 #include "cpu-profiler.h" | 35 #include "cpu-profiler.h" |
| 36 #include "debug.h" | 36 #include "debug.h" |
| 37 #include "deoptimizer.h" | 37 #include "deoptimizer.h" |
| 38 #include "global-handles.h" | 38 #include "global-handles.h" |
| 39 #include "heap-profiler.h" | 39 #include "heap-profiler.h" |
| 40 #include "incremental-marking.h" | 40 #include "incremental-marking.h" |
| 41 #include "isolate-inl.h" |
| 41 #include "mark-compact.h" | 42 #include "mark-compact.h" |
| 42 #include "natives.h" | 43 #include "natives.h" |
| 43 #include "objects-visiting.h" | 44 #include "objects-visiting.h" |
| 44 #include "objects-visiting-inl.h" | 45 #include "objects-visiting-inl.h" |
| 45 #include "once.h" | 46 #include "once.h" |
| 46 #include "runtime-profiler.h" | 47 #include "runtime-profiler.h" |
| 47 #include "scopeinfo.h" | 48 #include "scopeinfo.h" |
| 48 #include "snapshot.h" | 49 #include "snapshot.h" |
| 49 #include "store-buffer.h" | 50 #include "store-buffer.h" |
| 51 #include "utils/random-number-generator.h" |
| 50 #include "v8threads.h" | 52 #include "v8threads.h" |
| 51 #include "v8utils.h" | 53 #include "v8utils.h" |
| 52 #include "vm-state-inl.h" | 54 #include "vm-state-inl.h" |
| 53 #if V8_TARGET_ARCH_ARM && !V8_INTERPRETED_REGEXP | 55 #if V8_TARGET_ARCH_ARM && !V8_INTERPRETED_REGEXP |
| 54 #include "regexp-macro-assembler.h" | 56 #include "regexp-macro-assembler.h" |
| 55 #include "arm/regexp-macro-assembler-arm.h" | 57 #include "arm/regexp-macro-assembler-arm.h" |
| 56 #endif | 58 #endif |
| 57 #if V8_TARGET_ARCH_MIPS && !V8_INTERPRETED_REGEXP | 59 #if V8_TARGET_ARCH_MIPS && !V8_INTERPRETED_REGEXP |
| 58 #include "regexp-macro-assembler.h" | 60 #include "regexp-macro-assembler.h" |
| 59 #include "mips/regexp-macro-assembler-mips.h" | 61 #include "mips/regexp-macro-assembler-mips.h" |
| (...skipping 662 matching lines...) |
| 722 } | 724 } |
| 723 } | 725 } |
| 724 | 726 |
| 725 | 727 |
| 726 void Heap::MoveElements(FixedArray* array, | 728 void Heap::MoveElements(FixedArray* array, |
| 727 int dst_index, | 729 int dst_index, |
| 728 int src_index, | 730 int src_index, |
| 729 int len) { | 731 int len) { |
| 730 if (len == 0) return; | 732 if (len == 0) return; |
| 731 | 733 |
| 732 ASSERT(array->map() != HEAP->fixed_cow_array_map()); | 734 ASSERT(array->map() != fixed_cow_array_map()); |
| 733 Object** dst_objects = array->data_start() + dst_index; | 735 Object** dst_objects = array->data_start() + dst_index; |
| 734 OS::MemMove(dst_objects, | 736 OS::MemMove(dst_objects, |
| 735 array->data_start() + src_index, | 737 array->data_start() + src_index, |
| 736 len * kPointerSize); | 738 len * kPointerSize); |
| 737 if (!InNewSpace(array)) { | 739 if (!InNewSpace(array)) { |
| 738 for (int i = 0; i < len; i++) { | 740 for (int i = 0; i < len; i++) { |
| 739 // TODO(hpayer): check store buffer for entries | 741 // TODO(hpayer): check store buffer for entries |
| 740 if (InNewSpace(dst_objects[i])) { | 742 if (InNewSpace(dst_objects[i])) { |
| 741 RecordWrite(array->address(), array->OffsetOfElementAt(dst_index + i)); | 743 RecordWrite(array->address(), array->OffsetOfElementAt(dst_index + i)); |
| 742 } | 744 } |
| (...skipping 13 matching lines...) |
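
The MoveElements hunk above drops the global HEAP macro from the assertion, but the interesting invariant is the loop after OS::MemMove: once an old-space array's slots have been shuffled, any slot that now holds a new-space pointer must be re-recorded so the next scavenge can find it. A minimal standalone sketch of that pattern; Slot, in_new_space, and remembered_set are illustrative stand-ins, not V8's API:

```cpp
#include <cstring>
#include <functional>
#include <set>

using Slot = void*;

void MoveSlots(Slot* base, int dst, int src, int len,
               bool array_in_new_space,
               const std::function<bool(Slot)>& in_new_space,
               std::set<Slot*>* remembered_set) {
  if (len == 0) return;
  // Regions may overlap, exactly as in Heap::MoveElements, so memmove.
  std::memmove(base + dst, base + src, len * sizeof(Slot));
  if (array_in_new_space) return;  // new-space objects are scanned anyway
  for (int i = 0; i < len; i++) {
    // Old-to-new pointer created by the move: remember the slot.
    if (in_new_space(base[dst + i])) remembered_set->insert(&base[dst + i]);
  }
}
```
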
| 756 if ((*p)->IsHeapObject()) { | 758 if ((*p)->IsHeapObject()) { |
| 757 // Check that the string is actually internalized. | 759 // Check that the string is actually internalized. |
| 758 CHECK((*p)->IsTheHole() || (*p)->IsUndefined() || | 760 CHECK((*p)->IsTheHole() || (*p)->IsUndefined() || |
| 759 (*p)->IsInternalizedString()); | 761 (*p)->IsInternalizedString()); |
| 760 } | 762 } |
| 761 } | 763 } |
| 762 } | 764 } |
| 763 }; | 765 }; |
| 764 | 766 |
| 765 | 767 |
| 766 static void VerifyStringTable() { | 768 static void VerifyStringTable(Heap* heap) { |
| 767 StringTableVerifier verifier; | 769 StringTableVerifier verifier; |
| 768 HEAP->string_table()->IterateElements(&verifier); | 770 heap->string_table()->IterateElements(&verifier); |
| 769 } | 771 } |
| 770 #endif // VERIFY_HEAP | 772 #endif // VERIFY_HEAP |
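
This hunk shows the change this CL makes throughout the file: VerifyStringTable() stops reaching through the process-global HEAP macro and instead takes the Heap* it should operate on. A toy illustration of why the explicit parameter matters once a process hosts more than one isolate (types here are stand-ins, not V8's):

```cpp
#include <cassert>

struct Heap {
  int string_table_size = 0;
};

// Before-style: a hidden global ties every caller to one heap.
Heap* g_heap = nullptr;
void VerifyGlobal() { assert(g_heap->string_table_size >= 0); }

// After-style: the heap is an argument; works with any number of heaps.
void VerifyExplicit(Heap* heap) { assert(heap->string_table_size >= 0); }

int main() {
  Heap a, b;            // two isolates, two heaps in one process
  g_heap = &a;          // before-style forces a single ambient choice
  VerifyGlobal();       // can only ever see heap `a`
  VerifyExplicit(&a);   // after-style works for any heap...
  VerifyExplicit(&b);   // ...including a second one, with no globals
}
```
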
| 771 | 773 |
| 772 | 774 |
| 773 static bool AbortIncrementalMarkingAndCollectGarbage( | 775 static bool AbortIncrementalMarkingAndCollectGarbage( |
| 774 Heap* heap, | 776 Heap* heap, |
| 775 AllocationSpace space, | 777 AllocationSpace space, |
| 776 const char* gc_reason = NULL) { | 778 const char* gc_reason = NULL) { |
| 777 heap->mark_compact_collector()->SetFlags(Heap::kAbortIncrementalMarkingMask); | 779 heap->mark_compact_collector()->SetFlags(Heap::kAbortIncrementalMarkingMask); |
| 778 bool result = heap->CollectGarbage(space, gc_reason); | 780 bool result = heap->CollectGarbage(space, gc_reason); |
| (...skipping 134 matching lines...) |
| 913 bool Heap::PerformGarbageCollection(GarbageCollector collector, | 915 bool Heap::PerformGarbageCollection(GarbageCollector collector, |
| 914 GCTracer* tracer) { | 916 GCTracer* tracer) { |
| 915 bool next_gc_likely_to_collect_more = false; | 917 bool next_gc_likely_to_collect_more = false; |
| 916 | 918 |
| 917 if (collector != SCAVENGER) { | 919 if (collector != SCAVENGER) { |
| 918 PROFILE(isolate_, CodeMovingGCEvent()); | 920 PROFILE(isolate_, CodeMovingGCEvent()); |
| 919 } | 921 } |
| 920 | 922 |
| 921 #ifdef VERIFY_HEAP | 923 #ifdef VERIFY_HEAP |
| 922 if (FLAG_verify_heap) { | 924 if (FLAG_verify_heap) { |
| 923 VerifyStringTable(); | 925 VerifyStringTable(this); |
| 924 } | 926 } |
| 925 #endif | 927 #endif |
| 926 | 928 |
| 927 GCType gc_type = | 929 GCType gc_type = |
| 928 collector == MARK_COMPACTOR ? kGCTypeMarkSweepCompact : kGCTypeScavenge; | 930 collector == MARK_COMPACTOR ? kGCTypeMarkSweepCompact : kGCTypeScavenge; |
| 929 | 931 |
| 930 { | 932 { |
| 931 GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL); | 933 GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL); |
| 932 VMState<EXTERNAL> state(isolate_); | 934 VMState<EXTERNAL> state(isolate_); |
| 933 HandleScope handle_scope(isolate_); | 935 HandleScope handle_scope(isolate_); |
| (...skipping 103 matching lines...) |
| 1037 | 1039 |
| 1038 { | 1040 { |
| 1039 GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL); | 1041 GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL); |
| 1040 VMState<EXTERNAL> state(isolate_); | 1042 VMState<EXTERNAL> state(isolate_); |
| 1041 HandleScope handle_scope(isolate_); | 1043 HandleScope handle_scope(isolate_); |
| 1042 CallGCEpilogueCallbacks(gc_type); | 1044 CallGCEpilogueCallbacks(gc_type); |
| 1043 } | 1045 } |
| 1044 | 1046 |
| 1045 #ifdef VERIFY_HEAP | 1047 #ifdef VERIFY_HEAP |
| 1046 if (FLAG_verify_heap) { | 1048 if (FLAG_verify_heap) { |
| 1047 VerifyStringTable(); | 1049 VerifyStringTable(this); |
| 1048 } | 1050 } |
| 1049 #endif | 1051 #endif |
| 1050 | 1052 |
| 1051 return next_gc_likely_to_collect_more; | 1053 return next_gc_likely_to_collect_more; |
| 1052 } | 1054 } |
| 1053 | 1055 |
| 1054 | 1056 |
| 1055 void Heap::CallGCPrologueCallbacks(GCType gc_type, GCCallbackFlags flags) { | 1057 void Heap::CallGCPrologueCallbacks(GCType gc_type, GCCallbackFlags flags) { |
| 1056 if (gc_type == kGCTypeMarkSweepCompact && global_gc_prologue_callback_) { | 1058 if (gc_type == kGCTypeMarkSweepCompact && global_gc_prologue_callback_) { |
| 1057 global_gc_prologue_callback_(); | 1059 global_gc_prologue_callback_(); |
| (...skipping 87 matching lines...) |
| 1145 | 1147 |
| 1146 Heap* heap_; | 1148 Heap* heap_; |
| 1147 }; | 1149 }; |
| 1148 | 1150 |
| 1149 | 1151 |
| 1150 #ifdef VERIFY_HEAP | 1152 #ifdef VERIFY_HEAP |
| 1151 // Visitor class to verify pointers in code or data space do not point into | 1153 // Visitor class to verify pointers in code or data space do not point into |
| 1152 // new space. | 1154 // new space. |
| 1153 class VerifyNonPointerSpacePointersVisitor: public ObjectVisitor { | 1155 class VerifyNonPointerSpacePointersVisitor: public ObjectVisitor { |
| 1154 public: | 1156 public: |
| 1157 explicit VerifyNonPointerSpacePointersVisitor(Heap* heap) : heap_(heap) {} |
| 1155 void VisitPointers(Object** start, Object**end) { | 1158 void VisitPointers(Object** start, Object**end) { |
| 1156 for (Object** current = start; current < end; current++) { | 1159 for (Object** current = start; current < end; current++) { |
| 1157 if ((*current)->IsHeapObject()) { | 1160 if ((*current)->IsHeapObject()) { |
| 1158 CHECK(!HEAP->InNewSpace(HeapObject::cast(*current))); | 1161 CHECK(!heap_->InNewSpace(HeapObject::cast(*current))); |
| 1159 } | 1162 } |
| 1160 } | 1163 } |
| 1161 } | 1164 } |
| 1165 |
| 1166 private: |
| 1167 Heap* heap_; |
| 1162 }; | 1168 }; |
| 1163 | 1169 |
| 1164 | 1170 |
| 1165 static void VerifyNonPointerSpacePointers() { | 1171 static void VerifyNonPointerSpacePointers(Heap* heap) { |
| 1166 // Verify that there are no pointers to new space in spaces where we | 1172 // Verify that there are no pointers to new space in spaces where we |
| 1167 // do not expect them. | 1173 // do not expect them. |
| 1168 VerifyNonPointerSpacePointersVisitor v; | 1174 VerifyNonPointerSpacePointersVisitor v(heap); |
| 1169 HeapObjectIterator code_it(HEAP->code_space()); | 1175 HeapObjectIterator code_it(heap->code_space()); |
| 1170 for (HeapObject* object = code_it.Next(); | 1176 for (HeapObject* object = code_it.Next(); |
| 1171 object != NULL; object = code_it.Next()) | 1177 object != NULL; object = code_it.Next()) |
| 1172 object->Iterate(&v); | 1178 object->Iterate(&v); |
| 1173 | 1179 |
| 1174 // The old data space was normally swept conservatively so that the iterator | 1180 // The old data space was normally swept conservatively so that the iterator |
| 1175 // doesn't work, so we normally skip the next bit. | 1181 // doesn't work, so we normally skip the next bit. |
| 1176 if (!HEAP->old_data_space()->was_swept_conservatively()) { | 1182 if (!heap->old_data_space()->was_swept_conservatively()) { |
| 1177 HeapObjectIterator data_it(HEAP->old_data_space()); | 1183 HeapObjectIterator data_it(heap->old_data_space()); |
| 1178 for (HeapObject* object = data_it.Next(); | 1184 for (HeapObject* object = data_it.Next(); |
| 1179 object != NULL; object = data_it.Next()) | 1185 object != NULL; object = data_it.Next()) |
| 1180 object->Iterate(&v); | 1186 object->Iterate(&v); |
| 1181 } | 1187 } |
| 1182 } | 1188 } |
| 1183 #endif // VERIFY_HEAP | 1189 #endif // VERIFY_HEAP |
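
VerifyNonPointerSpacePointersVisitor now carries its Heap* as a constructor argument instead of consulting HEAP once per pointer. The check itself is simple to state: no pointer slot in code space or old data space may target new space. A compact standalone version of that walk, with a toy object model and an address range standing in for new space:

```cpp
#include <cassert>
#include <cstdint>
#include <vector>

struct Range { uintptr_t lo, hi; };  // stand-in for new-space bounds

bool InRange(const Range& r, void* p) {
  auto a = reinterpret_cast<uintptr_t>(p);
  return a >= r.lo && a < r.hi;
}

void VerifyNoOldToNew(const std::vector<std::vector<void*>>& old_objects,
                      const Range& new_space) {
  for (const auto& object : old_objects)    // iterate objects in the space
    for (void* slot : object)               // iterate their pointer slots
      assert(!InRange(new_space, slot));    // must not point into new space
}
```
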
| 1184 | 1190 |
| 1185 | 1191 |
| 1186 void Heap::CheckNewSpaceExpansionCriteria() { | 1192 void Heap::CheckNewSpaceExpansionCriteria() { |
| 1187 if (new_space_.Capacity() < new_space_.MaximumCapacity() && | 1193 if (new_space_.Capacity() < new_space_.MaximumCapacity() && |
| (...skipping 126 matching lines...) |
| 1314 | 1320 |
| 1315 private: | 1321 private: |
| 1316 Heap* heap_; | 1322 Heap* heap_; |
| 1317 }; | 1323 }; |
| 1318 | 1324 |
| 1319 | 1325 |
| 1320 void Heap::Scavenge() { | 1326 void Heap::Scavenge() { |
| 1321 RelocationLock relocation_lock(this); | 1327 RelocationLock relocation_lock(this); |
| 1322 | 1328 |
| 1323 #ifdef VERIFY_HEAP | 1329 #ifdef VERIFY_HEAP |
| 1324 if (FLAG_verify_heap) VerifyNonPointerSpacePointers(); | 1330 if (FLAG_verify_heap) VerifyNonPointerSpacePointers(this); |
| 1325 #endif | 1331 #endif |
| 1326 | 1332 |
| 1327 gc_state_ = SCAVENGE; | 1333 gc_state_ = SCAVENGE; |
| 1328 | 1334 |
| 1329 // Implements Cheney's copying algorithm | 1335 // Implements Cheney's copying algorithm |
| 1330 LOG(isolate_, ResourceEvent("scavenge", "begin")); | 1336 LOG(isolate_, ResourceEvent("scavenge", "begin")); |
| 1331 | 1337 |
| 1332 // Clear descriptor cache. | 1338 // Clear descriptor cache. |
| 1333 isolate_->descriptor_lookup_cache()->Clear(); | 1339 isolate_->descriptor_lookup_cache()->Clear(); |
| 1334 | 1340 |
| (...skipping 1033 matching lines...) |
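
The Scavenge hunk above is annotated "Implements Cheney's copying algorithm", which is worth unpacking: live objects are evacuated from from-space to to-space, and to-space itself serves as the breadth-first work queue, so no auxiliary mark stack is needed. A self-contained sketch of the core loop under a toy object model (V8's real version works on raw map words rather than a separate forwarding field):

```cpp
#include <cstddef>
#include <vector>

struct Obj {
  Obj* forward = nullptr;     // forwarding pointer, set once evacuated
  std::vector<Obj*> fields;   // outgoing references (the pointer slots)
};

// Copy one object to to-space if it has not been copied yet, and return
// its new address. The forwarding pointer makes this idempotent.
static Obj* Evacuate(Obj* o, std::vector<Obj*>* to_space) {
  if (o == nullptr) return nullptr;
  if (o->forward != nullptr) return o->forward;
  Obj* copy = new Obj(*o);    // fields still point into from-space
  o->forward = copy;
  to_space->push_back(copy);  // to-space doubles as the BFS queue
  return copy;
}

void CheneyCollect(std::vector<Obj*>& roots, std::vector<Obj*>* to_space) {
  for (Obj*& root : roots) root = Evacuate(root, to_space);
  // `scan` chases the allocation front: everything behind it is fully
  // fixed up; everything ahead is copied but still has stale fields.
  for (size_t scan = 0; scan < to_space->size(); scan++) {
    for (Obj*& field : to_space->at(scan)->fields) {
      field = Evacuate(field, to_space);
    }
  }
  // A real collector now discards from-space wholesale; omitted here.
}
```
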
| 2368 scavenging_visitors_table_.Register( | 2374 scavenging_visitors_table_.Register( |
| 2369 StaticVisitorBase::kVisitShortcutCandidate, | 2375 StaticVisitorBase::kVisitShortcutCandidate, |
| 2370 scavenging_visitors_table_.GetVisitorById( | 2376 scavenging_visitors_table_.GetVisitorById( |
| 2371 StaticVisitorBase::kVisitConsString)); | 2377 StaticVisitorBase::kVisitConsString)); |
| 2372 } | 2378 } |
| 2373 } | 2379 } |
| 2374 } | 2380 } |
| 2375 | 2381 |
| 2376 | 2382 |
| 2377 void Heap::ScavengeObjectSlow(HeapObject** p, HeapObject* object) { | 2383 void Heap::ScavengeObjectSlow(HeapObject** p, HeapObject* object) { |
| 2378 SLOW_ASSERT(HEAP->InFromSpace(object)); | 2384 SLOW_ASSERT(object->GetIsolate()->heap()->InFromSpace(object)); |
| 2379 MapWord first_word = object->map_word(); | 2385 MapWord first_word = object->map_word(); |
| 2380 SLOW_ASSERT(!first_word.IsForwardingAddress()); | 2386 SLOW_ASSERT(!first_word.IsForwardingAddress()); |
| 2381 Map* map = first_word.ToMap(); | 2387 Map* map = first_word.ToMap(); |
| 2382 map->GetHeap()->DoScavengeObject(map, p, object); | 2388 map->GetHeap()->DoScavengeObject(map, p, object); |
| 2383 } | 2389 } |
| 2384 | 2390 |
| 2385 | 2391 |
| 2386 MaybeObject* Heap::AllocatePartialMap(InstanceType instance_type, | 2392 MaybeObject* Heap::AllocatePartialMap(InstanceType instance_type, |
| 2387 int instance_size) { | 2393 int instance_size) { |
| 2388 Object* result; | 2394 Object* result; |
| (...skipping 3371 matching lines...) |
| 5760 MaybeObject* maybe = | 5766 MaybeObject* maybe = |
| 5761 AllocateRaw(Symbol::kSize, OLD_POINTER_SPACE, OLD_POINTER_SPACE); | 5767 AllocateRaw(Symbol::kSize, OLD_POINTER_SPACE, OLD_POINTER_SPACE); |
| 5762 if (!maybe->ToObject(&result)) return maybe; | 5768 if (!maybe->ToObject(&result)) return maybe; |
| 5763 | 5769 |
| 5764 HeapObject::cast(result)->set_map_no_write_barrier(symbol_map()); | 5770 HeapObject::cast(result)->set_map_no_write_barrier(symbol_map()); |
| 5765 | 5771 |
| 5766 // Generate a random hash value. | 5772 // Generate a random hash value. |
| 5767 int hash; | 5773 int hash; |
| 5768 int attempts = 0; | 5774 int attempts = 0; |
| 5769 do { | 5775 do { |
| 5770 hash = V8::RandomPrivate(isolate()) & Name::kHashBitMask; | 5776 hash = isolate()->random_number_generator()->NextInt() & Name::kHashBitMask; |
| 5771 attempts++; | 5777 attempts++; |
| 5772 } while (hash == 0 && attempts < 30); | 5778 } while (hash == 0 && attempts < 30); |
| 5773 if (hash == 0) hash = 1; // never return 0 | 5779 if (hash == 0) hash = 1; // never return 0 |
| 5774 | 5780 |
| 5775 Symbol::cast(result)->set_hash_field( | 5781 Symbol::cast(result)->set_hash_field( |
| 5776 Name::kIsNotArrayIndexMask | (hash << Name::kHashShift)); | 5782 Name::kIsNotArrayIndexMask | (hash << Name::kHashShift)); |
| 5777 Symbol::cast(result)->set_name(undefined_value()); | 5783 Symbol::cast(result)->set_name(undefined_value()); |
| 5778 | 5784 |
| 5779 ASSERT(result->IsSymbol()); | 5785 ASSERT(result->IsSymbol()); |
| 5780 return result; | 5786 return result; |
| (...skipping 1144 matching lines...) |
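
In the symbol-allocation hunk, V8::RandomPrivate(isolate()) becomes isolate()->random_number_generator()->NextInt(), another step away from process-global state. The surrounding loop retries until it draws a nonzero value, since the hash field reserves 0 ("never return 0" in the hunk). The same shape in standalone form; std::mt19937 stands in for the isolate's RNG, while the mask and the 30-attempt bound mirror the hunk:

```cpp
#include <cstdint>
#include <random>

uint32_t NonZeroHash(std::mt19937& rng, uint32_t hash_bit_mask) {
  uint32_t hash = 0;
  for (int attempts = 0; hash == 0 && attempts < 30; attempts++) {
    hash = rng() & hash_bit_mask;  // keep only the hash-field bits
  }
  if (hash == 0) hash = 1;         // never return the reserved value 0
  return hash;
}
```
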
| 6925 // to be non-executable here for safety, but this means we need to enable it | 6931 // to be non-executable here for safety, but this means we need to enable it |
| 6926 // explicitly when allocating large code objects. | 6932 // explicitly when allocating large code objects. |
| 6927 lo_space_ = new LargeObjectSpace(this, max_old_generation_size_, LO_SPACE); | 6933 lo_space_ = new LargeObjectSpace(this, max_old_generation_size_, LO_SPACE); |
| 6928 if (lo_space_ == NULL) return false; | 6934 if (lo_space_ == NULL) return false; |
| 6929 if (!lo_space_->SetUp()) return false; | 6935 if (!lo_space_->SetUp()) return false; |
| 6930 | 6936 |
| 6931 // Set up the seed that is used to randomize the string hash function. | 6937 // Set up the seed that is used to randomize the string hash function. |
| 6932 ASSERT(hash_seed() == 0); | 6938 ASSERT(hash_seed() == 0); |
| 6933 if (FLAG_randomize_hashes) { | 6939 if (FLAG_randomize_hashes) { |
| 6934 if (FLAG_hash_seed == 0) { | 6940 if (FLAG_hash_seed == 0) { |
| 6935 set_hash_seed( | 6941 int rnd = isolate()->random_number_generator()->NextInt(); |
| 6936 Smi::FromInt(V8::RandomPrivate(isolate()) & 0x3fffffff)); | 6942 set_hash_seed(Smi::FromInt(rnd & Name::kHashBitMask)); |
| 6937 } else { | 6943 } else { |
| 6938 set_hash_seed(Smi::FromInt(FLAG_hash_seed)); | 6944 set_hash_seed(Smi::FromInt(FLAG_hash_seed)); |
| 6939 } | 6945 } |
| 6940 } | 6946 } |
| 6941 | 6947 |
| 6942 LOG(isolate_, IntPtrTEvent("heap-capacity", Capacity())); | 6948 LOG(isolate_, IntPtrTEvent("heap-capacity", Capacity())); |
| 6943 LOG(isolate_, IntPtrTEvent("heap-available", Available())); | 6949 LOG(isolate_, IntPtrTEvent("heap-available", Available())); |
| 6944 | 6950 |
| 6945 store_buffer()->SetUp(); | 6951 store_buffer()->SetUp(); |
| 6946 | 6952 |
| (...skipping 890 matching lines...) |
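
The SetUp hunk applies the same RNG migration to the string-hash seed, and also tightens the mask from the literal 0x3fffffff to Name::kHashBitMask. The policy reduces to: honor an explicit --hash_seed flag, otherwise draw isolate-local random bits and mask them into range. Sketched under the same stand-in RNG assumption:

```cpp
#include <cstdint>
#include <random>

uint32_t ChooseHashSeed(uint32_t flag_hash_seed, std::mt19937& rng,
                        uint32_t hash_bit_mask) {
  if (flag_hash_seed != 0) return flag_hash_seed;  // deterministic override
  return rng() & hash_bit_mask;                    // randomized hashing
}
```
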
| 7837 return field_offsets_[index + i]; | 7843 return field_offsets_[index + i]; |
| 7838 } | 7844 } |
| 7839 } | 7845 } |
| 7840 return kNotFound; | 7846 return kNotFound; |
| 7841 } | 7847 } |
| 7842 | 7848 |
| 7843 | 7849 |
| 7844 void KeyedLookupCache::Update(Map* map, Name* name, int field_offset) { | 7850 void KeyedLookupCache::Update(Map* map, Name* name, int field_offset) { |
| 7845 if (!name->IsUniqueName()) { | 7851 if (!name->IsUniqueName()) { |
| 7846 String* internalized_string; | 7852 String* internalized_string; |
| 7847 if (!HEAP->InternalizeStringIfExists( | 7853 if (!map->GetIsolate()->heap()->InternalizeStringIfExists( |
| 7848 String::cast(name), &internalized_string)) { | 7854 String::cast(name), &internalized_string)) { |
| 7849 return; | 7855 return; |
| 7850 } | 7856 } |
| 7851 name = internalized_string; | 7857 name = internalized_string; |
| 7852 } | 7858 } |
| 7853 // This cache is cleared only between mark compact passes, so we expect the | 7859 // This cache is cleared only between mark compact passes, so we expect the |
| 7854 // cache to only contain old space names. | 7860 // cache to only contain old space names. |
| 7855 ASSERT(!HEAP->InNewSpace(name)); | 7861 ASSERT(!map->GetIsolate()->heap()->InNewSpace(name)); |
| 7856 | 7862 |
| 7857 int index = (Hash(map, name) & kHashMask); | 7863 int index = (Hash(map, name) & kHashMask); |
| 7858 // After a GC there will be free slots, so we use them in order (this may | 7864 // After a GC there will be free slots, so we use them in order (this may |
| 7859 // help to get the most frequently used one in position 0). | 7865 // help to get the most frequently used one in position 0). |
| 7860 for (int i = 0; i< kEntriesPerBucket; i++) { | 7866 for (int i = 0; i< kEntriesPerBucket; i++) { |
| 7861 Key& key = keys_[index]; | 7867 Key& key = keys_[index]; |
| 7862 Object* free_entry_indicator = NULL; | 7868 Object* free_entry_indicator = NULL; |
| 7863 if (key.map == free_entry_indicator) { | 7869 if (key.map == free_entry_indicator) { |
| 7864 key.map = map; | 7870 key.map = map; |
| 7865 key.name = name; | 7871 key.name = name; |
| (...skipping 234 matching lines...) |
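
KeyedLookupCache::Update now resolves its heap through map->GetIsolate()->heap() instead of HEAP. The cache logic visible in the hunk (hash to a bucket, then take the first free slot in order, so that frequently-used entries tend to land at position 0 after a GC clears the table) looks like this in reduced standalone form; the sizes and the eviction fallback are illustrative, not V8's exact policy:

```cpp
#include <cstddef>

constexpr int kEntriesPerBucket = 2;
constexpr int kBuckets = 64;

struct Key { const void* map = nullptr; const void* name = nullptr; };

struct LookupCache {
  Key keys[kBuckets * kEntriesPerBucket];
  int offsets[kBuckets * kEntriesPerBucket] = {};

  void Update(size_t hash, const void* map, const void* name, int offset) {
    size_t index = (hash % kBuckets) * kEntriesPerBucket;
    for (int i = 0; i < kEntriesPerBucket; i++) {
      if (keys[index + i].map == nullptr) {   // free slot: fill in order
        keys[index + i] = Key{map, name};
        offsets[index + i] = offset;
        return;
      }
    }
    // Bucket full: overwrite an entry (a real cache has a smarter
    // eviction policy; omitted here).
    keys[index] = Key{map, name};
    offsets[index] = offset;
  }
};
```
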
| 8100 if (FLAG_concurrent_recompilation) { | 8106 if (FLAG_concurrent_recompilation) { |
| 8101 heap_->relocation_mutex_->Lock(); | 8107 heap_->relocation_mutex_->Lock(); |
| 8102 #ifdef DEBUG | 8108 #ifdef DEBUG |
| 8103 heap_->relocation_mutex_locked_by_optimizer_thread_ = | 8109 heap_->relocation_mutex_locked_by_optimizer_thread_ = |
| 8104 heap_->isolate()->optimizing_compiler_thread()->IsOptimizerThread(); | 8110 heap_->isolate()->optimizing_compiler_thread()->IsOptimizerThread(); |
| 8105 #endif // DEBUG | 8111 #endif // DEBUG |
| 8106 } | 8112 } |
| 8107 } | 8113 } |
| 8108 | 8114 |
| 8109 } } // namespace v8::internal | 8115 } } // namespace v8::internal |
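
Finally, the RelocationLock hunk: the lock is taken in the constructor, guarded by FLAG_concurrent_recompilation so single-threaded configurations pay nothing, with a debug-only record of whether the optimizer thread is the holder. A minimal RAII rendering of the same idea, using std::mutex as a stand-in for V8's mutex type:

```cpp
#include <mutex>

class RelocationLock {
 public:
  explicit RelocationLock(std::mutex* mu, bool concurrent)
      : mu_(concurrent ? mu : nullptr) {
    if (mu_) mu_->lock();    // only pay for the lock when needed
  }
  ~RelocationLock() {
    if (mu_) mu_->unlock();  // released automatically at scope exit
  }
 private:
  std::mutex* mu_;
};
```

Declaring such a lock at the top of a scope, as the Scavenge hunk does with RelocationLock relocation_lock(this), ties the exclusion to the whole collection without any explicit unlock path.
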