| OLD | NEW |
| 1 // Copyright 2016 the V8 project authors. All rights reserved. | 1 // Copyright 2016 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/code-stub-assembler.h" | 5 #include "src/code-stub-assembler.h" |
| 6 #include "src/code-factory.h" | 6 #include "src/code-factory.h" |
| 7 #include "src/frames-inl.h" | 7 #include "src/frames-inl.h" |
| 8 #include "src/frames.h" | 8 #include "src/frames.h" |
| 9 #include "src/ic/stub-cache.h" | 9 #include "src/ic/stub-cache.h" |
| 10 | 10 |
| (...skipping 2845 matching lines...) |
| 2856 var_handler->Bind(handler); | 2856 var_handler->Bind(handler); |
| 2857 Goto(if_handler); | 2857 Goto(if_handler); |
| 2858 | 2858 |
| 2859 Bind(&next_entry); | 2859 Bind(&next_entry); |
| 2860 var_index.Bind(Int32Add(index, Int32Constant(kEntrySize))); | 2860 var_index.Bind(Int32Add(index, Int32Constant(kEntrySize))); |
| 2861 Goto(&loop); | 2861 Goto(&loop); |
| 2862 } | 2862 } |
| 2863 } | 2863 } |
| 2864 | 2864 |
| 2865 compiler::Node* CodeStubAssembler::StubCachePrimaryOffset(compiler::Node* name, | 2865 compiler::Node* CodeStubAssembler::StubCachePrimaryOffset(compiler::Node* name, |
| 2866 Code::Flags flags, | |
| 2867 compiler::Node* map) { | 2866 compiler::Node* map) { |
| 2868 // See v8::internal::StubCache::PrimaryOffset(). | 2867 // See v8::internal::StubCache::PrimaryOffset(). |
| 2869 STATIC_ASSERT(StubCache::kCacheIndexShift == Name::kHashShift); | 2868 STATIC_ASSERT(StubCache::kCacheIndexShift == Name::kHashShift); |
| 2870 // Compute the hash of the name (use entire hash field). | 2869 // Compute the hash of the name (use entire hash field). |
| 2871 Node* hash_field = LoadNameHashField(name); | 2870 Node* hash_field = LoadNameHashField(name); |
| 2872 Assert(WordEqual( | 2871 Assert(WordEqual( |
| 2873 Word32And(hash_field, Int32Constant(Name::kHashNotComputedMask)), | 2872 Word32And(hash_field, Int32Constant(Name::kHashNotComputedMask)), |
| 2874 Int32Constant(0))); | 2873 Int32Constant(0))); |
| 2875 | 2874 |
| 2876 // Using only the low bits in 64-bit mode is unlikely to increase the | 2875 // Using only the low bits in 64-bit mode is unlikely to increase the |
| 2877 // risk of collision even if the heap is spread over an area larger than | 2876 // risk of collision even if the heap is spread over an area larger than |
| 2878 // 4Gb (and not at all if it isn't). | 2877 // 4Gb (and not at all if it isn't). |
| 2879 Node* hash = Int32Add(hash_field, map); | 2878 Node* hash = Int32Add(hash_field, map); |
| 2880 // We always set the in_loop bit to zero when generating the lookup code | 2879 // Base the offset on a simple combination of name and map. |
| 2881 // so do it here too so the hash codes match. | |
| 2882 uint32_t iflags = | |
| 2883 (static_cast<uint32_t>(flags) & ~Code::kFlagsNotUsedInLookup); | |
| 2884 // Base the offset on a simple combination of name, flags, and map. | |
| 2885 hash = Word32Xor(hash, Int32Constant(iflags)); | |
| 2886 uint32_t mask = (StubCache::kPrimaryTableSize - 1) | 2880 uint32_t mask = (StubCache::kPrimaryTableSize - 1) |
| 2887 << StubCache::kCacheIndexShift; | 2881 << StubCache::kCacheIndexShift; |
| 2888 return Word32And(hash, Int32Constant(mask)); | 2882 return Word32And(hash, Int32Constant(mask)); |
| 2889 } | 2883 } |
| 2890 | 2884 |
| 2891 compiler::Node* CodeStubAssembler::StubCacheSecondaryOffset( | 2885 compiler::Node* CodeStubAssembler::StubCacheSecondaryOffset( |
| 2892 compiler::Node* name, Code::Flags flags, compiler::Node* seed) { | 2886 compiler::Node* name, compiler::Node* seed) { |
| 2893 // See v8::internal::StubCache::SecondaryOffset(). | 2887 // See v8::internal::StubCache::SecondaryOffset(). |
| 2894 | 2888 |
| 2895 // Use the seed from the primary cache in the secondary cache. | 2889 // Use the seed from the primary cache in the secondary cache. |
| 2896 Node* hash = Int32Sub(seed, name); | 2890 Node* hash = |
| 2897 // We always set the in_loop bit to zero when generating the lookup code | 2891 Int32Add(Int32Sub(seed, name), Int32Constant(StubCache::kSecondaryMagic)); |
| 2898 // so do it here too so the hash codes match. | |
| 2899 uint32_t iflags = | |
| 2900 (static_cast<uint32_t>(flags) & ~Code::kFlagsNotUsedInLookup); | |
| 2901 hash = Int32Add(hash, Int32Constant(iflags)); | |
| 2902 int32_t mask = (StubCache::kSecondaryTableSize - 1) | 2892 int32_t mask = (StubCache::kSecondaryTableSize - 1) |
| 2903 << StubCache::kCacheIndexShift; | 2893 << StubCache::kCacheIndexShift; |
| 2904 return Word32And(hash, Int32Constant(mask)); | 2894 return Word32And(hash, Int32Constant(mask)); |
| 2905 } | 2895 } |
| 2906 | 2896 |
| 2907 enum CodeStubAssembler::StubCacheTable : int { | 2897 enum CodeStubAssembler::StubCacheTable : int { |
| 2908 kPrimary = static_cast<int>(StubCache::kPrimary), | 2898 kPrimary = static_cast<int>(StubCache::kPrimary), |
| 2909 kSecondary = static_cast<int>(StubCache::kSecondary) | 2899 kSecondary = static_cast<int>(StubCache::kSecondary) |
| 2910 }; | 2900 }; |
| 2911 | 2901 |
| 2912 void CodeStubAssembler::TryProbeStubCacheTable( | 2902 void CodeStubAssembler::TryProbeStubCacheTable( |
| 2913 StubCache* stub_cache, StubCacheTable table_id, | 2903 StubCache* stub_cache, StubCacheTable table_id, |
| 2914 compiler::Node* entry_offset, compiler::Node* name, Code::Flags flags, | 2904 compiler::Node* entry_offset, compiler::Node* name, compiler::Node* map, |
| 2915 compiler::Node* map, Label* if_handler, Variable* var_handler, | 2905 Label* if_handler, Variable* var_handler, Label* if_miss) { |
| 2916 Label* if_miss) { | |
| 2917 StubCache::Table table = static_cast<StubCache::Table>(table_id); | 2906 StubCache::Table table = static_cast<StubCache::Table>(table_id); |
| 2918 #ifdef DEBUG | 2907 #ifdef DEBUG |
| 2919 if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) { | 2908 if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) { |
| 2920 Goto(if_miss); | 2909 Goto(if_miss); |
| 2921 return; | 2910 return; |
| 2922 } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) { | 2911 } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) { |
| 2923 Goto(if_miss); | 2912 Goto(if_miss); |
| 2924 return; | 2913 return; |
| 2925 } | 2914 } |
| 2926 #endif | 2915 #endif |
| 2927 // The {table_offset} holds the entry offset times four (due to masking | 2916 // The {table_offset} holds the entry offset times four (due to masking |
| 2928 // and shifting optimizations). | 2917 // and shifting optimizations). |
| 2929 const int kMultiplier = sizeof(StubCache::Entry) >> Name::kHashShift; | 2918 const int kMultiplier = sizeof(StubCache::Entry) >> Name::kHashShift; |
| 2930 entry_offset = Int32Mul(entry_offset, Int32Constant(kMultiplier)); | 2919 entry_offset = Int32Mul(entry_offset, Int32Constant(kMultiplier)); |
| 2931 | 2920 |
| 2932 // Check that the key in the entry matches the name. | 2921 // Check that the key in the entry matches the name. |
| 2933 Node* key_base = | 2922 Node* key_base = |
| 2934 ExternalConstant(ExternalReference(stub_cache->key_reference(table))); | 2923 ExternalConstant(ExternalReference(stub_cache->key_reference(table))); |
| 2935 Node* entry_key = Load(MachineType::Pointer(), key_base, entry_offset); | 2924 Node* entry_key = Load(MachineType::Pointer(), key_base, entry_offset); |
| 2936 GotoIf(WordNotEqual(name, entry_key), if_miss); | 2925 GotoIf(WordNotEqual(name, entry_key), if_miss); |
| 2937 | 2926 |
| 2938 // Get the map entry from the cache. | 2927 // Get the map entry from the cache. |
| 2939 DCHECK_EQ(kPointerSize * 2, stub_cache->map_reference(table).address() - | 2928 DCHECK_EQ(kPointerSize * 2, stub_cache->map_reference(table).address() - |
| 2940 stub_cache->key_reference(table).address()); | 2929 stub_cache->key_reference(table).address()); |
| 2941 Node* entry_map = | 2930 Node* entry_map = |
| 2942 Load(MachineType::Pointer(), key_base, | 2931 Load(MachineType::Pointer(), key_base, |
| 2943 Int32Add(entry_offset, Int32Constant(kPointerSize * 2))); | 2932 Int32Add(entry_offset, Int32Constant(kPointerSize * 2))); |
| 2944 GotoIf(WordNotEqual(map, entry_map), if_miss); | 2933 GotoIf(WordNotEqual(map, entry_map), if_miss); |
| 2945 | 2934 |
| 2946 // Check that the flags match what we're looking for. | |
| 2947 DCHECK_EQ(kPointerSize, stub_cache->value_reference(table).address() - | 2935 DCHECK_EQ(kPointerSize, stub_cache->value_reference(table).address() - |
| 2948 stub_cache->key_reference(table).address()); | 2936 stub_cache->key_reference(table).address()); |
| 2949 Node* code = Load(MachineType::Pointer(), key_base, | 2937 Node* code = Load(MachineType::Pointer(), key_base, |
| 2950 Int32Add(entry_offset, Int32Constant(kPointerSize))); | 2938 Int32Add(entry_offset, Int32Constant(kPointerSize))); |
| 2951 | 2939 |
| 2940 // Check that the flags match what we're looking for. |
| 2941 Code::Flags flags = Code::RemoveHolderFromFlags( |
| 2942 Code::ComputeHandlerFlags(stub_cache->ic_kind())); |
| 2952 Node* code_flags = | 2943 Node* code_flags = |
| 2953 LoadObjectField(code, Code::kFlagsOffset, MachineType::Uint32()); | 2944 LoadObjectField(code, Code::kFlagsOffset, MachineType::Uint32()); |
| 2954 GotoIf(Word32NotEqual(Int32Constant(flags), | 2945 Assert(Word32Equal( |
| 2955 Word32And(code_flags, | 2946 Int32Constant(flags), |
| 2956 Int32Constant(~Code::kFlagsNotUsedInLookup))), | 2947 Word32And(code_flags, Int32Constant(~Code::kFlagsNotUsedInLookup)))); |
| 2957 if_miss); | |
| 2958 | 2948 |
| 2959 // We found the handler. | 2949 // We found the handler. |
| 2960 var_handler->Bind(code); | 2950 var_handler->Bind(code); |
| 2961 Goto(if_handler); | 2951 Goto(if_handler); |
| 2962 } | 2952 } |
| 2963 | 2953 |
| 2964 void CodeStubAssembler::TryProbeStubCache( | 2954 void CodeStubAssembler::TryProbeStubCache( |
| 2965 StubCache* stub_cache, compiler::Node* receiver, compiler::Node* name, | 2955 StubCache* stub_cache, compiler::Node* receiver, compiler::Node* name, |
| 2966 Label* if_handler, Variable* var_handler, Label* if_miss) { | 2956 Label* if_handler, Variable* var_handler, Label* if_miss) { |
| 2967 Code::Flags flags = Code::RemoveHolderFromFlags( | |
| 2968 Code::ComputeHandlerFlags(stub_cache->ic_kind())); | |
| 2969 | |
| 2970 Label try_secondary(this), miss(this); | 2957 Label try_secondary(this), miss(this); |
| 2971 | 2958 |
| 2972 Counters* counters = isolate()->counters(); | 2959 Counters* counters = isolate()->counters(); |
| 2973 IncrementCounter(counters->megamorphic_stub_cache_probes(), 1); | 2960 IncrementCounter(counters->megamorphic_stub_cache_probes(), 1); |
| 2974 | 2961 |
| 2975 // Check that the {receiver} isn't a smi. | 2962 // Check that the {receiver} isn't a smi. |
| 2976 GotoIf(WordIsSmi(receiver), &miss); | 2963 GotoIf(WordIsSmi(receiver), &miss); |
| 2977 | 2964 |
| 2978 Node* receiver_map = LoadMap(receiver); | 2965 Node* receiver_map = LoadMap(receiver); |
| 2979 | 2966 |
| 2980 // Probe the primary table. | 2967 // Probe the primary table. |
| 2981 Node* primary_offset = StubCachePrimaryOffset(name, flags, receiver_map); | 2968 Node* primary_offset = StubCachePrimaryOffset(name, receiver_map); |
| 2982 TryProbeStubCacheTable(stub_cache, kPrimary, primary_offset, name, flags, | 2969 TryProbeStubCacheTable(stub_cache, kPrimary, primary_offset, name, |
| 2983 receiver_map, if_handler, var_handler, &try_secondary); | 2970 receiver_map, if_handler, var_handler, &try_secondary); |
| 2984 | 2971 |
| 2985 Bind(&try_secondary); | 2972 Bind(&try_secondary); |
| 2986 { | 2973 { |
| 2987 // Probe the secondary table. | 2974 // Probe the secondary table. |
| 2988 Node* secondary_offset = | 2975 Node* secondary_offset = StubCacheSecondaryOffset(name, primary_offset); |
| 2989 StubCacheSecondaryOffset(name, flags, primary_offset); | |
| 2990 TryProbeStubCacheTable(stub_cache, kSecondary, secondary_offset, name, | 2976 TryProbeStubCacheTable(stub_cache, kSecondary, secondary_offset, name, |
| 2991 flags, receiver_map, if_handler, var_handler, &miss); | 2977 receiver_map, if_handler, var_handler, &miss); |
| 2992 } | 2978 } |
| 2993 | 2979 |
| 2994 Bind(&miss); | 2980 Bind(&miss); |
| 2995 { | 2981 { |
| 2996 IncrementCounter(counters->megamorphic_stub_cache_misses(), 1); | 2982 IncrementCounter(counters->megamorphic_stub_cache_misses(), 1); |
| 2997 Goto(if_miss); | 2983 Goto(if_miss); |
| 2998 } | 2984 } |
| 2999 } | 2985 } |
| 3000 | 2986 |
| 3001 void CodeStubAssembler::LoadIC(const LoadICParameters* p) { | 2987 void CodeStubAssembler::LoadIC(const LoadICParameters* p) { |
| (...skipping 130 matching lines...) |
| 3132 } | 3118 } |
| 3133 Bind(&miss); | 3119 Bind(&miss); |
| 3134 { | 3120 { |
| 3135 TailCallRuntime(Runtime::kLoadGlobalIC_Miss, p->context, p->slot, | 3121 TailCallRuntime(Runtime::kLoadGlobalIC_Miss, p->context, p->slot, |
| 3136 p->vector); | 3122 p->vector); |
| 3137 } | 3123 } |
| 3138 } | 3124 } |
| 3139 | 3125 |
| 3140 } // namespace internal | 3126 } // namespace internal |
| 3141 } // namespace v8 | 3127 } // namespace v8 |
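For reference, here is a rough standalone C++ sketch of the hash arithmetic that the rewritten StubCachePrimaryOffset/StubCacheSecondaryOffset now emit, with the Code::Flags term dropped from both hashes as in this CL. The constants below (table sizes, index shift, magic value) are illustrative placeholders only; the real values are defined in src/ic/stub-cache.h and objects.h, and the helper names are invented for the sketch.

// stub_cache_offsets_sketch.cc -- illustrative only, not V8 source.
#include <cstdint>
#include <cstdio>

// Placeholder constants; the real ones live in src/ic/stub-cache.h.
constexpr uint32_t kCacheIndexShift = 2;
constexpr uint32_t kPrimaryTableSize = 2048;
constexpr uint32_t kSecondaryTableSize = 512;
constexpr uint32_t kSecondaryMagic = 0xb0b0b0b0u;  // arbitrary placeholder

// Mirrors the new StubCachePrimaryOffset(): add the name's hash field to
// the low 32 bits of the map pointer, then mask down to an entry offset.
uint32_t PrimaryOffset(uint32_t name_hash_field, uint32_t map_low_bits) {
  uint32_t hash = name_hash_field + map_low_bits;
  uint32_t mask = (kPrimaryTableSize - 1) << kCacheIndexShift;
  return hash & mask;
}

// Mirrors the new StubCacheSecondaryOffset(): reuse the primary offset as
// a seed, subtract the name and add a fixed magic constant, then mask.
uint32_t SecondaryOffset(uint32_t name_low_bits, uint32_t primary_seed) {
  uint32_t hash = (primary_seed - name_low_bits) + kSecondaryMagic;
  uint32_t mask = (kSecondaryTableSize - 1) << kCacheIndexShift;
  return hash & mask;
}

int main() {
  // Toy inputs standing in for a name's hash field and a map address.
  uint32_t hash_field = 0x1234u << kCacheIndexShift;
  uint32_t map = 0xdeadbee0u;
  uint32_t primary = PrimaryOffset(hash_field, map);
  uint32_t secondary = SecondaryOffset(0xfeedf00du, primary);
  std::printf("primary=0x%x secondary=0x%x\n", primary, secondary);
  return 0;
}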
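Similarly, a minimal sketch of the two-level probe that TryProbeStubCache/TryProbeStubCacheTable implement in CSA form above: after this CL a table hit is decided purely by the name and map stored in the entry, while the handler's Code::Flags (derived from stub_cache->ic_kind()) are only Assert-checked in debug code instead of being compared on the fast path. The types and table layout below are stand-ins, not the real V8 classes; the key/value/map field order follows the DCHECKed offsets in the diff.

// stub_cache_probe_sketch.cc -- illustrative only, not V8 source.
#include <cstddef>
#include <cstdint>

// Stand-in types; in V8 these are heap object classes.
struct Name;
struct Map;
struct Code;

// Entry layout assumed from the DCHECKs above: key at +0,
// value at +kPointerSize, map at +2 * kPointerSize.
struct Entry {
  Name* key;
  Code* value;
  Map* map;
};

constexpr size_t kPrimaryTableSize = 2048;   // placeholder size
constexpr size_t kSecondaryTableSize = 512;  // placeholder size
constexpr uint32_t kCacheIndexShift = 2;     // placeholder shift

Entry primary_table[kPrimaryTableSize];
Entry secondary_table[kSecondaryTableSize];

// Probe one table: miss unless both the cached name and the cached map
// match the lookup; no flags comparison on this path anymore.
Code* ProbeTable(Entry* table, size_t table_size, uint32_t entry_offset,
                 Name* name, Map* map) {
  Entry& entry = table[(entry_offset >> kCacheIndexShift) % table_size];
  if (entry.key != name) return nullptr;
  if (entry.map != map) return nullptr;
  return entry.value;  // found a handler
}

// Probe the primary table, then the secondary table seeded with the
// primary offset, and finally report a miss (nullptr -> runtime fallback).
Code* TryProbe(Name* name, Map* map, uint32_t primary_offset,
               uint32_t secondary_offset) {
  if (Code* handler = ProbeTable(primary_table, kPrimaryTableSize,
                                 primary_offset, name, map)) {
    return handler;
  }
  if (Code* handler = ProbeTable(secondary_table, kSecondaryTableSize,
                                 secondary_offset, name, map)) {
    return handler;
  }
  return nullptr;
}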