OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_ARM64 | 5 #if V8_TARGET_ARCH_ARM64 |
6 | 6 |
7 #include "src/codegen.h" | 7 #include "src/codegen.h" |
8 #include "src/ic/ic.h" | 8 #include "src/ic/ic.h" |
9 #include "src/ic/stub-cache.h" | 9 #include "src/ic/stub-cache.h" |
10 #include "src/interface-descriptors.h" | 10 #include "src/interface-descriptors.h" |
11 | 11 |
12 namespace v8 { | 12 namespace v8 { |
13 namespace internal { | 13 namespace internal { |
14 | 14 |
15 | 15 |
16 #define __ ACCESS_MASM(masm) | 16 #define __ ACCESS_MASM(masm) |
17 | 17 |
18 | 18 |
19 // Probe primary or secondary table. | 19 // Probe primary or secondary table. |
20 // If the entry is found in the cache, the generated code jumps to the first | 20 // If the entry is found in the cache, the generated code jumps to the first |
21 // instruction of the stub in the cache. | 21 // instruction of the stub in the cache. |
22 // If there is a miss the code falls through. | 22 // If there is a miss the code falls through. |
23 // | 23 // |
24 // 'receiver', 'name' and 'offset' registers are preserved on miss. | 24 // 'receiver', 'name' and 'offset' registers are preserved on miss. |
25 static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm, | 25 static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm, |
26 Code::Flags flags, StubCache::Table table, | 26 Code::Flags flags, StubCache::Table table, |
27 Register receiver, Register name, Register offset, | 27 Register receiver, Register name, |
28 Register scratch, Register scratch2, Register scratch3) { | 28 // The offset is scaled by 4, based on |
| 29 // kCacheIndexShift, which is two bits |
| 30 Register offset, Register scratch, Register scratch2, |
| 31 Register scratch3) { |
29 // Some code below relies on the fact that the Entry struct contains | 32 // Some code below relies on the fact that the Entry struct contains |
30 // 3 pointers (name, code, map). | 33 // 3 pointers (name, code, map). |
31 STATIC_ASSERT(sizeof(StubCache::Entry) == (3 * kPointerSize)); | 34 STATIC_ASSERT(sizeof(StubCache::Entry) == (3 * kPointerSize)); |
32 | 35 |
33 ExternalReference key_offset(stub_cache->key_reference(table)); | 36 ExternalReference key_offset(stub_cache->key_reference(table)); |
34 ExternalReference value_offset(stub_cache->value_reference(table)); | 37 ExternalReference value_offset(stub_cache->value_reference(table)); |
35 ExternalReference map_offset(stub_cache->map_reference(table)); | 38 ExternalReference map_offset(stub_cache->map_reference(table)); |
36 | 39 |
37 uintptr_t key_off_addr = reinterpret_cast<uintptr_t>(key_offset.address()); | 40 uintptr_t key_off_addr = reinterpret_cast<uintptr_t>(key_offset.address()); |
38 uintptr_t value_off_addr = | 41 uintptr_t value_off_addr = |
39 reinterpret_cast<uintptr_t>(value_offset.address()); | 42 reinterpret_cast<uintptr_t>(value_offset.address()); |
40 uintptr_t map_off_addr = reinterpret_cast<uintptr_t>(map_offset.address()); | 43 uintptr_t map_off_addr = reinterpret_cast<uintptr_t>(map_offset.address()); |
41 | 44 |
42 Label miss; | 45 Label miss; |
43 | 46 |
44 DCHECK(!AreAliased(name, offset, scratch, scratch2, scratch3)); | 47 DCHECK(!AreAliased(name, offset, scratch, scratch2, scratch3)); |
45 | 48 |
46 // Multiply by 3 because there are 3 fields per entry. | 49 // Multiply by 3 because there are 3 fields per entry. |
47 __ Add(scratch3, offset, Operand(offset, LSL, 1)); | 50 __ Add(scratch3, offset, Operand(offset, LSL, 1)); |
48 | 51 |
49 // Calculate the base address of the entry. | 52 // Calculate the base address of the entry. |
50 __ Mov(scratch, key_offset); | 53 __ Mov(scratch, key_offset); |
51 __ Add(scratch, scratch, Operand(scratch3, LSL, kPointerSizeLog2)); | 54 __ Add( |
| 55 scratch, scratch, |
| 56 Operand(scratch3, LSL, kPointerSizeLog2 - StubCache::kCacheIndexShift)); |
52 | 57 |
53 // Check that the key in the entry matches the name. | 58 // Check that the key in the entry matches the name. |
54 __ Ldr(scratch2, MemOperand(scratch)); | 59 __ Ldr(scratch2, MemOperand(scratch)); |
55 __ Cmp(name, scratch2); | 60 __ Cmp(name, scratch2); |
56 __ B(ne, &miss); | 61 __ B(ne, &miss); |
57 | 62 |
58 // Check the map matches. | 63 // Check the map matches. |
59 __ Ldr(scratch2, MemOperand(scratch, map_off_addr - key_off_addr)); | 64 __ Ldr(scratch2, MemOperand(scratch, map_off_addr - key_off_addr)); |
60 __ Ldr(scratch3, FieldMemOperand(receiver, HeapObject::kMapOffset)); | 65 __ Ldr(scratch3, FieldMemOperand(receiver, HeapObject::kMapOffset)); |
61 __ Cmp(scratch2, scratch3); | 66 __ Cmp(scratch2, scratch3); |
(...skipping 59 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
121 #endif | 126 #endif |
122 | 127 |
123 Counters* counters = masm->isolate()->counters(); | 128 Counters* counters = masm->isolate()->counters(); |
124 __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1, extra2, | 129 __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1, extra2, |
125 extra3); | 130 extra3); |
126 | 131 |
127 // Check that the receiver isn't a smi. | 132 // Check that the receiver isn't a smi. |
128 __ JumpIfSmi(receiver, &miss); | 133 __ JumpIfSmi(receiver, &miss); |
129 | 134 |
130 // Compute the hash for primary table. | 135 // Compute the hash for primary table. |
131 __ Ldr(scratch, FieldMemOperand(name, Name::kHashFieldOffset)); | 136 __ Ldr(scratch.W(), FieldMemOperand(name, Name::kHashFieldOffset)); |
132 __ Ldr(extra, FieldMemOperand(receiver, HeapObject::kMapOffset)); | 137 __ Ldr(extra, FieldMemOperand(receiver, HeapObject::kMapOffset)); |
133 __ Add(scratch, scratch, extra); | 138 __ Add(scratch, scratch, extra); |
134 __ Eor(scratch, scratch, flags); | 139 __ Eor(scratch, scratch, flags); |
135 // We shift out the last two bits because they are not part of the hash. | 140 __ And(scratch, scratch, |
136 __ Ubfx(scratch, scratch, kCacheIndexShift, | 141 Operand((kPrimaryTableSize - 1) << kCacheIndexShift)); |
137 CountTrailingZeros(kPrimaryTableSize, 64)); | |
138 | 142 |
139 // Probe the primary table. | 143 // Probe the primary table. |
140 ProbeTable(this, masm, flags, kPrimary, receiver, name, scratch, extra, | 144 ProbeTable(this, masm, flags, kPrimary, receiver, name, scratch, extra, |
141 extra2, extra3); | 145 extra2, extra3); |
142 | 146 |
143 // Primary miss: Compute hash for secondary table. | 147 // Primary miss: Compute hash for secondary table. |
144 __ Sub(scratch, scratch, Operand(name, LSR, kCacheIndexShift)); | 148 __ Sub(scratch, scratch, Operand(name)); |
145 __ Add(scratch, scratch, flags >> kCacheIndexShift); | 149 __ Add(scratch, scratch, Operand(flags)); |
146 __ And(scratch, scratch, kSecondaryTableSize - 1); | 150 __ And(scratch, scratch, |
| 151 Operand((kSecondaryTableSize - 1) << kCacheIndexShift)); |
147 | 152 |
148 // Probe the secondary table. | 153 // Probe the secondary table. |
149 ProbeTable(this, masm, flags, kSecondary, receiver, name, scratch, extra, | 154 ProbeTable(this, masm, flags, kSecondary, receiver, name, scratch, extra, |
150 extra2, extra3); | 155 extra2, extra3); |
151 | 156 |
152 // Cache miss: Fall-through and let caller handle the miss by | 157 // Cache miss: Fall-through and let caller handle the miss by |
153 // entering the runtime system. | 158 // entering the runtime system. |
154 __ Bind(&miss); | 159 __ Bind(&miss); |
155 __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1, extra2, | 160 __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1, extra2, |
156 extra3); | 161 extra3); |
157 } | 162 } |
158 } // namespace internal | 163 } // namespace internal |
159 } // namespace v8 | 164 } // namespace v8 |
160 | 165 |
161 #endif // V8_TARGET_ARCH_ARM64 | 166 #endif // V8_TARGET_ARCH_ARM64 |
OLD | NEW |