OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_ARM | 5 #if V8_TARGET_ARCH_ARM |
6 | 6 |
7 #include "src/codegen.h" | 7 #include "src/codegen.h" |
8 #include "src/ic/ic.h" | 8 #include "src/ic/ic.h" |
9 #include "src/ic/stub-cache.h" | 9 #include "src/ic/stub-cache.h" |
10 #include "src/interface-descriptors.h" | 10 #include "src/interface-descriptors.h" |
11 | 11 |
12 namespace v8 { | 12 namespace v8 { |
13 namespace internal { | 13 namespace internal { |
14 | 14 |
15 #define __ ACCESS_MASM(masm) | 15 #define __ ACCESS_MASM(masm) |
16 | 16 |
17 static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm, | 17 static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm, |
18 Code::Flags flags, StubCache::Table table, | 18 StubCache::Table table, Register receiver, Register name, |
19 Register receiver, Register name, | |
20 // Number of the cache entry, not scaled. | 19 // The offset is scaled by 4, based on kCacheIndexShift, which is two bits. |
21 Register offset, Register scratch, Register scratch2, | 20 Register offset, Register scratch, Register scratch2, |
22 Register offset_scratch) { | 21 Register offset_scratch) { |
23 ExternalReference key_offset(stub_cache->key_reference(table)); | 22 ExternalReference key_offset(stub_cache->key_reference(table)); |
24 ExternalReference value_offset(stub_cache->value_reference(table)); | 23 ExternalReference value_offset(stub_cache->value_reference(table)); |
25 ExternalReference map_offset(stub_cache->map_reference(table)); | 24 ExternalReference map_offset(stub_cache->map_reference(table)); |
26 | 25 |
27 uint32_t key_off_addr = reinterpret_cast<uint32_t>(key_offset.address()); | 26 uint32_t key_off_addr = reinterpret_cast<uint32_t>(key_offset.address()); |
28 uint32_t value_off_addr = reinterpret_cast<uint32_t>(value_offset.address()); | 27 uint32_t value_off_addr = reinterpret_cast<uint32_t>(value_offset.address()); |
29 uint32_t map_off_addr = reinterpret_cast<uint32_t>(map_offset.address()); | 28 uint32_t map_off_addr = reinterpret_cast<uint32_t>(map_offset.address()); |
30 | 29 |
31 // Check the relative positions of the address fields. | 30 // Check the relative positions of the address fields. |
32 DCHECK(value_off_addr > key_off_addr); | 31 DCHECK(value_off_addr > key_off_addr); |
33 DCHECK((value_off_addr - key_off_addr) % 4 == 0); | 32 DCHECK((value_off_addr - key_off_addr) % 4 == 0); |
34 DCHECK((value_off_addr - key_off_addr) < (256 * 4)); | 33 DCHECK((value_off_addr - key_off_addr) < (256 * 4)); |
35 DCHECK(map_off_addr > key_off_addr); | 34 DCHECK(map_off_addr > key_off_addr); |
36 DCHECK((map_off_addr - key_off_addr) % 4 == 0); | 35 DCHECK((map_off_addr - key_off_addr) % 4 == 0); |
37 DCHECK((map_off_addr - key_off_addr) < (256 * 4)); | 36 DCHECK((map_off_addr - key_off_addr) < (256 * 4)); |
38 | 37 |
39 Label miss; | 38 Label miss; |
40 Register base_addr = scratch; | 39 Register base_addr = scratch; |
41 scratch = no_reg; | 40 scratch = no_reg; |
42 | 41 |
43 // Multiply by 3 because there are 3 fields per entry (name, code, map). | 42 // Multiply by 3 because there are 3 fields per entry (name, code, map). |
44 __ add(offset_scratch, offset, Operand(offset, LSL, 1)); | 43 __ add(offset_scratch, offset, Operand(offset, LSL, 1)); |
45 | 44 |
46 // Calculate the base address of the entry. | 45 // Calculate the base address of the entry. |
47 __ mov(base_addr, Operand(key_offset)); | 46 __ add(base_addr, offset_scratch, Operand(key_offset)); |
48 __ add(base_addr, base_addr, Operand(offset_scratch, LSL, kPointerSizeLog2)); | |
49 | 47 |
50 // Check that the key in the entry matches the name. | 48 // Check that the key in the entry matches the name. |
51 __ ldr(ip, MemOperand(base_addr, 0)); | 49 __ ldr(ip, MemOperand(base_addr, 0)); |
52 __ cmp(name, ip); | 50 __ cmp(name, ip); |
53 __ b(ne, &miss); | 51 __ b(ne, &miss); |
54 | 52 |
55 // Check the map matches. | 53 // Check the map matches. |
56 __ ldr(ip, MemOperand(base_addr, map_off_addr - key_off_addr)); | 54 __ ldr(ip, MemOperand(base_addr, map_off_addr - key_off_addr)); |
57 __ ldr(scratch2, FieldMemOperand(receiver, HeapObject::kMapOffset)); | 55 __ ldr(scratch2, FieldMemOperand(receiver, HeapObject::kMapOffset)); |
58 __ cmp(ip, scratch2); | 56 __ cmp(ip, scratch2); |
59 __ b(ne, &miss); | 57 __ b(ne, &miss); |
60 | 58 |
61 // Get the code entry from the cache. | 59 // Get the code entry from the cache. |
62 Register code = scratch2; | 60 Register code = scratch2; |
63 scratch2 = no_reg; | 61 scratch2 = no_reg; |
64 __ ldr(code, MemOperand(base_addr, value_off_addr - key_off_addr)); | 62 __ ldr(code, MemOperand(base_addr, value_off_addr - key_off_addr)); |
65 | 63 |
| 64 #ifdef DEBUG |
66 // Check that the flags match what we're looking for. | 65 // Check that the flags match what we're looking for. |
67 Register flags_reg = base_addr; | 66 Register flags_reg = base_addr; |
68 base_addr = no_reg; | 67 base_addr = no_reg; |
69 __ ldr(flags_reg, FieldMemOperand(code, Code::kFlagsOffset)); | 68 __ ldr(flags_reg, FieldMemOperand(code, Code::kFlagsOffset)); |
70 // It's a nice optimization if this constant is encodable in the bic insn. | 69 // It's a nice optimization if this constant is encodable in the bic insn. |
71 | 70 |
| 71 Code::Flags flags = Code::RemoveHolderFromFlags( |
| 72 Code::ComputeHandlerFlags(stub_cache->ic_kind())); |
72 uint32_t mask = Code::kFlagsNotUsedInLookup; | 73 uint32_t mask = Code::kFlagsNotUsedInLookup; |
73 DCHECK(__ ImmediateFitsAddrMode1Instruction(mask)); | 74 DCHECK(__ ImmediateFitsAddrMode1Instruction(mask)); |
74 __ bic(flags_reg, flags_reg, Operand(mask)); | 75 __ bic(flags_reg, flags_reg, Operand(mask)); |
75 __ cmp(flags_reg, Operand(flags)); | 76 __ cmp(flags_reg, Operand(flags)); |
76 __ b(ne, &miss); | 77 __ Check(eq, kUnexpectedValue); |
77 | 78 |
78 #ifdef DEBUG | |
79 if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) { | 79 if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) { |
80 __ jmp(&miss); | 80 __ jmp(&miss); |
81 } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) { | 81 } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) { |
82 __ jmp(&miss); | 82 __ jmp(&miss); |
83 } | 83 } |
84 #endif | 84 #endif |
85 | 85 |
86 // Jump to the first instruction in the code stub. | 86 // Jump to the first instruction in the code stub. |
87 __ add(pc, code, Operand(Code::kHeaderSize - kHeapObjectTag)); | 87 __ add(pc, code, Operand(Code::kHeaderSize - kHeapObjectTag)); |
88 | 88 |
89 // Miss: fall through. | 89 // Miss: fall through. |
90 __ bind(&miss); | 90 __ bind(&miss); |
91 } | 91 } |
92 | 92 |
93 void StubCache::GenerateProbe(MacroAssembler* masm, Register receiver, | 93 void StubCache::GenerateProbe(MacroAssembler* masm, Register receiver, |
94 Register name, Register scratch, Register extra, | 94 Register name, Register scratch, Register extra, |
95 Register extra2, Register extra3) { | 95 Register extra2, Register extra3) { |
96 Code::Flags flags = | |
97 Code::RemoveHolderFromFlags(Code::ComputeHandlerFlags(ic_kind_)); | |
98 | |
99 Label miss; | 96 Label miss; |
100 | 97 |
101 // Make sure that code is valid. The multiplying code relies on the | 98 // Make sure that code is valid. The multiplying code relies on the |
102 // entry size being 12. | 99 // entry size being 12. |
103 DCHECK(sizeof(Entry) == 12); | 100 DCHECK(sizeof(Entry) == 12); |
104 | 101 |
105 // Make sure that there are no register conflicts. | 102 // Make sure that there are no register conflicts. |
106 DCHECK(!AreAliased(receiver, name, scratch, extra, extra2, extra3)); | 103 DCHECK(!AreAliased(receiver, name, scratch, extra, extra2, extra3)); |
107 | 104 |
108 // Check scratch, extra and extra2 registers are valid. | 105 // Check scratch, extra and extra2 registers are valid. |
(...skipping 24 matching lines...) |
133 __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1, extra2, | 130 __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1, extra2, |
134 extra3); | 131 extra3); |
135 | 132 |
136 // Check that the receiver isn't a smi. | 133 // Check that the receiver isn't a smi. |
137 __ JumpIfSmi(receiver, &miss); | 134 __ JumpIfSmi(receiver, &miss); |
138 | 135 |
139 // Get the map of the receiver and compute the hash. | 136 // Get the map of the receiver and compute the hash. |
140 __ ldr(scratch, FieldMemOperand(name, Name::kHashFieldOffset)); | 137 __ ldr(scratch, FieldMemOperand(name, Name::kHashFieldOffset)); |
141 __ ldr(ip, FieldMemOperand(receiver, HeapObject::kMapOffset)); | 138 __ ldr(ip, FieldMemOperand(receiver, HeapObject::kMapOffset)); |
142 __ add(scratch, scratch, Operand(ip)); | 139 __ add(scratch, scratch, Operand(ip)); |
143 uint32_t mask = kPrimaryTableSize - 1; | 140 __ mov(ip, Operand(kPrimaryTableSize - 1)); |
144 // We shift out the last two bits because they are not part of the hash and | 141 __ and_(scratch, scratch, Operand(ip, LSL, kCacheIndexShift)); |
145 // they are always 01 for maps. | |
146 __ mov(scratch, Operand(scratch, LSR, kCacheIndexShift)); | |
147 // Mask down the eor argument to the minimum to keep the immediate | |
148 // ARM-encodable. | |
149 __ eor(scratch, scratch, Operand((flags >> kCacheIndexShift) & mask)); | |
150 // Prefer and_ to ubfx here because ubfx takes 2 cycles. | |
151 __ and_(scratch, scratch, Operand(mask)); | |
152 | 142 |
153 // Probe the primary table. | 143 // Probe the primary table. |
154 ProbeTable(this, masm, flags, kPrimary, receiver, name, scratch, extra, | 144 ProbeTable(this, masm, kPrimary, receiver, name, scratch, extra, extra2, |
155 extra2, extra3); | 145 extra3); |
156 | 146 |
157 // Primary miss: Compute hash for secondary probe. | 147 // Primary miss: Compute hash for secondary probe. |
158 __ sub(scratch, scratch, Operand(name, LSR, kCacheIndexShift)); | 148 __ sub(scratch, scratch, Operand(name)); |
159 uint32_t mask2 = kSecondaryTableSize - 1; | 149 __ add(scratch, scratch, Operand(kSecondaryMagic)); |
160 __ add(scratch, scratch, Operand((flags >> kCacheIndexShift) & mask2)); | 150 __ mov(ip, Operand(kSecondaryTableSize - 1)); |
161 __ and_(scratch, scratch, Operand(mask2)); | 151 __ and_(scratch, scratch, Operand(ip, LSL, kCacheIndexShift)); |
162 | 152 |
163 // Probe the secondary table. | 153 // Probe the secondary table. |
164 ProbeTable(this, masm, flags, kSecondary, receiver, name, scratch, extra, | 154 ProbeTable(this, masm, kSecondary, receiver, name, scratch, extra, extra2, |
165 extra2, extra3); | 155 extra3); |
166 | 156 |
167 // Cache miss: Fall-through and let caller handle the miss by | 157 // Cache miss: Fall-through and let caller handle the miss by |
168 // entering the runtime system. | 158 // entering the runtime system. |
169 __ bind(&miss); | 159 __ bind(&miss); |
170 __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1, extra2, | 160 __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1, extra2, |
171 extra3); | 161 extra3); |
172 } | 162 } |
173 | 163 |
174 | 164 |
175 #undef __ | 165 #undef __ |
176 } // namespace internal | 166 } // namespace internal |
177 } // namespace v8 | 167 } // namespace v8 |
178 | 168 |
179 #endif // V8_TARGET_ARCH_ARM | 169 #endif // V8_TARGET_ARCH_ARM |
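
For reference, a minimal standalone C++ sketch of the index arithmetic that the rewritten ARM sequences emit, assuming kCacheIndexShift is two bits on 32-bit ARM. The table sizes, the kSecondaryMagic value, and the helper names below are placeholders for illustration, not the constants defined in src/ic/stub-cache.h.

#include <cstdint>

namespace stub_cache_sketch {

// Placeholder constants; the real values live in src/ic/stub-cache.h.
constexpr int kCacheIndexShift = 2;               // kPointerSizeLog2 on 32-bit ARM
constexpr uint32_t kPrimaryTableSize = 2048;      // illustrative size only
constexpr uint32_t kSecondaryTableSize = 512;     // illustrative size only
constexpr uint32_t kSecondaryMagic = 0xDEADBEEF;  // placeholder value

// Primary probe: add the name's hash field to the receiver's map word and
// mask with (table size - 1) << kCacheIndexShift. Because the low
// kCacheIndexShift bits are cleared, the result is already the entry index
// scaled by the pointer size, which is why ProbeTable only multiplies by 3
// (three pointer-sized fields per 12-byte entry) before adding the table base.
inline uint32_t PrimaryOffset(uint32_t name_hash_field, uint32_t map_word) {
  uint32_t hash = name_hash_field + map_word;
  return hash & ((kPrimaryTableSize - 1) << kCacheIndexShift);
}

// Secondary probe: subtract the name word from the primary offset, add the
// magic constant, and mask with the secondary table's scaled mask, mirroring
// the sub/add/and_ sequence in GenerateProbe above.
inline uint32_t SecondaryOffset(uint32_t primary_offset, uint32_t name_word) {
  uint32_t hash = primary_offset - name_word + kSecondaryMagic;
  return hash & ((kSecondaryTableSize - 1) << kCacheIndexShift);
}

}  // namespace stub_cache_sketch

Note that with this change the Code::Flags term no longer feeds either hash: the index depends only on the name's hash field and the receiver's map, and the flags comparison in ProbeTable survives only as a DEBUG-mode Check.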