OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_ARM | 5 #if V8_TARGET_ARCH_ARM |
6 | 6 |
7 #include "src/codegen.h" | 7 #include "src/codegen.h" |
8 #include "src/ic/ic.h" | 8 #include "src/ic/ic.h" |
9 #include "src/ic/stub-cache.h" | 9 #include "src/ic/stub-cache.h" |
10 #include "src/interface-descriptors.h" | 10 #include "src/interface-descriptors.h" |
11 | 11 |
12 namespace v8 { | 12 namespace v8 { |
13 namespace internal { | 13 namespace internal { |
14 | 14 |
15 #define __ ACCESS_MASM(masm) | 15 #define __ ACCESS_MASM(masm) |
16 | 16 |
17 static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm, | 17 static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm, |
18 Code::Flags flags, StubCache::Table table, | 18 Code::Flags flags, StubCache::Table table, |
19 Register receiver, Register name, | 19 Register receiver, Register name, |
20 // Number of the cache entry, not scaled. | 20 // Number of the cache entry, not scaled. |
Jakob Kummerow 2016/07/20 12:09:45
outdated comment: |offset| is scaled now.
Igor Sheludko 2016/07/20 13:40:26
Done.
21 Register offset, Register scratch, Register scratch2, | 21 Register offset, Register scratch, Register scratch2, |
22 Register offset_scratch) { | 22 Register offset_scratch) { |
23 ExternalReference key_offset(stub_cache->key_reference(table)); | 23 ExternalReference key_offset(stub_cache->key_reference(table)); |
24 ExternalReference value_offset(stub_cache->value_reference(table)); | 24 ExternalReference value_offset(stub_cache->value_reference(table)); |
25 ExternalReference map_offset(stub_cache->map_reference(table)); | 25 ExternalReference map_offset(stub_cache->map_reference(table)); |
26 | 26 |
27 uint32_t key_off_addr = reinterpret_cast<uint32_t>(key_offset.address()); | 27 uint32_t key_off_addr = reinterpret_cast<uint32_t>(key_offset.address()); |
28 uint32_t value_off_addr = reinterpret_cast<uint32_t>(value_offset.address()); | 28 uint32_t value_off_addr = reinterpret_cast<uint32_t>(value_offset.address()); |
29 uint32_t map_off_addr = reinterpret_cast<uint32_t>(map_offset.address()); | 29 uint32_t map_off_addr = reinterpret_cast<uint32_t>(map_offset.address()); |
30 | 30 |
31 // Check the relative positions of the address fields. | 31 // Check the relative positions of the address fields. |
32 DCHECK(value_off_addr > key_off_addr); | 32 DCHECK(value_off_addr > key_off_addr); |
33 DCHECK((value_off_addr - key_off_addr) % 4 == 0); | 33 DCHECK((value_off_addr - key_off_addr) % 4 == 0); |
34 DCHECK((value_off_addr - key_off_addr) < (256 * 4)); | 34 DCHECK((value_off_addr - key_off_addr) < (256 * 4)); |
35 DCHECK(map_off_addr > key_off_addr); | 35 DCHECK(map_off_addr > key_off_addr); |
36 DCHECK((map_off_addr - key_off_addr) % 4 == 0); | 36 DCHECK((map_off_addr - key_off_addr) % 4 == 0); |
37 DCHECK((map_off_addr - key_off_addr) < (256 * 4)); | 37 DCHECK((map_off_addr - key_off_addr) < (256 * 4)); |
38 | 38 |
39 Label miss; | 39 Label miss; |
40 Register base_addr = scratch; | 40 Register base_addr = scratch; |
41 scratch = no_reg; | 41 scratch = no_reg; |
42 | 42 |
43 // Multiply by 3 because there are 3 fields per entry (name, code, map). | 43 // Multiply by 3 because there are 3 fields per entry (name, code, map). |
44 __ add(offset_scratch, offset, Operand(offset, LSL, 1)); | 44 __ add(offset_scratch, offset, Operand(offset, LSL, 1)); |
45 | 45 |
46 // Calculate the base address of the entry. | 46 // Calculate the base address of the entry. |
47 __ mov(base_addr, Operand(key_offset)); | 47 __ add(base_addr, offset_scratch, Operand(key_offset)); |
48 __ add(base_addr, base_addr, Operand(offset_scratch, LSL, kPointerSizeLog2)); | |
49 | 48 |
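This hunk is the heart of the change: the old code received |offset| as an unscaled entry index and shifted it by kPointerSizeLog2 here, while the new code receives it pre-scaled (see the hash changes below), so a single add suffices. A minimal scalar sketch of the equivalence, assuming 32-bit ARM values (kPointerSize == 4, kCacheIndexShift == kPointerSizeLog2 == 2); the function names are illustrative, not V8 API:

    // Old scheme: |index| is an unscaled entry index.
    uint32_t base_old(uint32_t key_offset, uint32_t index) {
      uint32_t fields = index + (index << 1);   // index * 3 (name, code, map)
      return key_offset + (fields << 2);        // scale field count to bytes
    }
    // New scheme: |offset| == index << kCacheIndexShift, already in bytes.
    uint32_t base_new(uint32_t key_offset, uint32_t offset) {
      uint32_t bytes = offset + (offset << 1);  // (index * 4) * 3 bytes
      return key_offset + bytes;                // single add, no extra shift
    }
    // For all index: base_new(k, index << 2) == base_old(k, index).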
50 // Check that the key in the entry matches the name. | 49 // Check that the key in the entry matches the name. |
51 __ ldr(ip, MemOperand(base_addr, 0)); | 50 __ ldr(ip, MemOperand(base_addr, 0)); |
52 __ cmp(name, ip); | 51 __ cmp(name, ip); |
53 __ b(ne, &miss); | 52 __ b(ne, &miss); |
54 | 53 |
55 // Check the map matches. | 54 // Check the map matches. |
56 __ ldr(ip, MemOperand(base_addr, map_off_addr - key_off_addr)); | 55 __ ldr(ip, MemOperand(base_addr, map_off_addr - key_off_addr)); |
57 __ ldr(scratch2, FieldMemOperand(receiver, HeapObject::kMapOffset)); | 56 __ ldr(scratch2, FieldMemOperand(receiver, HeapObject::kMapOffset)); |
58 __ cmp(ip, scratch2); | 57 __ cmp(ip, scratch2); |
(...skipping 74 matching lines...) | |
133 __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1, extra2, | 132 __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1, extra2, |
134 extra3); | 133 extra3); |
135 | 134 |
136 // Check that the receiver isn't a smi. | 135 // Check that the receiver isn't a smi. |
137 __ JumpIfSmi(receiver, &miss); | 136 __ JumpIfSmi(receiver, &miss); |
138 | 137 |
139 // Get the map of the receiver and compute the hash. | 138 // Get the map of the receiver and compute the hash. |
140 __ ldr(scratch, FieldMemOperand(name, Name::kHashFieldOffset)); | 139 __ ldr(scratch, FieldMemOperand(name, Name::kHashFieldOffset)); |
141 __ ldr(ip, FieldMemOperand(receiver, HeapObject::kMapOffset)); | 140 __ ldr(ip, FieldMemOperand(receiver, HeapObject::kMapOffset)); |
142 __ add(scratch, scratch, Operand(ip)); | 141 __ add(scratch, scratch, Operand(ip)); |
143 uint32_t mask = kPrimaryTableSize - 1; | 142 __ eor(scratch, scratch, Operand(flags)); |
144 // We shift out the last two bits because they are not part of the hash and | 143 __ mov(ip, Operand(kPrimaryTableSize - 1)); |
145 // they are always 01 for maps. | 144 __ and_(scratch, scratch, Operand(ip, LSL, kCacheIndexShift)); |
146 __ mov(scratch, Operand(scratch, LSR, kCacheIndexShift)); | |
147 // Mask down the eor argument to the minimum to keep the immediate | |
148 // ARM-encodable. | |
149 __ eor(scratch, scratch, Operand((flags >> kCacheIndexShift) & mask)); | |
150 // Prefer and_ to ubfx here because ubfx takes 2 cycles. | |
151 __ and_(scratch, scratch, Operand(mask)); | |
152 | 145 |
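In scalar form, the primary-probe rewrite trades the shift-then-mask entry index for a mask that keeps the kCacheIndexShift scaling in place, which also drops the old ARM-encodability workaround for the eor immediate. A sketch under the same assumptions as above (the table size shown is illustrative, not necessarily V8's actual constant); since xor produces no carries, the two forms agree bit for bit up to the scaling:

    constexpr uint32_t kCacheIndexShift = 2;      // kPointerSizeLog2 on ARM
    constexpr uint32_t kPrimaryTableSize = 2048;  // illustrative value
    // Old: unscaled entry index, scaled later inside ProbeTable.
    uint32_t primary_old(uint32_t hash, uint32_t map, uint32_t flags) {
      const uint32_t mask = kPrimaryTableSize - 1;
      uint32_t index = (hash + map) >> kCacheIndexShift;
      index ^= (flags >> kCacheIndexShift) & mask;
      return index & mask;
    }
    // New: scaled offset; the low kCacheIndexShift bits stay zero.
    uint32_t primary_new(uint32_t hash, uint32_t map, uint32_t flags) {
      uint32_t h = (hash + map) ^ flags;
      return h & ((kPrimaryTableSize - 1) << kCacheIndexShift);
    }
    // primary_new(h, m, f) == primary_old(h, m, f) << kCacheIndexShift.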
153 // Probe the primary table. | 146 // Probe the primary table. |
154 ProbeTable(this, masm, flags, kPrimary, receiver, name, scratch, extra, | 147 ProbeTable(this, masm, flags, kPrimary, receiver, name, scratch, extra, |
155 extra2, extra3); | 148 extra2, extra3); |
156 | 149 |
157 // Primary miss: Compute hash for secondary probe. | 150 // Primary miss: Compute hash for secondary probe. |
158 __ sub(scratch, scratch, Operand(name, LSR, kCacheIndexShift)); | 151 __ sub(scratch, scratch, Operand(name)); |
159 uint32_t mask2 = kSecondaryTableSize - 1; | 152 __ add(scratch, scratch, Operand(flags)); |
160 __ add(scratch, scratch, Operand((flags >> kCacheIndexShift) & mask2)); | 153 __ mov(ip, Operand(kSecondaryTableSize - 1)); |
161 __ and_(scratch, scratch, Operand(mask2)); | 154 __ and_(scratch, scratch, Operand(ip, LSL, kCacheIndexShift)); |
162 | 155 |
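The secondary probe gets the same treatment, but note that unlike the primary case it is not a bit-for-bit rewrite: the old sequence pre-shifted name and flags before mixing, while the new one subtracts and adds them untruncated, so carries from the low bits now reach the index bits. That is harmless for a cache as long as the C++ side that fills the table computes the same function. A scalar sketch of the new form, same assumptions as above:

    constexpr uint32_t kSecondaryTableSize = 512;  // illustrative value
    // |primary| is the scaled result of the primary probe above.
    uint32_t secondary_new(uint32_t primary, uint32_t name, uint32_t flags) {
      uint32_t h = primary - name + flags;
      return h & ((kSecondaryTableSize - 1) << kCacheIndexShift);
    }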
163 // Probe the secondary table. | 156 // Probe the secondary table. |
164 ProbeTable(this, masm, flags, kSecondary, receiver, name, scratch, extra, | 157 ProbeTable(this, masm, flags, kSecondary, receiver, name, scratch, extra, |
165 extra2, extra3); | 158 extra2, extra3); |
166 | 159 |
167 // Cache miss: Fall-through and let caller handle the miss by | 160 // Cache miss: Fall-through and let caller handle the miss by |
168 // entering the runtime system. | 161 // entering the runtime system. |
169 __ bind(&miss); | 162 __ bind(&miss); |
170 __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1, extra2, | 163 __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1, extra2, |
171 extra3); | 164 extra3); |
172 } | 165 } |
173 | 166 |
174 | 167 |
175 #undef __ | 168 #undef __ |
176 } // namespace internal | 169 } // namespace internal |
177 } // namespace v8 | 170 } // namespace v8 |
178 | 171 |
179 #endif // V8_TARGET_ARCH_ARM | 172 #endif // V8_TARGET_ARCH_ARM |