OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_MIPS | 5 #if V8_TARGET_ARCH_MIPS |
6 | 6 |
7 #include "src/codegen.h" | 7 #include "src/codegen.h" |
8 #include "src/ic/ic.h" | 8 #include "src/ic/ic.h" |
9 #include "src/ic/stub-cache.h" | 9 #include "src/ic/stub-cache.h" |
10 #include "src/interface-descriptors.h" | 10 #include "src/interface-descriptors.h" |
(...skipping 27 matching lines...)
38 | 38 |
39 Label miss; | 39 Label miss; |
40 Register base_addr = scratch; | 40 Register base_addr = scratch; |
41 scratch = no_reg; | 41 scratch = no_reg; |
42 | 42 |
43 // Multiply by 3 because there are 3 fields per entry (name, code, map). | 43 // Multiply by 3 because there are 3 fields per entry (name, code, map). |
44 __ Lsa(offset_scratch, offset, offset, 1); | 44 __ Lsa(offset_scratch, offset, offset, 1); |
45 | 45 |
46 // Calculate the base address of the entry. | 46 // Calculate the base address of the entry. |
47 __ li(base_addr, Operand(key_offset)); | 47 __ li(base_addr, Operand(key_offset)); |
48 __ Lsa(base_addr, base_addr, offset_scratch, kPointerSizeLog2); | 48 __ Addu(base_addr, base_addr, offset_scratch); |
49 | 49 |
50 // Check that the key in the entry matches the name. | 50 // Check that the key in the entry matches the name. |
51 __ lw(at, MemOperand(base_addr, 0)); | 51 __ lw(at, MemOperand(base_addr, 0)); |
52 __ Branch(&miss, ne, name, Operand(at)); | 52 __ Branch(&miss, ne, name, Operand(at)); |
53 | 53 |
54 // Check the map matches. | 54 // Check the map matches. |
55 __ lw(at, MemOperand(base_addr, map_off_addr - key_off_addr)); | 55 __ lw(at, MemOperand(base_addr, map_off_addr - key_off_addr)); |
56 __ lw(scratch2, FieldMemOperand(receiver, HeapObject::kMapOffset)); | 56 __ lw(scratch2, FieldMemOperand(receiver, HeapObject::kMapOffset)); |
57 __ Branch(&miss, ne, at, Operand(scratch2)); | 57 __ Branch(&miss, ne, at, Operand(scratch2)); |
58 | 58 |
(...skipping 68 matching lines...)
127 __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1, extra2, | 127 __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1, extra2, |
128 extra3); | 128 extra3); |
129 | 129 |
130 // Check that the receiver isn't a smi. | 130 // Check that the receiver isn't a smi. |
131 __ JumpIfSmi(receiver, &miss); | 131 __ JumpIfSmi(receiver, &miss); |
132 | 132 |
133 // Get the map of the receiver and compute the hash. | 133 // Get the map of the receiver and compute the hash. |
134 __ lw(scratch, FieldMemOperand(name, Name::kHashFieldOffset)); | 134 __ lw(scratch, FieldMemOperand(name, Name::kHashFieldOffset)); |
135 __ lw(at, FieldMemOperand(receiver, HeapObject::kMapOffset)); | 135 __ lw(at, FieldMemOperand(receiver, HeapObject::kMapOffset)); |
136 __ Addu(scratch, scratch, at); | 136 __ Addu(scratch, scratch, at); |
137 uint32_t mask = kPrimaryTableSize - 1; | 137 __ Xor(scratch, scratch, Operand(flags)); |
138 // We shift out the last two bits because they are not part of the hash and | 138 __ li(at, Operand(kPrimaryTableSize - 1)); |
139 // they are always 01 for maps. | 139 __ sll(at, at, kCacheIndexShift); |
Jakob Kummerow
2016/07/20 12:09:45
Why not fold this into the instruction above?

Igor Sheludko
2016/07/20 13:40:26
Done.
140 __ srl(scratch, scratch, kCacheIndexShift); | 140 __ And(scratch, scratch, at); |
141 __ Xor(scratch, scratch, Operand((flags >> kCacheIndexShift) & mask)); | |
142 __ And(scratch, scratch, Operand(mask)); | |
143 | 141 |
144 // Probe the primary table. | 142 // Probe the primary table. |
145 ProbeTable(this, masm, flags, kPrimary, receiver, name, scratch, extra, | 143 ProbeTable(this, masm, flags, kPrimary, receiver, name, scratch, extra, |
146 extra2, extra3); | 144 extra2, extra3); |
147 | 145 |
148 // Primary miss: Compute hash for secondary probe. | 146 // Primary miss: Compute hash for secondary probe. |
149 __ srl(at, name, kCacheIndexShift); | 147 __ Subu(scratch, scratch, name); |
150 __ Subu(scratch, scratch, at); | 148 __ Addu(scratch, scratch, Operand(flags)); |
151 uint32_t mask2 = kSecondaryTableSize - 1; | 149 __ li(at, Operand(kSecondaryTableSize - 1)); |
152 __ Addu(scratch, scratch, Operand((flags >> kCacheIndexShift) & mask2)); | 150 __ sll(at, at, kCacheIndexShift); |
153 __ And(scratch, scratch, Operand(mask2)); | 151 __ And(scratch, scratch, at); |
154 | 152 |
155 // Probe the secondary table. | 153 // Probe the secondary table. |
156 ProbeTable(this, masm, flags, kSecondary, receiver, name, scratch, extra, | 154 ProbeTable(this, masm, flags, kSecondary, receiver, name, scratch, extra, |
157 extra2, extra3); | 155 extra2, extra3); |
158 | 156 |
159 // Cache miss: Fall-through and let caller handle the miss by | 157 // Cache miss: Fall-through and let caller handle the miss by |
160 // entering the runtime system. | 158 // entering the runtime system. |
161 __ bind(&miss); | 159 __ bind(&miss); |
162 __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1, extra2, | 160 __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1, extra2, |
163 extra3); | 161 extra3); |
164 } | 162 } |
165 | 163 |
166 | 164 |
167 #undef __ | 165 #undef __ |
168 } // namespace internal | 166 } // namespace internal |
169 } // namespace v8 | 167 } // namespace v8 |
170 | 168 |
171 #endif // V8_TARGET_ARCH_MIPS | 169 #endif // V8_TARGET_ARCH_MIPS |
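
For reference, a minimal C++ sketch of the probe-offset math that the new MIPS code computes, with the hash field, map pointer, name pointer and flags all treated as plain 32-bit values. The constants below (kCacheIndexShift, kPrimaryTableSize, kSecondaryTableSize) and the function names are assumptions standing in for the real definitions in src/ic/stub-cache.h, not the actual V8 API:

#include <cstdint>

// Assumed stand-ins for the real StubCache constants.
constexpr int kCacheIndexShift = 2;            // assumption
constexpr uint32_t kPrimaryTableSize = 2048;   // assumption
constexpr uint32_t kSecondaryTableSize = 512;  // assumption

// Primary probe: ((hash_field + map) ^ flags), masked with a mask that is
// already shifted by kCacheIndexShift, so no trailing srl is needed and the
// result is a byte offset rather than an index.
uint32_t PrimaryOffset(uint32_t hash_field, uint32_t map, uint32_t flags) {
  uint32_t scratch = (hash_field + map) ^ flags;
  return scratch & ((kPrimaryTableSize - 1) << kCacheIndexShift);
}

// Secondary probe: derived from the primary offset by subtracting the name
// and adding back the flags, then masking with the secondary-table mask.
uint32_t SecondaryOffset(uint32_t primary, uint32_t name, uint32_t flags) {
  uint32_t scratch = (primary - name) + flags;
  return scratch & ((kSecondaryTableSize - 1) << kCacheIndexShift);
}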
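
Along the same lines, a sketch of the revised entry addressing in ProbeTable under the same assumptions. Because the offset produced above is already a byte offset (assuming kCacheIndexShift matches kPointerSizeLog2, as the old scaling suggests), the key address is just the table's key base plus three times that offset, which is why the old Lsa by kPointerSizeLog2 becomes a plain Addu:

// Address of the key field of one entry: each entry holds three
// pointer-sized fields (name, code, map), so the byte offset is tripled
// and added to the table's key base address.
uintptr_t EntryKeyAddress(uintptr_t key_base, uint32_t offset) {
  uint32_t offset_times_3 = (offset << 1) + offset;  // Lsa(dst, offset, offset, 1)
  return key_base + offset_times_3;
}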