| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #if V8_TARGET_ARCH_MIPS64 | 5 #if V8_TARGET_ARCH_MIPS64 |
| 6 | 6 |
| 7 #include "src/codegen.h" | 7 #include "src/codegen.h" |
| 8 #include "src/ic/ic.h" | 8 #include "src/ic/ic.h" |
| 9 #include "src/ic/stub-cache.h" | 9 #include "src/ic/stub-cache.h" |
| 10 #include "src/interface-descriptors.h" | 10 #include "src/interface-descriptors.h" |
| 11 | 11 |
| 12 namespace v8 { | 12 namespace v8 { |
| 13 namespace internal { | 13 namespace internal { |
| 14 | 14 |
| 15 #define __ ACCESS_MASM(masm) | 15 #define __ ACCESS_MASM(masm) |
| 16 | 16 |
| 17 static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm, | 17 static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm, |
| 18 Code::Flags flags, StubCache::Table table, | 18 Code::Flags flags, StubCache::Table table, |
| 19 Register receiver, Register name, | 19 Register receiver, Register name, |
| 20 // Number of the cache entry, not scaled. | 20 // The offset is scaled by 4, based on |
| | 21 // kCacheIndexShift, which is two bits |
| 21 Register offset, Register scratch, Register scratch2, | 22 Register offset, Register scratch, Register scratch2, |
| 22 Register offset_scratch) { | 23 Register offset_scratch) { |
| 23 ExternalReference key_offset(stub_cache->key_reference(table)); | 24 ExternalReference key_offset(stub_cache->key_reference(table)); |
| 24 ExternalReference value_offset(stub_cache->value_reference(table)); | 25 ExternalReference value_offset(stub_cache->value_reference(table)); |
| 25 ExternalReference map_offset(stub_cache->map_reference(table)); | 26 ExternalReference map_offset(stub_cache->map_reference(table)); |
| 26 | 27 |
| 27 uint64_t key_off_addr = reinterpret_cast<uint64_t>(key_offset.address()); | 28 uint64_t key_off_addr = reinterpret_cast<uint64_t>(key_offset.address()); |
| 28 uint64_t value_off_addr = reinterpret_cast<uint64_t>(value_offset.address()); | 29 uint64_t value_off_addr = reinterpret_cast<uint64_t>(value_offset.address()); |
| 29 uint64_t map_off_addr = reinterpret_cast<uint64_t>(map_offset.address()); | 30 uint64_t map_off_addr = reinterpret_cast<uint64_t>(map_offset.address()); |
| 30 | 31 |
| 31 // Check the relative positions of the address fields. | 32 // Check the relative positions of the address fields. |
| 32 DCHECK(value_off_addr > key_off_addr); | 33 DCHECK(value_off_addr > key_off_addr); |
| 33 DCHECK((value_off_addr - key_off_addr) % 4 == 0); | 34 DCHECK((value_off_addr - key_off_addr) % 4 == 0); |
| 34 DCHECK((value_off_addr - key_off_addr) < (256 * 4)); | 35 DCHECK((value_off_addr - key_off_addr) < (256 * 4)); |
| 35 DCHECK(map_off_addr > key_off_addr); | 36 DCHECK(map_off_addr > key_off_addr); |
| 36 DCHECK((map_off_addr - key_off_addr) % 4 == 0); | 37 DCHECK((map_off_addr - key_off_addr) % 4 == 0); |
| 37 DCHECK((map_off_addr - key_off_addr) < (256 * 4)); | 38 DCHECK((map_off_addr - key_off_addr) < (256 * 4)); |
| 38 | 39 |
| 39 Label miss; | 40 Label miss; |
| 40 Register base_addr = scratch; | 41 Register base_addr = scratch; |
| 41 scratch = no_reg; | 42 scratch = no_reg; |
| 42 | 43 |
| 43 // Multiply by 3 because there are 3 fields per entry (name, code, map). | 44 // Multiply by 3 because there are 3 fields per entry (name, code, map). |
| 44 __ Dlsa(offset_scratch, offset, offset, 1); | 45 __ Dlsa(offset_scratch, offset, offset, 1); |
| 45 | 46 |
| 46 // Calculate the base address of the entry. | 47 // Calculate the base address of the entry. |
| 47 __ li(base_addr, Operand(key_offset)); | 48 __ li(base_addr, Operand(key_offset)); |
| 48 __ Dlsa(base_addr, base_addr, offset_scratch, kPointerSizeLog2); | 49 __ Dlsa(base_addr, base_addr, offset_scratch, |
| | 50 kPointerSizeLog2 - StubCache::kCacheIndexShift); |
| 49 | 51 |
| 50 // Check that the key in the entry matches the name. | 52 // Check that the key in the entry matches the name. |
| 51 __ ld(at, MemOperand(base_addr, 0)); | 53 __ ld(at, MemOperand(base_addr, 0)); |
| 52 __ Branch(&miss, ne, name, Operand(at)); | 54 __ Branch(&miss, ne, name, Operand(at)); |
| 53 | 55 |
| 54 // Check the map matches. | 56 // Check the map matches. |
| 55 __ ld(at, MemOperand(base_addr, | 57 __ ld(at, MemOperand(base_addr, |
| 56 static_cast<int32_t>(map_off_addr - key_off_addr))); | 58 static_cast<int32_t>(map_off_addr - key_off_addr))); |
| 57 __ ld(scratch2, FieldMemOperand(receiver, HeapObject::kMapOffset)); | 59 __ ld(scratch2, FieldMemOperand(receiver, HeapObject::kMapOffset)); |
| 58 __ Branch(&miss, ne, at, Operand(scratch2)); | 60 __ Branch(&miss, ne, at, Operand(scratch2)); |
| (...skipping 68 matching lines...) | |
| 127 #endif | 129 #endif |
| 128 | 130 |
| 129 Counters* counters = masm->isolate()->counters(); | 131 Counters* counters = masm->isolate()->counters(); |
| 130 __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1, extra2, | 132 __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1, extra2, |
| 131 extra3); | 133 extra3); |
| 132 | 134 |
| 133 // Check that the receiver isn't a smi. | 135 // Check that the receiver isn't a smi. |
| 134 __ JumpIfSmi(receiver, &miss); | 136 __ JumpIfSmi(receiver, &miss); |
| 135 | 137 |
| 136 // Get the map of the receiver and compute the hash. | 138 // Get the map of the receiver and compute the hash. |
| 137 __ ld(scratch, FieldMemOperand(name, Name::kHashFieldOffset)); | 139 __ lwu(scratch, FieldMemOperand(name, Name::kHashFieldOffset)); |
| 138 __ ld(at, FieldMemOperand(receiver, HeapObject::kMapOffset)); | 140 __ ld(at, FieldMemOperand(receiver, HeapObject::kMapOffset)); |
| 139 __ Daddu(scratch, scratch, at); | 141 __ Addu(scratch, scratch, at); |
| 140 uint64_t mask = kPrimaryTableSize - 1; | 142 __ Xor(scratch, scratch, Operand(flags)); |
| 141 // We shift out the last two bits because they are not part of the hash and | 143 __ And(scratch, scratch, |
| 142 // they are always 01 for maps. | 144 Operand((kPrimaryTableSize - 1) << kCacheIndexShift)); |
| 143 __ dsrl(scratch, scratch, kCacheIndexShift); | |
| 144 __ Xor(scratch, scratch, Operand((flags >> kCacheIndexShift) & mask)); | |
| 145 __ And(scratch, scratch, Operand(mask)); | |
| 146 | 145 |
| 147 // Probe the primary table. | 146 // Probe the primary table. |
| 148 ProbeTable(this, masm, flags, kPrimary, receiver, name, scratch, extra, | 147 ProbeTable(this, masm, flags, kPrimary, receiver, name, scratch, extra, |
| 149 extra2, extra3); | 148 extra2, extra3); |
| 150 | 149 |
| 151 // Primary miss: Compute hash for secondary probe. | 150 // Primary miss: Compute hash for secondary probe. |
| 152 __ dsrl(at, name, kCacheIndexShift); | 151 __ Subu(scratch, scratch, name); |
| 153 __ Dsubu(scratch, scratch, at); | 152 __ Addu(scratch, scratch, flags); |
| 154 uint64_t mask2 = kSecondaryTableSize - 1; | 153 __ And(scratch, scratch, |
| 155 __ Daddu(scratch, scratch, Operand((flags >> kCacheIndexShift) & mask2)); | 154 Operand((kSecondaryTableSize - 1) << kCacheIndexShift)); |
| 156 __ And(scratch, scratch, Operand(mask2)); | |
| 157 | 155 |
| 158 // Probe the secondary table. | 156 // Probe the secondary table. |
| 159 ProbeTable(this, masm, flags, kSecondary, receiver, name, scratch, extra, | 157 ProbeTable(this, masm, flags, kSecondary, receiver, name, scratch, extra, |
| 160 extra2, extra3); | 158 extra2, extra3); |
| 161 | 159 |
| 162 // Cache miss: Fall-through and let caller handle the miss by | 160 // Cache miss: Fall-through and let caller handle the miss by |
| 163 // entering the runtime system. | 161 // entering the runtime system. |
| 164 __ bind(&miss); | 162 __ bind(&miss); |
| 165 __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1, extra2, | 163 __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1, extra2, |
| 166 extra3); | 164 extra3); |
| 167 } | 165 } |
| 168 | 166 |
| 169 | 167 |
| 170 #undef __ | 168 #undef __ |
| 171 } // namespace internal | 169 } // namespace internal |
| 172 } // namespace v8 | 170 } // namespace v8 |
| 173 | 171 |
| 174 #endif // V8_TARGET_ARCH_MIPS64 | 172 #endif // V8_TARGET_ARCH_MIPS64 |
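
For reference, a minimal C++ sketch (not V8 code) of the hash arithmetic the rewritten MIPS64 assembly above performs. The helper names are hypothetical, and the table sizes are assumed values; kCacheIndexShift being two bits and the three-field entry layout come from the comments in the diff itself.

#include <cstdint>

// Assumed StubCache constants; the low two bits of the hash are not part of
// the table index (kCacheIndexShift, "which is two bits" per the new comment).
constexpr int kCacheIndexShift = 2;
constexpr uint32_t kPrimaryTableSize = 2048;   // assumed table size
constexpr uint32_t kSecondaryTableSize = 512;  // assumed table size
constexpr int kPointerSizeLog2 = 3;            // 64-bit target

// Primary probe (new code): (name->hash_field + map) ^ flags, masked to an
// index that stays scaled by 4 (kCacheIndexShift) rather than shifted out.
uint32_t PrimaryOffset(uint32_t hash_field, uint32_t map_low32, uint32_t flags) {
  uint32_t key = (hash_field + map_low32) ^ flags;
  return key & ((kPrimaryTableSize - 1) << kCacheIndexShift);
}

// Secondary probe (new code): (primary - name + flags), masked the same way.
uint32_t SecondaryOffset(uint32_t primary, uint32_t name_low32, uint32_t flags) {
  uint32_t key = primary - name_low32 + flags;
  return key & ((kSecondaryTableSize - 1) << kCacheIndexShift);
}

// Entry address in ProbeTable: three pointer-sized fields per entry
// (name, code, map), so the scaled offset is multiplied by 3 (the Dlsa by 1)
// and rescaled from 4-byte cache-index units to pointer units.
uintptr_t EntryAddress(uintptr_t key_table_base, uint32_t scaled_offset) {
  uint32_t index_times_3 = scaled_offset + (scaled_offset << 1);
  return key_table_base + (static_cast<uintptr_t>(index_times_3)
                           << (kPointerSizeLog2 - kCacheIndexShift));
}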