OLD | NEW |
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #if V8_TARGET_ARCH_PPC | 7 #if V8_TARGET_ARCH_PPC |
8 | 8 |
9 #include "src/codegen.h" | 9 #include "src/codegen.h" |
| 10 #include "src/ic/ic.h" |
10 #include "src/ic/stub-cache.h" | 11 #include "src/ic/stub-cache.h" |
| 12 #include "src/interface-descriptors.h" |
11 | 13 |
12 namespace v8 { | 14 namespace v8 { |
13 namespace internal { | 15 namespace internal { |
14 | 16 |
15 #define __ ACCESS_MASM(masm) | 17 #define __ ACCESS_MASM(masm) |
16 | 18 |
17 | 19 |
18 static void ProbeTable(Isolate* isolate, MacroAssembler* masm, | 20 static void ProbeTable(Isolate* isolate, MacroAssembler* masm, |
19 Code::Flags flags, bool leave_frame, | 21 Code::Kind ic_kind, Code::Flags flags, bool leave_frame, |
20 StubCache::Table table, Register receiver, Register name, | 22 StubCache::Table table, Register receiver, Register name, |
21 // Number of the cache entry, not scaled. | 23 // Number of the cache entry, not scaled. |
22 Register offset, Register scratch, Register scratch2, | 24 Register offset, Register scratch, Register scratch2, |
23 Register offset_scratch) { | 25 Register offset_scratch) { |
24 ExternalReference key_offset(isolate->stub_cache()->key_reference(table)); | 26 ExternalReference key_offset(isolate->stub_cache()->key_reference(table)); |
25 ExternalReference value_offset(isolate->stub_cache()->value_reference(table)); | 27 ExternalReference value_offset(isolate->stub_cache()->value_reference(table)); |
26 ExternalReference map_offset(isolate->stub_cache()->map_reference(table)); | 28 ExternalReference map_offset(isolate->stub_cache()->map_reference(table)); |
27 | 29 |
28 uintptr_t key_off_addr = reinterpret_cast<uintptr_t>(key_offset.address()); | 30 uintptr_t key_off_addr = reinterpret_cast<uintptr_t>(key_offset.address()); |
29 uintptr_t value_off_addr = | 31 uintptr_t value_off_addr = |
(...skipping 11 matching lines...) |
41 Label miss; | 43 Label miss; |
42 Register base_addr = scratch; | 44 Register base_addr = scratch; |
43 scratch = no_reg; | 45 scratch = no_reg; |
44 | 46 |
45 // Multiply by 3 because there are 3 fields per entry (name, code, map). | 47 // Multiply by 3 because there are 3 fields per entry (name, code, map). |
46 __ ShiftLeftImm(offset_scratch, offset, Operand(1)); | 48 __ ShiftLeftImm(offset_scratch, offset, Operand(1)); |
47 __ add(offset_scratch, offset, offset_scratch); | 49 __ add(offset_scratch, offset, offset_scratch); |
48 | 50 |
49 // Calculate the base address of the entry. | 51 // Calculate the base address of the entry. |
50 __ mov(base_addr, Operand(key_offset)); | 52 __ mov(base_addr, Operand(key_offset)); |
51 __ ShiftLeftImm(scratch2, offset_scratch, Operand(kPointerSizeLog2)); | 53 #if V8_TARGET_ARCH_PPC64 |
52 __ add(base_addr, base_addr, scratch2); | 54 DCHECK(kPointerSizeLog2 > StubCache::kCacheIndexShift); |
| 55 __ ShiftLeftImm(offset_scratch, offset_scratch, |
| 56 Operand(kPointerSizeLog2 - StubCache::kCacheIndexShift)); |
| 57 #else |
| 58 DCHECK(kPointerSizeLog2 == StubCache::kCacheIndexShift); |
| 59 #endif |
| 60 __ add(base_addr, base_addr, offset_scratch); |
53 | 61 |
54 // Check that the key in the entry matches the name. | 62 // Check that the key in the entry matches the name. |
55 __ LoadP(ip, MemOperand(base_addr, 0)); | 63 __ LoadP(ip, MemOperand(base_addr, 0)); |
56 __ cmp(name, ip); | 64 __ cmp(name, ip); |
57 __ bne(&miss); | 65 __ bne(&miss); |
58 | 66 |
59 // Check the map matches. | 67 // Check the map matches. |
60 __ LoadP(ip, MemOperand(base_addr, map_off_addr - key_off_addr)); | 68 __ LoadP(ip, MemOperand(base_addr, map_off_addr - key_off_addr)); |
61 __ LoadP(scratch2, FieldMemOperand(receiver, HeapObject::kMapOffset)); | 69 __ LoadP(scratch2, FieldMemOperand(receiver, HeapObject::kMapOffset)); |
62 __ cmp(ip, scratch2); | 70 __ cmp(ip, scratch2); |
(...skipping 29 matching lines...) |
92 // Jump to the first instruction in the code stub. | 100 // Jump to the first instruction in the code stub. |
93 __ addi(r0, code, Operand(Code::kHeaderSize - kHeapObjectTag)); | 101 __ addi(r0, code, Operand(Code::kHeaderSize - kHeapObjectTag)); |
94 __ mtctr(r0); | 102 __ mtctr(r0); |
95 __ bctr(); | 103 __ bctr(); |
96 | 104 |
97 // Miss: fall through. | 105 // Miss: fall through. |
98 __ bind(&miss); | 106 __ bind(&miss); |
99 } | 107 } |
100 | 108 |
101 | 109 |
102 void StubCache::GenerateProbe(MacroAssembler* masm, Code::Flags flags, | 110 void StubCache::GenerateProbe(MacroAssembler* masm, Code::Kind ic_kind, |
103 bool leave_frame, Register receiver, | 111 Code::Flags flags, bool leave_frame, |
104 Register name, Register scratch, Register extra, | 112 Register receiver, Register name, |
105 Register extra2, Register extra3) { | 113 Register scratch, Register extra, Register extra2, |
| 114 Register extra3) { |
106 Isolate* isolate = masm->isolate(); | 115 Isolate* isolate = masm->isolate(); |
107 Label miss; | 116 Label miss; |
108 | 117 |
109 #if V8_TARGET_ARCH_PPC64 | 118 #if V8_TARGET_ARCH_PPC64 |
110 // Make sure that code is valid. The multiplying code relies on the | 119 // Make sure that code is valid. The multiplying code relies on the |
111 // entry size being 24. | 120 // entry size being 24. |
112 DCHECK(sizeof(Entry) == 24); | 121 DCHECK(sizeof(Entry) == 24); |
113 #else | 122 #else |
114 // Make sure that code is valid. The multiplying code relies on the | 123 // Make sure that code is valid. The multiplying code relies on the |
115 // entry size being 12. | 124 // entry size being 12. |
116 DCHECK(sizeof(Entry) == 12); | 125 DCHECK(sizeof(Entry) == 12); |
117 #endif | 126 #endif |
118 | 127 |
119 // Make sure the flags does not name a specific type. | 128 // Make sure the flags does not name a specific type. |
120 DCHECK(Code::ExtractTypeFromFlags(flags) == 0); | 129 DCHECK(Code::ExtractTypeFromFlags(flags) == 0); |
121 | 130 |
122 // Make sure that there are no register conflicts. | 131 // Make sure that there are no register conflicts. |
123 DCHECK(!scratch.is(receiver)); | 132 DCHECK(!AreAliased(receiver, name, scratch, extra, extra2, extra3)); |
124 DCHECK(!scratch.is(name)); | |
125 DCHECK(!extra.is(receiver)); | |
126 DCHECK(!extra.is(name)); | |
127 DCHECK(!extra.is(scratch)); | |
128 DCHECK(!extra2.is(receiver)); | |
129 DCHECK(!extra2.is(name)); | |
130 DCHECK(!extra2.is(scratch)); | |
131 DCHECK(!extra2.is(extra)); | |
132 | 133 |
133 // Check scratch, extra and extra2 registers are valid. | 134 // Check scratch, extra and extra2 registers are valid. |
134 DCHECK(!scratch.is(no_reg)); | 135 DCHECK(!scratch.is(no_reg)); |
135 DCHECK(!extra.is(no_reg)); | 136 DCHECK(!extra.is(no_reg)); |
136 DCHECK(!extra2.is(no_reg)); | 137 DCHECK(!extra2.is(no_reg)); |
137 DCHECK(!extra3.is(no_reg)); | 138 DCHECK(!extra3.is(no_reg)); |
138 | 139 |
| 140 #ifdef DEBUG |
| 141 // If vector-based ics are in use, ensure that scratch, extra, extra2 and |
| 142 // extra3 don't conflict with the vector and slot registers, which need |
| 143 // to be preserved for a handler call or miss. |
| 144 if (IC::ICUseVector(ic_kind)) { |
| 145 Register vector = VectorLoadICDescriptor::VectorRegister(); |
| 146 Register slot = VectorLoadICDescriptor::SlotRegister(); |
| 147 DCHECK(!AreAliased(vector, slot, scratch, extra, extra2, extra3)); |
| 148 } |
| 149 #endif |
| 150 |
139 Counters* counters = masm->isolate()->counters(); | 151 Counters* counters = masm->isolate()->counters(); |
140 __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1, extra2, | 152 __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1, extra2, |
141 extra3); | 153 extra3); |
142 | 154 |
143 // Check that the receiver isn't a smi. | 155 // Check that the receiver isn't a smi. |
144 __ JumpIfSmi(receiver, &miss); | 156 __ JumpIfSmi(receiver, &miss); |
145 | 157 |
146 // Get the map of the receiver and compute the hash. | 158 // Get the map of the receiver and compute the hash. |
147 __ lwz(scratch, FieldMemOperand(name, Name::kHashFieldOffset)); | 159 __ lwz(scratch, FieldMemOperand(name, Name::kHashFieldOffset)); |
148 __ LoadP(ip, FieldMemOperand(receiver, HeapObject::kMapOffset)); | 160 __ LoadP(ip, FieldMemOperand(receiver, HeapObject::kMapOffset)); |
149 __ add(scratch, scratch, ip); | 161 __ add(scratch, scratch, ip); |
150 #if V8_TARGET_ARCH_PPC64 | 162 __ xori(scratch, scratch, Operand(flags)); |
151 // Use only the low 32 bits of the map pointer. | 163 // The mask omits the last two bits because they are not part of the hash. |
152 __ rldicl(scratch, scratch, 0, 32); | 164 __ andi(scratch, scratch, |
153 #endif | 165 Operand((kPrimaryTableSize - 1) << kCacheIndexShift)); |
154 uint32_t mask = kPrimaryTableSize - 1; | |
155 // We shift out the last two bits because they are not part of the hash and | |
156 // they are always 01 for maps. | |
157 __ ShiftRightImm(scratch, scratch, Operand(kCacheIndexShift)); | |
158 // Mask down the eor argument to the minimum to keep the immediate | |
159 // encodable. | |
160 __ xori(scratch, scratch, Operand((flags >> kCacheIndexShift) & mask)); | |
161 // Prefer and_ to ubfx here because ubfx takes 2 cycles. | |
162 __ andi(scratch, scratch, Operand(mask)); | |
163 | 166 |
164 // Probe the primary table. | 167 // Probe the primary table. |
165 ProbeTable(isolate, masm, flags, leave_frame, kPrimary, receiver, name, | 168 ProbeTable(isolate, masm, ic_kind, flags, leave_frame, kPrimary, receiver, |
166 scratch, extra, extra2, extra3); | 169 name, scratch, extra, extra2, extra3); |
167 | 170 |
168 // Primary miss: Compute hash for secondary probe. | 171 // Primary miss: Compute hash for secondary probe. |
169 __ ShiftRightImm(extra, name, Operand(kCacheIndexShift)); | 172 __ sub(scratch, scratch, name); |
170 __ sub(scratch, scratch, extra); | 173 __ addi(scratch, scratch, Operand(flags)); |
171 uint32_t mask2 = kSecondaryTableSize - 1; | 174 __ andi(scratch, scratch, |
172 __ addi(scratch, scratch, Operand((flags >> kCacheIndexShift) & mask2)); | 175 Operand((kSecondaryTableSize - 1) << kCacheIndexShift)); |
173 __ andi(scratch, scratch, Operand(mask2)); | |
174 | 176 |
175 // Probe the secondary table. | 177 // Probe the secondary table. |
176 ProbeTable(isolate, masm, flags, leave_frame, kSecondary, receiver, name, | 178 ProbeTable(isolate, masm, ic_kind, flags, leave_frame, kSecondary, receiver, |
177 scratch, extra, extra2, extra3); | 179 name, scratch, extra, extra2, extra3); |
178 | 180 |
179 // Cache miss: Fall-through and let caller handle the miss by | 181 // Cache miss: Fall-through and let caller handle the miss by |
180 // entering the runtime system. | 182 // entering the runtime system. |
181 __ bind(&miss); | 183 __ bind(&miss); |
182 __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1, extra2, | 184 __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1, extra2, |
183 extra3); | 185 extra3); |
184 } | 186 } |
185 | 187 |
186 | 188 |
187 #undef __ | 189 #undef __ |
188 } | 190 } |
189 } // namespace v8::internal | 191 } // namespace v8::internal |
190 | 192 |
191 #endif // V8_TARGET_ARCH_PPC | 193 #endif // V8_TARGET_ARCH_PPC |
OLD | NEW |