OLD | NEW |
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/compiler/instruction-selector-impl.h" | 5 #include "src/compiler/instruction-selector-impl.h" |
6 #include "src/compiler/node-matchers.h" | 6 #include "src/compiler/node-matchers.h" |
7 | 7 |
8 namespace v8 { | 8 namespace v8 { |
9 namespace internal { | 9 namespace internal { |
10 namespace compiler { | 10 namespace compiler { |
(...skipping 125 matching lines...)
136 // Shared routine for multiple binary operations. | 136 // Shared routine for multiple binary operations. |
137 static void VisitBinop(InstructionSelector* selector, Node* node, | 137 static void VisitBinop(InstructionSelector* selector, Node* node, |
138 ArchOpcode opcode, ImmediateMode operand_mode) { | 138 ArchOpcode opcode, ImmediateMode operand_mode) { |
139 FlagsContinuation cont; | 139 FlagsContinuation cont; |
140 VisitBinop(selector, node, opcode, operand_mode, &cont); | 140 VisitBinop(selector, node, opcode, operand_mode, &cont); |
141 } | 141 } |
142 | 142 |
143 | 143 |
144 void InstructionSelector::VisitLoad(Node* node) { | 144 void InstructionSelector::VisitLoad(Node* node) { |
145 MachineType rep = RepresentationOf(OpParameter<MachineType>(node)); | 145 MachineType rep = RepresentationOf(OpParameter<MachineType>(node)); |
| 146 MachineType typ = TypeOf(OpParameter<MachineType>(node)); |
146 Arm64OperandGenerator g(this); | 147 Arm64OperandGenerator g(this); |
147 Node* base = node->InputAt(0); | 148 Node* base = node->InputAt(0); |
148 Node* index = node->InputAt(1); | 149 Node* index = node->InputAt(1); |
149 | 150 |
150 InstructionOperand* result = rep == kRepFloat64 | 151 InstructionOperand* result = rep == kRepFloat64 |
151 ? g.DefineAsDoubleRegister(node) | 152 ? g.DefineAsDoubleRegister(node) |
152 : g.DefineAsRegister(node); | 153 : g.DefineAsRegister(node); |
153 | 154 |
154 ArchOpcode opcode; | 155 ArchOpcode opcode; |
155 // TODO(titzer): signed/unsigned small loads | 156 // TODO(titzer): signed/unsigned small loads |
156 switch (rep) { | 157 switch (rep) { |
157 case kRepFloat64: | 158 case kRepFloat64: |
158 opcode = kArm64Float64Load; | 159 opcode = kArm64LdrD; |
159 break; | 160 break; |
160 case kRepBit: // Fall through. | 161 case kRepBit: // Fall through. |
161 case kRepWord8: | 162 case kRepWord8: |
162 opcode = kArm64LoadWord8; | 163 opcode = typ == kTypeInt32 ? kArm64Ldrsb : kArm64Ldrb; |
163 break; | 164 break; |
164 case kRepWord16: | 165 case kRepWord16: |
165 opcode = kArm64LoadWord16; | 166 opcode = typ == kTypeInt32 ? kArm64Ldrsh : kArm64Ldrh; |
166 break; | 167 break; |
167 case kRepWord32: | 168 case kRepWord32: |
168 opcode = kArm64LoadWord32; | 169 opcode = kArm64LdrW; |
169 break; | 170 break; |
170 case kRepTagged: // Fall through. | 171 case kRepTagged: // Fall through. |
171 case kRepWord64: | 172 case kRepWord64: |
172 opcode = kArm64LoadWord64; | 173 opcode = kArm64Ldr; |
173 break; | 174 break; |
174 default: | 175 default: |
175 UNREACHABLE(); | 176 UNREACHABLE(); |
176 return; | 177 return; |
177 } | 178 } |
178 if (g.CanBeImmediate(index, kLoadStoreImm)) { | 179 if (g.CanBeImmediate(index, kLoadStoreImm)) { |
179 Emit(opcode | AddressingModeField::encode(kMode_MRI), result, | 180 Emit(opcode | AddressingModeField::encode(kMode_MRI), result, |
180 g.UseRegister(base), g.UseImmediate(index)); | 181 g.UseRegister(base), g.UseImmediate(index)); |
181 } else if (g.CanBeImmediate(base, kLoadStoreImm)) { | |
182 Emit(opcode | AddressingModeField::encode(kMode_MRI), result, | |
183 g.UseRegister(index), g.UseImmediate(base)); | |
184 } else { | 182 } else { |
185 Emit(opcode | AddressingModeField::encode(kMode_MRR), result, | 183 Emit(opcode | AddressingModeField::encode(kMode_MRR), result, |
186 g.UseRegister(base), g.UseRegister(index)); | 184 g.UseRegister(base), g.UseRegister(index)); |
187 } | 185 } |
188 } | 186 } |
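
For readers outside the V8 tree, here is a minimal standalone sketch of the selection rule the new VisitLoad encodes; the enums below merely mirror the names used in the patch and are not V8's real definitions. The representation picks the access width, and for the sub-word cases the type decides between a sign-extending load (Ldrsb/Ldrsh) and a zero-extending one (Ldrb/Ldrh):

    // Illustrative only: placeholder enums, not V8's MachineType/ArchOpcode.
    enum MachineRep { kRepBit, kRepWord8, kRepWord16, kRepWord32, kRepWord64,
                      kRepTagged, kRepFloat64 };
    enum MachineTyp { kTypeInt32, kTypeUint32 };
    enum Arm64Load  { kArm64Ldrsb, kArm64Ldrb, kArm64Ldrsh, kArm64Ldrh,
                      kArm64LdrW, kArm64Ldr, kArm64LdrD };

    // Same decision table as the new switch in VisitLoad: width comes from
    // the representation, signedness of the extension from the type.
    static Arm64Load SelectLoadOpcode(MachineRep rep, MachineTyp typ) {
      switch (rep) {
        case kRepFloat64: return kArm64LdrD;
        case kRepBit:     // fall through
        case kRepWord8:   return typ == kTypeInt32 ? kArm64Ldrsb : kArm64Ldrb;
        case kRepWord16:  return typ == kTypeInt32 ? kArm64Ldrsh : kArm64Ldrh;
        case kRepWord32:  return kArm64LdrW;
        case kRepTagged:  // fall through
        case kRepWord64:  return kArm64Ldr;
      }
      return kArm64Ldr;  // not reached: every representation is handled above
    }

For example, SelectLoadOpcode(kRepWord8, kTypeInt32) yields kArm64Ldrsb, matching the new kRepWord8 case, while a non-Int32-typed 8-bit load falls back to the zero-extending kArm64Ldrb.
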
189 | 187 |
190 | 188 |
191 void InstructionSelector::VisitStore(Node* node) { | 189 void InstructionSelector::VisitStore(Node* node) { |
192 Arm64OperandGenerator g(this); | 190 Arm64OperandGenerator g(this); |
193 Node* base = node->InputAt(0); | 191 Node* base = node->InputAt(0); |
(...skipping 16 matching lines...)
210 DCHECK_EQ(kNoWriteBarrier, store_rep.write_barrier_kind); | 208 DCHECK_EQ(kNoWriteBarrier, store_rep.write_barrier_kind); |
211 InstructionOperand* val; | 209 InstructionOperand* val; |
212 if (rep == kRepFloat64) { | 210 if (rep == kRepFloat64) { |
213 val = g.UseDoubleRegister(value); | 211 val = g.UseDoubleRegister(value); |
214 } else { | 212 } else { |
215 val = g.UseRegister(value); | 213 val = g.UseRegister(value); |
216 } | 214 } |
217 ArchOpcode opcode; | 215 ArchOpcode opcode; |
218 switch (rep) { | 216 switch (rep) { |
219 case kRepFloat64: | 217 case kRepFloat64: |
220 opcode = kArm64Float64Store; | 218 opcode = kArm64StrD; |
221 break; | 219 break; |
222 case kRepBit: // Fall through. | 220 case kRepBit: // Fall through. |
223 case kRepWord8: | 221 case kRepWord8: |
224 opcode = kArm64StoreWord8; | 222 opcode = kArm64Strb; |
225 break; | 223 break; |
226 case kRepWord16: | 224 case kRepWord16: |
227 opcode = kArm64StoreWord16; | 225 opcode = kArm64Strh; |
228 break; | 226 break; |
229 case kRepWord32: | 227 case kRepWord32: |
230 opcode = kArm64StoreWord32; | 228 opcode = kArm64StrW; |
231 break; | 229 break; |
232 case kRepTagged: // Fall through. | 230 case kRepTagged: // Fall through. |
233 case kRepWord64: | 231 case kRepWord64: |
234 opcode = kArm64StoreWord64; | 232 opcode = kArm64Str; |
235 break; | 233 break; |
236 default: | 234 default: |
237 UNREACHABLE(); | 235 UNREACHABLE(); |
238 return; | 236 return; |
239 } | 237 } |
240 if (g.CanBeImmediate(index, kLoadStoreImm)) { | 238 if (g.CanBeImmediate(index, kLoadStoreImm)) { |
241 Emit(opcode | AddressingModeField::encode(kMode_MRI), NULL, | 239 Emit(opcode | AddressingModeField::encode(kMode_MRI), NULL, |
242 g.UseRegister(base), g.UseImmediate(index), val); | 240 g.UseRegister(base), g.UseImmediate(index), val); |
243 } else if (g.CanBeImmediate(base, kLoadStoreImm)) { | |
244 Emit(opcode | AddressingModeField::encode(kMode_MRI), NULL, | |
245 g.UseRegister(index), g.UseImmediate(base), val); | |
246 } else { | 241 } else { |
247 Emit(opcode | AddressingModeField::encode(kMode_MRR), NULL, | 242 Emit(opcode | AddressingModeField::encode(kMode_MRR), NULL, |
248 g.UseRegister(base), g.UseRegister(index), val); | 243 g.UseRegister(base), g.UseRegister(index), val); |
249 } | 244 } |
250 } | 245 } |
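
The store side is symmetric. A minimal standalone sketch, reusing the placeholder MachineRep enum from the sketch above (again, not V8's API): the representation alone selects the ARM64 store, since stores need no sign or zero extension, and kRepFloat64 values are sourced from a double register.

    // Illustrative only: mirrors the opcode names in the patch.
    enum Arm64Store { kArm64Strb, kArm64Strh, kArm64StrW, kArm64Str,
                      kArm64StrD };

    // Same decision table as the new switch in VisitStore.
    static Arm64Store SelectStoreOpcode(MachineRep rep) {
      switch (rep) {
        case kRepFloat64: return kArm64StrD;
        case kRepBit:     // fall through
        case kRepWord8:   return kArm64Strb;
        case kRepWord16:  return kArm64Strh;
        case kRepWord32:  return kArm64StrW;
        case kRepTagged:  // fall through
        case kRepWord64:  return kArm64Str;
      }
      return kArm64Str;  // not reached
    }

Both visitors then choose the addressing mode the same way: kMode_MRI (base register plus immediate offset) when the index fits the load/store immediate range, otherwise kMode_MRR (base register plus index register).
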
251 | 246 |
252 | 247 |
253 void InstructionSelector::VisitWord32And(Node* node) { | 248 void InstructionSelector::VisitWord32And(Node* node) { |
254 VisitBinop(this, node, kArm64And32, kLogical32Imm); | 249 VisitBinop(this, node, kArm64And32, kLogical32Imm); |
255 } | 250 } |
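
The binary-operation visitors, such as VisitWord32And above, all funnel into the shared VisitBinop helper shown near the top of the file, passing the target opcode together with an immediate mode (kLogical32Imm here, kLoadStoreImm for memory offsets) that constrains which constants may be encoded directly. A minimal standalone sketch of that dispatch pattern, with placeholder types rather than V8's instruction-selector API:

    // Illustrative only: stand-ins for V8's ArchOpcode/ImmediateMode types.
    enum ArchOpcode    { kArm64And32 };
    enum ImmediateMode { kLogical32Imm };
    struct FlagsContinuation {};  // default-constructed: flags not consumed

    static void VisitBinopSketch(ArchOpcode opcode, ImmediateMode operand_mode,
                                 FlagsContinuation* cont) {
      // The real helper picks register vs. immediate inputs based on
      // operand_mode and emits one instruction, optionally combined with the
      // flags use described by cont.
      (void)opcode; (void)operand_mode; (void)cont;
    }

    // The overload without a continuation supplies a default one, so plain
    // visitors stay one-liners:
    static void VisitBinopSketch(ArchOpcode opcode, ImmediateMode operand_mode) {
      FlagsContinuation cont;
      VisitBinopSketch(opcode, operand_mode, &cont);
    }

    static void VisitWord32AndSketch() {
      VisitBinopSketch(kArm64And32, kLogical32Imm);
    }
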
(...skipping 422 matching lines...)
678 // Caller clean up of stack for C-style calls. | 673 // Caller clean up of stack for C-style calls. |
679 if (is_c_frame && aligned_push_count > 0) { | 674 if (is_c_frame && aligned_push_count > 0) { |
680 DCHECK(deoptimization == NULL && continuation == NULL); | 675 DCHECK(deoptimization == NULL && continuation == NULL); |
681 Emit(kArm64Drop | MiscField::encode(aligned_push_count), NULL); | 676 Emit(kArm64Drop | MiscField::encode(aligned_push_count), NULL); |
682 } | 677 } |
683 } | 678 } |
684 | 679 |
685 } // namespace compiler | 680 } // namespace compiler |
686 } // namespace internal | 681 } // namespace internal |
687 } // namespace v8 | 682 } // namespace v8 |