OLD | NEW |
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/compiler/instruction-selector-impl.h" | 5 #include "src/compiler/instruction-selector-impl.h" |
6 #include "src/compiler/node-matchers.h" | 6 #include "src/compiler/node-matchers.h" |
7 | 7 |
8 namespace v8 { | 8 namespace v8 { |
9 namespace internal { | 9 namespace internal { |
10 namespace compiler { | 10 namespace compiler { |
11 | 11 |
12 enum ImmediateMode { | 12 enum ImmediateMode { |
13 kArithmeticImm, // 12 bit unsigned immediate shifted left 0 or 12 bits | 13 kArithmeticImm, // 12 bit unsigned immediate shifted left 0 or 12 bits |
14 kShift32Imm, // 0 - 31 | 14 kShift32Imm, // 0 - 31 |
15 kShift64Imm, // 0 - 63 | 15 kShift64Imm, // 0 - 63 |
16 kLogical32Imm, | 16 kLogical32Imm, |
17 kLogical64Imm, | 17 kLogical64Imm, |
18 kLoadStoreImm, // unsigned 9 bit or signed 7 bit | 18 kLoadStoreImm8, // signed 9 bit or 12 bit unsigned scaled by access size |
| 19 kLoadStoreImm16, |
| 20 kLoadStoreImm32, |
| 21 kLoadStoreImm64, |
19 kNoImmediate | 22 kNoImmediate |
20 }; | 23 }; |
21 | 24 |
22 | 25 |
23 // Adds Arm64-specific methods for generating operands. | 26 // Adds Arm64-specific methods for generating operands. |
24 class Arm64OperandGenerator FINAL : public OperandGenerator { | 27 class Arm64OperandGenerator FINAL : public OperandGenerator { |
25 public: | 28 public: |
26 explicit Arm64OperandGenerator(InstructionSelector* selector) | 29 explicit Arm64OperandGenerator(InstructionSelector* selector) |
27 : OperandGenerator(selector) {} | 30 : OperandGenerator(selector) {} |
28 | 31 |
(...skipping 18 matching lines...) |
47 case kLogical64Imm: | 50 case kLogical64Imm: |
48 return Assembler::IsImmLogical(static_cast<uint64_t>(value), 64, | 51 return Assembler::IsImmLogical(static_cast<uint64_t>(value), 64, |
49 &ignored, &ignored, &ignored); | 52 &ignored, &ignored, &ignored); |
50 case kArithmeticImm: | 53 case kArithmeticImm: |
51 // TODO(dcarney): -values can be handled by instruction swapping | 54 // TODO(dcarney): -values can be handled by instruction swapping |
52 return Assembler::IsImmAddSub(value); | 55 return Assembler::IsImmAddSub(value); |
53 case kShift32Imm: | 56 case kShift32Imm: |
54 return 0 <= value && value < 32; | 57 return 0 <= value && value < 32; |
55 case kShift64Imm: | 58 case kShift64Imm: |
56 return 0 <= value && value < 64; | 59 return 0 <= value && value < 64; |
57 case kLoadStoreImm: | 60 case kLoadStoreImm8: |
58 return (0 <= value && value < (1 << 9)) || | 61 return IsLoadStoreImmediate(value, LSByte); |
59 (-(1 << 6) <= value && value < (1 << 6)); | 62 case kLoadStoreImm16: |
| 63 return IsLoadStoreImmediate(value, LSHalfword); |
| 64 case kLoadStoreImm32: |
| 65 return IsLoadStoreImmediate(value, LSWord); |
| 66 case kLoadStoreImm64: |
| 67 return IsLoadStoreImmediate(value, LSDoubleWord); |
60 case kNoImmediate: | 68 case kNoImmediate: |
61 return false; | 69 return false; |
62 } | 70 } |
63 return false; | 71 return false; |
64 } | 72 } |
| 73 |
| 74 private: |
| 75 bool IsLoadStoreImmediate(int64_t value, LSDataSize size) { |
| 76 return Assembler::IsImmLSScaled(value, size) || |
| 77 Assembler::IsImmLSUnscaled(value); |
| 78 } |
65 }; | 79 }; |
66 | 80 |
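The new per-size immediate modes and the IsLoadStoreImmediate helper reflect the two offset forms an ARM64 load/store instruction accepts, which Assembler::IsImmLSScaled and Assembler::IsImmLSUnscaled test in the real assembler. As a rough standalone sketch only (not V8 code, and with a hypothetical function name), the check amounts to:

#include <cstdint>

// Sketch of the encodability test; size_log2 is log2 of the access size in
// bytes (0 = byte, 1 = halfword, 2 = word, 3 = doubleword), matching the
// kLoadStoreImm8/16/32/64 modes above.
static bool IsEncodableLoadStoreOffset(int64_t offset, unsigned size_log2) {
  // Unscaled form (LDUR/STUR): signed 9-bit byte offset.
  if (offset >= -256 && offset <= 255) return true;
  // Scaled form (LDR/STR with unsigned offset): 12-bit immediate implicitly
  // multiplied by the access size, so the offset must be a non-negative
  // multiple of the access size below 4096 * size.
  int64_t size = int64_t{1} << size_log2;
  return offset >= 0 && (offset % size) == 0 && (offset / size) < 4096;
}

For an 8-byte access, for example, offsets -8 and 32760 (= 4095 * 8) are encodable, while 32761 is not and forces the register-register addressing mode.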
67 | 81 |
68 static void VisitRRR(InstructionSelector* selector, ArchOpcode opcode, | 82 static void VisitRRR(InstructionSelector* selector, ArchOpcode opcode, |
69 Node* node) { | 83 Node* node) { |
70 Arm64OperandGenerator g(selector); | 84 Arm64OperandGenerator g(selector); |
71 selector->Emit(opcode, g.DefineAsRegister(node), | 85 selector->Emit(opcode, g.DefineAsRegister(node), |
72 g.UseRegister(node->InputAt(0)), | 86 g.UseRegister(node->InputAt(0)), |
73 g.UseRegister(node->InputAt(1))); | 87 g.UseRegister(node->InputAt(1))); |
74 } | 88 } |
(...skipping 60 matching lines...) |
135 } | 149 } |
136 | 150 |
137 | 151 |
138 void InstructionSelector::VisitLoad(Node* node) { | 152 void InstructionSelector::VisitLoad(Node* node) { |
139 MachineType rep = RepresentationOf(OpParameter<LoadRepresentation>(node)); | 153 MachineType rep = RepresentationOf(OpParameter<LoadRepresentation>(node)); |
140 MachineType typ = TypeOf(OpParameter<LoadRepresentation>(node)); | 154 MachineType typ = TypeOf(OpParameter<LoadRepresentation>(node)); |
141 Arm64OperandGenerator g(this); | 155 Arm64OperandGenerator g(this); |
142 Node* base = node->InputAt(0); | 156 Node* base = node->InputAt(0); |
143 Node* index = node->InputAt(1); | 157 Node* index = node->InputAt(1); |
144 ArchOpcode opcode; | 158 ArchOpcode opcode; |
| 159 ImmediateMode immediate_mode = kNoImmediate; |
145 switch (rep) { | 160 switch (rep) { |
146 case kRepFloat32: | 161 case kRepFloat32: |
147 opcode = kArm64LdrS; | 162 opcode = kArm64LdrS; |
| 163 immediate_mode = kLoadStoreImm32; |
148 break; | 164 break; |
149 case kRepFloat64: | 165 case kRepFloat64: |
150 opcode = kArm64LdrD; | 166 opcode = kArm64LdrD; |
| 167 immediate_mode = kLoadStoreImm64; |
151 break; | 168 break; |
152 case kRepBit: // Fall through. | 169 case kRepBit: // Fall through. |
153 case kRepWord8: | 170 case kRepWord8: |
154 opcode = typ == kTypeInt32 ? kArm64Ldrsb : kArm64Ldrb; | 171 opcode = typ == kTypeInt32 ? kArm64Ldrsb : kArm64Ldrb; |
| 172 immediate_mode = kLoadStoreImm8; |
155 break; | 173 break; |
156 case kRepWord16: | 174 case kRepWord16: |
157 opcode = typ == kTypeInt32 ? kArm64Ldrsh : kArm64Ldrh; | 175 opcode = typ == kTypeInt32 ? kArm64Ldrsh : kArm64Ldrh; |
| 176 immediate_mode = kLoadStoreImm16; |
158 break; | 177 break; |
159 case kRepWord32: | 178 case kRepWord32: |
160 opcode = kArm64LdrW; | 179 opcode = kArm64LdrW; |
| 180 immediate_mode = kLoadStoreImm32; |
161 break; | 181 break; |
162 case kRepTagged: // Fall through. | 182 case kRepTagged: // Fall through. |
163 case kRepWord64: | 183 case kRepWord64: |
164 opcode = kArm64Ldr; | 184 opcode = kArm64Ldr; |
| 185 immediate_mode = kLoadStoreImm64; |
165 break; | 186 break; |
166 default: | 187 default: |
167 UNREACHABLE(); | 188 UNREACHABLE(); |
168 return; | 189 return; |
169 } | 190 } |
170 if (g.CanBeImmediate(index, kLoadStoreImm)) { | 191 if (g.CanBeImmediate(index, immediate_mode)) { |
171 Emit(opcode | AddressingModeField::encode(kMode_MRI), | 192 Emit(opcode | AddressingModeField::encode(kMode_MRI), |
172 g.DefineAsRegister(node), g.UseRegister(base), g.UseImmediate(index)); | 193 g.DefineAsRegister(node), g.UseRegister(base), g.UseImmediate(index)); |
173 } else { | 194 } else { |
174 Emit(opcode | AddressingModeField::encode(kMode_MRR), | 195 Emit(opcode | AddressingModeField::encode(kMode_MRR), |
175 g.DefineAsRegister(node), g.UseRegister(base), g.UseRegister(index)); | 196 g.DefineAsRegister(node), g.UseRegister(base), g.UseRegister(index)); |
176 } | 197 } |
177 } | 198 } |
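To make the effect of the per-size modes concrete (illustrative numbers, not taken from the patch): a kRepWord32 load whose index is the constant 4092 satisfies kLoadStoreImm32 (4092 = 1023 * 4, within the scaled 12-bit range), so the kMode_MRI form with an immediate offset is selected; with a constant index of 4093 (not a multiple of 4 and outside the signed 9-bit unscaled range) the selector falls back to kMode_MRR and keeps the index in a register.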
178 | 199 |
179 | 200 |
180 void InstructionSelector::VisitStore(Node* node) { | 201 void InstructionSelector::VisitStore(Node* node) { |
(...skipping 10 matching lines...) |
191 // and pass them here instead of using fixed regs | 212 // and pass them here instead of using fixed regs |
192 // TODO(dcarney): handle immediate indices. | 213 // TODO(dcarney): handle immediate indices. |
193 InstructionOperand* temps[] = {g.TempRegister(x11), g.TempRegister(x12)}; | 214 InstructionOperand* temps[] = {g.TempRegister(x11), g.TempRegister(x12)}; |
194 Emit(kArm64StoreWriteBarrier, NULL, g.UseFixed(base, x10), | 215 Emit(kArm64StoreWriteBarrier, NULL, g.UseFixed(base, x10), |
195 g.UseFixed(index, x11), g.UseFixed(value, x12), arraysize(temps), | 216 g.UseFixed(index, x11), g.UseFixed(value, x12), arraysize(temps), |
196 temps); | 217 temps); |
197 return; | 218 return; |
198 } | 219 } |
199 DCHECK_EQ(kNoWriteBarrier, store_rep.write_barrier_kind()); | 220 DCHECK_EQ(kNoWriteBarrier, store_rep.write_barrier_kind()); |
200 ArchOpcode opcode; | 221 ArchOpcode opcode; |
| 222 ImmediateMode immediate_mode = kNoImmediate; |
201 switch (rep) { | 223 switch (rep) { |
202 case kRepFloat32: | 224 case kRepFloat32: |
203 opcode = kArm64StrS; | 225 opcode = kArm64StrS; |
| 226 immediate_mode = kLoadStoreImm32; |
204 break; | 227 break; |
205 case kRepFloat64: | 228 case kRepFloat64: |
206 opcode = kArm64StrD; | 229 opcode = kArm64StrD; |
| 230 immediate_mode = kLoadStoreImm64; |
207 break; | 231 break; |
208 case kRepBit: // Fall through. | 232 case kRepBit: // Fall through. |
209 case kRepWord8: | 233 case kRepWord8: |
210 opcode = kArm64Strb; | 234 opcode = kArm64Strb; |
| 235 immediate_mode = kLoadStoreImm8; |
211 break; | 236 break; |
212 case kRepWord16: | 237 case kRepWord16: |
213 opcode = kArm64Strh; | 238 opcode = kArm64Strh; |
| 239 immediate_mode = kLoadStoreImm16; |
214 break; | 240 break; |
215 case kRepWord32: | 241 case kRepWord32: |
216 opcode = kArm64StrW; | 242 opcode = kArm64StrW; |
| 243 immediate_mode = kLoadStoreImm32; |
217 break; | 244 break; |
218 case kRepTagged: // Fall through. | 245 case kRepTagged: // Fall through. |
219 case kRepWord64: | 246 case kRepWord64: |
220 opcode = kArm64Str; | 247 opcode = kArm64Str; |
| 248 immediate_mode = kLoadStoreImm64; |
221 break; | 249 break; |
222 default: | 250 default: |
223 UNREACHABLE(); | 251 UNREACHABLE(); |
224 return; | 252 return; |
225 } | 253 } |
226 if (g.CanBeImmediate(index, kLoadStoreImm)) { | 254 if (g.CanBeImmediate(index, immediate_mode)) { |
227 Emit(opcode | AddressingModeField::encode(kMode_MRI), NULL, | 255 Emit(opcode | AddressingModeField::encode(kMode_MRI), NULL, |
228 g.UseRegister(base), g.UseImmediate(index), g.UseRegister(value)); | 256 g.UseRegister(base), g.UseImmediate(index), g.UseRegister(value)); |
229 } else { | 257 } else { |
230 Emit(opcode | AddressingModeField::encode(kMode_MRR), NULL, | 258 Emit(opcode | AddressingModeField::encode(kMode_MRR), NULL, |
231 g.UseRegister(base), g.UseRegister(index), g.UseRegister(value)); | 259 g.UseRegister(base), g.UseRegister(index), g.UseRegister(value)); |
232 } | 260 } |
233 } | 261 } |
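VisitStore mirrors VisitLoad: the same per-size immediate modes decide whether the constant index can be folded into the store. For a kRepWord16 store, for instance, any multiple of 2 up to 8190 (= 4095 * 2), or any offset in the signed 9-bit range, keeps the kMode_MRI form; anything else uses kMode_MRR. As with the load example above, these numbers are illustrative rather than taken from the patch.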
234 | 262 |
235 | 263 |
236 void InstructionSelector::VisitWord32And(Node* node) { | 264 void InstructionSelector::VisitWord32And(Node* node) { |
(...skipping 424 matching lines...) |
661 // Caller clean up of stack for C-style calls. | 689 // Caller clean up of stack for C-style calls. |
662 if (is_c_frame && aligned_push_count > 0) { | 690 if (is_c_frame && aligned_push_count > 0) { |
663 DCHECK(deoptimization == NULL && continuation == NULL); | 691 DCHECK(deoptimization == NULL && continuation == NULL); |
664 Emit(kArchDrop | MiscField::encode(aligned_push_count), NULL); | 692 Emit(kArchDrop | MiscField::encode(aligned_push_count), NULL); |
665 } | 693 } |
666 } | 694 } |
667 | 695 |
668 } // namespace compiler | 696 } // namespace compiler |
669 } // namespace internal | 697 } // namespace internal |
670 } // namespace v8 | 698 } // namespace v8 |