OLD | NEW |
---|---|
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/base/adapters.h" | 5 #include "src/base/adapters.h" |
6 #include "src/compiler/instruction-selector-impl.h" | 6 #include "src/compiler/instruction-selector-impl.h" |
7 #include "src/compiler/node-matchers.h" | 7 #include "src/compiler/node-matchers.h" |
8 #include "src/compiler/node-properties.h" | 8 #include "src/compiler/node-properties.h" |
9 | 9 |
10 namespace v8 { | 10 namespace v8 { |
11 namespace internal { | 11 namespace internal { |
12 namespace compiler { | 12 namespace compiler { |
13 | 13 |
14 // Adds IA32-specific methods for generating operands. | 14 // Adds X87-specific methods for generating operands. |
15 class IA32OperandGenerator final : public OperandGenerator { | 15 class X87OperandGenerator final : public OperandGenerator { |
16 public: | 16 public: |
17 explicit IA32OperandGenerator(InstructionSelector* selector) | 17 explicit X87OperandGenerator(InstructionSelector* selector) |
18 : OperandGenerator(selector) {} | 18 : OperandGenerator(selector) {} |
19 | 19 |
20 InstructionOperand UseByteRegister(Node* node) { | 20 InstructionOperand UseByteRegister(Node* node) { |
21 // TODO(titzer): encode byte register use constraints. | 21 // TODO(titzer): encode byte register use constraints. |
22 return UseFixed(node, edx); | 22 return UseFixed(node, edx); |
23 } | 23 } |
24 | 24 |
25 InstructionOperand DefineAsByteRegister(Node* node) { | 25 InstructionOperand DefineAsByteRegister(Node* node) { |
26 // TODO(titzer): encode byte register def constraints. | 26 // TODO(titzer): encode byte register def constraints. |
27 return DefineAsRegister(node); | 27 return DefineAsRegister(node); |
28 } | 28 } |
29 | 29 |
30 InstructionOperand CreateImmediate(int imm) { | |
31 return sequence()->AddImmediate(Constant(imm)); | |
32 } | |
33 | |
30 bool CanBeImmediate(Node* node) { | 34 bool CanBeImmediate(Node* node) { |
31 switch (node->opcode()) { | 35 switch (node->opcode()) { |
32 case IrOpcode::kInt32Constant: | 36 case IrOpcode::kInt32Constant: |
33 case IrOpcode::kNumberConstant: | 37 case IrOpcode::kNumberConstant: |
34 case IrOpcode::kExternalConstant: | 38 case IrOpcode::kExternalConstant: |
35 return true; | 39 return true; |
36 case IrOpcode::kHeapConstant: { | 40 case IrOpcode::kHeapConstant: { |
37 // Constants in new space cannot be used as immediates in V8 because | 41 // Constants in new space cannot be used as immediates in V8 because |
38 // the GC does not scan code objects when collecting the new generation. | 42 // the GC does not scan code objects when collecting the new generation. |
39 Unique<HeapObject> value = OpParameter<Unique<HeapObject> >(node); | 43 Unique<HeapObject> value = OpParameter<Unique<HeapObject> >(node); |
(...skipping 78 matching lines...) | |
118 return kMode_MR1; | 122 return kMode_MR1; |
119 } | 123 } |
120 } | 124 } |
121 | 125 |
122 bool CanBeBetterLeftOperand(Node* node) const { | 126 bool CanBeBetterLeftOperand(Node* node) const { |
123 return !selector()->IsLive(node); | 127 return !selector()->IsLive(node); |
124 } | 128 } |
125 }; | 129 }; |
126 | 130 |
127 | 131 |
128 namespace { | |
129 | |
130 void VisitROFloat(InstructionSelector* selector, Node* node, | |
131 ArchOpcode opcode) { | |
132 IA32OperandGenerator g(selector); | |
133 selector->Emit(opcode, g.DefineAsRegister(node), g.Use(node->InputAt(0))); | |
134 } | |
135 | |
136 | |
137 void VisitRRFloat(InstructionSelector* selector, Node* node, | |
138 InstructionCode opcode) { | |
139 IA32OperandGenerator g(selector); | |
140 selector->Emit(opcode, g.DefineAsRegister(node), | |
141 g.UseRegister(node->InputAt(0))); | |
142 } | |
143 | |
144 | |
145 void VisitRROFloat(InstructionSelector* selector, Node* node, | |
146 ArchOpcode avx_opcode, ArchOpcode sse_opcode) { | |
147 IA32OperandGenerator g(selector); | |
148 InstructionOperand operand0 = g.UseRegister(node->InputAt(0)); | |
149 InstructionOperand operand1 = g.Use(node->InputAt(1)); | |
150 if (selector->IsSupported(AVX)) { | |
151 selector->Emit(avx_opcode, g.DefineAsRegister(node), operand0, operand1); | |
152 } else { | |
153 selector->Emit(sse_opcode, g.DefineSameAsFirst(node), operand0, operand1); | |
154 } | |
155 } | |
156 | |
157 | |
158 void VisitFloatUnop(InstructionSelector* selector, Node* node, Node* input, | |
159 ArchOpcode avx_opcode, ArchOpcode sse_opcode) { | |
160 IA32OperandGenerator g(selector); | |
161 if (selector->IsSupported(AVX)) { | |
162 selector->Emit(avx_opcode, g.DefineAsRegister(node), g.Use(input)); | |
163 } else { | |
164 selector->Emit(sse_opcode, g.DefineSameAsFirst(node), g.UseRegister(input)); | |
165 } | |
166 } | |
167 | |
168 | |
169 } // namespace | |
170 | |
171 | |
172 void InstructionSelector::VisitLoad(Node* node) { | 132 void InstructionSelector::VisitLoad(Node* node) { |
173 MachineType rep = RepresentationOf(OpParameter<LoadRepresentation>(node)); | 133 MachineType rep = RepresentationOf(OpParameter<LoadRepresentation>(node)); |
174 MachineType typ = TypeOf(OpParameter<LoadRepresentation>(node)); | 134 MachineType typ = TypeOf(OpParameter<LoadRepresentation>(node)); |
175 | 135 |
176 ArchOpcode opcode; | 136 ArchOpcode opcode; |
177 switch (rep) { | 137 switch (rep) { |
178 case kRepFloat32: | 138 case kRepFloat32: |
179 opcode = kIA32Movss; | 139 opcode = kX87Movss; |
180 break; | 140 break; |
181 case kRepFloat64: | 141 case kRepFloat64: |
182 opcode = kIA32Movsd; | 142 opcode = kX87Movsd; |
183 break; | 143 break; |
184 case kRepBit: // Fall through. | 144 case kRepBit: // Fall through. |
185 case kRepWord8: | 145 case kRepWord8: |
186 opcode = typ == kTypeInt32 ? kIA32Movsxbl : kIA32Movzxbl; | 146 opcode = typ == kTypeInt32 ? kX87Movsxbl : kX87Movzxbl; |
187 break; | 147 break; |
188 case kRepWord16: | 148 case kRepWord16: |
189 opcode = typ == kTypeInt32 ? kIA32Movsxwl : kIA32Movzxwl; | 149 opcode = typ == kTypeInt32 ? kX87Movsxwl : kX87Movzxwl; |
190 break; | 150 break; |
191 case kRepTagged: // Fall through. | 151 case kRepTagged: // Fall through. |
192 case kRepWord32: | 152 case kRepWord32: |
193 opcode = kIA32Movl; | 153 opcode = kX87Movl; |
194 break; | 154 break; |
195 default: | 155 default: |
196 UNREACHABLE(); | 156 UNREACHABLE(); |
197 return; | 157 return; |
198 } | 158 } |
199 | 159 |
200 IA32OperandGenerator g(this); | 160 X87OperandGenerator g(this); |
201 InstructionOperand outputs[1]; | 161 InstructionOperand outputs[1]; |
202 outputs[0] = g.DefineAsRegister(node); | 162 outputs[0] = g.DefineAsRegister(node); |
203 InstructionOperand inputs[3]; | 163 InstructionOperand inputs[3]; |
204 size_t input_count = 0; | 164 size_t input_count = 0; |
205 AddressingMode mode = | 165 AddressingMode mode = |
206 g.GetEffectiveAddressMemoryOperand(node, inputs, &input_count); | 166 g.GetEffectiveAddressMemoryOperand(node, inputs, &input_count); |
207 InstructionCode code = opcode | AddressingModeField::encode(mode); | 167 InstructionCode code = opcode | AddressingModeField::encode(mode); |
208 Emit(code, 1, outputs, input_count, inputs); | 168 Emit(code, 1, outputs, input_count, inputs); |
209 } | 169 } |
210 | 170 |
211 | 171 |
212 void InstructionSelector::VisitStore(Node* node) { | 172 void InstructionSelector::VisitStore(Node* node) { |
213 IA32OperandGenerator g(this); | 173 X87OperandGenerator g(this); |
214 Node* base = node->InputAt(0); | 174 Node* base = node->InputAt(0); |
215 Node* index = node->InputAt(1); | 175 Node* index = node->InputAt(1); |
216 Node* value = node->InputAt(2); | 176 Node* value = node->InputAt(2); |
217 | 177 |
218 StoreRepresentation store_rep = OpParameter<StoreRepresentation>(node); | 178 StoreRepresentation store_rep = OpParameter<StoreRepresentation>(node); |
219 MachineType rep = RepresentationOf(store_rep.machine_type()); | 179 MachineType rep = RepresentationOf(store_rep.machine_type()); |
220 if (store_rep.write_barrier_kind() == kFullWriteBarrier) { | 180 if (store_rep.write_barrier_kind() == kFullWriteBarrier) { |
221 DCHECK_EQ(kRepTagged, rep); | 181 DCHECK_EQ(kRepTagged, rep); |
222 // TODO(dcarney): refactor RecordWrite function to take temp registers | 182 // TODO(dcarney): refactor RecordWrite function to take temp registers |
223 // and pass them here instead of using fixed regs | 183 // and pass them here instead of using fixed regs |
224 if (g.CanBeImmediate(index)) { | 184 if (g.CanBeImmediate(index)) { |
225 InstructionOperand temps[] = {g.TempRegister(ecx), g.TempRegister()}; | 185 InstructionOperand temps[] = {g.TempRegister(ecx), g.TempRegister()}; |
226 Emit(kIA32StoreWriteBarrier, g.NoOutput(), g.UseFixed(base, ebx), | 186 Emit(kX87StoreWriteBarrier, g.NoOutput(), g.UseFixed(base, ebx), |
227 g.UseImmediate(index), g.UseFixed(value, ecx), arraysize(temps), | 187 g.UseImmediate(index), g.UseFixed(value, ecx), arraysize(temps), |
228 temps); | 188 temps); |
229 } else { | 189 } else { |
230 InstructionOperand temps[] = {g.TempRegister(ecx), g.TempRegister(edx)}; | 190 InstructionOperand temps[] = {g.TempRegister(ecx), g.TempRegister(edx)}; |
231 Emit(kIA32StoreWriteBarrier, g.NoOutput(), g.UseFixed(base, ebx), | 191 Emit(kX87StoreWriteBarrier, g.NoOutput(), g.UseFixed(base, ebx), |
232 g.UseFixed(index, ecx), g.UseFixed(value, edx), arraysize(temps), | 192 g.UseFixed(index, ecx), g.UseFixed(value, edx), arraysize(temps), |
233 temps); | 193 temps); |
234 } | 194 } |
235 return; | 195 return; |
236 } | 196 } |
237 DCHECK_EQ(kNoWriteBarrier, store_rep.write_barrier_kind()); | 197 DCHECK_EQ(kNoWriteBarrier, store_rep.write_barrier_kind()); |
238 | 198 |
239 ArchOpcode opcode; | 199 ArchOpcode opcode; |
240 switch (rep) { | 200 switch (rep) { |
241 case kRepFloat32: | 201 case kRepFloat32: |
242 opcode = kIA32Movss; | 202 opcode = kX87Movss; |
243 break; | 203 break; |
244 case kRepFloat64: | 204 case kRepFloat64: |
245 opcode = kIA32Movsd; | 205 opcode = kX87Movsd; |
246 break; | 206 break; |
247 case kRepBit: // Fall through. | 207 case kRepBit: // Fall through. |
248 case kRepWord8: | 208 case kRepWord8: |
249 opcode = kIA32Movb; | 209 opcode = kX87Movb; |
250 break; | 210 break; |
251 case kRepWord16: | 211 case kRepWord16: |
252 opcode = kIA32Movw; | 212 opcode = kX87Movw; |
253 break; | 213 break; |
254 case kRepTagged: // Fall through. | 214 case kRepTagged: // Fall through. |
255 case kRepWord32: | 215 case kRepWord32: |
256 opcode = kIA32Movl; | 216 opcode = kX87Movl; |
257 break; | 217 break; |
258 default: | 218 default: |
259 UNREACHABLE(); | 219 UNREACHABLE(); |
260 return; | 220 return; |
261 } | 221 } |
262 | 222 |
263 InstructionOperand val; | 223 InstructionOperand val; |
264 if (g.CanBeImmediate(value)) { | 224 if (g.CanBeImmediate(value)) { |
265 val = g.UseImmediate(value); | 225 val = g.UseImmediate(value); |
266 } else if (rep == kRepWord8 || rep == kRepBit) { | 226 } else if (rep == kRepWord8 || rep == kRepBit) { |
267 val = g.UseByteRegister(value); | 227 val = g.UseByteRegister(value); |
268 } else { | 228 } else { |
269 val = g.UseRegister(value); | 229 val = g.UseRegister(value); |
270 } | 230 } |
271 | 231 |
272 InstructionOperand inputs[4]; | 232 InstructionOperand inputs[4]; |
273 size_t input_count = 0; | 233 size_t input_count = 0; |
274 AddressingMode mode = | 234 AddressingMode mode = |
275 g.GetEffectiveAddressMemoryOperand(node, inputs, &input_count); | 235 g.GetEffectiveAddressMemoryOperand(node, inputs, &input_count); |
276 InstructionCode code = opcode | AddressingModeField::encode(mode); | 236 InstructionCode code = opcode | AddressingModeField::encode(mode); |
277 inputs[input_count++] = val; | 237 inputs[input_count++] = val; |
278 Emit(code, 0, static_cast<InstructionOperand*>(NULL), input_count, inputs); | 238 Emit(code, 0, static_cast<InstructionOperand*>(NULL), input_count, inputs); |
279 } | 239 } |
280 | 240 |
281 | 241 |
282 void InstructionSelector::VisitCheckedLoad(Node* node) { | 242 void InstructionSelector::VisitCheckedLoad(Node* node) { |
283 MachineType rep = RepresentationOf(OpParameter<MachineType>(node)); | 243 MachineType rep = RepresentationOf(OpParameter<MachineType>(node)); |
284 MachineType typ = TypeOf(OpParameter<MachineType>(node)); | 244 MachineType typ = TypeOf(OpParameter<MachineType>(node)); |
285 IA32OperandGenerator g(this); | 245 X87OperandGenerator g(this); |
286 Node* const buffer = node->InputAt(0); | 246 Node* const buffer = node->InputAt(0); |
287 Node* const offset = node->InputAt(1); | 247 Node* const offset = node->InputAt(1); |
288 Node* const length = node->InputAt(2); | 248 Node* const length = node->InputAt(2); |
289 ArchOpcode opcode; | 249 ArchOpcode opcode; |
290 switch (rep) { | 250 switch (rep) { |
291 case kRepWord8: | 251 case kRepWord8: |
292 opcode = typ == kTypeInt32 ? kCheckedLoadInt8 : kCheckedLoadUint8; | 252 opcode = typ == kTypeInt32 ? kCheckedLoadInt8 : kCheckedLoadUint8; |
293 break; | 253 break; |
294 case kRepWord16: | 254 case kRepWord16: |
295 opcode = typ == kTypeInt32 ? kCheckedLoadInt16 : kCheckedLoadUint16; | 255 opcode = typ == kTypeInt32 ? kCheckedLoadInt16 : kCheckedLoadUint16; |
(...skipping 21 matching lines...) | |
317 } else { | 277 } else { |
318 Emit(opcode | AddressingModeField::encode(kMode_MR1), | 278 Emit(opcode | AddressingModeField::encode(kMode_MR1), |
319 g.DefineAsRegister(node), offset_operand, length_operand, | 279 g.DefineAsRegister(node), offset_operand, length_operand, |
320 g.UseRegister(buffer), offset_operand); | 280 g.UseRegister(buffer), offset_operand); |
321 } | 281 } |
322 } | 282 } |
323 | 283 |
324 | 284 |
325 void InstructionSelector::VisitCheckedStore(Node* node) { | 285 void InstructionSelector::VisitCheckedStore(Node* node) { |
326 MachineType rep = RepresentationOf(OpParameter<MachineType>(node)); | 286 MachineType rep = RepresentationOf(OpParameter<MachineType>(node)); |
327 IA32OperandGenerator g(this); | 287 X87OperandGenerator g(this); |
328 Node* const buffer = node->InputAt(0); | 288 Node* const buffer = node->InputAt(0); |
329 Node* const offset = node->InputAt(1); | 289 Node* const offset = node->InputAt(1); |
330 Node* const length = node->InputAt(2); | 290 Node* const length = node->InputAt(2); |
331 Node* const value = node->InputAt(3); | 291 Node* const value = node->InputAt(3); |
332 ArchOpcode opcode; | 292 ArchOpcode opcode; |
333 switch (rep) { | 293 switch (rep) { |
334 case kRepWord8: | 294 case kRepWord8: |
335 opcode = kCheckedStoreWord8; | 295 opcode = kCheckedStoreWord8; |
336 break; | 296 break; |
337 case kRepWord16: | 297 case kRepWord16: |
(...skipping 28 matching lines...) | |
366 Emit(opcode | AddressingModeField::encode(kMode_MR1), g.NoOutput(), | 326 Emit(opcode | AddressingModeField::encode(kMode_MR1), g.NoOutput(), |
367 offset_operand, length_operand, value_operand, g.UseRegister(buffer), | 327 offset_operand, length_operand, value_operand, g.UseRegister(buffer), |
368 offset_operand); | 328 offset_operand); |
369 } | 329 } |
370 } | 330 } |
371 | 331 |
372 | 332 |
373 // Shared routine for multiple binary operations. | 333 // Shared routine for multiple binary operations. |
374 static void VisitBinop(InstructionSelector* selector, Node* node, | 334 static void VisitBinop(InstructionSelector* selector, Node* node, |
375 InstructionCode opcode, FlagsContinuation* cont) { | 335 InstructionCode opcode, FlagsContinuation* cont) { |
376 IA32OperandGenerator g(selector); | 336 X87OperandGenerator g(selector); |
377 Int32BinopMatcher m(node); | 337 Int32BinopMatcher m(node); |
378 Node* left = m.left().node(); | 338 Node* left = m.left().node(); |
379 Node* right = m.right().node(); | 339 Node* right = m.right().node(); |
380 InstructionOperand inputs[4]; | 340 InstructionOperand inputs[4]; |
381 size_t input_count = 0; | 341 size_t input_count = 0; |
382 InstructionOperand outputs[2]; | 342 InstructionOperand outputs[2]; |
383 size_t output_count = 0; | 343 size_t output_count = 0; |
384 | 344 |
385 // TODO(turbofan): match complex addressing modes. | 345 // TODO(turbofan): match complex addressing modes. |
386 if (left == right) { | 346 if (left == right) { |
(...skipping 19 matching lines...) | |
406 inputs[input_count++] = g.Use(right); | 366 inputs[input_count++] = g.Use(right); |
407 } | 367 } |
408 | 368 |
409 if (cont->IsBranch()) { | 369 if (cont->IsBranch()) { |
410 inputs[input_count++] = g.Label(cont->true_block()); | 370 inputs[input_count++] = g.Label(cont->true_block()); |
411 inputs[input_count++] = g.Label(cont->false_block()); | 371 inputs[input_count++] = g.Label(cont->false_block()); |
412 } | 372 } |
413 | 373 |
414 outputs[output_count++] = g.DefineSameAsFirst(node); | 374 outputs[output_count++] = g.DefineSameAsFirst(node); |
415 if (cont->IsSet()) { | 375 if (cont->IsSet()) { |
416 outputs[output_count++] = g.DefineAsByteRegister(cont->result()); | 376 outputs[output_count++] = g.DefineAsRegister(cont->result()); |
417 } | 377 } |
418 | 378 |
419 DCHECK_NE(0u, input_count); | 379 DCHECK_NE(0u, input_count); |
420 DCHECK_NE(0u, output_count); | 380 DCHECK_NE(0u, output_count); |
421 DCHECK_GE(arraysize(inputs), input_count); | 381 DCHECK_GE(arraysize(inputs), input_count); |
422 DCHECK_GE(arraysize(outputs), output_count); | 382 DCHECK_GE(arraysize(outputs), output_count); |
423 | 383 |
424 selector->Emit(cont->Encode(opcode), output_count, outputs, input_count, | 384 selector->Emit(cont->Encode(opcode), output_count, outputs, input_count, |
425 inputs); | 385 inputs); |
426 } | 386 } |
427 | 387 |
428 | 388 |
429 // Shared routine for multiple binary operations. | 389 // Shared routine for multiple binary operations. |
430 static void VisitBinop(InstructionSelector* selector, Node* node, | 390 static void VisitBinop(InstructionSelector* selector, Node* node, |
431 InstructionCode opcode) { | 391 InstructionCode opcode) { |
432 FlagsContinuation cont; | 392 FlagsContinuation cont; |
433 VisitBinop(selector, node, opcode, &cont); | 393 VisitBinop(selector, node, opcode, &cont); |
434 } | 394 } |
435 | 395 |
436 | 396 |
437 void InstructionSelector::VisitWord32And(Node* node) { | 397 void InstructionSelector::VisitWord32And(Node* node) { |
438 VisitBinop(this, node, kIA32And); | 398 VisitBinop(this, node, kX87And); |
439 } | 399 } |
440 | 400 |
441 | 401 |
442 void InstructionSelector::VisitWord32Or(Node* node) { | 402 void InstructionSelector::VisitWord32Or(Node* node) { |
443 VisitBinop(this, node, kIA32Or); | 403 VisitBinop(this, node, kX87Or); |
444 } | 404 } |
445 | 405 |
446 | 406 |
447 void InstructionSelector::VisitWord32Xor(Node* node) { | 407 void InstructionSelector::VisitWord32Xor(Node* node) { |
448 IA32OperandGenerator g(this); | 408 X87OperandGenerator g(this); |
449 Int32BinopMatcher m(node); | 409 Int32BinopMatcher m(node); |
450 if (m.right().Is(-1)) { | 410 if (m.right().Is(-1)) { |
451 Emit(kIA32Not, g.DefineSameAsFirst(node), g.UseRegister(m.left().node())); | 411 Emit(kX87Not, g.DefineSameAsFirst(node), g.UseRegister(m.left().node())); |
452 } else { | 412 } else { |
453 VisitBinop(this, node, kIA32Xor); | 413 VisitBinop(this, node, kX87Xor); |
454 } | 414 } |
455 } | 415 } |
456 | 416 |
457 | 417 |
458 // Shared routine for multiple shift operations. | 418 // Shared routine for multiple shift operations. |
459 static inline void VisitShift(InstructionSelector* selector, Node* node, | 419 static inline void VisitShift(InstructionSelector* selector, Node* node, |
460 ArchOpcode opcode) { | 420 ArchOpcode opcode) { |
461 IA32OperandGenerator g(selector); | 421 X87OperandGenerator g(selector); |
462 Node* left = node->InputAt(0); | 422 Node* left = node->InputAt(0); |
463 Node* right = node->InputAt(1); | 423 Node* right = node->InputAt(1); |
464 | 424 |
465 if (g.CanBeImmediate(right)) { | 425 if (g.CanBeImmediate(right)) { |
466 selector->Emit(opcode, g.DefineSameAsFirst(node), g.UseRegister(left), | 426 selector->Emit(opcode, g.DefineSameAsFirst(node), g.UseRegister(left), |
467 g.UseImmediate(right)); | 427 g.UseImmediate(right)); |
468 } else { | 428 } else { |
469 selector->Emit(opcode, g.DefineSameAsFirst(node), g.UseRegister(left), | 429 selector->Emit(opcode, g.DefineSameAsFirst(node), g.UseRegister(left), |
470 g.UseFixed(right, ecx)); | 430 g.UseFixed(right, ecx)); |
471 } | 431 } |
472 } | 432 } |
473 | 433 |
474 | 434 |
475 namespace { | 435 namespace { |
476 | 436 |
477 void VisitMulHigh(InstructionSelector* selector, Node* node, | 437 void VisitMulHigh(InstructionSelector* selector, Node* node, |
478 ArchOpcode opcode) { | 438 ArchOpcode opcode) { |
479 IA32OperandGenerator g(selector); | 439 X87OperandGenerator g(selector); |
480 selector->Emit(opcode, g.DefineAsFixed(node, edx), | 440 selector->Emit(opcode, g.DefineAsFixed(node, edx), |
481 g.UseFixed(node->InputAt(0), eax), | 441 g.UseFixed(node->InputAt(0), eax), |
482 g.UseUniqueRegister(node->InputAt(1))); | 442 g.UseUniqueRegister(node->InputAt(1))); |
483 } | 443 } |
484 | 444 |
485 | 445 |
486 void VisitDiv(InstructionSelector* selector, Node* node, ArchOpcode opcode) { | 446 void VisitDiv(InstructionSelector* selector, Node* node, ArchOpcode opcode) { |
487 IA32OperandGenerator g(selector); | 447 X87OperandGenerator g(selector); |
488 InstructionOperand temps[] = {g.TempRegister(edx)}; | 448 InstructionOperand temps[] = {g.TempRegister(edx)}; |
489 selector->Emit(opcode, g.DefineAsFixed(node, eax), | 449 selector->Emit(opcode, g.DefineAsFixed(node, eax), |
490 g.UseFixed(node->InputAt(0), eax), | 450 g.UseFixed(node->InputAt(0), eax), |
491 g.UseUnique(node->InputAt(1)), arraysize(temps), temps); | 451 g.UseUnique(node->InputAt(1)), arraysize(temps), temps); |
492 } | 452 } |
493 | 453 |
494 | 454 |
495 void VisitMod(InstructionSelector* selector, Node* node, ArchOpcode opcode) { | 455 void VisitMod(InstructionSelector* selector, Node* node, ArchOpcode opcode) { |
496 IA32OperandGenerator g(selector); | 456 X87OperandGenerator g(selector); |
497 selector->Emit(opcode, g.DefineAsFixed(node, edx), | 457 selector->Emit(opcode, g.DefineAsFixed(node, edx), |
498 g.UseFixed(node->InputAt(0), eax), | 458 g.UseFixed(node->InputAt(0), eax), |
499 g.UseUnique(node->InputAt(1))); | 459 g.UseUnique(node->InputAt(1))); |
500 } | 460 } |
501 | 461 |
502 void EmitLea(InstructionSelector* selector, Node* result, Node* index, | 462 void EmitLea(InstructionSelector* selector, Node* result, Node* index, |
503 int scale, Node* base, Node* displacement) { | 463 int scale, Node* base, Node* displacement) { |
504 IA32OperandGenerator g(selector); | 464 X87OperandGenerator g(selector); |
505 InstructionOperand inputs[4]; | 465 InstructionOperand inputs[4]; |
506 size_t input_count = 0; | 466 size_t input_count = 0; |
507 AddressingMode mode = g.GenerateMemoryOperandInputs( | 467 AddressingMode mode = g.GenerateMemoryOperandInputs( |
508 index, scale, base, displacement, inputs, &input_count); | 468 index, scale, base, displacement, inputs, &input_count); |
509 | 469 |
510 DCHECK_NE(0u, input_count); | 470 DCHECK_NE(0u, input_count); |
511 DCHECK_GE(arraysize(inputs), input_count); | 471 DCHECK_GE(arraysize(inputs), input_count); |
512 | 472 |
513 InstructionOperand outputs[1]; | 473 InstructionOperand outputs[1]; |
514 outputs[0] = g.DefineAsRegister(result); | 474 outputs[0] = g.DefineAsRegister(result); |
515 | 475 |
516 InstructionCode opcode = AddressingModeField::encode(mode) | kIA32Lea; | 476 InstructionCode opcode = AddressingModeField::encode(mode) | kX87Lea; |
517 | 477 |
518 selector->Emit(opcode, 1, outputs, input_count, inputs); | 478 selector->Emit(opcode, 1, outputs, input_count, inputs); |
519 } | 479 } |
520 | 480 |
521 } // namespace | 481 } // namespace |
522 | 482 |
523 | 483 |
524 void InstructionSelector::VisitWord32Shl(Node* node) { | 484 void InstructionSelector::VisitWord32Shl(Node* node) { |
525 Int32ScaleMatcher m(node, true); | 485 Int32ScaleMatcher m(node, true); |
526 if (m.matches()) { | 486 if (m.matches()) { |
527 Node* index = node->InputAt(0); | 487 Node* index = node->InputAt(0); |
528 Node* base = m.power_of_two_plus_one() ? index : NULL; | 488 Node* base = m.power_of_two_plus_one() ? index : NULL; |
529 EmitLea(this, node, index, m.scale(), base, NULL); | 489 EmitLea(this, node, index, m.scale(), base, NULL); |
530 return; | 490 return; |
531 } | 491 } |
532 VisitShift(this, node, kIA32Shl); | 492 VisitShift(this, node, kX87Shl); |
533 } | 493 } |
534 | 494 |
535 | 495 |
536 void InstructionSelector::VisitWord32Shr(Node* node) { | 496 void InstructionSelector::VisitWord32Shr(Node* node) { |
537 VisitShift(this, node, kIA32Shr); | 497 VisitShift(this, node, kX87Shr); |
538 } | 498 } |
539 | 499 |
540 | 500 |
541 void InstructionSelector::VisitWord32Sar(Node* node) { | 501 void InstructionSelector::VisitWord32Sar(Node* node) { |
542 VisitShift(this, node, kIA32Sar); | 502 VisitShift(this, node, kX87Sar); |
543 } | 503 } |
544 | 504 |
545 | 505 |
546 void InstructionSelector::VisitWord32Ror(Node* node) { | 506 void InstructionSelector::VisitWord32Ror(Node* node) { |
547 VisitShift(this, node, kIA32Ror); | 507 VisitShift(this, node, kX87Ror); |
548 } | 508 } |
549 | 509 |
550 | 510 |
551 void InstructionSelector::VisitWord32Clz(Node* node) { | 511 void InstructionSelector::VisitWord32Clz(Node* node) { |
552 IA32OperandGenerator g(this); | 512 X87OperandGenerator g(this); |
553 Emit(kIA32Lzcnt, g.DefineAsRegister(node), g.Use(node->InputAt(0))); | 513 Emit(kX87Lzcnt, g.DefineAsRegister(node), g.Use(node->InputAt(0))); |
554 } | 514 } |
555 | 515 |
556 | 516 |
557 void InstructionSelector::VisitInt32Add(Node* node) { | 517 void InstructionSelector::VisitInt32Add(Node* node) { |
558 IA32OperandGenerator g(this); | 518 X87OperandGenerator g(this); |
559 | 519 |
560 // Try to match the Add to a lea pattern | 520 // Try to match the Add to a lea pattern |
561 BaseWithIndexAndDisplacement32Matcher m(node); | 521 BaseWithIndexAndDisplacement32Matcher m(node); |
562 if (m.matches() && | 522 if (m.matches() && |
563 (m.displacement() == NULL || g.CanBeImmediate(m.displacement()))) { | 523 (m.displacement() == NULL || g.CanBeImmediate(m.displacement()))) { |
564 InstructionOperand inputs[4]; | 524 InstructionOperand inputs[4]; |
565 size_t input_count = 0; | 525 size_t input_count = 0; |
566 AddressingMode mode = g.GenerateMemoryOperandInputs( | 526 AddressingMode mode = g.GenerateMemoryOperandInputs( |
567 m.index(), m.scale(), m.base(), m.displacement(), inputs, &input_count); | 527 m.index(), m.scale(), m.base(), m.displacement(), inputs, &input_count); |
568 | 528 |
569 DCHECK_NE(0u, input_count); | 529 DCHECK_NE(0u, input_count); |
570 DCHECK_GE(arraysize(inputs), input_count); | 530 DCHECK_GE(arraysize(inputs), input_count); |
571 | 531 |
572 InstructionOperand outputs[1]; | 532 InstructionOperand outputs[1]; |
573 outputs[0] = g.DefineAsRegister(node); | 533 outputs[0] = g.DefineAsRegister(node); |
574 | 534 |
575 InstructionCode opcode = AddressingModeField::encode(mode) | kIA32Lea; | 535 InstructionCode opcode = AddressingModeField::encode(mode) | kX87Lea; |
576 Emit(opcode, 1, outputs, input_count, inputs); | 536 Emit(opcode, 1, outputs, input_count, inputs); |
577 return; | 537 return; |
578 } | 538 } |
579 | 539 |
580 // No lea pattern match, use add | 540 // No lea pattern match, use add |
581 VisitBinop(this, node, kIA32Add); | 541 VisitBinop(this, node, kX87Add); |
582 } | 542 } |
583 | 543 |
584 | 544 |
585 void InstructionSelector::VisitInt32Sub(Node* node) { | 545 void InstructionSelector::VisitInt32Sub(Node* node) { |
586 IA32OperandGenerator g(this); | 546 X87OperandGenerator g(this); |
587 Int32BinopMatcher m(node); | 547 Int32BinopMatcher m(node); |
588 if (m.left().Is(0)) { | 548 if (m.left().Is(0)) { |
589 Emit(kIA32Neg, g.DefineSameAsFirst(node), g.Use(m.right().node())); | 549 Emit(kX87Neg, g.DefineSameAsFirst(node), g.Use(m.right().node())); |
590 } else { | 550 } else { |
591 VisitBinop(this, node, kIA32Sub); | 551 VisitBinop(this, node, kX87Sub); |
592 } | 552 } |
593 } | 553 } |
594 | 554 |
595 | 555 |
596 void InstructionSelector::VisitInt32Mul(Node* node) { | 556 void InstructionSelector::VisitInt32Mul(Node* node) { |
597 Int32ScaleMatcher m(node, true); | 557 Int32ScaleMatcher m(node, true); |
598 if (m.matches()) { | 558 if (m.matches()) { |
599 Node* index = node->InputAt(0); | 559 Node* index = node->InputAt(0); |
600 Node* base = m.power_of_two_plus_one() ? index : NULL; | 560 Node* base = m.power_of_two_plus_one() ? index : NULL; |
601 EmitLea(this, node, index, m.scale(), base, NULL); | 561 EmitLea(this, node, index, m.scale(), base, NULL); |
602 return; | 562 return; |
603 } | 563 } |
604 IA32OperandGenerator g(this); | 564 X87OperandGenerator g(this); |
605 Node* left = node->InputAt(0); | 565 Node* left = node->InputAt(0); |
606 Node* right = node->InputAt(1); | 566 Node* right = node->InputAt(1); |
607 if (g.CanBeImmediate(right)) { | 567 if (g.CanBeImmediate(right)) { |
608 Emit(kIA32Imul, g.DefineAsRegister(node), g.Use(left), | 568 Emit(kX87Imul, g.DefineAsRegister(node), g.Use(left), |
609 g.UseImmediate(right)); | 569 g.UseImmediate(right)); |
610 } else { | 570 } else { |
611 if (g.CanBeBetterLeftOperand(right)) { | 571 if (g.CanBeBetterLeftOperand(right)) { |
612 std::swap(left, right); | 572 std::swap(left, right); |
613 } | 573 } |
614 Emit(kIA32Imul, g.DefineSameAsFirst(node), g.UseRegister(left), | 574 Emit(kX87Imul, g.DefineSameAsFirst(node), g.UseRegister(left), |
615 g.Use(right)); | 575 g.Use(right)); |
616 } | 576 } |
617 } | 577 } |
618 | 578 |
619 | 579 |
620 void InstructionSelector::VisitInt32MulHigh(Node* node) { | 580 void InstructionSelector::VisitInt32MulHigh(Node* node) { |
621 VisitMulHigh(this, node, kIA32ImulHigh); | 581 VisitMulHigh(this, node, kX87ImulHigh); |
622 } | 582 } |
623 | 583 |
624 | 584 |
625 void InstructionSelector::VisitUint32MulHigh(Node* node) { | 585 void InstructionSelector::VisitUint32MulHigh(Node* node) { |
626 VisitMulHigh(this, node, kIA32UmulHigh); | 586 VisitMulHigh(this, node, kX87UmulHigh); |
627 } | 587 } |
628 | 588 |
629 | 589 |
630 void InstructionSelector::VisitInt32Div(Node* node) { | 590 void InstructionSelector::VisitInt32Div(Node* node) { |
631 VisitDiv(this, node, kIA32Idiv); | 591 VisitDiv(this, node, kX87Idiv); |
632 } | 592 } |
633 | 593 |
634 | 594 |
635 void InstructionSelector::VisitUint32Div(Node* node) { | 595 void InstructionSelector::VisitUint32Div(Node* node) { |
636 VisitDiv(this, node, kIA32Udiv); | 596 VisitDiv(this, node, kX87Udiv); |
637 } | 597 } |
638 | 598 |
639 | 599 |
640 void InstructionSelector::VisitInt32Mod(Node* node) { | 600 void InstructionSelector::VisitInt32Mod(Node* node) { |
641 VisitMod(this, node, kIA32Idiv); | 601 VisitMod(this, node, kX87Idiv); |
642 } | 602 } |
643 | 603 |
644 | 604 |
645 void InstructionSelector::VisitUint32Mod(Node* node) { | 605 void InstructionSelector::VisitUint32Mod(Node* node) { |
646 VisitMod(this, node, kIA32Udiv); | 606 VisitMod(this, node, kX87Udiv); |
647 } | 607 } |
648 | 608 |
649 | 609 |
650 void InstructionSelector::VisitChangeFloat32ToFloat64(Node* node) { | 610 void InstructionSelector::VisitChangeFloat32ToFloat64(Node* node) { |
651 IA32OperandGenerator g(this); | 611 X87OperandGenerator g(this); |
652 Emit(kSSEFloat32ToFloat64, g.DefineAsRegister(node), g.Use(node->InputAt(0))); | 612 Emit(kX87Float32ToFloat64, g.DefineAsFixed(node, stX_0), |
613 g.Use(node->InputAt(0))); | |
653 } | 614 } |
654 | 615 |
655 | 616 |
656 void InstructionSelector::VisitChangeInt32ToFloat64(Node* node) { | 617 void InstructionSelector::VisitChangeInt32ToFloat64(Node* node) { |
657 IA32OperandGenerator g(this); | 618 X87OperandGenerator g(this); |
658 Emit(kSSEInt32ToFloat64, g.DefineAsRegister(node), g.Use(node->InputAt(0))); | 619 Emit(kX87Int32ToFloat64, g.DefineAsFixed(node, stX_0), |
620 g.Use(node->InputAt(0))); | |
659 } | 621 } |
660 | 622 |
661 | 623 |
662 void InstructionSelector::VisitChangeUint32ToFloat64(Node* node) { | 624 void InstructionSelector::VisitChangeUint32ToFloat64(Node* node) { |
663 IA32OperandGenerator g(this); | 625 X87OperandGenerator g(this); |
664 Emit(kSSEUint32ToFloat64, g.DefineAsRegister(node), g.Use(node->InputAt(0))); | 626 Emit(kX87Uint32ToFloat64, g.DefineAsFixed(node, stX_0), |
627 g.UseRegister(node->InputAt(0))); | |
665 } | 628 } |
666 | 629 |
667 | 630 |
668 void InstructionSelector::VisitChangeFloat64ToInt32(Node* node) { | 631 void InstructionSelector::VisitChangeFloat64ToInt32(Node* node) { |
669 IA32OperandGenerator g(this); | 632 X87OperandGenerator g(this); |
670 Emit(kSSEFloat64ToInt32, g.DefineAsRegister(node), g.Use(node->InputAt(0))); | 633 Emit(kX87Float64ToInt32, g.DefineAsRegister(node), |
634 g.Use(node->InputAt(0))); | |
671 } | 635 } |
672 | 636 |
673 | 637 |
674 void InstructionSelector::VisitChangeFloat64ToUint32(Node* node) { | 638 void InstructionSelector::VisitChangeFloat64ToUint32(Node* node) { |
675 IA32OperandGenerator g(this); | 639 X87OperandGenerator g(this); |
676 Emit(kSSEFloat64ToUint32, g.DefineAsRegister(node), g.Use(node->InputAt(0))); | 640 Emit(kX87Float64ToUint32, g.DefineAsRegister(node), |
641 g.Use(node->InputAt(0))); | |
677 } | 642 } |
678 | 643 |
679 | 644 |
680 void InstructionSelector::VisitTruncateFloat64ToFloat32(Node* node) { | 645 void InstructionSelector::VisitTruncateFloat64ToFloat32(Node* node) { |
681 IA32OperandGenerator g(this); | 646 X87OperandGenerator g(this); |
682 Emit(kSSEFloat64ToFloat32, g.DefineAsRegister(node), g.Use(node->InputAt(0))); | 647 Emit(kX87Float64ToFloat32, g.DefineAsFixed(node, stX_0), |
648 g.Use(node->InputAt(0))); | |
683 } | 649 } |
684 | 650 |
685 | 651 |
686 void InstructionSelector::VisitFloat32Add(Node* node) { | 652 void InstructionSelector::VisitFloat32Add(Node* node) { |
687 VisitRROFloat(this, node, kAVXFloat32Add, kSSEFloat32Add); | 653 X87OperandGenerator g(this); |
654 Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0))); | |
655 Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(1))); | |
656 Emit(kX87Float32Add, g.DefineAsFixed(node, stX_0), 0, NULL); | |
688 } | 657 } |
689 | 658 |
690 | 659 |
691 void InstructionSelector::VisitFloat64Add(Node* node) { | 660 void InstructionSelector::VisitFloat64Add(Node* node) { |
692 VisitRROFloat(this, node, kAVXFloat64Add, kSSEFloat64Add); | 661 X87OperandGenerator g(this); |
662 Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0))); | |
663 Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(1))); | |
664 Emit(kX87Float64Add, g.DefineAsFixed(node, stX_0), 0, NULL); | |
693 } | 665 } |
694 | 666 |
695 | 667 |
696 void InstructionSelector::VisitFloat32Sub(Node* node) { | 668 void InstructionSelector::VisitFloat32Sub(Node* node) { |
697 IA32OperandGenerator g(this); | 669 X87OperandGenerator g(this); |
698 Float32BinopMatcher m(node); | 670 Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0))); |
699 if (m.left().IsMinusZero()) { | 671 Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(1))); |
700 VisitFloatUnop(this, node, m.right().node(), kAVXFloat32Neg, | 672 Emit(kX87Float32Sub, g.DefineAsFixed(node, stX_0), 0, NULL); |
701 kSSEFloat32Neg); | |
702 return; | |
703 } | |
704 VisitRROFloat(this, node, kAVXFloat32Sub, kSSEFloat32Sub); | |
705 } | 673 } |
706 | 674 |
707 | 675 |
708 void InstructionSelector::VisitFloat64Sub(Node* node) { | 676 void InstructionSelector::VisitFloat64Sub(Node* node) { |
709 IA32OperandGenerator g(this); | 677 X87OperandGenerator g(this); |
710 Float64BinopMatcher m(node); | 678 Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0))); |
711 if (m.left().IsMinusZero()) { | 679 Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(1))); |
712 if (m.right().IsFloat64RoundDown() && | 680 Emit(kX87Float64Sub, g.DefineAsFixed(node, stX_0), 0, NULL); |
713 CanCover(m.node(), m.right().node())) { | |
714 if (m.right().InputAt(0)->opcode() == IrOpcode::kFloat64Sub && | |
715 CanCover(m.right().node(), m.right().InputAt(0))) { | |
716 Float64BinopMatcher mright0(m.right().InputAt(0)); | |
717 if (mright0.left().IsMinusZero()) { | |
718 Emit(kSSEFloat64Round | MiscField::encode(kRoundUp), | |
719 g.DefineAsRegister(node), g.UseRegister(mright0.right().node())); | |
720 return; | |
721 } | |
722 } | |
723 } | |
724 VisitFloatUnop(this, node, m.right().node(), kAVXFloat64Neg, | |
725 kSSEFloat64Neg); | |
726 return; | |
727 } | |
728 VisitRROFloat(this, node, kAVXFloat64Sub, kSSEFloat64Sub); | |
729 } | 681 } |
730 | 682 |
731 | 683 |
732 void InstructionSelector::VisitFloat32Mul(Node* node) { | 684 void InstructionSelector::VisitFloat32Mul(Node* node) { |
733 VisitRROFloat(this, node, kAVXFloat32Mul, kSSEFloat32Mul); | 685 X87OperandGenerator g(this); |
686 Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0))); | |
687 Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(1))); | |
688 Emit(kX87Float32Mul, g.DefineAsFixed(node, stX_0), 0, NULL); | |
734 } | 689 } |
735 | 690 |
736 | 691 |
737 void InstructionSelector::VisitFloat64Mul(Node* node) { | 692 void InstructionSelector::VisitFloat64Mul(Node* node) { |
738 VisitRROFloat(this, node, kAVXFloat64Mul, kSSEFloat64Mul); | 693 X87OperandGenerator g(this); |
694 Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0))); | |
695 Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(1))); | |
696 Emit(kX87Float64Mul, g.DefineAsFixed(node, stX_0), 0, NULL); | |
739 } | 697 } |
740 | 698 |
741 | 699 |
742 void InstructionSelector::VisitFloat32Div(Node* node) { | 700 void InstructionSelector::VisitFloat32Div(Node* node) { |
743 VisitRROFloat(this, node, kAVXFloat32Div, kSSEFloat32Div); | 701 X87OperandGenerator g(this); |
702 Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0))); | |
703 Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(1))); | |
704 Emit(kX87Float32Div, g.DefineAsFixed(node, stX_0), 0, NULL); | |
744 } | 705 } |
745 | 706 |
746 | 707 |
747 void InstructionSelector::VisitFloat64Div(Node* node) { | 708 void InstructionSelector::VisitFloat64Div(Node* node) { |
748 VisitRROFloat(this, node, kAVXFloat64Div, kSSEFloat64Div); | 709 X87OperandGenerator g(this); |
710 Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0))); | |
711 Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(1))); | |
712 Emit(kX87Float64Div, g.DefineAsFixed(node, stX_0), 0, NULL); | |
749 } | 713 } |
750 | 714 |
751 | 715 |
752 void InstructionSelector::VisitFloat64Mod(Node* node) { | 716 void InstructionSelector::VisitFloat64Mod(Node* node) { |
753 IA32OperandGenerator g(this); | 717 X87OperandGenerator g(this); |
754 InstructionOperand temps[] = {g.TempRegister(eax)}; | 718 InstructionOperand temps[] = {g.TempRegister(eax)}; |
755 Emit(kSSEFloat64Mod, g.DefineSameAsFirst(node), | 719 Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0))); |
756 g.UseRegister(node->InputAt(0)), g.UseRegister(node->InputAt(1)), 1, | 720 Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(1))); |
757 temps); | 721 Emit(kX87Float64Mod, g.DefineAsFixed(node, stX_0), 1, temps)->MarkAsCall(); |
758 } | 722 } |
759 | 723 |
760 | 724 |
761 void InstructionSelector::VisitFloat32Max(Node* node) { | 725 void InstructionSelector::VisitFloat32Max(Node* node) { |
762 VisitRROFloat(this, node, kAVXFloat32Max, kSSEFloat32Max); | 726 X87OperandGenerator g(this); |
727 Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0))); | |
728 Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(1))); | |
729 Emit(kX87Float32Max, g.DefineAsFixed(node, stX_0), 0, NULL); | |
763 } | 730 } |
764 | 731 |
765 | 732 |
766 void InstructionSelector::VisitFloat64Max(Node* node) { | 733 void InstructionSelector::VisitFloat64Max(Node* node) { |
767 VisitRROFloat(this, node, kAVXFloat64Max, kSSEFloat64Max); | 734 X87OperandGenerator g(this); |
735 Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0))); | |
736 Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(1))); | |
737 Emit(kX87Float64Max, g.DefineAsFixed(node, stX_0), 0, NULL); | |
768 } | 738 } |
769 | 739 |
770 | 740 |
771 void InstructionSelector::VisitFloat32Min(Node* node) { | 741 void InstructionSelector::VisitFloat32Min(Node* node) { |
772 VisitRROFloat(this, node, kAVXFloat32Min, kSSEFloat32Min); | 742 X87OperandGenerator g(this); |
743 Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0))); | |
744 Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(1))); | |
745 Emit(kX87Float32Min, g.DefineAsFixed(node, stX_0), 0, NULL); | |
773 } | 746 } |
774 | 747 |
775 | 748 |
776 void InstructionSelector::VisitFloat64Min(Node* node) { | 749 void InstructionSelector::VisitFloat64Min(Node* node) { |
777 VisitRROFloat(this, node, kAVXFloat64Min, kSSEFloat64Min); | 750 X87OperandGenerator g(this); |
751 Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0))); | |
752 Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(1))); | |
753 Emit(kX87Float64Min, g.DefineAsFixed(node, stX_0), 0, NULL); | |
778 } | 754 } |
779 | 755 |
780 | 756 |
781 void InstructionSelector::VisitFloat32Abs(Node* node) { | 757 void InstructionSelector::VisitFloat32Abs(Node* node) { |
782 IA32OperandGenerator g(this); | 758 X87OperandGenerator g(this); |
783 VisitFloatUnop(this, node, node->InputAt(0), kAVXFloat32Abs, kSSEFloat32Abs); | 759 Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0))); |
760 Emit(kX87Float32Abs, g.DefineAsFixed(node, stX_0), 0, NULL); | |
784 } | 761 } |
785 | 762 |
786 | 763 |
787 void InstructionSelector::VisitFloat64Abs(Node* node) { | 764 void InstructionSelector::VisitFloat64Abs(Node* node) { |
788 IA32OperandGenerator g(this); | 765 X87OperandGenerator g(this); |
789 VisitFloatUnop(this, node, node->InputAt(0), kAVXFloat64Abs, kSSEFloat64Abs); | 766 Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0))); |
767 Emit(kX87Float64Abs, g.DefineAsFixed(node, stX_0), 0, NULL); | |
790 } | 768 } |
791 | 769 |
792 | 770 |
793 void InstructionSelector::VisitFloat32Sqrt(Node* node) { | 771 void InstructionSelector::VisitFloat32Sqrt(Node* node) { |
794 VisitROFloat(this, node, kSSEFloat32Sqrt); | 772 X87OperandGenerator g(this); |
773 Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0))); | |
774 Emit(kX87Float32Sqrt, g.DefineAsFixed(node, stX_0), 0, NULL); | |
795 } | 775 } |
796 | 776 |
797 | 777 |
798 void InstructionSelector::VisitFloat64Sqrt(Node* node) { | 778 void InstructionSelector::VisitFloat64Sqrt(Node* node) { |
799 VisitROFloat(this, node, kSSEFloat64Sqrt); | 779 X87OperandGenerator g(this); |
780 Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0))); | |
781 Emit(kX87Float64Sqrt, g.DefineAsFixed(node, stX_0), 0, NULL); | |
800 } | 782 } |
801 | 783 |
802 | 784 |
803 void InstructionSelector::VisitFloat64RoundDown(Node* node) { | 785 void InstructionSelector::VisitFloat64RoundDown(Node* node) { |
804 VisitRRFloat(this, node, kSSEFloat64Round | MiscField::encode(kRoundDown)); | 786 X87OperandGenerator g(this); |
787 Emit(kX87Float64Round | MiscField::encode(kRoundDown), | |
788 g.UseFixed(node, stX_0), g.Use(node->InputAt(0))); | |
805 } | 789 } |
806 | 790 |
807 | 791 |
808 void InstructionSelector::VisitFloat64RoundTruncate(Node* node) { | 792 void InstructionSelector::VisitFloat64RoundTruncate(Node* node) { |
809 VisitRRFloat(this, node, kSSEFloat64Round | MiscField::encode(kRoundToZero)); | 793 X87OperandGenerator g(this); |
810 } | 794 Emit(kX87Float64Round | MiscField::encode(kRoundToZero), |
811 | 795 g.UseFixed(node, stX_0), g.Use(node->InputAt(0))); |
812 | 796 } |
797 | |
798 | |
813 void InstructionSelector::VisitFloat64RoundTiesAway(Node* node) { | 799 void InstructionSelector::VisitFloat64RoundTiesAway(Node* node) { |
814 UNREACHABLE(); | 800 UNREACHABLE(); |
815 } | 801 } |
816 | 802 |
817 | 803 |
818 void InstructionSelector::VisitCall(Node* node, BasicBlock* handler) { | 804 void InstructionSelector::VisitCall(Node* node, BasicBlock* handler) { |
819 IA32OperandGenerator g(this); | 805 X87OperandGenerator g(this); |
820 const CallDescriptor* descriptor = OpParameter<const CallDescriptor*>(node); | 806 const CallDescriptor* descriptor = OpParameter<const CallDescriptor*>(node); |
821 | 807 |
822 FrameStateDescriptor* frame_state_descriptor = nullptr; | 808 FrameStateDescriptor* frame_state_descriptor = nullptr; |
823 if (descriptor->NeedsFrameState()) { | 809 if (descriptor->NeedsFrameState()) { |
824 frame_state_descriptor = | 810 frame_state_descriptor = |
825 GetFrameStateDescriptor(node->InputAt(descriptor->InputCount())); | 811 GetFrameStateDescriptor(node->InputAt(descriptor->InputCount())); |
826 } | 812 } |
827 | 813 |
828 CallBuffer buffer(zone(), descriptor, frame_state_descriptor); | 814 CallBuffer buffer(zone(), descriptor, frame_state_descriptor); |
829 | 815 |
830 // Compute InstructionOperands for inputs and outputs. | 816 // Compute InstructionOperands for inputs and outputs. |
831 InitializeCallBuffer(node, &buffer, true, true); | 817 InitializeCallBuffer(node, &buffer, true, true); |
832 | 818 |
833 // Push any stack arguments. | 819 // Push any stack arguments. |
834 for (Node* node : base::Reversed(buffer.pushed_nodes)) { | 820 for (Node* node : base::Reversed(buffer.pushed_nodes)) { |
835 // TODO(titzer): handle pushing double parameters. | 821 // TODO(titzer): handle pushing double parameters. |
836 InstructionOperand value = | 822 InstructionOperand value = |
837 g.CanBeImmediate(node) | 823 g.CanBeImmediate(node) |
838 ? g.UseImmediate(node) | 824 ? g.UseImmediate(node) |
839 : IsSupported(ATOM) ? g.UseRegister(node) : g.Use(node); | 825 : IsSupported(ATOM) ? g.UseRegister(node) : g.Use(node); |
840 Emit(kIA32Push, g.NoOutput(), value); | 826 Emit(kX87Push, g.NoOutput(), value); |
841 } | 827 } |
842 | 828 |
843 // Pass label of exception handler block. | 829 // Pass label of exception handler block. |
844 CallDescriptor::Flags flags = descriptor->flags(); | 830 CallDescriptor::Flags flags = descriptor->flags(); |
845 if (handler) { | 831 if (handler) { |
846 DCHECK_EQ(IrOpcode::kIfException, handler->front()->opcode()); | 832 DCHECK_EQ(IrOpcode::kIfException, handler->front()->opcode()); |
847 IfExceptionHint hint = OpParameter<IfExceptionHint>(handler->front()); | 833 IfExceptionHint hint = OpParameter<IfExceptionHint>(handler->front()); |
848 if (hint == IfExceptionHint::kLocallyCaught) { | 834 if (hint == IfExceptionHint::kLocallyCaught) { |
849 flags |= CallDescriptor::kHasLocalCatchHandler; | 835 flags |= CallDescriptor::kHasLocalCatchHandler; |
850 } | 836 } |
(...skipping 19 matching lines...) | |
870 | 856 |
871 // Emit the call instruction. | 857 // Emit the call instruction. |
872 size_t const output_count = buffer.outputs.size(); | 858 size_t const output_count = buffer.outputs.size(); |
873 auto* outputs = output_count ? &buffer.outputs.front() : nullptr; | 859 auto* outputs = output_count ? &buffer.outputs.front() : nullptr; |
874 Emit(opcode, output_count, outputs, buffer.instruction_args.size(), | 860 Emit(opcode, output_count, outputs, buffer.instruction_args.size(), |
875 &buffer.instruction_args.front())->MarkAsCall(); | 861 &buffer.instruction_args.front())->MarkAsCall(); |
876 } | 862 } |
877 | 863 |
878 | 864 |
879 void InstructionSelector::VisitTailCall(Node* node) { | 865 void InstructionSelector::VisitTailCall(Node* node) { |
880 IA32OperandGenerator g(this); | 866 X87OperandGenerator g(this); |
881 CallDescriptor const* descriptor = OpParameter<CallDescriptor const*>(node); | 867 CallDescriptor const* descriptor = OpParameter<CallDescriptor const*>(node); |
882 DCHECK_NE(0, descriptor->flags() & CallDescriptor::kSupportsTailCalls); | 868 DCHECK_NE(0, descriptor->flags() & CallDescriptor::kSupportsTailCalls); |
883 DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kPatchableCallSite); | 869 DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kPatchableCallSite); |
884 DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kNeedsNopAfterCall); | 870 DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kNeedsNopAfterCall); |
885 | 871 |
886 // TODO(turbofan): Relax restriction for stack parameters. | 872 // TODO(turbofan): Relax restriction for stack parameters. |
887 if (descriptor->UsesOnlyRegisters() && | 873 if (descriptor->UsesOnlyRegisters() && |
888 descriptor->HasSameReturnLocationsAs( | 874 descriptor->HasSameReturnLocationsAs( |
889 linkage()->GetIncomingDescriptor())) { | 875 linkage()->GetIncomingDescriptor())) { |
890 CallBuffer buffer(zone(), descriptor, nullptr); | 876 CallBuffer buffer(zone(), descriptor, nullptr); |
(...skipping 33 matching lines...) | |
924 // Compute InstructionOperands for inputs and outputs. | 910 // Compute InstructionOperands for inputs and outputs. |
925 InitializeCallBuffer(node, &buffer, true, true); | 911 InitializeCallBuffer(node, &buffer, true, true); |
926 | 912 |
927 // Push any stack arguments. | 913 // Push any stack arguments. |
928 for (Node* node : base::Reversed(buffer.pushed_nodes)) { | 914 for (Node* node : base::Reversed(buffer.pushed_nodes)) { |
929 // TODO(titzer): Handle pushing double parameters. | 915 // TODO(titzer): Handle pushing double parameters. |
930 InstructionOperand value = | 916 InstructionOperand value = |
931 g.CanBeImmediate(node) | 917 g.CanBeImmediate(node) |
932 ? g.UseImmediate(node) | 918 ? g.UseImmediate(node) |
933 : IsSupported(ATOM) ? g.UseRegister(node) : g.Use(node); | 919 : IsSupported(ATOM) ? g.UseRegister(node) : g.Use(node); |
934 Emit(kIA32Push, g.NoOutput(), value); | 920 Emit(kX87Push, g.NoOutput(), value); |
935 } | 921 } |
936 | 922 |
937 // Select the appropriate opcode based on the call type. | 923 // Select the appropriate opcode based on the call type. |
938 InstructionCode opcode; | 924 InstructionCode opcode; |
939 switch (descriptor->kind()) { | 925 switch (descriptor->kind()) { |
940 case CallDescriptor::kCallCodeObject: | 926 case CallDescriptor::kCallCodeObject: |
941 opcode = kArchCallCodeObject; | 927 opcode = kArchCallCodeObject; |
942 break; | 928 break; |
943 case CallDescriptor::kCallJSFunction: | 929 case CallDescriptor::kCallJSFunction: |
944 opcode = kArchCallJSFunction; | 930 opcode = kArchCallJSFunction; |
(...skipping 13 matching lines...) Expand all Loading... | |
958 } | 944 } |
959 } | 945 } |
960 | 946 |
961 | 947 |
962 namespace { | 948 namespace { |
963 | 949 |
964 // Shared routine for multiple compare operations. | 950 // Shared routine for multiple compare operations. |
965 void VisitCompare(InstructionSelector* selector, InstructionCode opcode, | 951 void VisitCompare(InstructionSelector* selector, InstructionCode opcode, |
966 InstructionOperand left, InstructionOperand right, | 952 InstructionOperand left, InstructionOperand right, |
967 FlagsContinuation* cont) { | 953 FlagsContinuation* cont) { |
968 IA32OperandGenerator g(selector); | 954 X87OperandGenerator g(selector); |
969 if (cont->IsBranch()) { | 955 if (cont->IsBranch()) { |
970 selector->Emit(cont->Encode(opcode), g.NoOutput(), left, right, | 956 selector->Emit(cont->Encode(opcode), g.NoOutput(), left, right, |
971 g.Label(cont->true_block()), g.Label(cont->false_block())); | 957 g.Label(cont->true_block()), g.Label(cont->false_block())); |
972 } else { | 958 } else { |
973 DCHECK(cont->IsSet()); | 959 DCHECK(cont->IsSet()); |
974 selector->Emit(cont->Encode(opcode), g.DefineAsByteRegister(cont->result()), | 960 selector->Emit(cont->Encode(opcode), g.DefineAsByteRegister(cont->result()), |
975 left, right); | 961 left, right); |
976 } | 962 } |
977 } | 963 } |
978 | 964 |
979 | 965 |
980 // Shared routine for multiple compare operations. | 966 // Shared routine for multiple compare operations. |
981 void VisitCompare(InstructionSelector* selector, InstructionCode opcode, | 967 void VisitCompare(InstructionSelector* selector, InstructionCode opcode, |
982 Node* left, Node* right, FlagsContinuation* cont, | 968 Node* left, Node* right, FlagsContinuation* cont, |
983 bool commutative) { | 969 bool commutative) { |
984 IA32OperandGenerator g(selector); | 970 X87OperandGenerator g(selector); |
985 if (commutative && g.CanBeBetterLeftOperand(right)) { | 971 if (commutative && g.CanBeBetterLeftOperand(right)) { |
986 std::swap(left, right); | 972 std::swap(left, right); |
987 } | 973 } |
988 VisitCompare(selector, opcode, g.UseRegister(left), g.Use(right), cont); | 974 VisitCompare(selector, opcode, g.UseRegister(left), g.Use(right), cont); |
989 } | 975 } |
990 | 976 |
991 | 977 |
992 // Shared routine for multiple float32 compare operations (inputs commuted). | 978 // Shared routine for multiple float32 compare operations (inputs commuted). |
993 void VisitFloat32Compare(InstructionSelector* selector, Node* node, | 979 void VisitFloat32Compare(InstructionSelector* selector, Node* node, |
994 FlagsContinuation* cont) { | 980 FlagsContinuation* cont) { |
995 Node* const left = node->InputAt(0); | 981 X87OperandGenerator g(selector); |
996 Node* const right = node->InputAt(1); | 982 selector->Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0))); |
997 VisitCompare(selector, kSSEFloat32Cmp, right, left, cont, false); | 983 selector->Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(1))); |
984 if (cont->IsBranch()) { | |
985 selector->Emit(cont->Encode(kX87Float32Cmp), g.NoOutput(), | |
986 g.Label(cont->true_block()), g.Label(cont->false_block())); | |
987 } else { | |
988 DCHECK(cont->IsSet()); | |
989 selector->Emit(cont->Encode(kX87Float32Cmp), | |
990 g.DefineAsByteRegister(cont->result())); | |
991 } | |
998 } | 992 } |
999 | 993 |
1000 | 994 |
1001 // Shared routine for multiple float64 compare operations (inputs commuted). | 995 // Shared routine for multiple float64 compare operations (inputs commuted). |
1002 void VisitFloat64Compare(InstructionSelector* selector, Node* node, | 996 void VisitFloat64Compare(InstructionSelector* selector, Node* node, |
1003 FlagsContinuation* cont) { | 997 FlagsContinuation* cont) { |
1004 Node* const left = node->InputAt(0); | 998 X87OperandGenerator g(selector); |
1005 Node* const right = node->InputAt(1); | 999 selector->Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0))); |
1006 VisitCompare(selector, kSSEFloat64Cmp, right, left, cont, false); | 1000 selector->Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(1))); |
1001 if (cont->IsBranch()) { | |
1002 selector->Emit(cont->Encode(kX87Float64Cmp), g.NoOutput(), | |
1003 g.Label(cont->true_block()), g.Label(cont->false_block())); | |
1004 } else { | |
1005 DCHECK(cont->IsSet()); | |
1006 selector->Emit(cont->Encode(kX87Float64Cmp), | |
1007 g.DefineAsByteRegister(cont->result())); | |
1008 } | |
1007 } | 1009 } |
1008 | 1010 |
1009 | 1011 |
1010 // Shared routine for multiple word compare operations. | 1012 // Shared routine for multiple word compare operations. |
1011 void VisitWordCompare(InstructionSelector* selector, Node* node, | 1013 void VisitWordCompare(InstructionSelector* selector, Node* node, |
1012 InstructionCode opcode, FlagsContinuation* cont) { | 1014 InstructionCode opcode, FlagsContinuation* cont) { |
1013 IA32OperandGenerator g(selector); | 1015 X87OperandGenerator g(selector); |
1014 Node* const left = node->InputAt(0); | 1016 Node* const left = node->InputAt(0); |
1015 Node* const right = node->InputAt(1); | 1017 Node* const right = node->InputAt(1); |
1016 | 1018 |
1017 // Match immediates on left or right side of comparison. | 1019 // Match immediates on left or right side of comparison. |
1018 if (g.CanBeImmediate(right)) { | 1020 if (g.CanBeImmediate(right)) { |
1019 VisitCompare(selector, opcode, g.Use(left), g.UseImmediate(right), cont); | 1021 VisitCompare(selector, opcode, g.Use(left), g.UseImmediate(right), cont); |
1020 } else if (g.CanBeImmediate(left)) { | 1022 } else if (g.CanBeImmediate(left)) { |
1021 if (!node->op()->HasProperty(Operator::kCommutative)) cont->Commute(); | 1023 if (!node->op()->HasProperty(Operator::kCommutative)) cont->Commute(); |
1022 VisitCompare(selector, opcode, g.Use(right), g.UseImmediate(left), cont); | 1024 VisitCompare(selector, opcode, g.Use(right), g.UseImmediate(left), cont); |
1023 } else { | 1025 } else { |
1024 VisitCompare(selector, opcode, left, right, cont, | 1026 VisitCompare(selector, opcode, left, right, cont, |
1025 node->op()->HasProperty(Operator::kCommutative)); | 1027 node->op()->HasProperty(Operator::kCommutative)); |
1026 } | 1028 } |
1027 } | 1029 } |
1028 | 1030 |
1029 | 1031 |
1030 void VisitWordCompare(InstructionSelector* selector, Node* node, | 1032 void VisitWordCompare(InstructionSelector* selector, Node* node, |
1031 FlagsContinuation* cont) { | 1033 FlagsContinuation* cont) { |
1032 IA32OperandGenerator g(selector); | 1034 X87OperandGenerator g(selector); |
1033 Int32BinopMatcher m(node); | 1035 Int32BinopMatcher m(node); |
1034 if (m.left().IsLoad() && m.right().IsLoadStackPointer()) { | 1036 if (m.left().IsLoad() && m.right().IsLoadStackPointer()) { |
1035 LoadMatcher<ExternalReferenceMatcher> mleft(m.left().node()); | 1037 LoadMatcher<ExternalReferenceMatcher> mleft(m.left().node()); |
1036 ExternalReference js_stack_limit = | 1038 ExternalReference js_stack_limit = |
1037 ExternalReference::address_of_stack_limit(selector->isolate()); | 1039 ExternalReference::address_of_stack_limit(selector->isolate()); |
1038 if (mleft.object().Is(js_stack_limit) && mleft.index().Is(0)) { | 1040 if (mleft.object().Is(js_stack_limit) && mleft.index().Is(0)) { |
1039 // Compare(Load(js_stack_limit), LoadStackPointer) | 1041 // Compare(Load(js_stack_limit), LoadStackPointer) |
1040 if (!node->op()->HasProperty(Operator::kCommutative)) cont->Commute(); | 1042 if (!node->op()->HasProperty(Operator::kCommutative)) cont->Commute(); |
1041 InstructionCode opcode = cont->Encode(kIA32StackCheck); | 1043 InstructionCode opcode = cont->Encode(kX87StackCheck); |
1042 if (cont->IsBranch()) { | 1044 if (cont->IsBranch()) { |
1043 selector->Emit(opcode, g.NoOutput(), g.Label(cont->true_block()), | 1045 selector->Emit(opcode, g.NoOutput(), g.Label(cont->true_block()), |
1044 g.Label(cont->false_block())); | 1046 g.Label(cont->false_block())); |
1045 } else { | 1047 } else { |
1046 DCHECK(cont->IsSet()); | 1048 DCHECK(cont->IsSet()); |
1047 selector->Emit(opcode, g.DefineAsRegister(cont->result())); | 1049 selector->Emit(opcode, g.DefineAsRegister(cont->result())); |
1048 } | 1050 } |
1049 return; | 1051 return; |
1050 } | 1052 } |
1051 } | 1053 } |
1052 VisitWordCompare(selector, node, kIA32Cmp, cont); | 1054 VisitWordCompare(selector, node, kX87Cmp, cont); |
1053 } | 1055 } |
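The special case above fuses Compare(Load(js_stack_limit), LoadStackPointer) into a single kX87StackCheck instruction. What the check ultimately decides is only whether the stack pointer has dropped below the isolate's limit; a minimal sketch of that semantics, with plain integers standing in for V8's types:

    #include <cstdint>

    // The JS stack grows downwards, so overflow is imminent once the current
    // stack pointer falls below the isolate's recorded limit.
    bool StackCheckSketch(uintptr_t stack_pointer, uintptr_t stack_limit) {
      return stack_pointer < stack_limit;
    }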
1054 | 1056 |
1055 | 1057 |
1056 // Shared routine for word comparison with zero. | 1058 // Shared routine for word comparison with zero. |
1057 void VisitWordCompareZero(InstructionSelector* selector, Node* user, | 1059 void VisitWordCompareZero(InstructionSelector* selector, Node* user, |
1058 Node* value, FlagsContinuation* cont) { | 1060 Node* value, FlagsContinuation* cont) { |
1059 // Try to combine the branch with a comparison. | 1061 // Try to combine the branch with a comparison. |
1060 while (selector->CanCover(user, value)) { | 1062 while (selector->CanCover(user, value)) { |
1061 switch (value->opcode()) { | 1063 switch (value->opcode()) { |
1062 case IrOpcode::kWord32Equal: { | 1064 case IrOpcode::kWord32Equal: { |
(...skipping 47 matching lines...) | |
1110 // unless the 0th projection (the use of the actual value of the | 1112 // unless the 0th projection (the use of the actual value of the |
1111 // <Operation> is either NULL, which means there's no use of the | 1113 // <Operation> is either NULL, which means there's no use of the |
1112 // actual value, or was already defined, which means it is scheduled | 1114 // actual value, or was already defined, which means it is scheduled |
1113 // *AFTER* this branch). | 1115 // *AFTER* this branch). |
1114 Node* const node = value->InputAt(0); | 1116 Node* const node = value->InputAt(0); |
1115 Node* const result = NodeProperties::FindProjection(node, 0); | 1117 Node* const result = NodeProperties::FindProjection(node, 0); |
1116 if (result == NULL || selector->IsDefined(result)) { | 1118 if (result == NULL || selector->IsDefined(result)) { |
1117 switch (node->opcode()) { | 1119 switch (node->opcode()) { |
1118 case IrOpcode::kInt32AddWithOverflow: | 1120 case IrOpcode::kInt32AddWithOverflow: |
1119 cont->OverwriteAndNegateIfEqual(kOverflow); | 1121 cont->OverwriteAndNegateIfEqual(kOverflow); |
1120 return VisitBinop(selector, node, kIA32Add, cont); | 1122 return VisitBinop(selector, node, kX87Add, cont); |
1121 case IrOpcode::kInt32SubWithOverflow: | 1123 case IrOpcode::kInt32SubWithOverflow: |
1122 cont->OverwriteAndNegateIfEqual(kOverflow); | 1124 cont->OverwriteAndNegateIfEqual(kOverflow); |
1123 return VisitBinop(selector, node, kIA32Sub, cont); | 1125 return VisitBinop(selector, node, kX87Sub, cont); |
1124 default: | 1126 default: |
1125 break; | 1127 break; |
1126 } | 1128 } |
1127 } | 1129 } |
1128 } | 1130 } |
1129 break; | 1131 break; |
1130 case IrOpcode::kInt32Sub: | 1132 case IrOpcode::kInt32Sub: |
1131 return VisitWordCompare(selector, value, cont); | 1133 return VisitWordCompare(selector, value, cont); |
1132 case IrOpcode::kWord32And: | 1134 case IrOpcode::kWord32And: |
1133 return VisitWordCompare(selector, value, kIA32Test, cont); | 1135 return VisitWordCompare(selector, value, kX87Test, cont); |
1134 default: | 1136 default: |
1135 break; | 1137 break; |
1136 } | 1138 } |
1137 break; | 1139 break; |
1138 } | 1140 } |
1139 | 1141 |
1140 // Continuation could not be combined with a compare, emit compare against 0. | 1142 // Continuation could not be combined with a compare, emit compare against 0. |
1141 IA32OperandGenerator g(selector); | 1143 X87OperandGenerator g(selector); |
1142 VisitCompare(selector, kIA32Cmp, g.Use(value), g.TempImmediate(0), cont); | 1144 VisitCompare(selector, kX87Cmp, g.Use(value), g.TempImmediate(0), cont); |
1143 } | 1145 } |
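For the <Operation>WithOverflow cases handled above, the continuation is rewritten to kOverflow so the branch reads the CPU's overflow flag directly instead of comparing a materialized value against zero. The value/flag pair such a node represents can be sketched in portable C++ (illustrative only, not V8 code):

    #include <cstdint>
    #include <utility>

    // 32-bit signed add returning the truncated result plus a flag that
    // corresponds to the overflow flag the kOverflow continuation consumes.
    std::pair<int32_t, bool> Int32AddWithOverflowSketch(int32_t a, int32_t b) {
      int64_t wide = static_cast<int64_t>(a) + static_cast<int64_t>(b);
      int32_t result = static_cast<int32_t>(wide);
      bool overflow = (wide != result);
      return {result, overflow};
    }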
1144 | 1146 |
1145 } // namespace | 1147 } // namespace |
1146 | 1148 |
1147 | 1149 |
1148 void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch, | 1150 void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch, |
1149 BasicBlock* fbranch) { | 1151 BasicBlock* fbranch) { |
1150 FlagsContinuation cont(kNotEqual, tbranch, fbranch); | 1152 FlagsContinuation cont(kNotEqual, tbranch, fbranch); |
1151 VisitWordCompareZero(this, branch, branch->InputAt(0), &cont); | 1153 VisitWordCompareZero(this, branch, branch->InputAt(0), &cont); |
1152 } | 1154 } |
1153 | 1155 |
1154 | 1156 |
1155 void InstructionSelector::VisitSwitch(Node* node, const SwitchInfo& sw) { | 1157 void InstructionSelector::VisitSwitch(Node* node, const SwitchInfo& sw) { |
1156 IA32OperandGenerator g(this); | 1158 X87OperandGenerator g(this); |
1157 InstructionOperand value_operand = g.UseRegister(node->InputAt(0)); | 1159 InstructionOperand value_operand = g.UseRegister(node->InputAt(0)); |
1158 | 1160 |
1159 // Emit either ArchTableSwitch or ArchLookupSwitch. | 1161 // Emit either ArchTableSwitch or ArchLookupSwitch. |
1160 size_t table_space_cost = 4 + sw.value_range; | 1162 size_t table_space_cost = 4 + sw.value_range; |
1161 size_t table_time_cost = 3; | 1163 size_t table_time_cost = 3; |
1162 size_t lookup_space_cost = 3 + 2 * sw.case_count; | 1164 size_t lookup_space_cost = 3 + 2 * sw.case_count; |
1163 size_t lookup_time_cost = sw.case_count; | 1165 size_t lookup_time_cost = sw.case_count; |
1164 if (sw.case_count > 4 && | 1166 if (sw.case_count > 4 && |
1165 table_space_cost + 3 * table_time_cost <= | 1167 table_space_cost + 3 * table_time_cost <= |
1166 lookup_space_cost + 3 * lookup_time_cost && | 1168 lookup_space_cost + 3 * lookup_time_cost && |
1167 sw.min_value > std::numeric_limits<int32_t>::min()) { | 1169 sw.min_value > std::numeric_limits<int32_t>::min()) { |
1168 InstructionOperand index_operand = value_operand; | 1170 InstructionOperand index_operand = value_operand; |
1169 if (sw.min_value) { | 1171 if (sw.min_value) { |
1170 index_operand = g.TempRegister(); | 1172 index_operand = g.TempRegister(); |
1171 Emit(kIA32Lea | AddressingModeField::encode(kMode_MRI), index_operand, | 1173 Emit(kX87Lea | AddressingModeField::encode(kMode_MRI), index_operand, |
1172 value_operand, g.TempImmediate(-sw.min_value)); | 1174 value_operand, g.TempImmediate(-sw.min_value)); |
1173 } | 1175 } |
1174 // Generate a table lookup. | 1176 // Generate a table lookup. |
1175 return EmitTableSwitch(sw, index_operand); | 1177 return EmitTableSwitch(sw, index_operand); |
1176 } | 1178 } |
1177 | 1179 |
1178 // Generate a sequence of conditional jumps. | 1180 // Generate a sequence of conditional jumps. |
1179 return EmitLookupSwitch(sw, value_operand); | 1181 return EmitLookupSwitch(sw, value_operand); |
1180 } | 1182 } |
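The table-versus-lookup decision in VisitSwitch is a straight cost comparison; pulled out as a standalone helper with the same constants as the source (illustrative only), it reads:

    #include <cstddef>

    // Prefer a jump table once there are more than four cases and the
    // weighted space-plus-time cost of the table beats a chain of
    // conditional jumps.
    bool PreferTableSwitch(size_t case_count, size_t value_range) {
      size_t table_space_cost = 4 + value_range;
      size_t table_time_cost = 3;
      size_t lookup_space_cost = 3 + 2 * case_count;
      size_t lookup_time_cost = case_count;
      return case_count > 4 &&
             table_space_cost + 3 * table_time_cost <=
                 lookup_space_cost + 3 * lookup_time_cost;
    }

For example, a dense switch with 8 cases spanning a value range of 8 gives 12 + 9 = 21 for the table against 19 + 24 = 43 for the lookup chain, so the table switch is emitted (after subtracting min_value from the index when it is non-zero).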
1181 | 1183 |
(...skipping 28 matching lines...) | |
1210 | 1212 |
1211 void InstructionSelector::VisitUint32LessThanOrEqual(Node* node) { | 1213 void InstructionSelector::VisitUint32LessThanOrEqual(Node* node) { |
1212 FlagsContinuation cont(kUnsignedLessThanOrEqual, node); | 1214 FlagsContinuation cont(kUnsignedLessThanOrEqual, node); |
1213 VisitWordCompare(this, node, &cont); | 1215 VisitWordCompare(this, node, &cont); |
1214 } | 1216 } |
1215 | 1217 |
1216 | 1218 |
1217 void InstructionSelector::VisitInt32AddWithOverflow(Node* node) { | 1219 void InstructionSelector::VisitInt32AddWithOverflow(Node* node) { |
1218 if (Node* ovf = NodeProperties::FindProjection(node, 1)) { | 1220 if (Node* ovf = NodeProperties::FindProjection(node, 1)) { |
1219 FlagsContinuation cont(kOverflow, ovf); | 1221 FlagsContinuation cont(kOverflow, ovf); |
1220 return VisitBinop(this, node, kIA32Add, &cont); | 1222 return VisitBinop(this, node, kX87Add, &cont); |
1221 } | 1223 } |
1222 FlagsContinuation cont; | 1224 FlagsContinuation cont; |
1223 VisitBinop(this, node, kIA32Add, &cont); | 1225 VisitBinop(this, node, kX87Add, &cont); |
1224 } | 1226 } |
1225 | 1227 |
1226 | 1228 |
1227 void InstructionSelector::VisitInt32SubWithOverflow(Node* node) { | 1229 void InstructionSelector::VisitInt32SubWithOverflow(Node* node) { |
1228 if (Node* ovf = NodeProperties::FindProjection(node, 1)) { | 1230 if (Node* ovf = NodeProperties::FindProjection(node, 1)) { |
1229 FlagsContinuation cont(kOverflow, ovf); | 1231 FlagsContinuation cont(kOverflow, ovf); |
1230 return VisitBinop(this, node, kIA32Sub, &cont); | 1232 return VisitBinop(this, node, kX87Sub, &cont); |
1231 } | 1233 } |
1232 FlagsContinuation cont; | 1234 FlagsContinuation cont; |
1233 VisitBinop(this, node, kIA32Sub, &cont); | 1235 VisitBinop(this, node, kX87Sub, &cont); |
1234 } | 1236 } |
1235 | 1237 |
1236 | 1238 |
1237 void InstructionSelector::VisitFloat32Equal(Node* node) { | 1239 void InstructionSelector::VisitFloat32Equal(Node* node) { |
1238 FlagsContinuation cont(kUnorderedEqual, node); | 1240 FlagsContinuation cont(kUnorderedEqual, node); |
1239 VisitFloat32Compare(this, node, &cont); | 1241 VisitFloat32Compare(this, node, &cont); |
1240 } | 1242 } |
1241 | 1243 |
1242 | 1244 |
1243 void InstructionSelector::VisitFloat32LessThan(Node* node) { | 1245 void InstructionSelector::VisitFloat32LessThan(Node* node) { |
(...skipping 20 matching lines...) | |
1264 } | 1266 } |
1265 | 1267 |
1266 | 1268 |
1267 void InstructionSelector::VisitFloat64LessThanOrEqual(Node* node) { | 1269 void InstructionSelector::VisitFloat64LessThanOrEqual(Node* node) { |
1268 FlagsContinuation cont(kUnsignedGreaterThanOrEqual, node); | 1270 FlagsContinuation cont(kUnsignedGreaterThanOrEqual, node); |
1269 VisitFloat64Compare(this, node, &cont); | 1271 VisitFloat64Compare(this, node, &cont); |
1270 } | 1272 } |
1271 | 1273 |
1272 | 1274 |
1273 void InstructionSelector::VisitFloat64ExtractLowWord32(Node* node) { | 1275 void InstructionSelector::VisitFloat64ExtractLowWord32(Node* node) { |
1274 IA32OperandGenerator g(this); | 1276 X87OperandGenerator g(this); |
1275 Emit(kSSEFloat64ExtractLowWord32, g.DefineAsRegister(node), | 1277 Emit(kX87Float64ExtractLowWord32, g.DefineAsRegister(node), |
1276 g.Use(node->InputAt(0))); | 1278 g.Use(node->InputAt(0))); |
1277 } | 1279 } |
1278 | 1280 |
1279 | 1281 |
1280 void InstructionSelector::VisitFloat64ExtractHighWord32(Node* node) { | 1282 void InstructionSelector::VisitFloat64ExtractHighWord32(Node* node) { |
1281 IA32OperandGenerator g(this); | 1283 X87OperandGenerator g(this); |
1282 Emit(kSSEFloat64ExtractHighWord32, g.DefineAsRegister(node), | 1284 Emit(kX87Float64ExtractHighWord32, g.DefineAsRegister(node), |
1283 g.Use(node->InputAt(0))); | 1285 g.Use(node->InputAt(0))); |
1284 } | 1286 } |
1285 | 1287 |
1286 | 1288 |
1287 void InstructionSelector::VisitFloat64InsertLowWord32(Node* node) { | 1289 void InstructionSelector::VisitFloat64InsertLowWord32(Node* node) { |
1288 IA32OperandGenerator g(this); | 1290 X87OperandGenerator g(this); |
1289 Node* left = node->InputAt(0); | 1291 Node* left = node->InputAt(0); |
1290 Node* right = node->InputAt(1); | 1292 Node* right = node->InputAt(1); |
1291 Float64Matcher mleft(left); | 1293 /* Float64Matcher mleft(left); |
Weiliang 2015/06/15 06:14:30: Remove the commented-out code.
1292 if (mleft.HasValue() && (bit_cast<uint64_t>(mleft.Value()) >> 32) == 0u) { | 1294 if (mleft.HasValue() && (bit_cast<uint64_t>(mleft.Value()) >> 32) == 0u) { |
1293 Emit(kSSEFloat64LoadLowWord32, g.DefineAsRegister(node), g.Use(right)); | 1295 Emit(kX87Float64LoadLowWord32, g.DefineAsRegister(node), g.Use(right)); |
1294 return; | 1296 return; |
1295 } | 1297 } |
1296 Emit(kSSEFloat64InsertLowWord32, g.DefineSameAsFirst(node), | 1298 */ |
1297 g.UseRegister(left), g.Use(right)); | 1299 Emit(kX87Float64InsertLowWord32, g.UseFixed(node, stX_0), |
1300 g.UseRegister(left), g.UseRegister(right)); | |
1298 } | 1301 } |
1299 | 1302 |
1300 | 1303 |
1301 void InstructionSelector::VisitFloat64InsertHighWord32(Node* node) { | 1304 void InstructionSelector::VisitFloat64InsertHighWord32(Node* node) { |
1302 IA32OperandGenerator g(this); | 1305 X87OperandGenerator g(this); |
1303 Node* left = node->InputAt(0); | 1306 Node* left = node->InputAt(0); |
1304 Node* right = node->InputAt(1); | 1307 Node* right = node->InputAt(1); |
1305 Emit(kSSEFloat64InsertHighWord32, g.DefineSameAsFirst(node), | 1308 Emit(kX87Float64InsertHighWord32, g.UseFixed(node, stX_0), |
1306 g.UseRegister(left), g.Use(right)); | 1309 g.UseRegister(left), g.UseRegister(right)); |
1307 } | 1310 } |
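The new X87 versions of Float64InsertLowWord32/HighWord32 fix the result operand to stX_0 and require both inputs in registers, but the operation itself is unchanged: splice a 32-bit word into one half of the double's bit pattern. A portable sketch of that semantics (illustrative only):

    #include <cstdint>
    #include <cstring>

    // Replace the low 32 bits of the double's bit representation.
    double InsertLowWord32Sketch(double value, uint32_t low) {
      uint64_t bits;
      std::memcpy(&bits, &value, sizeof bits);
      bits = (bits & 0xFFFFFFFF00000000ULL) | low;
      std::memcpy(&value, &bits, sizeof value);
      return value;
    }

    // Replace the high 32 bits (sign, exponent, upper mantissa).
    double InsertHighWord32Sketch(double value, uint32_t high) {
      uint64_t bits;
      std::memcpy(&bits, &value, sizeof bits);
      bits = (bits & 0x00000000FFFFFFFFULL) |
             (static_cast<uint64_t>(high) << 32);
      std::memcpy(&value, &bits, sizeof value);
      return value;
    }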
1308 | 1311 |
1309 | 1312 |
1310 // static | 1313 // static |
1311 MachineOperatorBuilder::Flags | 1314 MachineOperatorBuilder::Flags |
1312 InstructionSelector::SupportedMachineOperatorFlags() { | 1315 InstructionSelector::SupportedMachineOperatorFlags() { |
1313 MachineOperatorBuilder::Flags flags = | 1316 MachineOperatorBuilder::Flags flags = |
1314 MachineOperatorBuilder::kFloat32Max | | 1317 MachineOperatorBuilder::kFloat32Max | |
1315 MachineOperatorBuilder::kFloat32Min | | 1318 MachineOperatorBuilder::kFloat32Min | |
1316 MachineOperatorBuilder::kFloat64Max | | 1319 MachineOperatorBuilder::kFloat64Max | |
1317 MachineOperatorBuilder::kFloat64Min | | 1320 MachineOperatorBuilder::kFloat64Min | |
1318 MachineOperatorBuilder::kWord32ShiftIsSafe; | 1321 MachineOperatorBuilder::kWord32ShiftIsSafe; |
1319 if (CpuFeatures::IsSupported(SSE4_1)) { | |
1320 flags |= MachineOperatorBuilder::kFloat64RoundDown | | |
1321 MachineOperatorBuilder::kFloat64RoundTruncate; | |
1322 } | |
1323 return flags; | 1322 return flags; |
1324 } | 1323 } |
1325 | 1324 |
1326 } // namespace compiler | 1325 } // namespace compiler |
1327 } // namespace internal | 1326 } // namespace internal |
1328 } // namespace v8 | 1327 } // namespace v8 |