| OLD | NEW |
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/base/adapters.h" | 5 #include "src/base/adapters.h" |
| 6 #include "src/compiler/instruction-selector-impl.h" | 6 #include "src/compiler/instruction-selector-impl.h" |
| 7 #include "src/compiler/node-matchers.h" | 7 #include "src/compiler/node-matchers.h" |
| 8 #include "src/compiler/node-properties.h" | 8 #include "src/compiler/node-properties.h" |
| 9 | 9 |
| 10 namespace v8 { | 10 namespace v8 { |
| (...skipping 36 matching lines...) | |
| 47 default: | 47 default: |
| 48 return false; | 48 return false; |
| 49 } | 49 } |
| 50 } | 50 } |
| 51 | 51 |
| 52 AddressingMode GenerateMemoryOperandInputs(Node* index, int scale, Node* base, | 52 AddressingMode GenerateMemoryOperandInputs(Node* index, int scale, Node* base, |
| 53 Node* displacement_node, | 53 Node* displacement_node, |
| 54 InstructionOperand inputs[], | 54 InstructionOperand inputs[], |
| 55 size_t* input_count) { | 55 size_t* input_count) { |
| 56 AddressingMode mode = kMode_MRI; | 56 AddressingMode mode = kMode_MRI; |
| 57 int32_t displacement = (displacement_node == NULL) | 57 int32_t displacement = (displacement_node == nullptr) |
| 58 ? 0 | 58 ? 0 |
| 59 : OpParameter<int32_t>(displacement_node); | 59 : OpParameter<int32_t>(displacement_node); |
| 60 if (base != NULL) { | 60 if (base != nullptr) { |
| 61 if (base->opcode() == IrOpcode::kInt32Constant) { | 61 if (base->opcode() == IrOpcode::kInt32Constant) { |
| 62 displacement += OpParameter<int32_t>(base); | 62 displacement += OpParameter<int32_t>(base); |
| 63 base = NULL; | 63 base = nullptr; |
| 64 } | 64 } |
| 65 } | 65 } |
| 66 if (base != NULL) { | 66 if (base != nullptr) { |
| 67 inputs[(*input_count)++] = UseRegister(base); | 67 inputs[(*input_count)++] = UseRegister(base); |
| 68 if (index != NULL) { | 68 if (index != nullptr) { |
| 69 DCHECK(scale >= 0 && scale <= 3); | 69 DCHECK(scale >= 0 && scale <= 3); |
| 70 inputs[(*input_count)++] = UseRegister(index); | 70 inputs[(*input_count)++] = UseRegister(index); |
| 71 if (displacement != 0) { | 71 if (displacement != 0) { |
| 72 inputs[(*input_count)++] = TempImmediate(displacement); | 72 inputs[(*input_count)++] = TempImmediate(displacement); |
| 73 static const AddressingMode kMRnI_modes[] = {kMode_MR1I, kMode_MR2I, | 73 static const AddressingMode kMRnI_modes[] = {kMode_MR1I, kMode_MR2I, |
| 74 kMode_MR4I, kMode_MR8I}; | 74 kMode_MR4I, kMode_MR8I}; |
| 75 mode = kMRnI_modes[scale]; | 75 mode = kMRnI_modes[scale]; |
| 76 } else { | 76 } else { |
| 77 static const AddressingMode kMRn_modes[] = {kMode_MR1, kMode_MR2, | 77 static const AddressingMode kMRn_modes[] = {kMode_MR1, kMode_MR2, |
| 78 kMode_MR4, kMode_MR8}; | 78 kMode_MR4, kMode_MR8}; |
| 79 mode = kMRn_modes[scale]; | 79 mode = kMRn_modes[scale]; |
| 80 } | 80 } |
| 81 } else { | 81 } else { |
| 82 if (displacement == 0) { | 82 if (displacement == 0) { |
| 83 mode = kMode_MR; | 83 mode = kMode_MR; |
| 84 } else { | 84 } else { |
| 85 inputs[(*input_count)++] = TempImmediate(displacement); | 85 inputs[(*input_count)++] = TempImmediate(displacement); |
| 86 mode = kMode_MRI; | 86 mode = kMode_MRI; |
| 87 } | 87 } |
| 88 } | 88 } |
| 89 } else { | 89 } else { |
| 90 DCHECK(scale >= 0 && scale <= 3); | 90 DCHECK(scale >= 0 && scale <= 3); |
| 91 if (index != NULL) { | 91 if (index != nullptr) { |
| 92 inputs[(*input_count)++] = UseRegister(index); | 92 inputs[(*input_count)++] = UseRegister(index); |
| 93 if (displacement != 0) { | 93 if (displacement != 0) { |
| 94 inputs[(*input_count)++] = TempImmediate(displacement); | 94 inputs[(*input_count)++] = TempImmediate(displacement); |
| 95 static const AddressingMode kMnI_modes[] = {kMode_MRI, kMode_M2I, | 95 static const AddressingMode kMnI_modes[] = {kMode_MRI, kMode_M2I, |
| 96 kMode_M4I, kMode_M8I}; | 96 kMode_M4I, kMode_M8I}; |
| 97 mode = kMnI_modes[scale]; | 97 mode = kMnI_modes[scale]; |
| 98 } else { | 98 } else { |
| 99 static const AddressingMode kMn_modes[] = {kMode_MR, kMode_M2, | 99 static const AddressingMode kMn_modes[] = {kMode_MR, kMode_M2, |
| 100 kMode_M4, kMode_M8}; | 100 kMode_M4, kMode_M8}; |
| 101 mode = kMn_modes[scale]; | 101 mode = kMn_modes[scale]; |
| 102 } | 102 } |
| 103 } else { | 103 } else { |
| 104 inputs[(*input_count)++] = TempImmediate(displacement); | 104 inputs[(*input_count)++] = TempImmediate(displacement); |
| 105 return kMode_MI; | 105 return kMode_MI; |
| 106 } | 106 } |
| 107 } | 107 } |
| 108 return mode; | 108 return mode; |
| 109 } | 109 } |
| 110 | 110 |
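Reviewer note: the four-entry mode tables in GenerateMemoryOperandInputs above map the matched (base, index, scale, displacement) tuple onto one addressing mode per combination, with the scale exponent 0..3 indexing strides of 1, 2, 4 and 8 bytes. Below is a minimal standalone sketch of that selection logic, not part of this CL: the PickMode helper, the string mode names and main() are illustrative inventions.

```cpp
// Standalone sketch (not V8 code) of the scale-indexed mode-table selection.
#include <cassert>
#include <cstdint>
#include <cstdio>

const char* PickMode(bool has_base, bool has_index, int scale, int32_t disp) {
  assert(scale >= 0 && scale <= 3);
  static const char* const kMRnI[] = {"MR1I", "MR2I", "MR4I", "MR8I"};
  static const char* const kMRn[]  = {"MR1",  "MR2",  "MR4",  "MR8"};
  static const char* const kMnI[]  = {"MRI",  "M2I",  "M4I",  "M8I"};
  static const char* const kMn[]   = {"MR",   "M2",   "M4",   "M8"};
  if (has_base) {
    if (has_index) return disp != 0 ? kMRnI[scale] : kMRn[scale];
    return disp != 0 ? "MRI" : "MR";  // base only, with or without displacement
  }
  if (has_index) return disp != 0 ? kMnI[scale] : kMn[scale];
  return "MI";  // displacement only
}

int main() {
  // e.g. a[i] with 4-byte elements plus a constant offset of 8:
  // base + index*4 + 8 selects the MR4I form.
  std::printf("%s\n", PickMode(/*has_base=*/true, /*has_index=*/true, /*scale=*/2, /*disp=*/8));
}
```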
| 111 AddressingMode GetEffectiveAddressMemoryOperand(Node* node, | 111 AddressingMode GetEffectiveAddressMemoryOperand(Node* node, |
| 112 InstructionOperand inputs[], | 112 InstructionOperand inputs[], |
| 113 size_t* input_count) { | 113 size_t* input_count) { |
| 114 BaseWithIndexAndDisplacement32Matcher m(node, true); | 114 BaseWithIndexAndDisplacement32Matcher m(node, true); |
| 115 DCHECK(m.matches()); | 115 DCHECK(m.matches()); |
| 116 if ((m.displacement() == NULL || CanBeImmediate(m.displacement()))) { | 116 if ((m.displacement() == nullptr || CanBeImmediate(m.displacement()))) { |
| 117 return GenerateMemoryOperandInputs(m.index(), m.scale(), m.base(), | 117 return GenerateMemoryOperandInputs(m.index(), m.scale(), m.base(), |
| 118 m.displacement(), inputs, input_count); | 118 m.displacement(), inputs, input_count); |
| 119 } else { | 119 } else { |
| 120 inputs[(*input_count)++] = UseRegister(node->InputAt(0)); | 120 inputs[(*input_count)++] = UseRegister(node->InputAt(0)); |
| 121 inputs[(*input_count)++] = UseRegister(node->InputAt(1)); | 121 inputs[(*input_count)++] = UseRegister(node->InputAt(1)); |
| 122 return kMode_MR1; | 122 return kMode_MR1; |
| 123 } | 123 } |
| 124 } | 124 } |
| 125 | 125 |
| 126 bool CanBeBetterLeftOperand(Node* node) const { | 126 bool CanBeBetterLeftOperand(Node* node) const { |
| (...skipping 125 matching lines...) | |
| 252 val = g.UseRegister(value); | 252 val = g.UseRegister(value); |
| 253 } | 253 } |
| 254 | 254 |
| 255 InstructionOperand inputs[4]; | 255 InstructionOperand inputs[4]; |
| 256 size_t input_count = 0; | 256 size_t input_count = 0; |
| 257 AddressingMode addressing_mode = | 257 AddressingMode addressing_mode = |
| 258 g.GetEffectiveAddressMemoryOperand(node, inputs, &input_count); | 258 g.GetEffectiveAddressMemoryOperand(node, inputs, &input_count); |
| 259 InstructionCode code = | 259 InstructionCode code = |
| 260 opcode | AddressingModeField::encode(addressing_mode); | 260 opcode | AddressingModeField::encode(addressing_mode); |
| 261 inputs[input_count++] = val; | 261 inputs[input_count++] = val; |
| 262 Emit(code, 0, static_cast<InstructionOperand*>(NULL), input_count, inputs); | 262 Emit(code, 0, static_cast<InstructionOperand*>(nullptr), input_count, |
| 263 inputs); |
| 263 } | 264 } |
| 264 } | 265 } |
| 265 | 266 |
| 266 | 267 |
| 267 void InstructionSelector::VisitCheckedLoad(Node* node) { | 268 void InstructionSelector::VisitCheckedLoad(Node* node) { |
| 268 CheckedLoadRepresentation load_rep = CheckedLoadRepresentationOf(node->op()); | 269 CheckedLoadRepresentation load_rep = CheckedLoadRepresentationOf(node->op()); |
| 269 X87OperandGenerator g(this); | 270 X87OperandGenerator g(this); |
| 270 Node* const buffer = node->InputAt(0); | 271 Node* const buffer = node->InputAt(0); |
| 271 Node* const offset = node->InputAt(1); | 272 Node* const offset = node->InputAt(1); |
| 272 Node* const length = node->InputAt(2); | 273 Node* const length = node->InputAt(2); |
| (...skipping 236 matching lines...) | |
| 509 selector->Emit(opcode, 1, outputs, input_count, inputs); | 510 selector->Emit(opcode, 1, outputs, input_count, inputs); |
| 510 } | 511 } |
| 511 | 512 |
| 512 } // namespace | 513 } // namespace |
| 513 | 514 |
| 514 | 515 |
| 515 void InstructionSelector::VisitWord32Shl(Node* node) { | 516 void InstructionSelector::VisitWord32Shl(Node* node) { |
| 516 Int32ScaleMatcher m(node, true); | 517 Int32ScaleMatcher m(node, true); |
| 517 if (m.matches()) { | 518 if (m.matches()) { |
| 518 Node* index = node->InputAt(0); | 519 Node* index = node->InputAt(0); |
| 519 Node* base = m.power_of_two_plus_one() ? index : NULL; | 520 Node* base = m.power_of_two_plus_one() ? index : nullptr; |
| 520 EmitLea(this, node, index, m.scale(), base, NULL); | 521 EmitLea(this, node, index, m.scale(), base, nullptr); |
| 521 return; | 522 return; |
| 522 } | 523 } |
| 523 VisitShift(this, node, kX87Shl); | 524 VisitShift(this, node, kX87Shl); |
| 524 } | 525 } |
| 525 | 526 |
| 526 | 527 |
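Reviewer note: VisitWord32Shl here (and VisitInt32Mul further down) reuses Int32ScaleMatcher to fold small shifts and multiplies into a single lea. A hedged sketch of the multiplier-to-scale mapping follows; the ScaleMatch struct and MatchScale function are simplified stand-ins for the real matcher, not part of this CL.

```cpp
// Standalone sketch (not V8 code): which multipliers fold into one lea.
#include <cstdio>

struct ScaleMatch {
  bool matches;
  int scale;                    // lea scale exponent, 0..3
  bool power_of_two_plus_one;   // true for 3, 5, 9: reuse the index as the base
};

ScaleMatch MatchScale(int multiplier) {
  switch (multiplier) {
    case 2: return {true, 1, false};
    case 4: return {true, 2, false};
    case 8: return {true, 3, false};
    case 3: return {true, 1, true};   // x*3 == x + x*2
    case 5: return {true, 2, true};   // x*5 == x + x*4
    case 9: return {true, 3, true};   // x*9 == x + x*8
    default: return {false, 0, false};
  }
}

int main() {
  ScaleMatch m = MatchScale(9);
  // -> lea dst, [index + index*8], the "base = index" case passed to EmitLea above.
  std::printf("scale=%d plus_one=%d\n", m.scale, m.power_of_two_plus_one ? 1 : 0);
}
```

For a plain shift by 1..3 the shift amount itself becomes the scale and no extra base operand is needed; the power_of_two_plus_one case is what makes the visitor pass the index as the base.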
| 527 void InstructionSelector::VisitWord32Shr(Node* node) { | 528 void InstructionSelector::VisitWord32Shr(Node* node) { |
| 528 VisitShift(this, node, kX87Shr); | 529 VisitShift(this, node, kX87Shr); |
| 529 } | 530 } |
| 530 | 531 |
| (...skipping 22 matching lines...) | |
| 553 Emit(kX87Popcnt, g.DefineAsRegister(node), g.Use(node->InputAt(0))); | 554 Emit(kX87Popcnt, g.DefineAsRegister(node), g.Use(node->InputAt(0))); |
| 554 } | 555 } |
| 555 | 556 |
| 556 | 557 |
| 557 void InstructionSelector::VisitInt32Add(Node* node) { | 558 void InstructionSelector::VisitInt32Add(Node* node) { |
| 558 X87OperandGenerator g(this); | 559 X87OperandGenerator g(this); |
| 559 | 560 |
| 560 // Try to match the Add to a lea pattern | 561 // Try to match the Add to a lea pattern |
| 561 BaseWithIndexAndDisplacement32Matcher m(node); | 562 BaseWithIndexAndDisplacement32Matcher m(node); |
| 562 if (m.matches() && | 563 if (m.matches() && |
| 563 (m.displacement() == NULL || g.CanBeImmediate(m.displacement()))) { | 564 (m.displacement() == nullptr || g.CanBeImmediate(m.displacement()))) { |
| 564 InstructionOperand inputs[4]; | 565 InstructionOperand inputs[4]; |
| 565 size_t input_count = 0; | 566 size_t input_count = 0; |
| 566 AddressingMode mode = g.GenerateMemoryOperandInputs( | 567 AddressingMode mode = g.GenerateMemoryOperandInputs( |
| 567 m.index(), m.scale(), m.base(), m.displacement(), inputs, &input_count); | 568 m.index(), m.scale(), m.base(), m.displacement(), inputs, &input_count); |
| 568 | 569 |
| 569 DCHECK_NE(0u, input_count); | 570 DCHECK_NE(0u, input_count); |
| 570 DCHECK_GE(arraysize(inputs), input_count); | 571 DCHECK_GE(arraysize(inputs), input_count); |
| 571 | 572 |
| 572 InstructionOperand outputs[1]; | 573 InstructionOperand outputs[1]; |
| 573 outputs[0] = g.DefineAsRegister(node); | 574 outputs[0] = g.DefineAsRegister(node); |
| (...skipping 16 matching lines...) | |
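Reviewer note: the lea matching in VisitInt32Add (truncated above) follows the same pattern: when BaseWithIndexAndDisplacement32Matcher recognizes the add and any displacement fits in an immediate, the whole expression is emitted as one lea. A small illustrative sketch of an input shape that matches; AddressLike and its variable names are made up for the example.

```cpp
// Standalone sketch (not V8 code) of an add expression that folds into one lea.
#include <cstdint>
#include <cstdio>

// base = p, index = i, scale = 2, displacement = 8: with the matcher above this
// becomes a single "lea dst, [p + i*4 + 8]" instead of a shift plus two adds.
int32_t AddressLike(int32_t p, int32_t i) {
  return p + (i << 2) + 8;
}

int main() { std::printf("%d\n", AddressLike(100, 3)); }
```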
| 590 } else { | 591 } else { |
| 591 VisitBinop(this, node, kX87Sub); | 592 VisitBinop(this, node, kX87Sub); |
| 592 } | 593 } |
| 593 } | 594 } |
| 594 | 595 |
| 595 | 596 |
| 596 void InstructionSelector::VisitInt32Mul(Node* node) { | 597 void InstructionSelector::VisitInt32Mul(Node* node) { |
| 597 Int32ScaleMatcher m(node, true); | 598 Int32ScaleMatcher m(node, true); |
| 598 if (m.matches()) { | 599 if (m.matches()) { |
| 599 Node* index = node->InputAt(0); | 600 Node* index = node->InputAt(0); |
| 600 Node* base = m.power_of_two_plus_one() ? index : NULL; | 601 Node* base = m.power_of_two_plus_one() ? index : nullptr; |
| 601 EmitLea(this, node, index, m.scale(), base, NULL); | 602 EmitLea(this, node, index, m.scale(), base, nullptr); |
| 602 return; | 603 return; |
| 603 } | 604 } |
| 604 X87OperandGenerator g(this); | 605 X87OperandGenerator g(this); |
| 605 Node* left = node->InputAt(0); | 606 Node* left = node->InputAt(0); |
| 606 Node* right = node->InputAt(1); | 607 Node* right = node->InputAt(1); |
| 607 if (g.CanBeImmediate(right)) { | 608 if (g.CanBeImmediate(right)) { |
| 608 Emit(kX87Imul, g.DefineAsRegister(node), g.Use(left), | 609 Emit(kX87Imul, g.DefineAsRegister(node), g.Use(left), |
| 609 g.UseImmediate(right)); | 610 g.UseImmediate(right)); |
| 610 } else { | 611 } else { |
| 611 if (g.CanBeBetterLeftOperand(right)) { | 612 if (g.CanBeBetterLeftOperand(right)) { |
| (...skipping 88 matching lines...) | |
| 700 g.Use(node->InputAt(0))); | 701 g.Use(node->InputAt(0))); |
| 701 return; | 702 return; |
| 702 } | 703 } |
| 703 UNREACHABLE(); | 704 UNREACHABLE(); |
| 704 } | 705 } |
| 705 | 706 |
| 706 | 707 |
| 707 void InstructionSelector::VisitBitcastFloat32ToInt32(Node* node) { | 708 void InstructionSelector::VisitBitcastFloat32ToInt32(Node* node) { |
| 708 X87OperandGenerator g(this); | 709 X87OperandGenerator g(this); |
| 709 Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0))); | 710 Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0))); |
| 710 Emit(kX87BitcastFI, g.DefineAsRegister(node), 0, NULL); | 711 Emit(kX87BitcastFI, g.DefineAsRegister(node), 0, nullptr); |
| 711 } | 712 } |
| 712 | 713 |
| 713 | 714 |
| 714 void InstructionSelector::VisitBitcastInt32ToFloat32(Node* node) { | 715 void InstructionSelector::VisitBitcastInt32ToFloat32(Node* node) { |
| 715 X87OperandGenerator g(this); | 716 X87OperandGenerator g(this); |
| 716 Emit(kX87BitcastIF, g.DefineAsFixed(node, stX_0), g.Use(node->InputAt(0))); | 717 Emit(kX87BitcastIF, g.DefineAsFixed(node, stX_0), g.Use(node->InputAt(0))); |
| 717 } | 718 } |
| 718 | 719 |
| 719 | 720 |
| 720 void InstructionSelector::VisitFloat32Add(Node* node) { | 721 void InstructionSelector::VisitFloat32Add(Node* node) { |
| 721 X87OperandGenerator g(this); | 722 X87OperandGenerator g(this); |
| 722 Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0))); | 723 Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0))); |
| 723 Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(1))); | 724 Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(1))); |
| 724 Emit(kX87Float32Add, g.DefineAsFixed(node, stX_0), 0, NULL); | 725 Emit(kX87Float32Add, g.DefineAsFixed(node, stX_0), 0, nullptr); |
| 725 } | 726 } |
| 726 | 727 |
| 727 | 728 |
| 728 void InstructionSelector::VisitFloat64Add(Node* node) { | 729 void InstructionSelector::VisitFloat64Add(Node* node) { |
| 729 X87OperandGenerator g(this); | 730 X87OperandGenerator g(this); |
| 730 Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0))); | 731 Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0))); |
| 731 Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(1))); | 732 Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(1))); |
| 732 Emit(kX87Float64Add, g.DefineAsFixed(node, stX_0), 0, NULL); | 733 Emit(kX87Float64Add, g.DefineAsFixed(node, stX_0), 0, nullptr); |
| 733 } | 734 } |
| 734 | 735 |
| 735 | 736 |
| 736 void InstructionSelector::VisitFloat32Sub(Node* node) { | 737 void InstructionSelector::VisitFloat32Sub(Node* node) { |
| 737 X87OperandGenerator g(this); | 738 X87OperandGenerator g(this); |
| 738 Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0))); | 739 Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0))); |
| 739 Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(1))); | 740 Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(1))); |
| 740 Emit(kX87Float32Sub, g.DefineAsFixed(node, stX_0), 0, NULL); | 741 Emit(kX87Float32Sub, g.DefineAsFixed(node, stX_0), 0, nullptr); |
| 741 } | 742 } |
| 742 | 743 |
| 743 | 744 |
| 744 void InstructionSelector::VisitFloat64Sub(Node* node) { | 745 void InstructionSelector::VisitFloat64Sub(Node* node) { |
| 745 X87OperandGenerator g(this); | 746 X87OperandGenerator g(this); |
| 746 Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0))); | 747 Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0))); |
| 747 Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(1))); | 748 Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(1))); |
| 748 Emit(kX87Float64Sub, g.DefineAsFixed(node, stX_0), 0, NULL); | 749 Emit(kX87Float64Sub, g.DefineAsFixed(node, stX_0), 0, nullptr); |
| 749 } | 750 } |
| 750 | 751 |
| 751 | 752 |
| 752 void InstructionSelector::VisitFloat32Mul(Node* node) { | 753 void InstructionSelector::VisitFloat32Mul(Node* node) { |
| 753 X87OperandGenerator g(this); | 754 X87OperandGenerator g(this); |
| 754 Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0))); | 755 Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0))); |
| 755 Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(1))); | 756 Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(1))); |
| 756 Emit(kX87Float32Mul, g.DefineAsFixed(node, stX_0), 0, NULL); | 757 Emit(kX87Float32Mul, g.DefineAsFixed(node, stX_0), 0, nullptr); |
| 757 } | 758 } |
| 758 | 759 |
| 759 | 760 |
| 760 void InstructionSelector::VisitFloat64Mul(Node* node) { | 761 void InstructionSelector::VisitFloat64Mul(Node* node) { |
| 761 X87OperandGenerator g(this); | 762 X87OperandGenerator g(this); |
| 762 Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0))); | 763 Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0))); |
| 763 Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(1))); | 764 Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(1))); |
| 764 Emit(kX87Float64Mul, g.DefineAsFixed(node, stX_0), 0, NULL); | 765 Emit(kX87Float64Mul, g.DefineAsFixed(node, stX_0), 0, nullptr); |
| 765 } | 766 } |
| 766 | 767 |
| 767 | 768 |
| 768 void InstructionSelector::VisitFloat32Div(Node* node) { | 769 void InstructionSelector::VisitFloat32Div(Node* node) { |
| 769 X87OperandGenerator g(this); | 770 X87OperandGenerator g(this); |
| 770 Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0))); | 771 Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0))); |
| 771 Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(1))); | 772 Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(1))); |
| 772 Emit(kX87Float32Div, g.DefineAsFixed(node, stX_0), 0, NULL); | 773 Emit(kX87Float32Div, g.DefineAsFixed(node, stX_0), 0, nullptr); |
| 773 } | 774 } |
| 774 | 775 |
| 775 | 776 |
| 776 void InstructionSelector::VisitFloat64Div(Node* node) { | 777 void InstructionSelector::VisitFloat64Div(Node* node) { |
| 777 X87OperandGenerator g(this); | 778 X87OperandGenerator g(this); |
| 778 Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0))); | 779 Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0))); |
| 779 Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(1))); | 780 Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(1))); |
| 780 Emit(kX87Float64Div, g.DefineAsFixed(node, stX_0), 0, NULL); | 781 Emit(kX87Float64Div, g.DefineAsFixed(node, stX_0), 0, nullptr); |
| 781 } | 782 } |
| 782 | 783 |
| 783 | 784 |
| 784 void InstructionSelector::VisitFloat64Mod(Node* node) { | 785 void InstructionSelector::VisitFloat64Mod(Node* node) { |
| 785 X87OperandGenerator g(this); | 786 X87OperandGenerator g(this); |
| 786 InstructionOperand temps[] = {g.TempRegister(eax)}; | 787 InstructionOperand temps[] = {g.TempRegister(eax)}; |
| 787 Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0))); | 788 Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0))); |
| 788 Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(1))); | 789 Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(1))); |
| 789 Emit(kX87Float64Mod, g.DefineAsFixed(node, stX_0), 1, temps)->MarkAsCall(); | 790 Emit(kX87Float64Mod, g.DefineAsFixed(node, stX_0), 1, temps)->MarkAsCall(); |
| 790 } | 791 } |
| 791 | 792 |
| 792 | 793 |
| 793 void InstructionSelector::VisitFloat32Max(Node* node) { | 794 void InstructionSelector::VisitFloat32Max(Node* node) { |
| 794 X87OperandGenerator g(this); | 795 X87OperandGenerator g(this); |
| 795 Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0))); | 796 Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0))); |
| 796 Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(1))); | 797 Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(1))); |
| 797 Emit(kX87Float32Max, g.DefineAsFixed(node, stX_0), 0, NULL); | 798 Emit(kX87Float32Max, g.DefineAsFixed(node, stX_0), 0, nullptr); |
| 798 } | 799 } |
| 799 | 800 |
| 800 | 801 |
| 801 void InstructionSelector::VisitFloat64Max(Node* node) { | 802 void InstructionSelector::VisitFloat64Max(Node* node) { |
| 802 X87OperandGenerator g(this); | 803 X87OperandGenerator g(this); |
| 803 Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0))); | 804 Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0))); |
| 804 Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(1))); | 805 Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(1))); |
| 805 Emit(kX87Float64Max, g.DefineAsFixed(node, stX_0), 0, NULL); | 806 Emit(kX87Float64Max, g.DefineAsFixed(node, stX_0), 0, nullptr); |
| 806 } | 807 } |
| 807 | 808 |
| 808 | 809 |
| 809 void InstructionSelector::VisitFloat32Min(Node* node) { | 810 void InstructionSelector::VisitFloat32Min(Node* node) { |
| 810 X87OperandGenerator g(this); | 811 X87OperandGenerator g(this); |
| 811 Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0))); | 812 Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0))); |
| 812 Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(1))); | 813 Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(1))); |
| 813 Emit(kX87Float32Min, g.DefineAsFixed(node, stX_0), 0, NULL); | 814 Emit(kX87Float32Min, g.DefineAsFixed(node, stX_0), 0, nullptr); |
| 814 } | 815 } |
| 815 | 816 |
| 816 | 817 |
| 817 void InstructionSelector::VisitFloat64Min(Node* node) { | 818 void InstructionSelector::VisitFloat64Min(Node* node) { |
| 818 X87OperandGenerator g(this); | 819 X87OperandGenerator g(this); |
| 819 Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0))); | 820 Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0))); |
| 820 Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(1))); | 821 Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(1))); |
| 821 Emit(kX87Float64Min, g.DefineAsFixed(node, stX_0), 0, NULL); | 822 Emit(kX87Float64Min, g.DefineAsFixed(node, stX_0), 0, nullptr); |
| 822 } | 823 } |
| 823 | 824 |
| 824 | 825 |
| 825 void InstructionSelector::VisitFloat32Abs(Node* node) { | 826 void InstructionSelector::VisitFloat32Abs(Node* node) { |
| 826 X87OperandGenerator g(this); | 827 X87OperandGenerator g(this); |
| 827 Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0))); | 828 Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0))); |
| 828 Emit(kX87Float32Abs, g.DefineAsFixed(node, stX_0), 0, NULL); | 829 Emit(kX87Float32Abs, g.DefineAsFixed(node, stX_0), 0, nullptr); |
| 829 } | 830 } |
| 830 | 831 |
| 831 | 832 |
| 832 void InstructionSelector::VisitFloat64Abs(Node* node) { | 833 void InstructionSelector::VisitFloat64Abs(Node* node) { |
| 833 X87OperandGenerator g(this); | 834 X87OperandGenerator g(this); |
| 834 Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0))); | 835 Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0))); |
| 835 Emit(kX87Float64Abs, g.DefineAsFixed(node, stX_0), 0, NULL); | 836 Emit(kX87Float64Abs, g.DefineAsFixed(node, stX_0), 0, nullptr); |
| 836 } | 837 } |
| 837 | 838 |
| 838 | 839 |
| 839 void InstructionSelector::VisitFloat32Sqrt(Node* node) { | 840 void InstructionSelector::VisitFloat32Sqrt(Node* node) { |
| 840 X87OperandGenerator g(this); | 841 X87OperandGenerator g(this); |
| 841 Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0))); | 842 Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0))); |
| 842 Emit(kX87Float32Sqrt, g.DefineAsFixed(node, stX_0), 0, NULL); | 843 Emit(kX87Float32Sqrt, g.DefineAsFixed(node, stX_0), 0, nullptr); |
| 843 } | 844 } |
| 844 | 845 |
| 845 | 846 |
| 846 void InstructionSelector::VisitFloat64Sqrt(Node* node) { | 847 void InstructionSelector::VisitFloat64Sqrt(Node* node) { |
| 847 X87OperandGenerator g(this); | 848 X87OperandGenerator g(this); |
| 848 Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0))); | 849 Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0))); |
| 849 Emit(kX87Float64Sqrt, g.DefineAsFixed(node, stX_0), 0, NULL); | 850 Emit(kX87Float64Sqrt, g.DefineAsFixed(node, stX_0), 0, nullptr); |
| 850 } | 851 } |
| 851 | 852 |
| 852 | 853 |
| 853 void InstructionSelector::VisitFloat32RoundDown(Node* node) { | 854 void InstructionSelector::VisitFloat32RoundDown(Node* node) { |
| 854 X87OperandGenerator g(this); | 855 X87OperandGenerator g(this); |
| 855 Emit(kX87Float32Round | MiscField::encode(kRoundDown), | 856 Emit(kX87Float32Round | MiscField::encode(kRoundDown), |
| 856 g.UseFixed(node, stX_0), g.Use(node->InputAt(0))); | 857 g.UseFixed(node, stX_0), g.Use(node->InputAt(0))); |
| 857 } | 858 } |
| 858 | 859 |
| 859 | 860 |
| (...skipping 254 matching lines...) | |
| 1114 return VisitFloat64Compare(selector, value, cont); | 1115 return VisitFloat64Compare(selector, value, cont); |
| 1115 case IrOpcode::kFloat64LessThanOrEqual: | 1116 case IrOpcode::kFloat64LessThanOrEqual: |
| 1116 cont->OverwriteAndNegateIfEqual(kUnsignedGreaterThanOrEqual); | 1117 cont->OverwriteAndNegateIfEqual(kUnsignedGreaterThanOrEqual); |
| 1117 return VisitFloat64Compare(selector, value, cont); | 1118 return VisitFloat64Compare(selector, value, cont); |
| 1118 case IrOpcode::kProjection: | 1119 case IrOpcode::kProjection: |
| 1119 // Check if this is the overflow output projection of an | 1120 // Check if this is the overflow output projection of an |
| 1120 // <Operation>WithOverflow node. | 1121 // <Operation>WithOverflow node. |
| 1121 if (ProjectionIndexOf(value->op()) == 1u) { | 1122 if (ProjectionIndexOf(value->op()) == 1u) { |
| 1122 // We cannot combine the <Operation>WithOverflow with this branch | 1123 // We cannot combine the <Operation>WithOverflow with this branch |
| 1123 // unless the 0th projection (the use of the actual value of the | 1124 // unless the 0th projection (the use of the actual value of the |
| 1124 // <Operation> is either NULL, which means there's no use of the | 1125 // <Operation> is either nullptr, which means there's no use of the |
| 1125 // actual value, or was already defined, which means it is scheduled | 1126 // actual value, or was already defined, which means it is scheduled |
| 1126 // *AFTER* this branch). | 1127 // *AFTER* this branch). |
| 1127 Node* const node = value->InputAt(0); | 1128 Node* const node = value->InputAt(0); |
| 1128 Node* const result = NodeProperties::FindProjection(node, 0); | 1129 Node* const result = NodeProperties::FindProjection(node, 0); |
| 1129 if (result == NULL || selector->IsDefined(result)) { | 1130 if (result == nullptr || selector->IsDefined(result)) { |
| 1130 switch (node->opcode()) { | 1131 switch (node->opcode()) { |
| 1131 case IrOpcode::kInt32AddWithOverflow: | 1132 case IrOpcode::kInt32AddWithOverflow: |
| 1132 cont->OverwriteAndNegateIfEqual(kOverflow); | 1133 cont->OverwriteAndNegateIfEqual(kOverflow); |
| 1133 return VisitBinop(selector, node, kX87Add, cont); | 1134 return VisitBinop(selector, node, kX87Add, cont); |
| 1134 case IrOpcode::kInt32SubWithOverflow: | 1135 case IrOpcode::kInt32SubWithOverflow: |
| 1135 cont->OverwriteAndNegateIfEqual(kOverflow); | 1136 cont->OverwriteAndNegateIfEqual(kOverflow); |
| 1136 return VisitBinop(selector, node, kX87Sub, cont); | 1137 return VisitBinop(selector, node, kX87Sub, cont); |
| 1137 default: | 1138 default: |
| 1138 break; | 1139 break; |
| 1139 } | 1140 } |
| (...skipping 195 matching lines...) | |
| 1335 MachineOperatorBuilder::kFloat32RoundTruncate | | 1336 MachineOperatorBuilder::kFloat32RoundTruncate | |
| 1336 MachineOperatorBuilder::kFloat64RoundTruncate | | 1337 MachineOperatorBuilder::kFloat64RoundTruncate | |
| 1337 MachineOperatorBuilder::kFloat32RoundTiesEven | | 1338 MachineOperatorBuilder::kFloat32RoundTiesEven | |
| 1338 MachineOperatorBuilder::kFloat64RoundTiesEven; | 1339 MachineOperatorBuilder::kFloat64RoundTiesEven; |
| 1339 return flags; | 1340 return flags; |
| 1340 } | 1341 } |
| 1341 | 1342 |
| 1342 } // namespace compiler | 1343 } // namespace compiler |
| 1343 } // namespace internal | 1344 } // namespace internal |
| 1344 } // namespace v8 | 1345 } // namespace v8 |