| OLD | NEW |
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/compiler/instruction-selector-impl.h" | 5 #include "src/compiler/instruction-selector-impl.h" |
| 6 #include "src/compiler/node-matchers.h" | 6 #include "src/compiler/node-matchers.h" |
| 7 | 7 |
| 8 namespace v8 { | 8 namespace v8 { |
| 9 namespace internal { | 9 namespace internal { |
| 10 namespace compiler { | 10 namespace compiler { |
| (...skipping 658 matching lines...) |
| 669 Node* right = node->InputAt(1); | 669 Node* right = node->InputAt(1); |
| 670 VisitCompare(this, kSSEFloat64Cmp, g.UseDoubleRegister(left), g.Use(right), | 670 VisitCompare(this, kSSEFloat64Cmp, g.UseDoubleRegister(left), g.Use(right), |
| 671 cont); | 671 cont); |
| 672 } | 672 } |
| 673 | 673 |
| 674 | 674 |
| 675 void InstructionSelector::VisitCall(Node* call, BasicBlock* continuation, | 675 void InstructionSelector::VisitCall(Node* call, BasicBlock* continuation, |
| 676 BasicBlock* deoptimization) { | 676 BasicBlock* deoptimization) { |
| 677 X64OperandGenerator g(this); | 677 X64OperandGenerator g(this); |
| 678 CallDescriptor* descriptor = OpParameter<CallDescriptor*>(call); | 678 CallDescriptor* descriptor = OpParameter<CallDescriptor*>(call); |
| 679 CallBuffer buffer(zone(), descriptor); // TODO(turbofan): temp zone here? | 679 |
| 680 FrameStateDescriptor* frame_state_descriptor = NULL; |
| 681 if (descriptor->NeedsFrameState()) { |
| 682 frame_state_descriptor = |
| 683 GetFrameStateDescriptor(call->InputAt(descriptor->InputCount())); |
| 684 } |
| 685 |
| 686 CallBuffer buffer(zone(), descriptor, frame_state_descriptor); |
| 680 | 687 |
| 681 // Compute InstructionOperands for inputs and outputs. | 688 // Compute InstructionOperands for inputs and outputs. |
| 682 InitializeCallBuffer(call, &buffer, true, true, continuation, deoptimization); | 689 InitializeCallBuffer(call, &buffer, true, true, continuation, deoptimization); |
| 683 | 690 |
| 684 // TODO(dcarney): stack alignment for c calls. | 691 // TODO(dcarney): stack alignment for c calls. |
| 685 // TODO(dcarney): shadow space on windows for c calls. | 692 // TODO(dcarney): shadow space on windows for c calls. |
| 686 // Push any stack arguments. | 693 // Push any stack arguments. |
| 687 for (int i = buffer.pushed_count - 1; i >= 0; --i) { | 694 for (NodeVectorRIter input = buffer.pushed_nodes.rbegin(); |
| 688 Node* input = buffer.pushed_nodes[i]; | 695 input != buffer.pushed_nodes.rend(); input++) { |
| 689 // TODO(titzer): handle pushing double parameters. | 696 // TODO(titzer): handle pushing double parameters. |
| 690 if (g.CanBeImmediate(input)) { | 697 if (g.CanBeImmediate(*input)) { |
| 691 Emit(kX64PushI, NULL, g.UseImmediate(input)); | 698 Emit(kX64PushI, NULL, g.UseImmediate(*input)); |
| 692 } else { | 699 } else { |
| 693 Emit(kX64Push, NULL, g.Use(input)); | 700 Emit(kX64Push, NULL, g.Use(*input)); |
| 694 } | 701 } |
| 695 } | 702 } |
| 696 | 703 |
| 697 // Select the appropriate opcode based on the call type. | 704 // Select the appropriate opcode based on the call type. |
| 698 InstructionCode opcode; | 705 InstructionCode opcode; |
| 699 switch (descriptor->kind()) { | 706 switch (descriptor->kind()) { |
| 700 case CallDescriptor::kCallCodeObject: { | 707 case CallDescriptor::kCallCodeObject: { |
| 701 bool lazy_deopt = descriptor->CanLazilyDeoptimize(); | 708 opcode = kX64CallCodeObject; |
| 702 opcode = kX64CallCodeObject | MiscField::encode(lazy_deopt ? 1 : 0); | |
| 703 break; | 709 break; |
| 704 } | 710 } |
| 705 case CallDescriptor::kCallAddress: | 711 case CallDescriptor::kCallAddress: |
| 706 opcode = kX64CallAddress; | 712 opcode = kX64CallAddress; |
| 707 break; | 713 break; |
| 708 case CallDescriptor::kCallJSFunction: | 714 case CallDescriptor::kCallJSFunction: |
| 709 opcode = kX64CallJSFunction; | 715 opcode = kX64CallJSFunction; |
| 710 break; | 716 break; |
| 711 default: | 717 default: |
| 712 UNREACHABLE(); | 718 UNREACHABLE(); |
| 713 return; | 719 return; |
| 714 } | 720 } |
| 721 opcode |= MiscField::encode(descriptor->deoptimization_support()); |
| 715 | 722 |
| 716 // Emit the call instruction. | 723 // Emit the call instruction. |
| 717 Instruction* call_instr = | 724 Instruction* call_instr = |
| 718 Emit(opcode, buffer.output_count, buffer.outputs, | 725 Emit(opcode, buffer.outputs.size(), &buffer.outputs.front(), |
| 719 buffer.fixed_and_control_count(), buffer.fixed_and_control_args); | 726 buffer.instruction_args.size(), &buffer.instruction_args.front()); |
| 720 | 727 |
| 721 call_instr->MarkAsCall(); | 728 call_instr->MarkAsCall(); |
| 722 if (deoptimization != NULL) { | 729 if (deoptimization != NULL) { |
| 723 DCHECK(continuation != NULL); | 730 DCHECK(continuation != NULL); |
| 724 call_instr->MarkAsControl(); | 731 call_instr->MarkAsControl(); |
| 725 } | 732 } |
| 726 | 733 |
| 727 // Caller clean up of stack for C-style calls. | 734 // Caller clean up of stack for C-style calls. |
| 728 if (descriptor->kind() == CallDescriptor::kCallAddress && | 735 if (descriptor->kind() == CallDescriptor::kCallAddress && |
| 729 buffer.pushed_count > 0) { | 736 !buffer.pushed_nodes.empty()) { |
| 730 DCHECK(deoptimization == NULL && continuation == NULL); | 737 DCHECK(deoptimization == NULL && continuation == NULL); |
| 731 Emit(kPopStack | MiscField::encode(buffer.pushed_count), NULL); | 738 Emit(kPopStack | |
| 739 MiscField::encode(static_cast<int>(buffer.pushed_nodes.size())), |
| 740 NULL); |
| 732 } | 741 } |
| 733 } | 742 } |
| 734 | 743 |
| 735 } // namespace compiler | 744 } // namespace compiler |
| 736 } // namespace internal | 745 } // namespace internal |
| 737 } // namespace v8 | 746 } // namespace v8 |
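A minimal standalone sketch of the argument-push change above, using a plain std::vector<int> and printf as stand-ins for V8's NodeVector, operand generator, and Emit() (none of which appear here): the new reverse-iterator loop visits pushed_nodes in the same last-to-first order as the old descending-index loop, so the stack layout of the pushed arguments is unchanged.

  #include <cstdio>
  #include <vector>

  int main() {
    std::vector<int> pushed_nodes = {10, 20, 30, 40};

    // Old style: descending index from pushed_count - 1 down to 0.
    for (int i = static_cast<int>(pushed_nodes.size()) - 1; i >= 0; --i) {
      std::printf("push %d\n", pushed_nodes[i]);
    }

    // New style: reverse iterators; same elements, same order.
    for (std::vector<int>::reverse_iterator it = pushed_nodes.rbegin();
         it != pushed_nodes.rend(); ++it) {
      std::printf("push %d\n", *it);
    }
    return 0;
  }

The opcode line added after the switch packs a small value into spare bits of the instruction word, the same idea as the MiscField::encode(lazy_deopt ? 1 : 0) it replaces. A hedged sketch of that bit-field pattern, with Field as a hypothetical stand-in for V8's actual MiscField helper (shift and width chosen arbitrarily here):

  #include <cassert>
  #include <cstdint>

  template <int kShift, int kSize>
  struct Field {
    // Place |value| into kSize bits starting at bit kShift.
    static uint32_t encode(uint32_t value) {
      assert(value < (1u << kSize));
      return value << kShift;
    }
    // Read the field back out of an encoded word.
    static uint32_t decode(uint32_t word) {
      return (word >> kShift) & ((1u << kSize) - 1);
    }
  };

  // e.g. opcode |= Field<22, 3>::encode(deopt_support);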