| OLD | NEW |
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/base/adapters.h" | 5 #include "src/base/adapters.h" |
| 6 #include "src/base/bits.h" | 6 #include "src/base/bits.h" |
| 7 #include "src/compiler/instruction-selector-impl.h" | 7 #include "src/compiler/instruction-selector-impl.h" |
| 8 #include "src/compiler/node-matchers.h" | 8 #include "src/compiler/node-matchers.h" |
| 9 #include "src/compiler/node-properties.h" | 9 #include "src/compiler/node-properties.h" |
| 10 | 10 |
| (...skipping 510 matching lines...) |
| 521 void InstructionSelector::VisitFloat64RoundTruncate(Node* node) { | 521 void InstructionSelector::VisitFloat64RoundTruncate(Node* node) { |
| 522 VisitRR(this, kMipsFloat64RoundTruncate, node); | 522 VisitRR(this, kMipsFloat64RoundTruncate, node); |
| 523 } | 523 } |
| 524 | 524 |
| 525 | 525 |
| 526 void InstructionSelector::VisitFloat64RoundTiesAway(Node* node) { | 526 void InstructionSelector::VisitFloat64RoundTiesAway(Node* node) { |
| 527 UNREACHABLE(); | 527 UNREACHABLE(); |
| 528 } | 528 } |
| 529 | 529 |
| 530 | 530 |
| 531 void InstructionSelector::VisitCall(Node* node, BasicBlock* handler) { | 531 void InstructionSelector::EmitPrepareArguments(NodeVector* arguments, |
| 532 const CallDescriptor* descriptor, |
| 533 Node* node) { |
| 532 MipsOperandGenerator g(this); | 534 MipsOperandGenerator g(this); |
| 533 const CallDescriptor* descriptor = OpParameter<const CallDescriptor*>(node); | |
| 534 | |
| 535 FrameStateDescriptor* frame_state_descriptor = nullptr; | |
| 536 if (descriptor->NeedsFrameState()) { | |
| 537 frame_state_descriptor = | |
| 538 GetFrameStateDescriptor(node->InputAt(descriptor->InputCount())); | |
| 539 } | |
| 540 | |
| 541 CallBuffer buffer(zone(), descriptor, frame_state_descriptor); | |
| 542 | |
| 543 // Compute InstructionOperands for inputs and outputs. | |
| 544 InitializeCallBuffer(node, &buffer, true, true); | |
| 545 | 535 |
| 546 // Prepare for C function call. | 536 // Prepare for C function call. |
| 547 if (descriptor->IsCFunctionCall()) { | 537 if (descriptor->IsCFunctionCall()) { |
| 548 Emit(kArchPrepareCallCFunction | | 538 Emit(kArchPrepareCallCFunction | |
| 549 MiscField::encode(static_cast<int>(descriptor->CParameterCount())), | 539 MiscField::encode(static_cast<int>(descriptor->CParameterCount())), |
| 550 0, nullptr, 0, nullptr); | 540 0, nullptr, 0, nullptr); |
| 551 | 541 |
| 552 // Poke any stack arguments. | 542 // Poke any stack arguments. |
| 553 int slot = kCArgSlotCount; | 543 int slot = kCArgSlotCount; |
| 554 for (Node* input : buffer.pushed_nodes) { | 544 for (Node* input : (*arguments)) { |
| 555 Emit(kMipsStoreToStackSlot, g.NoOutput(), g.UseRegister(input), | 545 Emit(kMipsStoreToStackSlot, g.NoOutput(), g.UseRegister(input), |
| 556 g.TempImmediate(slot << kPointerSizeLog2)); | 546 g.TempImmediate(slot << kPointerSizeLog2)); |
| 557 ++slot; | 547 ++slot; |
| 558 } | 548 } |
| 559 } else { | 549 } else { |
| 560 // Possibly align stack here for functions. | 550 // Possibly align stack here for functions. |
| 561 int push_count = static_cast<int>(descriptor->StackParameterCount()); | 551 int push_count = static_cast<int>(descriptor->StackParameterCount()); |
| 562 if (push_count > 0) { | 552 if (push_count > 0) { |
| 563 Emit(kMipsStackClaim, g.NoOutput(), | 553 Emit(kMipsStackClaim, g.NoOutput(), |
| 564 g.TempImmediate(push_count << kPointerSizeLog2)); | 554 g.TempImmediate(push_count << kPointerSizeLog2)); |
| 565 } | 555 } |
| 566 for (size_t n = 0; n < buffer.pushed_nodes.size(); ++n) { | 556 for (size_t n = 0; n < arguments->size(); ++n) { |
| 567 if (Node* input = buffer.pushed_nodes[n]) { | 557 if (Node* input = (*arguments)[n]) { |
| 568 Emit(kMipsStoreToStackSlot, g.NoOutput(), g.UseRegister(input), | 558 Emit(kMipsStoreToStackSlot, g.NoOutput(), g.UseRegister(input), |
| 569 g.TempImmediate(n << kPointerSizeLog2)); | 559 g.TempImmediate(n << kPointerSizeLog2)); |
| 570 } | 560 } |
| 571 } | 561 } |
| 572 } | 562 } |
| 573 | |
| 574 // Pass label of exception handler block. | |
| 575 CallDescriptor::Flags flags = descriptor->flags(); | |
| 576 if (handler) { | |
| 577 DCHECK_EQ(IrOpcode::kIfException, handler->front()->opcode()); | |
| 578 IfExceptionHint hint = OpParameter<IfExceptionHint>(handler->front()); | |
| 579 if (hint == IfExceptionHint::kLocallyCaught) { | |
| 580 flags |= CallDescriptor::kHasLocalCatchHandler; | |
| 581 } | |
| 582 flags |= CallDescriptor::kHasExceptionHandler; | |
| 583 buffer.instruction_args.push_back(g.Label(handler)); | |
| 584 } | |
| 585 | |
| 586 // Select the appropriate opcode based on the call type. | |
| 587 InstructionCode opcode = kArchNop; | |
| 588 switch (descriptor->kind()) { | |
| 589 case CallDescriptor::kCallAddress: | |
| 590 opcode = | |
| 591 kArchCallCFunction | | |
| 592 MiscField::encode(static_cast<int>(descriptor->CParameterCount())); | |
| 593 break; | |
| 594 case CallDescriptor::kCallCodeObject: | |
| 595 opcode = kArchCallCodeObject | MiscField::encode(flags); | |
| 596 break; | |
| 597 case CallDescriptor::kCallJSFunction: | |
| 598 opcode = kArchCallJSFunction | MiscField::encode(flags); | |
| 599 break; | |
| 600 case CallDescriptor::kLazyBailout: | |
| 601 opcode = kArchLazyBailout | MiscField::encode(flags); | |
| 602 break; | |
| 603 } | |
| 604 | |
| 605 // Emit the call instruction. | |
| 606 size_t const output_count = buffer.outputs.size(); | |
| 607 auto* outputs = output_count ? &buffer.outputs.front() : nullptr; | |
| 608 Emit(opcode, output_count, outputs, buffer.instruction_args.size(), | |
| 609 &buffer.instruction_args.front())->MarkAsCall(); | |
| 610 } | 563 } |
| 611 | 564 |
| 612 | 565 |
| 613 void InstructionSelector::VisitTailCall(Node* node) { | 566 void InstructionSelector::VisitTailCall(Node* node) { |
| 614 MipsOperandGenerator g(this); | 567 MipsOperandGenerator g(this); |
| 615 const CallDescriptor* descriptor = OpParameter<const CallDescriptor*>(node); | 568 const CallDescriptor* descriptor = OpParameter<const CallDescriptor*>(node); |
| 616 DCHECK_NE(0, descriptor->flags() & CallDescriptor::kSupportsTailCalls); | 569 DCHECK_NE(0, descriptor->flags() & CallDescriptor::kSupportsTailCalls); |
| 617 DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kPatchableCallSite); | 570 DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kPatchableCallSite); |
| 618 DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kNeedsNopAfterCall); | 571 DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kNeedsNopAfterCall); |
| 619 | 572 |
| (...skipping 539 matching lines...) |
| 1159 IsFp64Mode()) { | 1112 IsFp64Mode()) { |
| 1160 flags |= MachineOperatorBuilder::kFloat64RoundDown | | 1113 flags |= MachineOperatorBuilder::kFloat64RoundDown | |
| 1161 MachineOperatorBuilder::kFloat64RoundTruncate; | 1114 MachineOperatorBuilder::kFloat64RoundTruncate; |
| 1162 } | 1115 } |
| 1163 return flags; | 1116 return flags; |
| 1164 } | 1117 } |
| 1165 | 1118 |
| 1166 } // namespace compiler | 1119 } // namespace compiler |
| 1167 } // namespace internal | 1120 } // namespace internal |
| 1168 } // namespace v8 | 1121 } // namespace v8 |
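The NEW column drops the frame-state setup, call-buffer initialization, exception-handler wiring, opcode selection, and the final call emission from the MIPS backend, leaving only the stack-argument pushing behind the new `EmitPrepareArguments` hook. A minimal sketch of the platform-independent caller this implies is below; it reuses the logic deleted from the OLD column, but its location in `src/compiler/instruction-selector.cc` and the exact surrounding code are assumptions, not part of this diff.

```cpp
// Hypothetical sketch (assumed to live in the shared
// src/compiler/instruction-selector.cc; not shown in this diff).
// The common code builds the call buffer as before and delegates only
// the architecture-specific stack-argument preparation to the backend.
void InstructionSelector::VisitCall(Node* node, BasicBlock* handler) {
  const CallDescriptor* descriptor = OpParameter<const CallDescriptor*>(node);

  FrameStateDescriptor* frame_state_descriptor = nullptr;
  if (descriptor->NeedsFrameState()) {
    frame_state_descriptor =
        GetFrameStateDescriptor(node->InputAt(descriptor->InputCount()));
  }

  CallBuffer buffer(zone(), descriptor, frame_state_descriptor);
  InitializeCallBuffer(node, &buffer, true, true);

  // Per-architecture hook: on MIPS this is the EmitPrepareArguments
  // override added in the NEW column above, which pokes the pushed
  // nodes onto the stack (C-call argument slots vs. a stack claim).
  EmitPrepareArguments(&buffer.pushed_nodes, descriptor, node);

  // The exception-handler label, opcode selection and the final
  // Emit(...)->MarkAsCall() -- previously duplicated in each backend's
  // VisitCall -- would follow here in the shared implementation.
  // ...
}
```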