| OLD | NEW |
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/compiler/code-generator.h" | 5 #include "src/compiler/code-generator.h" |
| 6 #include "src/compiler/code-generator-impl.h" | 6 #include "src/compiler/code-generator-impl.h" |
| 7 #include "src/compiler/gap-resolver.h" | 7 #include "src/compiler/gap-resolver.h" |
| 8 #include "src/compiler/node-matchers.h" | 8 #include "src/compiler/node-matchers.h" |
| 9 #include "src/compiler/node-properties-inl.h" | 9 #include "src/compiler/node-properties-inl.h" |
| 10 #include "src/mips/macro-assembler-mips.h" | 10 #include "src/mips/macro-assembler-mips.h" |
| (...skipping 137 matching lines...) |
| 148 __ ld(kScratchReg, FieldMemOperand(func, JSFunction::kContextOffset)); | 148 __ ld(kScratchReg, FieldMemOperand(func, JSFunction::kContextOffset)); |
| 149 __ Assert(eq, kWrongFunctionContext, cp, Operand(kScratchReg)); | 149 __ Assert(eq, kWrongFunctionContext, cp, Operand(kScratchReg)); |
| 150 } | 150 } |
| 151 | 151 |
| 152 __ ld(at, FieldMemOperand(func, JSFunction::kCodeEntryOffset)); | 152 __ ld(at, FieldMemOperand(func, JSFunction::kCodeEntryOffset)); |
| 153 __ Call(at); | 153 __ Call(at); |
| 154 AddSafepointAndDeopt(instr); | 154 AddSafepointAndDeopt(instr); |
| 155 break; | 155 break; |
| 156 } | 156 } |
| 157 case kArchJmp: | 157 case kArchJmp: |
| 158 __ Branch(GetLabel(i.InputRpo(0))); | 158 AssembleArchJump(i.InputRpo(0)); |
| 159 break; | 159 break; |
| 160 case kArchNop: | 160 case kArchNop: |
| 161 // don't emit code for nops. | 161 // don't emit code for nops. |
| 162 break; | 162 break; |
| 163 case kArchRet: | 163 case kArchRet: |
| 164 AssembleReturn(); | 164 AssembleReturn(); |
| 165 break; | 165 break; |
| 166 case kArchStackPointer: | 166 case kArchStackPointer: |
| 167 __ mov(i.OutputRegister(), sp); | 167 __ mov(i.OutputRegister(), sp); |
| 168 break; | 168 break; |
| (...skipping 304 matching lines...) |
| 473 | 473 |
| 474 #define UNSUPPORTED_COND(opcode, condition) \ | 474 #define UNSUPPORTED_COND(opcode, condition) \ |
| 475 OFStream out(stdout); \ | 475 OFStream out(stdout); \ |
| 476 out << "Unsupported " << #opcode << " condition: \"" << condition << "\""; \ | 476 out << "Unsupported " << #opcode << " condition: \"" << condition << "\""; \ |
| 477 UNIMPLEMENTED(); | 477 UNIMPLEMENTED(); |
| 478 | 478 |
| 479 // Assembles branches after an instruction. | 479 // Assembles branches after an instruction. |
| 480 void CodeGenerator::AssembleArchBranch(Instruction* instr, | 480 void CodeGenerator::AssembleArchBranch(Instruction* instr, |
| 481 FlagsCondition condition) { | 481 BranchInfo* branch) { |
| 482 MipsOperandConverter i(this, instr); | 482 MipsOperandConverter i(this, instr); |
| 483 Label done; | 483 Label* tlabel = branch->true_label; |
| 484 | 484 Label* flabel = branch->false_label; |
| 485 // Emit a branch. The true and false targets are always the last two inputs | |
| 486 // to the instruction. | |
| 487 BasicBlock::RpoNumber tblock = | |
| 488 i.InputRpo(static_cast<int>(instr->InputCount()) - 2); | |
| 489 BasicBlock::RpoNumber fblock = | |
| 490 i.InputRpo(static_cast<int>(instr->InputCount()) - 1); | |
| 491 bool fallthru = IsNextInAssemblyOrder(fblock); | |
| 492 Label* tlabel = GetLabel(tblock); | |
| 493 Label* flabel = fallthru ? &done : GetLabel(fblock); | |
| 494 Condition cc = kNoCondition; | 485 Condition cc = kNoCondition; |
| 495 | 486 |
| 496 // MIPS does not have condition code flags, so compare and branch are | 487 // MIPS does not have condition code flags, so compare and branch are |
| 497 // implemented differently than on other architectures. The compare operations | 488 // implemented differently than on other architectures. The compare operations |
| 498 // emit MIPS pseudo-instructions, which are handled here by branch | 489 // emit MIPS pseudo-instructions, which are handled here by branch |
| 499 // instructions that do the actual comparison. It is essential that the input | 490 // instructions that do the actual comparison. It is essential that the input |
| 500 // registers to the compare pseudo-op are not modified before this branch op, as | 491 // registers to the compare pseudo-op are not modified before this branch op, as |
| 501 // they are tested here. | 492 // they are tested here. |
| 502 // TODO(plind): Add CHECK() to ensure that test/cmp and this branch were | 493 // TODO(plind): Add CHECK() to ensure that test/cmp and this branch were |
| 503 // not separated by other instructions. | 494 // not separated by other instructions. |
| 504 | 495 |
| 505 if (instr->arch_opcode() == kMips64Tst) { | 496 if (instr->arch_opcode() == kMips64Tst) { |
| 506 switch (condition) { | 497 switch (branch->condition) { |
| 507 case kNotEqual: | 498 case kNotEqual: |
| 508 cc = ne; | 499 cc = ne; |
| 509 break; | 500 break; |
| 510 case kEqual: | 501 case kEqual: |
| 511 cc = eq; | 502 cc = eq; |
| 512 break; | 503 break; |
| 513 default: | 504 default: |
| 514 UNSUPPORTED_COND(kMips64Tst, condition); | 505 UNSUPPORTED_COND(kMips64Tst, branch->condition); |
| 515 break; | 506 break; |
| 516 } | 507 } |
| 517 __ And(at, i.InputRegister(0), i.InputOperand(1)); | 508 __ And(at, i.InputRegister(0), i.InputOperand(1)); |
| 518 __ Branch(tlabel, cc, at, Operand(zero_reg)); | 509 __ Branch(tlabel, cc, at, Operand(zero_reg)); |
| 519 } else if (instr->arch_opcode() == kMips64Tst32) { | 510 } else if (instr->arch_opcode() == kMips64Tst32) { |
| 520 switch (condition) { | 511 switch (branch->condition) { |
| 521 case kNotEqual: | 512 case kNotEqual: |
| 522 cc = ne; | 513 cc = ne; |
| 523 break; | 514 break; |
| 524 case kEqual: | 515 case kEqual: |
| 525 cc = eq; | 516 cc = eq; |
| 526 break; | 517 break; |
| 527 default: | 518 default: |
| 528 UNSUPPORTED_COND(kMips64Tst32, condition); | 519 UNSUPPORTED_COND(kMips64Tst32, branch->condition); |
| 529 break; | 520 break; |
| 530 } | 521 } |
| 531 // Zero-extend registers on MIPS64, since only a 64-bit operand | 522 // Zero-extend registers on MIPS64, since only a 64-bit operand |
| 532 // branch-and-compare op. is available. | 523 // branch-and-compare op. is available. |
| 533 // This is a disadvantage of performing 32-bit operations on MIPS64. | 524 // This is a disadvantage of performing 32-bit operations on MIPS64. |
| 534 // Try to force the front end to globally prefer the Word64 representation | 525 // Try to force the front end to globally prefer the Word64 representation |
| 535 // for MIPS64, even for Word32. | 526 // for MIPS64, even for Word32. |
| 536 __ And(at, i.InputRegister(0), i.InputOperand(1)); | 527 __ And(at, i.InputRegister(0), i.InputOperand(1)); |
| 537 __ Dext(at, at, 0, 32); | 528 __ Dext(at, at, 0, 32); |
| 538 __ Branch(tlabel, cc, at, Operand(zero_reg)); | 529 __ Branch(tlabel, cc, at, Operand(zero_reg)); |
| 539 } else if (instr->arch_opcode() == kMips64Dadd || | 530 } else if (instr->arch_opcode() == kMips64Dadd || |
| 540 instr->arch_opcode() == kMips64Dsub) { | 531 instr->arch_opcode() == kMips64Dsub) { |
| 541 switch (condition) { | 532 switch (branch->condition) { |
| 542 case kOverflow: | 533 case kOverflow: |
| 543 cc = ne; | 534 cc = ne; |
| 544 break; | 535 break; |
| 545 case kNotOverflow: | 536 case kNotOverflow: |
| 546 cc = eq; | 537 cc = eq; |
| 547 break; | 538 break; |
| 548 default: | 539 default: |
| 549 UNSUPPORTED_COND(kMips64Dadd, condition); | 540 UNSUPPORTED_COND(kMips64Dadd, branch->condition); |
| 550 break; | 541 break; |
| 551 } | 542 } |
| 552 | 543 |
| 553 __ dsra32(kScratchReg, i.OutputRegister(), 0); | 544 __ dsra32(kScratchReg, i.OutputRegister(), 0); |
| 554 __ sra(at, i.OutputRegister(), 31); | 545 __ sra(at, i.OutputRegister(), 31); |
| 555 __ Branch(tlabel, cc, at, Operand(kScratchReg)); | 546 __ Branch(tlabel, cc, at, Operand(kScratchReg)); |
| 556 } else if (instr->arch_opcode() == kMips64Cmp) { | 547 } else if (instr->arch_opcode() == kMips64Cmp) { |
| 557 switch (condition) { | 548 switch (branch->condition) { |
| 558 case kEqual: | 549 case kEqual: |
| 559 cc = eq; | 550 cc = eq; |
| 560 break; | 551 break; |
| 561 case kNotEqual: | 552 case kNotEqual: |
| 562 cc = ne; | 553 cc = ne; |
| 563 break; | 554 break; |
| 564 case kSignedLessThan: | 555 case kSignedLessThan: |
| 565 cc = lt; | 556 cc = lt; |
| 566 break; | 557 break; |
| 567 case kSignedGreaterThanOrEqual: | 558 case kSignedGreaterThanOrEqual: |
| (...skipping 11 matching lines...) |
| 579 case kUnsignedGreaterThanOrEqual: | 570 case kUnsignedGreaterThanOrEqual: |
| 580 cc = hs; | 571 cc = hs; |
| 581 break; | 572 break; |
| 582 case kUnsignedLessThanOrEqual: | 573 case kUnsignedLessThanOrEqual: |
| 583 cc = ls; | 574 cc = ls; |
| 584 break; | 575 break; |
| 585 case kUnsignedGreaterThan: | 576 case kUnsignedGreaterThan: |
| 586 cc = hi; | 577 cc = hi; |
| 587 break; | 578 break; |
| 588 default: | 579 default: |
| 589 UNSUPPORTED_COND(kMips64Cmp, condition); | 580 UNSUPPORTED_COND(kMips64Cmp, branch->condition); |
| 590 break; | 581 break; |
| 591 } | 582 } |
| 592 __ Branch(tlabel, cc, i.InputRegister(0), i.InputOperand(1)); | 583 __ Branch(tlabel, cc, i.InputRegister(0), i.InputOperand(1)); |
| 593 | 584 |
| 594 if (!fallthru) __ Branch(flabel); // no fallthru to flabel. | 585 if (!branch->fallthru) __ Branch(flabel); // no fallthru to flabel. |
| 595 __ bind(&done); | 586 __ bind(&done); |
| 596 | 587 |
| 597 } else if (instr->arch_opcode() == kMips64Cmp32) { | 588 } else if (instr->arch_opcode() == kMips64Cmp32) { |
| 598 switch (condition) { | 589 switch (branch->condition) { |
| 599 case kEqual: | 590 case kEqual: |
| 600 cc = eq; | 591 cc = eq; |
| 601 break; | 592 break; |
| 602 case kNotEqual: | 593 case kNotEqual: |
| 603 cc = ne; | 594 cc = ne; |
| 604 break; | 595 break; |
| 605 case kSignedLessThan: | 596 case kSignedLessThan: |
| 606 cc = lt; | 597 cc = lt; |
| 607 break; | 598 break; |
| 608 case kSignedGreaterThanOrEqual: | 599 case kSignedGreaterThanOrEqual: |
| (...skipping 11 matching lines...) |
| 620 case kUnsignedGreaterThanOrEqual: | 611 case kUnsignedGreaterThanOrEqual: |
| 621 cc = hs; | 612 cc = hs; |
| 622 break; | 613 break; |
| 623 case kUnsignedLessThanOrEqual: | 614 case kUnsignedLessThanOrEqual: |
| 624 cc = ls; | 615 cc = ls; |
| 625 break; | 616 break; |
| 626 case kUnsignedGreaterThan: | 617 case kUnsignedGreaterThan: |
| 627 cc = hi; | 618 cc = hi; |
| 628 break; | 619 break; |
| 629 default: | 620 default: |
| 630 UNSUPPORTED_COND(kMips64Cmp32, condition); | 621 UNSUPPORTED_COND(kMips64Cmp32, branch->condition); |
| 631 break; | 622 break; |
| 632 } | 623 } |
| 633 | 624 |
| 634 switch (condition) { | 625 switch (branch->condition) { |
| 635 case kEqual: | 626 case kEqual: |
| 636 case kNotEqual: | 627 case kNotEqual: |
| 637 case kSignedLessThan: | 628 case kSignedLessThan: |
| 638 case kSignedGreaterThanOrEqual: | 629 case kSignedGreaterThanOrEqual: |
| 639 case kSignedLessThanOrEqual: | 630 case kSignedLessThanOrEqual: |
| 640 case kSignedGreaterThan: | 631 case kSignedGreaterThan: |
| 641 // Sign-extend registers on MIPS64, since only a 64-bit operand | 632 // Sign-extend registers on MIPS64, since only a 64-bit operand |
| 642 // branch-and-compare op. is available. | 633 // branch-and-compare op. is available. |
| 643 __ sll(i.InputRegister(0), i.InputRegister(0), 0); | 634 __ sll(i.InputRegister(0), i.InputRegister(0), 0); |
| 644 if (instr->InputAt(1)->IsRegister()) { | 635 if (instr->InputAt(1)->IsRegister()) { |
| 645 __ sll(i.InputRegister(1), i.InputRegister(1), 0); | 636 __ sll(i.InputRegister(1), i.InputRegister(1), 0); |
| 646 } | 637 } |
| 647 break; | 638 break; |
| 648 case kUnsignedLessThan: | 639 case kUnsignedLessThan: |
| 649 case kUnsignedGreaterThanOrEqual: | 640 case kUnsignedGreaterThanOrEqual: |
| 650 case kUnsignedLessThanOrEqual: | 641 case kUnsignedLessThanOrEqual: |
| 651 case kUnsignedGreaterThan: | 642 case kUnsignedGreaterThan: |
| 652 // Zero-extend registers on MIPS64, since only a 64-bit operand | 643 // Zero-extend registers on MIPS64, since only a 64-bit operand |
| 653 // branch-and-compare op. is available. | 644 // branch-and-compare op. is available. |
| 654 __ Dext(i.InputRegister(0), i.InputRegister(0), 0, 32); | 645 __ Dext(i.InputRegister(0), i.InputRegister(0), 0, 32); |
| 655 if (instr->InputAt(1)->IsRegister()) { | 646 if (instr->InputAt(1)->IsRegister()) { |
| 656 __ Dext(i.InputRegister(1), i.InputRegister(1), 0, 32); | 647 __ Dext(i.InputRegister(1), i.InputRegister(1), 0, 32); |
| 657 } | 648 } |
| 658 break; | 649 break; |
| 659 default: | 650 default: |
| 660 UNSUPPORTED_COND(kMips64Cmp, condition); | 651 UNSUPPORTED_COND(kMips64Cmp, branch->condition); |
| 661 break; | 652 break; |
| 662 } | 653 } |
| 663 __ Branch(tlabel, cc, i.InputRegister(0), i.InputOperand(1)); | 654 __ Branch(tlabel, cc, i.InputRegister(0), i.InputOperand(1)); |
| 664 | 655 |
| 665 if (!fallthru) __ Branch(flabel); // no fallthru to flabel. | 656 if (!branch->fallthru) __ Branch(flabel); // no fallthru to flabel. |
| 666 __ bind(&done); | 657 __ bind(&done); |
| 667 } else if (instr->arch_opcode() == kMips64CmpD) { | 658 } else if (instr->arch_opcode() == kMips64CmpD) { |
| 668 // TODO(dusmil) optimize unordered checks to use less instructions | 659 // TODO(dusmil) optimize unordered checks to use less instructions |
| 669 // even if we have to unfold BranchF macro. | 660 // even if we have to unfold BranchF macro. |
| 670 Label* nan = flabel; | 661 Label* nan = flabel; |
| 671 switch (condition) { | 662 switch (branch->condition) { |
| 672 case kUnorderedEqual: | 663 case kUnorderedEqual: |
| 673 cc = eq; | 664 cc = eq; |
| 674 break; | 665 break; |
| 675 case kUnorderedNotEqual: | 666 case kUnorderedNotEqual: |
| 676 cc = ne; | 667 cc = ne; |
| 677 nan = tlabel; | 668 nan = tlabel; |
| 678 break; | 669 break; |
| 679 case kUnorderedLessThan: | 670 case kUnorderedLessThan: |
| 680 cc = lt; | 671 cc = lt; |
| 681 break; | 672 break; |
| 682 case kUnorderedGreaterThanOrEqual: | 673 case kUnorderedGreaterThanOrEqual: |
| 683 cc = ge; | 674 cc = ge; |
| 684 nan = tlabel; | 675 nan = tlabel; |
| 685 break; | 676 break; |
| 686 case kUnorderedLessThanOrEqual: | 677 case kUnorderedLessThanOrEqual: |
| 687 cc = le; | 678 cc = le; |
| 688 break; | 679 break; |
| 689 case kUnorderedGreaterThan: | 680 case kUnorderedGreaterThan: |
| 690 cc = gt; | 681 cc = gt; |
| 691 nan = tlabel; | 682 nan = tlabel; |
| 692 break; | 683 break; |
| 693 default: | 684 default: |
| 694 UNSUPPORTED_COND(kMips64CmpD, condition); | 685 UNSUPPORTED_COND(kMips64CmpD, branch->condition); |
| 695 break; | 686 break; |
| 696 } | 687 } |
| 697 __ BranchF(tlabel, nan, cc, i.InputDoubleRegister(0), | 688 __ BranchF(tlabel, nan, cc, i.InputDoubleRegister(0), |
| 698 i.InputDoubleRegister(1)); | 689 i.InputDoubleRegister(1)); |
| 699 | 690 |
| 700 if (!fallthru) __ Branch(flabel); // no fallthru to flabel. | 691 if (!branch->fallthru) __ Branch(flabel); // no fallthru to flabel. |
| 701 __ bind(&done); | 692 __ bind(&done); |
| 702 | 693 |
| 703 } else { | 694 } else { |
| 704 PrintF("AssembleArchBranch Unimplemented arch_opcode: %d\n", | 695 PrintF("AssembleArchBranch Unimplemented arch_opcode: %d\n", |
| 705 instr->arch_opcode()); | 696 instr->arch_opcode()); |
| 706 UNIMPLEMENTED(); | 697 UNIMPLEMENTED(); |
| 707 } | 698 } |
| 708 } | 699 } |
| 709 | 700 |
| 710 | 701 |
| 702 void CodeGenerator::AssembleArchJump(BasicBlock::RpoNumber target) { |
| 703 if (!IsNextInAssemblyOrder(target)) __ Branch(GetLabel(target)); |
| 704 } |
| 705 |
| 706 |
| 711 // Assembles boolean materializations after an instruction. | 707 // Assembles boolean materializations after an instruction. |
| 712 void CodeGenerator::AssembleArchBoolean(Instruction* instr, | 708 void CodeGenerator::AssembleArchBoolean(Instruction* instr, |
| 713 FlagsCondition condition) { | 709 FlagsCondition condition) { |
| 714 MipsOperandConverter i(this, instr); | 710 MipsOperandConverter i(this, instr); |
| 715 Label done; | 711 Label done; |
| 716 | 712 |
| 717 // Materialize a full 32-bit 1 or 0 value. The result register is always the | 713 // Materialize a full 32-bit 1 or 0 value. The result register is always the |
| 718 // last output of the instruction. | 714 // last output of the instruction. |
| 719 Label false_value; | 715 Label false_value; |
| 720 DCHECK_NE(0, instr->OutputCount()); | 716 DCHECK_NE(0, instr->OutputCount()); |
| (...skipping 499 matching lines...) |
| 1220 } | 1216 } |
| 1221 } | 1217 } |
| 1222 MarkLazyDeoptSite(); | 1218 MarkLazyDeoptSite(); |
| 1223 } | 1219 } |
| 1224 | 1220 |
| 1225 #undef __ | 1221 #undef __ |
| 1226 | 1222 |
| 1227 } // namespace compiler | 1223 } // namespace compiler |
| 1228 } // namespace internal | 1224 } // namespace internal |
| 1229 } // namespace v8 | 1225 } // namespace v8 |
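The net effect of the patch, as visible in the new column: AssembleArchBranch no longer recovers its true/false targets from the last two instruction inputs and no longer manages its own fall-through label; it reads them from a branch descriptor, and the unconditional-jump case is split out into the new AssembleArchJump, which emits __ Branch(GetLabel(target)) only when the target block is not the next one in assembly order. Below is a minimal sketch of that descriptor, reconstructed from its uses in the diff; the field names come from the new column, but the actual declaration lives in the shared code-generator header and may differ in detail.

// Sketch only: reconstructed from the uses of branch->... in the new column
// above, not copied from the real src/compiler/code-generator.h.
// FlagsCondition and Label are the existing V8 compiler/assembler types.
struct BranchInfo {
  FlagsCondition condition;  // e.g. kEqual, kOverflow, kUnsignedLessThan
  Label* true_label;         // taken branch target
  Label* false_label;        // not-taken branch target
  bool fallthru;             // true if false_label is the next block in
                             // assembly order, so no jump to it is needed
};

With a shape like this, the shared code generator can presumably fill in the labels and the fall-through flag once per branch instruction and pass the same struct to every architecture backend, instead of each backend repeating the InputRpo()/IsNextInAssemblyOrder() bookkeeping that the old MIPS64 code performed at the top of AssembleArchBranch.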