| OLD | NEW |
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/compiler/code-generator.h" | 5 #include "src/compiler/code-generator.h" |
| 6 #include "src/compilation-info.h" | 6 #include "src/compilation-info.h" |
| 7 #include "src/compiler/code-generator-impl.h" | 7 #include "src/compiler/code-generator-impl.h" |
| 8 #include "src/compiler/gap-resolver.h" | 8 #include "src/compiler/gap-resolver.h" |
| 9 #include "src/compiler/node-matchers.h" | 9 #include "src/compiler/node-matchers.h" |
| 10 #include "src/compiler/osr.h" | 10 #include "src/compiler/osr.h" |
| (...skipping 565 matching lines...) |
| 576 __ MovFromFloatResult(i.OutputDoubleRegister()); \ | 576 __ MovFromFloatResult(i.OutputDoubleRegister()); \ |
| 577 } while (0) | 577 } while (0) |
| 578 | 578 |
| 579 void CodeGenerator::AssembleDeconstructFrame() { | 579 void CodeGenerator::AssembleDeconstructFrame() { |
| 580 __ mov(sp, fp); | 580 __ mov(sp, fp); |
| 581 __ Pop(ra, fp); | 581 __ Pop(ra, fp); |
| 582 } | 582 } |
| 583 | 583 |
| 584 void CodeGenerator::AssemblePrepareTailCall() { | 584 void CodeGenerator::AssemblePrepareTailCall() { |
| 585 if (frame_access_state()->has_frame()) { | 585 if (frame_access_state()->has_frame()) { |
| 586 __ ld(ra, MemOperand(fp, StandardFrameConstants::kCallerPCOffset)); | 586 __ Ld(ra, MemOperand(fp, StandardFrameConstants::kCallerPCOffset)); |
| 587 __ ld(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); | 587 __ Ld(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); |
| 588 } | 588 } |
| 589 frame_access_state()->SetFrameAccessToSP(); | 589 frame_access_state()->SetFrameAccessToSP(); |
| 590 } | 590 } |
| 591 | 591 |
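The hunk above is the pattern for the whole change: each raw MIPS64 load/store emitter (`ld`, `sd`, `lw`, `sw`, `lwc1`, `sdc1`, ...) is replaced by its capitalized MacroAssembler counterpart (`Ld`, `Sd`, `Lw`, `Sw`, `Lwc1`, `Sdc1`, ...). The likely benefit is that a macro-level helper can cope with memory operands whose offset does not fit the signed 16-bit immediate of a single MIPS load/store. A minimal sketch of that idea, assuming an `is_int16()` range check and the reserved scratch register; this is an illustration, not the actual V8 implementation:

```cpp
// Illustration only: a macro-level 64-bit load that tolerates wide offsets.
void MacroAssembler::Ld(Register rd, const MemOperand& rs) {
  if (is_int16(rs.offset())) {
    ld(rd, rs);  // Offset fits the signed 16-bit immediate: one instruction.
  } else {
    // Assumed fallback: materialize the offset, form the full address in the
    // scratch register, then load with a zero displacement.
    li(kScratchReg, Operand(rs.offset()));
    daddu(kScratchReg, rs.rm(), kScratchReg);
    ld(rd, MemOperand(kScratchReg, 0));
  }
}
```

When the offset is small the emitted code is identical to the old `ld` call, which is why the call sites below can be rewritten one-for-one.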
| 592 void CodeGenerator::AssemblePopArgumentsAdaptorFrame(Register args_reg, | 592 void CodeGenerator::AssemblePopArgumentsAdaptorFrame(Register args_reg, |
| 593 Register scratch1, | 593 Register scratch1, |
| 594 Register scratch2, | 594 Register scratch2, |
| 595 Register scratch3) { | 595 Register scratch3) { |
| 596 DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3)); | 596 DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3)); |
| 597 Label done; | 597 Label done; |
| 598 | 598 |
| 599 // Check if current frame is an arguments adaptor frame. | 599 // Check if current frame is an arguments adaptor frame. |
| 600 __ ld(scratch3, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 600 __ Ld(scratch3, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
| 601 __ Branch(&done, ne, scratch3, | 601 __ Branch(&done, ne, scratch3, |
| 602 Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR))); | 602 Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR))); |
| 603 | 603 |
| 604 // Load arguments count from current arguments adaptor frame (note, it | 604 // Load arguments count from current arguments adaptor frame (note, it |
| 605 // does not include receiver). | 605 // does not include receiver). |
| 606 Register caller_args_count_reg = scratch1; | 606 Register caller_args_count_reg = scratch1; |
| 607 __ ld(caller_args_count_reg, | 607 __ Ld(caller_args_count_reg, |
| 608 MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset)); | 608 MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset)); |
| 609 __ SmiUntag(caller_args_count_reg); | 609 __ SmiUntag(caller_args_count_reg); |
| 610 | 610 |
| 611 ParameterCount callee_args_count(args_reg); | 611 ParameterCount callee_args_count(args_reg); |
| 612 __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2, | 612 __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2, |
| 613 scratch3); | 613 scratch3); |
| 614 __ bind(&done); | 614 __ bind(&done); |
| 615 } | 615 } |
| 616 | 616 |
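The adaptor-frame handling above only touches the argument count when the caller's frame marker identifies an arguments adaptor; the count is stored as a Smi, so it is untagged before `PrepareForTailCall` removes the adaptor frame. On 64-bit targets where the Smi payload sits in the upper 32 bits of the tagged word, the untag step reduces to a single arithmetic shift. A sketch under that assumption (the helper name is hypothetical, not from the V8 sources):

```cpp
// Hypothetical illustration of SmiUntag under the 64-bit Smi layout where the
// 31-bit payload occupies the upper word of the tagged value.
void SmiUntagSketch(MacroAssembler* masm, Register dst, Register src) {
  // Arithmetic shift right by 32 discards the tag half and sign-extends the
  // integer payload.
  masm->dsra32(dst, src, 0);
}
```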
| 617 namespace { | 617 namespace { |
| (...skipping 71 matching lines...) |
| 689 __ Jump(i.InputRegister(0)); | 689 __ Jump(i.InputRegister(0)); |
| 690 frame_access_state()->ClearSPDelta(); | 690 frame_access_state()->ClearSPDelta(); |
| 691 frame_access_state()->SetFrameAccessToDefault(); | 691 frame_access_state()->SetFrameAccessToDefault(); |
| 692 break; | 692 break; |
| 693 } | 693 } |
| 694 case kArchCallJSFunction: { | 694 case kArchCallJSFunction: { |
| 695 EnsureSpaceForLazyDeopt(); | 695 EnsureSpaceForLazyDeopt(); |
| 696 Register func = i.InputRegister(0); | 696 Register func = i.InputRegister(0); |
| 697 if (FLAG_debug_code) { | 697 if (FLAG_debug_code) { |
| 698 // Check the function's context matches the context argument. | 698 // Check the function's context matches the context argument. |
| 699 __ ld(kScratchReg, FieldMemOperand(func, JSFunction::kContextOffset)); | 699 __ Ld(kScratchReg, FieldMemOperand(func, JSFunction::kContextOffset)); |
| 700 __ Assert(eq, kWrongFunctionContext, cp, Operand(kScratchReg)); | 700 __ Assert(eq, kWrongFunctionContext, cp, Operand(kScratchReg)); |
| 701 } | 701 } |
| 702 __ ld(at, FieldMemOperand(func, JSFunction::kCodeEntryOffset)); | 702 __ Ld(at, FieldMemOperand(func, JSFunction::kCodeEntryOffset)); |
| 703 __ Call(at); | 703 __ Call(at); |
| 704 RecordCallPosition(instr); | 704 RecordCallPosition(instr); |
| 705 frame_access_state()->ClearSPDelta(); | 705 frame_access_state()->ClearSPDelta(); |
| 706 break; | 706 break; |
| 707 } | 707 } |
| 708 case kArchTailCallJSFunctionFromJSFunction: { | 708 case kArchTailCallJSFunctionFromJSFunction: { |
| 709 Register func = i.InputRegister(0); | 709 Register func = i.InputRegister(0); |
| 710 if (FLAG_debug_code) { | 710 if (FLAG_debug_code) { |
| 711 // Check the function's context matches the context argument. | 711 // Check the function's context matches the context argument. |
| 712 __ ld(kScratchReg, FieldMemOperand(func, JSFunction::kContextOffset)); | 712 __ Ld(kScratchReg, FieldMemOperand(func, JSFunction::kContextOffset)); |
| 713 __ Assert(eq, kWrongFunctionContext, cp, Operand(kScratchReg)); | 713 __ Assert(eq, kWrongFunctionContext, cp, Operand(kScratchReg)); |
| 714 } | 714 } |
| 715 AssemblePopArgumentsAdaptorFrame(kJavaScriptCallArgCountRegister, | 715 AssemblePopArgumentsAdaptorFrame(kJavaScriptCallArgCountRegister, |
| 716 i.TempRegister(0), i.TempRegister(1), | 716 i.TempRegister(0), i.TempRegister(1), |
| 717 i.TempRegister(2)); | 717 i.TempRegister(2)); |
| 718 __ ld(at, FieldMemOperand(func, JSFunction::kCodeEntryOffset)); | 718 __ Ld(at, FieldMemOperand(func, JSFunction::kCodeEntryOffset)); |
| 719 __ Jump(at); | 719 __ Jump(at); |
| 720 frame_access_state()->ClearSPDelta(); | 720 frame_access_state()->ClearSPDelta(); |
| 721 frame_access_state()->SetFrameAccessToDefault(); | 721 frame_access_state()->SetFrameAccessToDefault(); |
| 722 break; | 722 break; |
| 723 } | 723 } |
| 724 case kArchPrepareCallCFunction: { | 724 case kArchPrepareCallCFunction: { |
| 725 int const num_parameters = MiscField::decode(instr->opcode()); | 725 int const num_parameters = MiscField::decode(instr->opcode()); |
| 726 __ PrepareCallCFunction(num_parameters, kScratchReg); | 726 __ PrepareCallCFunction(num_parameters, kScratchReg); |
| 727 // Frame alignment requires using FP-relative frame addressing. | 727 // Frame alignment requires using FP-relative frame addressing. |
| 728 frame_access_state()->SetFrameAccessToFP(); | 728 frame_access_state()->SetFrameAccessToFP(); |
| (...skipping 48 matching lines...) |
| 777 AssembleReturn(instr->InputAt(0)); | 777 AssembleReturn(instr->InputAt(0)); |
| 778 break; | 778 break; |
| 779 case kArchStackPointer: | 779 case kArchStackPointer: |
| 780 __ mov(i.OutputRegister(), sp); | 780 __ mov(i.OutputRegister(), sp); |
| 781 break; | 781 break; |
| 782 case kArchFramePointer: | 782 case kArchFramePointer: |
| 783 __ mov(i.OutputRegister(), fp); | 783 __ mov(i.OutputRegister(), fp); |
| 784 break; | 784 break; |
| 785 case kArchParentFramePointer: | 785 case kArchParentFramePointer: |
| 786 if (frame_access_state()->has_frame()) { | 786 if (frame_access_state()->has_frame()) { |
| 787 __ ld(i.OutputRegister(), MemOperand(fp, 0)); | 787 __ Ld(i.OutputRegister(), MemOperand(fp, 0)); |
| 788 } else { | 788 } else { |
| 789 __ mov(i.OutputRegister(), fp); | 789 __ mov(i.OutputRegister(), fp); |
| 790 } | 790 } |
| 791 break; | 791 break; |
| 792 case kArchTruncateDoubleToI: | 792 case kArchTruncateDoubleToI: |
| 793 __ TruncateDoubleToI(i.OutputRegister(), i.InputDoubleRegister(0)); | 793 __ TruncateDoubleToI(i.OutputRegister(), i.InputDoubleRegister(0)); |
| 794 break; | 794 break; |
| 795 case kArchStoreWithWriteBarrier: { | 795 case kArchStoreWithWriteBarrier: { |
| 796 RecordWriteMode mode = | 796 RecordWriteMode mode = |
| 797 static_cast<RecordWriteMode>(MiscField::decode(instr->opcode())); | 797 static_cast<RecordWriteMode>(MiscField::decode(instr->opcode())); |
| 798 Register object = i.InputRegister(0); | 798 Register object = i.InputRegister(0); |
| 799 Register index = i.InputRegister(1); | 799 Register index = i.InputRegister(1); |
| 800 Register value = i.InputRegister(2); | 800 Register value = i.InputRegister(2); |
| 801 Register scratch0 = i.TempRegister(0); | 801 Register scratch0 = i.TempRegister(0); |
| 802 Register scratch1 = i.TempRegister(1); | 802 Register scratch1 = i.TempRegister(1); |
| 803 auto ool = new (zone()) OutOfLineRecordWrite(this, object, index, value, | 803 auto ool = new (zone()) OutOfLineRecordWrite(this, object, index, value, |
| 804 scratch0, scratch1, mode); | 804 scratch0, scratch1, mode); |
| 805 __ Daddu(at, object, index); | 805 __ Daddu(at, object, index); |
| 806 __ sd(value, MemOperand(at)); | 806 __ Sd(value, MemOperand(at)); |
| 807 __ CheckPageFlag(object, scratch0, | 807 __ CheckPageFlag(object, scratch0, |
| 808 MemoryChunk::kPointersFromHereAreInterestingMask, ne, | 808 MemoryChunk::kPointersFromHereAreInterestingMask, ne, |
| 809 ool->entry()); | 809 ool->entry()); |
| 810 __ bind(ool->exit()); | 810 __ bind(ool->exit()); |
| 811 break; | 811 break; |
| 812 } | 812 } |
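In the write-barrier case above, `Daddu` forms `object + index` in `at`, the value is stored through it, and `CheckPageFlag` then decides whether the out-of-line `RecordWrite` path is needed at all. The test works because heap pages are aligned: masking the object address yields the page (MemoryChunk) header, whose flags word says whether pointers written from this page are interesting to the GC. A rough sketch of that test follows; the constants and the helper name are placeholders, not the real MemoryChunk layout:

```cpp
// Placeholder constants standing in for the real MemoryChunk layout.
constexpr intptr_t kSketchPageAlignmentMask = (intptr_t{1} << 18) - 1;  // assumed 256 KB pages
constexpr int32_t kSketchFlagsOffset = 2 * kPointerSize;                // assumed flags slot

// Illustration of the kind of check CheckPageFlag emits (not the V8 code).
void CheckPageFlagSketch(MacroAssembler* masm, Register object,
                         Register scratch, int mask, Label* if_set) {
  masm->And(scratch, object, Operand(~kSketchPageAlignmentMask));  // page start
  masm->Ld(scratch, MemOperand(scratch, kSketchFlagsOffset));      // flags word
  masm->And(scratch, scratch, Operand(mask));
  masm->Branch(if_set, ne, scratch, Operand(zero_reg));
}
```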
| 813 case kArchStackSlot: { | 813 case kArchStackSlot: { |
| 814 FrameOffset offset = | 814 FrameOffset offset = |
| 815 frame_access_state()->GetFrameOffset(i.InputInt32(0)); | 815 frame_access_state()->GetFrameOffset(i.InputInt32(0)); |
| 816 __ Daddu(i.OutputRegister(), offset.from_stack_pointer() ? sp : fp, | 816 __ Daddu(i.OutputRegister(), offset.from_stack_pointer() ? sp : fp, |
| (...skipping 888 matching lines...) |
| 1705 break; | 1705 break; |
| 1706 // ... more basic instructions ... | 1706 // ... more basic instructions ... |
| 1707 | 1707 |
| 1708 case kMips64Seb: | 1708 case kMips64Seb: |
| 1709 __ seb(i.OutputRegister(), i.InputRegister(0)); | 1709 __ seb(i.OutputRegister(), i.InputRegister(0)); |
| 1710 break; | 1710 break; |
| 1711 case kMips64Seh: | 1711 case kMips64Seh: |
| 1712 __ seh(i.OutputRegister(), i.InputRegister(0)); | 1712 __ seh(i.OutputRegister(), i.InputRegister(0)); |
| 1713 break; | 1713 break; |
| 1714 case kMips64Lbu: | 1714 case kMips64Lbu: |
| 1715 __ lbu(i.OutputRegister(), i.MemoryOperand()); | 1715 __ Lbu(i.OutputRegister(), i.MemoryOperand()); |
| 1716 break; | 1716 break; |
| 1717 case kMips64Lb: | 1717 case kMips64Lb: |
| 1718 __ lb(i.OutputRegister(), i.MemoryOperand()); | 1718 __ Lb(i.OutputRegister(), i.MemoryOperand()); |
| 1719 break; | 1719 break; |
| 1720 case kMips64Sb: | 1720 case kMips64Sb: |
| 1721 __ sb(i.InputOrZeroRegister(2), i.MemoryOperand()); | 1721 __ Sb(i.InputOrZeroRegister(2), i.MemoryOperand()); |
| 1722 break; | 1722 break; |
| 1723 case kMips64Lhu: | 1723 case kMips64Lhu: |
| 1724 __ lhu(i.OutputRegister(), i.MemoryOperand()); | 1724 __ Lhu(i.OutputRegister(), i.MemoryOperand()); |
| 1725 break; | 1725 break; |
| 1726 case kMips64Ulhu: | 1726 case kMips64Ulhu: |
| 1727 __ Ulhu(i.OutputRegister(), i.MemoryOperand()); | 1727 __ Ulhu(i.OutputRegister(), i.MemoryOperand()); |
| 1728 break; | 1728 break; |
| 1729 case kMips64Lh: | 1729 case kMips64Lh: |
| 1730 __ lh(i.OutputRegister(), i.MemoryOperand()); | 1730 __ Lh(i.OutputRegister(), i.MemoryOperand()); |
| 1731 break; | 1731 break; |
| 1732 case kMips64Ulh: | 1732 case kMips64Ulh: |
| 1733 __ Ulh(i.OutputRegister(), i.MemoryOperand()); | 1733 __ Ulh(i.OutputRegister(), i.MemoryOperand()); |
| 1734 break; | 1734 break; |
| 1735 case kMips64Sh: | 1735 case kMips64Sh: |
| 1736 __ sh(i.InputOrZeroRegister(2), i.MemoryOperand()); | 1736 __ Sh(i.InputOrZeroRegister(2), i.MemoryOperand()); |
| 1737 break; | 1737 break; |
| 1738 case kMips64Ush: | 1738 case kMips64Ush: |
| 1739 __ Ush(i.InputOrZeroRegister(2), i.MemoryOperand(), kScratchReg); | 1739 __ Ush(i.InputOrZeroRegister(2), i.MemoryOperand(), kScratchReg); |
| 1740 break; | 1740 break; |
| 1741 case kMips64Lw: | 1741 case kMips64Lw: |
| 1742 __ lw(i.OutputRegister(), i.MemoryOperand()); | 1742 __ Lw(i.OutputRegister(), i.MemoryOperand()); |
| 1743 break; | 1743 break; |
| 1744 case kMips64Ulw: | 1744 case kMips64Ulw: |
| 1745 __ Ulw(i.OutputRegister(), i.MemoryOperand()); | 1745 __ Ulw(i.OutputRegister(), i.MemoryOperand()); |
| 1746 break; | 1746 break; |
| 1747 case kMips64Lwu: | 1747 case kMips64Lwu: |
| 1748 __ lwu(i.OutputRegister(), i.MemoryOperand()); | 1748 __ Lwu(i.OutputRegister(), i.MemoryOperand()); |
| 1749 break; | 1749 break; |
| 1750 case kMips64Ulwu: | 1750 case kMips64Ulwu: |
| 1751 __ Ulwu(i.OutputRegister(), i.MemoryOperand()); | 1751 __ Ulwu(i.OutputRegister(), i.MemoryOperand()); |
| 1752 break; | 1752 break; |
| 1753 case kMips64Ld: | 1753 case kMips64Ld: |
| 1754 __ ld(i.OutputRegister(), i.MemoryOperand()); | 1754 __ Ld(i.OutputRegister(), i.MemoryOperand()); |
| 1755 break; | 1755 break; |
| 1756 case kMips64Uld: | 1756 case kMips64Uld: |
| 1757 __ Uld(i.OutputRegister(), i.MemoryOperand()); | 1757 __ Uld(i.OutputRegister(), i.MemoryOperand()); |
| 1758 break; | 1758 break; |
| 1759 case kMips64Sw: | 1759 case kMips64Sw: |
| 1760 __ sw(i.InputOrZeroRegister(2), i.MemoryOperand()); | 1760 __ Sw(i.InputOrZeroRegister(2), i.MemoryOperand()); |
| 1761 break; | 1761 break; |
| 1762 case kMips64Usw: | 1762 case kMips64Usw: |
| 1763 __ Usw(i.InputOrZeroRegister(2), i.MemoryOperand()); | 1763 __ Usw(i.InputOrZeroRegister(2), i.MemoryOperand()); |
| 1764 break; | 1764 break; |
| 1765 case kMips64Sd: | 1765 case kMips64Sd: |
| 1766 __ sd(i.InputOrZeroRegister(2), i.MemoryOperand()); | 1766 __ Sd(i.InputOrZeroRegister(2), i.MemoryOperand()); |
| 1767 break; | 1767 break; |
| 1768 case kMips64Usd: | 1768 case kMips64Usd: |
| 1769 __ Usd(i.InputOrZeroRegister(2), i.MemoryOperand()); | 1769 __ Usd(i.InputOrZeroRegister(2), i.MemoryOperand()); |
| 1770 break; | 1770 break; |
| 1771 case kMips64Lwc1: { | 1771 case kMips64Lwc1: { |
| 1772 __ lwc1(i.OutputSingleRegister(), i.MemoryOperand()); | 1772 __ Lwc1(i.OutputSingleRegister(), i.MemoryOperand()); |
| 1773 break; | 1773 break; |
| 1774 } | 1774 } |
| 1775 case kMips64Ulwc1: { | 1775 case kMips64Ulwc1: { |
| 1776 __ Ulwc1(i.OutputSingleRegister(), i.MemoryOperand(), kScratchReg); | 1776 __ Ulwc1(i.OutputSingleRegister(), i.MemoryOperand(), kScratchReg); |
| 1777 break; | 1777 break; |
| 1778 } | 1778 } |
| 1779 case kMips64Swc1: { | 1779 case kMips64Swc1: { |
| 1780 size_t index = 0; | 1780 size_t index = 0; |
| 1781 MemOperand operand = i.MemoryOperand(&index); | 1781 MemOperand operand = i.MemoryOperand(&index); |
| 1782 FPURegister ft = i.InputOrZeroSingleRegister(index); | 1782 FPURegister ft = i.InputOrZeroSingleRegister(index); |
| 1783 if (ft.is(kDoubleRegZero) && !__ IsDoubleZeroRegSet()) { | 1783 if (ft.is(kDoubleRegZero) && !__ IsDoubleZeroRegSet()) { |
| 1784 __ Move(kDoubleRegZero, 0.0); | 1784 __ Move(kDoubleRegZero, 0.0); |
| 1785 } | 1785 } |
| 1786 __ swc1(ft, operand); | 1786 __ Swc1(ft, operand); |
| 1787 break; | 1787 break; |
| 1788 } | 1788 } |
| 1789 case kMips64Uswc1: { | 1789 case kMips64Uswc1: { |
| 1790 size_t index = 0; | 1790 size_t index = 0; |
| 1791 MemOperand operand = i.MemoryOperand(&index); | 1791 MemOperand operand = i.MemoryOperand(&index); |
| 1792 FPURegister ft = i.InputOrZeroSingleRegister(index); | 1792 FPURegister ft = i.InputOrZeroSingleRegister(index); |
| 1793 if (ft.is(kDoubleRegZero) && !__ IsDoubleZeroRegSet()) { | 1793 if (ft.is(kDoubleRegZero) && !__ IsDoubleZeroRegSet()) { |
| 1794 __ Move(kDoubleRegZero, 0.0); | 1794 __ Move(kDoubleRegZero, 0.0); |
| 1795 } | 1795 } |
| 1796 __ Uswc1(ft, operand, kScratchReg); | 1796 __ Uswc1(ft, operand, kScratchReg); |
| 1797 break; | 1797 break; |
| 1798 } | 1798 } |
| 1799 case kMips64Ldc1: | 1799 case kMips64Ldc1: |
| 1800 __ ldc1(i.OutputDoubleRegister(), i.MemoryOperand()); | 1800 __ Ldc1(i.OutputDoubleRegister(), i.MemoryOperand()); |
| 1801 break; | 1801 break; |
| 1802 case kMips64Uldc1: | 1802 case kMips64Uldc1: |
| 1803 __ Uldc1(i.OutputDoubleRegister(), i.MemoryOperand(), kScratchReg); | 1803 __ Uldc1(i.OutputDoubleRegister(), i.MemoryOperand(), kScratchReg); |
| 1804 break; | 1804 break; |
| 1805 case kMips64Sdc1: { | 1805 case kMips64Sdc1: { |
| 1806 FPURegister ft = i.InputOrZeroDoubleRegister(2); | 1806 FPURegister ft = i.InputOrZeroDoubleRegister(2); |
| 1807 if (ft.is(kDoubleRegZero) && !__ IsDoubleZeroRegSet()) { | 1807 if (ft.is(kDoubleRegZero) && !__ IsDoubleZeroRegSet()) { |
| 1808 __ Move(kDoubleRegZero, 0.0); | 1808 __ Move(kDoubleRegZero, 0.0); |
| 1809 } | 1809 } |
| 1810 __ sdc1(ft, i.MemoryOperand()); | 1810 __ Sdc1(ft, i.MemoryOperand()); |
| 1811 break; | 1811 break; |
| 1812 } | 1812 } |
| 1813 case kMips64Usdc1: { | 1813 case kMips64Usdc1: { |
| 1814 FPURegister ft = i.InputOrZeroDoubleRegister(2); | 1814 FPURegister ft = i.InputOrZeroDoubleRegister(2); |
| 1815 if (ft.is(kDoubleRegZero) && !__ IsDoubleZeroRegSet()) { | 1815 if (ft.is(kDoubleRegZero) && !__ IsDoubleZeroRegSet()) { |
| 1816 __ Move(kDoubleRegZero, 0.0); | 1816 __ Move(kDoubleRegZero, 0.0); |
| 1817 } | 1817 } |
| 1818 __ Usdc1(ft, i.MemoryOperand(), kScratchReg); | 1818 __ Usdc1(ft, i.MemoryOperand(), kScratchReg); |
| 1819 break; | 1819 break; |
| 1820 } | 1820 } |
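Two patterns in the load/store block above are worth noting. First, stores of the literal 0.0 go through `kDoubleRegZero`, which is materialized at most once per code object (`IsDoubleZeroRegSet()` guards the `Move`). Second, the `U`-prefixed helpers (`Ulw`, `Ulhu`, `Uld`, `Ulwc1`, `Usdc1`, ...) accept addresses that may not be naturally aligned. Below is a hypothetical sketch of an unaligned word load, assuming a little-endian target, where MIPSr6 handles misalignment in the ordinary load and earlier revisions need the `lwl`/`lwr` pair; this is not the actual V8 helper:

```cpp
// Illustration only: unaligned 32-bit load (little-endian byte order assumed).
void UlwSketch(MacroAssembler* masm, Register rd, const MemOperand& rs) {
  if (kArchVariant == kMips64r6) {
    masm->Lw(rd, rs);  // r6 loads tolerate unaligned addresses directly.
  } else {
    // lwr fills the low bytes, lwl the high bytes of the straddled word.
    masm->lwr(rd, rs);
    masm->lwl(rd, MemOperand(rs.rm(), rs.offset() + 3));
  }
}
```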
| 1821 case kMips64Push: | 1821 case kMips64Push: |
| 1822 if (instr->InputAt(0)->IsFPRegister()) { | 1822 if (instr->InputAt(0)->IsFPRegister()) { |
| 1823 __ sdc1(i.InputDoubleRegister(0), MemOperand(sp, -kDoubleSize)); | 1823 __ Sdc1(i.InputDoubleRegister(0), MemOperand(sp, -kDoubleSize)); |
| 1824 __ Subu(sp, sp, Operand(kDoubleSize)); | 1824 __ Subu(sp, sp, Operand(kDoubleSize)); |
| 1825 frame_access_state()->IncreaseSPDelta(kDoubleSize / kPointerSize); | 1825 frame_access_state()->IncreaseSPDelta(kDoubleSize / kPointerSize); |
| 1826 } else { | 1826 } else { |
| 1827 __ Push(i.InputRegister(0)); | 1827 __ Push(i.InputRegister(0)); |
| 1828 frame_access_state()->IncreaseSPDelta(1); | 1828 frame_access_state()->IncreaseSPDelta(1); |
| 1829 } | 1829 } |
| 1830 break; | 1830 break; |
| 1831 case kMips64StackClaim: { | 1831 case kMips64StackClaim: { |
| 1832 __ Dsubu(sp, sp, Operand(i.InputInt32(0))); | 1832 __ Dsubu(sp, sp, Operand(i.InputInt32(0))); |
| 1833 frame_access_state()->IncreaseSPDelta(i.InputInt32(0) / kPointerSize); | 1833 frame_access_state()->IncreaseSPDelta(i.InputInt32(0) / kPointerSize); |
| 1834 break; | 1834 break; |
| 1835 } | 1835 } |
| 1836 case kMips64StoreToStackSlot: { | 1836 case kMips64StoreToStackSlot: { |
| 1837 if (instr->InputAt(0)->IsFPRegister()) { | 1837 if (instr->InputAt(0)->IsFPRegister()) { |
| 1838 __ sdc1(i.InputDoubleRegister(0), MemOperand(sp, i.InputInt32(1))); | 1838 __ Sdc1(i.InputDoubleRegister(0), MemOperand(sp, i.InputInt32(1))); |
| 1839 } else { | 1839 } else { |
| 1840 __ sd(i.InputRegister(0), MemOperand(sp, i.InputInt32(1))); | 1840 __ Sd(i.InputRegister(0), MemOperand(sp, i.InputInt32(1))); |
| 1841 } | 1841 } |
| 1842 break; | 1842 break; |
| 1843 } | 1843 } |
| 1844 case kMips64ByteSwap64: { | 1844 case kMips64ByteSwap64: { |
| 1845 __ ByteSwapSigned(i.OutputRegister(0), i.InputRegister(0), 8); | 1845 __ ByteSwapSigned(i.OutputRegister(0), i.InputRegister(0), 8); |
| 1846 break; | 1846 break; |
| 1847 } | 1847 } |
| 1848 case kMips64ByteSwap32: { | 1848 case kMips64ByteSwap32: { |
| 1849 __ ByteSwapUnsigned(i.OutputRegister(0), i.InputRegister(0), 4); | 1849 __ ByteSwapUnsigned(i.OutputRegister(0), i.InputRegister(0), 4); |
| 1850 __ dsrl32(i.OutputRegister(0), i.OutputRegister(0), 0); | 1850 __ dsrl32(i.OutputRegister(0), i.OutputRegister(0), 0); |
| (...skipping 906 matching lines...) |
| 2757 InstructionOperand* destination) { | 2757 InstructionOperand* destination) { |
| 2758 MipsOperandConverter g(this, nullptr); | 2758 MipsOperandConverter g(this, nullptr); |
| 2759 // Dispatch on the source and destination operand kinds. Not all | 2759 // Dispatch on the source and destination operand kinds. Not all |
| 2760 // combinations are possible. | 2760 // combinations are possible. |
| 2761 if (source->IsRegister()) { | 2761 if (source->IsRegister()) { |
| 2762 DCHECK(destination->IsRegister() || destination->IsStackSlot()); | 2762 DCHECK(destination->IsRegister() || destination->IsStackSlot()); |
| 2763 Register src = g.ToRegister(source); | 2763 Register src = g.ToRegister(source); |
| 2764 if (destination->IsRegister()) { | 2764 if (destination->IsRegister()) { |
| 2765 __ mov(g.ToRegister(destination), src); | 2765 __ mov(g.ToRegister(destination), src); |
| 2766 } else { | 2766 } else { |
| 2767 __ sd(src, g.ToMemOperand(destination)); | 2767 __ Sd(src, g.ToMemOperand(destination)); |
| 2768 } | 2768 } |
| 2769 } else if (source->IsStackSlot()) { | 2769 } else if (source->IsStackSlot()) { |
| 2770 DCHECK(destination->IsRegister() || destination->IsStackSlot()); | 2770 DCHECK(destination->IsRegister() || destination->IsStackSlot()); |
| 2771 MemOperand src = g.ToMemOperand(source); | 2771 MemOperand src = g.ToMemOperand(source); |
| 2772 if (destination->IsRegister()) { | 2772 if (destination->IsRegister()) { |
| 2773 __ ld(g.ToRegister(destination), src); | 2773 __ Ld(g.ToRegister(destination), src); |
| 2774 } else { | 2774 } else { |
| 2775 Register temp = kScratchReg; | 2775 Register temp = kScratchReg; |
| 2776 __ ld(temp, src); | 2776 __ Ld(temp, src); |
| 2777 __ sd(temp, g.ToMemOperand(destination)); | 2777 __ Sd(temp, g.ToMemOperand(destination)); |
| 2778 } | 2778 } |
| 2779 } else if (source->IsConstant()) { | 2779 } else if (source->IsConstant()) { |
| 2780 Constant src = g.ToConstant(source); | 2780 Constant src = g.ToConstant(source); |
| 2781 if (destination->IsRegister() || destination->IsStackSlot()) { | 2781 if (destination->IsRegister() || destination->IsStackSlot()) { |
| 2782 Register dst = | 2782 Register dst = |
| 2783 destination->IsRegister() ? g.ToRegister(destination) : kScratchReg; | 2783 destination->IsRegister() ? g.ToRegister(destination) : kScratchReg; |
| 2784 switch (src.type()) { | 2784 switch (src.type()) { |
| 2785 case Constant::kInt32: | 2785 case Constant::kInt32: |
| 2786 if (RelocInfo::IsWasmSizeReference(src.rmode())) { | 2786 if (RelocInfo::IsWasmSizeReference(src.rmode())) { |
| 2787 __ li(dst, Operand(src.ToInt32(), src.rmode())); | 2787 __ li(dst, Operand(src.ToInt32(), src.rmode())); |
| (...skipping 25 matching lines...) |
| 2813 __ LoadRoot(dst, index); | 2813 __ LoadRoot(dst, index); |
| 2814 } else { | 2814 } else { |
| 2815 __ li(dst, src_object); | 2815 __ li(dst, src_object); |
| 2816 } | 2816 } |
| 2817 break; | 2817 break; |
| 2818 } | 2818 } |
| 2819 case Constant::kRpoNumber: | 2819 case Constant::kRpoNumber: |
| 2820 UNREACHABLE(); // TODO(titzer): loading RPO numbers on mips64. | 2820 UNREACHABLE(); // TODO(titzer): loading RPO numbers on mips64. |
| 2821 break; | 2821 break; |
| 2822 } | 2822 } |
| 2823 if (destination->IsStackSlot()) __ sd(dst, g.ToMemOperand(destination)); | 2823 if (destination->IsStackSlot()) __ Sd(dst, g.ToMemOperand(destination)); |
| 2824 } else if (src.type() == Constant::kFloat32) { | 2824 } else if (src.type() == Constant::kFloat32) { |
| 2825 if (destination->IsFPStackSlot()) { | 2825 if (destination->IsFPStackSlot()) { |
| 2826 MemOperand dst = g.ToMemOperand(destination); | 2826 MemOperand dst = g.ToMemOperand(destination); |
| 2827 if (bit_cast<int32_t>(src.ToFloat32()) == 0) { | 2827 if (bit_cast<int32_t>(src.ToFloat32()) == 0) { |
| 2828 __ sw(zero_reg, dst); | 2828 __ Sw(zero_reg, dst); |
| 2829 } else { | 2829 } else { |
| 2830 __ li(at, Operand(bit_cast<int32_t>(src.ToFloat32()))); | 2830 __ li(at, Operand(bit_cast<int32_t>(src.ToFloat32()))); |
| 2831 __ sw(at, dst); | 2831 __ Sw(at, dst); |
| 2832 } | 2832 } |
| 2833 } else { | 2833 } else { |
| 2834 DCHECK(destination->IsFPRegister()); | 2834 DCHECK(destination->IsFPRegister()); |
| 2835 FloatRegister dst = g.ToSingleRegister(destination); | 2835 FloatRegister dst = g.ToSingleRegister(destination); |
| 2836 __ Move(dst, src.ToFloat32()); | 2836 __ Move(dst, src.ToFloat32()); |
| 2837 } | 2837 } |
| 2838 } else { | 2838 } else { |
| 2839 DCHECK_EQ(Constant::kFloat64, src.type()); | 2839 DCHECK_EQ(Constant::kFloat64, src.type()); |
| 2840 DoubleRegister dst = destination->IsFPRegister() | 2840 DoubleRegister dst = destination->IsFPRegister() |
| 2841 ? g.ToDoubleRegister(destination) | 2841 ? g.ToDoubleRegister(destination) |
| 2842 : kScratchDoubleReg; | 2842 : kScratchDoubleReg; |
| 2843 __ Move(dst, src.ToFloat64()); | 2843 __ Move(dst, src.ToFloat64()); |
| 2844 if (destination->IsFPStackSlot()) { | 2844 if (destination->IsFPStackSlot()) { |
| 2845 __ sdc1(dst, g.ToMemOperand(destination)); | 2845 __ Sdc1(dst, g.ToMemOperand(destination)); |
| 2846 } | 2846 } |
| 2847 } | 2847 } |
| 2848 } else if (source->IsFPRegister()) { | 2848 } else if (source->IsFPRegister()) { |
| 2849 FPURegister src = g.ToDoubleRegister(source); | 2849 FPURegister src = g.ToDoubleRegister(source); |
| 2850 if (destination->IsFPRegister()) { | 2850 if (destination->IsFPRegister()) { |
| 2851 FPURegister dst = g.ToDoubleRegister(destination); | 2851 FPURegister dst = g.ToDoubleRegister(destination); |
| 2852 __ Move(dst, src); | 2852 __ Move(dst, src); |
| 2853 } else { | 2853 } else { |
| 2854 DCHECK(destination->IsFPStackSlot()); | 2854 DCHECK(destination->IsFPStackSlot()); |
| 2855 __ sdc1(src, g.ToMemOperand(destination)); | 2855 __ Sdc1(src, g.ToMemOperand(destination)); |
| 2856 } | 2856 } |
| 2857 } else if (source->IsFPStackSlot()) { | 2857 } else if (source->IsFPStackSlot()) { |
| 2858 DCHECK(destination->IsFPRegister() || destination->IsFPStackSlot()); | 2858 DCHECK(destination->IsFPRegister() || destination->IsFPStackSlot()); |
| 2859 MemOperand src = g.ToMemOperand(source); | 2859 MemOperand src = g.ToMemOperand(source); |
| 2860 if (destination->IsFPRegister()) { | 2860 if (destination->IsFPRegister()) { |
| 2861 __ ldc1(g.ToDoubleRegister(destination), src); | 2861 __ Ldc1(g.ToDoubleRegister(destination), src); |
| 2862 } else { | 2862 } else { |
| 2863 FPURegister temp = kScratchDoubleReg; | 2863 FPURegister temp = kScratchDoubleReg; |
| 2864 __ ldc1(temp, src); | 2864 __ Ldc1(temp, src); |
| 2865 __ sdc1(temp, g.ToMemOperand(destination)); | 2865 __ Sdc1(temp, g.ToMemOperand(destination)); |
| 2866 } | 2866 } |
| 2867 } else { | 2867 } else { |
| 2868 UNREACHABLE(); | 2868 UNREACHABLE(); |
| 2869 } | 2869 } |
| 2870 } | 2870 } |
| 2871 | 2871 |
| 2872 | 2872 |
| 2873 void CodeGenerator::AssembleSwap(InstructionOperand* source, | 2873 void CodeGenerator::AssembleSwap(InstructionOperand* source, |
| 2874 InstructionOperand* destination) { | 2874 InstructionOperand* destination) { |
| 2875 MipsOperandConverter g(this, nullptr); | 2875 MipsOperandConverter g(this, nullptr); |
| 2876 // Dispatch on the source and destination operand kinds. Not all | 2876 // Dispatch on the source and destination operand kinds. Not all |
| 2877 // combinations are possible. | 2877 // combinations are possible. |
| 2878 if (source->IsRegister()) { | 2878 if (source->IsRegister()) { |
| 2879 // Register-register. | 2879 // Register-register. |
| 2880 Register temp = kScratchReg; | 2880 Register temp = kScratchReg; |
| 2881 Register src = g.ToRegister(source); | 2881 Register src = g.ToRegister(source); |
| 2882 if (destination->IsRegister()) { | 2882 if (destination->IsRegister()) { |
| 2883 Register dst = g.ToRegister(destination); | 2883 Register dst = g.ToRegister(destination); |
| 2884 __ Move(temp, src); | 2884 __ Move(temp, src); |
| 2885 __ Move(src, dst); | 2885 __ Move(src, dst); |
| 2886 __ Move(dst, temp); | 2886 __ Move(dst, temp); |
| 2887 } else { | 2887 } else { |
| 2888 DCHECK(destination->IsStackSlot()); | 2888 DCHECK(destination->IsStackSlot()); |
| 2889 MemOperand dst = g.ToMemOperand(destination); | 2889 MemOperand dst = g.ToMemOperand(destination); |
| 2890 __ mov(temp, src); | 2890 __ mov(temp, src); |
| 2891 __ ld(src, dst); | 2891 __ Ld(src, dst); |
| 2892 __ sd(temp, dst); | 2892 __ Sd(temp, dst); |
| 2893 } | 2893 } |
| 2894 } else if (source->IsStackSlot()) { | 2894 } else if (source->IsStackSlot()) { |
| 2895 DCHECK(destination->IsStackSlot()); | 2895 DCHECK(destination->IsStackSlot()); |
| 2896 Register temp_0 = kScratchReg; | 2896 Register temp_0 = kScratchReg; |
| 2897 Register temp_1 = kScratchReg2; | 2897 Register temp_1 = kScratchReg2; |
| 2898 MemOperand src = g.ToMemOperand(source); | 2898 MemOperand src = g.ToMemOperand(source); |
| 2899 MemOperand dst = g.ToMemOperand(destination); | 2899 MemOperand dst = g.ToMemOperand(destination); |
| 2900 __ ld(temp_0, src); | 2900 __ Ld(temp_0, src); |
| 2901 __ ld(temp_1, dst); | 2901 __ Ld(temp_1, dst); |
| 2902 __ sd(temp_0, dst); | 2902 __ Sd(temp_0, dst); |
| 2903 __ sd(temp_1, src); | 2903 __ Sd(temp_1, src); |
| 2904 } else if (source->IsFPRegister()) { | 2904 } else if (source->IsFPRegister()) { |
| 2905 FPURegister temp = kScratchDoubleReg; | 2905 FPURegister temp = kScratchDoubleReg; |
| 2906 FPURegister src = g.ToDoubleRegister(source); | 2906 FPURegister src = g.ToDoubleRegister(source); |
| 2907 if (destination->IsFPRegister()) { | 2907 if (destination->IsFPRegister()) { |
| 2908 FPURegister dst = g.ToDoubleRegister(destination); | 2908 FPURegister dst = g.ToDoubleRegister(destination); |
| 2909 __ Move(temp, src); | 2909 __ Move(temp, src); |
| 2910 __ Move(src, dst); | 2910 __ Move(src, dst); |
| 2911 __ Move(dst, temp); | 2911 __ Move(dst, temp); |
| 2912 } else { | 2912 } else { |
| 2913 DCHECK(destination->IsFPStackSlot()); | 2913 DCHECK(destination->IsFPStackSlot()); |
| 2914 MemOperand dst = g.ToMemOperand(destination); | 2914 MemOperand dst = g.ToMemOperand(destination); |
| 2915 __ Move(temp, src); | 2915 __ Move(temp, src); |
| 2916 __ ldc1(src, dst); | 2916 __ Ldc1(src, dst); |
| 2917 __ sdc1(temp, dst); | 2917 __ Sdc1(temp, dst); |
| 2918 } | 2918 } |
| 2919 } else if (source->IsFPStackSlot()) { | 2919 } else if (source->IsFPStackSlot()) { |
| 2920 DCHECK(destination->IsFPStackSlot()); | 2920 DCHECK(destination->IsFPStackSlot()); |
| 2921 Register temp_0 = kScratchReg; | 2921 Register temp_0 = kScratchReg; |
| 2922 FPURegister temp_1 = kScratchDoubleReg; | 2922 FPURegister temp_1 = kScratchDoubleReg; |
| 2923 MemOperand src0 = g.ToMemOperand(source); | 2923 MemOperand src0 = g.ToMemOperand(source); |
| 2924 MemOperand src1(src0.rm(), src0.offset() + kIntSize); | 2924 MemOperand src1(src0.rm(), src0.offset() + kIntSize); |
| 2925 MemOperand dst0 = g.ToMemOperand(destination); | 2925 MemOperand dst0 = g.ToMemOperand(destination); |
| 2926 MemOperand dst1(dst0.rm(), dst0.offset() + kIntSize); | 2926 MemOperand dst1(dst0.rm(), dst0.offset() + kIntSize); |
| 2927 __ ldc1(temp_1, dst0); // Save destination in temp_1. | 2927 __ Ldc1(temp_1, dst0); // Save destination in temp_1. |
| 2928 __ lw(temp_0, src0); // Then use temp_0 to copy source to destination. | 2928 __ Lw(temp_0, src0); // Then use temp_0 to copy source to destination. |
| 2929 __ sw(temp_0, dst0); | 2929 __ Sw(temp_0, dst0); |
| 2930 __ lw(temp_0, src1); | 2930 __ Lw(temp_0, src1); |
| 2931 __ sw(temp_0, dst1); | 2931 __ Sw(temp_0, dst1); |
| 2932 __ sdc1(temp_1, src0); | 2932 __ Sdc1(temp_1, src0); |
| 2933 } else { | 2933 } else { |
| 2934 // No other combinations are possible. | 2934 // No other combinations are possible. |
| 2935 UNREACHABLE(); | 2935 UNREACHABLE(); |
| 2936 } | 2936 } |
| 2937 } | 2937 } |
| 2938 | 2938 |
| 2939 | 2939 |
| 2940 void CodeGenerator::AssembleJumpTable(Label** targets, size_t target_count) { | 2940 void CodeGenerator::AssembleJumpTable(Label** targets, size_t target_count) { |
| 2941 // On 64-bit MIPS we emit the jump tables inline. | 2941 // On 64-bit MIPS we emit the jump tables inline. |
| 2942 UNREACHABLE(); | 2942 UNREACHABLE(); |
| (...skipping 20 matching lines...) |
| 2963 padding_size -= v8::internal::Assembler::kInstrSize; | 2963 padding_size -= v8::internal::Assembler::kInstrSize; |
| 2964 } | 2964 } |
| 2965 } | 2965 } |
| 2966 } | 2966 } |
| 2967 | 2967 |
| 2968 #undef __ | 2968 #undef __ |
| 2969 | 2969 |
| 2970 } // namespace compiler | 2970 } // namespace compiler |
| 2971 } // namespace internal | 2971 } // namespace internal |
| 2972 } // namespace v8 | 2972 } // namespace v8 |