| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/crankshaft/arm64/lithium-codegen-arm64.h" | 5 #include "src/crankshaft/arm64/lithium-codegen-arm64.h" |
| 6 | 6 |
| 7 #include "src/arm64/frames-arm64.h" | 7 #include "src/arm64/frames-arm64.h" |
| 8 #include "src/base/bits.h" | 8 #include "src/base/bits.h" |
| 9 #include "src/code-factory.h" | 9 #include "src/code-factory.h" |
| 10 #include "src/code-stubs.h" | 10 #include "src/code-stubs.h" |
| (...skipping 21 matching lines...) |
| 32 virtual void AfterCall() const { | 32 virtual void AfterCall() const { |
| 33 codegen_->RecordSafepoint(pointers_, deopt_mode_); | 33 codegen_->RecordSafepoint(pointers_, deopt_mode_); |
| 34 } | 34 } |
| 35 | 35 |
| 36 private: | 36 private: |
| 37 LCodeGen* codegen_; | 37 LCodeGen* codegen_; |
| 38 LPointerMap* pointers_; | 38 LPointerMap* pointers_; |
| 39 Safepoint::DeoptMode deopt_mode_; | 39 Safepoint::DeoptMode deopt_mode_; |
| 40 }; | 40 }; |
| 41 | 41 |
| 42 LCodeGen::PushSafepointRegistersScope::PushSafepointRegistersScope( |
| 43 LCodeGen* codegen) |
| 44 : codegen_(codegen) { |
| 45 DCHECK(codegen_->info()->is_calling()); |
| 46 DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kSimple); |
| 47 codegen_->expected_safepoint_kind_ = Safepoint::kWithRegisters; |
| 48 |
| 49 UseScratchRegisterScope temps(codegen_->masm_); |
| 50 // Preserve the value of lr which must be saved on the stack (the call to |
| 51 // the stub will clobber it). |
| 52 Register to_be_pushed_lr = |
| 53 temps.UnsafeAcquire(StoreRegistersStateStub::to_be_pushed_lr()); |
| 54 codegen_->masm_->Mov(to_be_pushed_lr, lr); |
| 55 StoreRegistersStateStub stub(codegen_->isolate()); |
| 56 codegen_->masm_->CallStub(&stub); |
| 57 } |
| 58 |
| 59 LCodeGen::PushSafepointRegistersScope::~PushSafepointRegistersScope() { |
| 60 DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kWithRegisters); |
| 61 RestoreRegistersStateStub stub(codegen_->isolate()); |
| 62 codegen_->masm_->CallStub(&stub); |
| 63 codegen_->expected_safepoint_kind_ = Safepoint::kSimple; |
| 64 } |
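
The constructor/destructor pair added above is an RAII guard: the constructor checks that the expected safepoint kind is kSimple, switches it to kWithRegisters, preserves lr in a scratch register (the stub call clobbers it), and calls StoreRegistersStateStub; the destructor calls RestoreRegistersStateStub and switches the kind back. A minimal standalone sketch of that pattern follows, with illustrative stand-in names rather than the real V8 types:

    // Standalone sketch of the RAII save/restore pattern the scope implements.
    // All names here are illustrative stand-ins, not the real V8 API.
    #include <cassert>

    enum class SafepointKind { kSimple, kWithRegisters };

    struct CodegenSketch {
      SafepointKind expected_safepoint_kind = SafepointKind::kSimple;
      void EmitSaveRegistersStubCall() { /* CallStub(StoreRegistersStateStub) */ }
      void EmitRestoreRegistersStubCall() { /* CallStub(RestoreRegistersStateStub) */ }
    };

    class PushSafepointRegistersScopeSketch {
     public:
      explicit PushSafepointRegistersScopeSketch(CodegenSketch* codegen)
          : codegen_(codegen) {
        // Mirrors the DCHECKs and the kind switch in the constructor above.
        assert(codegen_->expected_safepoint_kind == SafepointKind::kSimple);
        codegen_->expected_safepoint_kind = SafepointKind::kWithRegisters;
        codegen_->EmitSaveRegistersStubCall();
      }
      ~PushSafepointRegistersScopeSketch() {
        assert(codegen_->expected_safepoint_kind == SafepointKind::kWithRegisters);
        codegen_->EmitRestoreRegistersStubCall();
        codegen_->expected_safepoint_kind = SafepointKind::kSimple;
      }

     private:
      CodegenSketch* codegen_;
    };

Tying both stub calls and the expected_safepoint_kind_ bookkeeping to one object's lifetime keeps call sites from having to pair the save and restore by hand.
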
| 42 | 65 |
| 43 #define __ masm()-> | 66 #define __ masm()-> |
| 44 | 67 |
| 45 // Emit code to branch if the given condition holds. | 68 // Emit code to branch if the given condition holds. |
| 46 // The code generated here doesn't modify the flags and they must have | 69 // The code generated here doesn't modify the flags and they must have |
| 47 // been set by some prior instructions. | 70 // been set by some prior instructions. |
| 48 // | 71 // |
| 49 // The EmitInverted function simply inverts the condition. | 72 // The EmitInverted function simply inverts the condition. |
| 50 class BranchOnCondition : public BranchGenerator { | 73 class BranchOnCondition : public BranchGenerator { |
| 51 public: | 74 public: |
| (...skipping 1705 matching lines...) |
| 1757 __ Ldr(double_scratch(), FieldMemOperand(value, | 1780 __ Ldr(double_scratch(), FieldMemOperand(value, |
| 1758 HeapNumber::kValueOffset)); | 1781 HeapNumber::kValueOffset)); |
| 1759 // Test the double value. Zero and NaN are false. | 1782 // Test the double value. Zero and NaN are false. |
| 1760 EmitBranchIfNonZeroNumber(instr, double_scratch(), double_scratch()); | 1783 EmitBranchIfNonZeroNumber(instr, double_scratch(), double_scratch()); |
| 1761 } else if (type.IsString()) { | 1784 } else if (type.IsString()) { |
| 1762 DCHECK(!info()->IsStub()); | 1785 DCHECK(!info()->IsStub()); |
| 1763 Register temp = ToRegister(instr->temp1()); | 1786 Register temp = ToRegister(instr->temp1()); |
| 1764 __ Ldr(temp, FieldMemOperand(value, String::kLengthOffset)); | 1787 __ Ldr(temp, FieldMemOperand(value, String::kLengthOffset)); |
| 1765 EmitCompareAndBranch(instr, ne, temp, 0); | 1788 EmitCompareAndBranch(instr, ne, temp, 0); |
| 1766 } else { | 1789 } else { |
| 1767 ToBooleanICStub::Types expected = | 1790 ToBooleanHints expected = instr->hydrogen()->expected_input_types(); |
| 1768 instr->hydrogen()->expected_input_types(); | |
| 1769 // Avoid deopts in the case where we've never executed this path before. | 1791 // Avoid deopts in the case where we've never executed this path before. |
| 1770 if (expected.IsEmpty()) expected = ToBooleanICStub::Types::Generic(); | 1792 if (expected == ToBooleanHint::kNone) expected = ToBooleanHint::kAny; |
| 1771 | 1793 |
| 1772 if (expected.Contains(ToBooleanICStub::UNDEFINED)) { | 1794 if (expected & ToBooleanHint::kUndefined) { |
| 1773 // undefined -> false. | 1795 // undefined -> false. |
| 1774 __ JumpIfRoot( | 1796 __ JumpIfRoot( |
| 1775 value, Heap::kUndefinedValueRootIndex, false_label); | 1797 value, Heap::kUndefinedValueRootIndex, false_label); |
| 1776 } | 1798 } |
| 1777 | 1799 |
| 1778 if (expected.Contains(ToBooleanICStub::BOOLEAN)) { | 1800 if (expected & ToBooleanHint::kBoolean) { |
| 1779 // Boolean -> its value. | 1801 // Boolean -> its value. |
| 1780 __ JumpIfRoot( | 1802 __ JumpIfRoot( |
| 1781 value, Heap::kTrueValueRootIndex, true_label); | 1803 value, Heap::kTrueValueRootIndex, true_label); |
| 1782 __ JumpIfRoot( | 1804 __ JumpIfRoot( |
| 1783 value, Heap::kFalseValueRootIndex, false_label); | 1805 value, Heap::kFalseValueRootIndex, false_label); |
| 1784 } | 1806 } |
| 1785 | 1807 |
| 1786 if (expected.Contains(ToBooleanICStub::NULL_TYPE)) { | 1808 if (expected & ToBooleanHint::kNull) { |
| 1787 // 'null' -> false. | 1809 // 'null' -> false. |
| 1788 __ JumpIfRoot( | 1810 __ JumpIfRoot( |
| 1789 value, Heap::kNullValueRootIndex, false_label); | 1811 value, Heap::kNullValueRootIndex, false_label); |
| 1790 } | 1812 } |
| 1791 | 1813 |
| 1792 if (expected.Contains(ToBooleanICStub::SMI)) { | 1814 if (expected & ToBooleanHint::kSmallInteger) { |
| 1793 // Smis: 0 -> false, all other -> true. | 1815 // Smis: 0 -> false, all other -> true. |
| 1794 DCHECK(Smi::kZero == 0); | 1816 DCHECK(Smi::kZero == 0); |
| 1795 __ Cbz(value, false_label); | 1817 __ Cbz(value, false_label); |
| 1796 __ JumpIfSmi(value, true_label); | 1818 __ JumpIfSmi(value, true_label); |
| 1797 } else if (expected.NeedsMap()) { | 1819 } else if (expected & ToBooleanHint::kNeedsMap) { |
| 1798 // If we need a map later and have a smi, deopt. | 1820 // If we need a map later and have a smi, deopt. |
| 1799 DeoptimizeIfSmi(value, instr, DeoptimizeReason::kSmi); | 1821 DeoptimizeIfSmi(value, instr, DeoptimizeReason::kSmi); |
| 1800 } | 1822 } |
| 1801 | 1823 |
| 1802 Register map = NoReg; | 1824 Register map = NoReg; |
| 1803 Register scratch = NoReg; | 1825 Register scratch = NoReg; |
| 1804 | 1826 |
| 1805 if (expected.NeedsMap()) { | 1827 if (expected & ToBooleanHint::kNeedsMap) { |
| 1806 DCHECK((instr->temp1() != NULL) && (instr->temp2() != NULL)); | 1828 DCHECK((instr->temp1() != NULL) && (instr->temp2() != NULL)); |
| 1807 map = ToRegister(instr->temp1()); | 1829 map = ToRegister(instr->temp1()); |
| 1808 scratch = ToRegister(instr->temp2()); | 1830 scratch = ToRegister(instr->temp2()); |
| 1809 | 1831 |
| 1810 __ Ldr(map, FieldMemOperand(value, HeapObject::kMapOffset)); | 1832 __ Ldr(map, FieldMemOperand(value, HeapObject::kMapOffset)); |
| 1811 | 1833 |
| 1812 if (expected.CanBeUndetectable()) { | 1834 if (expected & ToBooleanHint::kCanBeUndetectable) { |
| 1813 // Undetectable -> false. | 1835 // Undetectable -> false. |
| 1814 __ Ldrb(scratch, FieldMemOperand(map, Map::kBitFieldOffset)); | 1836 __ Ldrb(scratch, FieldMemOperand(map, Map::kBitFieldOffset)); |
| 1815 __ TestAndBranchIfAnySet( | 1837 __ TestAndBranchIfAnySet( |
| 1816 scratch, 1 << Map::kIsUndetectable, false_label); | 1838 scratch, 1 << Map::kIsUndetectable, false_label); |
| 1817 } | 1839 } |
| 1818 } | 1840 } |
| 1819 | 1841 |
| 1820 if (expected.Contains(ToBooleanICStub::SPEC_OBJECT)) { | 1842 if (expected & ToBooleanHint::kReceiver) { |
| 1821 // spec object -> true. | 1843 // spec object -> true. |
| 1822 __ CompareInstanceType(map, scratch, FIRST_JS_RECEIVER_TYPE); | 1844 __ CompareInstanceType(map, scratch, FIRST_JS_RECEIVER_TYPE); |
| 1823 __ B(ge, true_label); | 1845 __ B(ge, true_label); |
| 1824 } | 1846 } |
| 1825 | 1847 |
| 1826 if (expected.Contains(ToBooleanICStub::STRING)) { | 1848 if (expected & ToBooleanHint::kString) { |
| 1827 // String value -> false iff empty. | 1849 // String value -> false iff empty. |
| 1828 Label not_string; | 1850 Label not_string; |
| 1829 __ CompareInstanceType(map, scratch, FIRST_NONSTRING_TYPE); | 1851 __ CompareInstanceType(map, scratch, FIRST_NONSTRING_TYPE); |
| 1830 __ B(ge, ¬_string); | 1852 __ B(ge, ¬_string); |
| 1831 __ Ldr(scratch, FieldMemOperand(value, String::kLengthOffset)); | 1853 __ Ldr(scratch, FieldMemOperand(value, String::kLengthOffset)); |
| 1832 __ Cbz(scratch, false_label); | 1854 __ Cbz(scratch, false_label); |
| 1833 __ B(true_label); | 1855 __ B(true_label); |
| 1834 __ Bind(¬_string); | 1856 __ Bind(¬_string); |
| 1835 } | 1857 } |
| 1836 | 1858 |
| 1837 if (expected.Contains(ToBooleanICStub::SYMBOL)) { | 1859 if (expected & ToBooleanHint::kSymbol) { |
| 1838 // Symbol value -> true. | 1860 // Symbol value -> true. |
| 1839 __ CompareInstanceType(map, scratch, SYMBOL_TYPE); | 1861 __ CompareInstanceType(map, scratch, SYMBOL_TYPE); |
| 1840 __ B(eq, true_label); | 1862 __ B(eq, true_label); |
| 1841 } | 1863 } |
| 1842 | 1864 |
| 1843 if (expected.Contains(ToBooleanICStub::SIMD_VALUE)) { | 1865 if (expected & ToBooleanHint::kSimdValue) { |
| 1844 // SIMD value -> true. | 1866 // SIMD value -> true. |
| 1845 __ CompareInstanceType(map, scratch, SIMD128_VALUE_TYPE); | 1867 __ CompareInstanceType(map, scratch, SIMD128_VALUE_TYPE); |
| 1846 __ B(eq, true_label); | 1868 __ B(eq, true_label); |
| 1847 } | 1869 } |
| 1848 | 1870 |
| 1849 if (expected.Contains(ToBooleanICStub::HEAP_NUMBER)) { | 1871 if (expected & ToBooleanHint::kHeapNumber) { |
| 1850 Label not_heap_number; | 1872 Label not_heap_number; |
| 1851 __ JumpIfNotRoot(map, Heap::kHeapNumberMapRootIndex, ¬_heap_number); | 1873 __ JumpIfNotRoot(map, Heap::kHeapNumberMapRootIndex, ¬_heap_number); |
| 1852 | 1874 |
| 1853 __ Ldr(double_scratch(), | 1875 __ Ldr(double_scratch(), |
| 1854 FieldMemOperand(value, HeapNumber::kValueOffset)); | 1876 FieldMemOperand(value, HeapNumber::kValueOffset)); |
| 1855 __ Fcmp(double_scratch(), 0.0); | 1877 __ Fcmp(double_scratch(), 0.0); |
| 1856 // If we got a NaN (overflow bit is set), jump to the false branch. | 1878 // If we got a NaN (overflow bit is set), jump to the false branch. |
| 1857 __ B(vs, false_label); | 1879 __ B(vs, false_label); |
| 1858 __ B(eq, false_label); | 1880 __ B(eq, false_label); |
| 1859 __ B(true_label); | 1881 __ B(true_label); |
| 1860 __ Bind(¬_heap_number); | 1882 __ Bind(¬_heap_number); |
| 1861 } | 1883 } |
| 1862 | 1884 |
| 1863 if (!expected.IsGeneric()) { | 1885 if (expected != ToBooleanHint::kAny) { |
| 1864 // We've seen something for the first time -> deopt. | 1886 // We've seen something for the first time -> deopt. |
| 1865 // This can only happen if we are not generic already. | 1887 // This can only happen if we are not generic already. |
| 1866 Deoptimize(instr, DeoptimizeReason::kUnexpectedObject); | 1888 Deoptimize(instr, DeoptimizeReason::kUnexpectedObject); |
| 1867 } | 1889 } |
| 1868 } | 1890 } |
| 1869 } | 1891 } |
| 1870 } | 1892 } |
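
The bulk of this hunk replaces the old ToBooleanICStub::Types set (queried with Contains(), IsEmpty(), IsGeneric(), NeedsMap(), CanBeUndetectable()) with the ToBooleanHints bit-flag type, where each hint is a bit, kNone and kAny are the empty and full masks, and membership is tested with bitwise AND. Below is a standalone sketch of that flag pattern, with made-up enumerator values rather than the real ToBooleanHint definition:

    // Standalone sketch of the bit-flag pattern (illustrative enumerator values;
    // the real ToBooleanHint/ToBooleanHints definitions in V8 differ in detail).
    #include <cstdint>
    #include <cstdio>

    enum class HintSketch : uint16_t {
      kNone = 0,
      kUndefined = 1 << 0,
      kBoolean = 1 << 1,
      kNull = 1 << 2,
      kSmallInteger = 1 << 3,
      kNeedsMap = 1 << 4,
      kAny = (1 << 5) - 1,  // union of every hint bit above
    };

    using HintsSketch = uint16_t;  // a set of HintSketch bits

    constexpr HintsSketch operator|(HintSketch a, HintSketch b) {
      return static_cast<HintsSketch>(static_cast<uint16_t>(a) |
                                      static_cast<uint16_t>(b));
    }

    constexpr bool operator&(HintsSketch hints, HintSketch h) {
      return (hints & static_cast<uint16_t>(h)) != 0;
    }

    int main() {
      // Hints recorded so far, e.g. only booleans and Smis were seen.
      HintsSketch expected = HintSketch::kBoolean | HintSketch::kSmallInteger;
      // Nothing recorded yet? Treat the value as fully generic (kAny).
      if (expected == static_cast<HintsSketch>(HintSketch::kNone)) {
        expected = static_cast<HintsSketch>(HintSketch::kAny);
      }
      // Mirrors `if (expected & ToBooleanHint::kBoolean)` in the new column.
      if (expected & HintSketch::kBoolean) {
        std::printf("emit the true/false root comparison fast path\n");
      }
      // Mirrors `if (expected != ToBooleanHint::kAny)` for the deopt decision.
      if (expected != static_cast<HintsSketch>(HintSketch::kAny)) {
        std::printf("still specialized: deopt on an unexpected object\n");
      }
      return 0;
    }

As the hunk shows, the emitted ARM64 instructions are unchanged; only the C++ representation of the type feedback moves from a custom set type to bit flags.
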
| 1871 | 1893 |
| 1872 void LCodeGen::CallKnownFunction(Handle<JSFunction> function, | 1894 void LCodeGen::CallKnownFunction(Handle<JSFunction> function, |
| 1873 int formal_parameter_count, int arity, | 1895 int formal_parameter_count, int arity, |
| (...skipping 3692 matching lines...) |
| 5566 // Index is equal to negated out of object property index plus 1. | 5588 // Index is equal to negated out of object property index plus 1. |
| 5567 __ Sub(result, result, Operand::UntagSmiAndScale(index, kPointerSizeLog2)); | 5589 __ Sub(result, result, Operand::UntagSmiAndScale(index, kPointerSizeLog2)); |
| 5568 __ Ldr(result, FieldMemOperand(result, | 5590 __ Ldr(result, FieldMemOperand(result, |
| 5569 FixedArray::kHeaderSize - kPointerSize)); | 5591 FixedArray::kHeaderSize - kPointerSize)); |
| 5570 __ Bind(deferred->exit()); | 5592 __ Bind(deferred->exit()); |
| 5571 __ Bind(&done); | 5593 __ Bind(&done); |
| 5572 } | 5594 } |
| 5573 | 5595 |
| 5574 } // namespace internal | 5596 } // namespace internal |
| 5575 } // namespace v8 | 5597 } // namespace v8 |