OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/crankshaft/arm64/lithium-codegen-arm64.h" | 5 #include "src/crankshaft/arm64/lithium-codegen-arm64.h" |
6 | 6 |
7 #include "src/arm64/frames-arm64.h" | 7 #include "src/arm64/frames-arm64.h" |
8 #include "src/base/bits.h" | 8 #include "src/base/bits.h" |
9 #include "src/code-factory.h" | 9 #include "src/code-factory.h" |
10 #include "src/code-stubs.h" | 10 #include "src/code-stubs.h" |
(...skipping 1415 matching lines...) |
1426 } else { | 1426 } else { |
1427 DCHECK(instr->temp3() == NULL); | 1427 DCHECK(instr->temp3() == NULL); |
1428 } | 1428 } |
1429 } | 1429 } |
1430 | 1430 |
1431 | 1431 |
1432 void LCodeGen::DoDeferredAllocate(LAllocate* instr) { | 1432 void LCodeGen::DoDeferredAllocate(LAllocate* instr) { |
1433 // TODO(3095996): Get rid of this. For now, we need to make the | 1433 // TODO(3095996): Get rid of this. For now, we need to make the |
1434 // result register contain a valid pointer because it is already | 1434 // result register contain a valid pointer because it is already |
1435 // contained in the register pointer map. | 1435 // contained in the register pointer map. |
1436 __ Mov(ToRegister(instr->result()), Smi::FromInt(0)); | 1436 __ Mov(ToRegister(instr->result()), Smi::kZero); |
1437 | 1437 |
1438 PushSafepointRegistersScope scope(this); | 1438 PushSafepointRegistersScope scope(this); |
1439 LoadContextFromDeferred(instr->context()); | 1439 LoadContextFromDeferred(instr->context()); |
1440 // We're in a SafepointRegistersScope so we can use any scratch registers. | 1440 // We're in a SafepointRegistersScope so we can use any scratch registers. |
1441 Register size = x0; | 1441 Register size = x0; |
1442 if (instr->size()->IsConstantOperand()) { | 1442 if (instr->size()->IsConstantOperand()) { |
1443 __ Mov(size, ToSmi(LConstantOperand::cast(instr->size()))); | 1443 __ Mov(size, ToSmi(LConstantOperand::cast(instr->size()))); |
1444 } else { | 1444 } else { |
1445 __ SmiTag(size, ToRegister32(instr->size()).X()); | 1445 __ SmiTag(size, ToRegister32(instr->size()).X()); |
1446 } | 1446 } |
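Note on this hunk: the Smi::FromInt(0) -> Smi::kZero swap is a pure rename, because the tagged representation of zero is the all-zero word. A minimal sketch of the arm64 smi encoding this relies on (illustrative names, not V8's actual declarations; the assumption is V8's 64-bit layout with the 32-bit payload shifted into the upper word):

    #include <cstdint>

    // Assumed arm64 layout: 1 tag bit + 31 shift bits, so the 32-bit
    // payload occupies bits [63:32] and the low word is all zero.
    constexpr int kSmiShift = 32;

    // Hypothetical helper mirroring Smi::FromInt on this layout.
    constexpr int64_t SmiFromInt(int32_t value) {
      return static_cast<int64_t>(value) << kSmiShift;
    }

    constexpr int64_t kSmiZero = SmiFromInt(0);

    // Tagged zero is the zero word, so Mov(reg, Smi::kZero) writes 0
    // and the DCHECK(Smi::kZero == 0) further down holds.
    static_assert(kSmiZero == 0, "tagged zero must be the zero word");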
(...skipping 289 matching lines...) |
1736 DCHECK(r.IsTagged()); | 1736 DCHECK(r.IsTagged()); |
1737 Register value = ToRegister(instr->value()); | 1737 Register value = ToRegister(instr->value()); |
1738 HType type = instr->hydrogen()->value()->type(); | 1738 HType type = instr->hydrogen()->value()->type(); |
1739 | 1739 |
1740 if (type.IsBoolean()) { | 1740 if (type.IsBoolean()) { |
1741 DCHECK(!info()->IsStub()); | 1741 DCHECK(!info()->IsStub()); |
1742 __ CompareRoot(value, Heap::kTrueValueRootIndex); | 1742 __ CompareRoot(value, Heap::kTrueValueRootIndex); |
1743 EmitBranch(instr, eq); | 1743 EmitBranch(instr, eq); |
1744 } else if (type.IsSmi()) { | 1744 } else if (type.IsSmi()) { |
1745 DCHECK(!info()->IsStub()); | 1745 DCHECK(!info()->IsStub()); |
1746 EmitCompareAndBranch(instr, ne, value, Smi::FromInt(0)); | 1746 EmitCompareAndBranch(instr, ne, value, Smi::kZero); |
1747 } else if (type.IsJSArray()) { | 1747 } else if (type.IsJSArray()) { |
1748 DCHECK(!info()->IsStub()); | 1748 DCHECK(!info()->IsStub()); |
1749 EmitGoto(instr->TrueDestination(chunk())); | 1749 EmitGoto(instr->TrueDestination(chunk())); |
1750 } else if (type.IsHeapNumber()) { | 1750 } else if (type.IsHeapNumber()) { |
1751 DCHECK(!info()->IsStub()); | 1751 DCHECK(!info()->IsStub()); |
1752 __ Ldr(double_scratch(), FieldMemOperand(value, | 1752 __ Ldr(double_scratch(), FieldMemOperand(value, |
1753 HeapNumber::kValueOffset)); | 1753 HeapNumber::kValueOffset)); |
1754 // Test the double value. Zero and NaN are false. | 1754 // Test the double value. Zero and NaN are false. |
1755 EmitBranchIfNonZeroNumber(instr, double_scratch(), double_scratch()); | 1755 EmitBranchIfNonZeroNumber(instr, double_scratch(), double_scratch()); |
1756 } else if (type.IsString()) { | 1756 } else if (type.IsString()) { |
(...skipping 22 matching lines...) |
1779 } | 1779 } |
1780 | 1780 |
1781 if (expected.Contains(ToBooleanICStub::NULL_TYPE)) { | 1781 if (expected.Contains(ToBooleanICStub::NULL_TYPE)) { |
1782 // 'null' -> false. | 1782 // 'null' -> false. |
1783 __ JumpIfRoot( | 1783 __ JumpIfRoot( |
1784 value, Heap::kNullValueRootIndex, false_label); | 1784 value, Heap::kNullValueRootIndex, false_label); |
1785 } | 1785 } |
1786 | 1786 |
1787 if (expected.Contains(ToBooleanICStub::SMI)) { | 1787 if (expected.Contains(ToBooleanICStub::SMI)) { |
1788 // Smis: 0 -> false, all other -> true. | 1788 // Smis: 0 -> false, all other -> true. |
1789 DCHECK(Smi::FromInt(0) == 0); | 1789 DCHECK(Smi::kZero == 0); |
1790 __ Cbz(value, false_label); | 1790 __ Cbz(value, false_label); |
1791 __ JumpIfSmi(value, true_label); | 1791 __ JumpIfSmi(value, true_label); |
1792 } else if (expected.NeedsMap()) { | 1792 } else if (expected.NeedsMap()) { |
1793 // If we need a map later and have a smi, deopt. | 1793 // If we need a map later and have a smi, deopt. |
1794 DeoptimizeIfSmi(value, instr, DeoptimizeReason::kSmi); | 1794 DeoptimizeIfSmi(value, instr, DeoptimizeReason::kSmi); |
1795 } | 1795 } |
1796 | 1796 |
1797 Register map = NoReg; | 1797 Register map = NoReg; |
1798 Register scratch = NoReg; | 1798 Register scratch = NoReg; |
1799 | 1799 |
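Note on the DoBranch hunk: the emitted branches implement JavaScript ToBoolean over the statically known types. Booleans compare against the true root, smis are truthy iff the tagged word is nonzero (again relying on Smi::kZero == 0, which is why a single Cbz suffices), and heap numbers are false exactly for zero and NaN. A compact C++ model of those two value cases (a sketch of the semantics, not V8 source):

    #include <cmath>
    #include <cstdint>

    // Smi case: mirrors "Cbz value, false_label" -- the tagged zero is
    // the zero word, every other smi is true.
    bool SmiToBoolean(int64_t tagged_smi) {
      return tagged_smi != 0;
    }

    // Heap-number case: EmitBranchIfNonZeroNumber treats +0, -0, and
    // NaN as false, every other double as true.
    bool HeapNumberToBoolean(double value) {
      return value != 0.0 && !std::isnan(value);
    }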
(...skipping 3813 matching lines...) |
5613 DeferredLoadMutableDouble* deferred; | 5613 DeferredLoadMutableDouble* deferred; |
5614 deferred = new(zone()) DeferredLoadMutableDouble( | 5614 deferred = new(zone()) DeferredLoadMutableDouble( |
5615 this, instr, result, object, index); | 5615 this, instr, result, object, index); |
5616 | 5616 |
5617 Label out_of_object, done; | 5617 Label out_of_object, done; |
5618 | 5618 |
5619 __ TestAndBranchIfAnySet( | 5619 __ TestAndBranchIfAnySet( |
5620 index, reinterpret_cast<uint64_t>(Smi::FromInt(1)), deferred->entry()); | 5620 index, reinterpret_cast<uint64_t>(Smi::FromInt(1)), deferred->entry()); |
5621 __ Mov(index, Operand(index, ASR, 1)); | 5621 __ Mov(index, Operand(index, ASR, 1)); |
5622 | 5622 |
5623 __ Cmp(index, Smi::FromInt(0)); | 5623 __ Cmp(index, Smi::kZero); |
5624 __ B(lt, &out_of_object); | 5624 __ B(lt, &out_of_object); |
5625 | 5625 |
5626 STATIC_ASSERT(kPointerSizeLog2 > kSmiTagSize); | 5626 STATIC_ASSERT(kPointerSizeLog2 > kSmiTagSize); |
5627 __ Add(result, object, Operand::UntagSmiAndScale(index, kPointerSizeLog2)); | 5627 __ Add(result, object, Operand::UntagSmiAndScale(index, kPointerSizeLog2)); |
5628 __ Ldr(result, FieldMemOperand(result, JSObject::kHeaderSize)); | 5628 __ Ldr(result, FieldMemOperand(result, JSObject::kHeaderSize)); |
5629 | 5629 |
5630 __ B(&done); | 5630 __ B(&done); |
5631 | 5631 |
5632 __ Bind(&out_of_object); | 5632 __ Bind(&out_of_object); |
5633 __ Ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset)); | 5633 __ Ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset)); |
5634 // Index is equal to negated out of object property index plus 1. | 5634 // Index is equal to negated out of object property index plus 1. |
5635 __ Sub(result, result, Operand::UntagSmiAndScale(index, kPointerSizeLog2)); | 5635 __ Sub(result, result, Operand::UntagSmiAndScale(index, kPointerSizeLog2)); |
5636 __ Ldr(result, FieldMemOperand(result, | 5636 __ Ldr(result, FieldMemOperand(result, |
5637 FixedArray::kHeaderSize - kPointerSize)); | 5637 FixedArray::kHeaderSize - kPointerSize)); |
5638 __ Bind(deferred->exit()); | 5638 __ Bind(deferred->exit()); |
5639 __ Bind(&done); | 5639 __ Bind(&done); |
5640 } | 5640 } |
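Note on this hunk: the TestAndBranchIfAnySet against Smi::FromInt(1) tests payload bit 0 of the index smi. When set, the field holds a mutable double and the deferred path runs; otherwise the flag is shifted out (the ASR #1), and the sign of the remaining index selects in-object versus out-of-object storage, where a negative index encodes -(property index + 1). A sketch of that decoding under the encoding the comments describe (hypothetical struct and helper names, not V8's FieldIndex API):

    #include <cstdint>

    struct DecodedFieldIndex {
      bool is_mutable_double;  // payload bit 0: take the deferred path
      bool in_object;          // non-negative index -> in-object field
      int32_t slot;            // slot in the object or properties array
    };

    DecodedFieldIndex DecodeFieldIndex(int32_t payload) {
      DecodedFieldIndex d;
      d.is_mutable_double = (payload & 1) != 0;
      int32_t index = payload >> 1;  // arithmetic shift, like ASR #1
      d.in_object = index >= 0;
      // Out-of-object: index = -(slot + 1), so slot = -index - 1, which
      // is why the load above uses FixedArray::kHeaderSize - kPointerSize.
      d.slot = d.in_object ? index : -index - 1;
      return d;
    }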
5641 | 5641 |
5642 } // namespace internal | 5642 } // namespace internal |
5643 } // namespace v8 | 5643 } // namespace v8 |