OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_ARM64 | 5 #if V8_TARGET_ARCH_ARM64 |
6 | 6 |
7 #include "src/base/bits.h" | 7 #include "src/base/bits.h" |
8 #include "src/base/division-by-constant.h" | 8 #include "src/base/division-by-constant.h" |
9 #include "src/bootstrapper.h" | 9 #include "src/bootstrapper.h" |
10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
(...skipping 3735 matching lines...)
3746 Ldr(scratch, FieldMemOperand(string, String::kLengthOffset)); | 3746 Ldr(scratch, FieldMemOperand(string, String::kLengthOffset)); |
3747 Cmp(index, index_type == kIndexIsSmi ? scratch : Operand::UntagSmi(scratch)); | 3747 Cmp(index, index_type == kIndexIsSmi ? scratch : Operand::UntagSmi(scratch)); |
3748 Check(lt, kIndexIsTooLarge); | 3748 Check(lt, kIndexIsTooLarge); |
3749 | 3749 |
3750 DCHECK_EQ(static_cast<Smi*>(0), Smi::kZero); | 3750 DCHECK_EQ(static_cast<Smi*>(0), Smi::kZero); |
3751 Cmp(index, 0); | 3751 Cmp(index, 0); |
3752 Check(ge, kIndexIsNegative); | 3752 Check(ge, kIndexIsNegative); |
3753 } | 3753 } |
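The checks above assert that the (possibly Smi-tagged) index is within the string's length and non-negative. As a rough C++ sketch of that intent (not V8 code; the arm64 Smi shift of 32 and the helper name are assumptions for illustration):

    #include <cstdint>
    #include <cassert>

    static const int kSmiShiftAssumed = 32;  // arm64 keeps the Smi payload in the upper 32 bits

    // Illustrative helper: mirrors the two Check() assertions emitted above.
    static void CheckStringIndex(int64_t index, int64_t tagged_length, bool index_is_smi) {
      // If the index is still Smi-tagged, compare against the tagged length;
      // otherwise untag the length so both sides use the same representation.
      int64_t length = index_is_smi ? tagged_length : (tagged_length >> kSmiShiftAssumed);
      assert(index < length);   // kIndexIsTooLarge
      assert(index >= 0);       // kIndexIsNegative
    }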
3754 | 3754 |
3755 | 3755 |
3756 void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg, | |
3757 Register scratch1, | |
3758 Register scratch2, | |
3759 Label* miss) { | |
3760 DCHECK(!AreAliased(holder_reg, scratch1, scratch2)); | |
3761 Label same_contexts; | |
3762 | |
3763 // Load current lexical context from the active StandardFrame, which | |
3764 // may require crawling past STUB frames. | |
3765 Label load_context; | |
3766 Label has_context; | |
3767 Mov(scratch2, fp); | |
3768 bind(&load_context); | |
3769 Ldr(scratch1, | |
3770 MemOperand(scratch2, CommonFrameConstants::kContextOrFrameTypeOffset)); | |
3771 JumpIfNotSmi(scratch1, &has_context); | |
3772 Ldr(scratch2, MemOperand(scratch2, CommonFrameConstants::kCallerFPOffset)); | |
3773 B(&load_context); | |
3774 bind(&has_context); | |
3775 | |
3776 // In debug mode, make sure the lexical context is set. | |
3777 #ifdef DEBUG | |
3778 Cmp(scratch1, 0); | |
3779 Check(ne, kWeShouldNotHaveAnEmptyLexicalContext); | |
3780 #endif | |
3781 | |
3782 // Load the native context of the current context. | |
3783 Ldr(scratch1, ContextMemOperand(scratch1, Context::NATIVE_CONTEXT_INDEX)); | |
3784 | |
3785 // Check the context is a native context. | |
3786 if (emit_debug_code()) { | |
3787 // Read the first word and compare to the native_context_map. | |
3788 Ldr(scratch2, FieldMemOperand(scratch1, HeapObject::kMapOffset)); | |
3789 CompareRoot(scratch2, Heap::kNativeContextMapRootIndex); | |
3790 Check(eq, kExpectedNativeContext); | |
3791 } | |
3792 | |
3793 // Check if both contexts are the same. | |
3794 Ldr(scratch2, FieldMemOperand(holder_reg, | |
3795 JSGlobalProxy::kNativeContextOffset)); | |
3796 Cmp(scratch1, scratch2); | |
3797 B(&same_contexts, eq); | |
3798 | |
3799 // Check the context is a native context. | |
3800 if (emit_debug_code()) { | |
3801 // We're short on scratch registers here, so use holder_reg as a scratch. | |
3802 Push(holder_reg); | |
3803 Register scratch3 = holder_reg; | |
3804 | |
3805 CompareRoot(scratch2, Heap::kNullValueRootIndex); | |
3806 Check(ne, kExpectedNonNullContext); | |
3807 | |
3808 Ldr(scratch3, FieldMemOperand(scratch2, HeapObject::kMapOffset)); | |
3809 CompareRoot(scratch3, Heap::kNativeContextMapRootIndex); | |
3810 Check(eq, kExpectedNativeContext); | |
3811 Pop(holder_reg); | |
3812 } | |
3813 | |
3814 // Check that the security token in the calling global object is | |
3815 // compatible with the security token in the receiving global | |
3816 // object. | |
3817 int token_offset = Context::kHeaderSize + | |
3818 Context::SECURITY_TOKEN_INDEX * kPointerSize; | |
3819 | |
3820 Ldr(scratch1, FieldMemOperand(scratch1, token_offset)); | |
3821 Ldr(scratch2, FieldMemOperand(scratch2, token_offset)); | |
3822 Cmp(scratch1, scratch2); | |
3823 B(miss, ne); | |
3824 | |
3825 Bind(&same_contexts); | |
3826 } | |
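The block removed above implemented the global-proxy access check: walk up from fp past STUB frames to the nearest StandardFrame's context, load that context's native context, and compare it (or its security token) against the holder proxy's native context. A minimal sketch of that decision, using illustrative stand-in types rather than V8's actual classes:

    // Hedged sketch of the check the removed code performed.
    struct NativeContextLike {
      const void* security_token;  // stand-in for the Context::SECURITY_TOKEN_INDEX slot
    };

    static bool AccessAllowed(const NativeContextLike* calling_native_context,
                              const NativeContextLike* holder_native_context) {
      // Fast path: identical native contexts (the same_contexts label above).
      if (calling_native_context == holder_native_context) return true;
      // Otherwise the two security tokens must match, or the access misses.
      return calling_native_context->security_token ==
             holder_native_context->security_token;
    }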
3827 | |
3828 | |
3829 // Compute the hash code from the untagged key. This must be kept in sync with | 3756 // Compute the hash code from the untagged key. This must be kept in sync with |
3830 // ComputeIntegerHash in utils.h and KeyedLoadGenericStub in | 3757 // ComputeIntegerHash in utils.h and KeyedLoadGenericStub in |
3831 // code-stub-hydrogen.cc | 3758 // code-stub-hydrogen.cc |
3832 void MacroAssembler::GetNumberHash(Register key, Register scratch) { | 3759 void MacroAssembler::GetNumberHash(Register key, Register scratch) { |
3833 DCHECK(!AreAliased(key, scratch)); | 3760 DCHECK(!AreAliased(key, scratch)); |
3834 | 3761 |
3835 // Xor original key with a seed. | 3762 // Xor original key with a seed. |
3836 LoadRoot(scratch, Heap::kHashSeedRootIndex); | 3763 LoadRoot(scratch, Heap::kHashSeedRootIndex); |
3837 Eor(key, key, Operand::UntagSmi(scratch)); | 3764 Eor(key, key, Operand::UntagSmi(scratch)); |
3838 | 3765 |
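The comment above ties this code to ComputeIntegerHash in utils.h. For orientation, after the seed xor shown here the emitted sequence follows a Jenkins-style integer mix; the C++ sketch below shows that pattern (the exact constants are an assumption and should be confirmed against utils.h at this revision):

    #include <cstdint>

    // Sketch of the Jenkins-style mix that GetNumberHash is expected to emit.
    static uint32_t IntegerHashSketch(uint32_t key, uint32_t seed) {
      uint32_t hash = key ^ seed;          // the Eor with the untagged hash seed above
      hash = ~hash + (hash << 15);
      hash = hash ^ (hash >> 12);
      hash = hash + (hash << 2);
      hash = hash ^ (hash >> 4);
      hash = hash * 2057;
      hash = hash ^ (hash >> 16);
      return hash & 0x3fffffff;            // keep the result in the positive Smi range
    }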
(...skipping 1167 matching lines...)
5006 } | 4933 } |
5007 | 4934 |
5008 | 4935 |
5009 #undef __ | 4936 #undef __ |
5010 | 4937 |
5011 | 4938 |
5012 } // namespace internal | 4939 } // namespace internal |
5013 } // namespace v8 | 4940 } // namespace v8 |
5014 | 4941 |
5015 #endif // V8_TARGET_ARCH_ARM64 | 4942 #endif // V8_TARGET_ARCH_ARM64 |