OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include <limits.h> // For LONG_MIN, LONG_MAX. | 5 #include <limits.h> // For LONG_MIN, LONG_MAX. |
6 | 6 |
7 #if V8_TARGET_ARCH_ARM | 7 #if V8_TARGET_ARCH_ARM |
8 | 8 |
9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
10 #include "src/base/division-by-constant.h" | 10 #include "src/base/division-by-constant.h" |
(...skipping 2296 matching lines...)
2307 } | 2307 } |
2308 | 2308 |
2309 | 2309 |
2310 void MacroAssembler::CompareRoot(Register obj, | 2310 void MacroAssembler::CompareRoot(Register obj, |
2311 Heap::RootListIndex index) { | 2311 Heap::RootListIndex index) { |
2312 DCHECK(!obj.is(ip)); | 2312 DCHECK(!obj.is(ip)); |
2313 LoadRoot(ip, index); | 2313 LoadRoot(ip, index); |
2314 cmp(obj, ip); | 2314 cmp(obj, ip); |
2315 } | 2315 } |
2316 | 2316 |
2317 void MacroAssembler::CheckFastObjectElements(Register map, | |
2318 Register scratch, | |
2319 Label* fail) { | |
2320 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0); | |
2321 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1); | |
2322 STATIC_ASSERT(FAST_ELEMENTS == 2); | |
2323 STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3); | |
2324 ldrb(scratch, FieldMemOperand(map, Map::kBitField2Offset)); | |
2325 cmp(scratch, Operand(Map::kMaximumBitField2FastHoleySmiElementValue)); | |
2326 b(ls, fail); | |
2327 cmp(scratch, Operand(Map::kMaximumBitField2FastHoleyElementValue)); | |
2328 b(hi, fail); | |
2329 } | |
2330 | |
2331 | |
2332 void MacroAssembler::CheckFastSmiElements(Register map, | |
2333 Register scratch, | |
2334 Label* fail) { | |
2335 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0); | |
2336 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1); | |
2337 ldrb(scratch, FieldMemOperand(map, Map::kBitField2Offset)); | |
2338 cmp(scratch, Operand(Map::kMaximumBitField2FastHoleySmiElementValue)); | |
2339 b(hi, fail); | |
2340 } | |
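Context for the two deleted checks above: both rely on the fast ElementsKind values being contiguous and ordered (the STATIC_ASSERTs), so a single unsigned byte comparison on Map::bit_field2 classifies the kind. A minimal standalone sketch of the same predicates, with illustrative constant values standing in for V8's real Map::kMaximumBitField2Fast*Value constants:

#include <cstdint>

// Illustrative stand-ins; the real maxima come from V8's map layout.
constexpr uint8_t kMaxHoleySmiValue = 0x0F;
constexpr uint8_t kMaxHoleyElementValue = 0x1F;

// Mirrors CheckFastObjectElements: b(ls, fail) rejects kinds at or below the
// holey-smi maximum, b(hi, fail) rejects kinds above the holey-object maximum.
bool IsFastObjectElements(uint8_t bit_field2) {
  return bit_field2 > kMaxHoleySmiValue && bit_field2 <= kMaxHoleyElementValue;
}

// Mirrors CheckFastSmiElements: only b(hi, fail), so any kind at or below the
// holey-smi maximum passes.
bool IsFastSmiElements(uint8_t bit_field2) {
  return bit_field2 <= kMaxHoleySmiValue;
}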
2341 | |
2342 | |
2343 void MacroAssembler::StoreNumberToDoubleElements( | |
2344 Register value_reg, | |
2345 Register key_reg, | |
2346 Register elements_reg, | |
2347 Register scratch1, | |
2348 LowDwVfpRegister double_scratch, | |
2349 Label* fail, | |
2350 int elements_offset) { | |
2351 DCHECK(!AreAliased(value_reg, key_reg, elements_reg, scratch1)); | |
2352 Label smi_value, store; | |
2353 | |
2354 // Handle smi values specially. | |
2355 JumpIfSmi(value_reg, &smi_value); | |
2356 | |
2357 // Ensure that the object is a heap number | |
2358 CheckMap(value_reg, | |
2359 scratch1, | |
2360 isolate()->factory()->heap_number_map(), | |
2361 fail, | |
2362 DONT_DO_SMI_CHECK); | |
2363 | |
2364 vldr(double_scratch, FieldMemOperand(value_reg, HeapNumber::kValueOffset)); | |
2365 VFPCanonicalizeNaN(double_scratch); | |
2366 b(&store); | |
2367 | |
2368 bind(&smi_value); | |
2369 SmiToDouble(double_scratch, value_reg); | |
2370 | |
2371 bind(&store); | |
2372 add(scratch1, elements_reg, Operand::DoubleOffsetFromSmiKey(key_reg)); | |
2373 vstr(double_scratch, | |
2374 FieldMemOperand(scratch1, | |
2375 FixedDoubleArray::kHeaderSize - elements_offset)); | |
2376 } | |
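Two details of the deleted StoreNumberToDoubleElements are worth noting: VFPCanonicalizeNaN collapses every NaN to one canonical bit pattern before the store (FixedDoubleArray reserves a special NaN payload for holes, so arbitrary payloads are kept out of the backing store), and Operand::DoubleOffsetFromSmiKey turns the still-tagged smi key directly into a byte offset. A hedged standalone sketch of both steps, assuming the 32-bit smi layout:

#include <cmath>
#include <cstdint>
#include <limits>

// Collapse any NaN to the canonical quiet NaN, as VFPCanonicalizeNaN does
// before the vstr above.
inline double CanonicalizeNaN(double value) {
  return std::isnan(value) ? std::numeric_limits<double>::quiet_NaN() : value;
}

// A 32-bit smi stores its value shifted left by one (tag bit 0), and each
// double element occupies 8 bytes, so the byte offset of element |value| is
// (value << 1) << 2 == value * 8 -- the key never needs to be untagged.
inline int32_t DoubleOffsetFromSmiKey(int32_t smi_key) { return smi_key << 2; }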
2377 | |
2378 | |
2379 void MacroAssembler::CompareMap(Register obj, | 2317 void MacroAssembler::CompareMap(Register obj, |
2380 Register scratch, | 2318 Register scratch, |
2381 Handle<Map> map, | 2319 Handle<Map> map, |
2382 Label* early_success) { | 2320 Label* early_success) { |
2383 ldr(scratch, FieldMemOperand(obj, HeapObject::kMapOffset)); | 2321 ldr(scratch, FieldMemOperand(obj, HeapObject::kMapOffset)); |
2384 CompareMap(scratch, map, early_success); | 2322 CompareMap(scratch, map, early_success); |
2385 } | 2323 } |
2386 | 2324 |
2387 | 2325 |
2388 void MacroAssembler::CompareMap(Register obj_map, | 2326 void MacroAssembler::CompareMap(Register obj_map, |
(...skipping 482 matching lines...)
2871 ldr(dst, MemOperand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX))); | 2809 ldr(dst, MemOperand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX))); |
2872 } | 2810 } |
2873 } else { | 2811 } else { |
2874 // Slot is in the current function context. Move it into the | 2812 // Slot is in the current function context. Move it into the |
2875 // destination register in case we store into it (the write barrier | 2813 // destination register in case we store into it (the write barrier |
2876 // cannot be allowed to destroy the context in esi). | 2814 // cannot be allowed to destroy the context in esi). |
2877 mov(dst, cp); | 2815 mov(dst, cp); |
2878 } | 2816 } |
2879 } | 2817 } |
2880 | 2818 |
2881 | |
2882 void MacroAssembler::LoadTransitionedArrayMapConditional( | |
2883 ElementsKind expected_kind, | |
2884 ElementsKind transitioned_kind, | |
2885 Register map_in_out, | |
2886 Register scratch, | |
2887 Label* no_map_match) { | |
2888 DCHECK(IsFastElementsKind(expected_kind)); | |
2889 DCHECK(IsFastElementsKind(transitioned_kind)); | |
2890 | |
2891 // Check that the function's map is the same as the expected cached map. | |
2892 ldr(scratch, NativeContextMemOperand()); | |
2893 ldr(ip, ContextMemOperand(scratch, Context::ArrayMapIndex(expected_kind))); | |
2894 cmp(map_in_out, ip); | |
2895 b(ne, no_map_match); | |
2896 | |
2897 // Use the transitioned cached map. | |
2898 ldr(map_in_out, | |
2899 ContextMemOperand(scratch, Context::ArrayMapIndex(transitioned_kind))); | |
2900 } | |
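The deleted LoadTransitionedArrayMapConditional performs a map swap against the native context's cache of initial JSArray maps: if map_in_out is the cached map for expected_kind, it is replaced by the cached map for transitioned_kind, otherwise control goes to no_map_match. A rough standalone analogue, with illustrative types in place of V8's real heap layout:

#include <cstddef>

// Illustrative stand-in for the per-ElementsKind array-map cache that
// Context::ArrayMapIndex indexes into.
struct NativeContextCache {
  static const int kNumFastKinds = 6;
  const void* array_maps[kNumFastKinds];
};

// Returns the transitioned map when |map| matches the cached map for
// |expected_kind|; nullptr plays the role of the no_map_match branch.
const void* TransitionArrayMap(const NativeContextCache& ctx, const void* map,
                               int expected_kind, int transitioned_kind) {
  if (map != ctx.array_maps[expected_kind]) return nullptr;
  return ctx.array_maps[transitioned_kind];
}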
2901 | |
2902 | |
2903 void MacroAssembler::LoadNativeContextSlot(int index, Register dst) { | 2819 void MacroAssembler::LoadNativeContextSlot(int index, Register dst) { |
2904 ldr(dst, NativeContextMemOperand()); | 2820 ldr(dst, NativeContextMemOperand()); |
2905 ldr(dst, ContextMemOperand(dst, index)); | 2821 ldr(dst, ContextMemOperand(dst, index)); |
2906 } | 2822 } |
2907 | 2823 |
2908 | 2824 |
2909 void MacroAssembler::LoadGlobalFunctionInitialMap(Register function, | 2825 void MacroAssembler::LoadGlobalFunctionInitialMap(Register function, |
2910 Register map, | 2826 Register map, |
2911 Register scratch) { | 2827 Register scratch) { |
2912 // Load the initial map. The global functions all have initial maps. | 2828 // Load the initial map. The global functions all have initial maps. |
(...skipping 42 matching lines...)
2955 } | 2871 } |
2956 | 2872 |
2957 | 2873 |
2958 void MacroAssembler::UntagAndJumpIfSmi( | 2874 void MacroAssembler::UntagAndJumpIfSmi( |
2959 Register dst, Register src, Label* smi_case) { | 2875 Register dst, Register src, Label* smi_case) { |
2960 STATIC_ASSERT(kSmiTag == 0); | 2876 STATIC_ASSERT(kSmiTag == 0); |
2961 SmiUntag(dst, src, SetCC); | 2877 SmiUntag(dst, src, SetCC); |
2962 b(cc, smi_case); // Shifter carry is not set for a smi. | 2878 b(cc, smi_case); // Shifter carry is not set for a smi. |
2963 } | 2879 } |
2964 | 2880 |
2965 | |
2966 void MacroAssembler::UntagAndJumpIfNotSmi( | |
2967 Register dst, Register src, Label* non_smi_case) { | |
2968 STATIC_ASSERT(kSmiTag == 0); | |
2969 SmiUntag(dst, src, SetCC); | |
2970 b(cs, non_smi_case); // Shifter carry is set for a non-smi. | |
2971 } | |
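UntagAndJumpIfSmi and the deleted UntagAndJumpIfNotSmi both fold the smi check into the untag: on 32-bit ARM, SmiUntag with SetCC is an ASR #1 that shifts the tag bit out into the carry flag, so carry clear means the tag was 0 (a smi) and carry set means it was not. A small sketch of the 32-bit smi encoding this relies on (kSmiTag == 0, one tag bit):

#include <cstdint>

inline int32_t SmiTag(int32_t value) { return value << 1; }      // tag bit is 0
inline bool IsSmi(int32_t tagged) { return (tagged & 1) == 0; }
inline int32_t SmiUntag(int32_t tagged) { return tagged >> 1; }  // the ASR #1

// The assembly gets IsSmi for free: the SetCC variant of the shift leaves the
// discarded tag bit in the carry flag, so one instruction both untags and
// classifies the value.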
2972 | |
2973 | |
2974 void MacroAssembler::JumpIfEitherSmi(Register reg1, | 2881 void MacroAssembler::JumpIfEitherSmi(Register reg1, |
2975 Register reg2, | 2882 Register reg2, |
2976 Label* on_either_smi) { | 2883 Label* on_either_smi) { |
2977 STATIC_ASSERT(kSmiTag == 0); | 2884 STATIC_ASSERT(kSmiTag == 0); |
2978 tst(reg1, Operand(kSmiTagMask)); | 2885 tst(reg1, Operand(kSmiTagMask)); |
2979 tst(reg2, Operand(kSmiTagMask), ne); | 2886 tst(reg2, Operand(kSmiTagMask), ne); |
2980 b(eq, on_either_smi); | 2887 b(eq, on_either_smi); |
2981 } | 2888 } |
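JumpIfEitherSmi is a small conditional-execution trick: the first tst sets Z when reg1 is a smi; the second tst is predicated on ne, so it only runs when reg1 was not a smi; the final b(eq) therefore fires when either register has a clear tag bit. The C-level meaning, as a sketch:

#include <cstdint>

// Equivalent predicate for the predicated tst/tst/b(eq) sequence above
// (kSmiTagMask == 1 on 32-bit targets).
inline bool IsEitherSmi(uint32_t reg1, uint32_t reg2) {
  return ((reg1 & 1) == 0) || ((reg2 & 1) == 0);
}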
2982 | 2889 |
2983 void MacroAssembler::AssertNotNumber(Register object) { | 2890 void MacroAssembler::AssertNotNumber(Register object) { |
(...skipping 870 matching lines...)
3854 for (int i = 0; i < config->num_allocatable_general_registers(); ++i) { | 3761 for (int i = 0; i < config->num_allocatable_general_registers(); ++i) { |
3855 int code = config->GetAllocatableGeneralCode(i); | 3762 int code = config->GetAllocatableGeneralCode(i); |
3856 Register candidate = Register::from_code(code); | 3763 Register candidate = Register::from_code(code); |
3857 if (regs & candidate.bit()) continue; | 3764 if (regs & candidate.bit()) continue; |
3858 return candidate; | 3765 return candidate; |
3859 } | 3766 } |
3860 UNREACHABLE(); | 3767 UNREACHABLE(); |
3861 return no_reg; | 3768 return no_reg; |
3862 } | 3769 } |
3863 | 3770 |
3864 | |
3865 void MacroAssembler::JumpIfDictionaryInPrototypeChain( | |
3866 Register object, | |
3867 Register scratch0, | |
3868 Register scratch1, | |
3869 Label* found) { | |
3870 DCHECK(!scratch1.is(scratch0)); | |
3871 Register current = scratch0; | |
3872 Label loop_again, end; | |
3873 | |
3874 // scratch contained elements pointer. | |
3875 mov(current, object); | |
3876 ldr(current, FieldMemOperand(current, HeapObject::kMapOffset)); | |
3877 ldr(current, FieldMemOperand(current, Map::kPrototypeOffset)); | |
3878 CompareRoot(current, Heap::kNullValueRootIndex); | |
3879 b(eq, &end); | |
3880 | |
3881 // Loop based on the map going up the prototype chain. | |
3882 bind(&loop_again); | |
3883 ldr(current, FieldMemOperand(current, HeapObject::kMapOffset)); | |
3884 | |
3885 STATIC_ASSERT(JS_PROXY_TYPE < JS_OBJECT_TYPE); | |
3886 STATIC_ASSERT(JS_VALUE_TYPE < JS_OBJECT_TYPE); | |
3887 ldrb(scratch1, FieldMemOperand(current, Map::kInstanceTypeOffset)); | |
3888 cmp(scratch1, Operand(JS_OBJECT_TYPE)); | |
3889 b(lo, found); | |
3890 | |
3891 ldr(scratch1, FieldMemOperand(current, Map::kBitField2Offset)); | |
3892 DecodeField<Map::ElementsKindBits>(scratch1); | |
3893 cmp(scratch1, Operand(DICTIONARY_ELEMENTS)); | |
3894 b(eq, found); | |
3895 ldr(current, FieldMemOperand(current, Map::kPrototypeOffset)); | |
3896 CompareRoot(current, Heap::kNullValueRootIndex); | |
3897 b(ne, &loop_again); | |
3898 | |
3899 bind(&end); | |
3900 } | |
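The deleted JumpIfDictionaryInPrototypeChain walks maps up the prototype chain, bailing out to found for any special receiver (instance type below JS_OBJECT_TYPE) or any object whose elements kind decodes to DICTIONARY_ELEMENTS, and stopping at a null prototype. A rough standalone analogue, with illustrative types in place of V8's heap layout:

#include <cstdint>

struct Obj;
struct Map {
  const Obj* prototype;   // nullptr stands in for the null value root
  uint8_t instance_type;
  uint8_t elements_kind;  // DecodeField<Map::ElementsKindBits> in the diff
};
struct Obj { const Map* map; };

// Starts from the receiver's prototype, exactly like the deleted helper.
bool PrototypeChainHasDictionaryElements(const Obj* object,
                                         uint8_t js_object_type,
                                         uint8_t dictionary_elements) {
  for (const Obj* current = object->map->prototype; current != nullptr;
       current = current->map->prototype) {
    if (current->map->instance_type < js_object_type) return true;       // b(lo, found)
    if (current->map->elements_kind == dictionary_elements) return true; // b(eq, found)
  }
  return false;
}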
3901 | |
3902 | |
3903 #ifdef DEBUG | 3771 #ifdef DEBUG |
3904 bool AreAliased(Register reg1, | 3772 bool AreAliased(Register reg1, |
3905 Register reg2, | 3773 Register reg2, |
3906 Register reg3, | 3774 Register reg3, |
3907 Register reg4, | 3775 Register reg4, |
3908 Register reg5, | 3776 Register reg5, |
3909 Register reg6, | 3777 Register reg6, |
3910 Register reg7, | 3778 Register reg7, |
3911 Register reg8) { | 3779 Register reg8) { |
3912 int n_of_valid_regs = reg1.is_valid() + reg2.is_valid() + | 3780 int n_of_valid_regs = reg1.is_valid() + reg2.is_valid() + |
(...skipping 81 matching lines...)
3994 } | 3862 } |
3995 } | 3863 } |
3996 if (mag.shift > 0) mov(result, Operand(result, ASR, mag.shift)); | 3864 if (mag.shift > 0) mov(result, Operand(result, ASR, mag.shift)); |
3997 add(result, result, Operand(dividend, LSR, 31)); | 3865 add(result, result, Operand(dividend, LSR, 31)); |
3998 } | 3866 } |
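The tail shown here finishes a signed division by a compile-time constant using a precomputed magic multiplier (the src/base/division-by-constant.h helpers included at the top of the file); the skipped lines do the multiply-high and, for some divisors, an add/subtract fix-up. After the arithmetic shift, adding the dividend's sign bit (the LSR 31) bumps negative quotients up by one so the result truncates toward zero rather than toward minus infinity. A standalone sketch for divisor 3, where no fix-up is needed (the multiplier and shift are the textbook values, not taken from this diff):

#include <cstdint>

int32_t DivideBy3(int32_t dividend) {
  const int32_t kMultiplier = 0x55555556;  // roughly 2^32 / 3, rounded up
  const int kShift = 0;
  // High 32 bits of the 64-bit product, as the ARM signed multiply-high gives.
  int32_t result = static_cast<int32_t>(
      (static_cast<int64_t>(dividend) * kMultiplier) >> 32);
  if (kShift > 0) result >>= kShift;                // the ASR by mag.shift
  result += static_cast<uint32_t>(dividend) >> 31;  // add sign bit: truncate toward zero
  return result;
}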
3999 | 3867 |
4000 } // namespace internal | 3868 } // namespace internal |
4001 } // namespace v8 | 3869 } // namespace v8 |
4002 | 3870 |
4003 #endif // V8_TARGET_ARCH_ARM | 3871 #endif // V8_TARGET_ARCH_ARM |