| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/v8.h" | 5 #include "src/v8.h" |
| 6 | 6 |
| 7 #if V8_TARGET_ARCH_X64 | 7 #if V8_TARGET_ARCH_X64 |
| 8 | 8 |
| 9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
| 10 #include "src/base/division-by-constant.h" | 10 #include "src/base/division-by-constant.h" |
| (...skipping 902 matching lines...) |
| 913 } | 913 } |
| 914 } | 914 } |
| 915 | 915 |
| 916 | 916 |
| 917 Register MacroAssembler::GetSmiConstant(Smi* source) { | 917 Register MacroAssembler::GetSmiConstant(Smi* source) { |
| 918 int value = source->value(); | 918 int value = source->value(); |
| 919 if (value == 0) { | 919 if (value == 0) { |
| 920 xorl(kScratchRegister, kScratchRegister); | 920 xorl(kScratchRegister, kScratchRegister); |
| 921 return kScratchRegister; | 921 return kScratchRegister; |
| 922 } | 922 } |
| 923 if (value == 1) { | |
| 924 return kSmiConstantRegister; | |
| 925 } | |
| 926 LoadSmiConstant(kScratchRegister, source); | 923 LoadSmiConstant(kScratchRegister, source); |
| 927 return kScratchRegister; | 924 return kScratchRegister; |
| 928 } | 925 } |
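The NEW GetSmiConstant keeps only the zero fast path (the xorl zero idiom clears the register in one cheap instruction) and routes every other constant through LoadSmiConstant; the value == 1 shortcut dies with kSmiConstantRegister. A minimal standalone sketch of the smi encoding this relies on, assuming V8's 32-bit smi payload layout (kSmiShift == 32); the names here are local stand-ins, not V8 API:

```cpp
#include <cassert>
#include <cstdint>

constexpr int kSmiShift = 32;  // assumed: SmiValuesAre32Bits() layout

// Stand-in for Smi::FromInt: the payload lives in the upper 32 bits.
uint64_t MakeSmi(int32_t value) {
  return static_cast<uint64_t>(static_cast<int64_t>(value)) << kSmiShift;
}

int main() {
  assert(MakeSmi(0) == 0);                          // why xorl suffices for zero
  assert(MakeSmi(1) == (1ULL << 32));               // the old r12 pinned value
  assert(MakeSmi(-1) == 0xFFFFFFFF00000000ULL);     // sign extends before the shift
}
```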
| 929 | 926 |
| 930 | 927 |
| 931 void MacroAssembler::LoadSmiConstant(Register dst, Smi* source) { | 928 void MacroAssembler::LoadSmiConstant(Register dst, Smi* source) { |
| 932 if (emit_debug_code()) { | 929 Move(dst, source, Assembler::RelocInfoNone()); |
| 933 Move(dst, Smi::FromInt(kSmiConstantRegisterValue), | |
| 934 Assembler::RelocInfoNone()); | |
| 935 cmpp(dst, kSmiConstantRegister); | |
| 936 Assert(equal, kUninitializedKSmiConstantRegister); | |
| 937 } | |
| 938 int value = source->value(); | |
| 939 if (value == 0) { | |
| 940 xorl(dst, dst); | |
| 941 return; | |
| 942 } | |
| 943 bool negative = value < 0; | |
| 944 unsigned int uvalue = negative ? -value : value; | |
| 945 | |
| 946 switch (uvalue) { | |
| 947 case 9: | |
| 948 leap(dst, | |
| 949 Operand(kSmiConstantRegister, kSmiConstantRegister, times_8, 0)); | |
| 950 break; | |
| 951 case 8: | |
| 952 xorl(dst, dst); | |
| 953 leap(dst, Operand(dst, kSmiConstantRegister, times_8, 0)); | |
| 954 break; | |
| 955 case 4: | |
| 956 xorl(dst, dst); | |
| 957 leap(dst, Operand(dst, kSmiConstantRegister, times_4, 0)); | |
| 958 break; | |
| 959 case 5: | |
| 960 leap(dst, | |
| 961 Operand(kSmiConstantRegister, kSmiConstantRegister, times_4, 0)); | |
| 962 break; | |
| 963 case 3: | |
| 964 leap(dst, | |
| 965 Operand(kSmiConstantRegister, kSmiConstantRegister, times_2, 0)); | |
| 966 break; | |
| 967 case 2: | |
| 968 leap(dst, | |
| 969 Operand(kSmiConstantRegister, kSmiConstantRegister, times_1, 0)); | |
| 970 break; | |
| 971 case 1: | |
| 972 movp(dst, kSmiConstantRegister); | |
| 973 break; | |
| 974 case 0: | |
| 975 UNREACHABLE(); | |
| 976 return; | |
| 977 default: | |
| 978 Move(dst, source, Assembler::RelocInfoNone()); | |
| 979 return; | |
| 980 } | |
| 981 if (negative) { | |
| 982 negp(dst); | |
| 983 } | |
| 984 } | 930 } |
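For reference, the deleted switch synthesized small smi constants from kSmiConstantRegister (which pinned Smi::FromInt(1) in r12) using lea's base + index * scale addressing, e.g. ×9 as base plus index scaled by 8. A sketch of that arithmetic, with kSmiOne standing in for the old register's value:

```cpp
#include <cassert>
#include <cstdint>

constexpr uint64_t kSmiOne = 1ULL << 32;  // what kSmiConstantRegister held

// lea with scale s computes base + index * s in one instruction.
uint64_t Lea(uint64_t base, uint64_t index, int scale) {
  return base + index * static_cast<uint64_t>(scale);
}

int main() {
  assert(Lea(kSmiOne, kSmiOne, 8) == 9 * kSmiOne);  // case 9
  assert(Lea(0, kSmiOne, 8) == 8 * kSmiOne);        // case 8, after xorl(dst, dst)
  assert(Lea(kSmiOne, kSmiOne, 4) == 5 * kSmiOne);  // case 5
  assert(Lea(kSmiOne, kSmiOne, 2) == 3 * kSmiOne);  // case 3
  assert(Lea(kSmiOne, kSmiOne, 1) == 2 * kSmiOne);  // case 2
}
```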
| 985 | 931 |
| 986 | 932 |
| 987 void MacroAssembler::Integer32ToSmi(Register dst, Register src) { | 933 void MacroAssembler::Integer32ToSmi(Register dst, Register src) { |
| 988 STATIC_ASSERT(kSmiTag == 0); | 934 STATIC_ASSERT(kSmiTag == 0); |
| 989 if (!dst.is(src)) { | 935 if (!dst.is(src)) { |
| 990 movl(dst, src); | 936 movl(dst, src); |
| 991 } | 937 } |
| 992 shlp(dst, Immediate(kSmiShift)); | 938 shlp(dst, Immediate(kSmiShift)); |
| 993 } | 939 } |
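Integer32ToSmi itself is unchanged: with 32-bit payloads, tagging is a plain left shift by kSmiShift and untagging an arithmetic right shift. A standalone round-trip sketch with local stand-ins, again assuming kSmiShift == 32:

```cpp
#include <cassert>
#include <cstdint>

constexpr int kSmiShift = 32;

uint64_t Integer32ToSmi(int32_t v) {
  return static_cast<uint64_t>(static_cast<uint32_t>(v)) << kSmiShift;  // shlp
}

int32_t SmiToInteger32(uint64_t smi) {
  // Arithmetic right shift, as sarp would do (two's complement assumed).
  return static_cast<int32_t>(static_cast<int64_t>(smi) >> kSmiShift);
}

int main() {
  assert(SmiToInteger32(Integer32ToSmi(42)) == 42);
  assert(SmiToInteger32(Integer32ToSmi(-1)) == -1);
  assert(Integer32ToSmi(-1) == 0xFFFFFFFF00000000ULL);
}
```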
| (...skipping 272 matching lines...) |
| 1266 if (!scratch.is(first)) { | 1212 if (!scratch.is(first)) { |
| 1267 movl(scratch, first); | 1213 movl(scratch, first); |
| 1268 } | 1214 } |
| 1269 andl(scratch, second); | 1215 andl(scratch, second); |
| 1270 } | 1216 } |
| 1271 testb(scratch, Immediate(kSmiTagMask)); | 1217 testb(scratch, Immediate(kSmiTagMask)); |
| 1272 return zero; | 1218 return zero; |
| 1273 } | 1219 } |
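The visible tail above is the and-based tag test: with kSmiTag == 0 a word is a smi iff its low bit is clear, and a & b has a clear low bit iff at least one operand does, so `zero` means "at least one is a smi" (the function header sits in the skipped region; this is the pattern behind CheckEitherSmi). A sketch:

```cpp
#include <cassert>
#include <cstdint>

constexpr uint64_t kSmiTagMask = 1;  // kSmiTag == 0, one tag bit

bool EitherSmi(uint64_t a, uint64_t b) {
  // andl(scratch, second); testb(scratch, kSmiTagMask); return zero;
  return ((a & b) & kSmiTagMask) == 0;
}

int main() {
  assert(EitherSmi(0x10, 0x11));   // first is a smi
  assert(EitherSmi(0x11, 0x10));   // second is a smi
  assert(!EitherSmi(0x11, 0x13));  // both are tagged heap-object pointers
}
```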
| 1274 | 1220 |
| 1275 | 1221 |
| 1276 Condition MacroAssembler::CheckIsMinSmi(Register src) { | |
| 1277 DCHECK(!src.is(kScratchRegister)); | |
| 1278 // If we overflow by subtracting one, it's the minimal smi value. | |
| 1279 cmpp(src, kSmiConstantRegister); | |
| 1280 return overflow; | |
| 1281 } | |
| 1282 | |
| 1283 | |
| 1284 Condition MacroAssembler::CheckInteger32ValidSmiValue(Register src) { | 1222 Condition MacroAssembler::CheckInteger32ValidSmiValue(Register src) { |
| 1285 if (SmiValuesAre32Bits()) { | 1223 if (SmiValuesAre32Bits()) { |
| 1286 // A 32-bit integer value can always be converted to a smi. | 1224 // A 32-bit integer value can always be converted to a smi. |
| 1287 return always; | 1225 return always; |
| 1288 } else { | 1226 } else { |
| 1289 DCHECK(SmiValuesAre31Bits()); | 1227 DCHECK(SmiValuesAre31Bits()); |
| 1290 cmpl(src, Immediate(0xc0000000)); | 1228 cmpl(src, Immediate(0xc0000000)); |
| 1291 return positive; | 1229 return positive; |
| 1292 } | 1230 } |
| 1293 } | 1231 } |
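The 31-bit branch is a compact range check: cmpl sets flags from src - int32(0xc0000000), i.e. src + 2^30 in 32-bit arithmetic, and the sign flag stays clear exactly when src lies in the valid payload range [-2^30, 2^30). A worked standalone sketch:

```cpp
#include <cassert>
#include <cstdint>

// "positive" after cmpl(src, 0xc0000000) means the sign flag is clear.
bool ValidSmi31(int32_t src) {
  uint32_t diff = static_cast<uint32_t>(src) - 0xc0000000u;  // src + 2^30
  return static_cast<int32_t>(diff) >= 0;                    // SF clear
}

int main() {
  assert(ValidSmi31(0));
  assert(ValidSmi31((1 << 30) - 1));    // max valid payload
  assert(!ValidSmi31(1 << 30));         // one past the top
  assert(ValidSmi31(-(1 << 30)));       // min valid payload
  assert(!ValidSmi31(-(1 << 30) - 1));  // one below the bottom
}
```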
| (...skipping 118 matching lines...) |
| 1412 | 1350 |
| 1413 | 1351 |
| 1414 void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant) { | 1352 void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant) { |
| 1415 if (constant->value() == 0) { | 1353 if (constant->value() == 0) { |
| 1416 if (!dst.is(src)) { | 1354 if (!dst.is(src)) { |
| 1417 movp(dst, src); | 1355 movp(dst, src); |
| 1418 } | 1356 } |
| 1419 return; | 1357 return; |
| 1420 } else if (dst.is(src)) { | 1358 } else if (dst.is(src)) { |
| 1421 DCHECK(!dst.is(kScratchRegister)); | 1359 DCHECK(!dst.is(kScratchRegister)); |
| 1422 switch (constant->value()) { | 1360 Register constant_reg = GetSmiConstant(constant); |
| 1423 case 1: | 1361 addp(dst, constant_reg); |
| 1424 addp(dst, kSmiConstantRegister); | |
| 1425 return; | |
| 1426 case 2: | |
| 1427 leap(dst, Operand(src, kSmiConstantRegister, times_2, 0)); | |
| 1428 return; | |
| 1429 case 4: | |
| 1430 leap(dst, Operand(src, kSmiConstantRegister, times_4, 0)); | |
| 1431 return; | |
| 1432 case 8: | |
| 1433 leap(dst, Operand(src, kSmiConstantRegister, times_8, 0)); | |
| 1434 return; | |
| 1435 default: | |
| 1436 Register constant_reg = GetSmiConstant(constant); | |
| 1437 addp(dst, constant_reg); | |
| 1438 return; | |
| 1439 } | |
| 1440 } else { | 1362 } else { |
| 1441 switch (constant->value()) { | 1363 LoadSmiConstant(dst, constant); |
| 1442 case 1: | 1364 addp(dst, src); |
| 1443 leap(dst, Operand(src, kSmiConstantRegister, times_1, 0)); | |
| 1444 return; | |
| 1445 case 2: | |
| 1446 leap(dst, Operand(src, kSmiConstantRegister, times_2, 0)); | |
| 1447 return; | |
| 1448 case 4: | |
| 1449 leap(dst, Operand(src, kSmiConstantRegister, times_4, 0)); | |
| 1450 return; | |
| 1451 case 8: | |
| 1452 leap(dst, Operand(src, kSmiConstantRegister, times_8, 0)); | |
| 1453 return; | |
| 1454 default: | |
| 1455 LoadSmiConstant(dst, constant); | |
| 1456 addp(dst, src); | |
| 1457 return; | |
| 1458 } | |
| 1459 } | 1365 } |
| 1460 } | 1366 } |
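Both SmiAddConstant branches collapse to materialize-then-addp once the lea shortcuts are gone: when dst aliases src the constant must live in the scratch register, otherwise it is loaded into dst first so src survives. A sketch of the two orderings, with local stand-ins for the register moves:

```cpp
#include <cassert>
#include <cstdint>

constexpr int kSmiShift = 32;
uint64_t SmiFromInt(int32_t v) { return static_cast<uint64_t>(v) << kSmiShift; }

// dst.is(src) branch: route the constant through a scratch copy.
void AddInPlace(uint64_t& dst, uint64_t constant_smi) {
  uint64_t scratch = constant_smi;  // GetSmiConstant(constant)
  dst += scratch;                   // addp(dst, constant_reg)
}

// dst != src branch: load the constant into dst, then add src.
void AddToFresh(uint64_t& dst, uint64_t src, uint64_t constant_smi) {
  dst = constant_smi;  // LoadSmiConstant(dst, constant)
  dst += src;          // addp(dst, src)
}

int main() {
  uint64_t a = SmiFromInt(40);
  AddInPlace(a, SmiFromInt(2));
  assert(a == SmiFromInt(42));

  uint64_t d = 0, s = SmiFromInt(7);
  AddToFresh(d, s, SmiFromInt(-3));
  assert(d == SmiFromInt(4) && s == SmiFromInt(7));  // src intact
}
```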
| 1461 | 1367 |
| 1462 | 1368 |
| 1463 void MacroAssembler::SmiAddConstant(const Operand& dst, Smi* constant) { | 1369 void MacroAssembler::SmiAddConstant(const Operand& dst, Smi* constant) { |
| 1464 if (constant->value() != 0) { | 1370 if (constant->value() != 0) { |
| 1465 if (SmiValuesAre32Bits()) { | 1371 if (SmiValuesAre32Bits()) { |
| 1466 addl(Operand(dst, kSmiShift / kBitsPerByte), | 1372 addl(Operand(dst, kSmiShift / kBitsPerByte), |
| 1467 Immediate(constant->value())); | 1373 Immediate(constant->value())); |
| 1468 } else { | 1374 } else { |
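The SmiValuesAre32Bits fast path above exploits the layout directly: the untagged payload occupies the upper dword of the 8-byte slot, so a 32-bit addl at byte offset kSmiShift / kBitsPerByte == 4 (little-endian x64 assumed) performs a full tagged add in one memory-operand instruction. A standalone model:

```cpp
#include <cassert>
#include <cstdint>
#include <cstring>

constexpr int kSmiShift = 32;
constexpr int kBitsPerByte = 8;

uint64_t SmiFromInt(int32_t v) { return static_cast<uint64_t>(v) << kSmiShift; }

void SmiAddConstantInMemory(uint64_t* slot, int32_t constant) {
  // Little-endian: the payload dword starts 4 bytes into the slot.
  unsigned char* p =
      reinterpret_cast<unsigned char*>(slot) + kSmiShift / kBitsPerByte;
  int32_t upper;
  std::memcpy(&upper, p, sizeof(upper));
  upper += constant;                 // addl(Operand(dst, 4), Immediate(...))
  std::memcpy(p, &upper, sizeof(upper));
}

int main() {
  uint64_t slot = SmiFromInt(40);
  SmiAddConstantInMemory(&slot, 2);
  assert(slot == SmiFromInt(42));
}
```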
| (...skipping 1294 matching lines...) |
| 2763 leal(rsp, Operand(rsp, 4)); | 2669 leal(rsp, Operand(rsp, 4)); |
| 2764 } | 2670 } |
| 2765 } | 2671 } |
| 2766 | 2672 |
| 2767 | 2673 |
| 2768 void MacroAssembler::Pop(const Operand& dst) { | 2674 void MacroAssembler::Pop(const Operand& dst) { |
| 2769 if (kPointerSize == kInt64Size) { | 2675 if (kPointerSize == kInt64Size) { |
| 2770 popq(dst); | 2676 popq(dst); |
| 2771 } else { | 2677 } else { |
| 2772 Register scratch = dst.AddressUsesRegister(kScratchRegister) | 2678 Register scratch = dst.AddressUsesRegister(kScratchRegister) |
| 2773 ? kSmiConstantRegister : kScratchRegister; | 2679 ? kRootRegister : kScratchRegister; |
| 2774 movp(scratch, Operand(rsp, 0)); | 2680 movp(scratch, Operand(rsp, 0)); |
| 2775 movp(dst, scratch); | 2681 movp(dst, scratch); |
| 2776 leal(rsp, Operand(rsp, 4)); | 2682 leal(rsp, Operand(rsp, 4)); |
| 2777 if (scratch.is(kSmiConstantRegister)) { | 2683 if (scratch.is(kRootRegister)) { |
| 2778 // Restore kSmiConstantRegister. | 2684 // Restore kRootRegister. |
| 2779 movp(kSmiConstantRegister, | 2685 InitializeRootRegister(); |
| 2780 reinterpret_cast<void*>(Smi::FromInt(kSmiConstantRegisterValue)), | |
| 2781 Assembler::RelocInfoNone()); | |
| 2782 } | 2686 } |
| 2783 } | 2687 } |
| 2784 } | 2688 } |
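On the 32-bit-pointer (x32) path, Pop needs a scratch register; when the destination's addressing mode already uses kScratchRegister, the NEW code borrows kRootRegister and simply re-derives it afterwards, where the OLD code borrowed kSmiConstantRegister and re-materialized Smi 1. A sketch of that clobber-and-recompute pattern; Isolate and roots_base are hypothetical stand-ins, not V8's types:

```cpp
#include <cassert>
#include <cstdint>

struct Isolate { uint64_t roots_base; };  // hypothetical stand-in

uint64_t root_register;  // models r13 / kRootRegister

// The register's value is a pure function of process state, so it can
// be recomputed instead of saved and restored across the borrow.
void InitializeRootRegister(const Isolate& isolate) {
  root_register = isolate.roots_base;
}

void PopToMemory(uint64_t* dst, uint64_t*& sp, const Isolate& isolate,
                 bool dst_uses_scratch) {
  if (dst_uses_scratch) {
    root_register = *sp;              // movp(scratch, Operand(rsp, 0))
    *dst = root_register;             // movp(dst, scratch)
    ++sp;                             // the leal rsp adjustment, in spirit
    InitializeRootRegister(isolate);  // restore the invariant
  } else {
    *dst = *sp;
    ++sp;
  }
}

int main() {
  Isolate iso{0x1000};
  InitializeRootRegister(iso);
  uint64_t stack[2] = {42, 0};
  uint64_t* sp = stack;
  uint64_t out = 0;
  PopToMemory(&out, sp, iso, /*dst_uses_scratch=*/true);
  assert(out == 42 && root_register == iso.roots_base);
}
```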
| 2785 | 2689 |
| 2786 | 2690 |
| 2787 void MacroAssembler::PopQuad(const Operand& dst) { | 2691 void MacroAssembler::PopQuad(const Operand& dst) { |
| 2788 if (kPointerSize == kInt64Size) { | 2692 if (kPointerSize == kInt64Size) { |
| 2789 popq(dst); | 2693 popq(dst); |
| 2790 } else { | 2694 } else { |
| 2791 popq(kScratchRegister); | 2695 popq(kScratchRegister); |
| (...skipping 122 matching lines...) |
| 2914 Push(rcx); | 2818 Push(rcx); |
| 2915 Push(rdx); | 2819 Push(rdx); |
| 2916 Push(rbx); | 2820 Push(rbx); |
| 2917 // Not pushing rsp or rbp. | 2821 // Not pushing rsp or rbp. |
| 2918 Push(rsi); | 2822 Push(rsi); |
| 2919 Push(rdi); | 2823 Push(rdi); |
| 2920 Push(r8); | 2824 Push(r8); |
| 2921 Push(r9); | 2825 Push(r9); |
| 2922 // r10 is kScratchRegister. | 2826 // r10 is kScratchRegister. |
| 2923 Push(r11); | 2827 Push(r11); |
| 2924 // r12 is kSmiConstantRegister. | 2828 Push(r12); |
| 2925 // r13 is kRootRegister. | 2829 // r13 is kRootRegister. |
| 2926 Push(r14); | 2830 Push(r14); |
| 2927 Push(r15); | 2831 Push(r15); |
| 2928 STATIC_ASSERT(11 == kNumSafepointSavedRegisters); | 2832 STATIC_ASSERT(12 == kNumSafepointSavedRegisters); |
| 2929 // Use lea for symmetry with Popad. | 2833 // Use lea for symmetry with Popad. |
| 2930 int sp_delta = | 2834 int sp_delta = |
| 2931 (kNumSafepointRegisters - kNumSafepointSavedRegisters) * kPointerSize; | 2835 (kNumSafepointRegisters - kNumSafepointSavedRegisters) * kPointerSize; |
| 2932 leap(rsp, Operand(rsp, -sp_delta)); | 2836 leap(rsp, Operand(rsp, -sp_delta)); |
| 2933 } | 2837 } |
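Pushad now saves r12 as an ordinary register, raising the saved count from 11 to 12; the remaining safepoint slots are reserved by adjusting rsp with lea, which (unlike sub) leaves the flags intact. Worked sp_delta arithmetic, assuming the x64 values kNumSafepointRegisters == 16 and kPointerSize == 8:

```cpp
#include <cassert>

constexpr int kNumSafepointRegisters = 16;      // assumed x64 value
constexpr int kNumSafepointSavedRegisters = 12; // NEW count, was 11
constexpr int kPointerSize = 8;

int main() {
  int sp_delta =
      (kNumSafepointRegisters - kNumSafepointSavedRegisters) * kPointerSize;
  assert(sp_delta == 32);  // leap(rsp, Operand(rsp, -32)) after 12 pushes
}
```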
| 2934 | 2838 |
| 2935 | 2839 |
| 2936 void MacroAssembler::Popad() { | 2840 void MacroAssembler::Popad() { |
| 2937 // Popad must not change the flags, so use lea instead of addq. | 2841 // Popad must not change the flags, so use lea instead of addq. |
| 2938 int sp_delta = | 2842 int sp_delta = |
| 2939 (kNumSafepointRegisters - kNumSafepointSavedRegisters) * kPointerSize; | 2843 (kNumSafepointRegisters - kNumSafepointSavedRegisters) * kPointerSize; |
| 2940 leap(rsp, Operand(rsp, sp_delta)); | 2844 leap(rsp, Operand(rsp, sp_delta)); |
| 2941 Pop(r15); | 2845 Pop(r15); |
| 2942 Pop(r14); | 2846 Pop(r14); |
| 2847 Pop(r12); |
| 2943 Pop(r11); | 2848 Pop(r11); |
| 2944 Pop(r9); | 2849 Pop(r9); |
| 2945 Pop(r8); | 2850 Pop(r8); |
| 2946 Pop(rdi); | 2851 Pop(rdi); |
| 2947 Pop(rsi); | 2852 Pop(rsi); |
| 2948 Pop(rbx); | 2853 Pop(rbx); |
| 2949 Pop(rdx); | 2854 Pop(rdx); |
| 2950 Pop(rcx); | 2855 Pop(rcx); |
| 2951 Pop(rax); | 2856 Pop(rax); |
| 2952 } | 2857 } |
| (...skipping 13 matching lines...) |
| 2966 2, | 2871 2, |
| 2967 3, | 2872 3, |
| 2968 -1, | 2873 -1, |
| 2969 -1, | 2874 -1, |
| 2970 4, | 2875 4, |
| 2971 5, | 2876 5, |
| 2972 6, | 2877 6, |
| 2973 7, | 2878 7, |
| 2974 -1, | 2879 -1, |
| 2975 8, | 2880 8, |
| 2881 9, |
| 2976 -1, | 2882 -1, |
| 2977 -1, | 2883 10, |
| 2978 9, | 2884 11 |
| 2979 10 | |
| 2980 }; | 2885 }; |
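This table maps an x64 register code to its position in Pushad's push order, with -1 for registers that are never saved (rsp, rbp, r10 as kScratchRegister, r13 as kRootRegister); freeing r12 inserts it at index 9 and shifts r14/r15 to 10 and 11. The NEW table as a checkable model:

```cpp
#include <cassert>

const int kSafepointPushRegisterIndices[16] = {
    0,  1,  2,  3,  // rax, rcx, rdx, rbx
    -1, -1,         // rsp, rbp: never saved
    4,  5,  6,  7,  // rsi, rdi, r8, r9
    -1,             // r10 is kScratchRegister
    8,  9,          // r11, r12 (r12 newly saved by this patch)
    -1,             // r13 is kRootRegister
    10, 11          // r14, r15
};

int main() {
  assert(kSafepointPushRegisterIndices[12] == 9);   // r12's new slot
  assert(kSafepointPushRegisterIndices[13] == -1);  // kRootRegister stays out
}
```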
| 2981 | 2886 |
| 2982 | 2887 |
| 2983 void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, | 2888 void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, |
| 2984 const Immediate& imm) { | 2889 const Immediate& imm) { |
| 2985 movp(SafepointRegisterSlot(dst), imm); | 2890 movp(SafepointRegisterSlot(dst), imm); |
| 2986 } | 2891 } |
| 2987 | 2892 |
| 2988 | 2893 |
| 2989 void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) { | 2894 void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) { |
| (...skipping 2206 matching lines...) |
| 5196 if (mag.shift > 0) sarl(rdx, Immediate(mag.shift)); | 5101 if (mag.shift > 0) sarl(rdx, Immediate(mag.shift)); |
| 5197 movl(rax, dividend); | 5102 movl(rax, dividend); |
| 5198 shrl(rax, Immediate(31)); | 5103 shrl(rax, Immediate(31)); |
| 5199 addl(rdx, rax); | 5104 addl(rdx, rax); |
| 5200 } | 5105 } |
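The visible tail is the end of multiply-high signed division by a constant (Granlund–Montgomery, as in Hacker's Delight): take the high half of the imul, optionally shift, then add the dividend's sign bit to round toward zero. A standalone model for division by 3; the magic pair (0x55555556, shift 0) is the textbook value, whereas V8 computes its pair via base::SignedDivisionByConstant in the skipped code:

```cpp
#include <cassert>
#include <cstdint>

int32_t DivBy3(int32_t dividend) {
  const int32_t kMultiplier = 0x55555556;  // textbook magic number for /3
  const int kShift = 0;
  int64_t product = static_cast<int64_t>(dividend) * kMultiplier;
  int32_t rdx = static_cast<int32_t>(product >> 32);       // high half of imull
  if (kShift > 0) rdx >>= kShift;                          // sarl(rdx, shift)
  uint32_t sign = static_cast<uint32_t>(dividend) >> 31;   // shrl(rax, 31)
  return rdx + static_cast<int32_t>(sign);                 // addl(rdx, rax)
}

int main() {
  assert(DivBy3(7) == 2);
  assert(DivBy3(-7) == -2);   // rounds toward zero, not toward -inf
  assert(DivBy3(9) == 3);
  assert(DivBy3(-2147483647) == -715827882);
}
```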
| 5201 | 5106 |
| 5202 | 5107 |
| 5203 } } // namespace v8::internal | 5108 } } // namespace v8::internal |
| 5204 | 5109 |
| 5205 #endif // V8_TARGET_ARCH_X64 | 5110 #endif // V8_TARGET_ARCH_X64 |