| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #if V8_TARGET_ARCH_X64 | 5 #if V8_TARGET_ARCH_X64 |
| 6 | 6 |
| 7 #include "src/crankshaft/x64/lithium-codegen-x64.h" | 7 #include "src/crankshaft/x64/lithium-codegen-x64.h" |
| 8 | 8 |
| 9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
| 10 #include "src/code-factory.h" | 10 #include "src/code-factory.h" |
| (...skipping 684 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 695 WriteTranslation(environment, &translation); | 695 WriteTranslation(environment, &translation); |
| 696 int deoptimization_index = deoptimizations_.length(); | 696 int deoptimization_index = deoptimizations_.length(); |
| 697 int pc_offset = masm()->pc_offset(); | 697 int pc_offset = masm()->pc_offset(); |
| 698 environment->Register(deoptimization_index, | 698 environment->Register(deoptimization_index, |
| 699 translation.index(), | 699 translation.index(), |
| 700 (mode == Safepoint::kLazyDeopt) ? pc_offset : -1); | 700 (mode == Safepoint::kLazyDeopt) ? pc_offset : -1); |
| 701 deoptimizations_.Add(environment, environment->zone()); | 701 deoptimizations_.Add(environment, environment->zone()); |
| 702 } | 702 } |
| 703 } | 703 } |
| 704 | 704 |
| 705 | |
| 706 void LCodeGen::DeoptimizeIf(Condition cc, LInstruction* instr, | 705 void LCodeGen::DeoptimizeIf(Condition cc, LInstruction* instr, |
| 707 Deoptimizer::DeoptReason deopt_reason, | 706 DeoptimizeReason deopt_reason, |
| 708 Deoptimizer::BailoutType bailout_type) { | 707 Deoptimizer::BailoutType bailout_type) { |
| 709 LEnvironment* environment = instr->environment(); | 708 LEnvironment* environment = instr->environment(); |
| 710 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); | 709 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); |
| 711 DCHECK(environment->HasBeenRegistered()); | 710 DCHECK(environment->HasBeenRegistered()); |
| 712 int id = environment->deoptimization_index(); | 711 int id = environment->deoptimization_index(); |
| 713 Address entry = | 712 Address entry = |
| 714 Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type); | 713 Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type); |
| 715 if (entry == NULL) { | 714 if (entry == NULL) { |
| 716 Abort(kBailoutWasNotPrepared); | 715 Abort(kBailoutWasNotPrepared); |
| 717 return; | 716 return; |
| (...skipping 50 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 768 jump_table_.Add(table_entry, zone()); | 767 jump_table_.Add(table_entry, zone()); |
| 769 } | 768 } |
| 770 if (cc == no_condition) { | 769 if (cc == no_condition) { |
| 771 __ jmp(&jump_table_.last().label); | 770 __ jmp(&jump_table_.last().label); |
| 772 } else { | 771 } else { |
| 773 __ j(cc, &jump_table_.last().label); | 772 __ j(cc, &jump_table_.last().label); |
| 774 } | 773 } |
| 775 } | 774 } |
| 776 } | 775 } |
| 777 | 776 |
| 778 | |
| 779 void LCodeGen::DeoptimizeIf(Condition cc, LInstruction* instr, | 777 void LCodeGen::DeoptimizeIf(Condition cc, LInstruction* instr, |
| 780 Deoptimizer::DeoptReason deopt_reason) { | 778 DeoptimizeReason deopt_reason) { |
| 781 Deoptimizer::BailoutType bailout_type = info()->IsStub() | 779 Deoptimizer::BailoutType bailout_type = info()->IsStub() |
| 782 ? Deoptimizer::LAZY | 780 ? Deoptimizer::LAZY |
| 783 : Deoptimizer::EAGER; | 781 : Deoptimizer::EAGER; |
| 784 DeoptimizeIf(cc, instr, deopt_reason, bailout_type); | 782 DeoptimizeIf(cc, instr, deopt_reason, bailout_type); |
| 785 } | 783 } |
| 786 | 784 |
| 787 | 785 |
| 788 void LCodeGen::RecordSafepointWithLazyDeopt( | 786 void LCodeGen::RecordSafepointWithLazyDeopt( |
| 789 LInstruction* instr, SafepointMode safepoint_mode, int argc) { | 787 LInstruction* instr, SafepointMode safepoint_mode, int argc) { |
| 790 if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) { | 788 if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) { |
| (...skipping 112 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 903 int32_t mask = divisor < 0 ? -(divisor + 1) : (divisor - 1); | 901 int32_t mask = divisor < 0 ? -(divisor + 1) : (divisor - 1); |
| 904 Label dividend_is_not_negative, done; | 902 Label dividend_is_not_negative, done; |
| 905 if (hmod->CheckFlag(HValue::kLeftCanBeNegative)) { | 903 if (hmod->CheckFlag(HValue::kLeftCanBeNegative)) { |
| 906 __ testl(dividend, dividend); | 904 __ testl(dividend, dividend); |
| 907 __ j(not_sign, ÷nd_is_not_negative, Label::kNear); | 905 __ j(not_sign, ÷nd_is_not_negative, Label::kNear); |
| 908 // Note that this is correct even for kMinInt operands. | 906 // Note that this is correct even for kMinInt operands. |
| 909 __ negl(dividend); | 907 __ negl(dividend); |
| 910 __ andl(dividend, Immediate(mask)); | 908 __ andl(dividend, Immediate(mask)); |
| 911 __ negl(dividend); | 909 __ negl(dividend); |
| 912 if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { | 910 if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { |
| 913 DeoptimizeIf(zero, instr, Deoptimizer::kMinusZero); | 911 DeoptimizeIf(zero, instr, DeoptimizeReason::kMinusZero); |
| 914 } | 912 } |
| 915 __ jmp(&done, Label::kNear); | 913 __ jmp(&done, Label::kNear); |
| 916 } | 914 } |
| 917 | 915 |
| 918 __ bind(÷nd_is_not_negative); | 916 __ bind(÷nd_is_not_negative); |
| 919 __ andl(dividend, Immediate(mask)); | 917 __ andl(dividend, Immediate(mask)); |
| 920 __ bind(&done); | 918 __ bind(&done); |
| 921 } | 919 } |
| 922 | 920 |
| 923 | 921 |
| 924 void LCodeGen::DoModByConstI(LModByConstI* instr) { | 922 void LCodeGen::DoModByConstI(LModByConstI* instr) { |
| 925 Register dividend = ToRegister(instr->dividend()); | 923 Register dividend = ToRegister(instr->dividend()); |
| 926 int32_t divisor = instr->divisor(); | 924 int32_t divisor = instr->divisor(); |
| 927 DCHECK(ToRegister(instr->result()).is(rax)); | 925 DCHECK(ToRegister(instr->result()).is(rax)); |
| 928 | 926 |
| 929 if (divisor == 0) { | 927 if (divisor == 0) { |
| 930 DeoptimizeIf(no_condition, instr, Deoptimizer::kDivisionByZero); | 928 DeoptimizeIf(no_condition, instr, DeoptimizeReason::kDivisionByZero); |
| 931 return; | 929 return; |
| 932 } | 930 } |
| 933 | 931 |
| 934 __ TruncatingDiv(dividend, Abs(divisor)); | 932 __ TruncatingDiv(dividend, Abs(divisor)); |
| 935 __ imull(rdx, rdx, Immediate(Abs(divisor))); | 933 __ imull(rdx, rdx, Immediate(Abs(divisor))); |
| 936 __ movl(rax, dividend); | 934 __ movl(rax, dividend); |
| 937 __ subl(rax, rdx); | 935 __ subl(rax, rdx); |
| 938 | 936 |
| 939 // Check for negative zero. | 937 // Check for negative zero. |
| 940 HMod* hmod = instr->hydrogen(); | 938 HMod* hmod = instr->hydrogen(); |
| 941 if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { | 939 if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { |
| 942 Label remainder_not_zero; | 940 Label remainder_not_zero; |
| 943 __ j(not_zero, &remainder_not_zero, Label::kNear); | 941 __ j(not_zero, &remainder_not_zero, Label::kNear); |
| 944 __ cmpl(dividend, Immediate(0)); | 942 __ cmpl(dividend, Immediate(0)); |
| 945 DeoptimizeIf(less, instr, Deoptimizer::kMinusZero); | 943 DeoptimizeIf(less, instr, DeoptimizeReason::kMinusZero); |
| 946 __ bind(&remainder_not_zero); | 944 __ bind(&remainder_not_zero); |
| 947 } | 945 } |
| 948 } | 946 } |
| 949 | 947 |
| 950 | 948 |
| 951 void LCodeGen::DoModI(LModI* instr) { | 949 void LCodeGen::DoModI(LModI* instr) { |
| 952 HMod* hmod = instr->hydrogen(); | 950 HMod* hmod = instr->hydrogen(); |
| 953 | 951 |
| 954 Register left_reg = ToRegister(instr->left()); | 952 Register left_reg = ToRegister(instr->left()); |
| 955 DCHECK(left_reg.is(rax)); | 953 DCHECK(left_reg.is(rax)); |
| 956 Register right_reg = ToRegister(instr->right()); | 954 Register right_reg = ToRegister(instr->right()); |
| 957 DCHECK(!right_reg.is(rax)); | 955 DCHECK(!right_reg.is(rax)); |
| 958 DCHECK(!right_reg.is(rdx)); | 956 DCHECK(!right_reg.is(rdx)); |
| 959 Register result_reg = ToRegister(instr->result()); | 957 Register result_reg = ToRegister(instr->result()); |
| 960 DCHECK(result_reg.is(rdx)); | 958 DCHECK(result_reg.is(rdx)); |
| 961 | 959 |
| 962 Label done; | 960 Label done; |
| 963 // Check for x % 0, idiv would signal a divide error. We have to | 961 // Check for x % 0, idiv would signal a divide error. We have to |
| 964 // deopt in this case because we can't return a NaN. | 962 // deopt in this case because we can't return a NaN. |
| 965 if (hmod->CheckFlag(HValue::kCanBeDivByZero)) { | 963 if (hmod->CheckFlag(HValue::kCanBeDivByZero)) { |
| 966 __ testl(right_reg, right_reg); | 964 __ testl(right_reg, right_reg); |
| 967 DeoptimizeIf(zero, instr, Deoptimizer::kDivisionByZero); | 965 DeoptimizeIf(zero, instr, DeoptimizeReason::kDivisionByZero); |
| 968 } | 966 } |
| 969 | 967 |
| 970 // Check for kMinInt % -1, idiv would signal a divide error. We | 968 // Check for kMinInt % -1, idiv would signal a divide error. We |
| 971 // have to deopt if we care about -0, because we can't return that. | 969 // have to deopt if we care about -0, because we can't return that. |
| 972 if (hmod->CheckFlag(HValue::kCanOverflow)) { | 970 if (hmod->CheckFlag(HValue::kCanOverflow)) { |
| 973 Label no_overflow_possible; | 971 Label no_overflow_possible; |
| 974 __ cmpl(left_reg, Immediate(kMinInt)); | 972 __ cmpl(left_reg, Immediate(kMinInt)); |
| 975 __ j(not_zero, &no_overflow_possible, Label::kNear); | 973 __ j(not_zero, &no_overflow_possible, Label::kNear); |
| 976 __ cmpl(right_reg, Immediate(-1)); | 974 __ cmpl(right_reg, Immediate(-1)); |
| 977 if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { | 975 if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { |
| 978 DeoptimizeIf(equal, instr, Deoptimizer::kMinusZero); | 976 DeoptimizeIf(equal, instr, DeoptimizeReason::kMinusZero); |
| 979 } else { | 977 } else { |
| 980 __ j(not_equal, &no_overflow_possible, Label::kNear); | 978 __ j(not_equal, &no_overflow_possible, Label::kNear); |
| 981 __ Set(result_reg, 0); | 979 __ Set(result_reg, 0); |
| 982 __ jmp(&done, Label::kNear); | 980 __ jmp(&done, Label::kNear); |
| 983 } | 981 } |
| 984 __ bind(&no_overflow_possible); | 982 __ bind(&no_overflow_possible); |
| 985 } | 983 } |
| 986 | 984 |
| 987 // Sign extend dividend in eax into edx:eax, since we are using only the low | 985 // Sign extend dividend in eax into edx:eax, since we are using only the low |
| 988 // 32 bits of the values. | 986 // 32 bits of the values. |
| 989 __ cdq(); | 987 __ cdq(); |
| 990 | 988 |
| 991 // If we care about -0, test if the dividend is <0 and the result is 0. | 989 // If we care about -0, test if the dividend is <0 and the result is 0. |
| 992 if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { | 990 if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { |
| 993 Label positive_left; | 991 Label positive_left; |
| 994 __ testl(left_reg, left_reg); | 992 __ testl(left_reg, left_reg); |
| 995 __ j(not_sign, &positive_left, Label::kNear); | 993 __ j(not_sign, &positive_left, Label::kNear); |
| 996 __ idivl(right_reg); | 994 __ idivl(right_reg); |
| 997 __ testl(result_reg, result_reg); | 995 __ testl(result_reg, result_reg); |
| 998 DeoptimizeIf(zero, instr, Deoptimizer::kMinusZero); | 996 DeoptimizeIf(zero, instr, DeoptimizeReason::kMinusZero); |
| 999 __ jmp(&done, Label::kNear); | 997 __ jmp(&done, Label::kNear); |
| 1000 __ bind(&positive_left); | 998 __ bind(&positive_left); |
| 1001 } | 999 } |
| 1002 __ idivl(right_reg); | 1000 __ idivl(right_reg); |
| 1003 __ bind(&done); | 1001 __ bind(&done); |
| 1004 } | 1002 } |
| 1005 | 1003 |
| 1006 | 1004 |
| 1007 void LCodeGen::DoFlooringDivByPowerOf2I(LFlooringDivByPowerOf2I* instr) { | 1005 void LCodeGen::DoFlooringDivByPowerOf2I(LFlooringDivByPowerOf2I* instr) { |
| 1008 Register dividend = ToRegister(instr->dividend()); | 1006 Register dividend = ToRegister(instr->dividend()); |
| 1009 int32_t divisor = instr->divisor(); | 1007 int32_t divisor = instr->divisor(); |
| 1010 DCHECK(dividend.is(ToRegister(instr->result()))); | 1008 DCHECK(dividend.is(ToRegister(instr->result()))); |
| 1011 | 1009 |
| 1012 // If the divisor is positive, things are easy: There can be no deopts and we | 1010 // If the divisor is positive, things are easy: There can be no deopts and we |
| 1013 // can simply do an arithmetic right shift. | 1011 // can simply do an arithmetic right shift. |
| 1014 if (divisor == 1) return; | 1012 if (divisor == 1) return; |
| 1015 int32_t shift = WhichPowerOf2Abs(divisor); | 1013 int32_t shift = WhichPowerOf2Abs(divisor); |
| 1016 if (divisor > 1) { | 1014 if (divisor > 1) { |
| 1017 __ sarl(dividend, Immediate(shift)); | 1015 __ sarl(dividend, Immediate(shift)); |
| 1018 return; | 1016 return; |
| 1019 } | 1017 } |
| 1020 | 1018 |
| 1021 // If the divisor is negative, we have to negate and handle edge cases. | 1019 // If the divisor is negative, we have to negate and handle edge cases. |
| 1022 __ negl(dividend); | 1020 __ negl(dividend); |
| 1023 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 1021 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
| 1024 DeoptimizeIf(zero, instr, Deoptimizer::kMinusZero); | 1022 DeoptimizeIf(zero, instr, DeoptimizeReason::kMinusZero); |
| 1025 } | 1023 } |
| 1026 | 1024 |
| 1027 // Dividing by -1 is basically negation, unless we overflow. | 1025 // Dividing by -1 is basically negation, unless we overflow. |
| 1028 if (divisor == -1) { | 1026 if (divisor == -1) { |
| 1029 if (instr->hydrogen()->CheckFlag(HValue::kLeftCanBeMinInt)) { | 1027 if (instr->hydrogen()->CheckFlag(HValue::kLeftCanBeMinInt)) { |
| 1030 DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow); | 1028 DeoptimizeIf(overflow, instr, DeoptimizeReason::kOverflow); |
| 1031 } | 1029 } |
| 1032 return; | 1030 return; |
| 1033 } | 1031 } |
| 1034 | 1032 |
| 1035 // If the negation could not overflow, simply shifting is OK. | 1033 // If the negation could not overflow, simply shifting is OK. |
| 1036 if (!instr->hydrogen()->CheckFlag(HValue::kLeftCanBeMinInt)) { | 1034 if (!instr->hydrogen()->CheckFlag(HValue::kLeftCanBeMinInt)) { |
| 1037 __ sarl(dividend, Immediate(shift)); | 1035 __ sarl(dividend, Immediate(shift)); |
| 1038 return; | 1036 return; |
| 1039 } | 1037 } |
| 1040 | 1038 |
| 1041 Label not_kmin_int, done; | 1039 Label not_kmin_int, done; |
| 1042 __ j(no_overflow, ¬_kmin_int, Label::kNear); | 1040 __ j(no_overflow, ¬_kmin_int, Label::kNear); |
| 1043 __ movl(dividend, Immediate(kMinInt / divisor)); | 1041 __ movl(dividend, Immediate(kMinInt / divisor)); |
| 1044 __ jmp(&done, Label::kNear); | 1042 __ jmp(&done, Label::kNear); |
| 1045 __ bind(¬_kmin_int); | 1043 __ bind(¬_kmin_int); |
| 1046 __ sarl(dividend, Immediate(shift)); | 1044 __ sarl(dividend, Immediate(shift)); |
| 1047 __ bind(&done); | 1045 __ bind(&done); |
| 1048 } | 1046 } |
| 1049 | 1047 |
| 1050 | 1048 |
| 1051 void LCodeGen::DoFlooringDivByConstI(LFlooringDivByConstI* instr) { | 1049 void LCodeGen::DoFlooringDivByConstI(LFlooringDivByConstI* instr) { |
| 1052 Register dividend = ToRegister(instr->dividend()); | 1050 Register dividend = ToRegister(instr->dividend()); |
| 1053 int32_t divisor = instr->divisor(); | 1051 int32_t divisor = instr->divisor(); |
| 1054 DCHECK(ToRegister(instr->result()).is(rdx)); | 1052 DCHECK(ToRegister(instr->result()).is(rdx)); |
| 1055 | 1053 |
| 1056 if (divisor == 0) { | 1054 if (divisor == 0) { |
| 1057 DeoptimizeIf(no_condition, instr, Deoptimizer::kDivisionByZero); | 1055 DeoptimizeIf(no_condition, instr, DeoptimizeReason::kDivisionByZero); |
| 1058 return; | 1056 return; |
| 1059 } | 1057 } |
| 1060 | 1058 |
| 1061 // Check for (0 / -x) that will produce negative zero. | 1059 // Check for (0 / -x) that will produce negative zero. |
| 1062 HMathFloorOfDiv* hdiv = instr->hydrogen(); | 1060 HMathFloorOfDiv* hdiv = instr->hydrogen(); |
| 1063 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) { | 1061 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) { |
| 1064 __ testl(dividend, dividend); | 1062 __ testl(dividend, dividend); |
| 1065 DeoptimizeIf(zero, instr, Deoptimizer::kMinusZero); | 1063 DeoptimizeIf(zero, instr, DeoptimizeReason::kMinusZero); |
| 1066 } | 1064 } |
| 1067 | 1065 |
| 1068 // Easy case: We need no dynamic check for the dividend and the flooring | 1066 // Easy case: We need no dynamic check for the dividend and the flooring |
| 1069 // division is the same as the truncating division. | 1067 // division is the same as the truncating division. |
| 1070 if ((divisor > 0 && !hdiv->CheckFlag(HValue::kLeftCanBeNegative)) || | 1068 if ((divisor > 0 && !hdiv->CheckFlag(HValue::kLeftCanBeNegative)) || |
| 1071 (divisor < 0 && !hdiv->CheckFlag(HValue::kLeftCanBePositive))) { | 1069 (divisor < 0 && !hdiv->CheckFlag(HValue::kLeftCanBePositive))) { |
| 1072 __ TruncatingDiv(dividend, Abs(divisor)); | 1070 __ TruncatingDiv(dividend, Abs(divisor)); |
| 1073 if (divisor < 0) __ negl(rdx); | 1071 if (divisor < 0) __ negl(rdx); |
| 1074 return; | 1072 return; |
| 1075 } | 1073 } |
| (...skipping 26 matching lines...) Expand all Loading... |
| 1102 Register result = ToRegister(instr->result()); | 1100 Register result = ToRegister(instr->result()); |
| 1103 DCHECK(dividend.is(rax)); | 1101 DCHECK(dividend.is(rax)); |
| 1104 DCHECK(remainder.is(rdx)); | 1102 DCHECK(remainder.is(rdx)); |
| 1105 DCHECK(result.is(rax)); | 1103 DCHECK(result.is(rax)); |
| 1106 DCHECK(!divisor.is(rax)); | 1104 DCHECK(!divisor.is(rax)); |
| 1107 DCHECK(!divisor.is(rdx)); | 1105 DCHECK(!divisor.is(rdx)); |
| 1108 | 1106 |
| 1109 // Check for x / 0. | 1107 // Check for x / 0. |
| 1110 if (hdiv->CheckFlag(HValue::kCanBeDivByZero)) { | 1108 if (hdiv->CheckFlag(HValue::kCanBeDivByZero)) { |
| 1111 __ testl(divisor, divisor); | 1109 __ testl(divisor, divisor); |
| 1112 DeoptimizeIf(zero, instr, Deoptimizer::kDivisionByZero); | 1110 DeoptimizeIf(zero, instr, DeoptimizeReason::kDivisionByZero); |
| 1113 } | 1111 } |
| 1114 | 1112 |
| 1115 // Check for (0 / -x) that will produce negative zero. | 1113 // Check for (0 / -x) that will produce negative zero. |
| 1116 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero)) { | 1114 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero)) { |
| 1117 Label dividend_not_zero; | 1115 Label dividend_not_zero; |
| 1118 __ testl(dividend, dividend); | 1116 __ testl(dividend, dividend); |
| 1119 __ j(not_zero, ÷nd_not_zero, Label::kNear); | 1117 __ j(not_zero, ÷nd_not_zero, Label::kNear); |
| 1120 __ testl(divisor, divisor); | 1118 __ testl(divisor, divisor); |
| 1121 DeoptimizeIf(sign, instr, Deoptimizer::kMinusZero); | 1119 DeoptimizeIf(sign, instr, DeoptimizeReason::kMinusZero); |
| 1122 __ bind(÷nd_not_zero); | 1120 __ bind(÷nd_not_zero); |
| 1123 } | 1121 } |
| 1124 | 1122 |
| 1125 // Check for (kMinInt / -1). | 1123 // Check for (kMinInt / -1). |
| 1126 if (hdiv->CheckFlag(HValue::kCanOverflow)) { | 1124 if (hdiv->CheckFlag(HValue::kCanOverflow)) { |
| 1127 Label dividend_not_min_int; | 1125 Label dividend_not_min_int; |
| 1128 __ cmpl(dividend, Immediate(kMinInt)); | 1126 __ cmpl(dividend, Immediate(kMinInt)); |
| 1129 __ j(not_zero, ÷nd_not_min_int, Label::kNear); | 1127 __ j(not_zero, ÷nd_not_min_int, Label::kNear); |
| 1130 __ cmpl(divisor, Immediate(-1)); | 1128 __ cmpl(divisor, Immediate(-1)); |
| 1131 DeoptimizeIf(zero, instr, Deoptimizer::kOverflow); | 1129 DeoptimizeIf(zero, instr, DeoptimizeReason::kOverflow); |
| 1132 __ bind(÷nd_not_min_int); | 1130 __ bind(÷nd_not_min_int); |
| 1133 } | 1131 } |
| 1134 | 1132 |
| 1135 // Sign extend to rdx (= remainder). | 1133 // Sign extend to rdx (= remainder). |
| 1136 __ cdq(); | 1134 __ cdq(); |
| 1137 __ idivl(divisor); | 1135 __ idivl(divisor); |
| 1138 | 1136 |
| 1139 Label done; | 1137 Label done; |
| 1140 __ testl(remainder, remainder); | 1138 __ testl(remainder, remainder); |
| 1141 __ j(zero, &done, Label::kNear); | 1139 __ j(zero, &done, Label::kNear); |
| 1142 __ xorl(remainder, divisor); | 1140 __ xorl(remainder, divisor); |
| 1143 __ sarl(remainder, Immediate(31)); | 1141 __ sarl(remainder, Immediate(31)); |
| 1144 __ addl(result, remainder); | 1142 __ addl(result, remainder); |
| 1145 __ bind(&done); | 1143 __ bind(&done); |
| 1146 } | 1144 } |
| 1147 | 1145 |
| 1148 | 1146 |
| 1149 void LCodeGen::DoDivByPowerOf2I(LDivByPowerOf2I* instr) { | 1147 void LCodeGen::DoDivByPowerOf2I(LDivByPowerOf2I* instr) { |
| 1150 Register dividend = ToRegister(instr->dividend()); | 1148 Register dividend = ToRegister(instr->dividend()); |
| 1151 int32_t divisor = instr->divisor(); | 1149 int32_t divisor = instr->divisor(); |
| 1152 Register result = ToRegister(instr->result()); | 1150 Register result = ToRegister(instr->result()); |
| 1153 DCHECK(divisor == kMinInt || base::bits::IsPowerOfTwo32(Abs(divisor))); | 1151 DCHECK(divisor == kMinInt || base::bits::IsPowerOfTwo32(Abs(divisor))); |
| 1154 DCHECK(!result.is(dividend)); | 1152 DCHECK(!result.is(dividend)); |
| 1155 | 1153 |
| 1156 // Check for (0 / -x) that will produce negative zero. | 1154 // Check for (0 / -x) that will produce negative zero. |
| 1157 HDiv* hdiv = instr->hydrogen(); | 1155 HDiv* hdiv = instr->hydrogen(); |
| 1158 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) { | 1156 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) { |
| 1159 __ testl(dividend, dividend); | 1157 __ testl(dividend, dividend); |
| 1160 DeoptimizeIf(zero, instr, Deoptimizer::kMinusZero); | 1158 DeoptimizeIf(zero, instr, DeoptimizeReason::kMinusZero); |
| 1161 } | 1159 } |
| 1162 // Check for (kMinInt / -1). | 1160 // Check for (kMinInt / -1). |
| 1163 if (hdiv->CheckFlag(HValue::kCanOverflow) && divisor == -1) { | 1161 if (hdiv->CheckFlag(HValue::kCanOverflow) && divisor == -1) { |
| 1164 __ cmpl(dividend, Immediate(kMinInt)); | 1162 __ cmpl(dividend, Immediate(kMinInt)); |
| 1165 DeoptimizeIf(zero, instr, Deoptimizer::kOverflow); | 1163 DeoptimizeIf(zero, instr, DeoptimizeReason::kOverflow); |
| 1166 } | 1164 } |
| 1167 // Deoptimize if remainder will not be 0. | 1165 // Deoptimize if remainder will not be 0. |
| 1168 if (!hdiv->CheckFlag(HInstruction::kAllUsesTruncatingToInt32) && | 1166 if (!hdiv->CheckFlag(HInstruction::kAllUsesTruncatingToInt32) && |
| 1169 divisor != 1 && divisor != -1) { | 1167 divisor != 1 && divisor != -1) { |
| 1170 int32_t mask = divisor < 0 ? -(divisor + 1) : (divisor - 1); | 1168 int32_t mask = divisor < 0 ? -(divisor + 1) : (divisor - 1); |
| 1171 __ testl(dividend, Immediate(mask)); | 1169 __ testl(dividend, Immediate(mask)); |
| 1172 DeoptimizeIf(not_zero, instr, Deoptimizer::kLostPrecision); | 1170 DeoptimizeIf(not_zero, instr, DeoptimizeReason::kLostPrecision); |
| 1173 } | 1171 } |
| 1174 __ Move(result, dividend); | 1172 __ Move(result, dividend); |
| 1175 int32_t shift = WhichPowerOf2Abs(divisor); | 1173 int32_t shift = WhichPowerOf2Abs(divisor); |
| 1176 if (shift > 0) { | 1174 if (shift > 0) { |
| 1177 // The arithmetic shift is always OK, the 'if' is an optimization only. | 1175 // The arithmetic shift is always OK, the 'if' is an optimization only. |
| 1178 if (shift > 1) __ sarl(result, Immediate(31)); | 1176 if (shift > 1) __ sarl(result, Immediate(31)); |
| 1179 __ shrl(result, Immediate(32 - shift)); | 1177 __ shrl(result, Immediate(32 - shift)); |
| 1180 __ addl(result, dividend); | 1178 __ addl(result, dividend); |
| 1181 __ sarl(result, Immediate(shift)); | 1179 __ sarl(result, Immediate(shift)); |
| 1182 } | 1180 } |
| 1183 if (divisor < 0) __ negl(result); | 1181 if (divisor < 0) __ negl(result); |
| 1184 } | 1182 } |
| 1185 | 1183 |
| 1186 | 1184 |
| 1187 void LCodeGen::DoDivByConstI(LDivByConstI* instr) { | 1185 void LCodeGen::DoDivByConstI(LDivByConstI* instr) { |
| 1188 Register dividend = ToRegister(instr->dividend()); | 1186 Register dividend = ToRegister(instr->dividend()); |
| 1189 int32_t divisor = instr->divisor(); | 1187 int32_t divisor = instr->divisor(); |
| 1190 DCHECK(ToRegister(instr->result()).is(rdx)); | 1188 DCHECK(ToRegister(instr->result()).is(rdx)); |
| 1191 | 1189 |
| 1192 if (divisor == 0) { | 1190 if (divisor == 0) { |
| 1193 DeoptimizeIf(no_condition, instr, Deoptimizer::kDivisionByZero); | 1191 DeoptimizeIf(no_condition, instr, DeoptimizeReason::kDivisionByZero); |
| 1194 return; | 1192 return; |
| 1195 } | 1193 } |
| 1196 | 1194 |
| 1197 // Check for (0 / -x) that will produce negative zero. | 1195 // Check for (0 / -x) that will produce negative zero. |
| 1198 HDiv* hdiv = instr->hydrogen(); | 1196 HDiv* hdiv = instr->hydrogen(); |
| 1199 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) { | 1197 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) { |
| 1200 __ testl(dividend, dividend); | 1198 __ testl(dividend, dividend); |
| 1201 DeoptimizeIf(zero, instr, Deoptimizer::kMinusZero); | 1199 DeoptimizeIf(zero, instr, DeoptimizeReason::kMinusZero); |
| 1202 } | 1200 } |
| 1203 | 1201 |
| 1204 __ TruncatingDiv(dividend, Abs(divisor)); | 1202 __ TruncatingDiv(dividend, Abs(divisor)); |
| 1205 if (divisor < 0) __ negl(rdx); | 1203 if (divisor < 0) __ negl(rdx); |
| 1206 | 1204 |
| 1207 if (!hdiv->CheckFlag(HInstruction::kAllUsesTruncatingToInt32)) { | 1205 if (!hdiv->CheckFlag(HInstruction::kAllUsesTruncatingToInt32)) { |
| 1208 __ movl(rax, rdx); | 1206 __ movl(rax, rdx); |
| 1209 __ imull(rax, rax, Immediate(divisor)); | 1207 __ imull(rax, rax, Immediate(divisor)); |
| 1210 __ subl(rax, dividend); | 1208 __ subl(rax, dividend); |
| 1211 DeoptimizeIf(not_equal, instr, Deoptimizer::kLostPrecision); | 1209 DeoptimizeIf(not_equal, instr, DeoptimizeReason::kLostPrecision); |
| 1212 } | 1210 } |
| 1213 } | 1211 } |
| 1214 | 1212 |
| 1215 | 1213 |
| 1216 // TODO(svenpanne) Refactor this to avoid code duplication with DoFlooringDivI. | 1214 // TODO(svenpanne) Refactor this to avoid code duplication with DoFlooringDivI. |
| 1217 void LCodeGen::DoDivI(LDivI* instr) { | 1215 void LCodeGen::DoDivI(LDivI* instr) { |
| 1218 HBinaryOperation* hdiv = instr->hydrogen(); | 1216 HBinaryOperation* hdiv = instr->hydrogen(); |
| 1219 Register dividend = ToRegister(instr->dividend()); | 1217 Register dividend = ToRegister(instr->dividend()); |
| 1220 Register divisor = ToRegister(instr->divisor()); | 1218 Register divisor = ToRegister(instr->divisor()); |
| 1221 Register remainder = ToRegister(instr->temp()); | 1219 Register remainder = ToRegister(instr->temp()); |
| 1222 DCHECK(dividend.is(rax)); | 1220 DCHECK(dividend.is(rax)); |
| 1223 DCHECK(remainder.is(rdx)); | 1221 DCHECK(remainder.is(rdx)); |
| 1224 DCHECK(ToRegister(instr->result()).is(rax)); | 1222 DCHECK(ToRegister(instr->result()).is(rax)); |
| 1225 DCHECK(!divisor.is(rax)); | 1223 DCHECK(!divisor.is(rax)); |
| 1226 DCHECK(!divisor.is(rdx)); | 1224 DCHECK(!divisor.is(rdx)); |
| 1227 | 1225 |
| 1228 // Check for x / 0. | 1226 // Check for x / 0. |
| 1229 if (hdiv->CheckFlag(HValue::kCanBeDivByZero)) { | 1227 if (hdiv->CheckFlag(HValue::kCanBeDivByZero)) { |
| 1230 __ testl(divisor, divisor); | 1228 __ testl(divisor, divisor); |
| 1231 DeoptimizeIf(zero, instr, Deoptimizer::kDivisionByZero); | 1229 DeoptimizeIf(zero, instr, DeoptimizeReason::kDivisionByZero); |
| 1232 } | 1230 } |
| 1233 | 1231 |
| 1234 // Check for (0 / -x) that will produce negative zero. | 1232 // Check for (0 / -x) that will produce negative zero. |
| 1235 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero)) { | 1233 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero)) { |
| 1236 Label dividend_not_zero; | 1234 Label dividend_not_zero; |
| 1237 __ testl(dividend, dividend); | 1235 __ testl(dividend, dividend); |
| 1238 __ j(not_zero, ÷nd_not_zero, Label::kNear); | 1236 __ j(not_zero, ÷nd_not_zero, Label::kNear); |
| 1239 __ testl(divisor, divisor); | 1237 __ testl(divisor, divisor); |
| 1240 DeoptimizeIf(sign, instr, Deoptimizer::kMinusZero); | 1238 DeoptimizeIf(sign, instr, DeoptimizeReason::kMinusZero); |
| 1241 __ bind(÷nd_not_zero); | 1239 __ bind(÷nd_not_zero); |
| 1242 } | 1240 } |
| 1243 | 1241 |
| 1244 // Check for (kMinInt / -1). | 1242 // Check for (kMinInt / -1). |
| 1245 if (hdiv->CheckFlag(HValue::kCanOverflow)) { | 1243 if (hdiv->CheckFlag(HValue::kCanOverflow)) { |
| 1246 Label dividend_not_min_int; | 1244 Label dividend_not_min_int; |
| 1247 __ cmpl(dividend, Immediate(kMinInt)); | 1245 __ cmpl(dividend, Immediate(kMinInt)); |
| 1248 __ j(not_zero, ÷nd_not_min_int, Label::kNear); | 1246 __ j(not_zero, ÷nd_not_min_int, Label::kNear); |
| 1249 __ cmpl(divisor, Immediate(-1)); | 1247 __ cmpl(divisor, Immediate(-1)); |
| 1250 DeoptimizeIf(zero, instr, Deoptimizer::kOverflow); | 1248 DeoptimizeIf(zero, instr, DeoptimizeReason::kOverflow); |
| 1251 __ bind(÷nd_not_min_int); | 1249 __ bind(÷nd_not_min_int); |
| 1252 } | 1250 } |
| 1253 | 1251 |
| 1254 // Sign extend to rdx (= remainder). | 1252 // Sign extend to rdx (= remainder). |
| 1255 __ cdq(); | 1253 __ cdq(); |
| 1256 __ idivl(divisor); | 1254 __ idivl(divisor); |
| 1257 | 1255 |
| 1258 if (!hdiv->CheckFlag(HValue::kAllUsesTruncatingToInt32)) { | 1256 if (!hdiv->CheckFlag(HValue::kAllUsesTruncatingToInt32)) { |
| 1259 // Deoptimize if remainder is not 0. | 1257 // Deoptimize if remainder is not 0. |
| 1260 __ testl(remainder, remainder); | 1258 __ testl(remainder, remainder); |
| 1261 DeoptimizeIf(not_zero, instr, Deoptimizer::kLostPrecision); | 1259 DeoptimizeIf(not_zero, instr, DeoptimizeReason::kLostPrecision); |
| 1262 } | 1260 } |
| 1263 } | 1261 } |
| 1264 | 1262 |
| 1265 | 1263 |
| 1266 void LCodeGen::DoMulI(LMulI* instr) { | 1264 void LCodeGen::DoMulI(LMulI* instr) { |
| 1267 Register left = ToRegister(instr->left()); | 1265 Register left = ToRegister(instr->left()); |
| 1268 LOperand* right = instr->right(); | 1266 LOperand* right = instr->right(); |
| 1269 | 1267 |
| 1270 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 1268 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
| 1271 if (instr->hydrogen_value()->representation().IsSmi()) { | 1269 if (instr->hydrogen_value()->representation().IsSmi()) { |
| (...skipping 56 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1328 } else { | 1326 } else { |
| 1329 if (instr->hydrogen_value()->representation().IsSmi()) { | 1327 if (instr->hydrogen_value()->representation().IsSmi()) { |
| 1330 __ SmiToInteger64(left, left); | 1328 __ SmiToInteger64(left, left); |
| 1331 __ imulp(left, ToRegister(right)); | 1329 __ imulp(left, ToRegister(right)); |
| 1332 } else { | 1330 } else { |
| 1333 __ imull(left, ToRegister(right)); | 1331 __ imull(left, ToRegister(right)); |
| 1334 } | 1332 } |
| 1335 } | 1333 } |
| 1336 | 1334 |
| 1337 if (can_overflow) { | 1335 if (can_overflow) { |
| 1338 DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow); | 1336 DeoptimizeIf(overflow, instr, DeoptimizeReason::kOverflow); |
| 1339 } | 1337 } |
| 1340 | 1338 |
| 1341 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 1339 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
| 1342 // Bail out if the result is supposed to be negative zero. | 1340 // Bail out if the result is supposed to be negative zero. |
| 1343 Label done; | 1341 Label done; |
| 1344 if (instr->hydrogen_value()->representation().IsSmi()) { | 1342 if (instr->hydrogen_value()->representation().IsSmi()) { |
| 1345 __ testp(left, left); | 1343 __ testp(left, left); |
| 1346 } else { | 1344 } else { |
| 1347 __ testl(left, left); | 1345 __ testl(left, left); |
| 1348 } | 1346 } |
| 1349 __ j(not_zero, &done, Label::kNear); | 1347 __ j(not_zero, &done, Label::kNear); |
| 1350 if (right->IsConstantOperand()) { | 1348 if (right->IsConstantOperand()) { |
| 1351 // Constant can't be represented as 32-bit Smi due to immediate size | 1349 // Constant can't be represented as 32-bit Smi due to immediate size |
| 1352 // limit. | 1350 // limit. |
| 1353 DCHECK(SmiValuesAre32Bits() | 1351 DCHECK(SmiValuesAre32Bits() |
| 1354 ? !instr->hydrogen_value()->representation().IsSmi() | 1352 ? !instr->hydrogen_value()->representation().IsSmi() |
| 1355 : SmiValuesAre31Bits()); | 1353 : SmiValuesAre31Bits()); |
| 1356 if (ToInteger32(LConstantOperand::cast(right)) < 0) { | 1354 if (ToInteger32(LConstantOperand::cast(right)) < 0) { |
| 1357 DeoptimizeIf(no_condition, instr, Deoptimizer::kMinusZero); | 1355 DeoptimizeIf(no_condition, instr, DeoptimizeReason::kMinusZero); |
| 1358 } else if (ToInteger32(LConstantOperand::cast(right)) == 0) { | 1356 } else if (ToInteger32(LConstantOperand::cast(right)) == 0) { |
| 1359 __ cmpl(kScratchRegister, Immediate(0)); | 1357 __ cmpl(kScratchRegister, Immediate(0)); |
| 1360 DeoptimizeIf(less, instr, Deoptimizer::kMinusZero); | 1358 DeoptimizeIf(less, instr, DeoptimizeReason::kMinusZero); |
| 1361 } | 1359 } |
| 1362 } else if (right->IsStackSlot()) { | 1360 } else if (right->IsStackSlot()) { |
| 1363 if (instr->hydrogen_value()->representation().IsSmi()) { | 1361 if (instr->hydrogen_value()->representation().IsSmi()) { |
| 1364 __ orp(kScratchRegister, ToOperand(right)); | 1362 __ orp(kScratchRegister, ToOperand(right)); |
| 1365 } else { | 1363 } else { |
| 1366 __ orl(kScratchRegister, ToOperand(right)); | 1364 __ orl(kScratchRegister, ToOperand(right)); |
| 1367 } | 1365 } |
| 1368 DeoptimizeIf(sign, instr, Deoptimizer::kMinusZero); | 1366 DeoptimizeIf(sign, instr, DeoptimizeReason::kMinusZero); |
| 1369 } else { | 1367 } else { |
| 1370 // Test the non-zero operand for negative sign. | 1368 // Test the non-zero operand for negative sign. |
| 1371 if (instr->hydrogen_value()->representation().IsSmi()) { | 1369 if (instr->hydrogen_value()->representation().IsSmi()) { |
| 1372 __ orp(kScratchRegister, ToRegister(right)); | 1370 __ orp(kScratchRegister, ToRegister(right)); |
| 1373 } else { | 1371 } else { |
| 1374 __ orl(kScratchRegister, ToRegister(right)); | 1372 __ orl(kScratchRegister, ToRegister(right)); |
| 1375 } | 1373 } |
| 1376 DeoptimizeIf(sign, instr, Deoptimizer::kMinusZero); | 1374 DeoptimizeIf(sign, instr, DeoptimizeReason::kMinusZero); |
| 1377 } | 1375 } |
| 1378 __ bind(&done); | 1376 __ bind(&done); |
| 1379 } | 1377 } |
| 1380 } | 1378 } |
| 1381 | 1379 |
| 1382 | 1380 |
| 1383 void LCodeGen::DoBitI(LBitI* instr) { | 1381 void LCodeGen::DoBitI(LBitI* instr) { |
| 1384 LOperand* left = instr->left(); | 1382 LOperand* left = instr->left(); |
| 1385 LOperand* right = instr->right(); | 1383 LOperand* right = instr->right(); |
| 1386 DCHECK(left->Equals(instr->result())); | 1384 DCHECK(left->Equals(instr->result())); |
| (...skipping 92 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1479 case Token::ROR: | 1477 case Token::ROR: |
| 1480 __ rorl_cl(ToRegister(left)); | 1478 __ rorl_cl(ToRegister(left)); |
| 1481 break; | 1479 break; |
| 1482 case Token::SAR: | 1480 case Token::SAR: |
| 1483 __ sarl_cl(ToRegister(left)); | 1481 __ sarl_cl(ToRegister(left)); |
| 1484 break; | 1482 break; |
| 1485 case Token::SHR: | 1483 case Token::SHR: |
| 1486 __ shrl_cl(ToRegister(left)); | 1484 __ shrl_cl(ToRegister(left)); |
| 1487 if (instr->can_deopt()) { | 1485 if (instr->can_deopt()) { |
| 1488 __ testl(ToRegister(left), ToRegister(left)); | 1486 __ testl(ToRegister(left), ToRegister(left)); |
| 1489 DeoptimizeIf(negative, instr, Deoptimizer::kNegativeValue); | 1487 DeoptimizeIf(negative, instr, DeoptimizeReason::kNegativeValue); |
| 1490 } | 1488 } |
| 1491 break; | 1489 break; |
| 1492 case Token::SHL: | 1490 case Token::SHL: |
| 1493 __ shll_cl(ToRegister(left)); | 1491 __ shll_cl(ToRegister(left)); |
| 1494 break; | 1492 break; |
| 1495 default: | 1493 default: |
| 1496 UNREACHABLE(); | 1494 UNREACHABLE(); |
| 1497 break; | 1495 break; |
| 1498 } | 1496 } |
| 1499 } else { | 1497 } else { |
| 1500 int32_t value = ToInteger32(LConstantOperand::cast(right)); | 1498 int32_t value = ToInteger32(LConstantOperand::cast(right)); |
| 1501 uint8_t shift_count = static_cast<uint8_t>(value & 0x1F); | 1499 uint8_t shift_count = static_cast<uint8_t>(value & 0x1F); |
| 1502 switch (instr->op()) { | 1500 switch (instr->op()) { |
| 1503 case Token::ROR: | 1501 case Token::ROR: |
| 1504 if (shift_count != 0) { | 1502 if (shift_count != 0) { |
| 1505 __ rorl(ToRegister(left), Immediate(shift_count)); | 1503 __ rorl(ToRegister(left), Immediate(shift_count)); |
| 1506 } | 1504 } |
| 1507 break; | 1505 break; |
| 1508 case Token::SAR: | 1506 case Token::SAR: |
| 1509 if (shift_count != 0) { | 1507 if (shift_count != 0) { |
| 1510 __ sarl(ToRegister(left), Immediate(shift_count)); | 1508 __ sarl(ToRegister(left), Immediate(shift_count)); |
| 1511 } | 1509 } |
| 1512 break; | 1510 break; |
| 1513 case Token::SHR: | 1511 case Token::SHR: |
| 1514 if (shift_count != 0) { | 1512 if (shift_count != 0) { |
| 1515 __ shrl(ToRegister(left), Immediate(shift_count)); | 1513 __ shrl(ToRegister(left), Immediate(shift_count)); |
| 1516 } else if (instr->can_deopt()) { | 1514 } else if (instr->can_deopt()) { |
| 1517 __ testl(ToRegister(left), ToRegister(left)); | 1515 __ testl(ToRegister(left), ToRegister(left)); |
| 1518 DeoptimizeIf(negative, instr, Deoptimizer::kNegativeValue); | 1516 DeoptimizeIf(negative, instr, DeoptimizeReason::kNegativeValue); |
| 1519 } | 1517 } |
| 1520 break; | 1518 break; |
| 1521 case Token::SHL: | 1519 case Token::SHL: |
| 1522 if (shift_count != 0) { | 1520 if (shift_count != 0) { |
| 1523 if (instr->hydrogen_value()->representation().IsSmi()) { | 1521 if (instr->hydrogen_value()->representation().IsSmi()) { |
| 1524 if (SmiValuesAre32Bits()) { | 1522 if (SmiValuesAre32Bits()) { |
| 1525 __ shlp(ToRegister(left), Immediate(shift_count)); | 1523 __ shlp(ToRegister(left), Immediate(shift_count)); |
| 1526 } else { | 1524 } else { |
| 1527 DCHECK(SmiValuesAre31Bits()); | 1525 DCHECK(SmiValuesAre31Bits()); |
| 1528 if (instr->can_deopt()) { | 1526 if (instr->can_deopt()) { |
| 1529 if (shift_count != 1) { | 1527 if (shift_count != 1) { |
| 1530 __ shll(ToRegister(left), Immediate(shift_count - 1)); | 1528 __ shll(ToRegister(left), Immediate(shift_count - 1)); |
| 1531 } | 1529 } |
| 1532 __ Integer32ToSmi(ToRegister(left), ToRegister(left)); | 1530 __ Integer32ToSmi(ToRegister(left), ToRegister(left)); |
| 1533 DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow); | 1531 DeoptimizeIf(overflow, instr, DeoptimizeReason::kOverflow); |
| 1534 } else { | 1532 } else { |
| 1535 __ shll(ToRegister(left), Immediate(shift_count)); | 1533 __ shll(ToRegister(left), Immediate(shift_count)); |
| 1536 } | 1534 } |
| 1537 } | 1535 } |
| 1538 } else { | 1536 } else { |
| 1539 __ shll(ToRegister(left), Immediate(shift_count)); | 1537 __ shll(ToRegister(left), Immediate(shift_count)); |
| 1540 } | 1538 } |
| 1541 } | 1539 } |
| 1542 break; | 1540 break; |
| 1543 default: | 1541 default: |
| (...skipping 22 matching lines...) Expand all Loading... |
| 1566 } | 1564 } |
| 1567 } else { | 1565 } else { |
| 1568 if (instr->hydrogen_value()->representation().IsSmi()) { | 1566 if (instr->hydrogen_value()->representation().IsSmi()) { |
| 1569 __ subp(ToRegister(left), ToOperand(right)); | 1567 __ subp(ToRegister(left), ToOperand(right)); |
| 1570 } else { | 1568 } else { |
| 1571 __ subl(ToRegister(left), ToOperand(right)); | 1569 __ subl(ToRegister(left), ToOperand(right)); |
| 1572 } | 1570 } |
| 1573 } | 1571 } |
| 1574 | 1572 |
| 1575 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) { | 1573 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) { |
| 1576 DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow); | 1574 DeoptimizeIf(overflow, instr, DeoptimizeReason::kOverflow); |
| 1577 } | 1575 } |
| 1578 } | 1576 } |
| 1579 | 1577 |
| 1580 | 1578 |
| 1581 void LCodeGen::DoConstantI(LConstantI* instr) { | 1579 void LCodeGen::DoConstantI(LConstantI* instr) { |
| 1582 Register dst = ToRegister(instr->result()); | 1580 Register dst = ToRegister(instr->result()); |
| 1583 if (instr->value() == 0) { | 1581 if (instr->value() == 0) { |
| 1584 __ xorl(dst, dst); | 1582 __ xorl(dst, dst); |
| 1585 } else { | 1583 } else { |
| 1586 __ movl(dst, Immediate(instr->value())); | 1584 __ movl(dst, Immediate(instr->value())); |
| (...skipping 154 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1741 __ addl(ToRegister(left), ToRegister(right)); | 1739 __ addl(ToRegister(left), ToRegister(right)); |
| 1742 } | 1740 } |
| 1743 } else { | 1741 } else { |
| 1744 if (is_p) { | 1742 if (is_p) { |
| 1745 __ addp(ToRegister(left), ToOperand(right)); | 1743 __ addp(ToRegister(left), ToOperand(right)); |
| 1746 } else { | 1744 } else { |
| 1747 __ addl(ToRegister(left), ToOperand(right)); | 1745 __ addl(ToRegister(left), ToOperand(right)); |
| 1748 } | 1746 } |
| 1749 } | 1747 } |
| 1750 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) { | 1748 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) { |
| 1751 DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow); | 1749 DeoptimizeIf(overflow, instr, DeoptimizeReason::kOverflow); |
| 1752 } | 1750 } |
| 1753 } | 1751 } |
| 1754 } | 1752 } |
| 1755 | 1753 |
| 1756 | 1754 |
| 1757 void LCodeGen::DoMathMinMax(LMathMinMax* instr) { | 1755 void LCodeGen::DoMathMinMax(LMathMinMax* instr) { |
| 1758 LOperand* left = instr->left(); | 1756 LOperand* left = instr->left(); |
| 1759 LOperand* right = instr->right(); | 1757 LOperand* right = instr->right(); |
| 1760 DCHECK(left->Equals(instr->result())); | 1758 DCHECK(left->Equals(instr->result())); |
| 1761 HMathMinMax::Operation operation = instr->hydrogen()->operation(); | 1759 HMathMinMax::Operation operation = instr->hydrogen()->operation(); |
| (...skipping 257 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2019 } | 2017 } |
| 2020 | 2018 |
| 2021 if (expected.Contains(ToBooleanICStub::SMI)) { | 2019 if (expected.Contains(ToBooleanICStub::SMI)) { |
| 2022 // Smis: 0 -> false, all other -> true. | 2020 // Smis: 0 -> false, all other -> true. |
| 2023 __ Cmp(reg, Smi::FromInt(0)); | 2021 __ Cmp(reg, Smi::FromInt(0)); |
| 2024 __ j(equal, instr->FalseLabel(chunk_)); | 2022 __ j(equal, instr->FalseLabel(chunk_)); |
| 2025 __ JumpIfSmi(reg, instr->TrueLabel(chunk_)); | 2023 __ JumpIfSmi(reg, instr->TrueLabel(chunk_)); |
| 2026 } else if (expected.NeedsMap()) { | 2024 } else if (expected.NeedsMap()) { |
| 2027 // If we need a map later and have a Smi -> deopt. | 2025 // If we need a map later and have a Smi -> deopt. |
| 2028 __ testb(reg, Immediate(kSmiTagMask)); | 2026 __ testb(reg, Immediate(kSmiTagMask)); |
| 2029 DeoptimizeIf(zero, instr, Deoptimizer::kSmi); | 2027 DeoptimizeIf(zero, instr, DeoptimizeReason::kSmi); |
| 2030 } | 2028 } |
| 2031 | 2029 |
| 2032 const Register map = kScratchRegister; | 2030 const Register map = kScratchRegister; |
| 2033 if (expected.NeedsMap()) { | 2031 if (expected.NeedsMap()) { |
| 2034 __ movp(map, FieldOperand(reg, HeapObject::kMapOffset)); | 2032 __ movp(map, FieldOperand(reg, HeapObject::kMapOffset)); |
| 2035 | 2033 |
| 2036 if (expected.CanBeUndetectable()) { | 2034 if (expected.CanBeUndetectable()) { |
| 2037 // Undetectable -> false. | 2035 // Undetectable -> false. |
| 2038 __ testb(FieldOperand(map, Map::kBitFieldOffset), | 2036 __ testb(FieldOperand(map, Map::kBitFieldOffset), |
| 2039 Immediate(1 << Map::kIsUndetectable)); | 2037 Immediate(1 << Map::kIsUndetectable)); |
| (...skipping 39 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2079 __ Xorpd(xmm_scratch, xmm_scratch); | 2077 __ Xorpd(xmm_scratch, xmm_scratch); |
| 2080 __ Ucomisd(xmm_scratch, FieldOperand(reg, HeapNumber::kValueOffset)); | 2078 __ Ucomisd(xmm_scratch, FieldOperand(reg, HeapNumber::kValueOffset)); |
| 2081 __ j(zero, instr->FalseLabel(chunk_)); | 2079 __ j(zero, instr->FalseLabel(chunk_)); |
| 2082 __ jmp(instr->TrueLabel(chunk_)); | 2080 __ jmp(instr->TrueLabel(chunk_)); |
| 2083 __ bind(&not_heap_number); | 2081 __ bind(&not_heap_number); |
| 2084 } | 2082 } |
| 2085 | 2083 |
| 2086 if (!expected.IsGeneric()) { | 2084 if (!expected.IsGeneric()) { |
| 2087 // We've seen something for the first time -> deopt. | 2085 // We've seen something for the first time -> deopt. |
| 2088 // This can only happen if we are not generic already. | 2086 // This can only happen if we are not generic already. |
| 2089 DeoptimizeIf(no_condition, instr, Deoptimizer::kUnexpectedObject); | 2087 DeoptimizeIf(no_condition, instr, DeoptimizeReason::kUnexpectedObject); |
| 2090 } | 2088 } |
| 2091 } | 2089 } |
| 2092 } | 2090 } |
| 2093 } | 2091 } |
| 2094 | 2092 |
| 2095 | 2093 |
| 2096 void LCodeGen::EmitGoto(int block) { | 2094 void LCodeGen::EmitGoto(int block) { |
| 2097 if (!IsNextEmittedBlock(block)) { | 2095 if (!IsNextEmittedBlock(block)) { |
| 2098 __ jmp(chunk_->GetAssemblyLabel(chunk_->LookupDestination(block))); | 2096 __ jmp(chunk_->GetAssemblyLabel(chunk_->LookupDestination(block))); |
| 2099 } | 2097 } |
| (...skipping 348 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2448 } | 2446 } |
| 2449 | 2447 |
| 2450 // Loop through the {object}s prototype chain looking for the {prototype}. | 2448 // Loop through the {object}s prototype chain looking for the {prototype}. |
| 2451 __ movp(object_map, FieldOperand(object, HeapObject::kMapOffset)); | 2449 __ movp(object_map, FieldOperand(object, HeapObject::kMapOffset)); |
| 2452 Label loop; | 2450 Label loop; |
| 2453 __ bind(&loop); | 2451 __ bind(&loop); |
| 2454 | 2452 |
| 2455 // Deoptimize if the object needs to be access checked. | 2453 // Deoptimize if the object needs to be access checked. |
| 2456 __ testb(FieldOperand(object_map, Map::kBitFieldOffset), | 2454 __ testb(FieldOperand(object_map, Map::kBitFieldOffset), |
| 2457 Immediate(1 << Map::kIsAccessCheckNeeded)); | 2455 Immediate(1 << Map::kIsAccessCheckNeeded)); |
| 2458 DeoptimizeIf(not_zero, instr, Deoptimizer::kAccessCheck); | 2456 DeoptimizeIf(not_zero, instr, DeoptimizeReason::kAccessCheck); |
| 2459 // Deoptimize for proxies. | 2457 // Deoptimize for proxies. |
| 2460 __ CmpInstanceType(object_map, JS_PROXY_TYPE); | 2458 __ CmpInstanceType(object_map, JS_PROXY_TYPE); |
| 2461 DeoptimizeIf(equal, instr, Deoptimizer::kProxy); | 2459 DeoptimizeIf(equal, instr, DeoptimizeReason::kProxy); |
| 2462 | 2460 |
| 2463 __ movp(object_prototype, FieldOperand(object_map, Map::kPrototypeOffset)); | 2461 __ movp(object_prototype, FieldOperand(object_map, Map::kPrototypeOffset)); |
| 2464 __ CompareRoot(object_prototype, Heap::kNullValueRootIndex); | 2462 __ CompareRoot(object_prototype, Heap::kNullValueRootIndex); |
| 2465 EmitFalseBranch(instr, equal); | 2463 EmitFalseBranch(instr, equal); |
| 2466 __ cmpp(object_prototype, prototype); | 2464 __ cmpp(object_prototype, prototype); |
| 2467 EmitTrueBranch(instr, equal); | 2465 EmitTrueBranch(instr, equal); |
| 2468 __ movp(object_map, FieldOperand(object_prototype, HeapObject::kMapOffset)); | 2466 __ movp(object_map, FieldOperand(object_prototype, HeapObject::kMapOffset)); |
| 2469 __ jmp(&loop); | 2467 __ jmp(&loop); |
| 2470 } | 2468 } |
| 2471 | 2469 |
| (...skipping 94 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2566 } | 2564 } |
| 2567 | 2565 |
| 2568 | 2566 |
| 2569 void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) { | 2567 void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) { |
| 2570 Register context = ToRegister(instr->context()); | 2568 Register context = ToRegister(instr->context()); |
| 2571 Register result = ToRegister(instr->result()); | 2569 Register result = ToRegister(instr->result()); |
| 2572 __ movp(result, ContextOperand(context, instr->slot_index())); | 2570 __ movp(result, ContextOperand(context, instr->slot_index())); |
| 2573 if (instr->hydrogen()->RequiresHoleCheck()) { | 2571 if (instr->hydrogen()->RequiresHoleCheck()) { |
| 2574 __ CompareRoot(result, Heap::kTheHoleValueRootIndex); | 2572 __ CompareRoot(result, Heap::kTheHoleValueRootIndex); |
| 2575 if (instr->hydrogen()->DeoptimizesOnHole()) { | 2573 if (instr->hydrogen()->DeoptimizesOnHole()) { |
| 2576 DeoptimizeIf(equal, instr, Deoptimizer::kHole); | 2574 DeoptimizeIf(equal, instr, DeoptimizeReason::kHole); |
| 2577 } else { | 2575 } else { |
| 2578 Label is_not_hole; | 2576 Label is_not_hole; |
| 2579 __ j(not_equal, &is_not_hole, Label::kNear); | 2577 __ j(not_equal, &is_not_hole, Label::kNear); |
| 2580 __ LoadRoot(result, Heap::kUndefinedValueRootIndex); | 2578 __ LoadRoot(result, Heap::kUndefinedValueRootIndex); |
| 2581 __ bind(&is_not_hole); | 2579 __ bind(&is_not_hole); |
| 2582 } | 2580 } |
| 2583 } | 2581 } |
| 2584 } | 2582 } |
| 2585 | 2583 |
| 2586 | 2584 |
| 2587 void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) { | 2585 void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) { |
| 2588 Register context = ToRegister(instr->context()); | 2586 Register context = ToRegister(instr->context()); |
| 2589 Register value = ToRegister(instr->value()); | 2587 Register value = ToRegister(instr->value()); |
| 2590 | 2588 |
| 2591 Operand target = ContextOperand(context, instr->slot_index()); | 2589 Operand target = ContextOperand(context, instr->slot_index()); |
| 2592 | 2590 |
| 2593 Label skip_assignment; | 2591 Label skip_assignment; |
| 2594 if (instr->hydrogen()->RequiresHoleCheck()) { | 2592 if (instr->hydrogen()->RequiresHoleCheck()) { |
| 2595 __ CompareRoot(target, Heap::kTheHoleValueRootIndex); | 2593 __ CompareRoot(target, Heap::kTheHoleValueRootIndex); |
| 2596 if (instr->hydrogen()->DeoptimizesOnHole()) { | 2594 if (instr->hydrogen()->DeoptimizesOnHole()) { |
| 2597 DeoptimizeIf(equal, instr, Deoptimizer::kHole); | 2595 DeoptimizeIf(equal, instr, DeoptimizeReason::kHole); |
| 2598 } else { | 2596 } else { |
| 2599 __ j(not_equal, &skip_assignment); | 2597 __ j(not_equal, &skip_assignment); |
| 2600 } | 2598 } |
| 2601 } | 2599 } |
| 2602 __ movp(target, value); | 2600 __ movp(target, value); |
| 2603 | 2601 |
| 2604 if (instr->hydrogen()->NeedsWriteBarrier()) { | 2602 if (instr->hydrogen()->NeedsWriteBarrier()) { |
| 2605 SmiCheck check_needed = | 2603 SmiCheck check_needed = |
| 2606 instr->hydrogen()->value()->type().IsHeapObject() | 2604 instr->hydrogen()->value()->type().IsHeapObject() |
| 2607 ? OMIT_SMI_CHECK : INLINE_SMI_CHECK; | 2605 ? OMIT_SMI_CHECK : INLINE_SMI_CHECK; |
| (...skipping 76 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2684 void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) { | 2682 void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) { |
| 2685 Register function = ToRegister(instr->function()); | 2683 Register function = ToRegister(instr->function()); |
| 2686 Register result = ToRegister(instr->result()); | 2684 Register result = ToRegister(instr->result()); |
| 2687 | 2685 |
| 2688 // Get the prototype or initial map from the function. | 2686 // Get the prototype or initial map from the function. |
| 2689 __ movp(result, | 2687 __ movp(result, |
| 2690 FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset)); | 2688 FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset)); |
| 2691 | 2689 |
| 2692 // Check that the function has a prototype or an initial map. | 2690 // Check that the function has a prototype or an initial map. |
| 2693 __ CompareRoot(result, Heap::kTheHoleValueRootIndex); | 2691 __ CompareRoot(result, Heap::kTheHoleValueRootIndex); |
| 2694 DeoptimizeIf(equal, instr, Deoptimizer::kHole); | 2692 DeoptimizeIf(equal, instr, DeoptimizeReason::kHole); |
| 2695 | 2693 |
| 2696 // If the function does not have an initial map, we're done. | 2694 // If the function does not have an initial map, we're done. |
| 2697 Label done; | 2695 Label done; |
| 2698 __ CmpObjectType(result, MAP_TYPE, kScratchRegister); | 2696 __ CmpObjectType(result, MAP_TYPE, kScratchRegister); |
| 2699 __ j(not_equal, &done, Label::kNear); | 2697 __ j(not_equal, &done, Label::kNear); |
| 2700 | 2698 |
| 2701 // Get the prototype from the initial map. | 2699 // Get the prototype from the initial map. |
| 2702 __ movp(result, FieldOperand(result, Map::kPrototypeOffset)); | 2700 __ movp(result, FieldOperand(result, Map::kPrototypeOffset)); |
| 2703 | 2701 |
| 2704 // All done. | 2702 // All done. |
| (...skipping 81 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2786 case UINT16_ELEMENTS: | 2784 case UINT16_ELEMENTS: |
| 2787 __ movzxwl(result, operand); | 2785 __ movzxwl(result, operand); |
| 2788 break; | 2786 break; |
| 2789 case INT32_ELEMENTS: | 2787 case INT32_ELEMENTS: |
| 2790 __ movl(result, operand); | 2788 __ movl(result, operand); |
| 2791 break; | 2789 break; |
| 2792 case UINT32_ELEMENTS: | 2790 case UINT32_ELEMENTS: |
| 2793 __ movl(result, operand); | 2791 __ movl(result, operand); |
| 2794 if (!instr->hydrogen()->CheckFlag(HInstruction::kUint32)) { | 2792 if (!instr->hydrogen()->CheckFlag(HInstruction::kUint32)) { |
| 2795 __ testl(result, result); | 2793 __ testl(result, result); |
| 2796 DeoptimizeIf(negative, instr, Deoptimizer::kNegativeValue); | 2794 DeoptimizeIf(negative, instr, DeoptimizeReason::kNegativeValue); |
| 2797 } | 2795 } |
| 2798 break; | 2796 break; |
| 2799 case FLOAT32_ELEMENTS: | 2797 case FLOAT32_ELEMENTS: |
| 2800 case FLOAT64_ELEMENTS: | 2798 case FLOAT64_ELEMENTS: |
| 2801 case FAST_ELEMENTS: | 2799 case FAST_ELEMENTS: |
| 2802 case FAST_SMI_ELEMENTS: | 2800 case FAST_SMI_ELEMENTS: |
| 2803 case FAST_DOUBLE_ELEMENTS: | 2801 case FAST_DOUBLE_ELEMENTS: |
| 2804 case FAST_HOLEY_ELEMENTS: | 2802 case FAST_HOLEY_ELEMENTS: |
| 2805 case FAST_HOLEY_SMI_ELEMENTS: | 2803 case FAST_HOLEY_SMI_ELEMENTS: |
| 2806 case FAST_HOLEY_DOUBLE_ELEMENTS: | 2804 case FAST_HOLEY_DOUBLE_ELEMENTS: |
| (...skipping 20 matching lines...) Expand all Loading... |
| 2827 __ movsxlq(ToRegister(key), ToRegister(key)); | 2825 __ movsxlq(ToRegister(key), ToRegister(key)); |
| 2828 } | 2826 } |
| 2829 if (instr->hydrogen()->RequiresHoleCheck()) { | 2827 if (instr->hydrogen()->RequiresHoleCheck()) { |
| 2830 Operand hole_check_operand = BuildFastArrayOperand( | 2828 Operand hole_check_operand = BuildFastArrayOperand( |
| 2831 instr->elements(), | 2829 instr->elements(), |
| 2832 key, | 2830 key, |
| 2833 instr->hydrogen()->key()->representation(), | 2831 instr->hydrogen()->key()->representation(), |
| 2834 FAST_DOUBLE_ELEMENTS, | 2832 FAST_DOUBLE_ELEMENTS, |
| 2835 instr->base_offset() + sizeof(kHoleNanLower32)); | 2833 instr->base_offset() + sizeof(kHoleNanLower32)); |
| 2836 __ cmpl(hole_check_operand, Immediate(kHoleNanUpper32)); | 2834 __ cmpl(hole_check_operand, Immediate(kHoleNanUpper32)); |
| 2837 DeoptimizeIf(equal, instr, Deoptimizer::kHole); | 2835 DeoptimizeIf(equal, instr, DeoptimizeReason::kHole); |
| 2838 } | 2836 } |
| 2839 | 2837 |
| 2840 Operand double_load_operand = BuildFastArrayOperand( | 2838 Operand double_load_operand = BuildFastArrayOperand( |
| 2841 instr->elements(), | 2839 instr->elements(), |
| 2842 key, | 2840 key, |
| 2843 instr->hydrogen()->key()->representation(), | 2841 instr->hydrogen()->key()->representation(), |
| 2844 FAST_DOUBLE_ELEMENTS, | 2842 FAST_DOUBLE_ELEMENTS, |
| 2845 instr->base_offset()); | 2843 instr->base_offset()); |
| 2846 __ Movsd(result, double_load_operand); | 2844 __ Movsd(result, double_load_operand); |
| 2847 } | 2845 } |
| (...skipping 36 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2884 __ Load(result, | 2882 __ Load(result, |
| 2885 BuildFastArrayOperand(instr->elements(), key, | 2883 BuildFastArrayOperand(instr->elements(), key, |
| 2886 instr->hydrogen()->key()->representation(), | 2884 instr->hydrogen()->key()->representation(), |
| 2887 FAST_ELEMENTS, offset), | 2885 FAST_ELEMENTS, offset), |
| 2888 representation); | 2886 representation); |
| 2889 | 2887 |
| 2890 // Check for the hole value. | 2888 // Check for the hole value. |
| 2891 if (requires_hole_check) { | 2889 if (requires_hole_check) { |
| 2892 if (IsFastSmiElementsKind(hinstr->elements_kind())) { | 2890 if (IsFastSmiElementsKind(hinstr->elements_kind())) { |
| 2893 Condition smi = __ CheckSmi(result); | 2891 Condition smi = __ CheckSmi(result); |
| 2894 DeoptimizeIf(NegateCondition(smi), instr, Deoptimizer::kNotASmi); | 2892 DeoptimizeIf(NegateCondition(smi), instr, DeoptimizeReason::kNotASmi); |
| 2895 } else { | 2893 } else { |
| 2896 __ CompareRoot(result, Heap::kTheHoleValueRootIndex); | 2894 __ CompareRoot(result, Heap::kTheHoleValueRootIndex); |
| 2897 DeoptimizeIf(equal, instr, Deoptimizer::kHole); | 2895 DeoptimizeIf(equal, instr, DeoptimizeReason::kHole); |
| 2898 } | 2896 } |
| 2899 } else if (hinstr->hole_mode() == CONVERT_HOLE_TO_UNDEFINED) { | 2897 } else if (hinstr->hole_mode() == CONVERT_HOLE_TO_UNDEFINED) { |
| 2900 DCHECK(hinstr->elements_kind() == FAST_HOLEY_ELEMENTS); | 2898 DCHECK(hinstr->elements_kind() == FAST_HOLEY_ELEMENTS); |
| 2901 Label done; | 2899 Label done; |
| 2902 __ CompareRoot(result, Heap::kTheHoleValueRootIndex); | 2900 __ CompareRoot(result, Heap::kTheHoleValueRootIndex); |
| 2903 __ j(not_equal, &done); | 2901 __ j(not_equal, &done); |
| 2904 if (info()->IsStub()) { | 2902 if (info()->IsStub()) { |
| 2905 // A stub can safely convert the hole to undefined only if the array | 2903 // A stub can safely convert the hole to undefined only if the array |
| 2906 // protector cell contains (Smi) Isolate::kArrayProtectorValid. Otherwise | 2904 // protector cell contains (Smi) Isolate::kArrayProtectorValid. Otherwise |
| 2907 // it needs to bail out. | 2905 // it needs to bail out. |
| 2908 __ LoadRoot(result, Heap::kArrayProtectorRootIndex); | 2906 __ LoadRoot(result, Heap::kArrayProtectorRootIndex); |
| 2909 __ Cmp(FieldOperand(result, Cell::kValueOffset), | 2907 __ Cmp(FieldOperand(result, Cell::kValueOffset), |
| 2910 Smi::FromInt(Isolate::kArrayProtectorValid)); | 2908 Smi::FromInt(Isolate::kArrayProtectorValid)); |
| 2911 DeoptimizeIf(not_equal, instr, Deoptimizer::kHole); | 2909 DeoptimizeIf(not_equal, instr, DeoptimizeReason::kHole); |
| 2912 } | 2910 } |
| 2913 __ Move(result, isolate()->factory()->undefined_value()); | 2911 __ Move(result, isolate()->factory()->undefined_value()); |
| 2914 __ bind(&done); | 2912 __ bind(&done); |
| 2915 } | 2913 } |
| 2916 } | 2914 } |
| 2917 | 2915 |
| 2918 | 2916 |
| 2919 void LCodeGen::DoLoadKeyed(LLoadKeyed* instr) { | 2917 void LCodeGen::DoLoadKeyed(LLoadKeyed* instr) { |
| 2920 if (instr->is_fixed_typed_array()) { | 2918 if (instr->is_fixed_typed_array()) { |
| 2921 DoLoadKeyedExternalArray(instr); | 2919 DoLoadKeyedExternalArray(instr); |
| (...skipping 128 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3050 } | 3048 } |
| 3051 | 3049 |
| 3052 // Normal function. Replace undefined or null with global receiver. | 3050 // Normal function. Replace undefined or null with global receiver. |
| 3053 __ CompareRoot(receiver, Heap::kNullValueRootIndex); | 3051 __ CompareRoot(receiver, Heap::kNullValueRootIndex); |
| 3054 __ j(equal, &global_object, Label::kNear); | 3052 __ j(equal, &global_object, Label::kNear); |
| 3055 __ CompareRoot(receiver, Heap::kUndefinedValueRootIndex); | 3053 __ CompareRoot(receiver, Heap::kUndefinedValueRootIndex); |
| 3056 __ j(equal, &global_object, Label::kNear); | 3054 __ j(equal, &global_object, Label::kNear); |
| 3057 | 3055 |
| 3058 // The receiver should be a JS object. | 3056 // The receiver should be a JS object. |
| 3059 Condition is_smi = __ CheckSmi(receiver); | 3057 Condition is_smi = __ CheckSmi(receiver); |
| 3060 DeoptimizeIf(is_smi, instr, Deoptimizer::kSmi); | 3058 DeoptimizeIf(is_smi, instr, DeoptimizeReason::kSmi); |
| 3061 __ CmpObjectType(receiver, FIRST_JS_RECEIVER_TYPE, kScratchRegister); | 3059 __ CmpObjectType(receiver, FIRST_JS_RECEIVER_TYPE, kScratchRegister); |
| 3062 DeoptimizeIf(below, instr, Deoptimizer::kNotAJavaScriptObject); | 3060 DeoptimizeIf(below, instr, DeoptimizeReason::kNotAJavaScriptObject); |
| 3063 | 3061 |
| 3064 __ jmp(&receiver_ok, Label::kNear); | 3062 __ jmp(&receiver_ok, Label::kNear); |
| 3065 __ bind(&global_object); | 3063 __ bind(&global_object); |
| 3066 __ movp(receiver, FieldOperand(function, JSFunction::kContextOffset)); | 3064 __ movp(receiver, FieldOperand(function, JSFunction::kContextOffset)); |
| 3067 __ movp(receiver, ContextOperand(receiver, Context::NATIVE_CONTEXT_INDEX)); | 3065 __ movp(receiver, ContextOperand(receiver, Context::NATIVE_CONTEXT_INDEX)); |
| 3068 __ movp(receiver, ContextOperand(receiver, Context::GLOBAL_PROXY_INDEX)); | 3066 __ movp(receiver, ContextOperand(receiver, Context::GLOBAL_PROXY_INDEX)); |
| 3069 | 3067 |
| 3070 __ bind(&receiver_ok); | 3068 __ bind(&receiver_ok); |
| 3071 } | 3069 } |
| 3072 | 3070 |
| 3073 | 3071 |
| 3074 void LCodeGen::DoApplyArguments(LApplyArguments* instr) { | 3072 void LCodeGen::DoApplyArguments(LApplyArguments* instr) { |
| 3075 Register receiver = ToRegister(instr->receiver()); | 3073 Register receiver = ToRegister(instr->receiver()); |
| 3076 Register function = ToRegister(instr->function()); | 3074 Register function = ToRegister(instr->function()); |
| 3077 Register length = ToRegister(instr->length()); | 3075 Register length = ToRegister(instr->length()); |
| 3078 Register elements = ToRegister(instr->elements()); | 3076 Register elements = ToRegister(instr->elements()); |
| 3079 DCHECK(receiver.is(rax)); // Used for parameter count. | 3077 DCHECK(receiver.is(rax)); // Used for parameter count. |
| 3080 DCHECK(function.is(rdi)); // Required by InvokeFunction. | 3078 DCHECK(function.is(rdi)); // Required by InvokeFunction. |
| 3081 DCHECK(ToRegister(instr->result()).is(rax)); | 3079 DCHECK(ToRegister(instr->result()).is(rax)); |
| 3082 | 3080 |
| 3083 // Copy the arguments to this function possibly from the | 3081 // Copy the arguments to this function possibly from the |
| 3084 // adaptor frame below it. | 3082 // adaptor frame below it. |
| 3085 const uint32_t kArgumentsLimit = 1 * KB; | 3083 const uint32_t kArgumentsLimit = 1 * KB; |
| 3086 __ cmpp(length, Immediate(kArgumentsLimit)); | 3084 __ cmpp(length, Immediate(kArgumentsLimit)); |
| 3087 DeoptimizeIf(above, instr, Deoptimizer::kTooManyArguments); | 3085 DeoptimizeIf(above, instr, DeoptimizeReason::kTooManyArguments); |
| 3088 | 3086 |
| 3089 __ Push(receiver); | 3087 __ Push(receiver); |
| 3090 __ movp(receiver, length); | 3088 __ movp(receiver, length); |
| 3091 | 3089 |
| 3092 // Loop through the arguments pushing them onto the execution | 3090 // Loop through the arguments pushing them onto the execution |
| 3093 // stack. | 3091 // stack. |
| 3094 Label invoke, loop; | 3092 Label invoke, loop; |
| 3095 // length is a small non-negative integer, due to the test above. | 3093 // length is a small non-negative integer, due to the test above. |
| 3096 __ testl(length, length); | 3094 __ testl(length, length); |
| 3097 __ j(zero, &invoke, Label::kNear); | 3095 __ j(zero, &invoke, Label::kNear); |
| (...skipping 150 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3248 } | 3246 } |
| 3249 generator.AfterCall(); | 3247 generator.AfterCall(); |
| 3250 } | 3248 } |
| 3251 } | 3249 } |
| 3252 | 3250 |
| 3253 | 3251 |
| 3254 void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr) { | 3252 void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr) { |
| 3255 Register input_reg = ToRegister(instr->value()); | 3253 Register input_reg = ToRegister(instr->value()); |
| 3256 __ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset), | 3254 __ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset), |
| 3257 Heap::kHeapNumberMapRootIndex); | 3255 Heap::kHeapNumberMapRootIndex); |
| 3258 DeoptimizeIf(not_equal, instr, Deoptimizer::kNotAHeapNumber); | 3256 DeoptimizeIf(not_equal, instr, DeoptimizeReason::kNotAHeapNumber); |
| 3259 | 3257 |
| 3260 Label slow, allocated, done; | 3258 Label slow, allocated, done; |
| 3261 uint32_t available_regs = rax.bit() | rcx.bit() | rdx.bit() | rbx.bit(); | 3259 uint32_t available_regs = rax.bit() | rcx.bit() | rdx.bit() | rbx.bit(); |
| 3262 available_regs &= ~input_reg.bit(); | 3260 available_regs &= ~input_reg.bit(); |
| 3263 if (instr->context()->IsRegister()) { | 3261 if (instr->context()->IsRegister()) { |
| 3264 // Make sure that the context isn't overwritten in the AllocateHeapNumber | 3262 // Make sure that the context isn't overwritten in the AllocateHeapNumber |
| 3265 // macro below. | 3263 // macro below. |
| 3266 available_regs &= ~ToRegister(instr->context()).bit(); | 3264 available_regs &= ~ToRegister(instr->context()).bit(); |
| 3267 } | 3265 } |
| 3268 | 3266 |
| (...skipping 36 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3305 __ bind(&done); | 3303 __ bind(&done); |
| 3306 } | 3304 } |
| 3307 | 3305 |
| 3308 | 3306 |
| 3309 void LCodeGen::EmitIntegerMathAbs(LMathAbs* instr) { | 3307 void LCodeGen::EmitIntegerMathAbs(LMathAbs* instr) { |
| 3310 Register input_reg = ToRegister(instr->value()); | 3308 Register input_reg = ToRegister(instr->value()); |
| 3311 __ testl(input_reg, input_reg); | 3309 __ testl(input_reg, input_reg); |
| 3312 Label is_positive; | 3310 Label is_positive; |
| 3313 __ j(not_sign, &is_positive, Label::kNear); | 3311 __ j(not_sign, &is_positive, Label::kNear); |
| 3314 __ negl(input_reg); // Sets flags. | 3312 __ negl(input_reg); // Sets flags. |
| 3315 DeoptimizeIf(negative, instr, Deoptimizer::kOverflow); | 3313 DeoptimizeIf(negative, instr, DeoptimizeReason::kOverflow); |
| 3316 __ bind(&is_positive); | 3314 __ bind(&is_positive); |
| 3317 } | 3315 } |
| 3318 | 3316 |
| 3319 | 3317 |
| 3320 void LCodeGen::EmitSmiMathAbs(LMathAbs* instr) { | 3318 void LCodeGen::EmitSmiMathAbs(LMathAbs* instr) { |
| 3321 Register input_reg = ToRegister(instr->value()); | 3319 Register input_reg = ToRegister(instr->value()); |
| 3322 __ testp(input_reg, input_reg); | 3320 __ testp(input_reg, input_reg); |
| 3323 Label is_positive; | 3321 Label is_positive; |
| 3324 __ j(not_sign, &is_positive, Label::kNear); | 3322 __ j(not_sign, &is_positive, Label::kNear); |
| 3325 __ negp(input_reg); // Sets flags. | 3323 __ negp(input_reg); // Sets flags. |
| 3326 DeoptimizeIf(negative, instr, Deoptimizer::kOverflow); | 3324 DeoptimizeIf(negative, instr, DeoptimizeReason::kOverflow); |
| 3327 __ bind(&is_positive); | 3325 __ bind(&is_positive); |
| 3328 } | 3326 } |
| 3329 | 3327 |
| 3330 | 3328 |
| 3331 void LCodeGen::DoMathAbs(LMathAbs* instr) { | 3329 void LCodeGen::DoMathAbs(LMathAbs* instr) { |
| 3332 // Class for deferred case. | 3330 // Class for deferred case. |
| 3333 class DeferredMathAbsTaggedHeapNumber final : public LDeferredCode { | 3331 class DeferredMathAbsTaggedHeapNumber final : public LDeferredCode { |
| 3334 public: | 3332 public: |
| 3335 DeferredMathAbsTaggedHeapNumber(LCodeGen* codegen, LMathAbs* instr) | 3333 DeferredMathAbsTaggedHeapNumber(LCodeGen* codegen, LMathAbs* instr) |
| 3336 : LDeferredCode(codegen), instr_(instr) { } | 3334 : LDeferredCode(codegen), instr_(instr) { } |
| (...skipping 41 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3378 XMMRegister xmm_scratch = double_scratch0(); | 3376 XMMRegister xmm_scratch = double_scratch0(); |
| 3379 Register output_reg = ToRegister(instr->result()); | 3377 Register output_reg = ToRegister(instr->result()); |
| 3380 XMMRegister input_reg = ToDoubleRegister(instr->value()); | 3378 XMMRegister input_reg = ToDoubleRegister(instr->value()); |
| 3381 | 3379 |
| 3382 if (CpuFeatures::IsSupported(SSE4_1)) { | 3380 if (CpuFeatures::IsSupported(SSE4_1)) { |
| 3383 CpuFeatureScope scope(masm(), SSE4_1); | 3381 CpuFeatureScope scope(masm(), SSE4_1); |
| 3384 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 3382 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
| 3385 // Deoptimize if minus zero. | 3383 // Deoptimize if minus zero. |
| 3386 __ Movq(output_reg, input_reg); | 3384 __ Movq(output_reg, input_reg); |
| 3387 __ subq(output_reg, Immediate(1)); | 3385 __ subq(output_reg, Immediate(1)); |
| 3388 DeoptimizeIf(overflow, instr, Deoptimizer::kMinusZero); | 3386 DeoptimizeIf(overflow, instr, DeoptimizeReason::kMinusZero); |
| 3389 } | 3387 } |
| 3390 __ Roundsd(xmm_scratch, input_reg, kRoundDown); | 3388 __ Roundsd(xmm_scratch, input_reg, kRoundDown); |
| 3391 __ Cvttsd2si(output_reg, xmm_scratch); | 3389 __ Cvttsd2si(output_reg, xmm_scratch); |
| 3392 __ cmpl(output_reg, Immediate(0x1)); | 3390 __ cmpl(output_reg, Immediate(0x1)); |
| 3393 DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow); | 3391 DeoptimizeIf(overflow, instr, DeoptimizeReason::kOverflow); |
| 3394 } else { | 3392 } else { |
| 3395 Label negative_sign, done; | 3393 Label negative_sign, done; |
| 3396 // Deoptimize on unordered. | 3394 // Deoptimize on unordered. |
| 3397 __ Xorpd(xmm_scratch, xmm_scratch); // Zero the register. | 3395 __ Xorpd(xmm_scratch, xmm_scratch); // Zero the register. |
| 3398 __ Ucomisd(input_reg, xmm_scratch); | 3396 __ Ucomisd(input_reg, xmm_scratch); |
| 3399 DeoptimizeIf(parity_even, instr, Deoptimizer::kNaN); | 3397 DeoptimizeIf(parity_even, instr, DeoptimizeReason::kNaN); |
| 3400 __ j(below, &negative_sign, Label::kNear); | 3398 __ j(below, &negative_sign, Label::kNear); |
| 3401 | 3399 |
| 3402 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 3400 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
| 3403 // Check for negative zero. | 3401 // Check for negative zero. |
| 3404 Label positive_sign; | 3402 Label positive_sign; |
| 3405 __ j(above, &positive_sign, Label::kNear); | 3403 __ j(above, &positive_sign, Label::kNear); |
| 3406 __ Movmskpd(output_reg, input_reg); | 3404 __ Movmskpd(output_reg, input_reg); |
| 3407 __ testl(output_reg, Immediate(1)); | 3405 __ testl(output_reg, Immediate(1)); |
| 3408 DeoptimizeIf(not_zero, instr, Deoptimizer::kMinusZero); | 3406 DeoptimizeIf(not_zero, instr, DeoptimizeReason::kMinusZero); |
| 3409 __ Set(output_reg, 0); | 3407 __ Set(output_reg, 0); |
| 3410 __ jmp(&done); | 3408 __ jmp(&done); |
| 3411 __ bind(&positive_sign); | 3409 __ bind(&positive_sign); |
| 3412 } | 3410 } |
| 3413 | 3411 |
| 3414 // Use truncating instruction (OK because input is positive). | 3412 // Use truncating instruction (OK because input is positive). |
| 3415 __ Cvttsd2si(output_reg, input_reg); | 3413 __ Cvttsd2si(output_reg, input_reg); |
| 3416 // Overflow is signalled with minint. | 3414 // Overflow is signalled with minint. |
| 3417 __ cmpl(output_reg, Immediate(0x1)); | 3415 __ cmpl(output_reg, Immediate(0x1)); |
| 3418 DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow); | 3416 DeoptimizeIf(overflow, instr, DeoptimizeReason::kOverflow); |
| 3419 __ jmp(&done, Label::kNear); | 3417 __ jmp(&done, Label::kNear); |
| 3420 | 3418 |
| 3421 // Non-zero negative reaches here. | 3419 // Non-zero negative reaches here. |
| 3422 __ bind(&negative_sign); | 3420 __ bind(&negative_sign); |
| 3423 // Truncate, then compare and compensate. | 3421 // Truncate, then compare and compensate. |
| 3424 __ Cvttsd2si(output_reg, input_reg); | 3422 __ Cvttsd2si(output_reg, input_reg); |
| 3425 __ Cvtlsi2sd(xmm_scratch, output_reg); | 3423 __ Cvtlsi2sd(xmm_scratch, output_reg); |
| 3426 __ Ucomisd(input_reg, xmm_scratch); | 3424 __ Ucomisd(input_reg, xmm_scratch); |
| 3427 __ j(equal, &done, Label::kNear); | 3425 __ j(equal, &done, Label::kNear); |
| 3428 __ subl(output_reg, Immediate(1)); | 3426 __ subl(output_reg, Immediate(1)); |
| 3429 DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow); | 3427 DeoptimizeIf(overflow, instr, DeoptimizeReason::kOverflow); |
| 3430 | 3428 |
| 3431 __ bind(&done); | 3429 __ bind(&done); |
| 3432 } | 3430 } |
| 3433 } | 3431 } |
| 3434 | 3432 |
| 3435 void LCodeGen::DoMathRoundD(LMathRoundD* instr) { | 3433 void LCodeGen::DoMathRoundD(LMathRoundD* instr) { |
| 3436 XMMRegister xmm_scratch = double_scratch0(); | 3434 XMMRegister xmm_scratch = double_scratch0(); |
| 3437 XMMRegister output_reg = ToDoubleRegister(instr->result()); | 3435 XMMRegister output_reg = ToDoubleRegister(instr->result()); |
| 3438 XMMRegister input_reg = ToDoubleRegister(instr->value()); | 3436 XMMRegister input_reg = ToDoubleRegister(instr->value()); |
| 3439 CpuFeatureScope scope(masm(), SSE4_1); | 3437 CpuFeatureScope scope(masm(), SSE4_1); |
| (...skipping 21 matching lines...) Expand all Loading... |
| 3461 __ movq(kScratchRegister, one_half); | 3459 __ movq(kScratchRegister, one_half); |
| 3462 __ Movq(xmm_scratch, kScratchRegister); | 3460 __ Movq(xmm_scratch, kScratchRegister); |
| 3463 __ Ucomisd(xmm_scratch, input_reg); | 3461 __ Ucomisd(xmm_scratch, input_reg); |
| 3464 __ j(above, &below_one_half, Label::kNear); | 3462 __ j(above, &below_one_half, Label::kNear); |
| 3465 | 3463 |
| 3466 // CVTTSD2SI rounds towards zero, since 0.5 <= x, we use floor(0.5 + x). | 3464 // CVTTSD2SI rounds towards zero, since 0.5 <= x, we use floor(0.5 + x). |
| 3467 __ Addsd(xmm_scratch, input_reg); | 3465 __ Addsd(xmm_scratch, input_reg); |
| 3468 __ Cvttsd2si(output_reg, xmm_scratch); | 3466 __ Cvttsd2si(output_reg, xmm_scratch); |
| 3469 // Overflow is signalled with minint. | 3467 // Overflow is signalled with minint. |
| 3470 __ cmpl(output_reg, Immediate(0x1)); | 3468 __ cmpl(output_reg, Immediate(0x1)); |
| 3471 DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow); | 3469 DeoptimizeIf(overflow, instr, DeoptimizeReason::kOverflow); |
| 3472 __ jmp(&done, dist); | 3470 __ jmp(&done, dist); |
| 3473 | 3471 |
| 3474 __ bind(&below_one_half); | 3472 __ bind(&below_one_half); |
| 3475 __ movq(kScratchRegister, minus_one_half); | 3473 __ movq(kScratchRegister, minus_one_half); |
| 3476 __ Movq(xmm_scratch, kScratchRegister); | 3474 __ Movq(xmm_scratch, kScratchRegister); |
| 3477 __ Ucomisd(xmm_scratch, input_reg); | 3475 __ Ucomisd(xmm_scratch, input_reg); |
| 3478 __ j(below_equal, &round_to_zero, Label::kNear); | 3476 __ j(below_equal, &round_to_zero, Label::kNear); |
| 3479 | 3477 |
| 3480 // CVTTSD2SI rounds towards zero, we use ceil(x - (-0.5)) and then | 3478 // CVTTSD2SI rounds towards zero, we use ceil(x - (-0.5)) and then |
| 3481 // compare and compensate. | 3479 // compare and compensate. |
| 3482 __ Movapd(input_temp, input_reg); // Do not alter input_reg. | 3480 __ Movapd(input_temp, input_reg); // Do not alter input_reg. |
| 3483 __ Subsd(input_temp, xmm_scratch); | 3481 __ Subsd(input_temp, xmm_scratch); |
| 3484 __ Cvttsd2si(output_reg, input_temp); | 3482 __ Cvttsd2si(output_reg, input_temp); |
| 3485 // Catch minint due to overflow, and to prevent overflow when compensating. | 3483 // Catch minint due to overflow, and to prevent overflow when compensating. |
| 3486 __ cmpl(output_reg, Immediate(0x1)); | 3484 __ cmpl(output_reg, Immediate(0x1)); |
| 3487 DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow); | 3485 DeoptimizeIf(overflow, instr, DeoptimizeReason::kOverflow); |
| 3488 | 3486 |
| 3489 __ Cvtlsi2sd(xmm_scratch, output_reg); | 3487 __ Cvtlsi2sd(xmm_scratch, output_reg); |
| 3490 __ Ucomisd(xmm_scratch, input_temp); | 3488 __ Ucomisd(xmm_scratch, input_temp); |
| 3491 __ j(equal, &done, dist); | 3489 __ j(equal, &done, dist); |
| 3492 __ subl(output_reg, Immediate(1)); | 3490 __ subl(output_reg, Immediate(1)); |
| 3493 // No overflow because we already ruled out minint. | 3491 // No overflow because we already ruled out minint. |
| 3494 __ jmp(&done, dist); | 3492 __ jmp(&done, dist); |
| 3495 | 3493 |
| 3496 __ bind(&round_to_zero); | 3494 __ bind(&round_to_zero); |
| 3497 // We return 0 for the input range [+0, 0.5[, or [-0.5, 0.5[ if | 3495 // We return 0 for the input range [+0, 0.5[, or [-0.5, 0.5[ if |
| 3498 // we can ignore the difference between a result of -0 and +0. | 3496 // we can ignore the difference between a result of -0 and +0. |
| 3499 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 3497 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
| 3500 __ Movq(output_reg, input_reg); | 3498 __ Movq(output_reg, input_reg); |
| 3501 __ testq(output_reg, output_reg); | 3499 __ testq(output_reg, output_reg); |
| 3502 DeoptimizeIf(negative, instr, Deoptimizer::kMinusZero); | 3500 DeoptimizeIf(negative, instr, DeoptimizeReason::kMinusZero); |
| 3503 } | 3501 } |
| 3504 __ Set(output_reg, 0); | 3502 __ Set(output_reg, 0); |
| 3505 __ bind(&done); | 3503 __ bind(&done); |
| 3506 } | 3504 } |
| 3507 | 3505 |
| 3508 | 3506 |
| 3509 void LCodeGen::DoMathFround(LMathFround* instr) { | 3507 void LCodeGen::DoMathFround(LMathFround* instr) { |
| 3510 XMMRegister input_reg = ToDoubleRegister(instr->value()); | 3508 XMMRegister input_reg = ToDoubleRegister(instr->value()); |
| 3511 XMMRegister output_reg = ToDoubleRegister(instr->result()); | 3509 XMMRegister output_reg = ToDoubleRegister(instr->result()); |
| 3512 __ Cvtsd2ss(output_reg, input_reg); | 3510 __ Cvtsd2ss(output_reg, input_reg); |
| (...skipping 58 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3571 DCHECK(ToDoubleRegister(instr->left()).is(xmm2)); | 3569 DCHECK(ToDoubleRegister(instr->left()).is(xmm2)); |
| 3572 DCHECK(ToDoubleRegister(instr->result()).is(xmm3)); | 3570 DCHECK(ToDoubleRegister(instr->result()).is(xmm3)); |
| 3573 | 3571 |
| 3574 if (exponent_type.IsSmi()) { | 3572 if (exponent_type.IsSmi()) { |
| 3575 MathPowStub stub(isolate(), MathPowStub::TAGGED); | 3573 MathPowStub stub(isolate(), MathPowStub::TAGGED); |
| 3576 __ CallStub(&stub); | 3574 __ CallStub(&stub); |
| 3577 } else if (exponent_type.IsTagged()) { | 3575 } else if (exponent_type.IsTagged()) { |
| 3578 Label no_deopt; | 3576 Label no_deopt; |
| 3579 __ JumpIfSmi(tagged_exponent, &no_deopt, Label::kNear); | 3577 __ JumpIfSmi(tagged_exponent, &no_deopt, Label::kNear); |
| 3580 __ CmpObjectType(tagged_exponent, HEAP_NUMBER_TYPE, rcx); | 3578 __ CmpObjectType(tagged_exponent, HEAP_NUMBER_TYPE, rcx); |
| 3581 DeoptimizeIf(not_equal, instr, Deoptimizer::kNotAHeapNumber); | 3579 DeoptimizeIf(not_equal, instr, DeoptimizeReason::kNotAHeapNumber); |
| 3582 __ bind(&no_deopt); | 3580 __ bind(&no_deopt); |
| 3583 MathPowStub stub(isolate(), MathPowStub::TAGGED); | 3581 MathPowStub stub(isolate(), MathPowStub::TAGGED); |
| 3584 __ CallStub(&stub); | 3582 __ CallStub(&stub); |
| 3585 } else if (exponent_type.IsInteger32()) { | 3583 } else if (exponent_type.IsInteger32()) { |
| 3586 MathPowStub stub(isolate(), MathPowStub::INTEGER); | 3584 MathPowStub stub(isolate(), MathPowStub::INTEGER); |
| 3587 __ CallStub(&stub); | 3585 __ CallStub(&stub); |
| 3588 } else { | 3586 } else { |
| 3589 DCHECK(exponent_type.IsDouble()); | 3587 DCHECK(exponent_type.IsDouble()); |
| 3590 MathPowStub stub(isolate(), MathPowStub::DOUBLE); | 3588 MathPowStub stub(isolate(), MathPowStub::DOUBLE); |
| 3591 __ CallStub(&stub); | 3589 __ CallStub(&stub); |
| (...skipping 375 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3967 __ cmpl(length, index); | 3965 __ cmpl(length, index); |
| 3968 } | 3966 } |
| 3969 } | 3967 } |
| 3970 } | 3968 } |
| 3971 if (FLAG_debug_code && instr->hydrogen()->skip_check()) { | 3969 if (FLAG_debug_code && instr->hydrogen()->skip_check()) { |
| 3972 Label done; | 3970 Label done; |
| 3973 __ j(NegateCondition(cc), &done, Label::kNear); | 3971 __ j(NegateCondition(cc), &done, Label::kNear); |
| 3974 __ int3(); | 3972 __ int3(); |
| 3975 __ bind(&done); | 3973 __ bind(&done); |
| 3976 } else { | 3974 } else { |
| 3977 DeoptimizeIf(cc, instr, Deoptimizer::kOutOfBounds); | 3975 DeoptimizeIf(cc, instr, DeoptimizeReason::kOutOfBounds); |
| 3978 } | 3976 } |
| 3979 } | 3977 } |
| 3980 | 3978 |
| 3981 | 3979 |
| 3982 void LCodeGen::DoStoreKeyedExternalArray(LStoreKeyed* instr) { | 3980 void LCodeGen::DoStoreKeyedExternalArray(LStoreKeyed* instr) { |
| 3983 ElementsKind elements_kind = instr->elements_kind(); | 3981 ElementsKind elements_kind = instr->elements_kind(); |
| 3984 LOperand* key = instr->key(); | 3982 LOperand* key = instr->key(); |
| 3985 if (kPointerSize == kInt32Size && !key->IsConstantOperand()) { | 3983 if (kPointerSize == kInt32Size && !key->IsConstantOperand()) { |
| 3986 Register key_reg = ToRegister(key); | 3984 Register key_reg = ToRegister(key); |
| 3987 Representation key_representation = | 3985 Representation key_representation = |
| (...skipping 278 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 4266 | 4264 |
| 4267 GrowArrayElementsStub stub(isolate(), instr->hydrogen()->is_js_array(), | 4265 GrowArrayElementsStub stub(isolate(), instr->hydrogen()->is_js_array(), |
| 4268 instr->hydrogen()->kind()); | 4266 instr->hydrogen()->kind()); |
| 4269 __ CallStub(&stub); | 4267 __ CallStub(&stub); |
| 4270 RecordSafepointWithLazyDeopt(instr, RECORD_SAFEPOINT_WITH_REGISTERS, 0); | 4268 RecordSafepointWithLazyDeopt(instr, RECORD_SAFEPOINT_WITH_REGISTERS, 0); |
| 4271 __ StoreToSafepointRegisterSlot(result, result); | 4269 __ StoreToSafepointRegisterSlot(result, result); |
| 4272 } | 4270 } |
| 4273 | 4271 |
| 4274 // Deopt on smi, which means the elements array changed to dictionary mode. | 4272 // Deopt on smi, which means the elements array changed to dictionary mode. |
| 4275 Condition is_smi = __ CheckSmi(result); | 4273 Condition is_smi = __ CheckSmi(result); |
| 4276 DeoptimizeIf(is_smi, instr, Deoptimizer::kSmi); | 4274 DeoptimizeIf(is_smi, instr, DeoptimizeReason::kSmi); |
| 4277 } | 4275 } |
| 4278 | 4276 |
| 4279 | 4277 |
| 4280 void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) { | 4278 void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) { |
| 4281 Register object_reg = ToRegister(instr->object()); | 4279 Register object_reg = ToRegister(instr->object()); |
| 4282 | 4280 |
| 4283 Handle<Map> from_map = instr->original_map(); | 4281 Handle<Map> from_map = instr->original_map(); |
| 4284 Handle<Map> to_map = instr->transitioned_map(); | 4282 Handle<Map> to_map = instr->transitioned_map(); |
| 4285 ElementsKind from_kind = instr->from_kind(); | 4283 ElementsKind from_kind = instr->from_kind(); |
| 4286 ElementsKind to_kind = instr->to_kind(); | 4284 ElementsKind to_kind = instr->to_kind(); |
| (...skipping 19 matching lines...) Expand all Loading... |
| 4306 } | 4304 } |
| 4307 __ bind(¬_applicable); | 4305 __ bind(¬_applicable); |
| 4308 } | 4306 } |
| 4309 | 4307 |
| 4310 | 4308 |
| 4311 void LCodeGen::DoTrapAllocationMemento(LTrapAllocationMemento* instr) { | 4309 void LCodeGen::DoTrapAllocationMemento(LTrapAllocationMemento* instr) { |
| 4312 Register object = ToRegister(instr->object()); | 4310 Register object = ToRegister(instr->object()); |
| 4313 Register temp = ToRegister(instr->temp()); | 4311 Register temp = ToRegister(instr->temp()); |
| 4314 Label no_memento_found; | 4312 Label no_memento_found; |
| 4315 __ TestJSArrayForAllocationMemento(object, temp, &no_memento_found); | 4313 __ TestJSArrayForAllocationMemento(object, temp, &no_memento_found); |
| 4316 DeoptimizeIf(equal, instr, Deoptimizer::kMementoFound); | 4314 DeoptimizeIf(equal, instr, DeoptimizeReason::kMementoFound); |
| 4317 __ bind(&no_memento_found); | 4315 __ bind(&no_memento_found); |
| 4318 } | 4316 } |
| 4319 | 4317 |
| 4320 | 4318 |
| 4321 void LCodeGen::DoStringAdd(LStringAdd* instr) { | 4319 void LCodeGen::DoStringAdd(LStringAdd* instr) { |
| 4322 DCHECK(ToRegister(instr->context()).is(rsi)); | 4320 DCHECK(ToRegister(instr->context()).is(rsi)); |
| 4323 DCHECK(ToRegister(instr->left()).is(rdx)); | 4321 DCHECK(ToRegister(instr->left()).is(rdx)); |
| 4324 DCHECK(ToRegister(instr->right()).is(rax)); | 4322 DCHECK(ToRegister(instr->right()).is(rax)); |
| 4325 StringAddStub stub(isolate(), | 4323 StringAddStub stub(isolate(), |
| 4326 instr->hydrogen()->flags(), | 4324 instr->hydrogen()->flags(), |
| (...skipping 295 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 4622 } | 4620 } |
| 4623 | 4621 |
| 4624 | 4622 |
| 4625 void LCodeGen::DoSmiTag(LSmiTag* instr) { | 4623 void LCodeGen::DoSmiTag(LSmiTag* instr) { |
| 4626 HChange* hchange = instr->hydrogen(); | 4624 HChange* hchange = instr->hydrogen(); |
| 4627 Register input = ToRegister(instr->value()); | 4625 Register input = ToRegister(instr->value()); |
| 4628 Register output = ToRegister(instr->result()); | 4626 Register output = ToRegister(instr->result()); |
| 4629 if (hchange->CheckFlag(HValue::kCanOverflow) && | 4627 if (hchange->CheckFlag(HValue::kCanOverflow) && |
| 4630 hchange->value()->CheckFlag(HValue::kUint32)) { | 4628 hchange->value()->CheckFlag(HValue::kUint32)) { |
| 4631 Condition is_smi = __ CheckUInteger32ValidSmiValue(input); | 4629 Condition is_smi = __ CheckUInteger32ValidSmiValue(input); |
| 4632 DeoptimizeIf(NegateCondition(is_smi), instr, Deoptimizer::kOverflow); | 4630 DeoptimizeIf(NegateCondition(is_smi), instr, DeoptimizeReason::kOverflow); |
| 4633 } | 4631 } |
| 4634 __ Integer32ToSmi(output, input); | 4632 __ Integer32ToSmi(output, input); |
| 4635 if (hchange->CheckFlag(HValue::kCanOverflow) && | 4633 if (hchange->CheckFlag(HValue::kCanOverflow) && |
| 4636 !hchange->value()->CheckFlag(HValue::kUint32)) { | 4634 !hchange->value()->CheckFlag(HValue::kUint32)) { |
| 4637 DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow); | 4635 DeoptimizeIf(overflow, instr, DeoptimizeReason::kOverflow); |
| 4638 } | 4636 } |
| 4639 } | 4637 } |
| 4640 | 4638 |
| 4641 | 4639 |
| 4642 void LCodeGen::DoSmiUntag(LSmiUntag* instr) { | 4640 void LCodeGen::DoSmiUntag(LSmiUntag* instr) { |
| 4643 DCHECK(instr->value()->Equals(instr->result())); | 4641 DCHECK(instr->value()->Equals(instr->result())); |
| 4644 Register input = ToRegister(instr->value()); | 4642 Register input = ToRegister(instr->value()); |
| 4645 if (instr->needs_check()) { | 4643 if (instr->needs_check()) { |
| 4646 Condition is_smi = __ CheckSmi(input); | 4644 Condition is_smi = __ CheckSmi(input); |
| 4647 DeoptimizeIf(NegateCondition(is_smi), instr, Deoptimizer::kNotASmi); | 4645 DeoptimizeIf(NegateCondition(is_smi), instr, DeoptimizeReason::kNotASmi); |
| 4648 } else { | 4646 } else { |
| 4649 __ AssertSmi(input); | 4647 __ AssertSmi(input); |
| 4650 } | 4648 } |
| 4651 __ SmiToInteger32(input, input); | 4649 __ SmiToInteger32(input, input); |
| 4652 } | 4650 } |
| 4653 | 4651 |
| 4654 | 4652 |
| 4655 void LCodeGen::EmitNumberUntagD(LNumberUntagD* instr, Register input_reg, | 4653 void LCodeGen::EmitNumberUntagD(LNumberUntagD* instr, Register input_reg, |
| 4656 XMMRegister result_reg, NumberUntagDMode mode) { | 4654 XMMRegister result_reg, NumberUntagDMode mode) { |
| 4657 bool can_convert_undefined_to_nan = | 4655 bool can_convert_undefined_to_nan = |
| (...skipping 10 matching lines...) Expand all Loading... |
| 4668 __ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset), | 4666 __ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset), |
| 4669 Heap::kHeapNumberMapRootIndex); | 4667 Heap::kHeapNumberMapRootIndex); |
| 4670 | 4668 |
| 4671 // On x64 it is safe to load at heap number offset before evaluating the map | 4669 // On x64 it is safe to load at heap number offset before evaluating the map |
| 4672 // check, since all heap objects are at least two words long. | 4670 // check, since all heap objects are at least two words long. |
| 4673 __ Movsd(result_reg, FieldOperand(input_reg, HeapNumber::kValueOffset)); | 4671 __ Movsd(result_reg, FieldOperand(input_reg, HeapNumber::kValueOffset)); |
| 4674 | 4672 |
| 4675 if (can_convert_undefined_to_nan) { | 4673 if (can_convert_undefined_to_nan) { |
| 4676 __ j(not_equal, &convert, Label::kNear); | 4674 __ j(not_equal, &convert, Label::kNear); |
| 4677 } else { | 4675 } else { |
| 4678 DeoptimizeIf(not_equal, instr, Deoptimizer::kNotAHeapNumber); | 4676 DeoptimizeIf(not_equal, instr, DeoptimizeReason::kNotAHeapNumber); |
| 4679 } | 4677 } |
| 4680 | 4678 |
| 4681 if (deoptimize_on_minus_zero) { | 4679 if (deoptimize_on_minus_zero) { |
| 4682 XMMRegister xmm_scratch = double_scratch0(); | 4680 XMMRegister xmm_scratch = double_scratch0(); |
| 4683 __ Xorpd(xmm_scratch, xmm_scratch); | 4681 __ Xorpd(xmm_scratch, xmm_scratch); |
| 4684 __ Ucomisd(xmm_scratch, result_reg); | 4682 __ Ucomisd(xmm_scratch, result_reg); |
| 4685 __ j(not_equal, &done, Label::kNear); | 4683 __ j(not_equal, &done, Label::kNear); |
| 4686 __ Movmskpd(kScratchRegister, result_reg); | 4684 __ Movmskpd(kScratchRegister, result_reg); |
| 4687 __ testl(kScratchRegister, Immediate(1)); | 4685 __ testl(kScratchRegister, Immediate(1)); |
| 4688 DeoptimizeIf(not_zero, instr, Deoptimizer::kMinusZero); | 4686 DeoptimizeIf(not_zero, instr, DeoptimizeReason::kMinusZero); |
| 4689 } | 4687 } |
| 4690 __ jmp(&done, Label::kNear); | 4688 __ jmp(&done, Label::kNear); |
| 4691 | 4689 |
| 4692 if (can_convert_undefined_to_nan) { | 4690 if (can_convert_undefined_to_nan) { |
| 4693 __ bind(&convert); | 4691 __ bind(&convert); |
| 4694 | 4692 |
| 4695 // Convert undefined (and hole) to NaN. Compute NaN as 0/0. | 4693 // Convert undefined (and hole) to NaN. Compute NaN as 0/0. |
| 4696 __ CompareRoot(input_reg, Heap::kUndefinedValueRootIndex); | 4694 __ CompareRoot(input_reg, Heap::kUndefinedValueRootIndex); |
| 4697 DeoptimizeIf(not_equal, instr, Deoptimizer::kNotAHeapNumberUndefined); | 4695 DeoptimizeIf(not_equal, instr, |
| 4696 DeoptimizeReason::kNotAHeapNumberUndefined); |
| 4698 | 4697 |
| 4699 __ Pcmpeqd(result_reg, result_reg); | 4698 __ Pcmpeqd(result_reg, result_reg); |
| 4700 __ jmp(&done, Label::kNear); | 4699 __ jmp(&done, Label::kNear); |
| 4701 } | 4700 } |
| 4702 } else { | 4701 } else { |
| 4703 DCHECK(mode == NUMBER_CANDIDATE_IS_SMI); | 4702 DCHECK(mode == NUMBER_CANDIDATE_IS_SMI); |
| 4704 } | 4703 } |
| 4705 | 4704 |
| 4706 // Smi to XMM conversion | 4705 // Smi to XMM conversion |
| 4707 __ bind(&load_smi); | 4706 __ bind(&load_smi); |
| (...skipping 26 matching lines...) Expand all Loading... |
| 4734 | 4733 |
| 4735 __ bind(&check_bools); | 4734 __ bind(&check_bools); |
| 4736 __ CompareRoot(input_reg, Heap::kTrueValueRootIndex); | 4735 __ CompareRoot(input_reg, Heap::kTrueValueRootIndex); |
| 4737 __ j(not_equal, &check_false, Label::kNear); | 4736 __ j(not_equal, &check_false, Label::kNear); |
| 4738 __ Set(input_reg, 1); | 4737 __ Set(input_reg, 1); |
| 4739 __ jmp(done); | 4738 __ jmp(done); |
| 4740 | 4739 |
| 4741 __ bind(&check_false); | 4740 __ bind(&check_false); |
| 4742 __ CompareRoot(input_reg, Heap::kFalseValueRootIndex); | 4741 __ CompareRoot(input_reg, Heap::kFalseValueRootIndex); |
| 4743 DeoptimizeIf(not_equal, instr, | 4742 DeoptimizeIf(not_equal, instr, |
| 4744 Deoptimizer::kNotAHeapNumberUndefinedBoolean); | 4743 DeoptimizeReason::kNotAHeapNumberUndefinedBoolean); |
| 4745 __ Set(input_reg, 0); | 4744 __ Set(input_reg, 0); |
| 4746 } else { | 4745 } else { |
| 4747 XMMRegister scratch = ToDoubleRegister(instr->temp()); | 4746 XMMRegister scratch = ToDoubleRegister(instr->temp()); |
| 4748 DCHECK(!scratch.is(double_scratch0())); | 4747 DCHECK(!scratch.is(double_scratch0())); |
| 4749 __ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset), | 4748 __ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset), |
| 4750 Heap::kHeapNumberMapRootIndex); | 4749 Heap::kHeapNumberMapRootIndex); |
| 4751 DeoptimizeIf(not_equal, instr, Deoptimizer::kNotAHeapNumber); | 4750 DeoptimizeIf(not_equal, instr, DeoptimizeReason::kNotAHeapNumber); |
| 4752 __ Movsd(double_scratch0(), | 4751 __ Movsd(double_scratch0(), |
| 4753 FieldOperand(input_reg, HeapNumber::kValueOffset)); | 4752 FieldOperand(input_reg, HeapNumber::kValueOffset)); |
| 4754 __ Cvttsd2si(input_reg, double_scratch0()); | 4753 __ Cvttsd2si(input_reg, double_scratch0()); |
| 4755 __ Cvtlsi2sd(scratch, input_reg); | 4754 __ Cvtlsi2sd(scratch, input_reg); |
| 4756 __ Ucomisd(double_scratch0(), scratch); | 4755 __ Ucomisd(double_scratch0(), scratch); |
| 4757 DeoptimizeIf(not_equal, instr, Deoptimizer::kLostPrecision); | 4756 DeoptimizeIf(not_equal, instr, DeoptimizeReason::kLostPrecision); |
| 4758 DeoptimizeIf(parity_even, instr, Deoptimizer::kNaN); | 4757 DeoptimizeIf(parity_even, instr, DeoptimizeReason::kNaN); |
| 4759 if (instr->hydrogen()->GetMinusZeroMode() == FAIL_ON_MINUS_ZERO) { | 4758 if (instr->hydrogen()->GetMinusZeroMode() == FAIL_ON_MINUS_ZERO) { |
| 4760 __ testl(input_reg, input_reg); | 4759 __ testl(input_reg, input_reg); |
| 4761 __ j(not_zero, done); | 4760 __ j(not_zero, done); |
| 4762 __ Movmskpd(input_reg, double_scratch0()); | 4761 __ Movmskpd(input_reg, double_scratch0()); |
| 4763 __ andl(input_reg, Immediate(1)); | 4762 __ andl(input_reg, Immediate(1)); |
| 4764 DeoptimizeIf(not_zero, instr, Deoptimizer::kMinusZero); | 4763 DeoptimizeIf(not_zero, instr, DeoptimizeReason::kMinusZero); |
| 4765 } | 4764 } |
| 4766 } | 4765 } |
| 4767 } | 4766 } |
| 4768 | 4767 |
| 4769 | 4768 |
| 4770 void LCodeGen::DoTaggedToI(LTaggedToI* instr) { | 4769 void LCodeGen::DoTaggedToI(LTaggedToI* instr) { |
| 4771 class DeferredTaggedToI final : public LDeferredCode { | 4770 class DeferredTaggedToI final : public LDeferredCode { |
| 4772 public: | 4771 public: |
| 4773 DeferredTaggedToI(LCodeGen* codegen, LTaggedToI* instr) | 4772 DeferredTaggedToI(LCodeGen* codegen, LTaggedToI* instr) |
| 4774 : LDeferredCode(codegen), instr_(instr) { } | 4773 : LDeferredCode(codegen), instr_(instr) { } |
| (...skipping 50 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 4825 __ TruncateDoubleToI(result_reg, input_reg); | 4824 __ TruncateDoubleToI(result_reg, input_reg); |
| 4826 } else { | 4825 } else { |
| 4827 Label lost_precision, is_nan, minus_zero, done; | 4826 Label lost_precision, is_nan, minus_zero, done; |
| 4828 XMMRegister xmm_scratch = double_scratch0(); | 4827 XMMRegister xmm_scratch = double_scratch0(); |
| 4829 Label::Distance dist = DeoptEveryNTimes() ? Label::kFar : Label::kNear; | 4828 Label::Distance dist = DeoptEveryNTimes() ? Label::kFar : Label::kNear; |
| 4830 __ DoubleToI(result_reg, input_reg, xmm_scratch, | 4829 __ DoubleToI(result_reg, input_reg, xmm_scratch, |
| 4831 instr->hydrogen()->GetMinusZeroMode(), &lost_precision, | 4830 instr->hydrogen()->GetMinusZeroMode(), &lost_precision, |
| 4832 &is_nan, &minus_zero, dist); | 4831 &is_nan, &minus_zero, dist); |
| 4833 __ jmp(&done, dist); | 4832 __ jmp(&done, dist); |
| 4834 __ bind(&lost_precision); | 4833 __ bind(&lost_precision); |
| 4835 DeoptimizeIf(no_condition, instr, Deoptimizer::kLostPrecision); | 4834 DeoptimizeIf(no_condition, instr, DeoptimizeReason::kLostPrecision); |
| 4836 __ bind(&is_nan); | 4835 __ bind(&is_nan); |
| 4837 DeoptimizeIf(no_condition, instr, Deoptimizer::kNaN); | 4836 DeoptimizeIf(no_condition, instr, DeoptimizeReason::kNaN); |
| 4838 __ bind(&minus_zero); | 4837 __ bind(&minus_zero); |
| 4839 DeoptimizeIf(no_condition, instr, Deoptimizer::kMinusZero); | 4838 DeoptimizeIf(no_condition, instr, DeoptimizeReason::kMinusZero); |
| 4840 __ bind(&done); | 4839 __ bind(&done); |
| 4841 } | 4840 } |
| 4842 } | 4841 } |
| 4843 | 4842 |
| 4844 | 4843 |
| 4845 void LCodeGen::DoDoubleToSmi(LDoubleToSmi* instr) { | 4844 void LCodeGen::DoDoubleToSmi(LDoubleToSmi* instr) { |
| 4846 LOperand* input = instr->value(); | 4845 LOperand* input = instr->value(); |
| 4847 DCHECK(input->IsDoubleRegister()); | 4846 DCHECK(input->IsDoubleRegister()); |
| 4848 LOperand* result = instr->result(); | 4847 LOperand* result = instr->result(); |
| 4849 DCHECK(result->IsRegister()); | 4848 DCHECK(result->IsRegister()); |
| 4850 | 4849 |
| 4851 XMMRegister input_reg = ToDoubleRegister(input); | 4850 XMMRegister input_reg = ToDoubleRegister(input); |
| 4852 Register result_reg = ToRegister(result); | 4851 Register result_reg = ToRegister(result); |
| 4853 | 4852 |
| 4854 Label lost_precision, is_nan, minus_zero, done; | 4853 Label lost_precision, is_nan, minus_zero, done; |
| 4855 XMMRegister xmm_scratch = double_scratch0(); | 4854 XMMRegister xmm_scratch = double_scratch0(); |
| 4856 Label::Distance dist = DeoptEveryNTimes() ? Label::kFar : Label::kNear; | 4855 Label::Distance dist = DeoptEveryNTimes() ? Label::kFar : Label::kNear; |
| 4857 __ DoubleToI(result_reg, input_reg, xmm_scratch, | 4856 __ DoubleToI(result_reg, input_reg, xmm_scratch, |
| 4858 instr->hydrogen()->GetMinusZeroMode(), &lost_precision, &is_nan, | 4857 instr->hydrogen()->GetMinusZeroMode(), &lost_precision, &is_nan, |
| 4859 &minus_zero, dist); | 4858 &minus_zero, dist); |
| 4860 __ jmp(&done, dist); | 4859 __ jmp(&done, dist); |
| 4861 __ bind(&lost_precision); | 4860 __ bind(&lost_precision); |
| 4862 DeoptimizeIf(no_condition, instr, Deoptimizer::kLostPrecision); | 4861 DeoptimizeIf(no_condition, instr, DeoptimizeReason::kLostPrecision); |
| 4863 __ bind(&is_nan); | 4862 __ bind(&is_nan); |
| 4864 DeoptimizeIf(no_condition, instr, Deoptimizer::kNaN); | 4863 DeoptimizeIf(no_condition, instr, DeoptimizeReason::kNaN); |
| 4865 __ bind(&minus_zero); | 4864 __ bind(&minus_zero); |
| 4866 DeoptimizeIf(no_condition, instr, Deoptimizer::kMinusZero); | 4865 DeoptimizeIf(no_condition, instr, DeoptimizeReason::kMinusZero); |
| 4867 __ bind(&done); | 4866 __ bind(&done); |
| 4868 __ Integer32ToSmi(result_reg, result_reg); | 4867 __ Integer32ToSmi(result_reg, result_reg); |
| 4869 DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow); | 4868 DeoptimizeIf(overflow, instr, DeoptimizeReason::kOverflow); |
| 4870 } | 4869 } |
| 4871 | 4870 |
| 4872 | 4871 |
| 4873 void LCodeGen::DoCheckSmi(LCheckSmi* instr) { | 4872 void LCodeGen::DoCheckSmi(LCheckSmi* instr) { |
| 4874 LOperand* input = instr->value(); | 4873 LOperand* input = instr->value(); |
| 4875 Condition cc = masm()->CheckSmi(ToRegister(input)); | 4874 Condition cc = masm()->CheckSmi(ToRegister(input)); |
| 4876 DeoptimizeIf(NegateCondition(cc), instr, Deoptimizer::kNotASmi); | 4875 DeoptimizeIf(NegateCondition(cc), instr, DeoptimizeReason::kNotASmi); |
| 4877 } | 4876 } |
| 4878 | 4877 |
| 4879 | 4878 |
| 4880 void LCodeGen::DoCheckNonSmi(LCheckNonSmi* instr) { | 4879 void LCodeGen::DoCheckNonSmi(LCheckNonSmi* instr) { |
| 4881 if (!instr->hydrogen()->value()->type().IsHeapObject()) { | 4880 if (!instr->hydrogen()->value()->type().IsHeapObject()) { |
| 4882 LOperand* input = instr->value(); | 4881 LOperand* input = instr->value(); |
| 4883 Condition cc = masm()->CheckSmi(ToRegister(input)); | 4882 Condition cc = masm()->CheckSmi(ToRegister(input)); |
| 4884 DeoptimizeIf(cc, instr, Deoptimizer::kSmi); | 4883 DeoptimizeIf(cc, instr, DeoptimizeReason::kSmi); |
| 4885 } | 4884 } |
| 4886 } | 4885 } |
| 4887 | 4886 |
| 4888 | 4887 |
| 4889 void LCodeGen::DoCheckArrayBufferNotNeutered( | 4888 void LCodeGen::DoCheckArrayBufferNotNeutered( |
| 4890 LCheckArrayBufferNotNeutered* instr) { | 4889 LCheckArrayBufferNotNeutered* instr) { |
| 4891 Register view = ToRegister(instr->view()); | 4890 Register view = ToRegister(instr->view()); |
| 4892 | 4891 |
| 4893 __ movp(kScratchRegister, | 4892 __ movp(kScratchRegister, |
| 4894 FieldOperand(view, JSArrayBufferView::kBufferOffset)); | 4893 FieldOperand(view, JSArrayBufferView::kBufferOffset)); |
| 4895 __ testb(FieldOperand(kScratchRegister, JSArrayBuffer::kBitFieldOffset), | 4894 __ testb(FieldOperand(kScratchRegister, JSArrayBuffer::kBitFieldOffset), |
| 4896 Immediate(1 << JSArrayBuffer::WasNeutered::kShift)); | 4895 Immediate(1 << JSArrayBuffer::WasNeutered::kShift)); |
| 4897 DeoptimizeIf(not_zero, instr, Deoptimizer::kOutOfBounds); | 4896 DeoptimizeIf(not_zero, instr, DeoptimizeReason::kOutOfBounds); |
| 4898 } | 4897 } |
| 4899 | 4898 |
| 4900 | 4899 |
| 4901 void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) { | 4900 void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) { |
| 4902 Register input = ToRegister(instr->value()); | 4901 Register input = ToRegister(instr->value()); |
| 4903 | 4902 |
| 4904 __ movp(kScratchRegister, FieldOperand(input, HeapObject::kMapOffset)); | 4903 __ movp(kScratchRegister, FieldOperand(input, HeapObject::kMapOffset)); |
| 4905 | 4904 |
| 4906 if (instr->hydrogen()->is_interval_check()) { | 4905 if (instr->hydrogen()->is_interval_check()) { |
| 4907 InstanceType first; | 4906 InstanceType first; |
| 4908 InstanceType last; | 4907 InstanceType last; |
| 4909 instr->hydrogen()->GetCheckInterval(&first, &last); | 4908 instr->hydrogen()->GetCheckInterval(&first, &last); |
| 4910 | 4909 |
| 4911 __ cmpb(FieldOperand(kScratchRegister, Map::kInstanceTypeOffset), | 4910 __ cmpb(FieldOperand(kScratchRegister, Map::kInstanceTypeOffset), |
| 4912 Immediate(static_cast<int8_t>(first))); | 4911 Immediate(static_cast<int8_t>(first))); |
| 4913 | 4912 |
| 4914 // If there is only one type in the interval check for equality. | 4913 // If there is only one type in the interval check for equality. |
| 4915 if (first == last) { | 4914 if (first == last) { |
| 4916 DeoptimizeIf(not_equal, instr, Deoptimizer::kWrongInstanceType); | 4915 DeoptimizeIf(not_equal, instr, DeoptimizeReason::kWrongInstanceType); |
| 4917 } else { | 4916 } else { |
| 4918 DeoptimizeIf(below, instr, Deoptimizer::kWrongInstanceType); | 4917 DeoptimizeIf(below, instr, DeoptimizeReason::kWrongInstanceType); |
| 4919 // Omit check for the last type. | 4918 // Omit check for the last type. |
| 4920 if (last != LAST_TYPE) { | 4919 if (last != LAST_TYPE) { |
| 4921 __ cmpb(FieldOperand(kScratchRegister, Map::kInstanceTypeOffset), | 4920 __ cmpb(FieldOperand(kScratchRegister, Map::kInstanceTypeOffset), |
| 4922 Immediate(static_cast<int8_t>(last))); | 4921 Immediate(static_cast<int8_t>(last))); |
| 4923 DeoptimizeIf(above, instr, Deoptimizer::kWrongInstanceType); | 4922 DeoptimizeIf(above, instr, DeoptimizeReason::kWrongInstanceType); |
| 4924 } | 4923 } |
| 4925 } | 4924 } |
| 4926 } else { | 4925 } else { |
| 4927 uint8_t mask; | 4926 uint8_t mask; |
| 4928 uint8_t tag; | 4927 uint8_t tag; |
| 4929 instr->hydrogen()->GetCheckMaskAndTag(&mask, &tag); | 4928 instr->hydrogen()->GetCheckMaskAndTag(&mask, &tag); |
| 4930 | 4929 |
| 4931 if (base::bits::IsPowerOfTwo32(mask)) { | 4930 if (base::bits::IsPowerOfTwo32(mask)) { |
| 4932 DCHECK(tag == 0 || base::bits::IsPowerOfTwo32(tag)); | 4931 DCHECK(tag == 0 || base::bits::IsPowerOfTwo32(tag)); |
| 4933 __ testb(FieldOperand(kScratchRegister, Map::kInstanceTypeOffset), | 4932 __ testb(FieldOperand(kScratchRegister, Map::kInstanceTypeOffset), |
| 4934 Immediate(mask)); | 4933 Immediate(mask)); |
| 4935 DeoptimizeIf(tag == 0 ? not_zero : zero, instr, | 4934 DeoptimizeIf(tag == 0 ? not_zero : zero, instr, |
| 4936 Deoptimizer::kWrongInstanceType); | 4935 DeoptimizeReason::kWrongInstanceType); |
| 4937 } else { | 4936 } else { |
| 4938 __ movzxbl(kScratchRegister, | 4937 __ movzxbl(kScratchRegister, |
| 4939 FieldOperand(kScratchRegister, Map::kInstanceTypeOffset)); | 4938 FieldOperand(kScratchRegister, Map::kInstanceTypeOffset)); |
| 4940 __ andb(kScratchRegister, Immediate(mask)); | 4939 __ andb(kScratchRegister, Immediate(mask)); |
| 4941 __ cmpb(kScratchRegister, Immediate(tag)); | 4940 __ cmpb(kScratchRegister, Immediate(tag)); |
| 4942 DeoptimizeIf(not_equal, instr, Deoptimizer::kWrongInstanceType); | 4941 DeoptimizeIf(not_equal, instr, DeoptimizeReason::kWrongInstanceType); |
| 4943 } | 4942 } |
| 4944 } | 4943 } |
| 4945 } | 4944 } |
| 4946 | 4945 |
| 4947 | 4946 |
| 4948 void LCodeGen::DoCheckValue(LCheckValue* instr) { | 4947 void LCodeGen::DoCheckValue(LCheckValue* instr) { |
| 4949 Register reg = ToRegister(instr->value()); | 4948 Register reg = ToRegister(instr->value()); |
| 4950 __ Cmp(reg, instr->hydrogen()->object().handle()); | 4949 __ Cmp(reg, instr->hydrogen()->object().handle()); |
| 4951 DeoptimizeIf(not_equal, instr, Deoptimizer::kValueMismatch); | 4950 DeoptimizeIf(not_equal, instr, DeoptimizeReason::kValueMismatch); |
| 4952 } | 4951 } |
| 4953 | 4952 |
| 4954 | 4953 |
| 4955 void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) { | 4954 void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) { |
| 4956 { | 4955 { |
| 4957 PushSafepointRegistersScope scope(this); | 4956 PushSafepointRegistersScope scope(this); |
| 4958 __ Push(object); | 4957 __ Push(object); |
| 4959 __ Set(rsi, 0); | 4958 __ Set(rsi, 0); |
| 4960 __ CallRuntimeSaveDoubles(Runtime::kTryMigrateInstance); | 4959 __ CallRuntimeSaveDoubles(Runtime::kTryMigrateInstance); |
| 4961 RecordSafepointWithRegisters( | 4960 RecordSafepointWithRegisters( |
| 4962 instr->pointer_map(), 1, Safepoint::kNoLazyDeopt); | 4961 instr->pointer_map(), 1, Safepoint::kNoLazyDeopt); |
| 4963 | 4962 |
| 4964 __ testp(rax, Immediate(kSmiTagMask)); | 4963 __ testp(rax, Immediate(kSmiTagMask)); |
| 4965 } | 4964 } |
| 4966 DeoptimizeIf(zero, instr, Deoptimizer::kInstanceMigrationFailed); | 4965 DeoptimizeIf(zero, instr, DeoptimizeReason::kInstanceMigrationFailed); |
| 4967 } | 4966 } |
| 4968 | 4967 |
| 4969 | 4968 |
| 4970 void LCodeGen::DoCheckMaps(LCheckMaps* instr) { | 4969 void LCodeGen::DoCheckMaps(LCheckMaps* instr) { |
| 4971 class DeferredCheckMaps final : public LDeferredCode { | 4970 class DeferredCheckMaps final : public LDeferredCode { |
| 4972 public: | 4971 public: |
| 4973 DeferredCheckMaps(LCodeGen* codegen, LCheckMaps* instr, Register object) | 4972 DeferredCheckMaps(LCodeGen* codegen, LCheckMaps* instr, Register object) |
| 4974 : LDeferredCode(codegen), instr_(instr), object_(object) { | 4973 : LDeferredCode(codegen), instr_(instr), object_(object) { |
| 4975 SetExit(check_maps()); | 4974 SetExit(check_maps()); |
| 4976 } | 4975 } |
| (...skipping 33 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 5010 Handle<Map> map = maps->at(i).handle(); | 5009 Handle<Map> map = maps->at(i).handle(); |
| 5011 __ CompareMap(reg, map); | 5010 __ CompareMap(reg, map); |
| 5012 __ j(equal, &success, Label::kNear); | 5011 __ j(equal, &success, Label::kNear); |
| 5013 } | 5012 } |
| 5014 | 5013 |
| 5015 Handle<Map> map = maps->at(maps->size() - 1).handle(); | 5014 Handle<Map> map = maps->at(maps->size() - 1).handle(); |
| 5016 __ CompareMap(reg, map); | 5015 __ CompareMap(reg, map); |
| 5017 if (instr->hydrogen()->HasMigrationTarget()) { | 5016 if (instr->hydrogen()->HasMigrationTarget()) { |
| 5018 __ j(not_equal, deferred->entry()); | 5017 __ j(not_equal, deferred->entry()); |
| 5019 } else { | 5018 } else { |
| 5020 DeoptimizeIf(not_equal, instr, Deoptimizer::kWrongMap); | 5019 DeoptimizeIf(not_equal, instr, DeoptimizeReason::kWrongMap); |
| 5021 } | 5020 } |
| 5022 | 5021 |
| 5023 __ bind(&success); | 5022 __ bind(&success); |
| 5024 } | 5023 } |
| 5025 | 5024 |
| 5026 | 5025 |
| 5027 void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) { | 5026 void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) { |
| 5028 XMMRegister value_reg = ToDoubleRegister(instr->unclamped()); | 5027 XMMRegister value_reg = ToDoubleRegister(instr->unclamped()); |
| 5029 XMMRegister xmm_scratch = double_scratch0(); | 5028 XMMRegister xmm_scratch = double_scratch0(); |
| 5030 Register result_reg = ToRegister(instr->result()); | 5029 Register result_reg = ToRegister(instr->result()); |
| (...skipping 18 matching lines...) Expand all Loading... |
| 5049 __ JumpIfSmi(input_reg, &is_smi, dist); | 5048 __ JumpIfSmi(input_reg, &is_smi, dist); |
| 5050 | 5049 |
| 5051 // Check for heap number | 5050 // Check for heap number |
| 5052 __ Cmp(FieldOperand(input_reg, HeapObject::kMapOffset), | 5051 __ Cmp(FieldOperand(input_reg, HeapObject::kMapOffset), |
| 5053 factory()->heap_number_map()); | 5052 factory()->heap_number_map()); |
| 5054 __ j(equal, &heap_number, Label::kNear); | 5053 __ j(equal, &heap_number, Label::kNear); |
| 5055 | 5054 |
| 5056 // Check for undefined. Undefined is converted to zero for clamping | 5055 // Check for undefined. Undefined is converted to zero for clamping |
| 5057 // conversions. | 5056 // conversions. |
| 5058 __ Cmp(input_reg, factory()->undefined_value()); | 5057 __ Cmp(input_reg, factory()->undefined_value()); |
| 5059 DeoptimizeIf(not_equal, instr, Deoptimizer::kNotAHeapNumberUndefined); | 5058 DeoptimizeIf(not_equal, instr, DeoptimizeReason::kNotAHeapNumberUndefined); |
| 5060 __ xorl(input_reg, input_reg); | 5059 __ xorl(input_reg, input_reg); |
| 5061 __ jmp(&done, Label::kNear); | 5060 __ jmp(&done, Label::kNear); |
| 5062 | 5061 |
| 5063 // Heap number | 5062 // Heap number |
| 5064 __ bind(&heap_number); | 5063 __ bind(&heap_number); |
| 5065 __ Movsd(xmm_scratch, FieldOperand(input_reg, HeapNumber::kValueOffset)); | 5064 __ Movsd(xmm_scratch, FieldOperand(input_reg, HeapNumber::kValueOffset)); |
| 5066 __ ClampDoubleToUint8(xmm_scratch, temp_xmm_reg, input_reg); | 5065 __ ClampDoubleToUint8(xmm_scratch, temp_xmm_reg, input_reg); |
| 5067 __ jmp(&done, Label::kNear); | 5066 __ jmp(&done, Label::kNear); |
| 5068 | 5067 |
| 5069 // smi | 5068 // smi |
| (...skipping 410 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 5480 __ LoadRoot(result, Heap::kEmptyFixedArrayRootIndex); | 5479 __ LoadRoot(result, Heap::kEmptyFixedArrayRootIndex); |
| 5481 __ jmp(&done, Label::kNear); | 5480 __ jmp(&done, Label::kNear); |
| 5482 __ bind(&load_cache); | 5481 __ bind(&load_cache); |
| 5483 __ LoadInstanceDescriptors(map, result); | 5482 __ LoadInstanceDescriptors(map, result); |
| 5484 __ movp(result, | 5483 __ movp(result, |
| 5485 FieldOperand(result, DescriptorArray::kEnumCacheOffset)); | 5484 FieldOperand(result, DescriptorArray::kEnumCacheOffset)); |
| 5486 __ movp(result, | 5485 __ movp(result, |
| 5487 FieldOperand(result, FixedArray::SizeFor(instr->idx()))); | 5486 FieldOperand(result, FixedArray::SizeFor(instr->idx()))); |
| 5488 __ bind(&done); | 5487 __ bind(&done); |
| 5489 Condition cc = masm()->CheckSmi(result); | 5488 Condition cc = masm()->CheckSmi(result); |
| 5490 DeoptimizeIf(cc, instr, Deoptimizer::kNoCache); | 5489 DeoptimizeIf(cc, instr, DeoptimizeReason::kNoCache); |
| 5491 } | 5490 } |
| 5492 | 5491 |
| 5493 | 5492 |
| 5494 void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) { | 5493 void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) { |
| 5495 Register object = ToRegister(instr->value()); | 5494 Register object = ToRegister(instr->value()); |
| 5496 __ cmpp(ToRegister(instr->map()), | 5495 __ cmpp(ToRegister(instr->map()), |
| 5497 FieldOperand(object, HeapObject::kMapOffset)); | 5496 FieldOperand(object, HeapObject::kMapOffset)); |
| 5498 DeoptimizeIf(not_equal, instr, Deoptimizer::kWrongMap); | 5497 DeoptimizeIf(not_equal, instr, DeoptimizeReason::kWrongMap); |
| 5499 } | 5498 } |
| 5500 | 5499 |
| 5501 | 5500 |
| 5502 void LCodeGen::DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr, | 5501 void LCodeGen::DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr, |
| 5503 Register object, | 5502 Register object, |
| 5504 Register index) { | 5503 Register index) { |
| 5505 PushSafepointRegistersScope scope(this); | 5504 PushSafepointRegistersScope scope(this); |
| 5506 __ Push(object); | 5505 __ Push(object); |
| 5507 __ Push(index); | 5506 __ Push(index); |
| 5508 __ xorp(rsi, rsi); | 5507 __ xorp(rsi, rsi); |
| (...skipping 60 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 5569 __ bind(deferred->exit()); | 5568 __ bind(deferred->exit()); |
| 5570 __ bind(&done); | 5569 __ bind(&done); |
| 5571 } | 5570 } |
| 5572 | 5571 |
| 5573 #undef __ | 5572 #undef __ |
| 5574 | 5573 |
| 5575 } // namespace internal | 5574 } // namespace internal |
| 5576 } // namespace v8 | 5575 } // namespace v8 |
| 5577 | 5576 |
| 5578 #endif // V8_TARGET_ARCH_X64 | 5577 #endif // V8_TARGET_ARCH_X64 |
| OLD | NEW |