| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #if V8_TARGET_ARCH_IA32 | 5 #if V8_TARGET_ARCH_IA32 |
| 6 | 6 |
| 7 #include "src/crankshaft/ia32/lithium-codegen-ia32.h" | 7 #include "src/crankshaft/ia32/lithium-codegen-ia32.h" |
| 8 | 8 |
| 9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
| 10 #include "src/code-factory.h" | 10 #include "src/code-factory.h" |
| (...skipping 664 matching lines...) |
| 675 WriteTranslation(environment, &translation); | 675 WriteTranslation(environment, &translation); |
| 676 int deoptimization_index = deoptimizations_.length(); | 676 int deoptimization_index = deoptimizations_.length(); |
| 677 int pc_offset = masm()->pc_offset(); | 677 int pc_offset = masm()->pc_offset(); |
| 678 environment->Register(deoptimization_index, | 678 environment->Register(deoptimization_index, |
| 679 translation.index(), | 679 translation.index(), |
| 680 (mode == Safepoint::kLazyDeopt) ? pc_offset : -1); | 680 (mode == Safepoint::kLazyDeopt) ? pc_offset : -1); |
| 681 deoptimizations_.Add(environment, zone()); | 681 deoptimizations_.Add(environment, zone()); |
| 682 } | 682 } |
| 683 } | 683 } |
| 684 | 684 |
| 685 | |
| 686 void LCodeGen::DeoptimizeIf(Condition cc, LInstruction* instr, | 685 void LCodeGen::DeoptimizeIf(Condition cc, LInstruction* instr, |
| 687 Deoptimizer::DeoptReason deopt_reason, | 686 DeoptimizeReason deopt_reason, |
| 688 Deoptimizer::BailoutType bailout_type) { | 687 Deoptimizer::BailoutType bailout_type) { |
| 689 LEnvironment* environment = instr->environment(); | 688 LEnvironment* environment = instr->environment(); |
| 690 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); | 689 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); |
| 691 DCHECK(environment->HasBeenRegistered()); | 690 DCHECK(environment->HasBeenRegistered()); |
| 692 int id = environment->deoptimization_index(); | 691 int id = environment->deoptimization_index(); |
| 693 Address entry = | 692 Address entry = |
| 694 Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type); | 693 Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type); |
| 695 if (entry == NULL) { | 694 if (entry == NULL) { |
| 696 Abort(kBailoutWasNotPrepared); | 695 Abort(kBailoutWasNotPrepared); |
| 697 return; | 696 return; |
| (...skipping 44 matching lines...) |
| 742 jump_table_.Add(table_entry, zone()); | 741 jump_table_.Add(table_entry, zone()); |
| 743 } | 742 } |
| 744 if (cc == no_condition) { | 743 if (cc == no_condition) { |
| 745 __ jmp(&jump_table_.last().label); | 744 __ jmp(&jump_table_.last().label); |
| 746 } else { | 745 } else { |
| 747 __ j(cc, &jump_table_.last().label); | 746 __ j(cc, &jump_table_.last().label); |
| 748 } | 747 } |
| 749 } | 748 } |
| 750 } | 749 } |
| 751 | 750 |
| 752 | |
| 753 void LCodeGen::DeoptimizeIf(Condition cc, LInstruction* instr, | 751 void LCodeGen::DeoptimizeIf(Condition cc, LInstruction* instr, |
| 754 Deoptimizer::DeoptReason deopt_reason) { | 752 DeoptimizeReason deopt_reason) { |
| 755 Deoptimizer::BailoutType bailout_type = info()->IsStub() | 753 Deoptimizer::BailoutType bailout_type = info()->IsStub() |
| 756 ? Deoptimizer::LAZY | 754 ? Deoptimizer::LAZY |
| 757 : Deoptimizer::EAGER; | 755 : Deoptimizer::EAGER; |
| 758 DeoptimizeIf(cc, instr, deopt_reason, bailout_type); | 756 DeoptimizeIf(cc, instr, deopt_reason, bailout_type); |
| 759 } | 757 } |
| 760 | 758 |
| 761 | 759 |
| 762 void LCodeGen::RecordSafepointWithLazyDeopt( | 760 void LCodeGen::RecordSafepointWithLazyDeopt( |
| 763 LInstruction* instr, SafepointMode safepoint_mode) { | 761 LInstruction* instr, SafepointMode safepoint_mode) { |
| 764 if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) { | 762 if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) { |
| (...skipping 110 matching lines...) |
| 875 int32_t mask = divisor < 0 ? -(divisor + 1) : (divisor - 1); | 873 int32_t mask = divisor < 0 ? -(divisor + 1) : (divisor - 1); |
| 876 Label dividend_is_not_negative, done; | 874 Label dividend_is_not_negative, done; |
| 877 if (hmod->CheckFlag(HValue::kLeftCanBeNegative)) { | 875 if (hmod->CheckFlag(HValue::kLeftCanBeNegative)) { |
| 878 __ test(dividend, dividend); | 876 __ test(dividend, dividend); |
| 879 __ j(not_sign, &dividend_is_not_negative, Label::kNear); | 877 __ j(not_sign, &dividend_is_not_negative, Label::kNear); |
| 880 // Note that this is correct even for kMinInt operands. | 878 // Note that this is correct even for kMinInt operands. |
| 881 __ neg(dividend); | 879 __ neg(dividend); |
| 882 __ and_(dividend, mask); | 880 __ and_(dividend, mask); |
| 883 __ neg(dividend); | 881 __ neg(dividend); |
| 884 if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { | 882 if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { |
| 885 DeoptimizeIf(zero, instr, Deoptimizer::kMinusZero); | 883 DeoptimizeIf(zero, instr, DeoptimizeReason::kMinusZero); |
| 886 } | 884 } |
| 887 __ jmp(&done, Label::kNear); | 885 __ jmp(&done, Label::kNear); |
| 888 } | 886 } |
| 889 | 887 |
| 890 __ bind(&dividend_is_not_negative); | 888 __ bind(&dividend_is_not_negative); |
| 891 __ and_(dividend, mask); | 889 __ and_(dividend, mask); |
| 892 __ bind(&done); | 890 __ bind(&done); |
| 893 } | 891 } |
| 894 | 892 |
| 895 | 893 |
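Note on DoModByPowerOf2I above: since |divisor| is a power of two, the remainder is just the low bits of the dividend, and the neg/and/neg sequence preserves the dividend's sign (JS % truncates toward zero). A minimal standalone C++ sketch of the same arithmetic — not V8 code, helper name illustrative:

    #include <cstdint>

    // Remainder of truncating division when |divisor| is a power of two.
    int32_t ModByPowerOf2(int32_t dividend, int32_t divisor) {
      // mask == |divisor| - 1, written so it is safe even for divisor == kMinInt.
      uint32_t mask = divisor < 0 ? -(divisor + 1) : divisor - 1;
      if (dividend < 0) {
        // neg / and / neg, done in unsigned arithmetic so kMinInt stays well-defined;
        // kMinInt & mask == 0, matching the "correct even for kMinInt" comment.
        uint32_t magnitude = 0u - static_cast<uint32_t>(dividend);
        return -static_cast<int32_t>(magnitude & mask);
      }
      return dividend & mask;  // non-negative dividends only need the mask
    }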
| 896 void LCodeGen::DoModByConstI(LModByConstI* instr) { | 894 void LCodeGen::DoModByConstI(LModByConstI* instr) { |
| 897 Register dividend = ToRegister(instr->dividend()); | 895 Register dividend = ToRegister(instr->dividend()); |
| 898 int32_t divisor = instr->divisor(); | 896 int32_t divisor = instr->divisor(); |
| 899 DCHECK(ToRegister(instr->result()).is(eax)); | 897 DCHECK(ToRegister(instr->result()).is(eax)); |
| 900 | 898 |
| 901 if (divisor == 0) { | 899 if (divisor == 0) { |
| 902 DeoptimizeIf(no_condition, instr, Deoptimizer::kDivisionByZero); | 900 DeoptimizeIf(no_condition, instr, DeoptimizeReason::kDivisionByZero); |
| 903 return; | 901 return; |
| 904 } | 902 } |
| 905 | 903 |
| 906 __ TruncatingDiv(dividend, Abs(divisor)); | 904 __ TruncatingDiv(dividend, Abs(divisor)); |
| 907 __ imul(edx, edx, Abs(divisor)); | 905 __ imul(edx, edx, Abs(divisor)); |
| 908 __ mov(eax, dividend); | 906 __ mov(eax, dividend); |
| 909 __ sub(eax, edx); | 907 __ sub(eax, edx); |
| 910 | 908 |
| 911 // Check for negative zero. | 909 // Check for negative zero. |
| 912 HMod* hmod = instr->hydrogen(); | 910 HMod* hmod = instr->hydrogen(); |
| 913 if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { | 911 if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { |
| 914 Label remainder_not_zero; | 912 Label remainder_not_zero; |
| 915 __ j(not_zero, &remainder_not_zero, Label::kNear); | 913 __ j(not_zero, &remainder_not_zero, Label::kNear); |
| 916 __ cmp(dividend, Immediate(0)); | 914 __ cmp(dividend, Immediate(0)); |
| 917 DeoptimizeIf(less, instr, Deoptimizer::kMinusZero); | 915 DeoptimizeIf(less, instr, DeoptimizeReason::kMinusZero); |
| 918 __ bind(&remainder_not_zero); | 916 __ bind(&remainder_not_zero); |
| 919 } | 917 } |
| 920 } | 918 } |
| 921 | 919 |
| 922 | 920 |
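Note on DoModByConstI above: TruncatingDiv leaves the truncated quotient by Abs(divisor) in edx, and the imul/sub pair then rebuilds the remainder as dividend - quotient * Abs(divisor). A standalone sketch of the value being computed, with plain division standing in for the multiply-by-magic-constant trick (hypothetical helper, not V8 code):

    #include <cstdint>
    #include <cstdlib>

    // Remainder of truncating (C-style) division by a non-zero constant.
    // The remainder depends only on |divisor|, which is why the generated code
    // may divide by Abs(divisor). Assumes divisor != 0 (deoptimized above) and
    // divisor != kMinInt for simplicity.
    int32_t ModByConst(int32_t dividend, int32_t divisor) {
      int32_t abs_divisor = std::abs(divisor);
      int32_t quotient = dividend / abs_divisor;    // TruncatingDiv(dividend, Abs(divisor))
      return dividend - quotient * abs_divisor;     // imul edx, edx, Abs(divisor); sub eax, edx
    }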
| 923 void LCodeGen::DoModI(LModI* instr) { | 921 void LCodeGen::DoModI(LModI* instr) { |
| 924 HMod* hmod = instr->hydrogen(); | 922 HMod* hmod = instr->hydrogen(); |
| 925 | 923 |
| 926 Register left_reg = ToRegister(instr->left()); | 924 Register left_reg = ToRegister(instr->left()); |
| 927 DCHECK(left_reg.is(eax)); | 925 DCHECK(left_reg.is(eax)); |
| 928 Register right_reg = ToRegister(instr->right()); | 926 Register right_reg = ToRegister(instr->right()); |
| 929 DCHECK(!right_reg.is(eax)); | 927 DCHECK(!right_reg.is(eax)); |
| 930 DCHECK(!right_reg.is(edx)); | 928 DCHECK(!right_reg.is(edx)); |
| 931 Register result_reg = ToRegister(instr->result()); | 929 Register result_reg = ToRegister(instr->result()); |
| 932 DCHECK(result_reg.is(edx)); | 930 DCHECK(result_reg.is(edx)); |
| 933 | 931 |
| 934 Label done; | 932 Label done; |
| 935 // Check for x % 0, idiv would signal a divide error. We have to | 933 // Check for x % 0, idiv would signal a divide error. We have to |
| 936 // deopt in this case because we can't return a NaN. | 934 // deopt in this case because we can't return a NaN. |
| 937 if (hmod->CheckFlag(HValue::kCanBeDivByZero)) { | 935 if (hmod->CheckFlag(HValue::kCanBeDivByZero)) { |
| 938 __ test(right_reg, Operand(right_reg)); | 936 __ test(right_reg, Operand(right_reg)); |
| 939 DeoptimizeIf(zero, instr, Deoptimizer::kDivisionByZero); | 937 DeoptimizeIf(zero, instr, DeoptimizeReason::kDivisionByZero); |
| 940 } | 938 } |
| 941 | 939 |
| 942 // Check for kMinInt % -1, idiv would signal a divide error. We | 940 // Check for kMinInt % -1, idiv would signal a divide error. We |
| 943 // have to deopt if we care about -0, because we can't return that. | 941 // have to deopt if we care about -0, because we can't return that. |
| 944 if (hmod->CheckFlag(HValue::kCanOverflow)) { | 942 if (hmod->CheckFlag(HValue::kCanOverflow)) { |
| 945 Label no_overflow_possible; | 943 Label no_overflow_possible; |
| 946 __ cmp(left_reg, kMinInt); | 944 __ cmp(left_reg, kMinInt); |
| 947 __ j(not_equal, &no_overflow_possible, Label::kNear); | 945 __ j(not_equal, &no_overflow_possible, Label::kNear); |
| 948 __ cmp(right_reg, -1); | 946 __ cmp(right_reg, -1); |
| 949 if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { | 947 if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { |
| 950 DeoptimizeIf(equal, instr, Deoptimizer::kMinusZero); | 948 DeoptimizeIf(equal, instr, DeoptimizeReason::kMinusZero); |
| 951 } else { | 949 } else { |
| 952 __ j(not_equal, &no_overflow_possible, Label::kNear); | 950 __ j(not_equal, &no_overflow_possible, Label::kNear); |
| 953 __ Move(result_reg, Immediate(0)); | 951 __ Move(result_reg, Immediate(0)); |
| 954 __ jmp(&done, Label::kNear); | 952 __ jmp(&done, Label::kNear); |
| 955 } | 953 } |
| 956 __ bind(&no_overflow_possible); | 954 __ bind(&no_overflow_possible); |
| 957 } | 955 } |
| 958 | 956 |
| 959 // Sign extend dividend in eax into edx:eax. | 957 // Sign extend dividend in eax into edx:eax. |
| 960 __ cdq(); | 958 __ cdq(); |
| 961 | 959 |
| 962 // If we care about -0, test if the dividend is <0 and the result is 0. | 960 // If we care about -0, test if the dividend is <0 and the result is 0. |
| 963 if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { | 961 if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { |
| 964 Label positive_left; | 962 Label positive_left; |
| 965 __ test(left_reg, Operand(left_reg)); | 963 __ test(left_reg, Operand(left_reg)); |
| 966 __ j(not_sign, &positive_left, Label::kNear); | 964 __ j(not_sign, &positive_left, Label::kNear); |
| 967 __ idiv(right_reg); | 965 __ idiv(right_reg); |
| 968 __ test(result_reg, Operand(result_reg)); | 966 __ test(result_reg, Operand(result_reg)); |
| 969 DeoptimizeIf(zero, instr, Deoptimizer::kMinusZero); | 967 DeoptimizeIf(zero, instr, DeoptimizeReason::kMinusZero); |
| 970 __ jmp(&done, Label::kNear); | 968 __ jmp(&done, Label::kNear); |
| 971 __ bind(&positive_left); | 969 __ bind(&positive_left); |
| 972 } | 970 } |
| 973 __ idiv(right_reg); | 971 __ idiv(right_reg); |
| 974 __ bind(&done); | 972 __ bind(&done); |
| 975 } | 973 } |
| 976 | 974 |
| 977 | 975 |
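Note on DoModI above: idiv raises a divide error (it does not just set a flag) both for a zero divisor and for kMinInt / -1, so those cases are filtered out before the instruction, and a zero remainder from a negative dividend must deopt when -0 is observable. A standalone sketch of that decision logic, not V8 code; the names and bool-returning shape are illustrative:

    #include <cstdint>

    const int32_t kMinInt32 = INT32_MIN;

    // Returns false where the generated code would deoptimize instead of
    // producing an int32 result.
    bool Int32Mod(int32_t left, int32_t right, bool bailout_on_minus_zero,
                  int32_t* result) {
      if (right == 0) return false;                // JS result is NaN; idiv would fault
      if (left == kMinInt32 && right == -1) {      // idiv would fault: quotient overflows
        if (bailout_on_minus_zero) return false;   // JS result is -0
        *result = 0;
        return true;
      }
      int32_t mod = left % right;                  // cdq + idiv
      if (mod == 0 && left < 0 && bailout_on_minus_zero) return false;  // would be -0
      *result = mod;
      return true;
    }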
| 978 void LCodeGen::DoDivByPowerOf2I(LDivByPowerOf2I* instr) { | 976 void LCodeGen::DoDivByPowerOf2I(LDivByPowerOf2I* instr) { |
| 979 Register dividend = ToRegister(instr->dividend()); | 977 Register dividend = ToRegister(instr->dividend()); |
| 980 int32_t divisor = instr->divisor(); | 978 int32_t divisor = instr->divisor(); |
| 981 Register result = ToRegister(instr->result()); | 979 Register result = ToRegister(instr->result()); |
| 982 DCHECK(divisor == kMinInt || base::bits::IsPowerOfTwo32(Abs(divisor))); | 980 DCHECK(divisor == kMinInt || base::bits::IsPowerOfTwo32(Abs(divisor))); |
| 983 DCHECK(!result.is(dividend)); | 981 DCHECK(!result.is(dividend)); |
| 984 | 982 |
| 985 // Check for (0 / -x) that will produce negative zero. | 983 // Check for (0 / -x) that will produce negative zero. |
| 986 HDiv* hdiv = instr->hydrogen(); | 984 HDiv* hdiv = instr->hydrogen(); |
| 987 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) { | 985 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) { |
| 988 __ test(dividend, dividend); | 986 __ test(dividend, dividend); |
| 989 DeoptimizeIf(zero, instr, Deoptimizer::kMinusZero); | 987 DeoptimizeIf(zero, instr, DeoptimizeReason::kMinusZero); |
| 990 } | 988 } |
| 991 // Check for (kMinInt / -1). | 989 // Check for (kMinInt / -1). |
| 992 if (hdiv->CheckFlag(HValue::kCanOverflow) && divisor == -1) { | 990 if (hdiv->CheckFlag(HValue::kCanOverflow) && divisor == -1) { |
| 993 __ cmp(dividend, kMinInt); | 991 __ cmp(dividend, kMinInt); |
| 994 DeoptimizeIf(zero, instr, Deoptimizer::kOverflow); | 992 DeoptimizeIf(zero, instr, DeoptimizeReason::kOverflow); |
| 995 } | 993 } |
| 996 // Deoptimize if remainder will not be 0. | 994 // Deoptimize if remainder will not be 0. |
| 997 if (!hdiv->CheckFlag(HInstruction::kAllUsesTruncatingToInt32) && | 995 if (!hdiv->CheckFlag(HInstruction::kAllUsesTruncatingToInt32) && |
| 998 divisor != 1 && divisor != -1) { | 996 divisor != 1 && divisor != -1) { |
| 999 int32_t mask = divisor < 0 ? -(divisor + 1) : (divisor - 1); | 997 int32_t mask = divisor < 0 ? -(divisor + 1) : (divisor - 1); |
| 1000 __ test(dividend, Immediate(mask)); | 998 __ test(dividend, Immediate(mask)); |
| 1001 DeoptimizeIf(not_zero, instr, Deoptimizer::kLostPrecision); | 999 DeoptimizeIf(not_zero, instr, DeoptimizeReason::kLostPrecision); |
| 1002 } | 1000 } |
| 1003 __ Move(result, dividend); | 1001 __ Move(result, dividend); |
| 1004 int32_t shift = WhichPowerOf2Abs(divisor); | 1002 int32_t shift = WhichPowerOf2Abs(divisor); |
| 1005 if (shift > 0) { | 1003 if (shift > 0) { |
| 1006 // The arithmetic shift is always OK, the 'if' is an optimization only. | 1004 // The arithmetic shift is always OK, the 'if' is an optimization only. |
| 1007 if (shift > 1) __ sar(result, 31); | 1005 if (shift > 1) __ sar(result, 31); |
| 1008 __ shr(result, 32 - shift); | 1006 __ shr(result, 32 - shift); |
| 1009 __ add(result, dividend); | 1007 __ add(result, dividend); |
| 1010 __ sar(result, shift); | 1008 __ sar(result, shift); |
| 1011 } | 1009 } |
| 1012 if (divisor < 0) __ neg(result); | 1010 if (divisor < 0) __ neg(result); |
| 1013 } | 1011 } |
| 1014 | 1012 |
| 1015 | 1013 |
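Note on DoDivByPowerOf2I above: an arithmetic right shift on its own rounds toward minus infinity, so for negative dividends the code first adds a bias of 2^shift - 1 (built from the sign bits via sar/shr) to obtain the round-toward-zero quotient JS requires. A standalone sketch of that sequence, not V8 code; the small loop stands in for WhichPowerOf2Abs:

    #include <cstdint>

    // Truncating division when |divisor| is a power of two, assuming the
    // remainder/overflow deopts above have already fired (in particular
    // dividend != kMinInt when divisor == -1).
    int32_t DivByPowerOf2(int32_t dividend, int32_t divisor) {
      uint32_t abs_divisor = divisor < 0 ? 0u - static_cast<uint32_t>(divisor)
                                         : static_cast<uint32_t>(divisor);
      int shift = 0;
      while ((1u << shift) < abs_divisor) shift++;  // WhichPowerOf2Abs(divisor)
      int32_t result = dividend;
      if (shift > 0) {
        uint32_t sign_bits = dividend < 0 ? 0xFFFFFFFFu : 0u;            // sar result, 31
        int32_t bias = static_cast<int32_t>(sign_bits >> (32 - shift));  // shr result, 32 - shift
        result = dividend + bias;                                        // add result, dividend
        result >>= shift;                // sar result, shift (arithmetic shift on ia32)
      }
      if (divisor < 0) result = -result;                                 // neg result
      return result;
    }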
| 1016 void LCodeGen::DoDivByConstI(LDivByConstI* instr) { | 1014 void LCodeGen::DoDivByConstI(LDivByConstI* instr) { |
| 1017 Register dividend = ToRegister(instr->dividend()); | 1015 Register dividend = ToRegister(instr->dividend()); |
| 1018 int32_t divisor = instr->divisor(); | 1016 int32_t divisor = instr->divisor(); |
| 1019 DCHECK(ToRegister(instr->result()).is(edx)); | 1017 DCHECK(ToRegister(instr->result()).is(edx)); |
| 1020 | 1018 |
| 1021 if (divisor == 0) { | 1019 if (divisor == 0) { |
| 1022 DeoptimizeIf(no_condition, instr, Deoptimizer::kDivisionByZero); | 1020 DeoptimizeIf(no_condition, instr, DeoptimizeReason::kDivisionByZero); |
| 1023 return; | 1021 return; |
| 1024 } | 1022 } |
| 1025 | 1023 |
| 1026 // Check for (0 / -x) that will produce negative zero. | 1024 // Check for (0 / -x) that will produce negative zero. |
| 1027 HDiv* hdiv = instr->hydrogen(); | 1025 HDiv* hdiv = instr->hydrogen(); |
| 1028 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) { | 1026 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) { |
| 1029 __ test(dividend, dividend); | 1027 __ test(dividend, dividend); |
| 1030 DeoptimizeIf(zero, instr, Deoptimizer::kMinusZero); | 1028 DeoptimizeIf(zero, instr, DeoptimizeReason::kMinusZero); |
| 1031 } | 1029 } |
| 1032 | 1030 |
| 1033 __ TruncatingDiv(dividend, Abs(divisor)); | 1031 __ TruncatingDiv(dividend, Abs(divisor)); |
| 1034 if (divisor < 0) __ neg(edx); | 1032 if (divisor < 0) __ neg(edx); |
| 1035 | 1033 |
| 1036 if (!hdiv->CheckFlag(HInstruction::kAllUsesTruncatingToInt32)) { | 1034 if (!hdiv->CheckFlag(HInstruction::kAllUsesTruncatingToInt32)) { |
| 1037 __ mov(eax, edx); | 1035 __ mov(eax, edx); |
| 1038 __ imul(eax, eax, divisor); | 1036 __ imul(eax, eax, divisor); |
| 1039 __ sub(eax, dividend); | 1037 __ sub(eax, dividend); |
| 1040 DeoptimizeIf(not_equal, instr, Deoptimizer::kLostPrecision); | 1038 DeoptimizeIf(not_equal, instr, DeoptimizeReason::kLostPrecision); |
| 1041 } | 1039 } |
| 1042 } | 1040 } |
| 1043 | 1041 |
| 1044 | 1042 |
| 1045 // TODO(svenpanne) Refactor this to avoid code duplication with DoFlooringDivI. | 1043 // TODO(svenpanne) Refactor this to avoid code duplication with DoFlooringDivI. |
| 1046 void LCodeGen::DoDivI(LDivI* instr) { | 1044 void LCodeGen::DoDivI(LDivI* instr) { |
| 1047 HBinaryOperation* hdiv = instr->hydrogen(); | 1045 HBinaryOperation* hdiv = instr->hydrogen(); |
| 1048 Register dividend = ToRegister(instr->dividend()); | 1046 Register dividend = ToRegister(instr->dividend()); |
| 1049 Register divisor = ToRegister(instr->divisor()); | 1047 Register divisor = ToRegister(instr->divisor()); |
| 1050 Register remainder = ToRegister(instr->temp()); | 1048 Register remainder = ToRegister(instr->temp()); |
| 1051 DCHECK(dividend.is(eax)); | 1049 DCHECK(dividend.is(eax)); |
| 1052 DCHECK(remainder.is(edx)); | 1050 DCHECK(remainder.is(edx)); |
| 1053 DCHECK(ToRegister(instr->result()).is(eax)); | 1051 DCHECK(ToRegister(instr->result()).is(eax)); |
| 1054 DCHECK(!divisor.is(eax)); | 1052 DCHECK(!divisor.is(eax)); |
| 1055 DCHECK(!divisor.is(edx)); | 1053 DCHECK(!divisor.is(edx)); |
| 1056 | 1054 |
| 1057 // Check for x / 0. | 1055 // Check for x / 0. |
| 1058 if (hdiv->CheckFlag(HValue::kCanBeDivByZero)) { | 1056 if (hdiv->CheckFlag(HValue::kCanBeDivByZero)) { |
| 1059 __ test(divisor, divisor); | 1057 __ test(divisor, divisor); |
| 1060 DeoptimizeIf(zero, instr, Deoptimizer::kDivisionByZero); | 1058 DeoptimizeIf(zero, instr, DeoptimizeReason::kDivisionByZero); |
| 1061 } | 1059 } |
| 1062 | 1060 |
| 1063 // Check for (0 / -x) that will produce negative zero. | 1061 // Check for (0 / -x) that will produce negative zero. |
| 1064 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero)) { | 1062 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero)) { |
| 1065 Label dividend_not_zero; | 1063 Label dividend_not_zero; |
| 1066 __ test(dividend, dividend); | 1064 __ test(dividend, dividend); |
| 1067 __ j(not_zero, &dividend_not_zero, Label::kNear); | 1065 __ j(not_zero, &dividend_not_zero, Label::kNear); |
| 1068 __ test(divisor, divisor); | 1066 __ test(divisor, divisor); |
| 1069 DeoptimizeIf(sign, instr, Deoptimizer::kMinusZero); | 1067 DeoptimizeIf(sign, instr, DeoptimizeReason::kMinusZero); |
| 1070 __ bind(&dividend_not_zero); | 1068 __ bind(&dividend_not_zero); |
| 1071 } | 1069 } |
| 1072 | 1070 |
| 1073 // Check for (kMinInt / -1). | 1071 // Check for (kMinInt / -1). |
| 1074 if (hdiv->CheckFlag(HValue::kCanOverflow)) { | 1072 if (hdiv->CheckFlag(HValue::kCanOverflow)) { |
| 1075 Label dividend_not_min_int; | 1073 Label dividend_not_min_int; |
| 1076 __ cmp(dividend, kMinInt); | 1074 __ cmp(dividend, kMinInt); |
| 1077 __ j(not_zero, &dividend_not_min_int, Label::kNear); | 1075 __ j(not_zero, &dividend_not_min_int, Label::kNear); |
| 1078 __ cmp(divisor, -1); | 1076 __ cmp(divisor, -1); |
| 1079 DeoptimizeIf(zero, instr, Deoptimizer::kOverflow); | 1077 DeoptimizeIf(zero, instr, DeoptimizeReason::kOverflow); |
| 1080 __ bind(&dividend_not_min_int); | 1078 __ bind(&dividend_not_min_int); |
| 1081 } | 1079 } |
| 1082 | 1080 |
| 1083 // Sign extend to edx (= remainder). | 1081 // Sign extend to edx (= remainder). |
| 1084 __ cdq(); | 1082 __ cdq(); |
| 1085 __ idiv(divisor); | 1083 __ idiv(divisor); |
| 1086 | 1084 |
| 1087 if (!hdiv->CheckFlag(HValue::kAllUsesTruncatingToInt32)) { | 1085 if (!hdiv->CheckFlag(HValue::kAllUsesTruncatingToInt32)) { |
| 1088 // Deoptimize if remainder is not 0. | 1086 // Deoptimize if remainder is not 0. |
| 1089 __ test(remainder, remainder); | 1087 __ test(remainder, remainder); |
| 1090 DeoptimizeIf(not_zero, instr, Deoptimizer::kLostPrecision); | 1088 DeoptimizeIf(not_zero, instr, DeoptimizeReason::kLostPrecision); |
| 1091 } | 1089 } |
| 1092 } | 1090 } |
| 1093 | 1091 |
| 1094 | 1092 |
| 1095 void LCodeGen::DoFlooringDivByPowerOf2I(LFlooringDivByPowerOf2I* instr) { | 1093 void LCodeGen::DoFlooringDivByPowerOf2I(LFlooringDivByPowerOf2I* instr) { |
| 1096 Register dividend = ToRegister(instr->dividend()); | 1094 Register dividend = ToRegister(instr->dividend()); |
| 1097 int32_t divisor = instr->divisor(); | 1095 int32_t divisor = instr->divisor(); |
| 1098 DCHECK(dividend.is(ToRegister(instr->result()))); | 1096 DCHECK(dividend.is(ToRegister(instr->result()))); |
| 1099 | 1097 |
| 1100 // If the divisor is positive, things are easy: There can be no deopts and we | 1098 // If the divisor is positive, things are easy: There can be no deopts and we |
| 1101 // can simply do an arithmetic right shift. | 1099 // can simply do an arithmetic right shift. |
| 1102 if (divisor == 1) return; | 1100 if (divisor == 1) return; |
| 1103 int32_t shift = WhichPowerOf2Abs(divisor); | 1101 int32_t shift = WhichPowerOf2Abs(divisor); |
| 1104 if (divisor > 1) { | 1102 if (divisor > 1) { |
| 1105 __ sar(dividend, shift); | 1103 __ sar(dividend, shift); |
| 1106 return; | 1104 return; |
| 1107 } | 1105 } |
| 1108 | 1106 |
| 1109 // If the divisor is negative, we have to negate and handle edge cases. | 1107 // If the divisor is negative, we have to negate and handle edge cases. |
| 1110 __ neg(dividend); | 1108 __ neg(dividend); |
| 1111 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 1109 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
| 1112 DeoptimizeIf(zero, instr, Deoptimizer::kMinusZero); | 1110 DeoptimizeIf(zero, instr, DeoptimizeReason::kMinusZero); |
| 1113 } | 1111 } |
| 1114 | 1112 |
| 1115 // Dividing by -1 is basically negation, unless we overflow. | 1113 // Dividing by -1 is basically negation, unless we overflow. |
| 1116 if (divisor == -1) { | 1114 if (divisor == -1) { |
| 1117 if (instr->hydrogen()->CheckFlag(HValue::kLeftCanBeMinInt)) { | 1115 if (instr->hydrogen()->CheckFlag(HValue::kLeftCanBeMinInt)) { |
| 1118 DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow); | 1116 DeoptimizeIf(overflow, instr, DeoptimizeReason::kOverflow); |
| 1119 } | 1117 } |
| 1120 return; | 1118 return; |
| 1121 } | 1119 } |
| 1122 | 1120 |
| 1123 // If the negation could not overflow, simply shifting is OK. | 1121 // If the negation could not overflow, simply shifting is OK. |
| 1124 if (!instr->hydrogen()->CheckFlag(HValue::kLeftCanBeMinInt)) { | 1122 if (!instr->hydrogen()->CheckFlag(HValue::kLeftCanBeMinInt)) { |
| 1125 __ sar(dividend, shift); | 1123 __ sar(dividend, shift); |
| 1126 return; | 1124 return; |
| 1127 } | 1125 } |
| 1128 | 1126 |
| 1129 Label not_kmin_int, done; | 1127 Label not_kmin_int, done; |
| 1130 __ j(no_overflow, &not_kmin_int, Label::kNear); | 1128 __ j(no_overflow, &not_kmin_int, Label::kNear); |
| 1131 __ mov(dividend, Immediate(kMinInt / divisor)); | 1129 __ mov(dividend, Immediate(kMinInt / divisor)); |
| 1132 __ jmp(&done, Label::kNear); | 1130 __ jmp(&done, Label::kNear); |
| 1133 __ bind(&not_kmin_int); | 1131 __ bind(&not_kmin_int); |
| 1134 __ sar(dividend, shift); | 1132 __ sar(dividend, shift); |
| 1135 __ bind(&done); | 1133 __ bind(&done); |
| 1136 } | 1134 } |
| 1137 | 1135 |
| 1138 | 1136 |
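Note on DoFlooringDivByPowerOf2I above: for a positive divisor a single sar already floors, which is why that path returns early; for a negative divisor the code relies on floor(x / -2^shift) == floor(-x / 2^shift), i.e. negate then shift, with kMinInt (the one value whose negation overflows) patched up from a compile-time constant. A standalone sketch of the negative-divisor path, not V8 code:

    #include <cstdint>

    // Flooring division for divisor == -(1 << shift), shift >= 1. Assumes the
    // -0 deopt for dividend == 0 has already been handled and that right shifts
    // of negative values are arithmetic, as they are on ia32.
    int32_t FlooringDivByNegativePowerOf2(int32_t dividend, int shift) {
      int32_t divisor = static_cast<int32_t>(0u - (1u << shift));  // -(2^shift), valid up to shift == 31
      if (dividend == INT32_MIN) {
        // neg would overflow; the quotient is exact here, so the generated code
        // simply loads kMinInt / divisor as an immediate.
        return INT32_MIN / divisor;
      }
      return -dividend >> shift;  // neg + sar: floor(-x / 2^shift) == floor(x / divisor)
    }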
| 1139 void LCodeGen::DoFlooringDivByConstI(LFlooringDivByConstI* instr) { | 1137 void LCodeGen::DoFlooringDivByConstI(LFlooringDivByConstI* instr) { |
| 1140 Register dividend = ToRegister(instr->dividend()); | 1138 Register dividend = ToRegister(instr->dividend()); |
| 1141 int32_t divisor = instr->divisor(); | 1139 int32_t divisor = instr->divisor(); |
| 1142 DCHECK(ToRegister(instr->result()).is(edx)); | 1140 DCHECK(ToRegister(instr->result()).is(edx)); |
| 1143 | 1141 |
| 1144 if (divisor == 0) { | 1142 if (divisor == 0) { |
| 1145 DeoptimizeIf(no_condition, instr, Deoptimizer::kDivisionByZero); | 1143 DeoptimizeIf(no_condition, instr, DeoptimizeReason::kDivisionByZero); |
| 1146 return; | 1144 return; |
| 1147 } | 1145 } |
| 1148 | 1146 |
| 1149 // Check for (0 / -x) that will produce negative zero. | 1147 // Check for (0 / -x) that will produce negative zero. |
| 1150 HMathFloorOfDiv* hdiv = instr->hydrogen(); | 1148 HMathFloorOfDiv* hdiv = instr->hydrogen(); |
| 1151 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) { | 1149 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) { |
| 1152 __ test(dividend, dividend); | 1150 __ test(dividend, dividend); |
| 1153 DeoptimizeIf(zero, instr, Deoptimizer::kMinusZero); | 1151 DeoptimizeIf(zero, instr, DeoptimizeReason::kMinusZero); |
| 1154 } | 1152 } |
| 1155 | 1153 |
| 1156 // Easy case: We need no dynamic check for the dividend and the flooring | 1154 // Easy case: We need no dynamic check for the dividend and the flooring |
| 1157 // division is the same as the truncating division. | 1155 // division is the same as the truncating division. |
| 1158 if ((divisor > 0 && !hdiv->CheckFlag(HValue::kLeftCanBeNegative)) || | 1156 if ((divisor > 0 && !hdiv->CheckFlag(HValue::kLeftCanBeNegative)) || |
| 1159 (divisor < 0 && !hdiv->CheckFlag(HValue::kLeftCanBePositive))) { | 1157 (divisor < 0 && !hdiv->CheckFlag(HValue::kLeftCanBePositive))) { |
| 1160 __ TruncatingDiv(dividend, Abs(divisor)); | 1158 __ TruncatingDiv(dividend, Abs(divisor)); |
| 1161 if (divisor < 0) __ neg(edx); | 1159 if (divisor < 0) __ neg(edx); |
| 1162 return; | 1160 return; |
| 1163 } | 1161 } |
| (...skipping 26 matching lines...) |
| 1190 Register result = ToRegister(instr->result()); | 1188 Register result = ToRegister(instr->result()); |
| 1191 DCHECK(dividend.is(eax)); | 1189 DCHECK(dividend.is(eax)); |
| 1192 DCHECK(remainder.is(edx)); | 1190 DCHECK(remainder.is(edx)); |
| 1193 DCHECK(result.is(eax)); | 1191 DCHECK(result.is(eax)); |
| 1194 DCHECK(!divisor.is(eax)); | 1192 DCHECK(!divisor.is(eax)); |
| 1195 DCHECK(!divisor.is(edx)); | 1193 DCHECK(!divisor.is(edx)); |
| 1196 | 1194 |
| 1197 // Check for x / 0. | 1195 // Check for x / 0. |
| 1198 if (hdiv->CheckFlag(HValue::kCanBeDivByZero)) { | 1196 if (hdiv->CheckFlag(HValue::kCanBeDivByZero)) { |
| 1199 __ test(divisor, divisor); | 1197 __ test(divisor, divisor); |
| 1200 DeoptimizeIf(zero, instr, Deoptimizer::kDivisionByZero); | 1198 DeoptimizeIf(zero, instr, DeoptimizeReason::kDivisionByZero); |
| 1201 } | 1199 } |
| 1202 | 1200 |
| 1203 // Check for (0 / -x) that will produce negative zero. | 1201 // Check for (0 / -x) that will produce negative zero. |
| 1204 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero)) { | 1202 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero)) { |
| 1205 Label dividend_not_zero; | 1203 Label dividend_not_zero; |
| 1206 __ test(dividend, dividend); | 1204 __ test(dividend, dividend); |
| 1207 __ j(not_zero, &dividend_not_zero, Label::kNear); | 1205 __ j(not_zero, &dividend_not_zero, Label::kNear); |
| 1208 __ test(divisor, divisor); | 1206 __ test(divisor, divisor); |
| 1209 DeoptimizeIf(sign, instr, Deoptimizer::kMinusZero); | 1207 DeoptimizeIf(sign, instr, DeoptimizeReason::kMinusZero); |
| 1210 __ bind(&dividend_not_zero); | 1208 __ bind(&dividend_not_zero); |
| 1211 } | 1209 } |
| 1212 | 1210 |
| 1213 // Check for (kMinInt / -1). | 1211 // Check for (kMinInt / -1). |
| 1214 if (hdiv->CheckFlag(HValue::kCanOverflow)) { | 1212 if (hdiv->CheckFlag(HValue::kCanOverflow)) { |
| 1215 Label dividend_not_min_int; | 1213 Label dividend_not_min_int; |
| 1216 __ cmp(dividend, kMinInt); | 1214 __ cmp(dividend, kMinInt); |
| 1217 __ j(not_zero, &dividend_not_min_int, Label::kNear); | 1215 __ j(not_zero, &dividend_not_min_int, Label::kNear); |
| 1218 __ cmp(divisor, -1); | 1216 __ cmp(divisor, -1); |
| 1219 DeoptimizeIf(zero, instr, Deoptimizer::kOverflow); | 1217 DeoptimizeIf(zero, instr, DeoptimizeReason::kOverflow); |
| 1220 __ bind(&dividend_not_min_int); | 1218 __ bind(&dividend_not_min_int); |
| 1221 } | 1219 } |
| 1222 | 1220 |
| 1223 // Sign extend to edx (= remainder). | 1221 // Sign extend to edx (= remainder). |
| 1224 __ cdq(); | 1222 __ cdq(); |
| 1225 __ idiv(divisor); | 1223 __ idiv(divisor); |
| 1226 | 1224 |
| 1227 Label done; | 1225 Label done; |
| 1228 __ test(remainder, remainder); | 1226 __ test(remainder, remainder); |
| 1229 __ j(zero, &done, Label::kNear); | 1227 __ j(zero, &done, Label::kNear); |
| (...skipping 57 matching lines...) |
| 1287 __ imul(left, left, constant); | 1285 __ imul(left, left, constant); |
| 1288 } | 1286 } |
| 1289 } else { | 1287 } else { |
| 1290 if (instr->hydrogen()->representation().IsSmi()) { | 1288 if (instr->hydrogen()->representation().IsSmi()) { |
| 1291 __ SmiUntag(left); | 1289 __ SmiUntag(left); |
| 1292 } | 1290 } |
| 1293 __ imul(left, ToOperand(right)); | 1291 __ imul(left, ToOperand(right)); |
| 1294 } | 1292 } |
| 1295 | 1293 |
| 1296 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) { | 1294 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) { |
| 1297 DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow); | 1295 DeoptimizeIf(overflow, instr, DeoptimizeReason::kOverflow); |
| 1298 } | 1296 } |
| 1299 | 1297 |
| 1300 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 1298 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
| 1301 // Bail out if the result is supposed to be negative zero. | 1299 // Bail out if the result is supposed to be negative zero. |
| 1302 Label done; | 1300 Label done; |
| 1303 __ test(left, Operand(left)); | 1301 __ test(left, Operand(left)); |
| 1304 __ j(not_zero, &done, Label::kNear); | 1302 __ j(not_zero, &done, Label::kNear); |
| 1305 if (right->IsConstantOperand()) { | 1303 if (right->IsConstantOperand()) { |
| 1306 if (ToInteger32(LConstantOperand::cast(right)) < 0) { | 1304 if (ToInteger32(LConstantOperand::cast(right)) < 0) { |
| 1307 DeoptimizeIf(no_condition, instr, Deoptimizer::kMinusZero); | 1305 DeoptimizeIf(no_condition, instr, DeoptimizeReason::kMinusZero); |
| 1308 } else if (ToInteger32(LConstantOperand::cast(right)) == 0) { | 1306 } else if (ToInteger32(LConstantOperand::cast(right)) == 0) { |
| 1309 __ cmp(ToRegister(instr->temp()), Immediate(0)); | 1307 __ cmp(ToRegister(instr->temp()), Immediate(0)); |
| 1310 DeoptimizeIf(less, instr, Deoptimizer::kMinusZero); | 1308 DeoptimizeIf(less, instr, DeoptimizeReason::kMinusZero); |
| 1311 } | 1309 } |
| 1312 } else { | 1310 } else { |
| 1313 // Test the non-zero operand for negative sign. | 1311 // Test the non-zero operand for negative sign. |
| 1314 __ or_(ToRegister(instr->temp()), ToOperand(right)); | 1312 __ or_(ToRegister(instr->temp()), ToOperand(right)); |
| 1315 DeoptimizeIf(sign, instr, Deoptimizer::kMinusZero); | 1313 DeoptimizeIf(sign, instr, DeoptimizeReason::kMinusZero); |
| 1316 } | 1314 } |
| 1317 __ bind(&done); | 1315 __ bind(&done); |
| 1318 } | 1316 } |
| 1319 } | 1317 } |
| 1320 | 1318 |
| 1321 | 1319 |
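Note on the minus-zero check at the end of the multiply lowering above (DoMulI): once the (already overflow-checked) int32 product is zero, it stands for JS -0 exactly when the other factor was negative, which is what the "or temp, right; deopt on sign" sequence tests — temp still holds the original left operand. A standalone sketch of the predicate, not V8 code:

    #include <cstdint>

    // True where DoMulI would deoptimize because the int32 product 0 really
    // represents -0 in JS. The overflow deopt fires first, so a zero product
    // implies one of the factors is zero.
    bool MulProducesMinusZero(int32_t left, int32_t right) {
      if (left != 0 && right != 0) return false;  // non-zero product: nothing to check
      return (left | right) < 0;                  // one factor is 0; is the other negative?
    }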
| 1322 void LCodeGen::DoBitI(LBitI* instr) { | 1320 void LCodeGen::DoBitI(LBitI* instr) { |
| 1323 LOperand* left = instr->left(); | 1321 LOperand* left = instr->left(); |
| 1324 LOperand* right = instr->right(); | 1322 LOperand* right = instr->right(); |
| 1325 DCHECK(left->Equals(instr->result())); | 1323 DCHECK(left->Equals(instr->result())); |
| (...skipping 52 matching lines...) |
| 1378 case Token::ROR: | 1376 case Token::ROR: |
| 1379 __ ror_cl(ToRegister(left)); | 1377 __ ror_cl(ToRegister(left)); |
| 1380 break; | 1378 break; |
| 1381 case Token::SAR: | 1379 case Token::SAR: |
| 1382 __ sar_cl(ToRegister(left)); | 1380 __ sar_cl(ToRegister(left)); |
| 1383 break; | 1381 break; |
| 1384 case Token::SHR: | 1382 case Token::SHR: |
| 1385 __ shr_cl(ToRegister(left)); | 1383 __ shr_cl(ToRegister(left)); |
| 1386 if (instr->can_deopt()) { | 1384 if (instr->can_deopt()) { |
| 1387 __ test(ToRegister(left), ToRegister(left)); | 1385 __ test(ToRegister(left), ToRegister(left)); |
| 1388 DeoptimizeIf(sign, instr, Deoptimizer::kNegativeValue); | 1386 DeoptimizeIf(sign, instr, DeoptimizeReason::kNegativeValue); |
| 1389 } | 1387 } |
| 1390 break; | 1388 break; |
| 1391 case Token::SHL: | 1389 case Token::SHL: |
| 1392 __ shl_cl(ToRegister(left)); | 1390 __ shl_cl(ToRegister(left)); |
| 1393 break; | 1391 break; |
| 1394 default: | 1392 default: |
| 1395 UNREACHABLE(); | 1393 UNREACHABLE(); |
| 1396 break; | 1394 break; |
| 1397 } | 1395 } |
| 1398 } else { | 1396 } else { |
| 1399 int value = ToInteger32(LConstantOperand::cast(right)); | 1397 int value = ToInteger32(LConstantOperand::cast(right)); |
| 1400 uint8_t shift_count = static_cast<uint8_t>(value & 0x1F); | 1398 uint8_t shift_count = static_cast<uint8_t>(value & 0x1F); |
| 1401 switch (instr->op()) { | 1399 switch (instr->op()) { |
| 1402 case Token::ROR: | 1400 case Token::ROR: |
| 1403 if (shift_count == 0 && instr->can_deopt()) { | 1401 if (shift_count == 0 && instr->can_deopt()) { |
| 1404 __ test(ToRegister(left), ToRegister(left)); | 1402 __ test(ToRegister(left), ToRegister(left)); |
| 1405 DeoptimizeIf(sign, instr, Deoptimizer::kNegativeValue); | 1403 DeoptimizeIf(sign, instr, DeoptimizeReason::kNegativeValue); |
| 1406 } else { | 1404 } else { |
| 1407 __ ror(ToRegister(left), shift_count); | 1405 __ ror(ToRegister(left), shift_count); |
| 1408 } | 1406 } |
| 1409 break; | 1407 break; |
| 1410 case Token::SAR: | 1408 case Token::SAR: |
| 1411 if (shift_count != 0) { | 1409 if (shift_count != 0) { |
| 1412 __ sar(ToRegister(left), shift_count); | 1410 __ sar(ToRegister(left), shift_count); |
| 1413 } | 1411 } |
| 1414 break; | 1412 break; |
| 1415 case Token::SHR: | 1413 case Token::SHR: |
| 1416 if (shift_count != 0) { | 1414 if (shift_count != 0) { |
| 1417 __ shr(ToRegister(left), shift_count); | 1415 __ shr(ToRegister(left), shift_count); |
| 1418 } else if (instr->can_deopt()) { | 1416 } else if (instr->can_deopt()) { |
| 1419 __ test(ToRegister(left), ToRegister(left)); | 1417 __ test(ToRegister(left), ToRegister(left)); |
| 1420 DeoptimizeIf(sign, instr, Deoptimizer::kNegativeValue); | 1418 DeoptimizeIf(sign, instr, DeoptimizeReason::kNegativeValue); |
| 1421 } | 1419 } |
| 1422 break; | 1420 break; |
| 1423 case Token::SHL: | 1421 case Token::SHL: |
| 1424 if (shift_count != 0) { | 1422 if (shift_count != 0) { |
| 1425 if (instr->hydrogen_value()->representation().IsSmi() && | 1423 if (instr->hydrogen_value()->representation().IsSmi() && |
| 1426 instr->can_deopt()) { | 1424 instr->can_deopt()) { |
| 1427 if (shift_count != 1) { | 1425 if (shift_count != 1) { |
| 1428 __ shl(ToRegister(left), shift_count - 1); | 1426 __ shl(ToRegister(left), shift_count - 1); |
| 1429 } | 1427 } |
| 1430 __ SmiTag(ToRegister(left)); | 1428 __ SmiTag(ToRegister(left)); |
| 1431 DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow); | 1429 DeoptimizeIf(overflow, instr, DeoptimizeReason::kOverflow); |
| 1432 } else { | 1430 } else { |
| 1433 __ shl(ToRegister(left), shift_count); | 1431 __ shl(ToRegister(left), shift_count); |
| 1434 } | 1432 } |
| 1435 } | 1433 } |
| 1436 break; | 1434 break; |
| 1437 default: | 1435 default: |
| 1438 UNREACHABLE(); | 1436 UNREACHABLE(); |
| 1439 break; | 1437 break; |
| 1440 } | 1438 } |
| 1441 } | 1439 } |
| 1442 } | 1440 } |
| 1443 | 1441 |
| 1444 | 1442 |
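Note on the SHR cases in the shift lowering above (DoShiftI): JS x >>> s produces a uint32, and when s == 0 a negative x maps to a value >= 2^31 that no longer fits the int32 representation being used, hence the sign test and deopt on the shift-by-zero paths. A standalone sketch of the fits-in-int32 condition, not V8 code:

    #include <cstdint>

    // False only when shift_count == 0 and value < 0, the combination the
    // generated code deoptimizes on (DeoptimizeReason::kNegativeValue).
    bool ShrResultFitsInt32(int32_t value, uint8_t shift_count) {
      uint32_t result = static_cast<uint32_t>(value) >> (shift_count & 0x1F);
      return result <= 0x7FFFFFFFu;
    }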
| 1445 void LCodeGen::DoSubI(LSubI* instr) { | 1443 void LCodeGen::DoSubI(LSubI* instr) { |
| 1446 LOperand* left = instr->left(); | 1444 LOperand* left = instr->left(); |
| 1447 LOperand* right = instr->right(); | 1445 LOperand* right = instr->right(); |
| 1448 DCHECK(left->Equals(instr->result())); | 1446 DCHECK(left->Equals(instr->result())); |
| 1449 | 1447 |
| 1450 if (right->IsConstantOperand()) { | 1448 if (right->IsConstantOperand()) { |
| 1451 __ sub(ToOperand(left), | 1449 __ sub(ToOperand(left), |
| 1452 ToImmediate(right, instr->hydrogen()->representation())); | 1450 ToImmediate(right, instr->hydrogen()->representation())); |
| 1453 } else { | 1451 } else { |
| 1454 __ sub(ToRegister(left), ToOperand(right)); | 1452 __ sub(ToRegister(left), ToOperand(right)); |
| 1455 } | 1453 } |
| 1456 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) { | 1454 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) { |
| 1457 DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow); | 1455 DeoptimizeIf(overflow, instr, DeoptimizeReason::kOverflow); |
| 1458 } | 1456 } |
| 1459 } | 1457 } |
| 1460 | 1458 |
| 1461 | 1459 |
| 1462 void LCodeGen::DoConstantI(LConstantI* instr) { | 1460 void LCodeGen::DoConstantI(LConstantI* instr) { |
| 1463 __ Move(ToRegister(instr->result()), Immediate(instr->value())); | 1461 __ Move(ToRegister(instr->result()), Immediate(instr->value())); |
| 1464 } | 1462 } |
| 1465 | 1463 |
| 1466 | 1464 |
| 1467 void LCodeGen::DoConstantS(LConstantS* instr) { | 1465 void LCodeGen::DoConstantS(LConstantS* instr) { |
| (...skipping 151 matching lines...) |
| 1619 __ lea(ToRegister(instr->result()), address); | 1617 __ lea(ToRegister(instr->result()), address); |
| 1620 } | 1618 } |
| 1621 } else { | 1619 } else { |
| 1622 if (right->IsConstantOperand()) { | 1620 if (right->IsConstantOperand()) { |
| 1623 __ add(ToOperand(left), | 1621 __ add(ToOperand(left), |
| 1624 ToImmediate(right, instr->hydrogen()->representation())); | 1622 ToImmediate(right, instr->hydrogen()->representation())); |
| 1625 } else { | 1623 } else { |
| 1626 __ add(ToRegister(left), ToOperand(right)); | 1624 __ add(ToRegister(left), ToOperand(right)); |
| 1627 } | 1625 } |
| 1628 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) { | 1626 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) { |
| 1629 DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow); | 1627 DeoptimizeIf(overflow, instr, DeoptimizeReason::kOverflow); |
| 1630 } | 1628 } |
| 1631 } | 1629 } |
| 1632 } | 1630 } |
| 1633 | 1631 |
| 1634 | 1632 |
| 1635 void LCodeGen::DoMathMinMax(LMathMinMax* instr) { | 1633 void LCodeGen::DoMathMinMax(LMathMinMax* instr) { |
| 1636 LOperand* left = instr->left(); | 1634 LOperand* left = instr->left(); |
| 1637 LOperand* right = instr->right(); | 1635 LOperand* right = instr->right(); |
| 1638 DCHECK(left->Equals(instr->result())); | 1636 DCHECK(left->Equals(instr->result())); |
| 1639 HMathMinMax::Operation operation = instr->hydrogen()->operation(); | 1637 HMathMinMax::Operation operation = instr->hydrogen()->operation(); |
| (...skipping 238 matching lines...) |
| 1878 } | 1876 } |
| 1879 | 1877 |
| 1880 if (expected.Contains(ToBooleanICStub::SMI)) { | 1878 if (expected.Contains(ToBooleanICStub::SMI)) { |
| 1881 // Smis: 0 -> false, all other -> true. | 1879 // Smis: 0 -> false, all other -> true. |
| 1882 __ test(reg, Operand(reg)); | 1880 __ test(reg, Operand(reg)); |
| 1883 __ j(equal, instr->FalseLabel(chunk_)); | 1881 __ j(equal, instr->FalseLabel(chunk_)); |
| 1884 __ JumpIfSmi(reg, instr->TrueLabel(chunk_)); | 1882 __ JumpIfSmi(reg, instr->TrueLabel(chunk_)); |
| 1885 } else if (expected.NeedsMap()) { | 1883 } else if (expected.NeedsMap()) { |
| 1886 // If we need a map later and have a Smi -> deopt. | 1884 // If we need a map later and have a Smi -> deopt. |
| 1887 __ test(reg, Immediate(kSmiTagMask)); | 1885 __ test(reg, Immediate(kSmiTagMask)); |
| 1888 DeoptimizeIf(zero, instr, Deoptimizer::kSmi); | 1886 DeoptimizeIf(zero, instr, DeoptimizeReason::kSmi); |
| 1889 } | 1887 } |
| 1890 | 1888 |
| 1891 Register map = no_reg; // Keep the compiler happy. | 1889 Register map = no_reg; // Keep the compiler happy. |
| 1892 if (expected.NeedsMap()) { | 1890 if (expected.NeedsMap()) { |
| 1893 map = ToRegister(instr->temp()); | 1891 map = ToRegister(instr->temp()); |
| 1894 DCHECK(!map.is(reg)); | 1892 DCHECK(!map.is(reg)); |
| 1895 __ mov(map, FieldOperand(reg, HeapObject::kMapOffset)); | 1893 __ mov(map, FieldOperand(reg, HeapObject::kMapOffset)); |
| 1896 | 1894 |
| 1897 if (expected.CanBeUndetectable()) { | 1895 if (expected.CanBeUndetectable()) { |
| 1898 // Undetectable -> false. | 1896 // Undetectable -> false. |
| (...skipping 42 matching lines...) |
| 1941 __ xorps(xmm_scratch, xmm_scratch); | 1939 __ xorps(xmm_scratch, xmm_scratch); |
| 1942 __ ucomisd(xmm_scratch, FieldOperand(reg, HeapNumber::kValueOffset)); | 1940 __ ucomisd(xmm_scratch, FieldOperand(reg, HeapNumber::kValueOffset)); |
| 1943 __ j(zero, instr->FalseLabel(chunk_)); | 1941 __ j(zero, instr->FalseLabel(chunk_)); |
| 1944 __ jmp(instr->TrueLabel(chunk_)); | 1942 __ jmp(instr->TrueLabel(chunk_)); |
| 1945 __ bind(&not_heap_number); | 1943 __ bind(&not_heap_number); |
| 1946 } | 1944 } |
| 1947 | 1945 |
| 1948 if (!expected.IsGeneric()) { | 1946 if (!expected.IsGeneric()) { |
| 1949 // We've seen something for the first time -> deopt. | 1947 // We've seen something for the first time -> deopt. |
| 1950 // This can only happen if we are not generic already. | 1948 // This can only happen if we are not generic already. |
| 1951 DeoptimizeIf(no_condition, instr, Deoptimizer::kUnexpectedObject); | 1949 DeoptimizeIf(no_condition, instr, DeoptimizeReason::kUnexpectedObject); |
| 1952 } | 1950 } |
| 1953 } | 1951 } |
| 1954 } | 1952 } |
| 1955 } | 1953 } |
| 1956 | 1954 |
| 1957 | 1955 |
| 1958 void LCodeGen::EmitGoto(int block) { | 1956 void LCodeGen::EmitGoto(int block) { |
| 1959 if (!IsNextEmittedBlock(block)) { | 1957 if (!IsNextEmittedBlock(block)) { |
| 1960 __ jmp(chunk_->GetAssemblyLabel(LookupDestination(block))); | 1958 __ jmp(chunk_->GetAssemblyLabel(LookupDestination(block))); |
| 1961 } | 1959 } |
| (...skipping 337 matching lines...) |
| 2299 } | 2297 } |
| 2300 | 2298 |
| 2301 // Loop through the {object}s prototype chain looking for the {prototype}. | 2299 // Loop through the {object}s prototype chain looking for the {prototype}. |
| 2302 __ mov(object_map, FieldOperand(object, HeapObject::kMapOffset)); | 2300 __ mov(object_map, FieldOperand(object, HeapObject::kMapOffset)); |
| 2303 Label loop; | 2301 Label loop; |
| 2304 __ bind(&loop); | 2302 __ bind(&loop); |
| 2305 | 2303 |
| 2306 // Deoptimize if the object needs to be access checked. | 2304 // Deoptimize if the object needs to be access checked. |
| 2307 __ test_b(FieldOperand(object_map, Map::kBitFieldOffset), | 2305 __ test_b(FieldOperand(object_map, Map::kBitFieldOffset), |
| 2308 Immediate(1 << Map::kIsAccessCheckNeeded)); | 2306 Immediate(1 << Map::kIsAccessCheckNeeded)); |
| 2309 DeoptimizeIf(not_zero, instr, Deoptimizer::kAccessCheck); | 2307 DeoptimizeIf(not_zero, instr, DeoptimizeReason::kAccessCheck); |
| 2310 // Deoptimize for proxies. | 2308 // Deoptimize for proxies. |
| 2311 __ CmpInstanceType(object_map, JS_PROXY_TYPE); | 2309 __ CmpInstanceType(object_map, JS_PROXY_TYPE); |
| 2312 DeoptimizeIf(equal, instr, Deoptimizer::kProxy); | 2310 DeoptimizeIf(equal, instr, DeoptimizeReason::kProxy); |
| 2313 | 2311 |
| 2314 __ mov(object_prototype, FieldOperand(object_map, Map::kPrototypeOffset)); | 2312 __ mov(object_prototype, FieldOperand(object_map, Map::kPrototypeOffset)); |
| 2315 __ cmp(object_prototype, factory()->null_value()); | 2313 __ cmp(object_prototype, factory()->null_value()); |
| 2316 EmitFalseBranch(instr, equal); | 2314 EmitFalseBranch(instr, equal); |
| 2317 __ cmp(object_prototype, prototype); | 2315 __ cmp(object_prototype, prototype); |
| 2318 EmitTrueBranch(instr, equal); | 2316 EmitTrueBranch(instr, equal); |
| 2319 __ mov(object_map, FieldOperand(object_prototype, HeapObject::kMapOffset)); | 2317 __ mov(object_map, FieldOperand(object_prototype, HeapObject::kMapOffset)); |
| 2320 __ jmp(&loop); | 2318 __ jmp(&loop); |
| 2321 } | 2319 } |
| 2322 | 2320 |
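Note on the prototype walk above (DoHasInPrototypeChainAndBranch): the loop follows map->prototype until it hits null (false) or the target prototype (true), and it deoptimizes for access-checked objects and proxies because the fast walk cannot consult them. A standalone sketch of the control flow with minimal stand-in types — not V8's object model, the field names only mirror what the generated code reads:

    struct Map;
    struct HeapObject { Map* map; };
    struct Map {
      bool is_access_check_needed;  // Map::kIsAccessCheckNeeded bit
      bool is_js_proxy;             // instance type == JS_PROXY_TYPE
      HeapObject* prototype;        // nullptr stands for the null sentinel
    };

    enum class Result { kTrue, kFalse, kDeopt };

    Result HasInPrototypeChain(HeapObject* object, HeapObject* prototype) {
      Map* object_map = object->map;
      while (true) {
        if (object_map->is_access_check_needed) return Result::kDeopt;  // kAccessCheck
        if (object_map->is_js_proxy) return Result::kDeopt;             // kProxy
        HeapObject* object_prototype = object_map->prototype;
        if (object_prototype == nullptr) return Result::kFalse;
        if (object_prototype == prototype) return Result::kTrue;
        object_map = object_prototype->map;
      }
    }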
| (...skipping 101 matching lines...) |
| 2424 | 2422 |
| 2425 | 2423 |
| 2426 void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) { | 2424 void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) { |
| 2427 Register context = ToRegister(instr->context()); | 2425 Register context = ToRegister(instr->context()); |
| 2428 Register result = ToRegister(instr->result()); | 2426 Register result = ToRegister(instr->result()); |
| 2429 __ mov(result, ContextOperand(context, instr->slot_index())); | 2427 __ mov(result, ContextOperand(context, instr->slot_index())); |
| 2430 | 2428 |
| 2431 if (instr->hydrogen()->RequiresHoleCheck()) { | 2429 if (instr->hydrogen()->RequiresHoleCheck()) { |
| 2432 __ cmp(result, factory()->the_hole_value()); | 2430 __ cmp(result, factory()->the_hole_value()); |
| 2433 if (instr->hydrogen()->DeoptimizesOnHole()) { | 2431 if (instr->hydrogen()->DeoptimizesOnHole()) { |
| 2434 DeoptimizeIf(equal, instr, Deoptimizer::kHole); | 2432 DeoptimizeIf(equal, instr, DeoptimizeReason::kHole); |
| 2435 } else { | 2433 } else { |
| 2436 Label is_not_hole; | 2434 Label is_not_hole; |
| 2437 __ j(not_equal, &is_not_hole, Label::kNear); | 2435 __ j(not_equal, &is_not_hole, Label::kNear); |
| 2438 __ mov(result, factory()->undefined_value()); | 2436 __ mov(result, factory()->undefined_value()); |
| 2439 __ bind(&is_not_hole); | 2437 __ bind(&is_not_hole); |
| 2440 } | 2438 } |
| 2441 } | 2439 } |
| 2442 } | 2440 } |
| 2443 | 2441 |
| 2444 | 2442 |
| 2445 void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) { | 2443 void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) { |
| 2446 Register context = ToRegister(instr->context()); | 2444 Register context = ToRegister(instr->context()); |
| 2447 Register value = ToRegister(instr->value()); | 2445 Register value = ToRegister(instr->value()); |
| 2448 | 2446 |
| 2449 Label skip_assignment; | 2447 Label skip_assignment; |
| 2450 | 2448 |
| 2451 Operand target = ContextOperand(context, instr->slot_index()); | 2449 Operand target = ContextOperand(context, instr->slot_index()); |
| 2452 if (instr->hydrogen()->RequiresHoleCheck()) { | 2450 if (instr->hydrogen()->RequiresHoleCheck()) { |
| 2453 __ cmp(target, factory()->the_hole_value()); | 2451 __ cmp(target, factory()->the_hole_value()); |
| 2454 if (instr->hydrogen()->DeoptimizesOnHole()) { | 2452 if (instr->hydrogen()->DeoptimizesOnHole()) { |
| 2455 DeoptimizeIf(equal, instr, Deoptimizer::kHole); | 2453 DeoptimizeIf(equal, instr, DeoptimizeReason::kHole); |
| 2456 } else { | 2454 } else { |
| 2457 __ j(not_equal, &skip_assignment, Label::kNear); | 2455 __ j(not_equal, &skip_assignment, Label::kNear); |
| 2458 } | 2456 } |
| 2459 } | 2457 } |
| 2460 | 2458 |
| 2461 __ mov(target, value); | 2459 __ mov(target, value); |
| 2462 if (instr->hydrogen()->NeedsWriteBarrier()) { | 2460 if (instr->hydrogen()->NeedsWriteBarrier()) { |
| 2463 SmiCheck check_needed = | 2461 SmiCheck check_needed = |
| 2464 instr->hydrogen()->value()->type().IsHeapObject() | 2462 instr->hydrogen()->value()->type().IsHeapObject() |
| 2465 ? OMIT_SMI_CHECK : INLINE_SMI_CHECK; | 2463 ? OMIT_SMI_CHECK : INLINE_SMI_CHECK; |
| (...skipping 76 matching lines...) |
| 2542 Register function = ToRegister(instr->function()); | 2540 Register function = ToRegister(instr->function()); |
| 2543 Register temp = ToRegister(instr->temp()); | 2541 Register temp = ToRegister(instr->temp()); |
| 2544 Register result = ToRegister(instr->result()); | 2542 Register result = ToRegister(instr->result()); |
| 2545 | 2543 |
| 2546 // Get the prototype or initial map from the function. | 2544 // Get the prototype or initial map from the function. |
| 2547 __ mov(result, | 2545 __ mov(result, |
| 2548 FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset)); | 2546 FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset)); |
| 2549 | 2547 |
| 2550 // Check that the function has a prototype or an initial map. | 2548 // Check that the function has a prototype or an initial map. |
| 2551 __ cmp(Operand(result), Immediate(factory()->the_hole_value())); | 2549 __ cmp(Operand(result), Immediate(factory()->the_hole_value())); |
| 2552 DeoptimizeIf(equal, instr, Deoptimizer::kHole); | 2550 DeoptimizeIf(equal, instr, DeoptimizeReason::kHole); |
| 2553 | 2551 |
| 2554 // If the function does not have an initial map, we're done. | 2552 // If the function does not have an initial map, we're done. |
| 2555 Label done; | 2553 Label done; |
| 2556 __ CmpObjectType(result, MAP_TYPE, temp); | 2554 __ CmpObjectType(result, MAP_TYPE, temp); |
| 2557 __ j(not_equal, &done, Label::kNear); | 2555 __ j(not_equal, &done, Label::kNear); |
| 2558 | 2556 |
| 2559 // Get the prototype from the initial map. | 2557 // Get the prototype from the initial map. |
| 2560 __ mov(result, FieldOperand(result, Map::kPrototypeOffset)); | 2558 __ mov(result, FieldOperand(result, Map::kPrototypeOffset)); |
| 2561 | 2559 |
| 2562 // All done. | 2560 // All done. |
| (...skipping 63 matching lines...) |
| 2626 case UINT16_ELEMENTS: | 2624 case UINT16_ELEMENTS: |
| 2627 __ movzx_w(result, operand); | 2625 __ movzx_w(result, operand); |
| 2628 break; | 2626 break; |
| 2629 case INT32_ELEMENTS: | 2627 case INT32_ELEMENTS: |
| 2630 __ mov(result, operand); | 2628 __ mov(result, operand); |
| 2631 break; | 2629 break; |
| 2632 case UINT32_ELEMENTS: | 2630 case UINT32_ELEMENTS: |
| 2633 __ mov(result, operand); | 2631 __ mov(result, operand); |
| 2634 if (!instr->hydrogen()->CheckFlag(HInstruction::kUint32)) { | 2632 if (!instr->hydrogen()->CheckFlag(HInstruction::kUint32)) { |
| 2635 __ test(result, Operand(result)); | 2633 __ test(result, Operand(result)); |
| 2636 DeoptimizeIf(negative, instr, Deoptimizer::kNegativeValue); | 2634 DeoptimizeIf(negative, instr, DeoptimizeReason::kNegativeValue); |
| 2637 } | 2635 } |
| 2638 break; | 2636 break; |
| 2639 case FLOAT32_ELEMENTS: | 2637 case FLOAT32_ELEMENTS: |
| 2640 case FLOAT64_ELEMENTS: | 2638 case FLOAT64_ELEMENTS: |
| 2641 case FAST_SMI_ELEMENTS: | 2639 case FAST_SMI_ELEMENTS: |
| 2642 case FAST_ELEMENTS: | 2640 case FAST_ELEMENTS: |
| 2643 case FAST_DOUBLE_ELEMENTS: | 2641 case FAST_DOUBLE_ELEMENTS: |
| 2644 case FAST_HOLEY_SMI_ELEMENTS: | 2642 case FAST_HOLEY_SMI_ELEMENTS: |
| 2645 case FAST_HOLEY_ELEMENTS: | 2643 case FAST_HOLEY_ELEMENTS: |
| 2646 case FAST_HOLEY_DOUBLE_ELEMENTS: | 2644 case FAST_HOLEY_DOUBLE_ELEMENTS: |
| (...skipping 11 matching lines...) |
| 2658 | 2656 |
| 2659 | 2657 |
| 2660 void LCodeGen::DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr) { | 2658 void LCodeGen::DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr) { |
| 2661 if (instr->hydrogen()->RequiresHoleCheck()) { | 2659 if (instr->hydrogen()->RequiresHoleCheck()) { |
| 2662 Operand hole_check_operand = BuildFastArrayOperand( | 2660 Operand hole_check_operand = BuildFastArrayOperand( |
| 2663 instr->elements(), instr->key(), | 2661 instr->elements(), instr->key(), |
| 2664 instr->hydrogen()->key()->representation(), | 2662 instr->hydrogen()->key()->representation(), |
| 2665 FAST_DOUBLE_ELEMENTS, | 2663 FAST_DOUBLE_ELEMENTS, |
| 2666 instr->base_offset() + sizeof(kHoleNanLower32)); | 2664 instr->base_offset() + sizeof(kHoleNanLower32)); |
| 2667 __ cmp(hole_check_operand, Immediate(kHoleNanUpper32)); | 2665 __ cmp(hole_check_operand, Immediate(kHoleNanUpper32)); |
| 2668 DeoptimizeIf(equal, instr, Deoptimizer::kHole); | 2666 DeoptimizeIf(equal, instr, DeoptimizeReason::kHole); |
| 2669 } | 2667 } |
| 2670 | 2668 |
| 2671 Operand double_load_operand = BuildFastArrayOperand( | 2669 Operand double_load_operand = BuildFastArrayOperand( |
| 2672 instr->elements(), | 2670 instr->elements(), |
| 2673 instr->key(), | 2671 instr->key(), |
| 2674 instr->hydrogen()->key()->representation(), | 2672 instr->hydrogen()->key()->representation(), |
| 2675 FAST_DOUBLE_ELEMENTS, | 2673 FAST_DOUBLE_ELEMENTS, |
| 2676 instr->base_offset()); | 2674 instr->base_offset()); |
| 2677 XMMRegister result = ToDoubleRegister(instr->result()); | 2675 XMMRegister result = ToDoubleRegister(instr->result()); |
| 2678 __ movsd(result, double_load_operand); | 2676 __ movsd(result, double_load_operand); |
| 2679 } | 2677 } |
| 2680 | 2678 |
| 2681 | 2679 |
| 2682 void LCodeGen::DoLoadKeyedFixedArray(LLoadKeyed* instr) { | 2680 void LCodeGen::DoLoadKeyedFixedArray(LLoadKeyed* instr) { |
| 2683 Register result = ToRegister(instr->result()); | 2681 Register result = ToRegister(instr->result()); |
| 2684 | 2682 |
| 2685 // Load the result. | 2683 // Load the result. |
| 2686 __ mov(result, | 2684 __ mov(result, |
| 2687 BuildFastArrayOperand(instr->elements(), instr->key(), | 2685 BuildFastArrayOperand(instr->elements(), instr->key(), |
| 2688 instr->hydrogen()->key()->representation(), | 2686 instr->hydrogen()->key()->representation(), |
| 2689 FAST_ELEMENTS, instr->base_offset())); | 2687 FAST_ELEMENTS, instr->base_offset())); |
| 2690 | 2688 |
| 2691 // Check for the hole value. | 2689 // Check for the hole value. |
| 2692 if (instr->hydrogen()->RequiresHoleCheck()) { | 2690 if (instr->hydrogen()->RequiresHoleCheck()) { |
| 2693 if (IsFastSmiElementsKind(instr->hydrogen()->elements_kind())) { | 2691 if (IsFastSmiElementsKind(instr->hydrogen()->elements_kind())) { |
| 2694 __ test(result, Immediate(kSmiTagMask)); | 2692 __ test(result, Immediate(kSmiTagMask)); |
| 2695 DeoptimizeIf(not_equal, instr, Deoptimizer::kNotASmi); | 2693 DeoptimizeIf(not_equal, instr, DeoptimizeReason::kNotASmi); |
| 2696 } else { | 2694 } else { |
| 2697 __ cmp(result, factory()->the_hole_value()); | 2695 __ cmp(result, factory()->the_hole_value()); |
| 2698 DeoptimizeIf(equal, instr, Deoptimizer::kHole); | 2696 DeoptimizeIf(equal, instr, DeoptimizeReason::kHole); |
| 2699 } | 2697 } |
| 2700 } else if (instr->hydrogen()->hole_mode() == CONVERT_HOLE_TO_UNDEFINED) { | 2698 } else if (instr->hydrogen()->hole_mode() == CONVERT_HOLE_TO_UNDEFINED) { |
| 2701 DCHECK(instr->hydrogen()->elements_kind() == FAST_HOLEY_ELEMENTS); | 2699 DCHECK(instr->hydrogen()->elements_kind() == FAST_HOLEY_ELEMENTS); |
| 2702 Label done; | 2700 Label done; |
| 2703 __ cmp(result, factory()->the_hole_value()); | 2701 __ cmp(result, factory()->the_hole_value()); |
| 2704 __ j(not_equal, &done); | 2702 __ j(not_equal, &done); |
| 2705 if (info()->IsStub()) { | 2703 if (info()->IsStub()) { |
| 2706 // A stub can safely convert the hole to undefined only if the array | 2704 // A stub can safely convert the hole to undefined only if the array |
| 2707 // protector cell contains (Smi) Isolate::kArrayProtectorValid. | 2705 // protector cell contains (Smi) Isolate::kArrayProtectorValid. |
| 2708 // Otherwise it needs to bail out. | 2706 // Otherwise it needs to bail out. |
| 2709 __ LoadRoot(result, Heap::kArrayProtectorRootIndex); | 2707 __ LoadRoot(result, Heap::kArrayProtectorRootIndex); |
| 2710 __ cmp(FieldOperand(result, PropertyCell::kValueOffset), | 2708 __ cmp(FieldOperand(result, PropertyCell::kValueOffset), |
| 2711 Immediate(Smi::FromInt(Isolate::kArrayProtectorValid))); | 2709 Immediate(Smi::FromInt(Isolate::kArrayProtectorValid))); |
| 2712 DeoptimizeIf(not_equal, instr, Deoptimizer::kHole); | 2710 DeoptimizeIf(not_equal, instr, DeoptimizeReason::kHole); |
| 2713 } | 2711 } |
| 2714 __ mov(result, isolate()->factory()->undefined_value()); | 2712 __ mov(result, isolate()->factory()->undefined_value()); |
| 2715 __ bind(&done); | 2713 __ bind(&done); |
| 2716 } | 2714 } |
| 2717 } | 2715 } |
| 2718 | 2716 |
| 2719 | 2717 |
| 2720 void LCodeGen::DoLoadKeyed(LLoadKeyed* instr) { | 2718 void LCodeGen::DoLoadKeyed(LLoadKeyed* instr) { |
| 2721 if (instr->is_fixed_typed_array()) { | 2719 if (instr->is_fixed_typed_array()) { |
| 2722 DoLoadKeyedExternalArray(instr); | 2720 DoLoadKeyedExternalArray(instr); |
| (...skipping 129 matching lines...) |
| 2852 } | 2850 } |
| 2853 | 2851 |
| 2854 // Normal function. Replace undefined or null with global receiver. | 2852 // Normal function. Replace undefined or null with global receiver. |
| 2855 __ cmp(receiver, factory()->null_value()); | 2853 __ cmp(receiver, factory()->null_value()); |
| 2856 __ j(equal, &global_object, Label::kNear); | 2854 __ j(equal, &global_object, Label::kNear); |
| 2857 __ cmp(receiver, factory()->undefined_value()); | 2855 __ cmp(receiver, factory()->undefined_value()); |
| 2858 __ j(equal, &global_object, Label::kNear); | 2856 __ j(equal, &global_object, Label::kNear); |
| 2859 | 2857 |
| 2860 // The receiver should be a JS object. | 2858 // The receiver should be a JS object. |
| 2861 __ test(receiver, Immediate(kSmiTagMask)); | 2859 __ test(receiver, Immediate(kSmiTagMask)); |
| 2862 DeoptimizeIf(equal, instr, Deoptimizer::kSmi); | 2860 DeoptimizeIf(equal, instr, DeoptimizeReason::kSmi); |
| 2863 __ CmpObjectType(receiver, FIRST_JS_RECEIVER_TYPE, scratch); | 2861 __ CmpObjectType(receiver, FIRST_JS_RECEIVER_TYPE, scratch); |
| 2864 DeoptimizeIf(below, instr, Deoptimizer::kNotAJavaScriptObject); | 2862 DeoptimizeIf(below, instr, DeoptimizeReason::kNotAJavaScriptObject); |
| 2865 | 2863 |
| 2866 __ jmp(&receiver_ok, Label::kNear); | 2864 __ jmp(&receiver_ok, Label::kNear); |
| 2867 __ bind(&global_object); | 2865 __ bind(&global_object); |
| 2868 __ mov(receiver, FieldOperand(function, JSFunction::kContextOffset)); | 2866 __ mov(receiver, FieldOperand(function, JSFunction::kContextOffset)); |
| 2869 __ mov(receiver, ContextOperand(receiver, Context::NATIVE_CONTEXT_INDEX)); | 2867 __ mov(receiver, ContextOperand(receiver, Context::NATIVE_CONTEXT_INDEX)); |
| 2870 __ mov(receiver, ContextOperand(receiver, Context::GLOBAL_PROXY_INDEX)); | 2868 __ mov(receiver, ContextOperand(receiver, Context::GLOBAL_PROXY_INDEX)); |
| 2871 __ bind(&receiver_ok); | 2869 __ bind(&receiver_ok); |
| 2872 } | 2870 } |
| 2873 | 2871 |
| 2874 | 2872 |
| 2875 void LCodeGen::DoApplyArguments(LApplyArguments* instr) { | 2873 void LCodeGen::DoApplyArguments(LApplyArguments* instr) { |
| 2876 Register receiver = ToRegister(instr->receiver()); | 2874 Register receiver = ToRegister(instr->receiver()); |
| 2877 Register function = ToRegister(instr->function()); | 2875 Register function = ToRegister(instr->function()); |
| 2878 Register length = ToRegister(instr->length()); | 2876 Register length = ToRegister(instr->length()); |
| 2879 Register elements = ToRegister(instr->elements()); | 2877 Register elements = ToRegister(instr->elements()); |
| 2880 DCHECK(receiver.is(eax)); // Used for parameter count. | 2878 DCHECK(receiver.is(eax)); // Used for parameter count. |
| 2881 DCHECK(function.is(edi)); // Required by InvokeFunction. | 2879 DCHECK(function.is(edi)); // Required by InvokeFunction. |
| 2882 DCHECK(ToRegister(instr->result()).is(eax)); | 2880 DCHECK(ToRegister(instr->result()).is(eax)); |
| 2883 | 2881 |
| 2884 // Copy the arguments to this function possibly from the | 2882 // Copy the arguments to this function possibly from the |
| 2885 // adaptor frame below it. | 2883 // adaptor frame below it. |
| 2886 const uint32_t kArgumentsLimit = 1 * KB; | 2884 const uint32_t kArgumentsLimit = 1 * KB; |
| 2887 __ cmp(length, kArgumentsLimit); | 2885 __ cmp(length, kArgumentsLimit); |
| 2888 DeoptimizeIf(above, instr, Deoptimizer::kTooManyArguments); | 2886 DeoptimizeIf(above, instr, DeoptimizeReason::kTooManyArguments); |
| 2889 | 2887 |
| 2890 __ push(receiver); | 2888 __ push(receiver); |
| 2891 __ mov(receiver, length); | 2889 __ mov(receiver, length); |
| 2892 | 2890 |
| 2893 // Loop through the arguments pushing them onto the execution | 2891 // Loop through the arguments pushing them onto the execution |
| 2894 // stack. | 2892 // stack. |
| 2895 Label invoke, loop; | 2893 Label invoke, loop; |
| 2896 // length is a small non-negative integer, due to the test above. | 2894 // length is a small non-negative integer, due to the test above. |
| 2897 __ test(length, Operand(length)); | 2895 __ test(length, Operand(length)); |
| 2898 __ j(zero, &invoke, Label::kNear); | 2896 __ j(zero, &invoke, Label::kNear); |
| (...skipping 155 matching lines...) |
| 3054 } | 3052 } |
| 3055 generator.AfterCall(); | 3053 generator.AfterCall(); |
| 3056 } | 3054 } |
| 3057 } | 3055 } |
| 3058 | 3056 |
| 3059 | 3057 |
| 3060 void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr) { | 3058 void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr) { |
| 3061 Register input_reg = ToRegister(instr->value()); | 3059 Register input_reg = ToRegister(instr->value()); |
| 3062 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset), | 3060 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset), |
| 3063 factory()->heap_number_map()); | 3061 factory()->heap_number_map()); |
| 3064 DeoptimizeIf(not_equal, instr, Deoptimizer::kNotAHeapNumber); | 3062 DeoptimizeIf(not_equal, instr, DeoptimizeReason::kNotAHeapNumber); |
| 3065 | 3063 |
| 3066 Label slow, allocated, done; | 3064 Label slow, allocated, done; |
| 3067 uint32_t available_regs = eax.bit() | ecx.bit() | edx.bit() | ebx.bit(); | 3065 uint32_t available_regs = eax.bit() | ecx.bit() | edx.bit() | ebx.bit(); |
| 3068 available_regs &= ~input_reg.bit(); | 3066 available_regs &= ~input_reg.bit(); |
| 3069 if (instr->context()->IsRegister()) { | 3067 if (instr->context()->IsRegister()) { |
| 3070 // Make sure that the context isn't overwritten in the AllocateHeapNumber | 3068 // Make sure that the context isn't overwritten in the AllocateHeapNumber |
| 3071 // macro below. | 3069 // macro below. |
| 3072 available_regs &= ~ToRegister(instr->context()).bit(); | 3070 available_regs &= ~ToRegister(instr->context()).bit(); |
| 3073 } | 3071 } |
| 3074 | 3072 |
| (...skipping 37 matching lines...) |
| 3112 __ bind(&done); | 3110 __ bind(&done); |
| 3113 } | 3111 } |
| 3114 | 3112 |
| 3115 | 3113 |
| 3116 void LCodeGen::EmitIntegerMathAbs(LMathAbs* instr) { | 3114 void LCodeGen::EmitIntegerMathAbs(LMathAbs* instr) { |
| 3117 Register input_reg = ToRegister(instr->value()); | 3115 Register input_reg = ToRegister(instr->value()); |
| 3118 __ test(input_reg, Operand(input_reg)); | 3116 __ test(input_reg, Operand(input_reg)); |
| 3119 Label is_positive; | 3117 Label is_positive; |
| 3120 __ j(not_sign, &is_positive, Label::kNear); | 3118 __ j(not_sign, &is_positive, Label::kNear); |
| 3121 __ neg(input_reg); // Sets flags. | 3119 __ neg(input_reg); // Sets flags. |
| 3122 DeoptimizeIf(negative, instr, Deoptimizer::kOverflow); | 3120 DeoptimizeIf(negative, instr, DeoptimizeReason::kOverflow); |
| 3123 __ bind(&is_positive); | 3121 __ bind(&is_positive); |
| 3124 } | 3122 } |
| 3125 | 3123 |
| 3126 | 3124 |
| 3127 void LCodeGen::DoMathAbs(LMathAbs* instr) { | 3125 void LCodeGen::DoMathAbs(LMathAbs* instr) { |
| 3128 // Class for deferred case. | 3126 // Class for deferred case. |
| 3129 class DeferredMathAbsTaggedHeapNumber final : public LDeferredCode { | 3127 class DeferredMathAbsTaggedHeapNumber final : public LDeferredCode { |
| 3130 public: | 3128 public: |
| 3131 DeferredMathAbsTaggedHeapNumber(LCodeGen* codegen, | 3129 DeferredMathAbsTaggedHeapNumber(LCodeGen* codegen, |
| 3132 LMathAbs* instr) | 3130 LMathAbs* instr) |
| (...skipping 44 matching lines...) |
| 3177 if (CpuFeatures::IsSupported(SSE4_1)) { | 3175 if (CpuFeatures::IsSupported(SSE4_1)) { |
| 3178 CpuFeatureScope scope(masm(), SSE4_1); | 3176 CpuFeatureScope scope(masm(), SSE4_1); |
| 3179 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 3177 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
| 3180 // Deoptimize on negative zero. | 3178 // Deoptimize on negative zero. |
| 3181 Label non_zero; | 3179 Label non_zero; |
| 3182 __ xorps(xmm_scratch, xmm_scratch); // Zero the register. | 3180 __ xorps(xmm_scratch, xmm_scratch); // Zero the register. |
| 3183 __ ucomisd(input_reg, xmm_scratch); | 3181 __ ucomisd(input_reg, xmm_scratch); |
| 3184 __ j(not_equal, &non_zero, Label::kNear); | 3182 __ j(not_equal, &non_zero, Label::kNear); |
| 3185 __ movmskpd(output_reg, input_reg); | 3183 __ movmskpd(output_reg, input_reg); |
| 3186 __ test(output_reg, Immediate(1)); | 3184 __ test(output_reg, Immediate(1)); |
| 3187 DeoptimizeIf(not_zero, instr, Deoptimizer::kMinusZero); | 3185 DeoptimizeIf(not_zero, instr, DeoptimizeReason::kMinusZero); |
| 3188 __ bind(&non_zero); | 3186 __ bind(&non_zero); |
| 3189 } | 3187 } |
| 3190 __ roundsd(xmm_scratch, input_reg, kRoundDown); | 3188 __ roundsd(xmm_scratch, input_reg, kRoundDown); |
| 3191 __ cvttsd2si(output_reg, Operand(xmm_scratch)); | 3189 __ cvttsd2si(output_reg, Operand(xmm_scratch)); |
| 3192 // Overflow is signalled with minint. | 3190 // Overflow is signalled with minint. |
| 3193 __ cmp(output_reg, 0x1); | 3191 __ cmp(output_reg, 0x1); |
| 3194 DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow); | 3192 DeoptimizeIf(overflow, instr, DeoptimizeReason::kOverflow); |
| 3195 } else { | 3193 } else { |
| 3196 Label negative_sign, done; | 3194 Label negative_sign, done; |
| 3197 // Deoptimize on unordered. | 3195 // Deoptimize on unordered. |
| 3198 __ xorps(xmm_scratch, xmm_scratch); // Zero the register. | 3196 __ xorps(xmm_scratch, xmm_scratch); // Zero the register. |
| 3199 __ ucomisd(input_reg, xmm_scratch); | 3197 __ ucomisd(input_reg, xmm_scratch); |
| 3200 DeoptimizeIf(parity_even, instr, Deoptimizer::kNaN); | 3198 DeoptimizeIf(parity_even, instr, DeoptimizeReason::kNaN); |
| 3201 __ j(below, &negative_sign, Label::kNear); | 3199 __ j(below, &negative_sign, Label::kNear); |
| 3202 | 3200 |
| 3203 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 3201 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
| 3204 // Check for negative zero. | 3202 // Check for negative zero. |
| 3205 Label positive_sign; | 3203 Label positive_sign; |
| 3206 __ j(above, &positive_sign, Label::kNear); | 3204 __ j(above, &positive_sign, Label::kNear); |
| 3207 __ movmskpd(output_reg, input_reg); | 3205 __ movmskpd(output_reg, input_reg); |
| 3208 __ test(output_reg, Immediate(1)); | 3206 __ test(output_reg, Immediate(1)); |
| 3209 DeoptimizeIf(not_zero, instr, Deoptimizer::kMinusZero); | 3207 DeoptimizeIf(not_zero, instr, DeoptimizeReason::kMinusZero); |
| 3210 __ Move(output_reg, Immediate(0)); | 3208 __ Move(output_reg, Immediate(0)); |
| 3211 __ jmp(&done, Label::kNear); | 3209 __ jmp(&done, Label::kNear); |
| 3212 __ bind(&positive_sign); | 3210 __ bind(&positive_sign); |
| 3213 } | 3211 } |
| 3214 | 3212 |
| 3215 // Use truncating instruction (OK because input is positive). | 3213 // Use truncating instruction (OK because input is positive). |
| 3216 __ cvttsd2si(output_reg, Operand(input_reg)); | 3214 __ cvttsd2si(output_reg, Operand(input_reg)); |
| 3217 // Overflow is signalled with minint. | 3215 // Overflow is signalled with minint. |
| 3218 __ cmp(output_reg, 0x1); | 3216 __ cmp(output_reg, 0x1); |
| 3219 DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow); | 3217 DeoptimizeIf(overflow, instr, DeoptimizeReason::kOverflow); |
| 3220 __ jmp(&done, Label::kNear); | 3218 __ jmp(&done, Label::kNear); |
| 3221 | 3219 |
| 3222 // Non-zero negative reaches here. | 3220 // Non-zero negative reaches here. |
| 3223 __ bind(&negative_sign); | 3221 __ bind(&negative_sign); |
| 3224 // Truncate, then compare and compensate. | 3222 // Truncate, then compare and compensate. |
| 3225 __ cvttsd2si(output_reg, Operand(input_reg)); | 3223 __ cvttsd2si(output_reg, Operand(input_reg)); |
| 3226 __ Cvtsi2sd(xmm_scratch, output_reg); | 3224 __ Cvtsi2sd(xmm_scratch, output_reg); |
| 3227 __ ucomisd(input_reg, xmm_scratch); | 3225 __ ucomisd(input_reg, xmm_scratch); |
| 3228 __ j(equal, &done, Label::kNear); | 3226 __ j(equal, &done, Label::kNear); |
| 3229 __ sub(output_reg, Immediate(1)); | 3227 __ sub(output_reg, Immediate(1)); |
| 3230 DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow); | 3228 DeoptimizeIf(overflow, instr, DeoptimizeReason::kOverflow); |
| 3231 | 3229 |
| 3232 __ bind(&done); | 3230 __ bind(&done); |
| 3233 } | 3231 } |
| 3234 } | 3232 } |
| 3235 | 3233 |
| 3236 void LCodeGen::DoMathRoundD(LMathRoundD* instr) { | 3234 void LCodeGen::DoMathRoundD(LMathRoundD* instr) { |
| 3237 XMMRegister xmm_scratch = double_scratch0(); | 3235 XMMRegister xmm_scratch = double_scratch0(); |
| 3238 XMMRegister output_reg = ToDoubleRegister(instr->result()); | 3236 XMMRegister output_reg = ToDoubleRegister(instr->result()); |
| 3239 XMMRegister input_reg = ToDoubleRegister(instr->value()); | 3237 XMMRegister input_reg = ToDoubleRegister(instr->value()); |
| 3240 CpuFeatureScope scope(masm(), SSE4_1); | 3238 CpuFeatureScope scope(masm(), SSE4_1); |
| (...skipping 22 matching lines...) |
| 3263 | 3261 |
| 3264 __ movsd(xmm_scratch, Operand::StaticVariable(one_half)); | 3262 __ movsd(xmm_scratch, Operand::StaticVariable(one_half)); |
| 3265 __ ucomisd(xmm_scratch, input_reg); | 3263 __ ucomisd(xmm_scratch, input_reg); |
| 3266 __ j(above, &below_one_half, Label::kNear); | 3264 __ j(above, &below_one_half, Label::kNear); |
| 3267 | 3265 |
| 3268 // CVTTSD2SI rounds towards zero, since 0.5 <= x, we use floor(0.5 + x). | 3266 // CVTTSD2SI rounds towards zero, since 0.5 <= x, we use floor(0.5 + x). |
| 3269 __ addsd(xmm_scratch, input_reg); | 3267 __ addsd(xmm_scratch, input_reg); |
| 3270 __ cvttsd2si(output_reg, Operand(xmm_scratch)); | 3268 __ cvttsd2si(output_reg, Operand(xmm_scratch)); |
| 3271 // Overflow is signalled with minint. | 3269 // Overflow is signalled with minint. |
| 3272 __ cmp(output_reg, 0x1); | 3270 __ cmp(output_reg, 0x1); |
| 3273 DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow); | 3271 DeoptimizeIf(overflow, instr, DeoptimizeReason::kOverflow); |
| 3274 __ jmp(&done, dist); | 3272 __ jmp(&done, dist); |
| 3275 | 3273 |
| 3276 __ bind(&below_one_half); | 3274 __ bind(&below_one_half); |
| 3277 __ movsd(xmm_scratch, Operand::StaticVariable(minus_one_half)); | 3275 __ movsd(xmm_scratch, Operand::StaticVariable(minus_one_half)); |
| 3278 __ ucomisd(xmm_scratch, input_reg); | 3276 __ ucomisd(xmm_scratch, input_reg); |
| 3279 __ j(below_equal, &round_to_zero, Label::kNear); | 3277 __ j(below_equal, &round_to_zero, Label::kNear); |
| 3280 | 3278 |
| 3281 // CVTTSD2SI rounds towards zero, we use ceil(x - (-0.5)) and then | 3279 // CVTTSD2SI rounds towards zero, we use ceil(x - (-0.5)) and then |
| 3282 // compare and compensate. | 3280 // compare and compensate. |
| 3283 __ movaps(input_temp, input_reg); // Do not alter input_reg. | 3281 __ movaps(input_temp, input_reg); // Do not alter input_reg. |
| 3284 __ subsd(input_temp, xmm_scratch); | 3282 __ subsd(input_temp, xmm_scratch); |
| 3285 __ cvttsd2si(output_reg, Operand(input_temp)); | 3283 __ cvttsd2si(output_reg, Operand(input_temp)); |
| 3286 // Catch minint due to overflow, and to prevent overflow when compensating. | 3284 // Catch minint due to overflow, and to prevent overflow when compensating. |
| 3287 __ cmp(output_reg, 0x1); | 3285 __ cmp(output_reg, 0x1); |
| 3288 DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow); | 3286 DeoptimizeIf(overflow, instr, DeoptimizeReason::kOverflow); |
| 3289 | 3287 |
| 3290 __ Cvtsi2sd(xmm_scratch, output_reg); | 3288 __ Cvtsi2sd(xmm_scratch, output_reg); |
| 3291 __ ucomisd(xmm_scratch, input_temp); | 3289 __ ucomisd(xmm_scratch, input_temp); |
| 3292 __ j(equal, &done, dist); | 3290 __ j(equal, &done, dist); |
| 3293 __ sub(output_reg, Immediate(1)); | 3291 __ sub(output_reg, Immediate(1)); |
| 3294 // No overflow because we already ruled out minint. | 3292 // No overflow because we already ruled out minint. |
| 3295 __ jmp(&done, dist); | 3293 __ jmp(&done, dist); |
| 3296 | 3294 |
| 3297 __ bind(&round_to_zero); | 3295 __ bind(&round_to_zero); |
| 3298 // We return 0 for the input range [+0, 0.5[, or [-0.5, 0.5[ if | 3296 // We return 0 for the input range [+0, 0.5[, or [-0.5, 0.5[ if |
| 3299 // we can ignore the difference between a result of -0 and +0. | 3297 // we can ignore the difference between a result of -0 and +0. |
| 3300 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 3298 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
| 3301 // If the sign is positive, we return +0. | 3299 // If the sign is positive, we return +0. |
| 3302 __ movmskpd(output_reg, input_reg); | 3300 __ movmskpd(output_reg, input_reg); |
| 3303 __ test(output_reg, Immediate(1)); | 3301 __ test(output_reg, Immediate(1)); |
| 3304 DeoptimizeIf(not_zero, instr, Deoptimizer::kMinusZero); | 3302 DeoptimizeIf(not_zero, instr, DeoptimizeReason::kMinusZero); |
| 3305 } | 3303 } |
| 3306 __ Move(output_reg, Immediate(0)); | 3304 __ Move(output_reg, Immediate(0)); |
| 3307 __ bind(&done); | 3305 __ bind(&done); |
| 3308 } | 3306 } |
| 3309 | 3307 |
| 3310 | 3308 |
| 3311 void LCodeGen::DoMathFround(LMathFround* instr) { | 3309 void LCodeGen::DoMathFround(LMathFround* instr) { |
| 3312 XMMRegister input_reg = ToDoubleRegister(instr->value()); | 3310 XMMRegister input_reg = ToDoubleRegister(instr->value()); |
| 3313 XMMRegister output_reg = ToDoubleRegister(instr->result()); | 3311 XMMRegister output_reg = ToDoubleRegister(instr->result()); |
| 3314 __ cvtsd2ss(output_reg, input_reg); | 3312 __ cvtsd2ss(output_reg, input_reg); |
| (...skipping 55 matching lines...) |
| 3370 DCHECK(ToDoubleRegister(instr->result()).is(xmm3)); | 3368 DCHECK(ToDoubleRegister(instr->result()).is(xmm3)); |
| 3371 | 3369 |
| 3372 if (exponent_type.IsSmi()) { | 3370 if (exponent_type.IsSmi()) { |
| 3373 MathPowStub stub(isolate(), MathPowStub::TAGGED); | 3371 MathPowStub stub(isolate(), MathPowStub::TAGGED); |
| 3374 __ CallStub(&stub); | 3372 __ CallStub(&stub); |
| 3375 } else if (exponent_type.IsTagged()) { | 3373 } else if (exponent_type.IsTagged()) { |
| 3376 Label no_deopt; | 3374 Label no_deopt; |
| 3377 __ JumpIfSmi(tagged_exponent, &no_deopt); | 3375 __ JumpIfSmi(tagged_exponent, &no_deopt); |
| 3378 DCHECK(!ecx.is(tagged_exponent)); | 3376 DCHECK(!ecx.is(tagged_exponent)); |
| 3379 __ CmpObjectType(tagged_exponent, HEAP_NUMBER_TYPE, ecx); | 3377 __ CmpObjectType(tagged_exponent, HEAP_NUMBER_TYPE, ecx); |
| 3380 DeoptimizeIf(not_equal, instr, Deoptimizer::kNotAHeapNumber); | 3378 DeoptimizeIf(not_equal, instr, DeoptimizeReason::kNotAHeapNumber); |
| 3381 __ bind(&no_deopt); | 3379 __ bind(&no_deopt); |
| 3382 MathPowStub stub(isolate(), MathPowStub::TAGGED); | 3380 MathPowStub stub(isolate(), MathPowStub::TAGGED); |
| 3383 __ CallStub(&stub); | 3381 __ CallStub(&stub); |
| 3384 } else if (exponent_type.IsInteger32()) { | 3382 } else if (exponent_type.IsInteger32()) { |
| 3385 MathPowStub stub(isolate(), MathPowStub::INTEGER); | 3383 MathPowStub stub(isolate(), MathPowStub::INTEGER); |
| 3386 __ CallStub(&stub); | 3384 __ CallStub(&stub); |
| 3387 } else { | 3385 } else { |
| 3388 DCHECK(exponent_type.IsDouble()); | 3386 DCHECK(exponent_type.IsDouble()); |
| 3389 MathPowStub stub(isolate(), MathPowStub::DOUBLE); | 3387 MathPowStub stub(isolate(), MathPowStub::DOUBLE); |
| 3390 __ CallStub(&stub); | 3388 __ CallStub(&stub); |
| (...skipping 340 matching lines...) |
| 3731 instr->hydrogen()->index()->representation())); | 3729 instr->hydrogen()->index()->representation())); |
| 3732 } else { | 3730 } else { |
| 3733 __ cmp(ToRegister(instr->index()), ToOperand(instr->length())); | 3731 __ cmp(ToRegister(instr->index()), ToOperand(instr->length())); |
| 3734 } | 3732 } |
| 3735 if (FLAG_debug_code && instr->hydrogen()->skip_check()) { | 3733 if (FLAG_debug_code && instr->hydrogen()->skip_check()) { |
| 3736 Label done; | 3734 Label done; |
| 3737 __ j(NegateCondition(cc), &done, Label::kNear); | 3735 __ j(NegateCondition(cc), &done, Label::kNear); |
| 3738 __ int3(); | 3736 __ int3(); |
| 3739 __ bind(&done); | 3737 __ bind(&done); |
| 3740 } else { | 3738 } else { |
| 3741 DeoptimizeIf(cc, instr, Deoptimizer::kOutOfBounds); | 3739 DeoptimizeIf(cc, instr, DeoptimizeReason::kOutOfBounds); |
| 3742 } | 3740 } |
| 3743 } | 3741 } |
| 3744 | 3742 |
| 3745 | 3743 |
| 3746 void LCodeGen::DoStoreKeyedExternalArray(LStoreKeyed* instr) { | 3744 void LCodeGen::DoStoreKeyedExternalArray(LStoreKeyed* instr) { |
| 3747 ElementsKind elements_kind = instr->elements_kind(); | 3745 ElementsKind elements_kind = instr->elements_kind(); |
| 3748 LOperand* key = instr->key(); | 3746 LOperand* key = instr->key(); |
| 3749 if (!key->IsConstantOperand() && | 3747 if (!key->IsConstantOperand() && |
| 3750 ExternalArrayOpRequiresTemp(instr->hydrogen()->key()->representation(), | 3748 ExternalArrayOpRequiresTemp(instr->hydrogen()->key()->representation(), |
| 3751 elements_kind)) { | 3749 elements_kind)) { |
| (...skipping 138 matching lines...) |
| 3890 .code(); | 3888 .code(); |
| 3891 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 3889 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
| 3892 } | 3890 } |
| 3893 | 3891 |
| 3894 | 3892 |
| 3895 void LCodeGen::DoTrapAllocationMemento(LTrapAllocationMemento* instr) { | 3893 void LCodeGen::DoTrapAllocationMemento(LTrapAllocationMemento* instr) { |
| 3896 Register object = ToRegister(instr->object()); | 3894 Register object = ToRegister(instr->object()); |
| 3897 Register temp = ToRegister(instr->temp()); | 3895 Register temp = ToRegister(instr->temp()); |
| 3898 Label no_memento_found; | 3896 Label no_memento_found; |
| 3899 __ TestJSArrayForAllocationMemento(object, temp, &no_memento_found); | 3897 __ TestJSArrayForAllocationMemento(object, temp, &no_memento_found); |
| 3900 DeoptimizeIf(equal, instr, Deoptimizer::kMementoFound); | 3898 DeoptimizeIf(equal, instr, DeoptimizeReason::kMementoFound); |
| 3901 __ bind(&no_memento_found); | 3899 __ bind(&no_memento_found); |
| 3902 } | 3900 } |
| 3903 | 3901 |
| 3904 | 3902 |
| 3905 void LCodeGen::DoMaybeGrowElements(LMaybeGrowElements* instr) { | 3903 void LCodeGen::DoMaybeGrowElements(LMaybeGrowElements* instr) { |
| 3906 class DeferredMaybeGrowElements final : public LDeferredCode { | 3904 class DeferredMaybeGrowElements final : public LDeferredCode { |
| 3907 public: | 3905 public: |
| 3908 DeferredMaybeGrowElements(LCodeGen* codegen, LMaybeGrowElements* instr) | 3906 DeferredMaybeGrowElements(LCodeGen* codegen, LMaybeGrowElements* instr) |
| 3909 : LDeferredCode(codegen), instr_(instr) {} | 3907 : LDeferredCode(codegen), instr_(instr) {} |
| 3910 void Generate() override { codegen()->DoDeferredMaybeGrowElements(instr_); } | 3908 void Generate() override { codegen()->DoDeferredMaybeGrowElements(instr_); } |
| (...skipping 77 matching lines...) |
| 3988 GrowArrayElementsStub stub(isolate(), instr->hydrogen()->is_js_array(), | 3986 GrowArrayElementsStub stub(isolate(), instr->hydrogen()->is_js_array(), |
| 3989 instr->hydrogen()->kind()); | 3987 instr->hydrogen()->kind()); |
| 3990 __ CallStub(&stub); | 3988 __ CallStub(&stub); |
| 3991 RecordSafepointWithLazyDeopt( | 3989 RecordSafepointWithLazyDeopt( |
| 3992 instr, RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); | 3990 instr, RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); |
| 3993 __ StoreToSafepointRegisterSlot(result, result); | 3991 __ StoreToSafepointRegisterSlot(result, result); |
| 3994 } | 3992 } |
| 3995 | 3993 |
| 3996 // Deopt on smi, which means the elements array changed to dictionary mode. | 3994 // Deopt on smi, which means the elements array changed to dictionary mode. |
| 3997 __ test(result, Immediate(kSmiTagMask)); | 3995 __ test(result, Immediate(kSmiTagMask)); |
| 3998 DeoptimizeIf(equal, instr, Deoptimizer::kSmi); | 3996 DeoptimizeIf(equal, instr, DeoptimizeReason::kSmi); |
| 3999 } | 3997 } |
| 4000 | 3998 |
| 4001 | 3999 |
| 4002 void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) { | 4000 void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) { |
| 4003 Register object_reg = ToRegister(instr->object()); | 4001 Register object_reg = ToRegister(instr->object()); |
| 4004 | 4002 |
| 4005 Handle<Map> from_map = instr->original_map(); | 4003 Handle<Map> from_map = instr->original_map(); |
| 4006 Handle<Map> to_map = instr->transitioned_map(); | 4004 Handle<Map> to_map = instr->transitioned_map(); |
| 4007 ElementsKind from_kind = instr->from_kind(); | 4005 ElementsKind from_kind = instr->from_kind(); |
| 4008 ElementsKind to_kind = instr->to_kind(); | 4006 ElementsKind to_kind = instr->to_kind(); |
| (...skipping 319 matching lines...) |
| 4328 __ StoreToSafepointRegisterSlot(reg, eax); | 4326 __ StoreToSafepointRegisterSlot(reg, eax); |
| 4329 } | 4327 } |
| 4330 | 4328 |
| 4331 | 4329 |
| 4332 void LCodeGen::DoSmiTag(LSmiTag* instr) { | 4330 void LCodeGen::DoSmiTag(LSmiTag* instr) { |
| 4333 HChange* hchange = instr->hydrogen(); | 4331 HChange* hchange = instr->hydrogen(); |
| 4334 Register input = ToRegister(instr->value()); | 4332 Register input = ToRegister(instr->value()); |
| 4335 if (hchange->CheckFlag(HValue::kCanOverflow) && | 4333 if (hchange->CheckFlag(HValue::kCanOverflow) && |
| 4336 hchange->value()->CheckFlag(HValue::kUint32)) { | 4334 hchange->value()->CheckFlag(HValue::kUint32)) { |
| 4337 __ test(input, Immediate(0xc0000000)); | 4335 __ test(input, Immediate(0xc0000000)); |
| 4338 DeoptimizeIf(not_zero, instr, Deoptimizer::kOverflow); | 4336 DeoptimizeIf(not_zero, instr, DeoptimizeReason::kOverflow); |
| 4339 } | 4337 } |
| 4340 __ SmiTag(input); | 4338 __ SmiTag(input); |
| 4341 if (hchange->CheckFlag(HValue::kCanOverflow) && | 4339 if (hchange->CheckFlag(HValue::kCanOverflow) && |
| 4342 !hchange->value()->CheckFlag(HValue::kUint32)) { | 4340 !hchange->value()->CheckFlag(HValue::kUint32)) { |
| 4343 DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow); | 4341 DeoptimizeIf(overflow, instr, DeoptimizeReason::kOverflow); |
| 4344 } | 4342 } |
| 4345 } | 4343 } |
| 4346 | 4344 |
| 4347 | 4345 |
| 4348 void LCodeGen::DoSmiUntag(LSmiUntag* instr) { | 4346 void LCodeGen::DoSmiUntag(LSmiUntag* instr) { |
| 4349 LOperand* input = instr->value(); | 4347 LOperand* input = instr->value(); |
| 4350 Register result = ToRegister(input); | 4348 Register result = ToRegister(input); |
| 4351 DCHECK(input->IsRegister() && input->Equals(instr->result())); | 4349 DCHECK(input->IsRegister() && input->Equals(instr->result())); |
| 4352 if (instr->needs_check()) { | 4350 if (instr->needs_check()) { |
| 4353 __ test(result, Immediate(kSmiTagMask)); | 4351 __ test(result, Immediate(kSmiTagMask)); |
| 4354 DeoptimizeIf(not_zero, instr, Deoptimizer::kNotASmi); | 4352 DeoptimizeIf(not_zero, instr, DeoptimizeReason::kNotASmi); |
| 4355 } else { | 4353 } else { |
| 4356 __ AssertSmi(result); | 4354 __ AssertSmi(result); |
| 4357 } | 4355 } |
| 4358 __ SmiUntag(result); | 4356 __ SmiUntag(result); |
| 4359 } | 4357 } |
| 4360 | 4358 |
| 4361 | 4359 |
| 4362 void LCodeGen::EmitNumberUntagD(LNumberUntagD* instr, Register input_reg, | 4360 void LCodeGen::EmitNumberUntagD(LNumberUntagD* instr, Register input_reg, |
| 4363 Register temp_reg, XMMRegister result_reg, | 4361 Register temp_reg, XMMRegister result_reg, |
| 4364 NumberUntagDMode mode) { | 4362 NumberUntagDMode mode) { |
| 4365 bool can_convert_undefined_to_nan = | 4363 bool can_convert_undefined_to_nan = |
| 4366 instr->hydrogen()->can_convert_undefined_to_nan(); | 4364 instr->hydrogen()->can_convert_undefined_to_nan(); |
| 4367 bool deoptimize_on_minus_zero = instr->hydrogen()->deoptimize_on_minus_zero(); | 4365 bool deoptimize_on_minus_zero = instr->hydrogen()->deoptimize_on_minus_zero(); |
| 4368 | 4366 |
| 4369 Label convert, load_smi, done; | 4367 Label convert, load_smi, done; |
| 4370 | 4368 |
| 4371 if (mode == NUMBER_CANDIDATE_IS_ANY_TAGGED) { | 4369 if (mode == NUMBER_CANDIDATE_IS_ANY_TAGGED) { |
| 4372 // Smi check. | 4370 // Smi check. |
| 4373 __ JumpIfSmi(input_reg, &load_smi, Label::kNear); | 4371 __ JumpIfSmi(input_reg, &load_smi, Label::kNear); |
| 4374 | 4372 |
| 4375 // Heap number map check. | 4373 // Heap number map check. |
| 4376 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset), | 4374 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset), |
| 4377 factory()->heap_number_map()); | 4375 factory()->heap_number_map()); |
| 4378 if (can_convert_undefined_to_nan) { | 4376 if (can_convert_undefined_to_nan) { |
| 4379 __ j(not_equal, &convert, Label::kNear); | 4377 __ j(not_equal, &convert, Label::kNear); |
| 4380 } else { | 4378 } else { |
| 4381 DeoptimizeIf(not_equal, instr, Deoptimizer::kNotAHeapNumber); | 4379 DeoptimizeIf(not_equal, instr, DeoptimizeReason::kNotAHeapNumber); |
| 4382 } | 4380 } |
| 4383 | 4381 |
| 4384 // Heap number to XMM conversion. | 4382 // Heap number to XMM conversion. |
| 4385 __ movsd(result_reg, FieldOperand(input_reg, HeapNumber::kValueOffset)); | 4383 __ movsd(result_reg, FieldOperand(input_reg, HeapNumber::kValueOffset)); |
| 4386 | 4384 |
| 4387 if (deoptimize_on_minus_zero) { | 4385 if (deoptimize_on_minus_zero) { |
| 4388 XMMRegister xmm_scratch = double_scratch0(); | 4386 XMMRegister xmm_scratch = double_scratch0(); |
| 4389 __ xorps(xmm_scratch, xmm_scratch); | 4387 __ xorps(xmm_scratch, xmm_scratch); |
| 4390 __ ucomisd(result_reg, xmm_scratch); | 4388 __ ucomisd(result_reg, xmm_scratch); |
| 4391 __ j(not_zero, &done, Label::kNear); | 4389 __ j(not_zero, &done, Label::kNear); |
| 4392 __ movmskpd(temp_reg, result_reg); | 4390 __ movmskpd(temp_reg, result_reg); |
| 4393 __ test_b(temp_reg, Immediate(1)); | 4391 __ test_b(temp_reg, Immediate(1)); |
| 4394 DeoptimizeIf(not_zero, instr, Deoptimizer::kMinusZero); | 4392 DeoptimizeIf(not_zero, instr, DeoptimizeReason::kMinusZero); |
| 4395 } | 4393 } |
| 4396 __ jmp(&done, Label::kNear); | 4394 __ jmp(&done, Label::kNear); |
| 4397 | 4395 |
| 4398 if (can_convert_undefined_to_nan) { | 4396 if (can_convert_undefined_to_nan) { |
| 4399 __ bind(&convert); | 4397 __ bind(&convert); |
| 4400 | 4398 |
| 4401 // Convert undefined to NaN. | 4399 // Convert undefined to NaN. |
| 4402 __ cmp(input_reg, factory()->undefined_value()); | 4400 __ cmp(input_reg, factory()->undefined_value()); |
| 4403 DeoptimizeIf(not_equal, instr, Deoptimizer::kNotAHeapNumberUndefined); | 4401 DeoptimizeIf(not_equal, instr, |
| 4402 DeoptimizeReason::kNotAHeapNumberUndefined); |
| 4404 | 4403 |
| 4405 __ pcmpeqd(result_reg, result_reg); | 4404 __ pcmpeqd(result_reg, result_reg); |
| 4406 __ jmp(&done, Label::kNear); | 4405 __ jmp(&done, Label::kNear); |
| 4407 } | 4406 } |
| 4408 } else { | 4407 } else { |
| 4409 DCHECK(mode == NUMBER_CANDIDATE_IS_SMI); | 4408 DCHECK(mode == NUMBER_CANDIDATE_IS_SMI); |
| 4410 } | 4409 } |
| 4411 | 4410 |
| 4412 __ bind(&load_smi); | 4411 __ bind(&load_smi); |
| 4413 // Smi to XMM conversion. Clobbering a temp is faster than re-tagging the | 4412 // Smi to XMM conversion. Clobbering a temp is faster than re-tagging the |
| (...skipping 32 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 4446 | 4445 |
| 4447 __ bind(&check_bools); | 4446 __ bind(&check_bools); |
| 4448 __ cmp(input_reg, factory()->true_value()); | 4447 __ cmp(input_reg, factory()->true_value()); |
| 4449 __ j(not_equal, &check_false, Label::kNear); | 4448 __ j(not_equal, &check_false, Label::kNear); |
| 4450 __ Move(input_reg, Immediate(1)); | 4449 __ Move(input_reg, Immediate(1)); |
| 4451 __ jmp(done); | 4450 __ jmp(done); |
| 4452 | 4451 |
| 4453 __ bind(&check_false); | 4452 __ bind(&check_false); |
| 4454 __ cmp(input_reg, factory()->false_value()); | 4453 __ cmp(input_reg, factory()->false_value()); |
| 4455 DeoptimizeIf(not_equal, instr, | 4454 DeoptimizeIf(not_equal, instr, |
| 4456 Deoptimizer::kNotAHeapNumberUndefinedBoolean); | 4455 DeoptimizeReason::kNotAHeapNumberUndefinedBoolean); |
| 4457 __ Move(input_reg, Immediate(0)); | 4456 __ Move(input_reg, Immediate(0)); |
| 4458 } else { | 4457 } else { |
| 4459 XMMRegister scratch = ToDoubleRegister(instr->temp()); | 4458 XMMRegister scratch = ToDoubleRegister(instr->temp()); |
| 4460 DCHECK(!scratch.is(xmm0)); | 4459 DCHECK(!scratch.is(xmm0)); |
| 4461 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset), | 4460 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset), |
| 4462 isolate()->factory()->heap_number_map()); | 4461 isolate()->factory()->heap_number_map()); |
| 4463 DeoptimizeIf(not_equal, instr, Deoptimizer::kNotAHeapNumber); | 4462 DeoptimizeIf(not_equal, instr, DeoptimizeReason::kNotAHeapNumber); |
| 4464 __ movsd(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset)); | 4463 __ movsd(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset)); |
| 4465 __ cvttsd2si(input_reg, Operand(xmm0)); | 4464 __ cvttsd2si(input_reg, Operand(xmm0)); |
| 4466 __ Cvtsi2sd(scratch, Operand(input_reg)); | 4465 __ Cvtsi2sd(scratch, Operand(input_reg)); |
| 4467 __ ucomisd(xmm0, scratch); | 4466 __ ucomisd(xmm0, scratch); |
| 4468 DeoptimizeIf(not_equal, instr, Deoptimizer::kLostPrecision); | 4467 DeoptimizeIf(not_equal, instr, DeoptimizeReason::kLostPrecision); |
| 4469 DeoptimizeIf(parity_even, instr, Deoptimizer::kNaN); | 4468 DeoptimizeIf(parity_even, instr, DeoptimizeReason::kNaN); |
| 4470 if (instr->hydrogen()->GetMinusZeroMode() == FAIL_ON_MINUS_ZERO) { | 4469 if (instr->hydrogen()->GetMinusZeroMode() == FAIL_ON_MINUS_ZERO) { |
| 4471 __ test(input_reg, Operand(input_reg)); | 4470 __ test(input_reg, Operand(input_reg)); |
| 4472 __ j(not_zero, done); | 4471 __ j(not_zero, done); |
| 4473 __ movmskpd(input_reg, xmm0); | 4472 __ movmskpd(input_reg, xmm0); |
| 4474 __ and_(input_reg, 1); | 4473 __ and_(input_reg, 1); |
| 4475 DeoptimizeIf(not_zero, instr, Deoptimizer::kMinusZero); | 4474 DeoptimizeIf(not_zero, instr, DeoptimizeReason::kMinusZero); |
| 4476 } | 4475 } |
| 4477 } | 4476 } |
| 4478 } | 4477 } |
| 4479 | 4478 |
| 4480 | 4479 |
| 4481 void LCodeGen::DoTaggedToI(LTaggedToI* instr) { | 4480 void LCodeGen::DoTaggedToI(LTaggedToI* instr) { |
| 4482 class DeferredTaggedToI final : public LDeferredCode { | 4481 class DeferredTaggedToI final : public LDeferredCode { |
| 4483 public: | 4482 public: |
| 4484 DeferredTaggedToI(LCodeGen* codegen, LTaggedToI* instr) | 4483 DeferredTaggedToI(LCodeGen* codegen, LTaggedToI* instr) |
| 4485 : LDeferredCode(codegen), instr_(instr) { } | 4484 : LDeferredCode(codegen), instr_(instr) { } |
| (...skipping 59 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 4545 } else { | 4544 } else { |
| 4546 Label lost_precision, is_nan, minus_zero, done; | 4545 Label lost_precision, is_nan, minus_zero, done; |
| 4547 XMMRegister input_reg = ToDoubleRegister(input); | 4546 XMMRegister input_reg = ToDoubleRegister(input); |
| 4548 XMMRegister xmm_scratch = double_scratch0(); | 4547 XMMRegister xmm_scratch = double_scratch0(); |
| 4549 Label::Distance dist = DeoptEveryNTimes() ? Label::kFar : Label::kNear; | 4548 Label::Distance dist = DeoptEveryNTimes() ? Label::kFar : Label::kNear; |
| 4550 __ DoubleToI(result_reg, input_reg, xmm_scratch, | 4549 __ DoubleToI(result_reg, input_reg, xmm_scratch, |
| 4551 instr->hydrogen()->GetMinusZeroMode(), &lost_precision, | 4550 instr->hydrogen()->GetMinusZeroMode(), &lost_precision, |
| 4552 &is_nan, &minus_zero, dist); | 4551 &is_nan, &minus_zero, dist); |
| 4553 __ jmp(&done, dist); | 4552 __ jmp(&done, dist); |
| 4554 __ bind(&lost_precision); | 4553 __ bind(&lost_precision); |
| 4555 DeoptimizeIf(no_condition, instr, Deoptimizer::kLostPrecision); | 4554 DeoptimizeIf(no_condition, instr, DeoptimizeReason::kLostPrecision); |
| 4556 __ bind(&is_nan); | 4555 __ bind(&is_nan); |
| 4557 DeoptimizeIf(no_condition, instr, Deoptimizer::kNaN); | 4556 DeoptimizeIf(no_condition, instr, DeoptimizeReason::kNaN); |
| 4558 __ bind(&minus_zero); | 4557 __ bind(&minus_zero); |
| 4559 DeoptimizeIf(no_condition, instr, Deoptimizer::kMinusZero); | 4558 DeoptimizeIf(no_condition, instr, DeoptimizeReason::kMinusZero); |
| 4560 __ bind(&done); | 4559 __ bind(&done); |
| 4561 } | 4560 } |
| 4562 } | 4561 } |
| 4563 | 4562 |
| 4564 | 4563 |
| 4565 void LCodeGen::DoDoubleToSmi(LDoubleToSmi* instr) { | 4564 void LCodeGen::DoDoubleToSmi(LDoubleToSmi* instr) { |
| 4566 LOperand* input = instr->value(); | 4565 LOperand* input = instr->value(); |
| 4567 DCHECK(input->IsDoubleRegister()); | 4566 DCHECK(input->IsDoubleRegister()); |
| 4568 LOperand* result = instr->result(); | 4567 LOperand* result = instr->result(); |
| 4569 DCHECK(result->IsRegister()); | 4568 DCHECK(result->IsRegister()); |
| 4570 Register result_reg = ToRegister(result); | 4569 Register result_reg = ToRegister(result); |
| 4571 | 4570 |
| 4572 Label lost_precision, is_nan, minus_zero, done; | 4571 Label lost_precision, is_nan, minus_zero, done; |
| 4573 XMMRegister input_reg = ToDoubleRegister(input); | 4572 XMMRegister input_reg = ToDoubleRegister(input); |
| 4574 XMMRegister xmm_scratch = double_scratch0(); | 4573 XMMRegister xmm_scratch = double_scratch0(); |
| 4575 Label::Distance dist = DeoptEveryNTimes() ? Label::kFar : Label::kNear; | 4574 Label::Distance dist = DeoptEveryNTimes() ? Label::kFar : Label::kNear; |
| 4576 __ DoubleToI(result_reg, input_reg, xmm_scratch, | 4575 __ DoubleToI(result_reg, input_reg, xmm_scratch, |
| 4577 instr->hydrogen()->GetMinusZeroMode(), &lost_precision, &is_nan, | 4576 instr->hydrogen()->GetMinusZeroMode(), &lost_precision, &is_nan, |
| 4578 &minus_zero, dist); | 4577 &minus_zero, dist); |
| 4579 __ jmp(&done, dist); | 4578 __ jmp(&done, dist); |
| 4580 __ bind(&lost_precision); | 4579 __ bind(&lost_precision); |
| 4581 DeoptimizeIf(no_condition, instr, Deoptimizer::kLostPrecision); | 4580 DeoptimizeIf(no_condition, instr, DeoptimizeReason::kLostPrecision); |
| 4582 __ bind(&is_nan); | 4581 __ bind(&is_nan); |
| 4583 DeoptimizeIf(no_condition, instr, Deoptimizer::kNaN); | 4582 DeoptimizeIf(no_condition, instr, DeoptimizeReason::kNaN); |
| 4584 __ bind(&minus_zero); | 4583 __ bind(&minus_zero); |
| 4585 DeoptimizeIf(no_condition, instr, Deoptimizer::kMinusZero); | 4584 DeoptimizeIf(no_condition, instr, DeoptimizeReason::kMinusZero); |
| 4586 __ bind(&done); | 4585 __ bind(&done); |
| 4587 __ SmiTag(result_reg); | 4586 __ SmiTag(result_reg); |
| 4588 DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow); | 4587 DeoptimizeIf(overflow, instr, DeoptimizeReason::kOverflow); |
| 4589 } | 4588 } |
| 4590 | 4589 |
| 4591 | 4590 |
| 4592 void LCodeGen::DoCheckSmi(LCheckSmi* instr) { | 4591 void LCodeGen::DoCheckSmi(LCheckSmi* instr) { |
| 4593 LOperand* input = instr->value(); | 4592 LOperand* input = instr->value(); |
| 4594 __ test(ToOperand(input), Immediate(kSmiTagMask)); | 4593 __ test(ToOperand(input), Immediate(kSmiTagMask)); |
| 4595 DeoptimizeIf(not_zero, instr, Deoptimizer::kNotASmi); | 4594 DeoptimizeIf(not_zero, instr, DeoptimizeReason::kNotASmi); |
| 4596 } | 4595 } |
| 4597 | 4596 |
| 4598 | 4597 |
| 4599 void LCodeGen::DoCheckNonSmi(LCheckNonSmi* instr) { | 4598 void LCodeGen::DoCheckNonSmi(LCheckNonSmi* instr) { |
| 4600 if (!instr->hydrogen()->value()->type().IsHeapObject()) { | 4599 if (!instr->hydrogen()->value()->type().IsHeapObject()) { |
| 4601 LOperand* input = instr->value(); | 4600 LOperand* input = instr->value(); |
| 4602 __ test(ToOperand(input), Immediate(kSmiTagMask)); | 4601 __ test(ToOperand(input), Immediate(kSmiTagMask)); |
| 4603 DeoptimizeIf(zero, instr, Deoptimizer::kSmi); | 4602 DeoptimizeIf(zero, instr, DeoptimizeReason::kSmi); |
| 4604 } | 4603 } |
| 4605 } | 4604 } |
| 4606 | 4605 |
| 4607 | 4606 |
| 4608 void LCodeGen::DoCheckArrayBufferNotNeutered( | 4607 void LCodeGen::DoCheckArrayBufferNotNeutered( |
| 4609 LCheckArrayBufferNotNeutered* instr) { | 4608 LCheckArrayBufferNotNeutered* instr) { |
| 4610 Register view = ToRegister(instr->view()); | 4609 Register view = ToRegister(instr->view()); |
| 4611 Register scratch = ToRegister(instr->scratch()); | 4610 Register scratch = ToRegister(instr->scratch()); |
| 4612 | 4611 |
| 4613 __ mov(scratch, FieldOperand(view, JSArrayBufferView::kBufferOffset)); | 4612 __ mov(scratch, FieldOperand(view, JSArrayBufferView::kBufferOffset)); |
| 4614 __ test_b(FieldOperand(scratch, JSArrayBuffer::kBitFieldOffset), | 4613 __ test_b(FieldOperand(scratch, JSArrayBuffer::kBitFieldOffset), |
| 4615 Immediate(1 << JSArrayBuffer::WasNeutered::kShift)); | 4614 Immediate(1 << JSArrayBuffer::WasNeutered::kShift)); |
| 4616 DeoptimizeIf(not_zero, instr, Deoptimizer::kOutOfBounds); | 4615 DeoptimizeIf(not_zero, instr, DeoptimizeReason::kOutOfBounds); |
| 4617 } | 4616 } |
| 4618 | 4617 |
| 4619 | 4618 |
| 4620 void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) { | 4619 void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) { |
| 4621 Register input = ToRegister(instr->value()); | 4620 Register input = ToRegister(instr->value()); |
| 4622 Register temp = ToRegister(instr->temp()); | 4621 Register temp = ToRegister(instr->temp()); |
| 4623 | 4622 |
| 4624 __ mov(temp, FieldOperand(input, HeapObject::kMapOffset)); | 4623 __ mov(temp, FieldOperand(input, HeapObject::kMapOffset)); |
| 4625 | 4624 |
| 4626 if (instr->hydrogen()->is_interval_check()) { | 4625 if (instr->hydrogen()->is_interval_check()) { |
| 4627 InstanceType first; | 4626 InstanceType first; |
| 4628 InstanceType last; | 4627 InstanceType last; |
| 4629 instr->hydrogen()->GetCheckInterval(&first, &last); | 4628 instr->hydrogen()->GetCheckInterval(&first, &last); |
| 4630 | 4629 |
| 4631 __ cmpb(FieldOperand(temp, Map::kInstanceTypeOffset), Immediate(first)); | 4630 __ cmpb(FieldOperand(temp, Map::kInstanceTypeOffset), Immediate(first)); |
| 4632 | 4631 |
| 4633 // If there is only one type in the interval check for equality. | 4632 // If there is only one type in the interval check for equality. |
| 4634 if (first == last) { | 4633 if (first == last) { |
| 4635 DeoptimizeIf(not_equal, instr, Deoptimizer::kWrongInstanceType); | 4634 DeoptimizeIf(not_equal, instr, DeoptimizeReason::kWrongInstanceType); |
| 4636 } else { | 4635 } else { |
| 4637 DeoptimizeIf(below, instr, Deoptimizer::kWrongInstanceType); | 4636 DeoptimizeIf(below, instr, DeoptimizeReason::kWrongInstanceType); |
| 4638 // Omit check for the last type. | 4637 // Omit check for the last type. |
| 4639 if (last != LAST_TYPE) { | 4638 if (last != LAST_TYPE) { |
| 4640 __ cmpb(FieldOperand(temp, Map::kInstanceTypeOffset), Immediate(last)); | 4639 __ cmpb(FieldOperand(temp, Map::kInstanceTypeOffset), Immediate(last)); |
| 4641 DeoptimizeIf(above, instr, Deoptimizer::kWrongInstanceType); | 4640 DeoptimizeIf(above, instr, DeoptimizeReason::kWrongInstanceType); |
| 4642 } | 4641 } |
| 4643 } | 4642 } |
| 4644 } else { | 4643 } else { |
| 4645 uint8_t mask; | 4644 uint8_t mask; |
| 4646 uint8_t tag; | 4645 uint8_t tag; |
| 4647 instr->hydrogen()->GetCheckMaskAndTag(&mask, &tag); | 4646 instr->hydrogen()->GetCheckMaskAndTag(&mask, &tag); |
| 4648 | 4647 |
| 4649 if (base::bits::IsPowerOfTwo32(mask)) { | 4648 if (base::bits::IsPowerOfTwo32(mask)) { |
| 4650 DCHECK(tag == 0 || base::bits::IsPowerOfTwo32(tag)); | 4649 DCHECK(tag == 0 || base::bits::IsPowerOfTwo32(tag)); |
| 4651 __ test_b(FieldOperand(temp, Map::kInstanceTypeOffset), Immediate(mask)); | 4650 __ test_b(FieldOperand(temp, Map::kInstanceTypeOffset), Immediate(mask)); |
| 4652 DeoptimizeIf(tag == 0 ? not_zero : zero, instr, | 4651 DeoptimizeIf(tag == 0 ? not_zero : zero, instr, |
| 4653 Deoptimizer::kWrongInstanceType); | 4652 DeoptimizeReason::kWrongInstanceType); |
| 4654 } else { | 4653 } else { |
| 4655 __ movzx_b(temp, FieldOperand(temp, Map::kInstanceTypeOffset)); | 4654 __ movzx_b(temp, FieldOperand(temp, Map::kInstanceTypeOffset)); |
| 4656 __ and_(temp, mask); | 4655 __ and_(temp, mask); |
| 4657 __ cmp(temp, tag); | 4656 __ cmp(temp, tag); |
| 4658 DeoptimizeIf(not_equal, instr, Deoptimizer::kWrongInstanceType); | 4657 DeoptimizeIf(not_equal, instr, DeoptimizeReason::kWrongInstanceType); |
| 4659 } | 4658 } |
| 4660 } | 4659 } |
| 4661 } | 4660 } |
| 4662 | 4661 |
| 4663 | 4662 |
| 4664 void LCodeGen::DoCheckValue(LCheckValue* instr) { | 4663 void LCodeGen::DoCheckValue(LCheckValue* instr) { |
| 4665 Handle<HeapObject> object = instr->hydrogen()->object().handle(); | 4664 Handle<HeapObject> object = instr->hydrogen()->object().handle(); |
| 4666 if (instr->hydrogen()->object_in_new_space()) { | 4665 if (instr->hydrogen()->object_in_new_space()) { |
| 4667 Register reg = ToRegister(instr->value()); | 4666 Register reg = ToRegister(instr->value()); |
| 4668 Handle<Cell> cell = isolate()->factory()->NewCell(object); | 4667 Handle<Cell> cell = isolate()->factory()->NewCell(object); |
| 4669 __ cmp(reg, Operand::ForCell(cell)); | 4668 __ cmp(reg, Operand::ForCell(cell)); |
| 4670 } else { | 4669 } else { |
| 4671 Operand operand = ToOperand(instr->value()); | 4670 Operand operand = ToOperand(instr->value()); |
| 4672 __ cmp(operand, object); | 4671 __ cmp(operand, object); |
| 4673 } | 4672 } |
| 4674 DeoptimizeIf(not_equal, instr, Deoptimizer::kValueMismatch); | 4673 DeoptimizeIf(not_equal, instr, DeoptimizeReason::kValueMismatch); |
| 4675 } | 4674 } |
| 4676 | 4675 |
| 4677 | 4676 |
| 4678 void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) { | 4677 void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) { |
| 4679 { | 4678 { |
| 4680 PushSafepointRegistersScope scope(this); | 4679 PushSafepointRegistersScope scope(this); |
| 4681 __ push(object); | 4680 __ push(object); |
| 4682 __ xor_(esi, esi); | 4681 __ xor_(esi, esi); |
| 4683 __ CallRuntimeSaveDoubles(Runtime::kTryMigrateInstance); | 4682 __ CallRuntimeSaveDoubles(Runtime::kTryMigrateInstance); |
| 4684 RecordSafepointWithRegisters( | 4683 RecordSafepointWithRegisters( |
| 4685 instr->pointer_map(), 1, Safepoint::kNoLazyDeopt); | 4684 instr->pointer_map(), 1, Safepoint::kNoLazyDeopt); |
| 4686 | 4685 |
| 4687 __ test(eax, Immediate(kSmiTagMask)); | 4686 __ test(eax, Immediate(kSmiTagMask)); |
| 4688 } | 4687 } |
| 4689 DeoptimizeIf(zero, instr, Deoptimizer::kInstanceMigrationFailed); | 4688 DeoptimizeIf(zero, instr, DeoptimizeReason::kInstanceMigrationFailed); |
| 4690 } | 4689 } |
| 4691 | 4690 |
| 4692 | 4691 |
| 4693 void LCodeGen::DoCheckMaps(LCheckMaps* instr) { | 4692 void LCodeGen::DoCheckMaps(LCheckMaps* instr) { |
| 4694 class DeferredCheckMaps final : public LDeferredCode { | 4693 class DeferredCheckMaps final : public LDeferredCode { |
| 4695 public: | 4694 public: |
| 4696 DeferredCheckMaps(LCodeGen* codegen, LCheckMaps* instr, Register object) | 4695 DeferredCheckMaps(LCodeGen* codegen, LCheckMaps* instr, Register object) |
| 4697 : LDeferredCode(codegen), instr_(instr), object_(object) { | 4696 : LDeferredCode(codegen), instr_(instr), object_(object) { |
| 4698 SetExit(check_maps()); | 4697 SetExit(check_maps()); |
| 4699 } | 4698 } |
| (...skipping 33 matching lines...) |
| 4733 Handle<Map> map = maps->at(i).handle(); | 4732 Handle<Map> map = maps->at(i).handle(); |
| 4734 __ CompareMap(reg, map); | 4733 __ CompareMap(reg, map); |
| 4735 __ j(equal, &success, Label::kNear); | 4734 __ j(equal, &success, Label::kNear); |
| 4736 } | 4735 } |
| 4737 | 4736 |
| 4738 Handle<Map> map = maps->at(maps->size() - 1).handle(); | 4737 Handle<Map> map = maps->at(maps->size() - 1).handle(); |
| 4739 __ CompareMap(reg, map); | 4738 __ CompareMap(reg, map); |
| 4740 if (instr->hydrogen()->HasMigrationTarget()) { | 4739 if (instr->hydrogen()->HasMigrationTarget()) { |
| 4741 __ j(not_equal, deferred->entry()); | 4740 __ j(not_equal, deferred->entry()); |
| 4742 } else { | 4741 } else { |
| 4743 DeoptimizeIf(not_equal, instr, Deoptimizer::kWrongMap); | 4742 DeoptimizeIf(not_equal, instr, DeoptimizeReason::kWrongMap); |
| 4744 } | 4743 } |
| 4745 | 4744 |
| 4746 __ bind(&success); | 4745 __ bind(&success); |
| 4747 } | 4746 } |
| 4748 | 4747 |
| 4749 | 4748 |
| 4750 void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) { | 4749 void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) { |
| 4751 XMMRegister value_reg = ToDoubleRegister(instr->unclamped()); | 4750 XMMRegister value_reg = ToDoubleRegister(instr->unclamped()); |
| 4752 XMMRegister xmm_scratch = double_scratch0(); | 4751 XMMRegister xmm_scratch = double_scratch0(); |
| 4753 Register result_reg = ToRegister(instr->result()); | 4752 Register result_reg = ToRegister(instr->result()); |
| (...skipping 18 matching lines...) |
| 4772 __ JumpIfSmi(input_reg, &is_smi); | 4771 __ JumpIfSmi(input_reg, &is_smi); |
| 4773 | 4772 |
| 4774 // Check for heap number | 4773 // Check for heap number |
| 4775 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset), | 4774 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset), |
| 4776 factory()->heap_number_map()); | 4775 factory()->heap_number_map()); |
| 4777 __ j(equal, &heap_number, Label::kNear); | 4776 __ j(equal, &heap_number, Label::kNear); |
| 4778 | 4777 |
| 4779 // Check for undefined. Undefined is converted to zero for clamping | 4778 // Check for undefined. Undefined is converted to zero for clamping |
| 4780 // conversions. | 4779 // conversions. |
| 4781 __ cmp(input_reg, factory()->undefined_value()); | 4780 __ cmp(input_reg, factory()->undefined_value()); |
| 4782 DeoptimizeIf(not_equal, instr, Deoptimizer::kNotAHeapNumberUndefined); | 4781 DeoptimizeIf(not_equal, instr, DeoptimizeReason::kNotAHeapNumberUndefined); |
| 4783 __ mov(input_reg, 0); | 4782 __ mov(input_reg, 0); |
| 4784 __ jmp(&done, Label::kNear); | 4783 __ jmp(&done, Label::kNear); |
| 4785 | 4784 |
| 4786 // Heap number | 4785 // Heap number |
| 4787 __ bind(&heap_number); | 4786 __ bind(&heap_number); |
| 4788 __ movsd(xmm_scratch, FieldOperand(input_reg, HeapNumber::kValueOffset)); | 4787 __ movsd(xmm_scratch, FieldOperand(input_reg, HeapNumber::kValueOffset)); |
| 4789 __ ClampDoubleToUint8(xmm_scratch, temp_xmm_reg, input_reg); | 4788 __ ClampDoubleToUint8(xmm_scratch, temp_xmm_reg, input_reg); |
| 4790 __ jmp(&done, Label::kNear); | 4789 __ jmp(&done, Label::kNear); |
| 4791 | 4790 |
| 4792 // smi | 4791 // smi |
| (...skipping 404 matching lines...) |
| 5197 __ jmp(&done, Label::kNear); | 5196 __ jmp(&done, Label::kNear); |
| 5198 | 5197 |
| 5199 __ bind(&load_cache); | 5198 __ bind(&load_cache); |
| 5200 __ LoadInstanceDescriptors(map, result); | 5199 __ LoadInstanceDescriptors(map, result); |
| 5201 __ mov(result, | 5200 __ mov(result, |
| 5202 FieldOperand(result, DescriptorArray::kEnumCacheOffset)); | 5201 FieldOperand(result, DescriptorArray::kEnumCacheOffset)); |
| 5203 __ mov(result, | 5202 __ mov(result, |
| 5204 FieldOperand(result, FixedArray::SizeFor(instr->idx()))); | 5203 FieldOperand(result, FixedArray::SizeFor(instr->idx()))); |
| 5205 __ bind(&done); | 5204 __ bind(&done); |
| 5206 __ test(result, result); | 5205 __ test(result, result); |
| 5207 DeoptimizeIf(equal, instr, Deoptimizer::kNoCache); | 5206 DeoptimizeIf(equal, instr, DeoptimizeReason::kNoCache); |
| 5208 } | 5207 } |
| 5209 | 5208 |
| 5210 | 5209 |
| 5211 void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) { | 5210 void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) { |
| 5212 Register object = ToRegister(instr->value()); | 5211 Register object = ToRegister(instr->value()); |
| 5213 __ cmp(ToRegister(instr->map()), | 5212 __ cmp(ToRegister(instr->map()), |
| 5214 FieldOperand(object, HeapObject::kMapOffset)); | 5213 FieldOperand(object, HeapObject::kMapOffset)); |
| 5215 DeoptimizeIf(not_equal, instr, Deoptimizer::kWrongMap); | 5214 DeoptimizeIf(not_equal, instr, DeoptimizeReason::kWrongMap); |
| 5216 } | 5215 } |
| 5217 | 5216 |
| 5218 | 5217 |
| 5219 void LCodeGen::DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr, | 5218 void LCodeGen::DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr, |
| 5220 Register object, | 5219 Register object, |
| 5221 Register index) { | 5220 Register index) { |
| 5222 PushSafepointRegistersScope scope(this); | 5221 PushSafepointRegistersScope scope(this); |
| 5223 __ push(object); | 5222 __ push(object); |
| 5224 __ push(index); | 5223 __ push(index); |
| 5225 __ xor_(esi, esi); | 5224 __ xor_(esi, esi); |
| (...skipping 59 matching lines...) |
| 5285 __ bind(deferred->exit()); | 5284 __ bind(deferred->exit()); |
| 5286 __ bind(&done); | 5285 __ bind(&done); |
| 5287 } | 5286 } |
| 5288 | 5287 |
| 5289 #undef __ | 5288 #undef __ |
| 5290 | 5289 |
| 5291 } // namespace internal | 5290 } // namespace internal |
| 5292 } // namespace v8 | 5291 } // namespace v8 |
| 5293 | 5292 |
| 5294 #endif // V8_TARGET_ARCH_IA32 | 5293 #endif // V8_TARGET_ARCH_IA32 |