OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #if V8_TARGET_ARCH_IA32 | 7 #if V8_TARGET_ARCH_IA32 |
8 | 8 |
9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
10 #include "src/code-factory.h" | 10 #include "src/code-factory.h" |
(...skipping 802 matching lines...) |
813 int pc_offset = masm()->pc_offset(); | 813 int pc_offset = masm()->pc_offset(); |
814 environment->Register(deoptimization_index, | 814 environment->Register(deoptimization_index, |
815 translation.index(), | 815 translation.index(), |
816 (mode == Safepoint::kLazyDeopt) ? pc_offset : -1); | 816 (mode == Safepoint::kLazyDeopt) ? pc_offset : -1); |
817 deoptimizations_.Add(environment, zone()); | 817 deoptimizations_.Add(environment, zone()); |
818 } | 818 } |
819 } | 819 } |
820 | 820 |
821 | 821 |
822 void LCodeGen::DeoptimizeIf(Condition cc, LInstruction* instr, | 822 void LCodeGen::DeoptimizeIf(Condition cc, LInstruction* instr, |
823 const char* detail, | 823 Deoptimizer::DeoptReason deopt_reason, |
824 Deoptimizer::BailoutType bailout_type) { | 824 Deoptimizer::BailoutType bailout_type) { |
825 LEnvironment* environment = instr->environment(); | 825 LEnvironment* environment = instr->environment(); |
826 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); | 826 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); |
827 DCHECK(environment->HasBeenRegistered()); | 827 DCHECK(environment->HasBeenRegistered()); |
828 int id = environment->deoptimization_index(); | 828 int id = environment->deoptimization_index(); |
829 DCHECK(info()->IsOptimizing() || info()->IsStub()); | 829 DCHECK(info()->IsOptimizing() || info()->IsStub()); |
830 Address entry = | 830 Address entry = |
831 Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type); | 831 Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type); |
832 if (entry == NULL) { | 832 if (entry == NULL) { |
833 Abort(kBailoutWasNotPrepared); | 833 Abort(kBailoutWasNotPrepared); |
(...skipping 22 matching lines...) |
856 } | 856 } |
857 | 857 |
858 if (info()->ShouldTrapOnDeopt()) { | 858 if (info()->ShouldTrapOnDeopt()) { |
859 Label done; | 859 Label done; |
860 if (cc != no_condition) __ j(NegateCondition(cc), &done, Label::kNear); | 860 if (cc != no_condition) __ j(NegateCondition(cc), &done, Label::kNear); |
861 __ int3(); | 861 __ int3(); |
862 __ bind(&done); | 862 __ bind(&done); |
863 } | 863 } |
864 | 864 |
865 Deoptimizer::Reason reason(instr->hydrogen_value()->position().raw(), | 865 Deoptimizer::Reason reason(instr->hydrogen_value()->position().raw(), |
866 instr->Mnemonic(), detail); | 866 instr->Mnemonic(), deopt_reason); |
867 DCHECK(info()->IsStub() || frame_is_built_); | 867 DCHECK(info()->IsStub() || frame_is_built_); |
868 if (cc == no_condition && frame_is_built_) { | 868 if (cc == no_condition && frame_is_built_) { |
869 DeoptComment(reason); | 869 DeoptComment(reason); |
870 __ call(entry, RelocInfo::RUNTIME_ENTRY); | 870 __ call(entry, RelocInfo::RUNTIME_ENTRY); |
871 } else { | 871 } else { |
872 Deoptimizer::JumpTableEntry table_entry(entry, reason, bailout_type, | 872 Deoptimizer::JumpTableEntry table_entry(entry, reason, bailout_type, |
873 !frame_is_built_); | 873 !frame_is_built_); |
874 // We often have several deopts to the same entry, reuse the last | 874 // We often have several deopts to the same entry, reuse the last |
875 // jump entry if this is the case. | 875 // jump entry if this is the case. |
876 if (jump_table_.is_empty() || | 876 if (jump_table_.is_empty() || |
877 !table_entry.IsEquivalentTo(jump_table_.last())) { | 877 !table_entry.IsEquivalentTo(jump_table_.last())) { |
878 jump_table_.Add(table_entry, zone()); | 878 jump_table_.Add(table_entry, zone()); |
879 } | 879 } |
880 if (cc == no_condition) { | 880 if (cc == no_condition) { |
881 __ jmp(&jump_table_.last().label); | 881 __ jmp(&jump_table_.last().label); |
882 } else { | 882 } else { |
883 __ j(cc, &jump_table_.last().label); | 883 __ j(cc, &jump_table_.last().label); |
884 } | 884 } |
885 } | 885 } |
886 } | 886 } |
887 | 887 |
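Note: this hunk contains the core of the change. DeoptimizeIf used to take a free-form const char* detail ("minus zero", "overflow", ...) and now takes a Deoptimizer::DeoptReason enum value (Deoptimizer::kMinusZero, Deoptimizer::kOverflow, ...), so reasons are spelled once and checked at compile time while the message strings live in one table. A minimal standalone sketch of that enum-plus-message pattern follows; the list name and entries are illustrative, not the actual (much larger) V8 definitions.

#include <cstdio>

// Illustrative reason list; the real V8 macro list and entries differ.
#define DEOPT_REASON_LIST(V)             \
  V(kMinusZero, "minus zero")            \
  V(kDivisionByZero, "division by zero") \
  V(kOverflow, "overflow")

struct DeoptimizerSketch {
  enum DeoptReason {
#define DECLARE_REASON(name, message) name,
    DEOPT_REASON_LIST(DECLARE_REASON)
#undef DECLARE_REASON
    kLastDeoptReason
  };
  static const char* Message(DeoptReason reason) {
    static const char* const kMessages[] = {
#define DECLARE_MESSAGE(name, message) message,
        DEOPT_REASON_LIST(DECLARE_MESSAGE)
#undef DECLARE_MESSAGE
    };
    return kMessages[reason];
  }
};

int main() {
  // Prints "minus zero", the text the old const char* call sites passed directly.
  std::printf("%s\n", DeoptimizerSketch::Message(DeoptimizerSketch::kMinusZero));
  return 0;
}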
888 | 888 |
889 void LCodeGen::DeoptimizeIf(Condition cc, LInstruction* instr, | 889 void LCodeGen::DeoptimizeIf(Condition cc, LInstruction* instr, |
890 const char* detail) { | 890 Deoptimizer::DeoptReason deopt_reason) { |
891 Deoptimizer::BailoutType bailout_type = info()->IsStub() | 891 Deoptimizer::BailoutType bailout_type = info()->IsStub() |
892 ? Deoptimizer::LAZY | 892 ? Deoptimizer::LAZY |
893 : Deoptimizer::EAGER; | 893 : Deoptimizer::EAGER; |
894 DeoptimizeIf(cc, instr, detail, bailout_type); | 894 DeoptimizeIf(cc, instr, deopt_reason, bailout_type); |
895 } | 895 } |
896 | 896 |
897 | 897 |
898 void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) { | 898 void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) { |
899 int length = deoptimizations_.length(); | 899 int length = deoptimizations_.length(); |
900 if (length == 0) return; | 900 if (length == 0) return; |
901 Handle<DeoptimizationInputData> data = | 901 Handle<DeoptimizationInputData> data = |
902 DeoptimizationInputData::New(isolate(), length, TENURED); | 902 DeoptimizationInputData::New(isolate(), length, TENURED); |
903 | 903 |
904 Handle<ByteArray> translations = | 904 Handle<ByteArray> translations = |
(...skipping 208 matching lines...) |
1113 int32_t mask = divisor < 0 ? -(divisor + 1) : (divisor - 1); | 1113 int32_t mask = divisor < 0 ? -(divisor + 1) : (divisor - 1); |
1114 Label dividend_is_not_negative, done; | 1114 Label dividend_is_not_negative, done; |
1115 if (hmod->CheckFlag(HValue::kLeftCanBeNegative)) { | 1115 if (hmod->CheckFlag(HValue::kLeftCanBeNegative)) { |
1116 __ test(dividend, dividend); | 1116 __ test(dividend, dividend); |
1117 __ j(not_sign, &dividend_is_not_negative, Label::kNear); | 1117 __ j(not_sign, &dividend_is_not_negative, Label::kNear); |
1118 // Note that this is correct even for kMinInt operands. | 1118 // Note that this is correct even for kMinInt operands. |
1119 __ neg(dividend); | 1119 __ neg(dividend); |
1120 __ and_(dividend, mask); | 1120 __ and_(dividend, mask); |
1121 __ neg(dividend); | 1121 __ neg(dividend); |
1122 if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { | 1122 if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { |
1123 DeoptimizeIf(zero, instr, "minus zero"); | 1123 DeoptimizeIf(zero, instr, Deoptimizer::kMinusZero); |
1124 } | 1124 } |
1125 __ jmp(&done, Label::kNear); | 1125 __ jmp(&done, Label::kNear); |
1126 } | 1126 } |
1127 | 1127 |
1128 __ bind(&dividend_is_not_negative); | 1128 __ bind(&dividend_is_not_negative); |
1129 __ and_(dividend, mask); | 1129 __ and_(dividend, mask); |
1130 __ bind(&done); | 1130 __ bind(&done); |
1131 } | 1131 } |
1132 | 1132 |
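The neg / and / neg sequence above relies on the identity x % 2^k == -((-x) & (2^k - 1)) for x < 0 under truncating (C/JavaScript) remainder semantics, and it stays correct for kMinInt because the negation wraps modulo 2^32. A small standalone sketch of the same identity (illustrative, not V8 code):

#include <cassert>
#include <cstdint>

int32_t ModByPowerOf2(int32_t dividend, int32_t divisor) {
  // mask == |divisor| - 1, computed without overflowing for divisor == kMinInt.
  uint32_t mask = static_cast<uint32_t>(divisor < 0 ? -(divisor + 1) : divisor - 1);
  if (dividend < 0) {
    // neg / and / neg, mirroring the generated code; the unsigned negation
    // wraps for kMinInt instead of overflowing.
    uint32_t negated = 0u - static_cast<uint32_t>(dividend);
    return -static_cast<int32_t>(negated & mask);
  }
  return static_cast<int32_t>(static_cast<uint32_t>(dividend) & mask);
}

int main() {
  assert(ModByPowerOf2(-7, 4) == -3);  // matches -7 % 4 with truncating division
  assert(ModByPowerOf2(7, -4) == 3);   // sign of the result follows the dividend
  return 0;
}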
1133 | 1133 |
1134 void LCodeGen::DoModByConstI(LModByConstI* instr) { | 1134 void LCodeGen::DoModByConstI(LModByConstI* instr) { |
1135 Register dividend = ToRegister(instr->dividend()); | 1135 Register dividend = ToRegister(instr->dividend()); |
1136 int32_t divisor = instr->divisor(); | 1136 int32_t divisor = instr->divisor(); |
1137 DCHECK(ToRegister(instr->result()).is(eax)); | 1137 DCHECK(ToRegister(instr->result()).is(eax)); |
1138 | 1138 |
1139 if (divisor == 0) { | 1139 if (divisor == 0) { |
1140 DeoptimizeIf(no_condition, instr, "division by zero"); | 1140 DeoptimizeIf(no_condition, instr, Deoptimizer::kDivisionByZero); |
1141 return; | 1141 return; |
1142 } | 1142 } |
1143 | 1143 |
1144 __ TruncatingDiv(dividend, Abs(divisor)); | 1144 __ TruncatingDiv(dividend, Abs(divisor)); |
1145 __ imul(edx, edx, Abs(divisor)); | 1145 __ imul(edx, edx, Abs(divisor)); |
1146 __ mov(eax, dividend); | 1146 __ mov(eax, dividend); |
1147 __ sub(eax, edx); | 1147 __ sub(eax, edx); |
1148 | 1148 |
1149 // Check for negative zero. | 1149 // Check for negative zero. |
1150 HMod* hmod = instr->hydrogen(); | 1150 HMod* hmod = instr->hydrogen(); |
1151 if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { | 1151 if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { |
1152 Label remainder_not_zero; | 1152 Label remainder_not_zero; |
1153 __ j(not_zero, &remainder_not_zero, Label::kNear); | 1153 __ j(not_zero, &remainder_not_zero, Label::kNear); |
1154 __ cmp(dividend, Immediate(0)); | 1154 __ cmp(dividend, Immediate(0)); |
1155 DeoptimizeIf(less, instr, "minus zero"); | 1155 DeoptimizeIf(less, instr, Deoptimizer::kMinusZero); |
1156 __ bind(&remainder_not_zero); | 1156 __ bind(&remainder_not_zero); |
1157 } | 1157 } |
1158 } | 1158 } |
1159 | 1159 |
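DoModByConstI above derives the remainder from a truncating division by |divisor|: TruncatingDiv emits a magic-number multiply that leaves the quotient in edx, and the remainder is then dividend - quotient * |divisor|. A sketch with plain operators standing in for the magic-number divide (illustrative only):

#include <cassert>
#include <cstdint>
#include <cstdlib>

int32_t ModByConst(int32_t dividend, int32_t divisor) {
  int32_t abs_divisor = std::abs(divisor);
  int32_t quotient = dividend / abs_divisor;   // TruncatingDiv stand-in
  return dividend - quotient * abs_divisor;    // imul + sub in the generated code
}

int main() {
  assert(ModByConst(13, 5) == 3);
  // A zero remainder with a negative dividend is JavaScript's -0 case, which is
  // why the code above deopts on (remainder == 0 && dividend < 0).
  assert(ModByConst(-13, 5) == -3);
  return 0;
}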
1160 | 1160 |
1161 void LCodeGen::DoModI(LModI* instr) { | 1161 void LCodeGen::DoModI(LModI* instr) { |
1162 HMod* hmod = instr->hydrogen(); | 1162 HMod* hmod = instr->hydrogen(); |
1163 | 1163 |
1164 Register left_reg = ToRegister(instr->left()); | 1164 Register left_reg = ToRegister(instr->left()); |
1165 DCHECK(left_reg.is(eax)); | 1165 DCHECK(left_reg.is(eax)); |
1166 Register right_reg = ToRegister(instr->right()); | 1166 Register right_reg = ToRegister(instr->right()); |
1167 DCHECK(!right_reg.is(eax)); | 1167 DCHECK(!right_reg.is(eax)); |
1168 DCHECK(!right_reg.is(edx)); | 1168 DCHECK(!right_reg.is(edx)); |
1169 Register result_reg = ToRegister(instr->result()); | 1169 Register result_reg = ToRegister(instr->result()); |
1170 DCHECK(result_reg.is(edx)); | 1170 DCHECK(result_reg.is(edx)); |
1171 | 1171 |
1172 Label done; | 1172 Label done; |
1173 // Check for x % 0, idiv would signal a divide error. We have to | 1173 // Check for x % 0, idiv would signal a divide error. We have to |
1174 // deopt in this case because we can't return a NaN. | 1174 // deopt in this case because we can't return a NaN. |
1175 if (hmod->CheckFlag(HValue::kCanBeDivByZero)) { | 1175 if (hmod->CheckFlag(HValue::kCanBeDivByZero)) { |
1176 __ test(right_reg, Operand(right_reg)); | 1176 __ test(right_reg, Operand(right_reg)); |
1177 DeoptimizeIf(zero, instr, "division by zero"); | 1177 DeoptimizeIf(zero, instr, Deoptimizer::kDivisionByZero); |
1178 } | 1178 } |
1179 | 1179 |
1180 // Check for kMinInt % -1, idiv would signal a divide error. We | 1180 // Check for kMinInt % -1, idiv would signal a divide error. We |
1181 // have to deopt if we care about -0, because we can't return that. | 1181 // have to deopt if we care about -0, because we can't return that. |
1182 if (hmod->CheckFlag(HValue::kCanOverflow)) { | 1182 if (hmod->CheckFlag(HValue::kCanOverflow)) { |
1183 Label no_overflow_possible; | 1183 Label no_overflow_possible; |
1184 __ cmp(left_reg, kMinInt); | 1184 __ cmp(left_reg, kMinInt); |
1185 __ j(not_equal, &no_overflow_possible, Label::kNear); | 1185 __ j(not_equal, &no_overflow_possible, Label::kNear); |
1186 __ cmp(right_reg, -1); | 1186 __ cmp(right_reg, -1); |
1187 if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { | 1187 if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { |
1188 DeoptimizeIf(equal, instr, "minus zero"); | 1188 DeoptimizeIf(equal, instr, Deoptimizer::kMinusZero); |
1189 } else { | 1189 } else { |
1190 __ j(not_equal, &no_overflow_possible, Label::kNear); | 1190 __ j(not_equal, &no_overflow_possible, Label::kNear); |
1191 __ Move(result_reg, Immediate(0)); | 1191 __ Move(result_reg, Immediate(0)); |
1192 __ jmp(&done, Label::kNear); | 1192 __ jmp(&done, Label::kNear); |
1193 } | 1193 } |
1194 __ bind(&no_overflow_possible); | 1194 __ bind(&no_overflow_possible); |
1195 } | 1195 } |
1196 | 1196 |
1197 // Sign extend dividend in eax into edx:eax. | 1197 // Sign extend dividend in eax into edx:eax. |
1198 __ cdq(); | 1198 __ cdq(); |
1199 | 1199 |
1200 // If we care about -0, test if the dividend is <0 and the result is 0. | 1200 // If we care about -0, test if the dividend is <0 and the result is 0. |
1201 if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { | 1201 if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { |
1202 Label positive_left; | 1202 Label positive_left; |
1203 __ test(left_reg, Operand(left_reg)); | 1203 __ test(left_reg, Operand(left_reg)); |
1204 __ j(not_sign, &positive_left, Label::kNear); | 1204 __ j(not_sign, &positive_left, Label::kNear); |
1205 __ idiv(right_reg); | 1205 __ idiv(right_reg); |
1206 __ test(result_reg, Operand(result_reg)); | 1206 __ test(result_reg, Operand(result_reg)); |
1207 DeoptimizeIf(zero, instr, "minus zero"); | 1207 DeoptimizeIf(zero, instr, Deoptimizer::kMinusZero); |
1208 __ jmp(&done, Label::kNear); | 1208 __ jmp(&done, Label::kNear); |
1209 __ bind(&positive_left); | 1209 __ bind(&positive_left); |
1210 } | 1210 } |
1211 __ idiv(right_reg); | 1211 __ idiv(right_reg); |
1212 __ bind(&done); | 1212 __ bind(&done); |
1213 } | 1213 } |
1214 | 1214 |
1215 | 1215 |
1216 void LCodeGen::DoDivByPowerOf2I(LDivByPowerOf2I* instr) { | 1216 void LCodeGen::DoDivByPowerOf2I(LDivByPowerOf2I* instr) { |
1217 Register dividend = ToRegister(instr->dividend()); | 1217 Register dividend = ToRegister(instr->dividend()); |
1218 int32_t divisor = instr->divisor(); | 1218 int32_t divisor = instr->divisor(); |
1219 Register result = ToRegister(instr->result()); | 1219 Register result = ToRegister(instr->result()); |
1220 DCHECK(divisor == kMinInt || base::bits::IsPowerOfTwo32(Abs(divisor))); | 1220 DCHECK(divisor == kMinInt || base::bits::IsPowerOfTwo32(Abs(divisor))); |
1221 DCHECK(!result.is(dividend)); | 1221 DCHECK(!result.is(dividend)); |
1222 | 1222 |
1223 // Check for (0 / -x) that will produce negative zero. | 1223 // Check for (0 / -x) that will produce negative zero. |
1224 HDiv* hdiv = instr->hydrogen(); | 1224 HDiv* hdiv = instr->hydrogen(); |
1225 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) { | 1225 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) { |
1226 __ test(dividend, dividend); | 1226 __ test(dividend, dividend); |
1227 DeoptimizeIf(zero, instr, "minus zero"); | 1227 DeoptimizeIf(zero, instr, Deoptimizer::kMinusZero); |
1228 } | 1228 } |
1229 // Check for (kMinInt / -1). | 1229 // Check for (kMinInt / -1). |
1230 if (hdiv->CheckFlag(HValue::kCanOverflow) && divisor == -1) { | 1230 if (hdiv->CheckFlag(HValue::kCanOverflow) && divisor == -1) { |
1231 __ cmp(dividend, kMinInt); | 1231 __ cmp(dividend, kMinInt); |
1232 DeoptimizeIf(zero, instr, "overflow"); | 1232 DeoptimizeIf(zero, instr, Deoptimizer::kOverflow); |
1233 } | 1233 } |
1234 // Deoptimize if remainder will not be 0. | 1234 // Deoptimize if remainder will not be 0. |
1235 if (!hdiv->CheckFlag(HInstruction::kAllUsesTruncatingToInt32) && | 1235 if (!hdiv->CheckFlag(HInstruction::kAllUsesTruncatingToInt32) && |
1236 divisor != 1 && divisor != -1) { | 1236 divisor != 1 && divisor != -1) { |
1237 int32_t mask = divisor < 0 ? -(divisor + 1) : (divisor - 1); | 1237 int32_t mask = divisor < 0 ? -(divisor + 1) : (divisor - 1); |
1238 __ test(dividend, Immediate(mask)); | 1238 __ test(dividend, Immediate(mask)); |
1239 DeoptimizeIf(not_zero, instr, "lost precision"); | 1239 DeoptimizeIf(not_zero, instr, Deoptimizer::kLostPrecision); |
1240 } | 1240 } |
1241 __ Move(result, dividend); | 1241 __ Move(result, dividend); |
1242 int32_t shift = WhichPowerOf2Abs(divisor); | 1242 int32_t shift = WhichPowerOf2Abs(divisor); |
1243 if (shift > 0) { | 1243 if (shift > 0) { |
1244 // The arithmetic shift is always OK, the 'if' is an optimization only. | 1244 // The arithmetic shift is always OK, the 'if' is an optimization only. |
1245 if (shift > 1) __ sar(result, 31); | 1245 if (shift > 1) __ sar(result, 31); |
1246 __ shr(result, 32 - shift); | 1246 __ shr(result, 32 - shift); |
1247 __ add(result, dividend); | 1247 __ add(result, dividend); |
1248 __ sar(result, shift); | 1248 __ sar(result, shift); |
1249 } | 1249 } |
1250 if (divisor < 0) __ neg(result); | 1250 if (divisor < 0) __ neg(result); |
1251 } | 1251 } |
1252 | 1252 |
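The shift sequence in DoDivByPowerOf2I implements truncating (round toward zero) signed division by +/-2^shift: it adds 2^shift - 1 to negative dividends only, by extracting the sign (sar 31), turning it into the bias (shr 32 - shift), adding, and arithmetic-shifting. A worked sketch of the same computation (assumes arithmetic right shift of signed values, as on the targeted compilers; not V8 code):

#include <cassert>
#include <cstdint>

int32_t DivByPowerOf2(int32_t dividend, int shift, bool negative_divisor) {
  uint32_t bias = 0;
  if (shift > 0) {
    uint32_t sign = dividend < 0 ? 0xFFFFFFFFu : 0u;  // what `sar result, 31` produces
    bias = sign >> (32 - shift);                      // 0 or 2^shift - 1
  }
  int32_t biased = static_cast<int32_t>(static_cast<uint32_t>(dividend) + bias);
  int32_t quotient = biased >> shift;                 // arithmetic shift
  return negative_divisor ? -quotient : quotient;
}

int main() {
  assert(DivByPowerOf2(-7, 2, false) == -1);  // -7 / 4 rounds toward zero
  assert(DivByPowerOf2(7, 2, true) == -1);    //  7 / -4
  return 0;
}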
1253 | 1253 |
1254 void LCodeGen::DoDivByConstI(LDivByConstI* instr) { | 1254 void LCodeGen::DoDivByConstI(LDivByConstI* instr) { |
1255 Register dividend = ToRegister(instr->dividend()); | 1255 Register dividend = ToRegister(instr->dividend()); |
1256 int32_t divisor = instr->divisor(); | 1256 int32_t divisor = instr->divisor(); |
1257 DCHECK(ToRegister(instr->result()).is(edx)); | 1257 DCHECK(ToRegister(instr->result()).is(edx)); |
1258 | 1258 |
1259 if (divisor == 0) { | 1259 if (divisor == 0) { |
1260 DeoptimizeIf(no_condition, instr, "division by zero"); | 1260 DeoptimizeIf(no_condition, instr, Deoptimizer::kDivisionByZero); |
1261 return; | 1261 return; |
1262 } | 1262 } |
1263 | 1263 |
1264 // Check for (0 / -x) that will produce negative zero. | 1264 // Check for (0 / -x) that will produce negative zero. |
1265 HDiv* hdiv = instr->hydrogen(); | 1265 HDiv* hdiv = instr->hydrogen(); |
1266 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) { | 1266 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) { |
1267 __ test(dividend, dividend); | 1267 __ test(dividend, dividend); |
1268 DeoptimizeIf(zero, instr, "minus zero"); | 1268 DeoptimizeIf(zero, instr, Deoptimizer::kMinusZero); |
1269 } | 1269 } |
1270 | 1270 |
1271 __ TruncatingDiv(dividend, Abs(divisor)); | 1271 __ TruncatingDiv(dividend, Abs(divisor)); |
1272 if (divisor < 0) __ neg(edx); | 1272 if (divisor < 0) __ neg(edx); |
1273 | 1273 |
1274 if (!hdiv->CheckFlag(HInstruction::kAllUsesTruncatingToInt32)) { | 1274 if (!hdiv->CheckFlag(HInstruction::kAllUsesTruncatingToInt32)) { |
1275 __ mov(eax, edx); | 1275 __ mov(eax, edx); |
1276 __ imul(eax, eax, divisor); | 1276 __ imul(eax, eax, divisor); |
1277 __ sub(eax, dividend); | 1277 __ sub(eax, dividend); |
1278 DeoptimizeIf(not_equal, instr, "lost precision"); | 1278 DeoptimizeIf(not_equal, instr, Deoptimizer::kLostPrecision); |
1279 } | 1279 } |
1280 } | 1280 } |
1281 | 1281 |
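The kLostPrecision deopt above exists because the constant-divisor division computed by TruncatingDiv discards any remainder; when not all uses truncate, the quotient is multiplied back and compared against the dividend, and a mismatch bails out. Sketch of the check (illustrative):

#include <cassert>
#include <cstdint>

bool DividesExactly(int32_t dividend, int32_t divisor, int32_t* quotient) {
  *quotient = dividend / divisor;           // stands in for TruncatingDiv (+ neg)
  return *quotient * divisor == dividend;   // imul, sub, then the not_equal deopt
}

int main() {
  int32_t q = 0;
  assert(DividesExactly(12, -4, &q) && q == -3);
  assert(!DividesExactly(13, 4, &q));       // remainder != 0 -> would deoptimize
  return 0;
}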
1282 | 1282 |
1283 // TODO(svenpanne) Refactor this to avoid code duplication with DoFlooringDivI. | 1283 // TODO(svenpanne) Refactor this to avoid code duplication with DoFlooringDivI. |
1284 void LCodeGen::DoDivI(LDivI* instr) { | 1284 void LCodeGen::DoDivI(LDivI* instr) { |
1285 HBinaryOperation* hdiv = instr->hydrogen(); | 1285 HBinaryOperation* hdiv = instr->hydrogen(); |
1286 Register dividend = ToRegister(instr->dividend()); | 1286 Register dividend = ToRegister(instr->dividend()); |
1287 Register divisor = ToRegister(instr->divisor()); | 1287 Register divisor = ToRegister(instr->divisor()); |
1288 Register remainder = ToRegister(instr->temp()); | 1288 Register remainder = ToRegister(instr->temp()); |
1289 DCHECK(dividend.is(eax)); | 1289 DCHECK(dividend.is(eax)); |
1290 DCHECK(remainder.is(edx)); | 1290 DCHECK(remainder.is(edx)); |
1291 DCHECK(ToRegister(instr->result()).is(eax)); | 1291 DCHECK(ToRegister(instr->result()).is(eax)); |
1292 DCHECK(!divisor.is(eax)); | 1292 DCHECK(!divisor.is(eax)); |
1293 DCHECK(!divisor.is(edx)); | 1293 DCHECK(!divisor.is(edx)); |
1294 | 1294 |
1295 // Check for x / 0. | 1295 // Check for x / 0. |
1296 if (hdiv->CheckFlag(HValue::kCanBeDivByZero)) { | 1296 if (hdiv->CheckFlag(HValue::kCanBeDivByZero)) { |
1297 __ test(divisor, divisor); | 1297 __ test(divisor, divisor); |
1298 DeoptimizeIf(zero, instr, "division by zero"); | 1298 DeoptimizeIf(zero, instr, Deoptimizer::kDivisionByZero); |
1299 } | 1299 } |
1300 | 1300 |
1301 // Check for (0 / -x) that will produce negative zero. | 1301 // Check for (0 / -x) that will produce negative zero. |
1302 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero)) { | 1302 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero)) { |
1303 Label dividend_not_zero; | 1303 Label dividend_not_zero; |
1304 __ test(dividend, dividend); | 1304 __ test(dividend, dividend); |
1305 __ j(not_zero, &dividend_not_zero, Label::kNear); | 1305 __ j(not_zero, &dividend_not_zero, Label::kNear); |
1306 __ test(divisor, divisor); | 1306 __ test(divisor, divisor); |
1307 DeoptimizeIf(sign, instr, "minus zero"); | 1307 DeoptimizeIf(sign, instr, Deoptimizer::kMinusZero); |
1308 __ bind(&dividend_not_zero); | 1308 __ bind(&dividend_not_zero); |
1309 } | 1309 } |
1310 | 1310 |
1311 // Check for (kMinInt / -1). | 1311 // Check for (kMinInt / -1). |
1312 if (hdiv->CheckFlag(HValue::kCanOverflow)) { | 1312 if (hdiv->CheckFlag(HValue::kCanOverflow)) { |
1313 Label dividend_not_min_int; | 1313 Label dividend_not_min_int; |
1314 __ cmp(dividend, kMinInt); | 1314 __ cmp(dividend, kMinInt); |
1315 __ j(not_zero, &dividend_not_min_int, Label::kNear); | 1315 __ j(not_zero, &dividend_not_min_int, Label::kNear); |
1316 __ cmp(divisor, -1); | 1316 __ cmp(divisor, -1); |
1317 DeoptimizeIf(zero, instr, "overflow"); | 1317 DeoptimizeIf(zero, instr, Deoptimizer::kOverflow); |
1318 __ bind(&dividend_not_min_int); | 1318 __ bind(&dividend_not_min_int); |
1319 } | 1319 } |
1320 | 1320 |
1321 // Sign extend to edx (= remainder). | 1321 // Sign extend to edx (= remainder). |
1322 __ cdq(); | 1322 __ cdq(); |
1323 __ idiv(divisor); | 1323 __ idiv(divisor); |
1324 | 1324 |
1325 if (!hdiv->CheckFlag(HValue::kAllUsesTruncatingToInt32)) { | 1325 if (!hdiv->CheckFlag(HValue::kAllUsesTruncatingToInt32)) { |
1326 // Deoptimize if remainder is not 0. | 1326 // Deoptimize if remainder is not 0. |
1327 __ test(remainder, remainder); | 1327 __ test(remainder, remainder); |
1328 DeoptimizeIf(not_zero, instr, "lost precision"); | 1328 DeoptimizeIf(not_zero, instr, Deoptimizer::kLostPrecision); |
1329 } | 1329 } |
1330 } | 1330 } |
1331 | 1331 |
1332 | 1332 |
1333 void LCodeGen::DoFlooringDivByPowerOf2I(LFlooringDivByPowerOf2I* instr) { | 1333 void LCodeGen::DoFlooringDivByPowerOf2I(LFlooringDivByPowerOf2I* instr) { |
1334 Register dividend = ToRegister(instr->dividend()); | 1334 Register dividend = ToRegister(instr->dividend()); |
1335 int32_t divisor = instr->divisor(); | 1335 int32_t divisor = instr->divisor(); |
1336 DCHECK(dividend.is(ToRegister(instr->result()))); | 1336 DCHECK(dividend.is(ToRegister(instr->result()))); |
1337 | 1337 |
1338 // If the divisor is positive, things are easy: There can be no deopts and we | 1338 // If the divisor is positive, things are easy: There can be no deopts and we |
1339 // can simply do an arithmetic right shift. | 1339 // can simply do an arithmetic right shift. |
1340 if (divisor == 1) return; | 1340 if (divisor == 1) return; |
1341 int32_t shift = WhichPowerOf2Abs(divisor); | 1341 int32_t shift = WhichPowerOf2Abs(divisor); |
1342 if (divisor > 1) { | 1342 if (divisor > 1) { |
1343 __ sar(dividend, shift); | 1343 __ sar(dividend, shift); |
1344 return; | 1344 return; |
1345 } | 1345 } |
1346 | 1346 |
1347 // If the divisor is negative, we have to negate and handle edge cases. | 1347 // If the divisor is negative, we have to negate and handle edge cases. |
1348 __ neg(dividend); | 1348 __ neg(dividend); |
1349 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 1349 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
1350 DeoptimizeIf(zero, instr, "minus zero"); | 1350 DeoptimizeIf(zero, instr, Deoptimizer::kMinusZero); |
1351 } | 1351 } |
1352 | 1352 |
1353 // Dividing by -1 is basically negation, unless we overflow. | 1353 // Dividing by -1 is basically negation, unless we overflow. |
1354 if (divisor == -1) { | 1354 if (divisor == -1) { |
1355 if (instr->hydrogen()->CheckFlag(HValue::kLeftCanBeMinInt)) { | 1355 if (instr->hydrogen()->CheckFlag(HValue::kLeftCanBeMinInt)) { |
1356 DeoptimizeIf(overflow, instr, "overflow"); | 1356 DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow); |
1357 } | 1357 } |
1358 return; | 1358 return; |
1359 } | 1359 } |
1360 | 1360 |
1361 // If the negation could not overflow, simply shifting is OK. | 1361 // If the negation could not overflow, simply shifting is OK. |
1362 if (!instr->hydrogen()->CheckFlag(HValue::kLeftCanBeMinInt)) { | 1362 if (!instr->hydrogen()->CheckFlag(HValue::kLeftCanBeMinInt)) { |
1363 __ sar(dividend, shift); | 1363 __ sar(dividend, shift); |
1364 return; | 1364 return; |
1365 } | 1365 } |
1366 | 1366 |
1367 Label not_kmin_int, done; | 1367 Label not_kmin_int, done; |
1368 __ j(no_overflow, &not_kmin_int, Label::kNear); | 1368 __ j(no_overflow, &not_kmin_int, Label::kNear); |
1369 __ mov(dividend, Immediate(kMinInt / divisor)); | 1369 __ mov(dividend, Immediate(kMinInt / divisor)); |
1370 __ jmp(&done, Label::kNear); | 1370 __ jmp(&done, Label::kNear); |
1371 __ bind(&not_kmin_int); | 1371 __ bind(&not_kmin_int); |
1372 __ sar(dividend, shift); | 1372 __ sar(dividend, shift); |
1373 __ bind(&done); | 1373 __ bind(&done); |
1374 } | 1374 } |
1375 | 1375 |
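For a negative power-of-two divisor, DoFlooringDivByPowerOf2I uses floor(x / -2^k) == (-x) >> k with an arithmetic shift, which is why it negates first and why kMinInt (whose negation overflows) is special-cased to the precomputed kMinInt / divisor. A sketch of the identity (arithmetic right shift assumed; not V8 code):

#include <cassert>
#include <cstdint>
#include <limits>

int32_t FlooringDivByNegPowerOf2(int32_t dividend, int shift) {
  const int32_t kMinInt = std::numeric_limits<int32_t>::min();
  const int32_t divisor = -(1 << shift);
  if (dividend == kMinInt) return kMinInt / divisor;  // the overflow branch
  return (-dividend) >> shift;                        // neg + sar in the generated code
}

int main() {
  assert(FlooringDivByNegPowerOf2(5, 1) == -3);   // floor(5 / -2) == -3, not -2
  assert(FlooringDivByNegPowerOf2(-5, 1) == 2);   // floor(-5 / -2) == 2
  return 0;
}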
1376 | 1376 |
1377 void LCodeGen::DoFlooringDivByConstI(LFlooringDivByConstI* instr) { | 1377 void LCodeGen::DoFlooringDivByConstI(LFlooringDivByConstI* instr) { |
1378 Register dividend = ToRegister(instr->dividend()); | 1378 Register dividend = ToRegister(instr->dividend()); |
1379 int32_t divisor = instr->divisor(); | 1379 int32_t divisor = instr->divisor(); |
1380 DCHECK(ToRegister(instr->result()).is(edx)); | 1380 DCHECK(ToRegister(instr->result()).is(edx)); |
1381 | 1381 |
1382 if (divisor == 0) { | 1382 if (divisor == 0) { |
1383 DeoptimizeIf(no_condition, instr, "division by zero"); | 1383 DeoptimizeIf(no_condition, instr, Deoptimizer::kDivisionByZero); |
1384 return; | 1384 return; |
1385 } | 1385 } |
1386 | 1386 |
1387 // Check for (0 / -x) that will produce negative zero. | 1387 // Check for (0 / -x) that will produce negative zero. |
1388 HMathFloorOfDiv* hdiv = instr->hydrogen(); | 1388 HMathFloorOfDiv* hdiv = instr->hydrogen(); |
1389 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) { | 1389 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) { |
1390 __ test(dividend, dividend); | 1390 __ test(dividend, dividend); |
1391 DeoptimizeIf(zero, instr, "minus zero"); | 1391 DeoptimizeIf(zero, instr, Deoptimizer::kMinusZero); |
1392 } | 1392 } |
1393 | 1393 |
1394 // Easy case: We need no dynamic check for the dividend and the flooring | 1394 // Easy case: We need no dynamic check for the dividend and the flooring |
1395 // division is the same as the truncating division. | 1395 // division is the same as the truncating division. |
1396 if ((divisor > 0 && !hdiv->CheckFlag(HValue::kLeftCanBeNegative)) || | 1396 if ((divisor > 0 && !hdiv->CheckFlag(HValue::kLeftCanBeNegative)) || |
1397 (divisor < 0 && !hdiv->CheckFlag(HValue::kLeftCanBePositive))) { | 1397 (divisor < 0 && !hdiv->CheckFlag(HValue::kLeftCanBePositive))) { |
1398 __ TruncatingDiv(dividend, Abs(divisor)); | 1398 __ TruncatingDiv(dividend, Abs(divisor)); |
1399 if (divisor < 0) __ neg(edx); | 1399 if (divisor < 0) __ neg(edx); |
1400 return; | 1400 return; |
1401 } | 1401 } |
(...skipping 26 matching lines...) |
1428 Register result = ToRegister(instr->result()); | 1428 Register result = ToRegister(instr->result()); |
1429 DCHECK(dividend.is(eax)); | 1429 DCHECK(dividend.is(eax)); |
1430 DCHECK(remainder.is(edx)); | 1430 DCHECK(remainder.is(edx)); |
1431 DCHECK(result.is(eax)); | 1431 DCHECK(result.is(eax)); |
1432 DCHECK(!divisor.is(eax)); | 1432 DCHECK(!divisor.is(eax)); |
1433 DCHECK(!divisor.is(edx)); | 1433 DCHECK(!divisor.is(edx)); |
1434 | 1434 |
1435 // Check for x / 0. | 1435 // Check for x / 0. |
1436 if (hdiv->CheckFlag(HValue::kCanBeDivByZero)) { | 1436 if (hdiv->CheckFlag(HValue::kCanBeDivByZero)) { |
1437 __ test(divisor, divisor); | 1437 __ test(divisor, divisor); |
1438 DeoptimizeIf(zero, instr, "division by zero"); | 1438 DeoptimizeIf(zero, instr, Deoptimizer::kDivisionByZero); |
1439 } | 1439 } |
1440 | 1440 |
1441 // Check for (0 / -x) that will produce negative zero. | 1441 // Check for (0 / -x) that will produce negative zero. |
1442 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero)) { | 1442 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero)) { |
1443 Label dividend_not_zero; | 1443 Label dividend_not_zero; |
1444 __ test(dividend, dividend); | 1444 __ test(dividend, dividend); |
1445 __ j(not_zero, &dividend_not_zero, Label::kNear); | 1445 __ j(not_zero, &dividend_not_zero, Label::kNear); |
1446 __ test(divisor, divisor); | 1446 __ test(divisor, divisor); |
1447 DeoptimizeIf(sign, instr, "minus zero"); | 1447 DeoptimizeIf(sign, instr, Deoptimizer::kMinusZero); |
1448 __ bind(&dividend_not_zero); | 1448 __ bind(&dividend_not_zero); |
1449 } | 1449 } |
1450 | 1450 |
1451 // Check for (kMinInt / -1). | 1451 // Check for (kMinInt / -1). |
1452 if (hdiv->CheckFlag(HValue::kCanOverflow)) { | 1452 if (hdiv->CheckFlag(HValue::kCanOverflow)) { |
1453 Label dividend_not_min_int; | 1453 Label dividend_not_min_int; |
1454 __ cmp(dividend, kMinInt); | 1454 __ cmp(dividend, kMinInt); |
1455 __ j(not_zero, &dividend_not_min_int, Label::kNear); | 1455 __ j(not_zero, &dividend_not_min_int, Label::kNear); |
1456 __ cmp(divisor, -1); | 1456 __ cmp(divisor, -1); |
1457 DeoptimizeIf(zero, instr, "overflow"); | 1457 DeoptimizeIf(zero, instr, Deoptimizer::kOverflow); |
1458 __ bind(&dividend_not_min_int); | 1458 __ bind(&dividend_not_min_int); |
1459 } | 1459 } |
1460 | 1460 |
1461 // Sign extend to edx (= remainder). | 1461 // Sign extend to edx (= remainder). |
1462 __ cdq(); | 1462 __ cdq(); |
1463 __ idiv(divisor); | 1463 __ idiv(divisor); |
1464 | 1464 |
1465 Label done; | 1465 Label done; |
1466 __ test(remainder, remainder); | 1466 __ test(remainder, remainder); |
1467 __ j(zero, &done, Label::kNear); | 1467 __ j(zero, &done, Label::kNear); |
(...skipping 57 matching lines...) |
1525 __ imul(left, left, constant); | 1525 __ imul(left, left, constant); |
1526 } | 1526 } |
1527 } else { | 1527 } else { |
1528 if (instr->hydrogen()->representation().IsSmi()) { | 1528 if (instr->hydrogen()->representation().IsSmi()) { |
1529 __ SmiUntag(left); | 1529 __ SmiUntag(left); |
1530 } | 1530 } |
1531 __ imul(left, ToOperand(right)); | 1531 __ imul(left, ToOperand(right)); |
1532 } | 1532 } |
1533 | 1533 |
1534 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) { | 1534 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) { |
1535 DeoptimizeIf(overflow, instr, "overflow"); | 1535 DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow); |
1536 } | 1536 } |
1537 | 1537 |
1538 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 1538 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
1539 // Bail out if the result is supposed to be negative zero. | 1539 // Bail out if the result is supposed to be negative zero. |
1540 Label done; | 1540 Label done; |
1541 __ test(left, Operand(left)); | 1541 __ test(left, Operand(left)); |
1542 __ j(not_zero, &done, Label::kNear); | 1542 __ j(not_zero, &done, Label::kNear); |
1543 if (right->IsConstantOperand()) { | 1543 if (right->IsConstantOperand()) { |
1544 if (ToInteger32(LConstantOperand::cast(right)) < 0) { | 1544 if (ToInteger32(LConstantOperand::cast(right)) < 0) { |
1545 DeoptimizeIf(no_condition, instr, "minus zero"); | 1545 DeoptimizeIf(no_condition, instr, Deoptimizer::kMinusZero); |
1546 } else if (ToInteger32(LConstantOperand::cast(right)) == 0) { | 1546 } else if (ToInteger32(LConstantOperand::cast(right)) == 0) { |
1547 __ cmp(ToRegister(instr->temp()), Immediate(0)); | 1547 __ cmp(ToRegister(instr->temp()), Immediate(0)); |
1548 DeoptimizeIf(less, instr, "minus zero"); | 1548 DeoptimizeIf(less, instr, Deoptimizer::kMinusZero); |
1549 } | 1549 } |
1550 } else { | 1550 } else { |
1551 // Test the non-zero operand for negative sign. | 1551 // Test the non-zero operand for negative sign. |
1552 __ or_(ToRegister(instr->temp()), ToOperand(right)); | 1552 __ or_(ToRegister(instr->temp()), ToOperand(right)); |
1553 DeoptimizeIf(sign, instr, "minus zero"); | 1553 DeoptimizeIf(sign, instr, Deoptimizer::kMinusZero); |
1554 } | 1554 } |
1555 __ bind(&done); | 1555 __ bind(&done); |
1556 } | 1556 } |
1557 } | 1557 } |
1558 | 1558 |
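The minus-zero handling in DoMulI covers JavaScript's requirement that a product be -0 when one operand is 0 and the other is negative: an integer product of 0 only needs a deopt if the (pre-multiplication) left value or the right value was negative, which is what the or-and-test-sign sequence checks. A sketch of the condition (illustrative):

#include <cassert>
#include <cstdint>

// `left_before` is the left operand before the multiply clobbered it
// (the generated code keeps it in a temp register for exactly this check).
bool MulNeedsMinusZeroDeopt(int32_t left_before, int32_t right, int32_t product) {
  return product == 0 && (left_before < 0 || right < 0);  // or_ + sign test
}

int main() {
  assert(MulNeedsMinusZeroDeopt(0, -3, 0));   // 0 * -3 is -0 in JavaScript
  assert(MulNeedsMinusZeroDeopt(-2, 0, 0));   // -2 * 0 is -0 as well
  assert(!MulNeedsMinusZeroDeopt(0, 3, 0));   // +0, no deopt needed
  return 0;
}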
1559 | 1559 |
1560 void LCodeGen::DoBitI(LBitI* instr) { | 1560 void LCodeGen::DoBitI(LBitI* instr) { |
1561 LOperand* left = instr->left(); | 1561 LOperand* left = instr->left(); |
1562 LOperand* right = instr->right(); | 1562 LOperand* right = instr->right(); |
1563 DCHECK(left->Equals(instr->result())); | 1563 DCHECK(left->Equals(instr->result())); |
(...skipping 52 matching lines...) |
1616 case Token::ROR: | 1616 case Token::ROR: |
1617 __ ror_cl(ToRegister(left)); | 1617 __ ror_cl(ToRegister(left)); |
1618 break; | 1618 break; |
1619 case Token::SAR: | 1619 case Token::SAR: |
1620 __ sar_cl(ToRegister(left)); | 1620 __ sar_cl(ToRegister(left)); |
1621 break; | 1621 break; |
1622 case Token::SHR: | 1622 case Token::SHR: |
1623 __ shr_cl(ToRegister(left)); | 1623 __ shr_cl(ToRegister(left)); |
1624 if (instr->can_deopt()) { | 1624 if (instr->can_deopt()) { |
1625 __ test(ToRegister(left), ToRegister(left)); | 1625 __ test(ToRegister(left), ToRegister(left)); |
1626 DeoptimizeIf(sign, instr, "negative value"); | 1626 DeoptimizeIf(sign, instr, Deoptimizer::kNegativeValue); |
1627 } | 1627 } |
1628 break; | 1628 break; |
1629 case Token::SHL: | 1629 case Token::SHL: |
1630 __ shl_cl(ToRegister(left)); | 1630 __ shl_cl(ToRegister(left)); |
1631 break; | 1631 break; |
1632 default: | 1632 default: |
1633 UNREACHABLE(); | 1633 UNREACHABLE(); |
1634 break; | 1634 break; |
1635 } | 1635 } |
1636 } else { | 1636 } else { |
1637 int value = ToInteger32(LConstantOperand::cast(right)); | 1637 int value = ToInteger32(LConstantOperand::cast(right)); |
1638 uint8_t shift_count = static_cast<uint8_t>(value & 0x1F); | 1638 uint8_t shift_count = static_cast<uint8_t>(value & 0x1F); |
1639 switch (instr->op()) { | 1639 switch (instr->op()) { |
1640 case Token::ROR: | 1640 case Token::ROR: |
1641 if (shift_count == 0 && instr->can_deopt()) { | 1641 if (shift_count == 0 && instr->can_deopt()) { |
1642 __ test(ToRegister(left), ToRegister(left)); | 1642 __ test(ToRegister(left), ToRegister(left)); |
1643 DeoptimizeIf(sign, instr, "negative value"); | 1643 DeoptimizeIf(sign, instr, Deoptimizer::kNegativeValue); |
1644 } else { | 1644 } else { |
1645 __ ror(ToRegister(left), shift_count); | 1645 __ ror(ToRegister(left), shift_count); |
1646 } | 1646 } |
1647 break; | 1647 break; |
1648 case Token::SAR: | 1648 case Token::SAR: |
1649 if (shift_count != 0) { | 1649 if (shift_count != 0) { |
1650 __ sar(ToRegister(left), shift_count); | 1650 __ sar(ToRegister(left), shift_count); |
1651 } | 1651 } |
1652 break; | 1652 break; |
1653 case Token::SHR: | 1653 case Token::SHR: |
1654 if (shift_count != 0) { | 1654 if (shift_count != 0) { |
1655 __ shr(ToRegister(left), shift_count); | 1655 __ shr(ToRegister(left), shift_count); |
1656 } else if (instr->can_deopt()) { | 1656 } else if (instr->can_deopt()) { |
1657 __ test(ToRegister(left), ToRegister(left)); | 1657 __ test(ToRegister(left), ToRegister(left)); |
1658 DeoptimizeIf(sign, instr, "negative value"); | 1658 DeoptimizeIf(sign, instr, Deoptimizer::kNegativeValue); |
1659 } | 1659 } |
1660 break; | 1660 break; |
1661 case Token::SHL: | 1661 case Token::SHL: |
1662 if (shift_count != 0) { | 1662 if (shift_count != 0) { |
1663 if (instr->hydrogen_value()->representation().IsSmi() && | 1663 if (instr->hydrogen_value()->representation().IsSmi() && |
1664 instr->can_deopt()) { | 1664 instr->can_deopt()) { |
1665 if (shift_count != 1) { | 1665 if (shift_count != 1) { |
1666 __ shl(ToRegister(left), shift_count - 1); | 1666 __ shl(ToRegister(left), shift_count - 1); |
1667 } | 1667 } |
1668 __ SmiTag(ToRegister(left)); | 1668 __ SmiTag(ToRegister(left)); |
1669 DeoptimizeIf(overflow, instr, "overflow"); | 1669 DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow); |
1670 } else { | 1670 } else { |
1671 __ shl(ToRegister(left), shift_count); | 1671 __ shl(ToRegister(left), shift_count); |
1672 } | 1672 } |
1673 } | 1673 } |
1674 break; | 1674 break; |
1675 default: | 1675 default: |
1676 UNREACHABLE(); | 1676 UNREACHABLE(); |
1677 break; | 1677 break; |
1678 } | 1678 } |
1679 } | 1679 } |
1680 } | 1680 } |
1681 | 1681 |
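The kNegativeValue deopts in the Token::SHR cases come from JavaScript's x >>> s producing an unsigned 32-bit result: when the effective shift count is 0, a negative int32 input maps to a value above INT32_MAX that the untagged int32 (or smi) result representation cannot hold, so the code tests the sign bit and bails out. Sketch of the representability check (illustrative):

#include <cassert>
#include <cstdint>

bool ShrResultFitsInInt32(int32_t value, uint32_t shift_count) {
  uint32_t result = static_cast<uint32_t>(value) >> (shift_count & 0x1F);  // JS >>> semantics
  return result <= 0x7FFFFFFFu;  // otherwise the "negative value" deopt fires
}

int main() {
  assert(ShrResultFitsInInt32(-1, 1));    // 0x7FFFFFFF still fits
  assert(!ShrResultFitsInInt32(-1, 0));   // 0xFFFFFFFF does not -> deoptimize
  return 0;
}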
1682 | 1682 |
1683 void LCodeGen::DoSubI(LSubI* instr) { | 1683 void LCodeGen::DoSubI(LSubI* instr) { |
1684 LOperand* left = instr->left(); | 1684 LOperand* left = instr->left(); |
1685 LOperand* right = instr->right(); | 1685 LOperand* right = instr->right(); |
1686 DCHECK(left->Equals(instr->result())); | 1686 DCHECK(left->Equals(instr->result())); |
1687 | 1687 |
1688 if (right->IsConstantOperand()) { | 1688 if (right->IsConstantOperand()) { |
1689 __ sub(ToOperand(left), | 1689 __ sub(ToOperand(left), |
1690 ToImmediate(right, instr->hydrogen()->representation())); | 1690 ToImmediate(right, instr->hydrogen()->representation())); |
1691 } else { | 1691 } else { |
1692 __ sub(ToRegister(left), ToOperand(right)); | 1692 __ sub(ToRegister(left), ToOperand(right)); |
1693 } | 1693 } |
1694 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) { | 1694 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) { |
1695 DeoptimizeIf(overflow, instr, "overflow"); | 1695 DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow); |
1696 } | 1696 } |
1697 } | 1697 } |
1698 | 1698 |
1699 | 1699 |
1700 void LCodeGen::DoConstantI(LConstantI* instr) { | 1700 void LCodeGen::DoConstantI(LConstantI* instr) { |
1701 __ Move(ToRegister(instr->result()), Immediate(instr->value())); | 1701 __ Move(ToRegister(instr->result()), Immediate(instr->value())); |
1702 } | 1702 } |
1703 | 1703 |
1704 | 1704 |
1705 void LCodeGen::DoConstantS(LConstantS* instr) { | 1705 void LCodeGen::DoConstantS(LConstantS* instr) { |
(...skipping 62 matching lines...) |
1768 void LCodeGen::DoDateField(LDateField* instr) { | 1768 void LCodeGen::DoDateField(LDateField* instr) { |
1769 Register object = ToRegister(instr->date()); | 1769 Register object = ToRegister(instr->date()); |
1770 Register result = ToRegister(instr->result()); | 1770 Register result = ToRegister(instr->result()); |
1771 Register scratch = ToRegister(instr->temp()); | 1771 Register scratch = ToRegister(instr->temp()); |
1772 Smi* index = instr->index(); | 1772 Smi* index = instr->index(); |
1773 Label runtime, done; | 1773 Label runtime, done; |
1774 DCHECK(object.is(result)); | 1774 DCHECK(object.is(result)); |
1775 DCHECK(object.is(eax)); | 1775 DCHECK(object.is(eax)); |
1776 | 1776 |
1777 __ test(object, Immediate(kSmiTagMask)); | 1777 __ test(object, Immediate(kSmiTagMask)); |
1778 DeoptimizeIf(zero, instr, "Smi"); | 1778 DeoptimizeIf(zero, instr, Deoptimizer::kSmi); |
1779 __ CmpObjectType(object, JS_DATE_TYPE, scratch); | 1779 __ CmpObjectType(object, JS_DATE_TYPE, scratch); |
1780 DeoptimizeIf(not_equal, instr, "not a date object"); | 1780 DeoptimizeIf(not_equal, instr, Deoptimizer::kNotADateObject); |
1781 | 1781 |
1782 if (index->value() == 0) { | 1782 if (index->value() == 0) { |
1783 __ mov(result, FieldOperand(object, JSDate::kValueOffset)); | 1783 __ mov(result, FieldOperand(object, JSDate::kValueOffset)); |
1784 } else { | 1784 } else { |
1785 if (index->value() < JSDate::kFirstUncachedField) { | 1785 if (index->value() < JSDate::kFirstUncachedField) { |
1786 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate()); | 1786 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate()); |
1787 __ mov(scratch, Operand::StaticVariable(stamp)); | 1787 __ mov(scratch, Operand::StaticVariable(stamp)); |
1788 __ cmp(scratch, FieldOperand(object, JSDate::kCacheStampOffset)); | 1788 __ cmp(scratch, FieldOperand(object, JSDate::kCacheStampOffset)); |
1789 __ j(not_equal, &runtime, Label::kNear); | 1789 __ j(not_equal, &runtime, Label::kNear); |
1790 __ mov(result, FieldOperand(object, JSDate::kValueOffset + | 1790 __ mov(result, FieldOperand(object, JSDate::kValueOffset + |
(...skipping 109 matching lines...) |
1900 __ lea(ToRegister(instr->result()), address); | 1900 __ lea(ToRegister(instr->result()), address); |
1901 } | 1901 } |
1902 } else { | 1902 } else { |
1903 if (right->IsConstantOperand()) { | 1903 if (right->IsConstantOperand()) { |
1904 __ add(ToOperand(left), | 1904 __ add(ToOperand(left), |
1905 ToImmediate(right, instr->hydrogen()->representation())); | 1905 ToImmediate(right, instr->hydrogen()->representation())); |
1906 } else { | 1906 } else { |
1907 __ add(ToRegister(left), ToOperand(right)); | 1907 __ add(ToRegister(left), ToOperand(right)); |
1908 } | 1908 } |
1909 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) { | 1909 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) { |
1910 DeoptimizeIf(overflow, instr, "overflow"); | 1910 DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow); |
1911 } | 1911 } |
1912 } | 1912 } |
1913 } | 1913 } |
1914 | 1914 |
1915 | 1915 |
1916 void LCodeGen::DoMathMinMax(LMathMinMax* instr) { | 1916 void LCodeGen::DoMathMinMax(LMathMinMax* instr) { |
1917 LOperand* left = instr->left(); | 1917 LOperand* left = instr->left(); |
1918 LOperand* right = instr->right(); | 1918 LOperand* right = instr->right(); |
1919 DCHECK(left->Equals(instr->result())); | 1919 DCHECK(left->Equals(instr->result())); |
1920 HMathMinMax::Operation operation = instr->hydrogen()->operation(); | 1920 HMathMinMax::Operation operation = instr->hydrogen()->operation(); |
(...skipping 227 matching lines...) |
2148 } | 2148 } |
2149 | 2149 |
2150 if (expected.Contains(ToBooleanStub::SMI)) { | 2150 if (expected.Contains(ToBooleanStub::SMI)) { |
2151 // Smis: 0 -> false, all other -> true. | 2151 // Smis: 0 -> false, all other -> true. |
2152 __ test(reg, Operand(reg)); | 2152 __ test(reg, Operand(reg)); |
2153 __ j(equal, instr->FalseLabel(chunk_)); | 2153 __ j(equal, instr->FalseLabel(chunk_)); |
2154 __ JumpIfSmi(reg, instr->TrueLabel(chunk_)); | 2154 __ JumpIfSmi(reg, instr->TrueLabel(chunk_)); |
2155 } else if (expected.NeedsMap()) { | 2155 } else if (expected.NeedsMap()) { |
2156 // If we need a map later and have a Smi -> deopt. | 2156 // If we need a map later and have a Smi -> deopt. |
2157 __ test(reg, Immediate(kSmiTagMask)); | 2157 __ test(reg, Immediate(kSmiTagMask)); |
2158 DeoptimizeIf(zero, instr, "Smi"); | 2158 DeoptimizeIf(zero, instr, Deoptimizer::kSmi); |
2159 } | 2159 } |
2160 | 2160 |
2161 Register map = no_reg; // Keep the compiler happy. | 2161 Register map = no_reg; // Keep the compiler happy. |
2162 if (expected.NeedsMap()) { | 2162 if (expected.NeedsMap()) { |
2163 map = ToRegister(instr->temp()); | 2163 map = ToRegister(instr->temp()); |
2164 DCHECK(!map.is(reg)); | 2164 DCHECK(!map.is(reg)); |
2165 __ mov(map, FieldOperand(reg, HeapObject::kMapOffset)); | 2165 __ mov(map, FieldOperand(reg, HeapObject::kMapOffset)); |
2166 | 2166 |
2167 if (expected.CanBeUndetectable()) { | 2167 if (expected.CanBeUndetectable()) { |
2168 // Undetectable -> false. | 2168 // Undetectable -> false. |
(...skipping 36 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2205 __ xorps(xmm_scratch, xmm_scratch); | 2205 __ xorps(xmm_scratch, xmm_scratch); |
2206 __ ucomisd(xmm_scratch, FieldOperand(reg, HeapNumber::kValueOffset)); | 2206 __ ucomisd(xmm_scratch, FieldOperand(reg, HeapNumber::kValueOffset)); |
2207 __ j(zero, instr->FalseLabel(chunk_)); | 2207 __ j(zero, instr->FalseLabel(chunk_)); |
2208 __ jmp(instr->TrueLabel(chunk_)); | 2208 __ jmp(instr->TrueLabel(chunk_)); |
2209 __ bind(&not_heap_number); | 2209 __ bind(&not_heap_number); |
2210 } | 2210 } |
2211 | 2211 |
2212 if (!expected.IsGeneric()) { | 2212 if (!expected.IsGeneric()) { |
2213 // We've seen something for the first time -> deopt. | 2213 // We've seen something for the first time -> deopt. |
2214 // This can only happen if we are not generic already. | 2214 // This can only happen if we are not generic already. |
2215 DeoptimizeIf(no_condition, instr, "unexpected object"); | 2215 DeoptimizeIf(no_condition, instr, Deoptimizer::kUnexpectedObject); |
2216 } | 2216 } |
2217 } | 2217 } |
2218 } | 2218 } |
2219 } | 2219 } |
2220 | 2220 |
2221 | 2221 |
2222 void LCodeGen::EmitGoto(int block) { | 2222 void LCodeGen::EmitGoto(int block) { |
2223 if (!IsNextEmittedBlock(block)) { | 2223 if (!IsNextEmittedBlock(block)) { |
2224 __ jmp(chunk_->GetAssemblyLabel(LookupDestination(block))); | 2224 __ jmp(chunk_->GetAssemblyLabel(LookupDestination(block))); |
2225 } | 2225 } |
(...skipping 611 matching lines...) |
2837 info()->AddNoFrameRange(no_frame_start, masm_->pc_offset()); | 2837 info()->AddNoFrameRange(no_frame_start, masm_->pc_offset()); |
2838 } | 2838 } |
2839 } | 2839 } |
2840 | 2840 |
2841 | 2841 |
2842 void LCodeGen::DoLoadGlobalCell(LLoadGlobalCell* instr) { | 2842 void LCodeGen::DoLoadGlobalCell(LLoadGlobalCell* instr) { |
2843 Register result = ToRegister(instr->result()); | 2843 Register result = ToRegister(instr->result()); |
2844 __ mov(result, Operand::ForCell(instr->hydrogen()->cell().handle())); | 2844 __ mov(result, Operand::ForCell(instr->hydrogen()->cell().handle())); |
2845 if (instr->hydrogen()->RequiresHoleCheck()) { | 2845 if (instr->hydrogen()->RequiresHoleCheck()) { |
2846 __ cmp(result, factory()->the_hole_value()); | 2846 __ cmp(result, factory()->the_hole_value()); |
2847 DeoptimizeIf(equal, instr, "hole"); | 2847 DeoptimizeIf(equal, instr, Deoptimizer::kHole); |
2848 } | 2848 } |
2849 } | 2849 } |
2850 | 2850 |
2851 | 2851 |
2852 template <class T> | 2852 template <class T> |
2853 void LCodeGen::EmitVectorLoadICRegisters(T* instr) { | 2853 void LCodeGen::EmitVectorLoadICRegisters(T* instr) { |
2854 DCHECK(FLAG_vector_ics); | 2854 DCHECK(FLAG_vector_ics); |
2855 Register vector_register = ToRegister(instr->temp_vector()); | 2855 Register vector_register = ToRegister(instr->temp_vector()); |
2856 Register slot_register = VectorLoadICDescriptor::SlotRegister(); | 2856 Register slot_register = VectorLoadICDescriptor::SlotRegister(); |
2857 DCHECK(vector_register.is(VectorLoadICDescriptor::VectorRegister())); | 2857 DCHECK(vector_register.is(VectorLoadICDescriptor::VectorRegister())); |
(...skipping 28 matching lines...) |
2886 void LCodeGen::DoStoreGlobalCell(LStoreGlobalCell* instr) { | 2886 void LCodeGen::DoStoreGlobalCell(LStoreGlobalCell* instr) { |
2887 Register value = ToRegister(instr->value()); | 2887 Register value = ToRegister(instr->value()); |
2888 Handle<PropertyCell> cell_handle = instr->hydrogen()->cell().handle(); | 2888 Handle<PropertyCell> cell_handle = instr->hydrogen()->cell().handle(); |
2889 | 2889 |
2890 // If the cell we are storing to contains the hole it could have | 2890 // If the cell we are storing to contains the hole it could have |
2891 // been deleted from the property dictionary. In that case, we need | 2891 // been deleted from the property dictionary. In that case, we need |
2892 // to update the property details in the property dictionary to mark | 2892 // to update the property details in the property dictionary to mark |
2893 // it as no longer deleted. We deoptimize in that case. | 2893 // it as no longer deleted. We deoptimize in that case. |
2894 if (instr->hydrogen()->RequiresHoleCheck()) { | 2894 if (instr->hydrogen()->RequiresHoleCheck()) { |
2895 __ cmp(Operand::ForCell(cell_handle), factory()->the_hole_value()); | 2895 __ cmp(Operand::ForCell(cell_handle), factory()->the_hole_value()); |
2896 DeoptimizeIf(equal, instr, "hole"); | 2896 DeoptimizeIf(equal, instr, Deoptimizer::kHole); |
2897 } | 2897 } |
2898 | 2898 |
2899 // Store the value. | 2899 // Store the value. |
2900 __ mov(Operand::ForCell(cell_handle), value); | 2900 __ mov(Operand::ForCell(cell_handle), value); |
2901 // Cells are always rescanned, so no write barrier here. | 2901 // Cells are always rescanned, so no write barrier here. |
2902 } | 2902 } |
2903 | 2903 |
2904 | 2904 |
2905 void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) { | 2905 void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) { |
2906 Register context = ToRegister(instr->context()); | 2906 Register context = ToRegister(instr->context()); |
2907 Register result = ToRegister(instr->result()); | 2907 Register result = ToRegister(instr->result()); |
2908 __ mov(result, ContextOperand(context, instr->slot_index())); | 2908 __ mov(result, ContextOperand(context, instr->slot_index())); |
2909 | 2909 |
2910 if (instr->hydrogen()->RequiresHoleCheck()) { | 2910 if (instr->hydrogen()->RequiresHoleCheck()) { |
2911 __ cmp(result, factory()->the_hole_value()); | 2911 __ cmp(result, factory()->the_hole_value()); |
2912 if (instr->hydrogen()->DeoptimizesOnHole()) { | 2912 if (instr->hydrogen()->DeoptimizesOnHole()) { |
2913 DeoptimizeIf(equal, instr, "hole"); | 2913 DeoptimizeIf(equal, instr, Deoptimizer::kHole); |
2914 } else { | 2914 } else { |
2915 Label is_not_hole; | 2915 Label is_not_hole; |
2916 __ j(not_equal, &is_not_hole, Label::kNear); | 2916 __ j(not_equal, &is_not_hole, Label::kNear); |
2917 __ mov(result, factory()->undefined_value()); | 2917 __ mov(result, factory()->undefined_value()); |
2918 __ bind(&is_not_hole); | 2918 __ bind(&is_not_hole); |
2919 } | 2919 } |
2920 } | 2920 } |
2921 } | 2921 } |
2922 | 2922 |
2923 | 2923 |
2924 void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) { | 2924 void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) { |
2925 Register context = ToRegister(instr->context()); | 2925 Register context = ToRegister(instr->context()); |
2926 Register value = ToRegister(instr->value()); | 2926 Register value = ToRegister(instr->value()); |
2927 | 2927 |
2928 Label skip_assignment; | 2928 Label skip_assignment; |
2929 | 2929 |
2930 Operand target = ContextOperand(context, instr->slot_index()); | 2930 Operand target = ContextOperand(context, instr->slot_index()); |
2931 if (instr->hydrogen()->RequiresHoleCheck()) { | 2931 if (instr->hydrogen()->RequiresHoleCheck()) { |
2932 __ cmp(target, factory()->the_hole_value()); | 2932 __ cmp(target, factory()->the_hole_value()); |
2933 if (instr->hydrogen()->DeoptimizesOnHole()) { | 2933 if (instr->hydrogen()->DeoptimizesOnHole()) { |
2934 DeoptimizeIf(equal, instr, "hole"); | 2934 DeoptimizeIf(equal, instr, Deoptimizer::kHole); |
2935 } else { | 2935 } else { |
2936 __ j(not_equal, &skip_assignment, Label::kNear); | 2936 __ j(not_equal, &skip_assignment, Label::kNear); |
2937 } | 2937 } |
2938 } | 2938 } |
2939 | 2939 |
2940 __ mov(target, value); | 2940 __ mov(target, value); |
2941 if (instr->hydrogen()->NeedsWriteBarrier()) { | 2941 if (instr->hydrogen()->NeedsWriteBarrier()) { |
2942 SmiCheck check_needed = | 2942 SmiCheck check_needed = |
2943 instr->hydrogen()->value()->type().IsHeapObject() | 2943 instr->hydrogen()->value()->type().IsHeapObject() |
2944 ? OMIT_SMI_CHECK : INLINE_SMI_CHECK; | 2944 ? OMIT_SMI_CHECK : INLINE_SMI_CHECK; |
(...skipping 79 matching lines...) |
3024 Register function = ToRegister(instr->function()); | 3024 Register function = ToRegister(instr->function()); |
3025 Register temp = ToRegister(instr->temp()); | 3025 Register temp = ToRegister(instr->temp()); |
3026 Register result = ToRegister(instr->result()); | 3026 Register result = ToRegister(instr->result()); |
3027 | 3027 |
3028 // Get the prototype or initial map from the function. | 3028 // Get the prototype or initial map from the function. |
3029 __ mov(result, | 3029 __ mov(result, |
3030 FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset)); | 3030 FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset)); |
3031 | 3031 |
3032 // Check that the function has a prototype or an initial map. | 3032 // Check that the function has a prototype or an initial map. |
3033 __ cmp(Operand(result), Immediate(factory()->the_hole_value())); | 3033 __ cmp(Operand(result), Immediate(factory()->the_hole_value())); |
3034 DeoptimizeIf(equal, instr, "hole"); | 3034 DeoptimizeIf(equal, instr, Deoptimizer::kHole); |
3035 | 3035 |
3036 // If the function does not have an initial map, we're done. | 3036 // If the function does not have an initial map, we're done. |
3037 Label done; | 3037 Label done; |
3038 __ CmpObjectType(result, MAP_TYPE, temp); | 3038 __ CmpObjectType(result, MAP_TYPE, temp); |
3039 __ j(not_equal, &done, Label::kNear); | 3039 __ j(not_equal, &done, Label::kNear); |
3040 | 3040 |
3041 // Get the prototype from the initial map. | 3041 // Get the prototype from the initial map. |
3042 __ mov(result, FieldOperand(result, Map::kPrototypeOffset)); | 3042 __ mov(result, FieldOperand(result, Map::kPrototypeOffset)); |
3043 | 3043 |
3044 // All done. | 3044 // All done. |
(...skipping 72 matching lines...) |
3117 break; | 3117 break; |
3118 case EXTERNAL_INT32_ELEMENTS: | 3118 case EXTERNAL_INT32_ELEMENTS: |
3119 case INT32_ELEMENTS: | 3119 case INT32_ELEMENTS: |
3120 __ mov(result, operand); | 3120 __ mov(result, operand); |
3121 break; | 3121 break; |
3122 case EXTERNAL_UINT32_ELEMENTS: | 3122 case EXTERNAL_UINT32_ELEMENTS: |
3123 case UINT32_ELEMENTS: | 3123 case UINT32_ELEMENTS: |
3124 __ mov(result, operand); | 3124 __ mov(result, operand); |
3125 if (!instr->hydrogen()->CheckFlag(HInstruction::kUint32)) { | 3125 if (!instr->hydrogen()->CheckFlag(HInstruction::kUint32)) { |
3126 __ test(result, Operand(result)); | 3126 __ test(result, Operand(result)); |
3127 DeoptimizeIf(negative, instr, "negative value"); | 3127 DeoptimizeIf(negative, instr, Deoptimizer::kNegativeValue); |
3128 } | 3128 } |
3129 break; | 3129 break; |
3130 case EXTERNAL_FLOAT32_ELEMENTS: | 3130 case EXTERNAL_FLOAT32_ELEMENTS: |
3131 case EXTERNAL_FLOAT64_ELEMENTS: | 3131 case EXTERNAL_FLOAT64_ELEMENTS: |
3132 case FLOAT32_ELEMENTS: | 3132 case FLOAT32_ELEMENTS: |
3133 case FLOAT64_ELEMENTS: | 3133 case FLOAT64_ELEMENTS: |
3134 case FAST_SMI_ELEMENTS: | 3134 case FAST_SMI_ELEMENTS: |
3135 case FAST_ELEMENTS: | 3135 case FAST_ELEMENTS: |
3136 case FAST_DOUBLE_ELEMENTS: | 3136 case FAST_DOUBLE_ELEMENTS: |
3137 case FAST_HOLEY_SMI_ELEMENTS: | 3137 case FAST_HOLEY_SMI_ELEMENTS: |
3138 case FAST_HOLEY_ELEMENTS: | 3138 case FAST_HOLEY_ELEMENTS: |
3139 case FAST_HOLEY_DOUBLE_ELEMENTS: | 3139 case FAST_HOLEY_DOUBLE_ELEMENTS: |
3140 case DICTIONARY_ELEMENTS: | 3140 case DICTIONARY_ELEMENTS: |
3141 case SLOPPY_ARGUMENTS_ELEMENTS: | 3141 case SLOPPY_ARGUMENTS_ELEMENTS: |
3142 UNREACHABLE(); | 3142 UNREACHABLE(); |
3143 break; | 3143 break; |
3144 } | 3144 } |
3145 } | 3145 } |
3146 } | 3146 } |
3147 | 3147 |
3148 | 3148 |
3149 void LCodeGen::DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr) { | 3149 void LCodeGen::DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr) { |
3150 if (instr->hydrogen()->RequiresHoleCheck()) { | 3150 if (instr->hydrogen()->RequiresHoleCheck()) { |
3151 Operand hole_check_operand = BuildFastArrayOperand( | 3151 Operand hole_check_operand = BuildFastArrayOperand( |
3152 instr->elements(), instr->key(), | 3152 instr->elements(), instr->key(), |
3153 instr->hydrogen()->key()->representation(), | 3153 instr->hydrogen()->key()->representation(), |
3154 FAST_DOUBLE_ELEMENTS, | 3154 FAST_DOUBLE_ELEMENTS, |
3155 instr->base_offset() + sizeof(kHoleNanLower32)); | 3155 instr->base_offset() + sizeof(kHoleNanLower32)); |
3156 __ cmp(hole_check_operand, Immediate(kHoleNanUpper32)); | 3156 __ cmp(hole_check_operand, Immediate(kHoleNanUpper32)); |
3157 DeoptimizeIf(equal, instr, "hole"); | 3157 DeoptimizeIf(equal, instr, Deoptimizer::kHole); |
3158 } | 3158 } |
3159 | 3159 |
3160 Operand double_load_operand = BuildFastArrayOperand( | 3160 Operand double_load_operand = BuildFastArrayOperand( |
3161 instr->elements(), | 3161 instr->elements(), |
3162 instr->key(), | 3162 instr->key(), |
3163 instr->hydrogen()->key()->representation(), | 3163 instr->hydrogen()->key()->representation(), |
3164 FAST_DOUBLE_ELEMENTS, | 3164 FAST_DOUBLE_ELEMENTS, |
3165 instr->base_offset()); | 3165 instr->base_offset()); |
3166 XMMRegister result = ToDoubleRegister(instr->result()); | 3166 XMMRegister result = ToDoubleRegister(instr->result()); |
3167 __ movsd(result, double_load_operand); | 3167 __ movsd(result, double_load_operand); |
3168 } | 3168 } |
3169 | 3169 |
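The hole check above works because the hole in a holey double backing store is stored as one specific NaN bit pattern, so comparing just the upper 32 bits of the slot against kHoleNanUpper32 is sufficient. A standalone sketch of the idea; the constants below are placeholders, not V8's actual kHoleNan values:

#include <cassert>
#include <cstdint>
#include <cstring>

// Placeholder hole signature (any quiet-NaN pattern serves for the illustration).
const uint32_t kHoleNanUpper32 = 0x7FFFFFFF;
const uint32_t kHoleNanLower32 = 0xFFF7FFFF;

bool IsTheHole(double slot) {
  uint64_t bits;
  std::memcpy(&bits, &slot, sizeof(bits));
  return static_cast<uint32_t>(bits >> 32) == kHoleNanUpper32;  // upper-word compare only
}

int main() {
  uint64_t hole_bits =
      (static_cast<uint64_t>(kHoleNanUpper32) << 32) | kHoleNanLower32;
  double hole;
  std::memcpy(&hole, &hole_bits, sizeof(hole));
  assert(IsTheHole(hole));
  assert(!IsTheHole(1.5));
  return 0;
}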
3170 | 3170 |
3171 void LCodeGen::DoLoadKeyedFixedArray(LLoadKeyed* instr) { | 3171 void LCodeGen::DoLoadKeyedFixedArray(LLoadKeyed* instr) { |
3172 Register result = ToRegister(instr->result()); | 3172 Register result = ToRegister(instr->result()); |
3173 | 3173 |
3174 // Load the result. | 3174 // Load the result. |
3175 __ mov(result, | 3175 __ mov(result, |
3176 BuildFastArrayOperand(instr->elements(), instr->key(), | 3176 BuildFastArrayOperand(instr->elements(), instr->key(), |
3177 instr->hydrogen()->key()->representation(), | 3177 instr->hydrogen()->key()->representation(), |
3178 FAST_ELEMENTS, instr->base_offset())); | 3178 FAST_ELEMENTS, instr->base_offset())); |
3179 | 3179 |
3180 // Check for the hole value. | 3180 // Check for the hole value. |
3181 if (instr->hydrogen()->RequiresHoleCheck()) { | 3181 if (instr->hydrogen()->RequiresHoleCheck()) { |
3182 if (IsFastSmiElementsKind(instr->hydrogen()->elements_kind())) { | 3182 if (IsFastSmiElementsKind(instr->hydrogen()->elements_kind())) { |
3183 __ test(result, Immediate(kSmiTagMask)); | 3183 __ test(result, Immediate(kSmiTagMask)); |
3184 DeoptimizeIf(not_equal, instr, "not a Smi"); | 3184 DeoptimizeIf(not_equal, instr, Deoptimizer::kNotASmi); |
3185 } else { | 3185 } else { |
3186 __ cmp(result, factory()->the_hole_value()); | 3186 __ cmp(result, factory()->the_hole_value()); |
3187 DeoptimizeIf(equal, instr, "hole"); | 3187 DeoptimizeIf(equal, instr, Deoptimizer::kHole); |
3188 } | 3188 } |
3189 } | 3189 } |
3190 } | 3190 } |
3191 | 3191 |
3192 | 3192 |
3193 void LCodeGen::DoLoadKeyed(LLoadKeyed* instr) { | 3193 void LCodeGen::DoLoadKeyed(LLoadKeyed* instr) { |
3194 if (instr->is_typed_elements()) { | 3194 if (instr->is_typed_elements()) { |
3195 DoLoadKeyedExternalArray(instr); | 3195 DoLoadKeyedExternalArray(instr); |
3196 } else if (instr->hydrogen()->representation().IsDouble()) { | 3196 } else if (instr->hydrogen()->representation().IsDouble()) { |
3197 DoLoadKeyedFixedDoubleArray(instr); | 3197 DoLoadKeyedFixedDoubleArray(instr); |
(...skipping 126 matching lines...)
3324 } | 3324 } |
3325 | 3325 |
3326 // Normal function. Replace undefined or null with global receiver. | 3326 // Normal function. Replace undefined or null with global receiver. |
3327 __ cmp(receiver, factory()->null_value()); | 3327 __ cmp(receiver, factory()->null_value()); |
3328 __ j(equal, &global_object, Label::kNear); | 3328 __ j(equal, &global_object, Label::kNear); |
3329 __ cmp(receiver, factory()->undefined_value()); | 3329 __ cmp(receiver, factory()->undefined_value()); |
3330 __ j(equal, &global_object, Label::kNear); | 3330 __ j(equal, &global_object, Label::kNear); |
3331 | 3331 |
3332 // The receiver should be a JS object. | 3332 // The receiver should be a JS object. |
3333 __ test(receiver, Immediate(kSmiTagMask)); | 3333 __ test(receiver, Immediate(kSmiTagMask)); |
3334 DeoptimizeIf(equal, instr, "Smi"); | 3334 DeoptimizeIf(equal, instr, Deoptimizer::kSmi); |
3335 __ CmpObjectType(receiver, FIRST_SPEC_OBJECT_TYPE, scratch); | 3335 __ CmpObjectType(receiver, FIRST_SPEC_OBJECT_TYPE, scratch); |
3336 DeoptimizeIf(below, instr, "not a JavaScript object"); | 3336 DeoptimizeIf(below, instr, Deoptimizer::kNotAJavaScriptObject); |
3337 | 3337 |
3338 __ jmp(&receiver_ok, Label::kNear); | 3338 __ jmp(&receiver_ok, Label::kNear); |
3339 __ bind(&global_object); | 3339 __ bind(&global_object); |
3340 __ mov(receiver, FieldOperand(function, JSFunction::kContextOffset)); | 3340 __ mov(receiver, FieldOperand(function, JSFunction::kContextOffset)); |
3341 const int global_offset = Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX); | 3341 const int global_offset = Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX); |
3342 __ mov(receiver, Operand(receiver, global_offset)); | 3342 __ mov(receiver, Operand(receiver, global_offset)); |
3343 const int proxy_offset = GlobalObject::kGlobalProxyOffset; | 3343 const int proxy_offset = GlobalObject::kGlobalProxyOffset; |
3344 __ mov(receiver, FieldOperand(receiver, proxy_offset)); | 3344 __ mov(receiver, FieldOperand(receiver, proxy_offset)); |
3345 __ bind(&receiver_ok); | 3345 __ bind(&receiver_ok); |
3346 } | 3346 } |
3347 | 3347 |
3348 | 3348 |
3349 void LCodeGen::DoApplyArguments(LApplyArguments* instr) { | 3349 void LCodeGen::DoApplyArguments(LApplyArguments* instr) { |
3350 Register receiver = ToRegister(instr->receiver()); | 3350 Register receiver = ToRegister(instr->receiver()); |
3351 Register function = ToRegister(instr->function()); | 3351 Register function = ToRegister(instr->function()); |
3352 Register length = ToRegister(instr->length()); | 3352 Register length = ToRegister(instr->length()); |
3353 Register elements = ToRegister(instr->elements()); | 3353 Register elements = ToRegister(instr->elements()); |
3354 DCHECK(receiver.is(eax)); // Used for parameter count. | 3354 DCHECK(receiver.is(eax)); // Used for parameter count. |
3355 DCHECK(function.is(edi)); // Required by InvokeFunction. | 3355 DCHECK(function.is(edi)); // Required by InvokeFunction. |
3356 DCHECK(ToRegister(instr->result()).is(eax)); | 3356 DCHECK(ToRegister(instr->result()).is(eax)); |
3357 | 3357 |
3358 // Copy the arguments to this function possibly from the | 3358 // Copy the arguments to this function possibly from the |
3359 // adaptor frame below it. | 3359 // adaptor frame below it. |
3360 const uint32_t kArgumentsLimit = 1 * KB; | 3360 const uint32_t kArgumentsLimit = 1 * KB; |
3361 __ cmp(length, kArgumentsLimit); | 3361 __ cmp(length, kArgumentsLimit); |
3362 DeoptimizeIf(above, instr, "too many arguments"); | 3362 DeoptimizeIf(above, instr, Deoptimizer::kTooManyArguments); |
3363 | 3363 |
3364 __ push(receiver); | 3364 __ push(receiver); |
3365 __ mov(receiver, length); | 3365 __ mov(receiver, length); |
3366 | 3366 |
3367 // Loop through the arguments pushing them onto the execution | 3367 // Loop through the arguments pushing them onto the execution |
3368 // stack. | 3368 // stack. |
3369 Label invoke, loop; | 3369 Label invoke, loop; |
3370 // length is a small non-negative integer, due to the test above. | 3370 // length is a small non-negative integer, due to the test above. |
3371 __ test(length, Operand(length)); | 3371 __ test(length, Operand(length)); |
3372 __ j(zero, &invoke, Label::kNear); | 3372 __ j(zero, &invoke, Label::kNear); |
(...skipping 204 matching lines...)
3577 } | 3577 } |
3578 | 3578 |
3579 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT); | 3579 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT); |
3580 } | 3580 } |
3581 | 3581 |
3582 | 3582 |
3583 void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr) { | 3583 void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr) { |
3584 Register input_reg = ToRegister(instr->value()); | 3584 Register input_reg = ToRegister(instr->value()); |
3585 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset), | 3585 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset), |
3586 factory()->heap_number_map()); | 3586 factory()->heap_number_map()); |
3587 DeoptimizeIf(not_equal, instr, "not a heap number"); | 3587 DeoptimizeIf(not_equal, instr, Deoptimizer::kNotAHeapNumber); |
3588 | 3588 |
3589 Label slow, allocated, done; | 3589 Label slow, allocated, done; |
3590 Register tmp = input_reg.is(eax) ? ecx : eax; | 3590 Register tmp = input_reg.is(eax) ? ecx : eax; |
3591 Register tmp2 = tmp.is(ecx) ? edx : input_reg.is(ecx) ? edx : ecx; | 3591 Register tmp2 = tmp.is(ecx) ? edx : input_reg.is(ecx) ? edx : ecx; |
3592 | 3592 |
3593 // Preserve the value of all registers. | 3593 // Preserve the value of all registers. |
3594 PushSafepointRegistersScope scope(this); | 3594 PushSafepointRegistersScope scope(this); |
3595 | 3595 |
3596 __ mov(tmp, FieldOperand(input_reg, HeapNumber::kExponentOffset)); | 3596 __ mov(tmp, FieldOperand(input_reg, HeapNumber::kExponentOffset)); |
3597 // Check the sign of the argument. If the argument is positive, just | 3597 // Check the sign of the argument. If the argument is positive, just |
(...skipping 26 matching lines...)
3624 __ bind(&done); | 3624 __ bind(&done); |
3625 } | 3625 } |
3626 | 3626 |
3627 | 3627 |
3628 void LCodeGen::EmitIntegerMathAbs(LMathAbs* instr) { | 3628 void LCodeGen::EmitIntegerMathAbs(LMathAbs* instr) { |
3629 Register input_reg = ToRegister(instr->value()); | 3629 Register input_reg = ToRegister(instr->value()); |
3630 __ test(input_reg, Operand(input_reg)); | 3630 __ test(input_reg, Operand(input_reg)); |
3631 Label is_positive; | 3631 Label is_positive; |
3632 __ j(not_sign, &is_positive, Label::kNear); | 3632 __ j(not_sign, &is_positive, Label::kNear); |
3633 __ neg(input_reg); // Sets flags. | 3633 __ neg(input_reg); // Sets flags. |
3634 DeoptimizeIf(negative, instr, "overflow"); | 3634 DeoptimizeIf(negative, instr, Deoptimizer::kOverflow); |
3635 __ bind(&is_positive); | 3635 __ bind(&is_positive); |
3636 } | 3636 } |
3637 | 3637 |
3638 | 3638 |
3639 void LCodeGen::DoMathAbs(LMathAbs* instr) { | 3639 void LCodeGen::DoMathAbs(LMathAbs* instr) { |
3640 // Class for deferred case. | 3640 // Class for deferred case. |
3641 class DeferredMathAbsTaggedHeapNumber FINAL : public LDeferredCode { | 3641 class DeferredMathAbsTaggedHeapNumber FINAL : public LDeferredCode { |
3642 public: | 3642 public: |
3643 DeferredMathAbsTaggedHeapNumber(LCodeGen* codegen, | 3643 DeferredMathAbsTaggedHeapNumber(LCodeGen* codegen, |
3644 LMathAbs* instr) | 3644 LMathAbs* instr) |
(...skipping 38 matching lines...)
3683 if (CpuFeatures::IsSupported(SSE4_1)) { | 3683 if (CpuFeatures::IsSupported(SSE4_1)) { |
3684 CpuFeatureScope scope(masm(), SSE4_1); | 3684 CpuFeatureScope scope(masm(), SSE4_1); |
3685 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 3685 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
3686 // Deoptimize on negative zero. | 3686 // Deoptimize on negative zero. |
3687 Label non_zero; | 3687 Label non_zero; |
3688 __ xorps(xmm_scratch, xmm_scratch); // Zero the register. | 3688 __ xorps(xmm_scratch, xmm_scratch); // Zero the register. |
3689 __ ucomisd(input_reg, xmm_scratch); | 3689 __ ucomisd(input_reg, xmm_scratch); |
3690 __ j(not_equal, &non_zero, Label::kNear); | 3690 __ j(not_equal, &non_zero, Label::kNear); |
3691 __ movmskpd(output_reg, input_reg); | 3691 __ movmskpd(output_reg, input_reg); |
3692 __ test(output_reg, Immediate(1)); | 3692 __ test(output_reg, Immediate(1)); |
3693 DeoptimizeIf(not_zero, instr, "minus zero"); | 3693 DeoptimizeIf(not_zero, instr, Deoptimizer::kMinusZero); |
3694 __ bind(&non_zero); | 3694 __ bind(&non_zero); |
3695 } | 3695 } |
3696 __ roundsd(xmm_scratch, input_reg, Assembler::kRoundDown); | 3696 __ roundsd(xmm_scratch, input_reg, Assembler::kRoundDown); |
3697 __ cvttsd2si(output_reg, Operand(xmm_scratch)); | 3697 __ cvttsd2si(output_reg, Operand(xmm_scratch)); |
3698 // Overflow is signalled with minint. | 3698 // Overflow is signalled with minint. |
3699 __ cmp(output_reg, 0x1); | 3699 __ cmp(output_reg, 0x1); |
3700 DeoptimizeIf(overflow, instr, "overflow"); | 3700 DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow); |
3701 } else { | 3701 } else { |
3702 Label negative_sign, done; | 3702 Label negative_sign, done; |
3703 // Deoptimize on unordered. | 3703 // Deoptimize on unordered. |
3704 __ xorps(xmm_scratch, xmm_scratch); // Zero the register. | 3704 __ xorps(xmm_scratch, xmm_scratch); // Zero the register. |
3705 __ ucomisd(input_reg, xmm_scratch); | 3705 __ ucomisd(input_reg, xmm_scratch); |
3706 DeoptimizeIf(parity_even, instr, "NaN"); | 3706 DeoptimizeIf(parity_even, instr, Deoptimizer::kNaN); |
3707 __ j(below, &negative_sign, Label::kNear); | 3707 __ j(below, &negative_sign, Label::kNear); |
3708 | 3708 |
3709 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 3709 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
3710 // Check for negative zero. | 3710 // Check for negative zero. |
3711 Label positive_sign; | 3711 Label positive_sign; |
3712 __ j(above, &positive_sign, Label::kNear); | 3712 __ j(above, &positive_sign, Label::kNear); |
3713 __ movmskpd(output_reg, input_reg); | 3713 __ movmskpd(output_reg, input_reg); |
3714 __ test(output_reg, Immediate(1)); | 3714 __ test(output_reg, Immediate(1)); |
3715 DeoptimizeIf(not_zero, instr, "minus zero"); | 3715 DeoptimizeIf(not_zero, instr, Deoptimizer::kMinusZero); |
3716 __ Move(output_reg, Immediate(0)); | 3716 __ Move(output_reg, Immediate(0)); |
3717 __ jmp(&done, Label::kNear); | 3717 __ jmp(&done, Label::kNear); |
3718 __ bind(&positive_sign); | 3718 __ bind(&positive_sign); |
3719 } | 3719 } |
3720 | 3720 |
3721 // Use truncating instruction (OK because input is positive). | 3721 // Use truncating instruction (OK because input is positive). |
3722 __ cvttsd2si(output_reg, Operand(input_reg)); | 3722 __ cvttsd2si(output_reg, Operand(input_reg)); |
3723 // Overflow is signalled with minint. | 3723 // Overflow is signalled with minint. |
3724 __ cmp(output_reg, 0x1); | 3724 __ cmp(output_reg, 0x1); |
3725 DeoptimizeIf(overflow, instr, "overflow"); | 3725 DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow); |
3726 __ jmp(&done, Label::kNear); | 3726 __ jmp(&done, Label::kNear); |
3727 | 3727 |
3728 // Non-zero negative reaches here. | 3728 // Non-zero negative reaches here. |
3729 __ bind(&negative_sign); | 3729 __ bind(&negative_sign); |
3730 // Truncate, then compare and compensate. | 3730 // Truncate, then compare and compensate. |
3731 __ cvttsd2si(output_reg, Operand(input_reg)); | 3731 __ cvttsd2si(output_reg, Operand(input_reg)); |
3732 __ Cvtsi2sd(xmm_scratch, output_reg); | 3732 __ Cvtsi2sd(xmm_scratch, output_reg); |
3733 __ ucomisd(input_reg, xmm_scratch); | 3733 __ ucomisd(input_reg, xmm_scratch); |
3734 __ j(equal, &done, Label::kNear); | 3734 __ j(equal, &done, Label::kNear); |
3735 __ sub(output_reg, Immediate(1)); | 3735 __ sub(output_reg, Immediate(1)); |
3736 DeoptimizeIf(overflow, instr, "overflow"); | 3736 DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow); |
3737 | 3737 |
3738 __ bind(&done); | 3738 __ bind(&done); |
3739 } | 3739 } |
3740 } | 3740 } |
3741 | 3741 |
3742 | 3742 |
3743 void LCodeGen::DoMathRound(LMathRound* instr) { | 3743 void LCodeGen::DoMathRound(LMathRound* instr) { |
3744 Register output_reg = ToRegister(instr->result()); | 3744 Register output_reg = ToRegister(instr->result()); |
3745 XMMRegister input_reg = ToDoubleRegister(instr->value()); | 3745 XMMRegister input_reg = ToDoubleRegister(instr->value()); |
3746 XMMRegister xmm_scratch = double_scratch0(); | 3746 XMMRegister xmm_scratch = double_scratch0(); |
3747 XMMRegister input_temp = ToDoubleRegister(instr->temp()); | 3747 XMMRegister input_temp = ToDoubleRegister(instr->temp()); |
3748 ExternalReference one_half = ExternalReference::address_of_one_half(); | 3748 ExternalReference one_half = ExternalReference::address_of_one_half(); |
3749 ExternalReference minus_one_half = | 3749 ExternalReference minus_one_half = |
3750 ExternalReference::address_of_minus_one_half(); | 3750 ExternalReference::address_of_minus_one_half(); |
3751 | 3751 |
3752 Label done, round_to_zero, below_one_half, do_not_compensate; | 3752 Label done, round_to_zero, below_one_half, do_not_compensate; |
3753 Label::Distance dist = DeoptEveryNTimes() ? Label::kFar : Label::kNear; | 3753 Label::Distance dist = DeoptEveryNTimes() ? Label::kFar : Label::kNear; |
3754 | 3754 |
3755 __ movsd(xmm_scratch, Operand::StaticVariable(one_half)); | 3755 __ movsd(xmm_scratch, Operand::StaticVariable(one_half)); |
3756 __ ucomisd(xmm_scratch, input_reg); | 3756 __ ucomisd(xmm_scratch, input_reg); |
3757 __ j(above, &below_one_half, Label::kNear); | 3757 __ j(above, &below_one_half, Label::kNear); |
3758 | 3758 |
3759 // CVTTSD2SI rounds towards zero, since 0.5 <= x, we use floor(0.5 + x). | 3759 // CVTTSD2SI rounds towards zero, since 0.5 <= x, we use floor(0.5 + x). |
3760 __ addsd(xmm_scratch, input_reg); | 3760 __ addsd(xmm_scratch, input_reg); |
3761 __ cvttsd2si(output_reg, Operand(xmm_scratch)); | 3761 __ cvttsd2si(output_reg, Operand(xmm_scratch)); |
3762 // Overflow is signalled with minint. | 3762 // Overflow is signalled with minint. |
3763 __ cmp(output_reg, 0x1); | 3763 __ cmp(output_reg, 0x1); |
3764 DeoptimizeIf(overflow, instr, "overflow"); | 3764 DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow); |
3765 __ jmp(&done, dist); | 3765 __ jmp(&done, dist); |
3766 | 3766 |
3767 __ bind(&below_one_half); | 3767 __ bind(&below_one_half); |
3768 __ movsd(xmm_scratch, Operand::StaticVariable(minus_one_half)); | 3768 __ movsd(xmm_scratch, Operand::StaticVariable(minus_one_half)); |
3769 __ ucomisd(xmm_scratch, input_reg); | 3769 __ ucomisd(xmm_scratch, input_reg); |
3770 __ j(below_equal, &round_to_zero, Label::kNear); | 3770 __ j(below_equal, &round_to_zero, Label::kNear); |
3771 | 3771 |
3772 // CVTTSD2SI rounds towards zero, we use ceil(x - (-0.5)) and then | 3772 // CVTTSD2SI rounds towards zero, we use ceil(x - (-0.5)) and then |
3773 // compare and compensate. | 3773 // compare and compensate. |
3774 __ movaps(input_temp, input_reg); // Do not alter input_reg. | 3774 __ movaps(input_temp, input_reg); // Do not alter input_reg. |
3775 __ subsd(input_temp, xmm_scratch); | 3775 __ subsd(input_temp, xmm_scratch); |
3776 __ cvttsd2si(output_reg, Operand(input_temp)); | 3776 __ cvttsd2si(output_reg, Operand(input_temp)); |
3777 // Catch minint due to overflow, and to prevent overflow when compensating. | 3777 // Catch minint due to overflow, and to prevent overflow when compensating. |
3778 __ cmp(output_reg, 0x1); | 3778 __ cmp(output_reg, 0x1); |
3779 DeoptimizeIf(overflow, instr, "overflow"); | 3779 DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow); |
3780 | 3780 |
3781 __ Cvtsi2sd(xmm_scratch, output_reg); | 3781 __ Cvtsi2sd(xmm_scratch, output_reg); |
3782 __ ucomisd(xmm_scratch, input_temp); | 3782 __ ucomisd(xmm_scratch, input_temp); |
3783 __ j(equal, &done, dist); | 3783 __ j(equal, &done, dist); |
3784 __ sub(output_reg, Immediate(1)); | 3784 __ sub(output_reg, Immediate(1)); |
3785 // No overflow because we already ruled out minint. | 3785 // No overflow because we already ruled out minint. |
3786 __ jmp(&done, dist); | 3786 __ jmp(&done, dist); |
3787 | 3787 |
3788 __ bind(&round_to_zero); | 3788 __ bind(&round_to_zero); |
3789 // We return 0 for the input range [+0, 0.5[, or [-0.5, 0.5[ if | 3789 // We return 0 for the input range [+0, 0.5[, or [-0.5, 0.5[ if |
3790 // we can ignore the difference between a result of -0 and +0. | 3790 // we can ignore the difference between a result of -0 and +0. |
3791 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 3791 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
3792 // If the sign is positive, we return +0. | 3792 // If the sign is positive, we return +0. |
3793 __ movmskpd(output_reg, input_reg); | 3793 __ movmskpd(output_reg, input_reg); |
3794 __ test(output_reg, Immediate(1)); | 3794 __ test(output_reg, Immediate(1)); |
3795 DeoptimizeIf(not_zero, instr, "minus zero"); | 3795 DeoptimizeIf(not_zero, instr, Deoptimizer::kMinusZero); |
3796 } | 3796 } |
3797 __ Move(output_reg, Immediate(0)); | 3797 __ Move(output_reg, Immediate(0)); |
3798 __ bind(&done); | 3798 __ bind(&done); |
3799 } | 3799 } |
3800 | 3800 |
3801 | 3801 |
3802 void LCodeGen::DoMathFround(LMathFround* instr) { | 3802 void LCodeGen::DoMathFround(LMathFround* instr) { |
3803 XMMRegister input_reg = ToDoubleRegister(instr->value()); | 3803 XMMRegister input_reg = ToDoubleRegister(instr->value()); |
3804 XMMRegister output_reg = ToDoubleRegister(instr->result()); | 3804 XMMRegister output_reg = ToDoubleRegister(instr->result()); |
3805 __ cvtsd2ss(output_reg, input_reg); | 3805 __ cvtsd2ss(output_reg, input_reg); |
(...skipping 55 matching lines...)
3861 DCHECK(ToDoubleRegister(instr->result()).is(xmm3)); | 3861 DCHECK(ToDoubleRegister(instr->result()).is(xmm3)); |
3862 | 3862 |
3863 if (exponent_type.IsSmi()) { | 3863 if (exponent_type.IsSmi()) { |
3864 MathPowStub stub(isolate(), MathPowStub::TAGGED); | 3864 MathPowStub stub(isolate(), MathPowStub::TAGGED); |
3865 __ CallStub(&stub); | 3865 __ CallStub(&stub); |
3866 } else if (exponent_type.IsTagged()) { | 3866 } else if (exponent_type.IsTagged()) { |
3867 Label no_deopt; | 3867 Label no_deopt; |
3868 __ JumpIfSmi(tagged_exponent, &no_deopt); | 3868 __ JumpIfSmi(tagged_exponent, &no_deopt); |
3869 DCHECK(!ecx.is(tagged_exponent)); | 3869 DCHECK(!ecx.is(tagged_exponent)); |
3870 __ CmpObjectType(tagged_exponent, HEAP_NUMBER_TYPE, ecx); | 3870 __ CmpObjectType(tagged_exponent, HEAP_NUMBER_TYPE, ecx); |
3871 DeoptimizeIf(not_equal, instr, "not a heap number"); | 3871 DeoptimizeIf(not_equal, instr, Deoptimizer::kNotAHeapNumber); |
3872 __ bind(&no_deopt); | 3872 __ bind(&no_deopt); |
3873 MathPowStub stub(isolate(), MathPowStub::TAGGED); | 3873 MathPowStub stub(isolate(), MathPowStub::TAGGED); |
3874 __ CallStub(&stub); | 3874 __ CallStub(&stub); |
3875 } else if (exponent_type.IsInteger32()) { | 3875 } else if (exponent_type.IsInteger32()) { |
3876 MathPowStub stub(isolate(), MathPowStub::INTEGER); | 3876 MathPowStub stub(isolate(), MathPowStub::INTEGER); |
3877 __ CallStub(&stub); | 3877 __ CallStub(&stub); |
3878 } else { | 3878 } else { |
3879 DCHECK(exponent_type.IsDouble()); | 3879 DCHECK(exponent_type.IsDouble()); |
3880 MathPowStub stub(isolate(), MathPowStub::DOUBLE); | 3880 MathPowStub stub(isolate(), MathPowStub::DOUBLE); |
3881 __ CallStub(&stub); | 3881 __ CallStub(&stub); |
(...skipping 287 matching lines...)
4169 instr->hydrogen()->index()->representation())); | 4169 instr->hydrogen()->index()->representation())); |
4170 } else { | 4170 } else { |
4171 __ cmp(ToRegister(instr->index()), ToOperand(instr->length())); | 4171 __ cmp(ToRegister(instr->index()), ToOperand(instr->length())); |
4172 } | 4172 } |
4173 if (FLAG_debug_code && instr->hydrogen()->skip_check()) { | 4173 if (FLAG_debug_code && instr->hydrogen()->skip_check()) { |
4174 Label done; | 4174 Label done; |
4175 __ j(NegateCondition(cc), &done, Label::kNear); | 4175 __ j(NegateCondition(cc), &done, Label::kNear); |
4176 __ int3(); | 4176 __ int3(); |
4177 __ bind(&done); | 4177 __ bind(&done); |
4178 } else { | 4178 } else { |
4179 DeoptimizeIf(cc, instr, "out of bounds"); | 4179 DeoptimizeIf(cc, instr, Deoptimizer::kOutOfBounds); |
4180 } | 4180 } |
4181 } | 4181 } |
4182 | 4182 |
4183 | 4183 |
4184 void LCodeGen::DoStoreKeyedExternalArray(LStoreKeyed* instr) { | 4184 void LCodeGen::DoStoreKeyedExternalArray(LStoreKeyed* instr) { |
4185 ElementsKind elements_kind = instr->elements_kind(); | 4185 ElementsKind elements_kind = instr->elements_kind(); |
4186 LOperand* key = instr->key(); | 4186 LOperand* key = instr->key(); |
4187 if (!key->IsConstantOperand() && | 4187 if (!key->IsConstantOperand() && |
4188 ExternalArrayOpRequiresTemp(instr->hydrogen()->key()->representation(), | 4188 ExternalArrayOpRequiresTemp(instr->hydrogen()->key()->representation(), |
4189 elements_kind)) { | 4189 elements_kind)) { |
(...skipping 142 matching lines...)
4332 CodeFactory::KeyedStoreIC(isolate(), instr->strict_mode()).code(); | 4332 CodeFactory::KeyedStoreIC(isolate(), instr->strict_mode()).code(); |
4333 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 4333 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
4334 } | 4334 } |
4335 | 4335 |
4336 | 4336 |
4337 void LCodeGen::DoTrapAllocationMemento(LTrapAllocationMemento* instr) { | 4337 void LCodeGen::DoTrapAllocationMemento(LTrapAllocationMemento* instr) { |
4338 Register object = ToRegister(instr->object()); | 4338 Register object = ToRegister(instr->object()); |
4339 Register temp = ToRegister(instr->temp()); | 4339 Register temp = ToRegister(instr->temp()); |
4340 Label no_memento_found; | 4340 Label no_memento_found; |
4341 __ TestJSArrayForAllocationMemento(object, temp, &no_memento_found); | 4341 __ TestJSArrayForAllocationMemento(object, temp, &no_memento_found); |
4342 DeoptimizeIf(equal, instr, "memento found"); | 4342 DeoptimizeIf(equal, instr, Deoptimizer::kMementoFound); |
4343 __ bind(&no_memento_found); | 4343 __ bind(&no_memento_found); |
4344 } | 4344 } |
4345 | 4345 |
4346 | 4346 |
4347 void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) { | 4347 void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) { |
4348 Register object_reg = ToRegister(instr->object()); | 4348 Register object_reg = ToRegister(instr->object()); |
4349 | 4349 |
4350 Handle<Map> from_map = instr->original_map(); | 4350 Handle<Map> from_map = instr->original_map(); |
4351 Handle<Map> to_map = instr->transitioned_map(); | 4351 Handle<Map> to_map = instr->transitioned_map(); |
4352 ElementsKind from_kind = instr->from_kind(); | 4352 ElementsKind from_kind = instr->from_kind(); |
(...skipping 325 matching lines...)
4678 __ StoreToSafepointRegisterSlot(reg, eax); | 4678 __ StoreToSafepointRegisterSlot(reg, eax); |
4679 } | 4679 } |
4680 | 4680 |
4681 | 4681 |
4682 void LCodeGen::DoSmiTag(LSmiTag* instr) { | 4682 void LCodeGen::DoSmiTag(LSmiTag* instr) { |
4683 HChange* hchange = instr->hydrogen(); | 4683 HChange* hchange = instr->hydrogen(); |
4684 Register input = ToRegister(instr->value()); | 4684 Register input = ToRegister(instr->value()); |
4685 if (hchange->CheckFlag(HValue::kCanOverflow) && | 4685 if (hchange->CheckFlag(HValue::kCanOverflow) && |
4686 hchange->value()->CheckFlag(HValue::kUint32)) { | 4686 hchange->value()->CheckFlag(HValue::kUint32)) { |
4687 __ test(input, Immediate(0xc0000000)); | 4687 __ test(input, Immediate(0xc0000000)); |
4688 DeoptimizeIf(not_zero, instr, "overflow"); | 4688 DeoptimizeIf(not_zero, instr, Deoptimizer::kOverflow); |
4689 } | 4689 } |
4690 __ SmiTag(input); | 4690 __ SmiTag(input); |
4691 if (hchange->CheckFlag(HValue::kCanOverflow) && | 4691 if (hchange->CheckFlag(HValue::kCanOverflow) && |
4692 !hchange->value()->CheckFlag(HValue::kUint32)) { | 4692 !hchange->value()->CheckFlag(HValue::kUint32)) { |
4693 DeoptimizeIf(overflow, instr, "overflow"); | 4693 DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow); |
4694 } | 4694 } |
4695 } | 4695 } |
4696 | 4696 |
4697 | 4697 |
4698 void LCodeGen::DoSmiUntag(LSmiUntag* instr) { | 4698 void LCodeGen::DoSmiUntag(LSmiUntag* instr) { |
4699 LOperand* input = instr->value(); | 4699 LOperand* input = instr->value(); |
4700 Register result = ToRegister(input); | 4700 Register result = ToRegister(input); |
4701 DCHECK(input->IsRegister() && input->Equals(instr->result())); | 4701 DCHECK(input->IsRegister() && input->Equals(instr->result())); |
4702 if (instr->needs_check()) { | 4702 if (instr->needs_check()) { |
4703 __ test(result, Immediate(kSmiTagMask)); | 4703 __ test(result, Immediate(kSmiTagMask)); |
4704 DeoptimizeIf(not_zero, instr, "not a Smi"); | 4704 DeoptimizeIf(not_zero, instr, Deoptimizer::kNotASmi); |
4705 } else { | 4705 } else { |
4706 __ AssertSmi(result); | 4706 __ AssertSmi(result); |
4707 } | 4707 } |
4708 __ SmiUntag(result); | 4708 __ SmiUntag(result); |
4709 } | 4709 } |
4710 | 4710 |
4711 | 4711 |
4712 void LCodeGen::EmitNumberUntagD(LNumberUntagD* instr, Register input_reg, | 4712 void LCodeGen::EmitNumberUntagD(LNumberUntagD* instr, Register input_reg, |
4713 Register temp_reg, XMMRegister result_reg, | 4713 Register temp_reg, XMMRegister result_reg, |
4714 NumberUntagDMode mode) { | 4714 NumberUntagDMode mode) { |
4715 bool can_convert_undefined_to_nan = | 4715 bool can_convert_undefined_to_nan = |
4716 instr->hydrogen()->can_convert_undefined_to_nan(); | 4716 instr->hydrogen()->can_convert_undefined_to_nan(); |
4717 bool deoptimize_on_minus_zero = instr->hydrogen()->deoptimize_on_minus_zero(); | 4717 bool deoptimize_on_minus_zero = instr->hydrogen()->deoptimize_on_minus_zero(); |
4718 | 4718 |
4719 Label convert, load_smi, done; | 4719 Label convert, load_smi, done; |
4720 | 4720 |
4721 if (mode == NUMBER_CANDIDATE_IS_ANY_TAGGED) { | 4721 if (mode == NUMBER_CANDIDATE_IS_ANY_TAGGED) { |
4722 // Smi check. | 4722 // Smi check. |
4723 __ JumpIfSmi(input_reg, &load_smi, Label::kNear); | 4723 __ JumpIfSmi(input_reg, &load_smi, Label::kNear); |
4724 | 4724 |
4725 // Heap number map check. | 4725 // Heap number map check. |
4726 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset), | 4726 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset), |
4727 factory()->heap_number_map()); | 4727 factory()->heap_number_map()); |
4728 if (can_convert_undefined_to_nan) { | 4728 if (can_convert_undefined_to_nan) { |
4729 __ j(not_equal, &convert, Label::kNear); | 4729 __ j(not_equal, &convert, Label::kNear); |
4730 } else { | 4730 } else { |
4731 DeoptimizeIf(not_equal, instr, "not a heap number"); | 4731 DeoptimizeIf(not_equal, instr, Deoptimizer::kNotAHeapNumber); |
4732 } | 4732 } |
4733 | 4733 |
4734 // Heap number to XMM conversion. | 4734 // Heap number to XMM conversion. |
4735 __ movsd(result_reg, FieldOperand(input_reg, HeapNumber::kValueOffset)); | 4735 __ movsd(result_reg, FieldOperand(input_reg, HeapNumber::kValueOffset)); |
4736 | 4736 |
4737 if (deoptimize_on_minus_zero) { | 4737 if (deoptimize_on_minus_zero) { |
4738 XMMRegister xmm_scratch = double_scratch0(); | 4738 XMMRegister xmm_scratch = double_scratch0(); |
4739 __ xorps(xmm_scratch, xmm_scratch); | 4739 __ xorps(xmm_scratch, xmm_scratch); |
4740 __ ucomisd(result_reg, xmm_scratch); | 4740 __ ucomisd(result_reg, xmm_scratch); |
4741 __ j(not_zero, &done, Label::kNear); | 4741 __ j(not_zero, &done, Label::kNear); |
4742 __ movmskpd(temp_reg, result_reg); | 4742 __ movmskpd(temp_reg, result_reg); |
4743 __ test_b(temp_reg, 1); | 4743 __ test_b(temp_reg, 1); |
4744 DeoptimizeIf(not_zero, instr, "minus zero"); | 4744 DeoptimizeIf(not_zero, instr, Deoptimizer::kMinusZero); |
4745 } | 4745 } |
4746 __ jmp(&done, Label::kNear); | 4746 __ jmp(&done, Label::kNear); |
4747 | 4747 |
4748 if (can_convert_undefined_to_nan) { | 4748 if (can_convert_undefined_to_nan) { |
4749 __ bind(&convert); | 4749 __ bind(&convert); |
4750 | 4750 |
4751 // Convert undefined to NaN. | 4751 // Convert undefined to NaN. |
4752 __ cmp(input_reg, factory()->undefined_value()); | 4752 __ cmp(input_reg, factory()->undefined_value()); |
4753 DeoptimizeIf(not_equal, instr, "not a heap number/undefined"); | 4753 DeoptimizeIf(not_equal, instr, Deoptimizer::kNotAHeapNumberUndefined); |
4754 | 4754 |
4755 __ pcmpeqd(result_reg, result_reg); | 4755 __ pcmpeqd(result_reg, result_reg); |
4756 __ jmp(&done, Label::kNear); | 4756 __ jmp(&done, Label::kNear); |
4757 } | 4757 } |
4758 } else { | 4758 } else { |
4759 DCHECK(mode == NUMBER_CANDIDATE_IS_SMI); | 4759 DCHECK(mode == NUMBER_CANDIDATE_IS_SMI); |
4760 } | 4760 } |
4761 | 4761 |
4762 __ bind(&load_smi); | 4762 __ bind(&load_smi); |
4763 // Smi to XMM conversion. Clobbering a temp is faster than re-tagging the | 4763 // Smi to XMM conversion. Clobbering a temp is faster than re-tagging the |
(...skipping 31 matching lines...)
4795 __ jmp(done); | 4795 __ jmp(done); |
4796 | 4796 |
4797 __ bind(&check_bools); | 4797 __ bind(&check_bools); |
4798 __ cmp(input_reg, factory()->true_value()); | 4798 __ cmp(input_reg, factory()->true_value()); |
4799 __ j(not_equal, &check_false, Label::kNear); | 4799 __ j(not_equal, &check_false, Label::kNear); |
4800 __ Move(input_reg, Immediate(1)); | 4800 __ Move(input_reg, Immediate(1)); |
4801 __ jmp(done); | 4801 __ jmp(done); |
4802 | 4802 |
4803 __ bind(&check_false); | 4803 __ bind(&check_false); |
4804 __ cmp(input_reg, factory()->false_value()); | 4804 __ cmp(input_reg, factory()->false_value()); |
4805 DeoptimizeIf(not_equal, instr, "not a heap number/undefined/true/false"); | 4805 DeoptimizeIf(not_equal, instr, |
| 4806 Deoptimizer::kNotAHeapNumberUndefinedBoolean); |
4806 __ Move(input_reg, Immediate(0)); | 4807 __ Move(input_reg, Immediate(0)); |
4807 } else { | 4808 } else { |
4808 XMMRegister scratch = ToDoubleRegister(instr->temp()); | 4809 XMMRegister scratch = ToDoubleRegister(instr->temp()); |
4809 DCHECK(!scratch.is(xmm0)); | 4810 DCHECK(!scratch.is(xmm0)); |
4810 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset), | 4811 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset), |
4811 isolate()->factory()->heap_number_map()); | 4812 isolate()->factory()->heap_number_map()); |
4812 DeoptimizeIf(not_equal, instr, "not a heap number"); | 4813 DeoptimizeIf(not_equal, instr, Deoptimizer::kNotAHeapNumber); |
4813 __ movsd(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset)); | 4814 __ movsd(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset)); |
4814 __ cvttsd2si(input_reg, Operand(xmm0)); | 4815 __ cvttsd2si(input_reg, Operand(xmm0)); |
4815 __ Cvtsi2sd(scratch, Operand(input_reg)); | 4816 __ Cvtsi2sd(scratch, Operand(input_reg)); |
4816 __ ucomisd(xmm0, scratch); | 4817 __ ucomisd(xmm0, scratch); |
4817 DeoptimizeIf(not_equal, instr, "lost precision"); | 4818 DeoptimizeIf(not_equal, instr, Deoptimizer::kLostPrecision); |
4818 DeoptimizeIf(parity_even, instr, "NaN"); | 4819 DeoptimizeIf(parity_even, instr, Deoptimizer::kNaN); |
4819 if (instr->hydrogen()->GetMinusZeroMode() == FAIL_ON_MINUS_ZERO) { | 4820 if (instr->hydrogen()->GetMinusZeroMode() == FAIL_ON_MINUS_ZERO) { |
4820 __ test(input_reg, Operand(input_reg)); | 4821 __ test(input_reg, Operand(input_reg)); |
4821 __ j(not_zero, done); | 4822 __ j(not_zero, done); |
4822 __ movmskpd(input_reg, xmm0); | 4823 __ movmskpd(input_reg, xmm0); |
4823 __ and_(input_reg, 1); | 4824 __ and_(input_reg, 1); |
4824 DeoptimizeIf(not_zero, instr, "minus zero"); | 4825 DeoptimizeIf(not_zero, instr, Deoptimizer::kMinusZero); |
4825 } | 4826 } |
4826 } | 4827 } |
4827 } | 4828 } |
4828 | 4829 |
4829 | 4830 |
4830 void LCodeGen::DoTaggedToI(LTaggedToI* instr) { | 4831 void LCodeGen::DoTaggedToI(LTaggedToI* instr) { |
4831 class DeferredTaggedToI FINAL : public LDeferredCode { | 4832 class DeferredTaggedToI FINAL : public LDeferredCode { |
4832 public: | 4833 public: |
4833 DeferredTaggedToI(LCodeGen* codegen, LTaggedToI* instr) | 4834 DeferredTaggedToI(LCodeGen* codegen, LTaggedToI* instr) |
4834 : LDeferredCode(codegen), instr_(instr) { } | 4835 : LDeferredCode(codegen), instr_(instr) { } |
(...skipping 59 matching lines...)
4894 } else { | 4895 } else { |
4895 Label lost_precision, is_nan, minus_zero, done; | 4896 Label lost_precision, is_nan, minus_zero, done; |
4896 XMMRegister input_reg = ToDoubleRegister(input); | 4897 XMMRegister input_reg = ToDoubleRegister(input); |
4897 XMMRegister xmm_scratch = double_scratch0(); | 4898 XMMRegister xmm_scratch = double_scratch0(); |
4898 Label::Distance dist = DeoptEveryNTimes() ? Label::kFar : Label::kNear; | 4899 Label::Distance dist = DeoptEveryNTimes() ? Label::kFar : Label::kNear; |
4899 __ DoubleToI(result_reg, input_reg, xmm_scratch, | 4900 __ DoubleToI(result_reg, input_reg, xmm_scratch, |
4900 instr->hydrogen()->GetMinusZeroMode(), &lost_precision, | 4901 instr->hydrogen()->GetMinusZeroMode(), &lost_precision, |
4901 &is_nan, &minus_zero, dist); | 4902 &is_nan, &minus_zero, dist); |
4902 __ jmp(&done, dist); | 4903 __ jmp(&done, dist); |
4903 __ bind(&lost_precision); | 4904 __ bind(&lost_precision); |
4904 DeoptimizeIf(no_condition, instr, "lost precision"); | 4905 DeoptimizeIf(no_condition, instr, Deoptimizer::kLostPrecision); |
4905 __ bind(&is_nan); | 4906 __ bind(&is_nan); |
4906 DeoptimizeIf(no_condition, instr, "NaN"); | 4907 DeoptimizeIf(no_condition, instr, Deoptimizer::kNaN); |
4907 __ bind(&minus_zero); | 4908 __ bind(&minus_zero); |
4908 DeoptimizeIf(no_condition, instr, "minus zero"); | 4909 DeoptimizeIf(no_condition, instr, Deoptimizer::kMinusZero); |
4909 __ bind(&done); | 4910 __ bind(&done); |
4910 } | 4911 } |
4911 } | 4912 } |
4912 | 4913 |
4913 | 4914 |
4914 void LCodeGen::DoDoubleToSmi(LDoubleToSmi* instr) { | 4915 void LCodeGen::DoDoubleToSmi(LDoubleToSmi* instr) { |
4915 LOperand* input = instr->value(); | 4916 LOperand* input = instr->value(); |
4916 DCHECK(input->IsDoubleRegister()); | 4917 DCHECK(input->IsDoubleRegister()); |
4917 LOperand* result = instr->result(); | 4918 LOperand* result = instr->result(); |
4918 DCHECK(result->IsRegister()); | 4919 DCHECK(result->IsRegister()); |
4919 Register result_reg = ToRegister(result); | 4920 Register result_reg = ToRegister(result); |
4920 | 4921 |
4921 Label lost_precision, is_nan, minus_zero, done; | 4922 Label lost_precision, is_nan, minus_zero, done; |
4922 XMMRegister input_reg = ToDoubleRegister(input); | 4923 XMMRegister input_reg = ToDoubleRegister(input); |
4923 XMMRegister xmm_scratch = double_scratch0(); | 4924 XMMRegister xmm_scratch = double_scratch0(); |
4924 Label::Distance dist = DeoptEveryNTimes() ? Label::kFar : Label::kNear; | 4925 Label::Distance dist = DeoptEveryNTimes() ? Label::kFar : Label::kNear; |
4925 __ DoubleToI(result_reg, input_reg, xmm_scratch, | 4926 __ DoubleToI(result_reg, input_reg, xmm_scratch, |
4926 instr->hydrogen()->GetMinusZeroMode(), &lost_precision, &is_nan, | 4927 instr->hydrogen()->GetMinusZeroMode(), &lost_precision, &is_nan, |
4927 &minus_zero, dist); | 4928 &minus_zero, dist); |
4928 __ jmp(&done, dist); | 4929 __ jmp(&done, dist); |
4929 __ bind(&lost_precision); | 4930 __ bind(&lost_precision); |
4930 DeoptimizeIf(no_condition, instr, "lost precision"); | 4931 DeoptimizeIf(no_condition, instr, Deoptimizer::kLostPrecision); |
4931 __ bind(&is_nan); | 4932 __ bind(&is_nan); |
4932 DeoptimizeIf(no_condition, instr, "NaN"); | 4933 DeoptimizeIf(no_condition, instr, Deoptimizer::kNaN); |
4933 __ bind(&minus_zero); | 4934 __ bind(&minus_zero); |
4934 DeoptimizeIf(no_condition, instr, "minus zero"); | 4935 DeoptimizeIf(no_condition, instr, Deoptimizer::kMinusZero); |
4935 __ bind(&done); | 4936 __ bind(&done); |
4936 __ SmiTag(result_reg); | 4937 __ SmiTag(result_reg); |
4937 DeoptimizeIf(overflow, instr, "overflow"); | 4938 DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow); |
4938 } | 4939 } |
4939 | 4940 |
4940 | 4941 |
4941 void LCodeGen::DoCheckSmi(LCheckSmi* instr) { | 4942 void LCodeGen::DoCheckSmi(LCheckSmi* instr) { |
4942 LOperand* input = instr->value(); | 4943 LOperand* input = instr->value(); |
4943 __ test(ToOperand(input), Immediate(kSmiTagMask)); | 4944 __ test(ToOperand(input), Immediate(kSmiTagMask)); |
4944 DeoptimizeIf(not_zero, instr, "not a Smi"); | 4945 DeoptimizeIf(not_zero, instr, Deoptimizer::kNotASmi); |
4945 } | 4946 } |
4946 | 4947 |
4947 | 4948 |
4948 void LCodeGen::DoCheckNonSmi(LCheckNonSmi* instr) { | 4949 void LCodeGen::DoCheckNonSmi(LCheckNonSmi* instr) { |
4949 if (!instr->hydrogen()->value()->type().IsHeapObject()) { | 4950 if (!instr->hydrogen()->value()->type().IsHeapObject()) { |
4950 LOperand* input = instr->value(); | 4951 LOperand* input = instr->value(); |
4951 __ test(ToOperand(input), Immediate(kSmiTagMask)); | 4952 __ test(ToOperand(input), Immediate(kSmiTagMask)); |
4952 DeoptimizeIf(zero, instr, "Smi"); | 4953 DeoptimizeIf(zero, instr, Deoptimizer::kSmi); |
4953 } | 4954 } |
4954 } | 4955 } |
4955 | 4956 |
4956 | 4957 |
4957 void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) { | 4958 void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) { |
4958 Register input = ToRegister(instr->value()); | 4959 Register input = ToRegister(instr->value()); |
4959 Register temp = ToRegister(instr->temp()); | 4960 Register temp = ToRegister(instr->temp()); |
4960 | 4961 |
4961 __ mov(temp, FieldOperand(input, HeapObject::kMapOffset)); | 4962 __ mov(temp, FieldOperand(input, HeapObject::kMapOffset)); |
4962 | 4963 |
4963 if (instr->hydrogen()->is_interval_check()) { | 4964 if (instr->hydrogen()->is_interval_check()) { |
4964 InstanceType first; | 4965 InstanceType first; |
4965 InstanceType last; | 4966 InstanceType last; |
4966 instr->hydrogen()->GetCheckInterval(&first, &last); | 4967 instr->hydrogen()->GetCheckInterval(&first, &last); |
4967 | 4968 |
4968 __ cmpb(FieldOperand(temp, Map::kInstanceTypeOffset), | 4969 __ cmpb(FieldOperand(temp, Map::kInstanceTypeOffset), |
4969 static_cast<int8_t>(first)); | 4970 static_cast<int8_t>(first)); |
4970 | 4971 |
4971 // If there is only one type in the interval check for equality. | 4972 // If there is only one type in the interval check for equality. |
4972 if (first == last) { | 4973 if (first == last) { |
4973 DeoptimizeIf(not_equal, instr, "wrong instance type"); | 4974 DeoptimizeIf(not_equal, instr, Deoptimizer::kWrongInstanceType); |
4974 } else { | 4975 } else { |
4975 DeoptimizeIf(below, instr, "wrong instance type"); | 4976 DeoptimizeIf(below, instr, Deoptimizer::kWrongInstanceType); |
4976 // Omit check for the last type. | 4977 // Omit check for the last type. |
4977 if (last != LAST_TYPE) { | 4978 if (last != LAST_TYPE) { |
4978 __ cmpb(FieldOperand(temp, Map::kInstanceTypeOffset), | 4979 __ cmpb(FieldOperand(temp, Map::kInstanceTypeOffset), |
4979 static_cast<int8_t>(last)); | 4980 static_cast<int8_t>(last)); |
4980 DeoptimizeIf(above, instr, "wrong instance type"); | 4981 DeoptimizeIf(above, instr, Deoptimizer::kWrongInstanceType); |
4981 } | 4982 } |
4982 } | 4983 } |
4983 } else { | 4984 } else { |
4984 uint8_t mask; | 4985 uint8_t mask; |
4985 uint8_t tag; | 4986 uint8_t tag; |
4986 instr->hydrogen()->GetCheckMaskAndTag(&mask, &tag); | 4987 instr->hydrogen()->GetCheckMaskAndTag(&mask, &tag); |
4987 | 4988 |
4988 if (base::bits::IsPowerOfTwo32(mask)) { | 4989 if (base::bits::IsPowerOfTwo32(mask)) { |
4989 DCHECK(tag == 0 || base::bits::IsPowerOfTwo32(tag)); | 4990 DCHECK(tag == 0 || base::bits::IsPowerOfTwo32(tag)); |
4990 __ test_b(FieldOperand(temp, Map::kInstanceTypeOffset), mask); | 4991 __ test_b(FieldOperand(temp, Map::kInstanceTypeOffset), mask); |
4991 DeoptimizeIf(tag == 0 ? not_zero : zero, instr, "wrong instance type"); | 4992 DeoptimizeIf(tag == 0 ? not_zero : zero, instr, |
| 4993 Deoptimizer::kWrongInstanceType); |
4992 } else { | 4994 } else { |
4993 __ movzx_b(temp, FieldOperand(temp, Map::kInstanceTypeOffset)); | 4995 __ movzx_b(temp, FieldOperand(temp, Map::kInstanceTypeOffset)); |
4994 __ and_(temp, mask); | 4996 __ and_(temp, mask); |
4995 __ cmp(temp, tag); | 4997 __ cmp(temp, tag); |
4996 DeoptimizeIf(not_equal, instr, "wrong instance type"); | 4998 DeoptimizeIf(not_equal, instr, Deoptimizer::kWrongInstanceType); |
4997 } | 4999 } |
4998 } | 5000 } |
4999 } | 5001 } |
5000 | 5002 |
5001 | 5003 |
5002 void LCodeGen::DoCheckValue(LCheckValue* instr) { | 5004 void LCodeGen::DoCheckValue(LCheckValue* instr) { |
5003 Handle<HeapObject> object = instr->hydrogen()->object().handle(); | 5005 Handle<HeapObject> object = instr->hydrogen()->object().handle(); |
5004 if (instr->hydrogen()->object_in_new_space()) { | 5006 if (instr->hydrogen()->object_in_new_space()) { |
5005 Register reg = ToRegister(instr->value()); | 5007 Register reg = ToRegister(instr->value()); |
5006 Handle<Cell> cell = isolate()->factory()->NewCell(object); | 5008 Handle<Cell> cell = isolate()->factory()->NewCell(object); |
5007 __ cmp(reg, Operand::ForCell(cell)); | 5009 __ cmp(reg, Operand::ForCell(cell)); |
5008 } else { | 5010 } else { |
5009 Operand operand = ToOperand(instr->value()); | 5011 Operand operand = ToOperand(instr->value()); |
5010 __ cmp(operand, object); | 5012 __ cmp(operand, object); |
5011 } | 5013 } |
5012 DeoptimizeIf(not_equal, instr, "value mismatch"); | 5014 DeoptimizeIf(not_equal, instr, Deoptimizer::kValueMismatch); |
5013 } | 5015 } |
5014 | 5016 |
5015 | 5017 |
5016 void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) { | 5018 void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) { |
5017 { | 5019 { |
5018 PushSafepointRegistersScope scope(this); | 5020 PushSafepointRegistersScope scope(this); |
5019 __ push(object); | 5021 __ push(object); |
5020 __ xor_(esi, esi); | 5022 __ xor_(esi, esi); |
5021 __ CallRuntimeSaveDoubles(Runtime::kTryMigrateInstance); | 5023 __ CallRuntimeSaveDoubles(Runtime::kTryMigrateInstance); |
5022 RecordSafepointWithRegisters( | 5024 RecordSafepointWithRegisters( |
5023 instr->pointer_map(), 1, Safepoint::kNoLazyDeopt); | 5025 instr->pointer_map(), 1, Safepoint::kNoLazyDeopt); |
5024 | 5026 |
5025 __ test(eax, Immediate(kSmiTagMask)); | 5027 __ test(eax, Immediate(kSmiTagMask)); |
5026 } | 5028 } |
5027 DeoptimizeIf(zero, instr, "instance migration failed"); | 5029 DeoptimizeIf(zero, instr, Deoptimizer::kInstanceMigrationFailed); |
5028 } | 5030 } |
5029 | 5031 |
5030 | 5032 |
5031 void LCodeGen::DoCheckMaps(LCheckMaps* instr) { | 5033 void LCodeGen::DoCheckMaps(LCheckMaps* instr) { |
5032 class DeferredCheckMaps FINAL : public LDeferredCode { | 5034 class DeferredCheckMaps FINAL : public LDeferredCode { |
5033 public: | 5035 public: |
5034 DeferredCheckMaps(LCodeGen* codegen, LCheckMaps* instr, Register object) | 5036 DeferredCheckMaps(LCodeGen* codegen, LCheckMaps* instr, Register object) |
5035 : LDeferredCode(codegen), instr_(instr), object_(object) { | 5037 : LDeferredCode(codegen), instr_(instr), object_(object) { |
5036 SetExit(check_maps()); | 5038 SetExit(check_maps()); |
5037 } | 5039 } |
(...skipping 33 matching lines...)
5071 Handle<Map> map = maps->at(i).handle(); | 5073 Handle<Map> map = maps->at(i).handle(); |
5072 __ CompareMap(reg, map); | 5074 __ CompareMap(reg, map); |
5073 __ j(equal, &success, Label::kNear); | 5075 __ j(equal, &success, Label::kNear); |
5074 } | 5076 } |
5075 | 5077 |
5076 Handle<Map> map = maps->at(maps->size() - 1).handle(); | 5078 Handle<Map> map = maps->at(maps->size() - 1).handle(); |
5077 __ CompareMap(reg, map); | 5079 __ CompareMap(reg, map); |
5078 if (instr->hydrogen()->HasMigrationTarget()) { | 5080 if (instr->hydrogen()->HasMigrationTarget()) { |
5079 __ j(not_equal, deferred->entry()); | 5081 __ j(not_equal, deferred->entry()); |
5080 } else { | 5082 } else { |
5081 DeoptimizeIf(not_equal, instr, "wrong map"); | 5083 DeoptimizeIf(not_equal, instr, Deoptimizer::kWrongMap); |
5082 } | 5084 } |
5083 | 5085 |
5084 __ bind(&success); | 5086 __ bind(&success); |
5085 } | 5087 } |
5086 | 5088 |
5087 | 5089 |
5088 void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) { | 5090 void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) { |
5089 XMMRegister value_reg = ToDoubleRegister(instr->unclamped()); | 5091 XMMRegister value_reg = ToDoubleRegister(instr->unclamped()); |
5090 XMMRegister xmm_scratch = double_scratch0(); | 5092 XMMRegister xmm_scratch = double_scratch0(); |
5091 Register result_reg = ToRegister(instr->result()); | 5093 Register result_reg = ToRegister(instr->result()); |
(...skipping 18 matching lines...)
5110 __ JumpIfSmi(input_reg, &is_smi); | 5112 __ JumpIfSmi(input_reg, &is_smi); |
5111 | 5113 |
5112 // Check for heap number | 5114 // Check for heap number |
5113 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset), | 5115 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset), |
5114 factory()->heap_number_map()); | 5116 factory()->heap_number_map()); |
5115 __ j(equal, &heap_number, Label::kNear); | 5117 __ j(equal, &heap_number, Label::kNear); |
5116 | 5118 |
5117 // Check for undefined. Undefined is converted to zero for clamping | 5119 // Check for undefined. Undefined is converted to zero for clamping |
5118 // conversions. | 5120 // conversions. |
5119 __ cmp(input_reg, factory()->undefined_value()); | 5121 __ cmp(input_reg, factory()->undefined_value()); |
5120 DeoptimizeIf(not_equal, instr, "not a heap number/undefined"); | 5122 DeoptimizeIf(not_equal, instr, Deoptimizer::kNotAHeapNumberUndefined); |
5121 __ mov(input_reg, 0); | 5123 __ mov(input_reg, 0); |
5122 __ jmp(&done, Label::kNear); | 5124 __ jmp(&done, Label::kNear); |
5123 | 5125 |
5124 // Heap number | 5126 // Heap number |
5125 __ bind(&heap_number); | 5127 __ bind(&heap_number); |
5126 __ movsd(xmm_scratch, FieldOperand(input_reg, HeapNumber::kValueOffset)); | 5128 __ movsd(xmm_scratch, FieldOperand(input_reg, HeapNumber::kValueOffset)); |
5127 __ ClampDoubleToUint8(xmm_scratch, temp_xmm_reg, input_reg); | 5129 __ ClampDoubleToUint8(xmm_scratch, temp_xmm_reg, input_reg); |
5128 __ jmp(&done, Label::kNear); | 5130 __ jmp(&done, Label::kNear); |
5129 | 5131 |
5130 // smi | 5132 // smi |
(...skipping 467 matching lines...)
5598 DCHECK(!environment->HasBeenRegistered()); | 5600 DCHECK(!environment->HasBeenRegistered()); |
5599 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); | 5601 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); |
5600 | 5602 |
5601 GenerateOsrPrologue(); | 5603 GenerateOsrPrologue(); |
5602 } | 5604 } |
5603 | 5605 |
5604 | 5606 |
5605 void LCodeGen::DoForInPrepareMap(LForInPrepareMap* instr) { | 5607 void LCodeGen::DoForInPrepareMap(LForInPrepareMap* instr) { |
5606 DCHECK(ToRegister(instr->context()).is(esi)); | 5608 DCHECK(ToRegister(instr->context()).is(esi)); |
5607 __ cmp(eax, isolate()->factory()->undefined_value()); | 5609 __ cmp(eax, isolate()->factory()->undefined_value()); |
5608 DeoptimizeIf(equal, instr, "undefined"); | 5610 DeoptimizeIf(equal, instr, Deoptimizer::kUndefined); |
5609 | 5611 |
5610 __ cmp(eax, isolate()->factory()->null_value()); | 5612 __ cmp(eax, isolate()->factory()->null_value()); |
5611 DeoptimizeIf(equal, instr, "null"); | 5613 DeoptimizeIf(equal, instr, Deoptimizer::kNull); |
5612 | 5614 |
5613 __ test(eax, Immediate(kSmiTagMask)); | 5615 __ test(eax, Immediate(kSmiTagMask)); |
5614 DeoptimizeIf(zero, instr, "Smi"); | 5616 DeoptimizeIf(zero, instr, Deoptimizer::kSmi); |
5615 | 5617 |
5616 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE); | 5618 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE); |
5617 __ CmpObjectType(eax, LAST_JS_PROXY_TYPE, ecx); | 5619 __ CmpObjectType(eax, LAST_JS_PROXY_TYPE, ecx); |
5618 DeoptimizeIf(below_equal, instr, "wrong instance type"); | 5620 DeoptimizeIf(below_equal, instr, Deoptimizer::kWrongInstanceType); |
5619 | 5621 |
5620 Label use_cache, call_runtime; | 5622 Label use_cache, call_runtime; |
5621 __ CheckEnumCache(&call_runtime); | 5623 __ CheckEnumCache(&call_runtime); |
5622 | 5624 |
5623 __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset)); | 5625 __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset)); |
5624 __ jmp(&use_cache, Label::kNear); | 5626 __ jmp(&use_cache, Label::kNear); |
5625 | 5627 |
5626 // Get the set of properties to enumerate. | 5628 // Get the set of properties to enumerate. |
5627 __ bind(&call_runtime); | 5629 __ bind(&call_runtime); |
5628 __ push(eax); | 5630 __ push(eax); |
5629 CallRuntime(Runtime::kGetPropertyNamesFast, 1, instr); | 5631 CallRuntime(Runtime::kGetPropertyNamesFast, 1, instr); |
5630 | 5632 |
5631 __ cmp(FieldOperand(eax, HeapObject::kMapOffset), | 5633 __ cmp(FieldOperand(eax, HeapObject::kMapOffset), |
5632 isolate()->factory()->meta_map()); | 5634 isolate()->factory()->meta_map()); |
5633 DeoptimizeIf(not_equal, instr, "wrong map"); | 5635 DeoptimizeIf(not_equal, instr, Deoptimizer::kWrongMap); |
5634 __ bind(&use_cache); | 5636 __ bind(&use_cache); |
5635 } | 5637 } |
5636 | 5638 |
5637 | 5639 |
5638 void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) { | 5640 void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) { |
5639 Register map = ToRegister(instr->map()); | 5641 Register map = ToRegister(instr->map()); |
5640 Register result = ToRegister(instr->result()); | 5642 Register result = ToRegister(instr->result()); |
5641 Label load_cache, done; | 5643 Label load_cache, done; |
5642 __ EnumLength(result, map); | 5644 __ EnumLength(result, map); |
5643 __ cmp(result, Immediate(Smi::FromInt(0))); | 5645 __ cmp(result, Immediate(Smi::FromInt(0))); |
5644 __ j(not_equal, &load_cache, Label::kNear); | 5646 __ j(not_equal, &load_cache, Label::kNear); |
5645 __ mov(result, isolate()->factory()->empty_fixed_array()); | 5647 __ mov(result, isolate()->factory()->empty_fixed_array()); |
5646 __ jmp(&done, Label::kNear); | 5648 __ jmp(&done, Label::kNear); |
5647 | 5649 |
5648 __ bind(&load_cache); | 5650 __ bind(&load_cache); |
5649 __ LoadInstanceDescriptors(map, result); | 5651 __ LoadInstanceDescriptors(map, result); |
5650 __ mov(result, | 5652 __ mov(result, |
5651 FieldOperand(result, DescriptorArray::kEnumCacheOffset)); | 5653 FieldOperand(result, DescriptorArray::kEnumCacheOffset)); |
5652 __ mov(result, | 5654 __ mov(result, |
5653 FieldOperand(result, FixedArray::SizeFor(instr->idx()))); | 5655 FieldOperand(result, FixedArray::SizeFor(instr->idx()))); |
5654 __ bind(&done); | 5656 __ bind(&done); |
5655 __ test(result, result); | 5657 __ test(result, result); |
5656 DeoptimizeIf(equal, instr, "no cache"); | 5658 DeoptimizeIf(equal, instr, Deoptimizer::kNoCache); |
5657 } | 5659 } |
5658 | 5660 |
5659 | 5661 |
5660 void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) { | 5662 void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) { |
5661 Register object = ToRegister(instr->value()); | 5663 Register object = ToRegister(instr->value()); |
5662 __ cmp(ToRegister(instr->map()), | 5664 __ cmp(ToRegister(instr->map()), |
5663 FieldOperand(object, HeapObject::kMapOffset)); | 5665 FieldOperand(object, HeapObject::kMapOffset)); |
5664 DeoptimizeIf(not_equal, instr, "wrong map"); | 5666 DeoptimizeIf(not_equal, instr, Deoptimizer::kWrongMap); |
5665 } | 5667 } |
5666 | 5668 |
5667 | 5669 |
5668 void LCodeGen::DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr, | 5670 void LCodeGen::DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr, |
5669 Register object, | 5671 Register object, |
5670 Register index) { | 5672 Register index) { |
5671 PushSafepointRegistersScope scope(this); | 5673 PushSafepointRegistersScope scope(this); |
5672 __ push(object); | 5674 __ push(object); |
5673 __ push(index); | 5675 __ push(index); |
5674 __ xor_(esi, esi); | 5676 __ xor_(esi, esi); |
(...skipping 74 matching lines...)
5749 CallRuntime(Runtime::kPushBlockContext, 2, instr); | 5751 CallRuntime(Runtime::kPushBlockContext, 2, instr); |
5750 RecordSafepoint(Safepoint::kNoLazyDeopt); | 5752 RecordSafepoint(Safepoint::kNoLazyDeopt); |
5751 } | 5753 } |
5752 | 5754 |
5753 | 5755 |
5754 #undef __ | 5756 #undef __ |
5755 | 5757 |
5756 } } // namespace v8::internal | 5758 } } // namespace v8::internal |
5757 | 5759 |
5758 #endif // V8_TARGET_ARCH_IA32 | 5760 #endif // V8_TARGET_ARCH_IA32 |