| OLD | NEW | 
|---|---|
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without | 
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are | 
| 4 // met: | 4 // met: | 
| 5 // | 5 // | 
| 6 //     * Redistributions of source code must retain the above copyright | 6 //     * Redistributions of source code must retain the above copyright | 
| 7 //       notice, this list of conditions and the following disclaimer. | 7 //       notice, this list of conditions and the following disclaimer. | 
| 8 //     * Redistributions in binary form must reproduce the above | 8 //     * Redistributions in binary form must reproduce the above | 
| 9 //       copyright notice, this list of conditions and the following | 9 //       copyright notice, this list of conditions and the following | 
| 10 //       disclaimer in the documentation and/or other materials provided | 10 //       disclaimer in the documentation and/or other materials provided | 
| (...skipping 796 matching lines...) | |
| 807     int pc_offset = masm()->pc_offset(); | 807     int pc_offset = masm()->pc_offset(); | 
| 808     environment->Register(deoptimization_index, | 808     environment->Register(deoptimization_index, | 
| 809                           translation.index(), | 809                           translation.index(), | 
| 810                           (mode == Safepoint::kLazyDeopt) ? pc_offset : -1); | 810                           (mode == Safepoint::kLazyDeopt) ? pc_offset : -1); | 
| 811     deoptimizations_.Add(environment, zone()); | 811     deoptimizations_.Add(environment, zone()); | 
| 812   } | 812   } | 
| 813 } | 813 } | 
| 814 | 814 | 
| 815 | 815 | 
| 816 void LCodeGen::DeoptimizeIf(Condition condition, LInstruction* instr, | 816 void LCodeGen::DeoptimizeIf(Condition condition, LInstruction* instr, | 
|  | 817                             Deoptimizer::DeoptReason deopt_reason, | 
| 817                             Deoptimizer::BailoutType bailout_type, | 818                             Deoptimizer::BailoutType bailout_type, | 
| 818                             const char* detail, Register src1, | 819                             Register src1, const Operand& src2) { | 
| 819                             const Operand& src2) { |  | 
| 820   LEnvironment* environment = instr->environment(); | 820   LEnvironment* environment = instr->environment(); | 
| 821   RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); | 821   RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); | 
| 822   DCHECK(environment->HasBeenRegistered()); | 822   DCHECK(environment->HasBeenRegistered()); | 
| 823   int id = environment->deoptimization_index(); | 823   int id = environment->deoptimization_index(); | 
| 824   DCHECK(info()->IsOptimizing() || info()->IsStub()); | 824   DCHECK(info()->IsOptimizing() || info()->IsStub()); | 
| 825   Address entry = | 825   Address entry = | 
| 826       Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type); | 826       Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type); | 
| 827   if (entry == NULL) { | 827   if (entry == NULL) { | 
| 828     Abort(kBailoutWasNotPrepared); | 828     Abort(kBailoutWasNotPrepared); | 
| 829     return; | 829     return; | 
| (...skipping 21 matching lines...) | |
| 851   if (info()->ShouldTrapOnDeopt()) { | 851   if (info()->ShouldTrapOnDeopt()) { | 
| 852     Label skip; | 852     Label skip; | 
| 853     if (condition != al) { | 853     if (condition != al) { | 
| 854       __ Branch(&skip, NegateCondition(condition), src1, src2); | 854       __ Branch(&skip, NegateCondition(condition), src1, src2); | 
| 855     } | 855     } | 
| 856     __ stop("trap_on_deopt"); | 856     __ stop("trap_on_deopt"); | 
| 857     __ bind(&skip); | 857     __ bind(&skip); | 
| 858   } | 858   } | 
| 859 | 859 | 
| 860   Deoptimizer::Reason reason(instr->hydrogen_value()->position().raw(), | 860   Deoptimizer::Reason reason(instr->hydrogen_value()->position().raw(), | 
| 861                              instr->Mnemonic(), detail); | 861                              instr->Mnemonic(), deopt_reason); | 
| 862   DCHECK(info()->IsStub() || frame_is_built_); | 862   DCHECK(info()->IsStub() || frame_is_built_); | 
| 863   // Go through jump table if we need to handle condition, build frame, or | 863   // Go through jump table if we need to handle condition, build frame, or | 
| 864   // restore caller doubles. | 864   // restore caller doubles. | 
| 865   if (condition == al && frame_is_built_ && | 865   if (condition == al && frame_is_built_ && | 
| 866       !info()->saves_caller_doubles()) { | 866       !info()->saves_caller_doubles()) { | 
| 867     DeoptComment(reason); | 867     DeoptComment(reason); | 
| 868     __ Call(entry, RelocInfo::RUNTIME_ENTRY, condition, src1, src2); | 868     __ Call(entry, RelocInfo::RUNTIME_ENTRY, condition, src1, src2); | 
| 869   } else { | 869   } else { | 
| 870     Deoptimizer::JumpTableEntry table_entry(entry, reason, bailout_type, | 870     Deoptimizer::JumpTableEntry table_entry(entry, reason, bailout_type, | 
| 871                                             !frame_is_built_); | 871                                             !frame_is_built_); | 
| 872     // We often have several deopts to the same entry, reuse the last | 872     // We often have several deopts to the same entry, reuse the last | 
| 873     // jump entry if this is the case. | 873     // jump entry if this is the case. | 
| 874     if (jump_table_.is_empty() || | 874     if (jump_table_.is_empty() || | 
| 875         !table_entry.IsEquivalentTo(jump_table_.last())) { | 875         !table_entry.IsEquivalentTo(jump_table_.last())) { | 
| 876       jump_table_.Add(table_entry, zone()); | 876       jump_table_.Add(table_entry, zone()); | 
| 877     } | 877     } | 
| 878     __ Branch(&jump_table_.last().label, condition, src1, src2); | 878     __ Branch(&jump_table_.last().label, condition, src1, src2); | 
| 879   } | 879   } | 
| 880 } | 880 } | 
| 881 | 881 | 
| 882 | 882 | 
| 883 void LCodeGen::DeoptimizeIf(Condition condition, LInstruction* instr, | 883 void LCodeGen::DeoptimizeIf(Condition condition, LInstruction* instr, | 
| 884                             const char* detail, Register src1, | 884                             Deoptimizer::DeoptReason deopt_reason, | 
| 885                             const Operand& src2) { | 885                             Register src1, const Operand& src2) { | 
| 886   Deoptimizer::BailoutType bailout_type = info()->IsStub() | 886   Deoptimizer::BailoutType bailout_type = info()->IsStub() | 
| 887       ? Deoptimizer::LAZY | 887       ? Deoptimizer::LAZY | 
| 888       : Deoptimizer::EAGER; | 888       : Deoptimizer::EAGER; | 
| 889   DeoptimizeIf(condition, instr, bailout_type, detail, src1, src2); | 889   DeoptimizeIf(condition, instr, deopt_reason, bailout_type, src1, src2); | 
| 890 } | 890 } | 
| 891 | 891 | 
| 892 | 892 | 
| 893 void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) { | 893 void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) { | 
| 894   int length = deoptimizations_.length(); | 894   int length = deoptimizations_.length(); | 
| 895   if (length == 0) return; | 895   if (length == 0) return; | 
| 896   Handle<DeoptimizationInputData> data = | 896   Handle<DeoptimizationInputData> data = | 
| 897       DeoptimizationInputData::New(isolate(), length, TENURED); | 897       DeoptimizationInputData::New(isolate(), length, TENURED); | 
| 898 | 898 | 
| 899   Handle<ByteArray> translations = | 899   Handle<ByteArray> translations = | 
| (...skipping 210 matching lines...) | |
| 1110   HMod* hmod = instr->hydrogen(); | 1110   HMod* hmod = instr->hydrogen(); | 
| 1111   int32_t mask = divisor < 0 ? -(divisor + 1) : (divisor - 1); | 1111   int32_t mask = divisor < 0 ? -(divisor + 1) : (divisor - 1); | 
| 1112   Label dividend_is_not_negative, done; | 1112   Label dividend_is_not_negative, done; | 
| 1113 | 1113 | 
| 1114   if (hmod->CheckFlag(HValue::kLeftCanBeNegative)) { | 1114   if (hmod->CheckFlag(HValue::kLeftCanBeNegative)) { | 
| 1115     __ Branch(&dividend_is_not_negative, ge, dividend, Operand(zero_reg)); | 1115     __ Branch(&dividend_is_not_negative, ge, dividend, Operand(zero_reg)); |
| 1116     // Note: The code below even works when right contains kMinInt. | 1116     // Note: The code below even works when right contains kMinInt. | 
| 1117     __ subu(dividend, zero_reg, dividend); | 1117     __ subu(dividend, zero_reg, dividend); | 
| 1118     __ And(dividend, dividend, Operand(mask)); | 1118     __ And(dividend, dividend, Operand(mask)); | 
| 1119     if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { | 1119     if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { | 
| 1120       DeoptimizeIf(eq, instr, "minus zero", dividend, Operand(zero_reg)); | 1120       DeoptimizeIf(eq, instr, Deoptimizer::kMinusZero, dividend, | 
|  | 1121                    Operand(zero_reg)); | 
| 1121     } | 1122     } | 
| 1122     __ Branch(USE_DELAY_SLOT, &done); | 1123     __ Branch(USE_DELAY_SLOT, &done); | 
| 1123     __ subu(dividend, zero_reg, dividend); | 1124     __ subu(dividend, zero_reg, dividend); | 
| 1124   } | 1125   } | 
| 1125 | 1126 | 
| 1126   __ bind(&dividend_is_not_negative); | 1127   __ bind(&dividend_is_not_negative); |
| 1127   __ And(dividend, dividend, Operand(mask)); | 1128   __ And(dividend, dividend, Operand(mask)); | 
| 1128   __ bind(&done); | 1129   __ bind(&done); | 
| 1129 } | 1130 } | 
| 1130 | 1131 | 
| (...skipping 11 matching lines...) | |
| 1142 | 1143 | 
| 1143   __ TruncatingDiv(result, dividend, Abs(divisor)); | 1144   __ TruncatingDiv(result, dividend, Abs(divisor)); | 
| 1144   __ Mul(result, result, Operand(Abs(divisor))); | 1145   __ Mul(result, result, Operand(Abs(divisor))); | 
| 1145   __ Subu(result, dividend, Operand(result)); | 1146   __ Subu(result, dividend, Operand(result)); | 
| 1146 | 1147 | 
| 1147   // Check for negative zero. | 1148   // Check for negative zero. | 
| 1148   HMod* hmod = instr->hydrogen(); | 1149   HMod* hmod = instr->hydrogen(); | 
| 1149   if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { | 1150   if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { | 
| 1150     Label remainder_not_zero; | 1151     Label remainder_not_zero; | 
| 1151     __ Branch(&remainder_not_zero, ne, result, Operand(zero_reg)); | 1152     __ Branch(&remainder_not_zero, ne, result, Operand(zero_reg)); | 
| 1152     DeoptimizeIf(lt, instr, "minus zero", dividend, Operand(zero_reg)); | 1153     DeoptimizeIf(lt, instr, Deoptimizer::kMinusZero, dividend, | 
|  | 1154                  Operand(zero_reg)); | 
| 1153     __ bind(&remainder_not_zero); | 1155     __ bind(&remainder_not_zero); | 
| 1154   } | 1156   } | 
| 1155 } | 1157 } | 
| 1156 | 1158 | 
| 1157 | 1159 | 
| 1158 void LCodeGen::DoModI(LModI* instr) { | 1160 void LCodeGen::DoModI(LModI* instr) { | 
| 1159   HMod* hmod = instr->hydrogen(); | 1161   HMod* hmod = instr->hydrogen(); | 
| 1160   const Register left_reg = ToRegister(instr->left()); | 1162   const Register left_reg = ToRegister(instr->left()); | 
| 1161   const Register right_reg = ToRegister(instr->right()); | 1163   const Register right_reg = ToRegister(instr->right()); | 
| 1162   const Register result_reg = ToRegister(instr->result()); | 1164   const Register result_reg = ToRegister(instr->result()); | 
| 1163 | 1165 | 
| 1164   // div runs in the background while we check for special cases. | 1166   // div runs in the background while we check for special cases. | 
| 1165   __ Mod(result_reg, left_reg, right_reg); | 1167   __ Mod(result_reg, left_reg, right_reg); | 
| 1166 | 1168 | 
| 1167   Label done; | 1169   Label done; | 
| 1168   // Check for x % 0, we have to deopt in this case because we can't return a | 1170   // Check for x % 0, we have to deopt in this case because we can't return a | 
| 1169   // NaN. | 1171   // NaN. | 
| 1170   if (hmod->CheckFlag(HValue::kCanBeDivByZero)) { | 1172   if (hmod->CheckFlag(HValue::kCanBeDivByZero)) { | 
| 1171     DeoptimizeIf(eq, instr, "division by zero", right_reg, Operand(zero_reg)); | 1173     DeoptimizeIf(eq, instr, Deoptimizer::kDivisionByZero, right_reg, | 
|  | 1174                  Operand(zero_reg)); | 
| 1172   } | 1175   } | 
| 1173 | 1176 | 
| 1174   // Check for kMinInt % -1, div will return kMinInt, which is not what we | 1177   // Check for kMinInt % -1, div will return kMinInt, which is not what we | 
| 1175   // want. We have to deopt if we care about -0, because we can't return that. | 1178   // want. We have to deopt if we care about -0, because we can't return that. | 
| 1176   if (hmod->CheckFlag(HValue::kCanOverflow)) { | 1179   if (hmod->CheckFlag(HValue::kCanOverflow)) { | 
| 1177     Label no_overflow_possible; | 1180     Label no_overflow_possible; | 
| 1178     __ Branch(&no_overflow_possible, ne, left_reg, Operand(kMinInt)); | 1181     __ Branch(&no_overflow_possible, ne, left_reg, Operand(kMinInt)); | 
| 1179     if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { | 1182     if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { | 
| 1180       DeoptimizeIf(eq, instr, "minus zero", right_reg, Operand(-1)); | 1183       DeoptimizeIf(eq, instr, Deoptimizer::kMinusZero, right_reg, Operand(-1)); | 
| 1181     } else { | 1184     } else { | 
| 1182       __ Branch(&no_overflow_possible, ne, right_reg, Operand(-1)); | 1185       __ Branch(&no_overflow_possible, ne, right_reg, Operand(-1)); | 
| 1183       __ Branch(USE_DELAY_SLOT, &done); | 1186       __ Branch(USE_DELAY_SLOT, &done); | 
| 1184       __ mov(result_reg, zero_reg); | 1187       __ mov(result_reg, zero_reg); | 
| 1185     } | 1188     } | 
| 1186     __ bind(&no_overflow_possible); | 1189     __ bind(&no_overflow_possible); | 
| 1187   } | 1190   } | 
| 1188 | 1191 | 
| 1189   // If we care about -0, test if the dividend is <0 and the result is 0. | 1192   // If we care about -0, test if the dividend is <0 and the result is 0. | 
| 1190   __ Branch(&done, ge, left_reg, Operand(zero_reg)); | 1193   __ Branch(&done, ge, left_reg, Operand(zero_reg)); | 
| 1191   if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { | 1194   if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { | 
| 1192     DeoptimizeIf(eq, instr, "minus zero", result_reg, Operand(zero_reg)); | 1195     DeoptimizeIf(eq, instr, Deoptimizer::kMinusZero, result_reg, | 
|  | 1196                  Operand(zero_reg)); | 
| 1193   } | 1197   } | 
| 1194   __ bind(&done); | 1198   __ bind(&done); | 
| 1195 } | 1199 } | 
| 1196 | 1200 | 
| 1197 | 1201 | 
| 1198 void LCodeGen::DoDivByPowerOf2I(LDivByPowerOf2I* instr) { | 1202 void LCodeGen::DoDivByPowerOf2I(LDivByPowerOf2I* instr) { | 
| 1199   Register dividend = ToRegister(instr->dividend()); | 1203   Register dividend = ToRegister(instr->dividend()); | 
| 1200   int32_t divisor = instr->divisor(); | 1204   int32_t divisor = instr->divisor(); | 
| 1201   Register result = ToRegister(instr->result()); | 1205   Register result = ToRegister(instr->result()); | 
| 1202   DCHECK(divisor == kMinInt || base::bits::IsPowerOfTwo32(Abs(divisor))); | 1206   DCHECK(divisor == kMinInt || base::bits::IsPowerOfTwo32(Abs(divisor))); | 
| 1203   DCHECK(!result.is(dividend)); | 1207   DCHECK(!result.is(dividend)); | 
| 1204 | 1208 | 
| 1205   // Check for (0 / -x) that will produce negative zero. | 1209   // Check for (0 / -x) that will produce negative zero. | 
| 1206   HDiv* hdiv = instr->hydrogen(); | 1210   HDiv* hdiv = instr->hydrogen(); | 
| 1207   if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) { | 1211   if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) { | 
| 1208     DeoptimizeIf(eq, instr, "minus zero", dividend, Operand(zero_reg)); | 1212     DeoptimizeIf(eq, instr, Deoptimizer::kMinusZero, dividend, | 
|  | 1213                  Operand(zero_reg)); | 
| 1209   } | 1214   } | 
| 1210   // Check for (kMinInt / -1). | 1215   // Check for (kMinInt / -1). | 
| 1211   if (hdiv->CheckFlag(HValue::kCanOverflow) && divisor == -1) { | 1216   if (hdiv->CheckFlag(HValue::kCanOverflow) && divisor == -1) { | 
| 1212     DeoptimizeIf(eq, instr, "overflow", dividend, Operand(kMinInt)); | 1217     DeoptimizeIf(eq, instr, Deoptimizer::kOverflow, dividend, Operand(kMinInt)); | 
| 1213   } | 1218   } | 
| 1214   // Deoptimize if remainder will not be 0. | 1219   // Deoptimize if remainder will not be 0. | 
| 1215   if (!hdiv->CheckFlag(HInstruction::kAllUsesTruncatingToInt32) && | 1220   if (!hdiv->CheckFlag(HInstruction::kAllUsesTruncatingToInt32) && | 
| 1216       divisor != 1 && divisor != -1) { | 1221       divisor != 1 && divisor != -1) { | 
| 1217     int32_t mask = divisor < 0 ? -(divisor + 1) : (divisor - 1); | 1222     int32_t mask = divisor < 0 ? -(divisor + 1) : (divisor - 1); | 
| 1218     __ And(at, dividend, Operand(mask)); | 1223     __ And(at, dividend, Operand(mask)); | 
| 1219     DeoptimizeIf(ne, instr, "lost precision", at, Operand(zero_reg)); | 1224     DeoptimizeIf(ne, instr, Deoptimizer::kLostPrecision, at, Operand(zero_reg)); | 
| 1220   } | 1225   } | 
| 1221 | 1226 | 
| 1222   if (divisor == -1) {  // Nice shortcut, not needed for correctness. | 1227   if (divisor == -1) {  // Nice shortcut, not needed for correctness. | 
| 1223     __ Subu(result, zero_reg, dividend); | 1228     __ Subu(result, zero_reg, dividend); | 
| 1224     return; | 1229     return; | 
| 1225   } | 1230   } | 
| 1226   uint16_t shift = WhichPowerOf2Abs(divisor); | 1231   uint16_t shift = WhichPowerOf2Abs(divisor); | 
| 1227   if (shift == 0) { | 1232   if (shift == 0) { | 
| 1228     __ Move(result, dividend); | 1233     __ Move(result, dividend); | 
| 1229   } else if (shift == 1) { | 1234   } else if (shift == 1) { | 
| (...skipping 16 matching lines...) | |
| 1246   DCHECK(!dividend.is(result)); | 1251   DCHECK(!dividend.is(result)); | 
| 1247 | 1252 | 
| 1248   if (divisor == 0) { | 1253   if (divisor == 0) { | 
| 1249     DeoptimizeIf(al, instr); | 1254     DeoptimizeIf(al, instr); | 
| 1250     return; | 1255     return; | 
| 1251   } | 1256   } | 
| 1252 | 1257 | 
| 1253   // Check for (0 / -x) that will produce negative zero. | 1258   // Check for (0 / -x) that will produce negative zero. | 
| 1254   HDiv* hdiv = instr->hydrogen(); | 1259   HDiv* hdiv = instr->hydrogen(); | 
| 1255   if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) { | 1260   if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) { | 
| 1256     DeoptimizeIf(eq, instr, "minus zero", dividend, Operand(zero_reg)); | 1261     DeoptimizeIf(eq, instr, Deoptimizer::kMinusZero, dividend, | 
|  | 1262                  Operand(zero_reg)); | 
| 1257   } | 1263   } | 
| 1258 | 1264 | 
| 1259   __ TruncatingDiv(result, dividend, Abs(divisor)); | 1265   __ TruncatingDiv(result, dividend, Abs(divisor)); | 
| 1260   if (divisor < 0) __ Subu(result, zero_reg, result); | 1266   if (divisor < 0) __ Subu(result, zero_reg, result); | 
| 1261 | 1267 | 
| 1262   if (!hdiv->CheckFlag(HInstruction::kAllUsesTruncatingToInt32)) { | 1268   if (!hdiv->CheckFlag(HInstruction::kAllUsesTruncatingToInt32)) { | 
| 1263     __ Mul(scratch0(), result, Operand(divisor)); | 1269     __ Mul(scratch0(), result, Operand(divisor)); | 
| 1264     __ Subu(scratch0(), scratch0(), dividend); | 1270     __ Subu(scratch0(), scratch0(), dividend); | 
| 1265     DeoptimizeIf(ne, instr, "lost precision", scratch0(), Operand(zero_reg)); | 1271     DeoptimizeIf(ne, instr, Deoptimizer::kLostPrecision, scratch0(), | 
|  | 1272                  Operand(zero_reg)); | 
| 1266   } | 1273   } | 
| 1267 } | 1274 } | 
| 1268 | 1275 | 
| 1269 | 1276 | 
| 1270 // TODO(svenpanne) Refactor this to avoid code duplication with DoFlooringDivI. | 1277 // TODO(svenpanne) Refactor this to avoid code duplication with DoFlooringDivI. | 
| 1271 void LCodeGen::DoDivI(LDivI* instr) { | 1278 void LCodeGen::DoDivI(LDivI* instr) { | 
| 1272   HBinaryOperation* hdiv = instr->hydrogen(); | 1279   HBinaryOperation* hdiv = instr->hydrogen(); | 
| 1273   Register dividend = ToRegister(instr->dividend()); | 1280   Register dividend = ToRegister(instr->dividend()); | 
| 1274   Register divisor = ToRegister(instr->divisor()); | 1281   Register divisor = ToRegister(instr->divisor()); | 
| 1275   const Register result = ToRegister(instr->result()); | 1282   const Register result = ToRegister(instr->result()); | 
| 1276   Register remainder = ToRegister(instr->temp()); | 1283   Register remainder = ToRegister(instr->temp()); | 
| 1277 | 1284 | 
| 1278   // On MIPS div is asynchronous - it will run in the background while we | 1285   // On MIPS div is asynchronous - it will run in the background while we | 
| 1279   // check for special cases. | 1286   // check for special cases. | 
| 1280   __ Div(remainder, result, dividend, divisor); | 1287   __ Div(remainder, result, dividend, divisor); | 
| 1281 | 1288 | 
| 1282   // Check for x / 0. | 1289   // Check for x / 0. | 
| 1283   if (hdiv->CheckFlag(HValue::kCanBeDivByZero)) { | 1290   if (hdiv->CheckFlag(HValue::kCanBeDivByZero)) { | 
| 1284     DeoptimizeIf(eq, instr, "division by zero", divisor, Operand(zero_reg)); | 1291     DeoptimizeIf(eq, instr, Deoptimizer::kDivisionByZero, divisor, | 
|  | 1292                  Operand(zero_reg)); | 
| 1285   } | 1293   } | 
| 1286 | 1294 | 
| 1287   // Check for (0 / -x) that will produce negative zero. | 1295   // Check for (0 / -x) that will produce negative zero. | 
| 1288   if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero)) { | 1296   if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero)) { | 
| 1289     Label left_not_zero; | 1297     Label left_not_zero; | 
| 1290     __ Branch(&left_not_zero, ne, dividend, Operand(zero_reg)); | 1298     __ Branch(&left_not_zero, ne, dividend, Operand(zero_reg)); | 
| 1291     DeoptimizeIf(lt, instr, "minus zero", divisor, Operand(zero_reg)); | 1299     DeoptimizeIf(lt, instr, Deoptimizer::kMinusZero, divisor, | 
|  | 1300                  Operand(zero_reg)); | 
| 1292     __ bind(&left_not_zero); | 1301     __ bind(&left_not_zero); | 
| 1293   } | 1302   } | 
| 1294 | 1303 | 
| 1295   // Check for (kMinInt / -1). | 1304   // Check for (kMinInt / -1). | 
| 1296   if (hdiv->CheckFlag(HValue::kCanOverflow) && | 1305   if (hdiv->CheckFlag(HValue::kCanOverflow) && | 
| 1297       !hdiv->CheckFlag(HValue::kAllUsesTruncatingToInt32)) { | 1306       !hdiv->CheckFlag(HValue::kAllUsesTruncatingToInt32)) { | 
| 1298     Label left_not_min_int; | 1307     Label left_not_min_int; | 
| 1299     __ Branch(&left_not_min_int, ne, dividend, Operand(kMinInt)); | 1308     __ Branch(&left_not_min_int, ne, dividend, Operand(kMinInt)); | 
| 1300     DeoptimizeIf(eq, instr, "overflow", divisor, Operand(-1)); | 1309     DeoptimizeIf(eq, instr, Deoptimizer::kOverflow, divisor, Operand(-1)); | 
| 1301     __ bind(&left_not_min_int); | 1310     __ bind(&left_not_min_int); | 
| 1302   } | 1311   } | 
| 1303 | 1312 | 
| 1304   if (!hdiv->CheckFlag(HValue::kAllUsesTruncatingToInt32)) { | 1313   if (!hdiv->CheckFlag(HValue::kAllUsesTruncatingToInt32)) { | 
| 1305     DeoptimizeIf(ne, instr, "lost precision", remainder, Operand(zero_reg)); | 1314     DeoptimizeIf(ne, instr, Deoptimizer::kLostPrecision, remainder, | 
|  | 1315                  Operand(zero_reg)); | 
| 1306   } | 1316   } | 
| 1307 } | 1317 } | 
| 1308 | 1318 | 
| 1309 | 1319 | 
| 1310 void LCodeGen::DoMultiplyAddD(LMultiplyAddD* instr) { | 1320 void LCodeGen::DoMultiplyAddD(LMultiplyAddD* instr) { | 
| 1311   DoubleRegister addend = ToDoubleRegister(instr->addend()); | 1321   DoubleRegister addend = ToDoubleRegister(instr->addend()); | 
| 1312   DoubleRegister multiplier = ToDoubleRegister(instr->multiplier()); | 1322   DoubleRegister multiplier = ToDoubleRegister(instr->multiplier()); | 
| 1313   DoubleRegister multiplicand = ToDoubleRegister(instr->multiplicand()); | 1323   DoubleRegister multiplicand = ToDoubleRegister(instr->multiplicand()); | 
| 1314 | 1324 | 
| 1315   // This is computed in-place. | 1325   // This is computed in-place. | 
| (...skipping 25 matching lines...) | |
| 1341   } | 1351   } | 
| 1342 | 1352 | 
| 1343   // If the divisor is negative, we have to negate and handle edge cases. | 1353   // If the divisor is negative, we have to negate and handle edge cases. | 
| 1344 | 1354 | 
| 1345   // dividend can be the same register as result so save the value of it | 1355   // dividend can be the same register as result so save the value of it | 
| 1346   // for checking overflow. | 1356   // for checking overflow. | 
| 1347   __ Move(scratch, dividend); | 1357   __ Move(scratch, dividend); | 
| 1348 | 1358 | 
| 1349   __ Subu(result, zero_reg, dividend); | 1359   __ Subu(result, zero_reg, dividend); | 
| 1350   if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 1360   if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 
| 1351     DeoptimizeIf(eq, instr, "minus zero", result, Operand(zero_reg)); | 1361     DeoptimizeIf(eq, instr, Deoptimizer::kMinusZero, result, Operand(zero_reg)); | 
| 1352   } | 1362   } | 
| 1353 | 1363 | 
| 1354   // Dividing by -1 is basically negation, unless we overflow. | 1364   // Dividing by -1 is basically negation, unless we overflow. | 
| 1355   __ Xor(scratch, scratch, result); | 1365   __ Xor(scratch, scratch, result); | 
| 1356   if (divisor == -1) { | 1366   if (divisor == -1) { | 
| 1357     if (instr->hydrogen()->CheckFlag(HValue::kLeftCanBeMinInt)) { | 1367     if (instr->hydrogen()->CheckFlag(HValue::kLeftCanBeMinInt)) { | 
| 1358       DeoptimizeIf(ge, instr, "overflow", scratch, Operand(zero_reg)); | 1368       DeoptimizeIf(ge, instr, Deoptimizer::kOverflow, scratch, | 
|  | 1369                    Operand(zero_reg)); | 
| 1359     } | 1370     } | 
| 1360     return; | 1371     return; | 
| 1361   } | 1372   } | 
| 1362 | 1373 | 
| 1363   // If the negation could not overflow, simply shifting is OK. | 1374   // If the negation could not overflow, simply shifting is OK. | 
| 1364   if (!instr->hydrogen()->CheckFlag(HValue::kLeftCanBeMinInt)) { | 1375   if (!instr->hydrogen()->CheckFlag(HValue::kLeftCanBeMinInt)) { | 
| 1365     __ sra(result, result, shift); | 1376     __ sra(result, result, shift); | 
| 1366     return; | 1377     return; | 
| 1367   } | 1378   } | 
| 1368 | 1379 | 
| (...skipping 14 matching lines...) | |
| 1383   DCHECK(!dividend.is(result)); | 1394   DCHECK(!dividend.is(result)); | 
| 1384 | 1395 | 
| 1385   if (divisor == 0) { | 1396   if (divisor == 0) { | 
| 1386     DeoptimizeIf(al, instr); | 1397     DeoptimizeIf(al, instr); | 
| 1387     return; | 1398     return; | 
| 1388   } | 1399   } | 
| 1389 | 1400 | 
| 1390   // Check for (0 / -x) that will produce negative zero. | 1401   // Check for (0 / -x) that will produce negative zero. | 
| 1391   HMathFloorOfDiv* hdiv = instr->hydrogen(); | 1402   HMathFloorOfDiv* hdiv = instr->hydrogen(); | 
| 1392   if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) { | 1403   if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) { | 
| 1393     DeoptimizeIf(eq, instr, "minus zero", dividend, Operand(zero_reg)); | 1404     DeoptimizeIf(eq, instr, Deoptimizer::kMinusZero, dividend, | 
|  | 1405                  Operand(zero_reg)); | 
| 1394   } | 1406   } | 
| 1395 | 1407 | 
| 1396   // Easy case: We need no dynamic check for the dividend and the flooring | 1408   // Easy case: We need no dynamic check for the dividend and the flooring | 
| 1397   // division is the same as the truncating division. | 1409   // division is the same as the truncating division. | 
| 1398   if ((divisor > 0 && !hdiv->CheckFlag(HValue::kLeftCanBeNegative)) || | 1410   if ((divisor > 0 && !hdiv->CheckFlag(HValue::kLeftCanBeNegative)) || | 
| 1399       (divisor < 0 && !hdiv->CheckFlag(HValue::kLeftCanBePositive))) { | 1411       (divisor < 0 && !hdiv->CheckFlag(HValue::kLeftCanBePositive))) { | 
| 1400     __ TruncatingDiv(result, dividend, Abs(divisor)); | 1412     __ TruncatingDiv(result, dividend, Abs(divisor)); | 
| 1401     if (divisor < 0) __ Subu(result, zero_reg, result); | 1413     if (divisor < 0) __ Subu(result, zero_reg, result); | 
| 1402     return; | 1414     return; | 
| 1403   } | 1415   } | 
| (...skipping 23 matching lines...) | |
| 1427   Register dividend = ToRegister(instr->dividend()); | 1439   Register dividend = ToRegister(instr->dividend()); | 
| 1428   Register divisor = ToRegister(instr->divisor()); | 1440   Register divisor = ToRegister(instr->divisor()); | 
| 1429   const Register result = ToRegister(instr->result()); | 1441   const Register result = ToRegister(instr->result()); | 
| 1430   Register remainder = scratch0(); | 1442   Register remainder = scratch0(); | 
| 1431   // On MIPS div is asynchronous - it will run in the background while we | 1443   // On MIPS div is asynchronous - it will run in the background while we | 
| 1432   // check for special cases. | 1444   // check for special cases. | 
| 1433   __ Div(remainder, result, dividend, divisor); | 1445   __ Div(remainder, result, dividend, divisor); | 
| 1434 | 1446 | 
| 1435   // Check for x / 0. | 1447   // Check for x / 0. | 
| 1436   if (hdiv->CheckFlag(HValue::kCanBeDivByZero)) { | 1448   if (hdiv->CheckFlag(HValue::kCanBeDivByZero)) { | 
| 1437     DeoptimizeIf(eq, instr, "division by zero", divisor, Operand(zero_reg)); | 1449     DeoptimizeIf(eq, instr, Deoptimizer::kDivisionByZero, divisor, | 
|  | 1450                  Operand(zero_reg)); | 
| 1438   } | 1451   } | 
| 1439 | 1452 | 
| 1440   // Check for (0 / -x) that will produce negative zero. | 1453   // Check for (0 / -x) that will produce negative zero. | 
| 1441   if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero)) { | 1454   if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero)) { | 
| 1442     Label left_not_zero; | 1455     Label left_not_zero; | 
| 1443     __ Branch(&left_not_zero, ne, dividend, Operand(zero_reg)); | 1456     __ Branch(&left_not_zero, ne, dividend, Operand(zero_reg)); | 
| 1444     DeoptimizeIf(lt, instr, "minus zero", divisor, Operand(zero_reg)); | 1457     DeoptimizeIf(lt, instr, Deoptimizer::kMinusZero, divisor, | 
|  | 1458                  Operand(zero_reg)); | 
| 1445     __ bind(&left_not_zero); | 1459     __ bind(&left_not_zero); | 
| 1446   } | 1460   } | 
| 1447 | 1461 | 
| 1448   // Check for (kMinInt / -1). | 1462   // Check for (kMinInt / -1). | 
| 1449   if (hdiv->CheckFlag(HValue::kCanOverflow) && | 1463   if (hdiv->CheckFlag(HValue::kCanOverflow) && | 
| 1450       !hdiv->CheckFlag(HValue::kAllUsesTruncatingToInt32)) { | 1464       !hdiv->CheckFlag(HValue::kAllUsesTruncatingToInt32)) { | 
| 1451     Label left_not_min_int; | 1465     Label left_not_min_int; | 
| 1452     __ Branch(&left_not_min_int, ne, dividend, Operand(kMinInt)); | 1466     __ Branch(&left_not_min_int, ne, dividend, Operand(kMinInt)); | 
| 1453     DeoptimizeIf(eq, instr, "overflow", divisor, Operand(-1)); | 1467     DeoptimizeIf(eq, instr, Deoptimizer::kOverflow, divisor, Operand(-1)); | 
| 1454     __ bind(&left_not_min_int); | 1468     __ bind(&left_not_min_int); | 
| 1455   } | 1469   } | 
| 1456 | 1470 | 
| 1457   // We performed a truncating division. Correct the result if necessary. | 1471   // We performed a truncating division. Correct the result if necessary. | 
| 1458   Label done; | 1472   Label done; | 
| 1459   __ Branch(&done, eq, remainder, Operand(zero_reg), USE_DELAY_SLOT); | 1473   __ Branch(&done, eq, remainder, Operand(zero_reg), USE_DELAY_SLOT); | 
| 1460   __ Xor(remainder, remainder, Operand(divisor)); | 1474   __ Xor(remainder, remainder, Operand(divisor)); | 
| 1461   __ Branch(&done, ge, remainder, Operand(zero_reg)); | 1475   __ Branch(&done, ge, remainder, Operand(zero_reg)); | 
| 1462   __ Subu(result, result, Operand(1)); | 1476   __ Subu(result, result, Operand(1)); | 
| 1463   __ bind(&done); | 1477   __ bind(&done); | 
| (...skipping 10 matching lines...) | |
| 1474   bool bailout_on_minus_zero = | 1488   bool bailout_on_minus_zero = | 
| 1475     instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero); | 1489     instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero); | 
| 1476   bool overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow); | 1490   bool overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow); | 
| 1477 | 1491 | 
| 1478   if (right_op->IsConstantOperand()) { | 1492   if (right_op->IsConstantOperand()) { | 
| 1479     int32_t constant = ToInteger32(LConstantOperand::cast(right_op)); | 1493     int32_t constant = ToInteger32(LConstantOperand::cast(right_op)); | 
| 1480 | 1494 | 
| 1481     if (bailout_on_minus_zero && (constant < 0)) { | 1495     if (bailout_on_minus_zero && (constant < 0)) { | 
| 1482       // The case of a null constant will be handled separately. | 1496       // The case of a null constant will be handled separately. | 
| 1483       // If constant is negative and left is null, the result should be -0. | 1497       // If constant is negative and left is null, the result should be -0. | 
| 1484       DeoptimizeIf(eq, instr, "minus zero", left, Operand(zero_reg)); | 1498       DeoptimizeIf(eq, instr, Deoptimizer::kMinusZero, left, Operand(zero_reg)); | 
| 1485     } | 1499     } | 
| 1486 | 1500 | 
| 1487     switch (constant) { | 1501     switch (constant) { | 
| 1488       case -1: | 1502       case -1: | 
| 1489         if (overflow) { | 1503         if (overflow) { | 
| 1490           __ SubuAndCheckForOverflow(result, zero_reg, left, scratch); | 1504           __ SubuAndCheckForOverflow(result, zero_reg, left, scratch); | 
| 1491           DeoptimizeIf(lt, instr, "overflow", scratch, Operand(zero_reg)); | 1505           DeoptimizeIf(lt, instr, Deoptimizer::kOverflow, scratch, | 
|  | 1506                        Operand(zero_reg)); | 
| 1492         } else { | 1507         } else { | 
| 1493           __ Subu(result, zero_reg, left); | 1508           __ Subu(result, zero_reg, left); | 
| 1494         } | 1509         } | 
| 1495         break; | 1510         break; | 
| 1496       case 0: | 1511       case 0: | 
| 1497         if (bailout_on_minus_zero) { | 1512         if (bailout_on_minus_zero) { | 
| 1498           // If left is strictly negative and the constant is null, the | 1513           // If left is strictly negative and the constant is null, the | 
| 1499           // result is -0. Deoptimize if required, otherwise return 0. | 1514           // result is -0. Deoptimize if required, otherwise return 0. | 
| 1500           DeoptimizeIf(lt, instr, "minus zero", left, Operand(zero_reg)); | 1515           DeoptimizeIf(lt, instr, Deoptimizer::kMinusZero, left, | 
|  | 1516                        Operand(zero_reg)); | 
| 1501         } | 1517         } | 
| 1502         __ mov(result, zero_reg); | 1518         __ mov(result, zero_reg); | 
| 1503         break; | 1519         break; | 
| 1504       case 1: | 1520       case 1: | 
| 1505         // Nothing to do. | 1521         // Nothing to do. | 
| 1506         __ Move(result, left); | 1522         __ Move(result, left); | 
| 1507         break; | 1523         break; | 
| 1508       default: | 1524       default: | 
| 1509         // Multiplying by powers of two and powers of two plus or minus | 1525         // Multiplying by powers of two and powers of two plus or minus | 
| 1510         // one can be done faster with shifted operands. | 1526         // one can be done faster with shifted operands. | 
| (...skipping 31 matching lines...) | |
| 1542 | 1558 | 
| 1543     if (overflow) { | 1559     if (overflow) { | 
| 1544       // hi:lo = left * right. | 1560       // hi:lo = left * right. | 
| 1545       if (instr->hydrogen()->representation().IsSmi()) { | 1561       if (instr->hydrogen()->representation().IsSmi()) { | 
| 1546         __ SmiUntag(result, left); | 1562         __ SmiUntag(result, left); | 
| 1547         __ Mul(scratch, result, result, right); | 1563         __ Mul(scratch, result, result, right); | 
| 1548       } else { | 1564       } else { | 
| 1549         __ Mul(scratch, result, left, right); | 1565         __ Mul(scratch, result, left, right); | 
| 1550       } | 1566       } | 
| 1551       __ sra(at, result, 31); | 1567       __ sra(at, result, 31); | 
| 1552       DeoptimizeIf(ne, instr, "overflow", scratch, Operand(at)); | 1568       DeoptimizeIf(ne, instr, Deoptimizer::kOverflow, scratch, Operand(at)); | 
| 1553     } else { | 1569     } else { | 
| 1554       if (instr->hydrogen()->representation().IsSmi()) { | 1570       if (instr->hydrogen()->representation().IsSmi()) { | 
| 1555         __ SmiUntag(result, left); | 1571         __ SmiUntag(result, left); | 
| 1556         __ Mul(result, result, right); | 1572         __ Mul(result, result, right); | 
| 1557       } else { | 1573       } else { | 
| 1558         __ Mul(result, left, right); | 1574         __ Mul(result, left, right); | 
| 1559       } | 1575       } | 
| 1560     } | 1576     } | 
| 1561 | 1577 | 
| 1562     if (bailout_on_minus_zero) { | 1578     if (bailout_on_minus_zero) { | 
| 1563       Label done; | 1579       Label done; | 
| 1564       __ Xor(at, left, right); | 1580       __ Xor(at, left, right); | 
| 1565       __ Branch(&done, ge, at, Operand(zero_reg)); | 1581       __ Branch(&done, ge, at, Operand(zero_reg)); | 
| 1566       // Bail out if the result is minus zero. | 1582       // Bail out if the result is minus zero. | 
| 1567       DeoptimizeIf(eq, instr, "minus zero", result, Operand(zero_reg)); | 1583       DeoptimizeIf(eq, instr, Deoptimizer::kMinusZero, result, | 
|  | 1584                    Operand(zero_reg)); | 
| 1568       __ bind(&done); | 1585       __ bind(&done); | 
| 1569     } | 1586     } | 
| 1570   } | 1587   } | 
| 1571 } | 1588 } | 
| 1572 | 1589 | 
| 1573 | 1590 | 
| 1574 void LCodeGen::DoBitI(LBitI* instr) { | 1591 void LCodeGen::DoBitI(LBitI* instr) { | 
| 1575   LOperand* left_op = instr->left(); | 1592   LOperand* left_op = instr->left(); | 
| 1576   LOperand* right_op = instr->right(); | 1593   LOperand* right_op = instr->right(); | 
| 1577   DCHECK(left_op->IsRegister()); | 1594   DCHECK(left_op->IsRegister()); | 
| (...skipping 43 matching lines...) | |
| 1621     switch (instr->op()) { | 1638     switch (instr->op()) { | 
| 1622       case Token::ROR: | 1639       case Token::ROR: | 
| 1623         __ Ror(result, left, Operand(ToRegister(right_op))); | 1640         __ Ror(result, left, Operand(ToRegister(right_op))); | 
| 1624         break; | 1641         break; | 
| 1625       case Token::SAR: | 1642       case Token::SAR: | 
| 1626         __ srav(result, left, ToRegister(right_op)); | 1643         __ srav(result, left, ToRegister(right_op)); | 
| 1627         break; | 1644         break; | 
| 1628       case Token::SHR: | 1645       case Token::SHR: | 
| 1629         __ srlv(result, left, ToRegister(right_op)); | 1646         __ srlv(result, left, ToRegister(right_op)); | 
| 1630         if (instr->can_deopt()) { | 1647         if (instr->can_deopt()) { | 
| 1631           DeoptimizeIf(lt, instr, "negative value", result, Operand(zero_reg)); | 1648           DeoptimizeIf(lt, instr, Deoptimizer::kNegativeValue, result, | 
|  | 1649                        Operand(zero_reg)); | 
| 1632         } | 1650         } | 
| 1633         break; | 1651         break; | 
| 1634       case Token::SHL: | 1652       case Token::SHL: | 
| 1635         __ sllv(result, left, ToRegister(right_op)); | 1653         __ sllv(result, left, ToRegister(right_op)); | 
| 1636         break; | 1654         break; | 
| 1637       default: | 1655       default: | 
| 1638         UNREACHABLE(); | 1656         UNREACHABLE(); | 
| 1639         break; | 1657         break; | 
| 1640     } | 1658     } | 
| 1641   } else { | 1659   } else { | 
| (...skipping 14 matching lines...) | |
| 1656         } else { | 1674         } else { | 
| 1657           __ Move(result, left); | 1675           __ Move(result, left); | 
| 1658         } | 1676         } | 
| 1659         break; | 1677         break; | 
| 1660       case Token::SHR: | 1678       case Token::SHR: | 
| 1661         if (shift_count != 0) { | 1679         if (shift_count != 0) { | 
| 1662           __ srl(result, left, shift_count); | 1680           __ srl(result, left, shift_count); | 
| 1663         } else { | 1681         } else { | 
| 1664           if (instr->can_deopt()) { | 1682           if (instr->can_deopt()) { | 
| 1665             __ And(at, left, Operand(0x80000000)); | 1683             __ And(at, left, Operand(0x80000000)); | 
| 1666             DeoptimizeIf(ne, instr, "negative value", at, Operand(zero_reg)); | 1684             DeoptimizeIf(ne, instr, Deoptimizer::kNegativeValue, at, | 
|  | 1685                          Operand(zero_reg)); | 
| 1667           } | 1686           } | 
| 1668           __ Move(result, left); | 1687           __ Move(result, left); | 
| 1669         } | 1688         } | 
| 1670         break; | 1689         break; | 
| 1671       case Token::SHL: | 1690       case Token::SHL: | 
| 1672         if (shift_count != 0) { | 1691         if (shift_count != 0) { | 
| 1673           if (instr->hydrogen_value()->representation().IsSmi() && | 1692           if (instr->hydrogen_value()->representation().IsSmi() && | 
| 1674               instr->can_deopt()) { | 1693               instr->can_deopt()) { | 
| 1675             if (shift_count != 1) { | 1694             if (shift_count != 1) { | 
| 1676               __ sll(result, left, shift_count - 1); | 1695               __ sll(result, left, shift_count - 1); | 
| 1677               __ SmiTagCheckOverflow(result, result, scratch); | 1696               __ SmiTagCheckOverflow(result, result, scratch); | 
| 1678             } else { | 1697             } else { | 
| 1679               __ SmiTagCheckOverflow(result, left, scratch); | 1698               __ SmiTagCheckOverflow(result, left, scratch); | 
| 1680             } | 1699             } | 
| 1681             DeoptimizeIf(lt, instr, "overflow", scratch, Operand(zero_reg)); | 1700             DeoptimizeIf(lt, instr, Deoptimizer::kOverflow, scratch, | 
|  | 1701                          Operand(zero_reg)); | 
| 1682           } else { | 1702           } else { | 
| 1683             __ sll(result, left, shift_count); | 1703             __ sll(result, left, shift_count); | 
| 1684           } | 1704           } | 
| 1685         } else { | 1705         } else { | 
| 1686           __ Move(result, left); | 1706           __ Move(result, left); | 
| 1687         } | 1707         } | 
| 1688         break; | 1708         break; | 
| 1689       default: | 1709       default: | 
| 1690         UNREACHABLE(); | 1710         UNREACHABLE(); | 
| 1691         break; | 1711         break; | 
| (...skipping 27 matching lines...) | |
| 1719                                  overflow);  // Reg at also used as scratch. | 1739                                  overflow);  // Reg at also used as scratch. | 
| 1720     } else { | 1740     } else { | 
| 1721       DCHECK(right->IsRegister()); | 1741       DCHECK(right->IsRegister()); | 
| 1722       // Due to overflow check macros not supporting constant operands, | 1742       // Due to overflow check macros not supporting constant operands, | 
| 1723       // handling the IsConstantOperand case was moved to prev if clause. | 1743       // handling the IsConstantOperand case was moved to prev if clause. | 
| 1724       __ SubuAndCheckForOverflow(ToRegister(result), | 1744       __ SubuAndCheckForOverflow(ToRegister(result), | 
| 1725                                  ToRegister(left), | 1745                                  ToRegister(left), | 
| 1726                                  ToRegister(right), | 1746                                  ToRegister(right), | 
| 1727                                  overflow);  // Reg at also used as scratch. | 1747                                  overflow);  // Reg at also used as scratch. | 
| 1728     } | 1748     } | 
| 1729     DeoptimizeIf(lt, instr, "overflow", overflow, Operand(zero_reg)); | 1749     DeoptimizeIf(lt, instr, Deoptimizer::kOverflow, overflow, | 
|  | 1750                  Operand(zero_reg)); | 
| 1730   } | 1751   } | 
| 1731 } | 1752 } | 
| 1732 | 1753 | 
| 1733 | 1754 | 
| 1734 void LCodeGen::DoConstantI(LConstantI* instr) { | 1755 void LCodeGen::DoConstantI(LConstantI* instr) { | 
| 1735   __ li(ToRegister(instr->result()), Operand(instr->value())); | 1756   __ li(ToRegister(instr->result()), Operand(instr->value())); | 
| 1736 } | 1757 } | 
| 1737 | 1758 | 
| 1738 | 1759 | 
| 1739 void LCodeGen::DoConstantS(LConstantS* instr) { | 1760 void LCodeGen::DoConstantS(LConstantS* instr) { | 
| (...skipping 47 matching lines...) | |
| 1787   Register result = ToRegister(instr->result()); | 1808   Register result = ToRegister(instr->result()); | 
| 1788   Register scratch = ToRegister(instr->temp()); | 1809   Register scratch = ToRegister(instr->temp()); | 
| 1789   Smi* index = instr->index(); | 1810   Smi* index = instr->index(); | 
| 1790   Label runtime, done; | 1811   Label runtime, done; | 
| 1791   DCHECK(object.is(a0)); | 1812   DCHECK(object.is(a0)); | 
| 1792   DCHECK(result.is(v0)); | 1813   DCHECK(result.is(v0)); | 
| 1793   DCHECK(!scratch.is(scratch0())); | 1814   DCHECK(!scratch.is(scratch0())); | 
| 1794   DCHECK(!scratch.is(object)); | 1815   DCHECK(!scratch.is(object)); | 
| 1795 | 1816 | 
| 1796   __ SmiTst(object, at); | 1817   __ SmiTst(object, at); | 
| 1797   DeoptimizeIf(eq, instr, "Smi", at, Operand(zero_reg)); | 1818   DeoptimizeIf(eq, instr, Deoptimizer::kSmi, at, Operand(zero_reg)); | 
| 1798   __ GetObjectType(object, scratch, scratch); | 1819   __ GetObjectType(object, scratch, scratch); | 
| 1799   DeoptimizeIf(ne, instr, "not a date object", scratch, Operand(JS_DATE_TYPE)); | 1820   DeoptimizeIf(ne, instr, Deoptimizer::kNotADateObject, scratch, | 
|  | 1821                Operand(JS_DATE_TYPE)); | 
| 1800 | 1822 | 
| 1801   if (index->value() == 0) { | 1823   if (index->value() == 0) { | 
| 1802     __ lw(result, FieldMemOperand(object, JSDate::kValueOffset)); | 1824     __ lw(result, FieldMemOperand(object, JSDate::kValueOffset)); | 
| 1803   } else { | 1825   } else { | 
| 1804     if (index->value() < JSDate::kFirstUncachedField) { | 1826     if (index->value() < JSDate::kFirstUncachedField) { | 
| 1805       ExternalReference stamp = ExternalReference::date_cache_stamp(isolate()); | 1827       ExternalReference stamp = ExternalReference::date_cache_stamp(isolate()); | 
| 1806       __ li(scratch, Operand(stamp)); | 1828       __ li(scratch, Operand(stamp)); | 
| 1807       __ lw(scratch, MemOperand(scratch)); | 1829       __ lw(scratch, MemOperand(scratch)); | 
| 1808       __ lw(scratch0(), FieldMemOperand(object, JSDate::kCacheStampOffset)); | 1830       __ lw(scratch0(), FieldMemOperand(object, JSDate::kCacheStampOffset)); | 
| 1809       __ Branch(&runtime, ne, scratch, Operand(scratch0())); | 1831       __ Branch(&runtime, ne, scratch, Operand(scratch0())); | 
| (...skipping 114 matching lines...) | |
| 1924                                  overflow);  // Reg at also used as scratch. | 1946                                  overflow);  // Reg at also used as scratch. | 
| 1925     } else { | 1947     } else { | 
| 1926       DCHECK(right->IsRegister()); | 1948       DCHECK(right->IsRegister()); | 
| 1927       // Due to overflow check macros not supporting constant operands, | 1949       // Due to overflow check macros not supporting constant operands, | 
| 1928       // handling the IsConstantOperand case was moved to prev if clause. | 1950       // handling the IsConstantOperand case was moved to prev if clause. | 
| 1929       __ AdduAndCheckForOverflow(ToRegister(result), | 1951       __ AdduAndCheckForOverflow(ToRegister(result), | 
| 1930                                  ToRegister(left), | 1952                                  ToRegister(left), | 
| 1931                                  ToRegister(right), | 1953                                  ToRegister(right), | 
| 1932                                  overflow);  // Reg at also used as scratch. | 1954                                  overflow);  // Reg at also used as scratch. | 
| 1933     } | 1955     } | 
| 1934     DeoptimizeIf(lt, instr, "overflow", overflow, Operand(zero_reg)); | 1956     DeoptimizeIf(lt, instr, Deoptimizer::kOverflow, overflow, | 
|  | 1957                  Operand(zero_reg)); | 
| 1935   } | 1958   } | 
| 1936 } | 1959 } | 
| 1937 | 1960 | 
| 1938 | 1961 | 
| 1939 void LCodeGen::DoMathMinMax(LMathMinMax* instr) { | 1962 void LCodeGen::DoMathMinMax(LMathMinMax* instr) { | 
| 1940   LOperand* left = instr->left(); | 1963   LOperand* left = instr->left(); | 
| 1941   LOperand* right = instr->right(); | 1964   LOperand* right = instr->right(); | 
| 1942   HMathMinMax::Operation operation = instr->hydrogen()->operation(); | 1965   HMathMinMax::Operation operation = instr->hydrogen()->operation(); | 
| 1943   Condition condition = (operation == HMathMinMax::kMathMin) ? le : ge; | 1966   Condition condition = (operation == HMathMinMax::kMathMin) ? le : ge; | 
| 1944   if (instr->hydrogen()->representation().IsSmiOrInteger32()) { | 1967   if (instr->hydrogen()->representation().IsSmiOrInteger32()) { | 
| (...skipping 240 matching lines...) | |
| 2185         __ Branch(instr->FalseLabel(chunk_), eq, reg, Operand(at)); | 2208         __ Branch(instr->FalseLabel(chunk_), eq, reg, Operand(at)); | 
| 2186       } | 2209       } | 
| 2187 | 2210 | 
| 2188       if (expected.Contains(ToBooleanStub::SMI)) { | 2211       if (expected.Contains(ToBooleanStub::SMI)) { | 
| 2189         // Smis: 0 -> false, all other -> true. | 2212         // Smis: 0 -> false, all other -> true. | 
| 2190         __ Branch(instr->FalseLabel(chunk_), eq, reg, Operand(zero_reg)); | 2213         __ Branch(instr->FalseLabel(chunk_), eq, reg, Operand(zero_reg)); | 
| 2191         __ JumpIfSmi(reg, instr->TrueLabel(chunk_)); | 2214         __ JumpIfSmi(reg, instr->TrueLabel(chunk_)); | 
| 2192       } else if (expected.NeedsMap()) { | 2215       } else if (expected.NeedsMap()) { | 
| 2193         // If we need a map later and have a Smi -> deopt. | 2216         // If we need a map later and have a Smi -> deopt. | 
| 2194         __ SmiTst(reg, at); | 2217         __ SmiTst(reg, at); | 
| 2195         DeoptimizeIf(eq, instr, "Smi", at, Operand(zero_reg)); | 2218         DeoptimizeIf(eq, instr, Deoptimizer::kSmi, at, Operand(zero_reg)); | 
| 2196       } | 2219       } | 
| 2197 | 2220 | 
| 2198       const Register map = scratch0(); | 2221       const Register map = scratch0(); | 
| 2199       if (expected.NeedsMap()) { | 2222       if (expected.NeedsMap()) { | 
| 2200         __ lw(map, FieldMemOperand(reg, HeapObject::kMapOffset)); | 2223         __ lw(map, FieldMemOperand(reg, HeapObject::kMapOffset)); | 
| 2201         if (expected.CanBeUndetectable()) { | 2224         if (expected.CanBeUndetectable()) { | 
| 2202           // Undetectable -> false. | 2225           // Undetectable -> false. | 
| 2203           __ lbu(at, FieldMemOperand(map, Map::kBitFieldOffset)); | 2226           __ lbu(at, FieldMemOperand(map, Map::kBitFieldOffset)); | 
| 2204           __ And(at, at, Operand(1 << Map::kIsUndetectable)); | 2227           __ And(at, at, Operand(1 << Map::kIsUndetectable)); | 
| 2205           __ Branch(instr->FalseLabel(chunk_), ne, at, Operand(zero_reg)); | 2228           __ Branch(instr->FalseLabel(chunk_), ne, at, Operand(zero_reg)); | 
| (...skipping 35 matching lines...) | |
| 2241         __ BranchF(instr->TrueLabel(chunk_), instr->FalseLabel(chunk_), | 2264         __ BranchF(instr->TrueLabel(chunk_), instr->FalseLabel(chunk_), | 
| 2242                    ne, dbl_scratch, kDoubleRegZero); | 2265                    ne, dbl_scratch, kDoubleRegZero); | 
| 2243         // Falls through if dbl_scratch == 0. | 2266         // Falls through if dbl_scratch == 0. | 
| 2244         __ Branch(instr->FalseLabel(chunk_)); | 2267         __ Branch(instr->FalseLabel(chunk_)); | 
| 2245   __ bind(&not_heap_number); | 2268   __ bind(&not_heap_number); |
| 2246       } | 2269       } | 
| 2247 | 2270 | 
| 2248       if (!expected.IsGeneric()) { | 2271       if (!expected.IsGeneric()) { | 
| 2249         // We've seen something for the first time -> deopt. | 2272         // We've seen something for the first time -> deopt. | 
| 2250         // This can only happen if we are not generic already. | 2273         // This can only happen if we are not generic already. | 
| 2251         DeoptimizeIf(al, instr, "unexpected object", zero_reg, | 2274         DeoptimizeIf(al, instr, Deoptimizer::kUnexpectedObject, zero_reg, | 
| 2252                      Operand(zero_reg)); | 2275                      Operand(zero_reg)); | 
| 2253       } | 2276       } | 
| 2254     } | 2277     } | 
| 2255   } | 2278   } | 
| 2256 } | 2279 } | 
| 2257 | 2280 | 
| 2258 | 2281 | 
| 2259 void LCodeGen::EmitGoto(int block) { | 2282 void LCodeGen::EmitGoto(int block) { | 
| 2260   if (!IsNextEmittedBlock(block)) { | 2283   if (!IsNextEmittedBlock(block)) { | 
| 2261     __ jmp(chunk_->GetAssemblyLabel(LookupDestination(block))); | 2284     __ jmp(chunk_->GetAssemblyLabel(LookupDestination(block))); | 
| (...skipping 625 matching lines...) | |
| 2887   } | 2910   } | 
| 2888 } | 2911 } | 
| 2889 | 2912 | 
| 2890 | 2913 | 
| 2891 void LCodeGen::DoLoadGlobalCell(LLoadGlobalCell* instr) { | 2914 void LCodeGen::DoLoadGlobalCell(LLoadGlobalCell* instr) { | 
| 2892   Register result = ToRegister(instr->result()); | 2915   Register result = ToRegister(instr->result()); | 
| 2893   __ li(at, Operand(Handle<Object>(instr->hydrogen()->cell().handle()))); | 2916   __ li(at, Operand(Handle<Object>(instr->hydrogen()->cell().handle()))); | 
| 2894   __ lw(result, FieldMemOperand(at, Cell::kValueOffset)); | 2917   __ lw(result, FieldMemOperand(at, Cell::kValueOffset)); | 
| 2895   if (instr->hydrogen()->RequiresHoleCheck()) { | 2918   if (instr->hydrogen()->RequiresHoleCheck()) { | 
| 2896     __ LoadRoot(at, Heap::kTheHoleValueRootIndex); | 2919     __ LoadRoot(at, Heap::kTheHoleValueRootIndex); | 
| 2897     DeoptimizeIf(eq, instr, "hole", result, Operand(at)); | 2920     DeoptimizeIf(eq, instr, Deoptimizer::kHole, result, Operand(at)); | 
| 2898   } | 2921   } | 
| 2899 } | 2922 } | 
| 2900 | 2923 | 
| 2901 | 2924 | 
| 2902 template <class T> | 2925 template <class T> | 
| 2903 void LCodeGen::EmitVectorLoadICRegisters(T* instr) { | 2926 void LCodeGen::EmitVectorLoadICRegisters(T* instr) { | 
| 2904   DCHECK(FLAG_vector_ics); | 2927   DCHECK(FLAG_vector_ics); | 
| 2905   Register vector_register = ToRegister(instr->temp_vector()); | 2928   Register vector_register = ToRegister(instr->temp_vector()); | 
| 2906   Register slot_register = VectorLoadICDescriptor::SlotRegister(); | 2929   Register slot_register = VectorLoadICDescriptor::SlotRegister(); | 
| 2907   DCHECK(vector_register.is(VectorLoadICDescriptor::VectorRegister())); | 2930   DCHECK(vector_register.is(VectorLoadICDescriptor::VectorRegister())); | 
| (...skipping 34 matching lines...) | |
| 2942 | 2965 | 
| 2943   // If the cell we are storing to contains the hole it could have | 2966   // If the cell we are storing to contains the hole it could have | 
| 2944   // been deleted from the property dictionary. In that case, we need | 2967   // been deleted from the property dictionary. In that case, we need | 
| 2945   // to update the property details in the property dictionary to mark | 2968   // to update the property details in the property dictionary to mark | 
| 2946   // it as no longer deleted. | 2969   // it as no longer deleted. | 
| 2947   if (instr->hydrogen()->RequiresHoleCheck()) { | 2970   if (instr->hydrogen()->RequiresHoleCheck()) { | 
| 2948     // We use a temp to check the payload. | 2971     // We use a temp to check the payload. | 
| 2949     Register payload = ToRegister(instr->temp()); | 2972     Register payload = ToRegister(instr->temp()); | 
| 2950     __ lw(payload, FieldMemOperand(cell, Cell::kValueOffset)); | 2973     __ lw(payload, FieldMemOperand(cell, Cell::kValueOffset)); | 
| 2951     __ LoadRoot(at, Heap::kTheHoleValueRootIndex); | 2974     __ LoadRoot(at, Heap::kTheHoleValueRootIndex); | 
| 2952     DeoptimizeIf(eq, instr, "hole", payload, Operand(at)); | 2975     DeoptimizeIf(eq, instr, Deoptimizer::kHole, payload, Operand(at)); | 
| 2953   } | 2976   } | 
| 2954 | 2977 | 
| 2955   // Store the value. | 2978   // Store the value. | 
| 2956   __ sw(value, FieldMemOperand(cell, Cell::kValueOffset)); | 2979   __ sw(value, FieldMemOperand(cell, Cell::kValueOffset)); | 
| 2957   // Cells are always rescanned, so no write barrier here. | 2980   // Cells are always rescanned, so no write barrier here. | 
| 2958 } | 2981 } | 
| 2959 | 2982 | 
| 2960 | 2983 | 
| 2961 | 2984 | 
| 2962 void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) { | 2985 void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) { | 
| 2963   Register context = ToRegister(instr->context()); | 2986   Register context = ToRegister(instr->context()); | 
| 2964   Register result = ToRegister(instr->result()); | 2987   Register result = ToRegister(instr->result()); | 
| 2965 | 2988 | 
| 2966   __ lw(result, ContextOperand(context, instr->slot_index())); | 2989   __ lw(result, ContextOperand(context, instr->slot_index())); | 
| 2967   if (instr->hydrogen()->RequiresHoleCheck()) { | 2990   if (instr->hydrogen()->RequiresHoleCheck()) { | 
| 2968     __ LoadRoot(at, Heap::kTheHoleValueRootIndex); | 2991     __ LoadRoot(at, Heap::kTheHoleValueRootIndex); | 
| 2969 | 2992 | 
| 2970     if (instr->hydrogen()->DeoptimizesOnHole()) { | 2993     if (instr->hydrogen()->DeoptimizesOnHole()) { | 
| 2971       DeoptimizeIf(eq, instr, "hole", result, Operand(at)); | 2994       DeoptimizeIf(eq, instr, Deoptimizer::kHole, result, Operand(at)); | 
| 2972     } else { | 2995     } else { | 
| 2973       Label is_not_hole; | 2996       Label is_not_hole; | 
| 2974       __ Branch(&is_not_hole, ne, result, Operand(at)); | 2997       __ Branch(&is_not_hole, ne, result, Operand(at)); | 
| 2975       __ LoadRoot(result, Heap::kUndefinedValueRootIndex); | 2998       __ LoadRoot(result, Heap::kUndefinedValueRootIndex); | 
| 2976       __ bind(&is_not_hole); | 2999       __ bind(&is_not_hole); | 
| 2977     } | 3000     } | 
| 2978   } | 3001   } | 
| 2979 } | 3002 } | 
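`DoLoadContextSlot` handles the hole in one of two ways, depending on `DeoptimizesOnHole()`: bail out, or silently substitute `undefined`. A standalone sketch of that branch structure, using plain sentinel pointers in place of V8's hole and undefined values (all names here are illustrative):

```cpp
#include <cassert>
#include <cstdio>

// Stand-ins for V8's hole and undefined sentinel objects (illustrative only).
static const int kTheHoleSentinel = 0;
static const int kUndefinedSentinel = 0;
static const void* const kTheHole = &kTheHoleSentinel;
static const void* const kUndefined = &kUndefinedSentinel;

// Returns false to model "deoptimize"; otherwise *result holds the value the
// caller sees.  slot_value plays the role of the word loaded from the slot.
bool LoadContextSlot(const void* slot_value, bool deoptimizes_on_hole,
                     const void** result) {
  if (slot_value == kTheHole) {
    if (deoptimizes_on_hole) return false;  // DeoptimizeIf(eq, ..., kHole)
    slot_value = kUndefined;                // Otherwise substitute undefined.
  }
  *result = slot_value;
  return true;
}

int main() {
  const void* out = nullptr;
  assert(!LoadContextSlot(kTheHole, /*deoptimizes_on_hole=*/true, &out));
  assert(LoadContextSlot(kTheHole, /*deoptimizes_on_hole=*/false, &out));
  assert(out == kUndefined);
  std::puts("ok");
  return 0;
}
```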
| 2980 | 3003 | 
| 2981 | 3004 | 
| 2982 void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) { | 3005 void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) { | 
| 2983   Register context = ToRegister(instr->context()); | 3006   Register context = ToRegister(instr->context()); | 
| 2984   Register value = ToRegister(instr->value()); | 3007   Register value = ToRegister(instr->value()); | 
| 2985   Register scratch = scratch0(); | 3008   Register scratch = scratch0(); | 
| 2986   MemOperand target = ContextOperand(context, instr->slot_index()); | 3009   MemOperand target = ContextOperand(context, instr->slot_index()); | 
| 2987 | 3010 | 
| 2988   Label skip_assignment; | 3011   Label skip_assignment; | 
| 2989 | 3012 | 
| 2990   if (instr->hydrogen()->RequiresHoleCheck()) { | 3013   if (instr->hydrogen()->RequiresHoleCheck()) { | 
| 2991     __ lw(scratch, target); | 3014     __ lw(scratch, target); | 
| 2992     __ LoadRoot(at, Heap::kTheHoleValueRootIndex); | 3015     __ LoadRoot(at, Heap::kTheHoleValueRootIndex); | 
| 2993 | 3016 | 
| 2994     if (instr->hydrogen()->DeoptimizesOnHole()) { | 3017     if (instr->hydrogen()->DeoptimizesOnHole()) { | 
| 2995       DeoptimizeIf(eq, instr, "hole", scratch, Operand(at)); | 3018       DeoptimizeIf(eq, instr, Deoptimizer::kHole, scratch, Operand(at)); | 
| 2996     } else { | 3019     } else { | 
| 2997       __ Branch(&skip_assignment, ne, scratch, Operand(at)); | 3020       __ Branch(&skip_assignment, ne, scratch, Operand(at)); | 
| 2998     } | 3021     } | 
| 2999   } | 3022   } | 
| 3000 | 3023 | 
| 3001   __ sw(value, target); | 3024   __ sw(value, target); | 
| 3002   if (instr->hydrogen()->NeedsWriteBarrier()) { | 3025   if (instr->hydrogen()->NeedsWriteBarrier()) { | 
| 3003     SmiCheck check_needed = | 3026     SmiCheck check_needed = | 
| 3004         instr->hydrogen()->value()->type().IsHeapObject() | 3027         instr->hydrogen()->value()->type().IsHeapObject() | 
| 3005             ? OMIT_SMI_CHECK : INLINE_SMI_CHECK; | 3028             ? OMIT_SMI_CHECK : INLINE_SMI_CHECK; | 
| (...skipping 59 matching lines...) |
| 3065   Register scratch = scratch0(); | 3088   Register scratch = scratch0(); | 
| 3066   Register function = ToRegister(instr->function()); | 3089   Register function = ToRegister(instr->function()); | 
| 3067   Register result = ToRegister(instr->result()); | 3090   Register result = ToRegister(instr->result()); | 
| 3068 | 3091 | 
| 3069   // Get the prototype or initial map from the function. | 3092   // Get the prototype or initial map from the function. | 
| 3070   __ lw(result, | 3093   __ lw(result, | 
| 3071          FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset)); | 3094          FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset)); | 
| 3072 | 3095 | 
| 3073   // Check that the function has a prototype or an initial map. | 3096   // Check that the function has a prototype or an initial map. | 
| 3074   __ LoadRoot(at, Heap::kTheHoleValueRootIndex); | 3097   __ LoadRoot(at, Heap::kTheHoleValueRootIndex); | 
| 3075   DeoptimizeIf(eq, instr, "hole", result, Operand(at)); | 3098   DeoptimizeIf(eq, instr, Deoptimizer::kHole, result, Operand(at)); | 
| 3076 | 3099 | 
| 3077   // If the function does not have an initial map, we're done. | 3100   // If the function does not have an initial map, we're done. | 
| 3078   Label done; | 3101   Label done; | 
| 3079   __ GetObjectType(result, scratch, scratch); | 3102   __ GetObjectType(result, scratch, scratch); | 
| 3080   __ Branch(&done, ne, scratch, Operand(MAP_TYPE)); | 3103   __ Branch(&done, ne, scratch, Operand(MAP_TYPE)); | 
| 3081 | 3104 | 
| 3082   // Get the prototype from the initial map. | 3105   // Get the prototype from the initial map. | 
| 3083   __ lw(result, FieldMemOperand(result, Map::kPrototypeOffset)); | 3106   __ lw(result, FieldMemOperand(result, Map::kPrototypeOffset)); | 
| 3084 | 3107 | 
| 3085   // All done. | 3108   // All done. | 
| (...skipping 115 matching lines...) |
| 3201         __ lhu(result, mem_operand); | 3224         __ lhu(result, mem_operand); | 
| 3202         break; | 3225         break; | 
| 3203       case EXTERNAL_INT32_ELEMENTS: | 3226       case EXTERNAL_INT32_ELEMENTS: | 
| 3204       case INT32_ELEMENTS: | 3227       case INT32_ELEMENTS: | 
| 3205         __ lw(result, mem_operand); | 3228         __ lw(result, mem_operand); | 
| 3206         break; | 3229         break; | 
| 3207       case EXTERNAL_UINT32_ELEMENTS: | 3230       case EXTERNAL_UINT32_ELEMENTS: | 
| 3208       case UINT32_ELEMENTS: | 3231       case UINT32_ELEMENTS: | 
| 3209         __ lw(result, mem_operand); | 3232         __ lw(result, mem_operand); | 
| 3210         if (!instr->hydrogen()->CheckFlag(HInstruction::kUint32)) { | 3233         if (!instr->hydrogen()->CheckFlag(HInstruction::kUint32)) { | 
| 3211           DeoptimizeIf(Ugreater_equal, instr, "negative value", result, | 3234           DeoptimizeIf(Ugreater_equal, instr, Deoptimizer::kNegativeValue, | 
| 3212                        Operand(0x80000000)); | 3235                        result, Operand(0x80000000)); | 
| 3213         } | 3236         } | 
| 3214         break; | 3237         break; | 
| 3215       case FLOAT32_ELEMENTS: | 3238       case FLOAT32_ELEMENTS: | 
| 3216       case FLOAT64_ELEMENTS: | 3239       case FLOAT64_ELEMENTS: | 
| 3217       case EXTERNAL_FLOAT32_ELEMENTS: | 3240       case EXTERNAL_FLOAT32_ELEMENTS: | 
| 3218       case EXTERNAL_FLOAT64_ELEMENTS: | 3241       case EXTERNAL_FLOAT64_ELEMENTS: | 
| 3219       case FAST_DOUBLE_ELEMENTS: | 3242       case FAST_DOUBLE_ELEMENTS: | 
| 3220       case FAST_ELEMENTS: | 3243       case FAST_ELEMENTS: | 
| 3221       case FAST_SMI_ELEMENTS: | 3244       case FAST_SMI_ELEMENTS: | 
| 3222       case FAST_HOLEY_DOUBLE_ELEMENTS: | 3245       case FAST_HOLEY_DOUBLE_ELEMENTS: | 
| (...skipping 32 matching lines...) |
| 3255     int shift_size = (instr->hydrogen()->key()->representation().IsSmi()) | 3278     int shift_size = (instr->hydrogen()->key()->representation().IsSmi()) | 
| 3256         ? (element_size_shift - kSmiTagSize) : element_size_shift; | 3279         ? (element_size_shift - kSmiTagSize) : element_size_shift; | 
| 3257     __ sll(at, key, shift_size); | 3280     __ sll(at, key, shift_size); | 
| 3258     __ Addu(scratch, scratch, at); | 3281     __ Addu(scratch, scratch, at); | 
| 3259   } | 3282   } | 
| 3260 | 3283 | 
| 3261   __ ldc1(result, MemOperand(scratch)); | 3284   __ ldc1(result, MemOperand(scratch)); | 
| 3262 | 3285 | 
| 3263   if (instr->hydrogen()->RequiresHoleCheck()) { | 3286   if (instr->hydrogen()->RequiresHoleCheck()) { | 
| 3264     __ lw(scratch, MemOperand(scratch, kHoleNanUpper32Offset)); | 3287     __ lw(scratch, MemOperand(scratch, kHoleNanUpper32Offset)); | 
| 3265     DeoptimizeIf(eq, instr, "hole", scratch, Operand(kHoleNanUpper32)); | 3288     DeoptimizeIf(eq, instr, Deoptimizer::kHole, scratch, | 
|  | 3289                  Operand(kHoleNanUpper32)); | 
| 3266   } | 3290   } | 
| 3267 } | 3291 } | 
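In holey double arrays the hole is encoded as a NaN with a distinguished upper word, which is why the code above reloads only the upper 32 bits and compares them against `kHoleNanUpper32`. A standalone sketch of that bit-level check; the two constants mirror the general encoding but should be treated as placeholders rather than V8's exact values.

```cpp
#include <cstdint>
#include <cstdio>
#include <cstring>

// Illustrative hole-NaN encoding: a NaN with a recognizable upper word.
const uint32_t kDemoHoleNanUpper32 = 0x7FF7FFFF;
const uint32_t kDemoHoleNanLower32 = 0xFFF7FFFF;

double MakeHoleNan() {
  uint64_t bits = (uint64_t{kDemoHoleNanUpper32} << 32) | kDemoHoleNanLower32;
  double d;
  std::memcpy(&d, &bits, sizeof(d));
  return d;
}

// Mirrors the generated code: inspect only the upper 32 bits of the double.
bool IsHole(double d) {
  uint64_t bits;
  std::memcpy(&bits, &d, sizeof(bits));
  return static_cast<uint32_t>(bits >> 32) == kDemoHoleNanUpper32;
}

int main() {
  std::printf("hole? %d\n", IsHole(MakeHoleNan()));  // 1: would deopt
  std::printf("hole? %d\n", IsHole(1.5));            // 0: ordinary element
  return 0;
}
```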
| 3268 | 3292 | 
| 3269 | 3293 | 
| 3270 void LCodeGen::DoLoadKeyedFixedArray(LLoadKeyed* instr) { | 3294 void LCodeGen::DoLoadKeyedFixedArray(LLoadKeyed* instr) { | 
| 3271   Register elements = ToRegister(instr->elements()); | 3295   Register elements = ToRegister(instr->elements()); | 
| 3272   Register result = ToRegister(instr->result()); | 3296   Register result = ToRegister(instr->result()); | 
| 3273   Register scratch = scratch0(); | 3297   Register scratch = scratch0(); | 
| 3274   Register store_base = scratch; | 3298   Register store_base = scratch; | 
| 3275   int offset = instr->base_offset(); | 3299   int offset = instr->base_offset(); | 
| (...skipping 15 matching lines...) |
| 3291       __ sll(scratch, key, kPointerSizeLog2); | 3315       __ sll(scratch, key, kPointerSizeLog2); | 
| 3292       __ addu(scratch, elements, scratch); | 3316       __ addu(scratch, elements, scratch); | 
| 3293     } | 3317     } | 
| 3294   } | 3318   } | 
| 3295   __ lw(result, MemOperand(store_base, offset)); | 3319   __ lw(result, MemOperand(store_base, offset)); | 
| 3296 | 3320 | 
| 3297   // Check for the hole value. | 3321   // Check for the hole value. | 
| 3298   if (instr->hydrogen()->RequiresHoleCheck()) { | 3322   if (instr->hydrogen()->RequiresHoleCheck()) { | 
| 3299     if (IsFastSmiElementsKind(instr->hydrogen()->elements_kind())) { | 3323     if (IsFastSmiElementsKind(instr->hydrogen()->elements_kind())) { | 
| 3300       __ SmiTst(result, scratch); | 3324       __ SmiTst(result, scratch); | 
| 3301       DeoptimizeIf(ne, instr, "not a Smi", scratch, Operand(zero_reg)); | 3325       DeoptimizeIf(ne, instr, Deoptimizer::kNotASmi, scratch, | 
|  | 3326                    Operand(zero_reg)); | 
| 3302     } else { | 3327     } else { | 
| 3303       __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex); | 3328       __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex); | 
| 3304       DeoptimizeIf(eq, instr, "hole", result, Operand(scratch)); | 3329       DeoptimizeIf(eq, instr, Deoptimizer::kHole, result, Operand(scratch)); | 
| 3305     } | 3330     } | 
| 3306   } | 3331   } | 
| 3307 } | 3332 } | 
| 3308 | 3333 | 
| 3309 | 3334 | 
| 3310 void LCodeGen::DoLoadKeyed(LLoadKeyed* instr) { | 3335 void LCodeGen::DoLoadKeyed(LLoadKeyed* instr) { | 
| 3311   if (instr->is_typed_elements()) { | 3336   if (instr->is_typed_elements()) { | 
| 3312     DoLoadKeyedExternalArray(instr); | 3337     DoLoadKeyedExternalArray(instr); | 
| 3313   } else if (instr->hydrogen()->representation().IsDouble()) { | 3338   } else if (instr->hydrogen()->representation().IsDouble()) { | 
| 3314     DoLoadKeyedFixedDoubleArray(instr); | 3339     DoLoadKeyedFixedDoubleArray(instr); | 
| (...skipping 125 matching lines...) |
| 3440   } | 3465   } | 
| 3441 | 3466 | 
| 3442   // Normal function. Replace undefined or null with global receiver. | 3467   // Normal function. Replace undefined or null with global receiver. | 
| 3443   __ LoadRoot(scratch, Heap::kNullValueRootIndex); | 3468   __ LoadRoot(scratch, Heap::kNullValueRootIndex); | 
| 3444   __ Branch(&global_object, eq, receiver, Operand(scratch)); | 3469   __ Branch(&global_object, eq, receiver, Operand(scratch)); | 
| 3445   __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex); | 3470   __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex); | 
| 3446   __ Branch(&global_object, eq, receiver, Operand(scratch)); | 3471   __ Branch(&global_object, eq, receiver, Operand(scratch)); | 
| 3447 | 3472 | 
| 3448   // Deoptimize if the receiver is not a JS object. | 3473   // Deoptimize if the receiver is not a JS object. | 
| 3449   __ SmiTst(receiver, scratch); | 3474   __ SmiTst(receiver, scratch); | 
| 3450   DeoptimizeIf(eq, instr, "Smi", scratch, Operand(zero_reg)); | 3475   DeoptimizeIf(eq, instr, Deoptimizer::kSmi, scratch, Operand(zero_reg)); | 
| 3451 | 3476 | 
| 3452   __ GetObjectType(receiver, scratch, scratch); | 3477   __ GetObjectType(receiver, scratch, scratch); | 
| 3453   DeoptimizeIf(lt, instr, "not a JavaScript object", scratch, | 3478   DeoptimizeIf(lt, instr, Deoptimizer::kNotAJavaScriptObject, scratch, | 
| 3454                Operand(FIRST_SPEC_OBJECT_TYPE)); | 3479                Operand(FIRST_SPEC_OBJECT_TYPE)); | 
| 3455 | 3480 | 
| 3456   __ Branch(&result_in_receiver); | 3481   __ Branch(&result_in_receiver); | 
| 3457   __ bind(&global_object); | 3482   __ bind(&global_object); | 
| 3458   __ lw(result, FieldMemOperand(function, JSFunction::kContextOffset)); | 3483   __ lw(result, FieldMemOperand(function, JSFunction::kContextOffset)); | 
| 3459   __ lw(result, | 3484   __ lw(result, | 
| 3460         ContextOperand(result, Context::GLOBAL_OBJECT_INDEX)); | 3485         ContextOperand(result, Context::GLOBAL_OBJECT_INDEX)); | 
| 3461   __ lw(result, | 3486   __ lw(result, | 
| 3462         FieldMemOperand(result, GlobalObject::kGlobalProxyOffset)); | 3487         FieldMemOperand(result, GlobalObject::kGlobalProxyOffset)); | 
| 3463 | 3488 | 
| (...skipping 15 matching lines...) |
| 3479   Register length = ToRegister(instr->length()); | 3504   Register length = ToRegister(instr->length()); | 
| 3480   Register elements = ToRegister(instr->elements()); | 3505   Register elements = ToRegister(instr->elements()); | 
| 3481   Register scratch = scratch0(); | 3506   Register scratch = scratch0(); | 
| 3482   DCHECK(receiver.is(a0));  // Used for parameter count. | 3507   DCHECK(receiver.is(a0));  // Used for parameter count. | 
| 3483   DCHECK(function.is(a1));  // Required by InvokeFunction. | 3508   DCHECK(function.is(a1));  // Required by InvokeFunction. | 
| 3484   DCHECK(ToRegister(instr->result()).is(v0)); | 3509   DCHECK(ToRegister(instr->result()).is(v0)); | 
| 3485 | 3510 | 
| 3486   // Copy the arguments to this function possibly from the | 3511   // Copy the arguments to this function possibly from the | 
| 3487   // adaptor frame below it. | 3512   // adaptor frame below it. | 
| 3488   const uint32_t kArgumentsLimit = 1 * KB; | 3513   const uint32_t kArgumentsLimit = 1 * KB; | 
| 3489   DeoptimizeIf(hi, instr, "too many arguments", length, | 3514   DeoptimizeIf(hi, instr, Deoptimizer::kTooManyArguments, length, | 
| 3490                Operand(kArgumentsLimit)); | 3515                Operand(kArgumentsLimit)); | 
| 3491 | 3516 | 
| 3492   // Push the receiver and use the register to keep the original | 3517   // Push the receiver and use the register to keep the original | 
| 3493   // number of arguments. | 3518   // number of arguments. | 
| 3494   __ push(receiver); | 3519   __ push(receiver); | 
| 3495   __ Move(receiver, length); | 3520   __ Move(receiver, length); | 
| 3496   // The arguments are at a one pointer size offset from elements. | 3521   // The arguments are at a one pointer size offset from elements. | 
| 3497   __ Addu(elements, elements, Operand(1 * kPointerSize)); | 3522   __ Addu(elements, elements, Operand(1 * kPointerSize)); | 
| 3498 | 3523 | 
| 3499   // Loop through the arguments pushing them onto the execution | 3524   // Loop through the arguments pushing them onto the execution | 
| (...skipping 105 matching lines...) |
| 3605 void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr) { | 3630 void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr) { | 
| 3606   DCHECK(instr->context() != NULL); | 3631   DCHECK(instr->context() != NULL); | 
| 3607   DCHECK(ToRegister(instr->context()).is(cp)); | 3632   DCHECK(ToRegister(instr->context()).is(cp)); | 
| 3608   Register input = ToRegister(instr->value()); | 3633   Register input = ToRegister(instr->value()); | 
| 3609   Register result = ToRegister(instr->result()); | 3634   Register result = ToRegister(instr->result()); | 
| 3610   Register scratch = scratch0(); | 3635   Register scratch = scratch0(); | 
| 3611 | 3636 | 
| 3612   // Deoptimize if not a heap number. | 3637   // Deoptimize if not a heap number. | 
| 3613   __ lw(scratch, FieldMemOperand(input, HeapObject::kMapOffset)); | 3638   __ lw(scratch, FieldMemOperand(input, HeapObject::kMapOffset)); | 
| 3614   __ LoadRoot(at, Heap::kHeapNumberMapRootIndex); | 3639   __ LoadRoot(at, Heap::kHeapNumberMapRootIndex); | 
| 3615   DeoptimizeIf(ne, instr, "not a heap number", scratch, Operand(at)); | 3640   DeoptimizeIf(ne, instr, Deoptimizer::kNotAHeapNumber, scratch, Operand(at)); | 
| 3616 | 3641 | 
| 3617   Label done; | 3642   Label done; | 
| 3618   Register exponent = scratch0(); | 3643   Register exponent = scratch0(); | 
| 3619   scratch = no_reg; | 3644   scratch = no_reg; | 
| 3620   __ lw(exponent, FieldMemOperand(input, HeapNumber::kExponentOffset)); | 3645   __ lw(exponent, FieldMemOperand(input, HeapNumber::kExponentOffset)); | 
| 3621   // Check the sign of the argument. If the argument is positive, just | 3646   // Check the sign of the argument. If the argument is positive, just | 
| 3622   // return it. | 3647   // return it. | 
| 3623   __ Move(result, input); | 3648   __ Move(result, input); | 
| 3624   __ And(at, exponent, Operand(HeapNumber::kSignMask)); | 3649   __ And(at, exponent, Operand(HeapNumber::kSignMask)); | 
| 3625   __ Branch(&done, eq, at, Operand(zero_reg)); | 3650   __ Branch(&done, eq, at, Operand(zero_reg)); | 
| (...skipping 46 matching lines...) |
| 3672 | 3697 | 
| 3673 void LCodeGen::EmitIntegerMathAbs(LMathAbs* instr) { | 3698 void LCodeGen::EmitIntegerMathAbs(LMathAbs* instr) { | 
| 3674   Register input = ToRegister(instr->value()); | 3699   Register input = ToRegister(instr->value()); | 
| 3675   Register result = ToRegister(instr->result()); | 3700   Register result = ToRegister(instr->result()); | 
| 3676   Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); | 3701   Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); | 
| 3677   Label done; | 3702   Label done; | 
| 3678   __ Branch(USE_DELAY_SLOT, &done, ge, input, Operand(zero_reg)); | 3703   __ Branch(USE_DELAY_SLOT, &done, ge, input, Operand(zero_reg)); | 
| 3679   __ mov(result, input); | 3704   __ mov(result, input); | 
| 3680   __ subu(result, zero_reg, input); | 3705   __ subu(result, zero_reg, input); | 
| 3681   // Overflow if result is still negative, i.e. 0x80000000. | 3706   // Overflow if result is still negative, i.e. 0x80000000. | 
| 3682   DeoptimizeIf(lt, instr, "overflow", result, Operand(zero_reg)); | 3707   DeoptimizeIf(lt, instr, Deoptimizer::kOverflow, result, Operand(zero_reg)); | 
| 3683   __ bind(&done); | 3708   __ bind(&done); | 
| 3684 } | 3709 } | 
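`EmitIntegerMathAbs` deoptimizes when the negated result is still negative, which happens for exactly one input, INT32_MIN (0x80000000), whose absolute value is not representable in 32 bits. A standalone sketch of that overflow condition:

```cpp
#include <cstdint>
#include <cstdio>

// Returns false to model the kOverflow deopt.
bool IntegerAbs(int32_t input, int32_t* result) {
  if (input >= 0) {  // Fast path: already non-negative.
    *result = input;
    return true;
  }
  // Negate via unsigned arithmetic to avoid signed-overflow UB in the sketch.
  int32_t negated = static_cast<int32_t>(0u - static_cast<uint32_t>(input));
  if (negated < 0) return false;  // Still negative => input was INT32_MIN.
  *result = negated;
  return true;
}

int main() {
  int32_t r = 0;
  std::printf("%d\n", IntegerAbs(-5, &r) ? r : -1);     // 5
  std::printf("ok=%d\n", IntegerAbs(INT32_MIN, &r));    // ok=0 (would deopt)
  return 0;
}
```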
| 3685 | 3710 | 
| 3686 | 3711 | 
| 3687 void LCodeGen::DoMathAbs(LMathAbs* instr) { | 3712 void LCodeGen::DoMathAbs(LMathAbs* instr) { | 
| 3688   // Class for deferred case. | 3713   // Class for deferred case. | 
| 3689   class DeferredMathAbsTaggedHeapNumber FINAL : public LDeferredCode { | 3714   class DeferredMathAbsTaggedHeapNumber FINAL : public LDeferredCode { | 
| 3690    public: | 3715    public: | 
| 3691     DeferredMathAbsTaggedHeapNumber(LCodeGen* codegen, LMathAbs* instr) | 3716     DeferredMathAbsTaggedHeapNumber(LCodeGen* codegen, LMathAbs* instr) | 
| 3692         : LDeferredCode(codegen), instr_(instr) { } | 3717         : LDeferredCode(codegen), instr_(instr) { } | 
| (...skipping 34 matching lines...) |
| 3727   Register except_flag = ToRegister(instr->temp()); | 3752   Register except_flag = ToRegister(instr->temp()); | 
| 3728 | 3753 | 
| 3729   __ EmitFPUTruncate(kRoundToMinusInf, | 3754   __ EmitFPUTruncate(kRoundToMinusInf, | 
| 3730                      result, | 3755                      result, | 
| 3731                      input, | 3756                      input, | 
| 3732                      scratch1, | 3757                      scratch1, | 
| 3733                      double_scratch0(), | 3758                      double_scratch0(), | 
| 3734                      except_flag); | 3759                      except_flag); | 
| 3735 | 3760 | 
| 3736   // Deopt if the operation did not succeed. | 3761   // Deopt if the operation did not succeed. | 
| 3737   DeoptimizeIf(ne, instr, "lost precision or NaN", except_flag, | 3762   DeoptimizeIf(ne, instr, Deoptimizer::kLostPrecisionOrNaN, except_flag, | 
| 3738                Operand(zero_reg)); | 3763                Operand(zero_reg)); | 
| 3739 | 3764 | 
| 3740   if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 3765   if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 
| 3741     // Test for -0. | 3766     // Test for -0. | 
| 3742     Label done; | 3767     Label done; | 
| 3743     __ Branch(&done, ne, result, Operand(zero_reg)); | 3768     __ Branch(&done, ne, result, Operand(zero_reg)); | 
| 3744     __ Mfhc1(scratch1, input); | 3769     __ Mfhc1(scratch1, input); | 
| 3745     __ And(scratch1, scratch1, Operand(HeapNumber::kSignMask)); | 3770     __ And(scratch1, scratch1, Operand(HeapNumber::kSignMask)); | 
| 3746     DeoptimizeIf(ne, instr, "minus zero", scratch1, Operand(zero_reg)); | 3771     DeoptimizeIf(ne, instr, Deoptimizer::kMinusZero, scratch1, | 
|  | 3772                  Operand(zero_reg)); | 
| 3747     __ bind(&done); | 3773     __ bind(&done); | 
| 3748   } | 3774   } | 
| 3749 } | 3775 } | 
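When `kBailoutOnMinusZero` is set, a floor result of 0 is only acceptable if the input was +0, so the code reads the high word of the input double (`Mfhc1`) and tests the sign bit against `HeapNumber::kSignMask`. A standalone sketch of distinguishing -0.0 from +0.0 the same way:

```cpp
#include <cstdint>
#include <cstdio>
#include <cstring>

const uint32_t kSignMask = 0x80000000u;  // Sign bit of the upper word.

// Mirrors Mfhc1 + And(kSignMask): true iff the double's sign bit is set.
bool HighWordSignBitSet(double d) {
  uint64_t bits;
  std::memcpy(&bits, &d, sizeof(bits));
  return (static_cast<uint32_t>(bits >> 32) & kSignMask) != 0;
}

int main() {
  // 0.0 == -0.0 compares equal, so only a bit-level test tells them apart.
  std::printf("-0.0 -> %d\n", HighWordSignBitSet(-0.0));  // 1: would deopt
  std::printf("+0.0 -> %d\n", HighWordSignBitSet(0.0));   // 0
  return 0;
}
```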
| 3750 | 3776 | 
| 3751 | 3777 | 
| 3752 void LCodeGen::DoMathRound(LMathRound* instr) { | 3778 void LCodeGen::DoMathRound(LMathRound* instr) { | 
| 3753   DoubleRegister input = ToDoubleRegister(instr->value()); | 3779   DoubleRegister input = ToDoubleRegister(instr->value()); | 
| 3754   Register result = ToRegister(instr->result()); | 3780   Register result = ToRegister(instr->result()); | 
| 3755   DoubleRegister double_scratch1 = ToDoubleRegister(instr->temp()); | 3781   DoubleRegister double_scratch1 = ToDoubleRegister(instr->temp()); | 
| 3756   Register scratch = scratch0(); | 3782   Register scratch = scratch0(); | 
| (...skipping 12 matching lines...) |
| 3769   __ mov(result, zero_reg); | 3795   __ mov(result, zero_reg); | 
| 3770   if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 3796   if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 
| 3771     __ Branch(&check_sign_on_zero); | 3797     __ Branch(&check_sign_on_zero); | 
| 3772   } else { | 3798   } else { | 
| 3773     __ Branch(&done); | 3799     __ Branch(&done); | 
| 3774   } | 3800   } | 
| 3775   __ bind(&skip1); | 3801   __ bind(&skip1); | 
| 3776 | 3802 | 
| 3777   // The following conversion will not work with numbers | 3803   // The following conversion will not work with numbers | 
| 3778   // outside of ]-2^32, 2^32[. | 3804   // outside of ]-2^32, 2^32[. | 
| 3779   DeoptimizeIf(ge, instr, "overflow", scratch, | 3805   DeoptimizeIf(ge, instr, Deoptimizer::kOverflow, scratch, | 
| 3780                Operand(HeapNumber::kExponentBias + 32)); | 3806                Operand(HeapNumber::kExponentBias + 32)); | 
| 3781 | 3807 | 
| 3782   // Save the original sign for later comparison. | 3808   // Save the original sign for later comparison. | 
| 3783   __ And(scratch, result, Operand(HeapNumber::kSignMask)); | 3809   __ And(scratch, result, Operand(HeapNumber::kSignMask)); | 
| 3784 | 3810 | 
| 3785   __ Move(double_scratch0(), 0.5); | 3811   __ Move(double_scratch0(), 0.5); | 
| 3786   __ add_d(double_scratch0(), input, double_scratch0()); | 3812   __ add_d(double_scratch0(), input, double_scratch0()); | 
| 3787 | 3813 | 
| 3788   // Check sign of the result: if the sign changed, the input | 3814   // Check sign of the result: if the sign changed, the input | 
| 3789   // value was in ]-0.5, 0[ and the result should be -0. | 3815   // value was in ]-0.5, 0[ and the result should be -0. | 
| 3790   __ Mfhc1(result, double_scratch0()); | 3816   __ Mfhc1(result, double_scratch0()); | 
| 3791   __ Xor(result, result, Operand(scratch)); | 3817   __ Xor(result, result, Operand(scratch)); | 
| 3792   if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 3818   if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 
| 3793     // ARM uses 'mi' here, which is 'lt' | 3819     // ARM uses 'mi' here, which is 'lt' | 
| 3794     DeoptimizeIf(lt, instr, "minus zero", result, Operand(zero_reg)); | 3820     DeoptimizeIf(lt, instr, Deoptimizer::kMinusZero, result, Operand(zero_reg)); | 
| 3795   } else { | 3821   } else { | 
| 3796     Label skip2; | 3822     Label skip2; | 
| 3797     // ARM uses 'mi' here, which is 'lt' | 3823     // ARM uses 'mi' here, which is 'lt' | 
| 3798     // Negating it results in 'ge' | 3824     // Negating it results in 'ge' | 
| 3799     __ Branch(&skip2, ge, result, Operand(zero_reg)); | 3825     __ Branch(&skip2, ge, result, Operand(zero_reg)); | 
| 3800     __ mov(result, zero_reg); | 3826     __ mov(result, zero_reg); | 
| 3801     __ Branch(&done); | 3827     __ Branch(&done); | 
| 3802     __ bind(&skip2); | 3828     __ bind(&skip2); | 
| 3803   } | 3829   } | 
| 3804 | 3830 | 
| 3805   Register except_flag = scratch; | 3831   Register except_flag = scratch; | 
| 3806   __ EmitFPUTruncate(kRoundToMinusInf, | 3832   __ EmitFPUTruncate(kRoundToMinusInf, | 
| 3807                      result, | 3833                      result, | 
| 3808                      double_scratch0(), | 3834                      double_scratch0(), | 
| 3809                      at, | 3835                      at, | 
| 3810                      double_scratch1, | 3836                      double_scratch1, | 
| 3811                      except_flag); | 3837                      except_flag); | 
| 3812 | 3838 | 
| 3813   DeoptimizeIf(ne, instr, "lost precision or NaN", except_flag, | 3839   DeoptimizeIf(ne, instr, Deoptimizer::kLostPrecisionOrNaN, except_flag, | 
| 3814                Operand(zero_reg)); | 3840                Operand(zero_reg)); | 
| 3815 | 3841 | 
| 3816   if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 3842   if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 
| 3817     // Test for -0. | 3843     // Test for -0. | 
| 3818     __ Branch(&done, ne, result, Operand(zero_reg)); | 3844     __ Branch(&done, ne, result, Operand(zero_reg)); | 
| 3819     __ bind(&check_sign_on_zero); | 3845     __ bind(&check_sign_on_zero); | 
| 3820     __ Mfhc1(scratch, input); | 3846     __ Mfhc1(scratch, input); | 
| 3821     __ And(scratch, scratch, Operand(HeapNumber::kSignMask)); | 3847     __ And(scratch, scratch, Operand(HeapNumber::kSignMask)); | 
| 3822     DeoptimizeIf(ne, instr, "minus zero", scratch, Operand(zero_reg)); | 3848     DeoptimizeIf(ne, instr, Deoptimizer::kMinusZero, scratch, | 
|  | 3849                  Operand(zero_reg)); | 
| 3823   } | 3850   } | 
| 3824   __ bind(&done); | 3851   __ bind(&done); | 
| 3825 } | 3852 } | 
| 3826 | 3853 | 
| 3827 | 3854 | 
| 3828 void LCodeGen::DoMathFround(LMathFround* instr) { | 3855 void LCodeGen::DoMathFround(LMathFround* instr) { | 
| 3829   DoubleRegister input = ToDoubleRegister(instr->value()); | 3856   DoubleRegister input = ToDoubleRegister(instr->value()); | 
| 3830   DoubleRegister result = ToDoubleRegister(instr->result()); | 3857   DoubleRegister result = ToDoubleRegister(instr->result()); | 
| 3831   __ cvt_s_d(result.low(), input); | 3858   __ cvt_s_d(result.low(), input); | 
| 3832   __ cvt_d_s(result, result.low()); | 3859   __ cvt_d_s(result, result.low()); | 
| (...skipping 45 matching lines...) |
| 3878 | 3905 | 
| 3879   if (exponent_type.IsSmi()) { | 3906   if (exponent_type.IsSmi()) { | 
| 3880     MathPowStub stub(isolate(), MathPowStub::TAGGED); | 3907     MathPowStub stub(isolate(), MathPowStub::TAGGED); | 
| 3881     __ CallStub(&stub); | 3908     __ CallStub(&stub); | 
| 3882   } else if (exponent_type.IsTagged()) { | 3909   } else if (exponent_type.IsTagged()) { | 
| 3883     Label no_deopt; | 3910     Label no_deopt; | 
| 3884     __ JumpIfSmi(tagged_exponent, &no_deopt); | 3911     __ JumpIfSmi(tagged_exponent, &no_deopt); | 
| 3885     DCHECK(!t3.is(tagged_exponent)); | 3912     DCHECK(!t3.is(tagged_exponent)); | 
| 3886     __ lw(t3, FieldMemOperand(tagged_exponent, HeapObject::kMapOffset)); | 3913     __ lw(t3, FieldMemOperand(tagged_exponent, HeapObject::kMapOffset)); | 
| 3887     __ LoadRoot(at, Heap::kHeapNumberMapRootIndex); | 3914     __ LoadRoot(at, Heap::kHeapNumberMapRootIndex); | 
| 3888     DeoptimizeIf(ne, instr, "not a heap number", t3, Operand(at)); | 3915     DeoptimizeIf(ne, instr, Deoptimizer::kNotAHeapNumber, t3, Operand(at)); | 
| 3889     __ bind(&no_deopt); | 3916     __ bind(&no_deopt); | 
| 3890     MathPowStub stub(isolate(), MathPowStub::TAGGED); | 3917     MathPowStub stub(isolate(), MathPowStub::TAGGED); | 
| 3891     __ CallStub(&stub); | 3918     __ CallStub(&stub); | 
| 3892   } else if (exponent_type.IsInteger32()) { | 3919   } else if (exponent_type.IsInteger32()) { | 
| 3893     MathPowStub stub(isolate(), MathPowStub::INTEGER); | 3920     MathPowStub stub(isolate(), MathPowStub::INTEGER); | 
| 3894     __ CallStub(&stub); | 3921     __ CallStub(&stub); | 
| 3895   } else { | 3922   } else { | 
| 3896     DCHECK(exponent_type.IsDouble()); | 3923     DCHECK(exponent_type.IsDouble()); | 
| 3897     MathPowStub stub(isolate(), MathPowStub::DOUBLE); | 3924     MathPowStub stub(isolate(), MathPowStub::DOUBLE); | 
| 3898     __ CallStub(&stub); | 3925     __ CallStub(&stub); | 
| (...skipping 371 matching lines...) |
| 4270   } else { | 4297   } else { | 
| 4271     reg = ToRegister(instr->index()); | 4298     reg = ToRegister(instr->index()); | 
| 4272     operand = ToOperand(instr->length()); | 4299     operand = ToOperand(instr->length()); | 
| 4273   } | 4300   } | 
| 4274   if (FLAG_debug_code && instr->hydrogen()->skip_check()) { | 4301   if (FLAG_debug_code && instr->hydrogen()->skip_check()) { | 
| 4275     Label done; | 4302     Label done; | 
| 4276     __ Branch(&done, NegateCondition(cc), reg, operand); | 4303     __ Branch(&done, NegateCondition(cc), reg, operand); | 
| 4277     __ stop("eliminated bounds check failed"); | 4304     __ stop("eliminated bounds check failed"); | 
| 4278     __ bind(&done); | 4305     __ bind(&done); | 
| 4279   } else { | 4306   } else { | 
| 4280     DeoptimizeIf(cc, instr, "out of bounds", reg, operand); | 4307     DeoptimizeIf(cc, instr, Deoptimizer::kOutOfBounds, reg, operand); | 
| 4281   } | 4308   } | 
| 4282 } | 4309 } | 
| 4283 | 4310 | 
| 4284 | 4311 | 
| 4285 void LCodeGen::DoStoreKeyedExternalArray(LStoreKeyed* instr) { | 4312 void LCodeGen::DoStoreKeyedExternalArray(LStoreKeyed* instr) { | 
| 4286   Register external_pointer = ToRegister(instr->elements()); | 4313   Register external_pointer = ToRegister(instr->elements()); | 
| 4287   Register key = no_reg; | 4314   Register key = no_reg; | 
| 4288   ElementsKind elements_kind = instr->elements_kind(); | 4315   ElementsKind elements_kind = instr->elements_kind(); | 
| 4289   bool key_is_constant = instr->key()->IsConstantOperand(); | 4316   bool key_is_constant = instr->key()->IsConstantOperand(); | 
| 4290   int constant_key = 0; | 4317   int constant_key = 0; | 
| (...skipping 573 matching lines...) |
| 4864 } | 4891 } | 
| 4865 | 4892 | 
| 4866 | 4893 | 
| 4867 void LCodeGen::DoSmiTag(LSmiTag* instr) { | 4894 void LCodeGen::DoSmiTag(LSmiTag* instr) { | 
| 4868   HChange* hchange = instr->hydrogen(); | 4895   HChange* hchange = instr->hydrogen(); | 
| 4869   Register input = ToRegister(instr->value()); | 4896   Register input = ToRegister(instr->value()); | 
| 4870   Register output = ToRegister(instr->result()); | 4897   Register output = ToRegister(instr->result()); | 
| 4871   if (hchange->CheckFlag(HValue::kCanOverflow) && | 4898   if (hchange->CheckFlag(HValue::kCanOverflow) && | 
| 4872       hchange->value()->CheckFlag(HValue::kUint32)) { | 4899       hchange->value()->CheckFlag(HValue::kUint32)) { | 
| 4873     __ And(at, input, Operand(0xc0000000)); | 4900     __ And(at, input, Operand(0xc0000000)); | 
| 4874     DeoptimizeIf(ne, instr, "overflow", at, Operand(zero_reg)); | 4901     DeoptimizeIf(ne, instr, Deoptimizer::kOverflow, at, Operand(zero_reg)); | 
| 4875   } | 4902   } | 
| 4876   if (hchange->CheckFlag(HValue::kCanOverflow) && | 4903   if (hchange->CheckFlag(HValue::kCanOverflow) && | 
| 4877       !hchange->value()->CheckFlag(HValue::kUint32)) { | 4904       !hchange->value()->CheckFlag(HValue::kUint32)) { | 
| 4878     __ SmiTagCheckOverflow(output, input, at); | 4905     __ SmiTagCheckOverflow(output, input, at); | 
| 4879     DeoptimizeIf(lt, instr, "overflow", at, Operand(zero_reg)); | 4906     DeoptimizeIf(lt, instr, Deoptimizer::kOverflow, at, Operand(zero_reg)); | 
| 4880   } else { | 4907   } else { | 
| 4881     __ SmiTag(output, input); | 4908     __ SmiTag(output, input); | 
| 4882   } | 4909   } | 
| 4883 } | 4910 } | 
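On 32-bit MIPS a Smi is the integer shifted left by one with a zero tag bit, so tagging can overflow. For uint32 inputs `DoSmiTag` rejects anything with either of the top two bits set (`0xc0000000`); for int32 inputs `SmiTagCheckOverflow` detects overflow from the sign change caused by the shift. A standalone sketch of both checks, with the overflow test written out explicitly rather than using the macro-assembler helper:

```cpp
#include <cstdint>
#include <cstdio>

// 32-bit Smi tagging: value << 1, tag bit 0 (heap objects have bit 0 set).
bool SmiTagUint32(uint32_t value, int32_t* out) {
  if (value & 0xC0000000u) return false;  // Doesn't fit in 30 value bits.
  *out = static_cast<int32_t>(value << 1);
  return true;
}

bool SmiTagInt32(int32_t value, int32_t* out) {
  int32_t tagged = static_cast<int32_t>(static_cast<uint32_t>(value) << 1);
  // The shift overflowed iff the sign flipped; this plays the role of the
  // negative overflow flag that SmiTagCheckOverflow leaves for DeoptimizeIf.
  if ((tagged ^ value) < 0) return false;
  *out = tagged;
  return true;
}

int main() {
  int32_t smi = 0;
  std::printf("%d\n", SmiTagInt32(5, &smi) ? smi : -1);     // 10
  std::printf("ok=%d\n", SmiTagInt32(0x40000000, &smi));    // ok=0 (deopt)
  std::printf("ok=%d\n", SmiTagUint32(0x80000000u, &smi));  // ok=0 (deopt)
  return 0;
}
```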
| 4884 | 4911 | 
| 4885 | 4912 | 
| 4886 void LCodeGen::DoSmiUntag(LSmiUntag* instr) { | 4913 void LCodeGen::DoSmiUntag(LSmiUntag* instr) { | 
| 4887   Register scratch = scratch0(); | 4914   Register scratch = scratch0(); | 
| 4888   Register input = ToRegister(instr->value()); | 4915   Register input = ToRegister(instr->value()); | 
| 4889   Register result = ToRegister(instr->result()); | 4916   Register result = ToRegister(instr->result()); | 
| 4890   if (instr->needs_check()) { | 4917   if (instr->needs_check()) { | 
| 4891     STATIC_ASSERT(kHeapObjectTag == 1); | 4918     STATIC_ASSERT(kHeapObjectTag == 1); | 
| 4892     // If the input is a HeapObject, value of scratch won't be zero. | 4919     // If the input is a HeapObject, value of scratch won't be zero. | 
| 4893     __ And(scratch, input, Operand(kHeapObjectTag)); | 4920     __ And(scratch, input, Operand(kHeapObjectTag)); | 
| 4894     __ SmiUntag(result, input); | 4921     __ SmiUntag(result, input); | 
| 4895     DeoptimizeIf(ne, instr, "not a Smi", scratch, Operand(zero_reg)); | 4922     DeoptimizeIf(ne, instr, Deoptimizer::kNotASmi, scratch, Operand(zero_reg)); | 
| 4896   } else { | 4923   } else { | 
| 4897     __ SmiUntag(result, input); | 4924     __ SmiUntag(result, input); | 
| 4898   } | 4925   } | 
| 4899 } | 4926 } | 
| 4900 | 4927 | 
| 4901 | 4928 | 
| 4902 void LCodeGen::EmitNumberUntagD(LNumberUntagD* instr, Register input_reg, | 4929 void LCodeGen::EmitNumberUntagD(LNumberUntagD* instr, Register input_reg, | 
| 4903                                 DoubleRegister result_reg, | 4930                                 DoubleRegister result_reg, | 
| 4904                                 NumberUntagDMode mode) { | 4931                                 NumberUntagDMode mode) { | 
| 4905   bool can_convert_undefined_to_nan = | 4932   bool can_convert_undefined_to_nan = | 
| 4906       instr->hydrogen()->can_convert_undefined_to_nan(); | 4933       instr->hydrogen()->can_convert_undefined_to_nan(); | 
| 4907   bool deoptimize_on_minus_zero = instr->hydrogen()->deoptimize_on_minus_zero(); | 4934   bool deoptimize_on_minus_zero = instr->hydrogen()->deoptimize_on_minus_zero(); | 
| 4908 | 4935 | 
| 4909   Register scratch = scratch0(); | 4936   Register scratch = scratch0(); | 
| 4910   Label convert, load_smi, done; | 4937   Label convert, load_smi, done; | 
| 4911   if (mode == NUMBER_CANDIDATE_IS_ANY_TAGGED) { | 4938   if (mode == NUMBER_CANDIDATE_IS_ANY_TAGGED) { | 
| 4912     // Smi check. | 4939     // Smi check. | 
| 4913     __ UntagAndJumpIfSmi(scratch, input_reg, &load_smi); | 4940     __ UntagAndJumpIfSmi(scratch, input_reg, &load_smi); | 
| 4914     // Heap number map check. | 4941     // Heap number map check. | 
| 4915     __ lw(scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset)); | 4942     __ lw(scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset)); | 
| 4916     __ LoadRoot(at, Heap::kHeapNumberMapRootIndex); | 4943     __ LoadRoot(at, Heap::kHeapNumberMapRootIndex); | 
| 4917     if (can_convert_undefined_to_nan) { | 4944     if (can_convert_undefined_to_nan) { | 
| 4918       __ Branch(&convert, ne, scratch, Operand(at)); | 4945       __ Branch(&convert, ne, scratch, Operand(at)); | 
| 4919     } else { | 4946     } else { | 
| 4920       DeoptimizeIf(ne, instr, "not a heap number", scratch, Operand(at)); | 4947       DeoptimizeIf(ne, instr, Deoptimizer::kNotAHeapNumber, scratch, | 
|  | 4948                    Operand(at)); | 
| 4921     } | 4949     } | 
| 4922     // Load heap number. | 4950     // Load heap number. | 
| 4923     __ ldc1(result_reg, FieldMemOperand(input_reg, HeapNumber::kValueOffset)); | 4951     __ ldc1(result_reg, FieldMemOperand(input_reg, HeapNumber::kValueOffset)); | 
| 4924     if (deoptimize_on_minus_zero) { | 4952     if (deoptimize_on_minus_zero) { | 
| 4925       __ mfc1(at, result_reg.low()); | 4953       __ mfc1(at, result_reg.low()); | 
| 4926       __ Branch(&done, ne, at, Operand(zero_reg)); | 4954       __ Branch(&done, ne, at, Operand(zero_reg)); | 
| 4927       __ Mfhc1(scratch, result_reg); | 4955       __ Mfhc1(scratch, result_reg); | 
| 4928       DeoptimizeIf(eq, instr, "minus zero", scratch, | 4956       DeoptimizeIf(eq, instr, Deoptimizer::kMinusZero, scratch, | 
| 4929                    Operand(HeapNumber::kSignMask)); | 4957                    Operand(HeapNumber::kSignMask)); | 
| 4930     } | 4958     } | 
| 4931     __ Branch(&done); | 4959     __ Branch(&done); | 
| 4932     if (can_convert_undefined_to_nan) { | 4960     if (can_convert_undefined_to_nan) { | 
| 4933       __ bind(&convert); | 4961       __ bind(&convert); | 
| 4934       // Convert undefined (and hole) to NaN. | 4962       // Convert undefined (and hole) to NaN. | 
| 4935       __ LoadRoot(at, Heap::kUndefinedValueRootIndex); | 4963       __ LoadRoot(at, Heap::kUndefinedValueRootIndex); | 
| 4936       DeoptimizeIf(ne, instr, "not a heap number/undefined", input_reg, | 4964       DeoptimizeIf(ne, instr, Deoptimizer::kNotAHeapNumberUndefined, input_reg, | 
| 4937                    Operand(at)); | 4965                    Operand(at)); | 
| 4938       __ LoadRoot(scratch, Heap::kNanValueRootIndex); | 4966       __ LoadRoot(scratch, Heap::kNanValueRootIndex); | 
| 4939       __ ldc1(result_reg, FieldMemOperand(scratch, HeapNumber::kValueOffset)); | 4967       __ ldc1(result_reg, FieldMemOperand(scratch, HeapNumber::kValueOffset)); | 
| 4940       __ Branch(&done); | 4968       __ Branch(&done); | 
| 4941     } | 4969     } | 
| 4942   } else { | 4970   } else { | 
| 4943     __ SmiUntag(scratch, input_reg); | 4971     __ SmiUntag(scratch, input_reg); | 
| 4944     DCHECK(mode == NUMBER_CANDIDATE_IS_SMI); | 4972     DCHECK(mode == NUMBER_CANDIDATE_IS_SMI); | 
| 4945   } | 4973   } | 
| 4946   // Smi to double register conversion | 4974   // Smi to double register conversion | 
| (...skipping 44 matching lines...) |
| 4991     __ mov(input_reg, zero_reg);  // In delay slot. | 5019     __ mov(input_reg, zero_reg);  // In delay slot. | 
| 4992 | 5020 | 
| 4993     __ bind(&check_bools); | 5021     __ bind(&check_bools); | 
| 4994     __ LoadRoot(at, Heap::kTrueValueRootIndex); | 5022     __ LoadRoot(at, Heap::kTrueValueRootIndex); | 
| 4995     __ Branch(&check_false, ne, scratch2, Operand(at)); | 5023     __ Branch(&check_false, ne, scratch2, Operand(at)); | 
| 4996     __ Branch(USE_DELAY_SLOT, &done); | 5024     __ Branch(USE_DELAY_SLOT, &done); | 
| 4997     __ li(input_reg, Operand(1));  // In delay slot. | 5025     __ li(input_reg, Operand(1));  // In delay slot. | 
| 4998 | 5026 | 
| 4999     __ bind(&check_false); | 5027     __ bind(&check_false); | 
| 5000     __ LoadRoot(at, Heap::kFalseValueRootIndex); | 5028     __ LoadRoot(at, Heap::kFalseValueRootIndex); | 
| 5001     DeoptimizeIf(ne, instr, "not a heap number/undefined/true/false", scratch2, | 5029     DeoptimizeIf(ne, instr, Deoptimizer::kNotAHeapNumberUndefinedBoolean, | 
| 5002                  Operand(at)); | 5030                  scratch2, Operand(at)); | 
| 5003     __ Branch(USE_DELAY_SLOT, &done); | 5031     __ Branch(USE_DELAY_SLOT, &done); | 
| 5004     __ mov(input_reg, zero_reg);  // In delay slot. | 5032     __ mov(input_reg, zero_reg);  // In delay slot. | 
| 5005   } else { | 5033   } else { | 
| 5006     DeoptimizeIf(ne, instr, "not a heap number", scratch1, Operand(at)); | 5034     DeoptimizeIf(ne, instr, Deoptimizer::kNotAHeapNumber, scratch1, | 
|  | 5035                  Operand(at)); | 
| 5007 | 5036 | 
| 5008     // Load the double value. | 5037     // Load the double value. | 
| 5009     __ ldc1(double_scratch, | 5038     __ ldc1(double_scratch, | 
| 5010             FieldMemOperand(input_reg, HeapNumber::kValueOffset)); | 5039             FieldMemOperand(input_reg, HeapNumber::kValueOffset)); | 
| 5011 | 5040 | 
| 5012     Register except_flag = scratch2; | 5041     Register except_flag = scratch2; | 
| 5013     __ EmitFPUTruncate(kRoundToZero, | 5042     __ EmitFPUTruncate(kRoundToZero, | 
| 5014                        input_reg, | 5043                        input_reg, | 
| 5015                        double_scratch, | 5044                        double_scratch, | 
| 5016                        scratch1, | 5045                        scratch1, | 
| 5017                        double_scratch2, | 5046                        double_scratch2, | 
| 5018                        except_flag, | 5047                        except_flag, | 
| 5019                        kCheckForInexactConversion); | 5048                        kCheckForInexactConversion); | 
| 5020 | 5049 | 
| 5021     DeoptimizeIf(ne, instr, "lost precision or NaN", except_flag, | 5050     DeoptimizeIf(ne, instr, Deoptimizer::kLostPrecisionOrNaN, except_flag, | 
| 5022                  Operand(zero_reg)); | 5051                  Operand(zero_reg)); | 
| 5023 | 5052 | 
| 5024     if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 5053     if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 
| 5025       __ Branch(&done, ne, input_reg, Operand(zero_reg)); | 5054       __ Branch(&done, ne, input_reg, Operand(zero_reg)); | 
| 5026 | 5055 | 
| 5027       __ Mfhc1(scratch1, double_scratch); | 5056       __ Mfhc1(scratch1, double_scratch); | 
| 5028       __ And(scratch1, scratch1, Operand(HeapNumber::kSignMask)); | 5057       __ And(scratch1, scratch1, Operand(HeapNumber::kSignMask)); | 
| 5029       DeoptimizeIf(ne, instr, "minus zero", scratch1, Operand(zero_reg)); | 5058       DeoptimizeIf(ne, instr, Deoptimizer::kMinusZero, scratch1, | 
|  | 5059                    Operand(zero_reg)); | 
| 5030     } | 5060     } | 
| 5031   } | 5061   } | 
| 5032   __ bind(&done); | 5062   __ bind(&done); | 
| 5033 } | 5063 } | 
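The deferred tagged-to-int path above converts the heap number with `EmitFPUTruncate` and `kCheckForInexactConversion`, then deoptimizes if the exception flag reports NaN, an out-of-range value, or a lost fractional part. A standalone "convert, then verify the conversion was exact" sketch of the same idea:

```cpp
#include <cmath>
#include <cstdint>
#include <cstdio>

// Returns false to model the kLostPrecisionOrNaN deopt.
bool DoubleToInt32Exact(double input, int32_t* out) {
  if (std::isnan(input)) return false;
  if (input < -2147483648.0 || input > 2147483647.0) return false;
  int32_t truncated = static_cast<int32_t>(input);  // Truncate toward zero.
  // Inexact conversion: the value had a fractional part.
  if (static_cast<double>(truncated) != input) return false;
  *out = truncated;
  return true;
}

int main() {
  int32_t v = 0;
  std::printf("ok=%d v=%d\n", DoubleToInt32Exact(42.0, &v), v);  // ok=1 v=42
  std::printf("ok=%d\n", DoubleToInt32Exact(1.5, &v));           // ok=0
  std::printf("ok=%d\n", DoubleToInt32Exact(NAN, &v));           // ok=0
  return 0;
}
```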
| 5034 | 5064 | 
| 5035 | 5065 | 
| 5036 void LCodeGen::DoTaggedToI(LTaggedToI* instr) { | 5066 void LCodeGen::DoTaggedToI(LTaggedToI* instr) { | 
| 5037   class DeferredTaggedToI FINAL : public LDeferredCode { | 5067   class DeferredTaggedToI FINAL : public LDeferredCode { | 
| 5038    public: | 5068    public: | 
| 5039     DeferredTaggedToI(LCodeGen* codegen, LTaggedToI* instr) | 5069     DeferredTaggedToI(LCodeGen* codegen, LTaggedToI* instr) | 
| (...skipping 55 matching lines...) |
| 5095 | 5125 | 
| 5096     __ EmitFPUTruncate(kRoundToMinusInf, | 5126     __ EmitFPUTruncate(kRoundToMinusInf, | 
| 5097                        result_reg, | 5127                        result_reg, | 
| 5098                        double_input, | 5128                        double_input, | 
| 5099                        scratch1, | 5129                        scratch1, | 
| 5100                        double_scratch0(), | 5130                        double_scratch0(), | 
| 5101                        except_flag, | 5131                        except_flag, | 
| 5102                        kCheckForInexactConversion); | 5132                        kCheckForInexactConversion); | 
| 5103 | 5133 | 
| 5104     // Deopt if the operation did not succeed (except_flag != 0). | 5134     // Deopt if the operation did not succeed (except_flag != 0). | 
| 5105     DeoptimizeIf(ne, instr, "lost precision or NaN", except_flag, | 5135     DeoptimizeIf(ne, instr, Deoptimizer::kLostPrecisionOrNaN, except_flag, | 
| 5106                  Operand(zero_reg)); | 5136                  Operand(zero_reg)); | 
| 5107 | 5137 | 
| 5108     if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 5138     if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 
| 5109       Label done; | 5139       Label done; | 
| 5110       __ Branch(&done, ne, result_reg, Operand(zero_reg)); | 5140       __ Branch(&done, ne, result_reg, Operand(zero_reg)); | 
| 5111       __ Mfhc1(scratch1, double_input); | 5141       __ Mfhc1(scratch1, double_input); | 
| 5112       __ And(scratch1, scratch1, Operand(HeapNumber::kSignMask)); | 5142       __ And(scratch1, scratch1, Operand(HeapNumber::kSignMask)); | 
| 5113       DeoptimizeIf(ne, instr, "minus zero", scratch1, Operand(zero_reg)); | 5143       DeoptimizeIf(ne, instr, Deoptimizer::kMinusZero, scratch1, | 
|  | 5144                    Operand(zero_reg)); | 
| 5114       __ bind(&done); | 5145       __ bind(&done); | 
| 5115     } | 5146     } | 
| 5116   } | 5147   } | 
| 5117 } | 5148 } | 
| 5118 | 5149 | 
| 5119 | 5150 | 
| 5120 void LCodeGen::DoDoubleToSmi(LDoubleToSmi* instr) { | 5151 void LCodeGen::DoDoubleToSmi(LDoubleToSmi* instr) { | 
| 5121   Register result_reg = ToRegister(instr->result()); | 5152   Register result_reg = ToRegister(instr->result()); | 
| 5122   Register scratch1 = LCodeGen::scratch0(); | 5153   Register scratch1 = LCodeGen::scratch0(); | 
| 5123   DoubleRegister double_input = ToDoubleRegister(instr->value()); | 5154   DoubleRegister double_input = ToDoubleRegister(instr->value()); | 
| 5124 | 5155 | 
| 5125   if (instr->truncating()) { | 5156   if (instr->truncating()) { | 
| 5126     __ TruncateDoubleToI(result_reg, double_input); | 5157     __ TruncateDoubleToI(result_reg, double_input); | 
| 5127   } else { | 5158   } else { | 
| 5128     Register except_flag = LCodeGen::scratch1(); | 5159     Register except_flag = LCodeGen::scratch1(); | 
| 5129 | 5160 | 
| 5130     __ EmitFPUTruncate(kRoundToMinusInf, | 5161     __ EmitFPUTruncate(kRoundToMinusInf, | 
| 5131                        result_reg, | 5162                        result_reg, | 
| 5132                        double_input, | 5163                        double_input, | 
| 5133                        scratch1, | 5164                        scratch1, | 
| 5134                        double_scratch0(), | 5165                        double_scratch0(), | 
| 5135                        except_flag, | 5166                        except_flag, | 
| 5136                        kCheckForInexactConversion); | 5167                        kCheckForInexactConversion); | 
| 5137 | 5168 | 
| 5138     // Deopt if the operation did not succeed (except_flag != 0). | 5169     // Deopt if the operation did not succeed (except_flag != 0). | 
| 5139     DeoptimizeIf(ne, instr, "lost precision or NaN", except_flag, | 5170     DeoptimizeIf(ne, instr, Deoptimizer::kLostPrecisionOrNaN, except_flag, | 
| 5140                  Operand(zero_reg)); | 5171                  Operand(zero_reg)); | 
| 5141 | 5172 | 
| 5142     if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 5173     if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 
| 5143       Label done; | 5174       Label done; | 
| 5144       __ Branch(&done, ne, result_reg, Operand(zero_reg)); | 5175       __ Branch(&done, ne, result_reg, Operand(zero_reg)); | 
| 5145       __ Mfhc1(scratch1, double_input); | 5176       __ Mfhc1(scratch1, double_input); | 
| 5146       __ And(scratch1, scratch1, Operand(HeapNumber::kSignMask)); | 5177       __ And(scratch1, scratch1, Operand(HeapNumber::kSignMask)); | 
| 5147       DeoptimizeIf(ne, instr, "minus zero", scratch1, Operand(zero_reg)); | 5178       DeoptimizeIf(ne, instr, Deoptimizer::kMinusZero, scratch1, | 
|  | 5179                    Operand(zero_reg)); | 
| 5148       __ bind(&done); | 5180       __ bind(&done); | 
| 5149     } | 5181     } | 
| 5150   } | 5182   } | 
| 5151   __ SmiTagCheckOverflow(result_reg, result_reg, scratch1); | 5183   __ SmiTagCheckOverflow(result_reg, result_reg, scratch1); | 
| 5152   DeoptimizeIf(lt, instr, "overflow", scratch1, Operand(zero_reg)); | 5184   DeoptimizeIf(lt, instr, Deoptimizer::kOverflow, scratch1, Operand(zero_reg)); | 
| 5153 } | 5185 } | 
| 5154 | 5186 | 
| 5155 | 5187 | 
| 5156 void LCodeGen::DoCheckSmi(LCheckSmi* instr) { | 5188 void LCodeGen::DoCheckSmi(LCheckSmi* instr) { | 
| 5157   LOperand* input = instr->value(); | 5189   LOperand* input = instr->value(); | 
| 5158   __ SmiTst(ToRegister(input), at); | 5190   __ SmiTst(ToRegister(input), at); | 
| 5159   DeoptimizeIf(ne, instr, "not a Smi", at, Operand(zero_reg)); | 5191   DeoptimizeIf(ne, instr, Deoptimizer::kNotASmi, at, Operand(zero_reg)); | 
| 5160 } | 5192 } | 
| 5161 | 5193 | 
| 5162 | 5194 | 
| 5163 void LCodeGen::DoCheckNonSmi(LCheckNonSmi* instr) { | 5195 void LCodeGen::DoCheckNonSmi(LCheckNonSmi* instr) { | 
| 5164   if (!instr->hydrogen()->value()->type().IsHeapObject()) { | 5196   if (!instr->hydrogen()->value()->type().IsHeapObject()) { | 
| 5165     LOperand* input = instr->value(); | 5197     LOperand* input = instr->value(); | 
| 5166     __ SmiTst(ToRegister(input), at); | 5198     __ SmiTst(ToRegister(input), at); | 
| 5167     DeoptimizeIf(eq, instr, "Smi", at, Operand(zero_reg)); | 5199     DeoptimizeIf(eq, instr, Deoptimizer::kSmi, at, Operand(zero_reg)); | 
| 5168   } | 5200   } | 
| 5169 } | 5201 } | 
| 5170 | 5202 | 
| 5171 | 5203 | 
| 5172 void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) { | 5204 void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) { | 
| 5173   Register input = ToRegister(instr->value()); | 5205   Register input = ToRegister(instr->value()); | 
| 5174   Register scratch = scratch0(); | 5206   Register scratch = scratch0(); | 
| 5175 | 5207 | 
| 5176   __ GetObjectType(input, scratch, scratch); | 5208   __ GetObjectType(input, scratch, scratch); | 
| 5177 | 5209 | 
| 5178   if (instr->hydrogen()->is_interval_check()) { | 5210   if (instr->hydrogen()->is_interval_check()) { | 
| 5179     InstanceType first; | 5211     InstanceType first; | 
| 5180     InstanceType last; | 5212     InstanceType last; | 
| 5181     instr->hydrogen()->GetCheckInterval(&first, &last); | 5213     instr->hydrogen()->GetCheckInterval(&first, &last); | 
| 5182 | 5214 | 
| 5183     // If there is only one type in the interval check for equality. | 5215     // If there is only one type in the interval check for equality. | 
| 5184     if (first == last) { | 5216     if (first == last) { | 
| 5185       DeoptimizeIf(ne, instr, "wrong instance type", scratch, Operand(first)); | 5217       DeoptimizeIf(ne, instr, Deoptimizer::kWrongInstanceType, scratch, | 
|  | 5218                    Operand(first)); | 
| 5186     } else { | 5219     } else { | 
| 5187       DeoptimizeIf(lo, instr, "wrong instance type", scratch, Operand(first)); | 5220       DeoptimizeIf(lo, instr, Deoptimizer::kWrongInstanceType, scratch, | 
|  | 5221                    Operand(first)); | 
| 5188       // Omit check for the last type. | 5222       // Omit check for the last type. | 
| 5189       if (last != LAST_TYPE) { | 5223       if (last != LAST_TYPE) { | 
| 5190         DeoptimizeIf(hi, instr, "wrong instance type", scratch, Operand(last)); | 5224         DeoptimizeIf(hi, instr, Deoptimizer::kWrongInstanceType, scratch, | 
|  | 5225                      Operand(last)); | 
| 5191       } | 5226       } | 
| 5192     } | 5227     } | 
| 5193   } else { | 5228   } else { | 
| 5194     uint8_t mask; | 5229     uint8_t mask; | 
| 5195     uint8_t tag; | 5230     uint8_t tag; | 
| 5196     instr->hydrogen()->GetCheckMaskAndTag(&mask, &tag); | 5231     instr->hydrogen()->GetCheckMaskAndTag(&mask, &tag); | 
| 5197 | 5232 | 
| 5198     if (base::bits::IsPowerOfTwo32(mask)) { | 5233     if (base::bits::IsPowerOfTwo32(mask)) { | 
| 5199       DCHECK(tag == 0 || base::bits::IsPowerOfTwo32(tag)); | 5234       DCHECK(tag == 0 || base::bits::IsPowerOfTwo32(tag)); | 
| 5200       __ And(at, scratch, mask); | 5235       __ And(at, scratch, mask); | 
| 5201       DeoptimizeIf(tag == 0 ? ne : eq, instr, "wrong instance type", at, | 5236       DeoptimizeIf(tag == 0 ? ne : eq, instr, Deoptimizer::kWrongInstanceType, | 
| 5202                    Operand(zero_reg)); | 5237                    at, Operand(zero_reg)); | 
| 5203     } else { | 5238     } else { | 
| 5204       __ And(scratch, scratch, Operand(mask)); | 5239       __ And(scratch, scratch, Operand(mask)); | 
| 5205       DeoptimizeIf(ne, instr, "wrong instance type", scratch, Operand(tag)); | 5240       DeoptimizeIf(ne, instr, Deoptimizer::kWrongInstanceType, scratch, | 
|  | 5241                    Operand(tag)); | 
| 5206     } | 5242     } | 
| 5207   } | 5243   } | 
| 5208 } | 5244 } | 
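`DoCheckInstanceType`'s mask-and-tag path has a fast case: when the mask is a single bit, the expected tag is either 0 or that same bit, so one AND plus a zero/non-zero test is enough (hence the `tag == 0 ? ne : eq` deopt condition). A standalone sketch of both cases; the mask/tag values in `main` are illustrative, not V8's real instance-type encodings.

```cpp
#include <cstdint>
#include <cstdio>

bool IsPowerOfTwo32(uint32_t x) { return x != 0 && (x & (x - 1)) == 0; }

// Returns true if the instance type passes the check (i.e. no deopt).
bool CheckMaskAndTag(uint8_t type, uint8_t mask, uint8_t tag) {
  if (IsPowerOfTwo32(mask)) {
    // tag is either 0 or equal to mask, so testing the single bit suffices:
    // deopt on non-zero when tag == 0, deopt on zero when tag != 0.
    uint8_t bit = type & mask;
    return tag == 0 ? bit == 0 : bit != 0;
  }
  return (type & mask) == tag;  // General case: masked type must equal tag.
}

int main() {
  std::printf("%d\n", CheckMaskAndTag(0x85, 0x80, 0x80));  // 1
  std::printf("%d\n", CheckMaskAndTag(0x05, 0x80, 0x80));  // 0: would deopt
  std::printf("%d\n", CheckMaskAndTag(0x13, 0x1F, 0x13));  // 1
  return 0;
}
```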
| 5209 | 5245 | 
| 5210 | 5246 | 
| 5211 void LCodeGen::DoCheckValue(LCheckValue* instr) { | 5247 void LCodeGen::DoCheckValue(LCheckValue* instr) { | 
| 5212   Register reg = ToRegister(instr->value()); | 5248   Register reg = ToRegister(instr->value()); | 
| 5213   Handle<HeapObject> object = instr->hydrogen()->object().handle(); | 5249   Handle<HeapObject> object = instr->hydrogen()->object().handle(); | 
| 5214   AllowDeferredHandleDereference smi_check; | 5250   AllowDeferredHandleDereference smi_check; | 
| 5215   if (isolate()->heap()->InNewSpace(*object)) { | 5251   if (isolate()->heap()->InNewSpace(*object)) { | 
| 5216     Register reg = ToRegister(instr->value()); | 5252     Register reg = ToRegister(instr->value()); | 
| 5217     Handle<Cell> cell = isolate()->factory()->NewCell(object); | 5253     Handle<Cell> cell = isolate()->factory()->NewCell(object); | 
| 5218     __ li(at, Operand(Handle<Object>(cell))); | 5254     __ li(at, Operand(Handle<Object>(cell))); | 
| 5219     __ lw(at, FieldMemOperand(at, Cell::kValueOffset)); | 5255     __ lw(at, FieldMemOperand(at, Cell::kValueOffset)); | 
| 5220     DeoptimizeIf(ne, instr, "value mismatch", reg, Operand(at)); | 5256     DeoptimizeIf(ne, instr, Deoptimizer::kValueMismatch, reg, Operand(at)); | 
| 5221   } else { | 5257   } else { | 
| 5222     DeoptimizeIf(ne, instr, "value mismatch", reg, Operand(object)); | 5258     DeoptimizeIf(ne, instr, Deoptimizer::kValueMismatch, reg, Operand(object)); | 
| 5223   } | 5259   } | 
| 5224 } | 5260 } | 
| 5225 | 5261 | 
| 5226 | 5262 | 
| 5227 void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) { | 5263 void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) { | 
| 5228   { | 5264   { | 
| 5229     PushSafepointRegistersScope scope(this); | 5265     PushSafepointRegistersScope scope(this); | 
| 5230     __ push(object); | 5266     __ push(object); | 
| 5231     __ mov(cp, zero_reg); | 5267     __ mov(cp, zero_reg); | 
| 5232     __ CallRuntimeSaveDoubles(Runtime::kTryMigrateInstance); | 5268     __ CallRuntimeSaveDoubles(Runtime::kTryMigrateInstance); | 
| 5233     RecordSafepointWithRegisters( | 5269     RecordSafepointWithRegisters( | 
| 5234         instr->pointer_map(), 1, Safepoint::kNoLazyDeopt); | 5270         instr->pointer_map(), 1, Safepoint::kNoLazyDeopt); | 
| 5235     __ StoreToSafepointRegisterSlot(v0, scratch0()); | 5271     __ StoreToSafepointRegisterSlot(v0, scratch0()); | 
| 5236   } | 5272   } | 
| 5237   __ SmiTst(scratch0(), at); | 5273   __ SmiTst(scratch0(), at); | 
| 5238   DeoptimizeIf(eq, instr, "instance migration failed", at, Operand(zero_reg)); | 5274   DeoptimizeIf(eq, instr, Deoptimizer::kInstanceMigrationFailed, at, | 
|  | 5275                Operand(zero_reg)); | 
| 5239 } | 5276 } | 
| 5240 | 5277 | 
| 5241 | 5278 | 
| 5242 void LCodeGen::DoCheckMaps(LCheckMaps* instr) { | 5279 void LCodeGen::DoCheckMaps(LCheckMaps* instr) { | 
| 5243   class DeferredCheckMaps FINAL : public LDeferredCode { | 5280   class DeferredCheckMaps FINAL : public LDeferredCode { | 
| 5244    public: | 5281    public: | 
| 5245     DeferredCheckMaps(LCodeGen* codegen, LCheckMaps* instr, Register object) | 5282     DeferredCheckMaps(LCodeGen* codegen, LCheckMaps* instr, Register object) | 
| 5246         : LDeferredCode(codegen), instr_(instr), object_(object) { | 5283         : LDeferredCode(codegen), instr_(instr), object_(object) { | 
| 5247       SetExit(check_maps()); | 5284       SetExit(check_maps()); | 
| 5248     } | 5285     } | 
| (...skipping 33 matching lines...) | |
| 5282   Label success; | 5319   Label success; | 
| 5283   for (int i = 0; i < maps->size() - 1; i++) { | 5320   for (int i = 0; i < maps->size() - 1; i++) { | 
| 5284     Handle<Map> map = maps->at(i).handle(); | 5321     Handle<Map> map = maps->at(i).handle(); | 
| 5285     __ CompareMapAndBranch(map_reg, map, &success, eq, &success); | 5322     __ CompareMapAndBranch(map_reg, map, &success, eq, &success); | 
| 5286   } | 5323   } | 
| 5287   Handle<Map> map = maps->at(maps->size() - 1).handle(); | 5324   Handle<Map> map = maps->at(maps->size() - 1).handle(); | 
| 5288   // Do the CompareMap() directly within the Branch() and DeoptimizeIf(). | 5325   // Do the CompareMap() directly within the Branch() and DeoptimizeIf(). | 
| 5289   if (instr->hydrogen()->HasMigrationTarget()) { | 5326   if (instr->hydrogen()->HasMigrationTarget()) { | 
| 5290     __ Branch(deferred->entry(), ne, map_reg, Operand(map)); | 5327     __ Branch(deferred->entry(), ne, map_reg, Operand(map)); | 
| 5291   } else { | 5328   } else { | 
| 5292     DeoptimizeIf(ne, instr, "wrong map", map_reg, Operand(map)); | 5329     DeoptimizeIf(ne, instr, Deoptimizer::kWrongMap, map_reg, Operand(map)); | 
| 5293   } | 5330   } | 
| 5294 | 5331 | 
| 5295   __ bind(&success); | 5332   __ bind(&success); | 
| 5296 } | 5333 } | 
| 5297 | 5334 | 
| 5298 | 5335 | 
| 5299 void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) { | 5336 void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) { | 
| 5300   DoubleRegister value_reg = ToDoubleRegister(instr->unclamped()); | 5337   DoubleRegister value_reg = ToDoubleRegister(instr->unclamped()); | 
| 5301   Register result_reg = ToRegister(instr->result()); | 5338   Register result_reg = ToRegister(instr->result()); | 
| 5302   DoubleRegister temp_reg = ToDoubleRegister(instr->temp()); | 5339   DoubleRegister temp_reg = ToDoubleRegister(instr->temp()); | 
| (...skipping 17 matching lines...) | |
| 5320 | 5357 | 
| 5321   // Both smi and heap number cases are handled. | 5358   // Both smi and heap number cases are handled. | 
| 5322   __ UntagAndJumpIfSmi(scratch, input_reg, &is_smi); | 5359   __ UntagAndJumpIfSmi(scratch, input_reg, &is_smi); | 
| 5323 | 5360 | 
| 5324   // Check for heap number | 5361   // Check for heap number | 
| 5325   __ lw(scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset)); | 5362   __ lw(scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset)); | 
| 5326   __ Branch(&heap_number, eq, scratch, Operand(factory()->heap_number_map())); | 5363   __ Branch(&heap_number, eq, scratch, Operand(factory()->heap_number_map())); | 
| 5327 | 5364 | 
| 5328   // Check for undefined. Undefined is converted to zero for clamping | 5365   // Check for undefined. Undefined is converted to zero for clamping | 
| 5329   // conversions. | 5366   // conversions. | 
| 5330   DeoptimizeIf(ne, instr, "not a heap number/undefined", input_reg, | 5367   DeoptimizeIf(ne, instr, Deoptimizer::kNotAHeapNumberUndefined, input_reg, | 
| 5331                Operand(factory()->undefined_value())); | 5368                Operand(factory()->undefined_value())); | 
| 5332   __ mov(result_reg, zero_reg); | 5369   __ mov(result_reg, zero_reg); | 
| 5333   __ jmp(&done); | 5370   __ jmp(&done); | 
| 5334 | 5371 | 
| 5335   // Heap number | 5372   // Heap number | 
| 5336   __ bind(&heap_number); | 5373   __ bind(&heap_number); | 
| 5337   __ ldc1(double_scratch0(), FieldMemOperand(input_reg, | 5374   __ ldc1(double_scratch0(), FieldMemOperand(input_reg, | 
| 5338                                              HeapNumber::kValueOffset)); | 5375                                              HeapNumber::kValueOffset)); | 
| 5339   __ ClampDoubleToUint8(result_reg, double_scratch0(), temp_reg); | 5376   __ ClampDoubleToUint8(result_reg, double_scratch0(), temp_reg); | 
| 5340   __ jmp(&done); | 5377   __ jmp(&done); | 
| (...skipping 402 matching lines...) | |
| 5743 void LCodeGen::DoDeoptimize(LDeoptimize* instr) { | 5780 void LCodeGen::DoDeoptimize(LDeoptimize* instr) { | 
| 5744   Deoptimizer::BailoutType type = instr->hydrogen()->type(); | 5781   Deoptimizer::BailoutType type = instr->hydrogen()->type(); | 
| 5745   // TODO(danno): Stubs expect all deopts to be lazy for historical reasons (the | 5782   // TODO(danno): Stubs expect all deopts to be lazy for historical reasons (the | 
| 5746   // needed return address), even though the implementation of LAZY and EAGER is | 5783   // needed return address), even though the implementation of LAZY and EAGER is | 
| 5747   // now identical. When LAZY is eventually completely folded into EAGER, remove | 5784   // now identical. When LAZY is eventually completely folded into EAGER, remove | 
| 5748   // the special case below. | 5785   // the special case below. | 
| 5749   if (info()->IsStub() && type == Deoptimizer::EAGER) { | 5786   if (info()->IsStub() && type == Deoptimizer::EAGER) { | 
| 5750     type = Deoptimizer::LAZY; | 5787     type = Deoptimizer::LAZY; | 
| 5751   } | 5788   } | 
| 5752 | 5789 | 
| 5753   DeoptimizeIf(al, instr, type, instr->hydrogen()->reason(), zero_reg, | 5790   DeoptimizeIf(al, instr, instr->hydrogen()->reason(), type, zero_reg, | 
| 5754                Operand(zero_reg)); | 5791                Operand(zero_reg)); | 
| 5755 } | 5792 } | 
| 5756 | 5793 | 
| 5757 | 5794 | 
| 5758 void LCodeGen::DoDummy(LDummy* instr) { | 5795 void LCodeGen::DoDummy(LDummy* instr) { | 
| 5759   // Nothing to see here, move on! | 5796   // Nothing to see here, move on! | 
| 5760 } | 5797 } | 
| 5761 | 5798 | 
| 5762 | 5799 | 
| 5763 void LCodeGen::DoDummyUse(LDummyUse* instr) { | 5800 void LCodeGen::DoDummyUse(LDummyUse* instr) { | 
| (...skipping 70 matching lines...) | |
| 5834   RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); | 5871   RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); | 
| 5835 | 5872 | 
| 5836   GenerateOsrPrologue(); | 5873   GenerateOsrPrologue(); | 
| 5837 } | 5874 } | 
| 5838 | 5875 | 
| 5839 | 5876 | 
| 5840 void LCodeGen::DoForInPrepareMap(LForInPrepareMap* instr) { | 5877 void LCodeGen::DoForInPrepareMap(LForInPrepareMap* instr) { | 
| 5841   Register result = ToRegister(instr->result()); | 5878   Register result = ToRegister(instr->result()); | 
| 5842   Register object = ToRegister(instr->object()); | 5879   Register object = ToRegister(instr->object()); | 
| 5843   __ LoadRoot(at, Heap::kUndefinedValueRootIndex); | 5880   __ LoadRoot(at, Heap::kUndefinedValueRootIndex); | 
| 5844   DeoptimizeIf(eq, instr, "undefined", object, Operand(at)); | 5881   DeoptimizeIf(eq, instr, Deoptimizer::kUndefined, object, Operand(at)); | 
| 5845 | 5882 | 
| 5846   Register null_value = t1; | 5883   Register null_value = t1; | 
| 5847   __ LoadRoot(null_value, Heap::kNullValueRootIndex); | 5884   __ LoadRoot(null_value, Heap::kNullValueRootIndex); | 
| 5848   DeoptimizeIf(eq, instr, "null", object, Operand(null_value)); | 5885   DeoptimizeIf(eq, instr, Deoptimizer::kNull, object, Operand(null_value)); | 
| 5849 | 5886 | 
| 5850   __ And(at, object, kSmiTagMask); | 5887   __ And(at, object, kSmiTagMask); | 
| 5851   DeoptimizeIf(eq, instr, "Smi", at, Operand(zero_reg)); | 5888   DeoptimizeIf(eq, instr, Deoptimizer::kSmi, at, Operand(zero_reg)); | 
| 5852 | 5889 | 
| 5853   STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE); | 5890   STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE); | 
| 5854   __ GetObjectType(object, a1, a1); | 5891   __ GetObjectType(object, a1, a1); | 
| 5855   DeoptimizeIf(le, instr, "not a JavaScript object", a1, | 5892   DeoptimizeIf(le, instr, Deoptimizer::kNotAJavaScriptObject, a1, | 
| 5856                Operand(LAST_JS_PROXY_TYPE)); | 5893                Operand(LAST_JS_PROXY_TYPE)); | 
| 5857 | 5894 | 
| 5858   Label use_cache, call_runtime; | 5895   Label use_cache, call_runtime; | 
| 5859   DCHECK(object.is(a0)); | 5896   DCHECK(object.is(a0)); | 
| 5860   __ CheckEnumCache(null_value, &call_runtime); | 5897   __ CheckEnumCache(null_value, &call_runtime); | 
| 5861 | 5898 | 
| 5862   __ lw(result, FieldMemOperand(object, HeapObject::kMapOffset)); | 5899   __ lw(result, FieldMemOperand(object, HeapObject::kMapOffset)); | 
| 5863   __ Branch(&use_cache); | 5900   __ Branch(&use_cache); | 
| 5864 | 5901 | 
| 5865   // Get the set of properties to enumerate. | 5902   // Get the set of properties to enumerate. | 
| 5866   __ bind(&call_runtime); | 5903   __ bind(&call_runtime); | 
| 5867   __ push(object); | 5904   __ push(object); | 
| 5868   CallRuntime(Runtime::kGetPropertyNamesFast, 1, instr); | 5905   CallRuntime(Runtime::kGetPropertyNamesFast, 1, instr); | 
| 5869 | 5906 | 
| 5870   __ lw(a1, FieldMemOperand(v0, HeapObject::kMapOffset)); | 5907   __ lw(a1, FieldMemOperand(v0, HeapObject::kMapOffset)); | 
| 5871   DCHECK(result.is(v0)); | 5908   DCHECK(result.is(v0)); | 
| 5872   __ LoadRoot(at, Heap::kMetaMapRootIndex); | 5909   __ LoadRoot(at, Heap::kMetaMapRootIndex); | 
| 5873   DeoptimizeIf(ne, instr, "wrong map", a1, Operand(at)); | 5910   DeoptimizeIf(ne, instr, Deoptimizer::kWrongMap, a1, Operand(at)); | 
| 5874   __ bind(&use_cache); | 5911   __ bind(&use_cache); | 
| 5875 } | 5912 } | 
| 5876 | 5913 | 
| 5877 | 5914 | 
| 5878 void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) { | 5915 void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) { | 
| 5879   Register map = ToRegister(instr->map()); | 5916   Register map = ToRegister(instr->map()); | 
| 5880   Register result = ToRegister(instr->result()); | 5917   Register result = ToRegister(instr->result()); | 
| 5881   Label load_cache, done; | 5918   Label load_cache, done; | 
| 5882   __ EnumLength(result, map); | 5919   __ EnumLength(result, map); | 
| 5883   __ Branch(&load_cache, ne, result, Operand(Smi::FromInt(0))); | 5920   __ Branch(&load_cache, ne, result, Operand(Smi::FromInt(0))); | 
| 5884   __ li(result, Operand(isolate()->factory()->empty_fixed_array())); | 5921   __ li(result, Operand(isolate()->factory()->empty_fixed_array())); | 
| 5885   __ jmp(&done); | 5922   __ jmp(&done); | 
| 5886 | 5923 | 
| 5887   __ bind(&load_cache); | 5924   __ bind(&load_cache); | 
| 5888   __ LoadInstanceDescriptors(map, result); | 5925   __ LoadInstanceDescriptors(map, result); | 
| 5889   __ lw(result, | 5926   __ lw(result, | 
| 5890         FieldMemOperand(result, DescriptorArray::kEnumCacheOffset)); | 5927         FieldMemOperand(result, DescriptorArray::kEnumCacheOffset)); | 
| 5891   __ lw(result, | 5928   __ lw(result, | 
| 5892         FieldMemOperand(result, FixedArray::SizeFor(instr->idx()))); | 5929         FieldMemOperand(result, FixedArray::SizeFor(instr->idx()))); | 
| 5893   DeoptimizeIf(eq, instr, "no cache", result, Operand(zero_reg)); | 5930   DeoptimizeIf(eq, instr, Deoptimizer::kNoCache, result, Operand(zero_reg)); | 
| 5894 | 5931 | 
| 5895   __ bind(&done); | 5932   __ bind(&done); | 
| 5896 } | 5933 } | 
| 5897 | 5934 | 
| 5898 | 5935 | 
| 5899 void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) { | 5936 void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) { | 
| 5900   Register object = ToRegister(instr->value()); | 5937   Register object = ToRegister(instr->value()); | 
| 5901   Register map = ToRegister(instr->map()); | 5938   Register map = ToRegister(instr->map()); | 
| 5902   __ lw(scratch0(), FieldMemOperand(object, HeapObject::kMapOffset)); | 5939   __ lw(scratch0(), FieldMemOperand(object, HeapObject::kMapOffset)); | 
| 5903   DeoptimizeIf(ne, instr, "wrong map", map, Operand(scratch0())); | 5940   DeoptimizeIf(ne, instr, Deoptimizer::kWrongMap, map, Operand(scratch0())); | 
| 5904 } | 5941 } | 
| 5905 | 5942 | 
| 5906 | 5943 | 
| 5907 void LCodeGen::DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr, | 5944 void LCodeGen::DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr, | 
| 5908                                            Register result, | 5945                                            Register result, | 
| 5909                                            Register object, | 5946                                            Register object, | 
| 5910                                            Register index) { | 5947                                            Register index) { | 
| 5911   PushSafepointRegistersScope scope(this); | 5948   PushSafepointRegistersScope scope(this); | 
| 5912   __ Push(object, index); | 5949   __ Push(object, index); | 
| 5913   __ mov(cp, zero_reg); | 5950   __ mov(cp, zero_reg); | 
| (...skipping 76 matching lines...) | |
| 5990   __ li(at, scope_info); | 6027   __ li(at, scope_info); | 
| 5991   __ Push(at, ToRegister(instr->function())); | 6028   __ Push(at, ToRegister(instr->function())); | 
| 5992   CallRuntime(Runtime::kPushBlockContext, 2, instr); | 6029   CallRuntime(Runtime::kPushBlockContext, 2, instr); | 
| 5993   RecordSafepoint(Safepoint::kNoLazyDeopt); | 6030   RecordSafepoint(Safepoint::kNoLazyDeopt); | 
| 5994 } | 6031 } | 
| 5995 | 6032 | 
| 5996 | 6033 | 
| 5997 #undef __ | 6034 #undef __ | 
| 5998 | 6035 | 
| 5999 } }  // namespace v8::internal | 6036 } }  // namespace v8::internal | 