Chromium Code Reviews

Side by Side Diff: src/ia32/lithium-codegen-ia32.cc

Issue 900223002: Revert of Externalize deoptimization reasons. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 5 years, 10 months ago
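The substance of this patch, visible in the hunks below, is that the third parameter of LCodeGen::DeoptimizeIf goes back from the Deoptimizer::DeoptReason enum (left column) to a free-form const char* detail string (right column), and every call site swaps its enum constant for the matching string literal. A minimal sketch of the affected signature and one call site, reconstructed from this diff alone (abbreviated; not the full declaration from lithium-codegen-ia32.h):

    // Left column (before the revert): deopt reasons are enum values.
    void LCodeGen::DeoptimizeIf(Condition cc, LInstruction* instr,
                                Deoptimizer::DeoptReason deopt_reason,
                                Deoptimizer::BailoutType bailout_type);
    // Call site example: DeoptimizeIf(zero, instr, Deoptimizer::kMinusZero);

    // Right column (after the revert): deopt reasons are plain strings again.
    void LCodeGen::DeoptimizeIf(Condition cc, LInstruction* instr,
                                const char* detail,
                                Deoptimizer::BailoutType bailout_type);
    // Call site example: DeoptimizeIf(zero, instr, "minus zero");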
OLD | NEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/v8.h" 5 #include "src/v8.h"
6 6
7 #if V8_TARGET_ARCH_IA32 7 #if V8_TARGET_ARCH_IA32
8 8
9 #include "src/base/bits.h" 9 #include "src/base/bits.h"
10 #include "src/code-factory.h" 10 #include "src/code-factory.h"
(...skipping 801 matching lines...)
812 int pc_offset = masm()->pc_offset(); 812 int pc_offset = masm()->pc_offset();
813 environment->Register(deoptimization_index, 813 environment->Register(deoptimization_index,
814 translation.index(), 814 translation.index(),
815 (mode == Safepoint::kLazyDeopt) ? pc_offset : -1); 815 (mode == Safepoint::kLazyDeopt) ? pc_offset : -1);
816 deoptimizations_.Add(environment, zone()); 816 deoptimizations_.Add(environment, zone());
817 } 817 }
818 } 818 }
819 819
820 820
821 void LCodeGen::DeoptimizeIf(Condition cc, LInstruction* instr, 821 void LCodeGen::DeoptimizeIf(Condition cc, LInstruction* instr,
822 Deoptimizer::DeoptReason deopt_reason, 822 const char* detail,
823 Deoptimizer::BailoutType bailout_type) { 823 Deoptimizer::BailoutType bailout_type) {
824 LEnvironment* environment = instr->environment(); 824 LEnvironment* environment = instr->environment();
825 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); 825 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
826 DCHECK(environment->HasBeenRegistered()); 826 DCHECK(environment->HasBeenRegistered());
827 int id = environment->deoptimization_index(); 827 int id = environment->deoptimization_index();
828 DCHECK(info()->IsOptimizing() || info()->IsStub()); 828 DCHECK(info()->IsOptimizing() || info()->IsStub());
829 Address entry = 829 Address entry =
830 Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type); 830 Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type);
831 if (entry == NULL) { 831 if (entry == NULL) {
832 Abort(kBailoutWasNotPrepared); 832 Abort(kBailoutWasNotPrepared);
(...skipping 22 matching lines...)
855 } 855 }
856 856
857 if (info()->ShouldTrapOnDeopt()) { 857 if (info()->ShouldTrapOnDeopt()) {
858 Label done; 858 Label done;
859 if (cc != no_condition) __ j(NegateCondition(cc), &done, Label::kNear); 859 if (cc != no_condition) __ j(NegateCondition(cc), &done, Label::kNear);
860 __ int3(); 860 __ int3();
861 __ bind(&done); 861 __ bind(&done);
862 } 862 }
863 863
864 Deoptimizer::Reason reason(instr->hydrogen_value()->position().raw(), 864 Deoptimizer::Reason reason(instr->hydrogen_value()->position().raw(),
865 instr->Mnemonic(), deopt_reason); 865 instr->Mnemonic(), detail);
866 DCHECK(info()->IsStub() || frame_is_built_); 866 DCHECK(info()->IsStub() || frame_is_built_);
867 if (cc == no_condition && frame_is_built_) { 867 if (cc == no_condition && frame_is_built_) {
868 DeoptComment(reason); 868 DeoptComment(reason);
869 __ call(entry, RelocInfo::RUNTIME_ENTRY); 869 __ call(entry, RelocInfo::RUNTIME_ENTRY);
870 } else { 870 } else {
871 Deoptimizer::JumpTableEntry table_entry(entry, reason, bailout_type, 871 Deoptimizer::JumpTableEntry table_entry(entry, reason, bailout_type,
872 !frame_is_built_); 872 !frame_is_built_);
873 // We often have several deopts to the same entry, reuse the last 873 // We often have several deopts to the same entry, reuse the last
874 // jump entry if this is the case. 874 // jump entry if this is the case.
875 if (jump_table_.is_empty() || 875 if (jump_table_.is_empty() ||
876 !table_entry.IsEquivalentTo(jump_table_.last())) { 876 !table_entry.IsEquivalentTo(jump_table_.last())) {
877 jump_table_.Add(table_entry, zone()); 877 jump_table_.Add(table_entry, zone());
878 } 878 }
879 if (cc == no_condition) { 879 if (cc == no_condition) {
880 __ jmp(&jump_table_.last().label); 880 __ jmp(&jump_table_.last().label);
881 } else { 881 } else {
882 __ j(cc, &jump_table_.last().label); 882 __ j(cc, &jump_table_.last().label);
883 } 883 }
884 } 884 }
885 } 885 }
886 886
887 887
888 void LCodeGen::DeoptimizeIf(Condition cc, LInstruction* instr, 888 void LCodeGen::DeoptimizeIf(Condition cc, LInstruction* instr,
889 Deoptimizer::DeoptReason deopt_reason) { 889 const char* detail) {
890 Deoptimizer::BailoutType bailout_type = info()->IsStub() 890 Deoptimizer::BailoutType bailout_type = info()->IsStub()
891 ? Deoptimizer::LAZY 891 ? Deoptimizer::LAZY
892 : Deoptimizer::EAGER; 892 : Deoptimizer::EAGER;
893 DeoptimizeIf(cc, instr, deopt_reason, bailout_type); 893 DeoptimizeIf(cc, instr, detail, bailout_type);
894 } 894 }
895 895
896 896
897 void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) { 897 void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
898 int length = deoptimizations_.length(); 898 int length = deoptimizations_.length();
899 if (length == 0) return; 899 if (length == 0) return;
900 Handle<DeoptimizationInputData> data = 900 Handle<DeoptimizationInputData> data =
901 DeoptimizationInputData::New(isolate(), length, TENURED); 901 DeoptimizationInputData::New(isolate(), length, TENURED);
902 902
903 Handle<ByteArray> translations = 903 Handle<ByteArray> translations =
(...skipping 209 matching lines...)
1113 int32_t mask = divisor < 0 ? -(divisor + 1) : (divisor - 1); 1113 int32_t mask = divisor < 0 ? -(divisor + 1) : (divisor - 1);
1114 Label dividend_is_not_negative, done; 1114 Label dividend_is_not_negative, done;
1115 if (hmod->CheckFlag(HValue::kLeftCanBeNegative)) { 1115 if (hmod->CheckFlag(HValue::kLeftCanBeNegative)) {
1116 __ test(dividend, dividend); 1116 __ test(dividend, dividend);
1117 __ j(not_sign, &dividend_is_not_negative, Label::kNear); 1117 __ j(not_sign, &dividend_is_not_negative, Label::kNear);
1118 // Note that this is correct even for kMinInt operands. 1118 // Note that this is correct even for kMinInt operands.
1119 __ neg(dividend); 1119 __ neg(dividend);
1120 __ and_(dividend, mask); 1120 __ and_(dividend, mask);
1121 __ neg(dividend); 1121 __ neg(dividend);
1122 if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { 1122 if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) {
1123 DeoptimizeIf(zero, instr, Deoptimizer::kMinusZero); 1123 DeoptimizeIf(zero, instr, "minus zero");
1124 } 1124 }
1125 __ jmp(&done, Label::kNear); 1125 __ jmp(&done, Label::kNear);
1126 } 1126 }
1127 1127
1128 __ bind(&dividend_is_not_negative); 1128 __ bind(&dividend_is_not_negative);
1129 __ and_(dividend, mask); 1129 __ and_(dividend, mask);
1130 __ bind(&done); 1130 __ bind(&done);
1131 } 1131 }
1132 1132
1133 1133
1134 void LCodeGen::DoModByConstI(LModByConstI* instr) { 1134 void LCodeGen::DoModByConstI(LModByConstI* instr) {
1135 Register dividend = ToRegister(instr->dividend()); 1135 Register dividend = ToRegister(instr->dividend());
1136 int32_t divisor = instr->divisor(); 1136 int32_t divisor = instr->divisor();
1137 DCHECK(ToRegister(instr->result()).is(eax)); 1137 DCHECK(ToRegister(instr->result()).is(eax));
1138 1138
1139 if (divisor == 0) { 1139 if (divisor == 0) {
1140 DeoptimizeIf(no_condition, instr, Deoptimizer::kDivisionByZero); 1140 DeoptimizeIf(no_condition, instr, "division by zero");
1141 return; 1141 return;
1142 } 1142 }
1143 1143
1144 __ TruncatingDiv(dividend, Abs(divisor)); 1144 __ TruncatingDiv(dividend, Abs(divisor));
1145 __ imul(edx, edx, Abs(divisor)); 1145 __ imul(edx, edx, Abs(divisor));
1146 __ mov(eax, dividend); 1146 __ mov(eax, dividend);
1147 __ sub(eax, edx); 1147 __ sub(eax, edx);
1148 1148
1149 // Check for negative zero. 1149 // Check for negative zero.
1150 HMod* hmod = instr->hydrogen(); 1150 HMod* hmod = instr->hydrogen();
1151 if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { 1151 if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) {
1152 Label remainder_not_zero; 1152 Label remainder_not_zero;
1153 __ j(not_zero, &remainder_not_zero, Label::kNear); 1153 __ j(not_zero, &remainder_not_zero, Label::kNear);
1154 __ cmp(dividend, Immediate(0)); 1154 __ cmp(dividend, Immediate(0));
1155 DeoptimizeIf(less, instr, Deoptimizer::kMinusZero); 1155 DeoptimizeIf(less, instr, "minus zero");
1156 __ bind(&remainder_not_zero); 1156 __ bind(&remainder_not_zero);
1157 } 1157 }
1158 } 1158 }
1159 1159
1160 1160
1161 void LCodeGen::DoModI(LModI* instr) { 1161 void LCodeGen::DoModI(LModI* instr) {
1162 HMod* hmod = instr->hydrogen(); 1162 HMod* hmod = instr->hydrogen();
1163 1163
1164 Register left_reg = ToRegister(instr->left()); 1164 Register left_reg = ToRegister(instr->left());
1165 DCHECK(left_reg.is(eax)); 1165 DCHECK(left_reg.is(eax));
1166 Register right_reg = ToRegister(instr->right()); 1166 Register right_reg = ToRegister(instr->right());
1167 DCHECK(!right_reg.is(eax)); 1167 DCHECK(!right_reg.is(eax));
1168 DCHECK(!right_reg.is(edx)); 1168 DCHECK(!right_reg.is(edx));
1169 Register result_reg = ToRegister(instr->result()); 1169 Register result_reg = ToRegister(instr->result());
1170 DCHECK(result_reg.is(edx)); 1170 DCHECK(result_reg.is(edx));
1171 1171
1172 Label done; 1172 Label done;
1173 // Check for x % 0, idiv would signal a divide error. We have to 1173 // Check for x % 0, idiv would signal a divide error. We have to
1174 // deopt in this case because we can't return a NaN. 1174 // deopt in this case because we can't return a NaN.
1175 if (hmod->CheckFlag(HValue::kCanBeDivByZero)) { 1175 if (hmod->CheckFlag(HValue::kCanBeDivByZero)) {
1176 __ test(right_reg, Operand(right_reg)); 1176 __ test(right_reg, Operand(right_reg));
1177 DeoptimizeIf(zero, instr, Deoptimizer::kDivisionByZero); 1177 DeoptimizeIf(zero, instr, "division by zero");
1178 } 1178 }
1179 1179
1180 // Check for kMinInt % -1, idiv would signal a divide error. We 1180 // Check for kMinInt % -1, idiv would signal a divide error. We
1181 // have to deopt if we care about -0, because we can't return that. 1181 // have to deopt if we care about -0, because we can't return that.
1182 if (hmod->CheckFlag(HValue::kCanOverflow)) { 1182 if (hmod->CheckFlag(HValue::kCanOverflow)) {
1183 Label no_overflow_possible; 1183 Label no_overflow_possible;
1184 __ cmp(left_reg, kMinInt); 1184 __ cmp(left_reg, kMinInt);
1185 __ j(not_equal, &no_overflow_possible, Label::kNear); 1185 __ j(not_equal, &no_overflow_possible, Label::kNear);
1186 __ cmp(right_reg, -1); 1186 __ cmp(right_reg, -1);
1187 if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { 1187 if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) {
1188 DeoptimizeIf(equal, instr, Deoptimizer::kMinusZero); 1188 DeoptimizeIf(equal, instr, "minus zero");
1189 } else { 1189 } else {
1190 __ j(not_equal, &no_overflow_possible, Label::kNear); 1190 __ j(not_equal, &no_overflow_possible, Label::kNear);
1191 __ Move(result_reg, Immediate(0)); 1191 __ Move(result_reg, Immediate(0));
1192 __ jmp(&done, Label::kNear); 1192 __ jmp(&done, Label::kNear);
1193 } 1193 }
1194 __ bind(&no_overflow_possible); 1194 __ bind(&no_overflow_possible);
1195 } 1195 }
1196 1196
1197 // Sign extend dividend in eax into edx:eax. 1197 // Sign extend dividend in eax into edx:eax.
1198 __ cdq(); 1198 __ cdq();
1199 1199
1200 // If we care about -0, test if the dividend is <0 and the result is 0. 1200 // If we care about -0, test if the dividend is <0 and the result is 0.
1201 if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { 1201 if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) {
1202 Label positive_left; 1202 Label positive_left;
1203 __ test(left_reg, Operand(left_reg)); 1203 __ test(left_reg, Operand(left_reg));
1204 __ j(not_sign, &positive_left, Label::kNear); 1204 __ j(not_sign, &positive_left, Label::kNear);
1205 __ idiv(right_reg); 1205 __ idiv(right_reg);
1206 __ test(result_reg, Operand(result_reg)); 1206 __ test(result_reg, Operand(result_reg));
1207 DeoptimizeIf(zero, instr, Deoptimizer::kMinusZero); 1207 DeoptimizeIf(zero, instr, "minus zero");
1208 __ jmp(&done, Label::kNear); 1208 __ jmp(&done, Label::kNear);
1209 __ bind(&positive_left); 1209 __ bind(&positive_left);
1210 } 1210 }
1211 __ idiv(right_reg); 1211 __ idiv(right_reg);
1212 __ bind(&done); 1212 __ bind(&done);
1213 } 1213 }
1214 1214
1215 1215
1216 void LCodeGen::DoDivByPowerOf2I(LDivByPowerOf2I* instr) { 1216 void LCodeGen::DoDivByPowerOf2I(LDivByPowerOf2I* instr) {
1217 Register dividend = ToRegister(instr->dividend()); 1217 Register dividend = ToRegister(instr->dividend());
1218 int32_t divisor = instr->divisor(); 1218 int32_t divisor = instr->divisor();
1219 Register result = ToRegister(instr->result()); 1219 Register result = ToRegister(instr->result());
1220 DCHECK(divisor == kMinInt || base::bits::IsPowerOfTwo32(Abs(divisor))); 1220 DCHECK(divisor == kMinInt || base::bits::IsPowerOfTwo32(Abs(divisor)));
1221 DCHECK(!result.is(dividend)); 1221 DCHECK(!result.is(dividend));
1222 1222
1223 // Check for (0 / -x) that will produce negative zero. 1223 // Check for (0 / -x) that will produce negative zero.
1224 HDiv* hdiv = instr->hydrogen(); 1224 HDiv* hdiv = instr->hydrogen();
1225 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) { 1225 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) {
1226 __ test(dividend, dividend); 1226 __ test(dividend, dividend);
1227 DeoptimizeIf(zero, instr, Deoptimizer::kMinusZero); 1227 DeoptimizeIf(zero, instr, "minus zero");
1228 } 1228 }
1229 // Check for (kMinInt / -1). 1229 // Check for (kMinInt / -1).
1230 if (hdiv->CheckFlag(HValue::kCanOverflow) && divisor == -1) { 1230 if (hdiv->CheckFlag(HValue::kCanOverflow) && divisor == -1) {
1231 __ cmp(dividend, kMinInt); 1231 __ cmp(dividend, kMinInt);
1232 DeoptimizeIf(zero, instr, Deoptimizer::kOverflow); 1232 DeoptimizeIf(zero, instr, "overflow");
1233 } 1233 }
1234 // Deoptimize if remainder will not be 0. 1234 // Deoptimize if remainder will not be 0.
1235 if (!hdiv->CheckFlag(HInstruction::kAllUsesTruncatingToInt32) && 1235 if (!hdiv->CheckFlag(HInstruction::kAllUsesTruncatingToInt32) &&
1236 divisor != 1 && divisor != -1) { 1236 divisor != 1 && divisor != -1) {
1237 int32_t mask = divisor < 0 ? -(divisor + 1) : (divisor - 1); 1237 int32_t mask = divisor < 0 ? -(divisor + 1) : (divisor - 1);
1238 __ test(dividend, Immediate(mask)); 1238 __ test(dividend, Immediate(mask));
1239 DeoptimizeIf(not_zero, instr, Deoptimizer::kLostPrecision); 1239 DeoptimizeIf(not_zero, instr, "lost precision");
1240 } 1240 }
1241 __ Move(result, dividend); 1241 __ Move(result, dividend);
1242 int32_t shift = WhichPowerOf2Abs(divisor); 1242 int32_t shift = WhichPowerOf2Abs(divisor);
1243 if (shift > 0) { 1243 if (shift > 0) {
1244 // The arithmetic shift is always OK, the 'if' is an optimization only. 1244 // The arithmetic shift is always OK, the 'if' is an optimization only.
1245 if (shift > 1) __ sar(result, 31); 1245 if (shift > 1) __ sar(result, 31);
1246 __ shr(result, 32 - shift); 1246 __ shr(result, 32 - shift);
1247 __ add(result, dividend); 1247 __ add(result, dividend);
1248 __ sar(result, shift); 1248 __ sar(result, shift);
1249 } 1249 }
1250 if (divisor < 0) __ neg(result); 1250 if (divisor < 0) __ neg(result);
1251 } 1251 }
1252 1252
1253 1253
1254 void LCodeGen::DoDivByConstI(LDivByConstI* instr) { 1254 void LCodeGen::DoDivByConstI(LDivByConstI* instr) {
1255 Register dividend = ToRegister(instr->dividend()); 1255 Register dividend = ToRegister(instr->dividend());
1256 int32_t divisor = instr->divisor(); 1256 int32_t divisor = instr->divisor();
1257 DCHECK(ToRegister(instr->result()).is(edx)); 1257 DCHECK(ToRegister(instr->result()).is(edx));
1258 1258
1259 if (divisor == 0) { 1259 if (divisor == 0) {
1260 DeoptimizeIf(no_condition, instr, Deoptimizer::kDivisionByZero); 1260 DeoptimizeIf(no_condition, instr, "division by zero");
1261 return; 1261 return;
1262 } 1262 }
1263 1263
1264 // Check for (0 / -x) that will produce negative zero. 1264 // Check for (0 / -x) that will produce negative zero.
1265 HDiv* hdiv = instr->hydrogen(); 1265 HDiv* hdiv = instr->hydrogen();
1266 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) { 1266 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) {
1267 __ test(dividend, dividend); 1267 __ test(dividend, dividend);
1268 DeoptimizeIf(zero, instr, Deoptimizer::kMinusZero); 1268 DeoptimizeIf(zero, instr, "minus zero");
1269 } 1269 }
1270 1270
1271 __ TruncatingDiv(dividend, Abs(divisor)); 1271 __ TruncatingDiv(dividend, Abs(divisor));
1272 if (divisor < 0) __ neg(edx); 1272 if (divisor < 0) __ neg(edx);
1273 1273
1274 if (!hdiv->CheckFlag(HInstruction::kAllUsesTruncatingToInt32)) { 1274 if (!hdiv->CheckFlag(HInstruction::kAllUsesTruncatingToInt32)) {
1275 __ mov(eax, edx); 1275 __ mov(eax, edx);
1276 __ imul(eax, eax, divisor); 1276 __ imul(eax, eax, divisor);
1277 __ sub(eax, dividend); 1277 __ sub(eax, dividend);
1278 DeoptimizeIf(not_equal, instr, Deoptimizer::kLostPrecision); 1278 DeoptimizeIf(not_equal, instr, "lost precision");
1279 } 1279 }
1280 } 1280 }
1281 1281
1282 1282
1283 // TODO(svenpanne) Refactor this to avoid code duplication with DoFlooringDivI. 1283 // TODO(svenpanne) Refactor this to avoid code duplication with DoFlooringDivI.
1284 void LCodeGen::DoDivI(LDivI* instr) { 1284 void LCodeGen::DoDivI(LDivI* instr) {
1285 HBinaryOperation* hdiv = instr->hydrogen(); 1285 HBinaryOperation* hdiv = instr->hydrogen();
1286 Register dividend = ToRegister(instr->dividend()); 1286 Register dividend = ToRegister(instr->dividend());
1287 Register divisor = ToRegister(instr->divisor()); 1287 Register divisor = ToRegister(instr->divisor());
1288 Register remainder = ToRegister(instr->temp()); 1288 Register remainder = ToRegister(instr->temp());
1289 DCHECK(dividend.is(eax)); 1289 DCHECK(dividend.is(eax));
1290 DCHECK(remainder.is(edx)); 1290 DCHECK(remainder.is(edx));
1291 DCHECK(ToRegister(instr->result()).is(eax)); 1291 DCHECK(ToRegister(instr->result()).is(eax));
1292 DCHECK(!divisor.is(eax)); 1292 DCHECK(!divisor.is(eax));
1293 DCHECK(!divisor.is(edx)); 1293 DCHECK(!divisor.is(edx));
1294 1294
1295 // Check for x / 0. 1295 // Check for x / 0.
1296 if (hdiv->CheckFlag(HValue::kCanBeDivByZero)) { 1296 if (hdiv->CheckFlag(HValue::kCanBeDivByZero)) {
1297 __ test(divisor, divisor); 1297 __ test(divisor, divisor);
1298 DeoptimizeIf(zero, instr, Deoptimizer::kDivisionByZero); 1298 DeoptimizeIf(zero, instr, "division by zero");
1299 } 1299 }
1300 1300
1301 // Check for (0 / -x) that will produce negative zero. 1301 // Check for (0 / -x) that will produce negative zero.
1302 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero)) { 1302 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero)) {
1303 Label dividend_not_zero; 1303 Label dividend_not_zero;
1304 __ test(dividend, dividend); 1304 __ test(dividend, dividend);
1305 __ j(not_zero, &dividend_not_zero, Label::kNear); 1305 __ j(not_zero, &dividend_not_zero, Label::kNear);
1306 __ test(divisor, divisor); 1306 __ test(divisor, divisor);
1307 DeoptimizeIf(sign, instr, Deoptimizer::kMinusZero); 1307 DeoptimizeIf(sign, instr, "minus zero");
1308 __ bind(&dividend_not_zero); 1308 __ bind(&dividend_not_zero);
1309 } 1309 }
1310 1310
1311 // Check for (kMinInt / -1). 1311 // Check for (kMinInt / -1).
1312 if (hdiv->CheckFlag(HValue::kCanOverflow)) { 1312 if (hdiv->CheckFlag(HValue::kCanOverflow)) {
1313 Label dividend_not_min_int; 1313 Label dividend_not_min_int;
1314 __ cmp(dividend, kMinInt); 1314 __ cmp(dividend, kMinInt);
1315 __ j(not_zero, &dividend_not_min_int, Label::kNear); 1315 __ j(not_zero, &dividend_not_min_int, Label::kNear);
1316 __ cmp(divisor, -1); 1316 __ cmp(divisor, -1);
1317 DeoptimizeIf(zero, instr, Deoptimizer::kOverflow); 1317 DeoptimizeIf(zero, instr, "overflow");
1318 __ bind(&dividend_not_min_int); 1318 __ bind(&dividend_not_min_int);
1319 } 1319 }
1320 1320
1321 // Sign extend to edx (= remainder). 1321 // Sign extend to edx (= remainder).
1322 __ cdq(); 1322 __ cdq();
1323 __ idiv(divisor); 1323 __ idiv(divisor);
1324 1324
1325 if (!hdiv->CheckFlag(HValue::kAllUsesTruncatingToInt32)) { 1325 if (!hdiv->CheckFlag(HValue::kAllUsesTruncatingToInt32)) {
1326 // Deoptimize if remainder is not 0. 1326 // Deoptimize if remainder is not 0.
1327 __ test(remainder, remainder); 1327 __ test(remainder, remainder);
1328 DeoptimizeIf(not_zero, instr, Deoptimizer::kLostPrecision); 1328 DeoptimizeIf(not_zero, instr, "lost precision");
1329 } 1329 }
1330 } 1330 }
1331 1331
1332 1332
1333 void LCodeGen::DoFlooringDivByPowerOf2I(LFlooringDivByPowerOf2I* instr) { 1333 void LCodeGen::DoFlooringDivByPowerOf2I(LFlooringDivByPowerOf2I* instr) {
1334 Register dividend = ToRegister(instr->dividend()); 1334 Register dividend = ToRegister(instr->dividend());
1335 int32_t divisor = instr->divisor(); 1335 int32_t divisor = instr->divisor();
1336 DCHECK(dividend.is(ToRegister(instr->result()))); 1336 DCHECK(dividend.is(ToRegister(instr->result())));
1337 1337
1338 // If the divisor is positive, things are easy: There can be no deopts and we 1338 // If the divisor is positive, things are easy: There can be no deopts and we
1339 // can simply do an arithmetic right shift. 1339 // can simply do an arithmetic right shift.
1340 if (divisor == 1) return; 1340 if (divisor == 1) return;
1341 int32_t shift = WhichPowerOf2Abs(divisor); 1341 int32_t shift = WhichPowerOf2Abs(divisor);
1342 if (divisor > 1) { 1342 if (divisor > 1) {
1343 __ sar(dividend, shift); 1343 __ sar(dividend, shift);
1344 return; 1344 return;
1345 } 1345 }
1346 1346
1347 // If the divisor is negative, we have to negate and handle edge cases. 1347 // If the divisor is negative, we have to negate and handle edge cases.
1348 __ neg(dividend); 1348 __ neg(dividend);
1349 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { 1349 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
1350 DeoptimizeIf(zero, instr, Deoptimizer::kMinusZero); 1350 DeoptimizeIf(zero, instr, "minus zero");
1351 } 1351 }
1352 1352
1353 // Dividing by -1 is basically negation, unless we overflow. 1353 // Dividing by -1 is basically negation, unless we overflow.
1354 if (divisor == -1) { 1354 if (divisor == -1) {
1355 if (instr->hydrogen()->CheckFlag(HValue::kLeftCanBeMinInt)) { 1355 if (instr->hydrogen()->CheckFlag(HValue::kLeftCanBeMinInt)) {
1356 DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow); 1356 DeoptimizeIf(overflow, instr, "overflow");
1357 } 1357 }
1358 return; 1358 return;
1359 } 1359 }
1360 1360
1361 // If the negation could not overflow, simply shifting is OK. 1361 // If the negation could not overflow, simply shifting is OK.
1362 if (!instr->hydrogen()->CheckFlag(HValue::kLeftCanBeMinInt)) { 1362 if (!instr->hydrogen()->CheckFlag(HValue::kLeftCanBeMinInt)) {
1363 __ sar(dividend, shift); 1363 __ sar(dividend, shift);
1364 return; 1364 return;
1365 } 1365 }
1366 1366
1367 Label not_kmin_int, done; 1367 Label not_kmin_int, done;
1368 __ j(no_overflow, &not_kmin_int, Label::kNear); 1368 __ j(no_overflow, &not_kmin_int, Label::kNear);
1369 __ mov(dividend, Immediate(kMinInt / divisor)); 1369 __ mov(dividend, Immediate(kMinInt / divisor));
1370 __ jmp(&done, Label::kNear); 1370 __ jmp(&done, Label::kNear);
1371 __ bind(&not_kmin_int); 1371 __ bind(&not_kmin_int);
1372 __ sar(dividend, shift); 1372 __ sar(dividend, shift);
1373 __ bind(&done); 1373 __ bind(&done);
1374 } 1374 }
1375 1375
1376 1376
1377 void LCodeGen::DoFlooringDivByConstI(LFlooringDivByConstI* instr) { 1377 void LCodeGen::DoFlooringDivByConstI(LFlooringDivByConstI* instr) {
1378 Register dividend = ToRegister(instr->dividend()); 1378 Register dividend = ToRegister(instr->dividend());
1379 int32_t divisor = instr->divisor(); 1379 int32_t divisor = instr->divisor();
1380 DCHECK(ToRegister(instr->result()).is(edx)); 1380 DCHECK(ToRegister(instr->result()).is(edx));
1381 1381
1382 if (divisor == 0) { 1382 if (divisor == 0) {
1383 DeoptimizeIf(no_condition, instr, Deoptimizer::kDivisionByZero); 1383 DeoptimizeIf(no_condition, instr, "division by zero");
1384 return; 1384 return;
1385 } 1385 }
1386 1386
1387 // Check for (0 / -x) that will produce negative zero. 1387 // Check for (0 / -x) that will produce negative zero.
1388 HMathFloorOfDiv* hdiv = instr->hydrogen(); 1388 HMathFloorOfDiv* hdiv = instr->hydrogen();
1389 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) { 1389 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) {
1390 __ test(dividend, dividend); 1390 __ test(dividend, dividend);
1391 DeoptimizeIf(zero, instr, Deoptimizer::kMinusZero); 1391 DeoptimizeIf(zero, instr, "minus zero");
1392 } 1392 }
1393 1393
1394 // Easy case: We need no dynamic check for the dividend and the flooring 1394 // Easy case: We need no dynamic check for the dividend and the flooring
1395 // division is the same as the truncating division. 1395 // division is the same as the truncating division.
1396 if ((divisor > 0 && !hdiv->CheckFlag(HValue::kLeftCanBeNegative)) || 1396 if ((divisor > 0 && !hdiv->CheckFlag(HValue::kLeftCanBeNegative)) ||
1397 (divisor < 0 && !hdiv->CheckFlag(HValue::kLeftCanBePositive))) { 1397 (divisor < 0 && !hdiv->CheckFlag(HValue::kLeftCanBePositive))) {
1398 __ TruncatingDiv(dividend, Abs(divisor)); 1398 __ TruncatingDiv(dividend, Abs(divisor));
1399 if (divisor < 0) __ neg(edx); 1399 if (divisor < 0) __ neg(edx);
1400 return; 1400 return;
1401 } 1401 }
(...skipping 26 matching lines...)
1428 Register result = ToRegister(instr->result()); 1428 Register result = ToRegister(instr->result());
1429 DCHECK(dividend.is(eax)); 1429 DCHECK(dividend.is(eax));
1430 DCHECK(remainder.is(edx)); 1430 DCHECK(remainder.is(edx));
1431 DCHECK(result.is(eax)); 1431 DCHECK(result.is(eax));
1432 DCHECK(!divisor.is(eax)); 1432 DCHECK(!divisor.is(eax));
1433 DCHECK(!divisor.is(edx)); 1433 DCHECK(!divisor.is(edx));
1434 1434
1435 // Check for x / 0. 1435 // Check for x / 0.
1436 if (hdiv->CheckFlag(HValue::kCanBeDivByZero)) { 1436 if (hdiv->CheckFlag(HValue::kCanBeDivByZero)) {
1437 __ test(divisor, divisor); 1437 __ test(divisor, divisor);
1438 DeoptimizeIf(zero, instr, Deoptimizer::kDivisionByZero); 1438 DeoptimizeIf(zero, instr, "division by zero");
1439 } 1439 }
1440 1440
1441 // Check for (0 / -x) that will produce negative zero. 1441 // Check for (0 / -x) that will produce negative zero.
1442 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero)) { 1442 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero)) {
1443 Label dividend_not_zero; 1443 Label dividend_not_zero;
1444 __ test(dividend, dividend); 1444 __ test(dividend, dividend);
1445 __ j(not_zero, &dividend_not_zero, Label::kNear); 1445 __ j(not_zero, &dividend_not_zero, Label::kNear);
1446 __ test(divisor, divisor); 1446 __ test(divisor, divisor);
1447 DeoptimizeIf(sign, instr, Deoptimizer::kMinusZero); 1447 DeoptimizeIf(sign, instr, "minus zero");
1448 __ bind(&dividend_not_zero); 1448 __ bind(&dividend_not_zero);
1449 } 1449 }
1450 1450
1451 // Check for (kMinInt / -1). 1451 // Check for (kMinInt / -1).
1452 if (hdiv->CheckFlag(HValue::kCanOverflow)) { 1452 if (hdiv->CheckFlag(HValue::kCanOverflow)) {
1453 Label dividend_not_min_int; 1453 Label dividend_not_min_int;
1454 __ cmp(dividend, kMinInt); 1454 __ cmp(dividend, kMinInt);
1455 __ j(not_zero, &dividend_not_min_int, Label::kNear); 1455 __ j(not_zero, &dividend_not_min_int, Label::kNear);
1456 __ cmp(divisor, -1); 1456 __ cmp(divisor, -1);
1457 DeoptimizeIf(zero, instr, Deoptimizer::kOverflow); 1457 DeoptimizeIf(zero, instr, "overflow");
1458 __ bind(&dividend_not_min_int); 1458 __ bind(&dividend_not_min_int);
1459 } 1459 }
1460 1460
1461 // Sign extend to edx (= remainder). 1461 // Sign extend to edx (= remainder).
1462 __ cdq(); 1462 __ cdq();
1463 __ idiv(divisor); 1463 __ idiv(divisor);
1464 1464
1465 Label done; 1465 Label done;
1466 __ test(remainder, remainder); 1466 __ test(remainder, remainder);
1467 __ j(zero, &done, Label::kNear); 1467 __ j(zero, &done, Label::kNear);
(...skipping 57 matching lines...)
1525 __ imul(left, left, constant); 1525 __ imul(left, left, constant);
1526 } 1526 }
1527 } else { 1527 } else {
1528 if (instr->hydrogen()->representation().IsSmi()) { 1528 if (instr->hydrogen()->representation().IsSmi()) {
1529 __ SmiUntag(left); 1529 __ SmiUntag(left);
1530 } 1530 }
1531 __ imul(left, ToOperand(right)); 1531 __ imul(left, ToOperand(right));
1532 } 1532 }
1533 1533
1534 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) { 1534 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
1535 DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow); 1535 DeoptimizeIf(overflow, instr, "overflow");
1536 } 1536 }
1537 1537
1538 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { 1538 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
1539 // Bail out if the result is supposed to be negative zero. 1539 // Bail out if the result is supposed to be negative zero.
1540 Label done; 1540 Label done;
1541 __ test(left, Operand(left)); 1541 __ test(left, Operand(left));
1542 __ j(not_zero, &done, Label::kNear); 1542 __ j(not_zero, &done, Label::kNear);
1543 if (right->IsConstantOperand()) { 1543 if (right->IsConstantOperand()) {
1544 if (ToInteger32(LConstantOperand::cast(right)) < 0) { 1544 if (ToInteger32(LConstantOperand::cast(right)) < 0) {
1545 DeoptimizeIf(no_condition, instr, Deoptimizer::kMinusZero); 1545 DeoptimizeIf(no_condition, instr, "minus zero");
1546 } else if (ToInteger32(LConstantOperand::cast(right)) == 0) { 1546 } else if (ToInteger32(LConstantOperand::cast(right)) == 0) {
1547 __ cmp(ToRegister(instr->temp()), Immediate(0)); 1547 __ cmp(ToRegister(instr->temp()), Immediate(0));
1548 DeoptimizeIf(less, instr, Deoptimizer::kMinusZero); 1548 DeoptimizeIf(less, instr, "minus zero");
1549 } 1549 }
1550 } else { 1550 } else {
1551 // Test the non-zero operand for negative sign. 1551 // Test the non-zero operand for negative sign.
1552 __ or_(ToRegister(instr->temp()), ToOperand(right)); 1552 __ or_(ToRegister(instr->temp()), ToOperand(right));
1553 DeoptimizeIf(sign, instr, Deoptimizer::kMinusZero); 1553 DeoptimizeIf(sign, instr, "minus zero");
1554 } 1554 }
1555 __ bind(&done); 1555 __ bind(&done);
1556 } 1556 }
1557 } 1557 }
1558 1558
1559 1559
1560 void LCodeGen::DoBitI(LBitI* instr) { 1560 void LCodeGen::DoBitI(LBitI* instr) {
1561 LOperand* left = instr->left(); 1561 LOperand* left = instr->left();
1562 LOperand* right = instr->right(); 1562 LOperand* right = instr->right();
1563 DCHECK(left->Equals(instr->result())); 1563 DCHECK(left->Equals(instr->result()));
(...skipping 52 matching lines...)
1616 case Token::ROR: 1616 case Token::ROR:
1617 __ ror_cl(ToRegister(left)); 1617 __ ror_cl(ToRegister(left));
1618 break; 1618 break;
1619 case Token::SAR: 1619 case Token::SAR:
1620 __ sar_cl(ToRegister(left)); 1620 __ sar_cl(ToRegister(left));
1621 break; 1621 break;
1622 case Token::SHR: 1622 case Token::SHR:
1623 __ shr_cl(ToRegister(left)); 1623 __ shr_cl(ToRegister(left));
1624 if (instr->can_deopt()) { 1624 if (instr->can_deopt()) {
1625 __ test(ToRegister(left), ToRegister(left)); 1625 __ test(ToRegister(left), ToRegister(left));
1626 DeoptimizeIf(sign, instr, Deoptimizer::kNegativeValue); 1626 DeoptimizeIf(sign, instr, "negative value");
1627 } 1627 }
1628 break; 1628 break;
1629 case Token::SHL: 1629 case Token::SHL:
1630 __ shl_cl(ToRegister(left)); 1630 __ shl_cl(ToRegister(left));
1631 break; 1631 break;
1632 default: 1632 default:
1633 UNREACHABLE(); 1633 UNREACHABLE();
1634 break; 1634 break;
1635 } 1635 }
1636 } else { 1636 } else {
1637 int value = ToInteger32(LConstantOperand::cast(right)); 1637 int value = ToInteger32(LConstantOperand::cast(right));
1638 uint8_t shift_count = static_cast<uint8_t>(value & 0x1F); 1638 uint8_t shift_count = static_cast<uint8_t>(value & 0x1F);
1639 switch (instr->op()) { 1639 switch (instr->op()) {
1640 case Token::ROR: 1640 case Token::ROR:
1641 if (shift_count == 0 && instr->can_deopt()) { 1641 if (shift_count == 0 && instr->can_deopt()) {
1642 __ test(ToRegister(left), ToRegister(left)); 1642 __ test(ToRegister(left), ToRegister(left));
1643 DeoptimizeIf(sign, instr, Deoptimizer::kNegativeValue); 1643 DeoptimizeIf(sign, instr, "negative value");
1644 } else { 1644 } else {
1645 __ ror(ToRegister(left), shift_count); 1645 __ ror(ToRegister(left), shift_count);
1646 } 1646 }
1647 break; 1647 break;
1648 case Token::SAR: 1648 case Token::SAR:
1649 if (shift_count != 0) { 1649 if (shift_count != 0) {
1650 __ sar(ToRegister(left), shift_count); 1650 __ sar(ToRegister(left), shift_count);
1651 } 1651 }
1652 break; 1652 break;
1653 case Token::SHR: 1653 case Token::SHR:
1654 if (shift_count != 0) { 1654 if (shift_count != 0) {
1655 __ shr(ToRegister(left), shift_count); 1655 __ shr(ToRegister(left), shift_count);
1656 } else if (instr->can_deopt()) { 1656 } else if (instr->can_deopt()) {
1657 __ test(ToRegister(left), ToRegister(left)); 1657 __ test(ToRegister(left), ToRegister(left));
1658 DeoptimizeIf(sign, instr, Deoptimizer::kNegativeValue); 1658 DeoptimizeIf(sign, instr, "negative value");
1659 } 1659 }
1660 break; 1660 break;
1661 case Token::SHL: 1661 case Token::SHL:
1662 if (shift_count != 0) { 1662 if (shift_count != 0) {
1663 if (instr->hydrogen_value()->representation().IsSmi() && 1663 if (instr->hydrogen_value()->representation().IsSmi() &&
1664 instr->can_deopt()) { 1664 instr->can_deopt()) {
1665 if (shift_count != 1) { 1665 if (shift_count != 1) {
1666 __ shl(ToRegister(left), shift_count - 1); 1666 __ shl(ToRegister(left), shift_count - 1);
1667 } 1667 }
1668 __ SmiTag(ToRegister(left)); 1668 __ SmiTag(ToRegister(left));
1669 DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow); 1669 DeoptimizeIf(overflow, instr, "overflow");
1670 } else { 1670 } else {
1671 __ shl(ToRegister(left), shift_count); 1671 __ shl(ToRegister(left), shift_count);
1672 } 1672 }
1673 } 1673 }
1674 break; 1674 break;
1675 default: 1675 default:
1676 UNREACHABLE(); 1676 UNREACHABLE();
1677 break; 1677 break;
1678 } 1678 }
1679 } 1679 }
1680 } 1680 }
1681 1681
1682 1682
1683 void LCodeGen::DoSubI(LSubI* instr) { 1683 void LCodeGen::DoSubI(LSubI* instr) {
1684 LOperand* left = instr->left(); 1684 LOperand* left = instr->left();
1685 LOperand* right = instr->right(); 1685 LOperand* right = instr->right();
1686 DCHECK(left->Equals(instr->result())); 1686 DCHECK(left->Equals(instr->result()));
1687 1687
1688 if (right->IsConstantOperand()) { 1688 if (right->IsConstantOperand()) {
1689 __ sub(ToOperand(left), 1689 __ sub(ToOperand(left),
1690 ToImmediate(right, instr->hydrogen()->representation())); 1690 ToImmediate(right, instr->hydrogen()->representation()));
1691 } else { 1691 } else {
1692 __ sub(ToRegister(left), ToOperand(right)); 1692 __ sub(ToRegister(left), ToOperand(right));
1693 } 1693 }
1694 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) { 1694 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
1695 DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow); 1695 DeoptimizeIf(overflow, instr, "overflow");
1696 } 1696 }
1697 } 1697 }
1698 1698
1699 1699
1700 void LCodeGen::DoConstantI(LConstantI* instr) { 1700 void LCodeGen::DoConstantI(LConstantI* instr) {
1701 __ Move(ToRegister(instr->result()), Immediate(instr->value())); 1701 __ Move(ToRegister(instr->result()), Immediate(instr->value()));
1702 } 1702 }
1703 1703
1704 1704
1705 void LCodeGen::DoConstantS(LConstantS* instr) { 1705 void LCodeGen::DoConstantS(LConstantS* instr) {
(...skipping 62 matching lines...)
1768 void LCodeGen::DoDateField(LDateField* instr) { 1768 void LCodeGen::DoDateField(LDateField* instr) {
1769 Register object = ToRegister(instr->date()); 1769 Register object = ToRegister(instr->date());
1770 Register result = ToRegister(instr->result()); 1770 Register result = ToRegister(instr->result());
1771 Register scratch = ToRegister(instr->temp()); 1771 Register scratch = ToRegister(instr->temp());
1772 Smi* index = instr->index(); 1772 Smi* index = instr->index();
1773 Label runtime, done; 1773 Label runtime, done;
1774 DCHECK(object.is(result)); 1774 DCHECK(object.is(result));
1775 DCHECK(object.is(eax)); 1775 DCHECK(object.is(eax));
1776 1776
1777 __ test(object, Immediate(kSmiTagMask)); 1777 __ test(object, Immediate(kSmiTagMask));
1778 DeoptimizeIf(zero, instr, Deoptimizer::kSmi); 1778 DeoptimizeIf(zero, instr, "Smi");
1779 __ CmpObjectType(object, JS_DATE_TYPE, scratch); 1779 __ CmpObjectType(object, JS_DATE_TYPE, scratch);
1780 DeoptimizeIf(not_equal, instr, Deoptimizer::kNotADateObject); 1780 DeoptimizeIf(not_equal, instr, "not a date object");
1781 1781
1782 if (index->value() == 0) { 1782 if (index->value() == 0) {
1783 __ mov(result, FieldOperand(object, JSDate::kValueOffset)); 1783 __ mov(result, FieldOperand(object, JSDate::kValueOffset));
1784 } else { 1784 } else {
1785 if (index->value() < JSDate::kFirstUncachedField) { 1785 if (index->value() < JSDate::kFirstUncachedField) {
1786 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate()); 1786 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
1787 __ mov(scratch, Operand::StaticVariable(stamp)); 1787 __ mov(scratch, Operand::StaticVariable(stamp));
1788 __ cmp(scratch, FieldOperand(object, JSDate::kCacheStampOffset)); 1788 __ cmp(scratch, FieldOperand(object, JSDate::kCacheStampOffset));
1789 __ j(not_equal, &runtime, Label::kNear); 1789 __ j(not_equal, &runtime, Label::kNear);
1790 __ mov(result, FieldOperand(object, JSDate::kValueOffset + 1790 __ mov(result, FieldOperand(object, JSDate::kValueOffset +
(...skipping 109 matching lines...)
1900 __ lea(ToRegister(instr->result()), address); 1900 __ lea(ToRegister(instr->result()), address);
1901 } 1901 }
1902 } else { 1902 } else {
1903 if (right->IsConstantOperand()) { 1903 if (right->IsConstantOperand()) {
1904 __ add(ToOperand(left), 1904 __ add(ToOperand(left),
1905 ToImmediate(right, instr->hydrogen()->representation())); 1905 ToImmediate(right, instr->hydrogen()->representation()));
1906 } else { 1906 } else {
1907 __ add(ToRegister(left), ToOperand(right)); 1907 __ add(ToRegister(left), ToOperand(right));
1908 } 1908 }
1909 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) { 1909 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
1910 DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow); 1910 DeoptimizeIf(overflow, instr, "overflow");
1911 } 1911 }
1912 } 1912 }
1913 } 1913 }
1914 1914
1915 1915
1916 void LCodeGen::DoMathMinMax(LMathMinMax* instr) { 1916 void LCodeGen::DoMathMinMax(LMathMinMax* instr) {
1917 LOperand* left = instr->left(); 1917 LOperand* left = instr->left();
1918 LOperand* right = instr->right(); 1918 LOperand* right = instr->right();
1919 DCHECK(left->Equals(instr->result())); 1919 DCHECK(left->Equals(instr->result()));
1920 HMathMinMax::Operation operation = instr->hydrogen()->operation(); 1920 HMathMinMax::Operation operation = instr->hydrogen()->operation();
(...skipping 227 matching lines...)
2148 } 2148 }
2149 2149
2150 if (expected.Contains(ToBooleanStub::SMI)) { 2150 if (expected.Contains(ToBooleanStub::SMI)) {
2151 // Smis: 0 -> false, all other -> true. 2151 // Smis: 0 -> false, all other -> true.
2152 __ test(reg, Operand(reg)); 2152 __ test(reg, Operand(reg));
2153 __ j(equal, instr->FalseLabel(chunk_)); 2153 __ j(equal, instr->FalseLabel(chunk_));
2154 __ JumpIfSmi(reg, instr->TrueLabel(chunk_)); 2154 __ JumpIfSmi(reg, instr->TrueLabel(chunk_));
2155 } else if (expected.NeedsMap()) { 2155 } else if (expected.NeedsMap()) {
2156 // If we need a map later and have a Smi -> deopt. 2156 // If we need a map later and have a Smi -> deopt.
2157 __ test(reg, Immediate(kSmiTagMask)); 2157 __ test(reg, Immediate(kSmiTagMask));
2158 DeoptimizeIf(zero, instr, Deoptimizer::kSmi); 2158 DeoptimizeIf(zero, instr, "Smi");
2159 } 2159 }
2160 2160
2161 Register map = no_reg; // Keep the compiler happy. 2161 Register map = no_reg; // Keep the compiler happy.
2162 if (expected.NeedsMap()) { 2162 if (expected.NeedsMap()) {
2163 map = ToRegister(instr->temp()); 2163 map = ToRegister(instr->temp());
2164 DCHECK(!map.is(reg)); 2164 DCHECK(!map.is(reg));
2165 __ mov(map, FieldOperand(reg, HeapObject::kMapOffset)); 2165 __ mov(map, FieldOperand(reg, HeapObject::kMapOffset));
2166 2166
2167 if (expected.CanBeUndetectable()) { 2167 if (expected.CanBeUndetectable()) {
2168 // Undetectable -> false. 2168 // Undetectable -> false.
(...skipping 36 matching lines...)
2205 __ xorps(xmm_scratch, xmm_scratch); 2205 __ xorps(xmm_scratch, xmm_scratch);
2206 __ ucomisd(xmm_scratch, FieldOperand(reg, HeapNumber::kValueOffset)); 2206 __ ucomisd(xmm_scratch, FieldOperand(reg, HeapNumber::kValueOffset));
2207 __ j(zero, instr->FalseLabel(chunk_)); 2207 __ j(zero, instr->FalseLabel(chunk_));
2208 __ jmp(instr->TrueLabel(chunk_)); 2208 __ jmp(instr->TrueLabel(chunk_));
2209 __ bind(&not_heap_number); 2209 __ bind(&not_heap_number);
2210 } 2210 }
2211 2211
2212 if (!expected.IsGeneric()) { 2212 if (!expected.IsGeneric()) {
2213 // We've seen something for the first time -> deopt. 2213 // We've seen something for the first time -> deopt.
2214 // This can only happen if we are not generic already. 2214 // This can only happen if we are not generic already.
2215 DeoptimizeIf(no_condition, instr, Deoptimizer::kUnexpectedObject); 2215 DeoptimizeIf(no_condition, instr, "unexpected object");
2216 } 2216 }
2217 } 2217 }
2218 } 2218 }
2219 } 2219 }
2220 2220
2221 2221
2222 void LCodeGen::EmitGoto(int block) { 2222 void LCodeGen::EmitGoto(int block) {
2223 if (!IsNextEmittedBlock(block)) { 2223 if (!IsNextEmittedBlock(block)) {
2224 __ jmp(chunk_->GetAssemblyLabel(LookupDestination(block))); 2224 __ jmp(chunk_->GetAssemblyLabel(LookupDestination(block)));
2225 } 2225 }
(...skipping 611 matching lines...)
2837 info()->AddNoFrameRange(no_frame_start, masm_->pc_offset()); 2837 info()->AddNoFrameRange(no_frame_start, masm_->pc_offset());
2838 } 2838 }
2839 } 2839 }
2840 2840
2841 2841
2842 void LCodeGen::DoLoadGlobalCell(LLoadGlobalCell* instr) { 2842 void LCodeGen::DoLoadGlobalCell(LLoadGlobalCell* instr) {
2843 Register result = ToRegister(instr->result()); 2843 Register result = ToRegister(instr->result());
2844 __ mov(result, Operand::ForCell(instr->hydrogen()->cell().handle())); 2844 __ mov(result, Operand::ForCell(instr->hydrogen()->cell().handle()));
2845 if (instr->hydrogen()->RequiresHoleCheck()) { 2845 if (instr->hydrogen()->RequiresHoleCheck()) {
2846 __ cmp(result, factory()->the_hole_value()); 2846 __ cmp(result, factory()->the_hole_value());
2847 DeoptimizeIf(equal, instr, Deoptimizer::kHole); 2847 DeoptimizeIf(equal, instr, "hole");
2848 } 2848 }
2849 } 2849 }
2850 2850
2851 2851
2852 template <class T> 2852 template <class T>
2853 void LCodeGen::EmitVectorLoadICRegisters(T* instr) { 2853 void LCodeGen::EmitVectorLoadICRegisters(T* instr) {
2854 DCHECK(FLAG_vector_ics); 2854 DCHECK(FLAG_vector_ics);
2855 Register vector_register = ToRegister(instr->temp_vector()); 2855 Register vector_register = ToRegister(instr->temp_vector());
2856 Register slot_register = VectorLoadICDescriptor::SlotRegister(); 2856 Register slot_register = VectorLoadICDescriptor::SlotRegister();
2857 DCHECK(vector_register.is(VectorLoadICDescriptor::VectorRegister())); 2857 DCHECK(vector_register.is(VectorLoadICDescriptor::VectorRegister()));
(...skipping 28 matching lines...)
2886 void LCodeGen::DoStoreGlobalCell(LStoreGlobalCell* instr) { 2886 void LCodeGen::DoStoreGlobalCell(LStoreGlobalCell* instr) {
2887 Register value = ToRegister(instr->value()); 2887 Register value = ToRegister(instr->value());
2888 Handle<PropertyCell> cell_handle = instr->hydrogen()->cell().handle(); 2888 Handle<PropertyCell> cell_handle = instr->hydrogen()->cell().handle();
2889 2889
2890 // If the cell we are storing to contains the hole it could have 2890 // If the cell we are storing to contains the hole it could have
2891 // been deleted from the property dictionary. In that case, we need 2891 // been deleted from the property dictionary. In that case, we need
2892 // to update the property details in the property dictionary to mark 2892 // to update the property details in the property dictionary to mark
2893 // it as no longer deleted. We deoptimize in that case. 2893 // it as no longer deleted. We deoptimize in that case.
2894 if (instr->hydrogen()->RequiresHoleCheck()) { 2894 if (instr->hydrogen()->RequiresHoleCheck()) {
2895 __ cmp(Operand::ForCell(cell_handle), factory()->the_hole_value()); 2895 __ cmp(Operand::ForCell(cell_handle), factory()->the_hole_value());
2896 DeoptimizeIf(equal, instr, Deoptimizer::kHole); 2896 DeoptimizeIf(equal, instr, "hole");
2897 } 2897 }
2898 2898
2899 // Store the value. 2899 // Store the value.
2900 __ mov(Operand::ForCell(cell_handle), value); 2900 __ mov(Operand::ForCell(cell_handle), value);
2901 // Cells are always rescanned, so no write barrier here. 2901 // Cells are always rescanned, so no write barrier here.
2902 } 2902 }
2903 2903
2904 2904
2905 void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) { 2905 void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) {
2906 Register context = ToRegister(instr->context()); 2906 Register context = ToRegister(instr->context());
2907 Register result = ToRegister(instr->result()); 2907 Register result = ToRegister(instr->result());
2908 __ mov(result, ContextOperand(context, instr->slot_index())); 2908 __ mov(result, ContextOperand(context, instr->slot_index()));
2909 2909
2910 if (instr->hydrogen()->RequiresHoleCheck()) { 2910 if (instr->hydrogen()->RequiresHoleCheck()) {
2911 __ cmp(result, factory()->the_hole_value()); 2911 __ cmp(result, factory()->the_hole_value());
2912 if (instr->hydrogen()->DeoptimizesOnHole()) { 2912 if (instr->hydrogen()->DeoptimizesOnHole()) {
2913 DeoptimizeIf(equal, instr, Deoptimizer::kHole); 2913 DeoptimizeIf(equal, instr, "hole");
2914 } else { 2914 } else {
2915 Label is_not_hole; 2915 Label is_not_hole;
2916 __ j(not_equal, &is_not_hole, Label::kNear); 2916 __ j(not_equal, &is_not_hole, Label::kNear);
2917 __ mov(result, factory()->undefined_value()); 2917 __ mov(result, factory()->undefined_value());
2918 __ bind(&is_not_hole); 2918 __ bind(&is_not_hole);
2919 } 2919 }
2920 } 2920 }
2921 } 2921 }
2922 2922
2923 2923
2924 void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) { 2924 void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) {
2925 Register context = ToRegister(instr->context()); 2925 Register context = ToRegister(instr->context());
2926 Register value = ToRegister(instr->value()); 2926 Register value = ToRegister(instr->value());
2927 2927
2928 Label skip_assignment; 2928 Label skip_assignment;
2929 2929
2930 Operand target = ContextOperand(context, instr->slot_index()); 2930 Operand target = ContextOperand(context, instr->slot_index());
2931 if (instr->hydrogen()->RequiresHoleCheck()) { 2931 if (instr->hydrogen()->RequiresHoleCheck()) {
2932 __ cmp(target, factory()->the_hole_value()); 2932 __ cmp(target, factory()->the_hole_value());
2933 if (instr->hydrogen()->DeoptimizesOnHole()) { 2933 if (instr->hydrogen()->DeoptimizesOnHole()) {
2934 DeoptimizeIf(equal, instr, Deoptimizer::kHole); 2934 DeoptimizeIf(equal, instr, "hole");
2935 } else { 2935 } else {
2936 __ j(not_equal, &skip_assignment, Label::kNear); 2936 __ j(not_equal, &skip_assignment, Label::kNear);
2937 } 2937 }
2938 } 2938 }
2939 2939
2940 __ mov(target, value); 2940 __ mov(target, value);
2941 if (instr->hydrogen()->NeedsWriteBarrier()) { 2941 if (instr->hydrogen()->NeedsWriteBarrier()) {
2942 SmiCheck check_needed = 2942 SmiCheck check_needed =
2943 instr->hydrogen()->value()->type().IsHeapObject() 2943 instr->hydrogen()->value()->type().IsHeapObject()
2944 ? OMIT_SMI_CHECK : INLINE_SMI_CHECK; 2944 ? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
(...skipping 79 matching lines...)
3024 Register function = ToRegister(instr->function()); 3024 Register function = ToRegister(instr->function());
3025 Register temp = ToRegister(instr->temp()); 3025 Register temp = ToRegister(instr->temp());
3026 Register result = ToRegister(instr->result()); 3026 Register result = ToRegister(instr->result());
3027 3027
3028 // Get the prototype or initial map from the function. 3028 // Get the prototype or initial map from the function.
3029 __ mov(result, 3029 __ mov(result,
3030 FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset)); 3030 FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
3031 3031
3032 // Check that the function has a prototype or an initial map. 3032 // Check that the function has a prototype or an initial map.
3033 __ cmp(Operand(result), Immediate(factory()->the_hole_value())); 3033 __ cmp(Operand(result), Immediate(factory()->the_hole_value()));
3034 DeoptimizeIf(equal, instr, Deoptimizer::kHole); 3034 DeoptimizeIf(equal, instr, "hole");
3035 3035
3036 // If the function does not have an initial map, we're done. 3036 // If the function does not have an initial map, we're done.
3037 Label done; 3037 Label done;
3038 __ CmpObjectType(result, MAP_TYPE, temp); 3038 __ CmpObjectType(result, MAP_TYPE, temp);
3039 __ j(not_equal, &done, Label::kNear); 3039 __ j(not_equal, &done, Label::kNear);
3040 3040
3041 // Get the prototype from the initial map. 3041 // Get the prototype from the initial map.
3042 __ mov(result, FieldOperand(result, Map::kPrototypeOffset)); 3042 __ mov(result, FieldOperand(result, Map::kPrototypeOffset));
3043 3043
3044 // All done. 3044 // All done.
(...skipping 72 matching lines...)
3117 break; 3117 break;
3118 case EXTERNAL_INT32_ELEMENTS: 3118 case EXTERNAL_INT32_ELEMENTS:
3119 case INT32_ELEMENTS: 3119 case INT32_ELEMENTS:
3120 __ mov(result, operand); 3120 __ mov(result, operand);
3121 break; 3121 break;
3122 case EXTERNAL_UINT32_ELEMENTS: 3122 case EXTERNAL_UINT32_ELEMENTS:
3123 case UINT32_ELEMENTS: 3123 case UINT32_ELEMENTS:
3124 __ mov(result, operand); 3124 __ mov(result, operand);
3125 if (!instr->hydrogen()->CheckFlag(HInstruction::kUint32)) { 3125 if (!instr->hydrogen()->CheckFlag(HInstruction::kUint32)) {
3126 __ test(result, Operand(result)); 3126 __ test(result, Operand(result));
3127 DeoptimizeIf(negative, instr, Deoptimizer::kNegativeValue); 3127 DeoptimizeIf(negative, instr, "negative value");
3128 } 3128 }
3129 break; 3129 break;
3130 case EXTERNAL_FLOAT32_ELEMENTS: 3130 case EXTERNAL_FLOAT32_ELEMENTS:
3131 case EXTERNAL_FLOAT64_ELEMENTS: 3131 case EXTERNAL_FLOAT64_ELEMENTS:
3132 case FLOAT32_ELEMENTS: 3132 case FLOAT32_ELEMENTS:
3133 case FLOAT64_ELEMENTS: 3133 case FLOAT64_ELEMENTS:
3134 case FAST_SMI_ELEMENTS: 3134 case FAST_SMI_ELEMENTS:
3135 case FAST_ELEMENTS: 3135 case FAST_ELEMENTS:
3136 case FAST_DOUBLE_ELEMENTS: 3136 case FAST_DOUBLE_ELEMENTS:
3137 case FAST_HOLEY_SMI_ELEMENTS: 3137 case FAST_HOLEY_SMI_ELEMENTS:
3138 case FAST_HOLEY_ELEMENTS: 3138 case FAST_HOLEY_ELEMENTS:
3139 case FAST_HOLEY_DOUBLE_ELEMENTS: 3139 case FAST_HOLEY_DOUBLE_ELEMENTS:
3140 case DICTIONARY_ELEMENTS: 3140 case DICTIONARY_ELEMENTS:
3141 case SLOPPY_ARGUMENTS_ELEMENTS: 3141 case SLOPPY_ARGUMENTS_ELEMENTS:
3142 UNREACHABLE(); 3142 UNREACHABLE();
3143 break; 3143 break;
3144 } 3144 }
3145 } 3145 }
3146 } 3146 }
3147 3147
3148 3148
3149 void LCodeGen::DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr) { 3149 void LCodeGen::DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr) {
3150 if (instr->hydrogen()->RequiresHoleCheck()) { 3150 if (instr->hydrogen()->RequiresHoleCheck()) {
3151 Operand hole_check_operand = BuildFastArrayOperand( 3151 Operand hole_check_operand = BuildFastArrayOperand(
3152 instr->elements(), instr->key(), 3152 instr->elements(), instr->key(),
3153 instr->hydrogen()->key()->representation(), 3153 instr->hydrogen()->key()->representation(),
3154 FAST_DOUBLE_ELEMENTS, 3154 FAST_DOUBLE_ELEMENTS,
3155 instr->base_offset() + sizeof(kHoleNanLower32)); 3155 instr->base_offset() + sizeof(kHoleNanLower32));
3156 __ cmp(hole_check_operand, Immediate(kHoleNanUpper32)); 3156 __ cmp(hole_check_operand, Immediate(kHoleNanUpper32));
3157 DeoptimizeIf(equal, instr, Deoptimizer::kHole); 3157 DeoptimizeIf(equal, instr, "hole");
3158 } 3158 }
3159 3159
3160 Operand double_load_operand = BuildFastArrayOperand( 3160 Operand double_load_operand = BuildFastArrayOperand(
3161 instr->elements(), 3161 instr->elements(),
3162 instr->key(), 3162 instr->key(),
3163 instr->hydrogen()->key()->representation(), 3163 instr->hydrogen()->key()->representation(),
3164 FAST_DOUBLE_ELEMENTS, 3164 FAST_DOUBLE_ELEMENTS,
3165 instr->base_offset()); 3165 instr->base_offset());
3166 XMMRegister result = ToDoubleRegister(instr->result()); 3166 XMMRegister result = ToDoubleRegister(instr->result());
3167 __ movsd(result, double_load_operand); 3167 __ movsd(result, double_load_operand);
3168 } 3168 }
3169 3169
3170 3170
3171 void LCodeGen::DoLoadKeyedFixedArray(LLoadKeyed* instr) { 3171 void LCodeGen::DoLoadKeyedFixedArray(LLoadKeyed* instr) {
3172 Register result = ToRegister(instr->result()); 3172 Register result = ToRegister(instr->result());
3173 3173
3174 // Load the result. 3174 // Load the result.
3175 __ mov(result, 3175 __ mov(result,
3176 BuildFastArrayOperand(instr->elements(), instr->key(), 3176 BuildFastArrayOperand(instr->elements(), instr->key(),
3177 instr->hydrogen()->key()->representation(), 3177 instr->hydrogen()->key()->representation(),
3178 FAST_ELEMENTS, instr->base_offset())); 3178 FAST_ELEMENTS, instr->base_offset()));
3179 3179
3180 // Check for the hole value. 3180 // Check for the hole value.
3181 if (instr->hydrogen()->RequiresHoleCheck()) { 3181 if (instr->hydrogen()->RequiresHoleCheck()) {
3182 if (IsFastSmiElementsKind(instr->hydrogen()->elements_kind())) { 3182 if (IsFastSmiElementsKind(instr->hydrogen()->elements_kind())) {
3183 __ test(result, Immediate(kSmiTagMask)); 3183 __ test(result, Immediate(kSmiTagMask));
3184 DeoptimizeIf(not_equal, instr, Deoptimizer::kNotASmi); 3184 DeoptimizeIf(not_equal, instr, "not a Smi");
3185 } else { 3185 } else {
3186 __ cmp(result, factory()->the_hole_value()); 3186 __ cmp(result, factory()->the_hole_value());
3187 DeoptimizeIf(equal, instr, Deoptimizer::kHole); 3187 DeoptimizeIf(equal, instr, "hole");
3188 } 3188 }
3189 } 3189 }
3190 } 3190 }
3191 3191
3192 3192
3193 void LCodeGen::DoLoadKeyed(LLoadKeyed* instr) { 3193 void LCodeGen::DoLoadKeyed(LLoadKeyed* instr) {
3194 if (instr->is_typed_elements()) { 3194 if (instr->is_typed_elements()) {
3195 DoLoadKeyedExternalArray(instr); 3195 DoLoadKeyedExternalArray(instr);
3196 } else if (instr->hydrogen()->representation().IsDouble()) { 3196 } else if (instr->hydrogen()->representation().IsDouble()) {
3197 DoLoadKeyedFixedDoubleArray(instr); 3197 DoLoadKeyedFixedDoubleArray(instr);
(...skipping 126 matching lines...)
3324 } 3324 }
3325 3325
3326 // Normal function. Replace undefined or null with global receiver. 3326 // Normal function. Replace undefined or null with global receiver.
3327 __ cmp(receiver, factory()->null_value()); 3327 __ cmp(receiver, factory()->null_value());
3328 __ j(equal, &global_object, Label::kNear); 3328 __ j(equal, &global_object, Label::kNear);
3329 __ cmp(receiver, factory()->undefined_value()); 3329 __ cmp(receiver, factory()->undefined_value());
3330 __ j(equal, &global_object, Label::kNear); 3330 __ j(equal, &global_object, Label::kNear);
3331 3331
3332 // The receiver should be a JS object. 3332 // The receiver should be a JS object.
3333 __ test(receiver, Immediate(kSmiTagMask)); 3333 __ test(receiver, Immediate(kSmiTagMask));
3334 DeoptimizeIf(equal, instr, Deoptimizer::kSmi); 3334 DeoptimizeIf(equal, instr, "Smi");
3335 __ CmpObjectType(receiver, FIRST_SPEC_OBJECT_TYPE, scratch); 3335 __ CmpObjectType(receiver, FIRST_SPEC_OBJECT_TYPE, scratch);
3336 DeoptimizeIf(below, instr, Deoptimizer::kNotAJavaScriptObject); 3336 DeoptimizeIf(below, instr, "not a JavaScript object");
3337 3337
3338 __ jmp(&receiver_ok, Label::kNear); 3338 __ jmp(&receiver_ok, Label::kNear);
3339 __ bind(&global_object); 3339 __ bind(&global_object);
3340 __ mov(receiver, FieldOperand(function, JSFunction::kContextOffset)); 3340 __ mov(receiver, FieldOperand(function, JSFunction::kContextOffset));
3341 const int global_offset = Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX); 3341 const int global_offset = Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX);
3342 __ mov(receiver, Operand(receiver, global_offset)); 3342 __ mov(receiver, Operand(receiver, global_offset));
3343 const int proxy_offset = GlobalObject::kGlobalProxyOffset; 3343 const int proxy_offset = GlobalObject::kGlobalProxyOffset;
3344 __ mov(receiver, FieldOperand(receiver, proxy_offset)); 3344 __ mov(receiver, FieldOperand(receiver, proxy_offset));
3345 __ bind(&receiver_ok); 3345 __ bind(&receiver_ok);
3346 } 3346 }
3347 3347
3348 3348
3349 void LCodeGen::DoApplyArguments(LApplyArguments* instr) { 3349 void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
3350 Register receiver = ToRegister(instr->receiver()); 3350 Register receiver = ToRegister(instr->receiver());
3351 Register function = ToRegister(instr->function()); 3351 Register function = ToRegister(instr->function());
3352 Register length = ToRegister(instr->length()); 3352 Register length = ToRegister(instr->length());
3353 Register elements = ToRegister(instr->elements()); 3353 Register elements = ToRegister(instr->elements());
3354 DCHECK(receiver.is(eax)); // Used for parameter count. 3354 DCHECK(receiver.is(eax)); // Used for parameter count.
3355 DCHECK(function.is(edi)); // Required by InvokeFunction. 3355 DCHECK(function.is(edi)); // Required by InvokeFunction.
3356 DCHECK(ToRegister(instr->result()).is(eax)); 3356 DCHECK(ToRegister(instr->result()).is(eax));
3357 3357
3358 // Copy the arguments to this function possibly from the 3358 // Copy the arguments to this function possibly from the
3359 // adaptor frame below it. 3359 // adaptor frame below it.
3360 const uint32_t kArgumentsLimit = 1 * KB; 3360 const uint32_t kArgumentsLimit = 1 * KB;
3361 __ cmp(length, kArgumentsLimit); 3361 __ cmp(length, kArgumentsLimit);
3362 DeoptimizeIf(above, instr, Deoptimizer::kTooManyArguments); 3362 DeoptimizeIf(above, instr, "too many arguments");
3363 3363
3364 __ push(receiver); 3364 __ push(receiver);
3365 __ mov(receiver, length); 3365 __ mov(receiver, length);
3366 3366
3367 // Loop through the arguments pushing them onto the execution 3367 // Loop through the arguments pushing them onto the execution
3368 // stack. 3368 // stack.
3369 Label invoke, loop; 3369 Label invoke, loop;
3370 // length is a small non-negative integer, due to the test above. 3370 // length is a small non-negative integer, due to the test above.
3371 __ test(length, Operand(length)); 3371 __ test(length, Operand(length));
3372 __ j(zero, &invoke, Label::kNear); 3372 __ j(zero, &invoke, Label::kNear);
(...skipping 204 matching lines...)
3577 } 3577 }
3578 3578
3579 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT); 3579 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT);
3580 } 3580 }
3581 3581
3582 3582
3583 void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr) { 3583 void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr) {
3584 Register input_reg = ToRegister(instr->value()); 3584 Register input_reg = ToRegister(instr->value());
3585 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset), 3585 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
3586 factory()->heap_number_map()); 3586 factory()->heap_number_map());
3587 DeoptimizeIf(not_equal, instr, Deoptimizer::kNotAHeapNumber); 3587 DeoptimizeIf(not_equal, instr, "not a heap number");
3588 3588
3589 Label slow, allocated, done; 3589 Label slow, allocated, done;
3590 Register tmp = input_reg.is(eax) ? ecx : eax; 3590 Register tmp = input_reg.is(eax) ? ecx : eax;
3591 Register tmp2 = tmp.is(ecx) ? edx : input_reg.is(ecx) ? edx : ecx; 3591 Register tmp2 = tmp.is(ecx) ? edx : input_reg.is(ecx) ? edx : ecx;
3592 3592
3593 // Preserve the value of all registers. 3593 // Preserve the value of all registers.
3594 PushSafepointRegistersScope scope(this); 3594 PushSafepointRegistersScope scope(this);
3595 3595
3596 __ mov(tmp, FieldOperand(input_reg, HeapNumber::kExponentOffset)); 3596 __ mov(tmp, FieldOperand(input_reg, HeapNumber::kExponentOffset));
3597 // Check the sign of the argument. If the argument is positive, just 3597 // Check the sign of the argument. If the argument is positive, just
(...skipping 26 matching lines...)
3624 __ bind(&done); 3624 __ bind(&done);
3625 } 3625 }
3626 3626
3627 3627
3628 void LCodeGen::EmitIntegerMathAbs(LMathAbs* instr) { 3628 void LCodeGen::EmitIntegerMathAbs(LMathAbs* instr) {
3629 Register input_reg = ToRegister(instr->value()); 3629 Register input_reg = ToRegister(instr->value());
3630 __ test(input_reg, Operand(input_reg)); 3630 __ test(input_reg, Operand(input_reg));
3631 Label is_positive; 3631 Label is_positive;
3632 __ j(not_sign, &is_positive, Label::kNear); 3632 __ j(not_sign, &is_positive, Label::kNear);
3633 __ neg(input_reg); // Sets flags. 3633 __ neg(input_reg); // Sets flags.
3634 DeoptimizeIf(negative, instr, Deoptimizer::kOverflow); 3634 DeoptimizeIf(negative, instr, "overflow");
3635 __ bind(&is_positive); 3635 __ bind(&is_positive);
3636 } 3636 }
3637 3637
3638 3638
3639 void LCodeGen::DoMathAbs(LMathAbs* instr) { 3639 void LCodeGen::DoMathAbs(LMathAbs* instr) {
3640 // Class for deferred case. 3640 // Class for deferred case.
3641 class DeferredMathAbsTaggedHeapNumber FINAL : public LDeferredCode { 3641 class DeferredMathAbsTaggedHeapNumber FINAL : public LDeferredCode {
3642 public: 3642 public:
3643 DeferredMathAbsTaggedHeapNumber(LCodeGen* codegen, 3643 DeferredMathAbsTaggedHeapNumber(LCodeGen* codegen,
3644 LMathAbs* instr) 3644 LMathAbs* instr)
(...skipping 38 matching lines...)
3683 if (CpuFeatures::IsSupported(SSE4_1)) { 3683 if (CpuFeatures::IsSupported(SSE4_1)) {
3684 CpuFeatureScope scope(masm(), SSE4_1); 3684 CpuFeatureScope scope(masm(), SSE4_1);
3685 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { 3685 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
3686 // Deoptimize on negative zero. 3686 // Deoptimize on negative zero.
3687 Label non_zero; 3687 Label non_zero;
3688 __ xorps(xmm_scratch, xmm_scratch); // Zero the register. 3688 __ xorps(xmm_scratch, xmm_scratch); // Zero the register.
3689 __ ucomisd(input_reg, xmm_scratch); 3689 __ ucomisd(input_reg, xmm_scratch);
3690 __ j(not_equal, &non_zero, Label::kNear); 3690 __ j(not_equal, &non_zero, Label::kNear);
3691 __ movmskpd(output_reg, input_reg); 3691 __ movmskpd(output_reg, input_reg);
3692 __ test(output_reg, Immediate(1)); 3692 __ test(output_reg, Immediate(1));
3693 DeoptimizeIf(not_zero, instr, Deoptimizer::kMinusZero); 3693 DeoptimizeIf(not_zero, instr, "minus zero");
3694 __ bind(&non_zero); 3694 __ bind(&non_zero);
3695 } 3695 }
3696 __ roundsd(xmm_scratch, input_reg, Assembler::kRoundDown); 3696 __ roundsd(xmm_scratch, input_reg, Assembler::kRoundDown);
3697 __ cvttsd2si(output_reg, Operand(xmm_scratch)); 3697 __ cvttsd2si(output_reg, Operand(xmm_scratch));
3698 // Overflow is signalled with minint. 3698 // Overflow is signalled with minint.
3699 __ cmp(output_reg, 0x1); 3699 __ cmp(output_reg, 0x1);
3700 DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow); 3700 DeoptimizeIf(overflow, instr, "overflow");
3701 } else { 3701 } else {
3702 Label negative_sign, done; 3702 Label negative_sign, done;
3703 // Deoptimize on unordered. 3703 // Deoptimize on unordered.
3704 __ xorps(xmm_scratch, xmm_scratch); // Zero the register. 3704 __ xorps(xmm_scratch, xmm_scratch); // Zero the register.
3705 __ ucomisd(input_reg, xmm_scratch); 3705 __ ucomisd(input_reg, xmm_scratch);
3706 DeoptimizeIf(parity_even, instr, Deoptimizer::kNaN); 3706 DeoptimizeIf(parity_even, instr, "NaN");
3707 __ j(below, &negative_sign, Label::kNear); 3707 __ j(below, &negative_sign, Label::kNear);
3708 3708
3709 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { 3709 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
3710 // Check for negative zero. 3710 // Check for negative zero.
3711 Label positive_sign; 3711 Label positive_sign;
3712 __ j(above, &positive_sign, Label::kNear); 3712 __ j(above, &positive_sign, Label::kNear);
3713 __ movmskpd(output_reg, input_reg); 3713 __ movmskpd(output_reg, input_reg);
3714 __ test(output_reg, Immediate(1)); 3714 __ test(output_reg, Immediate(1));
3715 DeoptimizeIf(not_zero, instr, Deoptimizer::kMinusZero); 3715 DeoptimizeIf(not_zero, instr, "minus zero");
3716 __ Move(output_reg, Immediate(0)); 3716 __ Move(output_reg, Immediate(0));
3717 __ jmp(&done, Label::kNear); 3717 __ jmp(&done, Label::kNear);
3718 __ bind(&positive_sign); 3718 __ bind(&positive_sign);
3719 } 3719 }
3720 3720
3721 // Use truncating instruction (OK because input is positive). 3721 // Use truncating instruction (OK because input is positive).
3722 __ cvttsd2si(output_reg, Operand(input_reg)); 3722 __ cvttsd2si(output_reg, Operand(input_reg));
3723 // Overflow is signalled with minint. 3723 // Overflow is signalled with minint.
3724 __ cmp(output_reg, 0x1); 3724 __ cmp(output_reg, 0x1);
3725 DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow); 3725 DeoptimizeIf(overflow, instr, "overflow");
3726 __ jmp(&done, Label::kNear); 3726 __ jmp(&done, Label::kNear);
3727 3727
3728 // Non-zero negative reaches here. 3728 // Non-zero negative reaches here.
3729 __ bind(&negative_sign); 3729 __ bind(&negative_sign);
3730 // Truncate, then compare and compensate. 3730 // Truncate, then compare and compensate.
3731 __ cvttsd2si(output_reg, Operand(input_reg)); 3731 __ cvttsd2si(output_reg, Operand(input_reg));
3732 __ Cvtsi2sd(xmm_scratch, output_reg); 3732 __ Cvtsi2sd(xmm_scratch, output_reg);
3733 __ ucomisd(input_reg, xmm_scratch); 3733 __ ucomisd(input_reg, xmm_scratch);
3734 __ j(equal, &done, Label::kNear); 3734 __ j(equal, &done, Label::kNear);
3735 __ sub(output_reg, Immediate(1)); 3735 __ sub(output_reg, Immediate(1));
3736 DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow); 3736 DeoptimizeIf(overflow, instr, "overflow");
3737 3737
3738 __ bind(&done); 3738 __ bind(&done);
3739 } 3739 }
3740 } 3740 }
3741 3741
3742 3742
3743 void LCodeGen::DoMathRound(LMathRound* instr) { 3743 void LCodeGen::DoMathRound(LMathRound* instr) {
3744 Register output_reg = ToRegister(instr->result()); 3744 Register output_reg = ToRegister(instr->result());
3745 XMMRegister input_reg = ToDoubleRegister(instr->value()); 3745 XMMRegister input_reg = ToDoubleRegister(instr->value());
3746 XMMRegister xmm_scratch = double_scratch0(); 3746 XMMRegister xmm_scratch = double_scratch0();
3747 XMMRegister input_temp = ToDoubleRegister(instr->temp()); 3747 XMMRegister input_temp = ToDoubleRegister(instr->temp());
3748 ExternalReference one_half = ExternalReference::address_of_one_half(); 3748 ExternalReference one_half = ExternalReference::address_of_one_half();
3749 ExternalReference minus_one_half = 3749 ExternalReference minus_one_half =
3750 ExternalReference::address_of_minus_one_half(); 3750 ExternalReference::address_of_minus_one_half();
3751 3751
3752 Label done, round_to_zero, below_one_half, do_not_compensate; 3752 Label done, round_to_zero, below_one_half, do_not_compensate;
3753 Label::Distance dist = DeoptEveryNTimes() ? Label::kFar : Label::kNear; 3753 Label::Distance dist = DeoptEveryNTimes() ? Label::kFar : Label::kNear;
3754 3754
3755 __ movsd(xmm_scratch, Operand::StaticVariable(one_half)); 3755 __ movsd(xmm_scratch, Operand::StaticVariable(one_half));
3756 __ ucomisd(xmm_scratch, input_reg); 3756 __ ucomisd(xmm_scratch, input_reg);
3757 __ j(above, &below_one_half, Label::kNear); 3757 __ j(above, &below_one_half, Label::kNear);
3758 3758
3759 // CVTTSD2SI rounds towards zero, since 0.5 <= x, we use floor(0.5 + x). 3759 // CVTTSD2SI rounds towards zero, since 0.5 <= x, we use floor(0.5 + x).
3760 __ addsd(xmm_scratch, input_reg); 3760 __ addsd(xmm_scratch, input_reg);
3761 __ cvttsd2si(output_reg, Operand(xmm_scratch)); 3761 __ cvttsd2si(output_reg, Operand(xmm_scratch));
3762 // Overflow is signalled with minint. 3762 // Overflow is signalled with minint.
3763 __ cmp(output_reg, 0x1); 3763 __ cmp(output_reg, 0x1);
3764 DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow); 3764 DeoptimizeIf(overflow, instr, "overflow");
3765 __ jmp(&done, dist); 3765 __ jmp(&done, dist);
3766 3766
3767 __ bind(&below_one_half); 3767 __ bind(&below_one_half);
3768 __ movsd(xmm_scratch, Operand::StaticVariable(minus_one_half)); 3768 __ movsd(xmm_scratch, Operand::StaticVariable(minus_one_half));
3769 __ ucomisd(xmm_scratch, input_reg); 3769 __ ucomisd(xmm_scratch, input_reg);
3770 __ j(below_equal, &round_to_zero, Label::kNear); 3770 __ j(below_equal, &round_to_zero, Label::kNear);
3771 3771
3772 // CVTTSD2SI rounds towards zero, we use ceil(x - (-0.5)) and then 3772 // CVTTSD2SI rounds towards zero, we use ceil(x - (-0.5)) and then
3773 // compare and compensate. 3773 // compare and compensate.
3774 __ movaps(input_temp, input_reg); // Do not alter input_reg. 3774 __ movaps(input_temp, input_reg); // Do not alter input_reg.
3775 __ subsd(input_temp, xmm_scratch); 3775 __ subsd(input_temp, xmm_scratch);
3776 __ cvttsd2si(output_reg, Operand(input_temp)); 3776 __ cvttsd2si(output_reg, Operand(input_temp));
3777 // Catch minint due to overflow, and to prevent overflow when compensating. 3777 // Catch minint due to overflow, and to prevent overflow when compensating.
3778 __ cmp(output_reg, 0x1); 3778 __ cmp(output_reg, 0x1);
3779 DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow); 3779 DeoptimizeIf(overflow, instr, "overflow");
3780 3780
3781 __ Cvtsi2sd(xmm_scratch, output_reg); 3781 __ Cvtsi2sd(xmm_scratch, output_reg);
3782 __ ucomisd(xmm_scratch, input_temp); 3782 __ ucomisd(xmm_scratch, input_temp);
3783 __ j(equal, &done, dist); 3783 __ j(equal, &done, dist);
3784 __ sub(output_reg, Immediate(1)); 3784 __ sub(output_reg, Immediate(1));
3785 // No overflow because we already ruled out minint. 3785 // No overflow because we already ruled out minint.
3786 __ jmp(&done, dist); 3786 __ jmp(&done, dist);
3787 3787
3788 __ bind(&round_to_zero); 3788 __ bind(&round_to_zero);
3789 // We return 0 for the input range [+0, 0.5[, or [-0.5, 0.5[ if 3789 // We return 0 for the input range [+0, 0.5[, or [-0.5, 0.5[ if
3790 // we can ignore the difference between a result of -0 and +0. 3790 // we can ignore the difference between a result of -0 and +0.
3791 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { 3791 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
3792 // If the sign is positive, we return +0. 3792 // If the sign is positive, we return +0.
3793 __ movmskpd(output_reg, input_reg); 3793 __ movmskpd(output_reg, input_reg);
3794 __ test(output_reg, Immediate(1)); 3794 __ test(output_reg, Immediate(1));
3795 DeoptimizeIf(not_zero, instr, Deoptimizer::kMinusZero); 3795 DeoptimizeIf(not_zero, instr, "minus zero");
3796 } 3796 }
3797 __ Move(output_reg, Immediate(0)); 3797 __ Move(output_reg, Immediate(0));
3798 __ bind(&done); 3798 __ bind(&done);
3799 } 3799 }
3800 3800
3801 3801
3802 void LCodeGen::DoMathFround(LMathFround* instr) { 3802 void LCodeGen::DoMathFround(LMathFround* instr) {
3803 XMMRegister input_reg = ToDoubleRegister(instr->value()); 3803 XMMRegister input_reg = ToDoubleRegister(instr->value());
3804 XMMRegister output_reg = ToDoubleRegister(instr->result()); 3804 XMMRegister output_reg = ToDoubleRegister(instr->result());
3805 __ cvtsd2ss(output_reg, input_reg); 3805 __ cvtsd2ss(output_reg, input_reg);
(...skipping 55 matching lines...)
3861 DCHECK(ToDoubleRegister(instr->result()).is(xmm3)); 3861 DCHECK(ToDoubleRegister(instr->result()).is(xmm3));
3862 3862
3863 if (exponent_type.IsSmi()) { 3863 if (exponent_type.IsSmi()) {
3864 MathPowStub stub(isolate(), MathPowStub::TAGGED); 3864 MathPowStub stub(isolate(), MathPowStub::TAGGED);
3865 __ CallStub(&stub); 3865 __ CallStub(&stub);
3866 } else if (exponent_type.IsTagged()) { 3866 } else if (exponent_type.IsTagged()) {
3867 Label no_deopt; 3867 Label no_deopt;
3868 __ JumpIfSmi(tagged_exponent, &no_deopt); 3868 __ JumpIfSmi(tagged_exponent, &no_deopt);
3869 DCHECK(!ecx.is(tagged_exponent)); 3869 DCHECK(!ecx.is(tagged_exponent));
3870 __ CmpObjectType(tagged_exponent, HEAP_NUMBER_TYPE, ecx); 3870 __ CmpObjectType(tagged_exponent, HEAP_NUMBER_TYPE, ecx);
3871 DeoptimizeIf(not_equal, instr, Deoptimizer::kNotAHeapNumber); 3871 DeoptimizeIf(not_equal, instr, "not a heap number");
3872 __ bind(&no_deopt); 3872 __ bind(&no_deopt);
3873 MathPowStub stub(isolate(), MathPowStub::TAGGED); 3873 MathPowStub stub(isolate(), MathPowStub::TAGGED);
3874 __ CallStub(&stub); 3874 __ CallStub(&stub);
3875 } else if (exponent_type.IsInteger32()) { 3875 } else if (exponent_type.IsInteger32()) {
3876 MathPowStub stub(isolate(), MathPowStub::INTEGER); 3876 MathPowStub stub(isolate(), MathPowStub::INTEGER);
3877 __ CallStub(&stub); 3877 __ CallStub(&stub);
3878 } else { 3878 } else {
3879 DCHECK(exponent_type.IsDouble()); 3879 DCHECK(exponent_type.IsDouble());
3880 MathPowStub stub(isolate(), MathPowStub::DOUBLE); 3880 MathPowStub stub(isolate(), MathPowStub::DOUBLE);
3881 __ CallStub(&stub); 3881 __ CallStub(&stub);
(...skipping 309 matching lines...)
4191 instr->hydrogen()->index()->representation())); 4191 instr->hydrogen()->index()->representation()));
4192 } else { 4192 } else {
4193 __ cmp(ToRegister(instr->index()), ToOperand(instr->length())); 4193 __ cmp(ToRegister(instr->index()), ToOperand(instr->length()));
4194 } 4194 }
4195 if (FLAG_debug_code && instr->hydrogen()->skip_check()) { 4195 if (FLAG_debug_code && instr->hydrogen()->skip_check()) {
4196 Label done; 4196 Label done;
4197 __ j(NegateCondition(cc), &done, Label::kNear); 4197 __ j(NegateCondition(cc), &done, Label::kNear);
4198 __ int3(); 4198 __ int3();
4199 __ bind(&done); 4199 __ bind(&done);
4200 } else { 4200 } else {
4201 DeoptimizeIf(cc, instr, Deoptimizer::kOutOfBounds); 4201 DeoptimizeIf(cc, instr, "out of bounds");
4202 } 4202 }
4203 } 4203 }
4204 4204
4205 4205
4206 void LCodeGen::DoStoreKeyedExternalArray(LStoreKeyed* instr) { 4206 void LCodeGen::DoStoreKeyedExternalArray(LStoreKeyed* instr) {
4207 ElementsKind elements_kind = instr->elements_kind(); 4207 ElementsKind elements_kind = instr->elements_kind();
4208 LOperand* key = instr->key(); 4208 LOperand* key = instr->key();
4209 if (!key->IsConstantOperand() && 4209 if (!key->IsConstantOperand() &&
4210 ExternalArrayOpRequiresTemp(instr->hydrogen()->key()->representation(), 4210 ExternalArrayOpRequiresTemp(instr->hydrogen()->key()->representation(),
4211 elements_kind)) { 4211 elements_kind)) {
(...skipping 142 matching lines...)
4354 CodeFactory::KeyedStoreIC(isolate(), instr->language_mode()).code(); 4354 CodeFactory::KeyedStoreIC(isolate(), instr->language_mode()).code();
4355 CallCode(ic, RelocInfo::CODE_TARGET, instr); 4355 CallCode(ic, RelocInfo::CODE_TARGET, instr);
4356 } 4356 }
4357 4357
4358 4358
4359 void LCodeGen::DoTrapAllocationMemento(LTrapAllocationMemento* instr) { 4359 void LCodeGen::DoTrapAllocationMemento(LTrapAllocationMemento* instr) {
4360 Register object = ToRegister(instr->object()); 4360 Register object = ToRegister(instr->object());
4361 Register temp = ToRegister(instr->temp()); 4361 Register temp = ToRegister(instr->temp());
4362 Label no_memento_found; 4362 Label no_memento_found;
4363 __ TestJSArrayForAllocationMemento(object, temp, &no_memento_found); 4363 __ TestJSArrayForAllocationMemento(object, temp, &no_memento_found);
4364 DeoptimizeIf(equal, instr, Deoptimizer::kMementoFound); 4364 DeoptimizeIf(equal, instr, "memento found");
4365 __ bind(&no_memento_found); 4365 __ bind(&no_memento_found);
4366 } 4366 }
4367 4367
4368 4368
4369 void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) { 4369 void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) {
4370 Register object_reg = ToRegister(instr->object()); 4370 Register object_reg = ToRegister(instr->object());
4371 4371
4372 Handle<Map> from_map = instr->original_map(); 4372 Handle<Map> from_map = instr->original_map();
4373 Handle<Map> to_map = instr->transitioned_map(); 4373 Handle<Map> to_map = instr->transitioned_map();
4374 ElementsKind from_kind = instr->from_kind(); 4374 ElementsKind from_kind = instr->from_kind();
(...skipping 325 matching lines...)
4700 __ StoreToSafepointRegisterSlot(reg, eax); 4700 __ StoreToSafepointRegisterSlot(reg, eax);
4701 } 4701 }
4702 4702
4703 4703
4704 void LCodeGen::DoSmiTag(LSmiTag* instr) { 4704 void LCodeGen::DoSmiTag(LSmiTag* instr) {
4705 HChange* hchange = instr->hydrogen(); 4705 HChange* hchange = instr->hydrogen();
4706 Register input = ToRegister(instr->value()); 4706 Register input = ToRegister(instr->value());
4707 if (hchange->CheckFlag(HValue::kCanOverflow) && 4707 if (hchange->CheckFlag(HValue::kCanOverflow) &&
4708 hchange->value()->CheckFlag(HValue::kUint32)) { 4708 hchange->value()->CheckFlag(HValue::kUint32)) {
4709 __ test(input, Immediate(0xc0000000)); 4709 __ test(input, Immediate(0xc0000000));
4710 DeoptimizeIf(not_zero, instr, Deoptimizer::kOverflow); 4710 DeoptimizeIf(not_zero, instr, "overflow");
4711 } 4711 }
4712 __ SmiTag(input); 4712 __ SmiTag(input);
4713 if (hchange->CheckFlag(HValue::kCanOverflow) && 4713 if (hchange->CheckFlag(HValue::kCanOverflow) &&
4714 !hchange->value()->CheckFlag(HValue::kUint32)) { 4714 !hchange->value()->CheckFlag(HValue::kUint32)) {
4715 DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow); 4715 DeoptimizeIf(overflow, instr, "overflow");
4716 } 4716 }
4717 } 4717 }
4718 4718
4719 4719
4720 void LCodeGen::DoSmiUntag(LSmiUntag* instr) { 4720 void LCodeGen::DoSmiUntag(LSmiUntag* instr) {
4721 LOperand* input = instr->value(); 4721 LOperand* input = instr->value();
4722 Register result = ToRegister(input); 4722 Register result = ToRegister(input);
4723 DCHECK(input->IsRegister() && input->Equals(instr->result())); 4723 DCHECK(input->IsRegister() && input->Equals(instr->result()));
4724 if (instr->needs_check()) { 4724 if (instr->needs_check()) {
4725 __ test(result, Immediate(kSmiTagMask)); 4725 __ test(result, Immediate(kSmiTagMask));
4726 DeoptimizeIf(not_zero, instr, Deoptimizer::kNotASmi); 4726 DeoptimizeIf(not_zero, instr, "not a Smi");
4727 } else { 4727 } else {
4728 __ AssertSmi(result); 4728 __ AssertSmi(result);
4729 } 4729 }
4730 __ SmiUntag(result); 4730 __ SmiUntag(result);
4731 } 4731 }
4732 4732
4733 4733
4734 void LCodeGen::EmitNumberUntagD(LNumberUntagD* instr, Register input_reg, 4734 void LCodeGen::EmitNumberUntagD(LNumberUntagD* instr, Register input_reg,
4735 Register temp_reg, XMMRegister result_reg, 4735 Register temp_reg, XMMRegister result_reg,
4736 NumberUntagDMode mode) { 4736 NumberUntagDMode mode) {
4737 bool can_convert_undefined_to_nan = 4737 bool can_convert_undefined_to_nan =
4738 instr->hydrogen()->can_convert_undefined_to_nan(); 4738 instr->hydrogen()->can_convert_undefined_to_nan();
4739 bool deoptimize_on_minus_zero = instr->hydrogen()->deoptimize_on_minus_zero(); 4739 bool deoptimize_on_minus_zero = instr->hydrogen()->deoptimize_on_minus_zero();
4740 4740
4741 Label convert, load_smi, done; 4741 Label convert, load_smi, done;
4742 4742
4743 if (mode == NUMBER_CANDIDATE_IS_ANY_TAGGED) { 4743 if (mode == NUMBER_CANDIDATE_IS_ANY_TAGGED) {
4744 // Smi check. 4744 // Smi check.
4745 __ JumpIfSmi(input_reg, &load_smi, Label::kNear); 4745 __ JumpIfSmi(input_reg, &load_smi, Label::kNear);
4746 4746
4747 // Heap number map check. 4747 // Heap number map check.
4748 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset), 4748 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
4749 factory()->heap_number_map()); 4749 factory()->heap_number_map());
4750 if (can_convert_undefined_to_nan) { 4750 if (can_convert_undefined_to_nan) {
4751 __ j(not_equal, &convert, Label::kNear); 4751 __ j(not_equal, &convert, Label::kNear);
4752 } else { 4752 } else {
4753 DeoptimizeIf(not_equal, instr, Deoptimizer::kNotAHeapNumber); 4753 DeoptimizeIf(not_equal, instr, "not a heap number");
4754 } 4754 }
4755 4755
4756 // Heap number to XMM conversion. 4756 // Heap number to XMM conversion.
4757 __ movsd(result_reg, FieldOperand(input_reg, HeapNumber::kValueOffset)); 4757 __ movsd(result_reg, FieldOperand(input_reg, HeapNumber::kValueOffset));
4758 4758
4759 if (deoptimize_on_minus_zero) { 4759 if (deoptimize_on_minus_zero) {
4760 XMMRegister xmm_scratch = double_scratch0(); 4760 XMMRegister xmm_scratch = double_scratch0();
4761 __ xorps(xmm_scratch, xmm_scratch); 4761 __ xorps(xmm_scratch, xmm_scratch);
4762 __ ucomisd(result_reg, xmm_scratch); 4762 __ ucomisd(result_reg, xmm_scratch);
4763 __ j(not_zero, &done, Label::kNear); 4763 __ j(not_zero, &done, Label::kNear);
4764 __ movmskpd(temp_reg, result_reg); 4764 __ movmskpd(temp_reg, result_reg);
4765 __ test_b(temp_reg, 1); 4765 __ test_b(temp_reg, 1);
4766 DeoptimizeIf(not_zero, instr, Deoptimizer::kMinusZero); 4766 DeoptimizeIf(not_zero, instr, "minus zero");
4767 } 4767 }
4768 __ jmp(&done, Label::kNear); 4768 __ jmp(&done, Label::kNear);
4769 4769
4770 if (can_convert_undefined_to_nan) { 4770 if (can_convert_undefined_to_nan) {
4771 __ bind(&convert); 4771 __ bind(&convert);
4772 4772
4773 // Convert undefined to NaN. 4773 // Convert undefined to NaN.
4774 __ cmp(input_reg, factory()->undefined_value()); 4774 __ cmp(input_reg, factory()->undefined_value());
4775 DeoptimizeIf(not_equal, instr, Deoptimizer::kNotAHeapNumberUndefined); 4775 DeoptimizeIf(not_equal, instr, "not a heap number/undefined");
4776 4776
4777 __ pcmpeqd(result_reg, result_reg); 4777 __ pcmpeqd(result_reg, result_reg);
4778 __ jmp(&done, Label::kNear); 4778 __ jmp(&done, Label::kNear);
4779 } 4779 }
4780 } else { 4780 } else {
4781 DCHECK(mode == NUMBER_CANDIDATE_IS_SMI); 4781 DCHECK(mode == NUMBER_CANDIDATE_IS_SMI);
4782 } 4782 }
4783 4783
4784 __ bind(&load_smi); 4784 __ bind(&load_smi);
4785 // Smi to XMM conversion. Clobbering a temp is faster than re-tagging the 4785 // Smi to XMM conversion. Clobbering a temp is faster than re-tagging the
(...skipping 31 matching lines...)
4817 __ jmp(done); 4817 __ jmp(done);
4818 4818
4819 __ bind(&check_bools); 4819 __ bind(&check_bools);
4820 __ cmp(input_reg, factory()->true_value()); 4820 __ cmp(input_reg, factory()->true_value());
4821 __ j(not_equal, &check_false, Label::kNear); 4821 __ j(not_equal, &check_false, Label::kNear);
4822 __ Move(input_reg, Immediate(1)); 4822 __ Move(input_reg, Immediate(1));
4823 __ jmp(done); 4823 __ jmp(done);
4824 4824
4825 __ bind(&check_false); 4825 __ bind(&check_false);
4826 __ cmp(input_reg, factory()->false_value()); 4826 __ cmp(input_reg, factory()->false_value());
4827 DeoptimizeIf(not_equal, instr, 4827 DeoptimizeIf(not_equal, instr, "not a heap number/undefined/true/false");
4828 Deoptimizer::kNotAHeapNumberUndefinedBoolean);
4829 __ Move(input_reg, Immediate(0)); 4828 __ Move(input_reg, Immediate(0));
4830 } else { 4829 } else {
4831 XMMRegister scratch = ToDoubleRegister(instr->temp()); 4830 XMMRegister scratch = ToDoubleRegister(instr->temp());
4832 DCHECK(!scratch.is(xmm0)); 4831 DCHECK(!scratch.is(xmm0));
4833 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset), 4832 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
4834 isolate()->factory()->heap_number_map()); 4833 isolate()->factory()->heap_number_map());
4835 DeoptimizeIf(not_equal, instr, Deoptimizer::kNotAHeapNumber); 4834 DeoptimizeIf(not_equal, instr, "not a heap number");
4836 __ movsd(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset)); 4835 __ movsd(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
4837 __ cvttsd2si(input_reg, Operand(xmm0)); 4836 __ cvttsd2si(input_reg, Operand(xmm0));
4838 __ Cvtsi2sd(scratch, Operand(input_reg)); 4837 __ Cvtsi2sd(scratch, Operand(input_reg));
4839 __ ucomisd(xmm0, scratch); 4838 __ ucomisd(xmm0, scratch);
4840 DeoptimizeIf(not_equal, instr, Deoptimizer::kLostPrecision); 4839 DeoptimizeIf(not_equal, instr, "lost precision");
4841 DeoptimizeIf(parity_even, instr, Deoptimizer::kNaN); 4840 DeoptimizeIf(parity_even, instr, "NaN");
4842 if (instr->hydrogen()->GetMinusZeroMode() == FAIL_ON_MINUS_ZERO) { 4841 if (instr->hydrogen()->GetMinusZeroMode() == FAIL_ON_MINUS_ZERO) {
4843 __ test(input_reg, Operand(input_reg)); 4842 __ test(input_reg, Operand(input_reg));
4844 __ j(not_zero, done); 4843 __ j(not_zero, done);
4845 __ movmskpd(input_reg, xmm0); 4844 __ movmskpd(input_reg, xmm0);
4846 __ and_(input_reg, 1); 4845 __ and_(input_reg, 1);
4847 DeoptimizeIf(not_zero, instr, Deoptimizer::kMinusZero); 4846 DeoptimizeIf(not_zero, instr, "minus zero");
4848 } 4847 }
4849 } 4848 }
4850 } 4849 }
4851 4850
4852 4851
4853 void LCodeGen::DoTaggedToI(LTaggedToI* instr) { 4852 void LCodeGen::DoTaggedToI(LTaggedToI* instr) {
4854 class DeferredTaggedToI FINAL : public LDeferredCode { 4853 class DeferredTaggedToI FINAL : public LDeferredCode {
4855 public: 4854 public:
4856 DeferredTaggedToI(LCodeGen* codegen, LTaggedToI* instr) 4855 DeferredTaggedToI(LCodeGen* codegen, LTaggedToI* instr)
4857 : LDeferredCode(codegen), instr_(instr) { } 4856 : LDeferredCode(codegen), instr_(instr) { }
(...skipping 59 matching lines...)
4917 } else { 4916 } else {
4918 Label lost_precision, is_nan, minus_zero, done; 4917 Label lost_precision, is_nan, minus_zero, done;
4919 XMMRegister input_reg = ToDoubleRegister(input); 4918 XMMRegister input_reg = ToDoubleRegister(input);
4920 XMMRegister xmm_scratch = double_scratch0(); 4919 XMMRegister xmm_scratch = double_scratch0();
4921 Label::Distance dist = DeoptEveryNTimes() ? Label::kFar : Label::kNear; 4920 Label::Distance dist = DeoptEveryNTimes() ? Label::kFar : Label::kNear;
4922 __ DoubleToI(result_reg, input_reg, xmm_scratch, 4921 __ DoubleToI(result_reg, input_reg, xmm_scratch,
4923 instr->hydrogen()->GetMinusZeroMode(), &lost_precision, 4922 instr->hydrogen()->GetMinusZeroMode(), &lost_precision,
4924 &is_nan, &minus_zero, dist); 4923 &is_nan, &minus_zero, dist);
4925 __ jmp(&done, dist); 4924 __ jmp(&done, dist);
4926 __ bind(&lost_precision); 4925 __ bind(&lost_precision);
4927 DeoptimizeIf(no_condition, instr, Deoptimizer::kLostPrecision); 4926 DeoptimizeIf(no_condition, instr, "lost precision");
4928 __ bind(&is_nan); 4927 __ bind(&is_nan);
4929 DeoptimizeIf(no_condition, instr, Deoptimizer::kNaN); 4928 DeoptimizeIf(no_condition, instr, "NaN");
4930 __ bind(&minus_zero); 4929 __ bind(&minus_zero);
4931 DeoptimizeIf(no_condition, instr, Deoptimizer::kMinusZero); 4930 DeoptimizeIf(no_condition, instr, "minus zero");
4932 __ bind(&done); 4931 __ bind(&done);
4933 } 4932 }
4934 } 4933 }
4935 4934
4936 4935
4937 void LCodeGen::DoDoubleToSmi(LDoubleToSmi* instr) { 4936 void LCodeGen::DoDoubleToSmi(LDoubleToSmi* instr) {
4938 LOperand* input = instr->value(); 4937 LOperand* input = instr->value();
4939 DCHECK(input->IsDoubleRegister()); 4938 DCHECK(input->IsDoubleRegister());
4940 LOperand* result = instr->result(); 4939 LOperand* result = instr->result();
4941 DCHECK(result->IsRegister()); 4940 DCHECK(result->IsRegister());
4942 Register result_reg = ToRegister(result); 4941 Register result_reg = ToRegister(result);
4943 4942
4944 Label lost_precision, is_nan, minus_zero, done; 4943 Label lost_precision, is_nan, minus_zero, done;
4945 XMMRegister input_reg = ToDoubleRegister(input); 4944 XMMRegister input_reg = ToDoubleRegister(input);
4946 XMMRegister xmm_scratch = double_scratch0(); 4945 XMMRegister xmm_scratch = double_scratch0();
4947 Label::Distance dist = DeoptEveryNTimes() ? Label::kFar : Label::kNear; 4946 Label::Distance dist = DeoptEveryNTimes() ? Label::kFar : Label::kNear;
4948 __ DoubleToI(result_reg, input_reg, xmm_scratch, 4947 __ DoubleToI(result_reg, input_reg, xmm_scratch,
4949 instr->hydrogen()->GetMinusZeroMode(), &lost_precision, &is_nan, 4948 instr->hydrogen()->GetMinusZeroMode(), &lost_precision, &is_nan,
4950 &minus_zero, dist); 4949 &minus_zero, dist);
4951 __ jmp(&done, dist); 4950 __ jmp(&done, dist);
4952 __ bind(&lost_precision); 4951 __ bind(&lost_precision);
4953 DeoptimizeIf(no_condition, instr, Deoptimizer::kLostPrecision); 4952 DeoptimizeIf(no_condition, instr, "lost precision");
4954 __ bind(&is_nan); 4953 __ bind(&is_nan);
4955 DeoptimizeIf(no_condition, instr, Deoptimizer::kNaN); 4954 DeoptimizeIf(no_condition, instr, "NaN");
4956 __ bind(&minus_zero); 4955 __ bind(&minus_zero);
4957 DeoptimizeIf(no_condition, instr, Deoptimizer::kMinusZero); 4956 DeoptimizeIf(no_condition, instr, "minus zero");
4958 __ bind(&done); 4957 __ bind(&done);
4959 __ SmiTag(result_reg); 4958 __ SmiTag(result_reg);
4960 DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow); 4959 DeoptimizeIf(overflow, instr, "overflow");
4961 } 4960 }
4962 4961
4963 4962
4964 void LCodeGen::DoCheckSmi(LCheckSmi* instr) { 4963 void LCodeGen::DoCheckSmi(LCheckSmi* instr) {
4965 LOperand* input = instr->value(); 4964 LOperand* input = instr->value();
4966 __ test(ToOperand(input), Immediate(kSmiTagMask)); 4965 __ test(ToOperand(input), Immediate(kSmiTagMask));
4967 DeoptimizeIf(not_zero, instr, Deoptimizer::kNotASmi); 4966 DeoptimizeIf(not_zero, instr, "not a Smi");
4968 } 4967 }
4969 4968
4970 4969
4971 void LCodeGen::DoCheckNonSmi(LCheckNonSmi* instr) { 4970 void LCodeGen::DoCheckNonSmi(LCheckNonSmi* instr) {
4972 if (!instr->hydrogen()->value()->type().IsHeapObject()) { 4971 if (!instr->hydrogen()->value()->type().IsHeapObject()) {
4973 LOperand* input = instr->value(); 4972 LOperand* input = instr->value();
4974 __ test(ToOperand(input), Immediate(kSmiTagMask)); 4973 __ test(ToOperand(input), Immediate(kSmiTagMask));
4975 DeoptimizeIf(zero, instr, Deoptimizer::kSmi); 4974 DeoptimizeIf(zero, instr, "Smi");
4976 } 4975 }
4977 } 4976 }
4978 4977
4979 4978
4980 void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) { 4979 void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) {
4981 Register input = ToRegister(instr->value()); 4980 Register input = ToRegister(instr->value());
4982 Register temp = ToRegister(instr->temp()); 4981 Register temp = ToRegister(instr->temp());
4983 4982
4984 __ mov(temp, FieldOperand(input, HeapObject::kMapOffset)); 4983 __ mov(temp, FieldOperand(input, HeapObject::kMapOffset));
4985 4984
4986 if (instr->hydrogen()->is_interval_check()) { 4985 if (instr->hydrogen()->is_interval_check()) {
4987 InstanceType first; 4986 InstanceType first;
4988 InstanceType last; 4987 InstanceType last;
4989 instr->hydrogen()->GetCheckInterval(&first, &last); 4988 instr->hydrogen()->GetCheckInterval(&first, &last);
4990 4989
4991 __ cmpb(FieldOperand(temp, Map::kInstanceTypeOffset), 4990 __ cmpb(FieldOperand(temp, Map::kInstanceTypeOffset),
4992 static_cast<int8_t>(first)); 4991 static_cast<int8_t>(first));
4993 4992
4994 // If there is only one type in the interval check for equality. 4993 // If there is only one type in the interval check for equality.
4995 if (first == last) { 4994 if (first == last) {
4996 DeoptimizeIf(not_equal, instr, Deoptimizer::kWrongInstanceType); 4995 DeoptimizeIf(not_equal, instr, "wrong instance type");
4997 } else { 4996 } else {
4998 DeoptimizeIf(below, instr, Deoptimizer::kWrongInstanceType); 4997 DeoptimizeIf(below, instr, "wrong instance type");
4999 // Omit check for the last type. 4998 // Omit check for the last type.
5000 if (last != LAST_TYPE) { 4999 if (last != LAST_TYPE) {
5001 __ cmpb(FieldOperand(temp, Map::kInstanceTypeOffset), 5000 __ cmpb(FieldOperand(temp, Map::kInstanceTypeOffset),
5002 static_cast<int8_t>(last)); 5001 static_cast<int8_t>(last));
5003 DeoptimizeIf(above, instr, Deoptimizer::kWrongInstanceType); 5002 DeoptimizeIf(above, instr, "wrong instance type");
5004 } 5003 }
5005 } 5004 }
5006 } else { 5005 } else {
5007 uint8_t mask; 5006 uint8_t mask;
5008 uint8_t tag; 5007 uint8_t tag;
5009 instr->hydrogen()->GetCheckMaskAndTag(&mask, &tag); 5008 instr->hydrogen()->GetCheckMaskAndTag(&mask, &tag);
5010 5009
5011 if (base::bits::IsPowerOfTwo32(mask)) { 5010 if (base::bits::IsPowerOfTwo32(mask)) {
5012 DCHECK(tag == 0 || base::bits::IsPowerOfTwo32(tag)); 5011 DCHECK(tag == 0 || base::bits::IsPowerOfTwo32(tag));
5013 __ test_b(FieldOperand(temp, Map::kInstanceTypeOffset), mask); 5012 __ test_b(FieldOperand(temp, Map::kInstanceTypeOffset), mask);
5014 DeoptimizeIf(tag == 0 ? not_zero : zero, instr, 5013 DeoptimizeIf(tag == 0 ? not_zero : zero, instr, "wrong instance type");
5015 Deoptimizer::kWrongInstanceType);
5016 } else { 5014 } else {
5017 __ movzx_b(temp, FieldOperand(temp, Map::kInstanceTypeOffset)); 5015 __ movzx_b(temp, FieldOperand(temp, Map::kInstanceTypeOffset));
5018 __ and_(temp, mask); 5016 __ and_(temp, mask);
5019 __ cmp(temp, tag); 5017 __ cmp(temp, tag);
5020 DeoptimizeIf(not_equal, instr, Deoptimizer::kWrongInstanceType); 5018 DeoptimizeIf(not_equal, instr, "wrong instance type");
5021 } 5019 }
5022 } 5020 }
5023 } 5021 }
5024 5022
5025 5023
5026 void LCodeGen::DoCheckValue(LCheckValue* instr) { 5024 void LCodeGen::DoCheckValue(LCheckValue* instr) {
5027 Handle<HeapObject> object = instr->hydrogen()->object().handle(); 5025 Handle<HeapObject> object = instr->hydrogen()->object().handle();
5028 if (instr->hydrogen()->object_in_new_space()) { 5026 if (instr->hydrogen()->object_in_new_space()) {
5029 Register reg = ToRegister(instr->value()); 5027 Register reg = ToRegister(instr->value());
5030 Handle<Cell> cell = isolate()->factory()->NewCell(object); 5028 Handle<Cell> cell = isolate()->factory()->NewCell(object);
5031 __ cmp(reg, Operand::ForCell(cell)); 5029 __ cmp(reg, Operand::ForCell(cell));
5032 } else { 5030 } else {
5033 Operand operand = ToOperand(instr->value()); 5031 Operand operand = ToOperand(instr->value());
5034 __ cmp(operand, object); 5032 __ cmp(operand, object);
5035 } 5033 }
5036 DeoptimizeIf(not_equal, instr, Deoptimizer::kValueMismatch); 5034 DeoptimizeIf(not_equal, instr, "value mismatch");
5037 } 5035 }
5038 5036
5039 5037
5040 void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) { 5038 void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) {
5041 { 5039 {
5042 PushSafepointRegistersScope scope(this); 5040 PushSafepointRegistersScope scope(this);
5043 __ push(object); 5041 __ push(object);
5044 __ xor_(esi, esi); 5042 __ xor_(esi, esi);
5045 __ CallRuntimeSaveDoubles(Runtime::kTryMigrateInstance); 5043 __ CallRuntimeSaveDoubles(Runtime::kTryMigrateInstance);
5046 RecordSafepointWithRegisters( 5044 RecordSafepointWithRegisters(
5047 instr->pointer_map(), 1, Safepoint::kNoLazyDeopt); 5045 instr->pointer_map(), 1, Safepoint::kNoLazyDeopt);
5048 5046
5049 __ test(eax, Immediate(kSmiTagMask)); 5047 __ test(eax, Immediate(kSmiTagMask));
5050 } 5048 }
5051 DeoptimizeIf(zero, instr, Deoptimizer::kInstanceMigrationFailed); 5049 DeoptimizeIf(zero, instr, "instance migration failed");
5052 } 5050 }
5053 5051
5054 5052
5055 void LCodeGen::DoCheckMaps(LCheckMaps* instr) { 5053 void LCodeGen::DoCheckMaps(LCheckMaps* instr) {
5056 class DeferredCheckMaps FINAL : public LDeferredCode { 5054 class DeferredCheckMaps FINAL : public LDeferredCode {
5057 public: 5055 public:
5058 DeferredCheckMaps(LCodeGen* codegen, LCheckMaps* instr, Register object) 5056 DeferredCheckMaps(LCodeGen* codegen, LCheckMaps* instr, Register object)
5059 : LDeferredCode(codegen), instr_(instr), object_(object) { 5057 : LDeferredCode(codegen), instr_(instr), object_(object) {
5060 SetExit(check_maps()); 5058 SetExit(check_maps());
5061 } 5059 }
(...skipping 33 matching lines...)
5095 Handle<Map> map = maps->at(i).handle(); 5093 Handle<Map> map = maps->at(i).handle();
5096 __ CompareMap(reg, map); 5094 __ CompareMap(reg, map);
5097 __ j(equal, &success, Label::kNear); 5095 __ j(equal, &success, Label::kNear);
5098 } 5096 }
5099 5097
5100 Handle<Map> map = maps->at(maps->size() - 1).handle(); 5098 Handle<Map> map = maps->at(maps->size() - 1).handle();
5101 __ CompareMap(reg, map); 5099 __ CompareMap(reg, map);
5102 if (instr->hydrogen()->HasMigrationTarget()) { 5100 if (instr->hydrogen()->HasMigrationTarget()) {
5103 __ j(not_equal, deferred->entry()); 5101 __ j(not_equal, deferred->entry());
5104 } else { 5102 } else {
5105 DeoptimizeIf(not_equal, instr, Deoptimizer::kWrongMap); 5103 DeoptimizeIf(not_equal, instr, "wrong map");
5106 } 5104 }
5107 5105
5108 __ bind(&success); 5106 __ bind(&success);
5109 } 5107 }
5110 5108
5111 5109
5112 void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) { 5110 void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) {
5113 XMMRegister value_reg = ToDoubleRegister(instr->unclamped()); 5111 XMMRegister value_reg = ToDoubleRegister(instr->unclamped());
5114 XMMRegister xmm_scratch = double_scratch0(); 5112 XMMRegister xmm_scratch = double_scratch0();
5115 Register result_reg = ToRegister(instr->result()); 5113 Register result_reg = ToRegister(instr->result());
(...skipping 18 matching lines...)
5134 __ JumpIfSmi(input_reg, &is_smi); 5132 __ JumpIfSmi(input_reg, &is_smi);
5135 5133
5136 // Check for heap number 5134 // Check for heap number
5137 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset), 5135 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
5138 factory()->heap_number_map()); 5136 factory()->heap_number_map());
5139 __ j(equal, &heap_number, Label::kNear); 5137 __ j(equal, &heap_number, Label::kNear);
5140 5138
5141 // Check for undefined. Undefined is converted to zero for clamping 5139 // Check for undefined. Undefined is converted to zero for clamping
5142 // conversions. 5140 // conversions.
5143 __ cmp(input_reg, factory()->undefined_value()); 5141 __ cmp(input_reg, factory()->undefined_value());
5144 DeoptimizeIf(not_equal, instr, Deoptimizer::kNotAHeapNumberUndefined); 5142 DeoptimizeIf(not_equal, instr, "not a heap number/undefined");
5145 __ mov(input_reg, 0); 5143 __ mov(input_reg, 0);
5146 __ jmp(&done, Label::kNear); 5144 __ jmp(&done, Label::kNear);
5147 5145
5148 // Heap number 5146 // Heap number
5149 __ bind(&heap_number); 5147 __ bind(&heap_number);
5150 __ movsd(xmm_scratch, FieldOperand(input_reg, HeapNumber::kValueOffset)); 5148 __ movsd(xmm_scratch, FieldOperand(input_reg, HeapNumber::kValueOffset));
5151 __ ClampDoubleToUint8(xmm_scratch, temp_xmm_reg, input_reg); 5149 __ ClampDoubleToUint8(xmm_scratch, temp_xmm_reg, input_reg);
5152 __ jmp(&done, Label::kNear); 5150 __ jmp(&done, Label::kNear);
5153 5151
5154 // smi 5152 // smi
(...skipping 467 matching lines...)
5622 DCHECK(!environment->HasBeenRegistered()); 5620 DCHECK(!environment->HasBeenRegistered());
5623 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); 5621 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
5624 5622
5625 GenerateOsrPrologue(); 5623 GenerateOsrPrologue();
5626 } 5624 }
5627 5625
5628 5626
5629 void LCodeGen::DoForInPrepareMap(LForInPrepareMap* instr) { 5627 void LCodeGen::DoForInPrepareMap(LForInPrepareMap* instr) {
5630 DCHECK(ToRegister(instr->context()).is(esi)); 5628 DCHECK(ToRegister(instr->context()).is(esi));
5631 __ cmp(eax, isolate()->factory()->undefined_value()); 5629 __ cmp(eax, isolate()->factory()->undefined_value());
5632 DeoptimizeIf(equal, instr, Deoptimizer::kUndefined); 5630 DeoptimizeIf(equal, instr, "undefined");
5633 5631
5634 __ cmp(eax, isolate()->factory()->null_value()); 5632 __ cmp(eax, isolate()->factory()->null_value());
5635 DeoptimizeIf(equal, instr, Deoptimizer::kNull); 5633 DeoptimizeIf(equal, instr, "null");
5636 5634
5637 __ test(eax, Immediate(kSmiTagMask)); 5635 __ test(eax, Immediate(kSmiTagMask));
5638 DeoptimizeIf(zero, instr, Deoptimizer::kSmi); 5636 DeoptimizeIf(zero, instr, "Smi");
5639 5637
5640 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE); 5638 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
5641 __ CmpObjectType(eax, LAST_JS_PROXY_TYPE, ecx); 5639 __ CmpObjectType(eax, LAST_JS_PROXY_TYPE, ecx);
5642 DeoptimizeIf(below_equal, instr, Deoptimizer::kWrongInstanceType); 5640 DeoptimizeIf(below_equal, instr, "wrong instance type");
5643 5641
5644 Label use_cache, call_runtime; 5642 Label use_cache, call_runtime;
5645 __ CheckEnumCache(&call_runtime); 5643 __ CheckEnumCache(&call_runtime);
5646 5644
5647 __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset)); 5645 __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
5648 __ jmp(&use_cache, Label::kNear); 5646 __ jmp(&use_cache, Label::kNear);
5649 5647
5650 // Get the set of properties to enumerate. 5648 // Get the set of properties to enumerate.
5651 __ bind(&call_runtime); 5649 __ bind(&call_runtime);
5652 __ push(eax); 5650 __ push(eax);
5653 CallRuntime(Runtime::kGetPropertyNamesFast, 1, instr); 5651 CallRuntime(Runtime::kGetPropertyNamesFast, 1, instr);
5654 5652
5655 __ cmp(FieldOperand(eax, HeapObject::kMapOffset), 5653 __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
5656 isolate()->factory()->meta_map()); 5654 isolate()->factory()->meta_map());
5657 DeoptimizeIf(not_equal, instr, Deoptimizer::kWrongMap); 5655 DeoptimizeIf(not_equal, instr, "wrong map");
5658 __ bind(&use_cache); 5656 __ bind(&use_cache);
5659 } 5657 }
5660 5658
5661 5659
5662 void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) { 5660 void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) {
5663 Register map = ToRegister(instr->map()); 5661 Register map = ToRegister(instr->map());
5664 Register result = ToRegister(instr->result()); 5662 Register result = ToRegister(instr->result());
5665 Label load_cache, done; 5663 Label load_cache, done;
5666 __ EnumLength(result, map); 5664 __ EnumLength(result, map);
5667 __ cmp(result, Immediate(Smi::FromInt(0))); 5665 __ cmp(result, Immediate(Smi::FromInt(0)));
5668 __ j(not_equal, &load_cache, Label::kNear); 5666 __ j(not_equal, &load_cache, Label::kNear);
5669 __ mov(result, isolate()->factory()->empty_fixed_array()); 5667 __ mov(result, isolate()->factory()->empty_fixed_array());
5670 __ jmp(&done, Label::kNear); 5668 __ jmp(&done, Label::kNear);
5671 5669
5672 __ bind(&load_cache); 5670 __ bind(&load_cache);
5673 __ LoadInstanceDescriptors(map, result); 5671 __ LoadInstanceDescriptors(map, result);
5674 __ mov(result, 5672 __ mov(result,
5675 FieldOperand(result, DescriptorArray::kEnumCacheOffset)); 5673 FieldOperand(result, DescriptorArray::kEnumCacheOffset));
5676 __ mov(result, 5674 __ mov(result,
5677 FieldOperand(result, FixedArray::SizeFor(instr->idx()))); 5675 FieldOperand(result, FixedArray::SizeFor(instr->idx())));
5678 __ bind(&done); 5676 __ bind(&done);
5679 __ test(result, result); 5677 __ test(result, result);
5680 DeoptimizeIf(equal, instr, Deoptimizer::kNoCache); 5678 DeoptimizeIf(equal, instr, "no cache");
5681 } 5679 }
5682 5680
5683 5681
5684 void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) { 5682 void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) {
5685 Register object = ToRegister(instr->value()); 5683 Register object = ToRegister(instr->value());
5686 __ cmp(ToRegister(instr->map()), 5684 __ cmp(ToRegister(instr->map()),
5687 FieldOperand(object, HeapObject::kMapOffset)); 5685 FieldOperand(object, HeapObject::kMapOffset));
5688 DeoptimizeIf(not_equal, instr, Deoptimizer::kWrongMap); 5686 DeoptimizeIf(not_equal, instr, "wrong map");
5689 } 5687 }
5690 5688
5691 5689
5692 void LCodeGen::DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr, 5690 void LCodeGen::DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr,
5693 Register object, 5691 Register object,
5694 Register index) { 5692 Register index) {
5695 PushSafepointRegistersScope scope(this); 5693 PushSafepointRegistersScope scope(this);
5696 __ push(object); 5694 __ push(object);
5697 __ push(index); 5695 __ push(index);
5698 __ xor_(esi, esi); 5696 __ xor_(esi, esi);
(...skipping 74 matching lines...)
5773 CallRuntime(Runtime::kPushBlockContext, 2, instr); 5771 CallRuntime(Runtime::kPushBlockContext, 2, instr);
5774 RecordSafepoint(Safepoint::kNoLazyDeopt); 5772 RecordSafepoint(Safepoint::kNoLazyDeopt);
5775 } 5773 }
5776 5774
5777 5775
5778 #undef __ 5776 #undef __
5779 5777
5780 } } // namespace v8::internal 5778 } } // namespace v8::internal
5781 5779
5782 #endif // V8_TARGET_ARCH_IA32 5780 #endif // V8_TARGET_ARCH_IA32
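For readers of this revert, the following is a minimal, self-contained C++ sketch contrasting the two call shapes seen throughout the diff above: the left column passes an enum reason (Deoptimizer::kHole and friends), while the reverted right column passes a plain C string ("hole"). The DeoptReason enum and the simplified DeoptimizeIf signatures below are assumptions made purely for illustration; they do not reproduce the real LCodeGen::DeoptimizeIf declaration.

#include <cstdio>

// Hypothetical stand-in for the enum-based reason used on the left (pre-revert) side.
enum class DeoptReason { kHole, kOverflow, kNotASmi };

// Enum-based form: call sites look like DeoptimizeIf(cond, DeoptReason::kHole).
void DeoptimizeIf(bool condition, DeoptReason reason) {
  if (condition) std::printf("deopt, reason enum = %d\n", static_cast<int>(reason));
}

// String-based form: call sites look like DeoptimizeIf(cond, "hole"),
// matching the right (post-revert) side of the diff.
void DeoptimizeIf(bool condition, const char* detail) {
  if (condition) std::printf("deopt, reason = %s\n", detail);
}

int main() {
  DeoptimizeIf(true, DeoptReason::kHole);  // enum-based call shape
  DeoptimizeIf(true, "hole");              // string-based call shape
  return 0;
}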