Chromium Code Reviews

Side by Side Diff: src/x64/code-stubs-x64.cc

Issue 6529032: Merge 6168:6800 from bleeding_edge to experimental/gc branch. (Closed)
Base URL: http://v8.googlecode.com/svn/branches/experimental/gc/
Patch Set: Created 9 years, 10 months ago
-// Copyright 2010 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
 //       with the distribution.
(...skipping 18 matching lines...)
 #if defined(V8_TARGET_ARCH_X64)

 #include "bootstrapper.h"
 #include "code-stubs.h"
 #include "regexp-macro-assembler.h"

 namespace v8 {
 namespace internal {

 #define __ ACCESS_MASM(masm)
+
+void ToNumberStub::Generate(MacroAssembler* masm) {
+  // The ToNumber stub takes one argument in rax.
+  NearLabel check_heap_number, call_builtin;
+  __ SmiTest(rax);
+  __ j(not_zero, &check_heap_number);
+  __ Ret();
+
+  __ bind(&check_heap_number);
+  __ Move(rbx, Factory::heap_number_map());
+  __ cmpq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
+  __ j(not_equal, &call_builtin);
+  __ Ret();
+
+  __ bind(&call_builtin);
+  __ pop(rcx);  // Pop return address.
+  __ push(rax);
+  __ push(rcx);  // Push return address.
+  __ InvokeBuiltin(Builtins::TO_NUMBER, JUMP_FUNCTION);
+}
+
+
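For orientation, here is the control flow the new ToNumberStub emits, restated as a minimal standalone C++ sketch; Kind, Value and CallToNumberBuiltin are illustrative stand-ins, not V8 types:

#include <cassert>

// Hedged sketch of the stub's fast path: smis and heap numbers are
// returned unchanged; everything else falls through to the TO_NUMBER
// builtin. The enum models the tag checks (SmiTest, map compare).
enum class Kind { kSmi, kHeapNumber, kOther };
struct Value { Kind kind; };

Value CallToNumberBuiltin(Value) {             // stand-in for the builtin
  return Value{Kind::kHeapNumber};
}

Value ToNumberFastPath(Value v) {
  if (v.kind == Kind::kSmi) return v;          // SmiTest + Ret
  if (v.kind == Kind::kHeapNumber) return v;   // map compare + Ret
  return CallToNumberBuiltin(v);               // InvokeBuiltin(TO_NUMBER)
}

int main() {
  assert(ToNumberFastPath(Value{Kind::kSmi}).kind == Kind::kSmi);
  assert(ToNumberFastPath(Value{Kind::kOther}).kind == Kind::kHeapNumber);
  return 0;
}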
 void FastNewClosureStub::Generate(MacroAssembler* masm) {
   // Create a new closure from the given function info in new
   // space. Set the context to the current context in rsi.
   Label gc;
   __ AllocateInNewSpace(JSFunction::kSize, rax, rbx, rcx, &gc, TAG_OBJECT);

   // Get the function info from the stack.
   __ movq(rdx, Operand(rsp, 1 * kPointerSize));

   // Compute the function map in the current global context and set that
(...skipping 970 matching lines...)

 Handle<Code> GetBinaryOpStub(int key, BinaryOpIC::TypeInfo type_info) {
   GenericBinaryOpStub stub(key, type_info);
   return stub.GetCode();
 }


 Handle<Code> GetTypeRecordingBinaryOpStub(int key,
                                           TRBinaryOpIC::TypeInfo type_info,
                                           TRBinaryOpIC::TypeInfo result_type_info) {
-  UNIMPLEMENTED();
-  return Handle<Code>::null();
+  TypeRecordingBinaryOpStub stub(key, type_info, result_type_info);
+  return stub.GetCode();
+}
+
+
+void TypeRecordingBinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
+  __ pop(rcx);  // Save return address.
+  __ push(rdx);
+  __ push(rax);
+  // Left and right arguments are now on top.
+  // Push this stub's key. Although the operation and the type info are
+  // encoded into the key, the encoding is opaque, so push them too.
+  __ Push(Smi::FromInt(MinorKey()));
+  __ Push(Smi::FromInt(op_));
+  __ Push(Smi::FromInt(operands_type_));
+
+  __ push(rcx);  // Push return address.
+
+  // Patch the caller to an appropriate specialized stub and return the
+  // operation result to the caller of the stub.
+  __ TailCallExternalReference(
+      ExternalReference(IC_Utility(IC::kTypeRecordingBinaryOp_Patch)),
+      5,
+      1);
+}
+
+
+void TypeRecordingBinaryOpStub::Generate(MacroAssembler* masm) {
+  switch (operands_type_) {
+    case TRBinaryOpIC::UNINITIALIZED:
+      GenerateTypeTransition(masm);
+      break;
+    case TRBinaryOpIC::SMI:
+      GenerateSmiStub(masm);
+      break;
+    case TRBinaryOpIC::INT32:
+      UNREACHABLE();
+      // The int32 case is identical to the Smi case. We avoid creating this
+      // ic state on x64.
+      break;
+    case TRBinaryOpIC::HEAP_NUMBER:
+      GenerateHeapNumberStub(masm);
+      break;
+    case TRBinaryOpIC::STRING:
+      GenerateStringStub(masm);
+      break;
+    case TRBinaryOpIC::GENERIC:
+      GenerateGeneric(masm);
+      break;
+    default:
+      UNREACHABLE();
+  }
+}
+
+
+const char* TypeRecordingBinaryOpStub::GetName() {
+  if (name_ != NULL) return name_;
+  const int kMaxNameLength = 100;
+  name_ = Bootstrapper::AllocateAutoDeletedArray(kMaxNameLength);
+  if (name_ == NULL) return "OOM";
+  const char* op_name = Token::Name(op_);
+  const char* overwrite_name;
+  switch (mode_) {
+    case NO_OVERWRITE: overwrite_name = "Alloc"; break;
+    case OVERWRITE_RIGHT: overwrite_name = "OverwriteRight"; break;
+    case OVERWRITE_LEFT: overwrite_name = "OverwriteLeft"; break;
+    default: overwrite_name = "UnknownOverwrite"; break;
+  }
+
+  OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
+               "TypeRecordingBinaryOpStub_%s_%s_%s",
+               op_name,
+               overwrite_name,
+               TRBinaryOpIC::GetName(operands_type_));
+  return name_;
+}
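Illustrative only: the snippet below mimics the SNPrintF format string above to show the shape of a generated name, assuming Token::Name(Token::ADD) yields "ADD"; the real buffer comes from Bootstrapper::AllocateAutoDeletedArray.

#include <cstdio>

int main() {
  char name[100];
  // Same format string as the OS::SNPrintF call above.
  snprintf(name, sizeof(name), "TypeRecordingBinaryOpStub_%s_%s_%s",
           "ADD", "Alloc", "SMI");
  printf("%s\n", name);  // TypeRecordingBinaryOpStub_ADD_Alloc_SMI
  return 0;
}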
+
+
+void TypeRecordingBinaryOpStub::GenerateSmiCode(MacroAssembler* masm,
+    Label* slow,
+    SmiCodeGenerateHeapNumberResults allow_heapnumber_results) {
+  // We only generate heapnumber answers for overflowing calculations
+  // for the four basic arithmetic operations.
+  bool generate_inline_heapnumber_results =
+      (allow_heapnumber_results == ALLOW_HEAPNUMBER_RESULTS) &&
+      (op_ == Token::ADD || op_ == Token::SUB ||
+       op_ == Token::MUL || op_ == Token::DIV);
+
+  // Arguments to TypeRecordingBinaryOpStub are in rdx and rax.
+  Register left = rdx;
+  Register right = rax;
+
+  // Smi check of both operands. If op is BIT_OR, the check is delayed
+  // until after the OR operation.
+  Label not_smis;
+  Label use_fp_on_smis;
+  Label restore_MOD_registers;  // Only used if op_ == Token::MOD.
+
+  if (op_ != Token::BIT_OR) {
+    Comment smi_check_comment(masm, "-- Smi check arguments");
+    __ JumpIfNotBothSmi(left, right, &not_smis);
+  }
+
+  // Perform the operation.
+  Comment perform_smi(masm, "-- Perform smi operation");
+  switch (op_) {
+    case Token::ADD:
+      ASSERT(right.is(rax));
+      __ SmiAdd(right, right, left, &use_fp_on_smis);  // ADD is commutative.
+      break;
+
+    case Token::SUB:
+      __ SmiSub(left, left, right, &use_fp_on_smis);
+      __ movq(rax, left);
+      break;
+
+    case Token::MUL:
+      ASSERT(right.is(rax));
+      __ SmiMul(right, right, left, &use_fp_on_smis);  // MUL is commutative.
+      break;
+
+    case Token::DIV:
+      // SmiDiv will not accept left in rdx or right in rax.
+      left = rcx;
+      right = rbx;
+      __ movq(rbx, rax);
+      __ movq(rcx, rdx);
+      __ SmiDiv(rax, left, right, &use_fp_on_smis);
+      break;
+
+    case Token::MOD:
+      // SmiMod will not accept left in rdx or right in rax.
+      left = rcx;
+      right = rbx;
+      __ movq(rbx, rax);
+      __ movq(rcx, rdx);
+      __ SmiMod(rax, left, right, &use_fp_on_smis);
+      break;
+
+    case Token::BIT_OR: {
+      ASSERT(right.is(rax));
+      __ movq(rcx, right);  // Save the right operand.
+      __ SmiOr(right, right, left);  // BIT_OR is commutative.
+      __ JumpIfNotSmi(right, &not_smis);  // Test delayed until after BIT_OR.
+      break;
+    }
+    case Token::BIT_XOR:
+      ASSERT(right.is(rax));
+      __ SmiXor(right, right, left);  // BIT_XOR is commutative.
+      break;
+
+    case Token::BIT_AND:
+      ASSERT(right.is(rax));
+      __ SmiAnd(right, right, left);  // BIT_AND is commutative.
+      break;
+
+    case Token::SHL:
+      __ SmiShiftLeft(left, left, right);
+      __ movq(rax, left);
+      break;
+
+    case Token::SAR:
+      __ SmiShiftArithmeticRight(left, left, right);
+      __ movq(rax, left);
+      break;
+
+    case Token::SHR:
+      __ SmiShiftLogicalRight(left, left, right, &not_smis);
+      __ movq(rax, left);
+      break;
+
+    default:
+      UNREACHABLE();
+  }
+
+  // Emit return of result in rax. Some operations have registers pushed.
+  __ ret(0);
+
+  // For some operations emit inline code to perform floating point
+  // operations on known smis (e.g., if the result of the operation
+  // overflowed the smi range).
+  __ bind(&use_fp_on_smis);
+  if (op_ == Token::DIV || op_ == Token::MOD) {
+    // Restore left and right to rdx and rax.
+    __ movq(rdx, rcx);
+    __ movq(rax, rbx);
+  }
+
+  if (generate_inline_heapnumber_results) {
+    __ AllocateHeapNumber(rcx, rbx, slow);
+    Comment perform_float(masm, "-- Perform float operation on smis");
+    FloatingPointHelper::LoadSSE2SmiOperands(masm);
+    switch (op_) {
+      case Token::ADD: __ addsd(xmm0, xmm1); break;
+      case Token::SUB: __ subsd(xmm0, xmm1); break;
+      case Token::MUL: __ mulsd(xmm0, xmm1); break;
+      case Token::DIV: __ divsd(xmm0, xmm1); break;
+      default: UNREACHABLE();
+    }
+    __ movsd(FieldOperand(rcx, HeapNumber::kValueOffset), xmm0);
+    __ movq(rax, rcx);
+    __ ret(0);
+  }
+
+  // Non-smi operands reach the end of the code generated by
+  // GenerateSmiCode, and fall through to subsequent code,
+  // with the operands in rdx and rax.
+  Comment done_comment(masm, "-- Enter non-smi code");
+  __ bind(&not_smis);
+  if (op_ == Token::BIT_OR) {
+    __ movq(right, rcx);
+  }
+}
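The Smi* macros above rely on the x64 smi representation. A minimal model, assuming the 32-bit payload sits in the upper half of the 64-bit word (consistent with the STATIC_ASSERT(kSmiValueSize == 32) later in this file):

#include <cassert>
#include <cstdint>

// Model of x64 smi tagging: payload in the upper 32 bits, tag bits zero.
static inline int64_t SmiTag(int32_t value) {
  return static_cast<int64_t>(value) << 32;
}

static inline int32_t SmiUntag(int64_t smi) {
  return static_cast<int32_t>(smi >> 32);
}

int main() {
  assert(SmiUntag(SmiTag(-7)) == -7);
  // Adding two tagged smis is a single 64-bit add; signed overflow of
  // the payload is what sends GenerateSmiCode to use_fp_on_smis.
  assert(SmiUntag(SmiTag(2) + SmiTag(3)) == 5);
  return 0;
}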
+
+
+void TypeRecordingBinaryOpStub::GenerateFloatingPointCode(
+    MacroAssembler* masm,
+    Label* allocation_failure,
+    Label* non_numeric_failure) {
+  switch (op_) {
+    case Token::ADD:
+    case Token::SUB:
+    case Token::MUL:
+    case Token::DIV: {
+      FloatingPointHelper::LoadSSE2UnknownOperands(masm, non_numeric_failure);
+
+      switch (op_) {
+        case Token::ADD: __ addsd(xmm0, xmm1); break;
+        case Token::SUB: __ subsd(xmm0, xmm1); break;
+        case Token::MUL: __ mulsd(xmm0, xmm1); break;
+        case Token::DIV: __ divsd(xmm0, xmm1); break;
+        default: UNREACHABLE();
+      }
+      GenerateHeapResultAllocation(masm, allocation_failure);
+      __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm0);
+      __ ret(0);
+      break;
+    }
+    case Token::MOD: {
+      // For MOD we jump to the allocation_failure label, to call runtime.
+      __ jmp(allocation_failure);
+      break;
+    }
+    case Token::BIT_OR:
+    case Token::BIT_AND:
+    case Token::BIT_XOR:
+    case Token::SAR:
+    case Token::SHL:
+    case Token::SHR: {
+      Label non_smi_shr_result;
+      Register heap_number_map = r9;
+      __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
+      FloatingPointHelper::LoadAsIntegers(masm, non_numeric_failure,
+                                          heap_number_map);
+      switch (op_) {
+        case Token::BIT_OR:  __ orl(rax, rcx); break;
+        case Token::BIT_AND: __ andl(rax, rcx); break;
+        case Token::BIT_XOR: __ xorl(rax, rcx); break;
+        case Token::SAR: __ sarl_cl(rax); break;
+        case Token::SHL: __ shll_cl(rax); break;
+        case Token::SHR: {
+          __ shrl_cl(rax);
+          // Check if result is negative. This can only happen for a shift
+          // by zero.
+          __ testl(rax, rax);
+          __ j(negative, &non_smi_shr_result);
+          break;
+        }
+        default: UNREACHABLE();
+      }
+      STATIC_ASSERT(kSmiValueSize == 32);
+      // Tag smi result and return.
+      __ Integer32ToSmi(rax, rax);
+      __ Ret();
+
+      // Logical shift right can produce an unsigned int32 that is not
+      // an int32, and so is not in the smi range. Allocate a heap number
+      // in that case.
+      if (op_ == Token::SHR) {
+        __ bind(&non_smi_shr_result);
+        Label allocation_failed;
+        __ movl(rbx, rax);  // rbx holds result value (uint32 value as int64).
+        // Allocate heap number in new space.
+        // Not using AllocateHeapNumber macro in order to reuse
+        // already loaded heap_number_map.
+        __ AllocateInNewSpace(HeapNumber::kSize,
+                              rax,
+                              rcx,
+                              no_reg,
+                              &allocation_failed,
+                              TAG_OBJECT);
+        // Set the map.
+        if (FLAG_debug_code) {
+          __ AbortIfNotRootValue(heap_number_map,
+                                 Heap::kHeapNumberMapRootIndex,
+                                 "HeapNumberMap register clobbered.");
+        }
+        __ movq(FieldOperand(rax, HeapObject::kMapOffset),
+                heap_number_map);
+        __ cvtqsi2sd(xmm0, rbx);
+        __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm0);
+        __ Ret();
+
+        __ bind(&allocation_failed);
+        // We need tagged values in rdx and rax for the following code,
+        // not int32 in rax and rcx.
+        __ Integer32ToSmi(rax, rcx);
+        __ Integer32ToSmi(rdx, rax);
+        __ jmp(allocation_failure);
+      }
+      break;
+    }
+    default: UNREACHABLE(); break;
+  }
+  // No fall-through from this generated code.
+  if (FLAG_debug_code) {
+    __ Abort("Unexpected fall-through in "
+             "TypeRecordingBinaryStub::GenerateFloatingPointCode.");
+  }
+}
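Why only SHR needs the non_smi_shr_result path: a logical shift of a negative int32 yields a uint32 that can exceed INT32_MAX and therefore no longer fits a signed smi payload. A small standalone illustration:

#include <cstdint>
#include <cstdio>

int main() {
  int32_t x = -1;
  uint32_t shifted = static_cast<uint32_t>(x) >> 0;  // JS: -1 >>> 0
  // 4294967295 is a valid uint32 but not a valid signed 32-bit smi
  // payload, so the stub boxes it in a heap number instead.
  printf("%u exceeds INT32_MAX? %s\n", shifted,
         shifted > static_cast<uint32_t>(INT32_MAX) ? "yes" : "no");
  return 0;
}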
+
+
+void TypeRecordingBinaryOpStub::GenerateStringAddCode(MacroAssembler* masm) {
+  GenerateRegisterArgsPush(masm);
+  // Registers containing left and right operands respectively.
+  Register lhs = rdx;
+  Register rhs = rax;
+
+  // Test for string arguments before calling runtime.
+  Label not_strings, both_strings, not_string1, string1, string1_smi2;
+
+  __ JumpIfNotString(lhs, r8, &not_string1);
+
+  // First argument is a string, test second.
+  __ JumpIfSmi(rhs, &string1_smi2);
+  __ CmpObjectType(rhs, FIRST_NONSTRING_TYPE, r9);
+  __ j(above_equal, &string1);
+
+  // First and second argument are strings.
+  StringAddStub string_add_stub(NO_STRING_CHECK_IN_STUB);
+  __ TailCallStub(&string_add_stub);
+
+  __ bind(&string1_smi2);
+  // First argument is a string, second is a smi. Try to look up the number
+  // string for the smi in the number string cache.
+  NumberToStringStub::GenerateLookupNumberStringCache(
+      masm, rhs, rbx, rcx, r8, true, &string1);
+
+  // Replace second argument on stack and tail call the string add stub to
+  // make the result.
+  __ movq(Operand(rsp, 1 * kPointerSize), rbx);
+  __ TailCallStub(&string_add_stub);
+
+  // Only first argument is a string.
+  __ bind(&string1);
+  __ InvokeBuiltin(Builtins::STRING_ADD_LEFT, JUMP_FUNCTION);
+
+  // First argument was not a string, test second.
+  __ bind(&not_string1);
+  __ JumpIfNotString(rhs, rhs, &not_strings);
+
+  // Only second argument is a string.
+  __ InvokeBuiltin(Builtins::STRING_ADD_RIGHT, JUMP_FUNCTION);
+
+  __ bind(&not_strings);
+  // Neither argument is a string.
+  // Pop arguments, because CallRuntimeCode wants to push them again.
+  __ pop(rcx);
+  __ pop(rax);
+  __ pop(rdx);
+  __ push(rcx);
+}
+
+
+void TypeRecordingBinaryOpStub::GenerateCallRuntimeCode(MacroAssembler* masm) {
+  GenerateRegisterArgsPush(masm);
+  switch (op_) {
+    case Token::ADD:
+      __ InvokeBuiltin(Builtins::ADD, JUMP_FUNCTION);
+      break;
+    case Token::SUB:
+      __ InvokeBuiltin(Builtins::SUB, JUMP_FUNCTION);
+      break;
+    case Token::MUL:
+      __ InvokeBuiltin(Builtins::MUL, JUMP_FUNCTION);
+      break;
+    case Token::DIV:
+      __ InvokeBuiltin(Builtins::DIV, JUMP_FUNCTION);
+      break;
+    case Token::MOD:
+      __ InvokeBuiltin(Builtins::MOD, JUMP_FUNCTION);
+      break;
+    case Token::BIT_OR:
+      __ InvokeBuiltin(Builtins::BIT_OR, JUMP_FUNCTION);
+      break;
+    case Token::BIT_AND:
+      __ InvokeBuiltin(Builtins::BIT_AND, JUMP_FUNCTION);
+      break;
+    case Token::BIT_XOR:
+      __ InvokeBuiltin(Builtins::BIT_XOR, JUMP_FUNCTION);
+      break;
+    case Token::SAR:
+      __ InvokeBuiltin(Builtins::SAR, JUMP_FUNCTION);
+      break;
+    case Token::SHL:
+      __ InvokeBuiltin(Builtins::SHL, JUMP_FUNCTION);
+      break;
+    case Token::SHR:
+      __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION);
+      break;
+    default:
+      UNREACHABLE();
+  }
+}
+
+
+void TypeRecordingBinaryOpStub::GenerateSmiStub(MacroAssembler* masm) {
+  Label not_smi;
+
+  GenerateSmiCode(masm, &not_smi, NO_HEAPNUMBER_RESULTS);
+
+  __ bind(&not_smi);
+  GenerateTypeTransition(masm);
+}
+
+
+void TypeRecordingBinaryOpStub::GenerateStringStub(MacroAssembler* masm) {
+  ASSERT(op_ == Token::ADD);
+  GenerateStringAddCode(masm);
+
+  GenerateTypeTransition(masm);
+}
+
+
+void TypeRecordingBinaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) {
+  Label gc_required, not_number;
+  GenerateFloatingPointCode(masm, &gc_required, &not_number);
+
+  __ bind(&not_number);
+  GenerateTypeTransition(masm);
+
+  __ bind(&gc_required);
+  GenerateCallRuntimeCode(masm);
+}
+
+
+void TypeRecordingBinaryOpStub::GenerateGeneric(MacroAssembler* masm) {
+  Label call_runtime, call_string_add_or_runtime;
+
+  GenerateSmiCode(masm, &call_runtime, ALLOW_HEAPNUMBER_RESULTS);
+
+  GenerateFloatingPointCode(masm, &call_runtime, &call_string_add_or_runtime);
+
+  __ bind(&call_string_add_or_runtime);
+  if (op_ == Token::ADD) {
+    GenerateStringAddCode(masm);
+  }
+
+  __ bind(&call_runtime);
+  GenerateCallRuntimeCode(masm);
+}
+
+
+void TypeRecordingBinaryOpStub::GenerateHeapResultAllocation(
+    MacroAssembler* masm,
+    Label* alloc_failure) {
+  Label skip_allocation;
+  OverwriteMode mode = mode_;
+  switch (mode) {
+    case OVERWRITE_LEFT: {
+      // If the argument in rdx is already an object, we skip the
+      // allocation of a heap number.
+      __ JumpIfNotSmi(rdx, &skip_allocation);
+      // Allocate a heap number for the result. Keep rax and rdx intact
+      // for the possible runtime call.
+      __ AllocateHeapNumber(rbx, rcx, alloc_failure);
+      // Now rdx can be overwritten losing one of the arguments as we are
+      // now done and will not need it any more.
+      __ movq(rdx, rbx);
+      __ bind(&skip_allocation);
+      // Use object in rdx as a result holder.
+      __ movq(rax, rdx);
+      break;
+    }
+    case OVERWRITE_RIGHT:
+      // If the argument in rax is already an object, we skip the
+      // allocation of a heap number.
+      __ JumpIfNotSmi(rax, &skip_allocation);
+      // Fall through!
+    case NO_OVERWRITE:
+      // Allocate a heap number for the result. Keep rax and rdx intact
+      // for the possible runtime call.
+      __ AllocateHeapNumber(rbx, rcx, alloc_failure);
+      // Now rax can be overwritten losing one of the arguments as we are
+      // now done and will not need it any more.
+      __ movq(rax, rbx);
+      __ bind(&skip_allocation);
+      break;
+    default: UNREACHABLE();
+  }
+}
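A hedged restatement of the overwrite policy above, as a standalone model (the real decision is baked into the generated code, not taken at runtime like this):

#include <cassert>

// If the operand selected for overwriting is already a heap number, its
// box can be reused for the result; smis are immediates, so a fresh
// HeapNumber must be allocated for them.
enum OverwriteMode { NO_OVERWRITE, OVERWRITE_LEFT, OVERWRITE_RIGHT };

bool NeedsFreshHeapNumber(OverwriteMode mode,
                          bool left_is_smi, bool right_is_smi) {
  switch (mode) {
    case OVERWRITE_LEFT:  return left_is_smi;
    case OVERWRITE_RIGHT: return right_is_smi;
    case NO_OVERWRITE:    return true;
  }
  return true;  // Not reached; keeps the compiler happy.
}

int main() {
  assert(!NeedsFreshHeapNumber(OVERWRITE_LEFT, false, true));
  assert(NeedsFreshHeapNumber(OVERWRITE_RIGHT, false, true));
  assert(NeedsFreshHeapNumber(NO_OVERWRITE, false, false));
  return 0;
}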
+
+
+void TypeRecordingBinaryOpStub::GenerateRegisterArgsPush(MacroAssembler* masm) {
+  __ pop(rcx);
+  __ push(rdx);
+  __ push(rax);
+  __ push(rcx);
 }


 void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
   // Input on stack:
   //   rsp[8]: argument (should be number).
   //   rsp[0]: return address.
   Label runtime_call;
   Label runtime_call_clear_stack;
   Label input_not_smi;
(...skipping 293 matching lines...)
   __ bind(&rax_is_smi);
   __ SmiToInteger32(rcx, rax);

   __ bind(&done);
   __ movl(rax, rdx);
 }


 // Input: rdx, rax are the left and right objects of a bit op.
 // Output: rax, rcx are left and right integers for a bit op.
+// On a jump to conversion_failure, rdx and rax are unchanged.
 void FloatingPointHelper::LoadAsIntegers(MacroAssembler* masm,
                                          Label* conversion_failure,
                                          Register heap_number_map) {
   // Check float operands.
   Label arg1_is_object, check_undefined_arg1;
   Label arg2_is_object, check_undefined_arg2;
   Label load_arg2, done;

   __ JumpIfNotSmi(rdx, &arg1_is_object);
-  __ SmiToInteger32(rdx, rdx);
+  __ SmiToInteger32(r8, rdx);
   __ jmp(&load_arg2);

   // If the argument is undefined it converts to zero (ECMA-262, section 9.5).
   __ bind(&check_undefined_arg1);
   __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex);
   __ j(not_equal, conversion_failure);
-  __ movl(rdx, Immediate(0));
+  __ movl(r8, Immediate(0));
   __ jmp(&load_arg2);

   __ bind(&arg1_is_object);
   __ cmpq(FieldOperand(rdx, HeapObject::kMapOffset), heap_number_map);
   __ j(not_equal, &check_undefined_arg1);
-  // Get the untagged integer version of the rdx heap number in rcx.
-  IntegerConvert(masm, rdx, rdx);
+  // Get the untagged integer version of the rdx heap number in r8.
+  IntegerConvert(masm, r8, rdx);

-  // Here rdx has the untagged integer, rax has a Smi or a heap number.
+  // Here r8 has the untagged integer, rax has a Smi or a heap number.
   __ bind(&load_arg2);
   // Test if arg2 is a Smi.
   __ JumpIfNotSmi(rax, &arg2_is_object);
-  __ SmiToInteger32(rax, rax);
-  __ movl(rcx, rax);
+  __ SmiToInteger32(rcx, rax);
   __ jmp(&done);

   // If the argument is undefined it converts to zero (ECMA-262, section 9.5).
   __ bind(&check_undefined_arg2);
   __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
   __ j(not_equal, conversion_failure);
   __ movl(rcx, Immediate(0));
   __ jmp(&done);

   __ bind(&arg2_is_object);
   __ cmpq(FieldOperand(rax, HeapObject::kMapOffset), heap_number_map);
   __ j(not_equal, &check_undefined_arg2);
   // Get the untagged integer version of the rax heap number in rcx.
   IntegerConvert(masm, rcx, rax);
   __ bind(&done);
-  __ movl(rax, rdx);
+  __ movl(rax, r8);
 }
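LoadAsIntegers implements the ToInt32 conversion used by the bit ops. A hedged sketch of those semantics in portable C++ (ECMA-262 section 9.5; undefined, handled above via CompareRoot, simply maps to 0):

#include <cassert>
#include <cmath>
#include <cstdint>

// ToInt32: NaN and infinities become 0; other values are truncated
// toward zero, reduced mod 2^32, and reinterpreted as signed 32-bit.
int32_t ToInt32(double value) {
  if (std::isnan(value) || std::isinf(value)) return 0;
  double truncated = std::trunc(value);
  double modulo = std::fmod(truncated, 4294967296.0);  // 2^32
  if (modulo < 0) modulo += 4294967296.0;
  return static_cast<int32_t>(static_cast<uint32_t>(modulo));
}

int main() {
  assert(ToInt32(3.9) == 3);
  assert(ToInt32(-1.0) == -1);
  assert(ToInt32(4294967296.0 + 5.0) == 5);  // wraps mod 2^32
  return 0;
}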


 void FloatingPointHelper::LoadSSE2SmiOperands(MacroAssembler* masm) {
   __ SmiToInteger32(kScratchRegister, rdx);
   __ cvtlsi2sd(xmm0, kScratchRegister);
   __ SmiToInteger32(kScratchRegister, rax);
   __ cvtlsi2sd(xmm1, kScratchRegister);
 }

(...skipping 309 matching lines...)
   // at compilation.
 #ifdef V8_INTERPRETED_REGEXP
   __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
 #else  // V8_INTERPRETED_REGEXP
   if (!FLAG_regexp_entry_native) {
     __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
     return;
   }

   // Stack frame on entry.
-  //  esp[0]: return address
-  //  esp[8]: last_match_info (expected JSArray)
-  //  esp[16]: previous index
-  //  esp[24]: subject string
-  //  esp[32]: JSRegExp object
+  //  rsp[0]: return address
+  //  rsp[8]: last_match_info (expected JSArray)
+  //  rsp[16]: previous index
+  //  rsp[24]: subject string
+  //  rsp[32]: JSRegExp object

   static const int kLastMatchInfoOffset = 1 * kPointerSize;
   static const int kPreviousIndexOffset = 2 * kPointerSize;
   static const int kSubjectOffset = 3 * kPointerSize;
   static const int kJSRegExpOffset = 4 * kPointerSize;

   Label runtime;

   // Ensure that a RegExp stack is allocated.
   ExternalReference address_of_regexp_stack_memory_address =
(...skipping 33 matching lines...)
                     FieldOperand(rcx, JSRegExp::kIrregexpCaptureCountOffset));
   // Calculate number of capture registers (number_of_captures + 1) * 2.
   __ leal(rdx, Operand(rdx, rdx, times_1, 2));
   // Check that the static offsets vector buffer is large enough.
   __ cmpl(rdx, Immediate(OffsetsVector::kStaticOffsetsVectorSize));
   __ j(above, &runtime);

   // rcx: RegExp data (FixedArray)
   // rdx: Number of capture registers
   // Check that the second argument is a string.
-  __ movq(rax, Operand(rsp, kSubjectOffset));
-  __ JumpIfSmi(rax, &runtime);
-  Condition is_string = masm->IsObjectStringType(rax, rbx, rbx);
+  __ movq(rdi, Operand(rsp, kSubjectOffset));
+  __ JumpIfSmi(rdi, &runtime);
+  Condition is_string = masm->IsObjectStringType(rdi, rbx, rbx);
   __ j(NegateCondition(is_string), &runtime);

-  // rax: Subject string.
-  // rcx: RegExp data (FixedArray).
+  // rdi: Subject string.
+  // rax: RegExp data (FixedArray).
   // rdx: Number of capture registers.
   // Check that the third argument is a positive smi less than the string
   // length. A negative value will be greater (unsigned comparison).
   __ movq(rbx, Operand(rsp, kPreviousIndexOffset));
   __ JumpIfNotSmi(rbx, &runtime);
-  __ SmiCompare(rbx, FieldOperand(rax, String::kLengthOffset));
+  __ SmiCompare(rbx, FieldOperand(rdi, String::kLengthOffset));
   __ j(above_equal, &runtime);

-  // rcx: RegExp data (FixedArray)
+  // rax: RegExp data (FixedArray)
   // rdx: Number of capture registers
   // Check that the fourth object is a JSArray object.
-  __ movq(rax, Operand(rsp, kLastMatchInfoOffset));
-  __ JumpIfSmi(rax, &runtime);
-  __ CmpObjectType(rax, JS_ARRAY_TYPE, kScratchRegister);
+  __ movq(rdi, Operand(rsp, kLastMatchInfoOffset));
+  __ JumpIfSmi(rdi, &runtime);
+  __ CmpObjectType(rdi, JS_ARRAY_TYPE, kScratchRegister);
   __ j(not_equal, &runtime);
   // Check that the JSArray is in fast case.
-  __ movq(rbx, FieldOperand(rax, JSArray::kElementsOffset));
-  __ movq(rax, FieldOperand(rbx, HeapObject::kMapOffset));
-  __ Cmp(rax, Factory::fixed_array_map());
+  __ movq(rbx, FieldOperand(rdi, JSArray::kElementsOffset));
+  __ movq(rdi, FieldOperand(rbx, HeapObject::kMapOffset));
+  __ Cmp(rdi, Factory::fixed_array_map());
   __ j(not_equal, &runtime);
   // Check that the last match info has space for the capture registers and the
   // additional information. Ensure no overflow in add.
   STATIC_ASSERT(FixedArray::kMaxLength < kMaxInt - FixedArray::kLengthOffset);
-  __ SmiToInteger32(rax, FieldOperand(rbx, FixedArray::kLengthOffset));
+  __ SmiToInteger32(rdi, FieldOperand(rbx, FixedArray::kLengthOffset));
   __ addl(rdx, Immediate(RegExpImpl::kLastMatchOverhead));
-  __ cmpl(rdx, rax);
+  __ cmpl(rdx, rdi);
   __ j(greater, &runtime);

-  // rcx: RegExp data (FixedArray)
+  // rax: RegExp data (FixedArray)
   // Check the representation and encoding of the subject string.
   NearLabel seq_ascii_string, seq_two_byte_string, check_code;
-  __ movq(rax, Operand(rsp, kSubjectOffset));
-  __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
+  __ movq(rdi, Operand(rsp, kSubjectOffset));
+  __ movq(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
   __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
   // First check for flat two byte string.
   __ andb(rbx, Immediate(
       kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask));
   STATIC_ASSERT((kStringTag | kSeqStringTag | kTwoByteStringTag) == 0);
   __ j(zero, &seq_two_byte_string);
   // Any other flat string must be a flat ascii string.
   __ testb(rbx, Immediate(kIsNotStringMask | kStringRepresentationMask));
   __ j(zero, &seq_ascii_string);

   // Check for flat cons string.
   // A flat cons string is a cons string where the second part is the empty
   // string. In that case the subject string is just the first part of the cons
   // string. Also in this case the first part of the cons string is known to be
   // a sequential string or an external string.
   STATIC_ASSERT(kExternalStringTag != 0);
   STATIC_ASSERT((kConsStringTag & kExternalStringTag) == 0);
   __ testb(rbx, Immediate(kIsNotStringMask | kExternalStringTag));
   __ j(not_zero, &runtime);
   // String is a cons string.
-  __ movq(rdx, FieldOperand(rax, ConsString::kSecondOffset));
+  __ movq(rdx, FieldOperand(rdi, ConsString::kSecondOffset));
   __ Cmp(rdx, Factory::empty_string());
   __ j(not_equal, &runtime);
-  __ movq(rax, FieldOperand(rax, ConsString::kFirstOffset));
-  __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
+  __ movq(rdi, FieldOperand(rdi, ConsString::kFirstOffset));
+  __ movq(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
   // String is a cons string with empty second part.
-  // rax: first part of cons string.
+  // rdi: first part of cons string.
   // rbx: map of first part of cons string.
   // Is first part a flat two byte string?
   __ testb(FieldOperand(rbx, Map::kInstanceTypeOffset),
            Immediate(kStringRepresentationMask | kStringEncodingMask));
   STATIC_ASSERT((kSeqStringTag | kTwoByteStringTag) == 0);
   __ j(zero, &seq_two_byte_string);
   // Any other flat string must be ascii.
   __ testb(FieldOperand(rbx, Map::kInstanceTypeOffset),
            Immediate(kStringRepresentationMask));
   __ j(not_zero, &runtime);

   __ bind(&seq_ascii_string);
-  // rax: subject string (sequential ascii)
-  // rcx: RegExp data (FixedArray)
-  __ movq(r11, FieldOperand(rcx, JSRegExp::kDataAsciiCodeOffset));
-  __ Set(rdi, 1);  // Type is ascii.
+  // rdi: subject string (sequential ascii)
+  // rax: RegExp data (FixedArray)
+  __ movq(r11, FieldOperand(rax, JSRegExp::kDataAsciiCodeOffset));
+  __ Set(rcx, 1);  // Type is ascii.
   __ jmp(&check_code);

   __ bind(&seq_two_byte_string);
-  // rax: subject string (flat two-byte)
-  // rcx: RegExp data (FixedArray)
-  __ movq(r11, FieldOperand(rcx, JSRegExp::kDataUC16CodeOffset));
-  __ Set(rdi, 0);  // Type is two byte.
+  // rdi: subject string (flat two-byte)
+  // rax: RegExp data (FixedArray)
+  __ movq(r11, FieldOperand(rax, JSRegExp::kDataUC16CodeOffset));
+  __ Set(rcx, 0);  // Type is two byte.

   __ bind(&check_code);
   // Check that the irregexp code has been generated for the actual string
   // encoding. If it has, the field contains a code object; otherwise it
   // contains the hole.
   __ CmpObjectType(r11, CODE_TYPE, kScratchRegister);
   __ j(not_equal, &runtime);

-  // rax: subject string
-  // rdi: encoding of subject string (1 if ascii, 0 if two_byte);
+  // rdi: subject string
+  // rcx: encoding of subject string (1 if ascii, 0 if two_byte);
   // r11: code
   // Load used arguments before starting to push arguments for call to native
   // RegExp code to avoid handling changing stack height.
   __ SmiToInteger64(rbx, Operand(rsp, kPreviousIndexOffset));

-  // rax: subject string
+  // rdi: subject string
   // rbx: previous index
-  // rdi: encoding of subject string (1 if ascii 0 if two_byte);
+  // rcx: encoding of subject string (1 if ascii 0 if two_byte);
   // r11: code
   // All checks done. Now push arguments for native regexp code.
   __ IncrementCounter(&Counters::regexp_entry_native, 1);

-  // rsi is caller save on Windows and used to pass parameter on Linux.
-  __ push(rsi);
-
   static const int kRegExpExecuteArguments = 7;
-  __ PrepareCallCFunction(kRegExpExecuteArguments);
   int argument_slots_on_stack =
       masm->ArgumentStackSlotsForCFunctionCall(kRegExpExecuteArguments);
+  __ EnterApiExitFrame(argument_slots_on_stack);  // Clobbers rax!

   // Argument 7: Indicate that this is a direct call from JavaScript.
   __ movq(Operand(rsp, (argument_slots_on_stack - 1) * kPointerSize),
           Immediate(1));

   // Argument 6: Start (high end) of backtracking stack memory area.
   __ movq(kScratchRegister, address_of_regexp_stack_memory_address);
   __ movq(r9, Operand(kScratchRegister, 0));
   __ movq(kScratchRegister, address_of_regexp_stack_memory_size);
   __ addq(r9, Operand(kScratchRegister, 0));
(...skipping 16 matching lines...)
   Register arg2 = rdx;
   Register arg1 = rcx;
 #else
   Register arg4 = rcx;
   Register arg3 = rdx;
   Register arg2 = rsi;
   Register arg1 = rdi;
 #endif

   // Keep track of aliasing between argX defined above and the registers used.
-  // rax: subject string
+  // rdi: subject string
   // rbx: previous index
-  // rdi: encoding of subject string (1 if ascii 0 if two_byte);
+  // rcx: encoding of subject string (1 if ascii 0 if two_byte);
   // r11: code

   // Argument 4: End of string data
   // Argument 3: Start of string data
   NearLabel setup_two_byte, setup_rest;
-  __ testb(rdi, rdi);
+  __ testb(rcx, rcx);  // Last use of rcx as encoding of subject string.
   __ j(zero, &setup_two_byte);
-  __ SmiToInteger32(rdi, FieldOperand(rax, String::kLengthOffset));
-  __ lea(arg4, FieldOperand(rax, rdi, times_1, SeqAsciiString::kHeaderSize));
-  __ lea(arg3, FieldOperand(rax, rbx, times_1, SeqAsciiString::kHeaderSize));
+  __ SmiToInteger32(rcx, FieldOperand(rdi, String::kLengthOffset));
+  __ lea(arg4, FieldOperand(rdi, rcx, times_1, SeqAsciiString::kHeaderSize));
+  __ lea(arg3, FieldOperand(rdi, rbx, times_1, SeqAsciiString::kHeaderSize));
   __ jmp(&setup_rest);
   __ bind(&setup_two_byte);
-  __ SmiToInteger32(rdi, FieldOperand(rax, String::kLengthOffset));
-  __ lea(arg4, FieldOperand(rax, rdi, times_2, SeqTwoByteString::kHeaderSize));
-  __ lea(arg3, FieldOperand(rax, rbx, times_2, SeqTwoByteString::kHeaderSize));
+  __ SmiToInteger32(rcx, FieldOperand(rdi, String::kLengthOffset));
+  __ lea(arg4, FieldOperand(rdi, rcx, times_2, SeqTwoByteString::kHeaderSize));
+  __ lea(arg3, FieldOperand(rdi, rbx, times_2, SeqTwoByteString::kHeaderSize));

   __ bind(&setup_rest);
   // Argument 2: Previous index.
   __ movq(arg2, rbx);

   // Argument 1: Subject string.
-  __ movq(arg1, rax);
+#ifdef _WIN64
+  __ movq(arg1, rdi);
+#else
+  // Already there in AMD64 calling convention.
+  ASSERT(arg1.is(rdi));
+#endif

   // Locate the code entry and call it.
   __ addq(r11, Immediate(Code::kHeaderSize - kHeapObjectTag));
-  __ CallCFunction(r11, kRegExpExecuteArguments);
+  __ call(r11);

-  // rsi is caller save, as it is used to pass parameter.
-  __ pop(rsi);
+  __ LeaveApiExitFrame();

   // Check the result.
   NearLabel success;
+  Label exception;
   __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::SUCCESS));
   __ j(equal, &success);
-  NearLabel failure;
+  __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::EXCEPTION));
+  __ j(equal, &exception);
   __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::FAILURE));
-  __ j(equal, &failure);
-  __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::EXCEPTION));
-  // If not exception it can only be retry. Handle that in the runtime system.
+  // If none of the above, it can only be retry.
+  // Handle that in the runtime system.
   __ j(not_equal, &runtime);
-  // Result must now be exception. If there is no pending exception already a
-  // stack overflow (on the backtrack stack) was detected in RegExp code but
-  // haven't created the exception yet. Handle that in the runtime system.
-  // TODO(592): Rerunning the RegExp to get the stack overflow exception.
-  ExternalReference pending_exception_address(Top::k_pending_exception_address);
-  __ movq(kScratchRegister, pending_exception_address);
-  __ Cmp(kScratchRegister, Factory::the_hole_value());
-  __ j(equal, &runtime);
-  __ bind(&failure);
-  // For failure and exception return null.
-  __ Move(rax, Factory::null_value());
+
+  // For failure return null.
+  __ LoadRoot(rax, Heap::kNullValueRootIndex);
   __ ret(4 * kPointerSize);

   // Load RegExp data.
   __ bind(&success);
   __ movq(rax, Operand(rsp, kJSRegExpOffset));
   __ movq(rcx, FieldOperand(rax, JSRegExp::kDataOffset));
   __ SmiToInteger32(rax,
                     FieldOperand(rcx, JSRegExp::kIrregexpCaptureCountOffset));
   // Calculate number of capture registers (number_of_captures + 1) * 2.
   __ leal(rdx, Operand(rax, rax, times_1, 2));
(...skipping 44 matching lines...)
                     times_pointer_size,
                     RegExpImpl::kFirstCaptureOffset),
          rdi);
   __ jmp(&next_capture);
   __ bind(&done);

   // Return last match info.
   __ movq(rax, Operand(rsp, kLastMatchInfoOffset));
   __ ret(4 * kPointerSize);
+
+  __ bind(&exception);
+  // Result must now be exception. If there is no pending exception already,
+  // a stack overflow (on the backtrack stack) was detected in RegExp code,
+  // but the exception has not been created yet. Handle that in the runtime
+  // system.
+  // TODO(592): Rerunning the RegExp to get the stack overflow exception.
+  ExternalReference pending_exception_address(Top::k_pending_exception_address);
+  __ movq(rbx, pending_exception_address);
+  __ movq(rax, Operand(rbx, 0));
+  __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex);
+  __ cmpq(rax, rdx);
+  __ j(equal, &runtime);
+  __ movq(Operand(rbx, 0), rdx);
+
+  __ CompareRoot(rax, Heap::kTerminationExceptionRootIndex);
+  NearLabel termination_exception;
+  __ j(equal, &termination_exception);
+  __ Throw(rax);
+
+  __ bind(&termination_exception);
+  __ ThrowUncatchable(TERMINATION, rax);
+
   // Do the runtime call to execute the regexp.
   __ bind(&runtime);
   __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
 #endif  // V8_INTERPRETED_REGEXP
 }
2057 2603
2058 2604
2059 void RegExpConstructResultStub::Generate(MacroAssembler* masm) { 2605 void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
2060 const int kMaxInlineLength = 100; 2606 const int kMaxInlineLength = 100;
2061 Label slowcase; 2607 Label slowcase;
2062 Label done; 2608 Label done;
2063 __ movq(r8, Operand(rsp, kPointerSize * 3)); 2609 __ movq(r8, Operand(rsp, kPointerSize * 3));
2064 __ JumpIfNotSmi(r8, &slowcase); 2610 __ JumpIfNotSmi(r8, &slowcase);
2065 __ SmiToInteger32(rbx, r8); 2611 __ SmiToInteger32(rbx, r8);
2066 __ cmpl(rbx, Immediate(kMaxInlineLength)); 2612 __ cmpl(rbx, Immediate(kMaxInlineLength));
2067 __ j(above, &slowcase); 2613 __ j(above, &slowcase);
2068 // Smi-tagging is equivalent to multiplying by 2. 2614 // Smi-tagging is equivalent to multiplying by 2.
2069 STATIC_ASSERT(kSmiTag == 0); 2615 STATIC_ASSERT(kSmiTag == 0);
2070 STATIC_ASSERT(kSmiTagSize == 1); 2616 STATIC_ASSERT(kSmiTagSize == 1);
2071 // Allocate RegExpResult followed by FixedArray with size in ebx. 2617 // Allocate RegExpResult followed by FixedArray with size in rbx.
2072 // JSArray: [Map][empty properties][Elements][Length-smi][index][input] 2618 // JSArray: [Map][empty properties][Elements][Length-smi][index][input]
2073 // Elements: [Map][Length][..elements..] 2619 // Elements: [Map][Length][..elements..]
2074 __ AllocateInNewSpace(JSRegExpResult::kSize + FixedArray::kHeaderSize, 2620 __ AllocateInNewSpace(JSRegExpResult::kSize + FixedArray::kHeaderSize,
2075 times_pointer_size, 2621 times_pointer_size,
2076 rbx, // In: Number of elements. 2622 rbx, // In: Number of elements.
2077 rax, // Out: Start of allocation (tagged). 2623 rax, // Out: Start of allocation (tagged).
2078 rcx, // Out: End of allocation. 2624 rcx, // Out: End of allocation.
2079 rdx, // Scratch register 2625 rdx, // Scratch register
2080 &slowcase, 2626 &slowcase,
2081 TAG_OBJECT); 2627 TAG_OBJECT);
(...skipping 38 matching lines...) Expand 10 before | Expand all | Expand 10 after
2120 __ Move(rdx, Factory::the_hole_value()); 2666 __ Move(rdx, Factory::the_hole_value());
2121 __ lea(rcx, FieldOperand(rcx, FixedArray::kHeaderSize)); 2667 __ lea(rcx, FieldOperand(rcx, FixedArray::kHeaderSize));
2122 // Fill fixed array elements with hole. 2668 // Fill fixed array elements with hole.
2123 // rax: JSArray. 2669 // rax: JSArray.
2124 // rbx: Number of elements in array that remains to be filled, as int32. 2670 // rbx: Number of elements in array that remains to be filled, as int32.
2125 // rcx: Start of elements in FixedArray. 2671 // rcx: Start of elements in FixedArray.
2126 // rdx: the hole. 2672 // rdx: the hole.
2127 Label loop; 2673 Label loop;
2128 __ testl(rbx, rbx); 2674 __ testl(rbx, rbx);
2129 __ bind(&loop); 2675 __ bind(&loop);
2130 __ j(less_equal, &done); // Jump if ecx is negative or zero. 2676 __ j(less_equal, &done); // Jump if rcx is negative or zero.
2131 __ subl(rbx, Immediate(1)); 2677 __ subl(rbx, Immediate(1));
2132 __ movq(Operand(rcx, rbx, times_pointer_size, 0), rdx); 2678 __ movq(Operand(rcx, rbx, times_pointer_size, 0), rdx);
2133 __ jmp(&loop); 2679 __ jmp(&loop);
2134 2680
2135 __ bind(&done); 2681 __ bind(&done);
2136 __ ret(3 * kPointerSize); 2682 __ ret(3 * kPointerSize);
2137 2683
2138 __ bind(&slowcase); 2684 __ bind(&slowcase);
2139 __ TailCallRuntime(Runtime::kRegExpConstructResult, 3, 1); 2685 __ TailCallRuntime(Runtime::kRegExpConstructResult, 3, 1);
2140 } 2686 }
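The hole-fill loop above, restated as a hedged C++ sketch (the entry j(less_equal) also catches a zero-length result array):

#include <cassert>
#include <vector>

// rbx counts elements still to fill, rcx points at the element area,
// rdx holds the hole value; the loop runs count down to zero.
void FillWithHole(std::vector<int>* elements, int count, int hole) {
  while (count > 0) {           // testl(rbx, rbx) + j(less_equal, &done)
    --count;                    // subl(rbx, Immediate(1))
    (*elements)[count] = hole;  // movq(Operand(rcx, rbx, ...), rdx)
  }
}

int main() {
  std::vector<int> v(4, 0);
  FillWithHole(&v, 4, -1);
  for (int x : v) assert(x == -1);
  return 0;
}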
(...skipping 342 matching lines...)
   __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
            Immediate(1 << Map::kIsUndetectable));
   __ j(zero, &return_unequal);
   __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
            Immediate(1 << Map::kIsUndetectable));
   __ j(zero, &return_unequal);
   // The objects are both undetectable, so they both compare as the value
   // undefined, and are equal.
   __ Set(rax, EQUAL);
   __ bind(&return_unequal);
-  // Return non-equal by returning the non-zero object pointer in eax,
+  // Return non-equal by returning the non-zero object pointer in rax,
   // or return equal if we fell through to here.
   __ ret(0);
   __ bind(&not_both_objects);
 }

   // Push arguments below the return address to prepare jump to builtin.
   __ pop(rcx);
   __ push(rdx);
   __ push(rax);

(...skipping 86 matching lines...)
   __ movq(Operand(rsp, (argc_ + 1) * kPointerSize), rdi);
   __ Set(rax, argc_);
   __ Set(rbx, 0);
   __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
   Handle<Code> adaptor(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
   __ Jump(adaptor, RelocInfo::CODE_TARGET);
 }


 void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) {
-  // Check that the stack contains next handler, frame pointer, state and
-  // return address in that order.
-  STATIC_ASSERT(StackHandlerConstants::kFPOffset + kPointerSize ==
-                StackHandlerConstants::kStateOffset);
-  STATIC_ASSERT(StackHandlerConstants::kStateOffset + kPointerSize ==
-                StackHandlerConstants::kPCOffset);
-
-  ExternalReference handler_address(Top::k_handler_address);
-  __ movq(kScratchRegister, handler_address);
-  __ movq(rsp, Operand(kScratchRegister, 0));
-  // Get the next handler in the chain.
-  __ pop(rcx);
-  __ movq(Operand(kScratchRegister, 0), rcx);
-  __ pop(rbp);  // Pop frame pointer.
-  __ pop(rdx);  // Remove state.
-
-  // Before returning we restore the context from the frame pointer if not NULL.
-  // The frame pointer is NULL in the exception handler of a JS entry frame.
-  __ Set(rsi, 0);  // Tentatively set context pointer to NULL.
-  NearLabel skip;
-  __ cmpq(rbp, Immediate(0));
-  __ j(equal, &skip);
-  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
-  __ bind(&skip);
-  __ ret(0);
+  // Throw the exception in rax.
+  __ Throw(rax);
 }
2626 3149
2627 3150
2628 void CEntryStub::GenerateCore(MacroAssembler* masm, 3151 void CEntryStub::GenerateCore(MacroAssembler* masm,
2629 Label* throw_normal_exception, 3152 Label* throw_normal_exception,
2630 Label* throw_termination_exception, 3153 Label* throw_termination_exception,
2631 Label* throw_out_of_memory_exception, 3154 Label* throw_out_of_memory_exception,
2632 bool do_gc, 3155 bool do_gc,
2633 bool always_allocate_scope, 3156 bool always_allocate_scope) {
2634 int /* alignment_skew */) {
2635 // rax: result parameter for PerformGC, if any. 3157 // rax: result parameter for PerformGC, if any.
2636 // rbx: pointer to C function (C callee-saved). 3158 // rbx: pointer to C function (C callee-saved).
2637 // rbp: frame pointer (restored after C call). 3159 // rbp: frame pointer (restored after C call).
2638 // rsp: stack pointer (restored after C call). 3160 // rsp: stack pointer (restored after C call).
2639 // r14: number of arguments including receiver (C callee-saved). 3161 // r14: number of arguments including receiver (C callee-saved).
2640 // r12: pointer to the first argument (C callee-saved). 3162 // r12: pointer to the first argument (C callee-saved).
2641 // This pointer is reused in LeaveExitFrame(), so it is stored in a 3163 // This pointer is reused in LeaveExitFrame(), so it is stored in a
2642 // callee-saved register. 3164 // callee-saved register.
2643 3165
2644 // Simple results returned in rax (both AMD64 and Win64 calling conventions). 3166 // Simple results returned in rax (both AMD64 and Win64 calling conventions).
(...skipping 72 matching lines...)
2717 __ movq(rax, Operand(rsp, 6 * kPointerSize)); 3239 __ movq(rax, Operand(rsp, 6 * kPointerSize));
2718 __ movq(rdx, Operand(rsp, 7 * kPointerSize)); 3240 __ movq(rdx, Operand(rsp, 7 * kPointerSize));
2719 } 3241 }
2720 #endif 3242 #endif
2721 __ lea(rcx, Operand(rax, 1)); 3243 __ lea(rcx, Operand(rax, 1));
2722 // Lower 2 bits of rcx are 0 iff rax has failure tag. 3244 // Lower 2 bits of rcx are 0 iff rax has failure tag.
2723 __ testl(rcx, Immediate(kFailureTagMask)); 3245 __ testl(rcx, Immediate(kFailureTagMask));
2724 __ j(zero, &failure_returned); 3246 __ j(zero, &failure_returned);
2725 3247
2726 // Exit the JavaScript to C++ exit frame. 3248 // Exit the JavaScript to C++ exit frame.
2727 __ LeaveExitFrame(); 3249 __ LeaveExitFrame(save_doubles_);
2728 __ ret(0); 3250 __ ret(0);
2729 3251
2730 // Handling of failure. 3252 // Handling of failure.
2731 __ bind(&failure_returned); 3253 __ bind(&failure_returned);
2732 3254
2733 NearLabel retry; 3255 NearLabel retry;
2734 // If the returned exception is RETRY_AFTER_GC, continue at the retry label. 3256 // If the returned exception is RETRY_AFTER_GC, continue at the retry label.
2735 STATIC_ASSERT(Failure::RETRY_AFTER_GC == 0); 3257 STATIC_ASSERT(Failure::RETRY_AFTER_GC == 0);
2736 __ testl(rax, Immediate(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize)); 3258 __ testl(rax, Immediate(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize));
2737 __ j(zero, &retry); 3259 __ j(zero, &retry);
(...skipping 19 matching lines...)
2757 // Handle normal exception. 3279 // Handle normal exception.
2758 __ jmp(throw_normal_exception); 3280 __ jmp(throw_normal_exception);
2759 3281
2760 // Retry. 3282 // Retry.
2761 __ bind(&retry); 3283 __ bind(&retry);
2762 } 3284 }
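
The lea/testl pair in GenerateCore is a tag check in disguise: failure objects
in this generation of V8 carry the failure tag (value 3) in their low two
bits, so adding 1 clears exactly those bits. A hedged sketch of the test;
IsFailureResult is an illustrative name:

#include <cstdint>

const uint64_t kFailureTagMask = 3;  // (1 << kFailureTagSize) - 1

// Mirrors: lea(rcx, Operand(rax, 1)); testl(rcx, kFailureTagMask); j(zero).
bool IsFailureResult(uint64_t rax) {
  uint64_t rcx = rax + 1;  // low bits 11 become 00 only on a failure value
  return (rcx & kFailureTagMask) == 0;
}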
2763 3285
2764 3286
2765 void CEntryStub::GenerateThrowUncatchable(MacroAssembler* masm, 3287 void CEntryStub::GenerateThrowUncatchable(MacroAssembler* masm,
2766 UncatchableExceptionType type) { 3288 UncatchableExceptionType type) {
2767 // Fetch top stack handler. 3289 __ ThrowUncatchable(type, rax);
2768 ExternalReference handler_address(Top::k_handler_address);
2769 __ movq(kScratchRegister, handler_address);
2770 __ movq(rsp, Operand(kScratchRegister, 0));
2771
2772 // Unwind the handlers until the ENTRY handler is found.
2773 NearLabel loop, done;
2774 __ bind(&loop);
2775 // Load the type of the current stack handler.
2776 const int kStateOffset = StackHandlerConstants::kStateOffset;
2777 __ cmpq(Operand(rsp, kStateOffset), Immediate(StackHandler::ENTRY));
2778 __ j(equal, &done);
2779 // Fetch the next handler in the list.
2780 const int kNextOffset = StackHandlerConstants::kNextOffset;
2781 __ movq(rsp, Operand(rsp, kNextOffset));
2782 __ jmp(&loop);
2783 __ bind(&done);
2784
2785 // Set the top handler address to next handler past the current ENTRY handler.
2786 __ movq(kScratchRegister, handler_address);
2787 __ pop(Operand(kScratchRegister, 0));
2788
2789 if (type == OUT_OF_MEMORY) {
2790 // Set external caught exception to false.
2791 ExternalReference external_caught(Top::k_external_caught_exception_address);
2792 __ movq(rax, Immediate(false));
2793 __ store_rax(external_caught);
2794
2795 // Set pending exception and rax to out of memory exception.
2796 ExternalReference pending_exception(Top::k_pending_exception_address);
2797 __ movq(rax, Failure::OutOfMemoryException(), RelocInfo::NONE);
2798 __ store_rax(pending_exception);
2799 }
2800
2801 // Clear the context pointer.
2802 __ Set(rsi, 0);
2803
2804 // Restore registers from handler.
2805 STATIC_ASSERT(StackHandlerConstants::kNextOffset + kPointerSize ==
2806 StackHandlerConstants::kFPOffset);
2807 __ pop(rbp); // FP
2808 STATIC_ASSERT(StackHandlerConstants::kFPOffset + kPointerSize ==
2809 StackHandlerConstants::kStateOffset);
2810 __ pop(rdx); // State
2811
2812 STATIC_ASSERT(StackHandlerConstants::kStateOffset + kPointerSize ==
2813 StackHandlerConstants::kPCOffset);
2814 __ ret(0);
2815 } 3290 }
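
The replaced body walked the handler chain until it found the ENTRY handler of
the JS entry frame; MacroAssembler::ThrowUncatchable now encapsulates that
walk. A self-contained sketch of the loop, with illustrative types:

struct Handler { Handler* next; int state; };
enum HandlerState { TRY_CATCH, TRY_FINALLY, ENTRY };

// The cmpq(kStateOffset, ENTRY) / j(equal) / movq(rsp, kNextOffset) loop.
Handler* UnwindToEntry(Handler* top) {
  while (top->state != ENTRY) top = top->next;
  return top;  // the caller pops this record and returns through its pc
}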
2816 3291
2817 3292
2818 void CEntryStub::Generate(MacroAssembler* masm) { 3293 void CEntryStub::Generate(MacroAssembler* masm) {
2819 // rax: number of arguments including receiver 3294 // rax: number of arguments including receiver
2820 // rbx: pointer to C function (C callee-saved) 3295 // rbx: pointer to C function (C callee-saved)
2821 // rbp: frame pointer of calling JS frame (restored after C call) 3296 // rbp: frame pointer of calling JS frame (restored after C call)
2822 // rsp: stack pointer (restored after C call) 3297 // rsp: stack pointer (restored after C call)
2823 // rsi: current context (restored) 3298 // rsi: current context (restored)
2824 3299
2825 // NOTE: Invocations of builtins may return failure objects 3300 // NOTE: Invocations of builtins may return failure objects
2826 // instead of a proper result. The builtin entry handles 3301 // instead of a proper result. The builtin entry handles
2827 // this by performing a garbage collection and retrying the 3302 // this by performing a garbage collection and retrying the
2828 // builtin once. 3303 // builtin once.
2829 3304
2830 // Enter the exit frame that transitions from JavaScript to C++. 3305 // Enter the exit frame that transitions from JavaScript to C++.
2831 #ifdef _WIN64 3306 #ifdef _WIN64
2832 int arg_stack_space = (result_size_ < 2 ? 2 : 4); 3307 int arg_stack_space = (result_size_ < 2 ? 2 : 4);
2833 #else 3308 #else
2834 int arg_stack_space = 0; 3309 int arg_stack_space = 0;
2835 #endif 3310 #endif
2836 __ EnterExitFrame(arg_stack_space); 3311 __ EnterExitFrame(arg_stack_space, save_doubles_);
2837 3312
2838 // rax: Holds the context at this point, but should not be used. 3313 // rax: Holds the context at this point, but should not be used.
2839 // On entry to code generated by GenerateCore, it must hold 3314 // On entry to code generated by GenerateCore, it must hold
2840 // a failure result if the collect_garbage argument to GenerateCore 3315 // a failure result if the collect_garbage argument to GenerateCore
2841 // is true. This failure result can be the result of code 3316 // is true. This failure result can be the result of code
2842 // generated by a previous call to GenerateCore. The value 3317 // generated by a previous call to GenerateCore. The value
2843 // of rax is then passed to Runtime::PerformGC. 3318 // of rax is then passed to Runtime::PerformGC.
2844 // rbx: pointer to builtin function (C callee-saved). 3319 // rbx: pointer to builtin function (C callee-saved).
2845 // rbp: frame pointer of exit frame (restored after C call). 3320 // rbp: frame pointer of exit frame (restored after C call).
2846 // rsp: stack pointer (restored after C call). 3321 // rsp: stack pointer (restored after C call).
(...skipping 132 matching lines...)
2979 __ lea(kScratchRegister, FieldOperand(rax, Code::kHeaderSize)); 3454 __ lea(kScratchRegister, FieldOperand(rax, Code::kHeaderSize));
2980 __ call(kScratchRegister); 3455 __ call(kScratchRegister);
2981 3456
2982 // Unlink this frame from the handler chain. 3457 // Unlink this frame from the handler chain.
2983 __ movq(kScratchRegister, ExternalReference(Top::k_handler_address)); 3458 __ movq(kScratchRegister, ExternalReference(Top::k_handler_address));
2984 __ pop(Operand(kScratchRegister, 0)); 3459 __ pop(Operand(kScratchRegister, 0));
2985 // Pop next_sp. 3460 // Pop next_sp.
2986 __ addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize)); 3461 __ addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));
2987 3462
2988 #ifdef ENABLE_LOGGING_AND_PROFILING 3463 #ifdef ENABLE_LOGGING_AND_PROFILING
2989 // If current EBP value is the same as js_entry_sp value, it means that 3464 // If current RBP value is the same as js_entry_sp value, it means that
2990 // the current function is the outermost. 3465 // the current function is the outermost.
2991 __ movq(kScratchRegister, js_entry_sp); 3466 __ movq(kScratchRegister, js_entry_sp);
2992 __ cmpq(rbp, Operand(kScratchRegister, 0)); 3467 __ cmpq(rbp, Operand(kScratchRegister, 0));
2993 __ j(not_equal, &not_outermost_js_2); 3468 __ j(not_equal, &not_outermost_js_2);
2994 __ movq(Operand(kScratchRegister, 0), Immediate(0)); 3469 __ movq(Operand(kScratchRegister, 0), Immediate(0));
2995 __ bind(&not_outermost_js_2); 3470 __ bind(&not_outermost_js_2);
2996 #endif 3471 #endif
2997 3472
2998 // Restore the top frame descriptor from the stack. 3473 // Restore the top frame descriptor from the stack.
2999 __ bind(&exit); 3474 __ bind(&exit);
(...skipping 97 matching lines...)
3097 // We have to store a non-zero value in the cache. 3572 // We have to store a non-zero value in the cache.
3098 __ StoreRoot(kScratchRegister, Heap::kInstanceofCacheAnswerRootIndex); 3573 __ StoreRoot(kScratchRegister, Heap::kInstanceofCacheAnswerRootIndex);
3099 __ ret(2 * kPointerSize); 3574 __ ret(2 * kPointerSize);
3100 3575
3101 // Slow-case: Go through the JavaScript implementation. 3576 // Slow-case: Go through the JavaScript implementation.
3102 __ bind(&slow); 3577 __ bind(&slow);
3103 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION); 3578 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
3104 } 3579 }
3105 3580
3106 3581
3582 Register InstanceofStub::left() { return rax; }
3583
3584
3585 Register InstanceofStub::right() { return rdx; }
3586
3587
3107 int CompareStub::MinorKey() { 3588 int CompareStub::MinorKey() {
3108 // Encode the three parameters in a unique 16-bit value. To avoid duplicate 3589 // Encode the three parameters in a unique 16-bit value. To avoid duplicate
3109 // stubs, the never-NaN-NaN condition is only taken into account if the 3590 // stubs, the never-NaN-NaN condition is only taken into account if the
3110 // condition is equal. 3591 // condition is equal.
3111 ASSERT(static_cast<unsigned>(cc_) < (1 << 12)); 3592 ASSERT(static_cast<unsigned>(cc_) < (1 << 12));
3112 ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg)); 3593 ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg));
3113 return ConditionField::encode(static_cast<unsigned>(cc_)) 3594 return ConditionField::encode(static_cast<unsigned>(cc_))
3114 | RegisterField::encode(false) // lhs_ and rhs_ are not used 3595 | RegisterField::encode(false) // lhs_ and rhs_ are not used
3115 | StrictField::encode(strict_) 3596 | StrictField::encode(strict_)
3116 | NeverNanNanField::encode(cc_ == equal ? never_nan_nan_ : false) 3597 | NeverNanNanField::encode(cc_ == equal ? never_nan_nan_ : false)
(...skipping 1004 matching lines...)
4121 __ push(rcx); 4602 __ push(rcx);
4122 GenerateCompareFlatAsciiStrings(masm, rdx, rax, rcx, rbx, rdi, r8); 4603 GenerateCompareFlatAsciiStrings(masm, rdx, rax, rcx, rbx, rdi, r8);
4123 4604
4124 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater) 4605 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater)
4125 // tagged as a small integer. 4606 // tagged as a small integer.
4126 __ bind(&runtime); 4607 __ bind(&runtime);
4127 __ TailCallRuntime(Runtime::kStringCompare, 2, 1); 4608 __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
4128 } 4609 }
4129 4610
4130 void ICCompareStub::GenerateSmis(MacroAssembler* masm) { 4611 void ICCompareStub::GenerateSmis(MacroAssembler* masm) {
4131 UNIMPLEMENTED(); 4612 ASSERT(state_ == CompareIC::SMIS);
4613 NearLabel miss;
4614 __ JumpIfNotBothSmi(rdx, rax, &miss);
4615
4616 if (GetCondition() == equal) {
4617 // For equality we do not care about the sign of the result.
4618 __ subq(rax, rdx);
4619 } else {
4620 NearLabel done;
4621 __ subq(rdx, rax);
4622 __ j(no_overflow, &done);
4623 // Correct sign of result in case of overflow.
4624 __ SmiNot(rdx, rdx);
4625 __ bind(&done);
4626 __ movq(rax, rdx);
4627 }
4628 __ ret(0);
4629
4630 __ bind(&miss);
4631 GenerateMiss(masm);
4132 } 4632 }
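
The non-equality path relies on a subtraction trick: lhs - rhs has the correct
sign unless the subtraction overflows, and in that case flipping every bit
(SmiNot) restores the sign while keeping the result nonzero. A hedged model
over plain 64-bit integers, assuming smi-range inputs as guaranteed by the
JumpIfNotBothSmi guard; __builtin_sub_overflow (GCC/Clang) stands in for subq
plus the overflow flag:

#include <cstdint>

// Returns <0, 0, or >0, like the value the stub leaves in rax.
int64_t SmiCompare(int64_t lhs, int64_t rhs) {
  int64_t diff;
  if (__builtin_sub_overflow(lhs, rhs, &diff)) {
    diff = ~diff;  // correct the sign after signed overflow
  }
  return diff;
}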
4133 4633
4134 4634
4135 void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) { 4635 void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) {
4136 UNIMPLEMENTED(); 4636 ASSERT(state_ == CompareIC::HEAP_NUMBERS);
4637
4638 NearLabel generic_stub;
4639 NearLabel unordered;
4640 NearLabel miss;
4641 Condition either_smi = masm->CheckEitherSmi(rax, rdx);
4642 __ j(either_smi, &generic_stub);
4643
4644 __ CmpObjectType(rax, HEAP_NUMBER_TYPE, rcx);
4645 __ j(not_equal, &miss);
4646 __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rcx);
4647 __ j(not_equal, &miss);
4648
4649 // Load left and right operands.
4650 __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
4651 __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
4652
4653 // Compare operands
4654 __ ucomisd(xmm0, xmm1);
4655
4656 // Don't base result on EFLAGS when a NaN is involved.
4657 __ j(parity_even, &unordered);
4658
4659 // Return a result of -1, 0, or 1, based on EFLAGS.
4660 // Zero with mov, because xor would clobber the flags register.
4661 __ movl(rax, Immediate(0));
4662 __ movl(rcx, Immediate(0));
4663 __ setcc(above, rax); // Add one to zero if carry clear and not equal.
4664 __ sbbq(rax, rcx); // Subtract one if below (aka. carry set).
4665 __ ret(0);
4666
4667 __ bind(&unordered);
4668
4669 CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS);
4670 __ bind(&generic_stub);
4671 __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
4672
4673 __ bind(&miss);
4674 GenerateMiss(masm);
4137 } 4675 }
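
The movl/setcc/sbbq tail converts the ucomisd flags into -1, 0, or +1 without
a branch: ucomisd sets CF when the left operand is below the right and ZF when
they are equal, setcc(above) produces 1 only when both are clear, and sbb then
subtracts the carry back out. A hedged model of the arithmetic (NaNs are
excluded because the stub bails to &unordered on parity):

#include <cstdint>

// Models: movl(rax, 0); movl(rcx, 0); setcc(above, rax); sbbq(rax, rcx).
int64_t FlagsToTernary(double lhs, double rhs) {
  int64_t above = (lhs > rhs) ? 1 : 0;  // setcc(above): CF == 0 && ZF == 0
  int64_t carry = (lhs < rhs) ? 1 : 0;  // CF as set by ucomisd
  return above - carry;                 // sbb with rcx == 0 subtracts CF
}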
4138 4676
4139 4677
4140 void ICCompareStub::GenerateObjects(MacroAssembler* masm) { 4678 void ICCompareStub::GenerateObjects(MacroAssembler* masm) {
4141 UNIMPLEMENTED(); 4679 ASSERT(state_ == CompareIC::OBJECTS);
4680 NearLabel miss;
4681 Condition either_smi = masm->CheckEitherSmi(rdx, rax);
4682 __ j(either_smi, &miss);
4683
4684 __ CmpObjectType(rax, JS_OBJECT_TYPE, rcx);
4685 __ j(not_equal, &miss, not_taken);
4686 __ CmpObjectType(rdx, JS_OBJECT_TYPE, rcx);
4687 __ j(not_equal, &miss, not_taken);
4688
4689 ASSERT(GetCondition() == equal);
4690 __ subq(rax, rdx);
4691 __ ret(0);
4692
4693 __ bind(&miss);
4694 GenerateMiss(masm);
4142 } 4695 }
4143 4696
4144 4697
4145 void ICCompareStub::GenerateMiss(MacroAssembler* masm) { 4698 void ICCompareStub::GenerateMiss(MacroAssembler* masm) {
4146 UNIMPLEMENTED(); 4699 // Save the registers.
4700 __ pop(rcx);
4701 __ push(rdx);
4702 __ push(rax);
4703 __ push(rcx);
4704
4705 // Call the runtime system in a fresh internal frame.
4706 ExternalReference miss = ExternalReference(IC_Utility(IC::kCompareIC_Miss));
4707 __ EnterInternalFrame();
4708 __ push(rdx);
4709 __ push(rax);
4710 __ Push(Smi::FromInt(op_));
4711 __ CallExternalReference(miss, 3);
4712 __ LeaveInternalFrame();
4713
4714 // Compute the entry point of the rewritten stub.
4715 __ lea(rdi, FieldOperand(rax, Code::kHeaderSize));
4716
4717 // Restore registers.
4718 __ pop(rcx);
4719 __ pop(rax);
4720 __ pop(rdx);
4721 __ push(rcx);
4722
4723 // Do a tail call to the rewritten stub.
4724 __ jmp(rdi);
4725 }
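
GenerateMiss follows the usual inline-cache miss protocol: preserve the
operands, call the CompareIC_Miss runtime entry (which picks a stub
specialized for the operand types seen), and tail-jump to the returned code's
entry point, so future comparisons at this site run the rewritten stub. A
conceptual sketch; the signatures are illustrative, not the V8 API:

#include <cstdint>

using StubEntry = int64_t (*)(int64_t lhs, int64_t rhs);

// Stands in for IC_Utility(IC::kCompareIC_Miss): records the observed
// operand kinds and returns a better-specialized stub.
StubEntry CompareIC_Miss(int64_t lhs, int64_t rhs, int op);

int64_t MissPath(int64_t lhs, int64_t rhs, int op) {
  StubEntry rewritten = CompareIC_Miss(lhs, rhs, op);  // CallExternalReference
  return rewritten(lhs, rhs);                          // jmp(rdi): tail call
}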
4726
4727
4728 void GenerateFastPixelArrayLoad(MacroAssembler* masm,
4729 Register receiver,
4730 Register key,
4731 Register elements,
4732 Register untagged_key,
4733 Register result,
4734 Label* not_pixel_array,
4735 Label* key_not_smi,
4736 Label* out_of_range) {
4737 // Register use:
4738 // receiver - holds the receiver and is unchanged.
4739 // key - holds the key and is unchanged (must be a smi).
4740 // elements - is set to the receiver's elements if
4741 // the receiver doesn't have a pixel array or the
4742 // key is not a smi, otherwise it's the elements'
4743 // external pointer.
4744 // untagged_key - is set to the untagged key
4745
4746 // Some callers already have verified that the key is a smi. key_not_smi is
4747 // set to NULL as a sentinel for that case. Otherwise, an explicit check
4748 // that the key is a smi is emitted.
4749 if (key_not_smi != NULL) {
4750 __ JumpIfNotSmi(key, key_not_smi);
4751 } else {
4752 if (FLAG_debug_code) {
4753 __ AbortIfNotSmi(key);
4754 }
4755 }
4756 __ SmiToInteger32(untagged_key, key);
4757
4758 __ movq(elements, FieldOperand(receiver, JSObject::kElementsOffset));
4759 // By passing NULL as not_pixel_array, callers signal that they have already
4760 // verified that the receiver has pixel array elements.
4761 if (not_pixel_array != NULL) {
4762 __ CheckMap(elements, Factory::pixel_array_map(), not_pixel_array, true);
4763 } else {
4764 if (FLAG_debug_code) {
4765 // Map check should have already made sure that elements is a pixel array.
4766 __ Cmp(FieldOperand(elements, HeapObject::kMapOffset),
4767 Factory::pixel_array_map());
4768 __ Assert(equal, "Elements isn't a pixel array");
4769 }
4770 }
4771
4772 // Check that the smi is in range.
4773 __ cmpl(untagged_key, FieldOperand(elements, PixelArray::kLengthOffset));
4774 __ j(above_equal, out_of_range); // unsigned check handles negative keys.
4775
4776 // Load and tag the element as a smi.
4777 __ movq(elements, FieldOperand(elements, PixelArray::kExternalPointerOffset));
4778 __ movzxbq(result, Operand(elements, untagged_key, times_1, 0));
4779 __ Integer32ToSmi(result, result);
4780 __ ret(0);
4781 }
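
The fast pixel-array load reduces to: untag the smi key, bounds-check it with
a single unsigned comparison (which also rejects negative keys), load the
byte, and re-tag the result. A hedged C++ model; the PixelArray type here is
illustrative, not V8's:

#include <cstdint>

struct PixelArray { int32_t length; const uint8_t* external_pointer; };

bool FastPixelLoad(const PixelArray& a, int32_t untagged_key, int32_t* result) {
  // j(above_equal, out_of_range): one unsigned check covers key < 0 too.
  if (static_cast<uint32_t>(untagged_key) >= static_cast<uint32_t>(a.length)) {
    return false;
  }
  *result = a.external_pointer[untagged_key];  // movzxbq, then Integer32ToSmi
  return true;
}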
4782
4783
4784 // Stores an indexed element into a pixel array, clamping the stored value.
4785 void GenerateFastPixelArrayStore(MacroAssembler* masm,
4786 Register receiver,
4787 Register key,
4788 Register value,
4789 Register elements,
4790 Register scratch1,
4791 bool load_elements_from_receiver,
4792 bool key_is_untagged,
4793 Label* key_not_smi,
4794 Label* value_not_smi,
4795 Label* not_pixel_array,
4796 Label* out_of_range) {
4797 // Register use:
4798 // receiver - holds the receiver and is unchanged.
4799 // key - holds the key (must be a smi) and is unchanged.
4800 // value - holds the value (must be a smi) and is unchanged.
4801 // elements - holds the elements object of the receiver on entry if
4802 // load_elements_from_receiver is false, otherwise used
4803 // internally to store the pixel array's elements and
4804 // external array pointer.
4805 //
4806 Register external_pointer = elements;
4807 Register untagged_key = scratch1;
4808 Register untagged_value = receiver; // Only set once success is guaranteed.
4809
4810 // Fetch the receiver's elements if the caller hasn't already done so.
4811 if (load_elements_from_receiver) {
4812 __ movq(elements, FieldOperand(receiver, JSObject::kElementsOffset));
4813 }
4814
4815 // By passing NULL as not_pixel_array, callers signal that they have already
4816 // verified that the receiver has pixel array elements.
4817 if (not_pixel_array != NULL) {
4818 __ CheckMap(elements, Factory::pixel_array_map(), not_pixel_array, true);
4819 } else {
4820 if (FLAG_debug_code) {
4821 // Map check should have already made sure that elements is a pixel array.
4822 __ Cmp(FieldOperand(elements, HeapObject::kMapOffset),
4823 Factory::pixel_array_map());
4824 __ Assert(equal, "Elements isn't a pixel array");
4825 }
4826 }
4827
4828 // Key must be a smi and it must be in range.
4829 if (key_is_untagged) {
4830 untagged_key = key;
4831 } else {
4832 // Some callers already have verified that the key is a smi. key_not_smi is
4833 // set to NULL as a sentinel for that case. Otherwise, an explicit
4834 // check that the key is a smi is emitted.
4835 if (key_not_smi != NULL) {
4836 __ JumpIfNotSmi(key, key_not_smi);
4837 } else {
4838 if (FLAG_debug_code) {
4839 __ AbortIfNotSmi(key);
4840 }
4841 }
4842 __ SmiToInteger32(untagged_key, key);
4843 }
4844 __ cmpl(untagged_key, FieldOperand(elements, PixelArray::kLengthOffset));
4845 __ j(above_equal, out_of_range); // unsigned check handles negative keys.
4846
4847 // Value must be a smi.
4848 __ JumpIfNotSmi(value, value_not_smi);
4849 __ SmiToInteger32(untagged_value, value);
4850
4851 { // Clamp the value to [0..255].
4852 NearLabel done;
4853 __ testl(untagged_value, Immediate(0xFFFFFF00));
4854 __ j(zero, &done);
4855 __ setcc(negative, untagged_value); // 1 if negative, 0 if positive.
4856 __ decb(untagged_value); // 0 if negative, 255 if positive.
4857 __ bind(&done);
4858 }
4859
4860 __ movq(external_pointer,
4861 FieldOperand(elements, PixelArray::kExternalPointerOffset));
4862 __ movb(Operand(external_pointer, untagged_key, times_1, 0), untagged_value);
4863 __ ret(0); // Return value in rax.
4147 } 4864 }
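
The clamp block above is branch-light: values already in [0, 255] take the
early exit, and for everything else setcc(negative) plus a byte-wide decrement
yields 0 for negative inputs and 255 for positive overflow. A hedged C++
equivalent:

#include <cstdint>

uint8_t ClampToByte(int32_t v) {
  if ((v & 0xFFFFFF00) == 0) {            // testl + j(zero, &done)
    return static_cast<uint8_t>(v);       // already in range
  }
  uint8_t is_negative = (v < 0) ? 1 : 0;  // setcc(negative, untagged_value)
  return static_cast<uint8_t>(is_negative - 1);  // decb: 1 -> 0, 0 -> 255
}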
4148 4865
4149 #undef __ 4866 #undef __
4150 4867
4151 } } // namespace v8::internal 4868 } } // namespace v8::internal
4152 4869
4153 #endif // V8_TARGET_ARCH_X64 4870 #endif // V8_TARGET_ARCH_X64