OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_MIPS64 | 5 #if V8_TARGET_ARCH_MIPS64 |
6 | 6 |
7 #include "src/codegen.h" | 7 #include "src/codegen.h" |
8 #include "src/debug/debug.h" | 8 #include "src/debug/debug.h" |
9 #include "src/deoptimizer.h" | 9 #include "src/deoptimizer.h" |
10 #include "src/full-codegen/full-codegen.h" | 10 #include "src/full-codegen/full-codegen.h" |
(...skipping 1231 matching lines...)
1242 __ CallRuntime(Runtime::kStackGuard, 0); | 1242 __ CallRuntime(Runtime::kStackGuard, 0); |
1243 } | 1243 } |
1244 __ Jump(masm->isolate()->builtins()->OnStackReplacement(), | 1244 __ Jump(masm->isolate()->builtins()->OnStackReplacement(), |
1245 RelocInfo::CODE_TARGET); | 1245 RelocInfo::CODE_TARGET); |
1246 | 1246 |
1247 __ bind(&ok); | 1247 __ bind(&ok); |
1248 __ Ret(); | 1248 __ Ret(); |
1249 } | 1249 } |
1250 | 1250 |
1251 | 1251 |
1252 // static | |
1253 void Builtins::Generate_FunctionCall(MacroAssembler* masm) { | 1252 void Builtins::Generate_FunctionCall(MacroAssembler* masm) { |
1254 // 1. Make sure we have at least one argument. | 1253 // 1. Make sure we have at least one argument. |
1255 // a0: actual number of arguments | 1254 // a0: actual number of arguments |
1256 { | 1255 { Label done; |
1257 Label done; | |
1258 __ Branch(&done, ne, a0, Operand(zero_reg)); | 1256 __ Branch(&done, ne, a0, Operand(zero_reg)); |
1259 __ PushRoot(Heap::kUndefinedValueRootIndex); | 1257 __ LoadRoot(a6, Heap::kUndefinedValueRootIndex); |
| 1258 __ push(a6); |
1260 __ Daddu(a0, a0, Operand(1)); | 1259 __ Daddu(a0, a0, Operand(1)); |
1261 __ bind(&done); | 1260 __ bind(&done); |
1262 } | 1261 } |
1263 | 1262 |
1264 // 2. Get the function to call (passed as receiver) from the stack. | 1263 // 2. Get the function to call (passed as receiver) from the stack, check |
| 1264 // if it is a function. |
1265 // a0: actual number of arguments | 1265 // a0: actual number of arguments |
| 1266 Label slow, non_function; |
1266 __ dsll(at, a0, kPointerSizeLog2); | 1267 __ dsll(at, a0, kPointerSizeLog2); |
1267 __ daddu(at, sp, at); | 1268 __ daddu(at, sp, at); |
1268 __ ld(a1, MemOperand(at)); | 1269 __ ld(a1, MemOperand(at)); |
| 1270 __ JumpIfSmi(a1, &non_function); |
| 1271 __ GetObjectType(a1, a2, a2); |
| 1272 __ Branch(&slow, ne, a2, Operand(JS_FUNCTION_TYPE)); |
1269 | 1273 |
1270 // 3. Shift arguments and return address one slot down on the stack | 1274 // 3a. Patch the first argument if necessary when calling a function. |
| 1275 // a0: actual number of arguments |
| 1276 // a1: function |
| 1277 Label shift_arguments; |
| 1278 __ li(a4, Operand(0, RelocInfo::NONE32)); // Indicate regular JS_FUNCTION. |
| 1279 { Label convert_to_object, use_global_proxy, patch_receiver; |
| 1280 // Change context eagerly in case we need the global receiver. |
| 1281 __ ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); |
| 1282 |
| 1283 // Do not transform the receiver for strict mode functions. |
| 1284 __ ld(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); |
| 1285 __ lbu(a3, FieldMemOperand(a2, SharedFunctionInfo::kStrictModeByteOffset)); |
| 1286 __ And(a7, a3, Operand(1 << SharedFunctionInfo::kStrictModeBitWithinByte)); |
| 1287 __ Branch(&shift_arguments, ne, a7, Operand(zero_reg)); |
| 1288 |
| 1289 // Do not transform the receiver for native (Compilerhints already in a3). |
| 1290 __ lbu(a3, FieldMemOperand(a2, SharedFunctionInfo::kNativeByteOffset)); |
| 1291 __ And(a7, a3, Operand(1 << SharedFunctionInfo::kNativeBitWithinByte)); |
| 1292 __ Branch(&shift_arguments, ne, a7, Operand(zero_reg)); |
| 1293 |
| 1294 // Compute the receiver in sloppy mode. |
| 1295 // Load first argument in a2. a2 = -kPointerSize(sp + n_args << 2). |
| 1296 __ dsll(at, a0, kPointerSizeLog2); |
| 1297 __ daddu(a2, sp, at); |
| 1298 __ ld(a2, MemOperand(a2, -kPointerSize)); |
| 1299 // a0: actual number of arguments |
| 1300 // a1: function |
| 1301 // a2: first argument |
| 1302 __ JumpIfSmi(a2, &convert_to_object, a6); |
| 1303 |
| 1304 __ LoadRoot(a3, Heap::kUndefinedValueRootIndex); |
| 1305 __ Branch(&use_global_proxy, eq, a2, Operand(a3)); |
| 1306 __ LoadRoot(a3, Heap::kNullValueRootIndex); |
| 1307 __ Branch(&use_global_proxy, eq, a2, Operand(a3)); |
| 1308 |
| 1309 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); |
| 1310 __ GetObjectType(a2, a3, a3); |
| 1311 __ Branch(&shift_arguments, ge, a3, Operand(FIRST_SPEC_OBJECT_TYPE)); |
| 1312 |
| 1313 __ bind(&convert_to_object); |
| 1314 // Enter an internal frame in order to preserve argument count. |
| 1315 { |
| 1316 FrameScope scope(masm, StackFrame::INTERNAL); |
| 1317 __ SmiTag(a0); |
| 1318 __ Push(a0); |
| 1319 __ mov(a0, a2); |
| 1320 ToObjectStub stub(masm->isolate()); |
| 1321 __ CallStub(&stub); |
| 1322 __ mov(a2, v0); |
| 1323 |
| 1324 __ pop(a0); |
| 1325 __ SmiUntag(a0); |
| 1326 // Leave internal frame. |
| 1327 } |
| 1328 // Restore the function to a1, and the flag to a4. |
| 1329 __ dsll(at, a0, kPointerSizeLog2); |
| 1330 __ daddu(at, sp, at); |
| 1331 __ ld(a1, MemOperand(at)); |
| 1332 __ Branch(USE_DELAY_SLOT, &patch_receiver); |
| 1333 __ li(a4, Operand(0, RelocInfo::NONE32)); |
| 1334 |
| 1335 __ bind(&use_global_proxy); |
| 1336 __ ld(a2, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX)); |
| 1337 __ ld(a2, FieldMemOperand(a2, GlobalObject::kGlobalProxyOffset)); |
| 1338 |
| 1339 __ bind(&patch_receiver); |
| 1340 __ dsll(at, a0, kPointerSizeLog2); |
| 1341 __ daddu(a3, sp, at); |
| 1342 __ sd(a2, MemOperand(a3, -kPointerSize)); |
| 1343 |
| 1344 __ Branch(&shift_arguments); |
| 1345 } |
| 1346 |
| 1347 // 3b. Check for function proxy. |
| 1348 __ bind(&slow); |
| 1349 __ li(a4, Operand(1, RelocInfo::NONE32)); // Indicate function proxy. |
| 1350 __ Branch(&shift_arguments, eq, a2, Operand(JS_FUNCTION_PROXY_TYPE)); |
| 1351 |
| 1352 __ bind(&non_function); |
| 1353 __ li(a4, Operand(2, RelocInfo::NONE32)); // Indicate non-function. |
| 1354 |
| 1355 // 3c. Patch the first argument when calling a non-function. The |
| 1356 // CALL_NON_FUNCTION builtin expects the non-function callee as |
| 1357 // receiver, so overwrite the first argument which will ultimately |
| 1358 // become the receiver. |
| 1359 // a0: actual number of arguments |
| 1360 // a1: function |
| 1361 // a4: call type (0: JS function, 1: function proxy, 2: non-function) |
| 1362 __ dsll(at, a0, kPointerSizeLog2); |
| 1363 __ daddu(a2, sp, at); |
| 1364 __ sd(a1, MemOperand(a2, -kPointerSize)); |
| 1365 |
| 1366 // 4. Shift arguments and return address one slot down on the stack |
1271 // (overwriting the original receiver). Adjust argument count to make | 1367 // (overwriting the original receiver). Adjust argument count to make |
1272 // the original first argument the new receiver. | 1368 // the original first argument the new receiver. |
1273 // a0: actual number of arguments | 1369 // a0: actual number of arguments |
1274 // a1: function | 1370 // a1: function |
1275 { | 1371 // a4: call type (0: JS function, 1: function proxy, 2: non-function) |
1276 Label loop; | 1372 __ bind(&shift_arguments); |
| 1373 { Label loop; |
1277 // Calculate the copy start address (destination). Copy end address is sp. | 1374 // Calculate the copy start address (destination). Copy end address is sp. |
1278 __ dsll(at, a0, kPointerSizeLog2); | 1375 __ dsll(at, a0, kPointerSizeLog2); |
1279 __ daddu(a2, sp, at); | 1376 __ daddu(a2, sp, at); |
1280 | 1377 |
1281 __ bind(&loop); | 1378 __ bind(&loop); |
1282 __ ld(at, MemOperand(a2, -kPointerSize)); | 1379 __ ld(at, MemOperand(a2, -kPointerSize)); |
1283 __ sd(at, MemOperand(a2)); | 1380 __ sd(at, MemOperand(a2)); |
1284 __ Dsubu(a2, a2, Operand(kPointerSize)); | 1381 __ Dsubu(a2, a2, Operand(kPointerSize)); |
1285 __ Branch(&loop, ne, a2, Operand(sp)); | 1382 __ Branch(&loop, ne, a2, Operand(sp)); |
1286 // Adjust the actual number of arguments and remove the top element | 1383 // Adjust the actual number of arguments and remove the top element |
1287 // (which is a copy of the last argument). | 1384 // (which is a copy of the last argument). |
1288 __ Dsubu(a0, a0, Operand(1)); | 1385 __ Dsubu(a0, a0, Operand(1)); |
1289 __ Pop(); | 1386 __ Pop(); |
1290 } | 1387 } |
1291 | 1388 |
1292 // 4. Call the callable. | 1389 // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin, |
1293 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); | 1390 // or a function proxy via CALL_FUNCTION_PROXY. |
| 1391 // a0: actual number of arguments |
| 1392 // a1: function |
| 1393 // a4: call type (0: JS function, 1: function proxy, 2: non-function) |
| 1394 { Label function, non_proxy; |
| 1395 __ Branch(&function, eq, a4, Operand(zero_reg)); |
| 1396 // Expected number of arguments is 0 for CALL_NON_FUNCTION. |
| 1397 __ mov(a2, zero_reg); |
| 1398 __ Branch(&non_proxy, ne, a4, Operand(1)); |
| 1399 |
| 1400 __ push(a1); // Re-add proxy object as additional argument. |
| 1401 __ Daddu(a0, a0, Operand(1)); |
| 1402 __ GetBuiltinFunction(a1, Context::CALL_FUNCTION_PROXY_BUILTIN_INDEX); |
| 1403 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), |
| 1404 RelocInfo::CODE_TARGET); |
| 1405 |
| 1406 __ bind(&non_proxy); |
| 1407 __ GetBuiltinFunction(a1, Context::CALL_NON_FUNCTION_BUILTIN_INDEX); |
| 1408 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), |
| 1409 RelocInfo::CODE_TARGET); |
| 1410 __ bind(&function); |
| 1411 } |
| 1412 |
| 1413 // 5b. Get the code to call from the function and check that the number of |
| 1414 // expected arguments matches what we're providing. If so, jump |
| 1415 // (tail-call) to the code in register a3 without checking arguments. |
| 1416 // a0: actual number of arguments |
| 1417 // a1: function |
| 1418 __ ld(a3, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); |
| 1419 // The argument count is stored as int32_t on 64-bit platforms. |
| 1420 // TODO(plind): Smi on 32-bit platforms. |
| 1421 __ lw(a2, |
| 1422 FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset)); |
| 1423 // Check formal and actual parameter counts. |
| 1424 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), |
| 1425 RelocInfo::CODE_TARGET, ne, a2, Operand(a0)); |
| 1426 |
| 1427 __ ld(a3, FieldMemOperand(a1, JSFunction::kCodeEntryOffset)); |
| 1428 ParameterCount expected(0); |
| 1429 __ InvokeCode(a3, expected, expected, JUMP_FUNCTION, NullCallWrapper()); |
1294 } | 1430 } |
1295 | 1431 |
1296 | 1432 |
1297 static void Generate_PushAppliedArguments(MacroAssembler* masm, | 1433 static void Generate_PushAppliedArguments(MacroAssembler* masm, |
1298 const int argumentsOffset, | 1434 const int argumentsOffset, |
1299 const int indexOffset, | 1435 const int indexOffset, |
1300 const int limitOffset) { | 1436 const int limitOffset) { |
1301 Label entry, loop; | 1437 Label entry, loop; |
1302 Register receiver = LoadDescriptor::ReceiverRegister(); | 1438 Register receiver = LoadDescriptor::ReceiverRegister(); |
1303 Register key = LoadDescriptor::NameRegister(); | 1439 Register key = LoadDescriptor::NameRegister(); |
(...skipping 43 matching lines...)
1347 const int kFormalParameters = targetIsArgument ? 3 : 2; | 1483 const int kFormalParameters = targetIsArgument ? 3 : 2; |
1348 const int kStackSize = kFormalParameters + 1; | 1484 const int kStackSize = kFormalParameters + 1; |
1349 | 1485 |
1350 { | 1486 { |
1351 FrameScope frame_scope(masm, StackFrame::INTERNAL); | 1487 FrameScope frame_scope(masm, StackFrame::INTERNAL); |
1352 const int kArgumentsOffset = kFPOnStackSize + kPCOnStackSize; | 1488 const int kArgumentsOffset = kFPOnStackSize + kPCOnStackSize; |
1353 const int kReceiverOffset = kArgumentsOffset + kPointerSize; | 1489 const int kReceiverOffset = kArgumentsOffset + kPointerSize; |
1354 const int kFunctionOffset = kReceiverOffset + kPointerSize; | 1490 const int kFunctionOffset = kReceiverOffset + kPointerSize; |
1355 | 1491 |
1356 __ ld(a0, MemOperand(fp, kFunctionOffset)); // Get the function. | 1492 __ ld(a0, MemOperand(fp, kFunctionOffset)); // Get the function. |
1357 __ ld(a1, MemOperand(fp, kArgumentsOffset)); // Get the args array. | 1493 __ push(a0); |
1358 __ Push(a0, a1); | 1494 __ ld(a0, MemOperand(fp, kArgumentsOffset)); // Get the args array. |
| 1495 __ push(a0); |
1359 | 1496 |
1360 // Returns (in v0) number of arguments to copy to stack as Smi. | 1497 // Returns (in v0) number of arguments to copy to stack as Smi. |
1361 if (targetIsArgument) { | 1498 if (targetIsArgument) { |
1362 __ InvokeBuiltin(Context::REFLECT_APPLY_PREPARE_BUILTIN_INDEX, | 1499 __ InvokeBuiltin(Context::REFLECT_APPLY_PREPARE_BUILTIN_INDEX, |
1363 CALL_FUNCTION); | 1500 CALL_FUNCTION); |
1364 } else { | 1501 } else { |
1365 __ InvokeBuiltin(Context::APPLY_PREPARE_BUILTIN_INDEX, CALL_FUNCTION); | 1502 __ InvokeBuiltin(Context::APPLY_PREPARE_BUILTIN_INDEX, CALL_FUNCTION); |
1366 } | 1503 } |
1367 | 1504 |
1368 // Returns the result in v0. | 1505 // Returns the result in v0. |
1369 Generate_CheckStackOverflow(masm, kFunctionOffset, v0, kArgcIsSmiTagged); | 1506 Generate_CheckStackOverflow(masm, kFunctionOffset, v0, kArgcIsSmiTagged); |
1370 | 1507 |
1371 // Push current limit and index. | 1508 // Push current limit and index. |
1372 const int kIndexOffset = | 1509 const int kIndexOffset = |
1373 StandardFrameConstants::kExpressionsOffset - (2 * kPointerSize); | 1510 StandardFrameConstants::kExpressionsOffset - (2 * kPointerSize); |
1374 const int kLimitOffset = | 1511 const int kLimitOffset = |
1375 StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize); | 1512 StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize); |
1376 __ mov(a1, zero_reg); | 1513 __ mov(a1, zero_reg); |
1377 __ ld(a2, MemOperand(fp, kReceiverOffset)); | 1514 __ Push(v0, a1); // Limit and initial index. |
1378 __ Push(v0, a1, a2); // limit, initial index and receiver. | 1515 |
| 1516 // Get the receiver. |
| 1517 __ ld(a0, MemOperand(fp, kReceiverOffset)); |
| 1518 |
| 1519 // Check that the function is a JS function (otherwise it must be a proxy). |
| 1520 Label push_receiver; |
| 1521 __ ld(a1, MemOperand(fp, kFunctionOffset)); |
| 1522 __ GetObjectType(a1, a2, a2); |
| 1523 __ Branch(&push_receiver, ne, a2, Operand(JS_FUNCTION_TYPE)); |
| 1524 |
| 1525 // Change context eagerly to get the right global object if necessary. |
| 1526 __ ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); |
| 1527 // Load the shared function info while the function is still in a1. |
| 1528 __ ld(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); |
| 1529 |
| 1530 // Compute the receiver. |
| 1531 // Do not transform the receiver for strict mode functions. |
| 1532 Label call_to_object, use_global_proxy; |
| 1533 __ lbu(a7, FieldMemOperand(a2, SharedFunctionInfo::kStrictModeByteOffset)); |
| 1534 __ And(a7, a7, Operand(1 << SharedFunctionInfo::kStrictModeBitWithinByte)); |
| 1535 __ Branch(&push_receiver, ne, a7, Operand(zero_reg)); |
| 1536 |
| 1537 // Do not transform the receiver for native (Compilerhints already in a2). |
| 1538 __ lbu(a7, FieldMemOperand(a2, SharedFunctionInfo::kNativeByteOffset)); |
| 1539 __ And(a7, a7, Operand(1 << SharedFunctionInfo::kNativeBitWithinByte)); |
| 1540 __ Branch(&push_receiver, ne, a7, Operand(zero_reg)); |
| 1541 |
| 1542 // Compute the receiver in sloppy mode. |
| 1543 __ JumpIfSmi(a0, &call_to_object); |
| 1544 __ LoadRoot(a1, Heap::kNullValueRootIndex); |
| 1545 __ Branch(&use_global_proxy, eq, a0, Operand(a1)); |
| 1546 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); |
| 1547 __ Branch(&use_global_proxy, eq, a0, Operand(a2)); |
| 1548 |
| 1549 // Check if the receiver is already a JavaScript object. |
| 1550 // a0: receiver |
| 1551 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); |
| 1552 __ GetObjectType(a0, a1, a1); |
| 1553 __ Branch(&push_receiver, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE)); |
| 1554 |
| 1555 // Convert the receiver to a regular object. |
| 1556 // a0: receiver |
| 1557 __ bind(&call_to_object); |
| 1558 ToObjectStub stub(masm->isolate()); |
| 1559 __ CallStub(&stub); |
| 1560 __ mov(a0, v0); // Put object in a0 to match other paths to push_receiver. |
| 1561 __ Branch(&push_receiver); |
| 1562 |
| 1563 __ bind(&use_global_proxy); |
| 1564 __ ld(a0, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX)); |
| 1565 __ ld(a0, FieldMemOperand(a0, GlobalObject::kGlobalProxyOffset)); |
| 1566 |
| 1567 // Push the receiver. |
| 1568 // a0: receiver |
| 1569 __ bind(&push_receiver); |
| 1570 __ push(a0); |
1379 | 1571 |
1380 // Copy all arguments from the array to the stack. | 1572 // Copy all arguments from the array to the stack. |
1381 Generate_PushAppliedArguments(masm, kArgumentsOffset, kIndexOffset, | 1573 Generate_PushAppliedArguments( |
1382 kLimitOffset); | 1574 masm, kArgumentsOffset, kIndexOffset, kLimitOffset); |
1383 | 1575 |
1384 // Call the callable. | 1576 // Call the function. |
1385 // TODO(bmeurer): This should be a tail call according to ES6. | 1577 Label call_proxy; |
| 1578 ParameterCount actual(a0); |
1386 __ ld(a1, MemOperand(fp, kFunctionOffset)); | 1579 __ ld(a1, MemOperand(fp, kFunctionOffset)); |
1387 __ Call(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); | 1580 __ GetObjectType(a1, a2, a2); |
| 1581 __ Branch(&call_proxy, ne, a2, Operand(JS_FUNCTION_TYPE)); |
1388 | 1582 |
| 1583 __ InvokeFunction(a1, actual, CALL_FUNCTION, NullCallWrapper()); |
| 1584 |
| 1585 frame_scope.GenerateLeaveFrame(); |
| 1586 __ Ret(USE_DELAY_SLOT); |
| 1587 __ Daddu(sp, sp, Operand(kStackSize * kPointerSize)); // In delay slot. |
| 1588 |
| 1589 // Call the function proxy. |
| 1590 __ bind(&call_proxy); |
| 1591 __ push(a1); // Add function proxy as last argument. |
| 1592 __ Daddu(a0, a0, Operand(1)); |
| 1593 __ li(a2, Operand(0, RelocInfo::NONE32)); |
| 1594 __ GetBuiltinFunction(a1, Context::CALL_FUNCTION_PROXY_BUILTIN_INDEX); |
| 1595 __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), |
| 1596 RelocInfo::CODE_TARGET); |
1389 // Tear down the internal frame and remove function, receiver and args. | 1597 // Tear down the internal frame and remove function, receiver and args. |
1390 } | 1598 } |
1391 | 1599 |
1392 __ Ret(USE_DELAY_SLOT); | 1600 __ Ret(USE_DELAY_SLOT); |
1393 __ Daddu(sp, sp, Operand(kStackSize * kPointerSize)); // In delay slot. | 1601 __ Daddu(sp, sp, Operand(kStackSize * kPointerSize)); // In delay slot. |
1394 } | 1602 } |
1395 | 1603 |
1396 | 1604 |
1397 static void Generate_ConstructHelper(MacroAssembler* masm) { | 1605 static void Generate_ConstructHelper(MacroAssembler* masm) { |
1398 const int kFormalParameters = 3; | 1606 const int kFormalParameters = 3; |
(...skipping 116 matching lines...)
1515 kPointerSize))); | 1723 kPointerSize))); |
1516 __ mov(sp, fp); | 1724 __ mov(sp, fp); |
1517 __ MultiPop(fp.bit() | ra.bit()); | 1725 __ MultiPop(fp.bit() | ra.bit()); |
1518 __ SmiScale(a4, a1, kPointerSizeLog2); | 1726 __ SmiScale(a4, a1, kPointerSizeLog2); |
1519 __ Daddu(sp, sp, a4); | 1727 __ Daddu(sp, sp, a4); |
1520 // Adjust for the receiver. | 1728 // Adjust for the receiver. |
1521 __ Daddu(sp, sp, Operand(kPointerSize)); | 1729 __ Daddu(sp, sp, Operand(kPointerSize)); |
1522 } | 1730 } |
1523 | 1731 |
1524 | 1732 |
1525 // static | |
1526 void Builtins::Generate_CallFunction(MacroAssembler* masm) { | |
1527 // ----------- S t a t e ------------- | |
1528 // -- a0 : the number of arguments (not including the receiver) | |
1529 // -- a1 : the function to call (checked to be a JSFunction) | |
1530 // ----------------------------------- | |
1531 | |
1532 Label convert, convert_global_proxy, convert_to_object, done_convert; | |
1533 __ AssertFunction(a1); | |
1534 // TODO(bmeurer): Throw a TypeError if function's [[FunctionKind]] internal | |
1535 // slot is "classConstructor". | |
1536 // Enter the context of the function; ToObject has to run in the function | |
1537 // context, and we also need to take the global proxy from the function | |
1538 // context in case of conversion. | |
1539 // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList) | |
1540 STATIC_ASSERT(SharedFunctionInfo::kNativeByteOffset == | |
1541 SharedFunctionInfo::kStrictModeByteOffset); | |
1542 __ ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); | |
1543 __ ld(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); | |
1544 // We need to convert the receiver for non-native sloppy mode functions. | |
1545 __ lbu(a3, FieldMemOperand(a2, SharedFunctionInfo::kNativeByteOffset)); | |
1546 __ And(at, a3, Operand((1 << SharedFunctionInfo::kNativeBitWithinByte) | | |
1547 (1 << SharedFunctionInfo::kStrictModeBitWithinByte))); | |
1548 __ Branch(&done_convert, ne, at, Operand(zero_reg)); | |
1549 { | |
1550 __ dsll(at, a0, kPointerSizeLog2); | |
1551 __ daddu(at, sp, at); | |
1552 __ ld(a3, MemOperand(at)); | |
1553 | |
1554 // ----------- S t a t e ------------- | |
1555 // -- a0 : the number of arguments (not including the receiver) | |
1556 // -- a1 : the function to call (checked to be a JSFunction) | |
1557 // -- a2 : the shared function info. | |
1558 // -- a3 : the receiver | |
1559 // -- cp : the function context. | |
1560 // ----------------------------------- | |
1561 | |
1562 Label convert_receiver; | |
1563 __ JumpIfSmi(a3, &convert_to_object); | |
1564 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE); | |
1565 __ GetObjectType(a3, a4, a4); | |
1566 __ Branch(&done_convert, hs, a4, Operand(FIRST_JS_RECEIVER_TYPE)); | |
1567 __ JumpIfRoot(a3, Heap::kUndefinedValueRootIndex, &convert_global_proxy); | |
1568 __ JumpIfNotRoot(a3, Heap::kNullValueRootIndex, &convert_to_object); | |
1569 __ bind(&convert_global_proxy); | |
1570 { | |
1571 // Patch receiver to global proxy. | |
1572 __ LoadGlobalProxy(a3); | |
1573 } | |
1574 __ Branch(&convert_receiver); | |
1575 __ bind(&convert_to_object); | |
1576 { | |
1577 // Convert receiver using ToObject. | |
1578 // TODO(bmeurer): Inline the allocation here to avoid building the frame | |
1579 // in the fast case? (fall back to AllocateInNewSpace?) | |
1580 FrameScope scope(masm, StackFrame::INTERNAL); | |
1581 __ SmiTag(a0); | |
1582 __ Push(a0, a1); | |
1583 __ mov(a0, a3); | |
1584 ToObjectStub stub(masm->isolate()); | |
1585 __ CallStub(&stub); | |
1586 __ mov(a3, v0); | |
1587 __ Pop(a0, a1); | |
1588 __ SmiUntag(a0); | |
1589 } | |
1590 __ ld(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); | |
1591 __ bind(&convert_receiver); | |
1592 __ dsll(at, a0, kPointerSizeLog2); | |
1593 __ daddu(at, sp, at); | |
1594 __ sd(a3, MemOperand(at)); | |
1595 } | |
1596 __ bind(&done_convert); | |
1597 | |
1598 // ----------- S t a t e ------------- | |
1599 // -- a0 : the number of arguments (not including the receiver) | |
1600 // -- a1 : the function to call (checked to be a JSFunction) | |
1601 // -- a2 : the shared function info. | |
1602 // -- cp : the function context. | |
1603 // ----------------------------------- | |
1604 | |
1605 __ lw(a2, | |
1606 FieldMemOperand(a2, SharedFunctionInfo::kFormalParameterCountOffset)); | |
1607 __ ld(a3, FieldMemOperand(a1, JSFunction::kCodeEntryOffset)); | |
1608 ParameterCount actual(a0); | |
1609 ParameterCount expected(a2); | |
1610 __ InvokeCode(a3, expected, actual, JUMP_FUNCTION, NullCallWrapper()); | |
1611 } | |
1612 | |
1613 | |
1614 // static | |
1615 void Builtins::Generate_Call(MacroAssembler* masm) { | |
1616 // ----------- S t a t e ------------- | |
1617 // -- a0 : the number of arguments (not including the receiver) | |
1618 // -- a1 : the target to call (can be any Object). | |
1619 // ----------------------------------- | |
1620 | |
1621 Label non_smi, non_function; | |
1622 __ JumpIfSmi(a1, &non_function); | |
1623 __ bind(&non_smi); | |
1624 __ GetObjectType(a1, a2, a2); | |
1625 __ Jump(masm->isolate()->builtins()->CallFunction(), RelocInfo::CODE_TARGET, | |
1626 eq, a2, Operand(JS_FUNCTION_TYPE)); | |
1627 __ Branch(&non_function, ne, a2, Operand(JS_FUNCTION_PROXY_TYPE)); | |
1628 | |
1629 // 1. Call to function proxy. | |
1630 // TODO(neis): This doesn't match the ES6 spec for [[Call]] on proxies. | |
1631 __ ld(a1, FieldMemOperand(a1, JSFunctionProxy::kCallTrapOffset)); | |
1632 __ AssertNotSmi(a1); | |
1633 __ Branch(&non_smi); | |
1634 | |
1635 // 2. Call to something else, which might have a [[Call]] internal method (if | |
1636 // not we raise an exception). | |
1637 __ bind(&non_function); | |
1638 // TODO(bmeurer): I wonder why we prefer to have slow API calls? This could | |
1639 // be awesome instead; i.e. a trivial improvement would be to call into the | |
1640 // runtime and just deal with the API function there instead of returning a | |
1641 // delegate from a runtime call that just jumps back to the runtime once | |
1642 // called. Or, bonus points, call directly into the C API function here, as | |
1643 // we do in some Crankshaft fast cases. | |
1644 // Overwrite the original receiver with the (original) target. | |
1645 __ dsll(at, a0, kPointerSizeLog2); | |
1646 __ daddu(at, sp, at); | |
1647 __ sd(a1, MemOperand(at)); | |
1648 { | |
1649 // Determine the delegate for the target (if any). | |
1650 FrameScope scope(masm, StackFrame::INTERNAL); | |
1651 __ SmiTag(a0); | |
1652 __ Push(a0, a1); | |
1653 __ CallRuntime(Runtime::kGetFunctionDelegate, 1); | |
1654 __ mov(a1, v0); | |
1655 __ Pop(a0); | |
1656 __ SmiUntag(a0); | |
1657 } | |
1658 // The delegate is always a regular function. | |
1659 __ AssertFunction(a1); | |
1660 __ Jump(masm->isolate()->builtins()->CallFunction(), RelocInfo::CODE_TARGET); | |
1661 } | |
1662 | |
1663 | |
1664 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { | 1733 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { |
1665 // State setup as expected by MacroAssembler::InvokePrologue. | 1734 // State setup as expected by MacroAssembler::InvokePrologue. |
1666 // ----------- S t a t e ------------- | 1735 // ----------- S t a t e ------------- |
1667 // -- a0: actual arguments count | 1736 // -- a0: actual arguments count |
1668 // -- a1: function (passed through to callee) | 1737 // -- a1: function (passed through to callee) |
1669 // -- a2: expected arguments count | 1738 // -- a2: expected arguments count |
1670 // ----------------------------------- | 1739 // ----------------------------------- |
1671 | 1740 |
1672 Label stack_overflow; | 1741 Label stack_overflow; |
1673 ArgumentAdaptorStackCheck(masm, &stack_overflow); | 1742 ArgumentAdaptorStackCheck(masm, &stack_overflow); |
(...skipping 139 matching lines...)
1813 } | 1882 } |
1814 } | 1883 } |
1815 | 1884 |
1816 | 1885 |
1817 #undef __ | 1886 #undef __ |
1818 | 1887 |
1819 } // namespace internal | 1888 } // namespace internal |
1820 } // namespace v8 | 1889 } // namespace v8 |
1821 | 1890 |
1822 #endif // V8_TARGET_ARCH_MIPS64 | 1891 #endif // V8_TARGET_ARCH_MIPS64 |
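
Note: both sides of this diff implement, in generated MIPS64 code, the sloppy-mode receiver coercion from ES6 9.2.1 [[Call]] (undefined/null become the global proxy, primitives are boxed via ToObject, strict-mode and native functions are left alone). The following is a minimal standalone C++ sketch of that decision logic only; ValueKind, Value, and ConvertReceiver are illustrative names and are not part of V8.

    #include <iostream>
    #include <string>

    enum class ValueKind { kUndefined, kNull, kSmi, kString, kJSObject };

    struct Value {
      ValueKind kind;
      std::string description;
    };

    // Sloppy-mode callees never see undefined/null/primitive receivers:
    // undefined and null are replaced by the global proxy, primitives are
    // boxed via ToObject. Strict-mode and native functions take the receiver
    // unchanged (the &done_convert / &shift_arguments fast paths above).
    Value ConvertReceiver(const Value& receiver, bool is_strict, bool is_native) {
      if (is_strict || is_native) return receiver;        // no transformation
      switch (receiver.kind) {
        case ValueKind::kUndefined:
        case ValueKind::kNull:
          return {ValueKind::kJSObject, "global proxy"};  // use_global_proxy path
        case ValueKind::kJSObject:
          return receiver;                                // already a JS receiver
        default:
          return {ValueKind::kJSObject,
                  "ToObject(" + receiver.description + ")"};  // convert_to_object path
      }
    }

    int main() {
      Value smi{ValueKind::kSmi, "42"};
      Value undef{ValueKind::kUndefined, "undefined"};
      std::cout << ConvertReceiver(smi, false, false).description << "\n";   // ToObject(42)
      std::cout << ConvertReceiver(undef, false, false).description << "\n"; // global proxy
      std::cout << ConvertReceiver(smi, true, false).description << "\n";    // 42 (strict)
      return 0;
    }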