OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 | 5 |
6 | 6 |
7 #include "src/v8.h" | 7 #include "src/v8.h" |
8 | 8 |
9 #if V8_TARGET_ARCH_MIPS | 9 #if V8_TARGET_ARCH_MIPS |
10 | 10 |
(...skipping 1333 matching lines...) |
1344 // Check formal and actual parameter counts. | 1344 // Check formal and actual parameter counts. |
1345 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), | 1345 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), |
1346 RelocInfo::CODE_TARGET, ne, a2, Operand(a0)); | 1346 RelocInfo::CODE_TARGET, ne, a2, Operand(a0)); |
1347 | 1347 |
1348 __ lw(a3, FieldMemOperand(a1, JSFunction::kCodeEntryOffset)); | 1348 __ lw(a3, FieldMemOperand(a1, JSFunction::kCodeEntryOffset)); |
1349 ParameterCount expected(0); | 1349 ParameterCount expected(0); |
1350 __ InvokeCode(a3, expected, expected, JUMP_FUNCTION, NullCallWrapper()); | 1350 __ InvokeCode(a3, expected, expected, JUMP_FUNCTION, NullCallWrapper()); |
1351 } | 1351 } |
1352 | 1352 |
1353 | 1353 |
1354 void Builtins::Generate_FunctionApply(MacroAssembler* masm) { | 1354 static void Generate_CheckStackOverflow(MacroAssembler* masm, |
1355 const int kIndexOffset = | 1355 const int calleeOffset) { |
1356 StandardFrameConstants::kExpressionsOffset - (2 * kPointerSize); | 1356 // Check the stack for overflow. We are not trying to catch |
1357 const int kLimitOffset = | 1357 // interruptions (e.g. debug break and preemption) here, so the "real stack |
1358 StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize); | 1358 // limit" is checked. |
1359 const int kArgsOffset = 2 * kPointerSize; | 1359 Label okay; |
1360 const int kRecvOffset = 3 * kPointerSize; | 1360 __ LoadRoot(a2, Heap::kRealStackLimitRootIndex); |
1361 const int kFunctionOffset = 4 * kPointerSize; | 1361 // Make a2 the space we have left. The stack might already be overflowed |
| 1362 // here which will cause a2 to become negative. |
| 1363 __ Subu(a2, sp, a2); |
| 1364 // Check if the arguments will overflow the stack. |
| 1365 __ sll(t3, v0, kPointerSizeLog2 - kSmiTagSize); |
| 1366 // Signed comparison. |
| 1367 __ Branch(&okay, gt, a2, Operand(t3)); |
| 1368 |
| 1369 // Out of stack space. |
| 1370 __ lw(a1, MemOperand(fp, calleeOffset)); |
| 1371 __ Push(a1, v0); |
| 1372 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION); |
| 1373 |
| 1374 __ bind(&okay); |
| 1375 } |
| 1376 |
| 1377 |
| 1378 static void Generate_PushAppliedArguments(MacroAssembler* masm, |
| 1379 const int argumentsOffset, |
| 1380 const int indexOffset, |
| 1381 const int limitOffset) { |
| 1382 Label entry, loop; |
| 1383 __ lw(a0, MemOperand(fp, indexOffset)); |
| 1384 __ Branch(&entry); |
| 1385 |
| 1386 // Load the current argument from the arguments array and push it to the |
| 1387 // stack. |
| 1388 // a0: current argument index |
| 1389 __ bind(&loop); |
| 1390 __ lw(a1, MemOperand(fp, argumentsOffset)); |
| 1391 __ Push(a1, a0); |
| 1392 |
| 1393 // Call the runtime to access the property in the arguments array. |
| 1394 __ CallRuntime(Runtime::kGetProperty, 2); |
| 1395 __ push(v0); |
| 1396 |
| 1397 // Advance the current argument index (stored as a Smi). |
| 1398 __ lw(a0, MemOperand(fp, indexOffset)); |
| 1399 __ Addu(a0, a0, Operand(1 << kSmiTagSize)); |
| 1400 __ sw(a0, MemOperand(fp, indexOffset)); |
| 1401 |
| 1402 // Test if the copy loop has finished copying all the elements from the |
| 1403 // arguments object. |
| 1404 __ bind(&entry); |
| 1405 __ lw(a1, MemOperand(fp, limitOffset)); |
| 1406 __ Branch(&loop, ne, a0, Operand(a1)); |
| 1407 |
| 1408 // On exit, the pushed arguments count is in a0, untagged. |
| 1409 __ SmiUntag(a0); |
| 1410 } |
| 1411 |
| 1412 |
| 1413 // Used by FunctionApply and ReflectApply. |
| 1414 static void Generate_ApplyHelper(MacroAssembler* masm, bool targetIsArgument) { |
| 1415 const int kFormalParameters = targetIsArgument ? 3 : 2; |
| 1416 const int kStackSize = kFormalParameters + 1; |
1362 | 1417 |
1363 { | 1418 { |
1364 FrameScope frame_scope(masm, StackFrame::INTERNAL); | 1419 FrameScope frame_scope(masm, StackFrame::INTERNAL); |
| 1420 const int kArgumentsOffset = kFPOnStackSize + kPCOnStackSize; |
| 1421 const int kReceiverOffset = kArgumentsOffset + kPointerSize; |
| 1422 const int kFunctionOffset = kReceiverOffset + kPointerSize; |
| 1423 |
1365 __ lw(a0, MemOperand(fp, kFunctionOffset)); // Get the function. | 1424 __ lw(a0, MemOperand(fp, kFunctionOffset)); // Get the function. |
1366 __ push(a0); | 1425 __ push(a0); |
1367 __ lw(a0, MemOperand(fp, kArgsOffset)); // Get the args array. | 1426 __ lw(a0, MemOperand(fp, kArgumentsOffset)); // Get the args array. |
1368 __ push(a0); | 1427 __ push(a0); |
1369 // Returns (in v0) number of arguments to copy to stack as Smi. | 1428 // Returns (in v0) number of arguments to copy to stack as Smi. |
1370 __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION); | 1429 if (targetIsArgument) { |
| 1430 __ InvokeBuiltin(Builtins::REFLECT_APPLY_PREPARE, CALL_FUNCTION); |
| 1431 } else { |
| 1432 __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION); |
| 1433 } |
1371 | 1434 |
1372 // Check the stack for overflow. We are not trying to catch | 1435 Generate_CheckStackOverflow(masm, kFunctionOffset); |
1373 // interruptions (e.g. debug break and preemption) here, so the "real stack | |
1374 // limit" is checked. | |
1375 Label okay; | |
1376 __ LoadRoot(a2, Heap::kRealStackLimitRootIndex); | |
1377 // Make a2 the space we have left. The stack might already be overflowed | |
1378 // here which will cause a2 to become negative. | |
1379 __ subu(a2, sp, a2); | |
1380 // Check if the arguments will overflow the stack. | |
1381 __ sll(t3, v0, kPointerSizeLog2 - kSmiTagSize); | |
1382 __ Branch(&okay, gt, a2, Operand(t3)); // Signed comparison. | |
1383 | |
1384 // Out of stack space. | |
1385 __ lw(a1, MemOperand(fp, kFunctionOffset)); | |
1386 __ Push(a1, v0); | |
1387 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION); | |
1388 // End of stack check. | |
1389 | 1436 |
1390 // Push current limit and index. | 1437 // Push current limit and index. |
1391 __ bind(&okay); | 1438 const int kIndexOffset = |
| 1439 StandardFrameConstants::kExpressionsOffset - (2 * kPointerSize); |
| 1440 const int kLimitOffset = |
| 1441 StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize); |
1392 __ mov(a1, zero_reg); | 1442 __ mov(a1, zero_reg); |
1393 __ Push(v0, a1); // Limit and initial index. | 1443 __ Push(v0, a1); // Limit and initial index. |
1394 | 1444 |
1395 // Get the receiver. | 1445 // Get the receiver. |
1396 __ lw(a0, MemOperand(fp, kRecvOffset)); | 1446 __ lw(a0, MemOperand(fp, kReceiverOffset)); |
1397 | 1447 |
1398 // Check that the function is a JS function (otherwise it must be a proxy). | 1448 // Check that the function is a JS function (otherwise it must be a proxy). |
1399 Label push_receiver; | 1449 Label push_receiver; |
1400 __ lw(a1, MemOperand(fp, kFunctionOffset)); | 1450 __ lw(a1, MemOperand(fp, kFunctionOffset)); |
1401 __ GetObjectType(a1, a2, a2); | 1451 __ GetObjectType(a1, a2, a2); |
1402 __ Branch(&push_receiver, ne, a2, Operand(JS_FUNCTION_TYPE)); | 1452 __ Branch(&push_receiver, ne, a2, Operand(JS_FUNCTION_TYPE)); |
1403 | 1453 |
1404 // Change context eagerly to get the right global object if necessary. | 1454 // Change context eagerly to get the right global object if necessary. |
1405 __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); | 1455 __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); |
1406 // Load the shared function info while the function is still in a1. | 1456 // Load the shared function info while the function is still in a1. |
(...skipping 35 matching lines...) |
1442 __ bind(&use_global_proxy); | 1492 __ bind(&use_global_proxy); |
1443 __ lw(a0, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX)); | 1493 __ lw(a0, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX)); |
1444 __ lw(a0, FieldMemOperand(a0, GlobalObject::kGlobalProxyOffset)); | 1494 __ lw(a0, FieldMemOperand(a0, GlobalObject::kGlobalProxyOffset)); |
1445 | 1495 |
1446 // Push the receiver. | 1496 // Push the receiver. |
1447 // a0: receiver | 1497 // a0: receiver |
1448 __ bind(&push_receiver); | 1498 __ bind(&push_receiver); |
1449 __ push(a0); | 1499 __ push(a0); |
1450 | 1500 |
1451 // Copy all arguments from the array to the stack. | 1501 // Copy all arguments from the array to the stack. |
1452 Label entry, loop; | 1502 Generate_PushAppliedArguments( |
1453 __ lw(a0, MemOperand(fp, kIndexOffset)); | 1503 masm, kArgumentsOffset, kIndexOffset, kLimitOffset); |
1454 __ Branch(&entry); | |
1455 | |
1456 // Load the current argument from the arguments array and push it to the | |
1457 // stack. | |
1458 // a0: current argument index | |
1459 __ bind(&loop); | |
1460 __ lw(a1, MemOperand(fp, kArgsOffset)); | |
1461 __ Push(a1, a0); | |
1462 | |
1463 // Call the runtime to access the property in the arguments array. | |
1464 __ CallRuntime(Runtime::kGetProperty, 2); | |
1465 __ push(v0); | |
1466 | |
1467 // Use inline caching to access the arguments. | |
1468 __ lw(a0, MemOperand(fp, kIndexOffset)); | |
1469 __ Addu(a0, a0, Operand(1 << kSmiTagSize)); | |
1470 __ sw(a0, MemOperand(fp, kIndexOffset)); | |
1471 | |
1472 // Test if the copy loop has finished copying all the elements from the | |
1473 // arguments object. | |
1474 __ bind(&entry); | |
1475 __ lw(a1, MemOperand(fp, kLimitOffset)); | |
1476 __ Branch(&loop, ne, a0, Operand(a1)); | |
1477 | 1504 |
1478 // Call the function. | 1505 // Call the function. |
1479 Label call_proxy; | 1506 Label call_proxy; |
1480 ParameterCount actual(a0); | 1507 ParameterCount actual(a0); |
1481 __ sra(a0, a0, kSmiTagSize); | |
1482 __ lw(a1, MemOperand(fp, kFunctionOffset)); | 1508 __ lw(a1, MemOperand(fp, kFunctionOffset)); |
1483 __ GetObjectType(a1, a2, a2); | 1509 __ GetObjectType(a1, a2, a2); |
1484 __ Branch(&call_proxy, ne, a2, Operand(JS_FUNCTION_TYPE)); | 1510 __ Branch(&call_proxy, ne, a2, Operand(JS_FUNCTION_TYPE)); |
1485 | 1511 |
1486 __ InvokeFunction(a1, actual, CALL_FUNCTION, NullCallWrapper()); | 1512 __ InvokeFunction(a1, actual, CALL_FUNCTION, NullCallWrapper()); |
1487 | 1513 |
1488 frame_scope.GenerateLeaveFrame(); | 1514 frame_scope.GenerateLeaveFrame(); |
1489 __ Ret(USE_DELAY_SLOT); | 1515 __ Ret(USE_DELAY_SLOT); |
1490 __ Addu(sp, sp, Operand(3 * kPointerSize)); // In delay slot. | 1516 __ Addu(sp, sp, Operand(kStackSize * kPointerSize)); // In delay slot. |
1491 | 1517 |
1492 // Call the function proxy. | 1518 // Call the function proxy. |
1493 __ bind(&call_proxy); | 1519 __ bind(&call_proxy); |
1494 __ push(a1); // Add function proxy as last argument. | 1520 __ push(a1); // Add function proxy as last argument. |
1495 __ Addu(a0, a0, Operand(1)); | 1521 __ Addu(a0, a0, Operand(1)); |
1496 __ li(a2, Operand(0, RelocInfo::NONE32)); | 1522 __ li(a2, Operand(0, RelocInfo::NONE32)); |
1497 __ GetBuiltinFunction(a1, Builtins::CALL_FUNCTION_PROXY); | 1523 __ GetBuiltinFunction(a1, Builtins::CALL_FUNCTION_PROXY); |
1498 __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), | 1524 __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), |
1499 RelocInfo::CODE_TARGET); | 1525 RelocInfo::CODE_TARGET); |
1500 // Tear down the internal frame and remove function, receiver and args. | 1526 // Tear down the internal frame and remove function, receiver and args. |
1501 } | 1527 } |
1502 | 1528 |
1503 __ Ret(USE_DELAY_SLOT); | 1529 __ Ret(USE_DELAY_SLOT); |
1504 __ Addu(sp, sp, Operand(3 * kPointerSize)); // In delay slot. | 1530 __ Addu(sp, sp, Operand(kStackSize * kPointerSize)); // In delay slot. |
1505 } | 1531 } |
1506 | 1532 |
1507 | 1533 |
| 1534 static void Generate_ConstructHelper(MacroAssembler* masm) { |
| 1535 const int kFormalParameters = 3; |
| 1536 const int kStackSize = kFormalParameters + 1; |
| 1537 |
| 1538 { |
| 1539 FrameAndConstantPoolScope frame_scope(masm, StackFrame::INTERNAL); |
| 1540 const int kNewTargetOffset = kFPOnStackSize + kPCOnStackSize; |
| 1541 const int kArgumentsOffset = kNewTargetOffset + kPointerSize; |
| 1542 const int kFunctionOffset = kArgumentsOffset + kPointerSize; |
| 1543 |
| 1544 // If newTarget is not supplied, set it to the constructor. |
| 1545 Label validate_arguments; |
| 1546 __ lw(a0, MemOperand(fp, kNewTargetOffset)); |
| 1547 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); |
| 1548 __ Branch(&validate_arguments, ne, a0, Operand(at)); |
| 1549 __ lw(a0, MemOperand(fp, kFunctionOffset)); |
| 1550 __ sw(a0, MemOperand(fp, kNewTargetOffset)); |
| 1551 |
| 1552 // Validate the arguments. |
| 1553 __ bind(&validate_arguments); |
| 1554 __ lw(a0, MemOperand(fp, kFunctionOffset)); // Get the function. |
| 1555 __ push(a0); |
| 1556 __ lw(a0, MemOperand(fp, kArgumentsOffset)); // Get the args array. |
| 1557 __ push(a0); |
| 1558 __ lw(a0, MemOperand(fp, kNewTargetOffset)); // Get the new.target. |
| 1559 __ push(a0); |
| 1560 __ InvokeBuiltin(Builtins::REFLECT_CONSTRUCT_PREPARE, CALL_FUNCTION); |
| 1561 |
| 1562 Generate_CheckStackOverflow(masm, kFunctionOffset); |
| 1563 |
| 1564 // Push current limit and index. |
| 1565 const int kIndexOffset = |
| 1566 StandardFrameConstants::kExpressionsOffset - (2 * kPointerSize); |
| 1567 const int kLimitOffset = |
| 1568 StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize); |
| 1569 __ push(v0); // limit |
| 1570 __ mov(a1, zero_reg); // initial index |
| 1571 __ push(a1); |
| 1572 // Push the newTarget and callee functions. |
| 1573 __ lw(a0, MemOperand(fp, kNewTargetOffset)); |
| 1574 __ push(a0); |
| 1575 __ lw(a0, MemOperand(fp, kFunctionOffset)); |
| 1576 __ push(a0); |
| 1577 |
| 1578 // Copy all arguments from the array to the stack. |
| 1579 Generate_PushAppliedArguments( |
| 1580 masm, kArgumentsOffset, kIndexOffset, kLimitOffset); |
| 1581 |
| 1582 // Use the undefined value as the feedback vector. |
| 1583 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); |
| 1584 __ lw(a1, MemOperand(fp, kFunctionOffset)); |
| 1585 |
| 1586 // Call the function. |
| 1587 CallConstructStub stub(masm->isolate(), SUPER_CONSTRUCTOR_CALL); |
| 1588 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL); |
| 1589 |
| 1590 __ Drop(1); |
| 1591 |
| 1592 // Leave internal frame. |
| 1593 } |
| 1594 __ Addu(sp, sp, Operand(kStackSize * kPointerSize)); |
| 1595 __ Jump(ra); |
| 1596 } |
| 1597 |
| 1598 |
| 1599 void Builtins::Generate_FunctionApply(MacroAssembler* masm) { |
| 1600 Generate_ApplyHelper(masm, false); |
| 1601 } |
| 1602 |
| 1603 |
| 1604 void Builtins::Generate_ReflectApply(MacroAssembler* masm) { |
| 1605 Generate_ApplyHelper(masm, true); |
| 1606 } |
| 1607 |
| 1608 |
| 1609 void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) { |
| 1610 Generate_ConstructHelper(masm); |
| 1611 } |
| 1612 |
| 1613 |
1508 static void ArgumentAdaptorStackCheck(MacroAssembler* masm, | 1614 static void ArgumentAdaptorStackCheck(MacroAssembler* masm, |
1509 Label* stack_overflow) { | 1615 Label* stack_overflow) { |
1510 // ----------- S t a t e ------------- | 1616 // ----------- S t a t e ------------- |
1511 // -- a0 : actual number of arguments | 1617 // -- a0 : actual number of arguments |
1512 // -- a1 : function (passed through to callee) | 1618 // -- a1 : function (passed through to callee) |
1513 // -- a2 : expected number of arguments | 1619 // -- a2 : expected number of arguments |
1514 // ----------------------------------- | 1620 // ----------------------------------- |
1515 // Check the stack for overflow. We are not trying to catch | 1621 // Check the stack for overflow. We are not trying to catch |
1516 // interruptions (e.g. debug break and preemption) here, so the "real stack | 1622 // interruptions (e.g. debug break and preemption) here, so the "real stack |
1517 // limit" is checked. | 1623 // limit" is checked. |
(...skipping 161 matching lines...) |
1679 __ break_(0xCC); | 1785 __ break_(0xCC); |
1680 } | 1786 } |
1681 } | 1787 } |
1682 | 1788 |
1683 | 1789 |
1684 #undef __ | 1790 #undef __ |
1685 | 1791 |
1686 } } // namespace v8::internal | 1792 } } // namespace v8::internal |
1687 | 1793 |
1688 #endif // V8_TARGET_ARCH_MIPS | 1794 #endif // V8_TARGET_ARCH_MIPS |