| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/v8.h" | 5 #include "src/v8.h" |
| 6 | 6 |
| 7 #if V8_TARGET_ARCH_ARM64 | 7 #if V8_TARGET_ARCH_ARM64 |
| 8 | 8 |
| 9 #include "src/codegen.h" | 9 #include "src/codegen.h" |
| 10 #include "src/debug.h" | 10 #include "src/debug.h" |
| (...skipping 781 matching lines...) |
| 792 | 792 |
| 793 // Leave construct frame | 793 // Leave construct frame |
| 794 } | 794 } |
| 795 | 795 |
| 796 __ DropBySMI(x1); | 796 __ DropBySMI(x1); |
| 797 __ Drop(1); | 797 __ Drop(1); |
| 798 __ Ret(); | 798 __ Ret(); |
| 799 } | 799 } |
| 800 | 800 |
| 801 | 801 |
| 802 enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt }; |
| 803 |
| 804 |
| 805 // Clobbers x10, x15; preserves all other registers. |
| 806 static void Generate_CheckStackOverflow(MacroAssembler* masm, |
| 807 const int calleeOffset, Register argc, |
| 808 IsTagged argc_is_tagged) { |
| 809 Register function = x15; |
| 810 |
| 811 // Check the stack for overflow. |
| 812 // We are not trying to catch interruptions (e.g. debug break and |
| 813 // preemption) here, so the "real stack limit" is checked. |
| 814 Label enough_stack_space; |
| 815 __ LoadRoot(x10, Heap::kRealStackLimitRootIndex); |
| 816 __ Ldr(function, MemOperand(fp, calleeOffset)); |
| 817 // Make x10 the space we have left. The stack might already be overflowed |
| 818 // here which will cause x10 to become negative. |
| 819 // TODO(jbramley): Check that the stack usage here is safe. |
| 820 __ Sub(x10, jssp, x10); |
| 821 // Check if the arguments will overflow the stack. |
| 822 if (argc_is_tagged == kArgcIsSmiTagged) { |
| 823 __ Cmp(x10, Operand::UntagSmiAndScale(argc, kPointerSizeLog2)); |
| 824 } else { |
| 825 DCHECK(argc_is_tagged == kArgcIsUntaggedInt); |
| 826 __ Cmp(x10, Operand(argc, LSL, kPointerSizeLog2)); |
| 827 } |
| 828 __ B(gt, &enough_stack_space); |
| 829 // There is not enough stack space, so use a builtin to throw an appropriate |
| 830 // error. |
| 831 __ Push(function, argc); |
| 832 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION); |
| 833 // We should never return from the STACK_OVERFLOW builtin. |
| 834 if (__ emit_debug_code()) { |
| 835 __ Unreachable(); |
| 836 } |
| 837 |
| 838 __ Bind(&enough_stack_space); |
| 839 } |
| 840 |
| 841 |
| 802 // Input: | 842 // Input: |
| 803 // x0: code entry. | 843 // x0: code entry. |
| 804 // x1: function. | 844 // x1: function. |
| 805 // x2: receiver. | 845 // x2: receiver. |
| 806 // x3: argc. | 846 // x3: argc. |
| 807 // x4: argv. | 847 // x4: argv. |
| 808 // Output: | 848 // Output: |
| 809 // x0: result. | 849 // x0: result. |
| 810 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, | 850 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, |
| 811 bool is_construct) { | 851 bool is_construct) { |
| (...skipping 13 matching lines...) |
| 825 FrameScope scope(masm, StackFrame::INTERNAL); | 865 FrameScope scope(masm, StackFrame::INTERNAL); |
| 826 | 866 |
| 827 // Set up the context from the function argument. | 867 // Set up the context from the function argument. |
| 828 __ Ldr(cp, FieldMemOperand(function, JSFunction::kContextOffset)); | 868 __ Ldr(cp, FieldMemOperand(function, JSFunction::kContextOffset)); |
| 829 | 869 |
| 830 __ InitializeRootRegister(); | 870 __ InitializeRootRegister(); |
| 831 | 871 |
| 832 // Push the function and the receiver onto the stack. | 872 // Push the function and the receiver onto the stack. |
| 833 __ Push(function, receiver); | 873 __ Push(function, receiver); |
| 834 | 874 |
| 875 // Check if we have enough stack space to push all arguments. |
| 876 // The function is the first thing that was pushed above after entering |
| 877 // the internal frame. |
| 878 const int kFunctionOffset = |
| 879 InternalFrameConstants::kCodeOffset - kPointerSize; |
| 880 // The stack overflow check clobbers x10 and x15. |
| 881 Generate_CheckStackOverflow(masm, kFunctionOffset, argc, |
| 882 kArgcIsUntaggedInt); |
| 883 |
| 835 // Copy arguments to the stack in a loop, in reverse order. | 884 // Copy arguments to the stack in a loop, in reverse order. |
| 836 // x3: argc. | 885 // x3: argc. |
| 837 // x4: argv. | 886 // x4: argv. |
| 838 Label loop, entry; | 887 Label loop, entry; |
| 839 // Compute the copy end address. | 888 // Compute the copy end address. |
| 840 __ Add(x10, argv, Operand(argc, LSL, kPointerSizeLog2)); | 889 __ Add(x10, argv, Operand(argc, LSL, kPointerSizeLog2)); |
| 841 | 890 |
| 842 __ B(&entry); | 891 __ B(&entry); |
| 843 __ Bind(&loop); | 892 __ Bind(&loop); |
| 844 __ Ldr(x11, MemOperand(argv, kPointerSize, PostIndex)); | 893 __ Ldr(x11, MemOperand(argv, kPointerSize, PostIndex)); |
| (...skipping 472 matching lines...) |
| 1317 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), | 1366 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), |
| 1318 RelocInfo::CODE_TARGET); | 1367 RelocInfo::CODE_TARGET); |
| 1319 __ Bind(&dont_adapt_args); | 1368 __ Bind(&dont_adapt_args); |
| 1320 | 1369 |
| 1321 __ Ldr(x3, FieldMemOperand(function, JSFunction::kCodeEntryOffset)); | 1370 __ Ldr(x3, FieldMemOperand(function, JSFunction::kCodeEntryOffset)); |
| 1322 ParameterCount expected(0); | 1371 ParameterCount expected(0); |
| 1323 __ InvokeCode(x3, expected, expected, JUMP_FUNCTION, NullCallWrapper()); | 1372 __ InvokeCode(x3, expected, expected, JUMP_FUNCTION, NullCallWrapper()); |
| 1324 } | 1373 } |
| 1325 | 1374 |
| 1326 | 1375 |
| 1327 static void Generate_CheckStackOverflow(MacroAssembler* masm, | |
| 1328 const int calleeOffset) { | |
| 1329 Register argc = x0; | |
| 1330 Register function = x15; | |
| 1331 | |
| 1332 // Check the stack for overflow. | |
| 1333 // We are not trying to catch interruptions (e.g. debug break and | |
| 1334 // preemption) here, so the "real stack limit" is checked. | |
| 1335 Label enough_stack_space; | |
| 1336 __ LoadRoot(x10, Heap::kRealStackLimitRootIndex); | |
| 1337 __ Ldr(function, MemOperand(fp, calleeOffset)); | |
| 1338 // Make x10 the space we have left. The stack might already be overflowed | |
| 1339 // here which will cause x10 to become negative. | |
| 1340 // TODO(jbramley): Check that the stack usage here is safe. | |
| 1341 __ Sub(x10, jssp, x10); | |
| 1342 // Check if the arguments will overflow the stack. | |
| 1343 __ Cmp(x10, Operand::UntagSmiAndScale(argc, kPointerSizeLog2)); | |
| 1344 __ B(gt, &enough_stack_space); | |
| 1345 // There is not enough stack space, so use a builtin to throw an appropriate | |
| 1346 // error. | |
| 1347 __ Push(function, argc); | |
| 1348 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION); | |
| 1349 // We should never return from the APPLY_OVERFLOW builtin. | |
| 1350 if (__ emit_debug_code()) { | |
| 1351 __ Unreachable(); | |
| 1352 } | |
| 1353 | |
| 1354 __ Bind(&enough_stack_space); | |
| 1355 } | |
| 1356 | |
| 1357 | |
| 1358 static void Generate_PushAppliedArguments(MacroAssembler* masm, | 1376 static void Generate_PushAppliedArguments(MacroAssembler* masm, |
| 1359 const int argumentsOffset, | 1377 const int argumentsOffset, |
| 1360 const int indexOffset, | 1378 const int indexOffset, |
| 1361 const int limitOffset) { | 1379 const int limitOffset) { |
| 1362 Label entry, loop; | 1380 Label entry, loop; |
| 1363 Register current = x0; | 1381 Register current = x0; |
| 1364 __ Ldr(current, MemOperand(fp, indexOffset)); | 1382 __ Ldr(current, MemOperand(fp, indexOffset)); |
| 1365 __ B(&entry); | 1383 __ B(&entry); |
| 1366 | 1384 |
| 1367 __ Bind(&loop); | 1385 __ Bind(&loop); |
| (...skipping 47 matching lines...) |
| 1415 __ Ldr(function, MemOperand(fp, kFunctionOffset)); | 1433 __ Ldr(function, MemOperand(fp, kFunctionOffset)); |
| 1416 __ Ldr(args, MemOperand(fp, kArgumentsOffset)); | 1434 __ Ldr(args, MemOperand(fp, kArgumentsOffset)); |
| 1417 __ Push(function, args); | 1435 __ Push(function, args); |
| 1418 if (targetIsArgument) { | 1436 if (targetIsArgument) { |
| 1419 __ InvokeBuiltin(Builtins::REFLECT_APPLY_PREPARE, CALL_FUNCTION); | 1437 __ InvokeBuiltin(Builtins::REFLECT_APPLY_PREPARE, CALL_FUNCTION); |
| 1420 } else { | 1438 } else { |
| 1421 __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION); | 1439 __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION); |
| 1422 } | 1440 } |
| 1423 Register argc = x0; | 1441 Register argc = x0; |
| 1424 | 1442 |
| 1425 Generate_CheckStackOverflow(masm, kFunctionOffset); | 1443 Generate_CheckStackOverflow(masm, kFunctionOffset, argc, kArgcIsSmiTagged); |
| 1426 | 1444 |
| 1427 // Push current limit and index. | 1445 // Push current limit and index. |
| 1428 __ Mov(x1, 0); // Initial index. | 1446 __ Mov(x1, 0); // Initial index. |
| 1429 __ Push(argc, x1); | 1447 __ Push(argc, x1); |
| 1430 | 1448 |
| 1431 Label push_receiver; | 1449 Label push_receiver; |
| 1432 __ Ldr(receiver, MemOperand(fp, kReceiverOffset)); | 1450 __ Ldr(receiver, MemOperand(fp, kReceiverOffset)); |
| 1433 | 1451 |
| 1434 // Check that the function is a JS function. Otherwise it must be a proxy. | 1452 // Check that the function is a JS function. Otherwise it must be a proxy. |
| 1435 // When it is not, the function proxy will be invoked later. | 1453 // When it is not, the function proxy will be invoked later. |
| (...skipping 106 matching lines...) |
| 1542 | 1560 |
| 1543 // Validate arguments | 1561 // Validate arguments |
| 1544 __ Bind(&validate_arguments); | 1562 __ Bind(&validate_arguments); |
| 1545 __ Ldr(function, MemOperand(fp, kFunctionOffset)); | 1563 __ Ldr(function, MemOperand(fp, kFunctionOffset)); |
| 1546 __ Ldr(args, MemOperand(fp, kArgumentsOffset)); | 1564 __ Ldr(args, MemOperand(fp, kArgumentsOffset)); |
| 1547 __ Ldr(newTarget, MemOperand(fp, kNewTargetOffset)); | 1565 __ Ldr(newTarget, MemOperand(fp, kNewTargetOffset)); |
| 1548 __ Push(function, args, newTarget); | 1566 __ Push(function, args, newTarget); |
| 1549 __ InvokeBuiltin(Builtins::REFLECT_CONSTRUCT_PREPARE, CALL_FUNCTION); | 1567 __ InvokeBuiltin(Builtins::REFLECT_CONSTRUCT_PREPARE, CALL_FUNCTION); |
| 1550 Register argc = x0; | 1568 Register argc = x0; |
| 1551 | 1569 |
| 1552 Generate_CheckStackOverflow(masm, kFunctionOffset); | 1570 Generate_CheckStackOverflow(masm, kFunctionOffset, argc, kArgcIsSmiTagged); |
| 1553 | 1571 |
| 1554 // Push current limit and index, constructor & newTarget | 1572 // Push current limit and index, constructor & newTarget |
| 1555 __ Mov(x1, 0); // Initial index. | 1573 __ Mov(x1, 0); // Initial index. |
| 1556 __ Ldr(newTarget, MemOperand(fp, kNewTargetOffset)); | 1574 __ Ldr(newTarget, MemOperand(fp, kNewTargetOffset)); |
| 1557 __ Push(argc, x1, newTarget, function); | 1575 __ Push(argc, x1, newTarget, function); |
| 1558 | 1576 |
| 1559 // Copy all arguments from the array to the stack. | 1577 // Copy all arguments from the array to the stack. |
| 1560 Generate_PushAppliedArguments( | 1578 Generate_PushAppliedArguments( |
| 1561 masm, kArgumentsOffset, kIndexOffset, kLimitOffset); | 1579 masm, kArgumentsOffset, kIndexOffset, kLimitOffset); |
| 1562 | 1580 |
| (...skipping 215 matching lines...) |
| 1778 __ Unreachable(); | 1796 __ Unreachable(); |
| 1779 } | 1797 } |
| 1780 } | 1798 } |
| 1781 | 1799 |
| 1782 | 1800 |
| 1783 #undef __ | 1801 #undef __ |
| 1784 | 1802 |
| 1785 } } // namespace v8::internal | 1803 } } // namespace v8::internal |
| 1786 | 1804 |
| 1787 #endif // V8_TARGET_ARCH_ARM64 | 1805 #endif // V8_TARGET_ARCH_ARM64 |
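
For reference, a minimal C++ sketch of what the new parameterised Generate_CheckStackOverflow computes, assuming 8-byte pointers and 64-bit SMIs with the value in the upper word; the constants, helper name and exception below are illustrative only and are not part of the patch:

#include <cstdint>
#include <stdexcept>

// Assumed constants for an ARM64 build; not taken from the patch itself.
constexpr int kPointerSizeLog2 = 3;  // 8-byte pointers
constexpr int kSmiShift = 32;        // SMI value lives in the upper 32 bits

enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };

// Models the emitted check: compare the space left between jssp and the
// real stack limit with the space the arguments will occupy.
void CheckStackOverflow(uint64_t jssp, uint64_t real_stack_limit,
                        uint64_t argc, IsTagged argc_is_tagged) {
  // The stack may already have overflowed, in which case the subtraction
  // wraps to a negative value; the generated code relies on the signed
  // "gt" condition for the same reason.
  int64_t remaining = static_cast<int64_t>(jssp - real_stack_limit);
  int64_t needed =
      argc_is_tagged == kArgcIsSmiTagged
          ? (static_cast<int64_t>(argc) >> kSmiShift) << kPointerSizeLog2
          : static_cast<int64_t>(argc) << kPointerSizeLog2;
  if (remaining <= needed) {
    throw std::runtime_error("stack overflow");  // STACK_OVERFLOW builtin
  }
}

The single helper replaces the SMI-only version removed further down: the JS entry trampoline now passes an untagged argument count (kArgcIsUntaggedInt), while the apply and Reflect.construct paths continue to pass a SMI-tagged count (kArgcIsSmiTagged).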