Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 614 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 625 // r1: preserved | 625 // r1: preserved |
| 626 // r2: preserved | 626 // r2: preserved |
| 627 | 627 |
| 628 // Drop the execution stack down to the frame pointer and restore | 628 // Drop the execution stack down to the frame pointer and restore |
| 629 // the caller frame pointer and return address. | 629 // the caller frame pointer and return address. |
| 630 mov(sp, fp); | 630 mov(sp, fp); |
| 631 ldm(ia_w, sp, fp.bit() | lr.bit()); | 631 ldm(ia_w, sp, fp.bit() | lr.bit()); |
| 632 } | 632 } |
| 633 | 633 |
| 634 | 634 |
| 635 void MacroAssembler::EnterExitFrame(bool save_doubles) { | 635 void MacroAssembler::EnterExitFrame(bool save_doubles, int stack_space) { |
| 636 // Compute the argv pointer in a callee-saved register. | |
| 637 add(r6, sp, Operand(r0, LSL, kPointerSizeLog2)); | |
| 638 sub(r6, r6, Operand(kPointerSize)); | |
| 639 | |
| 640 // Setup the frame structure on the stack. | 636 // Setup the frame structure on the stack. |
| 641 ASSERT_EQ(2 * kPointerSize, ExitFrameConstants::kCallerSPDisplacement); | 637 ASSERT_EQ(2 * kPointerSize, ExitFrameConstants::kCallerSPDisplacement); |
| 642 ASSERT_EQ(1 * kPointerSize, ExitFrameConstants::kCallerPCOffset); | 638 ASSERT_EQ(1 * kPointerSize, ExitFrameConstants::kCallerPCOffset); |
| 643 ASSERT_EQ(0 * kPointerSize, ExitFrameConstants::kCallerFPOffset); | 639 ASSERT_EQ(0 * kPointerSize, ExitFrameConstants::kCallerFPOffset); |
| 644 Push(lr, fp); | 640 Push(lr, fp); |
| 645 mov(fp, Operand(sp)); // Setup new frame pointer. | 641 mov(fp, Operand(sp)); // Setup new frame pointer. |
| 646 // Reserve room for saved entry sp and code object. | 642 // Reserve room for saved entry sp and code object. |
| 647 sub(sp, sp, Operand(2 * kPointerSize)); | 643 sub(sp, sp, Operand(2 * kPointerSize)); |
| 648 if (FLAG_debug_code) { | 644 if (FLAG_debug_code) { |
| 649 mov(ip, Operand(0)); | 645 mov(ip, Operand(0)); |
| 650 str(ip, MemOperand(fp, ExitFrameConstants::kSPOffset)); | 646 str(ip, MemOperand(fp, ExitFrameConstants::kSPOffset)); |
| 651 } | 647 } |
| 652 mov(ip, Operand(CodeObject())); | 648 mov(ip, Operand(CodeObject())); |
| 653 str(ip, MemOperand(fp, ExitFrameConstants::kCodeOffset)); | 649 str(ip, MemOperand(fp, ExitFrameConstants::kCodeOffset)); |
| 654 | 650 |
| 655 // Save the frame pointer and the context in top. | 651 // Save the frame pointer and the context in top. |
| 656 mov(ip, Operand(ExternalReference(Top::k_c_entry_fp_address))); | 652 mov(ip, Operand(ExternalReference(Top::k_c_entry_fp_address))); |
| 657 str(fp, MemOperand(ip)); | 653 str(fp, MemOperand(ip)); |
| 658 mov(ip, Operand(ExternalReference(Top::k_context_address))); | 654 mov(ip, Operand(ExternalReference(Top::k_context_address))); |
| 659 str(cp, MemOperand(ip)); | 655 str(cp, MemOperand(ip)); |
| 660 | 656 |
| 661 // Setup argc and the builtin function in callee-saved registers. | |
| 662 mov(r4, Operand(r0)); | |
| 663 mov(r5, Operand(r1)); | |
| 664 | |
| 665 // Optionally save all double registers. | 657 // Optionally save all double registers. |
| 666 if (save_doubles) { | 658 if (save_doubles) { |
| 667 sub(sp, sp, Operand(DwVfpRegister::kNumRegisters * kDoubleSize)); | 659 sub(sp, sp, Operand(DwVfpRegister::kNumRegisters * kDoubleSize)); |
| 668 const int offset = -2 * kPointerSize; | 660 const int offset = -2 * kPointerSize; |
| 669 for (int i = 0; i < DwVfpRegister::kNumRegisters; i++) { | 661 for (int i = 0; i < DwVfpRegister::kNumRegisters; i++) { |
| 670 DwVfpRegister reg = DwVfpRegister::from_code(i); | 662 DwVfpRegister reg = DwVfpRegister::from_code(i); |
| 671 vstr(reg, fp, offset - ((i + 1) * kDoubleSize)); | 663 vstr(reg, fp, offset - ((i + 1) * kDoubleSize)); |
| 672 } | 664 } |
| 673 // Note that d0 will be accessible at | 665 // Note that d0 will be accessible at |
| 674 // fp - 2 * kPointerSize - DwVfpRegister::kNumRegisters * kDoubleSize, | 666 // fp - 2 * kPointerSize - DwVfpRegister::kNumRegisters * kDoubleSize, |
| 675 // since the sp slot and code slot were pushed after the fp. | 667 // since the sp slot and code slot were pushed after the fp. |
| 676 } | 668 } |
| 677 | 669 |
| 678 // Reserve place for the return address and align the frame preparing for | 670 // Reserve place for the return address and align the frame preparing for |
|
Søren Thygesen Gjesse
2011/02/02 13:24:38
Please update this comment.
Zaheer
2011/02/03 07:27:31
Done.
| |
| 679 // calling the runtime function. | 671 // calling the runtime function. |
| 680 const int frame_alignment = MacroAssembler::ActivationFrameAlignment(); | 672 const int frame_alignment = MacroAssembler::ActivationFrameAlignment(); |
| 681 sub(sp, sp, Operand(kPointerSize)); | 673 sub(sp, sp, Operand((stack_space + 1) * kPointerSize)); |
| 682 if (frame_alignment > 0) { | 674 if (frame_alignment > 0) { |
| 683 ASSERT(IsPowerOf2(frame_alignment)); | 675 ASSERT(IsPowerOf2(frame_alignment)); |
| 684 and_(sp, sp, Operand(-frame_alignment)); | 676 and_(sp, sp, Operand(-frame_alignment)); |
| 685 } | 677 } |
| 686 | 678 |
| 687 // Set the exit frame sp value to point just before the return address | 679 // Set the exit frame sp value to point just before the return address |
| 688 // location. | 680 // location. |
| 689 add(ip, sp, Operand(kPointerSize)); | 681 add(ip, sp, Operand(kPointerSize)); |
| 690 str(ip, MemOperand(fp, ExitFrameConstants::kSPOffset)); | 682 str(ip, MemOperand(fp, ExitFrameConstants::kSPOffset)); |
| 691 } | 683 } |
| (...skipping 787 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 1479 Call(stub->GetCode(), RelocInfo::CODE_TARGET, cond); | 1471 Call(stub->GetCode(), RelocInfo::CODE_TARGET, cond); |
| 1480 } | 1472 } |
| 1481 | 1473 |
| 1482 | 1474 |
| 1483 void MacroAssembler::TailCallStub(CodeStub* stub, Condition cond) { | 1475 void MacroAssembler::TailCallStub(CodeStub* stub, Condition cond) { |
| 1484 ASSERT(allow_stub_calls()); // stub calls are not allowed in some stubs | 1476 ASSERT(allow_stub_calls()); // stub calls are not allowed in some stubs |
| 1485 Jump(stub->GetCode(), RelocInfo::CODE_TARGET, cond); | 1477 Jump(stub->GetCode(), RelocInfo::CODE_TARGET, cond); |
| 1486 } | 1478 } |
| 1487 | 1479 |
| 1488 | 1480 |
| 1481 MaybeObject* MacroAssembler::TryTailCallStub(CodeStub* stub, Condition cond) { | |
| 1482 ASSERT(allow_stub_calls()); // stub calls are not allowed in some stubs | |
|
Søren Thygesen Gjesse
2011/02/02 13:24:38
Start comment with an uppercase letter and end with a period.
Zaheer
2011/02/03 07:27:31
Done.
| |
| 1483 Object* result; | |
| 1484 { MaybeObject* maybe_result = stub->TryGetCode(); | |
| 1485 if (!maybe_result->ToObject(&result)) return maybe_result; | |
| 1486 } | |
| 1487 Jump(stub->GetCode(), RelocInfo::CODE_TARGET, cond); | |
| 1488 return result; | |
| 1489 } | |
| 1490 | |
| 1491 static int AddressOffset(ExternalReference ref0, ExternalReference ref1) { | |
| 1492 int64_t offset = (ref0.address() - ref1.address()); | |
|
antonm
2011/02/02 13:56:28
should have noticed that earlier, sorry. I think
Zaheer
2011/02/03 07:27:31
Sorry, copy/paste mistake from x64. Done.
| |
| 1493 // Check that fits into int. | |
| 1494 ASSERT(static_cast<int>(offset) == offset); | |
| 1495 return static_cast<int>(offset); | |
| 1496 } | |
| 1497 | |
| 1498 | |
| 1499 MaybeObject* MacroAssembler::TryCallApiFunctionAndReturn( | |
| 1500 ApiFunction* function, int stack_space) { | |
| 1501 ExternalReference next_address = | |
| 1502 ExternalReference::handle_scope_next_address(); | |
| 1503 const int kNextOffset = 0; | |
| 1504 const int kLimitOffset = AddressOffset( | |
| 1505 ExternalReference::handle_scope_limit_address(), | |
| 1506 next_address); | |
| 1507 const int kLevelOffset = AddressOffset( | |
| 1508 ExternalReference::handle_scope_level_address(), | |
| 1509 next_address); | |
| 1510 | |
| 1511 // Allocate HandleScope in callee-save registers. | |
| 1512 mov(r7, Operand(next_address)); | |
| 1513 ldr(r4, MemOperand(r7, kNextOffset)); | |
| 1514 ldr(r5, MemOperand(r7, kLimitOffset)); | |
| 1515 ldr(r6, MemOperand(r7, kLevelOffset)); | |
| 1516 add(r6, r6, Operand(1)); | |
| 1517 str(r6, MemOperand(r7, kLevelOffset)); | |
| 1518 | |
| 1519 // Native call returns to the DirectCEntry stub which redirects to the | |
| 1520 // return address pushed on stack (could have moved after GC). | |
|
Søren Thygesen Gjesse
2011/02/02 13:24:38
As far as I can see this relies on DirectCEntryStub.
antonm
2011/02/02 13:56:28
Søren, yes, that's exactly the reason the call goes through the stub.
Søren Thygesen Gjesse
2011/02/02 14:23:50
Sure, and as discussed offline, having DirectCEntryStub
Zaheer
2011/02/03 07:27:31
Added comment
| |
| 1521 DirectCEntryStub stub; | |
|
Søren Thygesen Gjesse
2011/02/02 13:24:38
I think the calling of this stub should be factored out.
Zaheer
2011/02/03 07:27:31
Done. passed function as a parameter instead of re
| |
| 1522 mov(lr, Operand(reinterpret_cast<intptr_t>(stub.GetCode().location()), | |
| 1523 RelocInfo::CODE_TARGET)); | |
| 1524 | |
| 1525 // Push return address (accessible to GC through exit frame pc). | |
| 1526 ExternalReference ref = | |
| 1527 ExternalReference(function, ExternalReference::DIRECT_CALL); | |
| 1528 mov(r2, Operand(reinterpret_cast<intptr_t>(ref.address()))); | |
| 1529 add(ip, pc, Operand(4)); | |
| 1530 str(ip, MemOperand(sp, 0)); | |
| 1531 Jump(r2); // Call the api function. | |
| 1532 | |
| 1533 Label promote_scheduled_exception; | |
| 1534 Label delete_allocated_handles; | |
| 1535 Label leave_exit_frame; | |
| 1536 | |
| 1537 // If result is non-zero, dereference to get the result value | |
| 1538 // otherwise set it to undefined. | |
| 1539 cmp(r0, Operand(0)); | |
| 1540 LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq); | |
| 1541 ldr(r0, MemOperand(r0), ne); | |
| 1542 | |
| 1543 // No more valid handles (the result handle was the last one). Restore | |
| 1544 // previous handle scope. | |
| 1545 str(r4, MemOperand(r7, kNextOffset)); | |
| 1546 if (FLAG_debug_code) { | |
| 1547 ldr(r1, MemOperand(r7, kLevelOffset)); | |
| 1548 cmp(r1, r6); | |
| 1549 Check(eq, "Unexpected level after return from api call"); | |
| 1550 } | |
| 1551 sub(r6, r6, Operand(1)); | |
| 1552 str(r6, MemOperand(r7, kLevelOffset)); | |
| 1553 ldr(ip, MemOperand(r7, kLimitOffset)); | |
| 1554 cmp(r5, ip); | |
| 1555 b(ne, &delete_allocated_handles); | |
| 1556 | |
| 1557 // Check if the function scheduled an exception. | |
| 1558 bind(&leave_exit_frame); | |
| 1559 LoadRoot(r4, Heap::kTheHoleValueRootIndex); | |
| 1560 mov(ip, Operand(ExternalReference(Top::k_pending_exception_address))); | |
|
antonm
2011/02/02 13:56:28
that should be scheduled_exception, not pending.
Zaheer
2011/02/03 07:27:31
Thanks for catching that, and for the explanation; I wasn't aware of it.
| |
| 1561 ldr(r5, MemOperand(ip)); | |
| 1562 cmp(r4, r5); | |
| 1563 b(ne, &promote_scheduled_exception); | |
| 1564 | |
| 1565 // LeaveExitFrame expects unwind space to be in r4. | |
| 1566 mov(r4, Operand(stack_space)); | |
| 1567 LeaveExitFrame(false); | |
| 1568 | |
| 1569 bind(&promote_scheduled_exception); | |
| 1570 MaybeObject* result = TryTailCallExternalReference( | |
| 1571 ExternalReference(Runtime::kPromoteScheduledException), 0, 1); | |
| 1572 if (result->IsFailure()) { | |
| 1573 return result; | |
| 1574 } | |
| 1575 | |
| 1576 // HandleScope limit has changed. Delete allocated extensions. | |
| 1577 bind(&delete_allocated_handles); | |
| 1578 str(r5, MemOperand(r7, kLimitOffset)); | |
| 1579 mov(r4, r0); | |
| 1580 PrepareCallCFunction(0, r5); | |
| 1581 CallCFunction(ExternalReference::delete_handle_scope_extensions(), 0); | |
| 1582 mov(r0, r4); | |
| 1583 jmp(&leave_exit_frame); | |
| 1584 | |
| 1585 return result; | |
| 1586 } | |
| 1587 | |
| 1588 | |
| 1489 void MacroAssembler::IllegalOperation(int num_arguments) { | 1589 void MacroAssembler::IllegalOperation(int num_arguments) { |
| 1490 if (num_arguments > 0) { | 1590 if (num_arguments > 0) { |
| 1491 add(sp, sp, Operand(num_arguments * kPointerSize)); | 1591 add(sp, sp, Operand(num_arguments * kPointerSize)); |
| 1492 } | 1592 } |
| 1493 LoadRoot(r0, Heap::kUndefinedValueRootIndex); | 1593 LoadRoot(r0, Heap::kUndefinedValueRootIndex); |
| 1494 } | 1594 } |
| 1495 | 1595 |
| 1496 | 1596 |
| 1497 void MacroAssembler::IndexFromHash(Register hash, Register index) { | 1597 void MacroAssembler::IndexFromHash(Register hash, Register index) { |
| 1498 // If the hash field contains an array index pick it out. The assert checks | 1598 // If the hash field contains an array index pick it out. The assert checks |
| (...skipping 232 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 1731 int num_arguments, | 1831 int num_arguments, |
| 1732 int result_size) { | 1832 int result_size) { |
| 1733 // TODO(1236192): Most runtime routines don't need the number of | 1833 // TODO(1236192): Most runtime routines don't need the number of |
| 1734 // arguments passed in because it is constant. At some point we | 1834 // arguments passed in because it is constant. At some point we |
| 1735 // should remove this need and make the runtime routine entry code | 1835 // should remove this need and make the runtime routine entry code |
| 1736 // smarter. | 1836 // smarter. |
| 1737 mov(r0, Operand(num_arguments)); | 1837 mov(r0, Operand(num_arguments)); |
| 1738 JumpToExternalReference(ext); | 1838 JumpToExternalReference(ext); |
| 1739 } | 1839 } |
| 1740 | 1840 |
| 1841 MaybeObject* MacroAssembler::TryTailCallExternalReference( | |
| 1842 const ExternalReference& ext, int num_arguments, int result_size) { | |
| 1843 // TODO(1236192): Most runtime routines don't need the number of | |
| 1844 // arguments passed in because it is constant. At some point we | |
| 1845 // should remove this need and make the runtime routine entry code | |
| 1846 // smarter. | |
| 1847 mov(r0, Operand(num_arguments)); | |
| 1848 return TryJumpToExternalReference(ext); | |
| 1849 } | |
| 1741 | 1850 |
| 1742 void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid, | 1851 void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid, |
| 1743 int num_arguments, | 1852 int num_arguments, |
| 1744 int result_size) { | 1853 int result_size) { |
| 1745 TailCallExternalReference(ExternalReference(fid), num_arguments, result_size); | 1854 TailCallExternalReference(ExternalReference(fid), num_arguments, result_size); |
| 1746 } | 1855 } |
| 1747 | 1856 |
| 1748 | 1857 |
| 1749 void MacroAssembler::JumpToExternalReference(const ExternalReference& builtin) { | 1858 void MacroAssembler::JumpToExternalReference(const ExternalReference& builtin) { |
| 1750 #if defined(__thumb__) | 1859 #if defined(__thumb__) |
| 1751 // Thumb mode builtin. | 1860 // Thumb mode builtin. |
| 1752 ASSERT((reinterpret_cast<intptr_t>(builtin.address()) & 1) == 1); | 1861 ASSERT((reinterpret_cast<intptr_t>(builtin.address()) & 1) == 1); |
| 1753 #endif | 1862 #endif |
| 1754 mov(r1, Operand(builtin)); | 1863 mov(r1, Operand(builtin)); |
| 1755 CEntryStub stub(1); | 1864 CEntryStub stub(1); |
| 1756 Jump(stub.GetCode(), RelocInfo::CODE_TARGET); | 1865 Jump(stub.GetCode(), RelocInfo::CODE_TARGET); |
| 1757 } | 1866 } |
| 1758 | 1867 |
| 1868 MaybeObject* MacroAssembler::TryJumpToExternalReference( | |
| 1869 const ExternalReference& builtin) { | |
| 1870 #if defined(__thumb__) | |
| 1871 // Thumb mode builtin. | |
| 1872 ASSERT((reinterpret_cast<intptr_t>(builtin.address()) & 1) == 1); | |
| 1873 #endif | |
| 1874 mov(r1, Operand(builtin)); | |
| 1875 CEntryStub stub(1); | |
| 1876 return TryTailCallStub(&stub); | |
| 1877 } | |
| 1759 | 1878 |
| 1760 void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, | 1879 void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, |
| 1761 InvokeJSFlags flags, | 1880 InvokeJSFlags flags, |
| 1762 PostCallGenerator* post_call_generator) { | 1881 PostCallGenerator* post_call_generator) { |
| 1763 GetBuiltinEntry(r2, id); | 1882 GetBuiltinEntry(r2, id); |
| 1764 if (flags == CALL_JS) { | 1883 if (flags == CALL_JS) { |
| 1765 Call(r2); | 1884 Call(r2); |
| 1766 if (post_call_generator != NULL) post_call_generator->Generate(); | 1885 if (post_call_generator != NULL) post_call_generator->Generate(); |
| 1767 } else { | 1886 } else { |
| 1768 ASSERT(flags == JUMP_JS); | 1887 ASSERT(flags == JUMP_JS); |
| (...skipping 505 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 2274 | 2393 |
| 2275 void CodePatcher::Emit(Address addr) { | 2394 void CodePatcher::Emit(Address addr) { |
| 2276 masm()->emit(reinterpret_cast<Instr>(addr)); | 2395 masm()->emit(reinterpret_cast<Instr>(addr)); |
| 2277 } | 2396 } |
| 2278 #endif // ENABLE_DEBUGGER_SUPPORT | 2397 #endif // ENABLE_DEBUGGER_SUPPORT |
| 2279 | 2398 |
| 2280 | 2399 |
| 2281 } } // namespace v8::internal | 2400 } } // namespace v8::internal |
| 2282 | 2401 |
| 2283 #endif // V8_TARGET_ARCH_ARM | 2402 #endif // V8_TARGET_ARCH_ARM |
| OLD | NEW |