OLD | NEW |
---|---|
1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 1379 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
1390 Call(stub->GetCode(), RelocInfo::CODE_TARGET, cond); | 1390 Call(stub->GetCode(), RelocInfo::CODE_TARGET, cond); |
1391 } | 1391 } |
1392 | 1392 |
1393 | 1393 |
1394 void MacroAssembler::TailCallStub(CodeStub* stub, Condition cond) { | 1394 void MacroAssembler::TailCallStub(CodeStub* stub, Condition cond) { |
1395 ASSERT(allow_stub_calls()); // stub calls are not allowed in some stubs | 1395 ASSERT(allow_stub_calls()); // stub calls are not allowed in some stubs |
1396 Jump(stub->GetCode(), RelocInfo::CODE_TARGET, cond); | 1396 Jump(stub->GetCode(), RelocInfo::CODE_TARGET, cond); |
1397 } | 1397 } |
1398 | 1398 |
1399 | 1399 |
1400 MaybeObject* MacroAssembler::TryTailCallStub(CodeStub* stub, Condition cond) { | |
1401 ASSERT(allow_stub_calls()); // stub calls are not allowed in some stubs | |
1402 Object* result; | |
1403 { MaybeObject* maybe_result = stub->TryGetCode(); | |
1404 if (!maybe_result->ToObject(&result)) return maybe_result; | |
1405 } | |
1406 Jump(stub->GetCode(), RelocInfo::CODE_TARGET, cond); | |
1407 return result; | |
1408 } | |
1409 | |
1410 void MacroAssembler::PrepareCallApiFunction(int arg_stack_space, | |
1411 int unwind_space) { | |
1412 // Create the exit frame. | |
1413 // +- caller pc -+- sp on exit -+- caller fp -+- code object -+- | |
1414 // +- marker -+- exit frame sp -+- stack grows here -+- | |
1415 add(ip, sp, Operand(unwind_space * kPointerSize)); | |
1416 stm(db_w, sp, fp.bit() | ip.bit() | lr.bit()); | |
1417 mov(fp, Operand(sp)); | |
1418 | |
1419 mov(ip, Operand(CodeObject())); | |
1420 push(ip); | |
1421 mov(ip, Operand(ExitApiFrameConstants::kMarker)); | |
1422 push(ip); | |
1423 push(ip); // Exit Frame sp patched before call. | |
1424 | |
1425 mov(ip, Operand(ExternalReference(Top::k_c_entry_fp_address))); | |
1426 str(fp, MemOperand(ip)); | |
1427 mov(ip, Operand(ExternalReference(Top::k_context_address))); | |
1428 str(cp, MemOperand(ip)); | |
1429 | |
1430 // Create space for the arguments below the exit frame. | |
1431 // +- exit frame -+- arguments -+- stack grows here -+ | |
1432 sub(sp, sp, Operand(arg_stack_space * kPointerSize)); | |
1433 } | |
1434 | |
1435 static int AddressOffset(ExternalReference ref0, ExternalReference ref1) { | |
1436 int64_t offset = (ref0.address() - ref1.address()); | |
1438 // Check that the offset fits into an int. | |
1438 ASSERT(static_cast<int>(offset) == offset); | |
1439 return static_cast<int>(offset); | |
1440 } | |
1441 | |
1442 MaybeObject* MacroAssembler::TryCallApiFunctionAndReturn( | |
1443 ApiFunction* function) { | |
1444 ExternalReference next_address = | |
1445 ExternalReference::handle_scope_next_address(); | |
1446 const int kNextOffset = 0; | |
1447 const int kLimitOffset = AddressOffset( | |
1448 ExternalReference::handle_scope_limit_address(), | |
1449 next_address); | |
1450 const int kLevelOffset = AddressOffset( | |
1451 ExternalReference::handle_scope_level_address(), | |
1452 next_address); | |
1453 | |
1454 // Allocate HandleScope in callee-save registers. | |
1455 mov(r7, Operand(next_address)); | |
1456 ldr(r4, MemOperand(r7, kNextOffset)); | |
1457 ldr(r5, MemOperand(r7, kLimitOffset)); | |
1458 ldr(r6, MemOperand(r7, kLevelOffset)); | |
1459 add(r6, r6, Operand(1)); | |
1460 str(r6, MemOperand(r7, kLevelOffset)); | |
1461 | |
1462 // Create one slot for the return address to be pushed later | |
1463 // and align the stack. | |
1464 int frame_alignment = ActivationFrameAlignment(); | |
SeRya
2011/01/20 12:09:28
Currently CEntryStub stack layout is following:
<c
Zaheer
2011/01/20 13:23:51
Explaining the issue below, my layout in bit more
SeRya
2011/01/20 15:18:47
the placeholder above arguments.
Zaheer
2011/01/20 16:26:59
Thanks. it does simplify it a lot.
| |
1465 int frame_alignment_mask = frame_alignment - 1; | |
1466 if (frame_alignment > kPointerSize) { | |
1467 ASSERT(frame_alignment == 2 * kPointerSize); | |
1468 sub(sp, sp, Operand(1 * kPointerSize)); | |
1469 tst(sp, Operand(frame_alignment_mask)); | |
1470 // Stack alignment placeholder need not be initialized as it's below | |
1471 // c_entry_fp_address and does not affect GC. | |
1472 push(ip, nz); | |
1473 } else { | |
1474 sub(sp, sp, Operand(1 * kPointerSize)); | |
1475 } | |
1476 | |
1477 // Store sp in the exit frame sp slot. sp - 1 points to the return | |
1478 // address pushed before the call. | |
1479 add(sp, sp, Operand(1 * kPointerSize)); | |
1480 str(sp, MemOperand(fp, ExitApiFrameConstants::kSPOffset)); | |
SeRya
2011/01/20 12:09:28
It looks like this sequence is equal to the shorte
Zaheer
2011/01/20 13:23:51
Thanks for the catch.
| |
1481 | |
1482 // Native call returns to the DirectCEntry stub which redirects to the | |
1483 // return address pushed on stack (could have moved after GC). | |
1484 DirectCEntryStub stub; | |
1485 mov(lr, Operand(reinterpret_cast<intptr_t>(stub.GetCode().location()), | |
1486 RelocInfo::CODE_TARGET)); | |
1487 | |
1488 // Push return address (accessible to GC through exit frame pc). | |
1489 mov(r2, Operand(reinterpret_cast<intptr_t>(function->address()))); | |
1490 add(ip, pc, Operand(4)); | |
1491 push(ip); | |
1492 Jump(r2); // Call the api function. | |
1493 | |
1494 Label promote_scheduled_exception; | |
1495 Label delete_allocated_handles; | |
1496 Label leave_exit_frame; | |
1497 | |
1498 // If result is non-zero, dereference it to get the result value; | |
1499 // otherwise, set it to undefined. | |
1500 cmp(r0, Operand(0)); | |
1501 LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq); | |
1502 ldr(r0, MemOperand(r0), ne); | |
1503 | |
1504 // No more valid handles (the result handle was the last one). Restore | |
1505 // previous handle scope. | |
1506 str(r4, MemOperand(r7, kNextOffset)); | |
1507 if (FLAG_debug_code) { | |
1508 ldr(r1, MemOperand(r7, kLevelOffset)); | |
1509 cmp(r1, r6); | |
1510 Check(eq, "Unexpected level after return from api call"); | |
1511 } | |
1512 sub(r6, r6, Operand(1)); | |
1513 str(r6, MemOperand(r7, kLevelOffset)); | |
1514 ldr(ip, MemOperand(r7, kLimitOffset)); | |
1515 cmp(r5, ip); | |
1516 b(ne, &delete_allocated_handles); | |
1517 | |
1518 // Check if the function scheduled an exception. | |
1519 bind(&leave_exit_frame); | |
1520 LoadRoot(r4, Heap::kTheHoleValueRootIndex); | |
1521 mov(ip, Operand(ExternalReference(Top::k_pending_exception_address))); | |
1522 ldr(r5, MemOperand(ip)); | |
1523 cmp(r4, r5); | |
1524 b(ne, &promote_scheduled_exception); | |
1525 LeaveExitFrame(false); | |
1526 | |
1527 bind(&promote_scheduled_exception); | |
1528 MaybeObject* result = TryTailCallExternalReference( | |
1529 ExternalReference(Runtime::kPromoteScheduledException), 0, 1); | |
1530 if (result->IsFailure()) { | |
1531 return result; | |
1532 } | |
1533 | |
1534 // HandleScope limit has changed. Delete allocated extensions. | |
1535 bind(&delete_allocated_handles); | |
1536 str(r5, MemOperand(r7, kLimitOffset)); | |
1537 mov(r4, r0); | |
1538 PrepareCallCFunction(0, r5); | |
1539 CallCFunction(ExternalReference::delete_handle_scope_extensions(), 0); | |
1540 mov(r0, r4); | |
1541 jmp(&leave_exit_frame); | |
1542 | |
1543 return result; | |
1544 } | |
1545 | |
1546 | |
1400 void MacroAssembler::IllegalOperation(int num_arguments) { | 1547 void MacroAssembler::IllegalOperation(int num_arguments) { |
1401 if (num_arguments > 0) { | 1548 if (num_arguments > 0) { |
1402 add(sp, sp, Operand(num_arguments * kPointerSize)); | 1549 add(sp, sp, Operand(num_arguments * kPointerSize)); |
1403 } | 1550 } |
1404 LoadRoot(r0, Heap::kUndefinedValueRootIndex); | 1551 LoadRoot(r0, Heap::kUndefinedValueRootIndex); |
1405 } | 1552 } |
1406 | 1553 |
1407 | 1554 |
1408 void MacroAssembler::IndexFromHash(Register hash, Register index) { | 1555 void MacroAssembler::IndexFromHash(Register hash, Register index) { |
1409 // If the hash field contains an array index pick it out. The assert checks | 1556 // If the hash field contains an array index pick it out. The assert checks |
(...skipping 232 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
1642 int num_arguments, | 1789 int num_arguments, |
1643 int result_size) { | 1790 int result_size) { |
1644 // TODO(1236192): Most runtime routines don't need the number of | 1791 // TODO(1236192): Most runtime routines don't need the number of |
1645 // arguments passed in because it is constant. At some point we | 1792 // arguments passed in because it is constant. At some point we |
1646 // should remove this need and make the runtime routine entry code | 1793 // should remove this need and make the runtime routine entry code |
1647 // smarter. | 1794 // smarter. |
1648 mov(r0, Operand(num_arguments)); | 1795 mov(r0, Operand(num_arguments)); |
1649 JumpToExternalReference(ext); | 1796 JumpToExternalReference(ext); |
1650 } | 1797 } |
1651 | 1798 |
1799 MaybeObject* MacroAssembler::TryTailCallExternalReference( | |
1800 const ExternalReference& ext, int num_arguments, int result_size) { | |
1801 // TODO(1236192): Most runtime routines don't need the number of | |
1802 // arguments passed in because it is constant. At some point we | |
1803 // should remove this need and make the runtime routine entry code | |
1804 // smarter. | |
1805 mov(r0, Operand(num_arguments)); | |
1806 return TryJumpToExternalReference(ext); | |
1807 } | |
1652 | 1808 |
1653 void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid, | 1809 void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid, |
1654 int num_arguments, | 1810 int num_arguments, |
1655 int result_size) { | 1811 int result_size) { |
1656 TailCallExternalReference(ExternalReference(fid), num_arguments, result_size); | 1812 TailCallExternalReference(ExternalReference(fid), num_arguments, result_size); |
1657 } | 1813 } |
1658 | 1814 |
1659 | 1815 |
1660 void MacroAssembler::JumpToExternalReference(const ExternalReference& builtin) { | 1816 void MacroAssembler::JumpToExternalReference(const ExternalReference& builtin) { |
1661 #if defined(__thumb__) | 1817 #if defined(__thumb__) |
1662 // Thumb mode builtin. | 1818 // Thumb mode builtin. |
1663 ASSERT((reinterpret_cast<intptr_t>(builtin.address()) & 1) == 1); | 1819 ASSERT((reinterpret_cast<intptr_t>(builtin.address()) & 1) == 1); |
1664 #endif | 1820 #endif |
1665 mov(r1, Operand(builtin)); | 1821 mov(r1, Operand(builtin)); |
1666 CEntryStub stub(1); | 1822 CEntryStub stub(1); |
1667 Jump(stub.GetCode(), RelocInfo::CODE_TARGET); | 1823 Jump(stub.GetCode(), RelocInfo::CODE_TARGET); |
1668 } | 1824 } |
1669 | 1825 |
1826 MaybeObject* MacroAssembler::TryJumpToExternalReference( | |
1827 const ExternalReference& builtin) { | |
1828 #if defined(__thumb__) | |
1829 // Thumb mode builtin. | |
1830 ASSERT((reinterpret_cast<intptr_t>(builtin.address()) & 1) == 1); | |
1831 #endif | |
1832 mov(r1, Operand(builtin)); | |
1833 CEntryStub stub(1); | |
1834 return TryTailCallStub(&stub); | |
1835 } | |
1670 | 1836 |
1671 void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, | 1837 void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, |
1672 InvokeJSFlags flags) { | 1838 InvokeJSFlags flags) { |
1673 GetBuiltinEntry(r2, id); | 1839 GetBuiltinEntry(r2, id); |
1674 if (flags == CALL_JS) { | 1840 if (flags == CALL_JS) { |
1675 Call(r2); | 1841 Call(r2); |
1676 } else { | 1842 } else { |
1677 ASSERT(flags == JUMP_JS); | 1843 ASSERT(flags == JUMP_JS); |
1678 Jump(r2); | 1844 Jump(r2); |
1679 } | 1845 } |
(...skipping 465 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
2145 | 2311 |
2146 void CodePatcher::Emit(Address addr) { | 2312 void CodePatcher::Emit(Address addr) { |
2147 masm()->emit(reinterpret_cast<Instr>(addr)); | 2313 masm()->emit(reinterpret_cast<Instr>(addr)); |
2148 } | 2314 } |
2149 #endif // ENABLE_DEBUGGER_SUPPORT | 2315 #endif // ENABLE_DEBUGGER_SUPPORT |
2150 | 2316 |
2151 | 2317 |
2152 } } // namespace v8::internal | 2318 } } // namespace v8::internal |
2153 | 2319 |
2154 #endif // V8_TARGET_ARCH_ARM | 2320 #endif // V8_TARGET_ARCH_ARM |
OLD | NEW |