OLD | NEW |
---|---|
1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 524 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
535 // r1: preserved | 535 // r1: preserved |
536 // r2: preserved | 536 // r2: preserved |
537 | 537 |
538 // Drop the execution stack down to the frame pointer and restore | 538 // Drop the execution stack down to the frame pointer and restore |
539 // the caller frame pointer and return address. | 539 // the caller frame pointer and return address. |
540 mov(sp, fp); | 540 mov(sp, fp); |
541 ldm(ia_w, sp, fp.bit() | lr.bit()); | 541 ldm(ia_w, sp, fp.bit() | lr.bit()); |
542 } | 542 } |
543 | 543 |
544 | 544 |
545 void MacroAssembler::EnterExitFrame(bool save_doubles) { | 545 void MacroAssembler::EnterExitFrame(bool save_doubles, int stack_space) { |
546 // r0 is argc. | 546 // Prepare the stack to be aligned when calling into C. |
547 // Compute callee's stack pointer before making changes and save it as | 547 int pending_pushes = stack_space + 4; // 4 pushes in this function. |
antonm
2011/01/21 17:56:36
May you explain: it looks like only pending_pushes
Zaheer
2011/01/24 09:43:31
Sorry, I missed your comment.
antonm
2011/01/26 11:36:37
I am sorry, I meant it's somewhat strange to see p
Zaheer
2011/02/02 10:05:58
not required with the rebase.
| |
548 // ip register so that it is restored as sp register on exit, thereby | |
549 // popping the args. | |
550 | |
551 // ip = sp + kPointerSize * #args; | |
552 add(ip, sp, Operand(r0, LSL, kPointerSizeLog2)); | |
553 | |
554 // Compute the argv pointer and keep it in a callee-saved register. | |
555 sub(r6, ip, Operand(kPointerSize)); | |
556 | |
557 // Prepare the stack to be aligned when calling into C. After this point there | |
558 // are 5 pushes before the call into C, so the stack needs to be aligned after | |
559 // 5 pushes. | |
560 int frame_alignment = ActivationFrameAlignment(); | 548 int frame_alignment = ActivationFrameAlignment(); |
561 int frame_alignment_mask = frame_alignment - 1; | 549 int frame_alignment_mask = frame_alignment - 1; |
562 if (frame_alignment != kPointerSize) { | 550 if (frame_alignment != kPointerSize) { |
563 // The following code needs to be more general if this assert does not hold. | 551 // The following code needs to be more general if this assert does not hold. |
564 ASSERT(frame_alignment == 2 * kPointerSize); | 552 ASSERT(frame_alignment == 2 * kPointerSize); |
565 // With 5 pushes left the frame must be unaligned at this point. | |
566 mov(r7, Operand(Smi::FromInt(0))); | 553 mov(r7, Operand(Smi::FromInt(0))); |
567 tst(sp, Operand((frame_alignment - kPointerSize) & frame_alignment_mask)); | 554 tst(sp, Operand(frame_alignment_mask)); |
568 push(r7, eq); // Push if aligned to make it unaligned. | 555 // If stack is unaligned, align it if requesting even slots otherwise |
556 // unalign it if requesting odd slots. | |
557 if (pending_pushes % 2 == 0) { | |
558 push(r7, ne); | |
559 } else { | |
560 push(r7, eq); | |
561 } | |
569 } | 562 } |
570 | 563 |
571 // Push in reverse order: caller_fp, sp_on_exit, and caller_pc. | 564 // Push in reverse order: caller_fp, sp_on_exit, and caller_pc. |
572 stm(db_w, sp, fp.bit() | ip.bit() | lr.bit()); | 565 stm(db_w, sp, fp.bit() | ip.bit() | lr.bit()); |
573 mov(fp, Operand(sp)); // Setup new frame pointer. | 566 mov(fp, Operand(sp)); // Setup new frame pointer. |
574 | 567 |
575 mov(ip, Operand(CodeObject())); | 568 mov(ip, Operand(CodeObject())); |
576 push(ip); // Accessed from ExitFrame::code_slot. | 569 push(ip); // Accessed from ExitFrame::code_slot. |
577 | 570 |
578 // Save the frame pointer and the context in top. | 571 // Save the frame pointer and the context in top. |
579 mov(ip, Operand(ExternalReference(Top::k_c_entry_fp_address))); | 572 mov(ip, Operand(ExternalReference(Top::k_c_entry_fp_address))); |
580 str(fp, MemOperand(ip)); | 573 str(fp, MemOperand(ip)); |
581 mov(ip, Operand(ExternalReference(Top::k_context_address))); | 574 mov(ip, Operand(ExternalReference(Top::k_context_address))); |
582 str(cp, MemOperand(ip)); | 575 str(cp, MemOperand(ip)); |
583 | 576 |
584 // Setup argc and the builtin function in callee-saved registers. | |
585 mov(r4, Operand(r0)); | |
586 mov(r5, Operand(r1)); | |
587 | |
588 // Optionally save all double registers. | 577 // Optionally save all double registers. |
589 if (save_doubles) { | 578 if (save_doubles) { |
590 // TODO(regis): Use vstrm instruction. | 579 // TODO(regis): Use vstrm instruction. |
591 // The stack alignment code above made sp unaligned, so add space for one | 580 // The stack alignment code above made sp unaligned, so add space for one |
592 // more double register and use aligned addresses. | 581 // more double register and use aligned addresses. |
593 ASSERT(kDoubleSize == frame_alignment); | 582 ASSERT(kDoubleSize == frame_alignment); |
594 // Mark the frame as containing doubles by pushing a non-valid return | 583 // Mark the frame as containing doubles by pushing a non-valid return |
595 // address, i.e. 0. | 584 // address, i.e. 0. |
596 ASSERT(ExitFrameConstants::kMarkerOffset == -2 * kPointerSize); | 585 ASSERT(ExitFrameConstants::kMarkerOffset == -2 * kPointerSize); |
597 mov(ip, Operand(0)); // Marker and alignment word. | 586 mov(ip, Operand(0)); // Marker and alignment word. |
(...skipping 792 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
1390 Call(stub->GetCode(), RelocInfo::CODE_TARGET, cond); | 1379 Call(stub->GetCode(), RelocInfo::CODE_TARGET, cond); |
1391 } | 1380 } |
1392 | 1381 |
1393 | 1382 |
1394 void MacroAssembler::TailCallStub(CodeStub* stub, Condition cond) { | 1383 void MacroAssembler::TailCallStub(CodeStub* stub, Condition cond) { |
1395 ASSERT(allow_stub_calls()); // stub calls are not allowed in some stubs | 1384 ASSERT(allow_stub_calls()); // stub calls are not allowed in some stubs |
1396 Jump(stub->GetCode(), RelocInfo::CODE_TARGET, cond); | 1385 Jump(stub->GetCode(), RelocInfo::CODE_TARGET, cond); |
1397 } | 1386 } |
1398 | 1387 |
1399 | 1388 |
1389 MaybeObject* MacroAssembler::TryTailCallStub(CodeStub* stub, Condition cond) { | |
1390 ASSERT(allow_stub_calls()); // stub calls are not allowed in some stubs | |
1391 Object* result; | |
1392 { MaybeObject* maybe_result = stub->TryGetCode(); | |
1393 if (!maybe_result->ToObject(&result)) return maybe_result; | |
1394 } | |
1395 Jump(stub->GetCode(), RelocInfo::CODE_TARGET, cond); | |
1396 return result; | |
1397 } | |
1398 | |
1399 void MacroAssembler::PrepareCallApiFunction(int arg_stack_space, | |
1400 int unwind_space) { | |
1401 add(ip, sp, Operand(unwind_space * kPointerSize)); | |
1402 EnterExitFrame(false, arg_stack_space + 1); | |
1403 | |
1404 // Create space for the arguments below the exit frame. | |
1405 // +- exit frame -+- arguments -+- stack grows here -+ | |
1406 // 1 for the return address | |
1407 sub(sp, sp, Operand((arg_stack_space + 1) * kPointerSize)); | |
1408 } | |
1409 | |
1410 static int AddressOffset(ExternalReference ref0, ExternalReference ref1) { | |
1411 int64_t offset = (ref0.address() - ref1.address()); | |
1412 // Check that fits into int. | |
1413 ASSERT(static_cast<int>(offset) == offset); | |
1414 return static_cast<int>(offset); | |
1415 } | |
1416 | |
1417 MaybeObject* MacroAssembler::TryCallApiFunctionAndReturn( | |
1418 ApiFunction* function) { | |
1419 ExternalReference next_address = | |
1420 ExternalReference::handle_scope_next_address(); | |
1421 const int kNextOffset = 0; | |
1422 const int kLimitOffset = AddressOffset( | |
1423 ExternalReference::handle_scope_limit_address(), | |
1424 next_address); | |
1425 const int kLevelOffset = AddressOffset( | |
1426 ExternalReference::handle_scope_level_address(), | |
1427 next_address); | |
1428 | |
1429 // Allocate HandleScope in callee-save registers. | |
1430 mov(r7, Operand(next_address)); | |
1431 ldr(r4, MemOperand(r7, kNextOffset)); | |
1432 ldr(r5, MemOperand(r7, kLimitOffset)); | |
1433 ldr(r6, MemOperand(r7, kLevelOffset)); | |
1434 add(r6, r6, Operand(1)); | |
1435 str(r6, MemOperand(r7, kLevelOffset)); | |
1436 | |
1437 // Native call returns to the DirectCEntry stub which redirects to the | |
1438 // return address pushed on stack (could have moved after GC). | |
1439 DirectCEntryStub stub; | |
1440 mov(lr, Operand(reinterpret_cast<intptr_t>(stub.GetCode().location()), | |
1441 RelocInfo::CODE_TARGET)); | |
1442 | |
1443 // Push return address (accessible to GC through exit frame pc). | |
1444 ExternalReference ref = | |
1445 ExternalReference(function, false, ExternalReference::DIRECT_CALL); | |
1446 mov(r2, Operand(reinterpret_cast<intptr_t>(ref.address()))); | |
1447 add(ip, pc, Operand(4)); | |
1448 str(ip, MemOperand(fp, ExitFrameConstants::kPCOffset)); | |
1449 Jump(r2); // Call the api function. | |
1450 | |
1451 Label promote_scheduled_exception; | |
1452 Label delete_allocated_handles; | |
1453 Label leave_exit_frame; | |
1454 | |
1455 // If result is non-zero, dereference to get the result value | |
1456 // otherwise set it to undefined. | |
1457 cmp(r0, Operand(0)); | |
1458 LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq); | |
1459 ldr(r0, MemOperand(r0), ne); | |
1460 | |
1461 // No more valid handles (the result handle was the last one). Restore | |
1462 // previous handle scope. | |
1463 str(r4, MemOperand(r7, kNextOffset)); | |
1464 if (FLAG_debug_code) { | |
1465 ldr(r1, MemOperand(r7, kLevelOffset)); | |
1466 cmp(r1, r6); | |
1467 Check(eq, "Unexpected level after return from api call"); | |
1468 } | |
1469 sub(r6, r6, Operand(1)); | |
1470 str(r6, MemOperand(r7, kLevelOffset)); | |
1471 ldr(ip, MemOperand(r7, kLimitOffset)); | |
1472 cmp(r5, ip); | |
1473 b(ne, &delete_allocated_handles); | |
1474 | |
1475 // Check if the function scheduled an exception. | |
1476 bind(&leave_exit_frame); | |
1477 LoadRoot(r4, Heap::kTheHoleValueRootIndex); | |
1478 mov(ip, Operand(ExternalReference(Top::k_pending_exception_address))); | |
1479 ldr(r5, MemOperand(ip)); | |
1480 cmp(r4, r5); | |
1481 b(ne, &promote_scheduled_exception); | |
1482 LeaveExitFrame(false); | |
1483 | |
1484 bind(&promote_scheduled_exception); | |
1485 MaybeObject* result = TryTailCallExternalReference( | |
1486 ExternalReference(Runtime::kPromoteScheduledException), 0, 1); | |
1487 if (result->IsFailure()) { | |
1488 return result; | |
1489 } | |
1490 | |
1491 // HandleScope limit has changed. Delete allocated extensions. | |
1492 bind(&delete_allocated_handles); | |
1493 str(r5, MemOperand(r7, kLimitOffset)); | |
1494 mov(r4, r0); | |
1495 PrepareCallCFunction(0, r5); | |
1496 CallCFunction(ExternalReference::delete_handle_scope_extensions(), 0); | |
1497 mov(r0, r4); | |
1498 jmp(&leave_exit_frame); | |
1499 | |
1500 return result; | |
1501 } | |
1502 | |
1503 | |
1400 void MacroAssembler::IllegalOperation(int num_arguments) { | 1504 void MacroAssembler::IllegalOperation(int num_arguments) { |
1401 if (num_arguments > 0) { | 1505 if (num_arguments > 0) { |
1402 add(sp, sp, Operand(num_arguments * kPointerSize)); | 1506 add(sp, sp, Operand(num_arguments * kPointerSize)); |
1403 } | 1507 } |
1404 LoadRoot(r0, Heap::kUndefinedValueRootIndex); | 1508 LoadRoot(r0, Heap::kUndefinedValueRootIndex); |
1405 } | 1509 } |
1406 | 1510 |
1407 | 1511 |
1408 void MacroAssembler::IndexFromHash(Register hash, Register index) { | 1512 void MacroAssembler::IndexFromHash(Register hash, Register index) { |
1409 // If the hash field contains an array index pick it out. The assert checks | 1513 // If the hash field contains an array index pick it out. The assert checks |
(...skipping 232 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
1642 int num_arguments, | 1746 int num_arguments, |
1643 int result_size) { | 1747 int result_size) { |
1644 // TODO(1236192): Most runtime routines don't need the number of | 1748 // TODO(1236192): Most runtime routines don't need the number of |
1645 // arguments passed in because it is constant. At some point we | 1749 // arguments passed in because it is constant. At some point we |
1646 // should remove this need and make the runtime routine entry code | 1750 // should remove this need and make the runtime routine entry code |
1647 // smarter. | 1751 // smarter. |
1648 mov(r0, Operand(num_arguments)); | 1752 mov(r0, Operand(num_arguments)); |
1649 JumpToExternalReference(ext); | 1753 JumpToExternalReference(ext); |
1650 } | 1754 } |
1651 | 1755 |
1756 MaybeObject* MacroAssembler::TryTailCallExternalReference( | |
1757 const ExternalReference& ext, int num_arguments, int result_size) { | |
1758 // TODO(1236192): Most runtime routines don't need the number of | |
1759 // arguments passed in because it is constant. At some point we | |
1760 // should remove this need and make the runtime routine entry code | |
1761 // smarter. | |
1762 mov(r0, Operand(num_arguments)); | |
1763 return TryJumpToExternalReference(ext); | |
1764 } | |
1652 | 1765 |
1653 void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid, | 1766 void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid, |
1654 int num_arguments, | 1767 int num_arguments, |
1655 int result_size) { | 1768 int result_size) { |
1656 TailCallExternalReference(ExternalReference(fid), num_arguments, result_size); | 1769 TailCallExternalReference(ExternalReference(fid), num_arguments, result_size); |
1657 } | 1770 } |
1658 | 1771 |
1659 | 1772 |
1660 void MacroAssembler::JumpToExternalReference(const ExternalReference& builtin) { | 1773 void MacroAssembler::JumpToExternalReference(const ExternalReference& builtin) { |
1661 #if defined(__thumb__) | 1774 #if defined(__thumb__) |
1662 // Thumb mode builtin. | 1775 // Thumb mode builtin. |
1663 ASSERT((reinterpret_cast<intptr_t>(builtin.address()) & 1) == 1); | 1776 ASSERT((reinterpret_cast<intptr_t>(builtin.address()) & 1) == 1); |
1664 #endif | 1777 #endif |
1665 mov(r1, Operand(builtin)); | 1778 mov(r1, Operand(builtin)); |
1666 CEntryStub stub(1); | 1779 CEntryStub stub(1); |
1667 Jump(stub.GetCode(), RelocInfo::CODE_TARGET); | 1780 Jump(stub.GetCode(), RelocInfo::CODE_TARGET); |
1668 } | 1781 } |
1669 | 1782 |
1783 MaybeObject* MacroAssembler::TryJumpToExternalReference( | |
1784 const ExternalReference& builtin) { | |
1785 #if defined(__thumb__) | |
1786 // Thumb mode builtin. | |
1787 ASSERT((reinterpret_cast<intptr_t>(builtin.address()) & 1) == 1); | |
1788 #endif | |
1789 mov(r1, Operand(builtin)); | |
1790 CEntryStub stub(1); | |
1791 return TryTailCallStub(&stub); | |
1792 } | |
1670 | 1793 |
1671 void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, | 1794 void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, |
1672 InvokeJSFlags flags) { | 1795 InvokeJSFlags flags) { |
1673 GetBuiltinEntry(r2, id); | 1796 GetBuiltinEntry(r2, id); |
1674 if (flags == CALL_JS) { | 1797 if (flags == CALL_JS) { |
1675 Call(r2); | 1798 Call(r2); |
1676 } else { | 1799 } else { |
1677 ASSERT(flags == JUMP_JS); | 1800 ASSERT(flags == JUMP_JS); |
1678 Jump(r2); | 1801 Jump(r2); |
1679 } | 1802 } |
(...skipping 465 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
2145 | 2268 |
2146 void CodePatcher::Emit(Address addr) { | 2269 void CodePatcher::Emit(Address addr) { |
2147 masm()->emit(reinterpret_cast<Instr>(addr)); | 2270 masm()->emit(reinterpret_cast<Instr>(addr)); |
2148 } | 2271 } |
2149 #endif // ENABLE_DEBUGGER_SUPPORT | 2272 #endif // ENABLE_DEBUGGER_SUPPORT |
2150 | 2273 |
2151 | 2274 |
2152 } } // namespace v8::internal | 2275 } } // namespace v8::internal |
2153 | 2276 |
2154 #endif // V8_TARGET_ARCH_ARM | 2277 #endif // V8_TARGET_ARCH_ARM |
OLD | NEW |