Chromium Code Reviews

Side by Side Diff: src/arm/macro-assembler-arm.cc

Issue 6170001: Direct call api functions (arm implementation) (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: '' Created 9 years, 11 months ago
1 // Copyright 2010 the V8 project authors. All rights reserved. 1 // Copyright 2010 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 524 matching lines...)
535 // r1: preserved 535 // r1: preserved
536 // r2: preserved 536 // r2: preserved
537 537
538 // Drop the execution stack down to the frame pointer and restore 538 // Drop the execution stack down to the frame pointer and restore
539 // the caller frame pointer and return address. 539 // the caller frame pointer and return address.
540 mov(sp, fp); 540 mov(sp, fp);
541 ldm(ia_w, sp, fp.bit() | lr.bit()); 541 ldm(ia_w, sp, fp.bit() | lr.bit());
542 } 542 }
543 543
544 544
545 void MacroAssembler::EnterExitFrame(bool save_doubles) { 545 void MacroAssembler::EnterExitFrame(bool save_doubles, int stack_space) {
546 // r0 is argc. 546 // Prepare the stack to be aligned when calling into C.
547 // Compute callee's stack pointer before making changes and save it as
548 // ip register so that it is restored as sp register on exit, thereby
549 // popping the args.
550
551 // ip = sp + kPointerSize * #args;
552 add(ip, sp, Operand(r0, LSL, kPointerSizeLog2));
553
554 // Compute the argv pointer and keep it in a callee-saved register.
555 sub(r6, ip, Operand(kPointerSize));
556
557 // Prepare the stack to be aligned when calling into C. After this point there
558 // are 5 pushes before the call into C, so the stack needs to be aligned after
559 // 5 pushes.
560 int frame_alignment = ActivationFrameAlignment(); 547 int frame_alignment = ActivationFrameAlignment();
561 int frame_alignment_mask = frame_alignment - 1; 548 int frame_alignment_mask = frame_alignment - 1;
562 if (frame_alignment != kPointerSize) { 549 if (frame_alignment != kPointerSize) {
563 // The following code needs to be more general if this assert does not hold. 550 // The following code needs to be more general if this assert does not hold.
564 ASSERT(frame_alignment == 2 * kPointerSize); 551 ASSERT(frame_alignment == 2 * kPointerSize);
565 // With 5 pushes left the frame must be unaligned at this point.
566 mov(r7, Operand(Smi::FromInt(0))); 552 mov(r7, Operand(Smi::FromInt(0)));
567 tst(sp, Operand((frame_alignment - kPointerSize) & frame_alignment_mask)); 553 tst(sp, Operand(frame_alignment_mask));
568 push(r7, eq); // Push if aligned to make it unaligned. 554 // Align the stack here if an even number of slots is requested; make it
555 // misaligned if an odd number is requested, so it is aligned at the C call.
556 if (stack_space % 2 == 0) {
557 push(r7, ne);
558 } else {
559 push(r7, eq);
560 }
569 } 561 }
570 562
571 // Push in reverse order: caller_fp, sp_on_exit, and caller_pc. 563 // Push in reverse order: caller_fp, sp_on_exit, and caller_pc.
572 stm(db_w, sp, fp.bit() | ip.bit() | lr.bit()); 564 stm(db_w, sp, fp.bit() | ip.bit() | lr.bit());
573 mov(fp, Operand(sp)); // Setup new frame pointer. 565 mov(fp, Operand(sp)); // Setup new frame pointer.
574 566
575 mov(ip, Operand(CodeObject())); 567 mov(ip, Operand(CodeObject()));
576 push(ip); // Accessed from ExitFrame::code_slot. 568 push(ip); // Accessed from ExitFrame::code_slot.
577 569
578 // Save the frame pointer and the context in top. 570 // Save the frame pointer and the context in top.
579 mov(ip, Operand(ExternalReference(Top::k_c_entry_fp_address))); 571 mov(ip, Operand(ExternalReference(Top::k_c_entry_fp_address)));
580 str(fp, MemOperand(ip)); 572 str(fp, MemOperand(ip));
581 mov(ip, Operand(ExternalReference(Top::k_context_address))); 573 mov(ip, Operand(ExternalReference(Top::k_context_address)));
582 str(cp, MemOperand(ip)); 574 str(cp, MemOperand(ip));
583 575
584 // Setup argc and the builtin function in callee-saved registers.
585 mov(r4, Operand(r0));
586 mov(r5, Operand(r1));
587
588 // Optionally save all double registers. 576 // Optionally save all double registers.
589 if (save_doubles) { 577 if (save_doubles) {
590 // TODO(regis): Use vstrm instruction. 578 // TODO(regis): Use vstrm instruction.
591 // The stack alignment code above made sp unaligned, so add space for one 579 // The stack alignment code above made sp unaligned, so add space for one
592 // more double register and use aligned addresses. 580 // more double register and use aligned addresses.
593 ASSERT(kDoubleSize == frame_alignment); 581 ASSERT(kDoubleSize == frame_alignment);
594 // Mark the frame as containing doubles by pushing a non-valid return 582 // Mark the frame as containing doubles by pushing a non-valid return
595 // address, i.e. 0. 583 // address, i.e. 0.
596 ASSERT(ExitFrameConstants::kMarkerOffset == -2 * kPointerSize); 584 ASSERT(ExitFrameConstants::kMarkerOffset == -2 * kPointerSize);
597 mov(ip, Operand(0)); // Marker and alignment word. 585 mov(ip, Operand(0)); // Marker and alignment word.
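A minimal sketch of the parity rule used by the new EnterExitFrame, assuming 4-byte pointers and 8-byte activation frame alignment as on ARM; the function name is illustrative, not V8 API:

    #include <cstdint>

    // True if a padding word should be pushed now so that sp is 8-byte
    // aligned once stack_space further words have been pushed before the
    // call into C.
    bool NeedsPaddingWord(uintptr_t sp, int stack_space) {
      const int kPointerSize = 4;
      const int kFrameAlignment = 2 * kPointerSize;  // 8 bytes
      bool sp_aligned = (sp & (kFrameAlignment - 1)) == 0;
      if (stack_space % 2 == 0) {
        return !sp_aligned;  // push(r7, ne): an even count keeps the parity
      } else {
        return sp_aligned;   // push(r7, eq): an odd count flips the parity
      }
    }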
(...skipping 792 matching lines...)
1390 Call(stub->GetCode(), RelocInfo::CODE_TARGET, cond); 1378 Call(stub->GetCode(), RelocInfo::CODE_TARGET, cond);
1391 } 1379 }
1392 1380
1393 1381
1394 void MacroAssembler::TailCallStub(CodeStub* stub, Condition cond) { 1382 void MacroAssembler::TailCallStub(CodeStub* stub, Condition cond) {
1395 ASSERT(allow_stub_calls()); // stub calls are not allowed in some stubs 1383 ASSERT(allow_stub_calls()); // stub calls are not allowed in some stubs
1396 Jump(stub->GetCode(), RelocInfo::CODE_TARGET, cond); 1384 Jump(stub->GetCode(), RelocInfo::CODE_TARGET, cond);
1397 } 1385 }
1398 1386
1399 1387
1388 MaybeObject* MacroAssembler::TryTailCallStub(CodeStub* stub, Condition cond) {
1389 ASSERT(allow_stub_calls()); // stub calls are not allowed in some stubs
1390 Object* result;
1391 { MaybeObject* maybe_result = stub->TryGetCode();
1392 if (!maybe_result->ToObject(&result)) return maybe_result;
1393 }
1394 Jump(stub->GetCode(), RelocInfo::CODE_TARGET, cond);
1395 return result;
1396 }
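The Try* helpers introduced in this patch all follow the same convention: an allocation failure from TryGetCode() is returned to the caller as a MaybeObject rather than asserted away. A hedged sketch of how a call site is expected to check such a result (the surrounding code is illustrative only):

    MaybeObject* maybe_result = masm->TryTailCallStub(&stub);
    if (maybe_result->IsFailure()) {
      // Propagate the failure so the caller can retry after a GC.
      return maybe_result;
    }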
1397
1398 void MacroAssembler::PrepareCallApiFunction(int arg_stack_space,
1399 int unwind_space) {
1400 add(ip, sp, Operand(unwind_space * kPointerSize));
1401 EnterExitFrame(false, arg_stack_space + 5);
SeRya 2011/01/21 09:52:36 +1?
Zaheer 2011/01/21 11:51:50 Done. updated EnterExitFrame to encapsulate the pu
1402
1403 // Create space for the arguments below the exit frame.
1404 // +- exit frame -+- arguments -+- stack grows here -+
1405 // 1 for the return address
1406 sub(sp, sp, Operand((arg_stack_space + 1) * kPointerSize));
1407 }
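A rough picture of the layout this helper is meant to establish, inferred from this chunk and the review exchange above (the exact slot count passed to EnterExitFrame is what the "+1?" comment questions):

    // Illustrative layout only, higher addresses first:
    //
    //   ip -> unwind_space words of caller data, popped when the frame is left
    //         exit frame: caller pc, sp on exit (ip), caller fp <- fp, code object
    //         arg_stack_space words for the C call's stack arguments
    //   sp -> one extra word for the return address pushed before the call
    //
    // i.e. sp ends up (arg_stack_space + 1) * kPointerSize below the exit frame.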
1408
1409 static int AddressOffset(ExternalReference ref0, ExternalReference ref1) {
1410 int64_t offset = (ref0.address() - ref1.address());
1411 // Check that the offset fits into an int.
1412 ASSERT(static_cast<int>(offset) == offset);
1413 return static_cast<int>(offset);
1414 }
1415
1416 MaybeObject* MacroAssembler::TryCallApiFunctionAndReturn(
1417 ApiFunction* function) {
1418 ExternalReference next_address =
1419 ExternalReference::handle_scope_next_address();
1420 const int kNextOffset = 0;
1421 const int kLimitOffset = AddressOffset(
1422 ExternalReference::handle_scope_limit_address(),
1423 next_address);
1424 const int kLevelOffset = AddressOffset(
1425 ExternalReference::handle_scope_level_address(),
1426 next_address);
1427
1428 // Allocate HandleScope in callee-save registers.
1429 mov(r7, Operand(next_address));
1430 ldr(r4, MemOperand(r7, kNextOffset));
1431 ldr(r5, MemOperand(r7, kLimitOffset));
1432 ldr(r6, MemOperand(r7, kLevelOffset));
1433 add(r6, r6, Operand(1));
1434 str(r6, MemOperand(r7, kLevelOffset));
1435
1436 // Native call returns to the DirectCEntry stub which redirects to the
1437 // return address pushed on stack (could have moved after GC).
1438 DirectCEntryStub stub;
1439 mov(lr, Operand(reinterpret_cast<intptr_t>(stub.GetCode().location()),
1440 RelocInfo::CODE_TARGET));
1441
1442 // Push return address (accessible to GC through exit frame pc).
1443 ExternalReference ref =
1444 ExternalReference(function, false, ExternalReference::DIRECT_CALL);
1445 mov(r2, Operand(reinterpret_cast<intptr_t>(ref.address())));
1446 add(ip, pc, Operand(4));
1447 str(ip, MemOperand(fp, ExitFrameConstants::kPCOffset));
1448 Jump(r2); // Call the api function.
1449
1450 Label promote_scheduled_exception;
1451 Label delete_allocated_handles;
1452 Label leave_exit_frame;
1453
1454 // If result is non-zero, dereference to get the result value;
1455 // otherwise set it to undefined.
1456 cmp(r0, Operand(0));
1457 LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
1458 ldr(r0, MemOperand(r0), ne);
1459
1460 // No more valid handles (the result handle was the last one). Restore
1461 // previous handle scope.
1462 str(r4, MemOperand(r7, kNextOffset));
1463 if (FLAG_debug_code) {
1464 ldr(r1, MemOperand(r7, kLevelOffset));
1465 cmp(r1, r6);
1466 Check(eq, "Unexpected level after return from api call");
1467 }
1468 sub(r6, r6, Operand(1));
1469 str(r6, MemOperand(r7, kLevelOffset));
1470 ldr(ip, MemOperand(r7, kLimitOffset));
1471 cmp(r5, ip);
1472 b(ne, &delete_allocated_handles);
1473
1474 // Check if the function scheduled an exception.
1475 bind(&leave_exit_frame);
1476 LoadRoot(r4, Heap::kTheHoleValueRootIndex);
1477 mov(ip, Operand(ExternalReference(Top::k_pending_exception_address)));
1478 ldr(r5, MemOperand(ip));
1479 cmp(r4, r5);
1480 b(ne, &promote_scheduled_exception);
1481 LeaveExitFrame(false);
1482
1483 bind(&promote_scheduled_exception);
1484 MaybeObject* result = TryTailCallExternalReference(
1485 ExternalReference(Runtime::kPromoteScheduledException), 0, 1);
1486 if (result->IsFailure()) {
1487 return result;
1488 }
1489
1490 // HandleScope limit has changed. Delete allocated extensions.
1491 bind(&delete_allocated_handles);
1492 str(r5, MemOperand(r7, kLimitOffset));
1493 mov(r4, r0);
1494 PrepareCallCFunction(0, r5);
1495 CallCFunction(ExternalReference::delete_handle_scope_extensions(), 0);
1496 mov(r0, r4);
1497 jmp(&leave_exit_frame);
1498
1499 return result;
1500 }
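For readers less familiar with ARM assembly, here is a rough C++ rendition of the handle-scope bookkeeping performed above. HandleScopeData is a stand-in suggested by the three external references, not the actual V8 declaration, and the error paths are compressed into comments. (The add(ip, pc, Operand(4)) sequence before the Jump appears to compute the address of the instruction following the Jump, since reading pc on ARM yields the current instruction address plus 8, and stores it as the frame's return address.)

    #include <cstddef>

    struct HandleScopeData { void** next; void** limit; int level; };

    void* CallApiAndReturn(HandleScopeData* scope, void** (*api_function)()) {
      void** saved_next = scope->next;    // kept in r4
      void** saved_limit = scope->limit;  // kept in r5
      scope->level++;                     // kept in r6

      void** result_handle = api_function();  // direct call via DirectCEntryStub
      void* result = (result_handle != NULL) ? *result_handle
                                             : NULL /* undefined value root */;

      scope->next = saved_next;           // drop handles created by the callee
      scope->level--;
      if (scope->limit != saved_limit) {  // the callee grew the scope
        scope->limit = saved_limit;
        // ...call delete_handle_scope_extensions() before continuing.
      }
      // If the callee scheduled an exception, Runtime::kPromoteScheduledException
      // is tail-called instead of returning normally.
      return result;
    }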
1501
1502
1400 void MacroAssembler::IllegalOperation(int num_arguments) { 1503 void MacroAssembler::IllegalOperation(int num_arguments) {
1401 if (num_arguments > 0) { 1504 if (num_arguments > 0) {
1402 add(sp, sp, Operand(num_arguments * kPointerSize)); 1505 add(sp, sp, Operand(num_arguments * kPointerSize));
1403 } 1506 }
1404 LoadRoot(r0, Heap::kUndefinedValueRootIndex); 1507 LoadRoot(r0, Heap::kUndefinedValueRootIndex);
1405 } 1508 }
1406 1509
1407 1510
1408 void MacroAssembler::IndexFromHash(Register hash, Register index) { 1511 void MacroAssembler::IndexFromHash(Register hash, Register index) {
1409 // If the hash field contains an array index pick it out. The assert checks 1512 // If the hash field contains an array index pick it out. The assert checks
(...skipping 232 matching lines...)
1642 int num_arguments, 1745 int num_arguments,
1643 int result_size) { 1746 int result_size) {
1644 // TODO(1236192): Most runtime routines don't need the number of 1747 // TODO(1236192): Most runtime routines don't need the number of
1645 // arguments passed in because it is constant. At some point we 1748 // arguments passed in because it is constant. At some point we
1646 // should remove this need and make the runtime routine entry code 1749 // should remove this need and make the runtime routine entry code
1647 // smarter. 1750 // smarter.
1648 mov(r0, Operand(num_arguments)); 1751 mov(r0, Operand(num_arguments));
1649 JumpToExternalReference(ext); 1752 JumpToExternalReference(ext);
1650 } 1753 }
1651 1754
1755 MaybeObject* MacroAssembler::TryTailCallExternalReference(
1756 const ExternalReference& ext, int num_arguments, int result_size) {
1757 // TODO(1236192): Most runtime routines don't need the number of
1758 // arguments passed in because it is constant. At some point we
1759 // should remove this need and make the runtime routine entry code
1760 // smarter.
1761 mov(r0, Operand(num_arguments));
1762 return TryJumpToExternalReference(ext);
1763 }
1652 1764
1653 void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid, 1765 void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
1654 int num_arguments, 1766 int num_arguments,
1655 int result_size) { 1767 int result_size) {
1656 TailCallExternalReference(ExternalReference(fid), num_arguments, result_size); 1768 TailCallExternalReference(ExternalReference(fid), num_arguments, result_size);
1657 } 1769 }
1658 1770
1659 1771
1660 void MacroAssembler::JumpToExternalReference(const ExternalReference& builtin) { 1772 void MacroAssembler::JumpToExternalReference(const ExternalReference& builtin) {
1661 #if defined(__thumb__) 1773 #if defined(__thumb__)
1662 // Thumb mode builtin. 1774 // Thumb mode builtin.
1663 ASSERT((reinterpret_cast<intptr_t>(builtin.address()) & 1) == 1); 1775 ASSERT((reinterpret_cast<intptr_t>(builtin.address()) & 1) == 1);
1664 #endif 1776 #endif
1665 mov(r1, Operand(builtin)); 1777 mov(r1, Operand(builtin));
1666 CEntryStub stub(1); 1778 CEntryStub stub(1);
1667 Jump(stub.GetCode(), RelocInfo::CODE_TARGET); 1779 Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
1668 } 1780 }
1669 1781
1782 MaybeObject* MacroAssembler::TryJumpToExternalReference(
1783 const ExternalReference& builtin) {
1784 #if defined(__thumb__)
1785 // Thumb mode builtin.
1786 ASSERT((reinterpret_cast<intptr_t>(builtin.address()) & 1) == 1);
1787 #endif
1788 mov(r1, Operand(builtin));
1789 CEntryStub stub(1);
1790 return TryTailCallStub(&stub);
1791 }
1670 1792
1671 void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, 1793 void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
1672 InvokeJSFlags flags) { 1794 InvokeJSFlags flags) {
1673 GetBuiltinEntry(r2, id); 1795 GetBuiltinEntry(r2, id);
1674 if (flags == CALL_JS) { 1796 if (flags == CALL_JS) {
1675 Call(r2); 1797 Call(r2);
1676 } else { 1798 } else {
1677 ASSERT(flags == JUMP_JS); 1799 ASSERT(flags == JUMP_JS);
1678 Jump(r2); 1800 Jump(r2);
1679 } 1801 }
(...skipping 465 matching lines...)
2145 2267
2146 void CodePatcher::Emit(Address addr) { 2268 void CodePatcher::Emit(Address addr) {
2147 masm()->emit(reinterpret_cast<Instr>(addr)); 2269 masm()->emit(reinterpret_cast<Instr>(addr));
2148 } 2270 }
2149 #endif // ENABLE_DEBUGGER_SUPPORT 2271 #endif // ENABLE_DEBUGGER_SUPPORT
2150 2272
2151 2273
2152 } } // namespace v8::internal 2274 } } // namespace v8::internal
2153 2275
2154 #endif // V8_TARGET_ARCH_ARM 2276 #endif // V8_TARGET_ARCH_ARM