| OLD | NEW |
| 1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 309 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 320 MaybeObject* result = stub->TryGetCode(); | 320 MaybeObject* result = stub->TryGetCode(); |
| 321 if (!result->IsFailure()) { | 321 if (!result->IsFailure()) { |
| 322 call(Handle<Code>(Code::cast(result->ToObjectUnchecked())), | 322 call(Handle<Code>(Code::cast(result->ToObjectUnchecked())), |
| 323 RelocInfo::CODE_TARGET); | 323 RelocInfo::CODE_TARGET); |
| 324 } | 324 } |
| 325 return result; | 325 return result; |
| 326 } | 326 } |
| 327 | 327 |
| 328 | 328 |
| 329 void MacroAssembler::TailCallStub(CodeStub* stub) { | 329 void MacroAssembler::TailCallStub(CodeStub* stub) { |
| 330 ASSERT(allow_stub_calls()); // calls are not allowed in some stubs | 330 ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs. |
| 331 Jump(stub->GetCode(), RelocInfo::CODE_TARGET); | 331 Jump(stub->GetCode(), RelocInfo::CODE_TARGET); |
| 332 } | 332 } |
| 333 | 333 |
| 334 | 334 |
| 335 MaybeObject* MacroAssembler::TryTailCallStub(CodeStub* stub) { | 335 MaybeObject* MacroAssembler::TryTailCallStub(CodeStub* stub) { |
| 336 ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs. | 336 ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs. |
| 337 MaybeObject* result = stub->TryGetCode(); | 337 MaybeObject* result = stub->TryGetCode(); |
| 338 if (!result->IsFailure()) { | 338 if (!result->IsFailure()) { |
| 339 jmp(Handle<Code>(Code::cast(result->ToObjectUnchecked())), | 339 jmp(Handle<Code>(Code::cast(result->ToObjectUnchecked())), |
| 340 RelocInfo::CODE_TARGET); | 340 RelocInfo::CODE_TARGET); |
| (...skipping 108 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 449 | 449 |
| 450 // TODO(1236192): Most runtime routines don't need the number of | 450 // TODO(1236192): Most runtime routines don't need the number of |
| 451 // arguments passed in because it is constant. At some point we | 451 // arguments passed in because it is constant. At some point we |
| 452 // should remove this need and make the runtime routine entry code | 452 // should remove this need and make the runtime routine entry code |
| 453 // smarter. | 453 // smarter. |
| 454 Set(rax, num_arguments); | 454 Set(rax, num_arguments); |
| 455 JumpToExternalReference(ext, result_size); | 455 JumpToExternalReference(ext, result_size); |
| 456 } | 456 } |
| 457 | 457 |
| 458 | 458 |
| 459 MaybeObject* MacroAssembler::TryTailCallExternalReference( |
| 460 const ExternalReference& ext, int num_arguments, int result_size) { |
| 461 // ----------- S t a t e ------------- |
| 462 // -- rsp[0] : return address |
| 463 // -- rsp[8] : argument num_arguments - 1 |
| 464 // ... |
| 465 // -- rsp[8 * num_arguments] : argument 0 (receiver) |
| 466 // ----------------------------------- |
| 467 |
| 468 // TODO(1236192): Most runtime routines don't need the number of |
| 469 // arguments passed in because it is constant. At some point we |
| 470 // should remove this need and make the runtime routine entry code |
| 471 // smarter. |
| 472 Set(rax, num_arguments); |
| 473 return TryJumpToExternalReference(ext, result_size); |
| 474 } |
| 475 |
| 476 |
| 459 void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid, | 477 void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid, |
| 460 int num_arguments, | 478 int num_arguments, |
| 461 int result_size) { | 479 int result_size) { |
| 462 TailCallExternalReference(ExternalReference(fid), num_arguments, result_size); | 480 TailCallExternalReference(ExternalReference(fid), num_arguments, result_size); |
| 463 } | 481 } |
| 464 | 482 |
| 465 | 483 |
| 484 MaybeObject* MacroAssembler::TryTailCallRuntime(Runtime::FunctionId fid, |
| 485 int num_arguments, |
| 486 int result_size) { |
| 487 return TryTailCallExternalReference(ExternalReference(fid), |
| 488 num_arguments, |
| 489 result_size); |
| 490 } |
| 491 |
| 492 |
| 466 static int Offset(ExternalReference ref0, ExternalReference ref1) { | 493 static int Offset(ExternalReference ref0, ExternalReference ref1) { |
| 467 int64_t offset = (ref0.address() - ref1.address()); | 494 int64_t offset = (ref0.address() - ref1.address()); |
| 468 // Check that fits into int. | 495 // Check that fits into int. |
| 469 ASSERT(static_cast<int>(offset) == offset); | 496 ASSERT(static_cast<int>(offset) == offset); |
| 470 return static_cast<int>(offset); | 497 return static_cast<int>(offset); |
| 471 } | 498 } |
| 472 | 499 |
| 473 | 500 |
| 474 void MacroAssembler::PrepareCallApiFunction(int stack_space) { | 501 void MacroAssembler::PrepareCallApiFunction(int arg_stack_space) { |
| 475 EnterApiExitFrame(stack_space, 0); | 502 #ifdef _WIN64 |
| 503 // We need to prepare a slot for the result handle on the stack and put |
| 504 // a pointer to it into the 1st arg register. |
| 505 EnterApiExitFrame(arg_stack_space + 1); |
| 506 |
| 507 // rcx must be used to pass the pointer to the return value slot. |
| 508 lea(rcx, StackSpaceOperand(arg_stack_space)); |
| 509 #else |
| 510 EnterApiExitFrame(arg_stack_space); |
| 511 #endif |
| 476 } | 512 } |
| 477 | 513 |
| 478 | 514 |
| 479 void MacroAssembler::CallApiFunctionAndReturn(ApiFunction* function) { | 515 MaybeObject* MacroAssembler::TryCallApiFunctionAndReturn( |
| 516 ApiFunction* function, int stack_space) { |
| 480 Label empty_result; | 517 Label empty_result; |
| 481 Label prologue; | 518 Label prologue; |
| 482 Label promote_scheduled_exception; | 519 Label promote_scheduled_exception; |
| 483 Label delete_allocated_handles; | 520 Label delete_allocated_handles; |
| 484 Label leave_exit_frame; | 521 Label leave_exit_frame; |
| 485 Label write_back; | 522 Label write_back; |
| 486 | 523 |
| 487 ExternalReference next_address = | 524 ExternalReference next_address = |
| 488 ExternalReference::handle_scope_next_address(); | 525 ExternalReference::handle_scope_next_address(); |
| 489 const int kNextOffset = 0; | 526 const int kNextOffset = 0; |
| 490 const int kLimitOffset = Offset( | 527 const int kLimitOffset = Offset( |
| 491 ExternalReference::handle_scope_limit_address(), | 528 ExternalReference::handle_scope_limit_address(), |
| 492 next_address); | 529 next_address); |
| 493 const int kLevelOffset = Offset( | 530 const int kLevelOffset = Offset( |
| 494 ExternalReference::handle_scope_level_address(), | 531 ExternalReference::handle_scope_level_address(), |
| 495 next_address); | 532 next_address); |
| 496 ExternalReference scheduled_exception_address = | 533 ExternalReference scheduled_exception_address = |
| 497 ExternalReference::scheduled_exception_address(); | 534 ExternalReference::scheduled_exception_address(); |
| 498 | 535 |
| 499 // Allocate HandleScope in callee-save registers. | 536 // Allocate HandleScope in callee-save registers. |
| 500 Register prev_next_address_reg = r14; | 537 Register prev_next_address_reg = r14; |
| 501 Register prev_limit_reg = rbx; | 538 Register prev_limit_reg = rbx; |
| 502 Register base_reg = kSmiConstantRegister; | 539 Register base_reg = r12; |
| 503 movq(base_reg, next_address); | 540 movq(base_reg, next_address); |
| 504 movq(prev_next_address_reg, Operand(base_reg, kNextOffset)); | 541 movq(prev_next_address_reg, Operand(base_reg, kNextOffset)); |
| 505 movq(prev_limit_reg, Operand(base_reg, kLimitOffset)); | 542 movq(prev_limit_reg, Operand(base_reg, kLimitOffset)); |
| 506 addl(Operand(base_reg, kLevelOffset), Immediate(1)); | 543 addl(Operand(base_reg, kLevelOffset), Immediate(1)); |
| 507 // Call the api function! | 544 // Call the api function! |
| 508 movq(rax, | 545 movq(rax, |
| 509 reinterpret_cast<int64_t>(function->address()), | 546 reinterpret_cast<int64_t>(function->address()), |
| 510 RelocInfo::RUNTIME_ENTRY); | 547 RelocInfo::RUNTIME_ENTRY); |
| 511 call(rax); | 548 call(rax); |
| 512 | 549 |
| 513 #ifdef _WIN64 | 550 #ifdef _WIN64 |
| 514 // rax keeps a pointer to v8::Handle, unpack it. | 551 // rax keeps a pointer to v8::Handle, unpack it. |
| 515 movq(rax, Operand(rax, 0)); | 552 movq(rax, Operand(rax, 0)); |
| 516 #endif | 553 #endif |
| 517 // Check if the result handle holds 0. | 554 // Check if the result handle holds 0. |
| 518 testq(rax, rax); | 555 testq(rax, rax); |
| 519 j(zero, &empty_result); | 556 j(zero, &empty_result); |
| 520 // It was non-zero. Dereference to get the result value. | 557 // It was non-zero. Dereference to get the result value. |
| 521 movq(rax, Operand(rax, 0)); | 558 movq(rax, Operand(rax, 0)); |
| 522 bind(&prologue); | 559 bind(&prologue); |
| 523 | 560 |
| 524 // No more valid handles (the result handle was the last one). Restore | 561 // No more valid handles (the result handle was the last one). Restore |
| 525 // previous handle scope. | 562 // previous handle scope. |
| 526 subl(Operand(base_reg, kLevelOffset), Immediate(1)); | 563 subl(Operand(base_reg, kLevelOffset), Immediate(1)); |
| 527 movq(Operand(base_reg, kNextOffset), prev_next_address_reg); | 564 movq(Operand(base_reg, kNextOffset), prev_next_address_reg); |
| 528 cmpq(prev_limit_reg, Operand(base_reg, kLimitOffset)); | 565 cmpq(prev_limit_reg, Operand(base_reg, kLimitOffset)); |
| 529 j(not_equal, &delete_allocated_handles); | 566 j(not_equal, &delete_allocated_handles); |
| 530 bind(&leave_exit_frame); | 567 bind(&leave_exit_frame); |
| 531 InitializeSmiConstantRegister(); | |
| 532 | 568 |
| 533 // Check if the function scheduled an exception. | 569 // Check if the function scheduled an exception. |
| 534 movq(rsi, scheduled_exception_address); | 570 movq(rsi, scheduled_exception_address); |
| 535 Cmp(Operand(rsi, 0), Factory::the_hole_value()); | 571 Cmp(Operand(rsi, 0), Factory::the_hole_value()); |
| 536 j(not_equal, &promote_scheduled_exception); | 572 j(not_equal, &promote_scheduled_exception); |
| 537 | 573 |
| 538 LeaveExitFrame(); | 574 LeaveApiExitFrame(); |
| 539 ret(0); | 575 ret(stack_space * kPointerSize); |
| 540 | 576 |
| 541 bind(&promote_scheduled_exception); | 577 bind(&promote_scheduled_exception); |
| 542 TailCallRuntime(Runtime::kPromoteScheduledException, 0, 1); | 578 MaybeObject* result = TryTailCallRuntime(Runtime::kPromoteScheduledException, |
| 579 0, 1); |
| 580 if (result->IsFailure()) { |
| 581 return result; |
| 582 } |
| 543 | 583 |
| 544 bind(&empty_result); | 584 bind(&empty_result); |
| 545 // It was zero; the result is undefined. | 585 // It was zero; the result is undefined. |
| 546 Move(rax, Factory::undefined_value()); | 586 Move(rax, Factory::undefined_value()); |
| 547 jmp(&prologue); | 587 jmp(&prologue); |
| 548 | 588 |
| 549 // HandleScope limit has changed. Delete allocated extensions. | 589 // HandleScope limit has changed. Delete allocated extensions. |
| 550 bind(&delete_allocated_handles); | 590 bind(&delete_allocated_handles); |
| 551 movq(Operand(base_reg, kLimitOffset), prev_limit_reg); | 591 movq(Operand(base_reg, kLimitOffset), prev_limit_reg); |
| 552 movq(prev_limit_reg, rax); | 592 movq(prev_limit_reg, rax); |
| 553 movq(rax, ExternalReference::delete_handle_scope_extensions()); | 593 movq(rax, ExternalReference::delete_handle_scope_extensions()); |
| 554 call(rax); | 594 call(rax); |
| 555 movq(rax, prev_limit_reg); | 595 movq(rax, prev_limit_reg); |
| 556 jmp(&leave_exit_frame); | 596 jmp(&leave_exit_frame); |
| 597 |
| 598 return result; |
| 557 } | 599 } |
| 558 | 600 |
| 559 | 601 |
| 560 void MacroAssembler::JumpToExternalReference(const ExternalReference& ext, | 602 void MacroAssembler::JumpToExternalReference(const ExternalReference& ext, |
| 561 int result_size) { | 603 int result_size) { |
| 562 // Set the entry point and jump to the C entry runtime stub. | 604 // Set the entry point and jump to the C entry runtime stub. |
| 563 movq(rbx, ext); | 605 movq(rbx, ext); |
| 564 CEntryStub ces(result_size); | 606 CEntryStub ces(result_size); |
| 565 jmp(ces.GetCode(), RelocInfo::CODE_TARGET); | 607 jmp(ces.GetCode(), RelocInfo::CODE_TARGET); |
| 566 } | 608 } |
| 567 | 609 |
| 568 | 610 |
| 611 MaybeObject* MacroAssembler::TryJumpToExternalReference( |
| 612 const ExternalReference& ext, int result_size) { |
| 613 // Set the entry point and jump to the C entry runtime stub. |
| 614 movq(rbx, ext); |
| 615 CEntryStub ces(result_size); |
| 616 return TryTailCallStub(&ces); |
| 617 } |
| 618 |
| 619 |
| 569 void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag) { | 620 void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag) { |
| 570 // Calls are not allowed in some stubs. | 621 // Calls are not allowed in some stubs. |
| 571 ASSERT(flag == JUMP_FUNCTION || allow_stub_calls()); | 622 ASSERT(flag == JUMP_FUNCTION || allow_stub_calls()); |
| 572 | 623 |
| 573 // Rely on the assertion to check that the number of provided | 624 // Rely on the assertion to check that the number of provided |
| 574 // arguments match the expected number of arguments. Fake a | 625 // arguments match the expected number of arguments. Fake a |
| 575 // parameter count to avoid emitting code to do the check. | 626 // parameter count to avoid emitting code to do the check. |
| 576 ParameterCount expected(0); | 627 ParameterCount expected(0); |
| 577 GetBuiltinEntry(rdx, id); | 628 GetBuiltinEntry(rdx, id); |
| 578 InvokeCode(rdx, expected, expected, flag); | 629 InvokeCode(rdx, expected, expected, flag); |
| (...skipping 1104 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1683 if (save_rax) { | 1734 if (save_rax) { |
| 1684 movq(r14, rax); // Backup rax before we use it. | 1735 movq(r14, rax); // Backup rax before we use it. |
| 1685 } | 1736 } |
| 1686 | 1737 |
| 1687 movq(rax, rbp); | 1738 movq(rax, rbp); |
| 1688 store_rax(c_entry_fp_address); | 1739 store_rax(c_entry_fp_address); |
| 1689 movq(rax, rsi); | 1740 movq(rax, rsi); |
| 1690 store_rax(context_address); | 1741 store_rax(context_address); |
| 1691 } | 1742 } |
| 1692 | 1743 |
| 1693 void MacroAssembler::EnterExitFrameEpilogue(int result_size, | 1744 |
| 1694 int argc) { | 1745 void MacroAssembler::EnterExitFrameEpilogue(int arg_stack_space) { |
| 1695 #ifdef _WIN64 | 1746 #ifdef _WIN64 |
| 1696 // Reserve space on stack for result and argument structures, if necessary. | 1747 const int kShadowSpace = 4; |
| 1697 int result_stack_space = (result_size < 2) ? 0 : result_size * kPointerSize; | 1748 arg_stack_space += kShadowSpace; |
| 1698 // Reserve space for the Arguments object. The Windows 64-bit ABI | |
| 1699 // requires us to pass this structure as a pointer to its location on | |
| 1700 // the stack. The structure contains 2 values. | |
| 1701 int argument_stack_space = argc * kPointerSize; | |
| 1702 // We also need backing space for 4 parameters, even though | |
| 1703 // we only pass one or two parameter, and it is in a register. | |
| 1704 int argument_mirror_space = 4 * kPointerSize; | |
| 1705 int total_stack_space = | |
| 1706 argument_mirror_space + argument_stack_space + result_stack_space; | |
| 1707 subq(rsp, Immediate(total_stack_space)); | |
| 1708 #endif | 1749 #endif |
| 1750 if (arg_stack_space > 0) { |
| 1751 subq(rsp, Immediate(arg_stack_space * kPointerSize)); |
| 1752 } |
| 1709 | 1753 |
| 1710 // Get the required frame alignment for the OS. | 1754 // Get the required frame alignment for the OS. |
| 1711 static const int kFrameAlignment = OS::ActivationFrameAlignment(); | 1755 static const int kFrameAlignment = OS::ActivationFrameAlignment(); |
| 1712 if (kFrameAlignment > 0) { | 1756 if (kFrameAlignment > 0) { |
| 1713 ASSERT(IsPowerOf2(kFrameAlignment)); | 1757 ASSERT(IsPowerOf2(kFrameAlignment)); |
| 1714 movq(kScratchRegister, Immediate(-kFrameAlignment)); | 1758 movq(kScratchRegister, Immediate(-kFrameAlignment)); |
| 1715 and_(rsp, kScratchRegister); | 1759 and_(rsp, kScratchRegister); |
| 1716 } | 1760 } |
| 1717 | 1761 |
| 1718 // Patch the saved entry sp. | 1762 // Patch the saved entry sp. |
| 1719 movq(Operand(rbp, ExitFrameConstants::kSPOffset), rsp); | 1763 movq(Operand(rbp, ExitFrameConstants::kSPOffset), rsp); |
| 1720 } | 1764 } |
| 1721 | 1765 |
| 1722 | 1766 |
| 1723 void MacroAssembler::EnterExitFrame(int result_size) { | 1767 void MacroAssembler::EnterExitFrame(int arg_stack_space) { |
| 1724 EnterExitFramePrologue(true); | 1768 EnterExitFramePrologue(true); |
| 1725 | 1769 |
| 1726 // Setup argv in callee-saved register r12. It is reused in LeaveExitFrame, | 1770 // Setup argv in callee-saved register r12. It is reused in LeaveExitFrame, |
| 1727 // so it must be retained across the C-call. | 1771 // so it must be retained across the C-call. |
| 1728 int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize; | 1772 int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize; |
| 1729 lea(r12, Operand(rbp, r14, times_pointer_size, offset)); | 1773 lea(r12, Operand(rbp, r14, times_pointer_size, offset)); |
| 1730 | 1774 |
| 1731 EnterExitFrameEpilogue(result_size, 2); | 1775 EnterExitFrameEpilogue(arg_stack_space); |
| 1732 } | 1776 } |
| 1733 | 1777 |
| 1734 | 1778 |
| 1735 void MacroAssembler::EnterApiExitFrame(int stack_space, | 1779 void MacroAssembler::EnterApiExitFrame(int arg_stack_space) { |
| 1736 int argc, | |
| 1737 int result_size) { | |
| 1738 EnterExitFramePrologue(false); | 1780 EnterExitFramePrologue(false); |
| 1739 | 1781 EnterExitFrameEpilogue(arg_stack_space); |
| 1740 // Setup argv in callee-saved register r12. It is reused in LeaveExitFrame, | |
| 1741 // so it must be retained across the C-call. | |
| 1742 int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize; | |
| 1743 lea(r12, Operand(rbp, (stack_space * kPointerSize) + offset)); | |
| 1744 | |
| 1745 EnterExitFrameEpilogue(result_size, argc); | |
| 1746 } | 1782 } |
| 1747 | 1783 |
| 1748 | 1784 |
| 1749 void MacroAssembler::LeaveExitFrame(int result_size) { | 1785 void MacroAssembler::LeaveExitFrame() { |
| 1750 // Registers: | 1786 // Registers: |
| 1751 // r12 : argv | 1787 // r12 : argv |
| 1752 | 1788 |
| 1753 // Get the return address from the stack and restore the frame pointer. | 1789 // Get the return address from the stack and restore the frame pointer. |
| 1754 movq(rcx, Operand(rbp, 1 * kPointerSize)); | 1790 movq(rcx, Operand(rbp, 1 * kPointerSize)); |
| 1755 movq(rbp, Operand(rbp, 0 * kPointerSize)); | 1791 movq(rbp, Operand(rbp, 0 * kPointerSize)); |
| 1756 | 1792 |
| 1757 // Pop everything up to and including the arguments and the receiver | 1793 // Pop everything up to and including the arguments and the receiver |
| 1758 // from the caller stack. | 1794 // from the caller stack. |
| 1759 lea(rsp, Operand(r12, 1 * kPointerSize)); | 1795 lea(rsp, Operand(r12, 1 * kPointerSize)); |
| 1760 | 1796 |
| 1797 // Push the return address to get ready to return. |
| 1798 push(rcx); |
| 1799 |
| 1800 LeaveExitFrameEpilogue(); |
| 1801 } |
| 1802 |
| 1803 |
| 1804 void MacroAssembler::LeaveApiExitFrame() { |
| 1805 movq(rsp, rbp); |
| 1806 pop(rbp); |
| 1807 |
| 1808 LeaveExitFrameEpilogue(); |
| 1809 } |
| 1810 |
| 1811 |
| 1812 void MacroAssembler::LeaveExitFrameEpilogue() { |
| 1761 // Restore current context from top and clear it in debug mode. | 1813 // Restore current context from top and clear it in debug mode. |
| 1762 ExternalReference context_address(Top::k_context_address); | 1814 ExternalReference context_address(Top::k_context_address); |
| 1763 movq(kScratchRegister, context_address); | 1815 movq(kScratchRegister, context_address); |
| 1764 movq(rsi, Operand(kScratchRegister, 0)); | 1816 movq(rsi, Operand(kScratchRegister, 0)); |
| 1765 #ifdef DEBUG | 1817 #ifdef DEBUG |
| 1766 movq(Operand(kScratchRegister, 0), Immediate(0)); | 1818 movq(Operand(kScratchRegister, 0), Immediate(0)); |
| 1767 #endif | 1819 #endif |
| 1768 | 1820 |
| 1769 // Push the return address to get ready to return. | |
| 1770 push(rcx); | |
| 1771 | |
| 1772 // Clear the top frame. | 1821 // Clear the top frame. |
| 1773 ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address); | 1822 ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address); |
| 1774 movq(kScratchRegister, c_entry_fp_address); | 1823 movq(kScratchRegister, c_entry_fp_address); |
| 1775 movq(Operand(kScratchRegister, 0), Immediate(0)); | 1824 movq(Operand(kScratchRegister, 0), Immediate(0)); |
| 1776 } | 1825 } |
| 1777 | 1826 |
| 1778 | 1827 |
| 1779 void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg, | 1828 void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg, |
| 1780 Register scratch, | 1829 Register scratch, |
| 1781 Label* miss) { | 1830 Label* miss) { |
| (...skipping 498 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2280 CPU::FlushICache(address_, size_); | 2329 CPU::FlushICache(address_, size_); |
| 2281 | 2330 |
| 2282 // Check that the code was patched as expected. | 2331 // Check that the code was patched as expected. |
| 2283 ASSERT(masm_.pc_ == address_ + size_); | 2332 ASSERT(masm_.pc_ == address_ + size_); |
| 2284 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); | 2333 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); |
| 2285 } | 2334 } |
| 2286 | 2335 |
| 2287 } } // namespace v8::internal | 2336 } } // namespace v8::internal |
| 2288 | 2337 |
| 2289 #endif // V8_TARGET_ARCH_X64 | 2338 #endif // V8_TARGET_ARCH_X64 |
| OLD | NEW |