OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 26 matching lines...) Expand all Loading... |
37 #include "debug.h" | 37 #include "debug.h" |
38 #include "heap.h" | 38 #include "heap.h" |
39 | 39 |
40 namespace v8 { | 40 namespace v8 { |
41 namespace internal { | 41 namespace internal { |
42 | 42 |
43 MacroAssembler::MacroAssembler(void* buffer, int size) | 43 MacroAssembler::MacroAssembler(void* buffer, int size) |
44 : Assembler(buffer, size), | 44 : Assembler(buffer, size), |
45 generating_stub_(false), | 45 generating_stub_(false), |
46 allow_stub_calls_(true), | 46 allow_stub_calls_(true), |
47 code_object_(Heap::undefined_value()) { | 47 code_object_(HEAP->undefined_value()) { |
48 } | 48 } |
49 | 49 |
50 | 50 |
51 void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) { | 51 void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) { |
52 movq(destination, Operand(kRootRegister, | 52 movq(destination, Operand(kRootRegister, |
53 (index << kPointerSizeLog2) - kRootRegisterBias)); | 53 (index << kPointerSizeLog2) - kRootRegisterBias)); |
54 } | 54 } |
55 | 55 |
56 | 56 |
57 void MacroAssembler::LoadRootIndexed(Register destination, | 57 void MacroAssembler::LoadRootIndexed(Register destination, |
(...skipping 326 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
384 Integer32ToSmi(index, hash); | 384 Integer32ToSmi(index, hash); |
385 } | 385 } |
386 | 386 |
387 | 387 |
388 void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) { | 388 void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) { |
389 CallRuntime(Runtime::FunctionForId(id), num_arguments); | 389 CallRuntime(Runtime::FunctionForId(id), num_arguments); |
390 } | 390 } |
391 | 391 |
392 | 392 |
393 void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) { | 393 void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) { |
394 Runtime::Function* function = Runtime::FunctionForId(id); | 394 const Runtime::Function* function = Runtime::FunctionForId(id); |
395 Set(rax, function->nargs); | 395 Set(rax, function->nargs); |
396 movq(rbx, ExternalReference(function)); | 396 movq(rbx, ExternalReference(function)); |
397 CEntryStub ces(1); | 397 CEntryStub ces(1); |
398 ces.SaveDoubles(); | 398 ces.SaveDoubles(); |
399 CallStub(&ces); | 399 CallStub(&ces); |
400 } | 400 } |
401 | 401 |
402 | 402 |
403 MaybeObject* MacroAssembler::TryCallRuntime(Runtime::FunctionId id, | 403 MaybeObject* MacroAssembler::TryCallRuntime(Runtime::FunctionId id, |
404 int num_arguments) { | 404 int num_arguments) { |
405 return TryCallRuntime(Runtime::FunctionForId(id), num_arguments); | 405 return TryCallRuntime(Runtime::FunctionForId(id), num_arguments); |
406 } | 406 } |
407 | 407 |
408 | 408 |
409 void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) { | 409 void MacroAssembler::CallRuntime(const Runtime::Function* f, |
| 410 int num_arguments) { |
410 // If the expected number of arguments of the runtime function is | 411 // If the expected number of arguments of the runtime function is |
411 // constant, we check that the actual number of arguments match the | 412 // constant, we check that the actual number of arguments match the |
412 // expectation. | 413 // expectation. |
413 if (f->nargs >= 0 && f->nargs != num_arguments) { | 414 if (f->nargs >= 0 && f->nargs != num_arguments) { |
414 IllegalOperation(num_arguments); | 415 IllegalOperation(num_arguments); |
415 return; | 416 return; |
416 } | 417 } |
417 | 418 |
418 // TODO(1236192): Most runtime routines don't need the number of | 419 // TODO(1236192): Most runtime routines don't need the number of |
419 // arguments passed in because it is constant. At some point we | 420 // arguments passed in because it is constant. At some point we |
420 // should remove this need and make the runtime routine entry code | 421 // should remove this need and make the runtime routine entry code |
421 // smarter. | 422 // smarter. |
422 Set(rax, num_arguments); | 423 Set(rax, num_arguments); |
423 movq(rbx, ExternalReference(f)); | 424 movq(rbx, ExternalReference(f)); |
424 CEntryStub ces(f->result_size); | 425 CEntryStub ces(f->result_size); |
425 CallStub(&ces); | 426 CallStub(&ces); |
426 } | 427 } |
427 | 428 |
428 | 429 |
429 MaybeObject* MacroAssembler::TryCallRuntime(Runtime::Function* f, | 430 MaybeObject* MacroAssembler::TryCallRuntime(const Runtime::Function* f, |
430 int num_arguments) { | 431 int num_arguments) { |
431 if (f->nargs >= 0 && f->nargs != num_arguments) { | 432 if (f->nargs >= 0 && f->nargs != num_arguments) { |
432 IllegalOperation(num_arguments); | 433 IllegalOperation(num_arguments); |
433 // Since we did not call the stub, there was no allocation failure. | 434 // Since we did not call the stub, there was no allocation failure. |
434 // Return some non-failure object. | 435 // Return some non-failure object. |
435 return Heap::undefined_value(); | 436 return HEAP->undefined_value(); |
436 } | 437 } |
437 | 438 |
438 // TODO(1236192): Most runtime routines don't need the number of | 439 // TODO(1236192): Most runtime routines don't need the number of |
439 // arguments passed in because it is constant. At some point we | 440 // arguments passed in because it is constant. At some point we |
440 // should remove this need and make the runtime routine entry code | 441 // should remove this need and make the runtime routine entry code |
441 // smarter. | 442 // smarter. |
442 Set(rax, num_arguments); | 443 Set(rax, num_arguments); |
443 movq(rbx, ExternalReference(f)); | 444 movq(rbx, ExternalReference(f)); |
444 CEntryStub ces(f->result_size); | 445 CEntryStub ces(f->result_size); |
445 return TryCallStub(&ces); | 446 return TryCallStub(&ces); |
(...skipping 134 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
580 // No more valid handles (the result handle was the last one). Restore | 581 // No more valid handles (the result handle was the last one). Restore |
581 // previous handle scope. | 582 // previous handle scope. |
582 subl(Operand(base_reg, kLevelOffset), Immediate(1)); | 583 subl(Operand(base_reg, kLevelOffset), Immediate(1)); |
583 movq(Operand(base_reg, kNextOffset), prev_next_address_reg); | 584 movq(Operand(base_reg, kNextOffset), prev_next_address_reg); |
584 cmpq(prev_limit_reg, Operand(base_reg, kLimitOffset)); | 585 cmpq(prev_limit_reg, Operand(base_reg, kLimitOffset)); |
585 j(not_equal, &delete_allocated_handles); | 586 j(not_equal, &delete_allocated_handles); |
586 bind(&leave_exit_frame); | 587 bind(&leave_exit_frame); |
587 | 588 |
588 // Check if the function scheduled an exception. | 589 // Check if the function scheduled an exception. |
589 movq(rsi, scheduled_exception_address); | 590 movq(rsi, scheduled_exception_address); |
590 Cmp(Operand(rsi, 0), Factory::the_hole_value()); | 591 Cmp(Operand(rsi, 0), FACTORY->the_hole_value()); |
591 j(not_equal, &promote_scheduled_exception); | 592 j(not_equal, &promote_scheduled_exception); |
592 | 593 |
593 LeaveApiExitFrame(); | 594 LeaveApiExitFrame(); |
594 ret(stack_space * kPointerSize); | 595 ret(stack_space * kPointerSize); |
595 | 596 |
596 bind(&promote_scheduled_exception); | 597 bind(&promote_scheduled_exception); |
597 MaybeObject* result = TryTailCallRuntime(Runtime::kPromoteScheduledException, | 598 MaybeObject* result = TryTailCallRuntime(Runtime::kPromoteScheduledException, |
598 0, 1); | 599 0, 1); |
599 if (result->IsFailure()) { | 600 if (result->IsFailure()) { |
600 return result; | 601 return result; |
601 } | 602 } |
602 | 603 |
603 bind(&empty_result); | 604 bind(&empty_result); |
604 // It was zero; the result is undefined. | 605 // It was zero; the result is undefined. |
605 Move(rax, Factory::undefined_value()); | 606 Move(rax, FACTORY->undefined_value()); |
606 jmp(&prologue); | 607 jmp(&prologue); |
607 | 608 |
608 // HandleScope limit has changed. Delete allocated extensions. | 609 // HandleScope limit has changed. Delete allocated extensions. |
609 bind(&delete_allocated_handles); | 610 bind(&delete_allocated_handles); |
610 movq(Operand(base_reg, kLimitOffset), prev_limit_reg); | 611 movq(Operand(base_reg, kLimitOffset), prev_limit_reg); |
611 movq(prev_limit_reg, rax); | 612 movq(prev_limit_reg, rax); |
| 613 #ifdef _WIN64 |
| 614 movq(rcx, ExternalReference::isolate_address()); |
| 615 #else |
| 616 movq(rdi, ExternalReference::isolate_address()); |
| 617 #endif |
612 movq(rax, ExternalReference::delete_handle_scope_extensions()); | 618 movq(rax, ExternalReference::delete_handle_scope_extensions()); |
613 call(rax); | 619 call(rax); |
614 movq(rax, prev_limit_reg); | 620 movq(rax, prev_limit_reg); |
615 jmp(&leave_exit_frame); | 621 jmp(&leave_exit_frame); |
616 | 622 |
617 return result; | 623 return result; |
618 } | 624 } |
619 | 625 |
620 | 626 |
621 void MacroAssembler::JumpToExternalReference(const ExternalReference& ext, | 627 void MacroAssembler::JumpToExternalReference(const ExternalReference& ext, |
(...skipping 980 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1602 push(rbp); | 1608 push(rbp); |
1603 } else { | 1609 } else { |
1604 ASSERT(try_location == IN_JS_ENTRY); | 1610 ASSERT(try_location == IN_JS_ENTRY); |
1605 // The frame pointer does not point to a JS frame so we save NULL | 1611 // The frame pointer does not point to a JS frame so we save NULL |
1606 // for rbp. We expect the code throwing an exception to check rbp | 1612 // for rbp. We expect the code throwing an exception to check rbp |
1607 // before dereferencing it to restore the context. | 1613 // before dereferencing it to restore the context. |
1608 push(Immediate(StackHandler::ENTRY)); | 1614 push(Immediate(StackHandler::ENTRY)); |
1609 push(Immediate(0)); // NULL frame pointer. | 1615 push(Immediate(0)); // NULL frame pointer. |
1610 } | 1616 } |
1611 // Save the current handler. | 1617 // Save the current handler. |
1612 movq(kScratchRegister, ExternalReference(Top::k_handler_address)); | 1618 movq(kScratchRegister, ExternalReference(Isolate::k_handler_address)); |
1613 push(Operand(kScratchRegister, 0)); | 1619 push(Operand(kScratchRegister, 0)); |
1614 // Link this handler. | 1620 // Link this handler. |
1615 movq(Operand(kScratchRegister, 0), rsp); | 1621 movq(Operand(kScratchRegister, 0), rsp); |
1616 } | 1622 } |
1617 | 1623 |
1618 | 1624 |
1619 void MacroAssembler::PopTryHandler() { | 1625 void MacroAssembler::PopTryHandler() { |
1620 ASSERT_EQ(0, StackHandlerConstants::kNextOffset); | 1626 ASSERT_EQ(0, StackHandlerConstants::kNextOffset); |
1621 // Unlink this handler. | 1627 // Unlink this handler. |
1622 movq(kScratchRegister, ExternalReference(Top::k_handler_address)); | 1628 movq(kScratchRegister, ExternalReference(Isolate::k_handler_address)); |
1623 pop(Operand(kScratchRegister, 0)); | 1629 pop(Operand(kScratchRegister, 0)); |
1624 // Remove the remaining fields. | 1630 // Remove the remaining fields. |
1625 addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize)); | 1631 addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize)); |
1626 } | 1632 } |
1627 | 1633 |
1628 | 1634 |
1629 void MacroAssembler::Throw(Register value) { | 1635 void MacroAssembler::Throw(Register value) { |
1630 // Check that stack should contain next handler, frame pointer, state and | 1636 // Check that stack should contain next handler, frame pointer, state and |
1631 // return address in that order. | 1637 // return address in that order. |
1632 STATIC_ASSERT(StackHandlerConstants::kFPOffset + kPointerSize == | 1638 STATIC_ASSERT(StackHandlerConstants::kFPOffset + kPointerSize == |
1633 StackHandlerConstants::kStateOffset); | 1639 StackHandlerConstants::kStateOffset); |
1634 STATIC_ASSERT(StackHandlerConstants::kStateOffset + kPointerSize == | 1640 STATIC_ASSERT(StackHandlerConstants::kStateOffset + kPointerSize == |
1635 StackHandlerConstants::kPCOffset); | 1641 StackHandlerConstants::kPCOffset); |
1636 // Keep thrown value in rax. | 1642 // Keep thrown value in rax. |
1637 if (!value.is(rax)) { | 1643 if (!value.is(rax)) { |
1638 movq(rax, value); | 1644 movq(rax, value); |
1639 } | 1645 } |
1640 | 1646 |
1641 ExternalReference handler_address(Top::k_handler_address); | 1647 ExternalReference handler_address(Isolate::k_handler_address); |
1642 movq(kScratchRegister, handler_address); | 1648 movq(kScratchRegister, handler_address); |
1643 movq(rsp, Operand(kScratchRegister, 0)); | 1649 movq(rsp, Operand(kScratchRegister, 0)); |
1644 // get next in chain | 1650 // get next in chain |
1645 pop(rcx); | 1651 pop(rcx); |
1646 movq(Operand(kScratchRegister, 0), rcx); | 1652 movq(Operand(kScratchRegister, 0), rcx); |
1647 pop(rbp); // pop frame pointer | 1653 pop(rbp); // pop frame pointer |
1648 pop(rdx); // remove state | 1654 pop(rdx); // remove state |
1649 | 1655 |
1650 // Before returning we restore the context from the frame pointer if not NULL. | 1656 // Before returning we restore the context from the frame pointer if not NULL. |
1651 // The frame pointer is NULL in the exception handler of a JS entry frame. | 1657 // The frame pointer is NULL in the exception handler of a JS entry frame. |
1652 Set(rsi, 0); // Tentatively set context pointer to NULL | 1658 Set(rsi, 0); // Tentatively set context pointer to NULL |
1653 NearLabel skip; | 1659 NearLabel skip; |
1654 cmpq(rbp, Immediate(0)); | 1660 cmpq(rbp, Immediate(0)); |
1655 j(equal, &skip); | 1661 j(equal, &skip); |
1656 movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | 1662 movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); |
1657 bind(&skip); | 1663 bind(&skip); |
1658 ret(0); | 1664 ret(0); |
1659 } | 1665 } |
1660 | 1666 |
1661 | 1667 |
1662 void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type, | 1668 void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type, |
1663 Register value) { | 1669 Register value) { |
1664 // Keep thrown value in rax. | 1670 // Keep thrown value in rax. |
1665 if (!value.is(rax)) { | 1671 if (!value.is(rax)) { |
1666 movq(rax, value); | 1672 movq(rax, value); |
1667 } | 1673 } |
1668 // Fetch top stack handler. | 1674 // Fetch top stack handler. |
1669 ExternalReference handler_address(Top::k_handler_address); | 1675 ExternalReference handler_address(Isolate::k_handler_address); |
1670 movq(kScratchRegister, handler_address); | 1676 movq(kScratchRegister, handler_address); |
1671 movq(rsp, Operand(kScratchRegister, 0)); | 1677 movq(rsp, Operand(kScratchRegister, 0)); |
1672 | 1678 |
1673 // Unwind the handlers until the ENTRY handler is found. | 1679 // Unwind the handlers until the ENTRY handler is found. |
1674 NearLabel loop, done; | 1680 NearLabel loop, done; |
1675 bind(&loop); | 1681 bind(&loop); |
1676 // Load the type of the current stack handler. | 1682 // Load the type of the current stack handler. |
1677 const int kStateOffset = StackHandlerConstants::kStateOffset; | 1683 const int kStateOffset = StackHandlerConstants::kStateOffset; |
1678 cmpq(Operand(rsp, kStateOffset), Immediate(StackHandler::ENTRY)); | 1684 cmpq(Operand(rsp, kStateOffset), Immediate(StackHandler::ENTRY)); |
1679 j(equal, &done); | 1685 j(equal, &done); |
1680 // Fetch the next handler in the list. | 1686 // Fetch the next handler in the list. |
1681 const int kNextOffset = StackHandlerConstants::kNextOffset; | 1687 const int kNextOffset = StackHandlerConstants::kNextOffset; |
1682 movq(rsp, Operand(rsp, kNextOffset)); | 1688 movq(rsp, Operand(rsp, kNextOffset)); |
1683 jmp(&loop); | 1689 jmp(&loop); |
1684 bind(&done); | 1690 bind(&done); |
1685 | 1691 |
1686 // Set the top handler address to next handler past the current ENTRY handler. | 1692 // Set the top handler address to next handler past the current ENTRY handler. |
1687 movq(kScratchRegister, handler_address); | 1693 movq(kScratchRegister, handler_address); |
1688 pop(Operand(kScratchRegister, 0)); | 1694 pop(Operand(kScratchRegister, 0)); |
1689 | 1695 |
1690 if (type == OUT_OF_MEMORY) { | 1696 if (type == OUT_OF_MEMORY) { |
1691 // Set external caught exception to false. | 1697 // Set external caught exception to false. |
1692 ExternalReference external_caught(Top::k_external_caught_exception_address); | 1698 ExternalReference external_caught( |
| 1699 Isolate::k_external_caught_exception_address); |
1693 movq(rax, Immediate(false)); | 1700 movq(rax, Immediate(false)); |
1694 store_rax(external_caught); | 1701 store_rax(external_caught); |
1695 | 1702 |
1696 // Set pending exception and rax to out of memory exception. | 1703 // Set pending exception and rax to out of memory exception. |
1697 ExternalReference pending_exception(Top::k_pending_exception_address); | 1704 ExternalReference pending_exception(Isolate::k_pending_exception_address); |
1698 movq(rax, Failure::OutOfMemoryException(), RelocInfo::NONE); | 1705 movq(rax, Failure::OutOfMemoryException(), RelocInfo::NONE); |
1699 store_rax(pending_exception); | 1706 store_rax(pending_exception); |
1700 } | 1707 } |
1701 | 1708 |
1702 // Clear the context pointer. | 1709 // Clear the context pointer. |
1703 Set(rsi, 0); | 1710 Set(rsi, 0); |
1704 | 1711 |
1705 // Restore registers from handler. | 1712 // Restore registers from handler. |
1706 STATIC_ASSERT(StackHandlerConstants::kNextOffset + kPointerSize == | 1713 STATIC_ASSERT(StackHandlerConstants::kNextOffset + kPointerSize == |
1707 StackHandlerConstants::kFPOffset); | 1714 StackHandlerConstants::kFPOffset); |
(...skipping 55 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1763 Cmp(FieldOperand(obj, HeapObject::kMapOffset), map); | 1770 Cmp(FieldOperand(obj, HeapObject::kMapOffset), map); |
1764 j(not_equal, fail); | 1771 j(not_equal, fail); |
1765 } | 1772 } |
1766 | 1773 |
1767 | 1774 |
1768 void MacroAssembler::AbortIfNotNumber(Register object) { | 1775 void MacroAssembler::AbortIfNotNumber(Register object) { |
1769 NearLabel ok; | 1776 NearLabel ok; |
1770 Condition is_smi = CheckSmi(object); | 1777 Condition is_smi = CheckSmi(object); |
1771 j(is_smi, &ok); | 1778 j(is_smi, &ok); |
1772 Cmp(FieldOperand(object, HeapObject::kMapOffset), | 1779 Cmp(FieldOperand(object, HeapObject::kMapOffset), |
1773 Factory::heap_number_map()); | 1780 FACTORY->heap_number_map()); |
1774 Assert(equal, "Operand not a number"); | 1781 Assert(equal, "Operand not a number"); |
1775 bind(&ok); | 1782 bind(&ok); |
1776 } | 1783 } |
1777 | 1784 |
1778 | 1785 |
1779 void MacroAssembler::AbortIfSmi(Register object) { | 1786 void MacroAssembler::AbortIfSmi(Register object) { |
1780 NearLabel ok; | 1787 NearLabel ok; |
1781 Condition is_smi = CheckSmi(object); | 1788 Condition is_smi = CheckSmi(object); |
1782 Assert(NegateCondition(is_smi), "Operand is a smi"); | 1789 Assert(NegateCondition(is_smi), "Operand is a smi"); |
1783 } | 1790 } |
(...skipping 238 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2022 | 2029 |
2023 void MacroAssembler::EnterFrame(StackFrame::Type type) { | 2030 void MacroAssembler::EnterFrame(StackFrame::Type type) { |
2024 push(rbp); | 2031 push(rbp); |
2025 movq(rbp, rsp); | 2032 movq(rbp, rsp); |
2026 push(rsi); // Context. | 2033 push(rsi); // Context. |
2027 Push(Smi::FromInt(type)); | 2034 Push(Smi::FromInt(type)); |
2028 movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT); | 2035 movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT); |
2029 push(kScratchRegister); | 2036 push(kScratchRegister); |
2030 if (emit_debug_code()) { | 2037 if (emit_debug_code()) { |
2031 movq(kScratchRegister, | 2038 movq(kScratchRegister, |
2032 Factory::undefined_value(), | 2039 FACTORY->undefined_value(), |
2033 RelocInfo::EMBEDDED_OBJECT); | 2040 RelocInfo::EMBEDDED_OBJECT); |
2034 cmpq(Operand(rsp, 0), kScratchRegister); | 2041 cmpq(Operand(rsp, 0), kScratchRegister); |
2035 Check(not_equal, "code object not properly patched"); | 2042 Check(not_equal, "code object not properly patched"); |
2036 } | 2043 } |
2037 } | 2044 } |
2038 | 2045 |
2039 | 2046 |
2040 void MacroAssembler::LeaveFrame(StackFrame::Type type) { | 2047 void MacroAssembler::LeaveFrame(StackFrame::Type type) { |
2041 if (emit_debug_code()) { | 2048 if (emit_debug_code()) { |
2042 Move(kScratchRegister, Smi::FromInt(type)); | 2049 Move(kScratchRegister, Smi::FromInt(type)); |
(...skipping 18 matching lines...) Expand all Loading... |
2061 ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize); | 2068 ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize); |
2062 push(Immediate(0)); // Saved entry sp, patched before call. | 2069 push(Immediate(0)); // Saved entry sp, patched before call. |
2063 movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT); | 2070 movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT); |
2064 push(kScratchRegister); // Accessed from EditFrame::code_slot. | 2071 push(kScratchRegister); // Accessed from EditFrame::code_slot. |
2065 | 2072 |
2066 // Save the frame pointer and the context in top. | 2073 // Save the frame pointer and the context in top. |
2067 if (save_rax) { | 2074 if (save_rax) { |
2068 movq(r14, rax); // Backup rax in callee-save register. | 2075 movq(r14, rax); // Backup rax in callee-save register. |
2069 } | 2076 } |
2070 | 2077 |
2071 movq(kScratchRegister, ExternalReference(Top::k_c_entry_fp_address)); | 2078 movq(kScratchRegister, ExternalReference(Isolate::k_c_entry_fp_address)); |
2072 movq(Operand(kScratchRegister, 0), rbp); | 2079 movq(Operand(kScratchRegister, 0), rbp); |
2073 | 2080 |
2074 movq(kScratchRegister, ExternalReference(Top::k_context_address)); | 2081 movq(kScratchRegister, ExternalReference(Isolate::k_context_address)); |
2075 movq(Operand(kScratchRegister, 0), rsi); | 2082 movq(Operand(kScratchRegister, 0), rsi); |
2076 } | 2083 } |
2077 | 2084 |
2078 | 2085 |
2079 void MacroAssembler::EnterExitFrameEpilogue(int arg_stack_space, | 2086 void MacroAssembler::EnterExitFrameEpilogue(int arg_stack_space, |
2080 bool save_doubles) { | 2087 bool save_doubles) { |
2081 #ifdef _WIN64 | 2088 #ifdef _WIN64 |
2082 const int kShadowSpace = 4; | 2089 const int kShadowSpace = 4; |
2083 arg_stack_space += kShadowSpace; | 2090 arg_stack_space += kShadowSpace; |
2084 #endif | 2091 #endif |
2085 // Optionally save all XMM registers. | 2092 // Optionally save all XMM registers. |
2086 if (save_doubles) { | 2093 if (save_doubles) { |
2087 CpuFeatures::Scope scope(SSE2); | 2094 CpuFeatures::Scope scope(SSE2); |
2088 int space = XMMRegister::kNumRegisters * kDoubleSize + | 2095 int space = XMMRegister::kNumRegisters * kDoubleSize + |
2089 arg_stack_space * kPointerSize; | 2096 arg_stack_space * kPointerSize; |
2090 subq(rsp, Immediate(space)); | 2097 subq(rsp, Immediate(space)); |
2091 int offset = -2 * kPointerSize; | 2098 int offset = -2 * kPointerSize; |
2092 for (int i = 0; i < XMMRegister::kNumAllocatableRegisters; i++) { | 2099 for (int i = 0; i < XMMRegister::kNumAllocatableRegisters; i++) { |
2093 XMMRegister reg = XMMRegister::FromAllocationIndex(i); | 2100 XMMRegister reg = XMMRegister::FromAllocationIndex(i); |
2094 movsd(Operand(rbp, offset - ((i + 1) * kDoubleSize)), reg); | 2101 movsd(Operand(rbp, offset - ((i + 1) * kDoubleSize)), reg); |
2095 } | 2102 } |
2096 } else if (arg_stack_space > 0) { | 2103 } else if (arg_stack_space > 0) { |
2097 subq(rsp, Immediate(arg_stack_space * kPointerSize)); | 2104 subq(rsp, Immediate(arg_stack_space * kPointerSize)); |
2098 } | 2105 } |
2099 | 2106 |
2100 // Get the required frame alignment for the OS. | 2107 // Get the required frame alignment for the OS. |
2101 static const int kFrameAlignment = OS::ActivationFrameAlignment(); | 2108 const int kFrameAlignment = OS::ActivationFrameAlignment(); |
2102 if (kFrameAlignment > 0) { | 2109 if (kFrameAlignment > 0) { |
2103 ASSERT(IsPowerOf2(kFrameAlignment)); | 2110 ASSERT(IsPowerOf2(kFrameAlignment)); |
2104 movq(kScratchRegister, Immediate(-kFrameAlignment)); | 2111 movq(kScratchRegister, Immediate(-kFrameAlignment)); |
2105 and_(rsp, kScratchRegister); | 2112 and_(rsp, kScratchRegister); |
2106 } | 2113 } |
2107 | 2114 |
2108 // Patch the saved entry sp. | 2115 // Patch the saved entry sp. |
2109 movq(Operand(rbp, ExitFrameConstants::kSPOffset), rsp); | 2116 movq(Operand(rbp, ExitFrameConstants::kSPOffset), rsp); |
2110 } | 2117 } |
2111 | 2118 |
(...skipping 44 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2156 void MacroAssembler::LeaveApiExitFrame() { | 2163 void MacroAssembler::LeaveApiExitFrame() { |
2157 movq(rsp, rbp); | 2164 movq(rsp, rbp); |
2158 pop(rbp); | 2165 pop(rbp); |
2159 | 2166 |
2160 LeaveExitFrameEpilogue(); | 2167 LeaveExitFrameEpilogue(); |
2161 } | 2168 } |
2162 | 2169 |
2163 | 2170 |
2164 void MacroAssembler::LeaveExitFrameEpilogue() { | 2171 void MacroAssembler::LeaveExitFrameEpilogue() { |
2165 // Restore current context from top and clear it in debug mode. | 2172 // Restore current context from top and clear it in debug mode. |
2166 ExternalReference context_address(Top::k_context_address); | 2173 ExternalReference context_address(Isolate::k_context_address); |
2167 movq(kScratchRegister, context_address); | 2174 movq(kScratchRegister, context_address); |
2168 movq(rsi, Operand(kScratchRegister, 0)); | 2175 movq(rsi, Operand(kScratchRegister, 0)); |
2169 #ifdef DEBUG | 2176 #ifdef DEBUG |
2170 movq(Operand(kScratchRegister, 0), Immediate(0)); | 2177 movq(Operand(kScratchRegister, 0), Immediate(0)); |
2171 #endif | 2178 #endif |
2172 | 2179 |
2173 // Clear the top frame. | 2180 // Clear the top frame. |
2174 ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address); | 2181 ExternalReference c_entry_fp_address(Isolate::k_c_entry_fp_address); |
2175 movq(kScratchRegister, c_entry_fp_address); | 2182 movq(kScratchRegister, c_entry_fp_address); |
2176 movq(Operand(kScratchRegister, 0), Immediate(0)); | 2183 movq(Operand(kScratchRegister, 0), Immediate(0)); |
2177 } | 2184 } |
2178 | 2185 |
2179 | 2186 |
2180 void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg, | 2187 void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg, |
2181 Register scratch, | 2188 Register scratch, |
2182 Label* miss) { | 2189 Label* miss) { |
2183 Label same_contexts; | 2190 Label same_contexts; |
2184 | 2191 |
2185 ASSERT(!holder_reg.is(scratch)); | 2192 ASSERT(!holder_reg.is(scratch)); |
2186 ASSERT(!scratch.is(kScratchRegister)); | 2193 ASSERT(!scratch.is(kScratchRegister)); |
2187 // Load current lexical context from the stack frame. | 2194 // Load current lexical context from the stack frame. |
2188 movq(scratch, Operand(rbp, StandardFrameConstants::kContextOffset)); | 2195 movq(scratch, Operand(rbp, StandardFrameConstants::kContextOffset)); |
2189 | 2196 |
2190 // When generating debug code, make sure the lexical context is set. | 2197 // When generating debug code, make sure the lexical context is set. |
2191 if (emit_debug_code()) { | 2198 if (emit_debug_code()) { |
2192 cmpq(scratch, Immediate(0)); | 2199 cmpq(scratch, Immediate(0)); |
2193 Check(not_equal, "we should not have an empty lexical context"); | 2200 Check(not_equal, "we should not have an empty lexical context"); |
2194 } | 2201 } |
2195 // Load the global context of the current context. | 2202 // Load the global context of the current context. |
2196 int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize; | 2203 int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize; |
2197 movq(scratch, FieldOperand(scratch, offset)); | 2204 movq(scratch, FieldOperand(scratch, offset)); |
2198 movq(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset)); | 2205 movq(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset)); |
2199 | 2206 |
2200 // Check the context is a global context. | 2207 // Check the context is a global context. |
2201 if (emit_debug_code()) { | 2208 if (emit_debug_code()) { |
2202 Cmp(FieldOperand(scratch, HeapObject::kMapOffset), | 2209 Cmp(FieldOperand(scratch, HeapObject::kMapOffset), |
2203 Factory::global_context_map()); | 2210 FACTORY->global_context_map()); |
2204 Check(equal, "JSGlobalObject::global_context should be a global context."); | 2211 Check(equal, "JSGlobalObject::global_context should be a global context."); |
2205 } | 2212 } |
2206 | 2213 |
2207 // Check if both contexts are the same. | 2214 // Check if both contexts are the same. |
2208 cmpq(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset)); | 2215 cmpq(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset)); |
2209 j(equal, &same_contexts); | 2216 j(equal, &same_contexts); |
2210 | 2217 |
2211 // Compare security tokens. | 2218 // Compare security tokens. |
2212 // Check that the security token in the calling global object is | 2219 // Check that the security token in the calling global object is |
2213 // compatible with the security token in the receiving global | 2220 // compatible with the security token in the receiving global |
(...skipping 404 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2618 // (i.e., the static scope chain and runtime context chain do not agree). | 2625 // (i.e., the static scope chain and runtime context chain do not agree). |
2619 // A variable occurring in such a scope should have slot type LOOKUP and | 2626 // A variable occurring in such a scope should have slot type LOOKUP and |
2620 // not CONTEXT. | 2627 // not CONTEXT. |
2621 if (emit_debug_code()) { | 2628 if (emit_debug_code()) { |
2622 cmpq(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX))); | 2629 cmpq(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX))); |
2623 Check(equal, "Yo dawg, I heard you liked function contexts " | 2630 Check(equal, "Yo dawg, I heard you liked function contexts " |
2624 "so I put function contexts in all your contexts"); | 2631 "so I put function contexts in all your contexts"); |
2625 } | 2632 } |
2626 } | 2633 } |
2627 | 2634 |
| 2635 #ifdef _WIN64 |
| 2636 static const int kRegisterPassedArguments = 4; |
| 2637 #else |
| 2638 static const int kRegisterPassedArguments = 6; |
| 2639 #endif |
2628 | 2640 |
// Load one of the pre-installed global-context functions, identified by
// |index| (a Context slot constant), into |function|.
void MacroAssembler::LoadGlobalFunction(int index, Register function) {
  // Load the global or builtins object from the current context (rsi).
  movq(function, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  // Load the global context from the global or builtins object.
  movq(function, FieldOperand(function, GlobalObject::kGlobalContextOffset));
  // Load the function from the global context.
  movq(function, Operand(function, Context::SlotOffset(index)));
}
2637 | 2649 |
2638 | 2650 |
// Load the initial map of a global function into |map|.  |function| must
// already hold a global function (see LoadGlobalFunction); debug code
// verifies that an initial map is actually present.
void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
                                                  Register map) {
  // Load the initial map.  The global functions all have initial maps.
  movq(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
  if (emit_debug_code()) {
    Label ok, fail;
    // A valid map's own map is the meta map; anything else is a failure.
    CheckMap(map, FACTORY->meta_map(), &fail, false);
    jmp(&ok);
    bind(&fail);
    Abort("Global functions must have initial map");
    bind(&ok);
  }
}
2652 | 2664 |
2653 | 2665 |
2654 int MacroAssembler::ArgumentStackSlotsForCFunctionCall(int num_arguments) { | 2666 int MacroAssembler::ArgumentStackSlotsForCFunctionCall(int num_arguments) { |
2655 // On Windows 64 stack slots are reserved by the caller for all arguments | 2667 // On Windows 64 stack slots are reserved by the caller for all arguments |
2656 // including the ones passed in registers, and space is always allocated for | 2668 // including the ones passed in registers, and space is always allocated for |
2657 // the four register arguments even if the function takes fewer than four | 2669 // the four register arguments even if the function takes fewer than four |
2658 // arguments. | 2670 // arguments. |
2659 // On AMD64 ABI (Linux/Mac) the first six arguments are passed in registers | 2671 // On AMD64 ABI (Linux/Mac) the first six arguments are passed in registers |
2660 // and the caller does not reserve stack slots for them. | 2672 // and the caller does not reserve stack slots for them. |
2661 ASSERT(num_arguments >= 0); | 2673 ASSERT(num_arguments >= 0); |
2662 #ifdef _WIN64 | 2674 #ifdef _WIN64 |
2663 static const int kMinimumStackSlots = 4; | 2675 const int kMinimumStackSlots = kRegisterPassedArguments; |
2664 if (num_arguments < kMinimumStackSlots) return kMinimumStackSlots; | 2676 if (num_arguments < kMinimumStackSlots) return kMinimumStackSlots; |
2665 return num_arguments; | 2677 return num_arguments; |
2666 #else | 2678 #else |
2667 static const int kRegisterPassedArguments = 6; | |
2668 if (num_arguments < kRegisterPassedArguments) return 0; | 2679 if (num_arguments < kRegisterPassedArguments) return 0; |
2669 return num_arguments - kRegisterPassedArguments; | 2680 return num_arguments - kRegisterPassedArguments; |
2670 #endif | 2681 #endif |
2671 } | 2682 } |
2672 | 2683 |
2673 | 2684 |
// Prepare the stack for a C function call with |num_arguments| explicit
// arguments: align rsp to the OS activation frame alignment, reserve the
// argument slots plus one extra slot, and store the previous rsp in that
// extra slot so CallCFunction can restore it after the call.  One hidden
// argument (the Isolate address, appended by CallCFunction) is included
// in the reservation here.
void MacroAssembler::PrepareCallCFunction(int num_arguments) {
  int frame_alignment = OS::ActivationFrameAlignment();
  ASSERT(frame_alignment != 0);
  ASSERT(num_arguments >= 0);

  // Reserve space for Isolate address which is always passed as last parameter
  num_arguments += 1;

  // Make stack end at alignment and allocate space for arguments and old rsp.
  movq(kScratchRegister, rsp);
  ASSERT(IsPowerOf2(frame_alignment));
  int argument_slots_on_stack =
      ArgumentStackSlotsForCFunctionCall(num_arguments);
  // +1 slot holds the saved rsp; it ends up just past the argument slots.
  subq(rsp, Immediate((argument_slots_on_stack + 1) * kPointerSize));
  and_(rsp, Immediate(-frame_alignment));
  movq(Operand(rsp, argument_slots_on_stack * kPointerSize), kScratchRegister);
}
2687 | 2702 |
2688 | 2703 |
// Call a C function identified by an ExternalReference.  Materializes the
// target address in rax (which is never an argument register here, so it
// cannot collide with the isolate argument set up below) and delegates to
// the register variant.
void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  movq(rax, function);
  CallCFunction(rax, num_arguments);
}
2694 | 2709 |
2695 | 2710 |
// Call the C function whose address is in |function|.  |num_arguments| is
// the number of explicit arguments the caller has already placed; the
// current Isolate address is appended here as one extra hidden argument
// (matching the extra slot PrepareCallCFunction reserved).  Afterwards rsp
// is restored from the slot PrepareCallCFunction saved it in.
void MacroAssembler::CallCFunction(Register function, int num_arguments) {
  // Pass current isolate address as additional parameter.
  if (num_arguments < kRegisterPassedArguments) {
#ifdef _WIN64
    // First four arguments are passed in registers on Windows.
    Register arg_to_reg[] = {rcx, rdx, r8, r9};
#else
    // First six arguments are passed in registers on other platforms.
    Register arg_to_reg[] = {rdi, rsi, rdx, rcx, r8, r9};
#endif
    // NOTE(review): this clobbers the next free argument register; if
    // |function| aliased that register the call target would be lost.
    // Presumably callers avoid this (the ExternalReference overload uses
    // rax) -- verify for direct register callers.
    Register reg = arg_to_reg[num_arguments];
    movq(reg, ExternalReference::isolate_address());
  } else {
    // Push Isolate pointer after all parameters; the slot at index
    // ArgumentStackSlotsForCFunctionCall(num_arguments) is the first one
    // past the explicit arguments.
    int argument_slots_on_stack =
        ArgumentStackSlotsForCFunctionCall(num_arguments);
    movq(kScratchRegister, ExternalReference::isolate_address());
    movq(Operand(rsp, argument_slots_on_stack * kPointerSize),
         kScratchRegister);
  }

  // Check stack alignment.
  if (emit_debug_code()) {
    CheckStackAlignment();
  }

  call(function);
  ASSERT(OS::ActivationFrameAlignment() != 0);
  ASSERT(num_arguments >= 0);
  // Account for the appended isolate argument so the slot index of the
  // saved rsp matches the one used by PrepareCallCFunction.
  num_arguments += 1;
  int argument_slots_on_stack =
      ArgumentStackSlotsForCFunctionCall(num_arguments);
  movq(rsp, Operand(rsp, argument_slots_on_stack * kPointerSize));
}
2709 | 2745 |
2710 | 2746 |
// Create a new macro assembler pointing at the code to patch.  The buffer
// size is padded with kGap in order for the assembler to generate |size|
// bytes of instructions without hitting its buffer-size safety margin.
CodePatcher::CodePatcher(byte* address, int size)
    : address_(address), size_(size), masm_(address, size + Assembler::kGap) {
  // No relocation info should have been emitted yet.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
2718 | 2754 |
2719 | 2755 |
// Flush the instruction cache so the patched code takes effect, and verify
// that exactly |size_| bytes were emitted with no relocation info.
CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
2728 | 2764 |
2729 } } // namespace v8::internal | 2765 } } // namespace v8::internal |
2730 | 2766 |
2731 #endif // V8_TARGET_ARCH_X64 | 2767 #endif // V8_TARGET_ARCH_X64 |
OLD | NEW |