OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 3639 matching lines...)
3650 __ addq(rsp, Immediate(2 * kPointerSize)); // remove markers | 3650 __ addq(rsp, Immediate(2 * kPointerSize)); // remove markers |
3651 | 3651 |
3652 // Restore frame pointer and return. | 3652 // Restore frame pointer and return. |
3653 __ pop(rbp); | 3653 __ pop(rbp); |
3654 __ ret(0); | 3654 __ ret(0); |
3655 } | 3655 } |
3656 | 3656 |
3657 | 3657 |
3658 void InstanceofStub::Generate(MacroAssembler* masm) { | 3658 void InstanceofStub::Generate(MacroAssembler* masm) { |
3659 // Implements "value instanceof function" operator. | 3659 // Implements "value instanceof function" operator. |
3660 // Expected input state: | 3660 // Expected input state with no inline cache: |
3661 // rsp[0] : return address | 3661 // rsp[0] : return address |
3662 // rsp[1] : function pointer | 3662 // rsp[1] : function pointer |
3663 // rsp[2] : value | 3663 // rsp[2] : value |
| 3664 // Expected input state with an inline one-element cache: |
| 3665 // rsp[0] : return address |
| 3666 // rsp[1] : offset from return address to location of inline cache |
| 3667 // rsp[2] : function pointer |
| 3668 // rsp[3] : value |
3664 // Returns a bitwise zero to indicate that the value | 3669 // Returns a bitwise zero to indicate that the value |
3665 // is an instance of the function and anything else to | 3670 // is an instance of the function and anything else to |
3666 // indicate that the value is not an instance. | 3671 // indicate that the value is not an instance. |
3667 | 3672 |
3668 // None of the flags are supported on X64. | 3673 static const int kOffsetToMapCheckValue = 5; |
3669 ASSERT(flags_ == kNoFlags); | 3674 static const int kOffsetToResultValue = 21; |
| 3675 // The last 4 bytes of the instruction sequence |
| 3676 // movq(rax, FieldOperand(rdi, HeapObject::kMapOffset)) |
| 3677 // Move(kScratchRegister, Factory::the_hole_value) |
| 3678 // that precede the hole value address. |
| 3679 static const unsigned int kWordBeforeMapCheckValue = 0xBA49FF78; |
| 3680 // The last 4 bytes of the instruction sequence |
| 3681 // __ j(not_equal, &cache_miss); |
| 3682 // __ LoadRoot(ToRegister(instr->result()), Heap::kTheHoleValueRootIndex); |
| 3683 // that precede the hole value's offset in the root array. |
| 3684 static const unsigned int kWordBeforeResultValue = 0x458B4909; |
| 3685 // Only the inline check flag is supported on X64. |
| 3686 ASSERT(flags_ == kNoFlags || HasCallSiteInlineCheck()); |
| 3687 int extra_stack_space = HasCallSiteInlineCheck() ? kPointerSize : 0; |
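Note on the inline-check variant: the call site pushes the distance between its own return address and the start of the inlined check sequence, so the stub can locate the bytes to patch with plain pointer arithmetic, and every stack-relative argument offset grows by extra_stack_space. A minimal standalone sketch of that address computation (illustrative names; the stub does the same thing with kScratchRegister below):

    #include <cstdint>

    // Sketch: recover the patch site, assuming rsp[0] holds the return
    // address and rsp[1] holds the pushed delta (hypothetical helper).
    static uint8_t* PatchSiteFromStack(uint8_t** rsp) {
      uint8_t* return_address = rsp[0];
      intptr_t delta = reinterpret_cast<intptr_t>(rsp[1]);
      return return_address - delta;  // start of the inlined sequence
    }

The map under test is then stored kOffsetToMapCheckValue bytes into that sequence, and in debug builds kWordBeforeMapCheckValue / kWordBeforeResultValue let the stub verify it is really patching the expected instruction bytes.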
3670 | 3688 |
3671 // Get the object - go slow case if it's a smi. | 3689 // Get the object - go slow case if it's a smi. |
3672 Label slow; | 3690 Label slow; |
3673 __ movq(rax, Operand(rsp, 2 * kPointerSize)); | 3691 |
| 3692 __ movq(rax, Operand(rsp, 2 * kPointerSize + extra_stack_space)); |
3674 __ JumpIfSmi(rax, &slow); | 3693 __ JumpIfSmi(rax, &slow); |
3675 | 3694 |
3676 // Check that the left hand is a JS object. Leave its map in rax. | 3695 // Check that the left hand is a JS object. Leave its map in rax. |
3677 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rax); | 3696 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rax); |
3678 __ j(below, &slow); | 3697 __ j(below, &slow); |
3679 __ CmpInstanceType(rax, LAST_JS_OBJECT_TYPE); | 3698 __ CmpInstanceType(rax, LAST_JS_OBJECT_TYPE); |
3680 __ j(above, &slow); | 3699 __ j(above, &slow); |
3681 | 3700 |
3682 // Get the prototype of the function. | 3701 // Get the prototype of the function. |
3683 __ movq(rdx, Operand(rsp, 1 * kPointerSize)); | 3702 __ movq(rdx, Operand(rsp, 1 * kPointerSize + extra_stack_space)); |
3684 // rdx is function, rax is map. | 3703 // rdx is function, rax is map. |
3685 | 3704 |
3686 // Look up the function and the map in the instanceof cache. | 3705 // If there is a call site cache, don't look in the global cache, but do the |
3687 NearLabel miss; | 3706 // real lookup and update the call site cache. |
3688 __ CompareRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex); | 3707 if (!HasCallSiteInlineCheck()) { |
3689 __ j(not_equal, &miss); | 3708 // Look up the function and the map in the instanceof cache. |
3690 __ CompareRoot(rax, Heap::kInstanceofCacheMapRootIndex); | 3709 NearLabel miss; |
3691 __ j(not_equal, &miss); | 3710 __ CompareRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex); |
3692 __ LoadRoot(rax, Heap::kInstanceofCacheAnswerRootIndex); | 3711 __ j(not_equal, &miss); |
3693 __ ret(2 * kPointerSize); | 3712 __ CompareRoot(rax, Heap::kInstanceofCacheMapRootIndex); |
| 3713 __ j(not_equal, &miss); |
| 3714 __ LoadRoot(rax, Heap::kInstanceofCacheAnswerRootIndex); |
| 3715 __ ret(2 * kPointerSize); |
| 3716 __ bind(&miss); |
| 3717 } |
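The fast path above consults a one-element global cache kept in the root array; in plain C++ it amounts to this sketch (types and names simplified, not V8 API):

    // One-element instanceof cache: a hit requires both keys to match.
    struct InstanceofCache {
      const void* function;
      const void* map;
      int answer;  // 0 means "is an instance", non-zero means "is not"
    };

    static bool LookupCache(const InstanceofCache& c, const void* fn,
                            const void* map, int* answer) {
      if (c.function != fn || c.map != map) return false;  // miss
      *answer = c.answer;
      return true;
    }

On a miss the stub falls through to the full prototype-chain lookup and refills the cache with the new triple.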
3694 | 3718 |
3695 __ bind(&miss); | |
3696 __ TryGetFunctionPrototype(rdx, rbx, &slow); | 3719 __ TryGetFunctionPrototype(rdx, rbx, &slow); |
3697 | 3720 |
3698 // Check that the function prototype is a JS object. | 3721 // Check that the function prototype is a JS object. |
3699 __ JumpIfSmi(rbx, &slow); | 3722 __ JumpIfSmi(rbx, &slow); |
3700 __ CmpObjectType(rbx, FIRST_JS_OBJECT_TYPE, kScratchRegister); | 3723 __ CmpObjectType(rbx, FIRST_JS_OBJECT_TYPE, kScratchRegister); |
3701 __ j(below, &slow); | 3724 __ j(below, &slow); |
3702 __ CmpInstanceType(kScratchRegister, LAST_JS_OBJECT_TYPE); | 3725 __ CmpInstanceType(kScratchRegister, LAST_JS_OBJECT_TYPE); |
3703 __ j(above, &slow); | 3726 __ j(above, &slow); |
3704 | 3727 |
3705 // Register mapping: | 3728 // Register mapping: |
3706 // rax is object map. | 3729 // rax is object map. |
3707 // rdx is function. | 3730 // rdx is function. |
3708 // rbx is function prototype. | 3731 // rbx is function prototype. |
3709 __ StoreRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex); | 3732 if (!HasCallSiteInlineCheck()) { |
3710 __ StoreRoot(rax, Heap::kInstanceofCacheMapRootIndex); | 3733 __ StoreRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex); |
| 3734 __ StoreRoot(rax, Heap::kInstanceofCacheMapRootIndex); |
| 3735 } else { |
| 3736 __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize)); |
| 3737 __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize)); |
| 3738 __ movq(Operand(kScratchRegister, kOffsetToMapCheckValue), rax); |
| 3739 if (FLAG_debug_code) { |
| 3740 __ movl(rdi, Immediate(kWordBeforeMapCheckValue)); |
| 3741 __ cmpl(Operand(kScratchRegister, kOffsetToMapCheckValue - 4), rdi); |
| 3742 __ Assert(equal, "InstanceofStub unexpected call site cache."); |
| 3743 } |
| 3744 } |
3711 | 3745 |
3712 __ movq(rcx, FieldOperand(rax, Map::kPrototypeOffset)); | 3746 __ movq(rcx, FieldOperand(rax, Map::kPrototypeOffset)); |
3713 | 3747 |
3714 // Loop through the prototype chain looking for the function prototype. | 3748 // Loop through the prototype chain looking for the function prototype. |
3715 NearLabel loop, is_instance, is_not_instance; | 3749 NearLabel loop, is_instance, is_not_instance; |
3716 __ LoadRoot(kScratchRegister, Heap::kNullValueRootIndex); | 3750 __ LoadRoot(kScratchRegister, Heap::kNullValueRootIndex); |
3717 __ bind(&loop); | 3751 __ bind(&loop); |
3718 __ cmpq(rcx, rbx); | 3752 __ cmpq(rcx, rbx); |
3719 __ j(equal, &is_instance); | 3753 __ j(equal, &is_instance); |
3720 __ cmpq(rcx, kScratchRegister); | 3754 __ cmpq(rcx, kScratchRegister); |
3721 // The code at is_not_instance assumes that kScratchRegister contains a | 3755 // The code at is_not_instance assumes that kScratchRegister contains a |
3722 // non-zero GCable value (the null object in this case). | 3756 // non-zero GCable value (the null object in this case). |
3723 __ j(equal, &is_not_instance); | 3757 __ j(equal, &is_not_instance); |
3724 __ movq(rcx, FieldOperand(rcx, HeapObject::kMapOffset)); | 3758 __ movq(rcx, FieldOperand(rcx, HeapObject::kMapOffset)); |
3725 __ movq(rcx, FieldOperand(rcx, Map::kPrototypeOffset)); | 3759 __ movq(rcx, FieldOperand(rcx, Map::kPrototypeOffset)); |
3726 __ jmp(&loop); | 3760 __ jmp(&loop); |
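The loop just emitted is the standard prototype-chain walk, with the result encoded as bitwise zero (instance) or a non-zero GCable value (not an instance). As a standalone C++ sketch with a simplified object model (illustrative, not V8's types):

    // Follow value's prototype chain until we reach the function's
    // prototype (instance) or null (not an instance).
    struct Obj { Obj* proto; };  // stand-in for map()->prototype()

    static bool IsInstance(Obj* value_proto, Obj* function_proto) {
      for (Obj* p = value_proto; p != 0; p = p->proto) {
        if (p == function_proto) return true;
      }
      return false;
    }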
3727 | 3761 |
3728 __ bind(&is_instance); | 3762 __ bind(&is_instance); |
3729 __ xorl(rax, rax); | 3763 if (!HasCallSiteInlineCheck()) { |
3730 // Store bitwise zero in the cache. This is a Smi in GC terms. | 3764 __ xorl(rax, rax); |
3731 STATIC_ASSERT(kSmiTag == 0); | 3765 // Store bitwise zero in the cache. This is a Smi in GC terms. |
3732 __ StoreRoot(rax, Heap::kInstanceofCacheAnswerRootIndex); | 3766 STATIC_ASSERT(kSmiTag == 0); |
3733 __ ret(2 * kPointerSize); | 3767 __ StoreRoot(rax, Heap::kInstanceofCacheAnswerRootIndex); |
| 3768 } else { |
| 3769 // Store offset of true in the root array at the inline check site. |
| 3770 ASSERT((Heap::kTrueValueRootIndex << kPointerSizeLog2) - kRootRegisterBias |
| 3771 == 0xB0 - 0x100); |
| 3772 __ movl(rax, Immediate(0xB0)); // TrueValue is at -10 * kPointerSize. |
| 3773 __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize)); |
| 3774 __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize)); |
| 3775 __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax); |
| 3776 if (FLAG_debug_code) { |
| 3777 __ movl(rax, Immediate(kWordBeforeResultValue)); |
| 3778 __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax); |
| 3779 __ Assert(equal, "InstanceofStub unexpected call site cache."); |
| 3780 } |
| 3781 __ xorl(rax, rax); |
| 3782 } |
| 3783 __ ret(2 * kPointerSize + extra_stack_space); |
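The magic displacement bytes deserve a worked check. The inlined LoadRoot compiles to a load with an 8-bit displacement off the root register, which points kRootRegisterBias bytes into the root array, so patching that single byte flips the inlined answer. Interpreted as signed bytes, 0xB0 == -0x50 == -10 * kPointerSize (TrueValue) and 0xB8 == -0x48 == -9 * kPointerSize (FalseValue), which is what the ASSERTs verify modulo 0x100. A sketch of the arithmetic, assuming the era's 8-byte pointers and a root-register bias of 128:

    // disp8 is the byte written at kOffsetToResultValue; as a signed
    // byte it addresses a root relative to the biased root register.
    static const int kPtrSize = 8;     // assumed kPointerSize
    static const int kRootBias = 128;  // assumed kRootRegisterBias

    static int RootIndexFromDisp8(unsigned char disp8) {
      int disp = static_cast<signed char>(disp8);  // 0xB0 -> -80
      return (disp + kRootBias) / kPtrSize;        // root array index
    }

With these assumptions RootIndexFromDisp8(0xB0) and RootIndexFromDisp8(0xB8) yield consecutive indices for the true and false roots, matching the ASSERTed identities.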
3734 | 3784 |
3735 __ bind(&is_not_instance); | 3785 __ bind(&is_not_instance); |
3736 // We have to store a non-zero value in the cache. | 3786 if (!HasCallSiteInlineCheck()) { |
3737 __ StoreRoot(kScratchRegister, Heap::kInstanceofCacheAnswerRootIndex); | 3787 // We have to store a non-zero value in the cache. |
3738 __ ret(2 * kPointerSize); | 3788 __ StoreRoot(kScratchRegister, Heap::kInstanceofCacheAnswerRootIndex); |
| 3789 } else { |
| 3790 // Store offset of false in the root array at the inline check site. |
| 3791 ASSERT((Heap::kFalseValueRootIndex << kPointerSizeLog2) - kRootRegisterBias |
| 3792 == 0xB8 - 0x100); |
| 3793 __ movl(rax, Immediate(0xB8)); // FalseValue is at -9 * kPointerSize. |
| 3794 __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize)); |
| 3795 __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize)); |
| 3796 __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax); |
| 3797 if (FLAG_debug_code) { |
| 3798 __ movl(rax, Immediate(kWordBeforeResultValue)); |
| 3799 __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax); |
| 3800 __ Assert(equal, "InstanceofStub unexpected call site cache (mov)"); |
| 3801 } |
| 3802 } |
| 3803 __ ret(2 * kPointerSize + extra_stack_space); |
3739 | 3804 |
3740 // Slow-case: Go through the JavaScript implementation. | 3805 // Slow-case: Go through the JavaScript implementation. |
3741 __ bind(&slow); | 3806 __ bind(&slow); |
| 3807 if (HasCallSiteInlineCheck()) { |
| 3808 // Remove extra value from the stack. |
| 3809 __ pop(rcx); |
| 3810 __ pop(rax); |
| 3811 __ push(rcx); |
| 3812 } |
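The pop/pop/push above drops the extra delta word while keeping the return address on top, restoring the layout the builtin expects:

    // Stack before:  [return address][delta][function][value]
    // pop rcx   ->   rcx = return address
    // pop rax   ->   delta discarded (rax is dead at this point)
    // push rcx  ->   [return address][function][value]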
3742 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION); | 3813 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION); |
3743 } | 3814 } |
3744 | 3815 |
3745 | 3816 |
3746 // Passing arguments in registers is not supported. | 3817 // Passing arguments in registers is not supported. |
3747 Register InstanceofStub::left() { return no_reg; } | 3818 Register InstanceofStub::left() { return no_reg; } |
3748 | 3819 |
3749 | 3820 |
3750 Register InstanceofStub::right() { return no_reg; } | 3821 Register InstanceofStub::right() { return no_reg; } |
3751 | 3822 |
(...skipping 1254 matching lines...)
5006 // Do a tail call to the rewritten stub. | 5077 // Do a tail call to the rewritten stub. |
5007 __ jmp(rdi); | 5078 __ jmp(rdi); |
5008 } | 5079 } |
5009 | 5080 |
5010 | 5081 |
5011 #undef __ | 5082 #undef __ |
5012 | 5083 |
5013 } } // namespace v8::internal | 5084 } } // namespace v8::internal |
5014 | 5085 |
5015 #endif // V8_TARGET_ARCH_X64 | 5086 #endif // V8_TARGET_ARCH_X64 |