Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 3635 matching lines...) | |
| 3646 __ addq(rsp, Immediate(2 * kPointerSize)); // remove markers | 3646 __ addq(rsp, Immediate(2 * kPointerSize)); // remove markers |
| 3647 | 3647 |
| 3648 // Restore frame pointer and return. | 3648 // Restore frame pointer and return. |
| 3649 __ pop(rbp); | 3649 __ pop(rbp); |
| 3650 __ ret(0); | 3650 __ ret(0); |
| 3651 } | 3651 } |
| 3652 | 3652 |
| 3653 | 3653 |
| 3654 void InstanceofStub::Generate(MacroAssembler* masm) { | 3654 void InstanceofStub::Generate(MacroAssembler* masm) { |
| 3655 // Implements "value instanceof function" operator. | 3655 // Implements "value instanceof function" operator. |
| 3656 // Expected input state: | 3656 // Expected input state with no inline cache: |
| 3657 // rsp[0] : return address | 3657 // rsp[0] : return address |
| 3658 // rsp[1] : function pointer | 3658 // rsp[1] : function pointer |
| 3659 // rsp[2] : value | 3659 // rsp[2] : value |
| 3660 // Expected input state with an inline one-element cache: | |
| 3661 // rsp[0] : return address | |
| 3662 // rsp[1] : offset from return address to location of inline cache | |
| 3663 // rsp[2] : function pointer | |
| 3664 // rsp[3] : value | |
| 3660 // Returns a bitwise zero to indicate that the value | 3665 // Returns a bitwise zero to indicate that the value |
| 3661 // is an instance of the function and anything else to | 3666 // is an instance of the function and anything else to |
| 3662 // indicate that the value is not an instance. | 3667 // indicate that the value is not an instance. |
| 3663 | 3668 |
| 3669 static const int kOffsetToMapCheckValue = 5; | |
| 3670 static const int kOffsetToResultValue = 21; | |
> **Søren Thygesen Gjesse**, 2011/03/09 15:21:53: Please comment on what instructions these words re…
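For orientation, here is a minimal C++ sketch of how these constants are used by the patching code further down, under the assumption (implied by the header comment) that rsp[1] holds the distance from the return address back to the inlined check sequence at the call site. The helper name `PatchMapCheck` and the raw-pointer signature are invented for illustration; only the two constants and the offsets come from the stub itself.

```cpp
#include <cassert>
#include <cstdint>
#include <cstring>

// Illustrative copies of the constants defined in the stub below.
static const int kOffsetToMapCheckValue = 5;
static const unsigned int kWordBeforeMapCheckValue = 0xBA49FF78;

// Hypothetical helper: patch the cached map into an instanceof call site.
void PatchMapCheck(uint8_t* return_address, intptr_t delta_to_check_site,
                   uint64_t map_word) {
  // rsp[0] - rsp[1] in the stub: walk back from the return address to the
  // start of the inlined check sequence.
  uint8_t* check_site = return_address - delta_to_check_site;

  // Debug-only sanity check: the four bytes just in front of the patched
  // field must match the expected machine code (kWordBeforeMapCheckValue),
  // proving we are really pointing at the slot that holds the cached map.
  uint32_t signature;
  std::memcpy(&signature, check_site + kOffsetToMapCheckValue - 4,
              sizeof(signature));
  assert(signature == kWordBeforeMapCheckValue);

  // Store the map into the 64-bit field kOffsetToMapCheckValue bytes into
  // the sequence, mirroring the movq through kScratchRegister below.
  std::memcpy(check_site + kOffsetToMapCheckValue, &map_word,
              sizeof(map_word));
}
```

kWordBeforeResultValue plays the same signature role for the single byte patched at kOffsetToResultValue further down.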
| 3671 static const unsigned int kWordBeforeMapCheckValue = 0xBA49FF78; | |
| 3672 static const unsigned int kWordBeforeResultValue = 0x458B4909; | |
| 3664 // None of the flags are supported on X64. | 3673 // None of the flags are supported on X64. |
> **Søren Thygesen Gjesse**, 2011/03/09 15:21:53: This comment seems no longer valid. None -> Some?
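The extra_stack_space value introduced just below is what reconciles the two stack layouts from the header comment: with the call-site cache, one extra word (the offset back to the inline cache) sits between the return address and the arguments, so every argument offset grows by one pointer. A small sketch under that assumption; the helper names are illustrative, not V8 API:

```cpp
#include <cstddef>

static const std::size_t kPointerSize = 8;  // x64

// Offsets of the stub's arguments relative to rsp, per the layouts in the
// header comment above. The extra word, when present, is rsp[1].
std::size_t FunctionPointerOffset(bool has_call_site_inline_check) {
  std::size_t extra_stack_space = has_call_site_inline_check ? kPointerSize : 0;
  return 1 * kPointerSize + extra_stack_space;  // rsp[1] or rsp[2]
}

std::size_t ValueOffset(bool has_call_site_inline_check) {
  std::size_t extra_stack_space = has_call_site_inline_check ? kPointerSize : 0;
  return 2 * kPointerSize + extra_stack_space;  // rsp[2] or rsp[3]
}
```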
| 3665 ASSERT(flags_ == kNoFlags); | 3674 ASSERT(flags_ == kNoFlags || HasCallSiteInlineCheck()); |
| 3675 int extra_stack_space = HasCallSiteInlineCheck() ? kPointerSize : 0; | |
| 3666 | 3676 |
| 3667 // Get the object - go slow case if it's a smi. | 3677 // Get the object - go slow case if it's a smi. |
| 3668 Label slow; | 3678 Label slow; |
| 3669 __ movq(rax, Operand(rsp, 2 * kPointerSize)); | 3679 |
| 3680 __ movq(rax, Operand(rsp, 2 * kPointerSize + extra_stack_space)); | |
| 3670 __ JumpIfSmi(rax, &slow); | 3681 __ JumpIfSmi(rax, &slow); |
| 3671 | 3682 |
| 3672 // Check that the left hand is a JS object. Leave its map in rax. | 3683 // Check that the left hand is a JS object. Leave its map in rax. |
| 3673 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rax); | 3684 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rax); |
| 3674 __ j(below, &slow); | 3685 __ j(below, &slow); |
| 3675 __ CmpInstanceType(rax, LAST_JS_OBJECT_TYPE); | 3686 __ CmpInstanceType(rax, LAST_JS_OBJECT_TYPE); |
| 3676 __ j(above, &slow); | 3687 __ j(above, &slow); |
| 3677 | 3688 |
| 3678 // Get the prototype of the function. | 3689 // Get the prototype of the function. |
| 3679 __ movq(rdx, Operand(rsp, 1 * kPointerSize)); | 3690 __ movq(rdx, Operand(rsp, 1 * kPointerSize + extra_stack_space)); |
| 3680 // rdx is function, rax is map. | 3691 // rdx is function, rax is map. |
| 3681 | 3692 |
| 3682 // Look up the function and the map in the instanceof cache. | 3693 // If there is a call site cache don't look in the global cache, but do the |
| 3683 NearLabel miss; | 3694 // real lookup and update the call site cache. |
| 3684 __ CompareRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex); | 3695 if (!HasCallSiteInlineCheck()) { |
| 3685 __ j(not_equal, &miss); | 3696 // Look up the function and the map in the instanceof cache. |
| 3686 __ CompareRoot(rax, Heap::kInstanceofCacheMapRootIndex); | 3697 NearLabel miss; |
| 3687 __ j(not_equal, &miss); | 3698 __ CompareRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex); |
| 3688 __ LoadRoot(rax, Heap::kInstanceofCacheAnswerRootIndex); | 3699 __ j(not_equal, &miss); |
| 3689 __ ret(2 * kPointerSize); | 3700 __ CompareRoot(rax, Heap::kInstanceofCacheMapRootIndex); |
| 3701 __ j(not_equal, &miss); | |
| 3702 __ LoadRoot(rax, Heap::kInstanceofCacheAnswerRootIndex); | |
| 3703 __ ret(2 * kPointerSize); | |
| 3704 __ bind(&miss); | |
| 3705 } | |
| 3690 | 3706 |
| 3691 __ bind(&miss); | |
| 3692 __ TryGetFunctionPrototype(rdx, rbx, &slow); | 3707 __ TryGetFunctionPrototype(rdx, rbx, &slow); |
| 3693 | 3708 |
| 3694 // Check that the function prototype is a JS object. | 3709 // Check that the function prototype is a JS object. |
| 3695 __ JumpIfSmi(rbx, &slow); | 3710 __ JumpIfSmi(rbx, &slow); |
| 3696 __ CmpObjectType(rbx, FIRST_JS_OBJECT_TYPE, kScratchRegister); | 3711 __ CmpObjectType(rbx, FIRST_JS_OBJECT_TYPE, kScratchRegister); |
| 3697 __ j(below, &slow); | 3712 __ j(below, &slow); |
| 3698 __ CmpInstanceType(kScratchRegister, LAST_JS_OBJECT_TYPE); | 3713 __ CmpInstanceType(kScratchRegister, LAST_JS_OBJECT_TYPE); |
| 3699 __ j(above, &slow); | 3714 __ j(above, &slow); |
| 3700 | 3715 |
| 3701 // Register mapping: | 3716 // Register mapping: |
| 3702 // rax is object map. | 3717 // rax is object map. |
| 3703 // rdx is function. | 3718 // rdx is function. |
| 3704 // rbx is function prototype. | 3719 // rbx is function prototype. |
| 3705 __ StoreRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex); | 3720 if (!HasCallSiteInlineCheck()) { |
| 3706 __ StoreRoot(rax, Heap::kInstanceofCacheMapRootIndex); | 3721 __ StoreRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex); |
| 3722 __ StoreRoot(rax, Heap::kInstanceofCacheMapRootIndex); | |
| 3723 } else { | |
| 3724 __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize)); | |
| 3725 __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize)); | |
| 3726 __ movq(Operand(kScratchRegister, kOffsetToMapCheckValue), rax); | |
| 3727 if (FLAG_debug_code) { | |
| 3728 __ movl(rdi, Immediate(kWordBeforeMapCheckValue)); | |
| 3729 __ cmpl(Operand(kScratchRegister, kOffsetToMapCheckValue - 4), rdi); | |
| 3730 __ Assert(equal, "InstanceofStub unexpected call site cache."); | |
| 3731 } | |
| 3732 } | |
| 3707 | 3733 |
| 3708 __ movq(rcx, FieldOperand(rax, Map::kPrototypeOffset)); | 3734 __ movq(rcx, FieldOperand(rax, Map::kPrototypeOffset)); |
| 3709 | 3735 |
| 3710 // Loop through the prototype chain looking for the function prototype. | 3736 // Loop through the prototype chain looking for the function prototype. |
| 3711 NearLabel loop, is_instance, is_not_instance; | 3737 NearLabel loop, is_instance, is_not_instance; |
| 3712 __ LoadRoot(kScratchRegister, Heap::kNullValueRootIndex); | 3738 __ LoadRoot(kScratchRegister, Heap::kNullValueRootIndex); |
| 3713 __ bind(&loop); | 3739 __ bind(&loop); |
| 3714 __ cmpq(rcx, rbx); | 3740 __ cmpq(rcx, rbx); |
| 3715 __ j(equal, &is_instance); | 3741 __ j(equal, &is_instance); |
| 3716 __ cmpq(rcx, kScratchRegister); | 3742 __ cmpq(rcx, kScratchRegister); |
| 3717 // The code at is_not_instance assumes that kScratchRegister contains a | 3743 // The code at is_not_instance assumes that kScratchRegister contains a |
| 3718 // non-zero GCable value (the null object in this case). | 3744 // non-zero GCable value (the null object in this case). |
| 3719 __ j(equal, &is_not_instance); | 3745 __ j(equal, &is_not_instance); |
| 3720 __ movq(rcx, FieldOperand(rcx, HeapObject::kMapOffset)); | 3746 __ movq(rcx, FieldOperand(rcx, HeapObject::kMapOffset)); |
| 3721 __ movq(rcx, FieldOperand(rcx, Map::kPrototypeOffset)); | 3747 __ movq(rcx, FieldOperand(rcx, Map::kPrototypeOffset)); |
| 3722 __ jmp(&loop); | 3748 __ jmp(&loop); |
| 3723 | 3749 |
| 3724 __ bind(&is_instance); | 3750 __ bind(&is_instance); |
| 3725 __ xorl(rax, rax); | 3751 if (!HasCallSiteInlineCheck()) { |
| 3726 // Store bitwise zero in the cache. This is a Smi in GC terms. | 3752 __ xorl(rax, rax); |
| 3727 STATIC_ASSERT(kSmiTag == 0); | 3753 // Store bitwise zero in the cache. This is a Smi in GC terms. |
| 3728 __ StoreRoot(rax, Heap::kInstanceofCacheAnswerRootIndex); | 3754 STATIC_ASSERT(kSmiTag == 0); |
| 3729 __ ret(2 * kPointerSize); | 3755 __ StoreRoot(rax, Heap::kInstanceofCacheAnswerRootIndex); |
| 3756 } else { | |
| 3757 // Store offset of true in the root array at the inline check site. | |
| 3758 ASSERT((Heap::kTrueValueRootIndex << kPointerSizeLog2) - kRootRegisterBias | |
| 3759 == 0xB0 - 0x100); | |
| 3760 __ movl(rax, Immediate(0xB0)); // TrueValue is at -10 * kPointerSize. | |
| 3761 __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize)); | |
| 3762 __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize)); | |
| 3763 __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax); | |
| 3764 if (FLAG_debug_code) { | |
| 3765 __ movl(rax, Immediate(kWordBeforeResultValue)); | |
| 3766 __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax); | |
| 3767 __ Assert(equal, "InstanceofStub unexpected call site cache."); | |
| 3768 } | |
| 3769 __ xorl(rax, rax); | |
| 3770 } | |
| 3771 __ ret(2 * kPointerSize + extra_stack_space); | |
| 3730 | 3772 |
| 3731 __ bind(&is_not_instance); | 3773 __ bind(&is_not_instance); |
| 3732 // We have to store a non-zero value in the cache. | 3774 if (!HasCallSiteInlineCheck()) { |
| 3733 __ StoreRoot(kScratchRegister, Heap::kInstanceofCacheAnswerRootIndex); | 3775 // We have to store a non-zero value in the cache. |
| 3734 __ ret(2 * kPointerSize); | 3776 __ StoreRoot(kScratchRegister, Heap::kInstanceofCacheAnswerRootIndex); |
| 3777 } else { | |
| 3778 // Store offset of false in the root array at the inline check site. | |
| 3779 ASSERT((Heap::kFalseValueRootIndex << kPointerSizeLog2) - kRootRegisterBias | |
| 3780 == 0xB8 - 0x100); | |
| 3781 __ movl(rax, Immediate(0xB8)); // TrueValue is at -9 * kPointerSize. | |
> **Søren Thygesen Gjesse**, 2011/03/09 15:21:53: TrueValue -> FalseValue
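The 0xB0 and 0xB8 immediates are easiest to read as signed 8-bit displacements off the root register, which is what the two ASSERTs pin down via the "- 0x100" term, on the assumption that the patched byte is the displacement of a root-register-relative load at the call site. A compile-time sketch of that arithmetic with kPointerSize fixed at 8; the real root indices and kRootRegisterBias live in V8's Heap and assembler headers and are not reproduced here:

```cpp
// Compile-time check of the displacement arithmetic behind the ASSERTs above.
static const int kPointerSize = 8;  // x64, illustration only

static_assert(0xB0 - 0x100 == -10 * kPointerSize,
              "0xB0 is the signed-byte displacement of the true value slot");
static_assert(0xB8 - 0x100 == -9 * kPointerSize,
              "0xB8 is the signed-byte displacement of the false value slot");
```

Read this way, the comment next to 0xB8 should indeed say FalseValue at -9 * kPointerSize, as the review comment above points out.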
| 3782 __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize)); | |
| 3783 __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize)); | |
| 3784 __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax); | |
| 3785 if (FLAG_debug_code) { | |
| 3786 __ movl(rax, Immediate(kWordBeforeResultValue)); | |
| 3787 __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax); | |
| 3788 __ Assert(equal, "InstanceofStub unexpected call site cache (mov)"); | |
| 3789 } | |
| 3790 } | |
| 3791 __ ret(2 * kPointerSize + extra_stack_space); | |
| 3735 | 3792 |
| 3736 // Slow-case: Go through the JavaScript implementation. | 3793 // Slow-case: Go through the JavaScript implementation. |
| 3737 __ bind(&slow); | 3794 __ bind(&slow); |
| 3795 if (HasCallSiteInlineCheck()) { | |
| 3796 // Remove extra value from the stack. | |
| 3797 __ pop(rcx); | |
| 3798 __ pop(rax); | |
| 3799 __ push(rcx); | |
| 3800 } | |
| 3738 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION); | 3801 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION); |
| 3739 } | 3802 } |
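Stepping back from the patching details, the core of the stub (the loop between the is_instance and is_not_instance labels) is a plain prototype-chain walk. A conceptual C++ rendering with stand-in HeapObject and Map structs that model only the two fields the loop touches; it sketches the control flow, not V8's object layout:

```cpp
struct Map;
struct HeapObject { Map* map; };
struct Map { HeapObject* prototype; };

// Mirrors the loop in InstanceofStub::Generate: follow the prototype chain
// starting from the object's map until we hit either the function's
// prototype (instance) or null (not an instance).
bool IsInPrototypeChain(Map* object_map, HeapObject* function_prototype,
                        HeapObject* null_value) {
  HeapObject* current = object_map->prototype;
  for (;;) {
    if (current == function_prototype) return true;   // is_instance
    if (current == null_value) return false;          // is_not_instance
    current = current->map->prototype;                // next link in the chain
  }
}
```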
| 3740 | 3803 |
| 3741 | 3804 |
| 3742 // Passing arguments in registers is not supported. | 3805 // Passing arguments in registers is not supported. |
| 3743 Register InstanceofStub::left() { return no_reg; } | 3806 Register InstanceofStub::left() { return no_reg; } |
| 3744 | 3807 |
| 3745 | 3808 |
| 3746 Register InstanceofStub::right() { return no_reg; } | 3809 Register InstanceofStub::right() { return no_reg; } |
| 3747 | 3810 |
| (...skipping 1392 matching lines...) | |
| 5140 FieldOperand(elements, PixelArray::kExternalPointerOffset)); | 5203 FieldOperand(elements, PixelArray::kExternalPointerOffset)); |
| 5141 __ movb(Operand(external_pointer, untagged_key, times_1, 0), untagged_value); | 5204 __ movb(Operand(external_pointer, untagged_key, times_1, 0), untagged_value); |
| 5142 __ ret(0); // Return value in eax. | 5205 __ ret(0); // Return value in eax. |
| 5143 } | 5206 } |
| 5144 | 5207 |
| 5145 #undef __ | 5208 #undef __ |
| 5146 | 5209 |
| 5147 } } // namespace v8::internal | 5210 } } // namespace v8::internal |
| 5148 | 5211 |
| 5149 #endif // V8_TARGET_ARCH_X64 | 5212 #endif // V8_TARGET_ARCH_X64 |