Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(999)

Side by Side Diff: src/x64/code-stubs-x64.cc

Issue 1304633002: Correctify instanceof and make it optimizable. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: REBASE. Add MIPS/MIPS64 ports. Created 5 years, 3 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « src/v8natives.js ('k') | src/x64/interface-descriptors-x64.cc » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2013 the V8 project authors. All rights reserved. 1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #if V8_TARGET_ARCH_X64 5 #if V8_TARGET_ARCH_X64
6 6
7 #include "src/bootstrapper.h" 7 #include "src/bootstrapper.h"
8 #include "src/code-stubs.h" 8 #include "src/code-stubs.h"
9 #include "src/codegen.h" 9 #include "src/codegen.h"
10 #include "src/ic/handler-compiler.h" 10 #include "src/ic/handler-compiler.h"
(...skipping 2666 matching lines...) Expand 10 before | Expand all | Expand 10 after
2677 __ popq(r13); 2677 __ popq(r13);
2678 __ popq(r12); 2678 __ popq(r12);
2679 __ addp(rsp, Immediate(2 * kPointerSize)); // remove markers 2679 __ addp(rsp, Immediate(2 * kPointerSize)); // remove markers
2680 2680
2681 // Restore frame pointer and return. 2681 // Restore frame pointer and return.
2682 __ popq(rbp); 2682 __ popq(rbp);
2683 __ ret(0); 2683 __ ret(0);
2684 } 2684 }
2685 2685
2686 2686
2687 void InstanceofStub::Generate(MacroAssembler* masm) { 2687 void InstanceOfStub::Generate(MacroAssembler* masm) {
2688 // Implements "value instanceof function" operator. 2688 Register const object = rdx; // Object (lhs).
2689 // Expected input state with no inline cache: 2689 Register const function = rax; // Function (rhs).
2690 // rsp[0] : return address 2690 Register const object_map = rcx; // Map of {object}.
2691 // rsp[8] : function pointer 2691 Register const function_map = r8; // Map of {function}.
2692 // rsp[16] : value 2692 Register const function_prototype = rdi; // Prototype of {function}.
2693 // Expected input state with an inline one-element cache:
2694 // rsp[0] : return address
2695 // rsp[8] : offset from return address to location of inline cache
2696 // rsp[16] : function pointer
2697 // rsp[24] : value
2698 // Returns a bitwise zero to indicate that the value
2699 // is an instance of the function and anything else to
2700 // indicate that the value is not an instance.
2701 2693
2702 // Fixed register usage throughout the stub. 2694 DCHECK(object.is(InstanceOfDescriptor::LeftRegister()));
2703 Register object = rax; // Object (lhs). 2695 DCHECK(function.is(InstanceOfDescriptor::RightRegister()));
2704 Register map = rbx; // Map of the object.
2705 Register function = rdx; // Function (rhs).
2706 Register prototype = rdi; // Prototype of the function.
2707 Register scratch = rcx;
2708 2696
2709 static const int kOffsetToMapCheckValue = 2; 2697 // Check if {object} is a smi.
2710 static const int kOffsetToResultValue = kPointerSize == kInt64Size ? 18 : 14; 2698 Label object_is_smi;
2711 // The last 4 bytes of the instruction sequence 2699 __ JumpIfSmi(object, &object_is_smi, Label::kNear);
2712 // movp(rdi, FieldOperand(rax, HeapObject::kMapOffset))
2713 // Move(kScratchRegister, Factory::the_hole_value())
2714 // in front of the hole value address.
2715 static const unsigned int kWordBeforeMapCheckValue =
2716 kPointerSize == kInt64Size ? 0xBA49FF78 : 0xBA41FF78;
2717 // The last 4 bytes of the instruction sequence
2718 // __ j(not_equal, &cache_miss);
2719 // __ LoadRoot(ToRegister(instr->result()), Heap::kTheHoleValueRootIndex);
2720 // before the offset of the hole value in the root array.
2721 static const unsigned int kWordBeforeResultValue =
2722 kPointerSize == kInt64Size ? 0x458B4906 : 0x458B4106;
2723 2700
2724 int extra_argument_offset = HasCallSiteInlineCheck() ? 1 : 0; 2701 // Lookup the {function} and the {object} map in the global instanceof cache.
2702 // Note: This is safe because we clear the global instanceof cache whenever
2703 // we change the prototype of any object.
2704 Label fast_case, slow_case;
2705 __ movp(object_map, FieldOperand(object, HeapObject::kMapOffset));
2706 __ CompareRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
2707 __ j(not_equal, &fast_case, Label::kNear);
2708 __ CompareRoot(object_map, Heap::kInstanceofCacheMapRootIndex);
2709 __ j(not_equal, &fast_case, Label::kNear);
2710 __ LoadRoot(rax, Heap::kInstanceofCacheAnswerRootIndex);
2711 __ ret(0);
2725 2712
2726 DCHECK_EQ(object.code(), InstanceofStub::left().code()); 2713 // If {object} is a smi we can safely return false if {function} is a JS
2727 DCHECK_EQ(function.code(), InstanceofStub::right().code()); 2714 // function, otherwise we have to miss to the runtime and throw an exception.
2715 __ bind(&object_is_smi);
2716 __ JumpIfSmi(function, &slow_case);
2717 __ CmpObjectType(function, JS_FUNCTION_TYPE, function_map);
2718 __ j(not_equal, &slow_case);
2719 __ LoadRoot(rax, Heap::kFalseValueRootIndex);
2720 __ ret(0);
2728 2721
2729 // Get the object and function - they are always both needed. 2722 // Fast-case: The {function} must be a valid JSFunction.
2730 // Go slow case if the object is a smi. 2723 __ bind(&fast_case);
2731 Label slow; 2724 __ JumpIfSmi(function, &slow_case);
2732 StackArgumentsAccessor args(rsp, 2 + extra_argument_offset, 2725 __ CmpObjectType(function, JS_FUNCTION_TYPE, function_map);
2733 ARGUMENTS_DONT_CONTAIN_RECEIVER); 2726 __ j(not_equal, &slow_case);
2734 if (!HasArgsInRegisters()) {
2735 __ movp(object, args.GetArgumentOperand(0));
2736 __ movp(function, args.GetArgumentOperand(1));
2737 }
2738 __ JumpIfSmi(object, &slow);
2739 2727
2740 // Check that the left hand is a JS object. Leave its map in rax. 2728 // Ensure that {function} has an instance prototype.
2741 __ CmpObjectType(object, FIRST_SPEC_OBJECT_TYPE, map); 2729 __ testb(FieldOperand(function_map, Map::kBitFieldOffset),
2742 __ j(below, &slow); 2730 Immediate(1 << Map::kHasNonInstancePrototype));
2743 __ CmpInstanceType(map, LAST_SPEC_OBJECT_TYPE); 2731 __ j(not_zero, &slow_case);
2744 __ j(above, &slow);
2745 2732
2746 // If there is a call site cache don't look in the global cache, but do the 2733 // Ensure that {function} is not bound.
2747 // real lookup and update the call site cache. 2734 Register const shared_info = kScratchRegister;
2748 if (!HasCallSiteInlineCheck() && !ReturnTrueFalseObject()) { 2735 __ movp(shared_info,
2749 // Look up the function and the map in the instanceof cache. 2736 FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
2750 Label miss; 2737 __ TestBitSharedFunctionInfoSpecialField(
2751 __ CompareRoot(function, Heap::kInstanceofCacheFunctionRootIndex); 2738 shared_info, SharedFunctionInfo::kCompilerHintsOffset,
2752 __ j(not_equal, &miss, Label::kNear); 2739 SharedFunctionInfo::kBoundFunction);
2753 __ CompareRoot(map, Heap::kInstanceofCacheMapRootIndex); 2740 __ j(not_zero, &slow_case);
2754 __ j(not_equal, &miss, Label::kNear);
2755 __ LoadRoot(rax, Heap::kInstanceofCacheAnswerRootIndex);
2756 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
2757 __ bind(&miss);
2758 }
2759 2741
2760 // Get the prototype of the function. 2742 // Get the "prototype" (or initial map) of the {function}.
2761 __ TryGetFunctionPrototype(function, prototype, &slow, true); 2743 __ movp(function_prototype,
2744 FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
2745 __ AssertNotSmi(function_prototype);
2762 2746
2763 // Check that the function prototype is a JS object. 2747 // Resolve the prototype if the {function} has an initial map. Afterwards the
2764 __ JumpIfSmi(prototype, &slow); 2748 // {function_prototype} will be either the JSReceiver prototype object or the
2765 __ CmpObjectType(prototype, FIRST_SPEC_OBJECT_TYPE, kScratchRegister); 2749 // hole value, which means that no instances of the {function} were created so
2766 __ j(below, &slow); 2750 // far and hence we should return false.
2767 __ CmpInstanceType(kScratchRegister, LAST_SPEC_OBJECT_TYPE); 2751 Label function_prototype_valid;
2768 __ j(above, &slow); 2752 Register const function_prototype_map = kScratchRegister;
2753 __ CmpObjectType(function_prototype, MAP_TYPE, function_prototype_map);
2754 __ j(not_equal, &function_prototype_valid, Label::kNear);
2755 __ movp(function_prototype,
2756 FieldOperand(function_prototype, Map::kPrototypeOffset));
2757 __ bind(&function_prototype_valid);
2758 __ AssertNotSmi(function_prototype);
2769 2759
2770 // Update the global instanceof or call site inlined cache with the current 2760 // Update the global instanceof cache with the current {object} map and
2771 // map and function. The cached answer will be set when it is known below. 2761 // {function}. The cached answer will be set when it is known below.
2772 if (!HasCallSiteInlineCheck()) { 2762 __ StoreRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
2773 __ StoreRoot(function, Heap::kInstanceofCacheFunctionRootIndex); 2763 __ StoreRoot(object_map, Heap::kInstanceofCacheMapRootIndex);
2774 __ StoreRoot(map, Heap::kInstanceofCacheMapRootIndex);
2775 } else {
2776 // The constants for the code patching are based on push instructions
2777 // at the call site.
2778 DCHECK(!HasArgsInRegisters());
2779 // Get return address and delta to inlined map check.
2780 __ movq(kScratchRegister, StackOperandForReturnAddress(0));
2781 __ subp(kScratchRegister, args.GetArgumentOperand(2));
2782 if (FLAG_debug_code) {
2783 __ movl(scratch, Immediate(kWordBeforeMapCheckValue));
2784 __ cmpl(Operand(kScratchRegister, kOffsetToMapCheckValue - 4), scratch);
2785 __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheCheck);
2786 }
2787 __ movp(kScratchRegister,
2788 Operand(kScratchRegister, kOffsetToMapCheckValue));
2789 __ movp(Operand(kScratchRegister, 0), map);
2790 2764
2791 __ movp(r8, map); 2765 // Loop through the prototype chain looking for the {function} prototype.
2792 // Scratch points at the cell payload. Calculate the start of the object. 2766 // Assume true, and change to false if not found.
2793 __ subp(kScratchRegister, Immediate(Cell::kValueOffset - 1)); 2767 Register const object_prototype = object_map;
2794 __ RecordWriteField(kScratchRegister, Cell::kValueOffset, r8, function, 2768 Label done, loop;
2795 kDontSaveFPRegs, OMIT_REMEMBERED_SET, OMIT_SMI_CHECK); 2769 __ LoadRoot(rax, Heap::kTrueValueRootIndex);
2796 } 2770 __ bind(&loop);
2771 __ movp(object_prototype, FieldOperand(object_map, Map::kPrototypeOffset));
2772 __ cmpp(object_prototype, function_prototype);
2773 __ j(equal, &done, Label::kNear);
2774 __ CompareRoot(object_prototype, Heap::kNullValueRootIndex);
2775 __ movp(object_map, FieldOperand(object_prototype, HeapObject::kMapOffset));
2776 __ j(not_equal, &loop);
2777 __ LoadRoot(rax, Heap::kFalseValueRootIndex);
2778 __ bind(&done);
2779 __ StoreRoot(rax, Heap::kInstanceofCacheAnswerRootIndex);
2780 __ ret(0);
2797 2781
2798 // Loop through the prototype chain looking for the function prototype. 2782 // Slow-case: Call the runtime function.
2799 __ movp(scratch, FieldOperand(map, Map::kPrototypeOffset)); 2783 __ bind(&slow_case);
2800 Label loop, is_instance, is_not_instance; 2784 __ PopReturnAddressTo(kScratchRegister);
2801 __ LoadRoot(kScratchRegister, Heap::kNullValueRootIndex); 2785 __ Push(object);
2802 __ bind(&loop); 2786 __ Push(function);
2803 __ cmpp(scratch, prototype); 2787 __ PushReturnAddressFrom(kScratchRegister);
2804 __ j(equal, &is_instance, Label::kNear); 2788 __ TailCallRuntime(Runtime::kInstanceOf, 2, 1);
2805 __ cmpp(scratch, kScratchRegister);
2806 // The code at is_not_instance assumes that kScratchRegister contains a
2807 // non-zero GCable value (the null object in this case).
2808 __ j(equal, &is_not_instance, Label::kNear);
2809 __ movp(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
2810 __ movp(scratch, FieldOperand(scratch, Map::kPrototypeOffset));
2811 __ jmp(&loop);
2812
2813 __ bind(&is_instance);
2814 if (!HasCallSiteInlineCheck()) {
2815 __ xorl(rax, rax);
2816 // Store bitwise zero in the cache. This is a Smi in GC terms.
2817 STATIC_ASSERT(kSmiTag == 0);
2818 __ StoreRoot(rax, Heap::kInstanceofCacheAnswerRootIndex);
2819 if (ReturnTrueFalseObject()) {
2820 __ LoadRoot(rax, Heap::kTrueValueRootIndex);
2821 }
2822 } else {
2823 // Store offset of true in the root array at the inline check site.
2824 int true_offset = 0x100 +
2825 (Heap::kTrueValueRootIndex << kPointerSizeLog2) - kRootRegisterBias;
2826 // Assert it is a 1-byte signed value.
2827 DCHECK(true_offset >= 0 && true_offset < 0x100);
2828 __ movl(rax, Immediate(true_offset));
2829 __ movq(kScratchRegister, StackOperandForReturnAddress(0));
2830 __ subp(kScratchRegister, args.GetArgumentOperand(2));
2831 __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax);
2832 if (FLAG_debug_code) {
2833 __ movl(rax, Immediate(kWordBeforeResultValue));
2834 __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax);
2835 __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov);
2836 }
2837 if (!ReturnTrueFalseObject()) {
2838 __ Set(rax, 0);
2839 }
2840 }
2841 __ ret(((HasArgsInRegisters() ? 0 : 2) + extra_argument_offset) *
2842 kPointerSize);
2843
2844 __ bind(&is_not_instance);
2845 if (!HasCallSiteInlineCheck()) {
2846 // We have to store a non-zero value in the cache.
2847 __ StoreRoot(kScratchRegister, Heap::kInstanceofCacheAnswerRootIndex);
2848 if (ReturnTrueFalseObject()) {
2849 __ LoadRoot(rax, Heap::kFalseValueRootIndex);
2850 }
2851 } else {
2852 // Store offset of false in the root array at the inline check site.
2853 int false_offset = 0x100 +
2854 (Heap::kFalseValueRootIndex << kPointerSizeLog2) - kRootRegisterBias;
2855 // Assert it is a 1-byte signed value.
2856 DCHECK(false_offset >= 0 && false_offset < 0x100);
2857 __ movl(rax, Immediate(false_offset));
2858 __ movq(kScratchRegister, StackOperandForReturnAddress(0));
2859 __ subp(kScratchRegister, args.GetArgumentOperand(2));
2860 __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax);
2861 if (FLAG_debug_code) {
2862 __ movl(rax, Immediate(kWordBeforeResultValue));
2863 __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax);
2864 __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov);
2865 }
2866 }
2867 __ ret(((HasArgsInRegisters() ? 0 : 2) + extra_argument_offset) *
2868 kPointerSize);
2869
2870 // Slow-case: Go through the JavaScript implementation.
2871 __ bind(&slow);
2872 if (!ReturnTrueFalseObject()) {
2873 // Tail call the builtin which returns 0 or 1.
2874 DCHECK(!HasArgsInRegisters());
2875 if (HasCallSiteInlineCheck()) {
2876 // Remove extra value from the stack.
2877 __ PopReturnAddressTo(rcx);
2878 __ Pop(rax);
2879 __ PushReturnAddressFrom(rcx);
2880 }
2881 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
2882 } else {
2883 // Call the builtin and convert 0/1 to true/false.
2884 {
2885 FrameScope scope(masm, StackFrame::INTERNAL);
2886 __ Push(object);
2887 __ Push(function);
2888 __ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION);
2889 }
2890 Label true_value, done;
2891 __ testq(rax, rax);
2892 __ j(zero, &true_value, Label::kNear);
2893 __ LoadRoot(rax, Heap::kFalseValueRootIndex);
2894 __ jmp(&done, Label::kNear);
2895 __ bind(&true_value);
2896 __ LoadRoot(rax, Heap::kTrueValueRootIndex);
2897 __ bind(&done);
2898 __ ret(((HasArgsInRegisters() ? 0 : 2) + extra_argument_offset) *
2899 kPointerSize);
2900 }
2901 } 2789 }
2902 2790
2903 2791
2904 // ------------------------------------------------------------------------- 2792 // -------------------------------------------------------------------------
2905 // StringCharCodeAtGenerator 2793 // StringCharCodeAtGenerator
2906 2794
2907 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) { 2795 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
2908 // If the receiver is a smi trigger the non-string case. 2796 // If the receiver is a smi trigger the non-string case.
2909 if (check_mode_ == RECEIVER_IS_UNKNOWN) { 2797 if (check_mode_ == RECEIVER_IS_UNKNOWN) {
2910 __ JumpIfSmi(object_, receiver_not_string_); 2798 __ JumpIfSmi(object_, receiver_not_string_);
(...skipping 2675 matching lines...) Expand 10 before | Expand all | Expand 10 after
5586 kStackSpace, nullptr, return_value_operand, NULL); 5474 kStackSpace, nullptr, return_value_operand, NULL);
5587 } 5475 }
5588 5476
5589 5477
5590 #undef __ 5478 #undef __
5591 5479
5592 } // namespace internal 5480 } // namespace internal
5593 } // namespace v8 5481 } // namespace v8
5594 5482
5595 #endif // V8_TARGET_ARCH_X64 5483 #endif // V8_TARGET_ARCH_X64
OLDNEW
« no previous file with comments | « src/v8natives.js ('k') | src/x64/interface-descriptors-x64.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698