| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #if V8_TARGET_ARCH_IA32 | 5 #if V8_TARGET_ARCH_IA32 |
| 6 | 6 |
| 7 #include "src/base/bits.h" | 7 #include "src/base/bits.h" |
| 8 #include "src/bootstrapper.h" | 8 #include "src/bootstrapper.h" |
| 9 #include "src/code-stubs.h" | 9 #include "src/code-stubs.h" |
| 10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
| (...skipping 2700 matching lines...) |
| 2711 __ pop(esi); | 2711 __ pop(esi); |
| 2712 __ pop(edi); | 2712 __ pop(edi); |
| 2713 __ add(esp, Immediate(2 * kPointerSize)); // remove markers | 2713 __ add(esp, Immediate(2 * kPointerSize)); // remove markers |
| 2714 | 2714 |
| 2715 // Restore frame pointer and return. | 2715 // Restore frame pointer and return. |
| 2716 __ pop(ebp); | 2716 __ pop(ebp); |
| 2717 __ ret(0); | 2717 __ ret(0); |
| 2718 } | 2718 } |
| 2719 | 2719 |
| 2720 | 2720 |
| 2721 // Generate stub code for instanceof. | 2721 void InstanceOfStub::Generate(MacroAssembler* masm) { |
| 2722 // This code can patch a call site inlined cache of the instance of check, | 2722 Register const object = edx; // Object (lhs). |
| 2723 // which looks like this. | 2723 Register const function = eax; // Function (rhs). |
| 2724 // | 2724 Register const object_map = ecx; // Map of {object}. |
| 2725 // 81 ff XX XX XX XX cmp edi, <the hole, patched to a map> | 2725 Register const function_map = ebx; // Map of {function}. |
| 2726 // 75 0a jne <some near label> | 2726 Register const function_prototype = function_map; // Prototype of {function}. |
| 2727 // b8 XX XX XX XX mov eax, <the hole, patched to either true or false> | 2727 Register const scratch = edi; |
| 2728 // | |
| 2729 // If call site patching is requested the stack will have the delta from the | |
| 2730 // return address to the cmp instruction just below the return address. This | |
| 2731 // also means that call site patching can only take place with arguments in | |
| 2732 // registers. TOS looks like this when call site patching is requested | |
| 2733 // | |
| 2734 // esp[0] : return address | |
| 2735 // esp[4] : delta from return address to cmp instruction | |
| 2736 // | |
| 2737 void InstanceofStub::Generate(MacroAssembler* masm) { | |
| 2738 // Call site inlining and patching implies arguments in registers. | |
| 2739 DCHECK(HasArgsInRegisters() || !HasCallSiteInlineCheck()); | |
| 2740 | 2728 |
| 2741 // Fixed register usage throughout the stub. | 2729 DCHECK(object.is(InstanceOfDescriptor::LeftRegister())); |
| 2742 Register object = eax; // Object (lhs). | 2730 DCHECK(function.is(InstanceOfDescriptor::RightRegister())); |
| 2743 Register map = ebx; // Map of the object. | |
| 2744 Register function = edx; // Function (rhs). | |
| 2745 Register prototype = edi; // Prototype of the function. | |
| 2746 Register scratch = ecx; | |
| 2747 | 2731 |
| 2748 // Constants describing the call site code to patch. | 2732 // Check if {object} is a smi. |
| 2749 static const int kDeltaToCmpImmediate = 2; | 2733 Label object_is_smi; |
| 2750 static const int kDeltaToMov = 8; | 2734 __ JumpIfSmi(object, &object_is_smi, Label::kNear); |
| 2751 static const int kDeltaToMovImmediate = 9; | |
| 2752 static const int8_t kCmpEdiOperandByte1 = bit_cast<int8_t, uint8_t>(0x3b); | |
| 2753 static const int8_t kCmpEdiOperandByte2 = bit_cast<int8_t, uint8_t>(0x3d); | |
| 2754 static const int8_t kMovEaxImmediateByte = bit_cast<int8_t, uint8_t>(0xb8); | |
| 2755 | 2735 |
| 2756 DCHECK_EQ(object.code(), InstanceofStub::left().code()); | 2736 // Lookup the {function} and the {object} map in the global instanceof cache. |
| 2757 DCHECK_EQ(function.code(), InstanceofStub::right().code()); | 2737 Label fast_case, slow_case; |
| | 2738 __ mov(object_map, FieldOperand(object, HeapObject::kMapOffset)); |
| | 2739 __ CompareRoot(function, scratch, Heap::kInstanceofCacheFunctionRootIndex); |
| | 2740 __ j(not_equal, &fast_case, Label::kNear); |
| | 2741 __ CompareRoot(object_map, scratch, Heap::kInstanceofCacheMapRootIndex); |
| | 2742 __ j(not_equal, &fast_case, Label::kNear); |
| | 2743 __ LoadRoot(eax, Heap::kInstanceofCacheAnswerRootIndex); |
| | 2744 __ ret(0); |
| 2758 | 2745 |
| 2759 // Get the object and function - they are always both needed. | 2746 // If {object} is a smi we can safely return false if {function} is a JS |
| 2760 Label slow, not_js_object; | 2747 // function, otherwise we have to miss to the runtime and throw an exception. |
| 2761 if (!HasArgsInRegisters()) { | 2748 __ bind(&object_is_smi); |
| 2762 __ mov(object, Operand(esp, 2 * kPointerSize)); | 2749 __ JumpIfSmi(function, &slow_case); |
| 2763 __ mov(function, Operand(esp, 1 * kPointerSize)); | 2750 __ CmpObjectType(function, JS_FUNCTION_TYPE, function_map); |
| 2764 } | 2751 __ j(not_equal, &slow_case); |
| | 2752 __ LoadRoot(eax, Heap::kFalseValueRootIndex); |
| | 2753 __ ret(0); |
| 2765 | 2754 |
| 2766 // Check that the left hand is a JS object. | 2755 // Fast-case: The {function} must be a valid JSFunction. |
| 2767 __ JumpIfSmi(object, ¬_js_object); | 2756 __ bind(&fast_case); |
| 2768 __ IsObjectJSObjectType(object, map, scratch, ¬_js_object); | 2757 __ JumpIfSmi(function, &slow_case); |
| | 2758 __ CmpObjectType(function, JS_FUNCTION_TYPE, function_map); |
| | 2759 __ j(not_equal, &slow_case); |
| 2769 | 2760 |
| 2770 // If there is a call site cache don't look in the global cache, but do the | 2761 // Ensure that {function} has an instance prototype. |
| 2771 // real lookup and update the call site cache. | 2762 __ test_b(FieldOperand(function_map, Map::kBitFieldOffset), |
| 2772 if (!HasCallSiteInlineCheck() && !ReturnTrueFalseObject()) { | 2763 static_cast<uint8_t>(1 << Map::kHasNonInstancePrototype)); |
| 2773 // Look up the function and the map in the instanceof cache. | 2764 __ j(not_zero, &slow_case); |
| 2774 Label miss; | |
| 2775 __ CompareRoot(function, scratch, Heap::kInstanceofCacheFunctionRootIndex); | |
| 2776 __ j(not_equal, &miss, Label::kNear); | |
| 2777 __ CompareRoot(map, scratch, Heap::kInstanceofCacheMapRootIndex); | |
| 2778 __ j(not_equal, &miss, Label::kNear); | |
| 2779 __ LoadRoot(eax, Heap::kInstanceofCacheAnswerRootIndex); | |
| 2780 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize); | |
| 2781 __ bind(&miss); | |
| 2782 } | |
| 2783 | 2765 |
| 2784 // Get the prototype of the function. | 2766 // Ensure that {function} is not bound. |
| 2785 __ TryGetFunctionPrototype(function, prototype, scratch, &slow, true); | 2767 Register const shared_info = scratch; |
| | 2768 __ mov(shared_info, |
| | 2769 FieldOperand(function, JSFunction::kSharedFunctionInfoOffset)); |
| | 2770 __ BooleanBitTest(shared_info, SharedFunctionInfo::kCompilerHintsOffset, |
| | 2771 SharedFunctionInfo::kBoundFunction); |
| | 2772 __ j(not_zero, &slow_case); |
| 2786 | 2773 |
| 2787 // Check that the function prototype is a JS object. | 2774 // Get the "prototype" (or initial map) of the {function}. |
| 2788 __ JumpIfSmi(prototype, &slow); | 2775 __ mov(function_prototype, |
| 2789 __ IsObjectJSObjectType(prototype, scratch, scratch, &slow); | 2776 FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset)); |
| | 2777 __ AssertNotSmi(function_prototype); |
| 2790 | 2778 |
| 2791 // Update the global instanceof or call site inlined cache with the current | 2779 // Resolve the prototype if the {function} has an initial map. Afterwards the |
| 2792 // map and function. The cached answer will be set when it is known below. | 2780 // {function_prototype} will be either the JSReceiver prototype object or the |
| 2793 if (!HasCallSiteInlineCheck()) { | 2781 // hole value, which means that no instances of the {function} were created so |
| 2794 __ StoreRoot(map, scratch, Heap::kInstanceofCacheMapRootIndex); | 2782 // far and hence we should return false. |
| 2795 __ StoreRoot(function, scratch, Heap::kInstanceofCacheFunctionRootIndex); | 2783 Label function_prototype_valid; |
| 2796 } else { | 2784 Register const function_prototype_map = scratch; |
| 2797 // The constants for the code patching are based on no push instructions | 2785 __ CmpObjectType(function_prototype, MAP_TYPE, function_prototype_map); |
| 2798 // at the call site. | 2786 __ j(not_equal, &function_prototype_valid, Label::kNear); |
| 2799 DCHECK(HasArgsInRegisters()); | 2787 __ mov(function_prototype, |
| 2800 // Get return address and delta to inlined map check. | 2788 FieldOperand(function_prototype, Map::kPrototypeOffset)); |
| 2801 __ mov(scratch, Operand(esp, 0 * kPointerSize)); | 2789 __ bind(&function_prototype_valid); |
| 2802 __ sub(scratch, Operand(esp, 1 * kPointerSize)); | 2790 __ AssertNotSmi(function_prototype); |
| 2803 if (FLAG_debug_code) { | |
| 2804 __ cmpb(Operand(scratch, 0), kCmpEdiOperandByte1); | |
| 2805 __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheCmp1); | |
| 2806 __ cmpb(Operand(scratch, 1), kCmpEdiOperandByte2); | |
| 2807 __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheCmp2); | |
| 2808 } | |
| 2809 __ mov(scratch, Operand(scratch, kDeltaToCmpImmediate)); | |
| 2810 __ mov(Operand(scratch, 0), map); | |
| 2811 __ push(map); | |
| 2812 // Scratch points at the cell payload. Calculate the start of the object. | |
| 2813 __ sub(scratch, Immediate(Cell::kValueOffset - 1)); | |
| 2814 __ RecordWriteField(scratch, Cell::kValueOffset, map, function, | |
| 2815 kDontSaveFPRegs, OMIT_REMEMBERED_SET, OMIT_SMI_CHECK); | |
| 2816 __ pop(map); | |
| 2817 } | |
| 2818 | 2791 |
| 2819 // Loop through the prototype chain of the object looking for the function | 2792 // Update the global instanceof cache with the current {object} map and |
| 2820 // prototype. | 2793 // {function}. The cached answer will be set when it is known below. |
| 2821 __ mov(scratch, FieldOperand(map, Map::kPrototypeOffset)); | 2794 __ StoreRoot(function, scratch, Heap::kInstanceofCacheFunctionRootIndex); |
| 2822 Label loop, is_instance, is_not_instance; | 2795 __ StoreRoot(object_map, scratch, Heap::kInstanceofCacheMapRootIndex); |
| | 2796 |
| | 2797 // Loop through the prototype chain looking for the {function} prototype. |
| | 2798 // Assume true, and change to false if not found. |
| | 2799 Register const object_prototype = object_map; |
| | 2800 Label done, loop; |
| | 2801 __ mov(eax, isolate()->factory()->true_value()); |
| 2823 __ bind(&loop); | 2802 __ bind(&loop); |
| 2824 __ cmp(scratch, prototype); | 2803 __ mov(object_prototype, FieldOperand(object_map, Map::kPrototypeOffset)); |
| 2825 __ j(equal, &is_instance, Label::kNear); | 2804 __ cmp(object_prototype, function_prototype); |
| 2826 Factory* factory = isolate()->factory(); | 2805 __ j(equal, &done, Label::kNear); |
| 2827 __ cmp(scratch, Immediate(factory->null_value())); | 2806 __ cmp(object_prototype, isolate()->factory()->null_value()); |
| 2828 __ j(equal, &is_not_instance, Label::kNear); | 2807 __ mov(object_map, FieldOperand(object_prototype, HeapObject::kMapOffset)); |
| 2829 __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset)); | 2808 __ j(not_equal, &loop); |
| 2830 __ mov(scratch, FieldOperand(scratch, Map::kPrototypeOffset)); | 2809 __ mov(eax, isolate()->factory()->false_value()); |
| 2831 __ jmp(&loop); | 2810 __ bind(&done); |
| | 2811 __ StoreRoot(eax, scratch, Heap::kInstanceofCacheAnswerRootIndex); |
| | 2812 __ ret(0); |
| 2832 | 2813 |
| 2833 __ bind(&is_instance); | 2814 // Slow-case: Call the runtime function. |
| 2834 if (!HasCallSiteInlineCheck()) { | 2815 __ bind(&slow_case); |
| 2835 __ mov(eax, Immediate(0)); | 2816 __ pop(scratch); // Pop return address. |
| 2836 __ StoreRoot(eax, scratch, Heap::kInstanceofCacheAnswerRootIndex); | 2817 __ push(object); // Push {object}. |
| 2837 if (ReturnTrueFalseObject()) { | 2818 __ push(function); // Push {function}. |
| 2838 __ mov(eax, factory->true_value()); | 2819 __ push(scratch); // Push return address. |
| 2839 } | 2820 __ TailCallRuntime(Runtime::kInstanceOf, 2, 1); |
| 2840 } else { | |
| 2841 // Get return address and delta to inlined map check. | |
| 2842 __ mov(eax, factory->true_value()); | |
| 2843 __ mov(scratch, Operand(esp, 0 * kPointerSize)); | |
| 2844 __ sub(scratch, Operand(esp, 1 * kPointerSize)); | |
| 2845 if (FLAG_debug_code) { | |
| 2846 __ cmpb(Operand(scratch, kDeltaToMov), kMovEaxImmediateByte); | |
| 2847 __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov); | |
| 2848 } | |
| 2849 __ mov(Operand(scratch, kDeltaToMovImmediate), eax); | |
| 2850 if (!ReturnTrueFalseObject()) { | |
| 2851 __ Move(eax, Immediate(0)); | |
| 2852 } | |
| 2853 } | |
| 2854 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize); | |
| 2855 | |
| 2856 __ bind(&is_not_instance); | |
| 2857 if (!HasCallSiteInlineCheck()) { | |
| 2858 __ mov(eax, Immediate(Smi::FromInt(1))); | |
| 2859 __ StoreRoot(eax, scratch, Heap::kInstanceofCacheAnswerRootIndex); | |
| 2860 if (ReturnTrueFalseObject()) { | |
| 2861 __ mov(eax, factory->false_value()); | |
| 2862 } | |
| 2863 } else { | |
| 2864 // Get return address and delta to inlined map check. | |
| 2865 __ mov(eax, factory->false_value()); | |
| 2866 __ mov(scratch, Operand(esp, 0 * kPointerSize)); | |
| 2867 __ sub(scratch, Operand(esp, 1 * kPointerSize)); | |
| 2868 if (FLAG_debug_code) { | |
| 2869 __ cmpb(Operand(scratch, kDeltaToMov), kMovEaxImmediateByte); | |
| 2870 __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov); | |
| 2871 } | |
| 2872 __ mov(Operand(scratch, kDeltaToMovImmediate), eax); | |
| 2873 if (!ReturnTrueFalseObject()) { | |
| 2874 __ Move(eax, Immediate(Smi::FromInt(1))); | |
| 2875 } | |
| 2876 } | |
| 2877 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize); | |
| 2878 | |
| 2879 Label object_not_null, object_not_null_or_smi; | |
| 2880 __ bind(¬_js_object); | |
| 2881 // Before null, smi and string value checks, check that the rhs is a function | |
| 2882 // as for a non-function rhs an exception needs to be thrown. | |
| 2883 __ JumpIfSmi(function, &slow, Label::kNear); | |
| 2884 __ CmpObjectType(function, JS_FUNCTION_TYPE, scratch); | |
| 2885 __ j(not_equal, &slow, Label::kNear); | |
| 2886 | |
| 2887 // Null is not instance of anything. | |
| 2888 __ cmp(object, factory->null_value()); | |
| 2889 __ j(not_equal, &object_not_null, Label::kNear); | |
| 2890 if (ReturnTrueFalseObject()) { | |
| 2891 __ mov(eax, factory->false_value()); | |
| 2892 } else { | |
| 2893 __ Move(eax, Immediate(Smi::FromInt(1))); | |
| 2894 } | |
| 2895 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize); | |
| 2896 | |
| 2897 __ bind(&object_not_null); | |
| 2898 // Smi values is not instance of anything. | |
| 2899 __ JumpIfNotSmi(object, &object_not_null_or_smi, Label::kNear); | |
| 2900 if (ReturnTrueFalseObject()) { | |
| 2901 __ mov(eax, factory->false_value()); | |
| 2902 } else { | |
| 2903 __ Move(eax, Immediate(Smi::FromInt(1))); | |
| 2904 } | |
| 2905 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize); | |
| 2906 | |
| 2907 __ bind(&object_not_null_or_smi); | |
| 2908 // String values is not instance of anything. | |
| 2909 Condition is_string = masm->IsObjectStringType(object, scratch, scratch); | |
| 2910 __ j(NegateCondition(is_string), &slow, Label::kNear); | |
| 2911 if (ReturnTrueFalseObject()) { | |
| 2912 __ mov(eax, factory->false_value()); | |
| 2913 } else { | |
| 2914 __ Move(eax, Immediate(Smi::FromInt(1))); | |
| 2915 } | |
| 2916 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize); | |
| 2917 | |
| 2918 // Slow-case: Go through the JavaScript implementation. | |
| 2919 __ bind(&slow); | |
| 2920 if (!ReturnTrueFalseObject()) { | |
| 2921 // Tail call the builtin which returns 0 or 1. | |
| 2922 if (HasArgsInRegisters()) { | |
| 2923 // Push arguments below return address. | |
| 2924 __ pop(scratch); | |
| 2925 __ push(object); | |
| 2926 __ push(function); | |
| 2927 __ push(scratch); | |
| 2928 } | |
| 2929 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION); | |
| 2930 } else { | |
| 2931 // Call the builtin and convert 0/1 to true/false. | |
| 2932 { | |
| 2933 FrameScope scope(masm, StackFrame::INTERNAL); | |
| 2934 __ push(object); | |
| 2935 __ push(function); | |
| 2936 __ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION); | |
| 2937 } | |
| 2938 Label true_value, done; | |
| 2939 __ test(eax, eax); | |
| 2940 __ j(zero, &true_value, Label::kNear); | |
| 2941 __ mov(eax, factory->false_value()); | |
| 2942 __ jmp(&done, Label::kNear); | |
| 2943 __ bind(&true_value); | |
| 2944 __ mov(eax, factory->true_value()); | |
| 2945 __ bind(&done); | |
| 2946 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize); | |
| 2947 } | |
| 2948 } | 2821 } |
| 2949 | 2822 |
| 2950 | 2823 |
| 2951 // ------------------------------------------------------------------------- | 2824 // ------------------------------------------------------------------------- |
| 2952 // StringCharCodeAtGenerator | 2825 // StringCharCodeAtGenerator |
| 2953 | 2826 |
| 2954 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) { | 2827 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) { |
| 2955 // If the receiver is a smi trigger the non-string case. | 2828 // If the receiver is a smi trigger the non-string case. |
| 2956 STATIC_ASSERT(kSmiTag == 0); | 2829 STATIC_ASSERT(kSmiTag == 0); |
| 2957 if (check_mode_ == RECEIVER_IS_UNKNOWN) { | 2830 if (check_mode_ == RECEIVER_IS_UNKNOWN) { |
| (...skipping 2700 matching lines...) |
| 5658 Operand(ebp, 7 * kPointerSize), NULL); | 5531 Operand(ebp, 7 * kPointerSize), NULL); |
| 5659 } | 5532 } |
| 5660 | 5533 |
| 5661 | 5534 |
| 5662 #undef __ | 5535 #undef __ |
| 5663 | 5536 |
| 5664 } // namespace internal | 5537 } // namespace internal |
| 5665 } // namespace v8 | 5538 } // namespace v8 |
| 5666 | 5539 |
| 5667 #endif // V8_TARGET_ARCH_IA32 | 5540 #endif // V8_TARGET_ARCH_IA32 |
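The rewritten InstanceOfStub above drops the patchable call-site cache in favor of a straight-line fast path: probe the global instanceof cache roots, verify that the right-hand side is an unbound JSFunction with an instance prototype, resolve that prototype, then walk the left-hand side's prototype chain. The core of that last step (new lines 2801-2810) amounts to the following standalone C++ sketch; the Obj type and function name are illustrative only, not V8 code.

    // Toy stand-in for a heap object: only the prototype link matters here,
    // analogous to reading Map::kPrototypeOffset from each map in the chain.
    struct Obj {
      const Obj* prototype;
    };

    // Mirrors the loop between __ bind(&loop) and __ bind(&done): follow the
    // receiver's prototype chain and report whether the function's prototype
    // appears on it; reaching null means "not an instance".
    bool IsInPrototypeChain(const Obj* object, const Obj* target_prototype) {
      for (const Obj* p = object->prototype; p != nullptr; p = p->prototype) {
        if (p == target_prototype) return true;
      }
      return false;
    }

In the generated code the computed answer is also written back to Heap::kInstanceofCacheAnswerRootIndex, so a repeated check with the same map/function pair returns from the cache probe at the top of the stub instead of re-walking the chain.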