| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #if V8_TARGET_ARCH_X87 | 5 #if V8_TARGET_ARCH_X87 |
| 6 | 6 |
| 7 #include "src/code-stubs.h" | 7 #include "src/code-stubs.h" |
| 8 #include "src/api-arguments.h" | 8 #include "src/api-arguments.h" |
| 9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
| 10 #include "src/bootstrapper.h" | 10 #include "src/bootstrapper.h" |
| (...skipping 2816 matching lines...) |
| 2827 StubFailureTrampolineFrameConstants::kArgumentsLengthOffset; | 2827 StubFailureTrampolineFrameConstants::kArgumentsLengthOffset; |
| 2828 __ mov(ebx, MemOperand(ebp, parameter_count_offset)); | 2828 __ mov(ebx, MemOperand(ebp, parameter_count_offset)); |
| 2829 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE); | 2829 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE); |
| 2830 __ pop(ecx); | 2830 __ pop(ecx); |
| 2831 int additional_offset = | 2831 int additional_offset = |
| 2832 function_mode() == JS_FUNCTION_STUB_MODE ? kPointerSize : 0; | 2832 function_mode() == JS_FUNCTION_STUB_MODE ? kPointerSize : 0; |
| 2833 __ lea(esp, MemOperand(esp, ebx, times_pointer_size, additional_offset)); | 2833 __ lea(esp, MemOperand(esp, ebx, times_pointer_size, additional_offset)); |
| 2834 __ jmp(ecx); // Return to IC Miss stub, continuation still on stack. | 2834 __ jmp(ecx); // Return to IC Miss stub, continuation still on stack. |
| 2835 } | 2835 } |
| 2836 | 2836 |
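As context for the return sequence above (load the argument count from the frame, leave the stub-failure frame, pop the return address, drop the stack arguments, then jump back): a minimal sketch of the esp adjustment, assuming ia32's 4-byte pointers. The helper name and constant are illustrative, not V8 API.

```cpp
// Hypothetical model of the stack adjustment done by the lea(esp, ...)
// above: drop `argument_count` pointer-sized slots, plus one extra slot
// when the stub was entered in JS_FUNCTION_STUB_MODE.
constexpr int kPointerSize = 4;  // ia32/x87 assumption

int StackBytesToDrop(int argument_count, bool js_function_stub_mode) {
  int additional_offset = js_function_stub_mode ? kPointerSize : 0;
  return argument_count * kPointerSize + additional_offset;
}
```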
| 2837 void KeyedStoreICTrampolineStub::Generate(MacroAssembler* masm) { | |
| 2838 __ EmitLoadTypeFeedbackVector(StoreWithVectorDescriptor::VectorRegister()); | |
| 2839 KeyedStoreICStub stub(isolate(), state()); | |
| 2840 stub.GenerateForTrampoline(masm); | |
| 2841 } | |
| 2842 | |
| 2843 // The value is on the stack already. | |
| 2844 static void HandlePolymorphicStoreCase(MacroAssembler* masm, Register receiver, | |
| 2845 Register key, Register vector, | |
| 2846 Register slot, Register feedback, | |
| 2847 bool is_polymorphic, Label* miss) { | |
| 2848 // feedback initially contains the feedback array | |
| 2849 Label next, next_loop, prepare_next; | |
| 2850 Label load_smi_map, compare_map; | |
| 2851 Label start_polymorphic; | |
| 2852 Label pop_and_miss; | |
| 2853 | |
| 2854 __ push(receiver); | |
| 2855 // Value, vector and slot are passed on the stack, so no need to save/restore | |
| 2856 // them. | |
| 2857 | |
| 2858 Register receiver_map = receiver; | |
| 2859 Register cached_map = vector; | |
| 2860 | |
| 2861 // Receiver might not be a heap object. | |
| 2862 __ JumpIfSmi(receiver, &load_smi_map); | |
| 2863 __ mov(receiver_map, FieldOperand(receiver, 0)); | |
| 2864 __ bind(&compare_map); | |
| 2865 __ mov(cached_map, FieldOperand(feedback, FixedArray::OffsetOfElementAt(0))); | |
| 2866 | |
| 2867 // A named keyed store might have a 2-element array; all other cases can count | |
| 2868 // on an array with at least 2 {map, handler} pairs, so they can go right | |
| 2869 // into polymorphic array handling. | |
| 2870 __ cmp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset)); | |
| 2871 __ j(not_equal, &start_polymorphic); | |
| 2872 | |
| 2873 // Found, now call the handler. | |
| 2874 Register handler = feedback; | |
| 2875 DCHECK(handler.is(StoreWithVectorDescriptor::ValueRegister())); | |
| 2876 __ mov(handler, FieldOperand(feedback, FixedArray::OffsetOfElementAt(1))); | |
| 2877 __ pop(receiver); | |
| 2878 __ lea(handler, FieldOperand(handler, Code::kHeaderSize)); | |
| 2879 __ jmp(handler); | |
| 2880 | |
| 2881 // Polymorphic, we have to loop from 2 to N | |
| 2882 __ bind(&start_polymorphic); | |
| 2883 __ push(key); | |
| 2884 Register counter = key; | |
| 2885 __ mov(counter, Immediate(Smi::FromInt(2))); | |
| 2886 | |
| 2887 if (!is_polymorphic) { | |
| 2888 // If is_polymorphic is false, we may only have a two element array. | |
| 2889 // Check against length now in that case. | |
| 2890 __ cmp(counter, FieldOperand(feedback, FixedArray::kLengthOffset)); | |
| 2891 __ j(greater_equal, &pop_and_miss); | |
| 2892 } | |
| 2893 | |
| 2894 __ bind(&next_loop); | |
| 2895 __ mov(cached_map, FieldOperand(feedback, counter, times_half_pointer_size, | |
| 2896 FixedArray::kHeaderSize)); | |
| 2897 __ cmp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset)); | |
| 2898 __ j(not_equal, &prepare_next); | |
| 2899 __ mov(handler, FieldOperand(feedback, counter, times_half_pointer_size, | |
| 2900 FixedArray::kHeaderSize + kPointerSize)); | |
| 2901 __ lea(handler, FieldOperand(handler, Code::kHeaderSize)); | |
| 2902 __ pop(key); | |
| 2903 __ pop(receiver); | |
| 2904 __ jmp(handler); | |
| 2905 | |
| 2906 __ bind(&prepare_next); | |
| 2907 __ add(counter, Immediate(Smi::FromInt(2))); | |
| 2908 __ cmp(counter, FieldOperand(feedback, FixedArray::kLengthOffset)); | |
| 2909 __ j(less, &next_loop); | |
| 2910 | |
| 2911 // We exhausted our array of map handler pairs. | |
| 2912 __ bind(&pop_and_miss); | |
| 2913 __ pop(key); | |
| 2914 __ pop(receiver); | |
| 2915 __ jmp(miss); | |
| 2916 | |
| 2917 __ bind(&load_smi_map); | |
| 2918 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex); | |
| 2919 __ jmp(&compare_map); | |
| 2920 } | |
| 2921 | |
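For readers skimming the deleted HandlePolymorphicStoreCase: it scans a feedback FixedArray laid out as consecutive {map, handler} pairs, fast-pathing the first pair and then stepping the Smi counter by 2 until the array length is reached. A rough C++ model of that scan, using stand-in types rather than V8's MacroAssembler/FixedArray API:

```cpp
// Stand-in model of the {map, handler} pair scan; Map, Handler and
// FeedbackEntry are illustrative types, not the real V8 classes.
#include <vector>

struct Map {};
struct Handler {};

struct FeedbackEntry {
  const Map* map;    // map recorded in the feedback (held via a WeakCell in V8)
  Handler* handler;  // handler code to tail-call on a match
};

// Returns the matching handler, or nullptr to signal a jump to the miss label.
Handler* FindStoreHandler(const std::vector<FeedbackEntry>& feedback,
                          const Map* receiver_map) {
  // The assembly checks entry 0 first (the common monomorphic-looking case),
  // then loops over the remaining entries; a single loop models both steps.
  for (const FeedbackEntry& entry : feedback) {
    if (entry.map == receiver_map) return entry.handler;
  }
  return nullptr;  // exhausted the pairs
}
```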
| 2922 | |
| 2923 static void HandleMonomorphicStoreCase(MacroAssembler* masm, Register receiver, | |
| 2924 Register key, Register vector, | |
| 2925 Register slot, Register weak_cell, | |
| 2926 Label* miss) { | |
| 2927 // The store ic value is on the stack. | |
| 2928 DCHECK(weak_cell.is(StoreWithVectorDescriptor::ValueRegister())); | |
| 2929 | |
| 2930 // feedback initially contains the feedback array | |
| 2931 Label compare_smi_map; | |
| 2932 | |
| 2933 // Move the map held by the weak cell into the weak_cell register. | |
| 2934 Register ic_map = weak_cell; | |
| 2935 __ mov(ic_map, FieldOperand(weak_cell, WeakCell::kValueOffset)); | |
| 2936 | |
| 2937 // Receiver might not be a heap object. | |
| 2938 __ JumpIfSmi(receiver, &compare_smi_map); | |
| 2939 __ cmp(ic_map, FieldOperand(receiver, 0)); | |
| 2940 __ j(not_equal, miss); | |
| 2941 __ mov(weak_cell, FieldOperand(vector, slot, times_half_pointer_size, | |
| 2942 FixedArray::kHeaderSize + kPointerSize)); | |
| 2943 __ lea(weak_cell, FieldOperand(weak_cell, Code::kHeaderSize)); | |
| 2944 // jump to the handler. | |
| 2945 __ jmp(weak_cell); | |
| 2946 | |
| 2947 // In microbenchmarks, it made sense to unroll this code so that the call to | |
| 2948 // the handler is duplicated for a HeapObject receiver and a Smi receiver. | |
| 2949 __ bind(&compare_smi_map); | |
| 2950 __ CompareRoot(ic_map, Heap::kHeapNumberMapRootIndex); | |
| 2951 __ j(not_equal, miss); | |
| 2952 __ mov(weak_cell, FieldOperand(vector, slot, times_half_pointer_size, | |
| 2953 FixedArray::kHeaderSize + kPointerSize)); | |
| 2954 __ lea(weak_cell, FieldOperand(weak_cell, Code::kHeaderSize)); | |
| 2955 // jump to the handler. | |
| 2956 __ jmp(weak_cell); | |
| 2957 } | |
| 2958 | |
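The monomorphic path above extracts the map held by the feedback WeakCell and compares it with the receiver's map, treating a Smi receiver as if it had the heap-number map, before jumping to the handler stored in the following vector slot. A hedged sketch of just that comparison; the type and parameter names are assumptions, not V8's real API.

```cpp
// Illustrative model of HandleMonomorphicStoreCase's map check.
struct Map {};

// `cell_map` is the map recorded in the feedback WeakCell. A Smi receiver
// has no map of its own, so the stub compares against the heap-number map
// instead (the load_smi_map / compare_smi_map split above).
bool MonomorphicMapMatches(const Map* cell_map, const Map* receiver_map,
                           const Map* heap_number_map, bool receiver_is_smi) {
  const Map* effective_map = receiver_is_smi ? heap_number_map : receiver_map;
  return cell_map == effective_map;  // false means: jump to the miss label
}
```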
| 2959 void KeyedStoreICStub::Generate(MacroAssembler* masm) { | |
| 2960 GenerateImpl(masm, false); | |
| 2961 } | |
| 2962 | |
| 2963 void KeyedStoreICStub::GenerateForTrampoline(MacroAssembler* masm) { | |
| 2964 GenerateImpl(masm, true); | |
| 2965 } | |
| 2966 | |
| 2967 | |
| 2968 static void HandlePolymorphicKeyedStoreCase(MacroAssembler* masm, | |
| 2969 Register receiver, Register key, | |
| 2970 Register vector, Register slot, | |
| 2971 Register feedback, Label* miss) { | |
| 2972 // feedback initially contains the feedback array | |
| 2973 Label next, next_loop, prepare_next; | |
| 2974 Label load_smi_map, compare_map; | |
| 2975 Label transition_call; | |
| 2976 Label pop_and_miss; | |
| 2977 | |
| 2978 __ push(receiver); | |
| 2979 // Value, vector and slot are passed on the stack, so no need to save/restore | |
| 2980 // them. | |
| 2981 | |
| 2982 Register receiver_map = receiver; | |
| 2983 Register cached_map = vector; | |
| 2984 | |
| 2985 // Receiver might not be a heap object. | |
| 2986 __ JumpIfSmi(receiver, &load_smi_map); | |
| 2987 __ mov(receiver_map, FieldOperand(receiver, 0)); | |
| 2988 __ bind(&compare_map); | |
| 2989 | |
| 2990 // Polymorphic, we have to loop from 0 to N - 1 | |
| 2991 __ push(key); | |
| 2992 // Current stack layout: | |
| 2993 // - esp[0] -- key | |
| 2994 // - esp[4] -- receiver | |
| 2995 // - esp[8] -- return address | |
| 2996 // - esp[12] -- vector | |
| 2997 // - esp[16] -- slot | |
| 2998 // - esp[20] -- value | |
| 2999 // | |
| 3000 // Required stack layout for handler call (see StoreWithVectorDescriptor): | |
| 3001 // - esp[0] -- return address | |
| 3002 // - esp[4] -- vector | |
| 3003 // - esp[8] -- slot | |
| 3004 // - esp[12] -- value | |
| 3005 // - receiver, key, handler in registers. | |
| 3006 Register counter = key; | |
| 3007 __ mov(counter, Immediate(Smi::kZero)); | |
| 3008 __ bind(&next_loop); | |
| 3009 __ mov(cached_map, FieldOperand(feedback, counter, times_half_pointer_size, | |
| 3010 FixedArray::kHeaderSize)); | |
| 3011 __ cmp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset)); | |
| 3012 __ j(not_equal, &prepare_next); | |
| 3013 __ mov(cached_map, FieldOperand(feedback, counter, times_half_pointer_size, | |
| 3014 FixedArray::kHeaderSize + kPointerSize)); | |
| 3015 __ CompareRoot(cached_map, Heap::kUndefinedValueRootIndex); | |
| 3016 __ j(not_equal, &transition_call); | |
| 3017 __ mov(feedback, FieldOperand(feedback, counter, times_half_pointer_size, | |
| 3018 FixedArray::kHeaderSize + 2 * kPointerSize)); | |
| 3019 __ pop(key); | |
| 3020 __ pop(receiver); | |
| 3021 __ lea(feedback, FieldOperand(feedback, Code::kHeaderSize)); | |
| 3022 __ jmp(feedback); | |
| 3023 | |
| 3024 __ bind(&transition_call); | |
| 3025 // Current stack layout: | |
| 3026 // - esp[0] -- key | |
| 3027 // - esp[4] -- receiver | |
| 3028 // - esp[8] -- return address | |
| 3029 // - esp[12] -- vector | |
| 3030 // - esp[16] -- slot | |
| 3031 // - esp[20] -- value | |
| 3032 // | |
| 3033 // Required stack layout for handler call (see StoreTransitionDescriptor): | |
| 3034 // - esp[0] -- return address | |
| 3035 // - esp[4] -- vector | |
| 3036 // - esp[8] -- slot | |
| 3037 // - esp[12] -- value | |
| 3038 // - receiver, key, map, handler in registers. | |
| 3039 __ mov(feedback, FieldOperand(feedback, counter, times_half_pointer_size, | |
| 3040 FixedArray::kHeaderSize + 2 * kPointerSize)); | |
| 3041 __ lea(feedback, FieldOperand(feedback, Code::kHeaderSize)); | |
| 3042 | |
| 3043 __ mov(cached_map, FieldOperand(cached_map, WeakCell::kValueOffset)); | |
| 3044 // The weak cell may have been cleared. | |
| 3045 __ JumpIfSmi(cached_map, &pop_and_miss); | |
| 3046 DCHECK(!cached_map.is(StoreTransitionDescriptor::MapRegister())); | |
| 3047 __ mov(StoreTransitionDescriptor::MapRegister(), cached_map); | |
| 3048 | |
| 3049 // Call store transition handler using StoreTransitionDescriptor calling | |
| 3050 // convention. | |
| 3051 __ pop(key); | |
| 3052 __ pop(receiver); | |
| 3053 // Ensure that the transition handler we are going to call has the same | |
| 3054 // number of stack arguments, which means that we don't have to adapt them | |
| 3055 // before the call. | |
| 3056 STATIC_ASSERT(StoreWithVectorDescriptor::kStackArgumentsCount == 3); | |
| 3057 STATIC_ASSERT(StoreTransitionDescriptor::kStackArgumentsCount == 3); | |
| 3058 STATIC_ASSERT(StoreWithVectorDescriptor::kParameterCount - | |
| 3059 StoreWithVectorDescriptor::kValue == | |
| 3060 StoreTransitionDescriptor::kParameterCount - | |
| 3061 StoreTransitionDescriptor::kValue); | |
| 3062 STATIC_ASSERT(StoreWithVectorDescriptor::kParameterCount - | |
| 3063 StoreWithVectorDescriptor::kSlot == | |
| 3064 StoreTransitionDescriptor::kParameterCount - | |
| 3065 StoreTransitionDescriptor::kSlot); | |
| 3066 STATIC_ASSERT(StoreWithVectorDescriptor::kParameterCount - | |
| 3067 StoreWithVectorDescriptor::kVector == | |
| 3068 StoreTransitionDescriptor::kParameterCount - | |
| 3069 StoreTransitionDescriptor::kVector); | |
| 3070 __ jmp(feedback); | |
| 3071 | |
| 3072 __ bind(&prepare_next); | |
| 3073 __ add(counter, Immediate(Smi::FromInt(3))); | |
| 3074 __ cmp(counter, FieldOperand(feedback, FixedArray::kLengthOffset)); | |
| 3075 __ j(less, &next_loop); | |
| 3076 | |
| 3077 // We exhausted our array of map handler pairs. | |
| 3078 __ bind(&pop_and_miss); | |
| 3079 __ pop(key); | |
| 3080 __ pop(receiver); | |
| 3081 __ jmp(miss); | |
| 3082 | |
| 3083 __ bind(&load_smi_map); | |
| 3084 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex); | |
| 3085 __ jmp(&compare_map); | |
| 3086 } | |
| 3087 | |
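The keyed variant above walks triples of {map, transition, handler} (hence the counter stepping by Smi 3): an undefined transition slot means a plain store handler, a live WeakCell means a transitioning store that also hands the new map to the handler, and a cleared cell falls through to miss. A simplified C++ model with hypothetical types, not V8's real data structures:

```cpp
// Hypothetical model of the {map, transition, handler} triple scan in the
// deleted HandlePolymorphicKeyedStoreCase.
#include <vector>

struct Map {};
struct Handler {};

struct KeyedStoreEntry {
  const Map* map;             // receiver map to match
  bool has_transition;        // false models the "undefined" middle slot
  const Map* transition_map;  // weak target; nullptr models a cleared WeakCell
  Handler* handler;           // handler code for this map
};

enum class Outcome { kMiss, kCallHandler, kCallTransitionHandler };

// On a match, fills handler_out (and new_map_out for transitioning stores).
Outcome DispatchKeyedStore(const std::vector<KeyedStoreEntry>& feedback,
                           const Map* receiver_map, Handler** handler_out,
                           const Map** new_map_out) {
  for (const KeyedStoreEntry& entry : feedback) {
    if (entry.map != receiver_map) continue;
    *handler_out = entry.handler;
    if (!entry.has_transition) {
      return Outcome::kCallHandler;  // StoreWithVector calling convention
    }
    if (entry.transition_map == nullptr) {
      return Outcome::kMiss;  // the transition WeakCell was cleared
    }
    *new_map_out = entry.transition_map;
    return Outcome::kCallTransitionHandler;  // StoreTransition convention
  }
  return Outcome::kMiss;  // no map matched
}
```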
| 3088 void KeyedStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { | |
| 3089 Register receiver = StoreWithVectorDescriptor::ReceiverRegister(); // edx | |
| 3090 Register key = StoreWithVectorDescriptor::NameRegister(); // ecx | |
| 3091 Register value = StoreWithVectorDescriptor::ValueRegister(); // eax | |
| 3092 Register vector = StoreWithVectorDescriptor::VectorRegister(); // ebx | |
| 3093 Register slot = StoreWithVectorDescriptor::SlotRegister(); // edi | |
| 3094 Label miss; | |
| 3095 | |
| 3096 if (StoreWithVectorDescriptor::kPassLastArgsOnStack) { | |
| 3097 // Current stack layout: | |
| 3098 // - esp[8] -- value | |
| 3099 // - esp[4] -- slot | |
| 3100 // - esp[0] -- return address | |
| 3101 STATIC_ASSERT(StoreDescriptor::kStackArgumentsCount == 2); | |
| 3102 STATIC_ASSERT(StoreWithVectorDescriptor::kStackArgumentsCount == 3); | |
| 3103 if (in_frame) { | |
| 3104 __ RecordComment("[ StoreDescriptor -> StoreWithVectorDescriptor"); | |
| 3105 // If the vector is not on the stack, then insert the vector beneath the | |
| 3106 // return address in order to prepare for calling the handler with the | |
| 3107 // StoreWithVector calling convention. | |
| 3108 __ push(Operand(esp, 0)); | |
| 3109 __ mov(Operand(esp, 4), StoreWithVectorDescriptor::VectorRegister()); | |
| 3110 __ RecordComment("]"); | |
| 3111 } else { | |
| 3112 __ mov(vector, Operand(esp, 1 * kPointerSize)); | |
| 3113 } | |
| 3114 __ mov(slot, Operand(esp, 2 * kPointerSize)); | |
| 3115 } | |
| 3116 | |
| 3117 Register scratch = value; | |
| 3118 __ mov(scratch, FieldOperand(vector, slot, times_half_pointer_size, | |
| 3119 FixedArray::kHeaderSize)); | |
| 3120 | |
| 3121 // Is it a weak cell? | |
| 3122 Label try_array; | |
| 3123 Label not_array, smi_key, key_okay; | |
| 3124 __ CompareRoot(FieldOperand(scratch, 0), Heap::kWeakCellMapRootIndex); | |
| 3125 __ j(not_equal, &try_array); | |
| 3126 HandleMonomorphicStoreCase(masm, receiver, key, vector, slot, scratch, &miss); | |
| 3127 | |
| 3128 // Is it a fixed array? | |
| 3129 __ bind(&try_array); | |
| 3130 __ CompareRoot(FieldOperand(scratch, 0), Heap::kFixedArrayMapRootIndex); | |
| 3131 __ j(not_equal, ¬_array); | |
| 3132 HandlePolymorphicKeyedStoreCase(masm, receiver, key, vector, slot, scratch, | |
| 3133 &miss); | |
| 3134 | |
| 3135 __ bind(¬_array); | |
| 3136 Label try_poly_name; | |
| 3137 __ CompareRoot(scratch, Heap::kmegamorphic_symbolRootIndex); | |
| 3138 __ j(not_equal, &try_poly_name); | |
| 3139 | |
| 3140 Handle<Code> megamorphic_stub = | |
| 3141 KeyedStoreIC::ChooseMegamorphicStub(masm->isolate(), GetExtraICState()); | |
| 3142 __ jmp(megamorphic_stub, RelocInfo::CODE_TARGET); | |
| 3143 | |
| 3144 __ bind(&try_poly_name); | |
| 3145 // We might have a name in feedback, and a fixed array in the next slot. | |
| 3146 __ cmp(key, scratch); | |
| 3147 __ j(not_equal, &miss); | |
| 3148 // If the name comparison succeeded, we know we have a fixed array with | |
| 3149 // at least one map/handler pair. | |
| 3150 __ mov(scratch, FieldOperand(vector, slot, times_half_pointer_size, | |
| 3151 FixedArray::kHeaderSize + kPointerSize)); | |
| 3152 HandlePolymorphicStoreCase(masm, receiver, key, vector, slot, scratch, false, | |
| 3153 &miss); | |
| 3154 | |
| 3155 __ bind(&miss); | |
| 3156 KeyedStoreIC::GenerateMiss(masm); | |
| 3157 } | |
| 3158 | |
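GenerateImpl above dispatches on what the feedback slot holds: a WeakCell (monomorphic), a FixedArray (polymorphic keyed), the megamorphic sentinel (tail-call the megamorphic stub), or a Name whose neighbouring slot holds a {map, handler} array. A compact model of that decision tree, with an assumed classification enum standing in for the map/root checks the stub actually performs:

```cpp
// The enum and names below are illustrative only, not V8 API.
enum class FeedbackKind {
  kWeakCell,           // monomorphic: one map, one handler
  kFixedArray,         // polymorphic keyed: {map, transition, handler} triples
  kMegamorphicSymbol,  // give up on feedback, use the megamorphic stub
  kName,               // named keyed store: next slot has a {map, handler} array
  kOther
};

enum class Action {
  kMonomorphic,
  kPolymorphicKeyed,
  kMegamorphicStub,
  kPolymorphicName,
  kMiss
};

Action ClassifyKeyedStoreFeedback(FeedbackKind kind,
                                  bool key_equals_feedback_name) {
  switch (kind) {
    case FeedbackKind::kWeakCell:          return Action::kMonomorphic;
    case FeedbackKind::kFixedArray:        return Action::kPolymorphicKeyed;
    case FeedbackKind::kMegamorphicSymbol: return Action::kMegamorphicStub;
    case FeedbackKind::kName:
      return key_equals_feedback_name ? Action::kPolymorphicName : Action::kMiss;
    default:                               return Action::kMiss;
  }
}
```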
| 3159 void CallICTrampolineStub::Generate(MacroAssembler* masm) { | 2837 void CallICTrampolineStub::Generate(MacroAssembler* masm) { |
| 3160 __ EmitLoadTypeFeedbackVector(ebx); | 2838 __ EmitLoadTypeFeedbackVector(ebx); |
| 3161 CallICStub stub(isolate(), state()); | 2839 CallICStub stub(isolate(), state()); |
| 3162 __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET); | 2840 __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET); |
| 3163 } | 2841 } |
| 3164 | 2842 |
| 3165 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { | 2843 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { |
| 3166 if (masm->isolate()->function_entry_hook() != NULL) { | 2844 if (masm->isolate()->function_entry_hook() != NULL) { |
| 3167 ProfileEntryHookStub stub(masm->isolate()); | 2845 ProfileEntryHookStub stub(masm->isolate()); |
| 3168 masm->CallStub(&stub); | 2846 masm->CallStub(&stub); |
| (...skipping 1383 matching lines...) |
| 4552 kStackUnwindSpace, nullptr, return_value_operand, | 4230 kStackUnwindSpace, nullptr, return_value_operand, |
| 4553 NULL); | 4231 NULL); |
| 4554 } | 4232 } |
| 4555 | 4233 |
| 4556 #undef __ | 4234 #undef __ |
| 4557 | 4235 |
| 4558 } // namespace internal | 4236 } // namespace internal |
| 4559 } // namespace v8 | 4237 } // namespace v8 |
| 4560 | 4238 |
| 4561 #endif // V8_TARGET_ARCH_X87 | 4239 #endif // V8_TARGET_ARCH_X87 |