OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 2959 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2970 | 2970 |
2971 __ Push(r4, r2, r1); | 2971 __ Push(r4, r2, r1); |
2972 __ RecordWrite(r2, r4, r1, kLRHasNotBeenSaved, kDontSaveFPRegs, | 2972 __ RecordWrite(r2, r4, r1, kLRHasNotBeenSaved, kDontSaveFPRegs, |
2973 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); | 2973 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); |
2974 __ Pop(r4, r2, r1); | 2974 __ Pop(r4, r2, r1); |
2975 | 2975 |
2976 __ bind(&done); | 2976 __ bind(&done); |
2977 } | 2977 } |
2978 | 2978 |
2979 | 2979 |
2980 void CallFunctionStub::Generate(MacroAssembler* masm) { | 2980 static void GenericCallHelper(MacroAssembler* masm, |
| 2981 const CallIC::State& state, |
| 2982 bool wrap_and_call = false) { |
2981 // r1 : the function to call | 2983 // r1 : the function to call |
2982 // r2 : feedback vector | 2984 |
2983 // r3 : (only if r2 is not the megamorphic symbol) slot in feedback | 2985 // wrap_and_call can only be true if we are compiling a monomorphic method. |
2984 // vector (Smi) | 2986 ASSERT(!(wrap_and_call && state.IsGeneric())); |
| 2987 ASSERT(!wrap_and_call || state.CallAsMethod()); |
2985 Label slow, non_function, wrap, cont; | 2988 Label slow, non_function, wrap, cont; |
2986 | 2989 |
2987 if (NeedsChecks()) { | 2990 if (state.IsGeneric()) { |
2988 // Check that the function is really a JavaScript function. | 2991 // Check that the function is really a JavaScript function. |
2989 // r1: pushed function (to be verified) | 2992 // r1: pushed function (to be verified) |
2990 __ JumpIfSmi(r1, &non_function); | 2993 __ JumpIfSmi(r1, &non_function); |
2991 | 2994 |
2992 // Goto slow case if we do not have a function. | 2995 // Goto slow case if we do not have a function. |
2993 __ CompareObjectType(r1, r4, r4, JS_FUNCTION_TYPE); | 2996 __ CompareObjectType(r1, r4, r4, JS_FUNCTION_TYPE); |
2994 __ b(ne, &slow); | 2997 __ b(ne, &slow); |
2995 | |
2996 if (RecordCallTarget()) { | |
2997 GenerateRecordCallTarget(masm); | |
2998 // Type information was updated. Because we may call Array, which | |
2999 // expects either undefined or an AllocationSite in ebx we need | |
3000 // to set ebx to undefined. | |
3001 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); | |
3002 } | |
3003 } | 2998 } |
3004 | 2999 |
3005 // Fast-case: Invoke the function now. | 3000 // Fast-case: Invoke the function now. |
3006 // r1: pushed function | 3001 // r1: pushed function |
3007 ParameterCount actual(argc_); | 3002 int argc = state.arg_count(); |
| 3003 ParameterCount actual(argc); |
3008 | 3004 |
3009 if (CallAsMethod()) { | 3005 if (state.CallAsMethod()) { |
3010 if (NeedsChecks()) { | 3006 if (state.IsGeneric()) { |
3011 // Do not transform the receiver for strict mode functions. | 3007 // Do not transform the receiver for strict mode functions. |
3012 __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); | 3008 __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); |
3013 __ ldr(r4, FieldMemOperand(r3, SharedFunctionInfo::kCompilerHintsOffset)); | 3009 __ ldr(r4, FieldMemOperand(r3, SharedFunctionInfo::kCompilerHintsOffset)); |
3014 __ tst(r4, Operand(1 << (SharedFunctionInfo::kStrictModeFunction + | 3010 __ tst(r4, Operand(1 << (SharedFunctionInfo::kStrictModeFunction + |
3015 kSmiTagSize))); | 3011 kSmiTagSize))); |
3016 __ b(ne, &cont); | 3012 __ b(ne, &cont); |
3017 | 3013 |
3018 // Do not transform the receiver for native (Compilerhints already in r3). | 3014 // Do not transform the receiver for native (Compilerhints already in r3). |
3019 __ tst(r4, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize))); | 3015 __ tst(r4, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize))); |
3020 __ b(ne, &cont); | 3016 __ b(ne, &cont); |
3021 } | 3017 } |
3022 | 3018 |
3023 // Compute the receiver in sloppy mode. | 3019 if (state.IsGeneric() || state.IsSloppy() || wrap_and_call) { |
3024 __ ldr(r3, MemOperand(sp, argc_ * kPointerSize)); | 3020 // Compute the receiver in sloppy mode. |
| 3021 __ ldr(r3, MemOperand(sp, argc * kPointerSize)); |
3025 | 3022 |
3026 if (NeedsChecks()) { | 3023 if (state.IsGeneric()) { |
3027 __ JumpIfSmi(r3, &wrap); | 3024 __ JumpIfSmi(r3, &wrap); |
3028 __ CompareObjectType(r3, r4, r4, FIRST_SPEC_OBJECT_TYPE); | 3025 __ CompareObjectType(r3, r4, r4, FIRST_SPEC_OBJECT_TYPE); |
3029 __ b(lt, &wrap); | 3026 __ b(lt, &wrap); |
3030 } else { | 3027 } else { |
3031 __ jmp(&wrap); | 3028 __ jmp(&wrap); |
| 3029 } |
3032 } | 3030 } |
3033 | 3031 |
3034 __ bind(&cont); | 3032 __ bind(&cont); |
3035 } | 3033 } |
3036 __ InvokeFunction(r1, actual, JUMP_FUNCTION, NullCallWrapper()); | |
3037 | 3034 |
3038 if (NeedsChecks()) { | 3035 if (state.ArgumentsMustMatch()) { |
| 3036 __ InvokeFunction(r1, actual, actual, JUMP_FUNCTION, NullCallWrapper()); |
| 3037 } else { |
| 3038 __ InvokeFunction(r1, actual, JUMP_FUNCTION, NullCallWrapper()); |
| 3039 } |
| 3040 |
| 3041 if (state.IsGeneric()) { |
3039 // Slow-case: Non-function called. | 3042 // Slow-case: Non-function called. |
3040 __ bind(&slow); | 3043 __ bind(&slow); |
3041 if (RecordCallTarget()) { | |
3042 // If there is a call target cache, mark it megamorphic in the | |
3043 // non-function case. MegamorphicSentinel is an immortal immovable | |
3044 // object (megamorphic symbol) so no write barrier is needed. | |
3045 ASSERT_EQ(*TypeFeedbackInfo::MegamorphicSentinel(masm->isolate()), | |
3046 masm->isolate()->heap()->megamorphic_symbol()); | |
3047 __ add(r5, r2, Operand::PointerOffsetFromSmiKey(r3)); | |
3048 __ LoadRoot(ip, Heap::kMegamorphicSymbolRootIndex); | |
3049 __ str(ip, FieldMemOperand(r5, FixedArray::kHeaderSize)); | |
3050 } | |
3051 // Check for function proxy. | 3044 // Check for function proxy. |
3052 __ cmp(r4, Operand(JS_FUNCTION_PROXY_TYPE)); | 3045 __ cmp(r4, Operand(JS_FUNCTION_PROXY_TYPE)); |
3053 __ b(ne, &non_function); | 3046 __ b(ne, &non_function); |
3054 __ push(r1); // put proxy as additional argument | 3047 __ push(r1); // put proxy as additional argument |
3055 __ mov(r0, Operand(argc_ + 1, RelocInfo::NONE32)); | 3048 __ mov(r0, Operand(argc + 1, RelocInfo::NONE32)); |
3056 __ mov(r2, Operand::Zero()); | 3049 __ mov(r2, Operand::Zero()); |
3057 __ GetBuiltinFunction(r1, Builtins::CALL_FUNCTION_PROXY); | 3050 __ GetBuiltinFunction(r1, Builtins::CALL_FUNCTION_PROXY); |
3058 { | 3051 { |
3059 Handle<Code> adaptor = | 3052 Handle<Code> adaptor = |
3060 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(); | 3053 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(); |
3061 __ Jump(adaptor, RelocInfo::CODE_TARGET); | 3054 __ Jump(adaptor, RelocInfo::CODE_TARGET); |
3062 } | 3055 } |
3063 | 3056 |
3064 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead | 3057 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead |
3065 // of the original receiver from the call site). | 3058 // of the original receiver from the call site). |
3066 __ bind(&non_function); | 3059 __ bind(&non_function); |
3067 __ str(r1, MemOperand(sp, argc_ * kPointerSize)); | 3060 __ str(r1, MemOperand(sp, argc * kPointerSize)); |
3068 __ mov(r0, Operand(argc_)); // Set up the number of arguments. | 3061 __ mov(r0, Operand(argc)); // Set up the number of arguments. |
3069 __ mov(r2, Operand::Zero()); | 3062 __ mov(r2, Operand::Zero()); |
3070 __ GetBuiltinFunction(r1, Builtins::CALL_NON_FUNCTION); | 3063 __ GetBuiltinFunction(r1, Builtins::CALL_NON_FUNCTION); |
3071 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), | 3064 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), |
3072 RelocInfo::CODE_TARGET); | 3065 RelocInfo::CODE_TARGET); |
3073 } | 3066 } |
3074 | 3067 |
3075 if (CallAsMethod()) { | 3068 if (state.CallAsMethod()) { |
3076 __ bind(&wrap); | 3069 __ bind(&wrap); |
| 3070 |
| 3071 if (!state.IsGeneric() && !wrap_and_call) { |
| 3072 __ ldr(r5, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); |
| 3073 __ ldr(r4, FieldMemOperand(r5, SharedFunctionInfo::kCompilerHintsOffset)); |
| 3074 |
| 3075 // Do not transform the receiver for native |
| 3076 __ tst(r4, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize))); |
| 3077 __ b(ne, &cont); |
| 3078 } |
| 3079 |
3077 // Wrap the receiver and patch it back onto the stack. | 3080 // Wrap the receiver and patch it back onto the stack. |
3078 { FrameAndConstantPoolScope frame_scope(masm, StackFrame::INTERNAL); | 3081 { FrameAndConstantPoolScope frame_scope(masm, StackFrame::INTERNAL); |
3079 __ Push(r1, r3); | 3082 __ Push(r1, r3); |
3080 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); | 3083 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); |
3081 __ pop(r1); | 3084 __ pop(r1); |
3082 } | 3085 } |
3083 __ str(r0, MemOperand(sp, argc_ * kPointerSize)); | 3086 __ str(r0, MemOperand(sp, argc * kPointerSize)); |
3084 __ jmp(&cont); | 3087 __ jmp(&cont); |
3085 } | 3088 } |
3086 } | 3089 } |
3087 | 3090 |
3088 | 3091 |
| 3092 void CallFunctionStub::Generate(MacroAssembler* masm) { |
| 3093 // r1 : the function to call |
| 3094 |
 | 3095 // GenericCallHelper expresses its options in terms of CallIC::State. |
| 3096 CallIC::CallType call_type = CallAsMethod() ? |
| 3097 CallIC::METHOD : CallIC::FUNCTION; |
| 3098 |
| 3099 if (NeedsChecks()) { |
| 3100 GenericCallHelper(masm, |
| 3101 CallIC::State::SlowCallState( |
| 3102 argc_, |
| 3103 call_type)); |
| 3104 } else { |
| 3105 GenericCallHelper(masm, |
| 3106 CallIC::State::MonomorphicCallState( |
| 3107 argc_, |
| 3108 call_type, |
| 3109 CallIC::ARGUMENTS_COUNT_UNKNOWN, |
| 3110 SLOPPY), |
| 3111 true); |
| 3112 } |
| 3113 } |
| 3114 |
| 3115 |
3089 void CallConstructStub::Generate(MacroAssembler* masm) { | 3116 void CallConstructStub::Generate(MacroAssembler* masm) { |
3090 // r0 : number of arguments | 3117 // r0 : number of arguments |
3091 // r1 : the function to call | 3118 // r1 : the function to call |
3092 // r2 : feedback vector | 3119 // r2 : feedback vector |
3093 // r3 : (only if r2 is not the megamorphic symbol) slot in feedback | 3120 // r3 : (only if r2 is not the megamorphic symbol) slot in feedback |
3094 // vector (Smi) | 3121 // vector (Smi) |
3095 Label slow, non_function_call; | 3122 Label slow, non_function_call; |
3096 | 3123 |
3097 // Check that the function is not a smi. | 3124 // Check that the function is not a smi. |
3098 __ JumpIfSmi(r1, &non_function_call); | 3125 __ JumpIfSmi(r1, &non_function_call); |
(...skipping 44 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3143 __ bind(&non_function_call); | 3170 __ bind(&non_function_call); |
3144 __ GetBuiltinFunction(r1, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR); | 3171 __ GetBuiltinFunction(r1, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR); |
3145 __ bind(&do_call); | 3172 __ bind(&do_call); |
3146 // Set expected number of arguments to zero (not changing r0). | 3173 // Set expected number of arguments to zero (not changing r0). |
3147 __ mov(r2, Operand::Zero()); | 3174 __ mov(r2, Operand::Zero()); |
3148 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), | 3175 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), |
3149 RelocInfo::CODE_TARGET); | 3176 RelocInfo::CODE_TARGET); |
3150 } | 3177 } |
3151 | 3178 |
3152 | 3179 |
| 3180 void CallICStub::GenerateMonomorphicCall(MacroAssembler* masm) { |
| 3181 GenericCallHelper(masm, |
| 3182 CallIC::State::MonomorphicCallState( |
| 3183 state_.arg_count(), |
| 3184 state_.call_type(), |
| 3185 state_.argument_check(), |
| 3186 state_.strict_mode())); |
| 3187 } |
| 3188 |
| 3189 |
| 3190 void CallICStub::GenerateSlowCall(MacroAssembler* masm) { |
| 3191 GenericCallHelper(masm, |
| 3192 CallIC::State::SlowCallState( |
| 3193 state_.arg_count(), |
| 3194 state_.call_type())); |
| 3195 } |
| 3196 |
| 3197 |
| 3198 void CallICStub::Generate(MacroAssembler* masm) { |
| 3199 // r1 - function |
| 3200 // r2 - vector |
| 3201 // r3 - slot id (Smi) |
| 3202 Label extra_checks_or_miss, slow; |
| 3203 |
| 3204 // The checks. First, does r1 match the recorded monomorphic target? |
| 3205 __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3)); |
| 3206 __ ldr(r4, FieldMemOperand(r4, FixedArray::kHeaderSize)); |
| 3207 __ cmp(r1, r4); |
| 3208 __ b(ne, &extra_checks_or_miss); |
| 3209 |
| 3210 GenerateMonomorphicCall(masm); |
| 3211 |
| 3212 __ bind(&extra_checks_or_miss); |
| 3213 if (IsGeneric()) { |
| 3214 Label miss_uninit; |
| 3215 |
| 3216 __ CompareRoot(r4, Heap::kMegamorphicSymbolRootIndex); |
| 3217 __ b(eq, &slow); |
| 3218 __ CompareRoot(r4, Heap::kUninitializedSymbolRootIndex); |
| 3219 __ b(eq, &miss_uninit); |
 | 3220 // If we get here, go from monomorphic to megamorphic. Don't bother missing, |
| 3221 // just update. |
| 3222 __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3)); |
| 3223 __ LoadRoot(ip, Heap::kMegamorphicSymbolRootIndex); |
| 3224 __ str(ip, FieldMemOperand(r4, FixedArray::kHeaderSize)); |
| 3225 __ jmp(&slow); |
| 3226 |
| 3227 __ bind(&miss_uninit); |
| 3228 } |
| 3229 |
| 3230 GenerateMiss(masm); |
| 3231 |
| 3232 // the slow case |
| 3233 __ bind(&slow); |
| 3234 GenerateSlowCall(masm); |
| 3235 } |
| 3236 |
| 3237 |
| 3238 void CallICStub::GenerateMiss(MacroAssembler* masm) { |
| 3239 // Get the receiver of the function from the stack; 1 ~ return address. |
| 3240 __ ldr(r4, MemOperand(sp, (state_.arg_count() + 1) * kPointerSize)); |
| 3241 |
| 3242 { |
| 3243 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
| 3244 |
| 3245 // Push the receiver and the function and feedback info. |
| 3246 __ Push(r4, r1, r2, r3); |
| 3247 |
| 3248 // Call the entry. |
| 3249 ExternalReference miss = ExternalReference(IC_Utility(IC::kCallIC_Miss), |
| 3250 masm->isolate()); |
| 3251 __ CallExternalReference(miss, 4); |
| 3252 |
 | 3253 // Move result to r1 and exit the internal frame. |
| 3254 __ mov(r1, r0); |
| 3255 } |
| 3256 } |
| 3257 |
| 3258 |
3153 // StringCharCodeAtGenerator | 3259 // StringCharCodeAtGenerator |
3154 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) { | 3260 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) { |
3155 Label flat_string; | 3261 Label flat_string; |
3156 Label ascii_string; | 3262 Label ascii_string; |
3157 Label got_char_code; | 3263 Label got_char_code; |
3158 Label sliced_string; | 3264 Label sliced_string; |
3159 | 3265 |
3160 // If the receiver is a smi trigger the non-string case. | 3266 // If the receiver is a smi trigger the non-string case. |
3161 __ JumpIfSmi(object_, receiver_not_string_); | 3267 __ JumpIfSmi(object_, receiver_not_string_); |
3162 | 3268 |
(...skipping 2296 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
5459 MemOperand(fp, 6 * kPointerSize), | 5565 MemOperand(fp, 6 * kPointerSize), |
5460 NULL); | 5566 NULL); |
5461 } | 5567 } |
5462 | 5568 |
5463 | 5569 |
5464 #undef __ | 5570 #undef __ |
5465 | 5571 |
5466 } } // namespace v8::internal | 5572 } } // namespace v8::internal |
5467 | 5573 |
5468 #endif // V8_TARGET_ARCH_ARM | 5574 #endif // V8_TARGET_ARCH_ARM |
OLD | NEW |