OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "v8.h" | 5 #include "v8.h" |
6 | 6 |
7 #if V8_TARGET_ARCH_MIPS | 7 #if V8_TARGET_ARCH_MIPS |
8 | 8 |
9 #include "bootstrapper.h" | 9 #include "bootstrapper.h" |
10 #include "code-stubs.h" | 10 #include "code-stubs.h" |
(...skipping 2999 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3010 | 3010 |
3011 __ Push(t0, a2, a1); | 3011 __ Push(t0, a2, a1); |
3012 __ RecordWrite(a2, t0, a1, kRAHasNotBeenSaved, kDontSaveFPRegs, | 3012 __ RecordWrite(a2, t0, a1, kRAHasNotBeenSaved, kDontSaveFPRegs, |
3013 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); | 3013 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); |
3014 __ Pop(t0, a2, a1); | 3014 __ Pop(t0, a2, a1); |
3015 | 3015 |
3016 __ bind(&done); | 3016 __ bind(&done); |
3017 } | 3017 } |
3018 | 3018 |
3019 | 3019 |
| 3020 static void EmitContinueIfStrictOrNative(MacroAssembler* masm, Label* cont) { |
| 3021 __ lw(a3, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); |
| 3022 __ lw(t0, FieldMemOperand(a3, SharedFunctionInfo::kCompilerHintsOffset)); |
| 3023 |
| 3024 // Do not transform the receiver for strict mode functions. |
| 3025 int32_t strict_mode_function_mask = |
| 3026 1 << (SharedFunctionInfo::kStrictModeFunction + kSmiTagSize); |
| 3027 // Do not transform the receiver for native (Compilerhints already in a3). |
| 3028 int32_t native_mask = 1 << (SharedFunctionInfo::kNative + kSmiTagSize); |
| 3029 __ And(at, t0, Operand(strict_mode_function_mask | native_mask)); |
| 3030 __ Branch(cont, ne, at, Operand(zero_reg)); |
| 3031 } |
| 3032 |
| 3033 |
| 3034 static void EmitSlowCase(MacroAssembler* masm, |
| 3035 int argc, |
| 3036 Label* non_function) { |
| 3037 // Check for function proxy. |
| 3038 __ Branch(non_function, ne, t0, Operand(JS_FUNCTION_PROXY_TYPE)); |
| 3039 __ push(a1); // put proxy as additional argument |
| 3040 __ li(a0, Operand(argc + 1, RelocInfo::NONE32)); |
| 3041 __ mov(a2, zero_reg); |
| 3042 __ GetBuiltinFunction(a1, Builtins::CALL_FUNCTION_PROXY); |
| 3043 { |
| 3044 Handle<Code> adaptor = |
| 3045 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(); |
| 3046 __ Jump(adaptor, RelocInfo::CODE_TARGET); |
| 3047 } |
| 3048 |
| 3049 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead |
| 3050 // of the original receiver from the call site). |
| 3051 __ bind(non_function); |
| 3052 __ sw(a1, MemOperand(sp, argc * kPointerSize)); |
| 3053 __ li(a0, Operand(argc)); // Set up the number of arguments. |
| 3054 __ mov(a2, zero_reg); |
| 3055 __ GetBuiltinFunction(a1, Builtins::CALL_NON_FUNCTION); |
| 3056 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), |
| 3057 RelocInfo::CODE_TARGET); |
| 3058 } |
| 3059 |
| 3060 |
| 3061 static void EmitWrapCase(MacroAssembler* masm, int argc, Label* cont) { |
| 3062 // Wrap the receiver and patch it back onto the stack. |
| 3063 { FrameScope frame_scope(masm, StackFrame::INTERNAL); |
| 3064 __ Push(a1, a3); |
| 3065 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); |
| 3066 __ pop(a1); |
| 3067 } |
| 3068 __ Branch(USE_DELAY_SLOT, cont); |
| 3069 __ sw(v0, MemOperand(sp, argc * kPointerSize)); |
| 3070 } |
| 3071 |
| 3072 |
3020 void CallFunctionStub::Generate(MacroAssembler* masm) { | 3073 void CallFunctionStub::Generate(MacroAssembler* masm) { |
3021 // a1 : the function to call | 3074 // a1 : the function to call |
3022 // a2 : feedback vector | |
3023 // a3 : (only if a2 is not the megamorphic symbol) slot in feedback | |
3024 // vector (Smi) | |
3025 Label slow, non_function, wrap, cont; | 3075 Label slow, non_function, wrap, cont; |
3026 | 3076 |
3027 if (NeedsChecks()) { | 3077 if (NeedsChecks()) { |
3028 // Check that the function is really a JavaScript function. | 3078 // Check that the function is really a JavaScript function. |
3029 // a1: pushed function (to be verified) | 3079 // a1: pushed function (to be verified) |
3030 __ JumpIfSmi(a1, &non_function); | 3080 __ JumpIfSmi(a1, &non_function); |
3031 | 3081 |
3032 // Goto slow case if we do not have a function. | 3082 // Goto slow case if we do not have a function. |
3033 __ GetObjectType(a1, t0, t0); | 3083 __ GetObjectType(a1, t0, t0); |
3034 __ Branch(&slow, ne, t0, Operand(JS_FUNCTION_TYPE)); | 3084 __ Branch(&slow, ne, t0, Operand(JS_FUNCTION_TYPE)); |
3035 | |
3036 if (RecordCallTarget()) { | |
3037 GenerateRecordCallTarget(masm); | |
3038 // Type information was updated. Because we may call Array, which | |
3039 // expects either undefined or an AllocationSite in a2 we need | |
3040 // to set a2 to undefined. | |
3041 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); | |
3042 } | |
3043 } | 3085 } |
3044 | 3086 |
3045 // Fast-case: Invoke the function now. | 3087 // Fast-case: Invoke the function now. |
3046 // a1: pushed function | 3088 // a1: pushed function |
3047 ParameterCount actual(argc_); | 3089 int argc = argc_; |
| 3090 ParameterCount actual(argc); |
3048 | 3091 |
3049 if (CallAsMethod()) { | 3092 if (CallAsMethod()) { |
3050 if (NeedsChecks()) { | 3093 if (NeedsChecks()) { |
3051 // Do not transform the receiver for strict mode functions and natives. | 3094 EmitContinueIfStrictOrNative(masm, &cont); |
3052 __ lw(a3, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); | |
3053 __ lw(t0, FieldMemOperand(a3, SharedFunctionInfo::kCompilerHintsOffset)); | |
3054 int32_t strict_mode_function_mask = | |
3055 1 << (SharedFunctionInfo::kStrictModeFunction + kSmiTagSize); | |
3056 int32_t native_mask = 1 << (SharedFunctionInfo::kNative + kSmiTagSize); | |
3057 __ And(at, t0, Operand(strict_mode_function_mask | native_mask)); | |
3058 __ Branch(&cont, ne, at, Operand(zero_reg)); | |
3059 } | 3095 } |
3060 | 3096 |
3061 // Compute the receiver in sloppy mode. | 3097 // Compute the receiver in sloppy mode. |
3062 __ lw(a3, MemOperand(sp, argc_ * kPointerSize)); | 3098 __ lw(a3, MemOperand(sp, argc * kPointerSize)); |
3063 | 3099 |
3064 if (NeedsChecks()) { | 3100 if (NeedsChecks()) { |
3065 __ JumpIfSmi(a3, &wrap); | 3101 __ JumpIfSmi(a3, &wrap); |
3066 __ GetObjectType(a3, t0, t0); | 3102 __ GetObjectType(a3, t0, t0); |
3067 __ Branch(&wrap, lt, t0, Operand(FIRST_SPEC_OBJECT_TYPE)); | 3103 __ Branch(&wrap, lt, t0, Operand(FIRST_SPEC_OBJECT_TYPE)); |
3068 } else { | 3104 } else { |
3069 __ jmp(&wrap); | 3105 __ jmp(&wrap); |
3070 } | 3106 } |
3071 | 3107 |
3072 __ bind(&cont); | 3108 __ bind(&cont); |
3073 } | 3109 } |
| 3110 |
3074 __ InvokeFunction(a1, actual, JUMP_FUNCTION, NullCallWrapper()); | 3111 __ InvokeFunction(a1, actual, JUMP_FUNCTION, NullCallWrapper()); |
3075 | 3112 |
3076 if (NeedsChecks()) { | 3113 if (NeedsChecks()) { |
3077 // Slow-case: Non-function called. | 3114 // Slow-case: Non-function called. |
3078 __ bind(&slow); | 3115 __ bind(&slow); |
3079 if (RecordCallTarget()) { | 3116 EmitSlowCase(masm, argc, &non_function); |
3080 // If there is a call target cache, mark it megamorphic in the | |
3081 // non-function case. MegamorphicSentinel is an immortal immovable | |
3082 // object (megamorphic symbol) so no write barrier is needed. | |
3083 ASSERT_EQ(*TypeFeedbackInfo::MegamorphicSentinel(isolate()), | |
3084 isolate()->heap()->megamorphic_symbol()); | |
3085 __ sll(t1, a3, kPointerSizeLog2 - kSmiTagSize); | |
3086 __ Addu(t1, a2, Operand(t1)); | |
3087 __ LoadRoot(at, Heap::kMegamorphicSymbolRootIndex); | |
3088 __ sw(at, FieldMemOperand(t1, FixedArray::kHeaderSize)); | |
3089 } | |
3090 // Check for function proxy. | |
3091 __ Branch(&non_function, ne, t0, Operand(JS_FUNCTION_PROXY_TYPE)); | |
3092 __ push(a1); // Put proxy as additional argument. | |
3093 __ li(a0, Operand(argc_ + 1, RelocInfo::NONE32)); | |
3094 __ li(a2, Operand(0, RelocInfo::NONE32)); | |
3095 __ GetBuiltinFunction(a1, Builtins::CALL_FUNCTION_PROXY); | |
3096 { | |
3097 Handle<Code> adaptor = | |
3098 isolate()->builtins()->ArgumentsAdaptorTrampoline(); | |
3099 __ Jump(adaptor, RelocInfo::CODE_TARGET); | |
3100 } | |
3101 | |
3102 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead | |
3103 // of the original receiver from the call site). | |
3104 __ bind(&non_function); | |
3105 __ sw(a1, MemOperand(sp, argc_ * kPointerSize)); | |
3106 __ li(a0, Operand(argc_)); // Set up the number of arguments. | |
3107 __ li(a2, Operand(0, RelocInfo::NONE32)); | |
3108 __ GetBuiltinFunction(a1, Builtins::CALL_NON_FUNCTION); | |
3109 __ Jump(isolate()->builtins()->ArgumentsAdaptorTrampoline(), | |
3110 RelocInfo::CODE_TARGET); | |
3111 } | 3117 } |
3112 | 3118 |
3113 if (CallAsMethod()) { | 3119 if (CallAsMethod()) { |
3114 __ bind(&wrap); | 3120 __ bind(&wrap); |
3115 // Wrap the receiver and patch it back onto the stack. | 3121 // Wrap the receiver and patch it back onto the stack. |
3116 { FrameScope frame_scope(masm, StackFrame::INTERNAL); | 3122 EmitWrapCase(masm, argc, &cont); |
3117 __ Push(a1, a3); | |
3118 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); | |
3119 __ pop(a1); | |
3120 } | |
3121 __ mov(a0, v0); | |
3122 __ sw(a0, MemOperand(sp, argc_ * kPointerSize)); | |
3123 __ jmp(&cont); | |
3124 } | 3123 } |
3125 } | 3124 } |
3126 | 3125 |
3127 | 3126 |
3128 void CallConstructStub::Generate(MacroAssembler* masm) { | 3127 void CallConstructStub::Generate(MacroAssembler* masm) { |
3129 // a0 : number of arguments | 3128 // a0 : number of arguments |
3130 // a1 : the function to call | 3129 // a1 : the function to call |
3131 // a2 : feedback vector | 3130 // a2 : feedback vector |
3132 // a3 : (only if a2 is not undefined) slot in feedback vector (Smi) | 3131 // a3 : (only if a2 is not undefined) slot in feedback vector (Smi) |
3133 Label slow, non_function_call; | 3132 Label slow, non_function_call; |
(...skipping 48 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3182 __ bind(&non_function_call); | 3181 __ bind(&non_function_call); |
3183 __ GetBuiltinFunction(a1, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR); | 3182 __ GetBuiltinFunction(a1, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR); |
3184 __ bind(&do_call); | 3183 __ bind(&do_call); |
3185 // Set expected number of arguments to zero (not changing r0). | 3184 // Set expected number of arguments to zero (not changing r0). |
3186 __ li(a2, Operand(0, RelocInfo::NONE32)); | 3185 __ li(a2, Operand(0, RelocInfo::NONE32)); |
3187 __ Jump(isolate()->builtins()->ArgumentsAdaptorTrampoline(), | 3186 __ Jump(isolate()->builtins()->ArgumentsAdaptorTrampoline(), |
3188 RelocInfo::CODE_TARGET); | 3187 RelocInfo::CODE_TARGET); |
3189 } | 3188 } |
3190 | 3189 |
3191 | 3190 |
| 3191 static void EmitLoadTypeFeedbackVector(MacroAssembler* masm, Register vector) { |
| 3192 __ lw(vector, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); |
| 3193 __ lw(vector, FieldMemOperand(vector, |
| 3194 JSFunction::kSharedFunctionInfoOffset)); |
| 3195 __ lw(vector, FieldMemOperand(vector, |
| 3196 SharedFunctionInfo::kFeedbackVectorOffset)); |
| 3197 } |
| 3198 |
| 3199 |
| 3200 void CallICStub::Generate(MacroAssembler* masm) { |
| 3201 // a1 - function |
| 3202 // a3 - slot id (Smi) |
| 3203 Label extra_checks_or_miss, slow_start; |
| 3204 Label slow, non_function, wrap, cont; |
| 3205 Label have_js_function; |
| 3206 int argc = state_.arg_count(); |
| 3207 ParameterCount actual(argc); |
| 3208 |
| 3209 EmitLoadTypeFeedbackVector(masm, a2); |
| 3210 |
| 3211 // The checks. First, does r1 match the recorded monomorphic target? |
| 3212 __ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize); |
| 3213 __ Addu(t0, a2, Operand(t0)); |
| 3214 __ lw(t0, FieldMemOperand(t0, FixedArray::kHeaderSize)); |
| 3215 __ Branch(&extra_checks_or_miss, ne, a1, Operand(t0)); |
| 3216 |
| 3217 __ bind(&have_js_function); |
| 3218 if (state_.CallAsMethod()) { |
| 3219 EmitContinueIfStrictOrNative(masm, &cont); |
| 3220 // Compute the receiver in sloppy mode. |
| 3221 __ lw(a3, MemOperand(sp, argc * kPointerSize)); |
| 3222 |
| 3223 __ JumpIfSmi(a3, &wrap); |
| 3224 __ GetObjectType(a3, t0, t0); |
| 3225 __ Branch(&wrap, lt, t0, Operand(FIRST_SPEC_OBJECT_TYPE)); |
| 3226 |
| 3227 __ bind(&cont); |
| 3228 } |
| 3229 |
| 3230 __ InvokeFunction(a1, actual, JUMP_FUNCTION, NullCallWrapper()); |
| 3231 |
| 3232 __ bind(&slow); |
| 3233 EmitSlowCase(masm, argc, &non_function); |
| 3234 |
| 3235 if (state_.CallAsMethod()) { |
| 3236 __ bind(&wrap); |
| 3237 EmitWrapCase(masm, argc, &cont); |
| 3238 } |
| 3239 |
| 3240 __ bind(&extra_checks_or_miss); |
| 3241 Label miss; |
| 3242 |
| 3243 __ LoadRoot(at, Heap::kMegamorphicSymbolRootIndex); |
| 3244 __ Branch(&slow_start, eq, t0, Operand(at)); |
| 3245 __ LoadRoot(at, Heap::kUninitializedSymbolRootIndex); |
| 3246 __ Branch(&miss, eq, t0, Operand(at)); |
| 3247 |
| 3248 if (!FLAG_trace_ic) { |
| 3249 // We are going megamorphic, and we don't want to visit the runtime. |
| 3250 __ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize); |
| 3251 __ Addu(t0, a2, Operand(t0)); |
| 3252 __ LoadRoot(at, Heap::kMegamorphicSymbolRootIndex); |
| 3253 __ sw(at, FieldMemOperand(t0, FixedArray::kHeaderSize)); |
| 3254 __ Branch(&slow_start); |
| 3255 } |
| 3256 |
| 3257 // We are here because tracing is on or we are going monomorphic. |
| 3258 __ bind(&miss); |
| 3259 GenerateMiss(masm); |
| 3260 |
| 3261 // the slow case |
| 3262 __ bind(&slow_start); |
| 3263 // Check that the function is really a JavaScript function. |
| 3264 // a1: pushed function (to be verified) |
| 3265 __ JumpIfSmi(a1, &non_function); |
| 3266 |
| 3267 // Goto slow case if we do not have a function. |
| 3268 __ GetObjectType(a1, t0, t0); |
| 3269 __ Branch(&slow, ne, t0, Operand(JS_FUNCTION_TYPE)); |
| 3270 __ Branch(&have_js_function); |
| 3271 } |
| 3272 |
| 3273 |
| 3274 void CallICStub::GenerateMiss(MacroAssembler* masm) { |
| 3275 // Get the receiver of the function from the stack; 1 ~ return address. |
| 3276 __ lw(t0, MemOperand(sp, (state_.arg_count() + 1) * kPointerSize)); |
| 3277 |
| 3278 { |
| 3279 FrameScope scope(masm, StackFrame::INTERNAL); |
| 3280 |
| 3281 // Push the receiver and the function and feedback info. |
| 3282 __ Push(t0, a1, a2, a3); |
| 3283 |
| 3284 // Call the entry. |
| 3285 ExternalReference miss = ExternalReference(IC_Utility(IC::kCallIC_Miss), |
| 3286 masm->isolate()); |
| 3287 __ CallExternalReference(miss, 4); |
| 3288 |
| 3289 // Move result to a1 and exit the internal frame. |
| 3290 __ mov(a1, v0); |
| 3291 } |
| 3292 } |
| 3293 |
| 3294 |
3192 // StringCharCodeAtGenerator. | 3295 // StringCharCodeAtGenerator. |
3193 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) { | 3296 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) { |
3194 Label flat_string; | 3297 Label flat_string; |
3195 Label ascii_string; | 3298 Label ascii_string; |
3196 Label got_char_code; | 3299 Label got_char_code; |
3197 Label sliced_string; | 3300 Label sliced_string; |
3198 | 3301 |
3199 ASSERT(!t0.is(index_)); | 3302 ASSERT(!t0.is(index_)); |
3200 ASSERT(!t0.is(result_)); | 3303 ASSERT(!t0.is(result_)); |
3201 ASSERT(!t0.is(object_)); | 3304 ASSERT(!t0.is(object_)); |
(...skipping 2121 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
5323 MemOperand(fp, 6 * kPointerSize), | 5426 MemOperand(fp, 6 * kPointerSize), |
5324 NULL); | 5427 NULL); |
5325 } | 5428 } |
5326 | 5429 |
5327 | 5430 |
5328 #undef __ | 5431 #undef __ |
5329 | 5432 |
5330 } } // namespace v8::internal | 5433 } } // namespace v8::internal |
5331 | 5434 |
5332 #endif // V8_TARGET_ARCH_MIPS | 5435 #endif // V8_TARGET_ARCH_MIPS |
OLD | NEW |