Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(328)

Side by Side Diff: src/arm64/code-stubs-arm64.cc

Issue 247373002: CallICStub with a "never patch" approach until customization. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: REBASE + code size multiplier. Created 6 years, 7 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/arm64/builtins-arm64.cc ('k') | src/arm64/debug-arm64.cc » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2013 the V8 project authors. All rights reserved. 1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "v8.h" 5 #include "v8.h"
6 6
7 #if V8_TARGET_ARCH_ARM64 7 #if V8_TARGET_ARCH_ARM64
8 8
9 #include "bootstrapper.h" 9 #include "bootstrapper.h"
10 #include "code-stubs.h" 10 #include "code-stubs.h"
(...skipping 3115 matching lines...) Expand 10 before | Expand all | Expand 10 after
3126 3126
3127 __ Push(function); 3127 __ Push(function);
3128 __ RecordWrite(feedback_vector, scratch1, function, kLRHasNotBeenSaved, 3128 __ RecordWrite(feedback_vector, scratch1, function, kLRHasNotBeenSaved,
3129 kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); 3129 kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
3130 __ Pop(function); 3130 __ Pop(function);
3131 3131
3132 __ Bind(&done); 3132 __ Bind(&done);
3133 } 3133 }
3134 3134
3135 3135
// Jumps to |cont| when the callee's receiver must NOT be wrapped:
// strict-mode functions and natives are called with the receiver unchanged.
// Expects the callee JSFunction in x1; clobbers x3 (SharedFunctionInfo)
// and w4 (compiler hints).
static void EmitContinueIfStrictOrNative(MacroAssembler* masm, Label* cont) {
  // Do not transform the receiver for strict mode functions.
  __ Ldr(x3, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
  __ Ldr(w4, FieldMemOperand(x3, SharedFunctionInfo::kCompilerHintsOffset));
  __ Tbnz(w4, SharedFunctionInfo::kStrictModeFunction, cont);

  // Do not transform the receiver for natives either (the compiler hints
  // were already loaded into w4 above, so no reload is needed).
  __ Tbnz(w4, SharedFunctionInfo::kNative, cont);
}
3145
3146
// Slow path shared by CallFunctionStub and CallICStub for callees that are
// not plain JSFunctions.  |type| holds the instance type produced by the
// caller's JumpIfNotObjectType check.  Function proxies are re-dispatched
// through CALL_FUNCTION_PROXY (with the proxy pushed as an extra argument);
// everything else falls through to |non_function| and goes through
// CALL_NON_FUNCTION, which expects the non-function callee in the receiver
// slot.  Both paths tail-call the arguments adaptor trampoline.
static void EmitSlowCase(MacroAssembler* masm,
                         int argc,
                         Register function,
                         Register type,
                         Label* non_function) {
  // Check for function proxy.
  // x10 : function type.
  __ CompareAndBranch(type, JS_FUNCTION_PROXY_TYPE, ne, non_function);
  __ Push(function);  // put proxy as additional argument
  __ Mov(x0, argc + 1);  // Include the pushed proxy in the argument count.
  __ Mov(x2, 0);  // Expected arguments count: none (adaptor handles it).
  __ GetBuiltinFunction(x1, Builtins::CALL_FUNCTION_PROXY);
  {
    Handle<Code> adaptor =
        masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
    __ Jump(adaptor, RelocInfo::CODE_TARGET);
  }

  // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
  // of the original receiver from the call site).
  __ Bind(non_function);
  // kXRegSize == kPointerSize on arm64; this overwrites the receiver slot.
  __ Poke(function, argc * kXRegSize);
  __ Mov(x0, argc);  // Set up the number of arguments.
  __ Mov(x2, 0);
  __ GetBuiltinFunction(function, Builtins::CALL_NON_FUNCTION);
  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
          RelocInfo::CODE_TARGET);
}
3175
3176
// Wraps a primitive receiver (in x3) into an object via the TO_OBJECT
// builtin, patches the wrapped value back into the receiver slot on the
// stack, and jumps back to |cont|.  The callee in x1 is preserved across
// the builtin call by pushing/popping it around the frame.
static void EmitWrapCase(MacroAssembler* masm, int argc, Label* cont) {
  // Wrap the receiver and patch it back onto the stack.
  { FrameScope frame_scope(masm, StackFrame::INTERNAL);
    __ Push(x1, x3);
    __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
    __ Pop(x1);
  }
  // TO_OBJECT leaves the wrapped receiver in x0; store it over the old one.
  __ Poke(x0, argc * kPointerSize);
  __ B(cont);
}
3187
3188
// Calls the function in x1 with argc_ arguments.  When NeedsChecks() is
// set, smis and non-JSFunction callees are routed to the shared slow case;
// when CallAsMethod() is set, primitive receivers are wrapped via
// EmitWrapCase before the invoke.
void CallFunctionStub::Generate(MacroAssembler* masm) {
  ASM_LOCATION("CallFunctionStub::Generate");
  // x1  function    the function to call

  Register function = x1;
  Register type = x4;
  Label slow, non_function, wrap, cont;

  // TODO(jbramley): This function has a lot of unnamed registers. Name them,
  // and tidy things up a bit.

  if (NeedsChecks()) {
    // Check that the function is really a JavaScript function.
    __ JumpIfSmi(function, &non_function);

    // Goto slow case if we do not have a function.
    __ JumpIfNotObjectType(function, x10, type, JS_FUNCTION_TYPE, &slow);
  }

  // Fast-case: Invoke the function now.
  // x1  function  pushed function
  int argc = argc_;
  ParameterCount actual(argc);

  if (CallAsMethod()) {
    if (NeedsChecks()) {
      // Strict-mode and native callees skip receiver wrapping.
      EmitContinueIfStrictOrNative(masm, &cont);
    }

    // Compute the receiver in sloppy mode.
    __ Peek(x3, argc * kPointerSize);

    if (NeedsChecks()) {
      // Only primitives (smis and non-spec-objects) need wrapping.
      __ JumpIfSmi(x3, &wrap);
      __ JumpIfObjectType(x3, x10, type, FIRST_SPEC_OBJECT_TYPE, &wrap, lt);
    } else {
      // Without checks the receiver is unconditionally wrapped.
      __ B(&wrap);
    }

    __ Bind(&cont);
  }

  __ InvokeFunction(function,
                    actual,
                    JUMP_FUNCTION,
                    NullCallWrapper());

  if (NeedsChecks()) {
    // Slow-case: Non-function called.
    __ Bind(&slow);
    EmitSlowCase(masm, argc, function, type, &non_function);
  }

  if (CallAsMethod()) {
    __ Bind(&wrap);
    EmitWrapCase(masm, argc, &cont);
  }
}
3248 3247
3249 3248
3250 void CallConstructStub::Generate(MacroAssembler* masm) { 3249 void CallConstructStub::Generate(MacroAssembler* masm) {
3251 ASM_LOCATION("CallConstructStub::Generate"); 3250 ASM_LOCATION("CallConstructStub::Generate");
3252 // x0 : number of arguments 3251 // x0 : number of arguments
3253 // x1 : the function to call 3252 // x1 : the function to call
3254 // x2 : feedback vector 3253 // x2 : feedback vector
3255 // x3 : slot in feedback vector (smi) (if r2 is not the megamorphic symbol) 3254 // x3 : slot in feedback vector (smi) (if r2 is not the megamorphic symbol)
(...skipping 54 matching lines...) Expand 10 before | Expand all | Expand 10 after
3310 __ GetBuiltinFunction(x1, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR); 3309 __ GetBuiltinFunction(x1, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
3311 3310
3312 __ Bind(&do_call); 3311 __ Bind(&do_call);
3313 // Set expected number of arguments to zero (not changing x0). 3312 // Set expected number of arguments to zero (not changing x0).
3314 __ Mov(x2, 0); 3313 __ Mov(x2, 0);
3315 __ Jump(isolate()->builtins()->ArgumentsAdaptorTrampoline(), 3314 __ Jump(isolate()->builtins()->ArgumentsAdaptorTrampoline(),
3316 RelocInfo::CODE_TARGET); 3315 RelocInfo::CODE_TARGET);
3317 } 3316 }
3318 3317
3319 3318
// Loads the current frame's type feedback vector into |vector| by chasing
// frame function -> SharedFunctionInfo -> feedback vector.  |vector| is
// used as scratch for each intermediate load.
static void EmitLoadTypeFeedbackVector(MacroAssembler* masm, Register vector) {
  __ Ldr(vector, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ Ldr(vector, FieldMemOperand(vector,
                                 JSFunction::kSharedFunctionInfoOffset));
  __ Ldr(vector, FieldMemOperand(vector,
                                 SharedFunctionInfo::kFeedbackVectorOffset));
}
3326
3327
3328 void CallICStub::Generate(MacroAssembler* masm) {
3329 ASM_LOCATION("CallICStub");
3330
3331 // x1 - function
3332 // x3 - slot id (Smi)
3333 Label extra_checks_or_miss, slow_start;
3334 Label slow, non_function, wrap, cont;
3335 Label have_js_function;
3336 int argc = state_.arg_count();
3337 ParameterCount actual(argc);
3338
3339 Register function = x1;
3340 Register feedback_vector = x2;
3341 Register index = x3;
3342 Register type = x4;
3343
3344 EmitLoadTypeFeedbackVector(masm, feedback_vector);
3345
3346 // The checks. First, does x1 match the recorded monomorphic target?
3347 __ Add(x4, feedback_vector,
3348 Operand::UntagSmiAndScale(index, kPointerSizeLog2));
3349 __ Ldr(x4, FieldMemOperand(x4, FixedArray::kHeaderSize));
3350
3351 __ Cmp(x4, function);
3352 __ B(ne, &extra_checks_or_miss);
3353
3354 __ bind(&have_js_function);
3355 if (state_.CallAsMethod()) {
3356 EmitContinueIfStrictOrNative(masm, &cont);
3357
3358 // Compute the receiver in sloppy mode.
3359 __ Peek(x3, argc * kPointerSize);
3360
3361 __ JumpIfSmi(x3, &wrap);
3362 __ JumpIfObjectType(x3, x10, type, FIRST_SPEC_OBJECT_TYPE, &wrap, lt);
3363
3364 __ Bind(&cont);
3365 }
3366
3367 __ InvokeFunction(function,
3368 actual,
3369 JUMP_FUNCTION,
3370 NullCallWrapper());
3371
3372 __ bind(&slow);
3373 EmitSlowCase(masm, argc, function, type, &non_function);
3374
3375 if (state_.CallAsMethod()) {
3376 __ bind(&wrap);
3377 EmitWrapCase(masm, argc, &cont);
3378 }
3379
3380 __ bind(&extra_checks_or_miss);
3381 Label miss;
3382
3383 __ JumpIfRoot(x4, Heap::kMegamorphicSymbolRootIndex, &slow_start);
3384 __ JumpIfRoot(x4, Heap::kUninitializedSymbolRootIndex, &miss);
3385
3386 if (!FLAG_trace_ic) {
3387 // We are going megamorphic, and we don't want to visit the runtime.
3388 __ Add(x4, feedback_vector,
3389 Operand::UntagSmiAndScale(index, kPointerSizeLog2));
3390 __ LoadRoot(x5, Heap::kMegamorphicSymbolRootIndex);
3391 __ Str(x5, FieldMemOperand(x4, FixedArray::kHeaderSize));
3392 __ B(&slow_start);
3393 }
3394
3395 // We are here because tracing is on or we are going monomorphic.
3396 __ bind(&miss);
3397 GenerateMiss(masm);
3398
3399 // the slow case
3400 __ bind(&slow_start);
3401
3402 // Check that the function is really a JavaScript function.
3403 __ JumpIfSmi(function, &non_function);
3404
3405 // Goto slow case if we do not have a function.
3406 __ JumpIfNotObjectType(function, x10, type, JS_FUNCTION_TYPE, &slow);
3407 __ B(&have_js_function);
3408 }
3409
3410
// Handles a CallIC miss: calls the CallIC_Miss runtime entry with
// (receiver, function, feedback vector, slot) so type feedback can be
// updated, then leaves the (possibly updated) target function in x1.
void CallICStub::GenerateMiss(MacroAssembler* masm) {
  ASM_LOCATION("CallICStub[Miss]");

  // Get the receiver of the function from the stack.
  // NOTE(review): the original comment "1 ~ return address" and the "+ 1"
  // look ported from ia32, where the return address lives on the stack.
  // On arm64 the receiver slot is argc * kPointerSize (cf. the Peek in
  // CallICStub::Generate) -- confirm this offset is intended.
  __ Peek(x4, (state_.arg_count() + 1) * kPointerSize);

  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Push the receiver and the function and feedback info.
    __ Push(x4, x1, x2, x3);

    // Call the entry.
    ExternalReference miss = ExternalReference(IC_Utility(IC::kCallIC_Miss),
                                               masm->isolate());
    __ CallExternalReference(miss, 4);

    // Move result to x1 (the callee register) and exit the internal frame.
    // (Original comment said "edi" -- an ia32 leftover.)
    __ Mov(x1, x0);
  }
}
3432
3433
3320 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) { 3434 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
3321 // If the receiver is a smi trigger the non-string case. 3435 // If the receiver is a smi trigger the non-string case.
3322 __ JumpIfSmi(object_, receiver_not_string_); 3436 __ JumpIfSmi(object_, receiver_not_string_);
3323 3437
3324 // Fetch the instance type of the receiver into result register. 3438 // Fetch the instance type of the receiver into result register.
3325 __ Ldr(result_, FieldMemOperand(object_, HeapObject::kMapOffset)); 3439 __ Ldr(result_, FieldMemOperand(object_, HeapObject::kMapOffset));
3326 __ Ldrb(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset)); 3440 __ Ldrb(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset));
3327 3441
3328 // If the receiver is not a string trigger the non-string case. 3442 // If the receiver is not a string trigger the non-string case.
3329 __ TestAndBranchIfAnySet(result_, kIsNotStringMask, receiver_not_string_); 3443 __ TestAndBranchIfAnySet(result_, kIsNotStringMask, receiver_not_string_);
(...skipping 2024 matching lines...) Expand 10 before | Expand all | Expand 10 after
5354 MemOperand(fp, 6 * kPointerSize), 5468 MemOperand(fp, 6 * kPointerSize),
5355 NULL); 5469 NULL);
5356 } 5470 }
5357 5471
5358 5472
5359 #undef __ 5473 #undef __
5360 5474
5361 } } // namespace v8::internal 5475 } } // namespace v8::internal
5362 5476
5363 #endif // V8_TARGET_ARCH_ARM64 5477 #endif // V8_TARGET_ARCH_ARM64
OLDNEW
« no previous file with comments | « src/arm64/builtins-arm64.cc ('k') | src/arm64/debug-arm64.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698