OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 3275 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3286 | 3286 |
3287 __ Push(function); | 3287 __ Push(function); |
3288 __ RecordWrite(feedback_vector, scratch1, function, kLRHasNotBeenSaved, | 3288 __ RecordWrite(feedback_vector, scratch1, function, kLRHasNotBeenSaved, |
3289 kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); | 3289 kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); |
3290 __ Pop(function); | 3290 __ Pop(function); |
3291 | 3291 |
3292 __ Bind(&done); | 3292 __ Bind(&done); |
3293 } | 3293 } |
3294 | 3294 |
3295 | 3295 |
3296 void CallFunctionStub::Generate(MacroAssembler* masm) { | 3296 static void GenericCallHelper(MacroAssembler* masm, |
3297 ASM_LOCATION("CallFunctionStub::Generate"); | 3297 const CallIC::State& state) { |
3298 // x1 function the function to call | 3298 // x1 function the function to call |
3299 // x2 : feedback vector | |
3300 // x3 : slot in feedback vector (smi) (if x2 is not the megamorphic symbol) | |
3301 Register function = x1; | 3299 Register function = x1; |
3302 Register cache_cell = x2; | |
3303 Register slot = x3; | |
3304 Register type = x4; | 3300 Register type = x4; |
3305 Label slow, non_function, wrap, cont; | 3301 Label slow, non_function, wrap, cont; |
3306 | 3302 |
3307 // TODO(jbramley): This function has a lot of unnamed registers. Name them, | 3303 // TODO(jbramley): This function has a lot of unnamed registers. Name them, |
3308 // and tidy things up a bit. | 3304 // and tidy things up a bit. |
3309 | 3305 |
3310 if (NeedsChecks()) { | 3306 if (state.IsGeneric()) { |
3311 // Check that the function is really a JavaScript function. | 3307 // Check that the function is really a JavaScript function. |
3312 __ JumpIfSmi(function, &non_function); | 3308 __ JumpIfSmi(function, &non_function); |
3313 | 3309 |
3314 // Goto slow case if we do not have a function. | 3310 // Goto slow case if we do not have a function. |
3315 __ JumpIfNotObjectType(function, x10, type, JS_FUNCTION_TYPE, &slow); | 3311 __ JumpIfNotObjectType(function, x10, type, JS_FUNCTION_TYPE, &slow); |
3316 | |
3317 if (RecordCallTarget()) { | |
3318 GenerateRecordCallTarget(masm, x0, function, cache_cell, slot, x4, x5); | |
3319 // Type information was updated. Because we may call Array, which | |
3320 // expects either undefined or an AllocationSite in ebx we need | |
3321 // to set ebx to undefined. | |
3322 __ LoadRoot(cache_cell, Heap::kUndefinedValueRootIndex); | |
3323 } | |
3324 } | 3312 } |
3325 | 3313 |
3326 // Fast-case: Invoke the function now. | 3314 // Fast-case: Invoke the function now. |
3327 // x1 function pushed function | 3315 // x1 function pushed function |
3328 ParameterCount actual(argc_); | 3316 int argc = state.arg_count(); |
| 3317 ParameterCount actual(argc); |
3329 | 3318 |
3330 if (CallAsMethod()) { | 3319 if (state.CallAsMethod()) { |
3331 if (NeedsChecks()) { | 3320 if (state.IsGeneric()) { |
3332 // Do not transform the receiver for strict mode functions. | 3321 // Do not transform the receiver for strict mode functions. |
3333 __ Ldr(x3, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset)); | 3322 __ Ldr(x3, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset)); |
3334 __ Ldr(w4, FieldMemOperand(x3, SharedFunctionInfo::kCompilerHintsOffset)); | 3323 __ Ldr(w4, FieldMemOperand(x3, SharedFunctionInfo::kCompilerHintsOffset)); |
3335 __ Tbnz(w4, SharedFunctionInfo::kStrictModeFunction, &cont); | 3324 __ Tbnz(w4, SharedFunctionInfo::kStrictModeFunction, &cont); |
3336 | 3325 |
3337 // Do not transform the receiver for native (Compilerhints already in x3). | 3326 // Do not transform the receiver for native (Compilerhints already in x3). |
3338 __ Tbnz(w4, SharedFunctionInfo::kNative, &cont); | 3327 __ Tbnz(w4, SharedFunctionInfo::kNative, &cont); |
3339 } | 3328 } |
3340 | 3329 |
3341 // Compute the receiver in sloppy mode. | 3330 if (state.IsSloppy()) { |
3342 __ Peek(x3, argc_ * kPointerSize); | 3331 // Compute the receiver in sloppy mode. |
| 3332 __ Peek(x3, argc * kPointerSize); |
3343 | 3333 |
3344 if (NeedsChecks()) { | |
3345 __ JumpIfSmi(x3, &wrap); | 3334 __ JumpIfSmi(x3, &wrap); |
3346 __ JumpIfObjectType(x3, x10, type, FIRST_SPEC_OBJECT_TYPE, &wrap, lt); | 3335 __ JumpIfObjectType(x3, x10, type, FIRST_SPEC_OBJECT_TYPE, &wrap, lt); |
3347 } else { | |
3348 __ B(&wrap); | |
3349 } | 3336 } |
3350 | 3337 |
3351 __ Bind(&cont); | 3338 __ Bind(&cont); |
3352 } | 3339 } |
3353 __ InvokeFunction(function, | |
3354 actual, | |
3355 JUMP_FUNCTION, | |
3356 NullCallWrapper()); | |
3357 | 3340 |
3358 if (NeedsChecks()) { | 3341 if (state.ArgumentsMustMatch()) { |
| 3342 __ InvokeFunction(function, |
| 3343 actual, |
| 3344 actual, |
| 3345 JUMP_FUNCTION, |
| 3346 NullCallWrapper()); |
| 3347 } else { |
| 3348 __ InvokeFunction(function, |
| 3349 actual, |
| 3350 JUMP_FUNCTION, |
| 3351 NullCallWrapper()); |
| 3352 } |
| 3353 |
| 3354 if (state.IsGeneric()) { |
3359 // Slow-case: Non-function called. | 3355 // Slow-case: Non-function called. |
3360 __ Bind(&slow); | 3356 __ Bind(&slow); |
3361 if (RecordCallTarget()) { | |
3362 // If there is a call target cache, mark it megamorphic in the | |
3363 // non-function case. MegamorphicSentinel is an immortal immovable object | |
3364 // (megamorphic symbol) so no write barrier is needed. | |
3365 ASSERT_EQ(*TypeFeedbackInfo::MegamorphicSentinel(masm->isolate()), | |
3366 masm->isolate()->heap()->megamorphic_symbol()); | |
3367 __ Add(x12, cache_cell, Operand::UntagSmiAndScale(slot, | |
3368 kPointerSizeLog2)); | |
3369 __ LoadRoot(x11, Heap::kMegamorphicSymbolRootIndex); | |
3370 __ Str(x11, FieldMemOperand(x12, FixedArray::kHeaderSize)); | |
3371 } | |
3372 // Check for function proxy. | 3357 // Check for function proxy. |
3373 // x10 : function type. | 3358 // x10 : function type. |
3374 __ CompareAndBranch(type, JS_FUNCTION_PROXY_TYPE, ne, &non_function); | 3359 __ CompareAndBranch(type, JS_FUNCTION_PROXY_TYPE, ne, &non_function); |
3375 __ Push(function); // put proxy as additional argument | 3360 __ Push(function); // put proxy as additional argument |
3376 __ Mov(x0, argc_ + 1); | 3361 __ Mov(x0, argc + 1); |
3377 __ Mov(x2, 0); | 3362 __ Mov(x2, 0); |
3378 __ GetBuiltinFunction(x1, Builtins::CALL_FUNCTION_PROXY); | 3363 __ GetBuiltinFunction(x1, Builtins::CALL_FUNCTION_PROXY); |
3379 { | 3364 { |
3380 Handle<Code> adaptor = | 3365 Handle<Code> adaptor = |
3381 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(); | 3366 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(); |
3382 __ Jump(adaptor, RelocInfo::CODE_TARGET); | 3367 __ Jump(adaptor, RelocInfo::CODE_TARGET); |
3383 } | 3368 } |
3384 | 3369 |
3385 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead | 3370 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead |
3386 // of the original receiver from the call site). | 3371 // of the original receiver from the call site). |
3387 __ Bind(&non_function); | 3372 __ Bind(&non_function); |
3388 __ Poke(function, argc_ * kXRegSize); | 3373 __ Poke(function, argc * kXRegSize); |
3389 __ Mov(x0, argc_); // Set up the number of arguments. | 3374 __ Mov(x0, argc); // Set up the number of arguments. |
3390 __ Mov(x2, 0); | 3375 __ Mov(x2, 0); |
3391 __ GetBuiltinFunction(function, Builtins::CALL_NON_FUNCTION); | 3376 __ GetBuiltinFunction(function, Builtins::CALL_NON_FUNCTION); |
3392 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), | 3377 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), |
3393 RelocInfo::CODE_TARGET); | 3378 RelocInfo::CODE_TARGET); |
3394 } | 3379 } |
3395 | 3380 |
3396 if (CallAsMethod()) { | 3381 if (state.CallAsMethod() && state.IsSloppy()) { |
3397 __ Bind(&wrap); | 3382 __ Bind(&wrap); |
| 3383 |
| 3384 if (!state.IsGeneric()) { |
| 3385 __ Ldr(x5, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset)); |
| 3386 __ Ldr(w4, FieldMemOperand(x5, SharedFunctionInfo::kCompilerHintsOffset)); |
| 3387 |
| 3388 // Do not transform the receiver for native |
| 3389 __ Tbnz(w4, SharedFunctionInfo::kNative, &cont); |
| 3390 } |
| 3391 |
3398 // Wrap the receiver and patch it back onto the stack. | 3392 // Wrap the receiver and patch it back onto the stack. |
3399 { FrameScope frame_scope(masm, StackFrame::INTERNAL); | 3393 { FrameScope frame_scope(masm, StackFrame::INTERNAL); |
3400 __ Push(x1, x3); | 3394 __ Push(x1, x3); |
3401 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); | 3395 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); |
3402 __ Pop(x1); | 3396 __ Pop(x1); |
3403 } | 3397 } |
3404 __ Poke(x0, argc_ * kPointerSize); | 3398 __ Poke(x0, argc * kPointerSize); |
3405 __ B(&cont); | 3399 __ B(&cont); |
3406 } | 3400 } |
3407 } | 3401 } |
3408 | 3402 |
3409 | 3403 |
| 3404 void CallFunctionStub::Generate(MacroAssembler* masm) { |
| 3405 ASM_LOCATION("CallFunctionStub::Generate"); |
| 3406 // x1 function the function to call |
| 3407 |
| 3408 // GenericCallHelper expresses its options in terms of CallIC::State. |
| 3409 CallIC::CallType call_type = CallAsMethod() ? |
| 3410 CallIC::METHOD : CallIC::FUNCTION; |
| 3411 |
| 3412 if (NeedsChecks()) { |
| 3413 GenericCallHelper(masm, |
| 3414 CallIC::State::SlowCallState( |
| 3415 argc_, |
| 3416 call_type)); |
| 3417 } else { |
| 3418 GenericCallHelper(masm, |
| 3419 CallIC::State::MonomorphicCallState( |
| 3420 argc_, |
| 3421 call_type, |
| 3422 CallIC::ARGUMENTS_COUNT_UNKNOWN, |
| 3423 SLOPPY)); |
| 3424 } |
| 3425 } |
| 3426 |
| 3427 |
3410 void CallConstructStub::Generate(MacroAssembler* masm) { | 3428 void CallConstructStub::Generate(MacroAssembler* masm) { |
3411 ASM_LOCATION("CallConstructStub::Generate"); | 3429 ASM_LOCATION("CallConstructStub::Generate"); |
3412 // x0 : number of arguments | 3430 // x0 : number of arguments |
3413 // x1 : the function to call | 3431 // x1 : the function to call |
3414 // x2 : feedback vector | 3432 // x2 : feedback vector |
3415 // x3 : slot in feedback vector (smi) (if r2 is not the megamorphic symbol) | 3433 // x3 : slot in feedback vector (smi) (if r2 is not the megamorphic symbol) |
3416 Register function = x1; | 3434 Register function = x1; |
3417 Label slow, non_function_call; | 3435 Label slow, non_function_call; |
3418 | 3436 |
3419 // Check that the function is not a smi. | 3437 // Check that the function is not a smi. |
(...skipping 50 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3470 __ GetBuiltinFunction(x1, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR); | 3488 __ GetBuiltinFunction(x1, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR); |
3471 | 3489 |
3472 __ Bind(&do_call); | 3490 __ Bind(&do_call); |
3473 // Set expected number of arguments to zero (not changing x0). | 3491 // Set expected number of arguments to zero (not changing x0). |
3474 __ Mov(x2, 0); | 3492 __ Mov(x2, 0); |
3475 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), | 3493 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), |
3476 RelocInfo::CODE_TARGET); | 3494 RelocInfo::CODE_TARGET); |
3477 } | 3495 } |
3478 | 3496 |
3479 | 3497 |
| 3498 void CallICStub::GenerateMonomorphicCall(MacroAssembler* masm) { |
| 3499 GenericCallHelper(masm, |
| 3500 CallIC::State::MonomorphicCallState( |
| 3501 state_.arg_count(), |
| 3502 state_.call_type(), |
| 3503 state_.argument_check(), |
| 3504 state_.strict_mode())); |
| 3505 } |
| 3506 |
| 3507 |
| 3508 void CallICStub::GenerateSlowCall(MacroAssembler* masm) { |
| 3509 GenericCallHelper(masm, |
| 3510 CallIC::State::SlowCallState( |
| 3511 state_.arg_count(), |
| 3512 state_.call_type())); |
| 3513 } |
| 3514 |
| 3515 |
| 3516 void CallICStub::Generate(MacroAssembler* masm) { |
| 3517 ASM_LOCATION("CallICStub"); |
| 3518 |
| 3519 // x1 - function |
| 3520 // x2 - vector |
| 3521 // x3 - slot id (Smi) |
| 3522 Label extra_checks_or_miss, slow; |
| 3523 Register function = x1; |
| 3524 Register feedback_vector = x2; |
| 3525 Register index = x3; |
| 3526 |
| 3527 // The checks. First, does x1 match the recorded monomorphic target? |
| 3528 __ Add(x4, feedback_vector, |
| 3529 Operand::UntagSmiAndScale(index, kPointerSizeLog2)); |
| 3530 __ Ldr(x4, FieldMemOperand(x4, FixedArray::kHeaderSize)); |
| 3531 |
| 3532 __ Cmp(x4, function); |
| 3533 __ B(ne, &extra_checks_or_miss); |
| 3534 |
| 3535 GenerateMonomorphicCall(masm); |
| 3536 |
| 3537 __ bind(&extra_checks_or_miss); |
| 3538 if (IsGeneric()) { |
| 3539 Label miss_uninit; |
| 3540 |
| 3541 __ JumpIfRoot(x4, Heap::kMegamorphicSymbolRootIndex, &slow); |
| 3542 __ JumpIfRoot(x4, Heap::kUninitializedSymbolRootIndex, &miss_uninit); |
| 3543 // If we get here, go from monomorphic to megamorphic. Don't bother missing, |
| 3544 // just update. |
| 3545 __ Add(x4, feedback_vector, |
| 3546 Operand::UntagSmiAndScale(index, kPointerSizeLog2)); |
| 3547 __ LoadRoot(x5, Heap::kMegamorphicSymbolRootIndex); |
| 3548 __ Str(x5, FieldMemOperand(x4, FixedArray::kHeaderSize)); |
| 3549 __ B(&slow); |
| 3550 |
| 3551 __ bind(&miss_uninit); |
| 3552 } |
| 3553 |
| 3554 GenerateMiss(masm); |
| 3555 |
| 3556 // the slow case |
| 3557 __ bind(&slow); |
| 3558 GenerateSlowCall(masm); |
| 3559 } |
| 3560 |
| 3561 |
| 3562 void CallICStub::GenerateMiss(MacroAssembler* masm) { |
| 3563 ASM_LOCATION("CallICStub[Miss]"); |
| 3564 |
| 3565 // Get the receiver of the function from the stack; 1 ~ return address. |
| 3566 __ Peek(x4, (state_.arg_count() + 1) * kPointerSize); |
| 3567 |
| 3568 { |
| 3569 FrameScope scope(masm, StackFrame::INTERNAL); |
| 3570 |
| 3571 // Push the receiver and the function and feedback info. |
| 3572 __ Push(x4, x1, x2, x3); |
| 3573 |
| 3574 // Call the entry. |
| 3575 ExternalReference miss = ExternalReference(IC_Utility(IC::kCallIC_Miss), |
| 3576 masm->isolate()); |
| 3577 __ CallExternalReference(miss, 4); |
| 3578 |
| 3579 // Move result to x1 and exit the internal frame. |
| 3580 __ Mov(x1, x0); |
| 3581 } |
| 3582 } |
| 3583 |
| 3584 |
3480 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) { | 3585 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) { |
3481 // If the receiver is a smi trigger the non-string case. | 3586 // If the receiver is a smi trigger the non-string case. |
3482 __ JumpIfSmi(object_, receiver_not_string_); | 3587 __ JumpIfSmi(object_, receiver_not_string_); |
3483 | 3588 |
3484 // Fetch the instance type of the receiver into result register. | 3589 // Fetch the instance type of the receiver into result register. |
3485 __ Ldr(result_, FieldMemOperand(object_, HeapObject::kMapOffset)); | 3590 __ Ldr(result_, FieldMemOperand(object_, HeapObject::kMapOffset)); |
3486 __ Ldrb(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset)); | 3591 __ Ldrb(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset)); |
3487 | 3592 |
3488 // If the receiver is not a string trigger the non-string case. | 3593 // If the receiver is not a string trigger the non-string case. |
3489 __ TestAndBranchIfAnySet(result_, kIsNotStringMask, receiver_not_string_); | 3594 __ TestAndBranchIfAnySet(result_, kIsNotStringMask, receiver_not_string_); |
(...skipping 2243 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
5733 MemOperand(fp, 6 * kPointerSize), | 5838 MemOperand(fp, 6 * kPointerSize), |
5734 NULL); | 5839 NULL); |
5735 } | 5840 } |
5736 | 5841 |
5737 | 5842 |
5738 #undef __ | 5843 #undef __ |
5739 | 5844 |
5740 } } // namespace v8::internal | 5845 } } // namespace v8::internal |
5741 | 5846 |
5742 #endif // V8_TARGET_ARCH_ARM64 | 5847 #endif // V8_TARGET_ARCH_ARM64 |
OLD | NEW |