Chromium Code Reviews

Unified Diff: src/arm64/code-stubs-arm64.cc

Issue 226233002: Revert "Reland of https://codereview.chromium.org/172523002/" (Closed)
Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 6 years, 8 months ago
 // Copyright 2013 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 3275 matching lines...)
 
   __ Push(function);
   __ RecordWrite(feedback_vector, scratch1, function, kLRHasNotBeenSaved,
                  kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
   __ Pop(function);
 
   __ Bind(&done);
 }
 
 
-static void GenericCallHelper(MacroAssembler* masm,
-                              const CallIC::State& state,
-                              bool wrap_and_call = false) {
+void CallFunctionStub::Generate(MacroAssembler* masm) {
+  ASM_LOCATION("CallFunctionStub::Generate");
   // x1  function    the function to call
-
-  // wrap_and_call can only be true if we are compiling a monomorphic method.
-  ASSERT(!(wrap_and_call && state.IsGeneric()));
-  ASSERT(!wrap_and_call || state.CallAsMethod());
+  // x2 : feedback vector
+  // x3 : slot in feedback vector (smi) (if x2 is not the megamorphic symbol)
   Register function = x1;
+  Register cache_cell = x2;
+  Register slot = x3;
   Register type = x4;
   Label slow, non_function, wrap, cont;
 
   // TODO(jbramley): This function has a lot of unnamed registers. Name them,
   // and tidy things up a bit.
 
-  if (state.IsGeneric()) {
+  if (NeedsChecks()) {
     // Check that the function is really a JavaScript function.
     __ JumpIfSmi(function, &non_function);
 
     // Goto slow case if we do not have a function.
     __ JumpIfNotObjectType(function, x10, type, JS_FUNCTION_TYPE, &slow);
+
+    if (RecordCallTarget()) {
+      GenerateRecordCallTarget(masm, x0, function, cache_cell, slot, x4, x5);
+      // Type information was updated. Because we may call Array, which
+      // expects either undefined or an AllocationSite in x2, we need
+      // to set x2 to undefined.
+      __ LoadRoot(cache_cell, Heap::kUndefinedValueRootIndex);
+    }
   }
 
   // Fast-case: Invoke the function now.
   // x1  function  pushed function
-  int argc = state.arg_count();
-  ParameterCount actual(argc);
+  ParameterCount actual(argc_);
 
-  if (state.CallAsMethod()) {
-    if (state.IsGeneric()) {
+  if (CallAsMethod()) {
+    if (NeedsChecks()) {
       // Do not transform the receiver for strict mode functions.
       __ Ldr(x3, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
       __ Ldr(w4, FieldMemOperand(x3, SharedFunctionInfo::kCompilerHintsOffset));
       __ Tbnz(w4, SharedFunctionInfo::kStrictModeFunction, &cont);
 
       // Do not transform the receiver for native (Compilerhints already in x3).
       __ Tbnz(w4, SharedFunctionInfo::kNative, &cont);
     }
 
-    if (state.IsGeneric() || state.IsSloppy() || wrap_and_call) {
-      // Compute the receiver in sloppy mode.
-      __ Peek(x3, argc * kPointerSize);
+    // Compute the receiver in sloppy mode.
+    __ Peek(x3, argc_ * kPointerSize);
 
-      if (state.IsGeneric()) {
-        __ JumpIfSmi(x3, &wrap);
-        __ JumpIfObjectType(x3, x10, type, FIRST_SPEC_OBJECT_TYPE, &wrap, lt);
-      } else {
-        __ B(&wrap);
-      }
+    if (NeedsChecks()) {
+      __ JumpIfSmi(x3, &wrap);
+      __ JumpIfObjectType(x3, x10, type, FIRST_SPEC_OBJECT_TYPE, &wrap, lt);
+    } else {
+      __ B(&wrap);
     }
 
     __ Bind(&cont);
   }
+  __ InvokeFunction(function,
+                    actual,
+                    JUMP_FUNCTION,
+                    NullCallWrapper());
 
-  if (state.ArgumentsMustMatch()) {
-    __ InvokeFunction(function,
-                      actual,
-                      actual,
-                      JUMP_FUNCTION,
-                      NullCallWrapper());
-  } else {
-    __ InvokeFunction(function,
-                      actual,
-                      JUMP_FUNCTION,
-                      NullCallWrapper());
-  }
-
-  if (state.IsGeneric()) {
+  if (NeedsChecks()) {
     // Slow-case: Non-function called.
     __ Bind(&slow);
+    if (RecordCallTarget()) {
+      // If there is a call target cache, mark it megamorphic in the
+      // non-function case. MegamorphicSentinel is an immortal immovable object
+      // (megamorphic symbol) so no write barrier is needed.
+      ASSERT_EQ(*TypeFeedbackInfo::MegamorphicSentinel(masm->isolate()),
+                masm->isolate()->heap()->megamorphic_symbol());
+      __ Add(x12, cache_cell, Operand::UntagSmiAndScale(slot,
+                                                        kPointerSizeLog2));
+      __ LoadRoot(x11, Heap::kMegamorphicSymbolRootIndex);
+      __ Str(x11, FieldMemOperand(x12, FixedArray::kHeaderSize));
+    }
     // Check for function proxy.
     // x10 : function type.
     __ CompareAndBranch(type, JS_FUNCTION_PROXY_TYPE, ne, &non_function);
     __ Push(function);  // put proxy as additional argument
-    __ Mov(x0, argc + 1);
+    __ Mov(x0, argc_ + 1);
     __ Mov(x2, 0);
     __ GetBuiltinFunction(x1, Builtins::CALL_FUNCTION_PROXY);
     {
       Handle<Code> adaptor =
           masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
       __ Jump(adaptor, RelocInfo::CODE_TARGET);
     }
 
     // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
     // of the original receiver from the call site).
     __ Bind(&non_function);
-    __ Poke(function, argc * kXRegSize);
-    __ Mov(x0, argc);  // Set up the number of arguments.
+    __ Poke(function, argc_ * kXRegSize);
+    __ Mov(x0, argc_);  // Set up the number of arguments.
     __ Mov(x2, 0);
     __ GetBuiltinFunction(function, Builtins::CALL_NON_FUNCTION);
     __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
             RelocInfo::CODE_TARGET);
   }
 
-  if (state.CallAsMethod()) {
+  if (CallAsMethod()) {
     __ Bind(&wrap);
-
-    if (!state.IsGeneric() && !wrap_and_call) {
-      __ Ldr(x5, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
-      __ Ldr(w4, FieldMemOperand(x5, SharedFunctionInfo::kCompilerHintsOffset));
-
-      // Do not transform the receiver for native
-      __ Tbnz(w4, SharedFunctionInfo::kNative, &cont);
-    }
-
     // Wrap the receiver and patch it back onto the stack.
     { FrameScope frame_scope(masm, StackFrame::INTERNAL);
       __ Push(x1, x3);
       __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
       __ Pop(x1);
     }
-    __ Poke(x0, argc * kPointerSize);
+    __ Poke(x0, argc_ * kPointerSize);
     __ B(&cont);
   }
 }
 
 
-void CallFunctionStub::Generate(MacroAssembler* masm) {
-  ASM_LOCATION("CallFunctionStub::Generate");
-  // x1  function    the function to call
-
-  // GenericCallHelper expresses its options in terms of CallIC::State.
-  CallIC::CallType call_type = CallAsMethod() ?
-      CallIC::METHOD : CallIC::FUNCTION;
-
-  if (NeedsChecks()) {
-    GenericCallHelper(masm,
-                      CallIC::State::SlowCallState(
-                          argc_,
-                          call_type));
-  } else {
-    GenericCallHelper(masm,
-                      CallIC::State::MonomorphicCallState(
-                          argc_,
-                          call_type,
-                          CallIC::ARGUMENTS_COUNT_UNKNOWN,
-                          SLOPPY),
-                      true);
-  }
-}
-
-
 void CallConstructStub::Generate(MacroAssembler* masm) {
   ASM_LOCATION("CallConstructStub::Generate");
   // x0 : number of arguments
   // x1 : the function to call
   // x2 : feedback vector
   // x3 : slot in feedback vector (smi) (if x2 is not the megamorphic symbol)
   Register function = x1;
   Label slow, non_function_call;
 
   // Check that the function is not a smi.
(...skipping 50 matching lines...)
   __ GetBuiltinFunction(x1, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
 
   __ Bind(&do_call);
   // Set expected number of arguments to zero (not changing x0).
   __ Mov(x2, 0);
   __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
           RelocInfo::CODE_TARGET);
 }
 
 
-void CallICStub::GenerateMonomorphicCall(MacroAssembler* masm) {
-  GenericCallHelper(masm,
-                    CallIC::State::MonomorphicCallState(
-                        state_.arg_count(),
-                        state_.call_type(),
-                        state_.argument_check(),
-                        state_.strict_mode()));
-}
-
-
-void CallICStub::GenerateSlowCall(MacroAssembler* masm) {
-  GenericCallHelper(masm,
-                    CallIC::State::SlowCallState(
-                        state_.arg_count(),
-                        state_.call_type()));
-}
-
-
-void CallICStub::Generate(MacroAssembler* masm) {
-  ASM_LOCATION("CallICStub");
-
-  // x1 - function
-  // x2 - vector
-  // x3 - slot id (Smi)
-  Label extra_checks_or_miss, slow;
-  Register function = x1;
-  Register feedback_vector = x2;
-  Register index = x3;
-
-  // The checks. First, does x1 match the recorded monomorphic target?
-  __ Add(x4, feedback_vector,
-         Operand::UntagSmiAndScale(index, kPointerSizeLog2));
-  __ Ldr(x4, FieldMemOperand(x4, FixedArray::kHeaderSize));
-
-  __ Cmp(x4, function);
-  __ B(ne, &extra_checks_or_miss);
-
-  GenerateMonomorphicCall(masm);
-
-  __ bind(&extra_checks_or_miss);
-  if (IsGeneric()) {
-    Label miss_uninit;
-
-    __ JumpIfRoot(x4, Heap::kMegamorphicSymbolRootIndex, &slow);
-    __ JumpIfRoot(x4, Heap::kUninitializedSymbolRootIndex, &miss_uninit);
-    // If we get here, go from monomorphic to megamorphic. Don't bother
-    // missing, just update.
-    __ Add(x4, feedback_vector,
-           Operand::UntagSmiAndScale(index, kPointerSizeLog2));
-    __ LoadRoot(x5, Heap::kMegamorphicSymbolRootIndex);
-    __ Str(x5, FieldMemOperand(x4, FixedArray::kHeaderSize));
-    __ B(&slow);
-
-    __ bind(&miss_uninit);
-  }
-
-  GenerateMiss(masm);
-
-  // The slow case.
-  __ bind(&slow);
-  GenerateSlowCall(masm);
-}
-
-
-void CallICStub::GenerateMiss(MacroAssembler* masm) {
-  ASM_LOCATION("CallICStub[Miss]");
-
-  // Get the receiver of the function from the stack; 1 ~ return address.
-  __ Peek(x4, (state_.arg_count() + 1) * kPointerSize);
-
-  {
-    FrameScope scope(masm, StackFrame::INTERNAL);
-
-    // Push the receiver and the function and feedback info.
-    __ Push(x4, x1, x2, x3);
-
-    // Call the entry.
-    ExternalReference miss = ExternalReference(IC_Utility(IC::kCallIC_Miss),
-                                               masm->isolate());
-    __ CallExternalReference(miss, 4);
-
-    // Move result to x1 and exit the internal frame.
-    __ Mov(x1, x0);
-  }
-}
-
-
 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
   // If the receiver is a smi trigger the non-string case.
   __ JumpIfSmi(object_, receiver_not_string_);
 
   // Fetch the instance type of the receiver into result register.
   __ Ldr(result_, FieldMemOperand(object_, HeapObject::kMapOffset));
   __ Ldrb(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset));
 
   // If the receiver is not a string trigger the non-string case.
   __ TestAndBranchIfAnySet(result_, kIsNotStringMask, receiver_not_string_);
(...skipping 2243 matching lines...)
                        MemOperand(fp, 6 * kPointerSize),
                        NULL);
 }
 
 
 #undef __
 
 } }  // namespace v8::internal
 
 #endif  // V8_TARGET_ARCH_ARM64
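
Three short notes for readers of the diff above. First, the CallAsMethod() path restored in CallFunctionStub::Generate decides whether to wrap the receiver: strict-mode and native callees keep it untouched (the Tbnz checks on the compiler hints), while sloppy-mode callees get a non-object receiver converted via Builtins::TO_OBJECT at &wrap. A minimal standalone C++ sketch of that decision; the struct and function names here are illustrative, not V8 API:

#include <iostream>

// Illustrative stand-in for the compiler-hint bits read from
// SharedFunctionInfo::kCompilerHintsOffset in the stub above.
struct CompilerHints {
  bool strict_mode;  // SharedFunctionInfo::kStrictModeFunction
  bool native;       // SharedFunctionInfo::kNative
};

// Mirrors the branch structure: Tbnz(kStrictModeFunction) -> &cont,
// Tbnz(kNative) -> &cont, JumpIfSmi / JumpIfObjectType -> &wrap.
bool NeedsReceiverWrapping(const CompilerHints& hints,
                           bool receiver_is_spec_object) {
  if (hints.strict_mode) return false;  // keep the receiver as-is
  if (hints.native) return false;       // keep the receiver as-is
  return !receiver_is_spec_object;      // wrap smis and other non-objects
}

int main() {
  std::cout << NeedsReceiverWrapping({false, false}, false) << "\n";  // 1: wrap
  std::cout << NeedsReceiverWrapping({true, false}, false) << "\n";   // 0: keep
}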
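
Second, the slow path's feedback-vector update computes the slot address with Operand::UntagSmiAndScale(slot, kPointerSizeLog2). A rough model of that arithmetic, assuming the arm64 smi layout of the period (payload in the upper 32 bits, so the untag shift is 32); the constants and names below are assumptions, not the real macro-assembler API:

#include <cassert>
#include <cstdint>

constexpr int kSmiShift = 32;        // assumed arm64 smi layout
constexpr int kPointerSizeLog2 = 3;  // 8-byte pointers

// Untag the smi index (arithmetic shift right), then scale it into a byte
// offset, as __ Add(x12, cache_cell, UntagSmiAndScale(...)) does above.
int64_t UntagSmiAndScale(int64_t tagged_slot) {
  return (tagged_slot >> kSmiShift) << kPointerSizeLog2;
}

int main() {
  const int64_t slot_as_smi = int64_t{5} << kSmiShift;  // smi-tagged index 5
  assert(UntagSmiAndScale(slot_as_smi) == 5 * 8);       // byte offset 40
  return 0;
}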
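
Finally, the removed CallICStub::Generate and the restored slow path share one idea: once a slot has seen more than one call target (or a non-function), it is marked megamorphic by storing the megamorphic sentinel at vector[slot], and no per-target caching is attempted afterwards. A toy model of that transition, with a plain enum and std::vector standing in for V8's sentinel symbols and FixedArray:

#include <cassert>
#include <cstddef>
#include <vector>

// Toy stand-ins for V8's feedback sentinels; not real V8 types.
enum class Feedback { kUninitialized, kMonomorphic, kMegamorphic };

// Models "__ Str(megamorphic_symbol, FieldMemOperand(x12, ...))"; per the
// comment in the diff, no write barrier is needed because the sentinel is
// an immortal, immovable object.
void MarkSlotMegamorphic(std::vector<Feedback>& feedback_vector,
                         std::size_t slot) {
  feedback_vector.at(slot) = Feedback::kMegamorphic;
}

int main() {
  std::vector<Feedback> vector(4, Feedback::kUninitialized);
  vector[2] = Feedback::kMonomorphic;  // one call target recorded
  MarkSlotMegamorphic(vector, 2);      // second target / non-function seen
  assert(vector[2] == Feedback::kMegamorphic);
  return 0;
}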
