Chromium Code Reviews

Diff: src/arm64/code-stubs-arm64.cc

Issue 223823002: Revert r20474 (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 6 years, 8 months ago
--- src/arm64/code-stubs-arm64.cc (old)
+++ src/arm64/code-stubs-arm64.cc (new)
 // Copyright 2013 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 3275 matching lines...)
@@ -3286,152 +3286,134 @@

   __ Push(function);
   __ RecordWrite(feedback_vector, scratch1, function, kLRHasNotBeenSaved,
                  kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
   __ Pop(function);

   __ Bind(&done);
 }


-static void GenericCallHelper(MacroAssembler* masm,
-                              const CallIC::State& state) {
+void CallFunctionStub::Generate(MacroAssembler* masm) {
+  ASM_LOCATION("CallFunctionStub::Generate");
   // x1  function    the function to call
+  // x2 : feedback vector
+  // x3 : slot in feedback vector (smi) (if x2 is not the megamorphic symbol)
   Register function = x1;
+  Register cache_cell = x2;
+  Register slot = x3;
   Register type = x4;
   Label slow, non_function, wrap, cont;

   // TODO(jbramley): This function has a lot of unnamed registers. Name them,
   // and tidy things up a bit.

-  if (state.IsGeneric()) {
+  if (NeedsChecks()) {
     // Check that the function is really a JavaScript function.
     __ JumpIfSmi(function, &non_function);

     // Goto slow case if we do not have a function.
     __ JumpIfNotObjectType(function, x10, type, JS_FUNCTION_TYPE, &slow);
+
+    if (RecordCallTarget()) {
+      GenerateRecordCallTarget(masm, x0, function, cache_cell, slot, x4, x5);
+      // Type information was updated. Because we may call Array, which
+      // expects either undefined or an AllocationSite in ebx we need
+      // to set ebx to undefined.
+      __ LoadRoot(cache_cell, Heap::kUndefinedValueRootIndex);
+    }
   }

   // Fast-case: Invoke the function now.
   // x1  function  pushed function
-  int argc = state.arg_count();
-  ParameterCount actual(argc);
+  ParameterCount actual(argc_);

-  if (state.CallAsMethod()) {
-    if (state.IsGeneric()) {
+  if (CallAsMethod()) {
+    if (NeedsChecks()) {
       // Do not transform the receiver for strict mode functions.
       __ Ldr(x3, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
       __ Ldr(w4, FieldMemOperand(x3, SharedFunctionInfo::kCompilerHintsOffset));
       __ Tbnz(w4, SharedFunctionInfo::kStrictModeFunction, &cont);

       // Do not transform the receiver for native (Compilerhints already in x3).
       __ Tbnz(w4, SharedFunctionInfo::kNative, &cont);
     }

-    if (state.IsSloppy()) {
-      // Compute the receiver in sloppy mode.
-      __ Peek(x3, argc * kPointerSize);
+    // Compute the receiver in sloppy mode.
+    __ Peek(x3, argc_ * kPointerSize);

+    if (NeedsChecks()) {
       __ JumpIfSmi(x3, &wrap);
       __ JumpIfObjectType(x3, x10, type, FIRST_SPEC_OBJECT_TYPE, &wrap, lt);
+    } else {
+      __ B(&wrap);
     }

     __ Bind(&cont);
   }
+  __ InvokeFunction(function,
+                    actual,
+                    JUMP_FUNCTION,
+                    NullCallWrapper());

-  if (state.ArgumentsMustMatch()) {
-    __ InvokeFunction(function,
-                      actual,
-                      actual,
-                      JUMP_FUNCTION,
-                      NullCallWrapper());
-  } else {
-    __ InvokeFunction(function,
-                      actual,
-                      JUMP_FUNCTION,
-                      NullCallWrapper());
-  }
-
-  if (state.IsGeneric()) {
+  if (NeedsChecks()) {
     // Slow-case: Non-function called.
     __ Bind(&slow);
+    if (RecordCallTarget()) {
+      // If there is a call target cache, mark it megamorphic in the
+      // non-function case. MegamorphicSentinel is an immortal immovable object
+      // (megamorphic symbol) so no write barrier is needed.
+      ASSERT_EQ(*TypeFeedbackInfo::MegamorphicSentinel(masm->isolate()),
+                masm->isolate()->heap()->megamorphic_symbol());
+      __ Add(x12, cache_cell, Operand::UntagSmiAndScale(slot,
+                                                        kPointerSizeLog2));
+      __ LoadRoot(x11, Heap::kMegamorphicSymbolRootIndex);
+      __ Str(x11, FieldMemOperand(x12, FixedArray::kHeaderSize));
+    }
     // Check for function proxy.
     // x10 : function type.
     __ CompareAndBranch(type, JS_FUNCTION_PROXY_TYPE, ne, &non_function);
     __ Push(function);  // put proxy as additional argument
-    __ Mov(x0, argc + 1);
+    __ Mov(x0, argc_ + 1);
     __ Mov(x2, 0);
     __ GetBuiltinFunction(x1, Builtins::CALL_FUNCTION_PROXY);
     {
       Handle<Code> adaptor =
           masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
       __ Jump(adaptor, RelocInfo::CODE_TARGET);
     }

     // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
     // of the original receiver from the call site).
     __ Bind(&non_function);
-    __ Poke(function, argc * kXRegSize);
-    __ Mov(x0, argc);  // Set up the number of arguments.
+    __ Poke(function, argc_ * kXRegSize);
+    __ Mov(x0, argc_);  // Set up the number of arguments.
     __ Mov(x2, 0);
     __ GetBuiltinFunction(function, Builtins::CALL_NON_FUNCTION);
     __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
             RelocInfo::CODE_TARGET);
   }

-  if (state.CallAsMethod() && state.IsSloppy()) {
+  if (CallAsMethod()) {
     __ Bind(&wrap);
-
-    if (!state.IsGeneric()) {
-      __ Ldr(x5, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
-      __ Ldr(w4, FieldMemOperand(x5, SharedFunctionInfo::kCompilerHintsOffset));
-
-      // Do not transform the receiver for native
-      __ Tbnz(w4, SharedFunctionInfo::kNative, &cont);
-    }
-
     // Wrap the receiver and patch it back onto the stack.
     { FrameScope frame_scope(masm, StackFrame::INTERNAL);
       __ Push(x1, x3);
       __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
       __ Pop(x1);
     }
-    __ Poke(x0, argc * kPointerSize);
+    __ Poke(x0, argc_ * kPointerSize);
     __ B(&cont);
   }
 }


-void CallFunctionStub::Generate(MacroAssembler* masm) {
-  ASM_LOCATION("CallFunctionStub::Generate");
-  // x1  function    the function to call
-
-  // GenericCallHelper expresses it's options in terms of CallIC::State.
-  CallIC::CallType call_type = CallAsMethod() ?
-      CallIC::METHOD : CallIC::FUNCTION;
-
-  if (NeedsChecks()) {
-    GenericCallHelper(masm,
-                      CallIC::State::SlowCallState(
-                          argc_,
-                          call_type));
-  } else {
-    GenericCallHelper(masm,
-                      CallIC::State::MonomorphicCallState(
-                          argc_,
-                          call_type,
-                          CallIC::ARGUMENTS_COUNT_UNKNOWN,
-                          SLOPPY));
-  }
-}
-
-
 void CallConstructStub::Generate(MacroAssembler* masm) {
   ASM_LOCATION("CallConstructStub::Generate");
   // x0 : number of arguments
   // x1 : the function to call
   // x2 : feedback vector
   // x3 : slot in feedback vector (smi) (if r2 is not the megamorphic symbol)
   Register function = x1;
   Label slow, non_function_call;

   // Check that the function is not a smi.
(...skipping 50 matching lines...)
@@ -3488,107 +3470,20 @@
   __ GetBuiltinFunction(x1, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);

   __ Bind(&do_call);
   // Set expected number of arguments to zero (not changing x0).
   __ Mov(x2, 0);
   __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
           RelocInfo::CODE_TARGET);
 }


-void CallICStub::GenerateMonomorphicCall(MacroAssembler* masm) {
-  GenericCallHelper(masm,
-                    CallIC::State::MonomorphicCallState(
-                        state_.arg_count(),
-                        state_.call_type(),
-                        state_.argument_check(),
-                        state_.strict_mode()));
-}
-
-
-void CallICStub::GenerateSlowCall(MacroAssembler* masm) {
-  GenericCallHelper(masm,
-                    CallIC::State::SlowCallState(
-                        state_.arg_count(),
-                        state_.call_type()));
-}
-
-
-void CallICStub::Generate(MacroAssembler* masm) {
-  ASM_LOCATION("CallICStub");
-
-  // x1 - function
-  // x2 - vector
-  // x3 - slot id (Smi)
-  Label extra_checks_or_miss, slow;
-  Register function = x1;
-  Register feedback_vector = x2;
-  Register index = x3;
-
-  // The checks. First, does x1 match the recorded monomorphic target?
-  __ Add(x4, feedback_vector,
-         Operand::UntagSmiAndScale(index, kPointerSizeLog2));
-  __ Ldr(x4, FieldMemOperand(x4, FixedArray::kHeaderSize));
-
-  __ Cmp(x4, function);
-  __ B(ne, &extra_checks_or_miss);
-
-  GenerateMonomorphicCall(masm);
-
-  __ bind(&extra_checks_or_miss);
-  if (IsGeneric()) {
-    Label miss_uninit;
-
-    __ JumpIfRoot(x4, Heap::kMegamorphicSymbolRootIndex, &slow);
-    __ JumpIfRoot(x4, Heap::kUninitializedSymbolRootIndex, &miss_uninit);
-    // If we get here, go from monomorphic to megamorphic, Don't bother missing,
-    // just update.
-    __ Add(x4, feedback_vector,
-           Operand::UntagSmiAndScale(index, kPointerSizeLog2));
-    __ LoadRoot(x5, Heap::kMegamorphicSymbolRootIndex);
-    __ Str(x5, FieldMemOperand(x4, FixedArray::kHeaderSize));
-    __ B(&slow);
-
-    __ bind(&miss_uninit);
-  }
-
-  GenerateMiss(masm);
-
-  // the slow case
-  __ bind(&slow);
-  GenerateSlowCall(masm);
-}
-
-
-void CallICStub::GenerateMiss(MacroAssembler* masm) {
-  ASM_LOCATION("CallICStub[Miss]");
-
-  // Get the receiver of the function from the stack; 1 ~ return address.
-  __ Peek(x4, (state_.arg_count() + 1) * kPointerSize);
-
-  {
-    FrameScope scope(masm, StackFrame::INTERNAL);
-
-    // Push the receiver and the function and feedback info.
-    __ Push(x4, x1, x2, x3);
-
-    // Call the entry.
-    ExternalReference miss = ExternalReference(IC_Utility(IC::kCallIC_Miss),
-                                               masm->isolate());
-    __ CallExternalReference(miss, 4);
-
-    // Move result to edi and exit the internal frame.
-    __ Mov(x1, x0);
-  }
-}
-
-
 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
   // If the receiver is a smi trigger the non-string case.
   __ JumpIfSmi(object_, receiver_not_string_);

   // Fetch the instance type of the receiver into result register.
   __ Ldr(result_, FieldMemOperand(object_, HeapObject::kMapOffset));
   __ Ldrb(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset));

   // If the receiver is not a string trigger the non-string case.
   __ TestAndBranchIfAnySet(result_, kIsNotStringMask, receiver_not_string_);
(...skipping 2243 matching lines...)
                               MemOperand(fp, 6 * kPointerSize),
                               NULL);
 }


 #undef __

 } }  // namespace v8::internal

 #endif  // V8_TARGET_ARCH_ARM64