OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_ARM | 5 #if V8_TARGET_ARCH_ARM |
6 | 6 |
7 #include "src/base/bits.h" | 7 #include "src/base/bits.h" |
8 #include "src/bootstrapper.h" | 8 #include "src/bootstrapper.h" |
9 #include "src/code-stubs.h" | 9 #include "src/code-stubs.h" |
10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
(...skipping 2361 matching lines...) |
2372 CallStubInRecordCallTarget(masm, &create_stub, is_super); | 2372 CallStubInRecordCallTarget(masm, &create_stub, is_super); |
2373 __ b(&done); | 2373 __ b(&done); |
2374 | 2374 |
2375 __ bind(&not_array_function); | 2375 __ bind(&not_array_function); |
2376 CreateWeakCellStub weak_cell_stub(masm->isolate()); | 2376 CreateWeakCellStub weak_cell_stub(masm->isolate()); |
2377 CallStubInRecordCallTarget(masm, &weak_cell_stub, is_super); | 2377 CallStubInRecordCallTarget(masm, &weak_cell_stub, is_super); |
2378 __ bind(&done); | 2378 __ bind(&done); |
2379 } | 2379 } |
2380 | 2380 |
2381 | 2381 |
2382 static void EmitContinueIfStrictOrNative(MacroAssembler* masm, Label* cont) { | |
2383 // ----------- S t a t e ------------- | |
2384 // -- r1 : the function to call | |
2385 // -- r3 : the function's shared function info | |
2386 // ----------------------------------- | |
2387 // Do not transform the receiver for strict mode functions. | |
2388 __ ldr(r4, FieldMemOperand(r3, SharedFunctionInfo::kCompilerHintsOffset)); | |
2389 __ tst(r4, Operand(1 << (SharedFunctionInfo::kStrictModeFunction + | |
2390 kSmiTagSize))); | |
2391 __ b(ne, cont); | |
2392 | |
2393 // Do not transform the receiver for native functions (compiler hints already in r4). | |
2394 __ tst(r4, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize))); | |
2395 __ b(ne, cont); | |
2396 } | |
2397 | |
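For reference, the `+ kSmiTagSize` in the two bit tests above exists because the compiler hints word is Smi-tagged, so bit k of the untagged hints sits at bit k + kSmiTagSize of the value loaded into r4. A minimal standalone C++ sketch of that check, with illustrative bit positions rather than V8's real SharedFunctionInfo constants:

#include <cstdint>

// Illustrative sketch only -- the bit positions are placeholders, not V8's
// actual SharedFunctionInfo constants.
constexpr int kSmiTagSize = 1;             // 32-bit V8: one-bit Smi tag.
constexpr int kStrictModeFunctionBit = 2;  // placeholder bit position
constexpr int kNativeBit = 3;              // placeholder bit position

// True when the receiver must be left untouched (strict or native function),
// i.e. the case in which the stub above branches to `cont`.
bool SkipReceiverTransform(uint32_t tagged_compiler_hints) {
  const uint32_t strict_mask = 1u << (kStrictModeFunctionBit + kSmiTagSize);
  const uint32_t native_mask = 1u << (kNativeBit + kSmiTagSize);
  return (tagged_compiler_hints & (strict_mask | native_mask)) != 0;
}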
2398 | |
2399 static void EmitSlowCase(MacroAssembler* masm, int argc) { | |
2400 __ mov(r0, Operand(argc)); | |
2401 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); | |
2402 } | |
2403 | |
2404 | |
2405 static void EmitWrapCase(MacroAssembler* masm, int argc, Label* cont) { | |
2406 // Wrap the receiver and patch it back onto the stack. | |
2407 { FrameAndConstantPoolScope frame_scope(masm, StackFrame::INTERNAL); | |
2408 __ push(r1); | |
2409 __ mov(r0, r3); | |
2410 ToObjectStub stub(masm->isolate()); | |
2411 __ CallStub(&stub); | |
2412 __ pop(r1); | |
2413 } | |
2414 __ str(r0, MemOperand(sp, argc * kPointerSize)); | |
2415 __ jmp(cont); | |
2416 } | |
2417 | |
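For reference, a small standalone C++ sketch of the receiver handling that EmitWrapCase and its callers implement: a Smi, or any heap object whose instance type is below FIRST_SPEC_OBJECT_TYPE, is treated as a primitive and boxed via ToObject, and the result is written back into the receiver's stack slot at sp + argc * kPointerSize. Names and constant values here are illustrative, not V8's:

#include <cstddef>
#include <cstdint>

// Illustrative sketch only -- constants and helper names are not V8's.
constexpr size_t kPointerSize = 4;  // ARM32 pointer width.

// A primitive receiver (Smi, or heap object below the first "spec object"
// instance type) needs to be wrapped via ToObject before the call.
bool ReceiverNeedsWrapping(bool is_smi, int instance_type,
                           int first_spec_object_type) {
  return is_smi || instance_type < first_spec_object_type;
}

// The receiver sits `argc` slots above the stack pointer; this is the slot
// that EmitWrapCase overwrites with the ToObject result.
uintptr_t ReceiverSlotAddress(uintptr_t sp, int argc) {
  return sp + static_cast<size_t>(argc) * kPointerSize;
}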
2418 | |
2419 static void EmitClassConstructorCallCheck(MacroAssembler* masm) { | |
2420 // ----------- S t a t e ------------- | |
2421 // -- r1 : the function to call | |
2422 // -- r3 : the function's shared function info | |
2423 // ----------------------------------- | |
2424 // ClassConstructor Check: ES6 section 9.2.1 [[Call]] | |
2425 Label non_class_constructor; | |
2426 // Check whether the current function is a classConstructor. | |
2427 __ ldrb(r4, FieldMemOperand(r3, SharedFunctionInfo::kFunctionKindByteOffset)); | |
2428 __ tst(r4, Operand(SharedFunctionInfo::kClassConstructorBitsWithinByte)); | |
2429 __ b(eq, &non_class_constructor); | |
2430 // If we call a class constructor, throw a TypeError indirectly | |
2431 // via the CallFunction builtin. | |
2432 __ Jump(masm->isolate()->builtins()->CallFunction(), RelocInfo::CODE_TARGET); | |
2433 __ bind(&non_class_constructor); | |
2434 } | |
2435 | |
2436 | |
2437 static void CallFunctionNoFeedback(MacroAssembler* masm, | |
2438 int argc, bool needs_checks, | |
2439 bool call_as_method) { | |
2440 // r1 : the function to call | |
2441 Label slow, wrap, cont; | |
2442 | |
2443 if (needs_checks) { | |
2444 // Check that the function is really a JavaScript function. | |
2445 // r1: pushed function (to be verified) | |
2446 __ JumpIfSmi(r1, &slow); | |
2447 | |
2448 // Go to the slow case if we do not have a function. | |
2449 __ CompareObjectType(r1, r4, r4, JS_FUNCTION_TYPE); | |
2450 __ b(ne, &slow); | |
2451 } | |
2452 | |
2453 __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); | |
2454 EmitClassConstructorCallCheck(masm); | |
2455 | |
2456 // Fast-case: Invoke the function now. | |
2457 // r1: pushed function | |
2458 ParameterCount actual(argc); | |
2459 | |
2460 if (call_as_method) { | |
2461 if (needs_checks) { | |
2462 EmitContinueIfStrictOrNative(masm, &cont); | |
2463 } | |
2464 | |
2465 // Compute the receiver in sloppy mode. | |
2466 __ ldr(r3, MemOperand(sp, argc * kPointerSize)); | |
2467 | |
2468 if (needs_checks) { | |
2469 __ JumpIfSmi(r3, &wrap); | |
2470 __ CompareObjectType(r3, r4, r4, FIRST_SPEC_OBJECT_TYPE); | |
2471 __ b(lt, &wrap); | |
2472 } else { | |
2473 __ jmp(&wrap); | |
2474 } | |
2475 | |
2476 __ bind(&cont); | |
2477 } | |
2478 | |
2479 __ InvokeFunction(r1, actual, JUMP_FUNCTION, NullCallWrapper()); | |
2480 | |
2481 if (needs_checks) { | |
2482 // Slow-case: Non-function called. | |
2483 __ bind(&slow); | |
2484 EmitSlowCase(masm, argc); | |
2485 } | |
2486 | |
2487 if (call_as_method) { | |
2488 __ bind(&wrap); | |
2489 EmitWrapCase(masm, argc, &cont); | |
2490 } | |
2491 } | |
2492 | |
2493 | |
2494 void CallFunctionStub::Generate(MacroAssembler* masm) { | |
2495 CallFunctionNoFeedback(masm, argc(), NeedsChecks(), CallAsMethod()); | |
2496 } | |
2497 | |
2498 | |
2499 void CallConstructStub::Generate(MacroAssembler* masm) { | 2382 void CallConstructStub::Generate(MacroAssembler* masm) { |
2500 // r0 : number of arguments | 2383 // r0 : number of arguments |
2501 // r1 : the function to call | 2384 // r1 : the function to call |
2502 // r2 : feedback vector | 2385 // r2 : feedback vector |
2503 // r3 : slot in feedback vector (Smi, for RecordCallTarget) | 2386 // r3 : slot in feedback vector (Smi, for RecordCallTarget) |
2504 // r4 : original constructor (for IsSuperConstructorCall) | 2387 // r4 : original constructor (for IsSuperConstructorCall) |
2505 | 2388 |
2506 Label non_function; | 2389 Label non_function; |
2507 // Check that the function is not a smi. | 2390 // Check that the function is not a smi. |
2508 __ JumpIfSmi(r1, &non_function); | 2391 __ JumpIfSmi(r1, &non_function); |
(...skipping 62 matching lines...) |
2571 | 2454 |
2572 | 2455 |
2573 void CallICStub::Generate(MacroAssembler* masm) { | 2456 void CallICStub::Generate(MacroAssembler* masm) { |
2574 // r1 - function | 2457 // r1 - function |
2575 // r3 - slot id (Smi) | 2458 // r3 - slot id (Smi) |
2576 // r2 - vector | 2459 // r2 - vector |
2577 const int with_types_offset = | 2460 const int with_types_offset = |
2578 FixedArray::OffsetOfElementAt(TypeFeedbackVector::kWithTypesIndex); | 2461 FixedArray::OffsetOfElementAt(TypeFeedbackVector::kWithTypesIndex); |
2579 const int generic_offset = | 2462 const int generic_offset = |
2580 FixedArray::OffsetOfElementAt(TypeFeedbackVector::kGenericCountIndex); | 2463 FixedArray::OffsetOfElementAt(TypeFeedbackVector::kGenericCountIndex); |
2581 Label extra_checks_or_miss, slow_start; | 2464 Label extra_checks_or_miss, call; |
2582 Label slow, wrap, cont; | |
2583 Label have_js_function; | |
2584 int argc = arg_count(); | 2465 int argc = arg_count(); |
2585 ParameterCount actual(argc); | 2466 ParameterCount actual(argc); |
2586 | 2467 |
2587 // The checks. First, does r1 match the recorded monomorphic target? | 2468 // The checks. First, does r1 match the recorded monomorphic target? |
2588 __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3)); | 2469 __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3)); |
2589 __ ldr(r4, FieldMemOperand(r4, FixedArray::kHeaderSize)); | 2470 __ ldr(r4, FieldMemOperand(r4, FixedArray::kHeaderSize)); |
2590 | 2471 |
2591 // We don't know that we have a weak cell. We might have a private symbol | 2472 // We don't know that we have a weak cell. We might have a private symbol |
2592 // or an AllocationSite, but the memory is safe to examine. | 2473 // or an AllocationSite, but the memory is safe to examine. |
2593 // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to | 2474 // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to |
(...skipping 16 matching lines...) |
2610 // convincing us that we have a monomorphic JSFunction. | 2491 // convincing us that we have a monomorphic JSFunction. |
2611 __ JumpIfSmi(r1, &extra_checks_or_miss); | 2492 __ JumpIfSmi(r1, &extra_checks_or_miss); |
2612 | 2493 |
2613 // Increment the call count for monomorphic function calls. | 2494 // Increment the call count for monomorphic function calls. |
2614 __ add(r2, r2, Operand::PointerOffsetFromSmiKey(r3)); | 2495 __ add(r2, r2, Operand::PointerOffsetFromSmiKey(r3)); |
2615 __ add(r2, r2, Operand(FixedArray::kHeaderSize + kPointerSize)); | 2496 __ add(r2, r2, Operand(FixedArray::kHeaderSize + kPointerSize)); |
2616 __ ldr(r3, FieldMemOperand(r2, 0)); | 2497 __ ldr(r3, FieldMemOperand(r2, 0)); |
2617 __ add(r3, r3, Operand(Smi::FromInt(CallICNexus::kCallCountIncrement))); | 2498 __ add(r3, r3, Operand(Smi::FromInt(CallICNexus::kCallCountIncrement))); |
2618 __ str(r3, FieldMemOperand(r2, 0)); | 2499 __ str(r3, FieldMemOperand(r2, 0)); |
2619 | 2500 |
2620 __ bind(&have_js_function); | 2501 __ bind(&call); |
2621 | 2502 __ mov(r0, Operand(argc)); |
2622 __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); | 2503 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); |
2623 EmitClassConstructorCallCheck(masm); | |
2624 | |
2625 if (CallAsMethod()) { | |
2626 EmitContinueIfStrictOrNative(masm, &cont); | |
2627 // Compute the receiver in sloppy mode. | |
2628 __ ldr(r3, MemOperand(sp, argc * kPointerSize)); | |
2629 | |
2630 __ JumpIfSmi(r3, &wrap); | |
2631 __ CompareObjectType(r3, r4, r4, FIRST_SPEC_OBJECT_TYPE); | |
2632 __ b(lt, &wrap); | |
2633 | |
2634 __ bind(&cont); | |
2635 } | |
2636 | |
2637 __ InvokeFunction(r1, actual, JUMP_FUNCTION, NullCallWrapper()); | |
2638 | |
2639 __ bind(&slow); | |
2640 EmitSlowCase(masm, argc); | |
2641 | |
2642 if (CallAsMethod()) { | |
2643 __ bind(&wrap); | |
2644 EmitWrapCase(masm, argc, &cont); | |
2645 } | |
2646 | 2504 |
2647 __ bind(&extra_checks_or_miss); | 2505 __ bind(&extra_checks_or_miss); |
2648 Label uninitialized, miss, not_allocation_site; | 2506 Label uninitialized, miss, not_allocation_site; |
2649 | 2507 |
2650 __ CompareRoot(r4, Heap::kmegamorphic_symbolRootIndex); | 2508 __ CompareRoot(r4, Heap::kmegamorphic_symbolRootIndex); |
2651 __ b(eq, &slow_start); | 2509 __ b(eq, &call); |
2652 | 2510 |
2653 // Verify that r4 contains an AllocationSite | 2511 // Verify that r4 contains an AllocationSite |
2654 __ ldr(r5, FieldMemOperand(r4, HeapObject::kMapOffset)); | 2512 __ ldr(r5, FieldMemOperand(r4, HeapObject::kMapOffset)); |
2655 __ CompareRoot(r5, Heap::kAllocationSiteMapRootIndex); | 2513 __ CompareRoot(r5, Heap::kAllocationSiteMapRootIndex); |
2656 __ b(ne, &not_allocation_site); | 2514 __ b(ne, &not_allocation_site); |
2657 | 2515 |
2658 // We have an allocation site. | 2516 // We have an allocation site. |
2659 HandleArrayCase(masm, &miss); | 2517 HandleArrayCase(masm, &miss); |
2660 | 2518 |
2661 __ bind(&not_allocation_site); | 2519 __ bind(&not_allocation_site); |
(...skipping 15 matching lines...) |
2677 __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3)); | 2535 __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3)); |
2678 __ LoadRoot(ip, Heap::kmegamorphic_symbolRootIndex); | 2536 __ LoadRoot(ip, Heap::kmegamorphic_symbolRootIndex); |
2679 __ str(ip, FieldMemOperand(r4, FixedArray::kHeaderSize)); | 2537 __ str(ip, FieldMemOperand(r4, FixedArray::kHeaderSize)); |
2680 // We have to update statistics for runtime profiling. | 2538 // We have to update statistics for runtime profiling. |
2681 __ ldr(r4, FieldMemOperand(r2, with_types_offset)); | 2539 __ ldr(r4, FieldMemOperand(r2, with_types_offset)); |
2682 __ sub(r4, r4, Operand(Smi::FromInt(1))); | 2540 __ sub(r4, r4, Operand(Smi::FromInt(1))); |
2683 __ str(r4, FieldMemOperand(r2, with_types_offset)); | 2541 __ str(r4, FieldMemOperand(r2, with_types_offset)); |
2684 __ ldr(r4, FieldMemOperand(r2, generic_offset)); | 2542 __ ldr(r4, FieldMemOperand(r2, generic_offset)); |
2685 __ add(r4, r4, Operand(Smi::FromInt(1))); | 2543 __ add(r4, r4, Operand(Smi::FromInt(1))); |
2686 __ str(r4, FieldMemOperand(r2, generic_offset)); | 2544 __ str(r4, FieldMemOperand(r2, generic_offset)); |
2687 __ jmp(&slow_start); | 2545 __ jmp(&call); |
2688 | 2546 |
2689 __ bind(&uninitialized); | 2547 __ bind(&uninitialized); |
2690 | 2548 |
2691 // We are going monomorphic, provided we actually have a JSFunction. | 2549 // We are going monomorphic, provided we actually have a JSFunction. |
2692 __ JumpIfSmi(r1, &miss); | 2550 __ JumpIfSmi(r1, &miss); |
2693 | 2551 |
2694 // Go to the miss case if we do not have a function. | 2552 // Go to the miss case if we do not have a function. |
2695 __ CompareObjectType(r1, r4, r4, JS_FUNCTION_TYPE); | 2553 __ CompareObjectType(r1, r4, r4, JS_FUNCTION_TYPE); |
2696 __ b(ne, &miss); | 2554 __ b(ne, &miss); |
2697 | 2555 |
(...skipping 18 matching lines...) |
2716 // r3 - slot | 2574 // r3 - slot |
2717 // r1 - function | 2575 // r1 - function |
2718 { | 2576 { |
2719 FrameScope scope(masm, StackFrame::INTERNAL); | 2577 FrameScope scope(masm, StackFrame::INTERNAL); |
2720 CreateWeakCellStub create_stub(masm->isolate()); | 2578 CreateWeakCellStub create_stub(masm->isolate()); |
2721 __ Push(r1); | 2579 __ Push(r1); |
2722 __ CallStub(&create_stub); | 2580 __ CallStub(&create_stub); |
2723 __ Pop(r1); | 2581 __ Pop(r1); |
2724 } | 2582 } |
2725 | 2583 |
2726 __ jmp(&have_js_function); | 2584 __ jmp(&call); |
2727 | 2585 |
2728 // We are here because tracing is on or we encountered a MISS case we can't | 2586 // We are here because tracing is on or we encountered a MISS case we can't |
2729 // handle here. | 2587 // handle here. |
2730 __ bind(&miss); | 2588 __ bind(&miss); |
2731 GenerateMiss(masm); | 2589 GenerateMiss(masm); |
2732 | 2590 |
2733 // the slow case | 2591 __ jmp(&call); |
2734 __ bind(&slow_start); | |
2735 // Check that the function is really a JavaScript function. | |
2736 // r1: pushed function (to be verified) | |
2737 __ JumpIfSmi(r1, &slow); | |
2738 | |
2739 // Goto slow case if we do not have a function. | |
2740 __ CompareObjectType(r1, r4, r4, JS_FUNCTION_TYPE); | |
2741 __ b(ne, &slow); | |
2742 __ jmp(&have_js_function); | |
2743 } | 2592 } |
2744 | 2593 |
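For reference, the Smi-keyed addressing used throughout CallICStub::Generate (Operand::PointerOffsetFromSmiKey plus the kHeaderSize + kPointerSize adjustment for the call count) can be sketched in plain C++ as below. The constants are the usual 32-bit ARM values; treat this as an illustration rather than V8's actual implementation:

#include <cstdint>

// Illustrative sketch only -- assumes the usual 32-bit layout (1-bit Smi
// tag, 4-byte pointers); the real constants live in V8's headers.
constexpr int kSmiTagSize = 1;
constexpr int kPointerSize = 4;
constexpr int kPointerSizeLog2 = 2;

// A Smi-tagged slot index already equals index * 2 in its raw bits, so one
// more left shift turns it into a byte offset of index * kPointerSize.
int32_t PointerOffsetFromSmiKey(int32_t smi_raw_bits) {
  return smi_raw_bits << (kPointerSizeLog2 - kSmiTagSize);
}

// The Smi call count incremented by the stub lives one pointer past the
// feedback slot itself, hence the extra kPointerSize added in the code above.
int32_t CallCountOffsetFromSmiKey(int32_t smi_raw_bits) {
  return PointerOffsetFromSmiKey(smi_raw_bits) + kPointerSize;
}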
2745 | 2594 |
2746 void CallICStub::GenerateMiss(MacroAssembler* masm) { | 2595 void CallICStub::GenerateMiss(MacroAssembler* masm) { |
2747 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); | 2596 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
2748 | 2597 |
2749 // Push the function and feedback info. | 2598 // Push the function and feedback info. |
2750 __ Push(r1, r2, r3); | 2599 __ Push(r1, r2, r3); |
2751 | 2600 |
2752 // Call the entry. | 2601 // Call the entry. |
(...skipping 2837 matching lines...) |
5590 MemOperand(fp, 6 * kPointerSize), NULL); | 5439 MemOperand(fp, 6 * kPointerSize), NULL); |
5591 } | 5440 } |
5592 | 5441 |
5593 | 5442 |
5594 #undef __ | 5443 #undef __ |
5595 | 5444 |
5596 } // namespace internal | 5445 } // namespace internal |
5597 } // namespace v8 | 5446 } // namespace v8 |
5598 | 5447 |
5599 #endif // V8_TARGET_ARCH_ARM | 5448 #endif // V8_TARGET_ARCH_ARM |