OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_MIPS | 5 #if V8_TARGET_ARCH_MIPS |
6 | 6 |
7 #include "src/base/bits.h" | 7 #include "src/base/bits.h" |
8 #include "src/bootstrapper.h" | 8 #include "src/bootstrapper.h" |
9 #include "src/code-stubs.h" | 9 #include "src/code-stubs.h" |
10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
(...skipping 2485 matching lines...)
2496 __ Branch(&done); | 2496 __ Branch(&done); |
2497 | 2497 |
2498 __ bind(¬_array_function); | 2498 __ bind(¬_array_function); |
2499 CreateWeakCellStub weak_cell_stub(masm->isolate()); | 2499 CreateWeakCellStub weak_cell_stub(masm->isolate()); |
2500 CallStubInRecordCallTarget(masm, &weak_cell_stub, is_super); | 2500 CallStubInRecordCallTarget(masm, &weak_cell_stub, is_super); |
2501 __ bind(&done); | 2501 __ bind(&done); |
2502 } | 2502 } |
2503 | 2503 |
2504 | 2504 |
2505 static void EmitContinueIfStrictOrNative(MacroAssembler* masm, Label* cont) { | 2505 static void EmitContinueIfStrictOrNative(MacroAssembler* masm, Label* cont) { |
2506 __ lw(a3, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); | 2506 // ----------- S t a t e ------------- |
| 2507 // -- a1 : the function to call |
| 2508 // -- a3 : the function's shared function info |
| 2509 // ----------------------------------- |
2507 __ lw(t0, FieldMemOperand(a3, SharedFunctionInfo::kCompilerHintsOffset)); | 2510 __ lw(t0, FieldMemOperand(a3, SharedFunctionInfo::kCompilerHintsOffset)); |
2508 | 2511 |
2509 // Do not transform the receiver for strict mode functions. | 2512 // Do not transform the receiver for strict mode functions. |
2510 int32_t strict_mode_function_mask = | 2513 int32_t strict_mode_function_mask = |
2511 1 << (SharedFunctionInfo::kStrictModeFunction + kSmiTagSize); | 2514 1 << (SharedFunctionInfo::kStrictModeFunction + kSmiTagSize); |
2512 // Do not transform the receiver for native (Compilerhints already in a3). | 2515 // Do not transform the receiver for native (Compilerhints already in a3). |
2513 int32_t native_mask = 1 << (SharedFunctionInfo::kNative + kSmiTagSize); | 2516 int32_t native_mask = 1 << (SharedFunctionInfo::kNative + kSmiTagSize); |
2514 __ And(at, t0, Operand(strict_mode_function_mask | native_mask)); | 2517 __ And(at, t0, Operand(strict_mode_function_mask | native_mask)); |
2515 __ Branch(cont, ne, at, Operand(zero_reg)); | 2518 __ Branch(cont, ne, at, Operand(zero_reg)); |
2516 } | 2519 } |
(...skipping 12 matching lines...)
2529 __ mov(a0, a3); | 2532 __ mov(a0, a3); |
2530 ToObjectStub stub(masm->isolate()); | 2533 ToObjectStub stub(masm->isolate()); |
2531 __ CallStub(&stub); | 2534 __ CallStub(&stub); |
2532 __ pop(a1); | 2535 __ pop(a1); |
2533 } | 2536 } |
2534 __ Branch(USE_DELAY_SLOT, cont); | 2537 __ Branch(USE_DELAY_SLOT, cont); |
2535 __ sw(v0, MemOperand(sp, argc * kPointerSize)); | 2538 __ sw(v0, MemOperand(sp, argc * kPointerSize)); |
2536 } | 2539 } |
2537 | 2540 |
2538 | 2541 |
| 2542 static void EmitClassConstructorCallCheck(MacroAssembler* masm) { |
| 2543 // ----------- S t a t e ------------- |
| 2544 // -- a1 : the function to call |
| 2545 // -- a3 : the function's shared function info |
| 2546 // ----------------------------------- |
| 2547 // ClassConstructor Check: ES6 section 9.2.1 [[Call]] |
| 2548 Label non_class_constructor; |
 | 2549 // Check whether the current function is a classConstructor. This only works |
 | 2550 // since kClassConstructor is more than 1 bit away from the byte boundary in |
 | 2551 // CompilerHints (note that compiler_hints is stored as a Smi on 32-bit |
 | 2552 // architectures). |
| 2553 STATIC_ASSERT((FunctionKind::kClassConstructor << kSmiTagSize) < |
| 2554 (1 << kBitsPerByte)); |
| 2555 __ lbu(t0, FieldMemOperand(a3, SharedFunctionInfo::kFunctionKindByteOffset)); |
 | 2556 // Left-shift the mask to account for Smi storage on 32-bit architectures. |
| 2557 __ And(at, t0, Operand(FunctionKind::kClassConstructor << kSmiTagSize)); |
| 2558 __ Branch(&non_class_constructor, eq, at, Operand(zero_reg)); |
 | 2559 // Step 2: if we call a classConstructor function, throw a TypeError. |
| 2560 __ Jump(masm->isolate()->builtins()->CallFunction(), RelocInfo::CODE_TARGET); |
| 2561 __ bind(&non_class_constructor); |
| 2562 } |
| 2563 |
| 2564 |
2539 static void CallFunctionNoFeedback(MacroAssembler* masm, | 2565 static void CallFunctionNoFeedback(MacroAssembler* masm, |
2540 int argc, bool needs_checks, | 2566 int argc, bool needs_checks, |
2541 bool call_as_method) { | 2567 bool call_as_method) { |
2542 // a1 : the function to call | 2568 // a1 : the function to call |
2543 Label slow, wrap, cont; | 2569 Label slow, wrap, cont; |
2544 | 2570 |
2545 if (needs_checks) { | 2571 if (needs_checks) { |
2546 // Check that the function is really a JavaScript function. | 2572 // Check that the function is really a JavaScript function. |
2547 // a1: pushed function (to be verified) | 2573 // a1: pushed function (to be verified) |
2548 __ JumpIfSmi(a1, &slow); | 2574 __ JumpIfSmi(a1, &slow); |
2549 | 2575 |
2550 // Goto slow case if we do not have a function. | 2576 // Goto slow case if we do not have a function. |
2551 __ GetObjectType(a1, t0, t0); | 2577 __ GetObjectType(a1, t0, t0); |
2552 __ Branch(&slow, ne, t0, Operand(JS_FUNCTION_TYPE)); | 2578 __ Branch(&slow, ne, t0, Operand(JS_FUNCTION_TYPE)); |
2553 } | 2579 } |
2554 | 2580 |
| 2581 __ lw(a3, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); |
| 2582 EmitClassConstructorCallCheck(masm); |
| 2583 |
2555 // Fast-case: Invoke the function now. | 2584 // Fast-case: Invoke the function now. |
2556 // a1: pushed function | 2585 // a1: pushed function |
2557 ParameterCount actual(argc); | 2586 ParameterCount actual(argc); |
2558 | 2587 |
2559 if (call_as_method) { | 2588 if (call_as_method) { |
2560 if (needs_checks) { | 2589 if (needs_checks) { |
2561 EmitContinueIfStrictOrNative(masm, &cont); | 2590 EmitContinueIfStrictOrNative(masm, &cont); |
2562 } | 2591 } |
2563 | 2592 |
2564 // Compute the receiver in sloppy mode. | 2593 // Compute the receiver in sloppy mode. |
(...skipping 147 matching lines...)
2712 __ JumpIfSmi(a1, &extra_checks_or_miss); | 2741 __ JumpIfSmi(a1, &extra_checks_or_miss); |
2713 | 2742 |
2714 // Increment the call count for monomorphic function calls. | 2743 // Increment the call count for monomorphic function calls. |
2715 __ sll(at, a3, kPointerSizeLog2 - kSmiTagSize); | 2744 __ sll(at, a3, kPointerSizeLog2 - kSmiTagSize); |
2716 __ Addu(at, a2, Operand(at)); | 2745 __ Addu(at, a2, Operand(at)); |
2717 __ lw(a3, FieldMemOperand(at, FixedArray::kHeaderSize + kPointerSize)); | 2746 __ lw(a3, FieldMemOperand(at, FixedArray::kHeaderSize + kPointerSize)); |
2718 __ Addu(a3, a3, Operand(Smi::FromInt(CallICNexus::kCallCountIncrement))); | 2747 __ Addu(a3, a3, Operand(Smi::FromInt(CallICNexus::kCallCountIncrement))); |
2719 __ sw(a3, FieldMemOperand(at, FixedArray::kHeaderSize + kPointerSize)); | 2748 __ sw(a3, FieldMemOperand(at, FixedArray::kHeaderSize + kPointerSize)); |
2720 | 2749 |
2721 __ bind(&have_js_function); | 2750 __ bind(&have_js_function); |
| 2751 |
| 2752 __ lw(a3, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); |
| 2753 EmitClassConstructorCallCheck(masm); |
| 2754 |
2722 if (CallAsMethod()) { | 2755 if (CallAsMethod()) { |
2723 EmitContinueIfStrictOrNative(masm, &cont); | 2756 EmitContinueIfStrictOrNative(masm, &cont); |
2724 // Compute the receiver in sloppy mode. | 2757 // Compute the receiver in sloppy mode. |
2725 __ lw(a3, MemOperand(sp, argc * kPointerSize)); | 2758 __ lw(a3, MemOperand(sp, argc * kPointerSize)); |
2726 | 2759 |
2727 __ JumpIfSmi(a3, &wrap); | 2760 __ JumpIfSmi(a3, &wrap); |
2728 __ GetObjectType(a3, t0, t0); | 2761 __ GetObjectType(a3, t0, t0); |
2729 __ Branch(&wrap, lt, t0, Operand(FIRST_SPEC_OBJECT_TYPE)); | 2762 __ Branch(&wrap, lt, t0, Operand(FIRST_SPEC_OBJECT_TYPE)); |
2730 | 2763 |
2731 __ bind(&cont); | 2764 __ bind(&cont); |
(...skipping 3027 matching lines...)
5759 MemOperand(fp, 6 * kPointerSize), NULL); | 5792 MemOperand(fp, 6 * kPointerSize), NULL); |
5760 } | 5793 } |
5761 | 5794 |
5762 | 5795 |
5763 #undef __ | 5796 #undef __ |
5764 | 5797 |
5765 } // namespace internal | 5798 } // namespace internal |
5766 } // namespace v8 | 5799 } // namespace v8 |
5767 | 5800 |
5768 #endif // V8_TARGET_ARCH_MIPS | 5801 #endif // V8_TARGET_ARCH_MIPS |
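
For context on the bit arithmetic the new EmitClassConstructorCallCheck relies on, here is a minimal standalone C++ sketch of the same check. The concrete values (kSmiTagSize = 1, the kClassConstructor mask) and the helper name are assumptions chosen for illustration, not the constants from the V8 headers; the patch itself reads the byte at SharedFunctionInfo::kFunctionKindByteOffset with `lbu`, and its STATIC_ASSERT is what guarantees the shifted mask still fits in that single byte.

// Standalone illustration only -- names and values below are assumed for the
// example and are not the V8 constants.
#include <cstdint>

namespace sketch {

constexpr int kSmiTagSize = 1;   // assumed: 1-bit Smi tag on 32-bit targets
constexpr int kBitsPerByte = 8;
constexpr uint32_t kClassConstructorMask = 0x3 << 4;  // hypothetical FunctionKind bits

// Mirrors the STATIC_ASSERT in the patch: the mask, shifted to account for
// Smi storage, must stay within the one byte that `lbu` loads.
static_assert((kClassConstructorMask << kSmiTagSize) < (1u << kBitsPerByte),
              "shifted mask must stay within one byte");

// `function_kind_byte` stands for the byte loaded from the shared function
// info (the `lbu` in EmitClassConstructorCallCheck).
inline bool IsClassConstructor(uint8_t function_kind_byte) {
  // Same test as the emitted `And` + `Branch`: a non-zero result means a class
  // constructor, so the stub tail-calls the CallFunction builtin, which throws
  // the TypeError required by ES6 9.2.1 [[Call]] step 2.
  return (function_kind_byte & (kClassConstructorMask << kSmiTagSize)) != 0;
}

}  // namespace sketch

The single-byte load is the design point the comment block around line 2549 is making: because the kind bits sit well inside one byte even after the Smi shift, the stub never has to load and untag the full compiler_hints word.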