| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/v8.h" | 5 #include "src/v8.h" |
| 6 | 6 |
| 7 #if V8_TARGET_ARCH_MIPS | 7 #if V8_TARGET_ARCH_MIPS |
| 8 | 8 |
| 9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
| 10 #include "src/bootstrapper.h" | 10 #include "src/bootstrapper.h" |
| (...skipping 2497 matching lines...) |
| 2508 // (9) Sliced string. Replace subject with parent. Go to (4). | 2508 // (9) Sliced string. Replace subject with parent. Go to (4). |
| 2509 // Load offset into t0 and replace subject string with parent. | 2509 // Load offset into t0 and replace subject string with parent. |
| 2510 __ lw(t0, FieldMemOperand(subject, SlicedString::kOffsetOffset)); | 2510 __ lw(t0, FieldMemOperand(subject, SlicedString::kOffsetOffset)); |
| 2511 __ sra(t0, t0, kSmiTagSize); | 2511 __ sra(t0, t0, kSmiTagSize); |
| 2512 __ lw(subject, FieldMemOperand(subject, SlicedString::kParentOffset)); | 2512 __ lw(subject, FieldMemOperand(subject, SlicedString::kParentOffset)); |
| 2513 __ jmp(&check_underlying); // Go to (4). | 2513 __ jmp(&check_underlying); // Go to (4). |
| 2514 #endif // V8_INTERPRETED_REGEXP | 2514 #endif // V8_INTERPRETED_REGEXP |
| 2515 } | 2515 } |
| 2516 | 2516 |
| 2517 | 2517 |
| 2518 static void CallStubInRecordCallTarget(MacroAssembler* masm, CodeStub* stub) { | 2518 static void CallStubInRecordCallTarget(MacroAssembler* masm, CodeStub* stub, |
| 2519 bool is_super) { |
| 2519 // a0 : number of arguments to the construct function | 2520 // a0 : number of arguments to the construct function |
| 2520 // a2 : Feedback vector | 2521 // a2 : feedback vector |
| 2521 // a3 : slot in feedback vector (Smi) | 2522 // a3 : slot in feedback vector (Smi) |
| 2522 // a1 : the function to call | 2523 // a1 : the function to call |
| 2524 // t0 : original constructor (for IsSuperConstructorCall) |
| 2523 FrameScope scope(masm, StackFrame::INTERNAL); | 2525 FrameScope scope(masm, StackFrame::INTERNAL); |
| 2524 const RegList kSavedRegs = 1 << 4 | // a0 | 2526 const RegList kSavedRegs = 1 << 4 | // a0 |
| 2525 1 << 5 | // a1 | 2527 1 << 5 | // a1 |
| 2526 1 << 6 | // a2 | 2528 1 << 6 | // a2 |
| 2527 1 << 7; // a3 | 2529 1 << 7 | // a3 |
| 2530 BoolToInt(is_super) << 8; // t0 |
| 2528 | 2531 |
| 2529 // Number-of-arguments register must be smi-tagged to call out. | 2532 // Number-of-arguments register must be smi-tagged to call out. |
| 2530 __ SmiTag(a0); | 2533 __ SmiTag(a0); |
| 2531 __ MultiPush(kSavedRegs); | 2534 __ MultiPush(kSavedRegs); |
| 2532 | 2535 |
| 2533 __ CallStub(stub); | 2536 __ CallStub(stub); |
| 2534 | 2537 |
| 2535 __ MultiPop(kSavedRegs); | 2538 __ MultiPop(kSavedRegs); |
| 2536 __ SmiUntag(a0); | 2539 __ SmiUntag(a0); |
| 2537 } | 2540 } |
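The new kSavedRegs computation is the core of this hunk: t0 is only added to the saved set when the stub is recording a super constructor call, since that is the only case in which it carries the original constructor across the nested stub call. A minimal standalone sketch of that conditional bitmask, assuming BoolToInt maps true/false to 1/0 as the diff suggests (the helper and the test below are illustrative, not V8 code):

```cpp
#include <cassert>
#include <cstdint>

using RegList = uint32_t;                  // one bit per CPU register
inline int BoolToInt(bool b) { return b ? 1 : 0; }

// Mirrors the kSavedRegs computation in CallStubInRecordCallTarget:
// a0..a3 (bits 4..7) are always saved, t0 (bit 8) only for super calls.
RegList SavedRegsForRecordCallTarget(bool is_super) {
  return 1 << 4 |                   // a0: argument count
         1 << 5 |                   // a1: target function
         1 << 6 |                   // a2: feedback vector
         1 << 7 |                   // a3: slot index (Smi)
         BoolToInt(is_super) << 8;  // t0: original constructor, if present
}

int main() {
  assert(SavedRegsForRecordCallTarget(false) == 0x0F0);  // a0-a3 only
  assert(SavedRegsForRecordCallTarget(true) == 0x1F0);   // a0-a3 plus t0
}
```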
| 2538 | 2541 |
| 2539 | 2542 |
| 2540 static void GenerateRecordCallTarget(MacroAssembler* masm) { | 2543 static void GenerateRecordCallTarget(MacroAssembler* masm, bool is_super) { |
| 2541 // Cache the called function in a feedback vector slot. Cache states | 2544 // Cache the called function in a feedback vector slot. Cache states |
| 2542 // are uninitialized, monomorphic (indicated by a JSFunction), and | 2545 // are uninitialized, monomorphic (indicated by a JSFunction), and |
| 2543 // megamorphic. | 2546 // megamorphic. |
| 2544 // a0 : number of arguments to the construct function | 2547 // a0 : number of arguments to the construct function |
| 2545 // a1 : the function to call | 2548 // a1 : the function to call |
| 2546 // a2 : Feedback vector | 2549 // a2 : feedback vector |
| 2547 // a3 : slot in feedback vector (Smi) | 2550 // a3 : slot in feedback vector (Smi) |
| 2551 // t0 : original constructor (for IsSuperConstructorCall) |
| 2548 Label initialize, done, miss, megamorphic, not_array_function; | 2552 Label initialize, done, miss, megamorphic, not_array_function; |
| 2549 | 2553 |
| 2550 DCHECK_EQ(*TypeFeedbackVector::MegamorphicSentinel(masm->isolate()), | 2554 DCHECK_EQ(*TypeFeedbackVector::MegamorphicSentinel(masm->isolate()), |
| 2551 masm->isolate()->heap()->megamorphic_symbol()); | 2555 masm->isolate()->heap()->megamorphic_symbol()); |
| 2552 DCHECK_EQ(*TypeFeedbackVector::UninitializedSentinel(masm->isolate()), | 2556 DCHECK_EQ(*TypeFeedbackVector::UninitializedSentinel(masm->isolate()), |
| 2553 masm->isolate()->heap()->uninitialized_symbol()); | 2557 masm->isolate()->heap()->uninitialized_symbol()); |
| 2554 | 2558 |
| 2555 // Load the cache state into t0. | 2559 // Load the cache state into t2. |
| 2556 __ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize); | 2560 __ sll(t2, a3, kPointerSizeLog2 - kSmiTagSize); |
| 2557 __ Addu(t0, a2, Operand(t0)); | 2561 __ Addu(t2, a2, Operand(t2)); |
| 2558 __ lw(t0, FieldMemOperand(t0, FixedArray::kHeaderSize)); | 2562 __ lw(t2, FieldMemOperand(t2, FixedArray::kHeaderSize)); |
| 2559 | 2563 |
| 2560 // A monomorphic cache hit or an already megamorphic state: invoke the | 2564 // A monomorphic cache hit or an already megamorphic state: invoke the |
| 2561 // function without changing the state. | 2565 // function without changing the state. |
| 2562 // We don't know if t0 is a WeakCell or a Symbol, but it's harmless to read at | 2566 // We don't know if t2 is a WeakCell or a Symbol, but it's harmless to read at |
| 2563 // this position in a symbol (see static asserts in type-feedback-vector.h). | 2567 // this position in a symbol (see static asserts in type-feedback-vector.h). |
| 2564 Label check_allocation_site; | 2568 Label check_allocation_site; |
| 2565 Register feedback_map = t1; | 2569 Register feedback_map = t1; |
| 2566 Register weak_value = t4; | 2570 Register weak_value = t4; |
| 2567 __ lw(weak_value, FieldMemOperand(t0, WeakCell::kValueOffset)); | 2571 __ lw(weak_value, FieldMemOperand(t2, WeakCell::kValueOffset)); |
| 2568 __ Branch(&done, eq, a1, Operand(weak_value)); | 2572 __ Branch(&done, eq, a1, Operand(weak_value)); |
| 2569 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex); | 2573 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex); |
| 2570 __ Branch(&done, eq, t0, Operand(at)); | 2574 __ Branch(&done, eq, t2, Operand(at)); |
| 2571 __ lw(feedback_map, FieldMemOperand(t0, HeapObject::kMapOffset)); | 2575 __ lw(feedback_map, FieldMemOperand(t2, HeapObject::kMapOffset)); |
| 2572 __ LoadRoot(at, Heap::kWeakCellMapRootIndex); | 2576 __ LoadRoot(at, Heap::kWeakCellMapRootIndex); |
| 2573 __ Branch(FLAG_pretenuring_call_new ? &miss : &check_allocation_site, ne, | 2577 __ Branch(FLAG_pretenuring_call_new ? &miss : &check_allocation_site, ne, |
| 2574 feedback_map, Operand(at)); | 2578 feedback_map, Operand(at)); |
| 2575 | 2579 |
| 2576 // If the weak cell is cleared, we have a new chance to become monomorphic. | 2580 // If the weak cell is cleared, we have a new chance to become monomorphic. |
| 2577 __ JumpIfSmi(weak_value, &initialize); | 2581 __ JumpIfSmi(weak_value, &initialize); |
| 2578 __ jmp(&megamorphic); | 2582 __ jmp(&megamorphic); |
| 2579 | 2583 |
| 2580 if (!FLAG_pretenuring_call_new) { | 2584 if (!FLAG_pretenuring_call_new) { |
| 2581 __ bind(&check_allocation_site); | 2585 __ bind(&check_allocation_site); |
| 2582 // If we came here, we need to see if we are the array function. | 2586 // If we came here, we need to see if we are the array function. |
| 2583 // If we didn't have a matching function, and we didn't find the megamorph | 2587 // If we didn't have a matching function, and we didn't find the megamorph |
| 2584 // sentinel, then we have in the slot either some other function or an | 2588 // sentinel, then we have in the slot either some other function or an |
| 2585 // AllocationSite. | 2589 // AllocationSite. |
| 2586 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex); | 2590 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex); |
| 2587 __ Branch(&miss, ne, feedback_map, Operand(at)); | 2591 __ Branch(&miss, ne, feedback_map, Operand(at)); |
| 2588 | 2592 |
| 2589 // Make sure the function is the Array() function | 2593 // Make sure the function is the Array() function |
| 2590 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, t0); | 2594 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, t2); |
| 2591 __ Branch(&megamorphic, ne, a1, Operand(t0)); | 2595 __ Branch(&megamorphic, ne, a1, Operand(t2)); |
| 2592 __ jmp(&done); | 2596 __ jmp(&done); |
| 2593 } | 2597 } |
| 2594 | 2598 |
| 2595 __ bind(&miss); | 2599 __ bind(&miss); |
| 2596 | 2600 |
| 2597 // A monomorphic miss (i.e, here the cache is not uninitialized) goes | 2601 // A monomorphic miss (i.e, here the cache is not uninitialized) goes |
| 2598 // megamorphic. | 2602 // megamorphic. |
| 2599 __ LoadRoot(at, Heap::kuninitialized_symbolRootIndex); | 2603 __ LoadRoot(at, Heap::kuninitialized_symbolRootIndex); |
| 2600 __ Branch(&initialize, eq, t0, Operand(at)); | 2604 __ Branch(&initialize, eq, t2, Operand(at)); |
| 2601 // MegamorphicSentinel is an immortal immovable object (undefined) so no | 2605 // MegamorphicSentinel is an immortal immovable object (undefined) so no |
| 2602 // write-barrier is needed. | 2606 // write-barrier is needed. |
| 2603 __ bind(&megamorphic); | 2607 __ bind(&megamorphic); |
| 2604 __ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize); | 2608 __ sll(t2, a3, kPointerSizeLog2 - kSmiTagSize); |
| 2605 __ Addu(t0, a2, Operand(t0)); | 2609 __ Addu(t2, a2, Operand(t2)); |
| 2606 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex); | 2610 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex); |
| 2607 __ sw(at, FieldMemOperand(t0, FixedArray::kHeaderSize)); | 2611 __ sw(at, FieldMemOperand(t2, FixedArray::kHeaderSize)); |
| 2608 __ jmp(&done); | 2612 __ jmp(&done); |
| 2609 | 2613 |
| 2610 // An uninitialized cache is patched with the function. | 2614 // An uninitialized cache is patched with the function. |
| 2611 __ bind(&initialize); | 2615 __ bind(&initialize); |
| 2612 if (!FLAG_pretenuring_call_new) { | 2616 if (!FLAG_pretenuring_call_new) { |
| 2613 // Make sure the function is the Array() function. | 2617 // Make sure the function is the Array() function. |
| 2614 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, t0); | 2618 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, t2); |
| 2615 __ Branch(¬_array_function, ne, a1, Operand(t0)); | 2619 __ Branch(¬_array_function, ne, a1, Operand(t2)); |
| 2616 | 2620 |
| 2617 // The target function is the Array constructor, | 2621 // The target function is the Array constructor, |
| 2618 // Create an AllocationSite if we don't already have it, store it in the | 2622 // Create an AllocationSite if we don't already have it, store it in the |
| 2619 // slot. | 2623 // slot. |
| 2620 CreateAllocationSiteStub create_stub(masm->isolate()); | 2624 CreateAllocationSiteStub create_stub(masm->isolate()); |
| 2621 CallStubInRecordCallTarget(masm, &create_stub); | 2625 CallStubInRecordCallTarget(masm, &create_stub, is_super); |
| 2622 __ Branch(&done); | 2626 __ Branch(&done); |
| 2623 | 2627 |
| 2624 __ bind(¬_array_function); | 2628 __ bind(¬_array_function); |
| 2625 } | 2629 } |
| 2626 | 2630 |
| 2627 CreateWeakCellStub create_stub(masm->isolate()); | 2631 CreateWeakCellStub create_stub(masm->isolate()); |
| 2628 CallStubInRecordCallTarget(masm, &create_stub); | 2632 CallStubInRecordCallTarget(masm, &create_stub, is_super); |
| 2629 __ bind(&done); | 2633 __ bind(&done); |
| 2630 } | 2634 } |
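The rename from t0 to t2 for the cache state leaves the feedback-slot addressing untouched: the Smi-encoded slot index in a3 is shifted by kPointerSizeLog2 - kSmiTagSize, so the Smi tag supplies half of the pointer scaling, and the result is added to the vector base before loading past the FixedArray header. A small arithmetic sketch under 32-bit MIPS assumptions (constants mirror the diff; the helper is hypothetical, and the -kHeapObjectTag adjustment applied by FieldMemOperand is left out):

```cpp
#include <cassert>
#include <cstdint>

// 32-bit MIPS layout assumed by the stub above (illustrative constants).
constexpr int kPointerSize = 4;
constexpr int kPointerSizeLog2 = 2;
constexpr int kSmiTagSize = 1;                            // Smis are value << 1
constexpr int kFixedArrayHeaderSize = 2 * kPointerSize;   // map + length

// Byte offset (relative to the untagged vector start) of feedback slot i,
// given the Smi-encoded slot index that arrives in a3.
int32_t FeedbackSlotOffset(int32_t smi_slot_index) {
  // sll(t2, a3, kPointerSizeLog2 - kSmiTagSize), then FixedArray::kHeaderSize.
  return (smi_slot_index << (kPointerSizeLog2 - kSmiTagSize)) +
         kFixedArrayHeaderSize;
}

int main() {
  int32_t smi3 = 3 << kSmiTagSize;  // slot index 3 as a Smi
  assert(FeedbackSlotOffset(smi3) == kFixedArrayHeaderSize + 3 * kPointerSize);
}
```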
| 2631 | 2635 |
| 2632 | 2636 |
| 2633 static void EmitContinueIfStrictOrNative(MacroAssembler* masm, Label* cont) { | 2637 static void EmitContinueIfStrictOrNative(MacroAssembler* masm, Label* cont) { |
| 2634 __ lw(a3, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); | 2638 __ lw(a3, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); |
| 2635 __ lw(t0, FieldMemOperand(a3, SharedFunctionInfo::kCompilerHintsOffset)); | 2639 __ lw(t0, FieldMemOperand(a3, SharedFunctionInfo::kCompilerHintsOffset)); |
| 2636 | 2640 |
| 2637 // Do not transform the receiver for strict mode functions. | 2641 // Do not transform the receiver for strict mode functions. |
| 2638 int32_t strict_mode_function_mask = | 2642 int32_t strict_mode_function_mask = |
| (...skipping 112 matching lines...) |
| 2751 // t0 : original constructor (for IsSuperConstructorCall) | 2755 // t0 : original constructor (for IsSuperConstructorCall) |
| 2752 Label slow, non_function_call; | 2756 Label slow, non_function_call; |
| 2753 | 2757 |
| 2754 // Check that the function is not a smi. | 2758 // Check that the function is not a smi. |
| 2755 __ JumpIfSmi(a1, &non_function_call); | 2759 __ JumpIfSmi(a1, &non_function_call); |
| 2756 // Check that the function is a JSFunction. | 2760 // Check that the function is a JSFunction. |
| 2757 __ GetObjectType(a1, t1, t1); | 2761 __ GetObjectType(a1, t1, t1); |
| 2758 __ Branch(&slow, ne, t1, Operand(JS_FUNCTION_TYPE)); | 2762 __ Branch(&slow, ne, t1, Operand(JS_FUNCTION_TYPE)); |
| 2759 | 2763 |
| 2760 if (RecordCallTarget()) { | 2764 if (RecordCallTarget()) { |
| 2761 if (IsSuperConstructorCall()) { | 2765 GenerateRecordCallTarget(masm, IsSuperConstructorCall()); |
| 2762 __ push(t0); | |
| 2763 } | |
| 2764 GenerateRecordCallTarget(masm); | |
| 2765 if (IsSuperConstructorCall()) { | |
| 2766 __ pop(t0); | |
| 2767 } | |
| 2768 | 2766 |
| 2769 __ sll(at, a3, kPointerSizeLog2 - kSmiTagSize); | 2767 __ sll(at, a3, kPointerSizeLog2 - kSmiTagSize); |
| 2770 __ Addu(t1, a2, at); | 2768 __ Addu(t1, a2, at); |
| 2771 if (FLAG_pretenuring_call_new) { | 2769 if (FLAG_pretenuring_call_new) { |
| 2772 // Put the AllocationSite from the feedback vector into a2. | 2770 // Put the AllocationSite from the feedback vector into a2. |
| 2773 // By adding kPointerSize we encode that we know the AllocationSite | 2771 // By adding kPointerSize we encode that we know the AllocationSite |
| 2774 // entry is at the feedback vector slot given by a3 + 1. | 2772 // entry is at the feedback vector slot given by a3 + 1. |
| 2775 __ lw(a2, FieldMemOperand(t1, FixedArray::kHeaderSize + kPointerSize)); | 2773 __ lw(a2, FieldMemOperand(t1, FixedArray::kHeaderSize + kPointerSize)); |
| 2776 } else { | 2774 } else { |
| 2777 Label feedback_register_initialized; | 2775 Label feedback_register_initialized; |
| (...skipping 2801 matching lines...) |
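For the pretenuring branch in the hunk above, the extra kPointerSize in the operand encodes the convention that the AllocationSite sits in the feedback slot immediately after the call's own slot (a3 + 1). A tiny self-contained sketch of that offset, again under 32-bit MIPS assumptions with illustrative names:

```cpp
#include <cassert>

// Illustrative constants for 32-bit MIPS (mirroring the diff, not V8 headers).
constexpr int kPointerSize = 4;
constexpr int kFixedArrayHeaderSize = 2 * kPointerSize;  // map + length

// With pretenuring, the AllocationSite is stored in the feedback slot right
// after the call's own slot, which is what the stub's
// "FixedArray::kHeaderSize + kPointerSize" operand expresses.
int AllocationSiteOffset(int slot_index) {
  return kFixedArrayHeaderSize + (slot_index + 1) * kPointerSize;
}

int main() {
  assert(AllocationSiteOffset(2) == kFixedArrayHeaderSize + 3 * kPointerSize);
}
```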
| 5579 MemOperand(fp, 6 * kPointerSize), NULL); | 5577 MemOperand(fp, 6 * kPointerSize), NULL); |
| 5580 } | 5578 } |
| 5581 | 5579 |
| 5582 | 5580 |
| 5583 #undef __ | 5581 #undef __ |
| 5584 | 5582 |
| 5585 } // namespace internal | 5583 } // namespace internal |
| 5586 } // namespace v8 | 5584 } // namespace v8 |
| 5587 | 5585 |
| 5588 #endif // V8_TARGET_ARCH_MIPS | 5586 #endif // V8_TARGET_ARCH_MIPS |