OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #if V8_TARGET_ARCH_MIPS | 7 #if V8_TARGET_ARCH_MIPS |
8 | 8 |
9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
10 #include "src/bootstrapper.h" | 10 #include "src/bootstrapper.h" |
(...skipping 2485 matching lines...)
2496 // (9) Sliced string. Replace subject with parent. Go to (4). | 2496 // (9) Sliced string. Replace subject with parent. Go to (4). |
2497 // Load offset into t0 and replace subject string with parent. | 2497 // Load offset into t0 and replace subject string with parent. |
2498 __ lw(t0, FieldMemOperand(subject, SlicedString::kOffsetOffset)); | 2498 __ lw(t0, FieldMemOperand(subject, SlicedString::kOffsetOffset)); |
2499 __ sra(t0, t0, kSmiTagSize); | 2499 __ sra(t0, t0, kSmiTagSize); |
2500 __ lw(subject, FieldMemOperand(subject, SlicedString::kParentOffset)); | 2500 __ lw(subject, FieldMemOperand(subject, SlicedString::kParentOffset)); |
2501 __ jmp(&check_underlying); // Go to (4). | 2501 __ jmp(&check_underlying); // Go to (4). |
2502 #endif // V8_INTERPRETED_REGEXP | 2502 #endif // V8_INTERPRETED_REGEXP |
2503 } | 2503 } |
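
For context on the sliced-string step above (step 9 of the RegExp exec stub): a slice stores a smi-tagged offset plus a parent string, so the stub untags the offset with an arithmetic shift, replaces the subject with the parent, and loops back to re-check the underlying string. The standalone C++ sketch below only models that resolution; SlicedView and resolve are illustrative names, not V8 types.

#include <cstdint>
#include <iostream>
#include <string>

// Hypothetical model of a sliced string: a parent string plus a smi-tagged
// offset (on 32-bit V8 a smi is the value shifted left by one tag bit).
struct SlicedView {
  const std::string* parent;
  int32_t smi_tagged_offset;  // offset << 1, low bit 0
};

constexpr int kSmiTagSize = 1;

// Mirrors what the stub does: arithmetic-shift the tagged offset to untag it,
// then continue with the parent as the new subject string.
std::string resolve(const SlicedView& s) {
  int32_t offset = s.smi_tagged_offset >> kSmiTagSize;  // __ sra(t0, t0, kSmiTagSize)
  return s.parent->substr(offset);                      // subject = parent, at offset
}

int main() {
  std::string parent = "hello, sliced world";
  SlicedView view{&parent, 7 << kSmiTagSize};  // slice starting at character 7
  std::cout << resolve(view) << "\n";          // prints "sliced world"
}
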
2504 | 2504 |
2505 | 2505 |
| 2506 static void CallStubInRecordCallTarget(MacroAssembler* masm, CodeStub* stub) { |
| 2507 // a0 : number of arguments to the construct function |
| 2508 // a2 : Feedback vector |
| 2509 // a3 : slot in feedback vector (Smi) |
| 2510 // a1 : the function to call |
| 2511 FrameScope scope(masm, StackFrame::INTERNAL); |
| 2512 const RegList kSavedRegs = 1 << 4 | // a0 |
| 2513 1 << 5 | // a1 |
| 2514 1 << 6 | // a2 |
| 2515 1 << 7; // a3 |
| 2516 |
| 2517 // Arguments register must be smi-tagged to call out. |
| 2518 __ SmiTag(a0); |
| 2519 __ MultiPush(kSavedRegs); |
| 2520 |
| 2521 __ CallStub(stub); |
| 2522 |
| 2523 __ MultiPop(kSavedRegs); |
| 2524 __ SmiUntag(a0); |
| 2525 } |
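
The new CallStubInRecordCallTarget helper factors out the save/call/restore pattern used around the stub calls below: a0 holds the raw argument count, which is smi-tagged before the registers are pushed so the saved value looks like a tagged value while the stub runs, then untagged after the pop. A minimal sketch of the 32-bit smi tag arithmetic this relies on (one tag bit, tag value 0); the helper names here are illustrative, not V8's.

#include <cassert>
#include <cstdint>

// On 32-bit V8 (including MIPS), a small integer ("smi") is stored as the
// value shifted left by a one-bit tag, with tag value 0. Tagging the raw
// argument count keeps it GC-safe while registers sit on the stack.
constexpr int32_t kSmiTagSize = 1;
constexpr int32_t kSmiTag = 0;

int32_t SmiTag(int32_t value) { return (value << kSmiTagSize) | kSmiTag; }
int32_t SmiUntag(int32_t smi) { return smi >> kSmiTagSize; }  // arithmetic shift

int main() {
  int32_t argc = 3;                      // raw argument count, as held in a0
  int32_t tagged = SmiTag(argc);         // __ SmiTag(a0) before MultiPush/CallStub
  assert((tagged & 1) == kSmiTag);       // low bit clear: treated as a smi, not a pointer
  int32_t restored = SmiUntag(tagged);   // __ SmiUntag(a0) after MultiPop
  assert(restored == argc);
  return 0;
}
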
| 2526 |
| 2527 |
2506 static void GenerateRecordCallTarget(MacroAssembler* masm) { | 2528 static void GenerateRecordCallTarget(MacroAssembler* masm) { |
2507 // Cache the called function in a feedback vector slot. Cache states | 2529 // Cache the called function in a feedback vector slot. Cache states |
2508 // are uninitialized, monomorphic (indicated by a JSFunction), and | 2530 // are uninitialized, monomorphic (indicated by a JSFunction), and |
2509 // megamorphic. | 2531 // megamorphic. |
2510 // a0 : number of arguments to the construct function | 2532 // a0 : number of arguments to the construct function |
2511 // a1 : the function to call | 2533 // a1 : the function to call |
2512 // a2 : Feedback vector | 2534 // a2 : Feedback vector |
2513 // a3 : slot in feedback vector (Smi) | 2535 // a3 : slot in feedback vector (Smi) |
2514 Label initialize, done, miss, megamorphic, not_array_function; | 2536 Label initialize, done, miss, megamorphic, not_array_function; |
2515 | 2537 |
2516 DCHECK_EQ(*TypeFeedbackVector::MegamorphicSentinel(masm->isolate()), | 2538 DCHECK_EQ(*TypeFeedbackVector::MegamorphicSentinel(masm->isolate()), |
2517 masm->isolate()->heap()->megamorphic_symbol()); | 2539 masm->isolate()->heap()->megamorphic_symbol()); |
2518 DCHECK_EQ(*TypeFeedbackVector::UninitializedSentinel(masm->isolate()), | 2540 DCHECK_EQ(*TypeFeedbackVector::UninitializedSentinel(masm->isolate()), |
2519 masm->isolate()->heap()->uninitialized_symbol()); | 2541 masm->isolate()->heap()->uninitialized_symbol()); |
2520 | 2542 |
2521 // Load the cache state into t0. | 2543 // Load the cache state into t0. |
2522 __ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize); | 2544 __ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize); |
2523 __ Addu(t0, a2, Operand(t0)); | 2545 __ Addu(t0, a2, Operand(t0)); |
2524 __ lw(t0, FieldMemOperand(t0, FixedArray::kHeaderSize)); | 2546 __ lw(t0, FieldMemOperand(t0, FixedArray::kHeaderSize)); |
2525 | 2547 |
2526 // A monomorphic cache hit or an already megamorphic state: invoke the | 2548 // A monomorphic cache hit or an already megamorphic state: invoke the |
2527 // function without changing the state. | 2549 // function without changing the state. |
2528 __ Branch(&done, eq, t0, Operand(a1)); | 2550 Label check_allocation_site; |
| 2551 Register feedback_map = t1; |
| 2552 Register weak_value = t4; |
| 2553 __ lw(weak_value, FieldMemOperand(t0, WeakCell::kValueOffset)); |
| 2554 __ Branch(&done, eq, a1, Operand(weak_value)); |
| 2555 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex); |
| 2556 __ Branch(&done, eq, t0, Operand(at)); |
| 2557 __ lw(feedback_map, FieldMemOperand(t0, 0)); |
| 2558 __ LoadRoot(at, Heap::kWeakCellMapRootIndex); |
| 2559 __ Branch(FLAG_pretenuring_call_new ? &miss : &check_allocation_site, ne, |
| 2560 feedback_map, Operand(at)); |
| 2561 |
| 2562 // If a1 is not equal to the weak cell value, and the weak cell value is |
| 2563 // cleared, we have a new chance to become monomorphic. |
| 2564 __ JumpIfSmi(weak_value, &initialize); |
| 2565 __ jmp(&megamorphic); |
2529 | 2566 |
2530 if (!FLAG_pretenuring_call_new) { | 2567 if (!FLAG_pretenuring_call_new) { |
| 2568 __ bind(&check_allocation_site); |
2531 // If we came here, we need to see if we are the array function. | 2569 // If we came here, we need to see if we are the array function. |
2532 // If we didn't have a matching function, and we didn't find the megamorph | 2570 // If we didn't have a matching function, and we didn't find the megamorph |
2533 // sentinel, then we have in the slot either some other function or an | 2571 // sentinel, then we have in the slot either some other function or an |
2534 // AllocationSite. Do a map check on the object in a3. | 2572 // AllocationSite. Do a map check on the object in a3. |
2535 __ lw(t1, FieldMemOperand(t0, 0)); | |
2536 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex); | 2573 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex); |
2537 __ Branch(&miss, ne, t1, Operand(at)); | 2574 __ Branch(&miss, ne, feedback_map, Operand(at)); |
2538 | 2575 |
2539 // Make sure the function is the Array() function | 2576 // Make sure the function is the Array() function |
2540 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, t0); | 2577 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, t0); |
2541 __ Branch(&megamorphic, ne, a1, Operand(t0)); | 2578 __ Branch(&megamorphic, ne, a1, Operand(t0)); |
2542 __ jmp(&done); | 2579 __ jmp(&done); |
2543 } | 2580 } |
2544 | 2581 |
2545 __ bind(&miss); | 2582 __ bind(&miss); |
2546 | 2583 |
2547 // A monomorphic miss (i.e, here the cache is not uninitialized) goes | 2584 // A monomorphic miss (i.e, here the cache is not uninitialized) goes |
(...skipping 12 matching lines...)
2560 // An uninitialized cache is patched with the function. | 2597 // An uninitialized cache is patched with the function. |
2561 __ bind(&initialize); | 2598 __ bind(&initialize); |
2562 if (!FLAG_pretenuring_call_new) { | 2599 if (!FLAG_pretenuring_call_new) { |
2563 // Make sure the function is the Array() function. | 2600 // Make sure the function is the Array() function. |
2564 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, t0); | 2601 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, t0); |
2565 __ Branch(¬_array_function, ne, a1, Operand(t0)); | 2602 __ Branch(¬_array_function, ne, a1, Operand(t0)); |
2566 | 2603 |
2567 // The target function is the Array constructor, | 2604 // The target function is the Array constructor, |
2568 // Create an AllocationSite if we don't already have it, store it in the | 2605 // Create an AllocationSite if we don't already have it, store it in the |
2569 // slot. | 2606 // slot. |
2570 { | 2607 CreateAllocationSiteStub create_stub(masm->isolate()); |
2571 FrameScope scope(masm, StackFrame::INTERNAL); | 2608 CallStubInRecordCallTarget(masm, &create_stub); |
2572 const RegList kSavedRegs = | |
2573 1 << 4 | // a0 | |
2574 1 << 5 | // a1 | |
2575 1 << 6 | // a2 | |
2576 1 << 7; // a3 | |
2577 | |
2578 // Arguments register must be smi-tagged to call out. | |
2579 __ SmiTag(a0); | |
2580 __ MultiPush(kSavedRegs); | |
2581 | |
2582 CreateAllocationSiteStub create_stub(masm->isolate()); | |
2583 __ CallStub(&create_stub); | |
2584 | |
2585 __ MultiPop(kSavedRegs); | |
2586 __ SmiUntag(a0); | |
2587 } | |
2588 __ Branch(&done); | 2609 __ Branch(&done); |
2589 | 2610 |
2590 __ bind(¬_array_function); | 2611 __ bind(¬_array_function); |
2591 } | 2612 } |
2592 | 2613 |
2593 __ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize); | 2614 CreateWeakCellStub create_stub(masm->isolate()); |
2594 __ Addu(t0, a2, Operand(t0)); | 2615 CallStubInRecordCallTarget(masm, &create_stub); |
2595 __ Addu(t0, t0, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | |
2596 __ sw(a1, MemOperand(t0, 0)); | |
2597 | |
2598 __ Push(t0, a2, a1); | |
2599 __ RecordWrite(a2, t0, a1, kRAHasNotBeenSaved, kDontSaveFPRegs, | |
2600 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); | |
2601 __ Pop(t0, a2, a1); | |
2602 | |
2603 __ bind(&done); | 2616 __ bind(&done); |
2604 } | 2617 } |
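
The rewritten GenerateRecordCallTarget records a monomorphic target through a WeakCell (CreateWeakCellStub) instead of storing the function directly into the feedback slot with an explicit write barrier, and a cleared weak cell gives the slot a fresh chance to become monomorphic again. The standalone C++ model below sketches that three-state cache under those assumptions; FeedbackSlot and RecordCall are illustrative names, not V8's API.

#include <iostream>

// Illustrative model of a call feedback slot. In the stub, "monomorphic" is a
// WeakCell holding the target; a cleared cell behaves like "uninitialized"
// and may be re-initialized, while a second distinct target goes megamorphic.
enum class State { kUninitialized, kMonomorphic, kMegamorphic };

struct FeedbackSlot {
  State state = State::kUninitialized;
  const void* target = nullptr;  // stands in for the weak cell's value

  void RecordCall(const void* fn) {
    switch (state) {
      case State::kUninitialized:
        state = State::kMonomorphic;   // CreateWeakCellStub path
        target = fn;
        break;
      case State::kMonomorphic:
        if (target == fn) return;      // cache hit: done
        if (target == nullptr) {       // weak cell was cleared
          target = fn;                 // new chance to become monomorphic
          return;
        }
        state = State::kMegamorphic;   // different target: give up
        target = nullptr;
        break;
      case State::kMegamorphic:
        break;                         // stays megamorphic
    }
  }
};

int main() {
  FeedbackSlot slot;
  int f = 0, g = 0;
  slot.RecordCall(&f);   // uninitialized -> monomorphic on f
  slot.RecordCall(&f);   // hit, no state change
  slot.RecordCall(&g);   // second target -> megamorphic
  std::cout << (slot.state == State::kMegamorphic) << "\n";  // prints 1
}
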
2605 | 2618 |
2606 | 2619 |
2607 static void EmitContinueIfStrictOrNative(MacroAssembler* masm, Label* cont) { | 2620 static void EmitContinueIfStrictOrNative(MacroAssembler* masm, Label* cont) { |
2608 __ lw(a3, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); | 2621 __ lw(a3, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); |
2609 __ lw(t0, FieldMemOperand(a3, SharedFunctionInfo::kCompilerHintsOffset)); | 2622 __ lw(t0, FieldMemOperand(a3, SharedFunctionInfo::kCompilerHintsOffset)); |
2610 | 2623 |
2611 // Do not transform the receiver for strict mode functions. | 2624 // Do not transform the receiver for strict mode functions. |
2612 int32_t strict_mode_function_mask = | 2625 int32_t strict_mode_function_mask = |
(...skipping 2885 matching lines...)
5498 kStackUnwindSpace, kInvalidStackOffset, | 5511 kStackUnwindSpace, kInvalidStackOffset, |
5499 MemOperand(fp, 6 * kPointerSize), NULL); | 5512 MemOperand(fp, 6 * kPointerSize), NULL); |
5500 } | 5513 } |
5501 | 5514 |
5502 | 5515 |
5503 #undef __ | 5516 #undef __ |
5504 | 5517 |
5505 } } // namespace v8::internal | 5518 } } // namespace v8::internal |
5506 | 5519 |
5507 #endif // V8_TARGET_ARCH_MIPS | 5520 #endif // V8_TARGET_ARCH_MIPS |