OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_ARM64 | 5 #if V8_TARGET_ARCH_ARM64 |
6 | 6 |
7 #include "src/code-stubs.h" | 7 #include "src/code-stubs.h" |
8 #include "src/api-arguments.h" | 8 #include "src/api-arguments.h" |
9 #include "src/bootstrapper.h" | 9 #include "src/bootstrapper.h" |
10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
(...skipping 3526 matching lines...)
3537 | 3537 |
3538 __ Bind(&miss); | 3538 __ Bind(&miss); |
3539 KeyedLoadIC::GenerateMiss(masm); | 3539 KeyedLoadIC::GenerateMiss(masm); |
3540 | 3540 |
3541 __ Bind(&load_smi_map); | 3541 __ Bind(&load_smi_map); |
3542 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex); | 3542 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex); |
3543 __ jmp(&compare_map); | 3543 __ jmp(&compare_map); |
3544 } | 3544 } |
3545 | 3545 |
3546 void StoreICTrampolineStub::Generate(MacroAssembler* masm) { | 3546 void StoreICTrampolineStub::Generate(MacroAssembler* masm) { |
3547 __ EmitLoadTypeFeedbackVector(VectorStoreICDescriptor::VectorRegister()); | 3547 __ EmitLoadTypeFeedbackVector(StoreWithVectorDescriptor::VectorRegister()); |
3548 StoreICStub stub(isolate(), state()); | 3548 StoreICStub stub(isolate(), state()); |
3549 stub.GenerateForTrampoline(masm); | 3549 stub.GenerateForTrampoline(masm); |
3550 } | 3550 } |
3551 | 3551 |
3552 void KeyedStoreICTrampolineStub::Generate(MacroAssembler* masm) { | 3552 void KeyedStoreICTrampolineStub::Generate(MacroAssembler* masm) { |
3553 __ EmitLoadTypeFeedbackVector(VectorStoreICDescriptor::VectorRegister()); | 3553 __ EmitLoadTypeFeedbackVector(StoreWithVectorDescriptor::VectorRegister()); |
3554 KeyedStoreICStub stub(isolate(), state()); | 3554 KeyedStoreICStub stub(isolate(), state()); |
3555 stub.GenerateForTrampoline(masm); | 3555 stub.GenerateForTrampoline(masm); |
3556 } | 3556 } |
3557 | 3557 |
3558 void StoreICStub::Generate(MacroAssembler* masm) { GenerateImpl(masm, false); } | 3558 void StoreICStub::Generate(MacroAssembler* masm) { GenerateImpl(masm, false); } |
3559 | 3559 |
3560 void StoreICStub::GenerateForTrampoline(MacroAssembler* masm) { | 3560 void StoreICStub::GenerateForTrampoline(MacroAssembler* masm) { |
3561 GenerateImpl(masm, true); | 3561 GenerateImpl(masm, true); |
3562 } | 3562 } |
3563 | 3563 |
3564 void StoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { | 3564 void StoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { |
3565 Register receiver = VectorStoreICDescriptor::ReceiverRegister(); // x1 | 3565 Register receiver = StoreWithVectorDescriptor::ReceiverRegister(); // x1 |
3566 Register key = VectorStoreICDescriptor::NameRegister(); // x2 | 3566 Register key = StoreWithVectorDescriptor::NameRegister(); // x2 |
3567 Register vector = VectorStoreICDescriptor::VectorRegister(); // x3 | 3567 Register vector = StoreWithVectorDescriptor::VectorRegister(); // x3 |
3568 Register slot = VectorStoreICDescriptor::SlotRegister(); // x4 | 3568 Register slot = StoreWithVectorDescriptor::SlotRegister(); // x4 |
3569 DCHECK(VectorStoreICDescriptor::ValueRegister().is(x0)); // x0 | 3569 DCHECK(StoreWithVectorDescriptor::ValueRegister().is(x0)); // x0 |
3570 Register feedback = x5; | 3570 Register feedback = x5; |
3571 Register receiver_map = x6; | 3571 Register receiver_map = x6; |
3572 Register scratch1 = x7; | 3572 Register scratch1 = x7; |
3573 | 3573 |
3574 __ Add(feedback, vector, Operand::UntagSmiAndScale(slot, kPointerSizeLog2)); | 3574 __ Add(feedback, vector, Operand::UntagSmiAndScale(slot, kPointerSizeLog2)); |
3575 __ Ldr(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize)); | 3575 __ Ldr(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize)); |
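The Add/Ldr pair above is the feedback-slot read: the slot register holds a Smi, UntagSmiAndScale folds the untagging and the scale by kPointerSize into a single operand, and the FieldMemOperand load (which also compensates for the heap-object tag) then reads that element of the feedback vector. A rough C++ equivalent, purely illustrative and not part of the stub:

    // Sketch only: the element read performed by the Add/Ldr pair, assuming
    // 'vector' is viewed as a FixedArray and 'index' is the untagged value
    // held in the slot register.
    Object* feedback_entry = vector->get(index);  // WeakCell, handler array, or sentinel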
3576 | 3576 |
3577 // Try to quickly handle the monomorphic case without knowing for sure | 3577 // Try to quickly handle the monomorphic case without knowing for sure |
3578 // if we have a weak cell in feedback. We do know it's safe to look | 3578 // if we have a weak cell in feedback. We do know it's safe to look |
3579 // at WeakCell::kValueOffset. | 3579 // at WeakCell::kValueOffset. |
(...skipping 88 matching lines...)
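The skipped block begins with the monomorphic fast path the comment describes: read the feedback entry as if it were a WeakCell, compare its value against the receiver map, and on a match jump straight to the handler cached in the next vector slot. A minimal sketch of that shape, with illustrative label names (try_array) and register reuse; the file's actual code factors this into a shared helper:

    // Hedged sketch, not the file's exact code.
    __ Ldr(scratch1, FieldMemOperand(feedback, WeakCell::kValueOffset));
    __ Cmp(scratch1, receiver_map);
    __ B(ne, &try_array);  // not (or no longer) monomorphic for this map
    // Monomorphic hit: the handler lives one slot after the WeakCell.
    Register handler = feedback;  // reuse the register holding the entry
    __ Add(handler, vector, Operand::UntagSmiAndScale(slot, kPointerSizeLog2));
    __ Ldr(handler,
           FieldMemOperand(handler, FixedArray::kHeaderSize + kPointerSize));
    __ Add(handler, handler, Code::kHeaderSize - kHeapObjectTag);
    __ Jump(handler);  // tail-call the cached handler code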
3668 __ Bind(&prepare_next); | 3668 __ Bind(&prepare_next); |
3669 __ Add(pointer_reg, pointer_reg, kPointerSize * 3); | 3669 __ Add(pointer_reg, pointer_reg, kPointerSize * 3); |
3670 __ Cmp(pointer_reg, too_far); | 3670 __ Cmp(pointer_reg, too_far); |
3671 __ B(lt, &next_loop); | 3671 __ B(lt, &next_loop); |
3672 | 3672 |
3673 // We exhausted our array of map handler pairs. | 3673 // We exhausted our array of map handler pairs. |
3674 __ jmp(miss); | 3674 __ jmp(miss); |
3675 } | 3675 } |
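The stride of kPointerSize * 3 in the loop above reflects the keyed-store feedback layout: the handler array holds triples of (weak receiver map, weak transition map or a no-transition marker, handler) rather than bare (map, handler) pairs. An illustrative C++ walk over that assumed layout, matching the stride but not taken from the stub; the function and parameter names are hypothetical:

    // Hedged sketch of the walk, assuming the triple layout described above.
    // 'handler_array', 'receiver_map', and 'out_transition' are illustrative.
    Code* FindKeyedStoreHandler(FixedArray* handler_array, Map* receiver_map,
                                Object** out_transition) {
      for (int i = 0; i < handler_array->length(); i += 3) {
        Object* cached_map = WeakCell::cast(handler_array->get(i))->value();
        if (cached_map != receiver_map) continue;      // try the next triple
        *out_transition = handler_array->get(i + 1);   // transition map or marker
        return Code::cast(handler_array->get(i + 2));  // the cached handler
      }
      return nullptr;  // corresponds to the "__ jmp(miss)" above
    }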
3676 | 3676 |
3677 void KeyedStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { | 3677 void KeyedStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { |
3678 Register receiver = VectorStoreICDescriptor::ReceiverRegister(); // x1 | 3678 Register receiver = StoreWithVectorDescriptor::ReceiverRegister(); // x1 |
3679 Register key = VectorStoreICDescriptor::NameRegister(); // x2 | 3679 Register key = StoreWithVectorDescriptor::NameRegister(); // x2 |
3680 Register vector = VectorStoreICDescriptor::VectorRegister(); // x3 | 3680 Register vector = StoreWithVectorDescriptor::VectorRegister(); // x3 |
3681 Register slot = VectorStoreICDescriptor::SlotRegister(); // x4 | 3681 Register slot = StoreWithVectorDescriptor::SlotRegister(); // x4 |
3682 DCHECK(VectorStoreICDescriptor::ValueRegister().is(x0)); // x0 | 3682 DCHECK(StoreWithVectorDescriptor::ValueRegister().is(x0)); // x0 |
3683 Register feedback = x5; | 3683 Register feedback = x5; |
3684 Register receiver_map = x6; | 3684 Register receiver_map = x6; |
3685 Register scratch1 = x7; | 3685 Register scratch1 = x7; |
3686 | 3686 |
3687 __ Add(feedback, vector, Operand::UntagSmiAndScale(slot, kPointerSizeLog2)); | 3687 __ Add(feedback, vector, Operand::UntagSmiAndScale(slot, kPointerSizeLog2)); |
3688 __ Ldr(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize)); | 3688 __ Ldr(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize)); |
3689 | 3689 |
3690 // Try to quickly handle the monomorphic case without knowing for sure | 3690 // Try to quickly handle the monomorphic case without knowing for sure |
3691 // if we have a weak cell in feedback. We do know it's safe to look | 3691 // if we have a weak cell in feedback. We do know it's safe to look |
3692 // at WeakCell::kValueOffset. | 3692 // at WeakCell::kValueOffset. |
(...skipping 1887 matching lines...)
5580 kStackUnwindSpace, NULL, spill_offset, | 5580 kStackUnwindSpace, NULL, spill_offset, |
5581 return_value_operand, NULL); | 5581 return_value_operand, NULL); |
5582 } | 5582 } |
5583 | 5583 |
5584 #undef __ | 5584 #undef __ |
5585 | 5585 |
5586 } // namespace internal | 5586 } // namespace internal |
5587 } // namespace v8 | 5587 } // namespace v8 |
5588 | 5588 |
5589 #endif // V8_TARGET_ARCH_ARM64 | 5589 #endif // V8_TARGET_ARCH_ARM64 |