OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_ARM64 | 5 #if V8_TARGET_ARCH_ARM64 |
6 | 6 |
7 #include "src/bootstrapper.h" | 7 #include "src/bootstrapper.h" |
8 #include "src/code-stubs.h" | 8 #include "src/code-stubs.h" |
9 #include "src/codegen.h" | 9 #include "src/codegen.h" |
10 #include "src/ic/handler-compiler.h" | 10 #include "src/ic/handler-compiler.h" |
(...skipping 4658 matching lines...)
4669 GenerateImpl(masm, false); | 4669 GenerateImpl(masm, false); |
4670 } | 4670 } |
4671 | 4671 |
4672 | 4672 |
4673 void VectorStoreICStub::GenerateForTrampoline(MacroAssembler* masm) { | 4673 void VectorStoreICStub::GenerateForTrampoline(MacroAssembler* masm) { |
4674 GenerateImpl(masm, true); | 4674 GenerateImpl(masm, true); |
4675 } | 4675 } |
4676 | 4676 |
4677 | 4677 |
4678 void VectorStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { | 4678 void VectorStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { |
4679 Register receiver = VectorStoreICDescriptor::ReceiverRegister(); // x1 | 4679 Label miss; |
4680 Register key = VectorStoreICDescriptor::NameRegister(); // x2 | |
4681 Register vector = VectorStoreICDescriptor::VectorRegister(); // x3 | |
4682 Register slot = VectorStoreICDescriptor::SlotRegister(); // x4 | |
4683 DCHECK(VectorStoreICDescriptor::ValueRegister().is(x0)); // x0 | |
4684 Register feedback = x5; | |
4685 Register receiver_map = x6; | |
4686 Register scratch1 = x7; | |
4687 | 4680 |
4688 __ Add(feedback, vector, Operand::UntagSmiAndScale(slot, kPointerSizeLog2)); | 4681 // TODO(mvstanton): Implement. |
4689 __ Ldr(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize)); | |
4690 | |
4691 // Try to quickly handle the monomorphic case without knowing for sure | |
4692 // if we have a weak cell in feedback. We do know it's safe to look | |
4693 // at WeakCell::kValueOffset. | |
4694 Label try_array, load_smi_map, compare_map; | |
4695 Label not_array, miss; | |
4696 HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector, slot, | |
4697 scratch1, &compare_map, &load_smi_map, &try_array); | |
4698 | |
4699 // Is it a fixed array? | |
4700 __ Bind(&try_array); | |
4701 __ Ldr(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset)); | |
4702 __ JumpIfNotRoot(scratch1, Heap::kFixedArrayMapRootIndex, ¬_array); | |
4703 HandleArrayCases(masm, feedback, receiver_map, scratch1, x8, true, &miss); | |
4704 | |
4705 __ Bind(¬_array); | |
4706 __ JumpIfNotRoot(feedback, Heap::kmegamorphic_symbolRootIndex, &miss); | |
4707 Code::Flags code_flags = Code::RemoveTypeAndHolderFromFlags( | |
4708 Code::ComputeHandlerFlags(Code::STORE_IC)); | |
4709 masm->isolate()->stub_cache()->GenerateProbe(masm, Code::STORE_IC, code_flags, | |
4710 receiver, key, feedback, | |
4711 receiver_map, scratch1, x8); | |
4712 | |
4713 __ Bind(&miss); | 4682 __ Bind(&miss); |
4714 StoreIC::GenerateMiss(masm); | 4683 StoreIC::GenerateMiss(masm); |
4715 | |
4716 __ Bind(&load_smi_map); | |
4717 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex); | |
4718 __ jmp(&compare_map); | |
4719 } | 4684 } |
4720 | 4685 |
4721 | 4686 |
4722 void VectorKeyedStoreICStub::Generate(MacroAssembler* masm) { | 4687 void VectorKeyedStoreICStub::Generate(MacroAssembler* masm) { |
4723 GenerateImpl(masm, false); | 4688 GenerateImpl(masm, false); |
4724 } | 4689 } |
4725 | 4690 |
4726 | 4691 |
4727 void VectorKeyedStoreICStub::GenerateForTrampoline(MacroAssembler* masm) { | 4692 void VectorKeyedStoreICStub::GenerateForTrampoline(MacroAssembler* masm) { |
4728 GenerateImpl(masm, true); | 4693 GenerateImpl(masm, true); |
4729 } | 4694 } |
4730 | 4695 |
4731 | 4696 |
4732 static void HandlePolymorphicStoreCase(MacroAssembler* masm, Register feedback, | |
4733 Register receiver_map, Register scratch1, | |
4734 Register scratch2, Label* miss) { | |
4735 // feedback initially contains the feedback array | |
4736 Label next_loop, prepare_next; | |
4737 Label start_polymorphic; | |
4738 Label transition_call; | |
4739 | |
4740 Register cached_map = scratch1; | |
4741 Register too_far = scratch2; | |
4742 Register pointer_reg = feedback; | |
4743 | |
4744 __ Ldr(too_far, FieldMemOperand(feedback, FixedArray::kLengthOffset)); | |
4745 | |
4746 // +-----+------+------+-----+-----+-----+ ... ----+ | |
4747 // | map | len | wm0 | wt0 | h0 | wm1 | hN | | |
4748 // +-----+------+------+-----+-----+ ----+ ... ----+ | |
4749 // 0 1 2 len-1 | |
4750 // ^ ^ | |
4751 // | | | |
4752 // pointer_reg too_far | |
4753 // aka feedback scratch2 | |
4754 // also need receiver_map | |
4755 // use cached_map (scratch1) to look in the weak map values. | |
4756 __ Add(too_far, feedback, | |
4757 Operand::UntagSmiAndScale(too_far, kPointerSizeLog2)); | |
4758 __ Add(too_far, too_far, FixedArray::kHeaderSize - kHeapObjectTag); | |
4759 __ Add(pointer_reg, feedback, | |
4760 FixedArray::OffsetOfElementAt(0) - kHeapObjectTag); | |
4761 | |
4762 __ Bind(&next_loop); | |
4763 __ Ldr(cached_map, MemOperand(pointer_reg)); | |
4764 __ Ldr(cached_map, FieldMemOperand(cached_map, WeakCell::kValueOffset)); | |
4765 __ Cmp(receiver_map, cached_map); | |
4766 __ B(ne, &prepare_next); | |
4767 // Is it a transitioning store? | |
4768 __ Ldr(too_far, MemOperand(pointer_reg, kPointerSize)); | |
4769 __ CompareRoot(too_far, Heap::kUndefinedValueRootIndex); | |
4770 __ B(ne, &transition_call); | |
4771 | |
4772 __ Ldr(pointer_reg, MemOperand(pointer_reg, kPointerSize * 2)); | |
4773 __ Add(pointer_reg, pointer_reg, Code::kHeaderSize - kHeapObjectTag); | |
4774 __ Jump(pointer_reg); | |
4775 | |
4776 __ Bind(&transition_call); | |
4777 __ Ldr(too_far, FieldMemOperand(too_far, WeakCell::kValueOffset)); | |
4778 __ JumpIfSmi(too_far, miss); | |
4779 | |
4780 __ Ldr(receiver_map, MemOperand(pointer_reg, kPointerSize * 2)); | |
4781 // Load the map into the correct register. | |
4782 DCHECK(feedback.is(VectorStoreTransitionDescriptor::MapRegister())); | |
4783 __ mov(feedback, too_far); | |
4784 __ Add(receiver_map, receiver_map, Code::kHeaderSize - kHeapObjectTag); | |
4785 __ Jump(receiver_map); | |
4786 | |
4787 __ Bind(&prepare_next); | |
4788 __ Add(pointer_reg, pointer_reg, kPointerSize * 3); | |
4789 __ Cmp(pointer_reg, too_far); | |
4790 __ B(lt, &next_loop); | |
4791 | |
4792 // We exhausted our array of map handler pairs. | |
4793 __ jmp(miss); | |
4794 } | |
4795 | |
4796 | |
4797 void VectorKeyedStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { | 4697 void VectorKeyedStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { |
4798 Register receiver = VectorStoreICDescriptor::ReceiverRegister(); // x1 | 4698 Label miss; |
4799 Register key = VectorStoreICDescriptor::NameRegister(); // x2 | |
4800 Register vector = VectorStoreICDescriptor::VectorRegister(); // x3 | |
4801 Register slot = VectorStoreICDescriptor::SlotRegister(); // x4 | |
4802 DCHECK(VectorStoreICDescriptor::ValueRegister().is(x0)); // x0 | |
4803 Register feedback = x5; | |
4804 Register receiver_map = x6; | |
4805 Register scratch1 = x7; | |
4806 | 4699 |
4807 __ Add(feedback, vector, Operand::UntagSmiAndScale(slot, kPointerSizeLog2)); | 4700 // TODO(mvstanton): Implement. |
4808 __ Ldr(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize)); | |
4809 | |
4810 // Try to quickly handle the monomorphic case without knowing for sure | |
4811 // if we have a weak cell in feedback. We do know it's safe to look | |
4812 // at WeakCell::kValueOffset. | |
4813 Label try_array, load_smi_map, compare_map; | |
4814 Label not_array, miss; | |
4815 HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector, slot, | |
4816 scratch1, &compare_map, &load_smi_map, &try_array); | |
4817 | |
4818 __ Bind(&try_array); | |
4819 // Is it a fixed array? | |
4820 __ Ldr(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset)); | |
4821 __ JumpIfNotRoot(scratch1, Heap::kFixedArrayMapRootIndex, ¬_array); | |
4822 | |
4823 // We have a polymorphic element handler. | |
4824 Label try_poly_name; | |
4825 HandlePolymorphicStoreCase(masm, feedback, receiver_map, scratch1, x8, &miss); | |
4826 | |
4827 __ Bind(¬_array); | |
4828 // Is it generic? | |
4829 __ JumpIfNotRoot(feedback, Heap::kmegamorphic_symbolRootIndex, | |
4830 &try_poly_name); | |
4831 Handle<Code> megamorphic_stub = | |
4832 KeyedStoreIC::ChooseMegamorphicStub(masm->isolate(), GetExtraICState()); | |
4833 __ Jump(megamorphic_stub, RelocInfo::CODE_TARGET); | |
4834 | |
4835 __ Bind(&try_poly_name); | |
4836 // We might have a name in feedback, and a fixed array in the next slot. | |
4837 __ Cmp(key, feedback); | |
4838 __ B(ne, &miss); | |
4839 // If the name comparison succeeded, we know we have a fixed array with | |
4840 // at least one map/handler pair. | |
4841 __ Add(feedback, vector, Operand::UntagSmiAndScale(slot, kPointerSizeLog2)); | |
4842 __ Ldr(feedback, | |
4843 FieldMemOperand(feedback, FixedArray::kHeaderSize + kPointerSize)); | |
4844 HandleArrayCases(masm, feedback, receiver_map, scratch1, x8, false, &miss); | |
4845 | |
4846 __ Bind(&miss); | 4701 __ Bind(&miss); |
4847 KeyedStoreIC::GenerateMiss(masm); | 4702 KeyedStoreIC::GenerateMiss(masm); |
4848 | |
4849 __ Bind(&load_smi_map); | |
4850 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex); | |
4851 __ jmp(&compare_map); | |
4852 } | 4703 } |
4853 | 4704 |
4854 | 4705 |
4855 // The entry hook is a "BumpSystemStackPointer" instruction (sub), followed by | 4706 // The entry hook is a "BumpSystemStackPointer" instruction (sub), followed by |
4856 // a "Push lr" instruction, followed by a call. | 4707 // a "Push lr" instruction, followed by a call. |
4857 static const unsigned int kProfileEntryHookCallSize = | 4708 static const unsigned int kProfileEntryHookCallSize = |
4858 Assembler::kCallSizeWithRelocation + (2 * kInstructionSize); | 4709 Assembler::kCallSizeWithRelocation + (2 * kInstructionSize); |
4859 | 4710 |
4860 | 4711 |
4861 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { | 4712 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { |
(...skipping 1178 matching lines...)
6040 MemOperand(fp, 6 * kPointerSize), NULL); | 5891 MemOperand(fp, 6 * kPointerSize), NULL); |
6041 } | 5892 } |
6042 | 5893 |
6043 | 5894 |
6044 #undef __ | 5895 #undef __ |
6045 | 5896 |
6046 } // namespace internal | 5897 } // namespace internal |
6047 } // namespace v8 | 5898 } // namespace v8 |
6048 | 5899 |
6049 #endif // V8_TARGET_ARCH_ARM64 | 5900 #endif // V8_TARGET_ARCH_ARM64 |