OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_ARM64 | 5 #if V8_TARGET_ARCH_ARM64 |
6 | 6 |
7 #include "src/arm64/frames-arm64.h" | 7 #include "src/arm64/frames-arm64.h" |
8 #include "src/bootstrapper.h" | 8 #include "src/bootstrapper.h" |
9 #include "src/code-stubs.h" | 9 #include "src/code-stubs.h" |
10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
(...skipping 4710 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4721 GenerateImpl(masm, false); | 4721 GenerateImpl(masm, false); |
4722 } | 4722 } |
4723 | 4723 |
4724 | 4724 |
void VectorStoreICStub::GenerateForTrampoline(MacroAssembler* masm) {
  // Trampoline entry point: same body as Generate(), but passes
  // in_frame == true to the shared implementation.
  GenerateImpl(masm, true);
}
4728 | 4728 |
4729 | 4729 |
4730 void VectorStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { | 4730 void VectorStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { |
4731 Label miss; | 4731 Register receiver = VectorStoreICDescriptor::ReceiverRegister(); // x1 |
| 4732 Register key = VectorStoreICDescriptor::NameRegister(); // x2 |
| 4733 Register vector = VectorStoreICDescriptor::VectorRegister(); // x3 |
| 4734 Register slot = VectorStoreICDescriptor::SlotRegister(); // x4 |
| 4735 DCHECK(VectorStoreICDescriptor::ValueRegister().is(x0)); // x0 |
| 4736 Register feedback = x5; |
| 4737 Register receiver_map = x6; |
| 4738 Register scratch1 = x7; |
4732 | 4739 |
4733 // TODO(mvstanton): Implement. | 4740 __ Add(feedback, vector, Operand::UntagSmiAndScale(slot, kPointerSizeLog2)); |
| 4741 __ Ldr(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize)); |
| 4742 |
| 4743 // Try to quickly handle the monomorphic case without knowing for sure |
| 4744 // if we have a weak cell in feedback. We do know it's safe to look |
| 4745 // at WeakCell::kValueOffset. |
| 4746 Label try_array, load_smi_map, compare_map; |
| 4747 Label not_array, miss; |
| 4748 HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector, slot, |
| 4749 scratch1, &compare_map, &load_smi_map, &try_array); |
| 4750 |
| 4751 // Is it a fixed array? |
| 4752 __ Bind(&try_array); |
| 4753 __ Ldr(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset)); |
| 4754 __ JumpIfNotRoot(scratch1, Heap::kFixedArrayMapRootIndex, ¬_array); |
| 4755 HandleArrayCases(masm, receiver, key, vector, slot, feedback, receiver_map, |
| 4756 scratch1, x8, true, &miss); |
| 4757 |
| 4758 __ Bind(¬_array); |
| 4759 __ JumpIfNotRoot(feedback, Heap::kmegamorphic_symbolRootIndex, &miss); |
| 4760 Code::Flags code_flags = Code::RemoveTypeAndHolderFromFlags( |
| 4761 Code::ComputeHandlerFlags(Code::STORE_IC)); |
| 4762 masm->isolate()->stub_cache()->GenerateProbe(masm, Code::STORE_IC, code_flags, |
| 4763 receiver, key, feedback, |
| 4764 receiver_map, scratch1, x8); |
| 4765 |
4734 __ Bind(&miss); | 4766 __ Bind(&miss); |
4735 StoreIC::GenerateMiss(masm); | 4767 StoreIC::GenerateMiss(masm); |
| 4768 |
| 4769 __ Bind(&load_smi_map); |
| 4770 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex); |
| 4771 __ jmp(&compare_map); |
4736 } | 4772 } |
4737 | 4773 |
4738 | 4774 |
void VectorKeyedStoreICStub::Generate(MacroAssembler* masm) {
  // Non-trampoline entry point: dispatches to the shared implementation
  // with in_frame == false.
  GenerateImpl(masm, false);
}
4742 | 4778 |
4743 | 4779 |
void VectorKeyedStoreICStub::GenerateForTrampoline(MacroAssembler* masm) {
  // Trampoline entry point: same body as Generate(), but passes
  // in_frame == true to the shared implementation.
  GenerateImpl(masm, true);
}
4747 | 4783 |
4748 | 4784 |
| 4785 static void HandlePolymorphicStoreCase(MacroAssembler* masm, Register feedback, |
| 4786 Register receiver_map, Register scratch1, |
| 4787 Register scratch2, Label* miss) { |
| 4788 // feedback initially contains the feedback array |
| 4789 Label next_loop, prepare_next; |
| 4790 Label start_polymorphic; |
| 4791 Label transition_call; |
| 4792 |
| 4793 Register cached_map = scratch1; |
| 4794 Register too_far = scratch2; |
| 4795 Register pointer_reg = feedback; |
| 4796 |
| 4797 __ Ldr(too_far, FieldMemOperand(feedback, FixedArray::kLengthOffset)); |
| 4798 |
| 4799 // +-----+------+------+-----+-----+-----+ ... ----+ |
| 4800 // | map | len | wm0 | wt0 | h0 | wm1 | hN | |
| 4801 // +-----+------+------+-----+-----+ ----+ ... ----+ |
| 4802 // 0 1 2 len-1 |
| 4803 // ^ ^ |
| 4804 // | | |
| 4805 // pointer_reg too_far |
| 4806 // aka feedback scratch2 |
| 4807 // also need receiver_map |
| 4808 // use cached_map (scratch1) to look in the weak map values. |
| 4809 __ Add(too_far, feedback, |
| 4810 Operand::UntagSmiAndScale(too_far, kPointerSizeLog2)); |
| 4811 __ Add(too_far, too_far, FixedArray::kHeaderSize - kHeapObjectTag); |
| 4812 __ Add(pointer_reg, feedback, |
| 4813 FixedArray::OffsetOfElementAt(0) - kHeapObjectTag); |
| 4814 |
| 4815 __ Bind(&next_loop); |
| 4816 __ Ldr(cached_map, MemOperand(pointer_reg)); |
| 4817 __ Ldr(cached_map, FieldMemOperand(cached_map, WeakCell::kValueOffset)); |
| 4818 __ Cmp(receiver_map, cached_map); |
| 4819 __ B(ne, &prepare_next); |
| 4820 // Is it a transitioning store? |
| 4821 __ Ldr(too_far, MemOperand(pointer_reg, kPointerSize)); |
| 4822 __ CompareRoot(too_far, Heap::kUndefinedValueRootIndex); |
| 4823 __ B(ne, &transition_call); |
| 4824 |
| 4825 __ Ldr(pointer_reg, MemOperand(pointer_reg, kPointerSize * 2)); |
| 4826 __ Add(pointer_reg, pointer_reg, Code::kHeaderSize - kHeapObjectTag); |
| 4827 __ Jump(pointer_reg); |
| 4828 |
| 4829 __ Bind(&transition_call); |
| 4830 __ Ldr(too_far, FieldMemOperand(too_far, WeakCell::kValueOffset)); |
| 4831 __ JumpIfSmi(too_far, miss); |
| 4832 |
| 4833 __ Ldr(receiver_map, MemOperand(pointer_reg, kPointerSize * 2)); |
| 4834 // Load the map into the correct register. |
| 4835 DCHECK(feedback.is(VectorStoreTransitionDescriptor::MapRegister())); |
| 4836 __ mov(feedback, too_far); |
| 4837 __ Add(receiver_map, receiver_map, Code::kHeaderSize - kHeapObjectTag); |
| 4838 __ Jump(receiver_map); |
| 4839 |
| 4840 __ Bind(&prepare_next); |
| 4841 __ Add(pointer_reg, pointer_reg, kPointerSize * 3); |
| 4842 __ Cmp(pointer_reg, too_far); |
| 4843 __ B(lt, &next_loop); |
| 4844 |
| 4845 // We exhausted our array of map handler pairs. |
| 4846 __ jmp(miss); |
| 4847 } |
| 4848 |
| 4849 |
4749 void VectorKeyedStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { | 4850 void VectorKeyedStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { |
4750 Label miss; | 4851 Register receiver = VectorStoreICDescriptor::ReceiverRegister(); // x1 |
| 4852 Register key = VectorStoreICDescriptor::NameRegister(); // x2 |
| 4853 Register vector = VectorStoreICDescriptor::VectorRegister(); // x3 |
| 4854 Register slot = VectorStoreICDescriptor::SlotRegister(); // x4 |
| 4855 DCHECK(VectorStoreICDescriptor::ValueRegister().is(x0)); // x0 |
| 4856 Register feedback = x5; |
| 4857 Register receiver_map = x6; |
| 4858 Register scratch1 = x7; |
4751 | 4859 |
4752 // TODO(mvstanton): Implement. | 4860 __ Add(feedback, vector, Operand::UntagSmiAndScale(slot, kPointerSizeLog2)); |
| 4861 __ Ldr(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize)); |
| 4862 |
| 4863 // Try to quickly handle the monomorphic case without knowing for sure |
| 4864 // if we have a weak cell in feedback. We do know it's safe to look |
| 4865 // at WeakCell::kValueOffset. |
| 4866 Label try_array, load_smi_map, compare_map; |
| 4867 Label not_array, miss; |
| 4868 HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector, slot, |
| 4869 scratch1, &compare_map, &load_smi_map, &try_array); |
| 4870 |
| 4871 __ Bind(&try_array); |
| 4872 // Is it a fixed array? |
| 4873 __ Ldr(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset)); |
| 4874 __ JumpIfNotRoot(scratch1, Heap::kFixedArrayMapRootIndex, ¬_array); |
| 4875 |
| 4876 // We have a polymorphic element handler. |
| 4877 Label try_poly_name; |
| 4878 HandlePolymorphicStoreCase(masm, feedback, receiver_map, scratch1, x8, &miss); |
| 4879 |
| 4880 __ Bind(¬_array); |
| 4881 // Is it generic? |
| 4882 __ JumpIfNotRoot(feedback, Heap::kmegamorphic_symbolRootIndex, |
| 4883 &try_poly_name); |
| 4884 Handle<Code> megamorphic_stub = |
| 4885 KeyedStoreIC::ChooseMegamorphicStub(masm->isolate(), GetExtraICState()); |
| 4886 __ Jump(megamorphic_stub, RelocInfo::CODE_TARGET); |
| 4887 |
| 4888 __ Bind(&try_poly_name); |
| 4889 // We might have a name in feedback, and a fixed array in the next slot. |
| 4890 __ Cmp(key, feedback); |
| 4891 __ B(ne, &miss); |
| 4892 // If the name comparison succeeded, we know we have a fixed array with |
| 4893 // at least one map/handler pair. |
| 4894 __ Add(feedback, vector, Operand::UntagSmiAndScale(slot, kPointerSizeLog2)); |
| 4895 __ Ldr(feedback, |
| 4896 FieldMemOperand(feedback, FixedArray::kHeaderSize + kPointerSize)); |
| 4897 HandleArrayCases(masm, receiver, key, vector, slot, feedback, receiver_map, |
| 4898 scratch1, x8, false, &miss); |
| 4899 |
4753 __ Bind(&miss); | 4900 __ Bind(&miss); |
4754 KeyedStoreIC::GenerateMiss(masm); | 4901 KeyedStoreIC::GenerateMiss(masm); |
| 4902 |
| 4903 __ Bind(&load_smi_map); |
| 4904 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex); |
| 4905 __ jmp(&compare_map); |
4755 } | 4906 } |
4756 | 4907 |
4757 | 4908 |
// The entry hook is a "BumpSystemStackPointer" instruction (sub), followed by
// a "Push lr" instruction, followed by a call.
// Total size in bytes of the patched sequence: two fixed-size instructions
// plus a call with relocation.
static const unsigned int kProfileEntryHookCallSize =
    Assembler::kCallSizeWithRelocation + (2 * kInstructionSize);
4762 | 4913 |
4763 | 4914 |
4764 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { | 4915 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { |
(...skipping 1178 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
5943 MemOperand(fp, 6 * kPointerSize), NULL); | 6094 MemOperand(fp, 6 * kPointerSize), NULL); |
5944 } | 6095 } |
5945 | 6096 |
5946 | 6097 |
5947 #undef __ | 6098 #undef __ |
5948 | 6099 |
5949 } // namespace internal | 6100 } // namespace internal |
5950 } // namespace v8 | 6101 } // namespace v8 |
5951 | 6102 |
5952 #endif // V8_TARGET_ARCH_ARM64 | 6103 #endif // V8_TARGET_ARCH_ARM64 |
OLD | NEW |