| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #if V8_TARGET_ARCH_MIPS64 | 5 #if V8_TARGET_ARCH_MIPS64 |
| 6 | 6 |
| 7 #include "src/bootstrapper.h" | 7 #include "src/bootstrapper.h" |
| 8 #include "src/code-stubs.h" | 8 #include "src/code-stubs.h" |
| 9 #include "src/codegen.h" | 9 #include "src/codegen.h" |
| 10 #include "src/ic/handler-compiler.h" | 10 #include "src/ic/handler-compiler.h" |
| (...skipping 4783 matching lines...) | |
| 4794 GenerateImpl(masm, false); | 4794 GenerateImpl(masm, false); |
| 4795 } | 4795 } |
| 4796 | 4796 |
| 4797 | 4797 |
| 4798 void VectorStoreICStub::GenerateForTrampoline(MacroAssembler* masm) { | 4798 void VectorStoreICStub::GenerateForTrampoline(MacroAssembler* masm) { |
| 4799 GenerateImpl(masm, true); | 4799 GenerateImpl(masm, true); |
| 4800 } | 4800 } |
| 4801 | 4801 |
| 4802 | 4802 |
| 4803 void VectorStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { | 4803 void VectorStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { |
| 4804 Register receiver = VectorStoreICDescriptor::ReceiverRegister(); // a1 | 4804 Label miss; |
| 4805 Register key = VectorStoreICDescriptor::NameRegister(); // a2 | |
| 4806 Register vector = VectorStoreICDescriptor::VectorRegister(); // a3 | |
| 4807 Register slot = VectorStoreICDescriptor::SlotRegister(); // a4 | |
| 4808 DCHECK(VectorStoreICDescriptor::ValueRegister().is(a0)); // a0 | |
| 4809 Register feedback = a5; | |
| 4810 Register receiver_map = a6; | |
| 4811 Register scratch1 = a7; | |
| 4812 | 4805 |
| 4813 __ SmiScale(scratch1, slot, kPointerSizeLog2); | 4806 // TODO(mvstanton): Implement. |
| 4814 __ Daddu(feedback, vector, Operand(scratch1)); | |
| 4815 __ ld(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize)); | |
| 4816 | |
| 4817 // Try to quickly handle the monomorphic case without knowing for sure | |
| 4818 // if we have a weak cell in feedback. We do know it's safe to look | |
| 4819 // at WeakCell::kValueOffset. | |
| 4820 Label try_array, load_smi_map, compare_map; | |
| 4821 Label not_array, miss; | |
| 4822 HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector, slot, | |
| 4823 scratch1, &compare_map, &load_smi_map, &try_array); | |
| 4824 | |
| 4825 // Is it a fixed array? | |
| 4826 __ bind(&try_array); | |
| 4827 __ ld(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset)); | |
| 4828 __ Branch(&not_array, ne, scratch1, Heap::kFixedArrayMapRootIndex); | |
| 4829 | |
| 4830 Register scratch2 = t0; | |
| 4831 HandleArrayCases(masm, feedback, receiver_map, scratch1, scratch2, true, | |
| 4832 &miss); | |
| 4833 | |
| 4834 __ bind(&not_array); | |
| 4835 __ Branch(&miss, ne, feedback, Heap::kmegamorphic_symbolRootIndex); | |
| 4836 Code::Flags code_flags = Code::RemoveTypeAndHolderFromFlags( | |
| 4837 Code::ComputeHandlerFlags(Code::STORE_IC)); | |
| 4838 masm->isolate()->stub_cache()->GenerateProbe( | |
| 4839 masm, Code::STORE_IC, code_flags, receiver, key, feedback, receiver_map, | |
| 4840 scratch1, scratch2); | |
| 4841 | |
| 4842 __ bind(&miss); | 4807 __ bind(&miss); |
| 4843 StoreIC::GenerateMiss(masm); | 4808 StoreIC::GenerateMiss(masm); |
| 4844 | |
| 4845 __ bind(&load_smi_map); | |
| 4846 __ Branch(USE_DELAY_SLOT, &compare_map); | |
| 4847 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex); // In delay slot. | |
| 4848 } | 4809 } |
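
For orientation while reading the removal above: the OLD-side VectorStoreICStub::GenerateImpl dispatched on the feedback vector slot in a fixed order (monomorphic weak cell, polymorphic FixedArray, megamorphic symbol with a stub-cache probe, then miss). The following is only an illustrative C++ restatement of that control flow; FeedbackSlot, Handler and SlotKind are hypothetical stand-ins, not V8 API.

#include <functional>
#include <utility>
#include <vector>

// Hypothetical stand-ins for V8 internals; none of these are real V8 types.
struct Map {};                              // a receiver's hidden class
using Handler = std::function<void()>;      // a compiled store handler

enum class SlotKind { kMonomorphic, kPolymorphic, kMegamorphic, kUninitialized };

struct FeedbackSlot {
  SlotKind kind = SlotKind::kUninitialized;
  const Map* monomorphic_map = nullptr;                     // from a weak cell
  Handler monomorphic_handler;
  std::vector<std::pair<const Map*, Handler>> polymorphic;  // map/handler pairs
};

// Dispatch order mirrored from the removed assembly: monomorphic weak-cell
// check, then the polymorphic FixedArray, then the megamorphic stub-cache
// probe, otherwise the IC miss runtime call.
void DispatchVectorStoreIC(const FeedbackSlot& slot, const Map* receiver_map,
                           const Handler& probe_stub_cache, const Handler& miss) {
  if (slot.kind == SlotKind::kMonomorphic && slot.monomorphic_map == receiver_map) {
    slot.monomorphic_handler();
    return;
  }
  if (slot.kind == SlotKind::kPolymorphic) {
    for (const auto& entry : slot.polymorphic) {
      if (entry.first == receiver_map) {
        entry.second();
        return;
      }
    }
    miss();
    return;
  }
  if (slot.kind == SlotKind::kMegamorphic) {
    probe_stub_cache();
    return;
  }
  miss();
}
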
| 4849 | 4810 |
| 4850 | 4811 |
| 4851 void VectorKeyedStoreICStub::Generate(MacroAssembler* masm) { | 4812 void VectorKeyedStoreICStub::Generate(MacroAssembler* masm) { |
| 4852 GenerateImpl(masm, false); | 4813 GenerateImpl(masm, false); |
| 4853 } | 4814 } |
| 4854 | 4815 |
| 4855 | 4816 |
| 4856 void VectorKeyedStoreICStub::GenerateForTrampoline(MacroAssembler* masm) { | 4817 void VectorKeyedStoreICStub::GenerateForTrampoline(MacroAssembler* masm) { |
| 4857 GenerateImpl(masm, true); | 4818 GenerateImpl(masm, true); |
| 4858 } | 4819 } |
| 4859 | 4820 |
| 4860 | 4821 |
| 4861 static void HandlePolymorphicStoreCase(MacroAssembler* masm, Register feedback, | |
| 4862 Register receiver_map, Register scratch1, | |
| 4863 Register scratch2, Label* miss) { | |
| 4864 // feedback initially contains the feedback array | |
| 4865 Label next_loop, prepare_next; | |
| 4866 Label start_polymorphic; | |
| 4867 Label transition_call; | |
| 4868 | |
| 4869 Register cached_map = scratch1; | |
| 4870 Register too_far = scratch2; | |
| 4871 Register pointer_reg = feedback; | |
| 4872 | |
| 4873 __ ld(too_far, FieldMemOperand(feedback, FixedArray::kLengthOffset)); | |
| 4874 | |
| 4875 // +-----+------+------+-----+-----+-----+ ... ----+ | |
| 4876 // | map | len | wm0 | wt0 | h0 | wm1 | hN | | |
| 4877 // +-----+------+------+-----+-----+ ----+ ... ----+ | |
| 4878 // 0 1 2 len-1 | |
| 4879 // ^ ^ | |
| 4880 // | | | |
| 4881 // pointer_reg too_far | |
| 4882 // aka feedback scratch2 | |
| 4883 // also need receiver_map | |
| 4884 // use cached_map (scratch1) to look in the weak map values. | |
| 4885 __ SmiScale(too_far, too_far, kPointerSizeLog2); | |
| 4886 __ Daddu(too_far, feedback, Operand(too_far)); | |
| 4887 __ Daddu(too_far, too_far, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | |
| 4888 __ Daddu(pointer_reg, feedback, | |
| 4889 Operand(FixedArray::OffsetOfElementAt(0) - kHeapObjectTag)); | |
| 4890 | |
| 4891 __ bind(&next_loop); | |
| 4892 __ ld(cached_map, MemOperand(pointer_reg)); | |
| 4893 __ ld(cached_map, FieldMemOperand(cached_map, WeakCell::kValueOffset)); | |
| 4894 __ Branch(&prepare_next, ne, receiver_map, Operand(cached_map)); | |
| 4895 // Is it a transitioning store? | |
| 4896 __ ld(too_far, MemOperand(pointer_reg, kPointerSize)); | |
| 4897 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); | |
| 4898 __ Branch(&transition_call, ne, too_far, Operand(at)); | |
| 4899 | |
| 4900 __ ld(pointer_reg, MemOperand(pointer_reg, kPointerSize * 2)); | |
| 4901 __ Daddu(t9, pointer_reg, Operand(Code::kHeaderSize - kHeapObjectTag)); | |
| 4902 __ Jump(t9); | |
| 4903 | |
| 4904 __ bind(&transition_call); | |
| 4905 __ ld(too_far, FieldMemOperand(too_far, WeakCell::kValueOffset)); | |
| 4906 __ JumpIfSmi(too_far, miss); | |
| 4907 | |
| 4908 __ ld(receiver_map, MemOperand(pointer_reg, kPointerSize * 2)); | |
| 4909 // Load the map into the correct register. | |
| 4910 DCHECK(feedback.is(VectorStoreTransitionDescriptor::MapRegister())); | |
| 4911 __ Move(feedback, too_far); | |
| 4912 __ Daddu(t9, receiver_map, Operand(Code::kHeaderSize - kHeapObjectTag)); | |
| 4913 __ Jump(t9); | |
| 4914 | |
| 4915 __ bind(&prepare_next); | |
| 4916 __ Daddu(pointer_reg, pointer_reg, Operand(kPointerSize * 3)); | |
| 4917 __ Branch(&next_loop, lt, pointer_reg, Operand(too_far)); | |
| 4918 | |
| 4919 // We exhausted our array of map handler pairs. | |
| 4920 __ Branch(miss); | |
| 4921 } | |
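
The layout comment in the removed HandlePolymorphicStoreCase describes triplets of (weak map cell, transition weak cell or undefined, handler) following the array header. A minimal sketch of that walk, assuming simplified plain-C++ stand-ins (StoreFeedbackTriplet, WalkResult) in place of the real tagged-pointer layout:

#include <vector>

// Hypothetical stand-ins, not V8 types: "no transition" is modelled as a null
// transition_map, and a cleared transition weak cell as transition_cleared.
struct Map {};
using Code = void (*)();

struct StoreFeedbackTriplet {
  const Map* cached_map;      // weak cell value for the receiver map
  const Map* transition_map;  // nullptr when the slot held undefined (plain store)
  bool transition_cleared;    // true if the transition weak cell was cleared
  Code handler;
};

enum class WalkResult { kHandler, kTransition, kMiss };

// Mirrors the loop in the removed code: find the entry whose cached map matches
// the receiver map; a cleared transition cell means miss; otherwise hand back
// the handler and, for transitioning stores, the transition map.
WalkResult WalkStoreFeedback(const std::vector<StoreFeedbackTriplet>& feedback,
                             const Map* receiver_map, const Map** transition_out,
                             Code* handler_out) {
  for (const StoreFeedbackTriplet& entry : feedback) {
    if (entry.cached_map != receiver_map) continue;  // "prepare_next"
    if (entry.transition_map == nullptr && !entry.transition_cleared) {
      *handler_out = entry.handler;                  // non-transitioning store
      return WalkResult::kHandler;
    }
    if (entry.transition_cleared) return WalkResult::kMiss;  // cleared weak cell
    *transition_out = entry.transition_map;          // transitioning store
    *handler_out = entry.handler;
    return WalkResult::kTransition;
  }
  return WalkResult::kMiss;  // exhausted the map/handler triplets
}
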
| 4922 | |
| 4923 | |
| 4924 void VectorKeyedStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { | 4822 void VectorKeyedStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { |
| 4925 Register receiver = VectorStoreICDescriptor::ReceiverRegister(); // a1 | 4823 Label miss; |
| 4926 Register key = VectorStoreICDescriptor::NameRegister(); // a2 | |
| 4927 Register vector = VectorStoreICDescriptor::VectorRegister(); // a3 | |
| 4928 Register slot = VectorStoreICDescriptor::SlotRegister(); // a4 | |
| 4929 DCHECK(VectorStoreICDescriptor::ValueRegister().is(a0)); // a0 | |
| 4930 Register feedback = a5; | |
| 4931 Register receiver_map = a6; | |
| 4932 Register scratch1 = a7; | |
| 4933 | 4824 |
| 4934 __ SmiScale(scratch1, slot, kPointerSizeLog2); | 4825 // TODO(mvstanton): Implement. |
| 4935 __ Daddu(feedback, vector, Operand(scratch1)); | |
| 4936 __ ld(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize)); | |
| 4937 | |
| 4938 // Try to quickly handle the monomorphic case without knowing for sure | |
| 4939 // if we have a weak cell in feedback. We do know it's safe to look | |
| 4940 // at WeakCell::kValueOffset. | |
| 4941 Label try_array, load_smi_map, compare_map; | |
| 4942 Label not_array, miss; | |
| 4943 HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector, slot, | |
| 4944 scratch1, &compare_map, &load_smi_map, &try_array); | |
| 4945 | |
| 4946 __ bind(&try_array); | |
| 4947 // Is it a fixed array? | |
| 4948 __ ld(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset)); | |
| 4949 __ Branch(&not_array, ne, scratch1, Heap::kFixedArrayMapRootIndex); | |
| 4950 | |
| 4951 // We have a polymorphic element handler. | |
| 4952 Label try_poly_name; | |
| 4953 | |
| 4954 Register scratch2 = t0; | |
| 4955 | |
| 4956 HandlePolymorphicStoreCase(masm, feedback, receiver_map, scratch1, scratch2, | |
| 4957 &miss); | |
| 4958 | |
| 4959 __ bind(&not_array); | |
| 4960 // Is it generic? | |
| 4961 __ Branch(&try_poly_name, ne, feedback, Heap::kmegamorphic_symbolRootIndex); | |
| 4962 Handle<Code> megamorphic_stub = | |
| 4963 KeyedStoreIC::ChooseMegamorphicStub(masm->isolate(), GetExtraICState()); | |
| 4964 __ Jump(megamorphic_stub, RelocInfo::CODE_TARGET); | |
| 4965 | |
| 4966 __ bind(&try_poly_name); | |
| 4967 // We might have a name in feedback, and a fixed array in the next slot. | |
| 4968 __ Branch(&miss, ne, key, Operand(feedback)); | |
| 4969 // If the name comparison succeeded, we know we have a fixed array with | |
| 4970 // at least one map/handler pair. | |
| 4971 __ SmiScale(scratch1, slot, kPointerSizeLog2); | |
| 4972 __ Daddu(feedback, vector, Operand(scratch1)); | |
| 4973 __ ld(feedback, | |
| 4974 FieldMemOperand(feedback, FixedArray::kHeaderSize + kPointerSize)); | |
| 4975 HandleArrayCases(masm, feedback, receiver_map, scratch1, scratch2, false, | |
| 4976 &miss); | |
| 4977 | |
| 4978 __ bind(&miss); | 4826 __ bind(&miss); |
| 4979 KeyedStoreIC::GenerateMiss(masm); | 4827 KeyedStoreIC::GenerateMiss(masm); |
| 4980 | |
| 4981 __ bind(&load_smi_map); | |
| 4982 __ Branch(USE_DELAY_SLOT, &compare_map); | |
| 4983 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex); // In delay slot. | |
| 4984 } | 4828 } |
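
Beyond the dispatch sketched earlier, the removed keyed variant also handled a cached property name whose map/handler array lives in the next vector slot (the try_poly_name path). A short sketch of just that lookup, again with hypothetical stand-in types rather than real V8 classes:

#include <string>
#include <utility>
#include <vector>

// Hypothetical stand-ins, not V8 types.
struct Map {};
using Code = void (*)();
struct Name { std::string value; };

struct KeyedStoreSlotPair {
  const Name* cached_name;                            // feedback: a property name...
  std::vector<std::pair<const Map*, Code>> handlers;  // ...with handlers in slot + 1
};

// Mirrors the "try_poly_name" path: only if the incoming key is the cached name
// (identity comparison) do we consult the map/handler array in the next slot.
Code LookupNamedKeyedStoreHandler(const KeyedStoreSlotPair& slot, const Name* key,
                                  const Map* receiver_map) {
  if (key != slot.cached_name) return nullptr;  // name mismatch -> miss
  for (const auto& entry : slot.handlers) {
    if (entry.first == receiver_map) return entry.second;
  }
  return nullptr;  // no map match -> miss
}
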
| 4985 | 4829 |
| 4986 | 4830 |
| 4987 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { | 4831 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { |
| 4988 if (masm->isolate()->function_entry_hook() != NULL) { | 4832 if (masm->isolate()->function_entry_hook() != NULL) { |
| 4989 ProfileEntryHookStub stub(masm->isolate()); | 4833 ProfileEntryHookStub stub(masm->isolate()); |
| 4990 __ push(ra); | 4834 __ push(ra); |
| 4991 __ CallStub(&stub); | 4835 __ CallStub(&stub); |
| 4992 __ pop(ra); | 4836 __ pop(ra); |
| 4993 } | 4837 } |
| (...skipping 822 matching lines...) | |
| 5816 MemOperand(fp, 6 * kPointerSize), NULL); | 5660 MemOperand(fp, 6 * kPointerSize), NULL); |
| 5817 } | 5661 } |
| 5818 | 5662 |
| 5819 | 5663 |
| 5820 #undef __ | 5664 #undef __ |
| 5821 | 5665 |
| 5822 } // namespace internal | 5666 } // namespace internal |
| 5823 } // namespace v8 | 5667 } // namespace v8 |
| 5824 | 5668 |
| 5825 #endif // V8_TARGET_ARCH_MIPS64 | 5669 #endif // V8_TARGET_ARCH_MIPS64 |