OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_MIPS | 5 #if V8_TARGET_ARCH_MIPS |
6 | 6 |
7 #include "src/base/bits.h" | 7 #include "src/base/bits.h" |
8 #include "src/bootstrapper.h" | 8 #include "src/bootstrapper.h" |
9 #include "src/code-stubs.h" | 9 #include "src/code-stubs.h" |
10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
(...skipping 4784 matching lines...)
4795 GenerateImpl(masm, false); | 4795 GenerateImpl(masm, false); |
4796 } | 4796 } |
4797 | 4797 |
4798 | 4798 |
4799 void VectorStoreICStub::GenerateForTrampoline(MacroAssembler* masm) { | 4799 void VectorStoreICStub::GenerateForTrampoline(MacroAssembler* masm) { |
4800 GenerateImpl(masm, true); | 4800 GenerateImpl(masm, true); |
4801 } | 4801 } |
4802 | 4802 |
4803 | 4803 |
4804 void VectorStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { | 4804 void VectorStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { |
4805 Label miss; | 4805 Register receiver = VectorStoreICDescriptor::ReceiverRegister(); // a1 |
| 4806 Register key = VectorStoreICDescriptor::NameRegister(); // a2 |
| 4807 Register vector = VectorStoreICDescriptor::VectorRegister(); // a3 |
| 4808 Register slot = VectorStoreICDescriptor::SlotRegister(); // t0 |
| 4809 DCHECK(VectorStoreICDescriptor::ValueRegister().is(a0)); // a0 |
| 4810 Register feedback = t1; |
| 4811 Register receiver_map = t2; |
| 4812 Register scratch1 = t5; |
4806 | 4813 |
4807 // TODO(mvstanton): Implement. | 4814 __ sll(scratch1, slot, kPointerSizeLog2 - kSmiTagSize); |
| 4815 __ Addu(feedback, vector, Operand(scratch1)); |
| 4816 __ lw(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize)); |
| 4817 |
| 4818 // Try to quickly handle the monomorphic case without knowing for sure |
| 4819 // if we have a weak cell in feedback. We do know it's safe to look |
| 4820 // at WeakCell::kValueOffset. |
| 4821 Label try_array, load_smi_map, compare_map; |
| 4822 Label not_array, miss; |
| 4823 HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector, slot, |
| 4824 scratch1, &compare_map, &load_smi_map, &try_array); |
| 4825 |
| 4826 // Is it a fixed array? |
| 4827 __ bind(&try_array); |
| 4828 __ lw(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset)); |
| 4829 __ LoadRoot(at, Heap::kFixedArrayMapRootIndex); |
| 4830 __ Branch(¬_array, ne, scratch1, Operand(at)); |
| 4831 |
| 4832 Register scratch2 = t4; |
| 4833 HandleArrayCases(masm, feedback, receiver_map, scratch1, scratch2, true, |
| 4834 &miss); |
| 4835 |
| 4836 __ bind(¬_array); |
| 4837 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex); |
| 4838 __ Branch(&miss, ne, feedback, Operand(at)); |
| 4839 Code::Flags code_flags = Code::RemoveTypeAndHolderFromFlags( |
| 4840 Code::ComputeHandlerFlags(Code::STORE_IC)); |
| 4841 masm->isolate()->stub_cache()->GenerateProbe( |
| 4842 masm, Code::STORE_IC, code_flags, receiver, key, feedback, receiver_map, |
| 4843 scratch1, scratch2); |
| 4844 |
4808 __ bind(&miss); | 4845 __ bind(&miss); |
4809 StoreIC::GenerateMiss(masm); | 4846 StoreIC::GenerateMiss(masm); |
| 4847 |
| 4848 __ bind(&load_smi_map); |
| 4849 __ Branch(USE_DELAY_SLOT, &compare_map); |
| 4850 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex); // In delay slot. |
4810 } | 4851 } |
4811 | 4852 |
4812 | 4853 |
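A note on the slot-indexing sequence at the top of GenerateImpl above (sll / Addu / lw): the slot register holds a Smi, and on 32-bit targets a Smi carries its value shifted left by kSmiTagSize (1), so shifting the tagged slot left by kPointerSizeLog2 - kSmiTagSize turns it directly into a byte offset of index * kPointerSize into the feedback vector's FixedArray. A minimal standalone sketch of that arithmetic with plain integers (the constants are assumed MIPS32 values, not pulled from V8 headers):

#include <cassert>
#include <cstdint>

// Assumed 32-bit MIPS constants (illustrative only).
constexpr int kSmiTagSize = 1;       // low bit is the Smi tag
constexpr int kPointerSizeLog2 = 2;  // 4-byte pointers
constexpr int kPointerSize = 1 << kPointerSizeLog2;
constexpr int kFixedArrayHeaderSize = 2 * kPointerSize;  // map + length words

// Byte offset of feedback slot |tagged_slot| (a Smi) from the start of the
// FixedArray backing the feedback vector.
int32_t FeedbackSlotOffset(int32_t tagged_slot) {
  // sll scratch1, slot, kPointerSizeLog2 - kSmiTagSize
  int32_t scaled = tagged_slot << (kPointerSizeLog2 - kSmiTagSize);
  // lw feedback, FieldMemOperand(vector + scaled, FixedArray::kHeaderSize)
  return scaled + kFixedArrayHeaderSize;
}

int main() {
  int32_t slot_index = 3;
  int32_t tagged_slot = slot_index << kSmiTagSize;  // Smi encoding: value << 1
  // Slot 3 sits after the two header words plus three pointer-sized slots.
  assert(FeedbackSlotOffset(tagged_slot) ==
         kFixedArrayHeaderSize + slot_index * kPointerSize);
  return 0;
}
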
4813 void VectorKeyedStoreICStub::Generate(MacroAssembler* masm) { | 4854 void VectorKeyedStoreICStub::Generate(MacroAssembler* masm) { |
4814 GenerateImpl(masm, false); | 4855 GenerateImpl(masm, false); |
4815 } | 4856 } |
4816 | 4857 |
4817 | 4858 |
4818 void VectorKeyedStoreICStub::GenerateForTrampoline(MacroAssembler* masm) { | 4859 void VectorKeyedStoreICStub::GenerateForTrampoline(MacroAssembler* masm) { |
4819 GenerateImpl(masm, true); | 4860 GenerateImpl(masm, true); |
4820 } | 4861 } |
4821 | 4862 |
4822 | 4863 |
| 4864 static void HandlePolymorphicStoreCase(MacroAssembler* masm, Register feedback, |
| 4865 Register receiver_map, Register scratch1, |
| 4866 Register scratch2, Label* miss) { |
| 4867 // feedback initially contains the feedback array |
| 4868 Label next_loop, prepare_next; |
| 4869 Label start_polymorphic; |
| 4870 Label transition_call; |
| 4871 |
| 4872 Register cached_map = scratch1; |
| 4873 Register too_far = scratch2; |
| 4874 Register pointer_reg = feedback; |
| 4875 __ lw(too_far, FieldMemOperand(feedback, FixedArray::kLengthOffset)); |
| 4876 |
| 4877 // +-----+------+------+-----+-----+-----+ ... ----+ |
| 4878 // | map | len  | wm0  | wt0 | h0  | wm1 |   hN    | |
| 4879 // +-----+------+------+-----+-----+ ----+ ... ----+ |
| 4880 //                 0      1     2              len-1 |
| 4881 //                 ^                               ^ |
| 4882 //                 |                               | |
| 4883 //             pointer_reg                      too_far |
| 4884 //             aka feedback                     scratch2 |
| 4885 // also need receiver_map |
| 4886 // use cached_map (scratch1) to look in the weak map values. |
| 4887 __ sll(scratch1, too_far, kPointerSizeLog2 - kSmiTagSize); |
| 4888 __ Addu(too_far, feedback, Operand(scratch1)); |
| 4889 __ Addu(too_far, too_far, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
| 4890 __ Addu(pointer_reg, feedback, |
| 4891 Operand(FixedArray::OffsetOfElementAt(0) - kHeapObjectTag)); |
| 4892 |
| 4893 __ bind(&next_loop); |
| 4894 __ lw(cached_map, MemOperand(pointer_reg)); |
| 4895 __ lw(cached_map, FieldMemOperand(cached_map, WeakCell::kValueOffset)); |
| 4896 __ Branch(&prepare_next, ne, receiver_map, Operand(cached_map)); |
| 4897 // Is it a transitioning store? |
| 4898 __ lw(too_far, MemOperand(pointer_reg, kPointerSize)); |
| 4899 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); |
| 4900 __ Branch(&transition_call, ne, too_far, Operand(at)); |
| 4901 __ lw(pointer_reg, MemOperand(pointer_reg, kPointerSize * 2)); |
| 4902 __ Addu(t9, pointer_reg, Operand(Code::kHeaderSize - kHeapObjectTag)); |
| 4903 __ Jump(t9); |
| 4904 |
| 4905 __ bind(&transition_call); |
| 4906 __ lw(too_far, FieldMemOperand(too_far, WeakCell::kValueOffset)); |
| 4907 __ JumpIfSmi(too_far, miss); |
| 4908 |
| 4909 __ lw(receiver_map, MemOperand(pointer_reg, kPointerSize * 2)); |
| 4910 |
| 4911 // Load the map into the correct register. |
| 4912 DCHECK(feedback.is(VectorStoreTransitionDescriptor::MapRegister())); |
| 4913 __ mov(feedback, too_far); |
| 4914 |
| 4915 __ Addu(t9, receiver_map, Operand(Code::kHeaderSize - kHeapObjectTag)); |
| 4916 __ Jump(t9); |
| 4917 |
| 4918 __ bind(&prepare_next); |
| 4919 __ Addu(pointer_reg, pointer_reg, Operand(kPointerSize * 3)); |
| 4920 __ Branch(&next_loop, lt, pointer_reg, Operand(too_far)); |
| 4921 |
| 4922 // We exhausted our array of map handler pairs. |
| 4923 __ jmp(miss); |
| 4924 } |
| 4925 |
| 4926 |
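HandlePolymorphicStoreCase above walks the feedback FixedArray in strides of three pointers per entry: a weak cell holding a map, either undefined (a plain store) or a weak cell holding a transition map, and the handler code to tail-call. A hedged, self-contained C++ sketch of that dispatch loop over plain structs (Entry, StoreDispatch and LookupStoreHandler are illustrative names, not V8 types):

#include <vector>

// Illustrative stand-ins for the three pointer-sized fields stored per entry
// in the polymorphic store feedback array (wm_i, wt_i-or-undefined, h_i).
struct Entry {
  const void* map;             // map from the weak cell, or null if cleared
  const void* transition_map;  // null models the undefined "no transition" case
  const void* handler;         // code object to jump to
};

struct StoreDispatch {
  const void* handler = nullptr;
  const void* new_map = nullptr;  // set only for transitioning stores
  bool miss = true;
};

// Walk the map/transition/handler triples until the receiver map matches,
// mirroring the next_loop / transition_call / prepare_next structure above.
StoreDispatch LookupStoreHandler(const std::vector<Entry>& feedback,
                                 const void* receiver_map) {
  for (const Entry& e : feedback) {       // Addu pointer_reg, ..., 3 words
    if (e.map != receiver_map) continue;  // Branch &prepare_next, ne, ...
    StoreDispatch result;
    result.miss = false;
    result.handler = e.handler;           // lw pointer_reg, [ptr + 2 words]
    result.new_map = e.transition_map;    // non-null => transitioning store
    return result;
  }
  return StoreDispatch{};                 // exhausted the array: jmp miss
}

In the generated code the transitioning case additionally checks that the transition map's weak cell has not been cleared (the JumpIfSmi to miss) before jumping to the handler.
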
4823 void VectorKeyedStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { | 4927 void VectorKeyedStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { |
4824 Label miss; | 4928 Register receiver = VectorStoreICDescriptor::ReceiverRegister(); // a1 |
| 4929 Register key = VectorStoreICDescriptor::NameRegister(); // a2 |
| 4930 Register vector = VectorStoreICDescriptor::VectorRegister(); // a3 |
| 4931 Register slot = VectorStoreICDescriptor::SlotRegister(); // t0 |
| 4932 DCHECK(VectorStoreICDescriptor::ValueRegister().is(a0)); // a0 |
| 4933 Register feedback = t1; |
| 4934 Register receiver_map = t2; |
| 4935 Register scratch1 = t5; |
4825 | 4936 |
4826 // TODO(mvstanton): Implement. | 4937 __ sll(scratch1, slot, kPointerSizeLog2 - kSmiTagSize); |
| 4938 __ Addu(feedback, vector, Operand(scratch1)); |
| 4939 __ lw(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize)); |
| 4940 |
| 4941 // Try to quickly handle the monomorphic case without knowing for sure |
| 4942 // if we have a weak cell in feedback. We do know it's safe to look |
| 4943 // at WeakCell::kValueOffset. |
| 4944 Label try_array, load_smi_map, compare_map; |
| 4945 Label not_array, miss; |
| 4946 HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector, slot, |
| 4947 scratch1, &compare_map, &load_smi_map, &try_array); |
| 4948 |
| 4949 __ bind(&try_array); |
| 4950 // Is it a fixed array? |
| 4951 __ lw(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset)); |
| 4952 __ LoadRoot(at, Heap::kFixedArrayMapRootIndex); |
| 4953 __ Branch(¬_array, ne, scratch1, Operand(at)); |
| 4954 |
| 4955 // We have a polymorphic element handler. |
| 4956 Label polymorphic, try_poly_name; |
| 4957 __ bind(&polymorphic); |
| 4958 |
| 4959 Register scratch2 = t4; |
| 4960 |
| 4961 HandlePolymorphicStoreCase(masm, feedback, receiver_map, scratch1, scratch2, |
| 4962 &miss); |
| 4963 |
| 4964 __ bind(¬_array); |
| 4965 // Is it generic? |
| 4966 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex); |
| 4967 __ Branch(&try_poly_name, ne, feedback, Operand(at)); |
| 4968 Handle<Code> megamorphic_stub = |
| 4969 KeyedStoreIC::ChooseMegamorphicStub(masm->isolate(), GetExtraICState()); |
| 4970 __ Jump(megamorphic_stub, RelocInfo::CODE_TARGET); |
| 4971 |
| 4972 __ bind(&try_poly_name); |
| 4973 // We might have a name in feedback, and a fixed array in the next slot. |
| 4974 __ Branch(&miss, ne, key, Operand(feedback)); |
| 4975 // If the name comparison succeeded, we know we have a fixed array with |
| 4976 // at least one map/handler pair. |
| 4977 __ sll(scratch1, slot, kPointerSizeLog2 - kSmiTagSize); |
| 4978 __ Addu(feedback, vector, Operand(scratch1)); |
| 4979 __ lw(feedback, |
| 4980 FieldMemOperand(feedback, FixedArray::kHeaderSize + kPointerSize)); |
| 4981 HandleArrayCases(masm, feedback, receiver_map, scratch1, scratch2, false, |
| 4982 &miss); |
| 4983 |
4827 __ bind(&miss); | 4984 __ bind(&miss); |
4828 KeyedStoreIC::GenerateMiss(masm); | 4985 KeyedStoreIC::GenerateMiss(masm); |
| 4986 |
| 4987 __ bind(&load_smi_map); |
| 4988 __ Branch(USE_DELAY_SLOT, &compare_map); |
| 4989 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex); // In delay slot. |
4829 } | 4990 } |
4830 | 4991 |
4831 | 4992 |
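Both GenerateImpl bodies end with the same MIPS idiom: Branch(USE_DELAY_SLOT, &compare_map) followed by LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex). The instruction in the branch delay slot executes before the branch takes effect, so a Smi receiver reaches compare_map with the heap-number map already in receiver_map, letting Smis share feedback with heap numbers. In straight-line form the pair amounts to roughly the following sketch (the types and tagging constants are illustrative, assuming 32-bit Smi tagging, and are not V8's own declarations):

#include <cstdint>

struct Map {};
struct HeapObject {
  const Map* map;  // the first word of every heap object is its map
};

constexpr uintptr_t kSmiTagMask = 1;    // assumed: Smis have a 0 low bit
constexpr uintptr_t kHeapObjectTag = 1;
const Map kHeapNumberMap{};             // stand-in for the heap-number map root

// What the load_smi_map / compare_map pair amounts to: Smi receivers are
// treated as if they carried the heap-number map before the map comparison.
const Map* ReceiverMapForFeedback(uintptr_t tagged_receiver) {
  if ((tagged_receiver & kSmiTagMask) == 0) {  // the JumpIfSmi in the stub
    return &kHeapNumberMap;                    // LoadRoot in the delay slot
  }
  const HeapObject* obj =
      reinterpret_cast<const HeapObject*>(tagged_receiver - kHeapObjectTag);
  return obj->map;                             // lw receiver_map, [receiver]
}
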
4832 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { | 4993 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { |
4833 if (masm->isolate()->function_entry_hook() != NULL) { | 4994 if (masm->isolate()->function_entry_hook() != NULL) { |
4834 ProfileEntryHookStub stub(masm->isolate()); | 4995 ProfileEntryHookStub stub(masm->isolate()); |
4835 __ push(ra); | 4996 __ push(ra); |
4836 __ CallStub(&stub); | 4997 __ CallStub(&stub); |
4837 __ pop(ra); | 4998 __ pop(ra); |
4838 } | 4999 } |
(...skipping 821 matching lines...)
5660 MemOperand(fp, 6 * kPointerSize), NULL); | 5821 MemOperand(fp, 6 * kPointerSize), NULL); |
5661 } | 5822 } |
5662 | 5823 |
5663 | 5824 |
5664 #undef __ | 5825 #undef __ |
5665 | 5826 |
5666 } // namespace internal | 5827 } // namespace internal |
5667 } // namespace v8 | 5828 } // namespace v8 |
5668 | 5829 |
5669 #endif // V8_TARGET_ARCH_MIPS | 5830 #endif // V8_TARGET_ARCH_MIPS |