OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_MIPS | 5 #if V8_TARGET_ARCH_MIPS |
6 | 6 |
7 #include "src/base/bits.h" | 7 #include "src/base/bits.h" |
8 #include "src/bootstrapper.h" | 8 #include "src/bootstrapper.h" |
9 #include "src/code-stubs.h" | 9 #include "src/code-stubs.h" |
10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
(...skipping 4753 matching lines...) |
4764 GenerateImpl(masm, false); | 4764 GenerateImpl(masm, false); |
4765 } | 4765 } |
4766 | 4766 |
4767 | 4767 |
4768 void VectorStoreICStub::GenerateForTrampoline(MacroAssembler* masm) { | 4768 void VectorStoreICStub::GenerateForTrampoline(MacroAssembler* masm) { |
4769 GenerateImpl(masm, true); | 4769 GenerateImpl(masm, true); |
4770 } | 4770 } |
4771 | 4771 |
4772 | 4772 |
4773 void VectorStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { | 4773 void VectorStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { |
4774 Register receiver = VectorStoreICDescriptor::ReceiverRegister(); // a1 | 4774 Label miss; |
4775 Register key = VectorStoreICDescriptor::NameRegister(); // a2 | |
4776 Register vector = VectorStoreICDescriptor::VectorRegister(); // a3 | |
4777 Register slot = VectorStoreICDescriptor::SlotRegister(); // t0 | |
4778 DCHECK(VectorStoreICDescriptor::ValueRegister().is(a0)); // a0 | |
4779 Register feedback = t1; | |
4780 Register receiver_map = t2; | |
4781 Register scratch1 = t5; | |
4782 | 4775 |
4783 __ sll(scratch1, slot, kPointerSizeLog2 - kSmiTagSize); | 4776 // TODO(mvstanton): Implement. |
4784 __ Addu(feedback, vector, Operand(scratch1)); | |
4785 __ lw(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize)); | |
4786 | |
4787 // Try to quickly handle the monomorphic case without knowing for sure | |
4788 // if we have a weak cell in feedback. We do know it's safe to look | |
4789 // at WeakCell::kValueOffset. | |
4790 Label try_array, load_smi_map, compare_map; | |
4791 Label not_array, miss; | |
4792 HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector, slot, | |
4793 scratch1, &compare_map, &load_smi_map, &try_array); | |
4794 | |
4795 // Is it a fixed array? | |
4796 __ bind(&try_array); | |
4797 __ lw(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset)); | |
4798 __ LoadRoot(at, Heap::kFixedArrayMapRootIndex); | |
4799 __ Branch(¬_array, ne, scratch1, Operand(at)); | |
4800 | |
4801 Register scratch2 = t4; | |
4802 HandleArrayCases(masm, feedback, receiver_map, scratch1, scratch2, true, | |
4803 &miss); | |
4804 | |
4805 __ bind(¬_array); | |
4806 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex); | |
4807 __ Branch(&miss, ne, feedback, Operand(at)); | |
4808 Code::Flags code_flags = Code::RemoveTypeAndHolderFromFlags( | |
4809 Code::ComputeHandlerFlags(Code::STORE_IC)); | |
4810 masm->isolate()->stub_cache()->GenerateProbe( | |
4811 masm, Code::STORE_IC, code_flags, receiver, key, feedback, receiver_map, | |
4812 scratch1, scratch2); | |
4813 | |
4814 __ bind(&miss); | 4777 __ bind(&miss); |
4815 StoreIC::GenerateMiss(masm); | 4778 StoreIC::GenerateMiss(masm); |
4816 | |
4817 __ bind(&load_smi_map); | |
4818 __ Branch(USE_DELAY_SLOT, &compare_map); | |
4819 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex); // In delay slot. | |
4820 } | 4779 } |
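
For readers of this diff: the block deleted on the left implements the vector-based StoreIC dispatch (monomorphic weak cell, then polymorphic handler array, then megamorphic stub-cache probe, then miss). Below is a minimal C++ sketch of that dispatch order, assuming illustrative stand-in types; FeedbackKind, FeedbackSlot and the integer map/handler ids are not V8 classes, and the real code tail-calls generated handler code and the stub cache instead of returning values.

// Hedged model of the dispatch order in the removed VectorStoreICStub code.
#include <cstdio>
#include <utility>
#include <vector>

enum class FeedbackKind { kWeakCellMap, kHandlerArray, kMegamorphicSymbol, kOther };

struct FeedbackSlot {
  FeedbackKind kind;
  int cached_map = -1;                     // map held by the weak cell (monomorphic case)
  int cached_handler = -1;                 // handler stored in the next vector slot
  std::vector<std::pair<int, int>> pairs;  // (map, handler) pairs (polymorphic case)
};

constexpr int kMiss = -1;            // StoreIC::GenerateMiss
constexpr int kStubCacheProbe = -2;  // stub_cache()->GenerateProbe(...)

int DispatchStore(const FeedbackSlot& slot, int receiver_map) {
  // Monomorphic fast path: weak cell whose map matches the receiver's map.
  if (slot.kind == FeedbackKind::kWeakCellMap && slot.cached_map == receiver_map)
    return slot.cached_handler;
  // Polymorphic: a fixed array of map/handler pairs, scanned linearly.
  if (slot.kind == FeedbackKind::kHandlerArray) {
    for (const auto& p : slot.pairs)
      if (p.first == receiver_map) return p.second;
    return kMiss;  // exhausted the array
  }
  // Megamorphic symbol: fall back to the stub cache.
  if (slot.kind == FeedbackKind::kMegamorphicSymbol) return kStubCacheProbe;
  return kMiss;  // anything else misses
}

int main() {
  FeedbackSlot poly{FeedbackKind::kHandlerArray, -1, -1, {{7, 42}, {9, 43}}};
  std::printf("%d\n", DispatchStore(poly, 9));  // prints 43
}

The ordering mirrors the assembly: the monomorphic weak-cell check comes first because it is the common case, and anything unrecognized falls through to StoreIC::GenerateMiss.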
4821 | 4780 |
4822 | 4781 |
4823 void VectorKeyedStoreICStub::Generate(MacroAssembler* masm) { | 4782 void VectorKeyedStoreICStub::Generate(MacroAssembler* masm) { |
4824 GenerateImpl(masm, false); | 4783 GenerateImpl(masm, false); |
4825 } | 4784 } |
4826 | 4785 |
4827 | 4786 |
4828 void VectorKeyedStoreICStub::GenerateForTrampoline(MacroAssembler* masm) { | 4787 void VectorKeyedStoreICStub::GenerateForTrampoline(MacroAssembler* masm) { |
4829 GenerateImpl(masm, true); | 4788 GenerateImpl(masm, true); |
4830 } | 4789 } |
4831 | 4790 |
4832 | 4791 |
4833 static void HandlePolymorphicStoreCase(MacroAssembler* masm, Register feedback, | |
4834 Register receiver_map, Register scratch1, | |
4835 Register scratch2, Label* miss) { | |
4836 // feedback initially contains the feedback array | |
4837 Label next_loop, prepare_next; | |
4838 Label start_polymorphic; | |
4839 Label transition_call; | |
4840 | |
4841 Register cached_map = scratch1; | |
4842 Register too_far = scratch2; | |
4843 Register pointer_reg = feedback; | |
4844 __ lw(too_far, FieldMemOperand(feedback, FixedArray::kLengthOffset)); | |
4845 | |
4846 // +-----+------+------+-----+-----+-----+ ... ----+ |
4847 // | map | len  | wm0  | wt0 | h0  | wm1 |   hN    | |
4848 // +-----+------+------+-----+-----+ ----+ ... ----+ |
4849 //                 0      1     2           len-1 |
4850 //                 ^                               ^ |
4851 //                 |                               | |
4852 //            pointer_reg                       too_far |
4853 //            aka feedback                      scratch2 |
4854 //            also need receiver_map |
4855 //            use cached_map (scratch1) to look in the weak map values. |
4856 __ sll(scratch1, too_far, kPointerSizeLog2 - kSmiTagSize); | |
4857 __ Addu(too_far, feedback, Operand(scratch1)); | |
4858 __ Addu(too_far, too_far, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | |
4859 __ Addu(pointer_reg, feedback, | |
4860 Operand(FixedArray::OffsetOfElementAt(0) - kHeapObjectTag)); | |
4861 | |
4862 __ bind(&next_loop); | |
4863 __ lw(cached_map, MemOperand(pointer_reg)); | |
4864 __ lw(cached_map, FieldMemOperand(cached_map, WeakCell::kValueOffset)); | |
4865 __ Branch(&prepare_next, ne, receiver_map, Operand(cached_map)); | |
4866 // Is it a transitioning store? | |
4867 __ lw(too_far, MemOperand(pointer_reg, kPointerSize)); | |
4868 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); | |
4869 __ Branch(&transition_call, ne, too_far, Operand(at)); | |
4870 __ lw(pointer_reg, MemOperand(pointer_reg, kPointerSize * 2)); | |
4871 __ Addu(t9, pointer_reg, Operand(Code::kHeaderSize - kHeapObjectTag)); | |
4872 __ Jump(t9); | |
4873 | |
4874 __ bind(&transition_call); | |
4875 __ lw(too_far, FieldMemOperand(too_far, WeakCell::kValueOffset)); | |
4876 __ JumpIfSmi(too_far, miss); | |
4877 | |
4878 __ lw(receiver_map, MemOperand(pointer_reg, kPointerSize * 2)); | |
4879 | |
4880 // Load the map into the correct register. | |
4881 DCHECK(feedback.is(VectorStoreTransitionDescriptor::MapRegister())); | |
4882 __ mov(feedback, too_far); | |
4883 | |
4884 __ Addu(t9, receiver_map, Operand(Code::kHeaderSize - kHeapObjectTag)); | |
4885 __ Jump(t9); | |
4886 | |
4887 __ bind(&prepare_next); | |
4888 __ Addu(pointer_reg, pointer_reg, Operand(kPointerSize * 3)); | |
4889 __ Branch(&next_loop, lt, pointer_reg, Operand(too_far)); | |
4890 | |
4891 // We exhausted our array of map handler pairs. | |
4892 __ jmp(miss); | |
4893 } | |
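
The deleted HandlePolymorphicStoreCase above walks the feedback array in steps of three slots: a weak cell holding a map, a transition map (or undefined for a plain store), and a handler to jump to. A hedged C++ model of that scan follows; Entry, Result and kNoTransition are illustrative names, the cleared-weak-cell check that sends the real code to miss is left out, and the real code jumps straight into handler code rather than returning.

// Hedged model of the (weak map, transition, handler) triple scan.
#include <optional>
#include <vector>

constexpr int kNoTransition = -1;  // stands in for the undefined sentinel

struct Entry {
  int map;         // value of the weak map cell (assumed not cleared here)
  int transition;  // transition map, or kNoTransition for a plain store
  int handler;     // handler to tail-call
};

struct Result {
  int handler;
  int transition;
};

// Returns the matching handler (plus transition map, if any); nullopt = miss.
std::optional<Result> ScanStoreTriples(const std::vector<Entry>& feedback,
                                       int receiver_map) {
  for (const Entry& e : feedback) {          // pointer_reg advances 3 slots per iteration
    if (e.map != receiver_map) continue;     // &prepare_next
    if (e.transition == kNoTransition)       // plain store: jump to the handler
      return Result{e.handler, kNoTransition};
    return Result{e.handler, e.transition};  // transitioning store: handler also gets the new map
  }
  return std::nullopt;                       // ran past too_far -> miss
}

int main() {
  std::vector<Entry> feedback = {{7, kNoTransition, 42}, {9, 11, 43}};
  auto r = ScanStoreTriples(feedback, 9);
  return r && r->transition == 11 ? 0 : 1;  // matching entry carries a transition map
}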
4894 | |
4895 | |
4896 void VectorKeyedStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { | 4792 void VectorKeyedStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { |
4897 Register receiver = VectorStoreICDescriptor::ReceiverRegister(); // a1 | 4793 Label miss; |
4898 Register key = VectorStoreICDescriptor::NameRegister(); // a2 | |
4899 Register vector = VectorStoreICDescriptor::VectorRegister(); // a3 | |
4900 Register slot = VectorStoreICDescriptor::SlotRegister(); // t0 | |
4901 DCHECK(VectorStoreICDescriptor::ValueRegister().is(a0)); // a0 | |
4902 Register feedback = t1; | |
4903 Register receiver_map = t2; | |
4904 Register scratch1 = t5; | |
4905 | 4794 |
4906 __ sll(scratch1, slot, kPointerSizeLog2 - kSmiTagSize); | 4795 // TODO(mvstanton): Implement. |
4907 __ Addu(feedback, vector, Operand(scratch1)); | |
4908 __ lw(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize)); | |
4909 | |
4910 // Try to quickly handle the monomorphic case without knowing for sure | |
4911 // if we have a weak cell in feedback. We do know it's safe to look | |
4912 // at WeakCell::kValueOffset. | |
4913 Label try_array, load_smi_map, compare_map; | |
4914 Label not_array, miss; | |
4915 HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector, slot, | |
4916 scratch1, &compare_map, &load_smi_map, &try_array); | |
4917 | |
4918 __ bind(&try_array); | |
4919 // Is it a fixed array? | |
4920 __ lw(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset)); | |
4921 __ LoadRoot(at, Heap::kFixedArrayMapRootIndex); | |
4922 __ Branch(¬_array, ne, scratch1, Operand(at)); | |
4923 | |
4924 // We have a polymorphic element handler. | |
4925 Label polymorphic, try_poly_name; | |
4926 __ bind(&polymorphic); | |
4927 | |
4928 Register scratch2 = t4; | |
4929 | |
4930 HandlePolymorphicStoreCase(masm, feedback, receiver_map, scratch1, scratch2, | |
4931 &miss); | |
4932 | |
4933 __ bind(¬_array); | |
4934 // Is it generic? | |
4935 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex); | |
4936 __ Branch(&try_poly_name, ne, feedback, Operand(at)); | |
4937 Handle<Code> megamorphic_stub = | |
4938 KeyedStoreIC::ChooseMegamorphicStub(masm->isolate(), GetExtraICState()); | |
4939 __ Jump(megamorphic_stub, RelocInfo::CODE_TARGET); | |
4940 | |
4941 __ bind(&try_poly_name); | |
4942 // We might have a name in feedback, and a fixed array in the next slot. | |
4943 __ Branch(&miss, ne, key, Operand(feedback)); | |
4944 // If the name comparison succeeded, we know we have a fixed array with | |
4945 // at least one map/handler pair. | |
4946 __ sll(scratch1, slot, kPointerSizeLog2 - kSmiTagSize); | |
4947 __ Addu(feedback, vector, Operand(scratch1)); | |
4948 __ lw(feedback, | |
4949 FieldMemOperand(feedback, FixedArray::kHeaderSize + kPointerSize)); | |
4950 HandleArrayCases(masm, feedback, receiver_map, scratch1, scratch2, false, | |
4951 &miss); | |
4952 | |
4953 __ bind(&miss); | 4796 __ bind(&miss); |
4954 KeyedStoreIC::GenerateMiss(masm); | 4797 KeyedStoreIC::GenerateMiss(masm); |
4955 | |
4956 __ bind(&load_smi_map); | |
4957 __ Branch(USE_DELAY_SLOT, &compare_map); | |
4958 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex); // In delay slot. | |
4959 } | 4798 } |
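
The keyed variant deleted above adds two cases once the fixed-array check fails: the megamorphic symbol selects KeyedStoreIC's megamorphic stub, and a cached property name equal to the key means the following vector slot holds another map/handler array. A rough sketch, again with made-up names (KeyedFeedback, name_keyed_handlers) standing in for V8 types:

// Hedged model of the &not_array / &try_poly_name branches in the keyed stub.
#include <optional>
#include <utility>
#include <vector>

struct KeyedFeedback {
  bool is_megamorphic_symbol = false;
  std::optional<int> name;  // a property name cached in the feedback slot
  std::vector<std::pair<int, int>> name_keyed_handlers;  // (map, handler) pairs in slot + 1
};

// Returns a handler id, -2 for "use the megamorphic stub", or -1 for a miss.
int DispatchNonArrayKeyedStore(const KeyedFeedback& f, int key, int receiver_map) {
  if (f.is_megamorphic_symbol) return -2;    // __ Jump(megamorphic_stub)
  if (!f.name || *f.name != key) return -1;  // __ Branch(&miss, ne, key, feedback)
  for (const auto& p : f.name_keyed_handlers)  // HandleArrayCases on the next slot
    if (p.first == receiver_map) return p.second;
  return -1;
}

int main() {
  KeyedFeedback f;
  f.name = 5;
  f.name_keyed_handlers = {{7, 42}};
  return DispatchNonArrayKeyedStore(f, 5, 7) == 42 ? 0 : 1;
}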
4960 | 4799 |
4961 | 4800 |
4962 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { | 4801 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { |
4963 if (masm->isolate()->function_entry_hook() != NULL) { | 4802 if (masm->isolate()->function_entry_hook() != NULL) { |
4964 ProfileEntryHookStub stub(masm->isolate()); | 4803 ProfileEntryHookStub stub(masm->isolate()); |
4965 __ push(ra); | 4804 __ push(ra); |
4966 __ CallStub(&stub); | 4805 __ CallStub(&stub); |
4967 __ pop(ra); | 4806 __ pop(ra); |
4968 } | 4807 } |
(...skipping 821 matching lines...) |
5790 MemOperand(fp, 6 * kPointerSize), NULL); | 5629 MemOperand(fp, 6 * kPointerSize), NULL); |
5791 } | 5630 } |
5792 | 5631 |
5793 | 5632 |
5794 #undef __ | 5633 #undef __ |
5795 | 5634 |
5796 } // namespace internal | 5635 } // namespace internal |
5797 } // namespace v8 | 5636 } // namespace v8 |
5798 | 5637 |
5799 #endif // V8_TARGET_ARCH_MIPS | 5638 #endif // V8_TARGET_ARCH_MIPS |