OLD | NEW |
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_PPC | 5 #if V8_TARGET_ARCH_PPC |
6 | 6 |
7 #include "src/base/bits.h" | 7 #include "src/base/bits.h" |
8 #include "src/bootstrapper.h" | 8 #include "src/bootstrapper.h" |
9 #include "src/code-stubs.h" | 9 #include "src/code-stubs.h" |
10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
(...skipping 4762 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4773 GenerateImpl(masm, false); | 4773 GenerateImpl(masm, false); |
4774 } | 4774 } |
4775 | 4775 |
4776 | 4776 |
4777 void VectorStoreICStub::GenerateForTrampoline(MacroAssembler* masm) { | 4777 void VectorStoreICStub::GenerateForTrampoline(MacroAssembler* masm) { |
4778 GenerateImpl(masm, true); | 4778 GenerateImpl(masm, true); |
4779 } | 4779 } |
4780 | 4780 |
4781 | 4781 |
4782 void VectorStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { | 4782 void VectorStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { |
4783 Label miss; | 4783 Register receiver = VectorStoreICDescriptor::ReceiverRegister(); // r4 |
| 4784 Register key = VectorStoreICDescriptor::NameRegister(); // r5 |
| 4785 Register vector = VectorStoreICDescriptor::VectorRegister(); // r6 |
| 4786 Register slot = VectorStoreICDescriptor::SlotRegister(); // r7 |
| 4787 DCHECK(VectorStoreICDescriptor::ValueRegister().is(r3)); // r3 |
| 4788 Register feedback = r8; |
| 4789 Register receiver_map = r9; |
| 4790 Register scratch1 = r10; |
4784 | 4791 |
4785 // TODO(mvstanton): Implement. | 4792 __ SmiToPtrArrayOffset(r0, slot); |
| 4793 __ add(feedback, vector, r0); |
| 4794 __ LoadP(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize)); |
| 4795 |
| 4796 // Try to quickly handle the monomorphic case without knowing for sure |
| 4797 // if we have a weak cell in feedback. We do know it's safe to look |
| 4798 // at WeakCell::kValueOffset. |
| 4799 Label try_array, load_smi_map, compare_map; |
| 4800 Label not_array, miss; |
| 4801 HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector, slot, |
| 4802 scratch1, &compare_map, &load_smi_map, &try_array); |
| 4803 |
| 4804 // Is it a fixed array? |
| 4805 __ bind(&try_array); |
| 4806 __ LoadP(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset)); |
| 4807 __ CompareRoot(scratch1, Heap::kFixedArrayMapRootIndex); |
| 4808 __ bne(&not_array); |
| 4809 |
| 4810 Register scratch2 = r11; |
| 4811 HandleArrayCases(masm, feedback, receiver_map, scratch1, scratch2, true, |
| 4812 &miss); |
| 4813 |
| 4814 __ bind(&not_array); |
| 4815 __ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex); |
| 4816 __ bne(&miss); |
| 4817 Code::Flags code_flags = Code::RemoveTypeAndHolderFromFlags( |
| 4818 Code::ComputeHandlerFlags(Code::STORE_IC)); |
| 4819 masm->isolate()->stub_cache()->GenerateProbe( |
| 4820 masm, Code::STORE_IC, code_flags, receiver, key, feedback, receiver_map, |
| 4821 scratch1, scratch2); |
| 4822 |
4786 __ bind(&miss); | 4823 __ bind(&miss); |
4787 StoreIC::GenerateMiss(masm); | 4824 StoreIC::GenerateMiss(masm); |
| 4825 |
| 4826 __ bind(&load_smi_map); |
| 4827 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex); |
| 4828 __ b(&compare_map); |
4788 } | 4829 } |
4789 | 4830 |
4790 | 4831 |
4791 void VectorKeyedStoreICStub::Generate(MacroAssembler* masm) { | 4832 void VectorKeyedStoreICStub::Generate(MacroAssembler* masm) { |
4792 GenerateImpl(masm, false); | 4833 GenerateImpl(masm, false); |
4793 } | 4834 } |
4794 | 4835 |
4795 | 4836 |
4796 void VectorKeyedStoreICStub::GenerateForTrampoline(MacroAssembler* masm) { | 4837 void VectorKeyedStoreICStub::GenerateForTrampoline(MacroAssembler* masm) { |
4797 GenerateImpl(masm, true); | 4838 GenerateImpl(masm, true); |
4798 } | 4839 } |
4799 | 4840 |
4800 | 4841 |
| 4842 static void HandlePolymorphicStoreCase(MacroAssembler* masm, Register feedback, |
| 4843 Register receiver_map, Register scratch1, |
| 4844 Register scratch2, Label* miss) { |
| 4845 // feedback initially contains the feedback array |
| 4846 Label next_loop, prepare_next; |
| 4847 Label start_polymorphic; |
| 4848 Label transition_call; |
| 4849 |
| 4850 Register cached_map = scratch1; |
| 4851 Register too_far = scratch2; |
| 4852 Register pointer_reg = feedback; |
| 4853 __ LoadP(too_far, FieldMemOperand(feedback, FixedArray::kLengthOffset)); |
| 4854 |
| 4855 // +-----+------+------+-----+-----+-----+ ... ----+ |
| 4856 // | map | len | wm0 | wt0 | h0 | wm1 | hN | |
| 4857 // +-----+------+------+-----+-----+ ----+ ... ----+ |
| 4858 // 0 1 2 len-1 |
| 4859 // ^ ^ |
| 4860 // | | |
| 4861 // pointer_reg too_far |
| 4862 // aka feedback scratch2 |
| 4863 // also need receiver_map |
| 4864 // use cached_map (scratch1) to look in the weak map values. |
| 4865 __ SmiToPtrArrayOffset(r0, too_far); |
| 4866 __ add(too_far, feedback, r0); |
| 4867 __ addi(too_far, too_far, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
| 4868 __ addi(pointer_reg, feedback, |
| 4869 Operand(FixedArray::OffsetOfElementAt(0) - kHeapObjectTag)); |
| 4870 |
| 4871 __ bind(&next_loop); |
| 4872 __ LoadP(cached_map, MemOperand(pointer_reg)); |
| 4873 __ LoadP(cached_map, FieldMemOperand(cached_map, WeakCell::kValueOffset)); |
| 4874 __ cmp(receiver_map, cached_map); |
| 4875 __ bne(&prepare_next); |
| 4876 // Is it a transitioning store? |
| 4877 __ LoadP(too_far, MemOperand(pointer_reg, kPointerSize)); |
| 4878 __ CompareRoot(too_far, Heap::kUndefinedValueRootIndex); |
| 4879 __ bne(&transition_call); |
| 4880 __ LoadP(pointer_reg, MemOperand(pointer_reg, kPointerSize * 2)); |
| 4881 __ addi(ip, pointer_reg, Operand(Code::kHeaderSize - kHeapObjectTag)); |
| 4882 __ Jump(ip); |
| 4883 |
| 4884 __ bind(&transition_call); |
| 4885 __ LoadP(too_far, FieldMemOperand(too_far, WeakCell::kValueOffset)); |
| 4886 __ JumpIfSmi(too_far, miss); |
| 4887 |
| 4888 __ LoadP(receiver_map, MemOperand(pointer_reg, kPointerSize * 2)); |
| 4889 |
| 4890 // Load the map into the correct register. |
| 4891 DCHECK(feedback.is(VectorStoreTransitionDescriptor::MapRegister())); |
| 4892 __ mr(feedback, too_far); |
| 4893 |
| 4894 __ addi(ip, receiver_map, Operand(Code::kHeaderSize - kHeapObjectTag)); |
| 4895 __ Jump(ip); |
| 4896 |
| 4897 __ bind(&prepare_next); |
| 4898 __ addi(pointer_reg, pointer_reg, Operand(kPointerSize * 3)); |
| 4899 __ cmpl(pointer_reg, too_far); |
| 4900 __ blt(&next_loop); |
| 4901 |
| 4902 // We exhausted our array of map handler pairs. |
| 4903 __ b(miss); |
| 4904 } |
| 4905 |
| 4906 |
4801 void VectorKeyedStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { | 4907 void VectorKeyedStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { |
4802 Label miss; | 4908 Register receiver = VectorStoreICDescriptor::ReceiverRegister(); // r4 |
| 4909 Register key = VectorStoreICDescriptor::NameRegister(); // r5 |
| 4910 Register vector = VectorStoreICDescriptor::VectorRegister(); // r6 |
| 4911 Register slot = VectorStoreICDescriptor::SlotRegister(); // r7 |
| 4912 DCHECK(VectorStoreICDescriptor::ValueRegister().is(r3)); // r3 |
| 4913 Register feedback = r8; |
| 4914 Register receiver_map = r9; |
| 4915 Register scratch1 = r10; |
4803 | 4916 |
4804 // TODO(mvstanton): Implement. | 4917 __ SmiToPtrArrayOffset(r0, slot); |
| 4918 __ add(feedback, vector, r0); |
| 4919 __ LoadP(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize)); |
| 4920 |
| 4921 // Try to quickly handle the monomorphic case without knowing for sure |
| 4922 // if we have a weak cell in feedback. We do know it's safe to look |
| 4923 // at WeakCell::kValueOffset. |
| 4924 Label try_array, load_smi_map, compare_map; |
| 4925 Label not_array, miss; |
| 4926 HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector, slot, |
| 4927 scratch1, &compare_map, &load_smi_map, &try_array); |
| 4928 |
| 4929 __ bind(&try_array); |
| 4930 // Is it a fixed array? |
| 4931 __ LoadP(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset)); |
| 4932 __ CompareRoot(scratch1, Heap::kFixedArrayMapRootIndex); |
| 4933 __ bne(&not_array); |
| 4934 |
| 4935 // We have a polymorphic element handler. |
| 4936 Label polymorphic, try_poly_name; |
| 4937 __ bind(&polymorphic); |
| 4938 |
| 4939 Register scratch2 = r11; |
| 4940 |
| 4941 HandlePolymorphicStoreCase(masm, feedback, receiver_map, scratch1, scratch2, |
| 4942 &miss); |
| 4943 |
| 4944 __ bind(&not_array); |
| 4945 // Is it generic? |
| 4946 __ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex); |
| 4947 __ bne(&try_poly_name); |
| 4948 Handle<Code> megamorphic_stub = |
| 4949 KeyedStoreIC::ChooseMegamorphicStub(masm->isolate(), GetExtraICState()); |
| 4950 __ Jump(megamorphic_stub, RelocInfo::CODE_TARGET); |
| 4951 |
| 4952 __ bind(&try_poly_name); |
| 4953 // We might have a name in feedback, and a fixed array in the next slot. |
| 4954 __ cmp(key, feedback); |
| 4955 __ bne(&miss); |
| 4956 // If the name comparison succeeded, we know we have a fixed array with |
| 4957 // at least one map/handler pair. |
| 4958 __ SmiToPtrArrayOffset(r0, slot); |
| 4959 __ add(feedback, vector, r0); |
| 4960 __ LoadP(feedback, |
| 4961 FieldMemOperand(feedback, FixedArray::kHeaderSize + kPointerSize)); |
| 4962 HandleArrayCases(masm, feedback, receiver_map, scratch1, scratch2, false, |
| 4963 &miss); |
| 4964 |
4805 __ bind(&miss); | 4965 __ bind(&miss); |
4806 KeyedStoreIC::GenerateMiss(masm); | 4966 KeyedStoreIC::GenerateMiss(masm); |
| 4967 |
| 4968 __ bind(&load_smi_map); |
| 4969 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex); |
| 4970 __ b(&compare_map); |
4807 } | 4971 } |
4808 | 4972 |
4809 | 4973 |
4810 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { | 4974 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { |
4811 if (masm->isolate()->function_entry_hook() != NULL) { | 4975 if (masm->isolate()->function_entry_hook() != NULL) { |
4812 PredictableCodeSizeScope predictable(masm, | 4976 PredictableCodeSizeScope predictable(masm, |
4813 #if V8_TARGET_ARCH_PPC64 | 4977 #if V8_TARGET_ARCH_PPC64 |
4814 14 * Assembler::kInstrSize); | 4978 14 * Assembler::kInstrSize); |
4815 #else | 4979 #else |
4816 11 * Assembler::kInstrSize); | 4980 11 * Assembler::kInstrSize); |
(...skipping 906 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
5723 kStackUnwindSpace, NULL, | 5887 kStackUnwindSpace, NULL, |
5724 MemOperand(fp, 6 * kPointerSize), NULL); | 5888 MemOperand(fp, 6 * kPointerSize), NULL); |
5725 } | 5889 } |
5726 | 5890 |
5727 | 5891 |
5728 #undef __ | 5892 #undef __ |
5729 } // namespace internal | 5893 } // namespace internal |
5730 } // namespace v8 | 5894 } // namespace v8 |
5731 | 5895 |
5732 #endif // V8_TARGET_ARCH_PPC | 5896 #endif // V8_TARGET_ARCH_PPC |
OLD | NEW |