OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #if V8_TARGET_ARCH_MIPS64 | 7 #if V8_TARGET_ARCH_MIPS64 |
8 | 8 |
9 #include "src/bootstrapper.h" | 9 #include "src/bootstrapper.h" |
10 #include "src/code-stubs.h" | 10 #include "src/code-stubs.h" |
11 #include "src/codegen.h" | 11 #include "src/codegen.h" |
12 #include "src/ic/handler-compiler.h" | 12 #include "src/ic/handler-compiler.h" |
13 #include "src/ic/ic.h" | 13 #include "src/ic/ic.h" |
| 14 #include "src/ic/stub-cache.h" |
14 #include "src/isolate.h" | 15 #include "src/isolate.h" |
15 #include "src/jsregexp.h" | 16 #include "src/jsregexp.h" |
16 #include "src/regexp-macro-assembler.h" | 17 #include "src/regexp-macro-assembler.h" |
17 #include "src/runtime/runtime.h" | 18 #include "src/runtime/runtime.h" |
18 | 19 |
19 namespace v8 { | 20 namespace v8 { |
20 namespace internal { | 21 namespace internal { |
21 | 22 |
22 | 23 |
23 static void InitializeArrayConstructorDescriptor( | 24 static void InitializeArrayConstructorDescriptor( |
(...skipping 4566 matching lines...)
4590 } | 4591 } |
4591 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE); | 4592 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE); |
4592 __ dsll(a1, a1, kPointerSizeLog2); | 4593 __ dsll(a1, a1, kPointerSizeLog2); |
4593 __ Ret(USE_DELAY_SLOT); | 4594 __ Ret(USE_DELAY_SLOT); |
4594 __ Daddu(sp, sp, a1); | 4595 __ Daddu(sp, sp, a1); |
4595 } | 4596 } |
4596 | 4597 |
4597 | 4598 |
4598 void LoadICTrampolineStub::Generate(MacroAssembler* masm) { | 4599 void LoadICTrampolineStub::Generate(MacroAssembler* masm) { |
4599 EmitLoadTypeFeedbackVector(masm, VectorLoadICDescriptor::VectorRegister()); | 4600 EmitLoadTypeFeedbackVector(masm, VectorLoadICDescriptor::VectorRegister()); |
4600 VectorLoadStub stub(isolate(), state()); | 4601 VectorRawLoadStub stub(isolate(), state()); |
4601 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); | 4602 stub.GenerateForTrampoline(masm); |
4602 } | 4603 } |
4603 | 4604 |
4604 | 4605 |
4605 void KeyedLoadICTrampolineStub::Generate(MacroAssembler* masm) { | 4606 void KeyedLoadICTrampolineStub::Generate(MacroAssembler* masm) { |
4606 EmitLoadTypeFeedbackVector(masm, VectorLoadICDescriptor::VectorRegister()); | 4607 EmitLoadTypeFeedbackVector(masm, VectorLoadICDescriptor::VectorRegister()); |
4607 VectorKeyedLoadStub stub(isolate()); | 4608 VectorRawKeyedLoadStub stub(isolate()); |
4608 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); | 4609 stub.GenerateForTrampoline(masm); |
4609 } | 4610 } |
4610 | 4611 |
4611 | 4612 |
4612 void CallICTrampolineStub::Generate(MacroAssembler* masm) { | 4613 void CallICTrampolineStub::Generate(MacroAssembler* masm) { |
4613 EmitLoadTypeFeedbackVector(masm, a2); | 4614 EmitLoadTypeFeedbackVector(masm, a2); |
4614 CallICStub stub(isolate(), state()); | 4615 CallICStub stub(isolate(), state()); |
4615 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); | 4616 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); |
4616 } | 4617 } |
4617 | 4618 |
4618 | 4619 |
4619 void CallIC_ArrayTrampolineStub::Generate(MacroAssembler* masm) { | 4620 void CallIC_ArrayTrampolineStub::Generate(MacroAssembler* masm) { |
4620 EmitLoadTypeFeedbackVector(masm, a2); | 4621 EmitLoadTypeFeedbackVector(masm, a2); |
4621 CallIC_ArrayStub stub(isolate(), state()); | 4622 CallIC_ArrayStub stub(isolate(), state()); |
4622 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); | 4623 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); |
4623 } | 4624 } |
4624 | 4625 |
4625 | 4626 |
| 4627 void VectorRawLoadStub::Generate(MacroAssembler* masm) { |
| 4628 GenerateImpl(masm, false); |
| 4629 } |
| 4630 |
| 4631 |
| 4632 void VectorRawLoadStub::GenerateForTrampoline(MacroAssembler* masm) { |
| 4633 GenerateImpl(masm, true); |
| 4634 } |
| 4635 |
| 4636 |
| 4637 static void HandleArrayCases(MacroAssembler* masm, Register receiver, |
| 4638 Register key, Register vector, Register slot, |
| 4639 Register feedback, Register scratch1, |
| 4640 Register scratch2, Register scratch3, |
| 4641 bool is_polymorphic, Label* miss) { |
| 4642 // feedback initially contains the feedback array |
| 4643 Label next_loop, prepare_next; |
| 4644 Label load_smi_map, compare_map; |
| 4645 Label start_polymorphic; |
| 4646 |
| 4647 Register receiver_map = scratch1; |
| 4648 Register cached_map = scratch2; |
| 4649 |
| 4650 // Receiver might not be a heap object. |
| 4651 __ JumpIfSmi(receiver, &load_smi_map); |
| 4652 __ ld(receiver_map, FieldMemOperand(receiver, HeapObject::kMapOffset)); |
| 4653 __ bind(&compare_map); |
| 4654 __ ld(cached_map, |
| 4655 FieldMemOperand(feedback, FixedArray::OffsetOfElementAt(0))); |
| 4656 __ ld(cached_map, FieldMemOperand(cached_map, WeakCell::kValueOffset)); |
| 4657 __ Branch(&start_polymorphic, ne, receiver_map, Operand(cached_map)); |
| 4658 // Found, now call the handler. |
| 4659 Register handler = feedback; |
| 4660 __ ld(handler, FieldMemOperand(feedback, FixedArray::OffsetOfElementAt(1))); |
| 4661 __ Daddu(t9, handler, Operand(Code::kHeaderSize - kHeapObjectTag)); |
| 4662 __ Jump(t9); |
| 4663 |
| 4664 Register length = scratch3; |
| 4665 __ bind(&start_polymorphic); |
| 4666 __ ld(length, FieldMemOperand(feedback, FixedArray::kLengthOffset)); |
| 4667 if (!is_polymorphic) { |
| 4668 // If the IC could be monomorphic, we have to make sure we don't go past the |
| 4669 // end of the feedback array. |
| 4670 __ Branch(miss, eq, length, Operand(Smi::FromInt(2))); |
| 4671 } |
| 4672 |
| 4673 Register too_far = length; |
| 4674 Register pointer_reg = feedback; |
| 4675 |
| 4676 //   +-----+------+------+-----+-----+ ... ----+ |
| 4677 //   | map | len  | wm0  | h0  | wm1 |   hN    | |
| 4678 //   +-----+------+------+-----+-----+ ... ----+ |
| 4679 //                   0      1     2     len-1 |
| 4680 //                                ^          ^ |
| 4681 //                                |          | |
| 4682 //                           pointer_reg  too_far |
| 4683 //                           aka feedback scratch3 |
| 4684 // also need receiver_map (aka scratch1) |
| 4685 // use cached_map (scratch2) to look in the weak map values. |
| 4686 __ SmiScale(too_far, length, kPointerSizeLog2); |
| 4687 __ Daddu(too_far, feedback, Operand(too_far)); |
| 4688 __ Daddu(too_far, too_far, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
| 4689 __ Daddu(pointer_reg, feedback, |
| 4690 Operand(FixedArray::OffsetOfElementAt(2) - kHeapObjectTag)); |
| 4691 |
| 4692 __ bind(&next_loop); |
| 4693 __ ld(cached_map, MemOperand(pointer_reg)); |
| 4694 __ ld(cached_map, FieldMemOperand(cached_map, WeakCell::kValueOffset)); |
| 4695 __ Branch(&prepare_next, ne, receiver_map, Operand(cached_map)); |
| 4696 __ ld(handler, MemOperand(pointer_reg, kPointerSize)); |
| 4697 __ Daddu(t9, handler, Operand(Code::kHeaderSize - kHeapObjectTag)); |
| 4698 __ Jump(t9); |
| 4699 |
| 4700 __ bind(&prepare_next); |
| 4701 __ Daddu(pointer_reg, pointer_reg, Operand(kPointerSize * 2)); |
| 4702 __ Branch(&next_loop, lt, pointer_reg, Operand(too_far)); |
| 4703 |
| 4704 // We exhausted our array of map handler pairs. |
| 4705 __ Branch(miss); |
| 4706 |
| 4707 __ bind(&load_smi_map); |
| 4708 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex); |
| 4709 __ Branch(&compare_map); |
| 4710 } |
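
For readers following the loop above, here is a rough C++-level sketch of the lookup it emits. This is an illustration only, not code from the patch; FindHandlerForMap is a hypothetical name, and the accessors are the FixedArray/WeakCell ones this file already relies on.

static Code* FindHandlerForMap(Map* receiver_map, FixedArray* feedback) {
  // feedback holds [weak_map0, handler0, weak_map1, handler1, ...]; the fast
  // path before &start_polymorphic already checked elements 0/1, so the loop
  // scans from element 2 upward in (map, handler) strides.
  for (int i = 2; i < feedback->length(); i += 2) {
    WeakCell* cell = WeakCell::cast(feedback->get(i));
    if (cell->value() == receiver_map) {
      return Code::cast(feedback->get(i + 1));  // tail-call this handler
    }
  }
  return nullptr;  // exhausted the map/handler pairs: take the miss path
}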
| 4711 |
| 4712 |
| 4713 static void HandleMonomorphicCase(MacroAssembler* masm, Register receiver, |
| 4714 Register key, Register vector, Register slot, |
| 4715 Register weak_cell, Register scratch, |
| 4716 Label* miss) { |
| 4717 // weak_cell initially contains the weak cell pulled from the feedback vector slot. |
| 4718 Label compare_smi_map; |
| 4719 Register receiver_map = scratch; |
| 4720 Register cached_map = weak_cell; |
| 4721 |
| 4722 // Load the map held by the weak cell into cached_map (which aliases weak_cell). |
| 4723 __ ld(cached_map, FieldMemOperand(weak_cell, WeakCell::kValueOffset)); |
| 4724 |
| 4725 // Receiver might not be a heap object. |
| 4726 __ JumpIfSmi(receiver, &compare_smi_map); |
| 4727 __ ld(receiver_map, FieldMemOperand(receiver, HeapObject::kMapOffset)); |
| 4728 __ Branch(miss, ne, cached_map, Operand(receiver_map)); |
| 4729 |
| 4730 Register handler = weak_cell; |
| 4731 __ SmiScale(handler, slot, kPointerSizeLog2); |
| 4732 __ Daddu(handler, vector, Operand(handler)); |
| 4733 __ ld(handler, |
| 4734 FieldMemOperand(handler, FixedArray::kHeaderSize + kPointerSize)); |
| 4735 __ Daddu(t9, handler, Code::kHeaderSize - kHeapObjectTag); |
| 4736 __ Jump(t9); |
| 4737 |
| 4738 // In microbenchmarks, it made sense to unroll this code so that the call to |
| 4739 // the handler is duplicated for a HeapObject receiver and a Smi receiver. |
| 4740 // TODO(mvstanton): does this hold on ARM? |
| 4741 __ bind(&compare_smi_map); |
| 4742 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex); |
| 4743 __ Branch(miss, ne, weak_cell, Operand(at)); |
| 4744 __ SmiScale(handler, slot, kPointerSizeLog2); |
| 4745 __ Daddu(handler, vector, Operand(handler)); |
| 4746 __ ld(handler, |
| 4747 FieldMemOperand(handler, FixedArray::kHeaderSize + kPointerSize)); |
| 4748 __ Daddu(t9, handler, Code::kHeaderSize - kHeapObjectTag); |
| 4749 __ Jump(t9); |
| 4750 } |
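
As a reading aid for the offset arithmetic above (FixedArray::kHeaderSize + kPointerSize), a minimal sketch of the assumed slot layout, treating `vector` as a plain FixedArray and `slot` as the already-untagged index; MonomorphicLookup is a hypothetical name, not part of the patch.

static Code* MonomorphicLookup(Map* receiver_map, FixedArray* vector, int slot) {
  // vector[slot] is a WeakCell holding the expected map; vector[slot + 1]
  // holds the handler. Smi receivers compare against the heap number map.
  WeakCell* cell = WeakCell::cast(vector->get(slot));
  if (cell->value() != receiver_map) return nullptr;  // -> miss
  return Code::cast(vector->get(slot + 1));           // tail-call handler
}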
| 4751 |
| 4752 |
| 4753 void VectorRawLoadStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { |
| 4754 Register receiver = VectorLoadICDescriptor::ReceiverRegister(); // a1 |
| 4755 Register name = VectorLoadICDescriptor::NameRegister(); // a2 |
| 4756 Register vector = VectorLoadICDescriptor::VectorRegister(); // a3 |
| 4757 Register slot = VectorLoadICDescriptor::SlotRegister(); // a0 |
| 4758 Register feedback = a4; |
| 4759 Register scratch1 = a5; |
| 4760 |
| 4761 __ SmiScale(feedback, slot, kPointerSizeLog2); |
| 4762 __ Daddu(feedback, vector, Operand(feedback)); |
| 4763 __ ld(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize)); |
| 4764 |
| 4765 // Is it a weak cell? |
| 4766 Label try_array; |
| 4767 Label not_array, smi_key, key_okay, miss; |
| 4768 __ ld(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset)); |
| 4769 __ LoadRoot(at, Heap::kWeakCellMapRootIndex); |
| 4770 __ Branch(&try_array, ne, scratch1, Operand(at)); |
| 4771 HandleMonomorphicCase(masm, receiver, name, vector, slot, feedback, scratch1, |
| 4772 &miss); |
| 4773 |
| 4774 // Is it a fixed array? |
| 4775 __ bind(&try_array); |
| 4776 __ LoadRoot(at, Heap::kFixedArrayMapRootIndex); |
| 4777 __ Branch(¬_array, ne, scratch1, Operand(at)); |
| 4778 HandleArrayCases(masm, receiver, name, vector, slot, feedback, scratch1, a6, |
| 4779 a7, true, &miss); |
| 4780 |
| 4781 __ bind(¬_array); |
| 4782 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex); |
| 4783 __ Branch(&miss, ne, feedback, Operand(at)); |
| 4784 Code::Flags code_flags = Code::RemoveTypeAndHolderFromFlags( |
| 4785 Code::ComputeHandlerFlags(Code::LOAD_IC)); |
| 4786 masm->isolate()->stub_cache()->GenerateProbe(masm, Code::LOAD_IC, code_flags, |
| 4787 false, receiver, name, feedback, |
| 4788 scratch1, a6, a7); |
| 4789 |
| 4790 __ bind(&miss); |
| 4791 LoadIC::GenerateMiss(masm); |
| 4792 } |
| 4793 |
| 4794 |
| 4795 void VectorRawKeyedLoadStub::Generate(MacroAssembler* masm) { |
| 4796 GenerateImpl(masm, false); |
| 4797 } |
| 4798 |
| 4799 |
| 4800 void VectorRawKeyedLoadStub::GenerateForTrampoline(MacroAssembler* masm) { |
| 4801 GenerateImpl(masm, true); |
| 4802 } |
| 4803 |
| 4804 |
| 4805 void VectorRawKeyedLoadStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { |
| 4806 Register receiver = VectorLoadICDescriptor::ReceiverRegister(); // a1 |
| 4807 Register key = VectorLoadICDescriptor::NameRegister(); // a2 |
| 4808 Register vector = VectorLoadICDescriptor::VectorRegister(); // a3 |
| 4809 Register slot = VectorLoadICDescriptor::SlotRegister(); // a0 |
| 4810 Register feedback = a4; |
| 4811 Register scratch1 = a5; |
| 4812 |
| 4813 __ SmiScale(feedback, slot, kPointerSizeLog2); |
| 4814 __ Daddu(feedback, vector, Operand(feedback)); |
| 4815 __ ld(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize)); |
| 4816 |
| 4817 // Is it a weak cell? |
| 4818 Label try_array; |
| 4819 Label not_array, smi_key, key_okay, miss; |
| 4820 __ ld(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset)); |
| 4821 __ LoadRoot(at, Heap::kWeakCellMapRootIndex); |
| 4822 __ Branch(&try_array, ne, scratch1, Operand(at)); |
| 4823 __ JumpIfNotSmi(key, &miss); |
| 4824 HandleMonomorphicCase(masm, receiver, key, vector, slot, feedback, scratch1, |
| 4825 &miss); |
| 4826 |
| 4827 __ bind(&try_array); |
| 4828 // Is it a fixed array? |
| 4829 __ LoadRoot(at, Heap::kFixedArrayMapRootIndex); |
| 4830 __ Branch(¬_array, ne, scratch1, Operand(at)); |
| 4831 // We have a polymorphic element handler. |
| 4832 __ JumpIfNotSmi(key, &miss); |
| 4833 |
| 4834 Label polymorphic, try_poly_name; |
| 4835 __ bind(&polymorphic); |
| 4836 HandleArrayCases(masm, receiver, key, vector, slot, feedback, scratch1, a6, |
| 4837 a7, true, &miss); |
| 4838 |
| 4839 __ bind(¬_array); |
| 4840 // Is it generic? |
| 4841 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex); |
| 4842 __ Branch(&try_poly_name, ne, feedback, Operand(at)); |
| 4843 Handle<Code> megamorphic_stub = |
| 4844 KeyedLoadIC::ChooseMegamorphicStub(masm->isolate()); |
| 4845 __ Jump(megamorphic_stub, RelocInfo::CODE_TARGET); |
| 4846 |
| 4847 __ bind(&try_poly_name); |
| 4848 // We might have a name in feedback, and a fixed array in the next slot. |
| 4849 __ Branch(&miss, ne, key, Operand(feedback)); |
| 4850 // If the name comparison succeeded, we know we have a fixed array with |
| 4851 // at least one map/handler pair. |
| 4852 __ SmiScale(feedback, slot, kPointerSizeLog2); |
| 4853 __ Daddu(feedback, vector, Operand(feedback)); |
| 4854 __ ld(feedback, |
| 4855 FieldMemOperand(feedback, FixedArray::kHeaderSize + kPointerSize)); |
| 4856 HandleArrayCases(masm, receiver, key, vector, slot, feedback, scratch1, a6, |
| 4857 a7, false, &miss); |
| 4858 |
| 4859 __ bind(&miss); |
| 4860 KeyedLoadIC::GenerateMiss(masm); |
| 4861 } |
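
Taken together, the two GenerateImpl bodies dispatch on whatever the feedback slot currently holds. The comment block below is a condensed summary of that control flow, offered as a sketch rather than code from the patch:

// Feedback slot contents       Action taken by the stubs above
// WeakCell                     monomorphic: compare the map, tail-call handler
// FixedArray                   polymorphic: scan (weak map, handler) pairs
// megamorphic_symbol           LOAD_IC probes the stub cache;
//                              KEYED_LOAD_IC jumps to the megamorphic stub
// Name equal to the key        keyed only: the map/handler array sits in the
//                              next vector slot
// anything else                fall through to GenerateMiss
// (Keyed loads additionally require a Smi key before the first two cases.)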
| 4862 |
| 4863 |
4626 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { | 4864 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { |
4627 if (masm->isolate()->function_entry_hook() != NULL) { | 4865 if (masm->isolate()->function_entry_hook() != NULL) { |
4628 ProfileEntryHookStub stub(masm->isolate()); | 4866 ProfileEntryHookStub stub(masm->isolate()); |
4629 __ push(ra); | 4867 __ push(ra); |
4630 __ CallStub(&stub); | 4868 __ CallStub(&stub); |
4631 __ pop(ra); | 4869 __ pop(ra); |
4632 } | 4870 } |
4633 } | 4871 } |
4634 | 4872 |
4635 | 4873 |
(...skipping 672 matching lines...)
5308 kStackUnwindSpace, kInvalidStackOffset, | 5546 kStackUnwindSpace, kInvalidStackOffset, |
5309 MemOperand(fp, 6 * kPointerSize), NULL); | 5547 MemOperand(fp, 6 * kPointerSize), NULL); |
5310 } | 5548 } |
5311 | 5549 |
5312 | 5550 |
5313 #undef __ | 5551 #undef __ |
5314 | 5552 |
5315 } } // namespace v8::internal | 5553 } } // namespace v8::internal |
5316 | 5554 |
5317 #endif // V8_TARGET_ARCH_MIPS64 | 5555 #endif // V8_TARGET_ARCH_MIPS64 |