OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #if V8_TARGET_ARCH_MIPS64 | 7 #if V8_TARGET_ARCH_MIPS64 |
8 | 8 |
9 #include "src/bootstrapper.h" | 9 #include "src/bootstrapper.h" |
10 #include "src/code-stubs.h" | 10 #include "src/code-stubs.h" |
(...skipping 4628 matching lines...) |
4639 } | 4639 } |
4640 | 4640 |
4641 | 4641 |
4642 void VectorRawLoadStub::GenerateForTrampoline(MacroAssembler* masm) { | 4642 void VectorRawLoadStub::GenerateForTrampoline(MacroAssembler* masm) { |
4643 GenerateImpl(masm, true); | 4643 GenerateImpl(masm, true); |
4644 } | 4644 } |
4645 | 4645 |
4646 | 4646 |
4647 static void HandleArrayCases(MacroAssembler* masm, Register receiver, | 4647 static void HandleArrayCases(MacroAssembler* masm, Register receiver, |
4648 Register key, Register vector, Register slot, | 4648 Register key, Register vector, Register slot, |
4649 Register feedback, Register receiver_map, | 4649 Register feedback, Register scratch1, |
4650 Register scratch1, Register scratch2, | 4650 Register scratch2, Register scratch3, |
4651 bool is_polymorphic, Label* miss) { | 4651 bool is_polymorphic, Label* miss) { |
4652 // feedback initially contains the feedback array | 4652 // feedback initially contains the feedback array |
4653 Label next_loop, prepare_next; | 4653 Label next_loop, prepare_next; |
| 4654 Label load_smi_map, compare_map; |
4654 Label start_polymorphic; | 4655 Label start_polymorphic; |
4655 | 4656 |
4656 Register cached_map = scratch1; | 4657 Register receiver_map = scratch1; |
| 4658 Register cached_map = scratch2; |
4657 | 4659 |
| 4660 // Receiver might not be a heap object. |
| 4661 __ JumpIfSmi(receiver, &load_smi_map); |
| 4662 __ ld(receiver_map, FieldMemOperand(receiver, HeapObject::kMapOffset)); |
| 4663 __ bind(&compare_map); |
4658 __ ld(cached_map, | 4664 __ ld(cached_map, |
4659 FieldMemOperand(feedback, FixedArray::OffsetOfElementAt(0))); | 4665 FieldMemOperand(feedback, FixedArray::OffsetOfElementAt(0))); |
4660 __ ld(cached_map, FieldMemOperand(cached_map, WeakCell::kValueOffset)); | 4666 __ ld(cached_map, FieldMemOperand(cached_map, WeakCell::kValueOffset)); |
4661 __ Branch(&start_polymorphic, ne, receiver_map, Operand(cached_map)); | 4667 __ Branch(&start_polymorphic, ne, receiver_map, Operand(cached_map)); |
4662 // found, now call handler. | 4668 // found, now call handler. |
4663 Register handler = feedback; | 4669 Register handler = feedback; |
4664 __ ld(handler, FieldMemOperand(feedback, FixedArray::OffsetOfElementAt(1))); | 4670 __ ld(handler, FieldMemOperand(feedback, FixedArray::OffsetOfElementAt(1))); |
4665 __ Daddu(t9, handler, Operand(Code::kHeaderSize - kHeapObjectTag)); | 4671 __ Daddu(t9, handler, Operand(Code::kHeaderSize - kHeapObjectTag)); |
4666 __ Jump(t9); | 4672 __ Jump(t9); |
4667 | 4673 |
4668 Register length = scratch2; | 4674 Register length = scratch3; |
4669 __ bind(&start_polymorphic); | 4675 __ bind(&start_polymorphic); |
4670 __ ld(length, FieldMemOperand(feedback, FixedArray::kLengthOffset)); | 4676 __ ld(length, FieldMemOperand(feedback, FixedArray::kLengthOffset)); |
4671 if (!is_polymorphic) { | 4677 if (!is_polymorphic) { |
4672 // If the IC could be monomorphic we have to make sure we don't go past the | 4678 // If the IC could be monomorphic we have to make sure we don't go past the |
4673 // end of the feedback array. | 4679 // end of the feedback array. |
4674 __ Branch(miss, eq, length, Operand(Smi::FromInt(2))); | 4680 __ Branch(miss, eq, length, Operand(Smi::FromInt(2))); |
4675 } | 4681 } |
4676 | 4682 |
4677 Register too_far = length; | 4683 Register too_far = length; |
4678 Register pointer_reg = feedback; | 4684 Register pointer_reg = feedback; |
4679 | 4685 |
4680 // +-----+------+------+-----+-----+ ... ----+ | 4686 // +-----+------+------+-----+-----+ ... ----+ |
4681 // | map | len | wm0 | h0 | wm1 | hN | | 4687 // | map | len | wm0 | h0 | wm1 | hN | |
4682 // +-----+------+------+-----+-----+ ... ----+ | 4688 // +-----+------+------+-----+-----+ ... ----+ |
4683 // 0 1 2 len-1 | 4689 // 0 1 2 len-1 |
4684 // ^ ^ | 4690 // ^ ^ |
4685 // | | | 4691 // | | |
4686 // pointer_reg too_far | 4692 // pointer_reg too_far |
4687 // aka feedback scratch2 | 4693 // aka feedback scratch3 |
4688 // also need receiver_map | 4694 // also need receiver_map (aka scratch1) |
4689 // use cached_map (scratch1) to look in the weak map values. | 4695 // use cached_map (scratch2) to look in the weak map values. |
4690 __ SmiScale(too_far, length, kPointerSizeLog2); | 4696 __ SmiScale(too_far, length, kPointerSizeLog2); |
4691 __ Daddu(too_far, feedback, Operand(too_far)); | 4697 __ Daddu(too_far, feedback, Operand(too_far)); |
4692 __ Daddu(too_far, too_far, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | 4698 __ Daddu(too_far, too_far, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
4693 __ Daddu(pointer_reg, feedback, | 4699 __ Daddu(pointer_reg, feedback, |
4694 Operand(FixedArray::OffsetOfElementAt(2) - kHeapObjectTag)); | 4700 Operand(FixedArray::OffsetOfElementAt(2) - kHeapObjectTag)); |
4695 | 4701 |
4696 __ bind(&next_loop); | 4702 __ bind(&next_loop); |
4697 __ ld(cached_map, MemOperand(pointer_reg)); | 4703 __ ld(cached_map, MemOperand(pointer_reg)); |
4698 __ ld(cached_map, FieldMemOperand(cached_map, WeakCell::kValueOffset)); | 4704 __ ld(cached_map, FieldMemOperand(cached_map, WeakCell::kValueOffset)); |
4699 __ Branch(&prepare_next, ne, receiver_map, Operand(cached_map)); | 4705 __ Branch(&prepare_next, ne, receiver_map, Operand(cached_map)); |
4700 __ ld(handler, MemOperand(pointer_reg, kPointerSize)); | 4706 __ ld(handler, MemOperand(pointer_reg, kPointerSize)); |
4701 __ Daddu(t9, handler, Operand(Code::kHeaderSize - kHeapObjectTag)); | 4707 __ Daddu(t9, handler, Operand(Code::kHeaderSize - kHeapObjectTag)); |
4702 __ Jump(t9); | 4708 __ Jump(t9); |
4703 | 4709 |
4704 __ bind(&prepare_next); | 4710 __ bind(&prepare_next); |
4705 __ Daddu(pointer_reg, pointer_reg, Operand(kPointerSize * 2)); | 4711 __ Daddu(pointer_reg, pointer_reg, Operand(kPointerSize * 2)); |
4706 __ Branch(&next_loop, lt, pointer_reg, Operand(too_far)); | 4712 __ Branch(&next_loop, lt, pointer_reg, Operand(too_far)); |
4707 | 4713 |
4708 // We exhausted our array of map handler pairs. | 4714 // We exhausted our array of map handler pairs. |
4709 __ Branch(miss); | 4715 __ Branch(miss); |
| 4716 |
| 4717 __ bind(&load_smi_map); |
| 4718 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex); |
| 4719 __ Branch(&compare_map); |
4710 } | 4720 } |
4711 | 4721 |
4712 | 4722 |
4713 static void HandleMonomorphicCase(MacroAssembler* masm, Register receiver, | 4723 static void HandleMonomorphicCase(MacroAssembler* masm, Register receiver, |
4714 Register receiver_map, Register feedback, | 4724 Register key, Register vector, Register slot, |
4715 Register vector, Register slot, | 4725 Register weak_cell, Register scratch, |
4716 Register scratch, Label* compare_map, | 4726 Label* miss) { |
4717 Label* load_smi_map, Label* try_array) { | 4727 // feedback initially contains the feedback array |
4718 __ JumpIfSmi(receiver, load_smi_map); | 4728 Label compare_smi_map; |
| 4729 Register receiver_map = scratch; |
| 4730 Register cached_map = weak_cell; |
| 4731 |
| 4732 // Move the weak map into the weak_cell register. |
| 4733 __ ld(cached_map, FieldMemOperand(weak_cell, WeakCell::kValueOffset)); |
| 4734 |
| 4735 // Receiver might not be a heap object. |
| 4736 __ JumpIfSmi(receiver, &compare_smi_map); |
4719 __ ld(receiver_map, FieldMemOperand(receiver, HeapObject::kMapOffset)); | 4737 __ ld(receiver_map, FieldMemOperand(receiver, HeapObject::kMapOffset)); |
4720 __ bind(compare_map); | 4738 __ Branch(miss, ne, cached_map, Operand(receiver_map)); |
4721 Register cached_map = scratch; | 4739 |
4722 // Move the weak map into the weak_cell register. | 4740 Register handler = weak_cell; |
4723 __ ld(cached_map, FieldMemOperand(feedback, WeakCell::kValueOffset)); | |
4724 __ Branch(try_array, ne, cached_map, Operand(receiver_map)); | |
4725 Register handler = feedback; | |
4726 __ SmiScale(handler, slot, kPointerSizeLog2); | 4741 __ SmiScale(handler, slot, kPointerSizeLog2); |
4727 __ Daddu(handler, vector, Operand(handler)); | 4742 __ Daddu(handler, vector, Operand(handler)); |
4728 __ ld(handler, | 4743 __ ld(handler, |
| 4744 FieldMemOperand(handler, FixedArray::kHeaderSize + kPointerSize)); |
| 4745 __ Daddu(t9, handler, Code::kHeaderSize - kHeapObjectTag); |
| 4746 __ Jump(t9); |
| 4747 |
| 4748 // In microbenchmarks, it made sense to unroll this code so that the call to |
| 4749 // the handler is duplicated for a HeapObject receiver and a Smi receiver. |
| 4750 // TODO(mvstanton): does this hold on ARM? |
| 4751 __ bind(&compare_smi_map); |
| 4752 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex); |
| 4753 __ Branch(miss, ne, weak_cell, Operand(at)); |
| 4754 __ SmiScale(handler, slot, kPointerSizeLog2); |
| 4755 __ Daddu(handler, vector, Operand(handler)); |
| 4756 __ ld(handler, |
4729 FieldMemOperand(handler, FixedArray::kHeaderSize + kPointerSize)); | 4757 FieldMemOperand(handler, FixedArray::kHeaderSize + kPointerSize)); |
4730 __ Daddu(t9, handler, Code::kHeaderSize - kHeapObjectTag); | 4758 __ Daddu(t9, handler, Code::kHeaderSize - kHeapObjectTag); |
4731 __ Jump(t9); | 4759 __ Jump(t9); |
4732 } | 4760 } |
4733 | 4761 |
4734 | 4762 |
4735 void VectorRawLoadStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { | 4763 void VectorRawLoadStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { |
4736 Register receiver = VectorLoadICDescriptor::ReceiverRegister(); // a1 | 4764 Register receiver = VectorLoadICDescriptor::ReceiverRegister(); // a1 |
4737 Register name = VectorLoadICDescriptor::NameRegister(); // a2 | 4765 Register name = VectorLoadICDescriptor::NameRegister(); // a2 |
4738 Register vector = VectorLoadICDescriptor::VectorRegister(); // a3 | 4766 Register vector = VectorLoadICDescriptor::VectorRegister(); // a3 |
4739 Register slot = VectorLoadICDescriptor::SlotRegister(); // a0 | 4767 Register slot = VectorLoadICDescriptor::SlotRegister(); // a0 |
4740 Register feedback = a4; | 4768 Register feedback = a4; |
4741 Register receiver_map = a5; | 4769 Register scratch1 = a5; |
4742 Register scratch1 = a6; | |
4743 | 4770 |
4744 __ SmiScale(feedback, slot, kPointerSizeLog2); | 4771 __ SmiScale(feedback, slot, kPointerSizeLog2); |
4745 __ Daddu(feedback, vector, Operand(feedback)); | 4772 __ Daddu(feedback, vector, Operand(feedback)); |
4746 __ ld(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize)); | 4773 __ ld(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize)); |
4747 | 4774 |
4748 // Try to quickly handle the monomorphic case without knowing for sure | 4775 // Is it a weak cell? |
4749 // if we have a weak cell in feedback. We do know it's safe to look | 4776 Label try_array; |
4750 // at WeakCell::kValueOffset. | 4777 Label not_array, smi_key, key_okay, miss; |
4751 Label try_array, load_smi_map, compare_map; | 4778 __ ld(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset)); |
4752 Label not_array, miss; | 4779 __ LoadRoot(at, Heap::kWeakCellMapRootIndex); |
4753 HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector, slot, | 4780 __ Branch(&try_array, ne, scratch1, Operand(at)); |
4754 scratch1, &compare_map, &load_smi_map, &try_array); | 4781 HandleMonomorphicCase(masm, receiver, name, vector, slot, feedback, scratch1, |
| 4782 &miss); |
4755 | 4783 |
4756 // Is it a fixed array? | 4784 // Is it a fixed array? |
4757 __ bind(&try_array); | 4785 __ bind(&try_array); |
4758 __ ld(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset)); | |
4759 __ LoadRoot(at, Heap::kFixedArrayMapRootIndex); | 4786 __ LoadRoot(at, Heap::kFixedArrayMapRootIndex); |
4760 __ Branch(¬_array, ne, scratch1, Operand(at)); | 4787 __ Branch(¬_array, ne, scratch1, Operand(at)); |
4761 HandleArrayCases(masm, receiver, name, vector, slot, feedback, receiver_map, | 4788 HandleArrayCases(masm, receiver, name, vector, slot, feedback, scratch1, a6, |
4762 scratch1, a7, true, &miss); | 4789 a7, true, &miss); |
4763 | 4790 |
4764 __ bind(¬_array); | 4791 __ bind(¬_array); |
4765 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex); | 4792 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex); |
4766 __ Branch(&miss, ne, feedback, Operand(at)); | 4793 __ Branch(&miss, ne, feedback, Operand(at)); |
4767 Code::Flags code_flags = Code::RemoveTypeAndHolderFromFlags( | 4794 Code::Flags code_flags = Code::RemoveTypeAndHolderFromFlags( |
4768 Code::ComputeHandlerFlags(Code::LOAD_IC)); | 4795 Code::ComputeHandlerFlags(Code::LOAD_IC)); |
4769 masm->isolate()->stub_cache()->GenerateProbe(masm, Code::LOAD_IC, code_flags, | 4796 masm->isolate()->stub_cache()->GenerateProbe(masm, Code::LOAD_IC, code_flags, |
4770 false, receiver, name, feedback, | 4797 false, receiver, name, feedback, |
4771 receiver_map, scratch1, a7); | 4798 scratch1, a6, a7); |
4772 | 4799 |
4773 __ bind(&miss); | 4800 __ bind(&miss); |
4774 LoadIC::GenerateMiss(masm); | 4801 LoadIC::GenerateMiss(masm); |
4775 | |
4776 __ bind(&load_smi_map); | |
4777 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex); | |
4778 __ Branch(&compare_map); | |
4779 } | 4802 } |
4780 | 4803 |
4781 | 4804 |
4782 void VectorRawKeyedLoadStub::Generate(MacroAssembler* masm) { | 4805 void VectorRawKeyedLoadStub::Generate(MacroAssembler* masm) { |
4783 GenerateImpl(masm, false); | 4806 GenerateImpl(masm, false); |
4784 } | 4807 } |
4785 | 4808 |
4786 | 4809 |
4787 void VectorRawKeyedLoadStub::GenerateForTrampoline(MacroAssembler* masm) { | 4810 void VectorRawKeyedLoadStub::GenerateForTrampoline(MacroAssembler* masm) { |
4788 GenerateImpl(masm, true); | 4811 GenerateImpl(masm, true); |
4789 } | 4812 } |
4790 | 4813 |
4791 | 4814 |
4792 void VectorRawKeyedLoadStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { | 4815 void VectorRawKeyedLoadStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { |
4793 Register receiver = VectorLoadICDescriptor::ReceiverRegister(); // a1 | 4816 Register receiver = VectorLoadICDescriptor::ReceiverRegister(); // a1 |
4794 Register key = VectorLoadICDescriptor::NameRegister(); // a2 | 4817 Register key = VectorLoadICDescriptor::NameRegister(); // a2 |
4795 Register vector = VectorLoadICDescriptor::VectorRegister(); // a3 | 4818 Register vector = VectorLoadICDescriptor::VectorRegister(); // a3 |
4796 Register slot = VectorLoadICDescriptor::SlotRegister(); // a0 | 4819 Register slot = VectorLoadICDescriptor::SlotRegister(); // a0 |
4797 Register feedback = a4; | 4820 Register feedback = a4; |
4798 Register receiver_map = a5; | 4821 Register scratch1 = a5; |
4799 Register scratch1 = a6; | |
4800 | 4822 |
4801 __ SmiScale(feedback, slot, kPointerSizeLog2); | 4823 __ SmiScale(feedback, slot, kPointerSizeLog2); |
4802 __ Daddu(feedback, vector, Operand(feedback)); | 4824 __ Daddu(feedback, vector, Operand(feedback)); |
4803 __ ld(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize)); | 4825 __ ld(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize)); |
4804 | 4826 |
4805 // Try to quickly handle the monomorphic case without knowing for sure | 4827 // Is it a weak cell? |
4806 // if we have a weak cell in feedback. We do know it's safe to look | 4828 Label try_array; |
4807 // at WeakCell::kValueOffset. | 4829 Label not_array, smi_key, key_okay, miss; |
4808 Label try_array, load_smi_map, compare_map; | 4830 __ ld(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset)); |
4809 Label not_array, miss; | 4831 __ LoadRoot(at, Heap::kWeakCellMapRootIndex); |
4810 HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector, slot, | 4832 __ Branch(&try_array, ne, scratch1, Operand(at)); |
4811 scratch1, &compare_map, &load_smi_map, &try_array); | 4833 __ JumpIfNotSmi(key, &miss); |
| 4834 HandleMonomorphicCase(masm, receiver, key, vector, slot, feedback, scratch1, |
| 4835 &miss); |
4812 | 4836 |
4813 __ bind(&try_array); | 4837 __ bind(&try_array); |
4814 // Is it a fixed array? | 4838 // Is it a fixed array? |
4815 __ ld(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset)); | |
4816 __ LoadRoot(at, Heap::kFixedArrayMapRootIndex); | 4839 __ LoadRoot(at, Heap::kFixedArrayMapRootIndex); |
4817 __ Branch(¬_array, ne, scratch1, Operand(at)); | 4840 __ Branch(¬_array, ne, scratch1, Operand(at)); |
4818 // We have a polymorphic element handler. | 4841 // We have a polymorphic element handler. |
4819 __ JumpIfNotSmi(key, &miss); | 4842 __ JumpIfNotSmi(key, &miss); |
4820 | 4843 |
4821 Label polymorphic, try_poly_name; | 4844 Label polymorphic, try_poly_name; |
4822 __ bind(&polymorphic); | 4845 __ bind(&polymorphic); |
4823 HandleArrayCases(masm, receiver, key, vector, slot, feedback, receiver_map, | 4846 HandleArrayCases(masm, receiver, key, vector, slot, feedback, scratch1, a6, |
4824 scratch1, a7, true, &miss); | 4847 a7, true, &miss); |
4825 | 4848 |
4826 __ bind(¬_array); | 4849 __ bind(¬_array); |
4827 // Is it generic? | 4850 // Is it generic? |
4828 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex); | 4851 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex); |
4829 __ Branch(&try_poly_name, ne, feedback, Operand(at)); | 4852 __ Branch(&try_poly_name, ne, feedback, Operand(at)); |
4830 Handle<Code> megamorphic_stub = | 4853 Handle<Code> megamorphic_stub = |
4831 KeyedLoadIC::ChooseMegamorphicStub(masm->isolate()); | 4854 KeyedLoadIC::ChooseMegamorphicStub(masm->isolate()); |
4832 __ Jump(megamorphic_stub, RelocInfo::CODE_TARGET); | 4855 __ Jump(megamorphic_stub, RelocInfo::CODE_TARGET); |
4833 | 4856 |
4834 __ bind(&try_poly_name); | 4857 __ bind(&try_poly_name); |
4835 // We might have a name in feedback, and a fixed array in the next slot. | 4858 // We might have a name in feedback, and a fixed array in the next slot. |
4836 __ Branch(&miss, ne, key, Operand(feedback)); | 4859 __ Branch(&miss, ne, key, Operand(feedback)); |
4837 // If the name comparison succeeded, we know we have a fixed array with | 4860 // If the name comparison succeeded, we know we have a fixed array with |
4838 // at least one map/handler pair. | 4861 // at least one map/handler pair. |
4839 __ SmiScale(feedback, slot, kPointerSizeLog2); | 4862 __ SmiScale(feedback, slot, kPointerSizeLog2); |
4840 __ Daddu(feedback, vector, Operand(feedback)); | 4863 __ Daddu(feedback, vector, Operand(feedback)); |
4841 __ ld(feedback, | 4864 __ ld(feedback, |
4842 FieldMemOperand(feedback, FixedArray::kHeaderSize + kPointerSize)); | 4865 FieldMemOperand(feedback, FixedArray::kHeaderSize + kPointerSize)); |
4843 HandleArrayCases(masm, receiver, key, vector, slot, feedback, receiver_map, | 4866 HandleArrayCases(masm, receiver, key, vector, slot, feedback, scratch1, a6, |
4844 scratch1, a7, false, &miss); | 4867 a7, false, &miss); |
4845 | 4868 |
4846 __ bind(&miss); | 4869 __ bind(&miss); |
4847 KeyedLoadIC::GenerateMiss(masm); | 4870 KeyedLoadIC::GenerateMiss(masm); |
4848 | |
4849 __ bind(&load_smi_map); | |
4850 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex); | |
4851 __ Branch(&compare_map); | |
4852 } | 4871 } |
4853 | 4872 |
4854 | 4873 |
4855 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { | 4874 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { |
4856 if (masm->isolate()->function_entry_hook() != NULL) { | 4875 if (masm->isolate()->function_entry_hook() != NULL) { |
4857 ProfileEntryHookStub stub(masm->isolate()); | 4876 ProfileEntryHookStub stub(masm->isolate()); |
4858 __ push(ra); | 4877 __ push(ra); |
4859 __ CallStub(&stub); | 4878 __ CallStub(&stub); |
4860 __ pop(ra); | 4879 __ pop(ra); |
4861 } | 4880 } |
(...skipping 675 matching lines...) |
5537 kStackUnwindSpace, kInvalidStackOffset, | 5556 kStackUnwindSpace, kInvalidStackOffset, |
5538 MemOperand(fp, 6 * kPointerSize), NULL); | 5557 MemOperand(fp, 6 * kPointerSize), NULL); |
5539 } | 5558 } |
5540 | 5559 |
5541 | 5560 |
5542 #undef __ | 5561 #undef __ |
5543 | 5562 |
5544 } } // namespace v8::internal | 5563 } } // namespace v8::internal |
5545 | 5564 |
5546 #endif // V8_TARGET_ARCH_MIPS64 | 5565 #endif // V8_TARGET_ARCH_MIPS64 |
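
For reference, the polymorphic path handled by HandleArrayCases above walks a feedback FixedArray laid out as its inline comment describes: the array length, then repeated (weak cell of map, handler) pairs, jumping to the miss label when no map matches. A minimal C++-style sketch of that lookup, using illustrative stand-in types rather than the real V8 classes, might look like this:

```cpp
// Conceptual sketch only: the names Map, Code, WeakCell and FindHandler here
// are stand-ins, not the actual V8 declarations.
#include <vector>

struct Map {};   // stand-in for the receiver's hidden class
struct Code {};  // stand-in for a handler code object

// A cleared weak cell holds nullptr; otherwise it points at a Map.
struct WeakCell { const Map* value; };

// Feedback layout after the length field: wm0, h0, wm1, h1, ...
struct FeedbackEntry { WeakCell cached_map; Code* handler; };

// Scan the map/handler pairs; returning nullptr corresponds to branching to
// the miss label in the generated stub.
Code* FindHandler(const Map* receiver_map,
                  const std::vector<FeedbackEntry>& feedback) {
  for (const FeedbackEntry& entry : feedback) {
    if (entry.cached_map.value == receiver_map) return entry.handler;
  }
  return nullptr;
}
```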