OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #if V8_TARGET_ARCH_MIPS64 | 7 #if V8_TARGET_ARCH_MIPS64 |
8 | 8 |
9 #include "src/bootstrapper.h" | 9 #include "src/bootstrapper.h" |
10 #include "src/code-stubs.h" | 10 #include "src/code-stubs.h" |
(...skipping 4628 matching lines...)
4639 } | 4639 } |
4640 | 4640 |
4641 | 4641 |
4642 void VectorRawLoadStub::GenerateForTrampoline(MacroAssembler* masm) { | 4642 void VectorRawLoadStub::GenerateForTrampoline(MacroAssembler* masm) { |
4643 GenerateImpl(masm, true); | 4643 GenerateImpl(masm, true); |
4644 } | 4644 } |
4645 | 4645 |
4646 | 4646 |
4647 static void HandleArrayCases(MacroAssembler* masm, Register receiver, | 4647 static void HandleArrayCases(MacroAssembler* masm, Register receiver, |
4648 Register key, Register vector, Register slot, | 4648 Register key, Register vector, Register slot, |
4649 Register feedback, Register scratch1, | 4649 Register feedback, Register receiver_map, |
4650 Register scratch2, Register scratch3, | 4650 Register scratch1, Register scratch2, |
4651 bool is_polymorphic, Label* miss) { | 4651 bool is_polymorphic, Label* miss) { |
4652 // feedback initially contains the feedback array | 4652 // feedback initially contains the feedback array |
4653 Label next_loop, prepare_next; | 4653 Label next_loop, prepare_next; |
4654 Label load_smi_map, compare_map; | |
4655 Label start_polymorphic; | 4654 Label start_polymorphic; |
4656 | 4655 |
4657 Register receiver_map = scratch1; | 4656 Register cached_map = scratch1; |
4658 Register cached_map = scratch2; | |
4659 | 4657 |
4660 // Receiver might not be a heap object. | |
4661 __ JumpIfSmi(receiver, &load_smi_map); | |
4662 __ ld(receiver_map, FieldMemOperand(receiver, HeapObject::kMapOffset)); | |
4663 __ bind(&compare_map); | |
4664 __ ld(cached_map, | 4658 __ ld(cached_map, |
4665 FieldMemOperand(feedback, FixedArray::OffsetOfElementAt(0))); | 4659 FieldMemOperand(feedback, FixedArray::OffsetOfElementAt(0))); |
4666 __ ld(cached_map, FieldMemOperand(cached_map, WeakCell::kValueOffset)); | 4660 __ ld(cached_map, FieldMemOperand(cached_map, WeakCell::kValueOffset)); |
4667 __ Branch(&start_polymorphic, ne, receiver_map, Operand(cached_map)); | 4661 __ Branch(&start_polymorphic, ne, receiver_map, Operand(cached_map)); |
4668 // found, now call handler. | 4662 // found, now call handler. |
4669 Register handler = feedback; | 4663 Register handler = feedback; |
4670 __ ld(handler, FieldMemOperand(feedback, FixedArray::OffsetOfElementAt(1))); | 4664 __ ld(handler, FieldMemOperand(feedback, FixedArray::OffsetOfElementAt(1))); |
4671 __ Daddu(t9, handler, Operand(Code::kHeaderSize - kHeapObjectTag)); | 4665 __ Daddu(t9, handler, Operand(Code::kHeaderSize - kHeapObjectTag)); |
4672 __ Jump(t9); | 4666 __ Jump(t9); |
4673 | 4667 |
4674 Register length = scratch3; | 4668 Register length = scratch2; |
4675 __ bind(&start_polymorphic); | 4669 __ bind(&start_polymorphic); |
4676 __ ld(length, FieldMemOperand(feedback, FixedArray::kLengthOffset)); | 4670 __ ld(length, FieldMemOperand(feedback, FixedArray::kLengthOffset)); |
4677 if (!is_polymorphic) { | 4671 if (!is_polymorphic) { |
4678 // If the IC could be monomorphic we have to make sure we don't go past the | 4672 // If the IC could be monomorphic we have to make sure we don't go past the |
4679 // end of the feedback array. | 4673 // end of the feedback array. |
4680 __ Branch(miss, eq, length, Operand(Smi::FromInt(2))); | 4674 __ Branch(miss, eq, length, Operand(Smi::FromInt(2))); |
4681 } | 4675 } |
4682 | 4676 |
4683 Register too_far = length; | 4677 Register too_far = length; |
4684 Register pointer_reg = feedback; | 4678 Register pointer_reg = feedback; |
4685 | 4679 |
4686 // +-----+------+------+-----+-----+ ... ----+ | 4680 // +-----+------+------+-----+-----+ ... ----+ |
4687 // | map | len | wm0 | h0 | wm1 | hN | | 4681 // | map | len | wm0 | h0 | wm1 | hN | |
4688 // +-----+------+------+-----+-----+ ... ----+ | 4682 // +-----+------+------+-----+-----+ ... ----+ |
4689 // 0 1 2 len-1 | 4683 // 0 1 2 len-1 |
4690 // ^ ^ | 4684 // ^ ^ |
4691 // | | | 4685 // | | |
4692 // pointer_reg too_far | 4686 // pointer_reg too_far |
4693 // aka feedback scratch3 | 4687 // aka feedback scratch2 |
4694 // also need receiver_map (aka scratch1) | 4688 // also need receiver_map |
4695 // use cached_map (scratch2) to look in the weak map values. | 4689 // use cached_map (scratch1) to look in the weak map values. |
4696 __ SmiScale(too_far, length, kPointerSizeLog2); | 4690 __ SmiScale(too_far, length, kPointerSizeLog2); |
4697 __ Daddu(too_far, feedback, Operand(too_far)); | 4691 __ Daddu(too_far, feedback, Operand(too_far)); |
4698 __ Daddu(too_far, too_far, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | 4692 __ Daddu(too_far, too_far, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
4699 __ Daddu(pointer_reg, feedback, | 4693 __ Daddu(pointer_reg, feedback, |
4700 Operand(FixedArray::OffsetOfElementAt(2) - kHeapObjectTag)); | 4694 Operand(FixedArray::OffsetOfElementAt(2) - kHeapObjectTag)); |
4701 | 4695 |
4702 __ bind(&next_loop); | 4696 __ bind(&next_loop); |
4703 __ ld(cached_map, MemOperand(pointer_reg)); | 4697 __ ld(cached_map, MemOperand(pointer_reg)); |
4704 __ ld(cached_map, FieldMemOperand(cached_map, WeakCell::kValueOffset)); | 4698 __ ld(cached_map, FieldMemOperand(cached_map, WeakCell::kValueOffset)); |
4705 __ Branch(&prepare_next, ne, receiver_map, Operand(cached_map)); | 4699 __ Branch(&prepare_next, ne, receiver_map, Operand(cached_map)); |
4706 __ ld(handler, MemOperand(pointer_reg, kPointerSize)); | 4700 __ ld(handler, MemOperand(pointer_reg, kPointerSize)); |
4707 __ Daddu(t9, handler, Operand(Code::kHeaderSize - kHeapObjectTag)); | 4701 __ Daddu(t9, handler, Operand(Code::kHeaderSize - kHeapObjectTag)); |
4708 __ Jump(t9); | 4702 __ Jump(t9); |
4709 | 4703 |
4710 __ bind(&prepare_next); | 4704 __ bind(&prepare_next); |
4711 __ Daddu(pointer_reg, pointer_reg, Operand(kPointerSize * 2)); | 4705 __ Daddu(pointer_reg, pointer_reg, Operand(kPointerSize * 2)); |
4712 __ Branch(&next_loop, lt, pointer_reg, Operand(too_far)); | 4706 __ Branch(&next_loop, lt, pointer_reg, Operand(too_far)); |
4713 | 4707 |
4714 // We exhausted our array of map handler pairs. | 4708 // We exhausted our array of map handler pairs. |
4715 __ Branch(miss); | 4709 __ Branch(miss); |
4716 | |
4717 __ bind(&load_smi_map); | |
4718 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex); | |
4719 __ Branch(&compare_map); | |
4720 } | 4710 } |
4721 | 4711 |
4722 | 4712 |
4723 static void HandleMonomorphicCase(MacroAssembler* masm, Register receiver, | 4713 static void HandleMonomorphicCase(MacroAssembler* masm, Register receiver, |
4724 Register key, Register vector, Register slot, | 4714 Register receiver_map, Register feedback, |
4725 Register weak_cell, Register scratch, | 4715 Register vector, Register slot, |
4726 Label* miss) { | 4716 Register scratch, Label* compare_map, |
4727 // feedback initially contains the feedback array | 4717 Label* load_smi_map, Label* try_array) { |
4728 Label compare_smi_map; | 4718 __ JumpIfSmi(receiver, load_smi_map); |
4729 Register receiver_map = scratch; | 4719 __ ld(receiver_map, FieldMemOperand(receiver, HeapObject::kMapOffset)); |
4730 Register cached_map = weak_cell; | 4720 __ bind(compare_map); |
4731 | 4721 Register cached_map = scratch; |
4732 // Move the weak map into the weak_cell register. | 4722 // Move the weak map into the weak_cell register. |
4733 __ ld(cached_map, FieldMemOperand(weak_cell, WeakCell::kValueOffset)); | 4723 __ ld(cached_map, FieldMemOperand(feedback, WeakCell::kValueOffset)); |
4734 | 4724 __ Branch(try_array, ne, cached_map, Operand(receiver_map)); |
4735 // Receiver might not be a heap object. | 4725 Register handler = feedback; |
4736 __ JumpIfSmi(receiver, &compare_smi_map); | |
4737 __ ld(receiver_map, FieldMemOperand(receiver, HeapObject::kMapOffset)); | |
4738 __ Branch(miss, ne, cached_map, Operand(receiver_map)); | |
4739 | |
4740 Register handler = weak_cell; | |
4741 __ SmiScale(handler, slot, kPointerSizeLog2); | 4726 __ SmiScale(handler, slot, kPointerSizeLog2); |
4742 __ Daddu(handler, vector, Operand(handler)); | 4727 __ Daddu(handler, vector, Operand(handler)); |
4743 __ ld(handler, | 4728 __ ld(handler, |
4744 FieldMemOperand(handler, FixedArray::kHeaderSize + kPointerSize)); | |
4745 __ Daddu(t9, handler, Code::kHeaderSize - kHeapObjectTag); | |
4746 __ Jump(t9); | |
4747 | |
4748 // In microbenchmarks, it made sense to unroll this code so that the call to | |
4749 // the handler is duplicated for a HeapObject receiver and a Smi receiver. | |
4750 // TODO(mvstanton): does this hold on ARM? | |
4751 __ bind(&compare_smi_map); | |
4752 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex); | |
4753 __ Branch(miss, ne, weak_cell, Operand(at)); | |
4754 __ SmiScale(handler, slot, kPointerSizeLog2); | |
4755 __ Daddu(handler, vector, Operand(handler)); | |
4756 __ ld(handler, | |
4757 FieldMemOperand(handler, FixedArray::kHeaderSize + kPointerSize)); | 4729 FieldMemOperand(handler, FixedArray::kHeaderSize + kPointerSize)); |
4758 __ Daddu(t9, handler, Code::kHeaderSize - kHeapObjectTag); | 4730 __ Daddu(t9, handler, Code::kHeaderSize - kHeapObjectTag); |
4759 __ Jump(t9); | 4731 __ Jump(t9); |
4760 } | 4732 } |
4761 | 4733 |
4762 | 4734 |
4763 void VectorRawLoadStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { | 4735 void VectorRawLoadStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { |
4764 Register receiver = VectorLoadICDescriptor::ReceiverRegister(); // a1 | 4736 Register receiver = VectorLoadICDescriptor::ReceiverRegister(); // a1 |
4765 Register name = VectorLoadICDescriptor::NameRegister(); // a2 | 4737 Register name = VectorLoadICDescriptor::NameRegister(); // a2 |
4766 Register vector = VectorLoadICDescriptor::VectorRegister(); // a3 | 4738 Register vector = VectorLoadICDescriptor::VectorRegister(); // a3 |
4767 Register slot = VectorLoadICDescriptor::SlotRegister(); // a0 | 4739 Register slot = VectorLoadICDescriptor::SlotRegister(); // a0 |
4768 Register feedback = a4; | 4740 Register feedback = a4; |
4769 Register scratch1 = a5; | 4741 Register receiver_map = a5; |
| 4742 Register scratch1 = a6; |
4770 | 4743 |
4771 __ SmiScale(feedback, slot, kPointerSizeLog2); | 4744 __ SmiScale(feedback, slot, kPointerSizeLog2); |
4772 __ Daddu(feedback, vector, Operand(feedback)); | 4745 __ Daddu(feedback, vector, Operand(feedback)); |
4773 __ ld(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize)); | 4746 __ ld(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize)); |
4774 | 4747 |
4775 // Is it a weak cell? | 4748 // Try to quickly handle the monomorphic case without knowing for sure |
4776 Label try_array; | 4749 // if we have a weak cell in feedback. We do know it's safe to look |
4777 Label not_array, smi_key, key_okay, miss; | 4750 // at WeakCell::kValueOffset. |
4778 __ ld(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset)); | 4751 Label try_array, load_smi_map, compare_map; |
4779 __ LoadRoot(at, Heap::kWeakCellMapRootIndex); | 4752 Label not_array, miss; |
4780 __ Branch(&try_array, ne, scratch1, Operand(at)); | 4753 HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector, slot, |
4781 HandleMonomorphicCase(masm, receiver, name, vector, slot, feedback, scratch1, | 4754 scratch1, &compare_map, &load_smi_map, &try_array); |
4782 &miss); | |
4783 | 4755 |
4784 // Is it a fixed array? | 4756 // Is it a fixed array? |
4785 __ bind(&try_array); | 4757 __ bind(&try_array); |
| 4758 __ ld(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset)); |
4786 __ LoadRoot(at, Heap::kFixedArrayMapRootIndex); | 4759 __ LoadRoot(at, Heap::kFixedArrayMapRootIndex); |
4787 __ Branch(¬_array, ne, scratch1, Operand(at)); | 4760 __ Branch(¬_array, ne, scratch1, Operand(at)); |
4788 HandleArrayCases(masm, receiver, name, vector, slot, feedback, scratch1, a6, | 4761 HandleArrayCases(masm, receiver, name, vector, slot, feedback, receiver_map, |
4789 a7, true, &miss); | 4762 scratch1, a7, true, &miss); |
4790 | 4763 |
4791 __ bind(¬_array); | 4764 __ bind(¬_array); |
4792 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex); | 4765 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex); |
4793 __ Branch(&miss, ne, feedback, Operand(at)); | 4766 __ Branch(&miss, ne, feedback, Operand(at)); |
4794 Code::Flags code_flags = Code::RemoveTypeAndHolderFromFlags( | 4767 Code::Flags code_flags = Code::RemoveTypeAndHolderFromFlags( |
4795 Code::ComputeHandlerFlags(Code::LOAD_IC)); | 4768 Code::ComputeHandlerFlags(Code::LOAD_IC)); |
4796 masm->isolate()->stub_cache()->GenerateProbe(masm, Code::LOAD_IC, code_flags, | 4769 masm->isolate()->stub_cache()->GenerateProbe(masm, Code::LOAD_IC, code_flags, |
4797 false, receiver, name, feedback, | 4770 false, receiver, name, feedback, |
4798 scratch1, a6, a7); | 4771 receiver_map, scratch1, a7); |
4799 | 4772 |
4800 __ bind(&miss); | 4773 __ bind(&miss); |
4801 LoadIC::GenerateMiss(masm); | 4774 LoadIC::GenerateMiss(masm); |
| 4775 |
| 4776 __ bind(&load_smi_map); |
| 4777 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex); |
| 4778 __ Branch(&compare_map); |
4802 } | 4779 } |
4803 | 4780 |
4804 | 4781 |
4805 void VectorRawKeyedLoadStub::Generate(MacroAssembler* masm) { | 4782 void VectorRawKeyedLoadStub::Generate(MacroAssembler* masm) { |
4806 GenerateImpl(masm, false); | 4783 GenerateImpl(masm, false); |
4807 } | 4784 } |
4808 | 4785 |
4809 | 4786 |
4810 void VectorRawKeyedLoadStub::GenerateForTrampoline(MacroAssembler* masm) { | 4787 void VectorRawKeyedLoadStub::GenerateForTrampoline(MacroAssembler* masm) { |
4811 GenerateImpl(masm, true); | 4788 GenerateImpl(masm, true); |
4812 } | 4789 } |
4813 | 4790 |
4814 | 4791 |
4815 void VectorRawKeyedLoadStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { | 4792 void VectorRawKeyedLoadStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { |
4816 Register receiver = VectorLoadICDescriptor::ReceiverRegister(); // a1 | 4793 Register receiver = VectorLoadICDescriptor::ReceiverRegister(); // a1 |
4817 Register key = VectorLoadICDescriptor::NameRegister(); // a2 | 4794 Register key = VectorLoadICDescriptor::NameRegister(); // a2 |
4818 Register vector = VectorLoadICDescriptor::VectorRegister(); // a3 | 4795 Register vector = VectorLoadICDescriptor::VectorRegister(); // a3 |
4819 Register slot = VectorLoadICDescriptor::SlotRegister(); // a0 | 4796 Register slot = VectorLoadICDescriptor::SlotRegister(); // a0 |
4820 Register feedback = a4; | 4797 Register feedback = a4; |
4821 Register scratch1 = a5; | 4798 Register receiver_map = a5; |
| 4799 Register scratch1 = a6; |
4822 | 4800 |
4823 __ SmiScale(feedback, slot, kPointerSizeLog2); | 4801 __ SmiScale(feedback, slot, kPointerSizeLog2); |
4824 __ Daddu(feedback, vector, Operand(feedback)); | 4802 __ Daddu(feedback, vector, Operand(feedback)); |
4825 __ ld(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize)); | 4803 __ ld(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize)); |
4826 | 4804 |
4827 // Is it a weak cell? | 4805 // Try to quickly handle the monomorphic case without knowing for sure |
4828 Label try_array; | 4806 // if we have a weak cell in feedback. We do know it's safe to look |
4829 Label not_array, smi_key, key_okay, miss; | 4807 // at WeakCell::kValueOffset. |
4830 __ ld(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset)); | 4808 Label try_array, load_smi_map, compare_map; |
4831 __ LoadRoot(at, Heap::kWeakCellMapRootIndex); | 4809 Label not_array, miss; |
4832 __ Branch(&try_array, ne, scratch1, Operand(at)); | 4810 HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector, slot, |
4833 __ JumpIfNotSmi(key, &miss); | 4811 scratch1, &compare_map, &load_smi_map, &try_array); |
4834 HandleMonomorphicCase(masm, receiver, key, vector, slot, feedback, scratch1, | |
4835 &miss); | |
4836 | 4812 |
4837 __ bind(&try_array); | 4813 __ bind(&try_array); |
4838 // Is it a fixed array? | 4814 // Is it a fixed array? |
| 4815 __ ld(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset)); |
4839 __ LoadRoot(at, Heap::kFixedArrayMapRootIndex); | 4816 __ LoadRoot(at, Heap::kFixedArrayMapRootIndex); |
4840 __ Branch(¬_array, ne, scratch1, Operand(at)); | 4817 __ Branch(¬_array, ne, scratch1, Operand(at)); |
4841 // We have a polymorphic element handler. | 4818 // We have a polymorphic element handler. |
4842 __ JumpIfNotSmi(key, &miss); | 4819 __ JumpIfNotSmi(key, &miss); |
4843 | 4820 |
4844 Label polymorphic, try_poly_name; | 4821 Label polymorphic, try_poly_name; |
4845 __ bind(&polymorphic); | 4822 __ bind(&polymorphic); |
4846 HandleArrayCases(masm, receiver, key, vector, slot, feedback, scratch1, a6, | 4823 HandleArrayCases(masm, receiver, key, vector, slot, feedback, receiver_map, |
4847 a7, true, &miss); | 4824 scratch1, a7, true, &miss); |
4848 | 4825 |
4849 __ bind(¬_array); | 4826 __ bind(¬_array); |
4850 // Is it generic? | 4827 // Is it generic? |
4851 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex); | 4828 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex); |
4852 __ Branch(&try_poly_name, ne, feedback, Operand(at)); | 4829 __ Branch(&try_poly_name, ne, feedback, Operand(at)); |
4853 Handle<Code> megamorphic_stub = | 4830 Handle<Code> megamorphic_stub = |
4854 KeyedLoadIC::ChooseMegamorphicStub(masm->isolate()); | 4831 KeyedLoadIC::ChooseMegamorphicStub(masm->isolate()); |
4855 __ Jump(megamorphic_stub, RelocInfo::CODE_TARGET); | 4832 __ Jump(megamorphic_stub, RelocInfo::CODE_TARGET); |
4856 | 4833 |
4857 __ bind(&try_poly_name); | 4834 __ bind(&try_poly_name); |
4858 // We might have a name in feedback, and a fixed array in the next slot. | 4835 // We might have a name in feedback, and a fixed array in the next slot. |
4859 __ Branch(&miss, ne, key, Operand(feedback)); | 4836 __ Branch(&miss, ne, key, Operand(feedback)); |
4860 // If the name comparison succeeded, we know we have a fixed array with | 4837 // If the name comparison succeeded, we know we have a fixed array with |
4861 // at least one map/handler pair. | 4838 // at least one map/handler pair. |
4862 __ SmiScale(feedback, slot, kPointerSizeLog2); | 4839 __ SmiScale(feedback, slot, kPointerSizeLog2); |
4863 __ Daddu(feedback, vector, Operand(feedback)); | 4840 __ Daddu(feedback, vector, Operand(feedback)); |
4864 __ ld(feedback, | 4841 __ ld(feedback, |
4865 FieldMemOperand(feedback, FixedArray::kHeaderSize + kPointerSize)); | 4842 FieldMemOperand(feedback, FixedArray::kHeaderSize + kPointerSize)); |
4866 HandleArrayCases(masm, receiver, key, vector, slot, feedback, scratch1, a6, | 4843 HandleArrayCases(masm, receiver, key, vector, slot, feedback, receiver_map, |
4867 a7, false, &miss); | 4844 scratch1, a7, false, &miss); |
4868 | 4845 |
4869 __ bind(&miss); | 4846 __ bind(&miss); |
4870 KeyedLoadIC::GenerateMiss(masm); | 4847 KeyedLoadIC::GenerateMiss(masm); |
| 4848 |
| 4849 __ bind(&load_smi_map); |
| 4850 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex); |
| 4851 __ Branch(&compare_map); |
4871 } | 4852 } |
4872 | 4853 |
4873 | 4854 |
4874 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { | 4855 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { |
4875 if (masm->isolate()->function_entry_hook() != NULL) { | 4856 if (masm->isolate()->function_entry_hook() != NULL) { |
4876 ProfileEntryHookStub stub(masm->isolate()); | 4857 ProfileEntryHookStub stub(masm->isolate()); |
4877 __ push(ra); | 4858 __ push(ra); |
4878 __ CallStub(&stub); | 4859 __ CallStub(&stub); |
4879 __ pop(ra); | 4860 __ pop(ra); |
4880 } | 4861 } |
(...skipping 675 matching lines...)
5556 kStackUnwindSpace, kInvalidStackOffset, | 5537 kStackUnwindSpace, kInvalidStackOffset, |
5557 MemOperand(fp, 6 * kPointerSize), NULL); | 5538 MemOperand(fp, 6 * kPointerSize), NULL); |
5558 } | 5539 } |
5559 | 5540 |
5560 | 5541 |
5561 #undef __ | 5542 #undef __ |
5562 | 5543 |
5563 } } // namespace v8::internal | 5544 } } // namespace v8::internal |
5564 | 5545 |
5565 #endif // V8_TARGET_ARCH_MIPS64 | 5546 #endif // V8_TARGET_ARCH_MIPS64 |
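
Note on the change above: the Smi-receiver handling is hoisted out of HandleMonomorphicCase and HandleArrayCases. Each GenerateImpl now loads the receiver map once (substituting the heap-number map at load_smi_map and branching back to compare_map), and the helpers take receiver_map as a parameter, with compare_map/load_smi_map/try_array passed as out-labels. The following is a minimal C++ sketch of the dispatch order the emitted code follows; FeedbackSlot, DispatchLoadIC, kMiss and the other names are hypothetical stand-ins for the heap layout, not V8 classes or APIs.

#include <utility>
#include <vector>

struct Map {};
struct WeakCell { const Map* value = nullptr; };  // nullptr models a cleared/absent cell
using Handler = int;                              // stand-in for a code entry point
constexpr Handler kMiss = -1;
constexpr Handler kMegamorphicStub = -2;

enum class FeedbackKind { kWeakCell, kPolymorphicArray, kMegamorphicSymbol };

struct FeedbackSlot {
  FeedbackKind kind = FeedbackKind::kWeakCell;
  WeakCell cell;                        // monomorphic: weak reference to the expected map
  Handler monomorphic_handler = kMiss;  // stored in the next vector slot in the real layout
  // Polymorphic: | wm0 | h0 | wm1 | h1 | ... as drawn in HandleArrayCases.
  std::vector<std::pair<WeakCell, Handler>> pairs;
};

// receiver_map: the heap-number map is substituted for Smi receivers,
// mirroring the load_smi_map tail that branches back to compare_map.
Handler DispatchLoadIC(const FeedbackSlot& feedback, const Map* receiver_map) {
  // 1. Monomorphic fast path, tried speculatively: the stub compares the
  //    value at WeakCell::kValueOffset without first checking the feedback's
  //    map, because that read is safe for any of the feedback shapes. Here
  //    cell.value is non-null only for a genuine weak cell, so the compare
  //    can only succeed in the monomorphic case (receiver_map is never null).
  if (feedback.cell.value == receiver_map) {
    return feedback.monomorphic_handler;
  }
  // 2. try_array: walk the (weak map, handler) pairs, as next_loop does,
  //    two pointer-sized slots at a time.
  if (feedback.kind == FeedbackKind::kPolymorphicArray) {
    for (const auto& [cell, handler] : feedback.pairs) {
      if (cell.value == receiver_map) return handler;
    }
    return kMiss;  // exhausted the map/handler pairs
  }
  // 3. not_array: the megamorphic sentinel symbol routes to the stub cache
  //    probe (modeled by a single sentinel handler); anything else misses.
  return feedback.kind == FeedbackKind::kMegamorphicSymbol ? kMegamorphicStub
                                                           : kMiss;
}

The keyed variant follows the same three tiers but also bails out to miss for non-Smi keys and, when the feedback holds the key's name, loads a second map/handler array from the following vector slot (the try_poly_name path).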