OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #if V8_TARGET_ARCH_MIPS64 | 7 #if V8_TARGET_ARCH_MIPS64 |
8 | 8 |
9 #include "src/bootstrapper.h" | 9 #include "src/bootstrapper.h" |
10 #include "src/code-stubs.h" | 10 #include "src/code-stubs.h" |
(...skipping 4620 matching lines...) |
4631 } | 4631 } |
4632 | 4632 |
4633 | 4633 |
4634 void VectorRawLoadStub::GenerateForTrampoline(MacroAssembler* masm) { | 4634 void VectorRawLoadStub::GenerateForTrampoline(MacroAssembler* masm) { |
4635 GenerateImpl(masm, true); | 4635 GenerateImpl(masm, true); |
4636 } | 4636 } |
4637 | 4637 |
4638 | 4638 |
4639 static void HandleArrayCases(MacroAssembler* masm, Register receiver, | 4639 static void HandleArrayCases(MacroAssembler* masm, Register receiver, |
4640 Register key, Register vector, Register slot, | 4640 Register key, Register vector, Register slot, |
4641 Register feedback, Register scratch1, | 4641 Register feedback, Register receiver_map, |
4642 Register scratch2, Register scratch3, | 4642 Register scratch1, Register scratch2, |
4643 bool is_polymorphic, Label* miss) { | 4643 bool is_polymorphic, Label* miss) { |
4644 // feedback initially contains the feedback array | 4644 // feedback initially contains the feedback array |
4645 Label next_loop, prepare_next; | 4645 Label next_loop, prepare_next; |
4646 Label load_smi_map, compare_map; | |
4647 Label start_polymorphic; | 4646 Label start_polymorphic; |
4648 | 4647 |
4649 Register receiver_map = scratch1; | 4648 Register cached_map = scratch1; |
4650 Register cached_map = scratch2; | |
4651 | 4649 |
4652 // Receiver might not be a heap object. | |
4653 __ JumpIfSmi(receiver, &load_smi_map); | |
4654 __ ld(receiver_map, FieldMemOperand(receiver, HeapObject::kMapOffset)); | |
4655 __ bind(&compare_map); | |
4656 __ ld(cached_map, | 4650 __ ld(cached_map, |
4657 FieldMemOperand(feedback, FixedArray::OffsetOfElementAt(0))); | 4651 FieldMemOperand(feedback, FixedArray::OffsetOfElementAt(0))); |
4658 __ ld(cached_map, FieldMemOperand(cached_map, WeakCell::kValueOffset)); | 4652 __ ld(cached_map, FieldMemOperand(cached_map, WeakCell::kValueOffset)); |
4659 __ Branch(&start_polymorphic, ne, receiver_map, Operand(cached_map)); | 4653 __ Branch(&start_polymorphic, ne, receiver_map, Operand(cached_map)); |
4660 // found, now call handler. | 4654 // found, now call handler. |
4661 Register handler = feedback; | 4655 Register handler = feedback; |
4662 __ ld(handler, FieldMemOperand(feedback, FixedArray::OffsetOfElementAt(1))); | 4656 __ ld(handler, FieldMemOperand(feedback, FixedArray::OffsetOfElementAt(1))); |
4663 __ Daddu(t9, handler, Operand(Code::kHeaderSize - kHeapObjectTag)); | 4657 __ Daddu(t9, handler, Operand(Code::kHeaderSize - kHeapObjectTag)); |
4664 __ Jump(t9); | 4658 __ Jump(t9); |
4665 | 4659 |
4666 Register length = scratch3; | 4660 Register length = scratch2; |
4667 __ bind(&start_polymorphic); | 4661 __ bind(&start_polymorphic); |
4668 __ ld(length, FieldMemOperand(feedback, FixedArray::kLengthOffset)); | 4662 __ ld(length, FieldMemOperand(feedback, FixedArray::kLengthOffset)); |
4669 if (!is_polymorphic) { | 4663 if (!is_polymorphic) { |
4670 // If the IC could be monomorphic we have to make sure we don't go past the | 4664 // If the IC could be monomorphic we have to make sure we don't go past the |
4671 // end of the feedback array. | 4665 // end of the feedback array. |
4672 __ Branch(miss, eq, length, Operand(Smi::FromInt(2))); | 4666 __ Branch(miss, eq, length, Operand(Smi::FromInt(2))); |
4673 } | 4667 } |
4674 | 4668 |
4675 Register too_far = length; | 4669 Register too_far = length; |
4676 Register pointer_reg = feedback; | 4670 Register pointer_reg = feedback; |
4677 | 4671 |
4678 // +-----+------+------+-----+-----+ ... ----+ | 4672 // +-----+------+------+-----+-----+ ... ----+ |
4679 // | map | len | wm0 | h0 | wm1 | hN | | 4673 // | map | len | wm0 | h0 | wm1 | hN | |
4680 // +-----+------+------+-----+-----+ ... ----+ | 4674 // +-----+------+------+-----+-----+ ... ----+ |
4681 // 0 1 2 len-1 | 4675 // 0 1 2 len-1 |
4682 // ^ ^ | 4676 // ^ ^ |
4683 // | | | 4677 // | | |
4684 // pointer_reg too_far | 4678 // pointer_reg too_far |
4685 // aka feedback scratch3 | 4679 // aka feedback scratch2 |
4686 // also need receiver_map (aka scratch1) | 4680 // also need receiver_map |
4687 // use cached_map (scratch2) to look in the weak map values. | 4681 // use cached_map (scratch1) to look in the weak map values. |
4688 __ SmiScale(too_far, length, kPointerSizeLog2); | 4682 __ SmiScale(too_far, length, kPointerSizeLog2); |
4689 __ Daddu(too_far, feedback, Operand(too_far)); | 4683 __ Daddu(too_far, feedback, Operand(too_far)); |
4690 __ Daddu(too_far, too_far, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | 4684 __ Daddu(too_far, too_far, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
4691 __ Daddu(pointer_reg, feedback, | 4685 __ Daddu(pointer_reg, feedback, |
4692 Operand(FixedArray::OffsetOfElementAt(2) - kHeapObjectTag)); | 4686 Operand(FixedArray::OffsetOfElementAt(2) - kHeapObjectTag)); |
4693 | 4687 |
4694 __ bind(&next_loop); | 4688 __ bind(&next_loop); |
4695 __ ld(cached_map, MemOperand(pointer_reg)); | 4689 __ ld(cached_map, MemOperand(pointer_reg)); |
4696 __ ld(cached_map, FieldMemOperand(cached_map, WeakCell::kValueOffset)); | 4690 __ ld(cached_map, FieldMemOperand(cached_map, WeakCell::kValueOffset)); |
4697 __ Branch(&prepare_next, ne, receiver_map, Operand(cached_map)); | 4691 __ Branch(&prepare_next, ne, receiver_map, Operand(cached_map)); |
4698 __ ld(handler, MemOperand(pointer_reg, kPointerSize)); | 4692 __ ld(handler, MemOperand(pointer_reg, kPointerSize)); |
4699 __ Daddu(t9, handler, Operand(Code::kHeaderSize - kHeapObjectTag)); | 4693 __ Daddu(t9, handler, Operand(Code::kHeaderSize - kHeapObjectTag)); |
4700 __ Jump(t9); | 4694 __ Jump(t9); |
4701 | 4695 |
4702 __ bind(&prepare_next); | 4696 __ bind(&prepare_next); |
4703 __ Daddu(pointer_reg, pointer_reg, Operand(kPointerSize * 2)); | 4697 __ Daddu(pointer_reg, pointer_reg, Operand(kPointerSize * 2)); |
4704 __ Branch(&next_loop, lt, pointer_reg, Operand(too_far)); | 4698 __ Branch(&next_loop, lt, pointer_reg, Operand(too_far)); |
4705 | 4699 |
4706 // We exhausted our array of map handler pairs. | 4700 // We exhausted our array of map handler pairs. |
4707 __ Branch(miss); | 4701 __ Branch(miss); |
4708 | |
4709 __ bind(&load_smi_map); | |
4710 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex); | |
4711 __ Branch(&compare_map); | |
4712 } | 4702 } |
4713 | 4703 |
4714 | 4704 |
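Reviewer note for context: the FixedArray that HandleArrayCases walks stores (weak-cell-of-map, handler) pairs in the layout sketched in the ASCII diagram above. A minimal C++ model of the lookup the stub emits, using illustrative stand-in types rather than the real V8 classes:

    #include <cstddef>

    struct WeakCellRef { const void* value; };                  // a cleared cell reads as null
    struct MapHandlerPair { WeakCellRef map; const void* handler; };

    // Mirrors the emitted code: check pair 0 first, then (unless the IC may still
    // be monomorphic and only one pair is stored) walk the remaining pairs.
    const void* LookupArrayCases(const MapHandlerPair* pairs, size_t pair_count,
                                 const void* receiver_map, bool is_polymorphic) {
      if (pairs[0].map.value == receiver_map) return pairs[0].handler;
      if (!is_polymorphic && pair_count == 1) return nullptr;   // mirrors the Smi::FromInt(2) length guard
      for (size_t i = 1; i < pair_count; ++i) {                 // next_loop / prepare_next
        if (pairs[i].map.value == receiver_map) return pairs[i].handler;
      }
      return nullptr;                                           // exhausted the pairs -> miss
    }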
4715 static void HandleMonomorphicCase(MacroAssembler* masm, Register receiver, | 4705 static void HandleMonomorphicCase(MacroAssembler* masm, Register receiver, |
4716 Register key, Register vector, Register slot, | 4706 Register receiver_map, Register feedback, |
4717 Register weak_cell, Register scratch, | 4707 Register vector, Register slot, |
4718 Label* miss) { | 4708 Register scratch, Label* compare_map, |
4719 // feedback initially contains the feedback array | 4709 Label* load_smi_map, Label* try_array) { |
4720 Label compare_smi_map; | 4710 __ JumpIfSmi(receiver, load_smi_map); |
4721 Register receiver_map = scratch; | 4711 __ ld(receiver_map, FieldMemOperand(receiver, HeapObject::kMapOffset)); |
4722 Register cached_map = weak_cell; | 4712 __ bind(compare_map); |
4723 | 4713 Register cached_map = scratch; |
4724 // Move the weak map into the weak_cell register. | 4714 // Move the weak map into the weak_cell register. |
4725 __ ld(cached_map, FieldMemOperand(weak_cell, WeakCell::kValueOffset)); | 4715 __ ld(cached_map, FieldMemOperand(feedback, WeakCell::kValueOffset)); |
4726 | 4716 __ Branch(try_array, ne, cached_map, Operand(receiver_map)); |
4727 // Receiver might not be a heap object. | 4717 Register handler = feedback; |
4728 __ JumpIfSmi(receiver, &compare_smi_map); | |
4729 __ ld(receiver_map, FieldMemOperand(receiver, HeapObject::kMapOffset)); | |
4730 __ Branch(miss, ne, cached_map, Operand(receiver_map)); | |
4731 | |
4732 Register handler = weak_cell; | |
4733 __ SmiScale(handler, slot, kPointerSizeLog2); | 4718 __ SmiScale(handler, slot, kPointerSizeLog2); |
4734 __ Daddu(handler, vector, Operand(handler)); | 4719 __ Daddu(handler, vector, Operand(handler)); |
4735 __ ld(handler, | 4720 __ ld(handler, |
4736 FieldMemOperand(handler, FixedArray::kHeaderSize + kPointerSize)); | |
4737 __ Daddu(t9, handler, Code::kHeaderSize - kHeapObjectTag); | |
4738 __ Jump(t9); | |
4739 | |
4740 // In microbenchmarks, it made sense to unroll this code so that the call to | |
4741 // the handler is duplicated for a HeapObject receiver and a Smi receiver. | |
4742 // TODO(mvstanton): does this hold on ARM? | |
4743 __ bind(&compare_smi_map); | |
4744 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex); | |
4745 __ Branch(miss, ne, weak_cell, Operand(at)); | |
4746 __ SmiScale(handler, slot, kPointerSizeLog2); | |
4747 __ Daddu(handler, vector, Operand(handler)); | |
4748 __ ld(handler, | |
4749 FieldMemOperand(handler, FixedArray::kHeaderSize + kPointerSize)); | 4721 FieldMemOperand(handler, FixedArray::kHeaderSize + kPointerSize)); |
4750 __ Daddu(t9, handler, Code::kHeaderSize - kHeapObjectTag); | 4722 __ Daddu(t9, handler, Code::kHeaderSize - kHeapObjectTag); |
4751 __ Jump(t9); | 4723 __ Jump(t9); |
4752 } | 4724 } |
4753 | 4725 |
4754 | 4726 |
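The new HandleMonomorphicCase above no longer performs its own Smi check; the caller supplies the compare_map/load_smi_map labels and receives the already-loaded receiver map. In plain C++ terms the fast path reduces to roughly the following sketch (illustrative types, not the real V8 API):

    struct WeakCell { const void* value; };

    // vector[slot] holds the weak cell; the handler code object sits in the next
    // element, which is why the stub reads kPointerSize past FixedArray::kHeaderSize.
    const void* MonomorphicHandler(const void* receiver_map, const WeakCell* cell,
                                   const void* const* vector_elements, int slot) {
      if (cell->value != receiver_map) return nullptr;   // branch to try_array
      return vector_elements[slot + 1];                  // tail-called handler
    }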
4755 void VectorRawLoadStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { | 4727 void VectorRawLoadStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { |
4756 Register receiver = VectorLoadICDescriptor::ReceiverRegister(); // a1 | 4728 Register receiver = VectorLoadICDescriptor::ReceiverRegister(); // a1 |
4757 Register name = VectorLoadICDescriptor::NameRegister(); // a2 | 4729 Register name = VectorLoadICDescriptor::NameRegister(); // a2 |
4758 Register vector = VectorLoadICDescriptor::VectorRegister(); // a3 | 4730 Register vector = VectorLoadICDescriptor::VectorRegister(); // a3 |
4759 Register slot = VectorLoadICDescriptor::SlotRegister(); // a0 | 4731 Register slot = VectorLoadICDescriptor::SlotRegister(); // a0 |
4760 Register feedback = a4; | 4732 Register feedback = a4; |
4761 Register scratch1 = a5; | 4733 Register receiver_map = a5; |
| 4734 Register scratch1 = a6; |
4762 | 4735 |
4763 __ SmiScale(feedback, slot, kPointerSizeLog2); | 4736 __ SmiScale(feedback, slot, kPointerSizeLog2); |
4764 __ Daddu(feedback, vector, Operand(feedback)); | 4737 __ Daddu(feedback, vector, Operand(feedback)); |
4765 __ ld(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize)); | 4738 __ ld(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize)); |
4766 | 4739 |
4767 // Is it a weak cell? | 4740 // Try to quickly handle the monomorphic case without knowing for sure |
4768 Label try_array; | 4741 // if we have a weak cell in feedback. We do know it's safe to look |
4769 Label not_array, smi_key, key_okay, miss; | 4742 // at WeakCell::kValueOffset. |
4770 __ ld(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset)); | 4743 Label try_array, load_smi_map, compare_map; |
4771 __ LoadRoot(at, Heap::kWeakCellMapRootIndex); | 4744 Label not_array, miss; |
4772 __ Branch(&try_array, ne, scratch1, Operand(at)); | 4745 HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector, slot, |
4773 HandleMonomorphicCase(masm, receiver, name, vector, slot, feedback, scratch1, | 4746 scratch1, &compare_map, &load_smi_map, &try_array); |
4774 &miss); | |
4775 | 4747 |
4776 // Is it a fixed array? | 4748 // Is it a fixed array? |
4777 __ bind(&try_array); | 4749 __ bind(&try_array); |
| 4750 __ ld(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset)); |
4778 __ LoadRoot(at, Heap::kFixedArrayMapRootIndex); | 4751 __ LoadRoot(at, Heap::kFixedArrayMapRootIndex); |
4779 __ Branch(¬_array, ne, scratch1, Operand(at)); | 4752 __ Branch(¬_array, ne, scratch1, Operand(at)); |
4780 HandleArrayCases(masm, receiver, name, vector, slot, feedback, scratch1, a6, | 4753 HandleArrayCases(masm, receiver, name, vector, slot, feedback, receiver_map, |
4781 a7, true, &miss); | 4754 scratch1, a7, true, &miss); |
4782 | 4755 |
4783 __ bind(¬_array); | 4756 __ bind(¬_array); |
4784 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex); | 4757 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex); |
4785 __ Branch(&miss, ne, feedback, Operand(at)); | 4758 __ Branch(&miss, ne, feedback, Operand(at)); |
4786 Code::Flags code_flags = Code::RemoveTypeAndHolderFromFlags( | 4759 Code::Flags code_flags = Code::RemoveTypeAndHolderFromFlags( |
4787 Code::ComputeHandlerFlags(Code::LOAD_IC)); | 4760 Code::ComputeHandlerFlags(Code::LOAD_IC)); |
4788 masm->isolate()->stub_cache()->GenerateProbe(masm, Code::LOAD_IC, code_flags, | 4761 masm->isolate()->stub_cache()->GenerateProbe(masm, Code::LOAD_IC, code_flags, |
4789 false, receiver, name, feedback, | 4762 false, receiver, name, feedback, |
4790 scratch1, a6, a7); | 4763 receiver_map, scratch1, a7); |
4791 | 4764 |
4792 __ bind(&miss); | 4765 __ bind(&miss); |
4793 LoadIC::GenerateMiss(masm); | 4766 LoadIC::GenerateMiss(masm); |
| 4767 |
| 4768 __ bind(&load_smi_map); |
| 4769 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex); |
| 4770 __ Branch(&compare_map); |
4794 } | 4771 } |
4795 | 4772 |
4796 | 4773 |
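Taken together, the rewritten GenerateImpl tries the cases in a fixed order and shares a single Smi fixup tail (load_smi_map) between the monomorphic and array paths. A rough, compilable outline of that order; every type and flag here is an illustrative stand-in, the real stub emits this as MIPS64 assembly:

    enum class LoadDispatch { kTailCallHandler, kProbeStubCache, kMiss };

    LoadDispatch DispatchVectorLoad(bool weak_cell_matches_receiver_map,
                                    bool feedback_is_fixed_array,
                                    bool array_lookup_hit,
                                    bool feedback_is_megamorphic_symbol) {
      // 1. Speculative monomorphic path: treat feedback as a weak cell and
      //    compare it against the receiver map (heap number map for Smis).
      if (weak_cell_matches_receiver_map) return LoadDispatch::kTailCallHandler;
      // 2. try_array: a FixedArray means a polymorphic map/handler list.
      if (feedback_is_fixed_array)
        return array_lookup_hit ? LoadDispatch::kTailCallHandler
                                : LoadDispatch::kMiss;
      // 3. not_array: the megamorphic sentinel falls back to the stub cache.
      if (feedback_is_megamorphic_symbol) return LoadDispatch::kProbeStubCache;
      // 4. Anything else is handled by LoadIC::GenerateMiss.
      return LoadDispatch::kMiss;
    }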
4797 void VectorRawKeyedLoadStub::Generate(MacroAssembler* masm) { | 4774 void VectorRawKeyedLoadStub::Generate(MacroAssembler* masm) { |
4798 GenerateImpl(masm, false); | 4775 GenerateImpl(masm, false); |
4799 } | 4776 } |
4800 | 4777 |
4801 | 4778 |
4802 void VectorRawKeyedLoadStub::GenerateForTrampoline(MacroAssembler* masm) { | 4779 void VectorRawKeyedLoadStub::GenerateForTrampoline(MacroAssembler* masm) { |
4803 GenerateImpl(masm, true); | 4780 GenerateImpl(masm, true); |
4804 } | 4781 } |
4805 | 4782 |
4806 | 4783 |
4807 void VectorRawKeyedLoadStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { | 4784 void VectorRawKeyedLoadStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { |
4808 Register receiver = VectorLoadICDescriptor::ReceiverRegister(); // a1 | 4785 Register receiver = VectorLoadICDescriptor::ReceiverRegister(); // a1 |
4809 Register key = VectorLoadICDescriptor::NameRegister(); // a2 | 4786 Register key = VectorLoadICDescriptor::NameRegister(); // a2 |
4810 Register vector = VectorLoadICDescriptor::VectorRegister(); // a3 | 4787 Register vector = VectorLoadICDescriptor::VectorRegister(); // a3 |
4811 Register slot = VectorLoadICDescriptor::SlotRegister(); // a0 | 4788 Register slot = VectorLoadICDescriptor::SlotRegister(); // a0 |
4812 Register feedback = a4; | 4789 Register feedback = a4; |
4813 Register scratch1 = a5; | 4790 Register receiver_map = a5; |
| 4791 Register scratch1 = a6; |
4814 | 4792 |
4815 __ SmiScale(feedback, slot, kPointerSizeLog2); | 4793 __ SmiScale(feedback, slot, kPointerSizeLog2); |
4816 __ Daddu(feedback, vector, Operand(feedback)); | 4794 __ Daddu(feedback, vector, Operand(feedback)); |
4817 __ ld(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize)); | 4795 __ ld(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize)); |
4818 | 4796 |
4819 // Is it a weak cell? | 4797 // Try to quickly handle the monomorphic case without knowing for sure |
4820 Label try_array; | 4798 // if we have a weak cell in feedback. We do know it's safe to look |
4821 Label not_array, smi_key, key_okay, miss; | 4799 // at WeakCell::kValueOffset. |
4822 __ ld(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset)); | 4800 Label try_array, load_smi_map, compare_map; |
4823 __ LoadRoot(at, Heap::kWeakCellMapRootIndex); | 4801 Label not_array, miss; |
4824 __ Branch(&try_array, ne, scratch1, Operand(at)); | 4802 HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector, slot, |
4825 __ JumpIfNotSmi(key, &miss); | 4803 scratch1, &compare_map, &load_smi_map, &try_array); |
4826 HandleMonomorphicCase(masm, receiver, key, vector, slot, feedback, scratch1, | |
4827 &miss); | |
4828 | 4804 |
4829 __ bind(&try_array); | 4805 __ bind(&try_array); |
4830 // Is it a fixed array? | 4806 // Is it a fixed array? |
| 4807 __ ld(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset)); |
4831 __ LoadRoot(at, Heap::kFixedArrayMapRootIndex); | 4808 __ LoadRoot(at, Heap::kFixedArrayMapRootIndex); |
4832 __ Branch(¬_array, ne, scratch1, Operand(at)); | 4809 __ Branch(¬_array, ne, scratch1, Operand(at)); |
4833 // We have a polymorphic element handler. | 4810 // We have a polymorphic element handler. |
4834 __ JumpIfNotSmi(key, &miss); | 4811 __ JumpIfNotSmi(key, &miss); |
4835 | 4812 |
4836 Label polymorphic, try_poly_name; | 4813 Label polymorphic, try_poly_name; |
4837 __ bind(&polymorphic); | 4814 __ bind(&polymorphic); |
4838 HandleArrayCases(masm, receiver, key, vector, slot, feedback, scratch1, a6, | 4815 HandleArrayCases(masm, receiver, key, vector, slot, feedback, receiver_map, |
4839 a7, true, &miss); | 4816 scratch1, a7, true, &miss); |
4840 | 4817 |
4841 __ bind(¬_array); | 4818 __ bind(¬_array); |
4842 // Is it generic? | 4819 // Is it generic? |
4843 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex); | 4820 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex); |
4844 __ Branch(&try_poly_name, ne, feedback, Operand(at)); | 4821 __ Branch(&try_poly_name, ne, feedback, Operand(at)); |
4845 Handle<Code> megamorphic_stub = | 4822 Handle<Code> megamorphic_stub = |
4846 KeyedLoadIC::ChooseMegamorphicStub(masm->isolate()); | 4823 KeyedLoadIC::ChooseMegamorphicStub(masm->isolate()); |
4847 __ Jump(megamorphic_stub, RelocInfo::CODE_TARGET); | 4824 __ Jump(megamorphic_stub, RelocInfo::CODE_TARGET); |
4848 | 4825 |
4849 __ bind(&try_poly_name); | 4826 __ bind(&try_poly_name); |
4850 // We might have a name in feedback, and a fixed array in the next slot. | 4827 // We might have a name in feedback, and a fixed array in the next slot. |
4851 __ Branch(&miss, ne, key, Operand(feedback)); | 4828 __ Branch(&miss, ne, key, Operand(feedback)); |
4852 // If the name comparison succeeded, we know we have a fixed array with | 4829 // If the name comparison succeeded, we know we have a fixed array with |
4853 // at least one map/handler pair. | 4830 // at least one map/handler pair. |
4854 __ SmiScale(feedback, slot, kPointerSizeLog2); | 4831 __ SmiScale(feedback, slot, kPointerSizeLog2); |
4855 __ Daddu(feedback, vector, Operand(feedback)); | 4832 __ Daddu(feedback, vector, Operand(feedback)); |
4856 __ ld(feedback, | 4833 __ ld(feedback, |
4857 FieldMemOperand(feedback, FixedArray::kHeaderSize + kPointerSize)); | 4834 FieldMemOperand(feedback, FixedArray::kHeaderSize + kPointerSize)); |
4858 HandleArrayCases(masm, receiver, key, vector, slot, feedback, scratch1, a6, | 4835 HandleArrayCases(masm, receiver, key, vector, slot, feedback, receiver_map, |
4859 a7, false, &miss); | 4836 scratch1, a7, false, &miss); |
4860 | 4837 |
4861 __ bind(&miss); | 4838 __ bind(&miss); |
4862 KeyedLoadIC::GenerateMiss(masm); | 4839 KeyedLoadIC::GenerateMiss(masm); |
| 4840 |
| 4841 __ bind(&load_smi_map); |
| 4842 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex); |
| 4843 __ Branch(&compare_map); |
4863 } | 4844 } |
4864 | 4845 |
4865 | 4846 |
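The keyed variant follows the same order but adds a Smi check on the key before taking the polymorphic element path, plus one extra fallback, try_poly_name: when the feedback slot holds the property name itself, the map/handler array for that name lives in the following vector slot. A small illustrative sketch (stand-in types, not the real V8 API):

    // Returns the FixedArray of map/handler pairs for a named keyed load, or
    // nullptr to signal a miss, mirroring the try_poly_name block above.
    const void* NamedKeyedFeedback(const void* const* vector_elements, int slot,
                                   const void* key, const void* feedback) {
      if (feedback != key) return nullptr;   // names differ -> miss
      return vector_elements[slot + 1];      // handed to HandleArrayCases(..., false, ...)
    }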
4866 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { | 4847 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { |
4867 if (masm->isolate()->function_entry_hook() != NULL) { | 4848 if (masm->isolate()->function_entry_hook() != NULL) { |
4868 ProfileEntryHookStub stub(masm->isolate()); | 4849 ProfileEntryHookStub stub(masm->isolate()); |
4869 __ push(ra); | 4850 __ push(ra); |
4870 __ CallStub(&stub); | 4851 __ CallStub(&stub); |
4871 __ pop(ra); | 4852 __ pop(ra); |
4872 } | 4853 } |
(...skipping 675 matching lines...) |
5548 kStackUnwindSpace, kInvalidStackOffset, | 5529 kStackUnwindSpace, kInvalidStackOffset, |
5549 MemOperand(fp, 6 * kPointerSize), NULL); | 5530 MemOperand(fp, 6 * kPointerSize), NULL); |
5550 } | 5531 } |
5551 | 5532 |
5552 | 5533 |
5553 #undef __ | 5534 #undef __ |
5554 | 5535 |
5555 } } // namespace v8::internal | 5536 } } // namespace v8::internal |
5556 | 5537 |
5557 #endif // V8_TARGET_ARCH_MIPS64 | 5538 #endif // V8_TARGET_ARCH_MIPS64 |