OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_MIPS | 5 #if V8_TARGET_ARCH_MIPS |
6 | 6 |
7 #include "src/code-stubs.h" | 7 #include "src/code-stubs.h" |
8 #include "src/api-arguments.h" | 8 #include "src/api-arguments.h" |
9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
10 #include "src/bootstrapper.h" | 10 #include "src/bootstrapper.h" |
(...skipping 1097 matching lines...) | |
1108 Label zero; | 1108 Label zero; |
1109 __ Branch(&zero, eq, cp, Operand(zero_reg)); | 1109 __ Branch(&zero, eq, cp, Operand(zero_reg)); |
1110 __ sw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 1110 __ sw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
1111 __ bind(&zero); | 1111 __ bind(&zero); |
1112 | 1112 |
1113 // Compute the handler entry address and jump to it. | 1113 // Compute the handler entry address and jump to it. |
1114 __ li(a1, Operand(pending_handler_code_address)); | 1114 __ li(a1, Operand(pending_handler_code_address)); |
1115 __ lw(a1, MemOperand(a1)); | 1115 __ lw(a1, MemOperand(a1)); |
1116 __ li(a2, Operand(pending_handler_offset_address)); | 1116 __ li(a2, Operand(pending_handler_offset_address)); |
1117 __ lw(a2, MemOperand(a2)); | 1117 __ lw(a2, MemOperand(a2)); |
1118 __ Addu(a1, a1, Operand(Code::kHeaderSize - kHeapObjectTag)); | 1118 if (IsMipsArchVariant(kMips32r6)) { |
1119 __ Addu(t9, a1, a2); | 1119 __ Addu(t9, a1, a2); |
ivica.bogosavljevic
2016/12/05 10:48:47
Can we move these common fragments of code with if
miran.karic
2016/12/05 12:44:48
Yes, as I commented in MacroAssembler, perhaps I c
| |
1120 __ Jump(t9); | 1120 __ Jump(t9, Code::kHeaderSize - kHeapObjectTag); |
1121 } else { | |
1122 __ Addu(a1, a1, Operand(Code::kHeaderSize - kHeapObjectTag)); | |
1123 __ Addu(t9, a1, a2); | |
1124 __ Jump(t9); | |
1125 } | |
1121 } | 1126 } |
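The exchange above asks whether the r6/pre-r6 split that this CL repeats at every call site could live inside MacroAssembler instead. A minimal sketch of that idea, assuming a hypothetical JumpWithOffset helper (the name and placement are illustrative and not part of this CL; only IsMipsArchVariant, Jump, Addu and Operand are taken from the diff itself):

  // Hypothetical helper, sketched from the pattern introduced in this CL.
  // On r6 the small constant offset can be folded into the jump via the
  // Jump(Register, int) overload, so the separate Addu is only needed on
  // pre-r6 cores.
  void MacroAssembler::JumpWithOffset(Register target, int16_t offset) {
    if (IsMipsArchVariant(kMips32r6)) {
      Jump(target, offset);  // r6: offset handled by the Jump overload.
    } else {
      Addu(target, target, Operand(offset));  // pre-r6: add the offset first,
      Jump(target);                           // then jump through the register.
    }
  }

With a helper along these lines (plus a matching Call variant), each of the call sites changed in this file would collapse back to a single Jump/Call taking an offset, independent of the architecture variant.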
1122 | 1127 |
1123 | 1128 |
1124 void JSEntryStub::Generate(MacroAssembler* masm) { | 1129 void JSEntryStub::Generate(MacroAssembler* masm) { |
1125 Label invoke, handler_entry, exit; | 1130 Label invoke, handler_entry, exit; |
1126 Isolate* isolate = masm->isolate(); | 1131 Isolate* isolate = masm->isolate(); |
1127 | 1132 |
1128 // Registers: | 1133 // Registers: |
1129 // a0: entry address | 1134 // a0: entry address |
1130 // a1: function | 1135 // a1: function |
(...skipping 112 matching lines...) | |
1243 ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline, | 1248 ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline, |
1244 isolate); | 1249 isolate); |
1245 __ li(t0, Operand(construct_entry)); | 1250 __ li(t0, Operand(construct_entry)); |
1246 } else { | 1251 } else { |
1247 ExternalReference entry(Builtins::kJSEntryTrampoline, masm->isolate()); | 1252 ExternalReference entry(Builtins::kJSEntryTrampoline, masm->isolate()); |
1248 __ li(t0, Operand(entry)); | 1253 __ li(t0, Operand(entry)); |
1249 } | 1254 } |
1250 __ lw(t9, MemOperand(t0)); // Deref address. | 1255 __ lw(t9, MemOperand(t0)); // Deref address. |
1251 | 1256 |
1252 // Call JSEntryTrampoline. | 1257 // Call JSEntryTrampoline. |
1253 __ addiu(t9, t9, Code::kHeaderSize - kHeapObjectTag); | 1258 if (IsMipsArchVariant(kMips32r6)) { |
1254 __ Call(t9); | 1259 __ Call(t9, Code::kHeaderSize - kHeapObjectTag); |
ivica.bogosavljevic
2016/12/05 10:48:47
Same as above
| |
1260 } else { | |
1261 __ addiu(t9, t9, Code::kHeaderSize - kHeapObjectTag); | |
1262 __ Call(t9); | |
1263 } | |
1255 | 1264 |
1256 // Unlink this frame from the handler chain. | 1265 // Unlink this frame from the handler chain. |
1257 __ PopStackHandler(); | 1266 __ PopStackHandler(); |
1258 | 1267 |
1259 __ bind(&exit); // v0 holds result | 1268 __ bind(&exit); // v0 holds result |
1260 // Check if the current stack frame is marked as the outermost JS frame. | 1269 // Check if the current stack frame is marked as the outermost JS frame. |
1261 Label non_outermost_js_2; | 1270 Label non_outermost_js_2; |
1262 __ pop(t1); | 1271 __ pop(t1); |
1263 __ Branch(&non_outermost_js_2, | 1272 __ Branch(&non_outermost_js_2, |
1264 ne, | 1273 ne, |
(...skipping 615 matching lines...) | |
1880 | 1889 |
1881 __ AssertUndefinedOrAllocationSite(a2, t1); | 1890 __ AssertUndefinedOrAllocationSite(a2, t1); |
1882 | 1891 |
1883 // Pass function as new target. | 1892 // Pass function as new target. |
1884 __ mov(a3, a1); | 1893 __ mov(a3, a1); |
1885 | 1894 |
1886 // Tail call to the function-specific construct stub (still in the caller | 1895 // Tail call to the function-specific construct stub (still in the caller |
1887 // context at this point). | 1896 // context at this point). |
1888 __ lw(t0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); | 1897 __ lw(t0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); |
1889 __ lw(t0, FieldMemOperand(t0, SharedFunctionInfo::kConstructStubOffset)); | 1898 __ lw(t0, FieldMemOperand(t0, SharedFunctionInfo::kConstructStubOffset)); |
1890 __ Addu(at, t0, Operand(Code::kHeaderSize - kHeapObjectTag)); | 1899 if (IsMipsArchVariant(kMips32r6)) { |
1891 __ Jump(at); | 1900 __ Jump(t0, Code::kHeaderSize - kHeapObjectTag); |
1901 } else { | |
1902 __ Addu(at, t0, Operand(Code::kHeaderSize - kHeapObjectTag)); | |
1903 __ Jump(at); | |
1904 } | |
1892 | 1905 |
1893 __ bind(&non_function); | 1906 __ bind(&non_function); |
1894 __ mov(a3, a1); | 1907 __ mov(a3, a1); |
1895 __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); | 1908 __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); |
1896 } | 1909 } |
1897 | 1910 |
1898 // Note: feedback_vector and slot are clobbered after the call. | 1911 // Note: feedback_vector and slot are clobbered after the call. |
1899 static void IncrementCallCount(MacroAssembler* masm, Register feedback_vector, | 1912 static void IncrementCallCount(MacroAssembler* masm, Register feedback_vector, |
1900 Register slot) { | 1913 Register slot) { |
1901 __ Lsa(at, feedback_vector, slot, kPointerSizeLog2 - kSmiTagSize); | 1914 __ Lsa(at, feedback_vector, slot, kPointerSizeLog2 - kSmiTagSize); |
(...skipping 862 matching lines...) | |
2764 // Call the runtime system in a fresh internal frame. | 2777 // Call the runtime system in a fresh internal frame. |
2765 FrameScope scope(masm, StackFrame::INTERNAL); | 2778 FrameScope scope(masm, StackFrame::INTERNAL); |
2766 __ Push(a1, a0); | 2779 __ Push(a1, a0); |
2767 __ Push(ra, a1, a0); | 2780 __ Push(ra, a1, a0); |
2768 __ li(t0, Operand(Smi::FromInt(op()))); | 2781 __ li(t0, Operand(Smi::FromInt(op()))); |
2769 __ addiu(sp, sp, -kPointerSize); | 2782 __ addiu(sp, sp, -kPointerSize); |
2770 __ CallRuntime(Runtime::kCompareIC_Miss, 3, kDontSaveFPRegs, | 2783 __ CallRuntime(Runtime::kCompareIC_Miss, 3, kDontSaveFPRegs, |
2771 USE_DELAY_SLOT); | 2784 USE_DELAY_SLOT); |
2772 __ sw(t0, MemOperand(sp)); // In the delay slot. | 2785 __ sw(t0, MemOperand(sp)); // In the delay slot. |
2773 // Compute the entry point of the rewritten stub. | 2786 // Compute the entry point of the rewritten stub. |
2774 __ Addu(a2, v0, Operand(Code::kHeaderSize - kHeapObjectTag)); | 2787 if (!IsMipsArchVariant(kMips32r6)) { |
ivica.bogosavljevic
2016/12/05 10:48:47
same as above
| |
2788 __ Addu(a2, v0, Operand(Code::kHeaderSize - kHeapObjectTag)); | |
2789 } | |
2775 // Restore registers. | 2790 // Restore registers. |
2776 __ Pop(a1, a0, ra); | 2791 __ Pop(a1, a0, ra); |
2777 } | 2792 } |
2778 __ Jump(a2); | 2793 if (IsMipsArchVariant(kMips32r6)) { |
2794 __ Jump(v0, Code::kHeaderSize - kHeapObjectTag); | |
2795 } else { | |
2796 __ Jump(a2); | |
2797 } | |
2779 } | 2798 } |
2780 | 2799 |
2781 | 2800 |
2782 void DirectCEntryStub::Generate(MacroAssembler* masm) { | 2801 void DirectCEntryStub::Generate(MacroAssembler* masm) { |
2783 // Make place for arguments to fit C calling convention. Most of the callers | 2802 // Make place for arguments to fit C calling convention. Most of the callers |
2784 // of DirectCEntryStub::GenerateCall are using EnterExitFrame/LeaveExitFrame | 2803 // of DirectCEntryStub::GenerateCall are using EnterExitFrame/LeaveExitFrame |
2785 // so they handle stack restoring and we don't have to do that here. | 2804 // so they handle stack restoring and we don't have to do that here. |
2786 // Any caller of DirectCEntryStub::GenerateCall must take care of dropping | 2805 // Any caller of DirectCEntryStub::GenerateCall must take care of dropping |
2787 // kCArgsSlotsSize stack space after the call. | 2806 // kCArgsSlotsSize stack space after the call. |
2788 __ Subu(sp, sp, Operand(kCArgsSlotsSize)); | 2807 __ Subu(sp, sp, Operand(kCArgsSlotsSize)); |
(...skipping 1977 matching lines...) | |
4766 kStackUnwindSpace, kInvalidStackOffset, | 4785 kStackUnwindSpace, kInvalidStackOffset, |
4767 return_value_operand, NULL); | 4786 return_value_operand, NULL); |
4768 } | 4787 } |
4769 | 4788 |
4770 #undef __ | 4789 #undef __ |
4771 | 4790 |
4772 } // namespace internal | 4791 } // namespace internal |
4773 } // namespace v8 | 4792 } // namespace v8 |
4774 | 4793 |
4775 #endif // V8_TARGET_ARCH_MIPS | 4794 #endif // V8_TARGET_ARCH_MIPS |