OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #if V8_TARGET_ARCH_MIPS | 7 #if V8_TARGET_ARCH_MIPS |
8 | 8 |
9 #include "src/bootstrapper.h" | 9 #include "src/bootstrapper.h" |
10 #include "src/code-stubs.h" | 10 #include "src/code-stubs.h" |
(...skipping 1120 matching lines...)
1131 __ InvokeBuiltin(native, JUMP_FUNCTION); | 1131 __ InvokeBuiltin(native, JUMP_FUNCTION); |
1132 | 1132 |
1133 __ bind(&miss); | 1133 __ bind(&miss); |
1134 GenerateMiss(masm); | 1134 GenerateMiss(masm); |
1135 } | 1135 } |
1136 | 1136 |
1137 | 1137 |
1138 void StoreRegistersStateStub::Generate(MacroAssembler* masm) { | 1138 void StoreRegistersStateStub::Generate(MacroAssembler* masm) { |
1139 __ mov(t9, ra); | 1139 __ mov(t9, ra); |
1140 __ pop(ra); | 1140 __ pop(ra); |
1141 if (save_doubles_ == kSaveFPRegs) { | 1141 __ PushSafepointRegisters(); |
1142 __ PushSafepointRegistersAndDoubles(); | |
1143 } else { | |
1144 __ PushSafepointRegisters(); | |
1145 } | |
1146 __ Jump(t9); | 1142 __ Jump(t9); |
1147 } | 1143 } |
1148 | 1144 |
1149 | 1145 |
1150 void RestoreRegistersStateStub::Generate(MacroAssembler* masm) { | 1146 void RestoreRegistersStateStub::Generate(MacroAssembler* masm) { |
1151 __ mov(t9, ra); | 1147 __ mov(t9, ra); |
1152 __ pop(ra); | 1148 __ pop(ra); |
1153 __ StoreToSafepointRegisterSlot(t9, t9); | 1149 __ PopSafepointRegisters(); |
1154 if (save_doubles_ == kSaveFPRegs) { | |
1155 __ PopSafepointRegistersAndDoubles(); | |
1156 } else { | |
1157 __ PopSafepointRegisters(); | |
1158 } | |
1159 __ Jump(t9); | 1150 __ Jump(t9); |
1160 } | 1151 } |
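For context: with the save_doubles_ mode gone, both stubs reduce to a plain safepoint push/pop around the caller's return address. They assume the caller has pushed its own ra immediately before calling them, which is why each stub stashes its return address in t9, pops the caller's ra, and jumps back through t9. A hypothetical caller-side sketch of that convention, assuming only the two stub classes and standard MacroAssembler calls (the real call sites are outside this hunk):

    // Hypothetical call-site sketch; the actual callers are not shown in this
    // hunk. The caller saves its ra, calls the store stub so the registers are
    // spilled to safepoint slots, runs code that relies on that snapshot, and
    // then restores the registers the same way.
    StoreRegistersStateStub store_stub(isolate);
    RestoreRegistersStateStub restore_stub(isolate);

    __ push(ra);
    __ Call(store_stub.GetCode(), RelocInfo::CODE_TARGET);
    // ... code that relies on the safepoint register snapshot ...
    __ push(ra);
    __ Call(restore_stub.GetCode(), RelocInfo::CODE_TARGET);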
1161 | 1152 |
1162 | 1153 |
1163 void StoreBufferOverflowStub::Generate(MacroAssembler* masm) { | 1154 void StoreBufferOverflowStub::Generate(MacroAssembler* masm) { |
1164 // We don't allow a GC during a store buffer overflow so there is no need to | 1155 // We don't allow a GC during a store buffer overflow so there is no need to |
1165 // store the registers in any particular way, but we do have to store and | 1156 // store the registers in any particular way, but we do have to store and |
1166 // restore them. | 1157 // restore them. |
1167 __ MultiPush(kJSCallerSaved | ra.bit()); | 1158 __ MultiPush(kJSCallerSaved | ra.bit()); |
1168 if (save_doubles_ == kSaveFPRegs) { | 1159 if (save_doubles_ == kSaveFPRegs) { |
(...skipping 245 matching lines...)
1414 StubFailureTrampolineStub::GenerateAheadOfTime(isolate); | 1405 StubFailureTrampolineStub::GenerateAheadOfTime(isolate); |
1415 ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate); | 1406 ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate); |
1416 CreateAllocationSiteStub::GenerateAheadOfTime(isolate); | 1407 CreateAllocationSiteStub::GenerateAheadOfTime(isolate); |
1417 BinaryOpICStub::GenerateAheadOfTime(isolate); | 1408 BinaryOpICStub::GenerateAheadOfTime(isolate); |
1418 StoreRegistersStateStub::GenerateAheadOfTime(isolate); | 1409 StoreRegistersStateStub::GenerateAheadOfTime(isolate); |
1419 RestoreRegistersStateStub::GenerateAheadOfTime(isolate); | 1410 RestoreRegistersStateStub::GenerateAheadOfTime(isolate); |
1420 BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate); | 1411 BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate); |
1421 } | 1412 } |
1422 | 1413 |
1423 | 1414 |
1424 void StoreRegistersStateStub::GenerateAheadOfTime( | 1415 void StoreRegistersStateStub::GenerateAheadOfTime(Isolate* isolate) { |
1425 Isolate* isolate) { | 1416 StoreRegistersStateStub stub(isolate); |
1426 StoreRegistersStateStub stub1(isolate, kDontSaveFPRegs); | 1417 stub.GetCode(); |
1427 stub1.GetCode(); | |
1428 // Hydrogen code stubs need stub2 at snapshot time. | |
1429 StoreRegistersStateStub stub2(isolate, kSaveFPRegs); | |
1430 stub2.GetCode(); | |
1431 } | 1418 } |
1432 | 1419 |
1433 | 1420 |
1434 void RestoreRegistersStateStub::GenerateAheadOfTime( | 1421 void RestoreRegistersStateStub::GenerateAheadOfTime(Isolate* isolate) { |
1435 Isolate* isolate) { | 1422 RestoreRegistersStateStub stub(isolate); |
1436 RestoreRegistersStateStub stub1(isolate, kDontSaveFPRegs); | 1423 stub.GetCode(); |
1437 stub1.GetCode(); | |
1438 // Hydrogen code stubs need stub2 at snapshot time. | |
1439 RestoreRegistersStateStub stub2(isolate, kSaveFPRegs); | |
1440 stub2.GetCode(); | |
1441 } | 1424 } |
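The matching declarations live in src/code-stubs.h, which is not part of this hunk. A minimal sketch of what the simplified stubs presumably look like once the SaveFPRegsMode constructor argument is dropped; everything beyond the two class names and PlatformCodeStub is an assumption:

    // Sketch only: the real declarations live in src/code-stubs.h, outside
    // this hunk, and carry additional key/dispatch plumbing omitted here.
    class StoreRegistersStateStub : public PlatformCodeStub {
     public:
      explicit StoreRegistersStateStub(Isolate* isolate)
          : PlatformCodeStub(isolate) {}

      static void GenerateAheadOfTime(Isolate* isolate);

     private:
      void Generate(MacroAssembler* masm);
    };

    class RestoreRegistersStateStub : public PlatformCodeStub {
     public:
      explicit RestoreRegistersStateStub(Isolate* isolate)
          : PlatformCodeStub(isolate) {}

      static void GenerateAheadOfTime(Isolate* isolate);

     private:
      void Generate(MacroAssembler* masm);
    };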
1442 | 1425 |
1443 | 1426 |
1444 void CodeStub::GenerateFPStubs(Isolate* isolate) { | 1427 void CodeStub::GenerateFPStubs(Isolate* isolate) { |
1445 SaveFPRegsMode mode = kSaveFPRegs; | 1428 SaveFPRegsMode mode = kSaveFPRegs; |
1446 CEntryStub save_doubles(isolate, 1, mode); | 1429 CEntryStub save_doubles(isolate, 1, mode); |
1447 StoreBufferOverflowStub stub(isolate, mode); | 1430 StoreBufferOverflowStub stub(isolate, mode); |
1448 // These stubs might already be in the snapshot, detect that and don't | 1431 // These stubs might already be in the snapshot, detect that and don't |
1449 // regenerate, which would lead to code stub initialization state being messed | 1432 // regenerate, which would lead to code stub initialization state being messed |
1450 // up. | 1433 // up. |
(...skipping 3836 matching lines...)
5287 MemOperand(fp, 6 * kPointerSize), | 5270 MemOperand(fp, 6 * kPointerSize), |
5288 NULL); | 5271 NULL); |
5289 } | 5272 } |
5290 | 5273 |
5291 | 5274 |
5292 #undef __ | 5275 #undef __ |
5293 | 5276 |
5294 } } // namespace v8::internal | 5277 } } // namespace v8::internal |
5295 | 5278 |
5296 #endif // V8_TARGET_ARCH_MIPS | 5279 #endif // V8_TARGET_ARCH_MIPS |