OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 1216 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1227 | 1227 |
1228 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater) | 1228 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater) |
1229 // tagged as a small integer. | 1229 // tagged as a small integer. |
1230 __ InvokeBuiltin(native, JUMP_FUNCTION); | 1230 __ InvokeBuiltin(native, JUMP_FUNCTION); |
1231 | 1231 |
1232 __ bind(&miss); | 1232 __ bind(&miss); |
1233 GenerateMiss(masm); | 1233 GenerateMiss(masm); |
1234 } | 1234 } |
1235 | 1235 |
1236 | 1236 |
// Records the current register state in safepoint slots before entering
// code that needs it (e.g. profiling). Tail-calls back to the caller.
void StoreRegistersStateStub::Generate(MacroAssembler* masm) {
  // Stash this stub's return address in t9, then reload ra from the stack
  // so the pushed safepoint state reflects the caller's ra, not ours.
  // NOTE(review): assumes the call site pushed the ra value to restore —
  // verify against callers.
  __ mov(t9, ra);
  __ pop(ra);
  if (save_doubles_ == kSaveFPRegs) {
    __ PushSafepointRegistersAndDoubles();
  } else {
    __ PushSafepointRegisters();
  }
  // Return to the caller via the address saved in t9.
  __ Jump(t9);
}
| 1247 |
| 1248 |
// Inverse of StoreRegistersStateStub: pops the safepoint register state
// pushed earlier and tail-calls back to the caller.
void RestoreRegistersStateStub::Generate(MacroAssembler* masm) {
  // Stash this stub's return address in t9 and reload ra from the stack,
  // mirroring the store stub's prologue.
  __ mov(t9, ra);
  __ pop(ra);
  // Write t9 into its own safepoint slot so the subsequent pop restores
  // the correct value into t9 along with the other registers.
  __ StoreToSafepointRegisterSlot(t9, t9);
  if (save_doubles_ == kSaveFPRegs) {
    __ PopSafepointRegistersAndDoubles();
  } else {
    __ PopSafepointRegisters();
  }
  // Return to the caller via the address saved in t9.
  __ Jump(t9);
}
| 1260 |
| 1261 |
1237 void StoreBufferOverflowStub::Generate(MacroAssembler* masm) { | 1262 void StoreBufferOverflowStub::Generate(MacroAssembler* masm) { |
1238 // We don't allow a GC during a store buffer overflow so there is no need to | 1263 // We don't allow a GC during a store buffer overflow so there is no need to |
1239 // store the registers in any particular way, but we do have to store and | 1264 // store the registers in any particular way, but we do have to store and |
1240 // restore them. | 1265 // restore them. |
1241 __ MultiPush(kJSCallerSaved | ra.bit()); | 1266 __ MultiPush(kJSCallerSaved | ra.bit()); |
1242 if (save_doubles_ == kSaveFPRegs) { | 1267 if (save_doubles_ == kSaveFPRegs) { |
1243 __ MultiPushFPU(kCallerSavedFPU); | 1268 __ MultiPushFPU(kCallerSavedFPU); |
1244 } | 1269 } |
1245 const int argument_count = 1; | 1270 const int argument_count = 1; |
1246 const int fp_argument_count = 0; | 1271 const int fp_argument_count = 0; |
(...skipping 247 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1494 | 1519 |
1495 | 1520 |
// Pre-generates the code stubs that must already exist when other code
// (e.g. the snapshot) is created, so they are not generated lazily at an
// unsafe time.
void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
  CEntryStub::GenerateAheadOfTime(isolate);
  WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime(isolate);
  StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
  StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
  ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
  CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
  BinaryOpICStub::GenerateAheadOfTime(isolate);
  StoreRegistersStateStub::GenerateAheadOfTime(isolate);
  RestoreRegistersStateStub::GenerateAheadOfTime(isolate);
}
1505 | 1532 |
1506 | 1533 |
| 1534 void StoreRegistersStateStub::GenerateAheadOfTime( |
| 1535 Isolate* isolate) { |
| 1536 StoreRegistersStateStub stub1(kDontSaveFPRegs); |
| 1537 stub1.GetCode(isolate); |
| 1538 // Hydrogen code stubs need stub2 at snapshot time. |
| 1539 StoreRegistersStateStub stub2(kSaveFPRegs); |
| 1540 stub2.GetCode(isolate); |
| 1541 } |
| 1542 |
| 1543 |
| 1544 void RestoreRegistersStateStub::GenerateAheadOfTime( |
| 1545 Isolate* isolate) { |
| 1546 RestoreRegistersStateStub stub1(kDontSaveFPRegs); |
| 1547 stub1.GetCode(isolate); |
| 1548 // Hydrogen code stubs need stub2 at snapshot time. |
| 1549 RestoreRegistersStateStub stub2(kSaveFPRegs); |
| 1550 stub2.GetCode(isolate); |
| 1551 } |
| 1552 |
| 1553 |
1507 void CodeStub::GenerateFPStubs(Isolate* isolate) { | 1554 void CodeStub::GenerateFPStubs(Isolate* isolate) { |
1508 SaveFPRegsMode mode = kSaveFPRegs; | 1555 SaveFPRegsMode mode = kSaveFPRegs; |
1509 CEntryStub save_doubles(1, mode); | 1556 CEntryStub save_doubles(1, mode); |
1510 StoreBufferOverflowStub stub(mode); | 1557 StoreBufferOverflowStub stub(mode); |
1511 // These stubs might already be in the snapshot, detect that and don't | 1558 // These stubs might already be in the snapshot, detect that and don't |
1512 // regenerate, which would lead to code stub initialization state being messed | 1559 // regenerate, which would lead to code stub initialization state being messed |
1513 // up. | 1560 // up. |
1514 Code* save_doubles_code; | 1561 Code* save_doubles_code; |
1515 if (!save_doubles.FindCodeInCache(&save_doubles_code, isolate)) { | 1562 if (!save_doubles.FindCodeInCache(&save_doubles_code, isolate)) { |
1516 save_doubles_code = *save_doubles.GetCode(isolate); | 1563 save_doubles_code = *save_doubles.GetCode(isolate); |
(...skipping 4426 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
5943 __ bind(&fast_elements_case); | 5990 __ bind(&fast_elements_case); |
5944 GenerateCase(masm, FAST_ELEMENTS); | 5991 GenerateCase(masm, FAST_ELEMENTS); |
5945 } | 5992 } |
5946 | 5993 |
5947 | 5994 |
5948 #undef __ | 5995 #undef __ |
5949 | 5996 |
5950 } } // namespace v8::internal | 5997 } } // namespace v8::internal |
5951 | 5998 |
5952 #endif // V8_TARGET_ARCH_MIPS | 5999 #endif // V8_TARGET_ARCH_MIPS |
OLD | NEW |